pub mod fs;
mod ignore;
mod lsp_command;
pub mod search;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, HashMap, HashSet};
use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
    UpgradeModelHandle, WeakModelHandle,
};
use language::{
    proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
    range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
    Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
    ToLspPosition, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
use lsp_command::*;
use postage::watch;
use rand::prelude::*;
use search::SearchQuery;
use sha2::{Digest, Sha256};
use smol::block_on;
use std::{
    cell::RefCell,
    cmp,
    convert::TryInto,
    hash::Hash,
    mem,
    ops::Range,
    path::{Component, Path, PathBuf},
    rc::Rc,
    sync::{atomic::AtomicBool, Arc},
    time::Instant,
};
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use fs::*;
pub use worktree::*;

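/// Owns the state for a single open project: its worktrees, open buffers,
/// running language servers, and (when collaborating) the collaborators
/// connected over RPC. A project is either local, backed by the filesystem,
/// or remote, mirroring a project shared by another peer.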
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntry>,
    languages: Arc<LanguageRegistry>,
    language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
    started_language_servers:
        HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
    client: Arc<client::Client>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    language_servers_with_diagnostics_running: isize,
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    nonce: u128,
}

enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}

enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

enum ProjectClientState {
    Local {
        is_shared: bool,
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        _detect_unshare_task: Task<Option<()>>,
    },
}

#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}

#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntry>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
}

#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}

#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}

#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

#[derive(Clone, Debug)]
pub struct Symbol {
    pub source_worktree_id: WorktreeId,
    pub worktree_id: WorktreeId,
    pub language_name: String,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    pub signature: [u8; 32],
}

#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
157
158impl DiagnosticSummary {
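    /// Tallies diagnostics by severity, counting only primary entries so that
    /// related/supporting diagnostics don't inflate the totals.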
    fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
        let mut this = Self {
            error_count: 0,
            warning_count: 0,
            info_count: 0,
            hint_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    DiagnosticSeverity::INFORMATION => this.info_count += 1,
                    DiagnosticSeverity::HINT => this.hint_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
            info_count: self.info_count as u32,
            hint_count: self.hint_count as u32,
        }
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ProjectEntry {
    pub worktree_id: WorktreeId,
    pub entry_id: usize,
}

impl Project {
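    /// Registers all of the project's RPC message and request handlers on the
    /// client; intended to run once, before any project is shared or joined.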
    pub fn init(client: &Arc<Client>) {
        client.add_entity_message_handler(Self::handle_add_collaborator);
        client.add_entity_message_handler(Self::handle_buffer_reloaded);
        client.add_entity_message_handler(Self::handle_buffer_saved);
        client.add_entity_message_handler(Self::handle_close_buffer);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
        client.add_entity_message_handler(Self::handle_remove_collaborator);
        client.add_entity_message_handler(Self::handle_register_worktree);
        client.add_entity_message_handler(Self::handle_unregister_worktree);
        client.add_entity_message_handler(Self::handle_unshare_project);
        client.add_entity_message_handler(Self::handle_update_buffer_file);
        client.add_entity_message_handler(Self::handle_update_buffer);
        client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
        client.add_entity_message_handler(Self::handle_update_worktree);
        client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_entity_request_handler(Self::handle_apply_code_action);
        client.add_entity_request_handler(Self::handle_format_buffers);
        client.add_entity_request_handler(Self::handle_get_code_actions);
        client.add_entity_request_handler(Self::handle_get_completions);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_entity_request_handler(Self::handle_search_project);
        client.add_entity_request_handler(Self::handle_get_project_symbols);
        client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_entity_request_handler(Self::handle_open_buffer);
        client.add_entity_request_handler(Self::handle_save_buffer);
    }

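    /// Creates a project backed by the local filesystem. A background task
    /// watches the client's connection status and registers the project with
    /// the server (obtaining a remote id) whenever the client is connected.
    ///
    /// Illustrative sketch only (not compiled); assumes `client`, `user_store`,
    /// `languages`, `fs`, and a `MutableAppContext` named `cx` already exist:
    ///
    /// ```ignore
    /// let project = Project::local(client, user_store, languages, fs, cx);
    /// ```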
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.next().await {
                            if let Some(this) = this.upgrade(&cx) {
                                let remote_id = if status.is_connected() {
                                    let response = rpc.request(proto::RegisterProject {}).await?;
                                    Some(response.project_id)
                                } else {
                                    None
                                };

                                if let Some(project_id) = remote_id {
                                    let mut registrations = Vec::new();
                                    this.update(&mut cx, |this, cx| {
                                        for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                                            registrations.push(worktree.update(
                                                cx,
                                                |worktree, cx| {
                                                    let worktree = worktree.as_local_mut().unwrap();
                                                    worktree.register(project_id, cx)
                                                },
                                            ));
                                        }
                                    });
                                    for registration in registrations {
                                        registration.await?;
                                    }
                                }
                                this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }

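    /// Joins a project that another peer has shared, identified by `remote_id`.
    /// Authenticates and connects the client, requests the project's worktrees
    /// and collaborators from the server, and mirrors them locally.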
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(&cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.project_unshared(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                opened_buffers: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::test());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
        self.opened_buffers
            .get(&remote_id)
            .and_then(|buffer| buffer.upgrade(cx))
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
        let path = path.into();
        if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
            self.opened_buffers.iter().any(|(_, buffer)| {
                if let Some(buffer) = buffer.upgrade(cx) {
                    if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                        if file.worktree == worktree && file.path() == &path.path {
                            return true;
                        }
                    }
                }
                false
            })
        } else {
            false
        }
    }

    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }

    fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            *remote_id_tx.borrow_mut() = remote_id;
        }

        self.subscriptions.clear();
        if let Some(remote_id) = remote_id {
            self.subscriptions
                .push(self.client.add_model_for_remote_entity(remote_id, cx));
        }
    }

    pub fn remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }

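    /// Resolves to the project's remote id, waiting until one has been
    /// assigned if the project hasn't been registered with the server yet.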
    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }

    pub fn replica_id(&self) -> ReplicaId {
        match &self.client_state {
            ProjectClientState::Local { .. } => 0,
            ProjectClientState::Remote { replica_id, .. } => *replica_id,
        }
    }

    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }

    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }

    pub fn visible_worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees.iter().filter_map(|worktree| {
            worktree.upgrade(cx).and_then(|worktree| {
                if worktree.read(cx).is_visible() {
                    Some(worktree)
                } else {
                    None
                }
            })
        })
    }

    pub fn worktree_for_id(
        &self,
        id: WorktreeId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).id() == id)
    }

    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;

                    for open_buffer in this.opened_buffers.values_mut() {
                        match open_buffer {
                            OpenBuffer::Strong(_) => {}
                            OpenBuffer::Weak(buffer) => {
                                if let Some(buffer) = buffer.upgrade(cx) {
                                    *open_buffer = OpenBuffer::Strong(buffer);
                                }
                            }
                            OpenBuffer::Loading(_) => unreachable!(),
                        }
                    }

                    for worktree_handle in this.worktrees.iter_mut() {
                        match worktree_handle {
                            WorktreeHandle::Strong(_) => {}
                            WorktreeHandle::Weak(worktree) => {
                                if let Some(worktree) = worktree.upgrade(cx) {
                                    *worktree_handle = WorktreeHandle::Strong(worktree);
                                }
                            }
                        }
                    }

                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;

            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }

    pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = false;

                    for open_buffer in this.opened_buffers.values_mut() {
                        match open_buffer {
                            OpenBuffer::Strong(buffer) => {
                                *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                            }
                            _ => {}
                        }
                    }

                    for worktree_handle in this.worktrees.iter_mut() {
                        match worktree_handle {
                            WorktreeHandle::Strong(worktree) => {
                                if !worktree.read(cx).is_visible() {
                                    *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                                }
                            }
                            _ => {}
                        }
                    }

                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.send(proto::UnshareProject { project_id })?;
            this.update(&mut cx, |this, cx| {
                this.collaborators.clear();
                this.shared_buffers.clear();
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                    });
                }
                cx.notify()
            });
            Ok(())
        })
    }

    fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        } = &mut self.client_state
        {
            *sharing_has_stopped = true;
            self.collaborators.clear();
            cx.notify();
        }
    }

    pub fn is_read_only(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => false,
            ProjectClientState::Remote {
                sharing_has_stopped,
                ..
            } => *sharing_has_stopped,
        }
    }

    pub fn is_local(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => true,
            ProjectClientState::Remote { .. } => false,
        }
    }

    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }

    pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
        if self.is_remote() {
            return Err(anyhow!("creating buffers as a guest is not supported yet"));
        }

        let buffer = cx.add_model(|cx| {
            Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
        });
        self.register_buffer(&buffer, None, cx)?;
        Ok(buffer)
    }

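    /// Opens the buffer for `path`, deduplicating concurrent requests: if the
    /// buffer is already open it is returned directly, and if a load for the
    /// same path is in flight the caller awaits that load instead of starting
    /// a second one.
    ///
    /// Illustrative sketch only (not compiled); `worktree_id` and the context
    /// `cx` are assumed to come from the caller:
    ///
    /// ```ignore
    /// let path = ProjectPath {
    ///     worktree_id,
    ///     path: Path::new("src/main.rs").into(),
    /// };
    /// let buffer = project
    ///     .update(cx, |project, cx| project.open_buffer(path, cx))
    ///     .await?;
    /// ```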
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }

    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        let worktree = worktree.downgrade();
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            let worktree = worktree
                .upgrade(&cx)
                .ok_or_else(|| anyhow!("worktree was removed"))?;
            this.update(&mut cx, |this, cx| {
                this.register_buffer(&buffer, Some(&worktree), cx)
            })?;
            Ok(buffer)
        })
    }

    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBuffer {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }

    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lang_name: String,
        lang_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers
                        .insert((worktree.read(cx).id(), lang_name), lang_server);
                });
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }

    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
            });
            Ok(())
        })
    }

    pub fn get_open_buffer(
        &mut self,
        path: &ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Option<ModelHandle<Buffer>> {
        let worktree = self.worktree_for_id(path.worktree_id, cx)?;
        self.opened_buffers.values().find_map(|buffer| {
            let buffer = buffer.upgrade(cx)?;
            let file = File::from_dyn(buffer.read(cx).file())?;
            if file.worktree == worktree && file.path() == &path.path {
                Some(buffer)
            } else {
                None
            }
        })
    }

    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            Some(OpenBuffer::Weak(existing_handle)) => {
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        self.assign_language_to_buffer(&buffer, worktree, cx);
        Ok(())
    }

    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        let (path, full_path) = {
            let file = buffer.read(cx).file()?;
            (file.path().clone(), file.full_path(cx))
        };

        // If the buffer has a language, set it and start/assign the language server
        if let Some(language) = self.languages.select_language(&full_path) {
            buffer.update(cx, |buffer, cx| {
                buffer.set_language(Some(language.clone()), cx);
            });

            // For local worktrees, start a language server if needed.
            // Also assign the language server and any previously stored diagnostics to the buffer.
            if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
                let worktree_id = local_worktree.id();
                let worktree_abs_path = local_worktree.abs_path().clone();
                let buffer = buffer.downgrade();
                let language_server =
                    self.start_language_server(worktree_id, worktree_abs_path, language, cx);

                cx.spawn_weak(|_, mut cx| async move {
                    if let Some(language_server) = language_server.await {
                        if let Some(buffer) = buffer.upgrade(&cx) {
                            buffer.update(&mut cx, |buffer, cx| {
                                buffer.set_language_server(Some(language_server), cx);
                            });
                        }
                    }
                })
                .detach();
            }
        }

        if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
            if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
                buffer.update(cx, |buffer, cx| {
                    buffer.update_diagnostics(diagnostics, None, cx).log_err();
                });
            }
        }

        None
    }

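    /// Starts (or reuses) the language server for the given worktree and
    /// language. Started servers are memoized per `(worktree_id, language)`,
    /// and their diagnostics and progress notifications are forwarded as
    /// project events and, when the project is shared, over RPC.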
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
        enum LspEvent {
            DiagnosticsStart,
            DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
            DiagnosticsFinish,
        }

        let key = (worktree_id, language.name().to_string());
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let language_server = self.languages.start_language_server(
                    language.clone(),
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                let rpc = self.client.clone();
                cx.spawn_weak(|this, mut cx| async move {
                    let language_server = language_server?.await.log_err()?;
                    if let Some(this) = this.upgrade(&cx) {
                        this.update(&mut cx, |this, _| {
                            this.language_servers.insert(key, language_server.clone());
                        });
                    }

                    let disk_based_sources = language
                        .disk_based_diagnostic_sources()
                        .cloned()
                        .unwrap_or_default();
                    let disk_based_diagnostics_progress_token =
                        language.disk_based_diagnostics_progress_token().cloned();
                    let has_disk_based_diagnostic_progress_token =
                        disk_based_diagnostics_progress_token.is_some();
                    let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();

                    // Listen for `PublishDiagnostics` notifications.
                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let diagnostics_tx = diagnostics_tx.clone();
                            move |params| {
                                if !has_disk_based_diagnostic_progress_token {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                                }
                                block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
                                    .ok();
                                if !has_disk_based_diagnostic_progress_token {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                                }
                            }
                        })
                        .detach();

                    // Listen for `Progress` notifications. Send an event when the language server
                    // transitions between running jobs and not running any jobs.
                    let mut running_jobs_for_this_server: i32 = 0;
                    language_server
                        .on_notification::<lsp::notification::Progress, _>(move |params| {
                            let token = match params.token {
                                lsp::NumberOrString::Number(_) => None,
                                lsp::NumberOrString::String(token) => Some(token),
                            };

                            if token == disk_based_diagnostics_progress_token {
                                match params.value {
                                    lsp::ProgressParamsValue::WorkDone(progress) => {
                                        match progress {
                                            lsp::WorkDoneProgress::Begin(_) => {
                                                running_jobs_for_this_server += 1;
                                                if running_jobs_for_this_server == 1 {
                                                    block_on(
                                                        diagnostics_tx
                                                            .send(LspEvent::DiagnosticsStart),
                                                    )
                                                    .ok();
                                                }
                                            }
                                            lsp::WorkDoneProgress::End(_) => {
                                                running_jobs_for_this_server -= 1;
                                                if running_jobs_for_this_server == 0 {
                                                    block_on(
                                                        diagnostics_tx
                                                            .send(LspEvent::DiagnosticsFinish),
                                                    )
                                                    .ok();
                                                }
                                            }
                                            _ => {}
                                        }
                                    }
                                }
                            }
                        })
                        .detach();

                    // Process all the LSP events.
                    cx.spawn(|mut cx| async move {
                        while let Ok(message) = diagnostics_rx.recv().await {
                            let this = this.upgrade(&cx)?;
                            match message {
                                LspEvent::DiagnosticsStart => {
                                    this.update(&mut cx, |this, cx| {
                                        this.disk_based_diagnostics_started(cx);
                                        if let Some(project_id) = this.remote_id() {
                                            rpc.send(proto::DiskBasedDiagnosticsUpdating {
                                                project_id,
                                            })
                                            .log_err();
                                        }
                                    });
                                }
                                LspEvent::DiagnosticsUpdate(mut params) => {
                                    language.process_diagnostics(&mut params);
                                    this.update(&mut cx, |this, cx| {
                                        this.update_diagnostics(params, &disk_based_sources, cx)
                                            .log_err();
                                    });
                                }
                                LspEvent::DiagnosticsFinish => {
                                    this.update(&mut cx, |this, cx| {
                                        this.disk_based_diagnostics_finished(cx);
                                        if let Some(project_id) = this.remote_id() {
                                            rpc.send(proto::DiskBasedDiagnosticsUpdated {
                                                project_id,
                                            })
                                            .log_err();
                                        }
                                    });
                                }
                            }
                        }
                        Some(())
                    })
                    .detach();

                    Some(language_server)
                })
                .shared()
            })
            .clone()
    }

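    /// Converts an LSP `PublishDiagnostics` notification into diagnostic
    /// entries, assigning a group id to each primary diagnostic and folding
    /// related-information entries into the same group as supporting
    /// diagnostics.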
    pub fn update_diagnostics(
        &mut self,
        params: lsp::PublishDiagnosticsParams,
        disk_based_sources: &HashSet<String>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let abs_path = params
            .uri
            .to_file_path()
            .map_err(|_| anyhow!("URI is not a file"))?;
        let mut next_group_id = 0;
        let mut diagnostics = Vec::default();
        let mut primary_diagnostic_group_ids = HashMap::default();
        let mut sources_by_group_id = HashMap::default();
        let mut supporting_diagnostic_severities = HashMap::default();
        for diagnostic in &params.diagnostics {
            let source = diagnostic.source.as_ref();
            let code = diagnostic.code.as_ref().map(|code| match code {
                lsp::NumberOrString::Number(code) => code.to_string(),
                lsp::NumberOrString::String(code) => code.clone(),
            });
            let range = range_from_lsp(diagnostic.range);
            let is_supporting = diagnostic
                .related_information
                .as_ref()
                .map_or(false, |infos| {
                    infos.iter().any(|info| {
                        primary_diagnostic_group_ids.contains_key(&(
                            source,
                            code.clone(),
                            range_from_lsp(info.location.range),
                        ))
                    })
                });

            if is_supporting {
                if let Some(severity) = diagnostic.severity {
                    supporting_diagnostic_severities
                        .insert((source, code.clone(), range), severity);
                }
            } else {
                let group_id = post_inc(&mut next_group_id);
                let is_disk_based =
                    source.map_or(false, |source| disk_based_sources.contains(source));

                sources_by_group_id.insert(group_id, source);
                primary_diagnostic_group_ids
                    .insert((source, code.clone(), range.clone()), group_id);

                diagnostics.push(DiagnosticEntry {
                    range,
                    diagnostic: Diagnostic {
                        code: code.clone(),
                        severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
                        message: diagnostic.message.clone(),
                        group_id,
                        is_primary: true,
                        is_valid: true,
                        is_disk_based,
                    },
                });
                if let Some(infos) = &diagnostic.related_information {
                    for info in infos {
                        if info.location.uri == params.uri && !info.message.is_empty() {
                            let range = range_from_lsp(info.location.range);
                            diagnostics.push(DiagnosticEntry {
                                range,
                                diagnostic: Diagnostic {
                                    code: code.clone(),
                                    severity: DiagnosticSeverity::INFORMATION,
                                    message: info.message.clone(),
                                    group_id,
                                    is_primary: false,
                                    is_valid: true,
                                    is_disk_based,
                                },
                            });
                        }
                    }
                }
            }
        }

        for entry in &mut diagnostics {
            let diagnostic = &mut entry.diagnostic;
            if !diagnostic.is_primary {
                let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
                if let Some(&severity) = supporting_diagnostic_severities.get(&(
                    source,
                    diagnostic.code.clone(),
                    entry.range.clone(),
                )) {
                    diagnostic.severity = severity;
                }
            }
        }

        self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
        Ok(())
    }

    pub fn update_diagnostic_entries(
        &mut self,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
        if !worktree.read(cx).is_visible() {
            return Ok(());
        }

        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };

        for buffer in self.opened_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| *file.path() == project_path.path)
                {
                    buffer.update(cx, |buffer, cx| {
                        buffer.update_diagnostics(diagnostics.clone(), version, cx)
                    })?;
                    break;
                }
            }
        }
        worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(project_path.path.clone(), diagnostics, cx)
        })?;
        cx.emit(Event::DiagnosticsUpdated(project_path));
        Ok(())
    }

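    /// Formats the given buffers, producing a single `ProjectTransaction`.
    /// Local buffers go through their language server's document (or range)
    /// formatting request; buffers belonging to a remote project are formatted
    /// by the host via a `FormatBuffers` RPC request.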
    pub fn format(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<ProjectTransaction>> {
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            let worktree;
            if let Some(file) = File::from_dyn(buffer.file()) {
                worktree = file.worktree.clone();
                if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
                    let lang_server;
                    if let Some(lang) = buffer.language() {
                        if let Some(server) = self
                            .language_servers
                            .get(&(worktree.read(cx).id(), lang.name().to_string()))
                        {
                            lang_server = server.clone();
                        } else {
                            return Task::ready(Ok(Default::default()));
                        };
                    } else {
                        return Task::ready(Ok(Default::default()));
                    }

                    local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
                } else {
                    remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                }
            } else {
                return Task::ready(Ok(Default::default()));
            }
        }

        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::FormatBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            for (buffer, buffer_abs_path, lang_server) in local_buffers {
                let capabilities = if let Some(capabilities) = lang_server.capabilities().await {
                    capabilities
                } else {
                    continue;
                };

                let text_document = lsp::TextDocumentIdentifier::new(
                    lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                );
                let lsp_edits = if capabilities
                    .document_formatting_provider
                    .map_or(false, |provider| provider != lsp::OneOf::Left(false))
                {
                    lang_server
                        .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
                            text_document,
                            options: Default::default(),
                            work_done_progress_params: Default::default(),
                        })
                        .await?
                } else if capabilities
                    .document_range_formatting_provider
                    .map_or(false, |provider| provider != lsp::OneOf::Left(false))
                {
                    let buffer_start = lsp::Position::new(0, 0);
                    let buffer_end = buffer
                        .read_with(&cx, |buffer, _| buffer.max_point_utf16())
                        .to_lsp_position();
                    lang_server
                        .request::<lsp::request::RangeFormatting>(
                            lsp::DocumentRangeFormattingParams {
                                text_document,
                                range: lsp::Range::new(buffer_start, buffer_end),
                                options: Default::default(),
                                work_done_progress_params: Default::default(),
                            },
                        )
                        .await?
                } else {
                    continue;
                };

                if let Some(lsp_edits) = lsp_edits {
                    let edits = buffer
                        .update(&mut cx, |buffer, cx| {
                            buffer.edits_from_lsp(lsp_edits, None, cx)
                        })
                        .await?;
                    buffer.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            project_transaction.0.insert(cx.handle(), transaction);
                        }
                    });
                }
            }

            Ok(project_transaction)
        })
    }

    pub fn definition<T: ToPointUtf16>(
        &self,
        buffer: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Location>>> {
        let position = position.to_point_utf16(buffer.read(cx));
        self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
    }

    pub fn references<T: ToPointUtf16>(
        &self,
        buffer: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Location>>> {
        let position = position.to_point_utf16(buffer.read(cx));
        self.request_lsp(buffer.clone(), GetReferences { position }, cx)
    }

    pub fn document_highlights<T: ToPointUtf16>(
        &self,
        buffer: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<DocumentHighlight>>> {
        let position = position.to_point_utf16(buffer.read(cx));

        self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
    }

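    /// Searches for workspace symbols matching `query`. Locally, the query is
    /// fanned out to every running language server; for remote projects it is
    /// forwarded to the host via a `GetProjectSymbols` request.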
    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            let mut language_servers = HashMap::default();
            for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
                if let Some((worktree, language)) = self
                    .worktree_for_id(*worktree_id, cx)
                    .and_then(|worktree| worktree.read(cx).as_local())
                    .zip(self.languages.get_language(language_name))
                {
                    language_servers
                        .entry(Arc::as_ptr(language_server))
                        .or_insert((
                            language_server.clone(),
                            *worktree_id,
                            worktree.abs_path().clone(),
                            language.clone(),
                        ));
                }
            }

            let mut requests = Vec::new();
            for (language_server, _, _, _) in language_servers.values() {
                requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
                    lsp::WorkspaceSymbolParams {
                        query: query.to_string(),
                        ..Default::default()
                    },
                ));
            }

            cx.spawn_weak(|this, cx| async move {
                let responses = futures::future::try_join_all(requests).await?;

                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, cx| {
                        for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
                            language_servers.into_values().zip(responses)
                        {
                            symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
                                |lsp_symbol| {
                                    let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
                                    let mut worktree_id = source_worktree_id;
                                    let path;
                                    if let Some((worktree, rel_path)) =
                                        this.find_local_worktree(&abs_path, cx)
                                    {
                                        worktree_id = worktree.read(cx).id();
                                        path = rel_path;
                                    } else {
                                        path = relativize_path(&worktree_abs_path, &abs_path);
                                    }

                                    let label = language
                                        .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
                                        .unwrap_or_else(|| {
                                            CodeLabel::plain(lsp_symbol.name.clone(), None)
                                        });
                                    let signature = this.symbol_signature(worktree_id, &path);

                                    Some(Symbol {
                                        source_worktree_id,
                                        worktree_id,
                                        language_name: language.name().to_string(),
                                        name: lsp_symbol.name,
                                        kind: lsp_symbol.kind,
                                        label,
                                        path,
                                        range: range_from_lsp(lsp_symbol.location.range),
                                        signature,
                                    })
                                },
                            ));
                        }
                    })
                }

                Ok(symbols)
            })
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn_weak(|this, cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, _| {
                        symbols.extend(
                            response
                                .symbols
                                .into_iter()
                                .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
                        );
                    })
                }
                Ok(symbols)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }

    pub fn open_buffer_for_symbol(
        &mut self,
        symbol: &Symbol,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if self.is_local() {
            let language_server = if let Some(server) = self
                .language_servers
                .get(&(symbol.source_worktree_id, symbol.language_name.clone()))
            {
                server.clone()
            } else {
                return Task::ready(Err(anyhow!(
                    "language server for worktree and language not found"
                )));
            };

            let worktree_abs_path = if let Some(worktree_abs_path) = self
                .worktree_for_id(symbol.worktree_id, cx)
                .and_then(|worktree| worktree.read(cx).as_local())
                .map(|local_worktree| local_worktree.abs_path())
            {
                worktree_abs_path
            } else {
                return Task::ready(Err(anyhow!("worktree not found for symbol")));
            };
            let symbol_abs_path = worktree_abs_path.join(&symbol.path);
            let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
                uri
            } else {
                return Task::ready(Err(anyhow!("invalid symbol path")));
            };

            self.open_local_buffer_via_lsp(
                symbol_uri,
                symbol.language_name.clone(),
                language_server,
                cx,
            )
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(proto::OpenBufferForSymbol {
                project_id,
                symbol: Some(serialize_symbol(symbol)),
            });
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
                this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }

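    /// Requests completions at `position`. Local buffers go through the
    /// buffer's language server; remote buffers send a `GetCompletions`
    /// request and wait for the buffer to catch up to the returned version
    /// before deserializing the results.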
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
                server
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            position.to_lsp_position(),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
                                lsp::CompletionTextEdit::Edit(edit) => {
                                    (range_from_lsp(edit.range), edit.new_text.clone())
                                }
                                lsp::CompletionTextEdit::InsertAndReplace(_) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                            let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
                            if clipped_start == old_range.start && clipped_end == old_range.end {
                                Some(Completion {
                                    old_range: this.anchor_before(old_range.start)
                                        ..this.anchor_after(old_range.end),
                                    new_text,
                                    label: language
                                        .as_ref()
                                        .and_then(|l| l.label_for_completion(&lsp_completion))
                                        .unwrap_or_else(|| {
                                            CodeLabel::plain(
                                                lsp_completion.label.clone(),
                                                lsp_completion.filter_text.as_deref(),
                                            )
                                        }),
                                    lsp_completion,
                                })
                            } else {
                                None
                            }
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: serialize_version(&source_buffer.version()),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;

                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(deserialize_version(response.version))
                    })
                    .await;

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }

    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let lang_server = if let Some(language_server) = buffer.language_server() {
                language_server.clone()
            } else {
                return Task::ready(Err(anyhow!("buffer does not have a language server")));
            };

            cx.spawn(|_, mut cx| async move {
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = buffer_handle
                        .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }

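    /// Requests the code actions available for `range`, anchoring the range so
    /// the actions can later be applied with `apply_code_action`. Remote
    /// projects forward the request to the host and wait for the buffer to
    /// reach the returned version.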
1819 pub fn code_actions<T: ToOffset>(
1820 &self,
1821 buffer_handle: &ModelHandle<Buffer>,
1822 range: Range<T>,
1823 cx: &mut ModelContext<Self>,
1824 ) -> Task<Result<Vec<CodeAction>>> {
1825 let buffer_handle = buffer_handle.clone();
1826 let buffer = buffer_handle.read(cx);
1827 let buffer_id = buffer.remote_id();
1828 let worktree;
1829 let buffer_abs_path;
1830 if let Some(file) = File::from_dyn(buffer.file()) {
1831 worktree = file.worktree.clone();
1832 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1833 } else {
1834 return Task::ready(Ok(Default::default()));
1835 };
1836 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1837
1838 if worktree.read(cx).as_local().is_some() {
1839 let buffer_abs_path = buffer_abs_path.unwrap();
1840 let lang_name;
1841 let lang_server;
1842 if let Some(lang) = buffer.language() {
1843 lang_name = lang.name().to_string();
1844 if let Some(server) = self
1845 .language_servers
1846 .get(&(worktree.read(cx).id(), lang_name.clone()))
1847 {
1848 lang_server = server.clone();
1849 } else {
1850 return Task::ready(Ok(Default::default()));
1851 };
1852 } else {
1853 return Task::ready(Ok(Default::default()));
1854 }
1855
1856 let lsp_range = lsp::Range::new(
1857 range.start.to_point_utf16(buffer).to_lsp_position(),
1858 range.end.to_point_utf16(buffer).to_lsp_position(),
1859 );
1860 cx.foreground().spawn(async move {
1861 if !lang_server
1862 .capabilities()
1863 .await
1864 .map_or(false, |capabilities| {
1865 capabilities.code_action_provider.is_some()
1866 })
1867 {
1868 return Ok(Default::default());
1869 }
1870
1871 Ok(lang_server
1872 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1873 text_document: lsp::TextDocumentIdentifier::new(
1874 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1875 ),
1876 range: lsp_range,
1877 work_done_progress_params: Default::default(),
1878 partial_result_params: Default::default(),
1879 context: lsp::CodeActionContext {
1880 diagnostics: Default::default(),
1881 only: Some(vec![
1882 lsp::CodeActionKind::QUICKFIX,
1883 lsp::CodeActionKind::REFACTOR,
1884 lsp::CodeActionKind::REFACTOR_EXTRACT,
1885 ]),
1886 },
1887 })
1888 .await?
1889 .unwrap_or_default()
1890 .into_iter()
1891 .filter_map(|entry| {
1892 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1893 Some(CodeAction {
1894 range: range.clone(),
1895 lsp_action,
1896 })
1897 } else {
1898 None
1899 }
1900 })
1901 .collect())
1902 })
1903 } else if let Some(project_id) = self.remote_id() {
1904 let rpc = self.client.clone();
1905 let version = buffer.version();
1906 cx.spawn_weak(|_, mut cx| async move {
1907 let response = rpc
1908 .request(proto::GetCodeActions {
1909 project_id,
1910 buffer_id,
1911 start: Some(language::proto::serialize_anchor(&range.start)),
1912 end: Some(language::proto::serialize_anchor(&range.end)),
1913 version: serialize_version(&version),
1914 })
1915 .await?;
1916
1917 buffer_handle
1918 .update(&mut cx, |buffer, _| {
1919 buffer.wait_for_version(deserialize_version(response.version))
1920 })
1921 .await;
1922
1923 response
1924 .actions
1925 .into_iter()
1926 .map(language::proto::deserialize_code_action)
1927 .collect()
1928 })
1929 } else {
1930 Task::ready(Ok(Default::default()))
1931 }
1932 }
1933
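    // Apply a previously-fetched code action. Locally, the action is first resolved
    // (via `codeAction/resolve` when it carries resolve `data`, otherwise by
    // re-requesting actions for the same range) and its workspace edit is then applied.
    // On remote projects the whole operation is delegated to the host over RPC.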
1934 pub fn apply_code_action(
1935 &self,
1936 buffer_handle: ModelHandle<Buffer>,
1937 mut action: CodeAction,
1938 push_to_history: bool,
1939 cx: &mut ModelContext<Self>,
1940 ) -> Task<Result<ProjectTransaction>> {
1941 if self.is_local() {
1942 let buffer = buffer_handle.read(cx);
1943 let lang_name = if let Some(lang) = buffer.language() {
1944 lang.name().to_string()
1945 } else {
1946 return Task::ready(Ok(Default::default()));
1947 };
1948 let lang_server = if let Some(language_server) = buffer.language_server() {
1949 language_server.clone()
1950 } else {
1951 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1952 };
1953 let range = action.range.to_point_utf16(buffer);
1954
1955 cx.spawn(|this, mut cx| async move {
1956 if let Some(lsp_range) = action
1957 .lsp_action
1958 .data
1959 .as_mut()
1960 .and_then(|d| d.get_mut("codeActionParams"))
1961 .and_then(|d| d.get_mut("range"))
1962 {
1963 *lsp_range = serde_json::to_value(&lsp::Range::new(
1964 range.start.to_lsp_position(),
1965 range.end.to_lsp_position(),
1966 ))
1967 .unwrap();
1968 action.lsp_action = lang_server
1969 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1970 .await?;
1971 } else {
1972 let actions = this
1973 .update(&mut cx, |this, cx| {
1974 this.code_actions(&buffer_handle, action.range, cx)
1975 })
1976 .await?;
1977 action.lsp_action = actions
1978 .into_iter()
1979 .find(|a| a.lsp_action.title == action.lsp_action.title)
1980 .ok_or_else(|| anyhow!("code action is outdated"))?
1981 .lsp_action;
1982 }
1983
1984 if let Some(edit) = action.lsp_action.edit {
1985 Self::deserialize_workspace_edit(
1986 this,
1987 edit,
1988 push_to_history,
1989 lang_name,
1990 lang_server,
1991 &mut cx,
1992 )
1993 .await
1994 } else {
1995 Ok(ProjectTransaction::default())
1996 }
1997 })
1998 } else if let Some(project_id) = self.remote_id() {
1999 let client = self.client.clone();
2000 let request = proto::ApplyCodeAction {
2001 project_id,
2002 buffer_id: buffer_handle.read(cx).remote_id(),
2003 action: Some(language::proto::serialize_code_action(&action)),
2004 };
2005 cx.spawn(|this, mut cx| async move {
2006 let response = client
2007 .request(request)
2008 .await?
2009 .transaction
2010 .ok_or_else(|| anyhow!("missing transaction"))?;
2011 this.update(&mut cx, |this, cx| {
2012 this.deserialize_project_transaction(response, push_to_history, cx)
2013 })
2014 .await
2015 })
2016 } else {
2017 Task::ready(Err(anyhow!("project does not have a remote id")))
2018 }
2019 }
2020
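    // Apply an LSP workspace edit locally: resource operations (create/rename/delete)
    // go through the project's `Fs`, and text edits are applied to buffers opened via
    // the originating language server, producing a `ProjectTransaction` that maps each
    // edited buffer to its transaction.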
2021 async fn deserialize_workspace_edit(
2022 this: ModelHandle<Self>,
2023 edit: lsp::WorkspaceEdit,
2024 push_to_history: bool,
2025 language_name: String,
2026 language_server: Arc<LanguageServer>,
2027 cx: &mut AsyncAppContext,
2028 ) -> Result<ProjectTransaction> {
2029 let fs = this.read_with(cx, |this, _| this.fs.clone());
2030 let mut operations = Vec::new();
2031 if let Some(document_changes) = edit.document_changes {
2032 match document_changes {
2033 lsp::DocumentChanges::Edits(edits) => {
2034 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2035 }
2036 lsp::DocumentChanges::Operations(ops) => operations = ops,
2037 }
2038 } else if let Some(changes) = edit.changes {
2039 operations.extend(changes.into_iter().map(|(uri, edits)| {
2040 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2041 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2042 uri,
2043 version: None,
2044 },
2045 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2046 })
2047 }));
2048 }
2049
2050 let mut project_transaction = ProjectTransaction::default();
2051 for operation in operations {
2052 match operation {
2053 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2054 let abs_path = op
2055 .uri
2056 .to_file_path()
2057 .map_err(|_| anyhow!("can't convert URI to path"))?;
2058
2059 if let Some(parent_path) = abs_path.parent() {
2060 fs.create_dir(parent_path).await?;
2061 }
                    // `Path::ends_with` compares whole path components, so checking the
                    // converted path against "/" can never match; inspect the original
                    // URI for a trailing slash to decide whether this is a directory.
                    if op.uri.as_str().ends_with('/') {
                        fs.create_dir(&abs_path).await?;
                    } else {
                        fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
                            .await?;
                    }
2068 }
2069 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2070 let source_abs_path = op
2071 .old_uri
2072 .to_file_path()
2073 .map_err(|_| anyhow!("can't convert URI to path"))?;
2074 let target_abs_path = op
2075 .new_uri
2076 .to_file_path()
2077 .map_err(|_| anyhow!("can't convert URI to path"))?;
2078 fs.rename(
2079 &source_abs_path,
2080 &target_abs_path,
2081 op.options.map(Into::into).unwrap_or_default(),
2082 )
2083 .await?;
2084 }
2085 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2086 let abs_path = op
2087 .uri
2088 .to_file_path()
2089 .map_err(|_| anyhow!("can't convert URI to path"))?;
2090 let options = op.options.map(Into::into).unwrap_or_default();
                    // Same component-matching caveat as above: use the URI's trailing
                    // slash to distinguish directories from files.
                    if op.uri.as_str().ends_with('/') {
                        fs.remove_dir(&abs_path, options).await?;
                    } else {
                        fs.remove_file(&abs_path, options).await?;
                    }
2096 }
2097 lsp::DocumentChangeOperation::Edit(op) => {
2098 let buffer_to_edit = this
2099 .update(cx, |this, cx| {
2100 this.open_local_buffer_via_lsp(
2101 op.text_document.uri,
2102 language_name.clone(),
2103 language_server.clone(),
2104 cx,
2105 )
2106 })
2107 .await?;
2108
2109 let edits = buffer_to_edit
2110 .update(cx, |buffer, cx| {
2111 let edits = op.edits.into_iter().map(|edit| match edit {
2112 lsp::OneOf::Left(edit) => edit,
2113 lsp::OneOf::Right(edit) => edit.text_edit,
2114 });
2115 buffer.edits_from_lsp(edits, op.text_document.version, cx)
2116 })
2117 .await?;
2118
                    let transaction = buffer_to_edit.update(cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        }
                    });
2137 if let Some(transaction) = transaction {
2138 project_transaction.0.insert(buffer_to_edit, transaction);
2139 }
2140 }
2141 }
2142 }
2143
2144 Ok(project_transaction)
2145 }
2146
2147 pub fn prepare_rename<T: ToPointUtf16>(
2148 &self,
2149 buffer: ModelHandle<Buffer>,
2150 position: T,
2151 cx: &mut ModelContext<Self>,
2152 ) -> Task<Result<Option<Range<Anchor>>>> {
2153 let position = position.to_point_utf16(buffer.read(cx));
2154 self.request_lsp(buffer, PrepareRename { position }, cx)
2155 }
2156
2157 pub fn perform_rename<T: ToPointUtf16>(
2158 &self,
2159 buffer: ModelHandle<Buffer>,
2160 position: T,
2161 new_name: String,
2162 push_to_history: bool,
2163 cx: &mut ModelContext<Self>,
2164 ) -> Task<Result<ProjectTransaction>> {
2165 let position = position.to_point_utf16(buffer.read(cx));
2166 self.request_lsp(
2167 buffer,
2168 PerformRename {
2169 position,
2170 new_name,
2171 push_to_history,
2172 },
2173 cx,
2174 )
2175 }
2176
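    // Search the project for `query`. Locally this fans out across background threads:
    // worktree paths are scanned for candidate files, matching files are opened as
    // buffers, and buffer contents are then searched in parallel, yielding the matching
    // ranges per buffer. For remote projects the query is sent to the host instead.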
2177 pub fn search(
2178 &self,
2179 query: SearchQuery,
2180 cx: &mut ModelContext<Self>,
2181 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2182 if self.is_local() {
2183 let snapshots = self
2184 .visible_worktrees(cx)
2185 .filter_map(|tree| {
2186 let tree = tree.read(cx).as_local()?;
2187 Some(tree.snapshot())
2188 })
2189 .collect::<Vec<_>>();
2190
2191 let background = cx.background().clone();
2192 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2193 if path_count == 0 {
2194 return Task::ready(Ok(Default::default()));
2195 }
2196 let workers = background.num_cpus().min(path_count);
2197 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2198 cx.background()
2199 .spawn({
2200 let fs = self.fs.clone();
2201 let background = cx.background().clone();
2202 let query = query.clone();
2203 async move {
2204 let fs = &fs;
2205 let query = &query;
2206 let matching_paths_tx = &matching_paths_tx;
2207 let paths_per_worker = (path_count + workers - 1) / workers;
2208 let snapshots = &snapshots;
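                        // Treat the visible files of all snapshots as one contiguous
                        // range of indices and give each worker an equal slice of it.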
2209 background
2210 .scoped(|scope| {
2211 for worker_ix in 0..workers {
2212 let worker_start_ix = worker_ix * paths_per_worker;
2213 let worker_end_ix = worker_start_ix + paths_per_worker;
2214 scope.spawn(async move {
2215 let mut snapshot_start_ix = 0;
2216 let mut abs_path = PathBuf::new();
2217 for snapshot in snapshots {
2218 let snapshot_end_ix =
2219 snapshot_start_ix + snapshot.visible_file_count();
2220 if worker_end_ix <= snapshot_start_ix {
2221 break;
2222 } else if worker_start_ix > snapshot_end_ix {
2223 snapshot_start_ix = snapshot_end_ix;
2224 continue;
2225 } else {
2226 let start_in_snapshot = worker_start_ix
2227 .saturating_sub(snapshot_start_ix);
2228 let end_in_snapshot =
2229 cmp::min(worker_end_ix, snapshot_end_ix)
2230 - snapshot_start_ix;
2231
2232 for entry in snapshot
2233 .files(false, start_in_snapshot)
2234 .take(end_in_snapshot - start_in_snapshot)
2235 {
2236 if matching_paths_tx.is_closed() {
2237 break;
2238 }
2239
2240 abs_path.clear();
2241 abs_path.push(&snapshot.abs_path());
2242 abs_path.push(&entry.path);
2243 let matches = if let Some(file) =
2244 fs.open_sync(&abs_path).await.log_err()
2245 {
2246 query.detect(file).unwrap_or(false)
2247 } else {
2248 false
2249 };
2250
2251 if matches {
2252 let project_path =
2253 (snapshot.id(), entry.path.clone());
2254 if matching_paths_tx
2255 .send(project_path)
2256 .await
2257 .is_err()
2258 {
2259 break;
2260 }
2261 }
2262 }
2263
2264 snapshot_start_ix = snapshot_end_ix;
2265 }
2266 }
2267 });
2268 }
2269 })
2270 .await;
2271 }
2272 })
2273 .detach();
2274
2275 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2276 let open_buffers = self
2277 .opened_buffers
2278 .values()
2279 .filter_map(|b| b.upgrade(cx))
2280 .collect::<HashSet<_>>();
2281 cx.spawn(|this, cx| async move {
2282 for buffer in &open_buffers {
2283 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2284 buffers_tx.send((buffer.clone(), snapshot)).await?;
2285 }
2286
2287 let open_buffers = Rc::new(RefCell::new(open_buffers));
2288 while let Some(project_path) = matching_paths_rx.next().await {
2289 if buffers_tx.is_closed() {
2290 break;
2291 }
2292
2293 let this = this.clone();
2294 let open_buffers = open_buffers.clone();
2295 let buffers_tx = buffers_tx.clone();
2296 cx.spawn(|mut cx| async move {
2297 if let Some(buffer) = this
2298 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2299 .await
2300 .log_err()
2301 {
2302 if open_buffers.borrow_mut().insert(buffer.clone()) {
2303 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2304 buffers_tx.send((buffer, snapshot)).await?;
2305 }
2306 }
2307
2308 Ok::<_, anyhow::Error>(())
2309 })
2310 .detach();
2311 }
2312
2313 Ok::<_, anyhow::Error>(())
2314 })
2315 .detach_and_log_err(cx);
2316
2317 let background = cx.background().clone();
2318 cx.background().spawn(async move {
2319 let query = &query;
2320 let mut matched_buffers = Vec::new();
2321 for _ in 0..workers {
2322 matched_buffers.push(HashMap::default());
2323 }
2324 background
2325 .scoped(|scope| {
2326 for worker_matched_buffers in matched_buffers.iter_mut() {
2327 let mut buffers_rx = buffers_rx.clone();
2328 scope.spawn(async move {
2329 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2330 let buffer_matches = query
2331 .search(snapshot.as_rope())
2332 .await
2333 .iter()
2334 .map(|range| {
2335 snapshot.anchor_before(range.start)
2336 ..snapshot.anchor_after(range.end)
2337 })
2338 .collect::<Vec<_>>();
2339 if !buffer_matches.is_empty() {
2340 worker_matched_buffers
2341 .insert(buffer.clone(), buffer_matches);
2342 }
2343 }
2344 });
2345 }
2346 })
2347 .await;
2348 Ok(matched_buffers.into_iter().flatten().collect())
2349 })
2350 } else if let Some(project_id) = self.remote_id() {
2351 let request = self.client.request(query.to_proto(project_id));
2352 cx.spawn(|this, mut cx| async move {
2353 let response = request.await?;
2354 let mut result = HashMap::default();
2355 for location in response.locations {
2356 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2357 let target_buffer = this
2358 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2359 .await?;
2360 let start = location
2361 .start
2362 .and_then(deserialize_anchor)
2363 .ok_or_else(|| anyhow!("missing target start"))?;
2364 let end = location
2365 .end
2366 .and_then(deserialize_anchor)
2367 .ok_or_else(|| anyhow!("missing target end"))?;
2368 result
2369 .entry(target_buffer)
                        .or_default()
                        .push(start..end);
2372 }
2373 Ok(result)
2374 })
2375 } else {
2376 Task::ready(Ok(Default::default()))
2377 }
2378 }
2379
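    // Dispatch an `LspCommand`: locally, translate it into an LSP request for the
    // buffer's language server (after checking the server's capabilities); on remote
    // projects, send the corresponding protobuf request to the host.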
2380 fn request_lsp<R: LspCommand>(
2381 &self,
2382 buffer_handle: ModelHandle<Buffer>,
2383 request: R,
2384 cx: &mut ModelContext<Self>,
2385 ) -> Task<Result<R::Response>>
2386 where
2387 <R::LspRequest as lsp::request::Request>::Result: Send,
2388 {
2389 let buffer = buffer_handle.read(cx);
2390 if self.is_local() {
2391 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2392 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
2393 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2394 return cx.spawn(|this, cx| async move {
2395 if !language_server
2396 .capabilities()
2397 .await
2398 .map_or(false, |capabilities| {
2399 request.check_capabilities(&capabilities)
2400 })
2401 {
2402 return Ok(Default::default());
2403 }
2404
2405 let response = language_server
2406 .request::<R::LspRequest>(lsp_params)
2407 .await
2408 .context("lsp request failed")?;
2409 request
2410 .response_from_lsp(response, this, buffer_handle, cx)
2411 .await
2412 });
2413 }
2414 } else if let Some(project_id) = self.remote_id() {
2415 let rpc = self.client.clone();
2416 let message = request.to_proto(project_id, buffer);
2417 return cx.spawn(|this, cx| async move {
2418 let response = rpc.request(message).await?;
2419 request
2420 .response_from_proto(response, this, buffer_handle, cx)
2421 .await
2422 });
2423 }
2424 Task::ready(Ok(Default::default()))
2425 }
2426
2427 pub fn find_or_create_local_worktree(
2428 &mut self,
2429 abs_path: impl AsRef<Path>,
2430 visible: bool,
2431 cx: &mut ModelContext<Self>,
2432 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2433 let abs_path = abs_path.as_ref();
2434 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2435 Task::ready(Ok((tree.clone(), relative_path.into())))
2436 } else {
2437 let worktree = self.create_local_worktree(abs_path, visible, cx);
2438 cx.foreground()
2439 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2440 }
2441 }
2442
2443 pub fn find_local_worktree(
2444 &self,
2445 abs_path: &Path,
2446 cx: &AppContext,
2447 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2448 for tree in self.worktrees(cx) {
2449 if let Some(relative_path) = tree
2450 .read(cx)
2451 .as_local()
2452 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2453 {
2454 return Some((tree.clone(), relative_path.into()));
2455 }
2456 }
2457 None
2458 }
2459
2460 pub fn is_shared(&self) -> bool {
2461 match &self.client_state {
2462 ProjectClientState::Local { is_shared, .. } => *is_shared,
2463 ProjectClientState::Remote { .. } => false,
2464 }
2465 }
2466
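    // Start loading a local worktree for `abs_path`, deduplicating concurrent requests
    // through `loading_local_worktrees`. Once loaded, the worktree is added to the
    // project and, if the project has a remote id, registered or shared as appropriate.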
2467 fn create_local_worktree(
2468 &mut self,
2469 abs_path: impl AsRef<Path>,
2470 visible: bool,
2471 cx: &mut ModelContext<Self>,
2472 ) -> Task<Result<ModelHandle<Worktree>>> {
2473 let fs = self.fs.clone();
2474 let client = self.client.clone();
2475 let path: Arc<Path> = abs_path.as_ref().into();
2476 let task = self
2477 .loading_local_worktrees
2478 .entry(path.clone())
2479 .or_insert_with(|| {
2480 cx.spawn(|project, mut cx| {
2481 async move {
2482 let worktree =
2483 Worktree::local(client.clone(), path.clone(), visible, fs, &mut cx)
2484 .await;
2485 project.update(&mut cx, |project, _| {
2486 project.loading_local_worktrees.remove(&path);
2487 });
2488 let worktree = worktree?;
2489
2490 let (remote_project_id, is_shared) =
2491 project.update(&mut cx, |project, cx| {
2492 project.add_worktree(&worktree, cx);
2493 (project.remote_id(), project.is_shared())
2494 });
2495
2496 if let Some(project_id) = remote_project_id {
2497 if is_shared {
2498 worktree
2499 .update(&mut cx, |worktree, cx| {
2500 worktree.as_local_mut().unwrap().share(project_id, cx)
2501 })
2502 .await?;
2503 } else {
2504 worktree
2505 .update(&mut cx, |worktree, cx| {
2506 worktree.as_local_mut().unwrap().register(project_id, cx)
2507 })
2508 .await?;
2509 }
2510 }
2511
2512 Ok(worktree)
2513 }
                    .map_err(Arc::new)
2515 })
2516 .shared()
2517 })
2518 .clone();
2519 cx.foreground().spawn(async move {
2520 match task.await {
2521 Ok(worktree) => Ok(worktree),
2522 Err(err) => Err(anyhow!("{}", err)),
2523 }
2524 })
2525 }
2526
2527 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
2528 self.worktrees.retain(|worktree| {
2529 worktree
2530 .upgrade(cx)
2531 .map_or(false, |w| w.read(cx).id() != id)
2532 });
2533 cx.notify();
2534 }
2535
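    // Track a new worktree. Visible, shared, or remote worktrees are held strongly;
    // otherwise only a weak handle is kept and the entry is pruned when it is released.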
2536 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
2537 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
2538 if worktree.read(cx).is_local() {
2539 cx.subscribe(&worktree, |this, worktree, _, cx| {
2540 this.update_local_worktree_buffers(worktree, cx);
2541 })
2542 .detach();
2543 }
2544
2545 let push_strong_handle = {
2546 let worktree = worktree.read(cx);
2547 self.is_shared() || worktree.is_visible() || worktree.is_remote()
2548 };
2549 if push_strong_handle {
2550 self.worktrees
2551 .push(WorktreeHandle::Strong(worktree.clone()));
2552 } else {
2553 cx.observe_release(&worktree, |this, cx| {
2554 this.worktrees
2555 .retain(|worktree| worktree.upgrade(cx).is_some());
2556 cx.notify();
2557 })
2558 .detach();
2559 self.worktrees
2560 .push(WorktreeHandle::Weak(worktree.downgrade()));
2561 }
2562 cx.notify();
2563 }
2564
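    // After a local worktree changes, re-resolve the file for every open buffer that
    // belongs to it, notify remote collaborators of the updated file metadata, and drop
    // map entries whose buffers have been released.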
2565 fn update_local_worktree_buffers(
2566 &mut self,
2567 worktree_handle: ModelHandle<Worktree>,
2568 cx: &mut ModelContext<Self>,
2569 ) {
2570 let snapshot = worktree_handle.read(cx).snapshot();
2571 let mut buffers_to_delete = Vec::new();
2572 for (buffer_id, buffer) in &self.opened_buffers {
2573 if let Some(buffer) = buffer.upgrade(cx) {
2574 buffer.update(cx, |buffer, cx| {
2575 if let Some(old_file) = File::from_dyn(buffer.file()) {
2576 if old_file.worktree != worktree_handle {
2577 return;
2578 }
2579
2580 let new_file = if let Some(entry) = old_file
2581 .entry_id
2582 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
2583 {
2584 File {
2585 is_local: true,
2586 entry_id: Some(entry.id),
2587 mtime: entry.mtime,
2588 path: entry.path.clone(),
2589 worktree: worktree_handle.clone(),
2590 }
2591 } else if let Some(entry) =
2592 snapshot.entry_for_path(old_file.path().as_ref())
2593 {
2594 File {
2595 is_local: true,
2596 entry_id: Some(entry.id),
2597 mtime: entry.mtime,
2598 path: entry.path.clone(),
2599 worktree: worktree_handle.clone(),
2600 }
2601 } else {
2602 File {
2603 is_local: true,
2604 entry_id: None,
2605 path: old_file.path().clone(),
2606 mtime: old_file.mtime(),
2607 worktree: worktree_handle.clone(),
2608 }
2609 };
2610
2611 if let Some(project_id) = self.remote_id() {
2612 self.client
2613 .send(proto::UpdateBufferFile {
2614 project_id,
2615 buffer_id: *buffer_id as u64,
2616 file: Some(new_file.to_proto()),
2617 })
2618 .log_err();
2619 }
2620 buffer.file_updated(Box::new(new_file), cx).detach();
2621 }
2622 });
2623 } else {
2624 buffers_to_delete.push(*buffer_id);
2625 }
2626 }
2627
2628 for buffer_id in buffers_to_delete {
2629 self.opened_buffers.remove(&buffer_id);
2630 }
2631 }
2632
2633 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2634 let new_active_entry = entry.and_then(|project_path| {
2635 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2636 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2637 Some(ProjectEntry {
2638 worktree_id: project_path.worktree_id,
2639 entry_id: entry.id,
2640 })
2641 });
2642 if new_active_entry != self.active_entry {
2643 self.active_entry = new_active_entry;
2644 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2645 }
2646 }
2647
2648 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2649 self.language_servers_with_diagnostics_running > 0
2650 }
2651
2652 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2653 let mut summary = DiagnosticSummary::default();
2654 for (_, path_summary) in self.diagnostic_summaries(cx) {
2655 summary.error_count += path_summary.error_count;
2656 summary.warning_count += path_summary.warning_count;
2657 summary.info_count += path_summary.info_count;
2658 summary.hint_count += path_summary.hint_count;
2659 }
2660 summary
2661 }
2662
2663 pub fn diagnostic_summaries<'a>(
2664 &'a self,
2665 cx: &'a AppContext,
2666 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2667 self.worktrees(cx).flat_map(move |worktree| {
2668 let worktree = worktree.read(cx);
2669 let worktree_id = worktree.id();
2670 worktree
2671 .diagnostic_summaries()
2672 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2673 })
2674 }
2675
2676 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2677 self.language_servers_with_diagnostics_running += 1;
2678 if self.language_servers_with_diagnostics_running == 1 {
2679 cx.emit(Event::DiskBasedDiagnosticsStarted);
2680 }
2681 }
2682
2683 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2684 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2685 self.language_servers_with_diagnostics_running -= 1;
2686 if self.language_servers_with_diagnostics_running == 0 {
2687 cx.emit(Event::DiskBasedDiagnosticsFinished);
2688 }
2689 }
2690
2691 pub fn active_entry(&self) -> Option<ProjectEntry> {
2692 self.active_entry
2693 }
2694
2695 // RPC message handlers
2696
2697 async fn handle_unshare_project(
2698 this: ModelHandle<Self>,
2699 _: TypedEnvelope<proto::UnshareProject>,
2700 _: Arc<Client>,
2701 mut cx: AsyncAppContext,
2702 ) -> Result<()> {
2703 this.update(&mut cx, |this, cx| this.project_unshared(cx));
2704 Ok(())
2705 }
2706
2707 async fn handle_add_collaborator(
2708 this: ModelHandle<Self>,
2709 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2710 _: Arc<Client>,
2711 mut cx: AsyncAppContext,
2712 ) -> Result<()> {
2713 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2714 let collaborator = envelope
2715 .payload
2716 .collaborator
2717 .take()
2718 .ok_or_else(|| anyhow!("empty collaborator"))?;
2719
2720 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2721 this.update(&mut cx, |this, cx| {
2722 this.collaborators
2723 .insert(collaborator.peer_id, collaborator);
2724 cx.notify();
2725 });
2726
2727 Ok(())
2728 }
2729
2730 async fn handle_remove_collaborator(
2731 this: ModelHandle<Self>,
2732 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2733 _: Arc<Client>,
2734 mut cx: AsyncAppContext,
2735 ) -> Result<()> {
2736 this.update(&mut cx, |this, cx| {
2737 let peer_id = PeerId(envelope.payload.peer_id);
2738 let replica_id = this
2739 .collaborators
2740 .remove(&peer_id)
2741 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2742 .replica_id;
2743 for (_, buffer) in &this.opened_buffers {
2744 if let Some(buffer) = buffer.upgrade(cx) {
2745 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2746 }
2747 }
2748 cx.notify();
2749 Ok(())
2750 })
2751 }
2752
2753 async fn handle_register_worktree(
2754 this: ModelHandle<Self>,
2755 envelope: TypedEnvelope<proto::RegisterWorktree>,
2756 client: Arc<Client>,
2757 mut cx: AsyncAppContext,
2758 ) -> Result<()> {
2759 this.update(&mut cx, |this, cx| {
2760 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2761 let replica_id = this.replica_id();
2762 let worktree = proto::Worktree {
2763 id: envelope.payload.worktree_id,
2764 root_name: envelope.payload.root_name,
2765 entries: Default::default(),
2766 diagnostic_summaries: Default::default(),
2767 visible: envelope.payload.visible,
2768 };
2769 let (worktree, load_task) =
2770 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2771 this.add_worktree(&worktree, cx);
2772 load_task.detach();
2773 Ok(())
2774 })
2775 }
2776
2777 async fn handle_unregister_worktree(
2778 this: ModelHandle<Self>,
2779 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2780 _: Arc<Client>,
2781 mut cx: AsyncAppContext,
2782 ) -> Result<()> {
2783 this.update(&mut cx, |this, cx| {
2784 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2785 this.remove_worktree(worktree_id, cx);
2786 Ok(())
2787 })
2788 }
2789
2790 async fn handle_update_worktree(
2791 this: ModelHandle<Self>,
2792 envelope: TypedEnvelope<proto::UpdateWorktree>,
2793 _: Arc<Client>,
2794 mut cx: AsyncAppContext,
2795 ) -> Result<()> {
2796 this.update(&mut cx, |this, cx| {
2797 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2798 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2799 worktree.update(cx, |worktree, _| {
2800 let worktree = worktree.as_remote_mut().unwrap();
2801 worktree.update_from_remote(envelope)
2802 })?;
2803 }
2804 Ok(())
2805 })
2806 }
2807
2808 async fn handle_update_diagnostic_summary(
2809 this: ModelHandle<Self>,
2810 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2811 _: Arc<Client>,
2812 mut cx: AsyncAppContext,
2813 ) -> Result<()> {
2814 this.update(&mut cx, |this, cx| {
2815 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2816 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2817 if let Some(summary) = envelope.payload.summary {
2818 let project_path = ProjectPath {
2819 worktree_id,
2820 path: Path::new(&summary.path).into(),
2821 };
2822 worktree.update(cx, |worktree, _| {
2823 worktree
2824 .as_remote_mut()
2825 .unwrap()
2826 .update_diagnostic_summary(project_path.path.clone(), &summary);
2827 });
2828 cx.emit(Event::DiagnosticsUpdated(project_path));
2829 }
2830 }
2831 Ok(())
2832 })
2833 }
2834
2835 async fn handle_disk_based_diagnostics_updating(
2836 this: ModelHandle<Self>,
2837 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2838 _: Arc<Client>,
2839 mut cx: AsyncAppContext,
2840 ) -> Result<()> {
2841 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2842 Ok(())
2843 }
2844
2845 async fn handle_disk_based_diagnostics_updated(
2846 this: ModelHandle<Self>,
2847 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2848 _: Arc<Client>,
2849 mut cx: AsyncAppContext,
2850 ) -> Result<()> {
2851 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2852 Ok(())
2853 }
2854
2855 async fn handle_update_buffer(
2856 this: ModelHandle<Self>,
2857 envelope: TypedEnvelope<proto::UpdateBuffer>,
2858 _: Arc<Client>,
2859 mut cx: AsyncAppContext,
2860 ) -> Result<()> {
2861 this.update(&mut cx, |this, cx| {
2862 let payload = envelope.payload.clone();
2863 let buffer_id = payload.buffer_id;
2864 let ops = payload
2865 .operations
2866 .into_iter()
                .map(language::proto::deserialize_operation)
2868 .collect::<Result<Vec<_>, _>>()?;
2869 match this.opened_buffers.entry(buffer_id) {
2870 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2871 OpenBuffer::Strong(buffer) => {
2872 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2873 }
2874 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2875 OpenBuffer::Weak(_) => {}
2876 },
2877 hash_map::Entry::Vacant(e) => {
2878 e.insert(OpenBuffer::Loading(ops));
2879 }
2880 }
2881 Ok(())
2882 })
2883 }
2884
2885 async fn handle_update_buffer_file(
2886 this: ModelHandle<Self>,
2887 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2888 _: Arc<Client>,
2889 mut cx: AsyncAppContext,
2890 ) -> Result<()> {
2891 this.update(&mut cx, |this, cx| {
2892 let payload = envelope.payload.clone();
2893 let buffer_id = payload.buffer_id;
2894 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2895 let worktree = this
2896 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2897 .ok_or_else(|| anyhow!("no such worktree"))?;
2898 let file = File::from_proto(file, worktree.clone(), cx)?;
2899 let buffer = this
2900 .opened_buffers
2901 .get_mut(&buffer_id)
2902 .and_then(|b| b.upgrade(cx))
2903 .ok_or_else(|| anyhow!("no such buffer"))?;
2904 buffer.update(cx, |buffer, cx| {
2905 buffer.file_updated(Box::new(file), cx).detach();
2906 });
2907 Ok(())
2908 })
2909 }
2910
2911 async fn handle_save_buffer(
2912 this: ModelHandle<Self>,
2913 envelope: TypedEnvelope<proto::SaveBuffer>,
2914 _: Arc<Client>,
2915 mut cx: AsyncAppContext,
2916 ) -> Result<proto::BufferSaved> {
2917 let buffer_id = envelope.payload.buffer_id;
2918 let requested_version = deserialize_version(envelope.payload.version);
2919
2920 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
2921 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2922 let buffer = this
2923 .opened_buffers
2924 .get(&buffer_id)
2925 .map(|buffer| buffer.upgrade(cx).unwrap())
2926 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2927 Ok::<_, anyhow::Error>((project_id, buffer))
2928 })?;
2929 buffer
2930 .update(&mut cx, |buffer, _| {
2931 buffer.wait_for_version(requested_version)
2932 })
2933 .await;
2934
2935 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2936 Ok(proto::BufferSaved {
2937 project_id,
2938 buffer_id,
2939 version: serialize_version(&saved_version),
2940 mtime: Some(mtime.into()),
2941 })
2942 }
2943
2944 async fn handle_format_buffers(
2945 this: ModelHandle<Self>,
2946 envelope: TypedEnvelope<proto::FormatBuffers>,
2947 _: Arc<Client>,
2948 mut cx: AsyncAppContext,
2949 ) -> Result<proto::FormatBuffersResponse> {
2950 let sender_id = envelope.original_sender_id()?;
2951 let format = this.update(&mut cx, |this, cx| {
2952 let mut buffers = HashSet::default();
2953 for buffer_id in &envelope.payload.buffer_ids {
2954 buffers.insert(
2955 this.opened_buffers
2956 .get(buffer_id)
2957 .map(|buffer| buffer.upgrade(cx).unwrap())
2958 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2959 );
2960 }
2961 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2962 })?;
2963
2964 let project_transaction = format.await?;
2965 let project_transaction = this.update(&mut cx, |this, cx| {
2966 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2967 });
2968 Ok(proto::FormatBuffersResponse {
2969 transaction: Some(project_transaction),
2970 })
2971 }
2972
2973 async fn handle_get_completions(
2974 this: ModelHandle<Self>,
2975 envelope: TypedEnvelope<proto::GetCompletions>,
2976 _: Arc<Client>,
2977 mut cx: AsyncAppContext,
2978 ) -> Result<proto::GetCompletionsResponse> {
2979 let position = envelope
2980 .payload
2981 .position
2982 .and_then(language::proto::deserialize_anchor)
2983 .ok_or_else(|| anyhow!("invalid position"))?;
2984 let version = deserialize_version(envelope.payload.version);
2985 let buffer = this.read_with(&cx, |this, cx| {
2986 this.opened_buffers
2987 .get(&envelope.payload.buffer_id)
2988 .map(|buffer| buffer.upgrade(cx).unwrap())
2989 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2990 })?;
2991 buffer
2992 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
2993 .await;
2994 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2995 let completions = this
2996 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2997 .await?;
2998
2999 Ok(proto::GetCompletionsResponse {
3000 completions: completions
3001 .iter()
3002 .map(language::proto::serialize_completion)
3003 .collect(),
3004 version: serialize_version(&version),
3005 })
3006 }
3007
3008 async fn handle_apply_additional_edits_for_completion(
3009 this: ModelHandle<Self>,
3010 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3011 _: Arc<Client>,
3012 mut cx: AsyncAppContext,
3013 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3014 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3015 let buffer = this
3016 .opened_buffers
3017 .get(&envelope.payload.buffer_id)
3018 .map(|buffer| buffer.upgrade(cx).unwrap())
3019 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3020 let language = buffer.read(cx).language();
3021 let completion = language::proto::deserialize_completion(
3022 envelope
3023 .payload
3024 .completion
3025 .ok_or_else(|| anyhow!("invalid completion"))?,
3026 language,
3027 )?;
3028 Ok::<_, anyhow::Error>(
3029 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3030 )
3031 })?;
3032
3033 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3034 transaction: apply_additional_edits
3035 .await?
3036 .as_ref()
3037 .map(language::proto::serialize_transaction),
3038 })
3039 }
3040
3041 async fn handle_get_code_actions(
3042 this: ModelHandle<Self>,
3043 envelope: TypedEnvelope<proto::GetCodeActions>,
3044 _: Arc<Client>,
3045 mut cx: AsyncAppContext,
3046 ) -> Result<proto::GetCodeActionsResponse> {
3047 let start = envelope
3048 .payload
3049 .start
3050 .and_then(language::proto::deserialize_anchor)
3051 .ok_or_else(|| anyhow!("invalid start"))?;
3052 let end = envelope
3053 .payload
3054 .end
3055 .and_then(language::proto::deserialize_anchor)
3056 .ok_or_else(|| anyhow!("invalid end"))?;
3057 let buffer = this.update(&mut cx, |this, cx| {
3058 this.opened_buffers
3059 .get(&envelope.payload.buffer_id)
3060 .map(|buffer| buffer.upgrade(cx).unwrap())
3061 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3062 })?;
3063 buffer
3064 .update(&mut cx, |buffer, _| {
3065 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3066 })
3067 .await;
3068
3069 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3070 let code_actions = this.update(&mut cx, |this, cx| {
3071 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3072 })?;
3073
3074 Ok(proto::GetCodeActionsResponse {
3075 actions: code_actions
3076 .await?
3077 .iter()
3078 .map(language::proto::serialize_code_action)
3079 .collect(),
3080 version: serialize_version(&version),
3081 })
3082 }
3083
3084 async fn handle_apply_code_action(
3085 this: ModelHandle<Self>,
3086 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3087 _: Arc<Client>,
3088 mut cx: AsyncAppContext,
3089 ) -> Result<proto::ApplyCodeActionResponse> {
3090 let sender_id = envelope.original_sender_id()?;
3091 let action = language::proto::deserialize_code_action(
3092 envelope
3093 .payload
3094 .action
3095 .ok_or_else(|| anyhow!("invalid action"))?,
3096 )?;
3097 let apply_code_action = this.update(&mut cx, |this, cx| {
3098 let buffer = this
3099 .opened_buffers
3100 .get(&envelope.payload.buffer_id)
3101 .map(|buffer| buffer.upgrade(cx).unwrap())
3102 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3103 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3104 })?;
3105
3106 let project_transaction = apply_code_action.await?;
3107 let project_transaction = this.update(&mut cx, |this, cx| {
3108 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3109 });
3110 Ok(proto::ApplyCodeActionResponse {
3111 transaction: Some(project_transaction),
3112 })
3113 }
3114
3115 async fn handle_lsp_command<T: LspCommand>(
3116 this: ModelHandle<Self>,
3117 envelope: TypedEnvelope<T::ProtoRequest>,
3118 _: Arc<Client>,
3119 mut cx: AsyncAppContext,
3120 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3121 where
3122 <T::LspRequest as lsp::request::Request>::Result: Send,
3123 {
3124 let sender_id = envelope.original_sender_id()?;
3125 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3126 let buffer_handle = this.read_with(&cx, |this, _| {
3127 this.opened_buffers
3128 .get(&buffer_id)
3129 .map(|buffer| buffer.upgrade(&cx).unwrap())
3130 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3131 })?;
3132 let request = T::from_proto(
3133 envelope.payload,
3134 this.clone(),
3135 buffer_handle.clone(),
3136 cx.clone(),
3137 )
3138 .await?;
3139 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3140 let response = this
3141 .update(&mut cx, |this, cx| {
3142 this.request_lsp(buffer_handle, request, cx)
3143 })
3144 .await?;
3145 this.update(&mut cx, |this, cx| {
3146 Ok(T::response_to_proto(
3147 response,
3148 this,
3149 sender_id,
3150 &buffer_version,
3151 cx,
3152 ))
3153 })
3154 }
3155
3156 async fn handle_get_project_symbols(
3157 this: ModelHandle<Self>,
3158 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3159 _: Arc<Client>,
3160 mut cx: AsyncAppContext,
3161 ) -> Result<proto::GetProjectSymbolsResponse> {
3162 let symbols = this
3163 .update(&mut cx, |this, cx| {
3164 this.symbols(&envelope.payload.query, cx)
3165 })
3166 .await?;
3167
3168 Ok(proto::GetProjectSymbolsResponse {
3169 symbols: symbols.iter().map(serialize_symbol).collect(),
3170 })
3171 }
3172
3173 async fn handle_search_project(
3174 this: ModelHandle<Self>,
3175 envelope: TypedEnvelope<proto::SearchProject>,
3176 _: Arc<Client>,
3177 mut cx: AsyncAppContext,
3178 ) -> Result<proto::SearchProjectResponse> {
3179 let peer_id = envelope.original_sender_id()?;
3180 let query = SearchQuery::from_proto(envelope.payload)?;
3181 let result = this
3182 .update(&mut cx, |this, cx| this.search(query, cx))
3183 .await?;
3184
3185 this.update(&mut cx, |this, cx| {
3186 let mut locations = Vec::new();
3187 for (buffer, ranges) in result {
3188 for range in ranges {
3189 let start = serialize_anchor(&range.start);
3190 let end = serialize_anchor(&range.end);
3191 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3192 locations.push(proto::Location {
3193 buffer: Some(buffer),
3194 start: Some(start),
3195 end: Some(end),
3196 });
3197 }
3198 }
3199 Ok(proto::SearchProjectResponse { locations })
3200 })
3201 }
3202
3203 async fn handle_open_buffer_for_symbol(
3204 this: ModelHandle<Self>,
3205 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3206 _: Arc<Client>,
3207 mut cx: AsyncAppContext,
3208 ) -> Result<proto::OpenBufferForSymbolResponse> {
3209 let peer_id = envelope.original_sender_id()?;
3210 let symbol = envelope
3211 .payload
3212 .symbol
3213 .ok_or_else(|| anyhow!("invalid symbol"))?;
3214 let symbol = this.read_with(&cx, |this, _| {
3215 let symbol = this.deserialize_symbol(symbol)?;
3216 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3217 if signature == symbol.signature {
3218 Ok(symbol)
3219 } else {
3220 Err(anyhow!("invalid symbol signature"))
3221 }
3222 })?;
3223 let buffer = this
3224 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3225 .await?;
3226
3227 Ok(proto::OpenBufferForSymbolResponse {
3228 buffer: Some(this.update(&mut cx, |this, cx| {
3229 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3230 })),
3231 })
3232 }
3233
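    // Hash a worktree id and path together with this project's private nonce. The
    // signature lets us verify that a symbol a peer asks to open was originally handed
    // out by this project, rather than pointing at an arbitrary path.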
3234 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3235 let mut hasher = Sha256::new();
3236 hasher.update(worktree_id.to_proto().to_be_bytes());
3237 hasher.update(path.to_string_lossy().as_bytes());
3238 hasher.update(self.nonce.to_be_bytes());
3239 hasher.finalize().as_slice().try_into().unwrap()
3240 }
3241
3242 async fn handle_open_buffer(
3243 this: ModelHandle<Self>,
3244 envelope: TypedEnvelope<proto::OpenBuffer>,
3245 _: Arc<Client>,
3246 mut cx: AsyncAppContext,
3247 ) -> Result<proto::OpenBufferResponse> {
3248 let peer_id = envelope.original_sender_id()?;
3249 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3250 let open_buffer = this.update(&mut cx, |this, cx| {
3251 this.open_buffer(
3252 ProjectPath {
3253 worktree_id,
3254 path: PathBuf::from(envelope.payload.path).into(),
3255 },
3256 cx,
3257 )
3258 });
3259
3260 let buffer = open_buffer.await?;
3261 this.update(&mut cx, |this, cx| {
3262 Ok(proto::OpenBufferResponse {
3263 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3264 })
3265 })
3266 }
3267
3268 fn serialize_project_transaction_for_peer(
3269 &mut self,
3270 project_transaction: ProjectTransaction,
3271 peer_id: PeerId,
3272 cx: &AppContext,
3273 ) -> proto::ProjectTransaction {
3274 let mut serialized_transaction = proto::ProjectTransaction {
3275 buffers: Default::default(),
3276 transactions: Default::default(),
3277 };
3278 for (buffer, transaction) in project_transaction.0 {
3279 serialized_transaction
3280 .buffers
3281 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3282 serialized_transaction
3283 .transactions
3284 .push(language::proto::serialize_transaction(&transaction));
3285 }
3286 serialized_transaction
3287 }
3288
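    // Rebuild a `ProjectTransaction` received from a peer: resolve each referenced
    // buffer, wait for the transactions' edits to arrive, and optionally push the
    // transactions onto the buffers' undo histories.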
3289 fn deserialize_project_transaction(
3290 &mut self,
3291 message: proto::ProjectTransaction,
3292 push_to_history: bool,
3293 cx: &mut ModelContext<Self>,
3294 ) -> Task<Result<ProjectTransaction>> {
3295 cx.spawn(|this, mut cx| async move {
3296 let mut project_transaction = ProjectTransaction::default();
3297 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3298 let buffer = this
3299 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3300 .await?;
3301 let transaction = language::proto::deserialize_transaction(transaction)?;
3302 project_transaction.0.insert(buffer, transaction);
3303 }
3304
3305 for (buffer, transaction) in &project_transaction.0 {
3306 buffer
3307 .update(&mut cx, |buffer, _| {
3308 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3309 })
3310 .await;
3311
3312 if push_to_history {
3313 buffer.update(&mut cx, |buffer, _| {
3314 buffer.push_transaction(transaction.clone(), Instant::now());
3315 });
3316 }
3317 }
3318
3319 Ok(project_transaction)
3320 })
3321 }
3322
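    // Send the full buffer state the first time a buffer is shared with a given peer;
    // afterwards, refer to it by its remote id alone.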
3323 fn serialize_buffer_for_peer(
3324 &mut self,
3325 buffer: &ModelHandle<Buffer>,
3326 peer_id: PeerId,
3327 cx: &AppContext,
3328 ) -> proto::Buffer {
3329 let buffer_id = buffer.read(cx).remote_id();
3330 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3331 if shared_buffers.insert(buffer_id) {
3332 proto::Buffer {
3333 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3334 }
3335 } else {
3336 proto::Buffer {
3337 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3338 }
3339 }
3340 }
3341
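    // Resolve a buffer sent by a peer: either wait until the buffer with the given id
    // has been opened locally, or construct a new buffer from the provided state and
    // register it with the project.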
3342 fn deserialize_buffer(
3343 &mut self,
3344 buffer: proto::Buffer,
3345 cx: &mut ModelContext<Self>,
3346 ) -> Task<Result<ModelHandle<Buffer>>> {
3347 let replica_id = self.replica_id();
3348
3349 let opened_buffer_tx = self.opened_buffer.0.clone();
3350 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3351 cx.spawn(|this, mut cx| async move {
3352 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3353 proto::buffer::Variant::Id(id) => {
3354 let buffer = loop {
3355 let buffer = this.read_with(&cx, |this, cx| {
3356 this.opened_buffers
3357 .get(&id)
3358 .and_then(|buffer| buffer.upgrade(cx))
3359 });
3360 if let Some(buffer) = buffer {
3361 break buffer;
3362 }
3363 opened_buffer_rx
3364 .next()
3365 .await
3366 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3367 };
3368 Ok(buffer)
3369 }
3370 proto::buffer::Variant::State(mut buffer) => {
3371 let mut buffer_worktree = None;
3372 let mut buffer_file = None;
3373 if let Some(file) = buffer.file.take() {
3374 this.read_with(&cx, |this, cx| {
3375 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3376 let worktree =
3377 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3378 anyhow!("no worktree found for id {}", file.worktree_id)
3379 })?;
3380 buffer_file =
3381 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3382 as Box<dyn language::File>);
3383 buffer_worktree = Some(worktree);
3384 Ok::<_, anyhow::Error>(())
3385 })?;
3386 }
3387
3388 let buffer = cx.add_model(|cx| {
3389 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3390 });
3391
3392 this.update(&mut cx, |this, cx| {
3393 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
3394 })?;
3395
3396 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
3397 Ok(buffer)
3398 }
3399 }
3400 })
3401 }
3402
3403 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3404 let language = self
3405 .languages
3406 .get_language(&serialized_symbol.language_name);
3407 let start = serialized_symbol
3408 .start
3409 .ok_or_else(|| anyhow!("invalid start"))?;
3410 let end = serialized_symbol
3411 .end
3412 .ok_or_else(|| anyhow!("invalid end"))?;
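        // Note: this transmutes the raw wire value into a symbol kind and is only sound
        // if the peer sent a valid discriminant; an out-of-range value here would be
        // undefined behavior.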
3413 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
3414 Ok(Symbol {
3415 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
3416 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
3417 language_name: serialized_symbol.language_name.clone(),
3418 label: language
3419 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
3420 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
3421 name: serialized_symbol.name,
3422 path: PathBuf::from(serialized_symbol.path),
3423 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
3424 kind,
3425 signature: serialized_symbol
3426 .signature
3427 .try_into()
3428 .map_err(|_| anyhow!("invalid signature"))?,
3429 })
3430 }
3431
3432 async fn handle_close_buffer(
3433 _: ModelHandle<Self>,
3434 _: TypedEnvelope<proto::CloseBuffer>,
3435 _: Arc<Client>,
3436 _: AsyncAppContext,
3437 ) -> Result<()> {
3438 // TODO: use this for following
3439 Ok(())
3440 }
3441
3442 async fn handle_buffer_saved(
3443 this: ModelHandle<Self>,
3444 envelope: TypedEnvelope<proto::BufferSaved>,
3445 _: Arc<Client>,
3446 mut cx: AsyncAppContext,
3447 ) -> Result<()> {
3448 let version = deserialize_version(envelope.payload.version);
3449 let mtime = envelope
3450 .payload
3451 .mtime
3452 .ok_or_else(|| anyhow!("missing mtime"))?
3453 .into();
3454
3455 this.update(&mut cx, |this, cx| {
3456 let buffer = this
3457 .opened_buffers
3458 .get(&envelope.payload.buffer_id)
3459 .and_then(|buffer| buffer.upgrade(cx));
3460 if let Some(buffer) = buffer {
3461 buffer.update(cx, |buffer, cx| {
3462 buffer.did_save(version, mtime, None, cx);
3463 });
3464 }
3465 Ok(())
3466 })
3467 }
3468
3469 async fn handle_buffer_reloaded(
3470 this: ModelHandle<Self>,
3471 envelope: TypedEnvelope<proto::BufferReloaded>,
3472 _: Arc<Client>,
3473 mut cx: AsyncAppContext,
3474 ) -> Result<()> {
3475 let payload = envelope.payload.clone();
3476 let version = deserialize_version(payload.version);
3477 let mtime = payload
3478 .mtime
3479 .ok_or_else(|| anyhow!("missing mtime"))?
3480 .into();
3481 this.update(&mut cx, |this, cx| {
3482 let buffer = this
3483 .opened_buffers
3484 .get(&payload.buffer_id)
3485 .and_then(|buffer| buffer.upgrade(cx));
3486 if let Some(buffer) = buffer {
3487 buffer.update(cx, |buffer, cx| {
3488 buffer.did_reload(version, mtime, cx);
3489 });
3490 }
3491 Ok(())
3492 })
3493 }
3494
3495 pub fn match_paths<'a>(
3496 &self,
3497 query: &'a str,
3498 include_ignored: bool,
3499 smart_case: bool,
3500 max_results: usize,
3501 cancel_flag: &'a AtomicBool,
3502 cx: &AppContext,
3503 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
3504 let worktrees = self
3505 .worktrees(cx)
3506 .filter(|worktree| worktree.read(cx).is_visible())
3507 .collect::<Vec<_>>();
3508 let include_root_name = worktrees.len() > 1;
3509 let candidate_sets = worktrees
3510 .into_iter()
3511 .map(|worktree| CandidateSet {
3512 snapshot: worktree.read(cx).snapshot(),
3513 include_ignored,
3514 include_root_name,
3515 })
3516 .collect::<Vec<_>>();
3517
3518 let background = cx.background().clone();
3519 async move {
3520 fuzzy::match_paths(
3521 candidate_sets.as_slice(),
3522 query,
3523 smart_case,
3524 max_results,
3525 cancel_flag,
3526 background,
3527 )
3528 .await
3529 }
3530 }
3531}
3532
3533impl WorktreeHandle {
3534 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
3535 match self {
3536 WorktreeHandle::Strong(handle) => Some(handle.clone()),
3537 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
3538 }
3539 }
3540}
3541
3542impl OpenBuffer {
3543 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
3544 match self {
3545 OpenBuffer::Strong(handle) => Some(handle.clone()),
3546 OpenBuffer::Weak(handle) => handle.upgrade(cx),
3547 OpenBuffer::Loading(_) => None,
3548 }
3549 }
3550}
3551
3552struct CandidateSet {
3553 snapshot: Snapshot,
3554 include_ignored: bool,
3555 include_root_name: bool,
3556}
3557
3558impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
3559 type Candidates = CandidateSetIter<'a>;
3560
3561 fn id(&self) -> usize {
3562 self.snapshot.id().to_usize()
3563 }
3564
3565 fn len(&self) -> usize {
3566 if self.include_ignored {
3567 self.snapshot.file_count()
3568 } else {
3569 self.snapshot.visible_file_count()
3570 }
3571 }
3572
3573 fn prefix(&self) -> Arc<str> {
3574 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
3575 self.snapshot.root_name().into()
3576 } else if self.include_root_name {
3577 format!("{}/", self.snapshot.root_name()).into()
3578 } else {
3579 "".into()
3580 }
3581 }
3582
3583 fn candidates(&'a self, start: usize) -> Self::Candidates {
3584 CandidateSetIter {
3585 traversal: self.snapshot.files(self.include_ignored, start),
3586 }
3587 }
3588}
3589
3590struct CandidateSetIter<'a> {
3591 traversal: Traversal<'a>,
3592}
3593
3594impl<'a> Iterator for CandidateSetIter<'a> {
3595 type Item = PathMatchCandidate<'a>;
3596
3597 fn next(&mut self) -> Option<Self::Item> {
3598 self.traversal.next().map(|entry| {
3599 if let EntryKind::File(char_bag) = entry.kind {
3600 PathMatchCandidate {
3601 path: &entry.path,
3602 char_bag,
3603 }
3604 } else {
3605 unreachable!()
3606 }
3607 })
3608 }
3609}
3610
3611impl Entity for Project {
3612 type Event = Event;
3613
3614 fn release(&mut self, _: &mut gpui::MutableAppContext) {
3615 match &self.client_state {
3616 ProjectClientState::Local { remote_id_rx, .. } => {
3617 if let Some(project_id) = *remote_id_rx.borrow() {
3618 self.client
3619 .send(proto::UnregisterProject { project_id })
3620 .log_err();
3621 }
3622 }
3623 ProjectClientState::Remote { remote_id, .. } => {
3624 self.client
3625 .send(proto::LeaveProject {
3626 project_id: *remote_id,
3627 })
3628 .log_err();
3629 }
3630 }
3631 }
3632
3633 fn app_will_quit(
3634 &mut self,
3635 _: &mut MutableAppContext,
3636 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
3637 let shutdown_futures = self
3638 .language_servers
3639 .drain()
3640 .filter_map(|(_, server)| server.shutdown())
3641 .collect::<Vec<_>>();
3642 Some(
3643 async move {
3644 futures::future::join_all(shutdown_futures).await;
3645 }
3646 .boxed(),
3647 )
3648 }
3649}
3650
3651impl Collaborator {
3652 fn from_proto(
3653 message: proto::Collaborator,
3654 user_store: &ModelHandle<UserStore>,
3655 cx: &mut AsyncAppContext,
3656 ) -> impl Future<Output = Result<Self>> {
3657 let user = user_store.update(cx, |user_store, cx| {
3658 user_store.fetch_user(message.user_id, cx)
3659 });
3660
3661 async move {
3662 Ok(Self {
3663 peer_id: PeerId(message.peer_id),
3664 user: user.await?,
3665 replica_id: message.replica_id as ReplicaId,
3666 })
3667 }
3668 }
3669}
3670
3671impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
3672 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
3673 Self {
3674 worktree_id,
3675 path: path.as_ref().into(),
3676 }
3677 }
3678}
3679
3680impl From<lsp::CreateFileOptions> for fs::CreateOptions {
3681 fn from(options: lsp::CreateFileOptions) -> Self {
3682 Self {
3683 overwrite: options.overwrite.unwrap_or(false),
3684 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3685 }
3686 }
3687}
3688
3689impl From<lsp::RenameFileOptions> for fs::RenameOptions {
3690 fn from(options: lsp::RenameFileOptions) -> Self {
3691 Self {
3692 overwrite: options.overwrite.unwrap_or(false),
3693 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3694 }
3695 }
3696}
3697
3698impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
3699 fn from(options: lsp::DeleteFileOptions) -> Self {
3700 Self {
3701 recursive: options.recursive.unwrap_or(false),
3702 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
3703 }
3704 }
3705}
3706
3707fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
3708 proto::Symbol {
3709 source_worktree_id: symbol.source_worktree_id.to_proto(),
3710 worktree_id: symbol.worktree_id.to_proto(),
3711 language_name: symbol.language_name.clone(),
3712 name: symbol.name.clone(),
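        // The symbol kind is sent as its raw discriminant; see the corresponding
        // transmute note in `deserialize_symbol`.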
3713 kind: unsafe { mem::transmute(symbol.kind) },
3714 path: symbol.path.to_string_lossy().to_string(),
3715 start: Some(proto::Point {
3716 row: symbol.range.start.row,
3717 column: symbol.range.start.column,
3718 }),
3719 end: Some(proto::Point {
3720 row: symbol.range.end.row,
3721 column: symbol.range.end.column,
3722 }),
3723 signature: symbol.signature.to_vec(),
3724 }
3725}
3726
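// Compute the path of `path` relative to `base`, inserting `..` components for the
// parts of `base` that `path` does not share.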
3727fn relativize_path(base: &Path, path: &Path) -> PathBuf {
3728 let mut path_components = path.components();
3729 let mut base_components = base.components();
3730 let mut components: Vec<Component> = Vec::new();
3731 loop {
3732 match (path_components.next(), base_components.next()) {
3733 (None, None) => break,
3734 (Some(a), None) => {
3735 components.push(a);
3736 components.extend(path_components.by_ref());
3737 break;
3738 }
3739 (None, _) => components.push(Component::ParentDir),
3740 (Some(a), Some(b)) if components.is_empty() && a == b => (),
3741 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
3742 (Some(a), Some(_)) => {
3743 components.push(Component::ParentDir);
3744 for _ in base_components {
3745 components.push(Component::ParentDir);
3746 }
3747 components.push(a);
3748 components.extend(path_components.by_ref());
3749 break;
3750 }
3751 }
3752 }
3753 components.iter().map(|c| c.as_os_str()).collect()
3754}
3755
3756#[cfg(test)]
3757mod tests {
3758 use super::{Event, *};
3759 use fs::RealFs;
3760 use futures::StreamExt;
3761 use gpui::test::subscribe;
3762 use language::{
3763 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
3764 };
3765 use lsp::Url;
3766 use serde_json::json;
3767 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
3768 use unindent::Unindent as _;
3769 use util::test::temp_tree;
3770 use worktree::WorktreeHandle as _;
3771
3772 #[gpui::test]
3773 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
3774 let dir = temp_tree(json!({
3775 "root": {
3776 "apple": "",
3777 "banana": {
3778 "carrot": {
3779 "date": "",
3780 "endive": "",
3781 }
3782 },
3783 "fennel": {
3784 "grape": "",
3785 }
3786 }
3787 }));
3788
3789 let root_link_path = dir.path().join("root_link");
3790 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3791 unix::fs::symlink(
3792 &dir.path().join("root/fennel"),
3793 &dir.path().join("root/finnochio"),
3794 )
3795 .unwrap();
3796
3797 let project = Project::test(Arc::new(RealFs), cx);
3798
3799 let (tree, _) = project
3800 .update(cx, |project, cx| {
3801 project.find_or_create_local_worktree(&root_link_path, true, cx)
3802 })
3803 .await
3804 .unwrap();
3805
3806 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3807 .await;
3808 cx.read(|cx| {
3809 let tree = tree.read(cx);
3810 assert_eq!(tree.file_count(), 5);
3811 assert_eq!(
3812 tree.inode_for_path("fennel/grape"),
3813 tree.inode_for_path("finnochio/grape")
3814 );
3815 });
3816
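        // Fuzzy-match paths against the query "bna"; both entries under
        // "banana/carrot" are expected to match.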
3817 let cancel_flag = Default::default();
3818 let results = project
3819 .read_with(cx, |project, cx| {
3820 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3821 })
3822 .await;
3823 assert_eq!(
3824 results
3825 .into_iter()
3826 .map(|result| result.path)
3827 .collect::<Vec<Arc<Path>>>(),
3828 vec![
3829 PathBuf::from("banana/carrot/date").into(),
3830 PathBuf::from("banana/carrot/endive").into(),
3831 ]
3832 );
3833 }
3834
3835 #[gpui::test]
3836 async fn test_language_server_diagnostics(cx: &mut gpui::TestAppContext) {
3837 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3838 let progress_token = language_server_config
3839 .disk_based_diagnostics_progress_token
3840 .clone()
3841 .unwrap();
3842
3843 let language = Arc::new(Language::new(
3844 LanguageConfig {
3845 name: "Rust".into(),
3846 path_suffixes: vec!["rs".to_string()],
3847 language_server: Some(language_server_config),
3848 ..Default::default()
3849 },
3850 Some(tree_sitter_rust::language()),
3851 ));
3852
3853 let fs = FakeFs::new(cx.background());
3854 fs.insert_tree(
3855 "/dir",
3856 json!({
3857 "a.rs": "fn a() { A }",
3858 "b.rs": "const y: i32 = 1",
3859 }),
3860 )
3861 .await;
3862
3863 let project = Project::test(fs, cx);
3864 project.update(cx, |project, _| {
3865 Arc::get_mut(&mut project.languages).unwrap().add(language);
3866 });
3867
3868 let (tree, _) = project
3869 .update(cx, |project, cx| {
3870 project.find_or_create_local_worktree("/dir", true, cx)
3871 })
3872 .await
3873 .unwrap();
3874 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
3875
3876 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3877 .await;
3878
3879 // Cause worktree to start the fake language server
3880 let _buffer = project
3881 .update(cx, |project, cx| {
3882 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3883 })
3884 .await
3885 .unwrap();
3886
3887 let mut events = subscribe(&project, cx);
3888
3889 let mut fake_server = fake_servers.next().await.unwrap();
3890 fake_server.start_progress(&progress_token).await;
3891 assert_eq!(
3892 events.next().await.unwrap(),
3893 Event::DiskBasedDiagnosticsStarted
3894 );
3895
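        // Simulate overlapping progress reports for the same token.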
3896 fake_server.start_progress(&progress_token).await;
3897 fake_server.end_progress(&progress_token).await;
3898 fake_server.start_progress(&progress_token).await;
3899
3900 fake_server
3901 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3902 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3903 version: None,
3904 diagnostics: vec![lsp::Diagnostic {
3905 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3906 severity: Some(lsp::DiagnosticSeverity::ERROR),
3907 message: "undefined variable 'A'".to_string(),
3908 ..Default::default()
3909 }],
3910 })
3911 .await;
3912 assert_eq!(
3913 events.next().await.unwrap(),
3914 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3915 );
3916
3917 fake_server.end_progress(&progress_token).await;
3918 fake_server.end_progress(&progress_token).await;
3919 assert_eq!(
3920 events.next().await.unwrap(),
3921 Event::DiskBasedDiagnosticsUpdated
3922 );
3923 assert_eq!(
3924 events.next().await.unwrap(),
3925 Event::DiskBasedDiagnosticsFinished
3926 );
3927
3928 let buffer = project
3929 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3930 .await
3931 .unwrap();
3932
3933 buffer.read_with(cx, |buffer, _| {
3934 let snapshot = buffer.snapshot();
3935 let diagnostics = snapshot
3936 .diagnostics_in_range::<_, Point>(0..buffer.len())
3937 .collect::<Vec<_>>();
3938 assert_eq!(
3939 diagnostics,
3940 &[DiagnosticEntry {
3941 range: Point::new(0, 9)..Point::new(0, 10),
3942 diagnostic: Diagnostic {
3943 severity: lsp::DiagnosticSeverity::ERROR,
3944 message: "undefined variable 'A'".to_string(),
3945 group_id: 0,
3946 is_primary: true,
3947 ..Default::default()
3948 }
3949 }]
3950 )
3951 });
3952 }
3953
3954 #[gpui::test]
3955 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
3956 let dir = temp_tree(json!({
3957 "root": {
3958 "dir1": {},
3959 "dir2": {
3960 "dir3": {}
3961 }
3962 }
3963 }));
3964
3965 let project = Project::test(Arc::new(RealFs), cx);
3966 let (tree, _) = project
3967 .update(cx, |project, cx| {
3968 project.find_or_create_local_worktree(&dir.path(), true, cx)
3969 })
3970 .await
3971 .unwrap();
3972
3973 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3974 .await;
3975
3976 let cancel_flag = Default::default();
3977 let results = project
3978 .read_with(cx, |project, cx| {
3979 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3980 })
3981 .await;
3982
3983 assert!(results.is_empty());
3984 }
3985
3986 #[gpui::test]
3987 async fn test_definition(cx: &mut gpui::TestAppContext) {
3988 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3989 let language = Arc::new(Language::new(
3990 LanguageConfig {
3991 name: "Rust".into(),
3992 path_suffixes: vec!["rs".to_string()],
3993 language_server: Some(language_server_config),
3994 ..Default::default()
3995 },
3996 Some(tree_sitter_rust::language()),
3997 ));
3998
3999 let fs = FakeFs::new(cx.background());
4000 fs.insert_tree(
4001 "/dir",
4002 json!({
4003 "a.rs": "const fn a() { A }",
4004 "b.rs": "const y: i32 = crate::a()",
4005 }),
4006 )
4007 .await;
4008
4009 let project = Project::test(fs, cx);
4010 project.update(cx, |project, _| {
4011 Arc::get_mut(&mut project.languages).unwrap().add(language);
4012 });
4013
4014 let (tree, _) = project
4015 .update(cx, |project, cx| {
4016 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
4017 })
4018 .await
4019 .unwrap();
4020 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4021 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4022 .await;
4023
4024 let buffer = project
4025 .update(cx, |project, cx| {
4026 project.open_buffer(
4027 ProjectPath {
4028 worktree_id,
4029 path: Path::new("").into(),
4030 },
4031 cx,
4032 )
4033 })
4034 .await
4035 .unwrap();
4036
4037 let mut fake_server = fake_servers.next().await.unwrap();
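        // Respond to the definition request with a location in a file that is
        // outside of the project's current worktrees.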
4038 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
4039 let params = params.text_document_position_params;
4040 assert_eq!(
4041 params.text_document.uri.to_file_path().unwrap(),
4042 Path::new("/dir/b.rs"),
4043 );
4044 assert_eq!(params.position, lsp::Position::new(0, 22));
4045
4046 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
4047 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
4048 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4049 )))
4050 });
4051
4052 let mut definitions = project
4053 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
4054 .await
4055 .unwrap();
4056
4057 assert_eq!(definitions.len(), 1);
4058 let definition = definitions.pop().unwrap();
4059 cx.update(|cx| {
4060 let target_buffer = definition.buffer.read(cx);
4061 assert_eq!(
4062 target_buffer
4063 .file()
4064 .unwrap()
4065 .as_local()
4066 .unwrap()
4067 .abs_path(cx),
4068 Path::new("/dir/a.rs"),
4069 );
4070 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
4071 assert_eq!(
4072 list_worktrees(&project, cx),
4073 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
4074 );
4075
4076 drop(definition);
4077 });
4078 cx.read(|cx| {
4079 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
4080 });
4081
4082 fn list_worktrees<'a>(
4083 project: &'a ModelHandle<Project>,
4084 cx: &'a AppContext,
4085 ) -> Vec<(&'a Path, bool)> {
4086 project
4087 .read(cx)
4088 .worktrees(cx)
4089 .map(|worktree| {
4090 let worktree = worktree.read(cx);
4091 (
4092 worktree.as_local().unwrap().abs_path().as_ref(),
4093 worktree.is_visible(),
4094 )
4095 })
4096 .collect::<Vec<_>>()
4097 }
4098 }
4099
4100 #[gpui::test]
4101 async fn test_save_file(cx: &mut gpui::TestAppContext) {
4102 let fs = FakeFs::new(cx.background());
4103 fs.insert_tree(
4104 "/dir",
4105 json!({
4106 "file1": "the old contents",
4107 }),
4108 )
4109 .await;
4110
4111 let project = Project::test(fs.clone(), cx);
4112 let worktree_id = project
4113 .update(cx, |p, cx| {
4114 p.find_or_create_local_worktree("/dir", true, cx)
4115 })
4116 .await
4117 .unwrap()
4118 .0
4119 .read_with(cx, |tree, _| tree.id());
4120
4121 let buffer = project
4122 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4123 .await
4124 .unwrap();
4125 buffer
4126 .update(cx, |buffer, cx| {
4127 assert_eq!(buffer.text(), "the old contents");
4128 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4129 buffer.save(cx)
4130 })
4131 .await
4132 .unwrap();
4133
4134 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4135 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4136 }
4137
4138 #[gpui::test]
4139 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4140 let fs = FakeFs::new(cx.background());
4141 fs.insert_tree(
4142 "/dir",
4143 json!({
4144 "file1": "the old contents",
4145 }),
4146 )
4147 .await;
4148
4149 let project = Project::test(fs.clone(), cx);
4150 let worktree_id = project
4151 .update(cx, |p, cx| {
4152 p.find_or_create_local_worktree("/dir/file1", true, cx)
4153 })
4154 .await
4155 .unwrap()
4156 .0
4157 .read_with(cx, |tree, _| tree.id());
4158
4159 let buffer = project
4160 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
4161 .await
4162 .unwrap();
4163 buffer
4164 .update(cx, |buffer, cx| {
4165 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4166 buffer.save(cx)
4167 })
4168 .await
4169 .unwrap();
4170
4171 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4172 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4173 }
4174
4175 #[gpui::test]
4176 async fn test_save_as(cx: &mut gpui::TestAppContext) {
4177 let fs = FakeFs::new(cx.background());
4178 fs.insert_tree("/dir", json!({})).await;
4179
4180 let project = Project::test(fs.clone(), cx);
4181 let (worktree, _) = project
4182 .update(cx, |project, cx| {
4183 project.find_or_create_local_worktree("/dir", true, cx)
4184 })
4185 .await
4186 .unwrap();
4187 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4188
4189 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
4190 buffer.update(cx, |buffer, cx| {
4191 buffer.edit([0..0], "abc", cx);
4192 assert!(buffer.is_dirty());
4193 assert!(!buffer.has_conflict());
4194 });
4195 project
4196 .update(cx, |project, cx| {
4197 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
4198 })
4199 .await
4200 .unwrap();
4201 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
4202 buffer.read_with(cx, |buffer, cx| {
4203 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
4204 assert!(!buffer.is_dirty());
4205 assert!(!buffer.has_conflict());
4206 });
4207
4208 let opened_buffer = project
4209 .update(cx, |project, cx| {
4210 project.open_buffer((worktree_id, "file1"), cx)
4211 })
4212 .await
4213 .unwrap();
4214 assert_eq!(opened_buffer, buffer);
4215 }
4216
4217 #[gpui::test(retries = 5)]
4218 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
4219 let dir = temp_tree(json!({
4220 "a": {
4221 "file1": "",
4222 "file2": "",
4223 "file3": "",
4224 },
4225 "b": {
4226 "c": {
4227 "file4": "",
4228 "file5": "",
4229 }
4230 }
4231 }));
4232
4233 let project = Project::test(Arc::new(RealFs), cx);
4234 let rpc = project.read_with(cx, |p, _| p.client.clone());
4235
4236 let (tree, _) = project
4237 .update(cx, |p, cx| {
4238 p.find_or_create_local_worktree(dir.path(), true, cx)
4239 })
4240 .await
4241 .unwrap();
4242 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4243
4244 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4245 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
4246 async move { buffer.await.unwrap() }
4247 };
4248 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
4249 tree.read_with(cx, |tree, _| {
4250 tree.entry_for_path(path)
                    .unwrap_or_else(|| panic!("no entry for path {}", path))
4252 .id
4253 })
4254 };
4255
4256 let buffer2 = buffer_for_path("a/file2", cx).await;
4257 let buffer3 = buffer_for_path("a/file3", cx).await;
4258 let buffer4 = buffer_for_path("b/c/file4", cx).await;
4259 let buffer5 = buffer_for_path("b/c/file5", cx).await;
4260
4261 let file2_id = id_for_path("a/file2", &cx);
4262 let file3_id = id_for_path("a/file3", &cx);
4263 let file4_id = id_for_path("b/c/file4", &cx);
4264
4265 // Wait for the initial scan.
4266 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4267 .await;
4268
4269 // Create a remote copy of this worktree.
4270 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
4271 let (remote, load_task) = cx.update(|cx| {
4272 Worktree::remote(
4273 1,
4274 1,
4275 initial_snapshot.to_proto(&Default::default(), true),
4276 rpc.clone(),
4277 cx,
4278 )
4279 });
4280 load_task.await;
4281
4282 cx.read(|cx| {
4283 assert!(!buffer2.read(cx).is_dirty());
4284 assert!(!buffer3.read(cx).is_dirty());
4285 assert!(!buffer4.read(cx).is_dirty());
4286 assert!(!buffer5.read(cx).is_dirty());
4287 });
4288
4289 // Rename and delete files and directories.
4290 tree.flush_fs_events(&cx).await;
4291 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
4292 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
4293 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
4294 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
4295 tree.flush_fs_events(&cx).await;
4296
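        // Both the local worktree and the remote copy should end up containing
        // exactly these paths.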
4297 let expected_paths = vec![
4298 "a",
4299 "a/file1",
4300 "a/file2.new",
4301 "b",
4302 "d",
4303 "d/file3",
4304 "d/file4",
4305 ];
4306
4307 cx.read(|app| {
4308 assert_eq!(
4309 tree.read(app)
4310 .paths()
4311 .map(|p| p.to_str().unwrap())
4312 .collect::<Vec<_>>(),
4313 expected_paths
4314 );
4315
4316 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
4317 assert_eq!(id_for_path("d/file3", &cx), file3_id);
4318 assert_eq!(id_for_path("d/file4", &cx), file4_id);
4319
4320 assert_eq!(
4321 buffer2.read(app).file().unwrap().path().as_ref(),
4322 Path::new("a/file2.new")
4323 );
4324 assert_eq!(
4325 buffer3.read(app).file().unwrap().path().as_ref(),
4326 Path::new("d/file3")
4327 );
4328 assert_eq!(
4329 buffer4.read(app).file().unwrap().path().as_ref(),
4330 Path::new("d/file4")
4331 );
4332 assert_eq!(
4333 buffer5.read(app).file().unwrap().path().as_ref(),
4334 Path::new("b/c/file5")
4335 );
4336
4337 assert!(!buffer2.read(app).file().unwrap().is_deleted());
4338 assert!(!buffer3.read(app).file().unwrap().is_deleted());
4339 assert!(!buffer4.read(app).file().unwrap().is_deleted());
4340 assert!(buffer5.read(app).file().unwrap().is_deleted());
4341 });
4342
4343 // Update the remote worktree. Check that it becomes consistent with the
4344 // local worktree.
4345 remote.update(cx, |remote, cx| {
4346 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
4347 &initial_snapshot,
4348 1,
4349 1,
4350 true,
4351 );
4352 remote
4353 .as_remote_mut()
4354 .unwrap()
4355 .snapshot
4356 .apply_remote_update(update_message)
4357 .unwrap();
4358
4359 assert_eq!(
4360 remote
4361 .paths()
4362 .map(|p| p.to_str().unwrap())
4363 .collect::<Vec<_>>(),
4364 expected_paths
4365 );
4366 });
4367 }
4368
4369 #[gpui::test]
4370 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4371 let fs = FakeFs::new(cx.background());
4372 fs.insert_tree(
4373 "/the-dir",
4374 json!({
4375 "a.txt": "a-contents",
4376 "b.txt": "b-contents",
4377 }),
4378 )
4379 .await;
4380
4381 let project = Project::test(fs.clone(), cx);
4382 let worktree_id = project
4383 .update(cx, |p, cx| {
4384 p.find_or_create_local_worktree("/the-dir", true, cx)
4385 })
4386 .await
4387 .unwrap()
4388 .0
4389 .read_with(cx, |tree, _| tree.id());
4390
4391 // Spawn multiple tasks to open paths, repeating some paths.
4392 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4393 (
4394 p.open_buffer((worktree_id, "a.txt"), cx),
4395 p.open_buffer((worktree_id, "b.txt"), cx),
4396 p.open_buffer((worktree_id, "a.txt"), cx),
4397 )
4398 });
4399
4400 let buffer_a_1 = buffer_a_1.await.unwrap();
4401 let buffer_a_2 = buffer_a_2.await.unwrap();
4402 let buffer_b = buffer_b.await.unwrap();
4403 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
4404 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
4405
4406 // There is only one buffer per path.
4407 let buffer_a_id = buffer_a_1.id();
4408 assert_eq!(buffer_a_2.id(), buffer_a_id);
4409
4410 // Open the same path again while it is still open.
4411 drop(buffer_a_1);
4412 let buffer_a_3 = project
4413 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
4414 .await
4415 .unwrap();
4416
4417 // There's still only one buffer per path.
4418 assert_eq!(buffer_a_3.id(), buffer_a_id);
4419 }
4420
4421 #[gpui::test]
4422 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4423 use std::fs;
4424
4425 let dir = temp_tree(json!({
4426 "file1": "abc",
4427 "file2": "def",
4428 "file3": "ghi",
4429 }));
4430
4431 let project = Project::test(Arc::new(RealFs), cx);
4432 let (worktree, _) = project
4433 .update(cx, |p, cx| {
4434 p.find_or_create_local_worktree(dir.path(), true, cx)
4435 })
4436 .await
4437 .unwrap();
4438 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4439
4440 worktree.flush_fs_events(&cx).await;
4441 worktree
4442 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4443 .await;
4444
4445 let buffer1 = project
4446 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4447 .await
4448 .unwrap();
4449 let events = Rc::new(RefCell::new(Vec::new()));
4450
4451 // initially, the buffer isn't dirty.
4452 buffer1.update(cx, |buffer, cx| {
4453 cx.subscribe(&buffer1, {
4454 let events = events.clone();
4455 move |_, _, event, _| events.borrow_mut().push(event.clone())
4456 })
4457 .detach();
4458
4459 assert!(!buffer.is_dirty());
4460 assert!(events.borrow().is_empty());
4461
4462 buffer.edit(vec![1..2], "", cx);
4463 });
4464
4465 // after the first edit, the buffer is dirty, and emits a dirtied event.
4466 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
4468 assert!(buffer.is_dirty());
4469 assert_eq!(
4470 *events.borrow(),
4471 &[language::Event::Edited, language::Event::Dirtied]
4472 );
4473 events.borrow_mut().clear();
4474 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
4475 });
4476
4477 // after saving, the buffer is not dirty, and emits a saved event.
4478 buffer1.update(cx, |buffer, cx| {
4479 assert!(!buffer.is_dirty());
4480 assert_eq!(*events.borrow(), &[language::Event::Saved]);
4481 events.borrow_mut().clear();
4482
4483 buffer.edit(vec![1..1], "B", cx);
4484 buffer.edit(vec![2..2], "D", cx);
4485 });
4486
        // after editing again, the buffer is dirty, and emits another dirtied event.
        buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
4490 assert!(buffer.is_dirty());
4491 assert_eq!(
4492 *events.borrow(),
4493 &[
4494 language::Event::Edited,
4495 language::Event::Dirtied,
4496 language::Event::Edited,
4497 ],
4498 );
4499 events.borrow_mut().clear();
4500
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
4503 buffer.edit([1..3], "", cx);
            assert_eq!(buffer.text(), "ac");
4505 assert!(buffer.is_dirty());
4506 });
4507
4508 assert_eq!(*events.borrow(), &[language::Event::Edited]);
4509
4510 // When a file is deleted, the buffer is considered dirty.
4511 let events = Rc::new(RefCell::new(Vec::new()));
4512 let buffer2 = project
4513 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
4514 .await
4515 .unwrap();
4516 buffer2.update(cx, |_, cx| {
4517 cx.subscribe(&buffer2, {
4518 let events = events.clone();
4519 move |_, _, event, _| events.borrow_mut().push(event.clone())
4520 })
4521 .detach();
4522 });
4523
4524 fs::remove_file(dir.path().join("file2")).unwrap();
4525 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
4526 assert_eq!(
4527 *events.borrow(),
4528 &[language::Event::Dirtied, language::Event::FileHandleChanged]
4529 );
4530
4531 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4532 let events = Rc::new(RefCell::new(Vec::new()));
4533 let buffer3 = project
4534 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
4535 .await
4536 .unwrap();
4537 buffer3.update(cx, |_, cx| {
4538 cx.subscribe(&buffer3, {
4539 let events = events.clone();
4540 move |_, _, event, _| events.borrow_mut().push(event.clone())
4541 })
4542 .detach();
4543 });
4544
4545 worktree.flush_fs_events(&cx).await;
4546 buffer3.update(cx, |buffer, cx| {
4547 buffer.edit(Some(0..0), "x", cx);
4548 });
4549 events.borrow_mut().clear();
4550 fs::remove_file(dir.path().join("file3")).unwrap();
4551 buffer3
4552 .condition(&cx, |_, _| !events.borrow().is_empty())
4553 .await;
4554 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
4555 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
4556 }
4557
4558 #[gpui::test]
4559 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
4560 use std::fs;
4561
4562 let initial_contents = "aaa\nbbbbb\nc\n";
4563 let dir = temp_tree(json!({ "the-file": initial_contents }));
4564
4565 let project = Project::test(Arc::new(RealFs), cx);
4566 let (worktree, _) = project
4567 .update(cx, |p, cx| {
4568 p.find_or_create_local_worktree(dir.path(), true, cx)
4569 })
4570 .await
4571 .unwrap();
4572 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4573
4574 worktree
4575 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4576 .await;
4577
4578 let abs_path = dir.path().join("the-file");
4579 let buffer = project
4580 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
4581 .await
4582 .unwrap();
4583
4584 // TODO
4585 // Add a cursor on each row.
4586 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
4587 // assert!(!buffer.is_dirty());
4588 // buffer.add_selection_set(
4589 // &(0..3)
4590 // .map(|row| Selection {
4591 // id: row as usize,
4592 // start: Point::new(row, 1),
4593 // end: Point::new(row, 1),
4594 // reversed: false,
4595 // goal: SelectionGoal::None,
4596 // })
4597 // .collect::<Vec<_>>(),
4598 // cx,
4599 // )
4600 // });
4601
4602 // Change the file on disk, adding two new lines of text, and removing
4603 // one line.
4604 buffer.read_with(cx, |buffer, _| {
4605 assert!(!buffer.is_dirty());
4606 assert!(!buffer.has_conflict());
4607 });
4608 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
4609 fs::write(&abs_path, new_contents).unwrap();
4610
4611 // Because the buffer was not modified, it is reloaded from disk. Its
4612 // contents are edited according to the diff between the old and new
4613 // file contents.
4614 buffer
4615 .condition(&cx, |buffer, _| buffer.text() == new_contents)
4616 .await;
4617
4618 buffer.update(cx, |buffer, _| {
4619 assert_eq!(buffer.text(), new_contents);
4620 assert!(!buffer.is_dirty());
4621 assert!(!buffer.has_conflict());
4622
4623 // TODO
4624 // let cursor_positions = buffer
4625 // .selection_set(selection_set_id)
4626 // .unwrap()
4627 // .selections::<Point>(&*buffer)
4628 // .map(|selection| {
4629 // assert_eq!(selection.start, selection.end);
4630 // selection.start
4631 // })
4632 // .collect::<Vec<_>>();
4633 // assert_eq!(
4634 // cursor_positions,
4635 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
4636 // );
4637 });
4638
4639 // Modify the buffer
4640 buffer.update(cx, |buffer, cx| {
4641 buffer.edit(vec![0..0], " ", cx);
4642 assert!(buffer.is_dirty());
4643 assert!(!buffer.has_conflict());
4644 });
4645
4646 // Change the file on disk again, adding blank lines to the beginning.
4647 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
4648
4649 // Because the buffer is modified, it doesn't reload from disk, but is
4650 // marked as having a conflict.
4651 buffer
4652 .condition(&cx, |buffer, _| buffer.has_conflict())
4653 .await;
4654 }
4655
4656 #[gpui::test]
4657 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
4658 let fs = FakeFs::new(cx.background());
4659 fs.insert_tree(
4660 "/the-dir",
4661 json!({
4662 "a.rs": "
4663 fn foo(mut v: Vec<usize>) {
4664 for x in &v {
4665 v.push(1);
4666 }
4667 }
4668 "
4669 .unindent(),
4670 }),
4671 )
4672 .await;
4673
4674 let project = Project::test(fs.clone(), cx);
4675 let (worktree, _) = project
4676 .update(cx, |p, cx| {
4677 p.find_or_create_local_worktree("/the-dir", true, cx)
4678 })
4679 .await
4680 .unwrap();
4681 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4682
4683 let buffer = project
4684 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4685 .await
4686 .unwrap();
4687
4688 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
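        // Publish diagnostics whose related-information entries link hints back to
        // their primary errors, so that they are grouped together.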
4689 let message = lsp::PublishDiagnosticsParams {
4690 uri: buffer_uri.clone(),
4691 diagnostics: vec![
4692 lsp::Diagnostic {
4693 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4694 severity: Some(DiagnosticSeverity::WARNING),
4695 message: "error 1".to_string(),
4696 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4697 location: lsp::Location {
4698 uri: buffer_uri.clone(),
4699 range: lsp::Range::new(
4700 lsp::Position::new(1, 8),
4701 lsp::Position::new(1, 9),
4702 ),
4703 },
4704 message: "error 1 hint 1".to_string(),
4705 }]),
4706 ..Default::default()
4707 },
4708 lsp::Diagnostic {
4709 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4710 severity: Some(DiagnosticSeverity::HINT),
4711 message: "error 1 hint 1".to_string(),
4712 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4713 location: lsp::Location {
4714 uri: buffer_uri.clone(),
4715 range: lsp::Range::new(
4716 lsp::Position::new(1, 8),
4717 lsp::Position::new(1, 9),
4718 ),
4719 },
4720 message: "original diagnostic".to_string(),
4721 }]),
4722 ..Default::default()
4723 },
4724 lsp::Diagnostic {
4725 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4726 severity: Some(DiagnosticSeverity::ERROR),
4727 message: "error 2".to_string(),
4728 related_information: Some(vec![
4729 lsp::DiagnosticRelatedInformation {
4730 location: lsp::Location {
4731 uri: buffer_uri.clone(),
4732 range: lsp::Range::new(
4733 lsp::Position::new(1, 13),
4734 lsp::Position::new(1, 15),
4735 ),
4736 },
4737 message: "error 2 hint 1".to_string(),
4738 },
4739 lsp::DiagnosticRelatedInformation {
4740 location: lsp::Location {
4741 uri: buffer_uri.clone(),
4742 range: lsp::Range::new(
4743 lsp::Position::new(1, 13),
4744 lsp::Position::new(1, 15),
4745 ),
4746 },
4747 message: "error 2 hint 2".to_string(),
4748 },
4749 ]),
4750 ..Default::default()
4751 },
4752 lsp::Diagnostic {
4753 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4754 severity: Some(DiagnosticSeverity::HINT),
4755 message: "error 2 hint 1".to_string(),
4756 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4757 location: lsp::Location {
4758 uri: buffer_uri.clone(),
4759 range: lsp::Range::new(
4760 lsp::Position::new(2, 8),
4761 lsp::Position::new(2, 17),
4762 ),
4763 },
4764 message: "original diagnostic".to_string(),
4765 }]),
4766 ..Default::default()
4767 },
4768 lsp::Diagnostic {
4769 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4770 severity: Some(DiagnosticSeverity::HINT),
4771 message: "error 2 hint 2".to_string(),
4772 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4773 location: lsp::Location {
4774 uri: buffer_uri.clone(),
4775 range: lsp::Range::new(
4776 lsp::Position::new(2, 8),
4777 lsp::Position::new(2, 17),
4778 ),
4779 },
4780 message: "original diagnostic".to_string(),
4781 }]),
4782 ..Default::default()
4783 },
4784 ],
4785 version: None,
4786 };
4787
4788 project
4789 .update(cx, |p, cx| {
4790 p.update_diagnostics(message, &Default::default(), cx)
4791 })
4792 .unwrap();
4793 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
4794
4795 assert_eq!(
4796 buffer
4797 .diagnostics_in_range::<_, Point>(0..buffer.len())
4798 .collect::<Vec<_>>(),
4799 &[
4800 DiagnosticEntry {
4801 range: Point::new(1, 8)..Point::new(1, 9),
4802 diagnostic: Diagnostic {
4803 severity: DiagnosticSeverity::WARNING,
4804 message: "error 1".to_string(),
4805 group_id: 0,
4806 is_primary: true,
4807 ..Default::default()
4808 }
4809 },
4810 DiagnosticEntry {
4811 range: Point::new(1, 8)..Point::new(1, 9),
4812 diagnostic: Diagnostic {
4813 severity: DiagnosticSeverity::HINT,
4814 message: "error 1 hint 1".to_string(),
4815 group_id: 0,
4816 is_primary: false,
4817 ..Default::default()
4818 }
4819 },
4820 DiagnosticEntry {
4821 range: Point::new(1, 13)..Point::new(1, 15),
4822 diagnostic: Diagnostic {
4823 severity: DiagnosticSeverity::HINT,
4824 message: "error 2 hint 1".to_string(),
4825 group_id: 1,
4826 is_primary: false,
4827 ..Default::default()
4828 }
4829 },
4830 DiagnosticEntry {
4831 range: Point::new(1, 13)..Point::new(1, 15),
4832 diagnostic: Diagnostic {
4833 severity: DiagnosticSeverity::HINT,
4834 message: "error 2 hint 2".to_string(),
4835 group_id: 1,
4836 is_primary: false,
4837 ..Default::default()
4838 }
4839 },
4840 DiagnosticEntry {
4841 range: Point::new(2, 8)..Point::new(2, 17),
4842 diagnostic: Diagnostic {
4843 severity: DiagnosticSeverity::ERROR,
4844 message: "error 2".to_string(),
4845 group_id: 1,
4846 is_primary: true,
4847 ..Default::default()
4848 }
4849 }
4850 ]
4851 );
4852
4853 assert_eq!(
4854 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4855 &[
4856 DiagnosticEntry {
4857 range: Point::new(1, 8)..Point::new(1, 9),
4858 diagnostic: Diagnostic {
4859 severity: DiagnosticSeverity::WARNING,
4860 message: "error 1".to_string(),
4861 group_id: 0,
4862 is_primary: true,
4863 ..Default::default()
4864 }
4865 },
4866 DiagnosticEntry {
4867 range: Point::new(1, 8)..Point::new(1, 9),
4868 diagnostic: Diagnostic {
4869 severity: DiagnosticSeverity::HINT,
4870 message: "error 1 hint 1".to_string(),
4871 group_id: 0,
4872 is_primary: false,
4873 ..Default::default()
4874 }
4875 },
4876 ]
4877 );
4878 assert_eq!(
4879 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4880 &[
4881 DiagnosticEntry {
4882 range: Point::new(1, 13)..Point::new(1, 15),
4883 diagnostic: Diagnostic {
4884 severity: DiagnosticSeverity::HINT,
4885 message: "error 2 hint 1".to_string(),
4886 group_id: 1,
4887 is_primary: false,
4888 ..Default::default()
4889 }
4890 },
4891 DiagnosticEntry {
4892 range: Point::new(1, 13)..Point::new(1, 15),
4893 diagnostic: Diagnostic {
4894 severity: DiagnosticSeverity::HINT,
4895 message: "error 2 hint 2".to_string(),
4896 group_id: 1,
4897 is_primary: false,
4898 ..Default::default()
4899 }
4900 },
4901 DiagnosticEntry {
4902 range: Point::new(2, 8)..Point::new(2, 17),
4903 diagnostic: Diagnostic {
4904 severity: DiagnosticSeverity::ERROR,
4905 message: "error 2".to_string(),
4906 group_id: 1,
4907 is_primary: true,
4908 ..Default::default()
4909 }
4910 }
4911 ]
4912 );
4913 }
4914
4915 #[gpui::test]
4916 async fn test_rename(cx: &mut gpui::TestAppContext) {
4917 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4918 let language = Arc::new(Language::new(
4919 LanguageConfig {
4920 name: "Rust".into(),
4921 path_suffixes: vec!["rs".to_string()],
4922 language_server: Some(language_server_config),
4923 ..Default::default()
4924 },
4925 Some(tree_sitter_rust::language()),
4926 ));
4927
4928 let fs = FakeFs::new(cx.background());
4929 fs.insert_tree(
4930 "/dir",
4931 json!({
4932 "one.rs": "const ONE: usize = 1;",
4933 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4934 }),
4935 )
4936 .await;
4937
4938 let project = Project::test(fs.clone(), cx);
4939 project.update(cx, |project, _| {
4940 Arc::get_mut(&mut project.languages).unwrap().add(language);
4941 });
4942
4943 let (tree, _) = project
4944 .update(cx, |project, cx| {
4945 project.find_or_create_local_worktree("/dir", true, cx)
4946 })
4947 .await
4948 .unwrap();
4949 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4950 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4951 .await;
4952
4953 let buffer = project
4954 .update(cx, |project, cx| {
4955 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
4956 })
4957 .await
4958 .unwrap();
4959
4960 let mut fake_server = fake_servers.next().await.unwrap();
4961
4962 let response = project.update(cx, |project, cx| {
4963 project.prepare_rename(buffer.clone(), 7, cx)
4964 });
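        // Respond to the prepare-rename request with the range of the symbol
        // under the cursor.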
4965 fake_server
4966 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
4967 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
4968 assert_eq!(params.position, lsp::Position::new(0, 7));
4969 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4970 lsp::Position::new(0, 6),
4971 lsp::Position::new(0, 9),
4972 )))
4973 })
4974 .next()
4975 .await
4976 .unwrap();
4977 let range = response.await.unwrap().unwrap();
4978 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
4979 assert_eq!(range, 6..9);
4980
4981 let response = project.update(cx, |project, cx| {
4982 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
4983 });
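        // Respond to the rename request with edits to both `one.rs` and `two.rs`.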
4984 fake_server
4985 .handle_request::<lsp::request::Rename, _>(|params, _| {
4986 assert_eq!(
4987 params.text_document_position.text_document.uri.as_str(),
4988 "file:///dir/one.rs"
4989 );
4990 assert_eq!(
4991 params.text_document_position.position,
4992 lsp::Position::new(0, 7)
4993 );
4994 assert_eq!(params.new_name, "THREE");
4995 Some(lsp::WorkspaceEdit {
4996 changes: Some(
4997 [
4998 (
4999 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
5000 vec![lsp::TextEdit::new(
5001 lsp::Range::new(
5002 lsp::Position::new(0, 6),
5003 lsp::Position::new(0, 9),
5004 ),
5005 "THREE".to_string(),
5006 )],
5007 ),
5008 (
5009 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
5010 vec![
5011 lsp::TextEdit::new(
5012 lsp::Range::new(
5013 lsp::Position::new(0, 24),
5014 lsp::Position::new(0, 27),
5015 ),
5016 "THREE".to_string(),
5017 ),
5018 lsp::TextEdit::new(
5019 lsp::Range::new(
5020 lsp::Position::new(0, 35),
5021 lsp::Position::new(0, 38),
5022 ),
5023 "THREE".to_string(),
5024 ),
5025 ],
5026 ),
5027 ]
5028 .into_iter()
5029 .collect(),
5030 ),
5031 ..Default::default()
5032 })
5033 })
5034 .next()
5035 .await
5036 .unwrap();
5037 let mut transaction = response.await.unwrap().0;
5038 assert_eq!(transaction.len(), 2);
5039 assert_eq!(
5040 transaction
5041 .remove_entry(&buffer)
5042 .unwrap()
5043 .0
5044 .read_with(cx, |buffer, _| buffer.text()),
5045 "const THREE: usize = 1;"
5046 );
5047 assert_eq!(
5048 transaction
5049 .into_keys()
5050 .next()
5051 .unwrap()
5052 .read_with(cx, |buffer, _| buffer.text()),
5053 "const TWO: usize = one::THREE + one::THREE;"
5054 );
5055 }
5056
5057 #[gpui::test]
5058 async fn test_search(cx: &mut gpui::TestAppContext) {
5059 let fs = FakeFs::new(cx.background());
5060 fs.insert_tree(
5061 "/dir",
5062 json!({
5063 "one.rs": "const ONE: usize = 1;",
5064 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5065 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5066 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5067 }),
5068 )
5069 .await;
5070 let project = Project::test(fs.clone(), cx);
5071 let (tree, _) = project
5072 .update(cx, |project, cx| {
5073 project.find_or_create_local_worktree("/dir", true, cx)
5074 })
5075 .await
5076 .unwrap();
5077 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5078 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5079 .await;
5080
5081 assert_eq!(
5082 search(&project, SearchQuery::text("TWO", false, true), cx)
5083 .await
5084 .unwrap(),
5085 HashMap::from_iter([
5086 ("two.rs".to_string(), vec![6..9]),
5087 ("three.rs".to_string(), vec![37..40])
5088 ])
5089 );
5090
5091 let buffer_4 = project
5092 .update(cx, |project, cx| {
5093 project.open_buffer((worktree_id, "four.rs"), cx)
5094 })
5095 .await
5096 .unwrap();
5097 buffer_4.update(cx, |buffer, cx| {
5098 buffer.edit([20..28, 31..43], "two::TWO", cx);
5099 });
5100
5101 assert_eq!(
5102 search(&project, SearchQuery::text("TWO", false, true), cx)
5103 .await
5104 .unwrap(),
5105 HashMap::from_iter([
5106 ("two.rs".to_string(), vec![6..9]),
5107 ("three.rs".to_string(), vec![37..40]),
5108 ("four.rs".to_string(), vec![25..28, 36..39])
5109 ])
5110 );
5111
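        // Collects search results into a map from file path to matching offset
        // ranges, to simplify the assertions above.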
5112 async fn search(
5113 project: &ModelHandle<Project>,
5114 query: SearchQuery,
5115 cx: &mut gpui::TestAppContext,
5116 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
5117 let results = project
5118 .update(cx, |project, cx| project.search(query, cx))
5119 .await?;
5120
5121 Ok(results
5122 .into_iter()
5123 .map(|(buffer, ranges)| {
5124 buffer.read_with(cx, |buffer, _| {
5125 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
5126 let ranges = ranges
5127 .into_iter()
5128 .map(|range| range.to_offset(buffer))
5129 .collect::<Vec<_>>();
5130 (path, ranges)
5131 })
5132 })
5133 .collect())
5134 }
5135 }
5136}