1pub mod fs;
2mod ignore;
3pub mod worktree;
4
5use anyhow::{anyhow, Result};
6use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
7use clock::ReplicaId;
8use collections::{hash_map, HashMap, HashSet};
9use futures::Future;
10use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
11use gpui::{
12 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
13 WeakModelHandle,
14};
15use language::{
16 point_from_lsp,
17 proto::{deserialize_anchor, serialize_anchor},
18 range_from_lsp, Bias, Buffer, CodeAction, Diagnostic, DiagnosticEntry, File as _, Language,
19 LanguageRegistry, PointUtf16, ToLspPosition, ToOffset, ToPointUtf16,
20};
21use lsp::{DiagnosticSeverity, LanguageServer};
22use postage::{prelude::Stream, watch};
23use smol::block_on;
24use std::{
25 convert::TryInto,
26 ops::Range,
27 path::{Path, PathBuf},
28 sync::{atomic::AtomicBool, Arc},
29 time::Instant,
30};
31use util::{post_inc, ResultExt, TryFutureExt as _};
32
33pub use fs::*;
34pub use worktree::*;
35
/// The root model for a collection of worktrees being edited, either locally
/// (host) or as a remote guest joined over RPC. Owns open buffers, language
/// servers, collaborator state, and the project's server registration.
pub struct Project {
    // Strong handles for local projects; weak handles once released.
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntry>,
    languages: Arc<LanguageRegistry>,
    // One running language server per (worktree, language name) pair.
    language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
    client: Arc<client::Client>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    // Local vs. remote role; see `ProjectClientState`.
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    // RPC message subscriptions; cleared and rebuilt in `set_remote_id`.
    subscriptions: Vec<client::Subscription>,
    // Counts language servers currently running disk-based diagnostics.
    language_servers_with_diagnostics_running: isize,
    // Weak handles keyed by the buffer's remote id; dead entries are pruned
    // lazily in `get_open_buffer`.
    open_buffers: HashMap<usize, WeakModelHandle<Buffer>>,
    // In-flight buffer loads, so concurrent opens of the same path share one
    // load task. Watch yields `None` until the load resolves.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // Buffers this host has handed to each guest peer, keyed by buffer id.
    shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
}
55
/// A handle to a worktree that is either kept alive by the project (strong)
/// or allowed to drop when no longer referenced elsewhere (weak).
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
60
/// Role-specific state: whether this instance is the local/host copy of the
/// project or a guest replica joined over RPC.
enum ProjectClientState {
    /// The project lives on this machine.
    Local {
        /// Whether the project is currently shared with collaborators.
        is_shared: bool,
        /// Publishes the server-assigned project id (or `None` when offline).
        remote_id_tx: watch::Sender<Option<u64>>,
        /// Watch side of `remote_id_tx`; cloned by `next_remote_id`.
        remote_id_rx: watch::Receiver<Option<u64>>,
        /// Background task that re-registers the project with the server on
        /// every connection-status change; spawned in `Project::local`.
        _maintain_remote_id_task: Task<Option<()>>,
    },
    /// The project is hosted by another peer.
    Remote {
        /// True once the host stops sharing; `is_read_only` reports this.
        sharing_has_stopped: bool,
        /// Server-assigned id of the host's project.
        remote_id: u64,
        /// This peer's replica id within the project.
        replica_id: ReplicaId,
    },
}
74
/// A remote peer participating in this project, with the replica id their
/// buffer edits are attributed to.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
81
/// Events emitted by [`Project`] via `cx.emit` for observers (e.g. UI).
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntry>),
    WorktreeRemoved(WorktreeId),
    // The three DiskBasedDiagnostics* events bracket a language server's
    // disk-based diagnostic pass (e.g. driven by LSP progress notifications).
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    /// Diagnostics changed for the given path; emitted from
    /// `update_diagnostic_entries`.
    DiagnosticsUpdated(ProjectPath),
}
91
/// A path to a file, addressed by the worktree containing it plus the path
/// relative to that worktree's root.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}
97
/// Counts of primary diagnostics for a file, broken down by LSP severity.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}
105
/// The result of a go-to-definition request: the buffer containing the
/// definition and the anchored range of the definition within it.
#[derive(Debug)]
pub struct Definition {
    pub target_buffer: ModelHandle<Buffer>,
    pub target_range: Range<language::Anchor>,
}
111
/// A set of buffer transactions produced by one logical project-wide
/// operation, keyed by the buffer each transaction applies to.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
114
115impl DiagnosticSummary {
116 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
117 let mut this = Self {
118 error_count: 0,
119 warning_count: 0,
120 info_count: 0,
121 hint_count: 0,
122 };
123
124 for entry in diagnostics {
125 if entry.diagnostic.is_primary {
126 match entry.diagnostic.severity {
127 DiagnosticSeverity::ERROR => this.error_count += 1,
128 DiagnosticSeverity::WARNING => this.warning_count += 1,
129 DiagnosticSeverity::INFORMATION => this.info_count += 1,
130 DiagnosticSeverity::HINT => this.hint_count += 1,
131 _ => {}
132 }
133 }
134 }
135
136 this
137 }
138
139 pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
140 proto::DiagnosticSummary {
141 path: path.to_string_lossy().to_string(),
142 error_count: self.error_count as u32,
143 warning_count: self.warning_count as u32,
144 info_count: self.info_count as u32,
145 hint_count: self.hint_count as u32,
146 }
147 }
148}
149
/// Identifies a single entry (file or directory) within a worktree by the
/// worktree's id plus the entry's id within that worktree.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ProjectEntry {
    pub worktree_id: WorktreeId,
    pub entry_id: usize,
}
155
156impl Project {
    /// Creates a new local (host-side) project.
    ///
    /// Spawns a background task that watches the client's connection status:
    /// whenever the client becomes connected it registers the project with the
    /// server (obtaining a project id) and registers every current worktree
    /// under that id; on disconnect the remote id is reset to `None`.
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            // Weak spawn: the task stops doing work once the project drops.
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.recv().await {
                            if let Some(this) = this.upgrade(&cx) {
                                let remote_id = if let client::Status::Connected { .. } = status {
                                    let response = rpc.request(proto::RegisterProject {}).await?;
                                    Some(response.project_id)
                                } else {
                                    None
                                };

                                if let Some(project_id) = remote_id {
                                    // Collect registration tasks inside the
                                    // update, then await them outside it.
                                    let mut registrations = Vec::new();
                                    this.update(&mut cx, |this, cx| {
                                        for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                                            registrations.push(worktree.update(
                                                cx,
                                                |worktree, cx| {
                                                    let worktree = worktree.as_local_mut().unwrap();
                                                    worktree.register(project_id, cx)
                                                },
                                            ));
                                        }
                                    });
                                    for registration in registrations {
                                        registration.await?;
                                    }
                                }
                                // Publish the (possibly cleared) id and rebuild
                                // RPC subscriptions; see `set_remote_id`.
                                this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                open_buffers: Default::default(),
                loading_buffers: Default::default(),
                shared_buffers: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
            }
        })
    }
229
    /// Joins an existing project hosted by another peer.
    ///
    /// Authenticates and connects, sends `JoinProject`, builds remote
    /// worktrees from the response, loads the collaborators' user records,
    /// and subscribes to the RPC messages a guest must handle. The returned
    /// model is in the `ProjectClientState::Remote` state.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(&cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let replica_id = response.replica_id as ReplicaId;

        // Build each remote worktree; its load task runs in the background.
        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        // Fetch user records for all collaborators before constructing them.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        Ok(cx.add_model(|cx| {
            let mut this = Self {
                worktrees: Vec::new(),
                open_buffers: Default::default(),
                loading_buffers: Default::default(),
                shared_buffers: Default::default(),
                active_entry: None,
                collaborators,
                languages,
                user_store,
                fs,
                // Guest-side message handlers, scoped to this project id.
                subscriptions: vec![
                    client.subscribe_to_entity(remote_id, cx, Self::handle_unshare_project),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_add_collaborator),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_remove_collaborator),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_share_worktree),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_unregister_worktree),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_update_worktree),
                    client.subscribe_to_entity(
                        remote_id,
                        cx,
                        Self::handle_update_diagnostic_summary,
                    ),
                    client.subscribe_to_entity(
                        remote_id,
                        cx,
                        Self::handle_disk_based_diagnostics_updating,
                    ),
                    client.subscribe_to_entity(
                        remote_id,
                        cx,
                        Self::handle_disk_based_diagnostics_updated,
                    ),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer_file),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_reloaded),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved),
                ],
                client,
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        }))
    }
323
    /// Records the server-assigned project id for a local project and
    /// rebuilds the host-side RPC subscriptions.
    ///
    /// Passing `None` (e.g. on disconnect) clears the id and drops all
    /// subscriptions.
    fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            *remote_id_tx.borrow_mut() = remote_id;
        }

        // Old subscriptions were scoped to the previous id; replace them.
        self.subscriptions.clear();
        if let Some(remote_id) = remote_id {
            let client = &self.client;
            self.subscriptions.extend([
                client.subscribe_to_entity(remote_id, cx, Self::handle_open_buffer),
                client.subscribe_to_entity(remote_id, cx, Self::handle_close_buffer),
                client.subscribe_to_entity(remote_id, cx, Self::handle_add_collaborator),
                client.subscribe_to_entity(remote_id, cx, Self::handle_remove_collaborator),
                client.subscribe_to_entity(remote_id, cx, Self::handle_update_worktree),
                client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer),
                client.subscribe_to_entity(remote_id, cx, Self::handle_save_buffer),
                client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved),
                client.subscribe_to_entity(remote_id, cx, Self::handle_format_buffer),
                client.subscribe_to_entity(remote_id, cx, Self::handle_get_completions),
                client.subscribe_to_entity(
                    remote_id,
                    cx,
                    Self::handle_apply_additional_edits_for_completion,
                ),
                client.subscribe_to_entity(remote_id, cx, Self::handle_get_code_actions),
                client.subscribe_to_entity(remote_id, cx, Self::handle_apply_code_action),
                client.subscribe_to_entity(remote_id, cx, Self::handle_get_definition),
            ]);
        }
    }
354
355 pub fn remote_id(&self) -> Option<u64> {
356 match &self.client_state {
357 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
358 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
359 }
360 }
361
362 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
363 let mut id = None;
364 let mut watch = None;
365 match &self.client_state {
366 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
367 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
368 }
369
370 async move {
371 if let Some(id) = id {
372 return id;
373 }
374 let mut watch = watch.unwrap();
375 loop {
376 let id = *watch.borrow();
377 if let Some(id) = id {
378 return id;
379 }
380 watch.recv().await;
381 }
382 }
383 }
384
385 pub fn replica_id(&self) -> ReplicaId {
386 match &self.client_state {
387 ProjectClientState::Local { .. } => 0,
388 ProjectClientState::Remote { replica_id, .. } => *replica_id,
389 }
390 }
391
    /// All remote collaborators currently in this project, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
395
    /// Iterates over all live worktrees, silently skipping weak handles whose
    /// worktree has already been dropped.
    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }
404
405 pub fn worktree_for_id(
406 &self,
407 id: WorktreeId,
408 cx: &AppContext,
409 ) -> Option<ModelHandle<Worktree>> {
410 self.worktrees(cx)
411 .find(|worktree| worktree.read(cx).id() == id)
412 }
413
    /// Starts sharing this local project with collaborators.
    ///
    /// Sets the `is_shared` flag, sends `ShareProject` to the server, then
    /// shares every local worktree under the project id. Fails for remote
    /// projects or when the project has not been assigned an id yet.
    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, _| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;
                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;
            // Kick off all worktree shares inside the update, then await
            // them outside it.
            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
450
451 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
452 let rpc = self.client.clone();
453 cx.spawn(|this, mut cx| async move {
454 let project_id = this.update(&mut cx, |this, _| {
455 if let ProjectClientState::Local {
456 is_shared,
457 remote_id_rx,
458 ..
459 } = &mut this.client_state
460 {
461 *is_shared = false;
462 remote_id_rx
463 .borrow()
464 .ok_or_else(|| anyhow!("no project id"))
465 } else {
466 Err(anyhow!("can't share a remote project"))
467 }
468 })?;
469
470 rpc.send(proto::UnshareProject { project_id })?;
471 this.update(&mut cx, |this, cx| {
472 this.collaborators.clear();
473 this.shared_buffers.clear();
474 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
475 worktree.update(cx, |worktree, _| {
476 worktree.as_local_mut().unwrap().unshare();
477 });
478 }
479 cx.notify()
480 });
481 Ok(())
482 })
483 }
484
485 pub fn is_read_only(&self) -> bool {
486 match &self.client_state {
487 ProjectClientState::Local { .. } => false,
488 ProjectClientState::Remote {
489 sharing_has_stopped,
490 ..
491 } => *sharing_has_stopped,
492 }
493 }
494
495 pub fn is_local(&self) -> bool {
496 match &self.client_state {
497 ProjectClientState::Local { .. } => true,
498 ProjectClientState::Remote { .. } => false,
499 }
500 }
501
    /// Opens the buffer at `path`, reusing an already-open buffer or an
    /// in-flight load when possible.
    ///
    /// Exactly one load task runs per path: concurrent callers share the
    /// result through a watch channel stored in `loading_buffers`. Local
    /// worktrees load from disk; remote worktrees request the buffer from
    /// the host. Errors if the path's worktree doesn't exist.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    // Publish the result to all waiters; errors are wrapped in
                    // `Arc` so they can be cloned into each waiter.
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait until the watch yields a result (it holds `None` while the
        // load is still in flight).
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.recv().await;
            }
        })
    }
562
    /// Loads a buffer from disk via the local worktree, then registers it
    /// with the project (which also assigns a language / language server).
    ///
    /// Holds only a weak worktree handle across the await, so it errors if
    /// the worktree is removed while the buffer is loading.
    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        let worktree = worktree.downgrade();
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            let worktree = worktree
                .upgrade(&cx)
                .ok_or_else(|| anyhow!("worktree was removed"))?;
            this.update(&mut cx, |this, cx| {
                this.register_buffer(&buffer, Some(&worktree), cx)
            })?;
            Ok(buffer)
        })
    }
585
    /// Requests a buffer from the host over RPC and deserializes the
    /// response into a local replica.
    ///
    /// NOTE(review): `self.remote_id().unwrap()` assumes this is only called
    /// for projects that already have a remote id (as is the case for a
    /// joined remote project) — confirm callers uphold this.
    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBuffer {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| {
                this.deserialize_remote_buffer(buffer, cx)
            })
        })
    }
611
    /// Opens a buffer for a file referenced by a language server (e.g. a
    /// go-to-definition target), given its LSP URI.
    ///
    /// If the path lies inside an existing local worktree, the buffer is
    /// opened there. Otherwise a new (weak) worktree is created for the file
    /// and the given language server is recorded for that worktree so the
    /// buffer can keep using it.
    fn open_local_buffer_from_lsp_path(
        &mut self,
        abs_path: lsp::Url,
        lang_name: String,
        lang_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, true, cx)
                    })
                    .await?;
                // Reuse the existing server for this new single-file worktree
                // instead of spawning another one.
                this.update(&mut cx, |this, cx| {
                    this.language_servers
                        .insert((worktree.read(cx).id(), lang_name), lang_server);
                });
                // The file is the worktree root, so its relative path is empty.
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
648
    /// Saves `buffer` to a new absolute path ("save as").
    ///
    /// Finds or creates a local worktree containing `abs_path`, asks it to
    /// save the buffer under the worktree-relative path, then re-runs
    /// language detection on the buffer since its file (and extension) may
    /// have changed.
    pub fn save_buffer_as(
        &self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
            });
            Ok(())
        })
    }
672
673 #[cfg(any(test, feature = "test-support"))]
674 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
675 let path = path.into();
676 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
677 self.open_buffers.iter().any(|(_, buffer)| {
678 if let Some(buffer) = buffer.upgrade(cx) {
679 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
680 if file.worktree == worktree && file.path() == &path.path {
681 return true;
682 }
683 }
684 }
685 false
686 })
687 } else {
688 false
689 }
690 }
691
    /// Returns the already-open buffer for `path`, if any.
    ///
    /// As a side effect, prunes `open_buffers` of entries whose weak handles
    /// can no longer be upgraded (the buffer was dropped).
    fn get_open_buffer(
        &mut self,
        path: &ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Option<ModelHandle<Buffer>> {
        let mut result = None;
        let worktree = self.worktree_for_id(path.worktree_id, cx)?;
        self.open_buffers.retain(|_, buffer| {
            if let Some(buffer) = buffer.upgrade(cx) {
                if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                    if file.worktree == worktree && file.path() == &path.path {
                        result = Some(buffer);
                    }
                }
                // Keep live buffers regardless of whether they matched.
                true
            } else {
                // Drop entries for buffers that no longer exist.
                false
            }
        });
        result
    }
713
714 fn register_buffer(
715 &mut self,
716 buffer: &ModelHandle<Buffer>,
717 worktree: Option<&ModelHandle<Worktree>>,
718 cx: &mut ModelContext<Self>,
719 ) -> Result<()> {
720 if self
721 .open_buffers
722 .insert(buffer.read(cx).remote_id() as usize, buffer.downgrade())
723 .is_some()
724 {
725 return Err(anyhow!("registered the same buffer twice"));
726 }
727 self.assign_language_to_buffer(&buffer, worktree, cx);
728 Ok(())
729 }
730
    /// Detects the buffer's language from its full path and wires the buffer
    /// up accordingly.
    ///
    /// If a language is found, it is set on the buffer; for local worktrees a
    /// language server is started (or reused) per (worktree, language) pair
    /// and attached to the buffer. Finally, any diagnostics the local
    /// worktree already stored for this path are applied to the buffer.
    ///
    /// The `Option<()>` return is only used for early exit via `?` when the
    /// buffer has no file; callers ignore the value.
    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        let (path, full_path) = {
            let file = buffer.read(cx).file()?;
            (file.path().clone(), file.full_path(cx))
        };

        // If the buffer has a language, set it and start/assign the language server
        if let Some(language) = self.languages.select_language(&full_path) {
            buffer.update(cx, |buffer, cx| {
                buffer.set_language(Some(language.clone()), cx);
            });

            // For local worktrees, start a language server if needed.
            // Also assign the language server and any previously stored diagnostics to the buffer.
            if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
                let worktree_id = local_worktree.id();
                let worktree_abs_path = local_worktree.abs_path().clone();

                // Reuse a running server for this (worktree, language) pair,
                // or start one; `start_language_server` may return `None`.
                let language_server = match self
                    .language_servers
                    .entry((worktree_id, language.name().to_string()))
                {
                    hash_map::Entry::Occupied(e) => Some(e.get().clone()),
                    hash_map::Entry::Vacant(e) => Self::start_language_server(
                        self.client.clone(),
                        language.clone(),
                        &worktree_abs_path,
                        cx,
                    )
                    .map(|server| e.insert(server).clone()),
                };

                buffer.update(cx, |buffer, cx| {
                    buffer.set_language_server(language_server, cx);
                });
            }
        }

        // Apply stored diagnostics even when no language was detected.
        if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
            if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
                buffer.update(cx, |buffer, cx| {
                    buffer.update_diagnostics(diagnostics, None, cx).log_err();
                });
            }
        }

        None
    }
784
    /// Starts a language server for `language` rooted at `worktree_path` and
    /// wires up its diagnostic notifications.
    ///
    /// Returns `None` if the server fails to start. LSP notifications are
    /// funneled through an internal channel into a single processing task,
    /// which updates project diagnostics and forwards start/finish events to
    /// the server when the project is shared.
    fn start_language_server(
        rpc: Arc<Client>,
        language: Arc<Language>,
        worktree_path: &Path,
        cx: &mut ModelContext<Self>,
    ) -> Option<Arc<LanguageServer>> {
        // Internal events produced by the two notification handlers below and
        // consumed by the processing task.
        enum LspEvent {
            DiagnosticsStart,
            DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
            DiagnosticsFinish,
        }

        let language_server = language
            .start_server(worktree_path, cx)
            .log_err()
            .flatten()?;
        let disk_based_sources = language
            .disk_based_diagnostic_sources()
            .cloned()
            .unwrap_or_default();
        let disk_based_diagnostics_progress_token =
            language.disk_based_diagnostics_progress_token().cloned();
        let has_disk_based_diagnostic_progress_token =
            disk_based_diagnostics_progress_token.is_some();
        let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();

        // Listen for `PublishDiagnostics` notifications.
        language_server
            .on_notification::<lsp::notification::PublishDiagnostics, _>({
                let diagnostics_tx = diagnostics_tx.clone();
                move |params| {
                    // Without a progress token there is no other signal for
                    // when a diagnostic pass starts/ends, so synthesize
                    // start/finish around each update.
                    if !has_disk_based_diagnostic_progress_token {
                        block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                    }
                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params))).ok();
                    if !has_disk_based_diagnostic_progress_token {
                        block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                    }
                }
            })
            .detach();

        // Listen for `Progress` notifications. Send an event when the language server
        // transitions between running jobs and not running any jobs.
        let mut running_jobs_for_this_server: i32 = 0;
        language_server
            .on_notification::<lsp::notification::Progress, _>(move |params| {
                let token = match params.token {
                    lsp::NumberOrString::Number(_) => None,
                    lsp::NumberOrString::String(token) => Some(token),
                };

                // Only react to the progress token this language declared for
                // its disk-based diagnostics.
                if token == disk_based_diagnostics_progress_token {
                    match params.value {
                        lsp::ProgressParamsValue::WorkDone(progress) => match progress {
                            lsp::WorkDoneProgress::Begin(_) => {
                                running_jobs_for_this_server += 1;
                                if running_jobs_for_this_server == 1 {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                                }
                            }
                            lsp::WorkDoneProgress::End(_) => {
                                running_jobs_for_this_server -= 1;
                                if running_jobs_for_this_server == 0 {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                                }
                            }
                            _ => {}
                        },
                    }
                }
            })
            .detach();

        // Process all the LSP events.
        cx.spawn_weak(|this, mut cx| async move {
            while let Ok(message) = diagnostics_rx.recv().await {
                // Stop processing once the project has been dropped.
                let this = cx.read(|cx| this.upgrade(cx))?;
                match message {
                    LspEvent::DiagnosticsStart => {
                        this.update(&mut cx, |this, cx| {
                            this.disk_based_diagnostics_started(cx);
                            // Forward to collaborators when shared.
                            if let Some(project_id) = this.remote_id() {
                                rpc.send(proto::DiskBasedDiagnosticsUpdating { project_id })
                                    .log_err();
                            }
                        });
                    }
                    LspEvent::DiagnosticsUpdate(mut params) => {
                        // Give the language a chance to post-process (e.g.
                        // rewrite) the raw LSP diagnostics first.
                        language.process_diagnostics(&mut params);
                        this.update(&mut cx, |this, cx| {
                            this.update_diagnostics(params, &disk_based_sources, cx)
                                .log_err();
                        });
                    }
                    LspEvent::DiagnosticsFinish => {
                        this.update(&mut cx, |this, cx| {
                            this.disk_based_diagnostics_finished(cx);
                            if let Some(project_id) = this.remote_id() {
                                rpc.send(proto::DiskBasedDiagnosticsUpdated { project_id })
                                    .log_err();
                            }
                        });
                    }
                }
            }
            Some(())
        })
        .detach();

        Some(language_server)
    }
897
898 pub fn update_diagnostics(
899 &mut self,
900 params: lsp::PublishDiagnosticsParams,
901 disk_based_sources: &HashSet<String>,
902 cx: &mut ModelContext<Self>,
903 ) -> Result<()> {
904 let abs_path = params
905 .uri
906 .to_file_path()
907 .map_err(|_| anyhow!("URI is not a file"))?;
908 let mut next_group_id = 0;
909 let mut diagnostics = Vec::default();
910 let mut primary_diagnostic_group_ids = HashMap::default();
911 let mut sources_by_group_id = HashMap::default();
912 let mut supporting_diagnostic_severities = HashMap::default();
913 for diagnostic in ¶ms.diagnostics {
914 let source = diagnostic.source.as_ref();
915 let code = diagnostic.code.as_ref().map(|code| match code {
916 lsp::NumberOrString::Number(code) => code.to_string(),
917 lsp::NumberOrString::String(code) => code.clone(),
918 });
919 let range = range_from_lsp(diagnostic.range);
920 let is_supporting = diagnostic
921 .related_information
922 .as_ref()
923 .map_or(false, |infos| {
924 infos.iter().any(|info| {
925 primary_diagnostic_group_ids.contains_key(&(
926 source,
927 code.clone(),
928 range_from_lsp(info.location.range),
929 ))
930 })
931 });
932
933 if is_supporting {
934 if let Some(severity) = diagnostic.severity {
935 supporting_diagnostic_severities
936 .insert((source, code.clone(), range), severity);
937 }
938 } else {
939 let group_id = post_inc(&mut next_group_id);
940 let is_disk_based =
941 source.map_or(false, |source| disk_based_sources.contains(source));
942
943 sources_by_group_id.insert(group_id, source);
944 primary_diagnostic_group_ids
945 .insert((source, code.clone(), range.clone()), group_id);
946
947 diagnostics.push(DiagnosticEntry {
948 range,
949 diagnostic: Diagnostic {
950 code: code.clone(),
951 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
952 message: diagnostic.message.clone(),
953 group_id,
954 is_primary: true,
955 is_valid: true,
956 is_disk_based,
957 },
958 });
959 if let Some(infos) = &diagnostic.related_information {
960 for info in infos {
961 if info.location.uri == params.uri && !info.message.is_empty() {
962 let range = range_from_lsp(info.location.range);
963 diagnostics.push(DiagnosticEntry {
964 range,
965 diagnostic: Diagnostic {
966 code: code.clone(),
967 severity: DiagnosticSeverity::INFORMATION,
968 message: info.message.clone(),
969 group_id,
970 is_primary: false,
971 is_valid: true,
972 is_disk_based,
973 },
974 });
975 }
976 }
977 }
978 }
979 }
980
981 for entry in &mut diagnostics {
982 let diagnostic = &mut entry.diagnostic;
983 if !diagnostic.is_primary {
984 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
985 if let Some(&severity) = supporting_diagnostic_severities.get(&(
986 source,
987 diagnostic.code.clone(),
988 entry.range.clone(),
989 )) {
990 diagnostic.severity = severity;
991 }
992 }
993 }
994
995 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
996 Ok(())
997 }
998
    /// Stores `diagnostics` for the file at `abs_path`: applies them to the
    /// matching open buffer (if any), persists them on the owning local
    /// worktree, and emits `Event::DiagnosticsUpdated`.
    ///
    /// Errors if no local worktree contains `abs_path` or the worktree isn't
    /// local.
    pub fn update_diagnostic_entries(
        &mut self,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };

        // Update at most one open buffer whose file path matches.
        // NOTE(review): the match compares only the relative path, not the
        // worktree — two open buffers at the same relative path in different
        // worktrees could pick the wrong one; confirm intended.
        for buffer in self.open_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| *file.path() == project_path.path)
                {
                    buffer.update(cx, |buffer, cx| {
                        buffer.update_diagnostics(diagnostics.clone(), version, cx)
                    })?;
                    break;
                }
            }
        }
        // Persist on the worktree so buffers opened later can pick them up
        // (see `assign_language_to_buffer`).
        worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(project_path.path.clone(), diagnostics, cx)
        })?;
        cx.emit(Event::DiagnosticsUpdated(project_path));
        Ok(())
    }
1037
    /// Resolves the definition(s) of the symbol at `position` in the given buffer.
    ///
    /// For buffers backed by a local worktree, this sends a
    /// `textDocument/definition` request to the buffer's language server and
    /// opens a local buffer for each returned location. For remote projects it
    /// forwards a `GetDefinition` RPC request to the host and deserializes the
    /// buffers contained in the response.
    ///
    /// Returns a ready-errored task when the buffer has no worktree, no
    /// language, or no running language server, or when the project is remote
    /// but has no remote id.
    pub fn definition<T: ToOffset>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Definition>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Err(anyhow!("buffer does not belong to any worktree")));
        };

        if worktree.read(cx).as_local().is_some() {
            // LSP positions are UTF-16 based; convert the buffer offset first.
            let point = source_buffer.offset_to_point_utf16(position.to_offset(source_buffer));
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_name;
            let lang_server;
            if let Some(lang) = source_buffer.language() {
                lang_name = lang.name().to_string();
                // Language servers are keyed by (worktree, language name).
                if let Some(server) = self
                    .language_servers
                    .get(&(worktree.read(cx).id(), lang_name.clone()))
                {
                    lang_server = server.clone();
                } else {
                    return Task::ready(Err(anyhow!("buffer does not have a language server")));
                };
            } else {
                return Task::ready(Err(anyhow!("buffer does not have a language")));
            }

            cx.spawn(|this, mut cx| async move {
                let response = lang_server
                    .request::<lsp::request::GotoDefinition>(lsp::GotoDefinitionParams {
                        text_document_position_params: lsp::TextDocumentPositionParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                            ),
                            position: lsp::Position::new(point.row, point.column),
                        },
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await?;

                let mut definitions = Vec::new();
                if let Some(response) = response {
                    // Normalize the three response shapes into (uri, range) pairs.
                    let mut unresolved_locations = Vec::new();
                    match response {
                        lsp::GotoDefinitionResponse::Scalar(loc) => {
                            unresolved_locations.push((loc.uri, loc.range));
                        }
                        lsp::GotoDefinitionResponse::Array(locs) => {
                            unresolved_locations.extend(locs.into_iter().map(|l| (l.uri, l.range)));
                        }
                        lsp::GotoDefinitionResponse::Link(links) => {
                            unresolved_locations.extend(
                                links
                                    .into_iter()
                                    .map(|l| (l.target_uri, l.target_selection_range)),
                            );
                        }
                    }

                    // Open a buffer for each target and anchor the range inside it.
                    for (target_uri, target_range) in unresolved_locations {
                        let target_buffer_handle = this
                            .update(&mut cx, |this, cx| {
                                this.open_local_buffer_from_lsp_path(
                                    target_uri,
                                    lang_name.clone(),
                                    lang_server.clone(),
                                    cx,
                                )
                            })
                            .await?;

                        cx.read(|cx| {
                            let target_buffer = target_buffer_handle.read(cx);
                            // Clip in case the server's range is stale relative to the buffer.
                            let target_start = target_buffer
                                .clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
                            let target_end = target_buffer
                                .clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
                            definitions.push(Definition {
                                target_buffer: target_buffer_handle,
                                target_range: target_buffer.anchor_after(target_start)
                                    ..target_buffer.anchor_before(target_end),
                            });
                        });
                    }
                }

                Ok(definitions)
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: ask the host to compute the definitions for us.
            let client = self.client.clone();
            let request = proto::GetDefinition {
                project_id,
                buffer_id: source_buffer.remote_id(),
                position: Some(serialize_anchor(&source_buffer.anchor_before(position))),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client.request(request).await?;
                this.update(&mut cx, |this, cx| {
                    let mut definitions = Vec::new();
                    for definition in response.definitions {
                        let target_buffer = this.deserialize_remote_buffer(
                            definition.buffer.ok_or_else(|| anyhow!("missing buffer"))?,
                            cx,
                        )?;
                        let target_start = definition
                            .target_start
                            .and_then(deserialize_anchor)
                            .ok_or_else(|| anyhow!("missing target start"))?;
                        let target_end = definition
                            .target_end
                            .and_then(deserialize_anchor)
                            .ok_or_else(|| anyhow!("missing target end"))?;
                        definitions.push(Definition {
                            target_buffer,
                            target_range: target_start..target_end,
                        })
                    }

                    Ok(definitions)
                })
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
1173
1174 pub fn apply_code_action(
1175 &self,
1176 buffer_handle: ModelHandle<Buffer>,
1177 mut action: CodeAction<language::Anchor>,
1178 push_to_history: bool,
1179 cx: &mut ModelContext<Self>,
1180 ) -> Task<Result<ProjectTransaction>> {
1181 if self.is_local() {
1182 let buffer = buffer_handle.read(cx);
1183 let lang_name = if let Some(lang) = buffer.language() {
1184 lang.name().to_string()
1185 } else {
1186 return Task::ready(Ok(Default::default()));
1187 };
1188 let lang_server = if let Some(language_server) = buffer.language_server() {
1189 language_server.clone()
1190 } else {
1191 return Task::ready(Ok(Default::default()));
1192 };
1193 let position = action.position.to_point_utf16(buffer).to_lsp_position();
1194 let fs = self.fs.clone();
1195
1196 cx.spawn(|this, mut cx| async move {
1197 if let Some(range) = action
1198 .lsp_action
1199 .data
1200 .as_mut()
1201 .and_then(|d| d.get_mut("codeActionParams"))
1202 .and_then(|d| d.get_mut("range"))
1203 {
1204 *range = serde_json::to_value(&lsp::Range::new(position, position)).unwrap();
1205 action.lsp_action = lang_server
1206 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1207 .await?;
1208 } else {
1209 let actions = buffer_handle
1210 .update(&mut cx, |buffer, cx| {
1211 buffer.code_actions(action.position.clone(), cx)
1212 })
1213 .await?;
1214 action.lsp_action = actions
1215 .into_iter()
1216 .find(|a| a.lsp_action.title == action.lsp_action.title)
1217 .ok_or_else(|| anyhow!("code action is outdated"))?
1218 .lsp_action;
1219 }
1220
1221 let mut operations = Vec::new();
1222 if let Some(edit) = action.lsp_action.edit {
1223 if let Some(document_changes) = edit.document_changes {
1224 match document_changes {
1225 lsp::DocumentChanges::Edits(edits) => operations
1226 .extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit)),
1227 lsp::DocumentChanges::Operations(ops) => operations = ops,
1228 }
1229 } else if let Some(changes) = edit.changes {
1230 operations.extend(changes.into_iter().map(|(uri, edits)| {
1231 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1232 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1233 uri,
1234 version: None,
1235 },
1236 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1237 })
1238 }));
1239 }
1240 }
1241
1242 let mut project_transaction = ProjectTransaction::default();
1243 for operation in operations {
1244 match operation {
1245 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1246 let abs_path = op
1247 .uri
1248 .to_file_path()
1249 .map_err(|_| anyhow!("can't convert URI to path"))?;
1250
1251 if let Some(parent_path) = abs_path.parent() {
1252 fs.create_dir(parent_path).await?;
1253 }
1254 if abs_path.ends_with("/") {
1255 fs.create_dir(&abs_path).await?;
1256 } else {
1257 fs.create_file(
1258 &abs_path,
1259 op.options.map(Into::into).unwrap_or_default(),
1260 )
1261 .await?;
1262 }
1263 }
1264 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1265 let source_abs_path = op
1266 .old_uri
1267 .to_file_path()
1268 .map_err(|_| anyhow!("can't convert URI to path"))?;
1269 let target_abs_path = op
1270 .new_uri
1271 .to_file_path()
1272 .map_err(|_| anyhow!("can't convert URI to path"))?;
1273 fs.rename(
1274 &source_abs_path,
1275 &target_abs_path,
1276 op.options.map(Into::into).unwrap_or_default(),
1277 )
1278 .await?;
1279 }
1280 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1281 let abs_path = op
1282 .uri
1283 .to_file_path()
1284 .map_err(|_| anyhow!("can't convert URI to path"))?;
1285 let options = op.options.map(Into::into).unwrap_or_default();
1286 if abs_path.ends_with("/") {
1287 fs.remove_dir(&abs_path, options).await?;
1288 } else {
1289 fs.remove_file(&abs_path, options).await?;
1290 }
1291 }
1292 lsp::DocumentChangeOperation::Edit(op) => {
1293 let buffer_to_edit = this
1294 .update(&mut cx, |this, cx| {
1295 this.open_local_buffer_from_lsp_path(
1296 op.text_document.uri,
1297 lang_name.clone(),
1298 lang_server.clone(),
1299 cx,
1300 )
1301 })
1302 .await?;
1303 let transaction = buffer_to_edit.update(&mut cx, |buffer, cx| {
1304 let edits = op.edits.into_iter().map(|edit| match edit {
1305 lsp::OneOf::Left(edit) => edit,
1306 lsp::OneOf::Right(edit) => edit.text_edit,
1307 });
1308
1309 buffer.finalize_last_transaction();
1310 buffer.start_transaction();
1311 buffer
1312 .apply_lsp_edits(edits, op.text_document.version, cx)
1313 .log_err();
1314 let transaction = if buffer.end_transaction(cx).is_some() {
1315 let transaction =
1316 buffer.finalize_last_transaction().unwrap().clone();
1317 if !push_to_history {
1318 buffer.forget_transaction(transaction.id);
1319 }
1320 Some(transaction)
1321 } else {
1322 None
1323 };
1324
1325 transaction
1326 });
1327 if let Some(transaction) = transaction {
1328 project_transaction.0.insert(buffer_to_edit, transaction);
1329 }
1330 }
1331 }
1332 }
1333
1334 Ok(project_transaction)
1335 })
1336 } else if let Some(project_id) = self.remote_id() {
1337 let client = self.client.clone();
1338 let request = proto::ApplyCodeAction {
1339 project_id,
1340 buffer_id: buffer_handle.read(cx).remote_id(),
1341 action: Some(language::proto::serialize_code_action(&action)),
1342 };
1343 cx.spawn(|this, mut cx| async move {
1344 let response = client
1345 .request(request)
1346 .await?
1347 .transaction
1348 .ok_or_else(|| anyhow!("missing transaction"))?;
1349 let mut project_transaction = ProjectTransaction::default();
1350 for (buffer, transaction) in response.buffers.into_iter().zip(response.transactions)
1351 {
1352 let buffer = this.update(&mut cx, |this, cx| {
1353 this.deserialize_remote_buffer(buffer, cx)
1354 })?;
1355 let transaction = language::proto::deserialize_transaction(transaction)?;
1356
1357 buffer
1358 .update(&mut cx, |buffer, _| {
1359 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1360 })
1361 .await;
1362
1363 if push_to_history {
1364 buffer.update(&mut cx, |buffer, _| {
1365 buffer.push_transaction(transaction.clone(), Instant::now());
1366 });
1367 }
1368
1369 project_transaction.0.insert(buffer, transaction);
1370 }
1371 Ok(project_transaction)
1372 })
1373 } else {
1374 Task::ready(Err(anyhow!("project does not have a remote id")))
1375 }
1376 }
1377
1378 pub fn find_or_create_local_worktree(
1379 &self,
1380 abs_path: impl AsRef<Path>,
1381 weak: bool,
1382 cx: &mut ModelContext<Self>,
1383 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1384 let abs_path = abs_path.as_ref();
1385 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1386 Task::ready(Ok((tree.clone(), relative_path.into())))
1387 } else {
1388 let worktree = self.create_local_worktree(abs_path, weak, cx);
1389 cx.foreground()
1390 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1391 }
1392 }
1393
1394 fn find_local_worktree(
1395 &self,
1396 abs_path: &Path,
1397 cx: &AppContext,
1398 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1399 for tree in self.worktrees(cx) {
1400 if let Some(relative_path) = tree
1401 .read(cx)
1402 .as_local()
1403 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1404 {
1405 return Some((tree.clone(), relative_path.into()));
1406 }
1407 }
1408 None
1409 }
1410
1411 pub fn is_shared(&self) -> bool {
1412 match &self.client_state {
1413 ProjectClientState::Local { is_shared, .. } => *is_shared,
1414 ProjectClientState::Remote { .. } => false,
1415 }
1416 }
1417
    /// Creates a new local worktree rooted at `abs_path` and adds it to this
    /// project. If the project has a remote id, the worktree is registered
    /// with the server, and additionally shared when the project itself is
    /// currently shared.
    fn create_local_worktree(
        &self,
        abs_path: impl AsRef<Path>,
        weak: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let path = Arc::from(abs_path.as_ref());
        cx.spawn(|project, mut cx| async move {
            let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;

            // Add the worktree first, then capture the project's current
            // remote/shared status in a single update.
            let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
                project.add_worktree(&worktree, cx);
                (project.remote_id(), project.is_shared())
            });

            if let Some(project_id) = remote_project_id {
                // Registration must complete before sharing can begin.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_local_mut().unwrap().register(project_id, cx)
                    })
                    .await?;
                if is_shared {
                    worktree
                        .update(&mut cx, |worktree, cx| {
                            worktree.as_local_mut().unwrap().share(project_id, cx)
                        })
                        .await?;
                }
            }

            Ok(worktree)
        })
    }
1453
1454 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1455 self.worktrees.retain(|worktree| {
1456 worktree
1457 .upgrade(cx)
1458 .map_or(false, |w| w.read(cx).id() != id)
1459 });
1460 cx.notify();
1461 }
1462
    /// Registers a worktree with this project: observes it for re-rendering,
    /// subscribes to local worktree events to keep open buffers' files in
    /// sync, and stores either a weak or strong handle depending on whether
    /// the worktree is a weak local one.
    fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
        cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
        if worktree.read(cx).is_local() {
            // Local filesystem events may rename/delete files backing open
            // buffers; rebind them when the worktree changes.
            cx.subscribe(&worktree, |this, worktree, _, cx| {
                this.update_local_worktree_buffers(worktree, cx);
            })
            .detach();
        }

        let push_weak_handle = {
            let worktree = worktree.read(cx);
            worktree.is_local() && worktree.is_weak()
        };
        if push_weak_handle {
            // Weak worktrees must not be kept alive by the project; prune
            // them from the list once they are released.
            cx.observe_release(&worktree, |this, cx| {
                this.worktrees
                    .retain(|worktree| worktree.upgrade(cx).is_some());
                cx.notify();
            })
            .detach();
            self.worktrees
                .push(WorktreeHandle::Weak(worktree.downgrade()));
        } else {
            self.worktrees
                .push(WorktreeHandle::Strong(worktree.clone()));
        }
        cx.notify();
    }
1491
    /// Rebinds the `File` of every open buffer that belongs to the given
    /// local worktree after the worktree's snapshot has changed (e.g. files
    /// were renamed, moved, or deleted on disk), notifying remote peers of
    /// the new file metadata. Dead buffer handles are removed as a side
    /// effect.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        let mut buffers_to_delete = Vec::new();
        for (buffer_id, buffer) in &self.open_buffers {
            if let Some(buffer) = buffer.upgrade(cx) {
                buffer.update(cx, |buffer, cx| {
                    if let Some(old_file) = File::from_dyn(buffer.file()) {
                        if old_file.worktree != worktree_handle {
                            return;
                        }

                        // Prefer resolving by entry id (tracks renames); fall
                        // back to the old path; otherwise the entry is gone
                        // and the file becomes "deleted" (entry_id: None).
                        let new_file = if let Some(entry) = old_file
                            .entry_id
                            .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else if let Some(entry) =
                            snapshot.entry_for_path(old_file.path().as_ref())
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else {
                            File {
                                is_local: true,
                                entry_id: None,
                                path: old_file.path().clone(),
                                mtime: old_file.mtime(),
                                worktree: worktree_handle.clone(),
                            }
                        };

                        // Let collaborators know the buffer's file changed.
                        if let Some(project_id) = self.remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: *buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }
                        buffer.file_updated(Box::new(new_file), cx).detach();
                    }
                });
            } else {
                buffers_to_delete.push(*buffer_id);
            }
        }

        for buffer_id in buffers_to_delete {
            self.open_buffers.remove(&buffer_id);
        }
    }
1559
1560 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
1561 let new_active_entry = entry.and_then(|project_path| {
1562 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1563 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
1564 Some(ProjectEntry {
1565 worktree_id: project_path.worktree_id,
1566 entry_id: entry.id,
1567 })
1568 });
1569 if new_active_entry != self.active_entry {
1570 self.active_entry = new_active_entry;
1571 cx.emit(Event::ActiveEntryChanged(new_active_entry));
1572 }
1573 }
1574
1575 pub fn is_running_disk_based_diagnostics(&self) -> bool {
1576 self.language_servers_with_diagnostics_running > 0
1577 }
1578
1579 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
1580 let mut summary = DiagnosticSummary::default();
1581 for (_, path_summary) in self.diagnostic_summaries(cx) {
1582 summary.error_count += path_summary.error_count;
1583 summary.warning_count += path_summary.warning_count;
1584 summary.info_count += path_summary.info_count;
1585 summary.hint_count += path_summary.hint_count;
1586 }
1587 summary
1588 }
1589
1590 pub fn diagnostic_summaries<'a>(
1591 &'a self,
1592 cx: &'a AppContext,
1593 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
1594 self.worktrees(cx).flat_map(move |worktree| {
1595 let worktree = worktree.read(cx);
1596 let worktree_id = worktree.id();
1597 worktree
1598 .diagnostic_summaries()
1599 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
1600 })
1601 }
1602
1603 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
1604 self.language_servers_with_diagnostics_running += 1;
1605 if self.language_servers_with_diagnostics_running == 1 {
1606 cx.emit(Event::DiskBasedDiagnosticsStarted);
1607 }
1608 }
1609
1610 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
1611 cx.emit(Event::DiskBasedDiagnosticsUpdated);
1612 self.language_servers_with_diagnostics_running -= 1;
1613 if self.language_servers_with_diagnostics_running == 0 {
1614 cx.emit(Event::DiskBasedDiagnosticsFinished);
1615 }
1616 }
1617
    /// The project's currently active entry (file selection), if any.
    pub fn active_entry(&self) -> Option<ProjectEntry> {
        self.active_entry
    }
1621
1622 // RPC message handlers
1623
1624 fn handle_unshare_project(
1625 &mut self,
1626 _: TypedEnvelope<proto::UnshareProject>,
1627 _: Arc<Client>,
1628 cx: &mut ModelContext<Self>,
1629 ) -> Result<()> {
1630 if let ProjectClientState::Remote {
1631 sharing_has_stopped,
1632 ..
1633 } = &mut self.client_state
1634 {
1635 *sharing_has_stopped = true;
1636 self.collaborators.clear();
1637 cx.notify();
1638 Ok(())
1639 } else {
1640 unreachable!()
1641 }
1642 }
1643
    /// Handles a new collaborator joining the project: resolves their user
    /// info asynchronously via the user store, then records them in
    /// `collaborators` and notifies observers. Failures are logged rather
    /// than propagated.
    fn handle_add_collaborator(
        &mut self,
        mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
        _: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let user_store = self.user_store.clone();
        let collaborator = envelope
            .payload
            .collaborator
            .take()
            .ok_or_else(|| anyhow!("empty collaborator"))?;

        cx.spawn(|this, mut cx| {
            async move {
                // Fetching user details may require a round trip to the server.
                let collaborator =
                    Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
                this.update(&mut cx, |this, cx| {
                    this.collaborators
                        .insert(collaborator.peer_id, collaborator);
                    cx.notify();
                });
                Ok(())
            }
            .log_err()
        })
        .detach();

        Ok(())
    }
1674
1675 fn handle_remove_collaborator(
1676 &mut self,
1677 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
1678 _: Arc<Client>,
1679 cx: &mut ModelContext<Self>,
1680 ) -> Result<()> {
1681 let peer_id = PeerId(envelope.payload.peer_id);
1682 let replica_id = self
1683 .collaborators
1684 .remove(&peer_id)
1685 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
1686 .replica_id;
1687 self.shared_buffers.remove(&peer_id);
1688 for (_, buffer) in &self.open_buffers {
1689 if let Some(buffer) = buffer.upgrade(cx) {
1690 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
1691 }
1692 }
1693 cx.notify();
1694 Ok(())
1695 }
1696
1697 fn handle_share_worktree(
1698 &mut self,
1699 envelope: TypedEnvelope<proto::ShareWorktree>,
1700 client: Arc<Client>,
1701 cx: &mut ModelContext<Self>,
1702 ) -> Result<()> {
1703 let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
1704 let replica_id = self.replica_id();
1705 let worktree = envelope
1706 .payload
1707 .worktree
1708 .ok_or_else(|| anyhow!("invalid worktree"))?;
1709 let (worktree, load_task) = Worktree::remote(remote_id, replica_id, worktree, client, cx);
1710 self.add_worktree(&worktree, cx);
1711 load_task.detach();
1712 Ok(())
1713 }
1714
1715 fn handle_unregister_worktree(
1716 &mut self,
1717 envelope: TypedEnvelope<proto::UnregisterWorktree>,
1718 _: Arc<Client>,
1719 cx: &mut ModelContext<Self>,
1720 ) -> Result<()> {
1721 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
1722 self.remove_worktree(worktree_id, cx);
1723 Ok(())
1724 }
1725
1726 fn handle_update_worktree(
1727 &mut self,
1728 envelope: TypedEnvelope<proto::UpdateWorktree>,
1729 _: Arc<Client>,
1730 cx: &mut ModelContext<Self>,
1731 ) -> Result<()> {
1732 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
1733 if let Some(worktree) = self.worktree_for_id(worktree_id, cx) {
1734 worktree.update(cx, |worktree, cx| {
1735 let worktree = worktree.as_remote_mut().unwrap();
1736 worktree.update_from_remote(envelope, cx)
1737 })?;
1738 }
1739 Ok(())
1740 }
1741
    /// Handles a diagnostic-summary update from the host: stores the new
    /// per-path summary on the matching remote worktree and emits
    /// `DiagnosticsUpdated` for the affected path. Updates for unknown
    /// worktrees or empty summaries are ignored.
    fn handle_update_diagnostic_summary(
        &mut self,
        envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
        _: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        if let Some(worktree) = self.worktree_for_id(worktree_id, cx) {
            if let Some(summary) = envelope.payload.summary {
                let project_path = ProjectPath {
                    worktree_id,
                    path: Path::new(&summary.path).into(),
                };
                worktree.update(cx, |worktree, _| {
                    worktree
                        .as_remote_mut()
                        .unwrap()
                        .update_diagnostic_summary(project_path.path.clone(), &summary);
                });
                cx.emit(Event::DiagnosticsUpdated(project_path));
            }
        }
        Ok(())
    }
1766
1767 fn handle_disk_based_diagnostics_updating(
1768 &mut self,
1769 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
1770 _: Arc<Client>,
1771 cx: &mut ModelContext<Self>,
1772 ) -> Result<()> {
1773 self.disk_based_diagnostics_started(cx);
1774 Ok(())
1775 }
1776
1777 fn handle_disk_based_diagnostics_updated(
1778 &mut self,
1779 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
1780 _: Arc<Client>,
1781 cx: &mut ModelContext<Self>,
1782 ) -> Result<()> {
1783 self.disk_based_diagnostics_finished(cx);
1784 Ok(())
1785 }
1786
1787 pub fn handle_update_buffer(
1788 &mut self,
1789 envelope: TypedEnvelope<proto::UpdateBuffer>,
1790 _: Arc<Client>,
1791 cx: &mut ModelContext<Self>,
1792 ) -> Result<()> {
1793 let payload = envelope.payload.clone();
1794 let buffer_id = payload.buffer_id as usize;
1795 let ops = payload
1796 .operations
1797 .into_iter()
1798 .map(|op| language::proto::deserialize_operation(op))
1799 .collect::<Result<Vec<_>, _>>()?;
1800 if let Some(buffer) = self.open_buffers.get_mut(&buffer_id) {
1801 if let Some(buffer) = buffer.upgrade(cx) {
1802 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
1803 }
1804 }
1805 Ok(())
1806 }
1807
1808 pub fn handle_update_buffer_file(
1809 &mut self,
1810 envelope: TypedEnvelope<proto::UpdateBufferFile>,
1811 _: Arc<Client>,
1812 cx: &mut ModelContext<Self>,
1813 ) -> Result<()> {
1814 let payload = envelope.payload.clone();
1815 let buffer_id = payload.buffer_id as usize;
1816 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
1817 let worktree = self
1818 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
1819 .ok_or_else(|| anyhow!("no such worktree"))?;
1820 let file = File::from_proto(file, worktree.clone(), cx)?;
1821 let buffer = self
1822 .open_buffers
1823 .get_mut(&buffer_id)
1824 .and_then(|b| b.upgrade(cx))
1825 .ok_or_else(|| anyhow!("no such buffer"))?;
1826 buffer.update(cx, |buffer, cx| {
1827 buffer.file_updated(Box::new(file), cx).detach();
1828 });
1829
1830 Ok(())
1831 }
1832
    /// Handles a guest's request to save one of our shared buffers: saves it
    /// locally and responds with the saved version and mtime. Save/respond
    /// failures are logged rather than propagated.
    pub fn handle_save_buffer(
        &mut self,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        rpc: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let sender_id = envelope.original_sender_id()?;
        let project_id = self.remote_id().ok_or_else(|| anyhow!("not connected"))?;
        // Only buffers we've previously shared with this peer may be saved.
        let buffer = self
            .shared_buffers
            .get(&sender_id)
            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
        let receipt = envelope.receipt();
        let buffer_id = envelope.payload.buffer_id;
        let save = cx.spawn(|_, mut cx| async move {
            buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await
        });

        cx.background()
            .spawn(
                async move {
                    let (version, mtime) = save.await?;

                    // Reply with the state the guest needs to mark its
                    // replica as saved.
                    rpc.respond(
                        receipt,
                        proto::BufferSaved {
                            project_id,
                            buffer_id,
                            version: (&version).into(),
                            mtime: Some(mtime.into()),
                        },
                    )?;

                    Ok(())
                }
                .log_err(),
            )
            .detach();
        Ok(())
    }
1874
    /// Handles a guest's request to format one of our shared buffers,
    /// acknowledging success or reporting the error back to the requester.
    pub fn handle_format_buffer(
        &mut self,
        envelope: TypedEnvelope<proto::FormatBuffer>,
        rpc: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let receipt = envelope.receipt();
        let sender_id = envelope.original_sender_id()?;
        // Only buffers we've previously shared with this peer may be formatted.
        let buffer = self
            .shared_buffers
            .get(&sender_id)
            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
        cx.spawn(|_, mut cx| async move {
            let format = buffer.update(&mut cx, |buffer, cx| buffer.format(cx)).await;
            // We spawn here in order to enqueue the sending of `Ack` *after* transmission of edits
            // associated with formatting.
            cx.spawn(|_| async move {
                match format {
                    Ok(()) => rpc.respond(receipt, proto::Ack {})?,
                    Err(error) => rpc.respond_with_error(
                        receipt,
                        proto::Error {
                            message: error.to_string(),
                        },
                    )?,
                }
                Ok::<_, anyhow::Error>(())
            })
            .await
            .log_err();
        })
        .detach();
        Ok(())
    }
1910
    /// Handles a guest's request for completions at a position in one of our
    /// shared buffers, responding with the serialized completions or an
    /// error message.
    fn handle_get_completions(
        &mut self,
        envelope: TypedEnvelope<proto::GetCompletions>,
        rpc: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let receipt = envelope.receipt();
        let sender_id = envelope.original_sender_id()?;
        // Only buffers we've previously shared with this peer are queryable.
        let buffer = self
            .shared_buffers
            .get(&sender_id)
            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
        let position = envelope
            .payload
            .position
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        cx.spawn(|_, mut cx| async move {
            match buffer
                .update(&mut cx, |buffer, cx| buffer.completions(position, cx))
                .await
            {
                Ok(completions) => rpc.respond(
                    receipt,
                    proto::GetCompletionsResponse {
                        completions: completions
                            .iter()
                            .map(language::proto::serialize_completion)
                            .collect(),
                    },
                ),
                Err(error) => rpc.respond_with_error(
                    receipt,
                    proto::Error {
                        message: error.to_string(),
                    },
                ),
            }
        })
        .detach_and_log_err(cx);
        Ok(())
    }
1954
    /// Handles a guest applying a completion's additional (non-primary)
    /// edits to one of our shared buffers, responding with the resulting
    /// transaction so the guest can replay it.
    fn handle_apply_additional_edits_for_completion(
        &mut self,
        envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
        rpc: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let receipt = envelope.receipt();
        let sender_id = envelope.original_sender_id()?;
        // Only buffers we've previously shared with this peer may be edited.
        let buffer = self
            .shared_buffers
            .get(&sender_id)
            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
        let language = buffer.read(cx).language();
        let completion = language::proto::deserialize_completion(
            envelope
                .payload
                .completion
                .ok_or_else(|| anyhow!("invalid completion"))?,
            language,
        )?;
        cx.spawn(|_, mut cx| async move {
            match buffer
                .update(&mut cx, |buffer, cx| {
                    // `false`: don't push to our own history — the edit
                    // belongs to the requesting guest.
                    buffer.apply_additional_edits_for_completion(completion, false, cx)
                })
                .await
            {
                Ok(transaction) => rpc.respond(
                    receipt,
                    proto::ApplyCompletionAdditionalEditsResponse {
                        transaction: transaction
                            .as_ref()
                            .map(language::proto::serialize_transaction),
                    },
                ),
                Err(error) => rpc.respond_with_error(
                    receipt,
                    proto::Error {
                        message: error.to_string(),
                    },
                ),
            }
        })
        .detach_and_log_err(cx);
        Ok(())
    }
2002
2003 fn handle_get_code_actions(
2004 &mut self,
2005 envelope: TypedEnvelope<proto::GetCodeActions>,
2006 rpc: Arc<Client>,
2007 cx: &mut ModelContext<Self>,
2008 ) -> Result<()> {
2009 let receipt = envelope.receipt();
2010 let sender_id = envelope.original_sender_id()?;
2011 let buffer = self
2012 .shared_buffers
2013 .get(&sender_id)
2014 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2015 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2016 let position = envelope
2017 .payload
2018 .position
2019 .and_then(language::proto::deserialize_anchor)
2020 .ok_or_else(|| anyhow!("invalid position"))?;
2021 cx.spawn(|_, mut cx| async move {
2022 match buffer
2023 .update(&mut cx, |buffer, cx| buffer.code_actions(position, cx))
2024 .await
2025 {
2026 Ok(completions) => rpc.respond(
2027 receipt,
2028 proto::GetCodeActionsResponse {
2029 actions: completions
2030 .iter()
2031 .map(language::proto::serialize_code_action)
2032 .collect(),
2033 },
2034 ),
2035 Err(error) => rpc.respond_with_error(
2036 receipt,
2037 proto::Error {
2038 message: error.to_string(),
2039 },
2040 ),
2041 }
2042 })
2043 .detach_and_log_err(cx);
2044 Ok(())
2045 }
2046
    /// Handles a guest's request to apply a code action to one of our shared
    /// buffers: applies it locally, then responds with the transactions for
    /// every buffer the action touched (sharing those buffers with the
    /// requester as needed).
    fn handle_apply_code_action(
        &mut self,
        envelope: TypedEnvelope<proto::ApplyCodeAction>,
        rpc: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let receipt = envelope.receipt();
        let sender_id = envelope.original_sender_id()?;
        // Only buffers we've previously shared with this peer may be edited.
        let buffer = self
            .shared_buffers
            .get(&sender_id)
            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
        let action = language::proto::deserialize_code_action(
            envelope
                .payload
                .action
                .ok_or_else(|| anyhow!("invalid action"))?,
        )?;
        // `false`: don't push to our own history — the edit belongs to the guest.
        let apply_code_action = self.apply_code_action(buffer, action, false, cx);
        cx.spawn(|this, mut cx| async move {
            match apply_code_action.await {
                Ok(project_transaction) => this.update(&mut cx, |this, cx| {
                    let mut serialized_transaction = proto::ProjectTransaction {
                        buffers: Default::default(),
                        transactions: Default::default(),
                    };
                    for (buffer, transaction) in project_transaction.0 {
                        serialized_transaction
                            .buffers
                            .push(this.serialize_buffer_for_peer(&buffer, sender_id, cx));
                        serialized_transaction
                            .transactions
                            .push(language::proto::serialize_transaction(&transaction));
                    }
                    rpc.respond(
                        receipt,
                        proto::ApplyCodeActionResponse {
                            transaction: Some(serialized_transaction),
                        },
                    )
                }),
                Err(error) => rpc.respond_with_error(
                    receipt,
                    proto::Error {
                        message: error.to_string(),
                    },
                ),
            }
        })
        .detach_and_log_err(cx);
        Ok(())
    }
2100
    /// Handles a guest's go-to-definition request for one of our shared
    /// buffers: resolves the definitions locally, then responds with each
    /// target's anchors and buffer (sharing target buffers with the
    /// requester as needed).
    pub fn handle_get_definition(
        &mut self,
        envelope: TypedEnvelope<proto::GetDefinition>,
        rpc: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let receipt = envelope.receipt();
        let sender_id = envelope.original_sender_id()?;
        // Only buffers we've previously shared with this peer are queryable.
        let source_buffer = self
            .shared_buffers
            .get(&sender_id)
            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
        let position = envelope
            .payload
            .position
            .and_then(deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        // Reject anchors that reference edits we haven't seen yet.
        if !source_buffer.read(cx).can_resolve(&position) {
            return Err(anyhow!("cannot resolve position"));
        }

        let definitions = self.definition(&source_buffer, position, cx);
        cx.spawn(|this, mut cx| async move {
            let definitions = definitions.await?;
            let mut response = proto::GetDefinitionResponse {
                definitions: Default::default(),
            };
            this.update(&mut cx, |this, cx| {
                for definition in definitions {
                    let buffer =
                        this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx);
                    response.definitions.push(proto::Definition {
                        target_start: Some(serialize_anchor(&definition.target_range.start)),
                        target_end: Some(serialize_anchor(&definition.target_range.end)),
                        buffer: Some(buffer),
                    });
                }
            });
            rpc.respond(receipt, response)?;
            Ok::<_, anyhow::Error>(())
        })
        .detach_and_log_err(cx);

        Ok(())
    }
2147
    /// Handles a guest's request to open a buffer at a project path: opens
    /// (or reuses) the buffer locally and responds with its serialized form,
    /// registering it as shared with that peer. Failures are logged rather
    /// than propagated.
    pub fn handle_open_buffer(
        &mut self,
        envelope: TypedEnvelope<proto::OpenBuffer>,
        rpc: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> anyhow::Result<()> {
        let receipt = envelope.receipt();
        let peer_id = envelope.original_sender_id()?;
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let open_buffer = self.open_buffer(
            ProjectPath {
                worktree_id,
                path: PathBuf::from(envelope.payload.path).into(),
            },
            cx,
        );
        cx.spawn(|this, mut cx| {
            async move {
                let buffer = open_buffer.await?;
                // Full state on first share with this peer, id-only afterwards.
                let buffer = this.update(&mut cx, |this, cx| {
                    this.serialize_buffer_for_peer(&buffer, peer_id, cx)
                });
                rpc.respond(
                    receipt,
                    proto::OpenBufferResponse {
                        buffer: Some(buffer),
                    },
                )
            }
            .log_err()
        })
        .detach();
        Ok(())
    }
2182
2183 fn serialize_buffer_for_peer(
2184 &mut self,
2185 buffer: &ModelHandle<Buffer>,
2186 peer_id: PeerId,
2187 cx: &AppContext,
2188 ) -> proto::Buffer {
2189 let buffer_id = buffer.read(cx).remote_id();
2190 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2191 match shared_buffers.entry(buffer_id) {
2192 hash_map::Entry::Occupied(_) => proto::Buffer {
2193 variant: Some(proto::buffer::Variant::Id(buffer_id)),
2194 },
2195 hash_map::Entry::Vacant(entry) => {
2196 entry.insert(buffer.clone());
2197 proto::Buffer {
2198 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
2199 }
2200 }
2201 }
2202 }
2203
    /// Materializes a buffer received over the wire.
    ///
    /// A `proto::Buffer` is either just an id — in which case the buffer must
    /// already be open locally — or a full serialized state, from which a new
    /// buffer model is created and registered with this project (and with its
    /// worktree, when the serialized file refers to a worktree we know about).
    fn deserialize_remote_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Result<ModelHandle<Buffer>> {
        match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
            proto::buffer::Variant::Id(id) => self
                .open_buffers
                .get(&(id as usize))
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("no buffer exists for id {}", id)),
            proto::buffer::Variant::State(mut buffer) => {
                let mut buffer_worktree = None;
                let mut buffer_file = None;
                // The file is taken out of the proto state and reconstructed
                // against the matching local worktree, if any.
                if let Some(file) = buffer.file.take() {
                    let worktree_id = WorktreeId::from_proto(file.worktree_id);
                    let worktree = self
                        .worktree_for_id(worktree_id, cx)
                        .ok_or_else(|| anyhow!("no worktree found for id {}", file.worktree_id))?;
                    buffer_file = Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                        as Box<dyn language::File>);
                    buffer_worktree = Some(worktree);
                }

                // NOTE(review): `from_proto` failure is treated as a bug here
                // (unwrap) — confirm the host always sends well-formed state.
                let buffer = cx.add_model(|cx| {
                    Buffer::from_proto(self.replica_id(), buffer, buffer_file, cx).unwrap()
                });
                self.register_buffer(&buffer, buffer_worktree.as_ref(), cx)?;
                Ok(buffer)
            }
        }
    }
2236
2237 pub fn handle_close_buffer(
2238 &mut self,
2239 envelope: TypedEnvelope<proto::CloseBuffer>,
2240 _: Arc<Client>,
2241 cx: &mut ModelContext<Self>,
2242 ) -> anyhow::Result<()> {
2243 if let Some(shared_buffers) = self.shared_buffers.get_mut(&envelope.original_sender_id()?) {
2244 shared_buffers.remove(&envelope.payload.buffer_id);
2245 cx.notify();
2246 }
2247 Ok(())
2248 }
2249
2250 pub fn handle_buffer_saved(
2251 &mut self,
2252 envelope: TypedEnvelope<proto::BufferSaved>,
2253 _: Arc<Client>,
2254 cx: &mut ModelContext<Self>,
2255 ) -> Result<()> {
2256 let payload = envelope.payload.clone();
2257 let buffer = self
2258 .open_buffers
2259 .get(&(payload.buffer_id as usize))
2260 .and_then(|buffer| buffer.upgrade(cx));
2261 if let Some(buffer) = buffer {
2262 buffer.update(cx, |buffer, cx| {
2263 let version = payload.version.try_into()?;
2264 let mtime = payload
2265 .mtime
2266 .ok_or_else(|| anyhow!("missing mtime"))?
2267 .into();
2268 buffer.did_save(version, mtime, None, cx);
2269 Result::<_, anyhow::Error>::Ok(())
2270 })?;
2271 }
2272 Ok(())
2273 }
2274
2275 pub fn handle_buffer_reloaded(
2276 &mut self,
2277 envelope: TypedEnvelope<proto::BufferReloaded>,
2278 _: Arc<Client>,
2279 cx: &mut ModelContext<Self>,
2280 ) -> Result<()> {
2281 let payload = envelope.payload.clone();
2282 let buffer = self
2283 .open_buffers
2284 .get(&(payload.buffer_id as usize))
2285 .and_then(|buffer| buffer.upgrade(cx));
2286 if let Some(buffer) = buffer {
2287 buffer.update(cx, |buffer, cx| {
2288 let version = payload.version.try_into()?;
2289 let mtime = payload
2290 .mtime
2291 .ok_or_else(|| anyhow!("missing mtime"))?
2292 .into();
2293 buffer.did_reload(version, mtime, cx);
2294 Result::<_, anyhow::Error>::Ok(())
2295 })?;
2296 }
2297 Ok(())
2298 }
2299
2300 pub fn match_paths<'a>(
2301 &self,
2302 query: &'a str,
2303 include_ignored: bool,
2304 smart_case: bool,
2305 max_results: usize,
2306 cancel_flag: &'a AtomicBool,
2307 cx: &AppContext,
2308 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
2309 let worktrees = self
2310 .worktrees(cx)
2311 .filter(|worktree| !worktree.read(cx).is_weak())
2312 .collect::<Vec<_>>();
2313 let include_root_name = worktrees.len() > 1;
2314 let candidate_sets = worktrees
2315 .into_iter()
2316 .map(|worktree| CandidateSet {
2317 snapshot: worktree.read(cx).snapshot(),
2318 include_ignored,
2319 include_root_name,
2320 })
2321 .collect::<Vec<_>>();
2322
2323 let background = cx.background().clone();
2324 async move {
2325 fuzzy::match_paths(
2326 candidate_sets.as_slice(),
2327 query,
2328 smart_case,
2329 max_results,
2330 cancel_flag,
2331 background,
2332 )
2333 .await
2334 }
2335 }
2336}
2337
2338impl WorktreeHandle {
2339 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2340 match self {
2341 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2342 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2343 }
2344 }
2345}
2346
/// A single worktree's snapshot prepared for fuzzy path matching.
struct CandidateSet {
    snapshot: Snapshot,
    // Whether ignored files are included as match candidates.
    include_ignored: bool,
    // Whether candidate paths are prefixed with the worktree's root name.
    include_root_name: bool,
}
2352
2353impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
2354 type Candidates = CandidateSetIter<'a>;
2355
2356 fn id(&self) -> usize {
2357 self.snapshot.id().to_usize()
2358 }
2359
2360 fn len(&self) -> usize {
2361 if self.include_ignored {
2362 self.snapshot.file_count()
2363 } else {
2364 self.snapshot.visible_file_count()
2365 }
2366 }
2367
2368 fn prefix(&self) -> Arc<str> {
2369 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
2370 self.snapshot.root_name().into()
2371 } else if self.include_root_name {
2372 format!("{}/", self.snapshot.root_name()).into()
2373 } else {
2374 "".into()
2375 }
2376 }
2377
2378 fn candidates(&'a self, start: usize) -> Self::Candidates {
2379 CandidateSetIter {
2380 traversal: self.snapshot.files(self.include_ignored, start),
2381 }
2382 }
2383}
2384
/// Iterator over a worktree snapshot's file entries, adapting each one into a
/// fuzzy-matcher candidate.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
2388
2389impl<'a> Iterator for CandidateSetIter<'a> {
2390 type Item = PathMatchCandidate<'a>;
2391
2392 fn next(&mut self) -> Option<Self::Item> {
2393 self.traversal.next().map(|entry| {
2394 if let EntryKind::File(char_bag) = entry.kind {
2395 PathMatchCandidate {
2396 path: &entry.path,
2397 char_bag,
2398 }
2399 } else {
2400 unreachable!()
2401 }
2402 })
2403 }
2404}
2405
impl Entity for Project {
    type Event = Event;

    /// When the project model is dropped, notify the server that we are no
    /// longer hosting (local) or participating in (remote) it. Send failures
    /// are only logged.
    fn release(&mut self, _: &mut gpui::MutableAppContext) {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => {
                // Only unregister if the project was actually assigned a
                // remote id (i.e. it was registered with the server).
                if let Some(project_id) = *remote_id_rx.borrow() {
                    self.client
                        .send(proto::UnregisterProject { project_id })
                        .log_err();
                }
            }
            ProjectClientState::Remote { remote_id, .. } => {
                self.client
                    .send(proto::LeaveProject {
                        project_id: *remote_id,
                    })
                    .log_err();
            }
        }
    }

    /// Before the application quits, shut down every running language server
    /// and return a future that resolves once all shutdowns complete.
    fn app_will_quit(
        &mut self,
        _: &mut MutableAppContext,
    ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
        use futures::FutureExt;

        // Drain the map so each server's shutdown is initiated exactly once;
        // servers without a shutdown future are skipped.
        let shutdown_futures = self
            .language_servers
            .drain()
            .filter_map(|(_, server)| server.shutdown())
            .collect::<Vec<_>>();
        Some(
            async move {
                futures::future::join_all(shutdown_futures).await;
            }
            .boxed(),
        )
    }
}
2447
2448impl Collaborator {
2449 fn from_proto(
2450 message: proto::Collaborator,
2451 user_store: &ModelHandle<UserStore>,
2452 cx: &mut AsyncAppContext,
2453 ) -> impl Future<Output = Result<Self>> {
2454 let user = user_store.update(cx, |user_store, cx| {
2455 user_store.fetch_user(message.user_id, cx)
2456 });
2457
2458 async move {
2459 Ok(Self {
2460 peer_id: PeerId(message.peer_id),
2461 user: user.await?,
2462 replica_id: message.replica_id as ReplicaId,
2463 })
2464 }
2465 }
2466}
2467
2468impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
2469 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
2470 Self {
2471 worktree_id,
2472 path: path.as_ref().into(),
2473 }
2474 }
2475}
2476
2477impl From<lsp::CreateFileOptions> for fs::CreateOptions {
2478 fn from(options: lsp::CreateFileOptions) -> Self {
2479 Self {
2480 overwrite: options.overwrite.unwrap_or(false),
2481 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2482 }
2483 }
2484}
2485
2486impl From<lsp::RenameFileOptions> for fs::RenameOptions {
2487 fn from(options: lsp::RenameFileOptions) -> Self {
2488 Self {
2489 overwrite: options.overwrite.unwrap_or(false),
2490 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2491 }
2492 }
2493}
2494
2495impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
2496 fn from(options: lsp::DeleteFileOptions) -> Self {
2497 Self {
2498 recursive: options.recursive.unwrap_or(false),
2499 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
2500 }
2501 }
2502}
2503
2504#[cfg(test)]
2505mod tests {
2506 use super::{Event, *};
2507 use client::test::FakeHttpClient;
2508 use fs::RealFs;
2509 use futures::StreamExt;
2510 use gpui::{test::subscribe, TestAppContext};
2511 use language::{
2512 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageRegistry,
2513 LanguageServerConfig, Point,
2514 };
2515 use lsp::Url;
2516 use serde_json::json;
2517 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
2518 use unindent::Unindent as _;
2519 use util::test::temp_tree;
2520 use worktree::WorktreeHandle as _;
2521
    /// Checks that a worktree rooted at a symlink is scanned correctly
    /// (including a directory symlink inside it, which shares inodes with its
    /// target) and that fuzzy path matching returns the expected files.
    #[gpui::test]
    async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        // The worktree root is a symlink, and "finnochio" is a symlink to
        // "fennel" inside the tree.
        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = build_project(Arc::new(RealFs), &mut cx);

        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(&root_link_path, false, cx)
            })
            .await
            .unwrap();

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;
        cx.read(|cx| {
            let tree = tree.read(cx);
            assert_eq!(tree.file_count(), 5);
            // The symlinked directory's files resolve to the same inodes.
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        let cancel_flag = Default::default();
        let results = project
            .read_with(&cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
2584
    /// Drives a fake language server through overlapping progress spans and a
    /// diagnostics publication, asserting the project emits the expected
    /// disk-based-diagnostics lifecycle events and that the published
    /// diagnostic is visible on the buffer afterwards.
    #[gpui::test]
    async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
        let (language_server_config, mut fake_server) =
            LanguageServerConfig::fake(cx.background()).await;
        let progress_token = language_server_config
            .disk_based_diagnostics_progress_token
            .clone()
            .unwrap();

        let mut languages = LanguageRegistry::new();
        languages.add(Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        )));

        let dir = temp_tree(json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }));

        let http_client = FakeHttpClient::with_404_response();
        let client = Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));

        let project = cx.update(|cx| {
            Project::local(
                client,
                user_store,
                Arc::new(languages),
                Arc::new(RealFs),
                cx,
            )
        });

        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(&mut cx, |project, cx| {
                project.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Path::new("b.rs").into(),
                    },
                    cx,
                )
            })
            .await
            .unwrap();

        let mut events = subscribe(&project, &mut cx);

        // The first progress span triggers the "started" event.
        fake_server.start_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Overlapping progress spans: diagnostics are still "running" until
        // every started span has ended.
        fake_server.start_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        fake_server.start_progress(&progress_token).await;

        fake_server
            .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path(dir.path().join("a.rs")).unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            })
            .await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated(ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("a.rs"))
            })
        );

        // Ending the remaining spans finishes the disk-based diagnostics run.
        fake_server.end_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        // The published diagnostic is reflected in the buffer's snapshot.
        buffer.read_with(&cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });
    }
2717
    /// Verifies that fuzzy path matching over a worktree containing only
    /// directories (no files) yields no results.
    #[gpui::test]
    async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "dir1": {},
                "dir2": {
                    "dir3": {}
                }
            }
        }));

        let project = build_project(Arc::new(RealFs), &mut cx);
        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(&dir.path(), false, cx)
            })
            .await
            .unwrap();

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        let cancel_flag = Default::default();
        let results = project
            .read_with(&cx, |project, cx| {
                project.match_paths("dir", false, false, 10, &cancel_flag, cx)
            })
            .await;

        // Only files are match candidates, so directory names never match.
        assert!(results.is_empty());
    }
2749
    /// Exercises go-to-definition across files: a definition in a file
    /// outside the original (single-file) worktree causes a weak worktree to
    /// be created for the target, which is dropped again once the last handle
    /// to the definition's buffer goes away.
    #[gpui::test]
    async fn test_definition(mut cx: gpui::TestAppContext) {
        let (language_server_config, mut fake_server) =
            LanguageServerConfig::fake(cx.background()).await;

        let mut languages = LanguageRegistry::new();
        languages.add(Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        )));

        let dir = temp_tree(json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }));

        let http_client = FakeHttpClient::with_404_response();
        let client = Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project = cx.update(|cx| {
            Project::local(
                client,
                user_store,
                Arc::new(languages),
                Arc::new(RealFs),
                cx,
            )
        });

        // The worktree is rooted at a single file, b.rs.
        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(dir.path().join("b.rs"), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Cause worktree to start the fake language server
        let buffer = project
            .update(&mut cx, |project, cx| {
                project.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Path::new("").into(),
                    },
                    cx,
                )
            })
            .await
            .unwrap();
        let definitions =
            project.update(&mut cx, |project, cx| project.definition(&buffer, 22, cx));
        // The project should forward the request to the language server with
        // the correct document URI and position.
        let (request_id, request) = fake_server
            .receive_request::<lsp::request::GotoDefinition>()
            .await;
        let request_params = request.text_document_position_params;
        assert_eq!(
            request_params.text_document.uri.to_file_path().unwrap(),
            dir.path().join("b.rs")
        );
        assert_eq!(request_params.position, lsp::Position::new(0, 22));

        // Respond with a definition located in a.rs, which is outside the
        // current worktree.
        fake_server
            .respond(
                request_id,
                Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
                    lsp::Url::from_file_path(dir.path().join("a.rs")).unwrap(),
                    lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                ))),
            )
            .await;
        let mut definitions = definitions.await.unwrap();
        assert_eq!(definitions.len(), 1);
        let definition = definitions.pop().unwrap();
        cx.update(|cx| {
            let target_buffer = definition.target_buffer.read(cx);
            assert_eq!(
                target_buffer
                    .file()
                    .unwrap()
                    .as_local()
                    .unwrap()
                    .abs_path(cx),
                dir.path().join("a.rs")
            );
            assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
            // a.rs was added as a *weak* worktree to host the target buffer.
            assert_eq!(
                list_worktrees(&project, cx),
                [
                    (dir.path().join("b.rs"), false),
                    (dir.path().join("a.rs"), true)
                ]
            );

            drop(definition);
        });
        // Dropping the definition releases the weak worktree.
        cx.read(|cx| {
            assert_eq!(
                list_worktrees(&project, cx),
                [(dir.path().join("b.rs"), false)]
            );
        });

        // Helper: each worktree's absolute path paired with its weak flag.
        fn list_worktrees(project: &ModelHandle<Project>, cx: &AppContext) -> Vec<(PathBuf, bool)> {
            project
                .read(cx)
                .worktrees(cx)
                .map(|worktree| {
                    let worktree = worktree.read(cx);
                    (
                        worktree.as_local().unwrap().abs_path().to_path_buf(),
                        worktree.is_weak(),
                    )
                })
                .collect::<Vec<_>>()
        }
    }
2874
    /// Opens a buffer from a fake filesystem, edits it, saves it, and checks
    /// the file on disk matches the buffer contents.
    #[gpui::test]
    async fn test_save_file(mut cx: gpui::TestAppContext) {
        let fs = Arc::new(FakeFs::new(cx.background()));
        fs.insert_tree(
            "/dir",
            json!({
                "file1": "the old contents",
            }),
        )
        .await;

        let project = build_project(fs.clone(), &mut cx);
        let worktree_id = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/dir", false, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
            .await
            .unwrap();
        // A large edit (160 KiB) exercises chunked/streamed saving paths.
        buffer
            .update(&mut cx, |buffer, cx| {
                assert_eq!(buffer.text(), "the old contents");
                buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
                buffer.save(cx)
            })
            .await
            .unwrap();

        let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
        assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
    }
2912
    /// Same as `test_save_file`, but the worktree is rooted at the file
    /// itself, so the buffer is opened via the empty relative path.
    #[gpui::test]
    async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
        let fs = Arc::new(FakeFs::new(cx.background()));
        fs.insert_tree(
            "/dir",
            json!({
                "file1": "the old contents",
            }),
        )
        .await;

        let project = build_project(fs.clone(), &mut cx);
        let worktree_id = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/dir/file1", false, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(&cx, |tree, _| tree.id());

        // The empty path addresses the worktree's root file.
        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
            .await
            .unwrap();
        buffer
            .update(&mut cx, |buffer, cx| {
                buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
                buffer.save(cx)
            })
            .await
            .unwrap();

        let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
        assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
    }
2949
    /// After renaming/deleting files and directories on disk, the worktree
    /// rescan must preserve entry ids across renames, keep open buffers'
    /// paths up to date (marking deleted files), and produce an update that
    /// brings a remote replica of the worktree into a consistent state.
    #[gpui::test(retries = 5)]
    async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "a": {
                "file1": "",
                "file2": "",
                "file3": "",
            },
            "b": {
                "c": {
                    "file4": "",
                    "file5": "",
                }
            }
        }));

        let project = build_project(Arc::new(RealFs), &mut cx);
        let rpc = project.read_with(&cx, |p, _| p.client.clone());

        let (tree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());

        let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
            let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
            async move { buffer.await.unwrap() }
        };
        let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
            tree.read_with(cx, |tree, _| {
                tree.entry_for_path(path)
                    .expect(&format!("no entry for path {}", path))
                    .id
            })
        };

        let buffer2 = buffer_for_path("a/file2", &mut cx).await;
        let buffer3 = buffer_for_path("a/file3", &mut cx).await;
        let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
        let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;

        let file2_id = id_for_path("a/file2", &cx);
        let file3_id = id_for_path("a/file3", &cx);
        let file4_id = id_for_path("b/c/file4", &cx);

        // Wait for the initial scan.
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Create a remote copy of this worktree.
        let initial_snapshot = tree.read_with(&cx, |tree, _| tree.snapshot());
        let (remote, load_task) = cx.update(|cx| {
            Worktree::remote(
                1,
                1,
                initial_snapshot.to_proto(&Default::default(), Default::default()),
                rpc.clone(),
                cx,
            )
        });
        load_task.await;

        cx.read(|cx| {
            assert!(!buffer2.read(cx).is_dirty());
            assert!(!buffer3.read(cx).is_dirty());
            assert!(!buffer4.read(cx).is_dirty());
            assert!(!buffer5.read(cx).is_dirty());
        });

        // Rename and delete files and directories.
        tree.flush_fs_events(&cx).await;
        std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
        std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
        std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
        std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
        tree.flush_fs_events(&cx).await;

        let expected_paths = vec![
            "a",
            "a/file1",
            "a/file2.new",
            "b",
            "d",
            "d/file3",
            "d/file4",
        ];

        cx.read(|app| {
            assert_eq!(
                tree.read(app)
                    .paths()
                    .map(|p| p.to_str().unwrap())
                    .collect::<Vec<_>>(),
                expected_paths
            );

            // Entry ids survive renames (both direct and via a moved parent).
            assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
            assert_eq!(id_for_path("d/file3", &cx), file3_id);
            assert_eq!(id_for_path("d/file4", &cx), file4_id);

            // Open buffers track their files' new paths.
            assert_eq!(
                buffer2.read(app).file().unwrap().path().as_ref(),
                Path::new("a/file2.new")
            );
            assert_eq!(
                buffer3.read(app).file().unwrap().path().as_ref(),
                Path::new("d/file3")
            );
            assert_eq!(
                buffer4.read(app).file().unwrap().path().as_ref(),
                Path::new("d/file4")
            );
            assert_eq!(
                buffer5.read(app).file().unwrap().path().as_ref(),
                Path::new("b/c/file5")
            );

            // Only the deleted file's buffer is marked deleted.
            assert!(!buffer2.read(app).file().unwrap().is_deleted());
            assert!(!buffer3.read(app).file().unwrap().is_deleted());
            assert!(!buffer4.read(app).file().unwrap().is_deleted());
            assert!(buffer5.read(app).file().unwrap().is_deleted());
        });

        // Update the remote worktree. Check that it becomes consistent with the
        // local worktree.
        remote.update(&mut cx, |remote, cx| {
            let update_message =
                tree.read(cx)
                    .snapshot()
                    .build_update(&initial_snapshot, 1, 1, true);
            remote
                .as_remote_mut()
                .unwrap()
                .snapshot
                .apply_remote_update(update_message)
                .unwrap();

            assert_eq!(
                remote
                    .paths()
                    .map(|p| p.to_str().unwrap())
                    .collect::<Vec<_>>(),
                expected_paths
            );
        });
    }
3099
    /// Opening the same path multiple times — including concurrently — must
    /// yield the same buffer instance rather than separate copies.
    #[gpui::test]
    async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
        let fs = Arc::new(FakeFs::new(cx.background()));
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.txt": "a-contents",
                "b.txt": "b-contents",
            }),
        )
        .await;

        let project = build_project(fs.clone(), &mut cx);
        let worktree_id = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", false, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(&cx, |tree, _| tree.id());

        // Spawn multiple tasks to open paths, repeating some paths.
        let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
            (
                p.open_buffer((worktree_id, "a.txt"), cx),
                p.open_buffer((worktree_id, "b.txt"), cx),
                p.open_buffer((worktree_id, "a.txt"), cx),
            )
        });

        let buffer_a_1 = buffer_a_1.await.unwrap();
        let buffer_a_2 = buffer_a_2.await.unwrap();
        let buffer_b = buffer_b.await.unwrap();
        assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
        assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");

        // There is only one buffer per path.
        let buffer_a_id = buffer_a_1.id();
        assert_eq!(buffer_a_2.id(), buffer_a_id);

        // Open the same path again while it is still open.
        drop(buffer_a_1);
        let buffer_a_3 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
            .await
            .unwrap();

        // There's still only one buffer per path.
        assert_eq!(buffer_a_3.id(), buffer_a_id);
    }
3151
    /// Walks a buffer through the dirty/saved lifecycle — edit, save, edit
    /// again, and file deletion — asserting the dirty flag and the exact
    /// event sequence emitted at each step.
    #[gpui::test]
    async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
        use std::fs;

        let dir = temp_tree(json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }));

        let project = build_project(Arc::new(RealFs), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());

        worktree.flush_fs_events(&cx).await;
        worktree
            .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let buffer1 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
            .await
            .unwrap();
        // Collects every event the buffer emits, for exact-sequence asserts.
        let events = Rc::new(RefCell::new(Vec::new()));

        // initially, the buffer isn't dirty.
        buffer1.update(&mut cx, |buffer, cx| {
            cx.subscribe(&buffer1, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();

            assert!(!buffer.is_dirty());
            assert!(events.borrow().is_empty());

            buffer.edit(vec![1..2], "", cx);
        });

        // after the first edit, the buffer is dirty, and emits a dirtied event.
        buffer1.update(&mut cx, |buffer, cx| {
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[language::Event::Edited, language::Event::Dirtied]
            );
            events.borrow_mut().clear();
            buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
        });

        // after saving, the buffer is not dirty, and emits a saved event.
        buffer1.update(&mut cx, |buffer, cx| {
            assert!(!buffer.is_dirty());
            assert_eq!(*events.borrow(), &[language::Event::Saved]);
            events.borrow_mut().clear();

            buffer.edit(vec![1..1], "B", cx);
            buffer.edit(vec![2..2], "D", cx);
        });

        // after editing again, the buffer is dirty, and emits another dirty event.
        buffer1.update(&mut cx, |buffer, cx| {
            assert!(buffer.text() == "aBDc");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[
                    language::Event::Edited,
                    language::Event::Dirtied,
                    language::Event::Edited,
                ],
            );
            events.borrow_mut().clear();

            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
            buffer.edit([1..3], "", cx);
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
        });

        assert_eq!(*events.borrow(), &[language::Event::Edited]);

        // When a file is deleted, the buffer is considered dirty.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer2 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
            .await
            .unwrap();
        buffer2.update(&mut cx, |_, cx| {
            cx.subscribe(&buffer2, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        fs::remove_file(dir.path().join("file2")).unwrap();
        buffer2.condition(&cx, |b, _| b.is_dirty()).await;
        assert_eq!(
            *events.borrow(),
            &[language::Event::Dirtied, language::Event::FileHandleChanged]
        );

        // When a file is already dirty when deleted, we don't emit a Dirtied event.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer3 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
            .await
            .unwrap();
        buffer3.update(&mut cx, |_, cx| {
            cx.subscribe(&buffer3, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        worktree.flush_fs_events(&cx).await;
        buffer3.update(&mut cx, |buffer, cx| {
            buffer.edit(Some(0..0), "x", cx);
        });
        events.borrow_mut().clear();
        fs::remove_file(dir.path().join("file3")).unwrap();
        buffer3
            .condition(&cx, |_, _| !events.borrow().is_empty())
            .await;
        assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
        cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
    }
3288
    // Verifies how an open buffer reacts to its backing file changing on disk:
    // a clean buffer is reloaded in place (and stays clean), while a buffer
    // with unsaved edits keeps them and is flagged as having a conflict.
    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
        use std::fs;

        let initial_contents = "aaa\nbbbbb\nc\n";
        let dir = temp_tree(json!({ "the-file": initial_contents }));

        // Use the real filesystem so `fs::write` below triggers genuine fs events.
        let project = build_project(Arc::new(RealFs), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        // Wait for the initial directory scan so the file is known to the worktree
        // before we open a buffer for it.
        worktree
            .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let abs_path = dir.path().join("the-file");
        let buffer = project
            .update(&mut cx, |p, cx| {
                p.open_buffer((worktree_id, "the-file"), cx)
            })
            .await
            .unwrap();

        // TODO
        // Add a cursor on each row.
        // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
        //     assert!(!buffer.is_dirty());
        //     buffer.add_selection_set(
        //         &(0..3)
        //             .map(|row| Selection {
        //                 id: row as usize,
        //                 start: Point::new(row, 1),
        //                 end: Point::new(row, 1),
        //                 reversed: false,
        //                 goal: SelectionGoal::None,
        //             })
        //             .collect::<Vec<_>>(),
        //         cx,
        //     )
        // });

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(&cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(&mut cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            // The on-disk reload must leave the buffer clean and conflict-free.
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer, making it dirty (but not yet conflicting).
        buffer.update(&mut cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }
3388
    // Publishes LSP diagnostics whose `related_information` entries
    // cross-reference each other, then verifies that `update_diagnostics`
    // groups each primary diagnostic with its hint diagnostics: the entries
    // share a `group_id`, exactly one entry per group is `is_primary`, and
    // `diagnostic_group` returns each group on its own.
    #[gpui::test]
    async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
        let fs = Arc::new(FakeFs::new(cx.background()));
        // A file containing a borrow error: `v` is pushed to while borrowed
        // by the loop. The diagnostic positions below refer to this text.
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = build_project(fs.clone(), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                // Primary diagnostic of the first group ("error 1"), whose
                // related information points at its own hint below.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                // Hint belonging to "error 1"; its related information points
                // back at the original diagnostic's range.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                // Primary diagnostic of the second group ("error 2"), with
                // two related hints at the borrow site.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                // First hint of "error 2", pointing back at the original.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                // Second hint of "error 2", also pointing back at the original.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(&mut cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());

        // All five diagnostics are present, ordered by position, with the two
        // groups (0 and 1) each containing exactly one primary entry.
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        // Group 0: "error 1" plus its single hint.
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        // Group 1: "error 2" plus its two hints.
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
3647
3648 fn build_project(fs: Arc<dyn Fs>, cx: &mut TestAppContext) -> ModelHandle<Project> {
3649 let languages = Arc::new(LanguageRegistry::new());
3650 let http_client = FakeHttpClient::with_404_response();
3651 let client = client::Client::new(http_client.clone());
3652 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
3653 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
3654 }
3655}