1pub mod fs;
2mod ignore;
3pub mod worktree;
4
5use anyhow::{anyhow, Result};
6use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
7use clock::ReplicaId;
8use collections::{hash_map, HashMap, HashSet};
9use futures::Future;
10use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
11use gpui::{
12 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
13 WeakModelHandle,
14};
15use language::{
16 point_from_lsp,
17 proto::{deserialize_anchor, serialize_anchor},
18 range_from_lsp, Bias, Buffer, CodeAction, Diagnostic, DiagnosticEntry, File as _, Language,
19 LanguageRegistry, PointUtf16, ToLspPosition, ToOffset, ToPointUtf16,
20};
21use lsp::{DiagnosticSeverity, LanguageServer};
22use postage::{prelude::Stream, watch};
23use smol::block_on;
24use std::{
25 convert::TryInto,
26 ops::Range,
27 path::{Path, PathBuf},
28 sync::{atomic::AtomicBool, Arc},
29 time::Instant,
30};
31use util::{post_inc, ResultExt, TryFutureExt as _};
32
33pub use fs::*;
34pub use worktree::*;
35
/// Root model for a single project: a set of worktrees plus the state needed
/// to collaborate on it over RPC (as the local host or a remote guest) and to
/// run language servers against its files.
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    // The project entry most recently activated, if any.
    active_entry: Option<ProjectEntry>,
    languages: Arc<LanguageRegistry>,
    // One language server per (worktree, language name) pair.
    language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
    client: Arc<client::Client>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    // Distinguishes a locally-hosted project from one joined remotely.
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    // Presumably incremented on DiagnosticsStart and decremented on
    // DiagnosticsFinish by the started/finished handlers (not visible here) —
    // TODO confirm against the rest of the file.
    language_servers_with_diagnostics_running: isize,
    // Weakly-held open buffers, keyed by the buffer's remote id (see
    // `register_buffer`). Dead entries are pruned lazily in `get_open_buffer`.
    open_buffers: HashMap<usize, WeakModelHandle<Buffer>>,
    // In-flight buffer loads keyed by path, so concurrent opens of the same
    // path share a single load; watchers receive the eventual result.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // Buffers shared with each guest peer, keyed by buffer remote id.
    shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
}
55
/// Handle to a worktree model. A `Weak` handle lets the worktree model be
/// dropped while the project still holds a slot for it (upgrades are
/// filtered out in `Project::worktrees`).
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
60
/// Network role of this project instance.
enum ProjectClientState {
    /// This instance is the host: it may acquire a server-assigned remote id
    /// and share its worktrees with guests.
    Local {
        is_shared: bool,
        // Remote id channel: `None` until the server assigns an id.
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        // Background task that registers the project whenever the client
        // (re)connects; kept alive by ownership here.
        _maintain_remote_id_task: Task<Option<()>>,
    },
    /// This instance joined someone else's project over RPC.
    Remote {
        // Set when the host stops sharing; makes the project read-only.
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
    },
}
74
/// Another participant in a shared project: their user identity, connection
/// peer id, and the replica id their edits are attributed to.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
81
/// Events emitted by `Project` for observers (e.g. UI) via `cx.emit`.
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntry>),
    WorktreeRemoved(WorktreeId),
    // Disk-based diagnostics lifecycle, driven by language-server progress
    // notifications (see `start_language_server`).
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    // Diagnostics changed for the given path.
    DiagnosticsUpdated(ProjectPath),
}
91
/// A path within the project: a worktree plus a path relative to that
/// worktree's root.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}
97
/// Per-severity counts of primary diagnostics (see `DiagnosticSummary::new`).
#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}
105
/// A resolved go-to-definition target: the buffer containing the definition
/// and the anchored range of the definition within it.
#[derive(Debug)]
pub struct Definition {
    pub target_buffer: ModelHandle<Buffer>,
    pub target_range: Range<language::Anchor>,
}
111
112impl DiagnosticSummary {
113 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
114 let mut this = Self {
115 error_count: 0,
116 warning_count: 0,
117 info_count: 0,
118 hint_count: 0,
119 };
120
121 for entry in diagnostics {
122 if entry.diagnostic.is_primary {
123 match entry.diagnostic.severity {
124 DiagnosticSeverity::ERROR => this.error_count += 1,
125 DiagnosticSeverity::WARNING => this.warning_count += 1,
126 DiagnosticSeverity::INFORMATION => this.info_count += 1,
127 DiagnosticSeverity::HINT => this.hint_count += 1,
128 _ => {}
129 }
130 }
131 }
132
133 this
134 }
135
136 pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
137 proto::DiagnosticSummary {
138 path: path.to_string_lossy().to_string(),
139 error_count: self.error_count as u32,
140 warning_count: self.warning_count as u32,
141 info_count: self.info_count as u32,
142 hint_count: self.hint_count as u32,
143 }
144 }
145}
146
/// Identifies a single entry within a worktree by the worktree's id and the
/// entry's id within it.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ProjectEntry {
    pub worktree_id: WorktreeId,
    pub entry_id: usize,
}
152
153impl Project {
    /// Creates a locally-hosted project.
    ///
    /// Spawns a background task that watches the client's connection status:
    /// whenever the client is connected it registers the project with the
    /// server, registers every local worktree under the assigned project id,
    /// and records the id via `set_remote_id`; on disconnect the id is
    /// cleared. The task holds only a weak handle to the project, so dropping
    /// the project stops the maintenance work.
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.recv().await {
                            // Stop silently once the project has been dropped.
                            if let Some(this) = this.upgrade(&cx) {
                                // Connected => (re-)register and obtain an id;
                                // any other status clears the remote id.
                                let remote_id = if let client::Status::Connected { .. } = status {
                                    let response = rpc.request(proto::RegisterProject {}).await?;
                                    Some(response.project_id)
                                } else {
                                    None
                                };

                                if let Some(project_id) = remote_id {
                                    // Kick off worktree registrations inside the
                                    // update, then await them outside it.
                                    let mut registrations = Vec::new();
                                    this.update(&mut cx, |this, cx| {
                                        for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                                            registrations.push(worktree.update(
                                                cx,
                                                |worktree, cx| {
                                                    let worktree = worktree.as_local_mut().unwrap();
                                                    worktree.register(project_id, cx)
                                                },
                                            ));
                                        }
                                    });
                                    for registration in registrations {
                                        registration.await?;
                                    }
                                }
                                this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                open_buffers: Default::default(),
                loading_buffers: Default::default(),
                shared_buffers: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
            }
        })
    }
226
    /// Joins a remote project by id.
    ///
    /// Authenticates and connects the client, requests to join the project,
    /// then: spawns loading of each worktree advertised in the join response,
    /// loads the users for all current collaborators, and finally constructs
    /// the model with RPC subscriptions for the guest-side message handlers.
    ///
    /// # Errors
    /// Fails if connecting, the join request, or loading collaborator users
    /// fails.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(&cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            // Let each worktree finish loading in the background.
            load_task.detach();
        }

        // Fetch all collaborator users before building the collaborator map.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        Ok(cx.add_model(|cx| {
            let mut this = Self {
                worktrees: Vec::new(),
                open_buffers: Default::default(),
                loading_buffers: Default::default(),
                shared_buffers: Default::default(),
                active_entry: None,
                collaborators,
                languages,
                user_store,
                fs,
                // Guest-side handlers for project messages from the host.
                subscriptions: vec![
                    client.subscribe_to_entity(remote_id, cx, Self::handle_unshare_project),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_add_collaborator),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_remove_collaborator),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_share_worktree),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_unregister_worktree),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_update_worktree),
                    client.subscribe_to_entity(
                        remote_id,
                        cx,
                        Self::handle_update_diagnostic_summary,
                    ),
                    client.subscribe_to_entity(
                        remote_id,
                        cx,
                        Self::handle_disk_based_diagnostics_updating,
                    ),
                    client.subscribe_to_entity(
                        remote_id,
                        cx,
                        Self::handle_disk_based_diagnostics_updated,
                    ),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer_file),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_reloaded),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved),
                ],
                client,
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        }))
    }
320
    /// Records the server-assigned project id (or clears it) for a local
    /// project, and rebuilds the host-side RPC subscriptions.
    ///
    /// All previous subscriptions are dropped first; when `remote_id` is
    /// `None` (disconnected) none are re-created.
    fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            *remote_id_tx.borrow_mut() = remote_id;
        }

        self.subscriptions.clear();
        if let Some(remote_id) = remote_id {
            let client = &self.client;
            // Host-side handlers for requests coming from guests.
            self.subscriptions.extend([
                client.subscribe_to_entity(remote_id, cx, Self::handle_open_buffer),
                client.subscribe_to_entity(remote_id, cx, Self::handle_close_buffer),
                client.subscribe_to_entity(remote_id, cx, Self::handle_add_collaborator),
                client.subscribe_to_entity(remote_id, cx, Self::handle_remove_collaborator),
                client.subscribe_to_entity(remote_id, cx, Self::handle_update_worktree),
                client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer),
                client.subscribe_to_entity(remote_id, cx, Self::handle_save_buffer),
                client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved),
                client.subscribe_to_entity(remote_id, cx, Self::handle_format_buffer),
                client.subscribe_to_entity(remote_id, cx, Self::handle_get_completions),
                client.subscribe_to_entity(
                    remote_id,
                    cx,
                    Self::handle_apply_additional_edits_for_completion,
                ),
                client.subscribe_to_entity(remote_id, cx, Self::handle_get_code_actions),
                client.subscribe_to_entity(remote_id, cx, Self::handle_apply_code_action),
                client.subscribe_to_entity(remote_id, cx, Self::handle_get_definition),
            ]);
        }
    }
351
352 pub fn remote_id(&self) -> Option<u64> {
353 match &self.client_state {
354 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
355 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
356 }
357 }
358
359 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
360 let mut id = None;
361 let mut watch = None;
362 match &self.client_state {
363 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
364 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
365 }
366
367 async move {
368 if let Some(id) = id {
369 return id;
370 }
371 let mut watch = watch.unwrap();
372 loop {
373 let id = *watch.borrow();
374 if let Some(id) = id {
375 return id;
376 }
377 watch.recv().await;
378 }
379 }
380 }
381
382 pub fn replica_id(&self) -> ReplicaId {
383 match &self.client_state {
384 ProjectClientState::Local { .. } => 0,
385 ProjectClientState::Remote { replica_id, .. } => *replica_id,
386 }
387 }
388
    /// The collaborators currently in this project, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
392
393 pub fn worktrees<'a>(
394 &'a self,
395 cx: &'a AppContext,
396 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
397 self.worktrees
398 .iter()
399 .filter_map(move |worktree| worktree.upgrade(cx))
400 }
401
402 pub fn worktree_for_id(
403 &self,
404 id: WorktreeId,
405 cx: &AppContext,
406 ) -> Option<ModelHandle<Worktree>> {
407 self.worktrees(cx)
408 .find(|worktree| worktree.read(cx).id() == id)
409 }
410
    /// Starts sharing this local project with collaborators.
    ///
    /// Marks the project as shared, tells the server via `ShareProject`, then
    /// shares every local worktree under the project id and notifies
    /// observers. Fails if the project has no remote id yet or is itself a
    /// remote project.
    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, _| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;
                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;
            // Start sharing every worktree inside the update, then await the
            // resulting tasks outside of it.
            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
447
448 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
449 let rpc = self.client.clone();
450 cx.spawn(|this, mut cx| async move {
451 let project_id = this.update(&mut cx, |this, _| {
452 if let ProjectClientState::Local {
453 is_shared,
454 remote_id_rx,
455 ..
456 } = &mut this.client_state
457 {
458 *is_shared = false;
459 remote_id_rx
460 .borrow()
461 .ok_or_else(|| anyhow!("no project id"))
462 } else {
463 Err(anyhow!("can't share a remote project"))
464 }
465 })?;
466
467 rpc.send(proto::UnshareProject { project_id })?;
468 this.update(&mut cx, |this, cx| {
469 this.collaborators.clear();
470 this.shared_buffers.clear();
471 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
472 worktree.update(cx, |worktree, _| {
473 worktree.as_local_mut().unwrap().unshare();
474 });
475 }
476 cx.notify()
477 });
478 Ok(())
479 })
480 }
481
482 pub fn is_read_only(&self) -> bool {
483 match &self.client_state {
484 ProjectClientState::Local { .. } => false,
485 ProjectClientState::Remote {
486 sharing_has_stopped,
487 ..
488 } => *sharing_has_stopped,
489 }
490 }
491
492 pub fn is_local(&self) -> bool {
493 match &self.client_state {
494 ProjectClientState::Local { .. } => true,
495 ProjectClientState::Remote { .. } => false,
496 }
497 }
498
    /// Opens the buffer for `path`, deduplicating concurrent requests.
    ///
    /// Resolution order: an already-open buffer is returned directly; a load
    /// already in flight for the same path is awaited and shared; otherwise a
    /// new load is started (locally or over RPC depending on the worktree)
    /// and its result is broadcast to all waiters through a watch channel.
    ///
    /// # Errors
    /// Fails if the worktree doesn't exist or the underlying load fails.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    // Publish the result (success or error) to all waiters.
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait until the watch carries a result, then clone it out.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.recv().await;
            }
        })
    }
559
    /// Loads a buffer from a local worktree, then registers it with this
    /// project (assigning its language and language server).
    ///
    /// Holds only a weak worktree handle across the await so the worktree may
    /// be removed while loading; that case fails with an error.
    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        let worktree = worktree.downgrade();
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            let worktree = worktree
                .upgrade(&cx)
                .ok_or_else(|| anyhow!("worktree was removed"))?;
            this.update(&mut cx, |this, cx| {
                this.register_buffer(&buffer, Some(&worktree), cx)
            })?;
            Ok(buffer)
        })
    }
582
    /// Opens a buffer in a remote worktree by asking the host over RPC, then
    /// deserializes the returned buffer state into a local model.
    ///
    /// NOTE(review): `self.remote_id().unwrap()` assumes a remote project
    /// always has an id — consistent with `ProjectClientState::Remote` always
    /// carrying one.
    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBuffer {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| {
                this.deserialize_remote_buffer(buffer, cx)
            })
        })
    }
608
    /// Opens a buffer for a file URI reported by a language server (e.g. a
    /// go-to-definition target that may live outside existing worktrees).
    ///
    /// If the path already belongs to a local worktree, that worktree is
    /// reused; otherwise a new (weak) worktree is created rooted at the path
    /// itself and the given language server is recorded for it so the buffer
    /// keeps its LSP association.
    fn open_local_buffer_from_lsp_path(
        &mut self,
        abs_path: lsp::Url,
        lang_name: String,
        lang_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, true, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers
                        .insert((worktree.read(cx).id(), lang_name), lang_server);
                });
                // The worktree is rooted at the file itself, so the relative
                // path is empty.
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
645
    /// Saves `buffer` to a new absolute path ("save as").
    ///
    /// Finds or creates a local worktree containing `abs_path`, delegates the
    /// actual save to that worktree, then re-runs language detection on the
    /// buffer since its file (and extension) may have changed.
    pub fn save_buffer_as(
        &self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
            });
            Ok(())
        })
    }
669
670 #[cfg(any(test, feature = "test-support"))]
671 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
672 let path = path.into();
673 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
674 self.open_buffers.iter().any(|(_, buffer)| {
675 if let Some(buffer) = buffer.upgrade(cx) {
676 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
677 if file.worktree == worktree && file.path() == &path.path {
678 return true;
679 }
680 }
681 }
682 false
683 })
684 } else {
685 false
686 }
687 }
688
689 fn get_open_buffer(
690 &mut self,
691 path: &ProjectPath,
692 cx: &mut ModelContext<Self>,
693 ) -> Option<ModelHandle<Buffer>> {
694 let mut result = None;
695 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
696 self.open_buffers.retain(|_, buffer| {
697 if let Some(buffer) = buffer.upgrade(cx) {
698 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
699 if file.worktree == worktree && file.path() == &path.path {
700 result = Some(buffer);
701 }
702 }
703 true
704 } else {
705 false
706 }
707 });
708 result
709 }
710
711 fn register_buffer(
712 &mut self,
713 buffer: &ModelHandle<Buffer>,
714 worktree: Option<&ModelHandle<Worktree>>,
715 cx: &mut ModelContext<Self>,
716 ) -> Result<()> {
717 if self
718 .open_buffers
719 .insert(buffer.read(cx).remote_id() as usize, buffer.downgrade())
720 .is_some()
721 {
722 return Err(anyhow!("registered the same buffer twice"));
723 }
724 self.assign_language_to_buffer(&buffer, worktree, cx);
725 Ok(())
726 }
727
    /// Detects the buffer's language from its full path and wires up the
    /// corresponding language server and any previously stored diagnostics.
    ///
    /// For local worktrees a language server is started on demand (one per
    /// worktree + language name) and assigned to the buffer. Returns `None`
    /// both when the buffer has no file and on normal completion — the
    /// `Option` return is used only for early exit via `?`.
    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        let (path, full_path) = {
            let file = buffer.read(cx).file()?;
            (file.path().clone(), file.full_path(cx))
        };

        // If the buffer has a language, set it and start/assign the language server
        if let Some(language) = self.languages.select_language(&full_path) {
            buffer.update(cx, |buffer, cx| {
                buffer.set_language(Some(language.clone()), cx);
            });

            // For local worktrees, start a language server if needed.
            // Also assign the language server and any previously stored diagnostics to the buffer.
            if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
                let worktree_id = local_worktree.id();
                let worktree_abs_path = local_worktree.abs_path().clone();

                // Reuse an existing server for this (worktree, language) pair,
                // or start one and cache it. Starting may fail, leaving the
                // buffer with no server.
                let language_server = match self
                    .language_servers
                    .entry((worktree_id, language.name().to_string()))
                {
                    hash_map::Entry::Occupied(e) => Some(e.get().clone()),
                    hash_map::Entry::Vacant(e) => Self::start_language_server(
                        self.client.clone(),
                        language.clone(),
                        &worktree_abs_path,
                        cx,
                    )
                    .map(|server| e.insert(server).clone()),
                };

                buffer.update(cx, |buffer, cx| {
                    buffer.set_language_server(language_server, cx);
                });
            }
        }

        // Apply diagnostics that arrived before the buffer was opened.
        if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
            if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
                buffer.update(cx, |buffer, cx| {
                    buffer.update_diagnostics(diagnostics, None, cx).log_err();
                });
            }
        }

        None
    }
781
    /// Starts a language server for `language` rooted at `worktree_path` and
    /// wires its diagnostics into the project.
    ///
    /// Two notification handlers feed an internal channel of `LspEvent`s:
    /// `PublishDiagnostics` produces updates (bracketed by synthetic
    /// start/finish events when the server has no progress token), and
    /// `$/progress` tracks the disk-based-diagnostics job count to emit real
    /// start/finish events. A background task drains the channel, applies
    /// diagnostics, and forwards start/finish over RPC when the project is
    /// shared. Returns `None` if the server fails to start.
    fn start_language_server(
        rpc: Arc<Client>,
        language: Arc<Language>,
        worktree_path: &Path,
        cx: &mut ModelContext<Self>,
    ) -> Option<Arc<LanguageServer>> {
        // Internal event protocol between the LSP notification callbacks
        // (which run off the main thread) and the processing task below.
        enum LspEvent {
            DiagnosticsStart,
            DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
            DiagnosticsFinish,
        }

        let language_server = language
            .start_server(worktree_path, cx)
            .log_err()
            .flatten()?;
        let disk_based_sources = language
            .disk_based_diagnostic_sources()
            .cloned()
            .unwrap_or_default();
        let disk_based_diagnostics_progress_token =
            language.disk_based_diagnostics_progress_token().cloned();
        let has_disk_based_diagnostic_progress_token =
            disk_based_diagnostics_progress_token.is_some();
        let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();

        // Listen for `PublishDiagnostics` notifications.
        language_server
            .on_notification::<lsp::notification::PublishDiagnostics, _>({
                let diagnostics_tx = diagnostics_tx.clone();
                move |params| {
                    // Without a progress token there is no other signal for
                    // start/finish, so bracket each update with both.
                    if !has_disk_based_diagnostic_progress_token {
                        block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                    }
                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params))).ok();
                    if !has_disk_based_diagnostic_progress_token {
                        block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                    }
                }
            })
            .detach();

        // Listen for `Progress` notifications. Send an event when the language server
        // transitions between running jobs and not running any jobs.
        let mut running_jobs_for_this_server: i32 = 0;
        language_server
            .on_notification::<lsp::notification::Progress, _>(move |params| {
                let token = match params.token {
                    lsp::NumberOrString::Number(_) => None,
                    lsp::NumberOrString::String(token) => Some(token),
                };

                // Only the language's designated disk-based-diagnostics token
                // is of interest here.
                if token == disk_based_diagnostics_progress_token {
                    match params.value {
                        lsp::ProgressParamsValue::WorkDone(progress) => match progress {
                            lsp::WorkDoneProgress::Begin(_) => {
                                running_jobs_for_this_server += 1;
                                // 0 -> 1 transition: diagnostics started.
                                if running_jobs_for_this_server == 1 {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                                }
                            }
                            lsp::WorkDoneProgress::End(_) => {
                                running_jobs_for_this_server -= 1;
                                // 1 -> 0 transition: diagnostics finished.
                                if running_jobs_for_this_server == 0 {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                                }
                            }
                            _ => {}
                        },
                    }
                }
            })
            .detach();

        // Process all the LSP events.
        cx.spawn_weak(|this, mut cx| async move {
            while let Ok(message) = diagnostics_rx.recv().await {
                // Stop once the project has been dropped.
                let this = cx.read(|cx| this.upgrade(cx))?;
                match message {
                    LspEvent::DiagnosticsStart => {
                        this.update(&mut cx, |this, cx| {
                            this.disk_based_diagnostics_started(cx);
                            // Relay to guests when the project is shared.
                            if let Some(project_id) = this.remote_id() {
                                rpc.send(proto::DiskBasedDiagnosticsUpdating { project_id })
                                    .log_err();
                            }
                        });
                    }
                    LspEvent::DiagnosticsUpdate(mut params) => {
                        // Let the language normalize server-specific output.
                        language.process_diagnostics(&mut params);
                        this.update(&mut cx, |this, cx| {
                            this.update_diagnostics(params, &disk_based_sources, cx)
                                .log_err();
                        });
                    }
                    LspEvent::DiagnosticsFinish => {
                        this.update(&mut cx, |this, cx| {
                            this.disk_based_diagnostics_finished(cx);
                            if let Some(project_id) = this.remote_id() {
                                rpc.send(proto::DiskBasedDiagnosticsUpdated { project_id })
                                    .log_err();
                            }
                        });
                    }
                }
            }
            Some(())
        })
        .detach();

        Some(language_server)
    }
894
895 pub fn update_diagnostics(
896 &mut self,
897 params: lsp::PublishDiagnosticsParams,
898 disk_based_sources: &HashSet<String>,
899 cx: &mut ModelContext<Self>,
900 ) -> Result<()> {
901 let abs_path = params
902 .uri
903 .to_file_path()
904 .map_err(|_| anyhow!("URI is not a file"))?;
905 let mut next_group_id = 0;
906 let mut diagnostics = Vec::default();
907 let mut primary_diagnostic_group_ids = HashMap::default();
908 let mut sources_by_group_id = HashMap::default();
909 let mut supporting_diagnostic_severities = HashMap::default();
910 for diagnostic in ¶ms.diagnostics {
911 let source = diagnostic.source.as_ref();
912 let code = diagnostic.code.as_ref().map(|code| match code {
913 lsp::NumberOrString::Number(code) => code.to_string(),
914 lsp::NumberOrString::String(code) => code.clone(),
915 });
916 let range = range_from_lsp(diagnostic.range);
917 let is_supporting = diagnostic
918 .related_information
919 .as_ref()
920 .map_or(false, |infos| {
921 infos.iter().any(|info| {
922 primary_diagnostic_group_ids.contains_key(&(
923 source,
924 code.clone(),
925 range_from_lsp(info.location.range),
926 ))
927 })
928 });
929
930 if is_supporting {
931 if let Some(severity) = diagnostic.severity {
932 supporting_diagnostic_severities
933 .insert((source, code.clone(), range), severity);
934 }
935 } else {
936 let group_id = post_inc(&mut next_group_id);
937 let is_disk_based =
938 source.map_or(false, |source| disk_based_sources.contains(source));
939
940 sources_by_group_id.insert(group_id, source);
941 primary_diagnostic_group_ids
942 .insert((source, code.clone(), range.clone()), group_id);
943
944 diagnostics.push(DiagnosticEntry {
945 range,
946 diagnostic: Diagnostic {
947 code: code.clone(),
948 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
949 message: diagnostic.message.clone(),
950 group_id,
951 is_primary: true,
952 is_valid: true,
953 is_disk_based,
954 },
955 });
956 if let Some(infos) = &diagnostic.related_information {
957 for info in infos {
958 if info.location.uri == params.uri && !info.message.is_empty() {
959 let range = range_from_lsp(info.location.range);
960 diagnostics.push(DiagnosticEntry {
961 range,
962 diagnostic: Diagnostic {
963 code: code.clone(),
964 severity: DiagnosticSeverity::INFORMATION,
965 message: info.message.clone(),
966 group_id,
967 is_primary: false,
968 is_valid: true,
969 is_disk_based,
970 },
971 });
972 }
973 }
974 }
975 }
976 }
977
978 for entry in &mut diagnostics {
979 let diagnostic = &mut entry.diagnostic;
980 if !diagnostic.is_primary {
981 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
982 if let Some(&severity) = supporting_diagnostic_severities.get(&(
983 source,
984 diagnostic.code.clone(),
985 entry.range.clone(),
986 )) {
987 diagnostic.severity = severity;
988 }
989 }
990 }
991
992 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
993 Ok(())
994 }
995
    /// Applies a new set of diagnostics for the file at `abs_path`: updates
    /// the matching open buffer (if any), stores the diagnostics on the
    /// owning local worktree, and emits `DiagnosticsUpdated`.
    ///
    /// NOTE(review): the open-buffer scan matches by relative path only (not
    /// worktree id) and `break`s after the first match — assumes at most one
    /// open buffer per path; confirm this holds with multiple worktrees.
    ///
    /// # Errors
    /// Fails if no local worktree contains `abs_path`, if the worktree is not
    /// local, or if a buffer/worktree diagnostics update fails.
    pub fn update_diagnostic_entries(
        &mut self,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };

        for buffer in self.open_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| *file.path() == project_path.path)
                {
                    buffer.update(cx, |buffer, cx| {
                        buffer.update_diagnostics(diagnostics.clone(), version, cx)
                    })?;
                    break;
                }
            }
        }
        // Persist on the worktree so buffers opened later pick these up
        // (see `assign_language_to_buffer`).
        worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(project_path.path.clone(), diagnostics, cx)
        })?;
        cx.emit(Event::DiagnosticsUpdated(project_path));
        Ok(())
    }
1034
    /// Resolves the definition(s) of the symbol at `position` in the given buffer.
    ///
    /// For a buffer in a local worktree, this issues a `textDocument/definition`
    /// request to the language server registered for the buffer's language and
    /// opens a buffer for each returned target location. For a remote project,
    /// the request is forwarded to the host via a `GetDefinition` RPC message.
    pub fn definition<T: ToOffset>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Definition>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Err(anyhow!("buffer does not belong to any worktree")));
        };

        if worktree.read(cx).as_local().is_some() {
            // Local case: look up the language server keyed by (worktree, language).
            let point = source_buffer.offset_to_point_utf16(position.to_offset(source_buffer));
            // Safe: a local worktree implies the file is local, so the path is Some.
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_name;
            let lang_server;
            if let Some(lang) = source_buffer.language() {
                lang_name = lang.name().to_string();
                if let Some(server) = self
                    .language_servers
                    .get(&(worktree.read(cx).id(), lang_name.clone()))
                {
                    lang_server = server.clone();
                } else {
                    return Task::ready(Err(anyhow!("buffer does not have a language server")));
                };
            } else {
                return Task::ready(Err(anyhow!("buffer does not have a language")));
            }

            cx.spawn(|this, mut cx| async move {
                let response = lang_server
                    .request::<lsp::request::GotoDefinition>(lsp::GotoDefinitionParams {
                        text_document_position_params: lsp::TextDocumentPositionParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                            ),
                            position: lsp::Position::new(point.row, point.column),
                        },
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await?;

                let mut definitions = Vec::new();
                if let Some(response) = response {
                    // Normalize the three LSP response shapes (scalar, array,
                    // link list) into a flat list of (uri, range) pairs.
                    let mut unresolved_locations = Vec::new();
                    match response {
                        lsp::GotoDefinitionResponse::Scalar(loc) => {
                            unresolved_locations.push((loc.uri, loc.range));
                        }
                        lsp::GotoDefinitionResponse::Array(locs) => {
                            unresolved_locations.extend(locs.into_iter().map(|l| (l.uri, l.range)));
                        }
                        lsp::GotoDefinitionResponse::Link(links) => {
                            unresolved_locations.extend(
                                links
                                    .into_iter()
                                    .map(|l| (l.target_uri, l.target_selection_range)),
                            );
                        }
                    }

                    for (target_uri, target_range) in unresolved_locations {
                        // Open (or reuse) a buffer for each target so the LSP
                        // range can be converted into buffer anchors.
                        let target_buffer_handle = this
                            .update(&mut cx, |this, cx| {
                                this.open_local_buffer_from_lsp_path(
                                    target_uri,
                                    lang_name.clone(),
                                    lang_server.clone(),
                                    cx,
                                )
                            })
                            .await?;

                        cx.read(|cx| {
                            let target_buffer = target_buffer_handle.read(cx);
                            // Clip, since the server's range may be out of date
                            // relative to the buffer's current contents.
                            let target_start = target_buffer
                                .clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
                            let target_end = target_buffer
                                .clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
                            definitions.push(Definition {
                                target_buffer: target_buffer_handle,
                                target_range: target_buffer.anchor_after(target_start)
                                    ..target_buffer.anchor_before(target_end),
                            });
                        });
                    }
                }

                Ok(definitions)
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote case: ask the host and deserialize the buffers/anchors
            // it sends back.
            let client = self.client.clone();
            let request = proto::GetDefinition {
                project_id,
                buffer_id: source_buffer.remote_id(),
                position: Some(serialize_anchor(&source_buffer.anchor_before(position))),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client.request(request).await?;
                this.update(&mut cx, |this, cx| {
                    let mut definitions = Vec::new();
                    for definition in response.definitions {
                        let target_buffer = this.deserialize_remote_buffer(
                            definition.buffer.ok_or_else(|| anyhow!("missing buffer"))?,
                            cx,
                        )?;
                        let target_start = definition
                            .target_start
                            .and_then(deserialize_anchor)
                            .ok_or_else(|| anyhow!("missing target start"))?;
                        let target_end = definition
                            .target_end
                            .and_then(deserialize_anchor)
                            .ok_or_else(|| anyhow!("missing target end"))?;
                        definitions.push(Definition {
                            target_buffer,
                            target_range: target_start..target_end,
                        })
                    }

                    Ok(definitions)
                })
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
1170
    /// Applies a code action to the project, returning the buffers that were
    /// edited and, for each, the anchor ranges and edit ids of its edits.
    ///
    /// Locally, the action is first resolved (or re-fetched if it carries no
    /// resolvable `data`), then its workspace edit is applied — which may
    /// create, rename, or delete files as well as edit buffers. On a remote
    /// project the request is forwarded to the host via RPC.
    ///
    /// When `push_to_history` is false, the edits are not recorded in the
    /// buffers' undo histories.
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction<language::Anchor>,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<(Range<language::Anchor>, clock::Local)>>>>
    {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            // Without a language or a language server there is nothing to
            // apply; succeed with an empty edit set.
            let lang_name = if let Some(lang) = buffer.language() {
                lang.name().to_string()
            } else {
                return Task::ready(Ok(Default::default()));
            };
            let lang_server = if let Some(language_server) = buffer.language_server() {
                language_server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };
            let position = action.position.to_point_utf16(buffer).to_lsp_position();
            let fs = self.fs.clone();

            cx.spawn(|this, mut cx| async move {
                if let Some(range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    // The action carries unresolved `data`: rewrite its range
                    // to the current position and ask the server to resolve it.
                    *range = serde_json::to_value(&lsp::Range::new(position, position)).unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    // No resolvable data: re-request the actions at this
                    // position and pick the one with a matching title.
                    let actions = buffer_handle
                        .update(&mut cx, |buffer, cx| {
                            buffer.code_actions(action.position.clone(), cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                // Flatten the workspace edit into one list of document-change
                // operations, whichever of the two LSP encodings it used.
                let mut operations = Vec::new();
                if let Some(edit) = action.lsp_action.edit {
                    if let Some(document_changes) = edit.document_changes {
                        match document_changes {
                            lsp::DocumentChanges::Edits(edits) => operations
                                .extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit)),
                            lsp::DocumentChanges::Operations(ops) => operations = ops,
                        }
                    } else if let Some(changes) = edit.changes {
                        operations.extend(changes.into_iter().map(|(uri, edits)| {
                            lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
                                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                                    uri,
                                    version: None,
                                },
                                edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
                            })
                        }));
                    }
                }

                let mut edited_buffers = HashMap::default();
                for operation in operations {
                    match operation {
                        lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
                            let abs_path = op
                                .uri
                                .to_file_path()
                                .map_err(|_| anyhow!("can't convert URI to path"))?;

                            if let Some(parent_path) = abs_path.parent() {
                                fs.create_dir(parent_path).await?;
                            }
                            // NOTE(review): `Path::ends_with` compares whole
                            // path components, not a trailing character, so
                            // this branch likely never matches a trailing
                            // slash — confirm the intended directory check.
                            if abs_path.ends_with("/") {
                                fs.create_dir(&abs_path).await?;
                            } else {
                                fs.create_file(
                                    &abs_path,
                                    op.options.map(Into::into).unwrap_or_default(),
                                )
                                .await?;
                            }
                        }
                        lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
                            let source_abs_path = op
                                .old_uri
                                .to_file_path()
                                .map_err(|_| anyhow!("can't convert URI to path"))?;
                            let target_abs_path = op
                                .new_uri
                                .to_file_path()
                                .map_err(|_| anyhow!("can't convert URI to path"))?;
                            fs.rename(
                                &source_abs_path,
                                &target_abs_path,
                                op.options.map(Into::into).unwrap_or_default(),
                            )
                            .await?;
                        }
                        lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
                            let abs_path = op
                                .uri
                                .to_file_path()
                                .map_err(|_| anyhow!("can't convert URI to path"))?;
                            let options = op.options.map(Into::into).unwrap_or_default();
                            // NOTE(review): same `ends_with("/")` caveat as in
                            // the Create arm above.
                            if abs_path.ends_with("/") {
                                fs.remove_dir(&abs_path, options).await?;
                            } else {
                                fs.remove_file(&abs_path, options).await?;
                            }
                        }
                        lsp::DocumentChangeOperation::Edit(op) => {
                            let buffer_to_edit = this
                                .update(&mut cx, |this, cx| {
                                    this.open_local_buffer_from_lsp_path(
                                        op.text_document.uri,
                                        lang_name.clone(),
                                        lang_server.clone(),
                                        cx,
                                    )
                                })
                                .await?;
                            let edits = buffer_to_edit.update(&mut cx, |buffer, cx| {
                                let edits = op.edits.into_iter().map(|edit| match edit {
                                    lsp::OneOf::Left(edit) => edit,
                                    lsp::OneOf::Right(edit) => edit.text_edit,
                                });
                                // All edits to one buffer go in a single
                                // transaction so they can be forgotten as a
                                // unit when history recording is disabled.
                                if !push_to_history {
                                    buffer.avoid_grouping_next_transaction();
                                }
                                buffer.start_transaction();
                                let edits =
                                    buffer.apply_lsp_edits(edits, op.text_document.version, cx);
                                if let Some(transaction_id) = buffer.end_transaction(cx) {
                                    if !push_to_history {
                                        buffer.forget_transaction(transaction_id);
                                    }
                                }

                                edits
                            })?;
                            edited_buffers
                                .entry(buffer_to_edit)
                                .or_insert(Vec::new())
                                .extend(edits);
                        }
                    }
                }

                Ok(edited_buffers)
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote case: the host applies the action; we deserialize the
            // edited buffers and wait for the edits to arrive.
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client.request(request).await?;
                let mut edited_buffers = HashMap::default();
                for buffer_edit in response.buffer_edits {
                    let buffer = buffer_edit
                        .buffer
                        .ok_or_else(|| anyhow!("invalid buffer"))?;
                    let buffer = this.update(&mut cx, |this, cx| {
                        this.deserialize_remote_buffer(buffer, cx)
                    })?;

                    let buffer_edits = edited_buffers.entry(buffer.clone()).or_insert(Vec::new());
                    for edit in buffer_edit.edits {
                        buffer_edits.push(language::proto::deserialize_code_action_edit(edit)?);
                    }

                    // Wait until the host's edit operations have actually been
                    // applied to this replica before touching its history.
                    buffer
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(buffer_edits.iter().map(|e| e.1))
                        })
                        .await;

                    if push_to_history {
                        buffer.update(&mut cx, |buffer, _| {
                            buffer
                                .push_transaction(buffer_edits.iter().map(|e| e.1), Instant::now());
                        });
                    }
                }
                Ok(edited_buffers)
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
1372
1373 pub fn find_or_create_local_worktree(
1374 &self,
1375 abs_path: impl AsRef<Path>,
1376 weak: bool,
1377 cx: &mut ModelContext<Self>,
1378 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1379 let abs_path = abs_path.as_ref();
1380 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1381 Task::ready(Ok((tree.clone(), relative_path.into())))
1382 } else {
1383 let worktree = self.create_local_worktree(abs_path, weak, cx);
1384 cx.foreground()
1385 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1386 }
1387 }
1388
1389 fn find_local_worktree(
1390 &self,
1391 abs_path: &Path,
1392 cx: &AppContext,
1393 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1394 for tree in self.worktrees(cx) {
1395 if let Some(relative_path) = tree
1396 .read(cx)
1397 .as_local()
1398 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1399 {
1400 return Some((tree.clone(), relative_path.into()));
1401 }
1402 }
1403 None
1404 }
1405
1406 pub fn is_shared(&self) -> bool {
1407 match &self.client_state {
1408 ProjectClientState::Local { is_shared, .. } => *is_shared,
1409 ProjectClientState::Remote { .. } => false,
1410 }
1411 }
1412
    /// Creates a new local worktree rooted at `abs_path` and adds it to this
    /// project. If the project has a remote id, the worktree is registered
    /// with the server, and also shared if the project is currently shared.
    fn create_local_worktree(
        &self,
        abs_path: impl AsRef<Path>,
        weak: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let path = Arc::from(abs_path.as_ref());
        cx.spawn(|project, mut cx| async move {
            let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;

            // Add the worktree and capture the project's sharing state in a
            // single update, so the state we mirror below is consistent.
            let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
                project.add_worktree(&worktree, cx);
                (project.remote_id(), project.is_shared())
            });

            if let Some(project_id) = remote_project_id {
                // Register first; only share once registration has succeeded.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_local_mut().unwrap().register(project_id, cx)
                    })
                    .await?;
                if is_shared {
                    worktree
                        .update(&mut cx, |worktree, cx| {
                            worktree.as_local_mut().unwrap().share(project_id, cx)
                        })
                        .await?;
                }
            }

            Ok(worktree)
        })
    }
1448
1449 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1450 self.worktrees.retain(|worktree| {
1451 worktree
1452 .upgrade(cx)
1453 .map_or(false, |w| w.read(cx).id() != id)
1454 });
1455 cx.notify();
1456 }
1457
1458 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
1459 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
1460 if worktree.read(cx).is_local() {
1461 cx.subscribe(&worktree, |this, worktree, _, cx| {
1462 this.update_local_worktree_buffers(worktree, cx);
1463 })
1464 .detach();
1465 }
1466
1467 let push_weak_handle = {
1468 let worktree = worktree.read(cx);
1469 worktree.is_local() && worktree.is_weak()
1470 };
1471 if push_weak_handle {
1472 cx.observe_release(&worktree, |this, cx| {
1473 this.worktrees
1474 .retain(|worktree| worktree.upgrade(cx).is_some());
1475 cx.notify();
1476 })
1477 .detach();
1478 self.worktrees
1479 .push(WorktreeHandle::Weak(worktree.downgrade()));
1480 } else {
1481 self.worktrees
1482 .push(WorktreeHandle::Strong(worktree.clone()));
1483 }
1484 cx.notify();
1485 }
1486
    /// Reconciles every open buffer's `File` with the latest snapshot of the
    /// given local worktree, notifying remote peers of updated file metadata
    /// and pruning weak handles to buffers that have been dropped.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        let mut buffers_to_delete = Vec::new();
        for (buffer_id, buffer) in &self.open_buffers {
            if let Some(buffer) = buffer.upgrade(cx) {
                buffer.update(cx, |buffer, cx| {
                    if let Some(old_file) = File::from_dyn(buffer.file()) {
                        // Only buffers belonging to this worktree are affected.
                        if old_file.worktree != worktree_handle {
                            return;
                        }

                        // Resolve the file's new state: prefer lookup by entry
                        // id (survives renames), then by the old path, and
                        // otherwise keep the old metadata with no entry (the
                        // file is absent from the current snapshot).
                        let new_file = if let Some(entry) = old_file
                            .entry_id
                            .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else if let Some(entry) =
                            snapshot.entry_for_path(old_file.path().as_ref())
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else {
                            File {
                                is_local: true,
                                entry_id: None,
                                path: old_file.path().clone(),
                                mtime: old_file.mtime(),
                                worktree: worktree_handle.clone(),
                            }
                        };

                        // Keep guests' view of this buffer's file in sync;
                        // failures are logged rather than propagated.
                        if let Some(project_id) = self.remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: *buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }
                        buffer.file_updated(Box::new(new_file), cx).detach();
                    }
                });
            } else {
                // Buffer was dropped elsewhere; remember its id so the weak
                // handle can be removed after iteration.
                buffers_to_delete.push(*buffer_id);
            }
        }

        for buffer_id in buffers_to_delete {
            self.open_buffers.remove(&buffer_id);
        }
    }
1554
1555 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
1556 let new_active_entry = entry.and_then(|project_path| {
1557 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1558 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
1559 Some(ProjectEntry {
1560 worktree_id: project_path.worktree_id,
1561 entry_id: entry.id,
1562 })
1563 });
1564 if new_active_entry != self.active_entry {
1565 self.active_entry = new_active_entry;
1566 cx.emit(Event::ActiveEntryChanged(new_active_entry));
1567 }
1568 }
1569
1570 pub fn is_running_disk_based_diagnostics(&self) -> bool {
1571 self.language_servers_with_diagnostics_running > 0
1572 }
1573
1574 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
1575 let mut summary = DiagnosticSummary::default();
1576 for (_, path_summary) in self.diagnostic_summaries(cx) {
1577 summary.error_count += path_summary.error_count;
1578 summary.warning_count += path_summary.warning_count;
1579 summary.info_count += path_summary.info_count;
1580 summary.hint_count += path_summary.hint_count;
1581 }
1582 summary
1583 }
1584
1585 pub fn diagnostic_summaries<'a>(
1586 &'a self,
1587 cx: &'a AppContext,
1588 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
1589 self.worktrees(cx).flat_map(move |worktree| {
1590 let worktree = worktree.read(cx);
1591 let worktree_id = worktree.id();
1592 worktree
1593 .diagnostic_summaries()
1594 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
1595 })
1596 }
1597
1598 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
1599 self.language_servers_with_diagnostics_running += 1;
1600 if self.language_servers_with_diagnostics_running == 1 {
1601 cx.emit(Event::DiskBasedDiagnosticsStarted);
1602 }
1603 }
1604
1605 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
1606 cx.emit(Event::DiskBasedDiagnosticsUpdated);
1607 self.language_servers_with_diagnostics_running -= 1;
1608 if self.language_servers_with_diagnostics_running == 0 {
1609 cx.emit(Event::DiskBasedDiagnosticsFinished);
1610 }
1611 }
1612
    /// The currently active project entry, if one is set.
    pub fn active_entry(&self) -> Option<ProjectEntry> {
        self.active_entry
    }
1616
1617 // RPC message handlers
1618
1619 fn handle_unshare_project(
1620 &mut self,
1621 _: TypedEnvelope<proto::UnshareProject>,
1622 _: Arc<Client>,
1623 cx: &mut ModelContext<Self>,
1624 ) -> Result<()> {
1625 if let ProjectClientState::Remote {
1626 sharing_has_stopped,
1627 ..
1628 } = &mut self.client_state
1629 {
1630 *sharing_has_stopped = true;
1631 self.collaborators.clear();
1632 cx.notify();
1633 Ok(())
1634 } else {
1635 unreachable!()
1636 }
1637 }
1638
1639 fn handle_add_collaborator(
1640 &mut self,
1641 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
1642 _: Arc<Client>,
1643 cx: &mut ModelContext<Self>,
1644 ) -> Result<()> {
1645 let user_store = self.user_store.clone();
1646 let collaborator = envelope
1647 .payload
1648 .collaborator
1649 .take()
1650 .ok_or_else(|| anyhow!("empty collaborator"))?;
1651
1652 cx.spawn(|this, mut cx| {
1653 async move {
1654 let collaborator =
1655 Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
1656 this.update(&mut cx, |this, cx| {
1657 this.collaborators
1658 .insert(collaborator.peer_id, collaborator);
1659 cx.notify();
1660 });
1661 Ok(())
1662 }
1663 .log_err()
1664 })
1665 .detach();
1666
1667 Ok(())
1668 }
1669
1670 fn handle_remove_collaborator(
1671 &mut self,
1672 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
1673 _: Arc<Client>,
1674 cx: &mut ModelContext<Self>,
1675 ) -> Result<()> {
1676 let peer_id = PeerId(envelope.payload.peer_id);
1677 let replica_id = self
1678 .collaborators
1679 .remove(&peer_id)
1680 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
1681 .replica_id;
1682 self.shared_buffers.remove(&peer_id);
1683 for (_, buffer) in &self.open_buffers {
1684 if let Some(buffer) = buffer.upgrade(cx) {
1685 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
1686 }
1687 }
1688 cx.notify();
1689 Ok(())
1690 }
1691
1692 fn handle_share_worktree(
1693 &mut self,
1694 envelope: TypedEnvelope<proto::ShareWorktree>,
1695 client: Arc<Client>,
1696 cx: &mut ModelContext<Self>,
1697 ) -> Result<()> {
1698 let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
1699 let replica_id = self.replica_id();
1700 let worktree = envelope
1701 .payload
1702 .worktree
1703 .ok_or_else(|| anyhow!("invalid worktree"))?;
1704 let (worktree, load_task) = Worktree::remote(remote_id, replica_id, worktree, client, cx);
1705 self.add_worktree(&worktree, cx);
1706 load_task.detach();
1707 Ok(())
1708 }
1709
1710 fn handle_unregister_worktree(
1711 &mut self,
1712 envelope: TypedEnvelope<proto::UnregisterWorktree>,
1713 _: Arc<Client>,
1714 cx: &mut ModelContext<Self>,
1715 ) -> Result<()> {
1716 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
1717 self.remove_worktree(worktree_id, cx);
1718 Ok(())
1719 }
1720
1721 fn handle_update_worktree(
1722 &mut self,
1723 envelope: TypedEnvelope<proto::UpdateWorktree>,
1724 _: Arc<Client>,
1725 cx: &mut ModelContext<Self>,
1726 ) -> Result<()> {
1727 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
1728 if let Some(worktree) = self.worktree_for_id(worktree_id, cx) {
1729 worktree.update(cx, |worktree, cx| {
1730 let worktree = worktree.as_remote_mut().unwrap();
1731 worktree.update_from_remote(envelope, cx)
1732 })?;
1733 }
1734 Ok(())
1735 }
1736
1737 fn handle_update_diagnostic_summary(
1738 &mut self,
1739 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
1740 _: Arc<Client>,
1741 cx: &mut ModelContext<Self>,
1742 ) -> Result<()> {
1743 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
1744 if let Some(worktree) = self.worktree_for_id(worktree_id, cx) {
1745 if let Some(summary) = envelope.payload.summary {
1746 let project_path = ProjectPath {
1747 worktree_id,
1748 path: Path::new(&summary.path).into(),
1749 };
1750 worktree.update(cx, |worktree, _| {
1751 worktree
1752 .as_remote_mut()
1753 .unwrap()
1754 .update_diagnostic_summary(project_path.path.clone(), &summary);
1755 });
1756 cx.emit(Event::DiagnosticsUpdated(project_path));
1757 }
1758 }
1759 Ok(())
1760 }
1761
    /// Forwards the host's notification that a disk-based diagnostics pass
    /// has started on its side.
    fn handle_disk_based_diagnostics_updating(
        &mut self,
        _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
        _: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        self.disk_based_diagnostics_started(cx);
        Ok(())
    }
1771
    /// Forwards the host's notification that a disk-based diagnostics pass
    /// has finished on its side.
    fn handle_disk_based_diagnostics_updated(
        &mut self,
        _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
        _: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        self.disk_based_diagnostics_finished(cx);
        Ok(())
    }
1781
1782 pub fn handle_update_buffer(
1783 &mut self,
1784 envelope: TypedEnvelope<proto::UpdateBuffer>,
1785 _: Arc<Client>,
1786 cx: &mut ModelContext<Self>,
1787 ) -> Result<()> {
1788 let payload = envelope.payload.clone();
1789 let buffer_id = payload.buffer_id as usize;
1790 let ops = payload
1791 .operations
1792 .into_iter()
1793 .map(|op| language::proto::deserialize_operation(op))
1794 .collect::<Result<Vec<_>, _>>()?;
1795 if let Some(buffer) = self.open_buffers.get_mut(&buffer_id) {
1796 if let Some(buffer) = buffer.upgrade(cx) {
1797 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
1798 }
1799 }
1800 Ok(())
1801 }
1802
1803 pub fn handle_update_buffer_file(
1804 &mut self,
1805 envelope: TypedEnvelope<proto::UpdateBufferFile>,
1806 _: Arc<Client>,
1807 cx: &mut ModelContext<Self>,
1808 ) -> Result<()> {
1809 let payload = envelope.payload.clone();
1810 let buffer_id = payload.buffer_id as usize;
1811 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
1812 let worktree = self
1813 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
1814 .ok_or_else(|| anyhow!("no such worktree"))?;
1815 let file = File::from_proto(file, worktree.clone(), cx)?;
1816 let buffer = self
1817 .open_buffers
1818 .get_mut(&buffer_id)
1819 .and_then(|b| b.upgrade(cx))
1820 .ok_or_else(|| anyhow!("no such buffer"))?;
1821 buffer.update(cx, |buffer, cx| {
1822 buffer.file_updated(Box::new(file), cx).detach();
1823 });
1824
1825 Ok(())
1826 }
1827
    /// Handles a guest's request to save one of the host's shared buffers,
    /// responding with the saved version and mtime once the save completes.
    pub fn handle_save_buffer(
        &mut self,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        rpc: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let sender_id = envelope.original_sender_id()?;
        let project_id = self.remote_id().ok_or_else(|| anyhow!("not connected"))?;
        // The buffer must already have been shared with this peer.
        let buffer = self
            .shared_buffers
            .get(&sender_id)
            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
        let receipt = envelope.receipt();
        let buffer_id = envelope.payload.buffer_id;
        // Kick off the save on the foreground context...
        let save = cx.spawn(|_, mut cx| async move {
            buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await
        });

        // ...and await the result plus send the response in the background,
        // logging (rather than propagating) any failure.
        cx.background()
            .spawn(
                async move {
                    let (version, mtime) = save.await?;

                    rpc.respond(
                        receipt,
                        proto::BufferSaved {
                            project_id,
                            buffer_id,
                            version: (&version).into(),
                            mtime: Some(mtime.into()),
                        },
                    )?;

                    Ok(())
                }
                .log_err(),
            )
            .detach();
        Ok(())
    }
1869
    /// Handles a guest's request to format one of the host's shared buffers,
    /// acknowledging after the formatting edits have been transmitted.
    pub fn handle_format_buffer(
        &mut self,
        envelope: TypedEnvelope<proto::FormatBuffer>,
        rpc: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let receipt = envelope.receipt();
        let sender_id = envelope.original_sender_id()?;
        // The buffer must already have been shared with this peer.
        let buffer = self
            .shared_buffers
            .get(&sender_id)
            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
        cx.spawn(|_, mut cx| async move {
            let format = buffer.update(&mut cx, |buffer, cx| buffer.format(cx)).await;
            // We spawn here in order to enqueue the sending of `Ack` *after* transmission of edits
            // associated with formatting.
            cx.spawn(|_| async move {
                match format {
                    Ok(()) => rpc.respond(receipt, proto::Ack {})?,
                    Err(error) => rpc.respond_with_error(
                        receipt,
                        proto::Error {
                            message: error.to_string(),
                        },
                    )?,
                }
                Ok::<_, anyhow::Error>(())
            })
            .await
            .log_err();
        })
        .detach();
        Ok(())
    }
1905
1906 fn handle_get_completions(
1907 &mut self,
1908 envelope: TypedEnvelope<proto::GetCompletions>,
1909 rpc: Arc<Client>,
1910 cx: &mut ModelContext<Self>,
1911 ) -> Result<()> {
1912 let receipt = envelope.receipt();
1913 let sender_id = envelope.original_sender_id()?;
1914 let buffer = self
1915 .shared_buffers
1916 .get(&sender_id)
1917 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
1918 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
1919 let position = envelope
1920 .payload
1921 .position
1922 .and_then(language::proto::deserialize_anchor)
1923 .ok_or_else(|| anyhow!("invalid position"))?;
1924 cx.spawn(|_, mut cx| async move {
1925 match buffer
1926 .update(&mut cx, |buffer, cx| buffer.completions(position, cx))
1927 .await
1928 {
1929 Ok(completions) => rpc.respond(
1930 receipt,
1931 proto::GetCompletionsResponse {
1932 completions: completions
1933 .iter()
1934 .map(language::proto::serialize_completion)
1935 .collect(),
1936 },
1937 ),
1938 Err(error) => rpc.respond_with_error(
1939 receipt,
1940 proto::Error {
1941 message: error.to_string(),
1942 },
1943 ),
1944 }
1945 })
1946 .detach_and_log_err(cx);
1947 Ok(())
1948 }
1949
1950 fn handle_apply_additional_edits_for_completion(
1951 &mut self,
1952 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
1953 rpc: Arc<Client>,
1954 cx: &mut ModelContext<Self>,
1955 ) -> Result<()> {
1956 let receipt = envelope.receipt();
1957 let sender_id = envelope.original_sender_id()?;
1958 let buffer = self
1959 .shared_buffers
1960 .get(&sender_id)
1961 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
1962 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
1963 let language = buffer.read(cx).language();
1964 let completion = language::proto::deserialize_completion(
1965 envelope
1966 .payload
1967 .completion
1968 .ok_or_else(|| anyhow!("invalid completion"))?,
1969 language,
1970 )?;
1971 cx.spawn(|_, mut cx| async move {
1972 match buffer
1973 .update(&mut cx, |buffer, cx| {
1974 buffer.apply_additional_edits_for_completion(completion, false, cx)
1975 })
1976 .await
1977 {
1978 Ok(edit_ids) => rpc.respond(
1979 receipt,
1980 proto::ApplyCompletionAdditionalEditsResponse {
1981 additional_edits: edit_ids
1982 .into_iter()
1983 .map(language::proto::serialize_edit_id)
1984 .collect(),
1985 },
1986 ),
1987 Err(error) => rpc.respond_with_error(
1988 receipt,
1989 proto::Error {
1990 message: error.to_string(),
1991 },
1992 ),
1993 }
1994 })
1995 .detach_and_log_err(cx);
1996 Ok(())
1997 }
1998
1999 fn handle_get_code_actions(
2000 &mut self,
2001 envelope: TypedEnvelope<proto::GetCodeActions>,
2002 rpc: Arc<Client>,
2003 cx: &mut ModelContext<Self>,
2004 ) -> Result<()> {
2005 let receipt = envelope.receipt();
2006 let sender_id = envelope.original_sender_id()?;
2007 let buffer = self
2008 .shared_buffers
2009 .get(&sender_id)
2010 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2011 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2012 let position = envelope
2013 .payload
2014 .position
2015 .and_then(language::proto::deserialize_anchor)
2016 .ok_or_else(|| anyhow!("invalid position"))?;
2017 cx.spawn(|_, mut cx| async move {
2018 match buffer
2019 .update(&mut cx, |buffer, cx| buffer.code_actions(position, cx))
2020 .await
2021 {
2022 Ok(completions) => rpc.respond(
2023 receipt,
2024 proto::GetCodeActionsResponse {
2025 actions: completions
2026 .iter()
2027 .map(language::proto::serialize_code_action)
2028 .collect(),
2029 },
2030 ),
2031 Err(error) => rpc.respond_with_error(
2032 receipt,
2033 proto::Error {
2034 message: error.to_string(),
2035 },
2036 ),
2037 }
2038 })
2039 .detach_and_log_err(cx);
2040 Ok(())
2041 }
2042
    /// Handles a guest's request to apply a code action on the host, applying
    /// it locally and responding with the serialized per-buffer edits.
    fn handle_apply_code_action(
        &mut self,
        envelope: TypedEnvelope<proto::ApplyCodeAction>,
        rpc: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let receipt = envelope.receipt();
        let sender_id = envelope.original_sender_id()?;
        let buffer = self
            .shared_buffers
            .get(&sender_id)
            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
        let action = language::proto::deserialize_code_action(
            envelope
                .payload
                .action
                .ok_or_else(|| anyhow!("invalid action"))?,
        )?;
        // Applied with `push_to_history == false`: the remote branch of
        // `apply_code_action` pushes the transaction on the guest's side.
        let apply_code_action = self.apply_code_action(buffer, action, false, cx);
        cx.spawn(|this, mut cx| async move {
            match apply_code_action.await {
                Ok(edited_buffers) => this.update(&mut cx, |this, cx| {
                    let buffer_edits = edited_buffers
                        .into_iter()
                        .map(|(buffer, edits)| proto::CodeActionBufferEdits {
                            buffer: Some(this.serialize_buffer_for_peer(&buffer, sender_id, cx)),
                            edits: edits
                                .into_iter()
                                .map(|(range, edit_id)| {
                                    language::proto::serialize_code_action_edit(edit_id, &range)
                                })
                                .collect(),
                        })
                        .collect();
                    rpc.respond(receipt, proto::ApplyCodeActionResponse { buffer_edits })
                }),
                Err(error) => rpc.respond_with_error(
                    receipt,
                    proto::Error {
                        message: error.to_string(),
                    },
                ),
            }
        })
        .detach_and_log_err(cx);
        Ok(())
    }
2091
    /// Handles a guest's go-to-definition request by resolving it locally and
    /// responding with serialized target buffers and anchor ranges.
    pub fn handle_get_definition(
        &mut self,
        envelope: TypedEnvelope<proto::GetDefinition>,
        rpc: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let receipt = envelope.receipt();
        let sender_id = envelope.original_sender_id()?;
        let source_buffer = self
            .shared_buffers
            .get(&sender_id)
            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
        let position = envelope
            .payload
            .position
            .and_then(deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        // Reject anchors this replica cannot resolve — presumably ones that
        // reference edits not yet seen here; confirm with `Buffer::can_resolve`.
        if !source_buffer.read(cx).can_resolve(&position) {
            return Err(anyhow!("cannot resolve position"));
        }

        let definitions = self.definition(&source_buffer, position, cx);
        cx.spawn(|this, mut cx| async move {
            let definitions = definitions.await?;
            let mut response = proto::GetDefinitionResponse {
                definitions: Default::default(),
            };
            this.update(&mut cx, |this, cx| {
                for definition in definitions {
                    // Serializing the buffer registers it as shared with this
                    // peer (see `serialize_buffer_for_peer`).
                    let buffer =
                        this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx);
                    response.definitions.push(proto::Definition {
                        target_start: Some(serialize_anchor(&definition.target_range.start)),
                        target_end: Some(serialize_anchor(&definition.target_range.end)),
                        buffer: Some(buffer),
                    });
                }
            });
            rpc.respond(receipt, response)?;
            Ok::<_, anyhow::Error>(())
        })
        .detach_and_log_err(cx);

        Ok(())
    }
2138
    /// Handles a peer's request to open a buffer at a path within one of
    /// our worktrees, responding with the buffer's serialized state (or
    /// just its id if this peer already has it).
    pub fn handle_open_buffer(
        &mut self,
        envelope: TypedEnvelope<proto::OpenBuffer>,
        rpc: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> anyhow::Result<()> {
        let receipt = envelope.receipt();
        let peer_id = envelope.original_sender_id()?;
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let open_buffer = self.open_buffer(
            ProjectPath {
                worktree_id,
                path: PathBuf::from(envelope.payload.path).into(),
            },
            cx,
        );
        // Respond asynchronously once the buffer load completes; errors are
        // logged rather than propagated, since the request was accepted.
        cx.spawn(|this, mut cx| {
            async move {
                let buffer = open_buffer.await?;
                let buffer = this.update(&mut cx, |this, cx| {
                    this.serialize_buffer_for_peer(&buffer, peer_id, cx)
                });
                rpc.respond(
                    receipt,
                    proto::OpenBufferResponse {
                        buffer: Some(buffer),
                    },
                )
            }
            .log_err()
        })
        .detach();
        Ok(())
    }
2173
2174 fn serialize_buffer_for_peer(
2175 &mut self,
2176 buffer: &ModelHandle<Buffer>,
2177 peer_id: PeerId,
2178 cx: &AppContext,
2179 ) -> proto::Buffer {
2180 let buffer_id = buffer.read(cx).remote_id();
2181 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2182 match shared_buffers.entry(buffer_id) {
2183 hash_map::Entry::Occupied(_) => proto::Buffer {
2184 variant: Some(proto::buffer::Variant::Id(buffer_id)),
2185 },
2186 hash_map::Entry::Vacant(entry) => {
2187 entry.insert(buffer.clone());
2188 proto::Buffer {
2189 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
2190 }
2191 }
2192 }
2193 }
2194
    /// Materializes a `proto::Buffer` received from a remote peer.
    ///
    /// An `Id` variant refers to a buffer we already have open locally; a
    /// `State` variant carries the full buffer contents, which we
    /// instantiate and register, resolving its file against the matching
    /// worktree when one is present.
    fn deserialize_remote_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Result<ModelHandle<Buffer>> {
        match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
            proto::buffer::Variant::Id(id) => self
                .open_buffers
                .get(&(id as usize))
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("no buffer exists for id {}", id)),
            proto::buffer::Variant::State(mut buffer) => {
                let mut buffer_worktree = None;
                let mut buffer_file = None;
                // Resolve the file metadata (if any) against a local worktree
                // before constructing the buffer.
                if let Some(file) = buffer.file.take() {
                    let worktree_id = WorktreeId::from_proto(file.worktree_id);
                    let worktree = self
                        .worktree_for_id(worktree_id, cx)
                        .ok_or_else(|| anyhow!("no worktree found for id {}", file.worktree_id))?;
                    buffer_file = Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                        as Box<dyn language::File>);
                    buffer_worktree = Some(worktree);
                }

                // NOTE(review): a malformed `State` message panics here via
                // `unwrap` instead of returning an error — confirm whether
                // peer input can reach this point.
                let buffer = cx.add_model(|cx| {
                    Buffer::from_proto(self.replica_id(), buffer, buffer_file, cx).unwrap()
                });
                self.register_buffer(&buffer, buffer_worktree.as_ref(), cx)?;
                Ok(buffer)
            }
        }
    }
2227
2228 pub fn handle_close_buffer(
2229 &mut self,
2230 envelope: TypedEnvelope<proto::CloseBuffer>,
2231 _: Arc<Client>,
2232 cx: &mut ModelContext<Self>,
2233 ) -> anyhow::Result<()> {
2234 if let Some(shared_buffers) = self.shared_buffers.get_mut(&envelope.original_sender_id()?) {
2235 shared_buffers.remove(&envelope.payload.buffer_id);
2236 cx.notify();
2237 }
2238 Ok(())
2239 }
2240
2241 pub fn handle_buffer_saved(
2242 &mut self,
2243 envelope: TypedEnvelope<proto::BufferSaved>,
2244 _: Arc<Client>,
2245 cx: &mut ModelContext<Self>,
2246 ) -> Result<()> {
2247 let payload = envelope.payload.clone();
2248 let buffer = self
2249 .open_buffers
2250 .get(&(payload.buffer_id as usize))
2251 .and_then(|buffer| buffer.upgrade(cx));
2252 if let Some(buffer) = buffer {
2253 buffer.update(cx, |buffer, cx| {
2254 let version = payload.version.try_into()?;
2255 let mtime = payload
2256 .mtime
2257 .ok_or_else(|| anyhow!("missing mtime"))?
2258 .into();
2259 buffer.did_save(version, mtime, None, cx);
2260 Result::<_, anyhow::Error>::Ok(())
2261 })?;
2262 }
2263 Ok(())
2264 }
2265
2266 pub fn handle_buffer_reloaded(
2267 &mut self,
2268 envelope: TypedEnvelope<proto::BufferReloaded>,
2269 _: Arc<Client>,
2270 cx: &mut ModelContext<Self>,
2271 ) -> Result<()> {
2272 let payload = envelope.payload.clone();
2273 let buffer = self
2274 .open_buffers
2275 .get(&(payload.buffer_id as usize))
2276 .and_then(|buffer| buffer.upgrade(cx));
2277 if let Some(buffer) = buffer {
2278 buffer.update(cx, |buffer, cx| {
2279 let version = payload.version.try_into()?;
2280 let mtime = payload
2281 .mtime
2282 .ok_or_else(|| anyhow!("missing mtime"))?
2283 .into();
2284 buffer.did_reload(version, mtime, cx);
2285 Result::<_, anyhow::Error>::Ok(())
2286 })?;
2287 }
2288 Ok(())
2289 }
2290
2291 pub fn match_paths<'a>(
2292 &self,
2293 query: &'a str,
2294 include_ignored: bool,
2295 smart_case: bool,
2296 max_results: usize,
2297 cancel_flag: &'a AtomicBool,
2298 cx: &AppContext,
2299 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
2300 let worktrees = self
2301 .worktrees(cx)
2302 .filter(|worktree| !worktree.read(cx).is_weak())
2303 .collect::<Vec<_>>();
2304 let include_root_name = worktrees.len() > 1;
2305 let candidate_sets = worktrees
2306 .into_iter()
2307 .map(|worktree| CandidateSet {
2308 snapshot: worktree.read(cx).snapshot(),
2309 include_ignored,
2310 include_root_name,
2311 })
2312 .collect::<Vec<_>>();
2313
2314 let background = cx.background().clone();
2315 async move {
2316 fuzzy::match_paths(
2317 candidate_sets.as_slice(),
2318 query,
2319 smart_case,
2320 max_results,
2321 cancel_flag,
2322 background,
2323 )
2324 .await
2325 }
2326 }
2327}
2328
2329impl WorktreeHandle {
2330 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2331 match self {
2332 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2333 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2334 }
2335 }
2336}
2337
/// A single worktree's snapshot, packaged as a set of fuzzy-match
/// candidates for `fuzzy::match_paths`.
struct CandidateSet {
    // Immutable view of the worktree's entries at match time.
    snapshot: Snapshot,
    // Whether ignored (e.g. gitignored) files are included as candidates.
    include_ignored: bool,
    // Whether matched paths are prefixed with the worktree's root name.
    include_root_name: bool,
}
2343
2344impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
2345 type Candidates = CandidateSetIter<'a>;
2346
2347 fn id(&self) -> usize {
2348 self.snapshot.id().to_usize()
2349 }
2350
2351 fn len(&self) -> usize {
2352 if self.include_ignored {
2353 self.snapshot.file_count()
2354 } else {
2355 self.snapshot.visible_file_count()
2356 }
2357 }
2358
2359 fn prefix(&self) -> Arc<str> {
2360 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
2361 self.snapshot.root_name().into()
2362 } else if self.include_root_name {
2363 format!("{}/", self.snapshot.root_name()).into()
2364 } else {
2365 "".into()
2366 }
2367 }
2368
2369 fn candidates(&'a self, start: usize) -> Self::Candidates {
2370 CandidateSetIter {
2371 traversal: self.snapshot.files(self.include_ignored, start),
2372 }
2373 }
2374}
2375
/// Iterator adapter turning a worktree file traversal into fuzzy-match
/// candidates.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
2379
2380impl<'a> Iterator for CandidateSetIter<'a> {
2381 type Item = PathMatchCandidate<'a>;
2382
2383 fn next(&mut self) -> Option<Self::Item> {
2384 self.traversal.next().map(|entry| {
2385 if let EntryKind::File(char_bag) = entry.kind {
2386 PathMatchCandidate {
2387 path: &entry.path,
2388 char_bag,
2389 }
2390 } else {
2391 unreachable!()
2392 }
2393 })
2394 }
2395}
2396
2397impl Entity for Project {
2398 type Event = Event;
2399
2400 fn release(&mut self, _: &mut gpui::MutableAppContext) {
2401 match &self.client_state {
2402 ProjectClientState::Local { remote_id_rx, .. } => {
2403 if let Some(project_id) = *remote_id_rx.borrow() {
2404 self.client
2405 .send(proto::UnregisterProject { project_id })
2406 .log_err();
2407 }
2408 }
2409 ProjectClientState::Remote { remote_id, .. } => {
2410 self.client
2411 .send(proto::LeaveProject {
2412 project_id: *remote_id,
2413 })
2414 .log_err();
2415 }
2416 }
2417 }
2418
2419 fn app_will_quit(
2420 &mut self,
2421 _: &mut MutableAppContext,
2422 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
2423 use futures::FutureExt;
2424
2425 let shutdown_futures = self
2426 .language_servers
2427 .drain()
2428 .filter_map(|(_, server)| server.shutdown())
2429 .collect::<Vec<_>>();
2430 Some(
2431 async move {
2432 futures::future::join_all(shutdown_futures).await;
2433 }
2434 .boxed(),
2435 )
2436 }
2437}
2438
2439impl Collaborator {
2440 fn from_proto(
2441 message: proto::Collaborator,
2442 user_store: &ModelHandle<UserStore>,
2443 cx: &mut AsyncAppContext,
2444 ) -> impl Future<Output = Result<Self>> {
2445 let user = user_store.update(cx, |user_store, cx| {
2446 user_store.fetch_user(message.user_id, cx)
2447 });
2448
2449 async move {
2450 Ok(Self {
2451 peer_id: PeerId(message.peer_id),
2452 user: user.await?,
2453 replica_id: message.replica_id as ReplicaId,
2454 })
2455 }
2456 }
2457}
2458
2459impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
2460 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
2461 Self {
2462 worktree_id,
2463 path: path.as_ref().into(),
2464 }
2465 }
2466}
2467
2468impl From<lsp::CreateFileOptions> for fs::CreateOptions {
2469 fn from(options: lsp::CreateFileOptions) -> Self {
2470 Self {
2471 overwrite: options.overwrite.unwrap_or(false),
2472 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2473 }
2474 }
2475}
2476
2477impl From<lsp::RenameFileOptions> for fs::RenameOptions {
2478 fn from(options: lsp::RenameFileOptions) -> Self {
2479 Self {
2480 overwrite: options.overwrite.unwrap_or(false),
2481 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2482 }
2483 }
2484}
2485
2486impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
2487 fn from(options: lsp::DeleteFileOptions) -> Self {
2488 Self {
2489 recursive: options.recursive.unwrap_or(false),
2490 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
2491 }
2492 }
2493}
2494
2495#[cfg(test)]
2496mod tests {
2497 use super::{Event, *};
2498 use client::test::FakeHttpClient;
2499 use fs::RealFs;
2500 use futures::StreamExt;
2501 use gpui::{test::subscribe, TestAppContext};
2502 use language::{
2503 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageRegistry,
2504 LanguageServerConfig, Point,
2505 };
2506 use lsp::Url;
2507 use serde_json::json;
2508 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
2509 use unindent::Unindent as _;
2510 use util::test::temp_tree;
2511 use worktree::WorktreeHandle as _;
2512
    /// Scans a worktree reached through a symlink (which itself contains an
    /// internal symlink) and verifies that fuzzy path matching returns the
    /// expected files.
    #[gpui::test]
    async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        // Access the tree through a symlink, and add a second symlink
        // aliasing a subdirectory inside the tree.
        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = build_project(Arc::new(RealFs), &mut cx);

        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(&root_link_path, false, cx)
            })
            .await
            .unwrap();

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;
        cx.read(|cx| {
            let tree = tree.read(cx);
            assert_eq!(tree.file_count(), 5);
            // The symlinked directory aliases the same files.
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        let cancel_flag = Default::default();
        let results = project
            .read_with(&cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
2575
    /// Drives a fake language server through overlapping diagnostic progress
    /// cycles and verifies the project emits the expected events and exposes
    /// the published diagnostics on the buffer.
    #[gpui::test]
    async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
        let (language_server_config, mut fake_server) =
            LanguageServerConfig::fake(cx.background()).await;
        let progress_token = language_server_config
            .disk_based_diagnostics_progress_token
            .clone()
            .unwrap();

        let mut languages = LanguageRegistry::new();
        languages.add(Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        )));

        let dir = temp_tree(json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }));

        let http_client = FakeHttpClient::with_404_response();
        let client = Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));

        let project = cx.update(|cx| {
            Project::local(
                client,
                user_store,
                Arc::new(languages),
                Arc::new(RealFs),
                cx,
            )
        });

        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(&mut cx, |project, cx| {
                project.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Path::new("b.rs").into(),
                    },
                    cx,
                )
            })
            .await
            .unwrap();

        let mut events = subscribe(&project, &mut cx);

        fake_server.start_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Start two more overlapping progress cycles; only the final end
        // should finish the disk-based diagnostics.
        fake_server.start_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        fake_server.start_progress(&progress_token).await;

        fake_server
            .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path(dir.path().join("a.rs")).unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            })
            .await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated(ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("a.rs"))
            })
        );

        fake_server.end_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        // The published diagnostic is visible in the buffer's snapshot.
        buffer.read_with(&cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });
    }
2708
    /// Verifies that fuzzy path matching over a worktree containing only
    /// directories (no files) yields no results.
    #[gpui::test]
    async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "dir1": {},
                "dir2": {
                    "dir3": {}
                }
            }
        }));

        let project = build_project(Arc::new(RealFs), &mut cx);
        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(&dir.path(), false, cx)
            })
            .await
            .unwrap();

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        let cancel_flag = Default::default();
        let results = project
            .read_with(&cx, |project, cx| {
                project.match_paths("dir", false, false, 10, &cancel_flag, cx)
            })
            .await;

        // Directories are not match candidates, so nothing matches "dir".
        assert!(results.is_empty());
    }
2740
    /// Exercises go-to-definition across files: a definition in a file
    /// outside the open worktree should open in a new weak worktree that is
    /// dropped once the definition handle is released.
    #[gpui::test]
    async fn test_definition(mut cx: gpui::TestAppContext) {
        let (language_server_config, mut fake_server) =
            LanguageServerConfig::fake(cx.background()).await;

        let mut languages = LanguageRegistry::new();
        languages.add(Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        )));

        let dir = temp_tree(json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }));

        let http_client = FakeHttpClient::with_404_response();
        let client = Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project = cx.update(|cx| {
            Project::local(
                client,
                user_store,
                Arc::new(languages),
                Arc::new(RealFs),
                cx,
            )
        });

        // Open only b.rs as a single-file worktree; a.rs stays outside.
        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(dir.path().join("b.rs"), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Cause worktree to start the fake language server
        let buffer = project
            .update(&mut cx, |project, cx| {
                project.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Path::new("").into(),
                    },
                    cx,
                )
            })
            .await
            .unwrap();
        let definitions =
            project.update(&mut cx, |project, cx| project.definition(&buffer, 22, cx));
        let (request_id, request) = fake_server
            .receive_request::<lsp::request::GotoDefinition>()
            .await;
        let request_params = request.text_document_position_params;
        assert_eq!(
            request_params.text_document.uri.to_file_path().unwrap(),
            dir.path().join("b.rs")
        );
        assert_eq!(request_params.position, lsp::Position::new(0, 22));

        // Respond with a definition located in the not-yet-open a.rs.
        fake_server
            .respond(
                request_id,
                Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
                    lsp::Url::from_file_path(dir.path().join("a.rs")).unwrap(),
                    lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                ))),
            )
            .await;
        let mut definitions = definitions.await.unwrap();
        assert_eq!(definitions.len(), 1);
        let definition = definitions.pop().unwrap();
        cx.update(|cx| {
            let target_buffer = definition.target_buffer.read(cx);
            assert_eq!(
                target_buffer
                    .file()
                    .unwrap()
                    .as_local()
                    .unwrap()
                    .abs_path(cx),
                dir.path().join("a.rs")
            );
            assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
            // a.rs was opened in a new worktree, marked weak.
            assert_eq!(
                list_worktrees(&project, cx),
                [
                    (dir.path().join("b.rs"), false),
                    (dir.path().join("a.rs"), true)
                ]
            );

            drop(definition);
        });
        // Dropping the definition releases the weak worktree.
        cx.read(|cx| {
            assert_eq!(
                list_worktrees(&project, cx),
                [(dir.path().join("b.rs"), false)]
            );
        });

        // Lists each worktree's absolute path with its weak flag.
        fn list_worktrees(project: &ModelHandle<Project>, cx: &AppContext) -> Vec<(PathBuf, bool)> {
            project
                .read(cx)
                .worktrees(cx)
                .map(|worktree| {
                    let worktree = worktree.read(cx);
                    (
                        worktree.as_local().unwrap().abs_path().to_path_buf(),
                        worktree.is_weak(),
                    )
                })
                .collect::<Vec<_>>()
        }
    }
2865
    /// Edits a buffer in a directory worktree, saves it, and verifies the
    /// new contents reach the (fake) filesystem.
    #[gpui::test]
    async fn test_save_file(mut cx: gpui::TestAppContext) {
        let fs = Arc::new(FakeFs::new(cx.background()));
        fs.insert_tree(
            "/dir",
            json!({
                "file1": "the old contents",
            }),
        )
        .await;

        let project = build_project(fs.clone(), &mut cx);
        let worktree_id = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/dir", false, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
            .await
            .unwrap();
        buffer
            .update(&mut cx, |buffer, cx| {
                assert_eq!(buffer.text(), "the old contents");
                // Large edit so the save is non-trivial in size.
                buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
                buffer.save(cx)
            })
            .await
            .unwrap();

        let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
        assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
    }
2903
    /// Same as `test_save_file`, but the worktree root is the file itself
    /// (single-file worktree), so the buffer is opened at the empty path.
    #[gpui::test]
    async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
        let fs = Arc::new(FakeFs::new(cx.background()));
        fs.insert_tree(
            "/dir",
            json!({
                "file1": "the old contents",
            }),
        )
        .await;

        let project = build_project(fs.clone(), &mut cx);
        let worktree_id = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/dir/file1", false, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
            .await
            .unwrap();
        buffer
            .update(&mut cx, |buffer, cx| {
                buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
                buffer.save(cx)
            })
            .await
            .unwrap();

        let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
        assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
    }
2940
    /// Renames/deletes files on disk while buffers are open, checks that
    /// entry ids and buffer file paths track the changes, then replays the
    /// resulting snapshot diff to a remote worktree copy and checks it
    /// converges. Retried because it depends on real FS event timing.
    #[gpui::test(retries = 5)]
    async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "a": {
                "file1": "",
                "file2": "",
                "file3": "",
            },
            "b": {
                "c": {
                    "file4": "",
                    "file5": "",
                }
            }
        }));

        let project = build_project(Arc::new(RealFs), &mut cx);
        let rpc = project.read_with(&cx, |p, _| p.client.clone());

        let (tree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());

        let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
            let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
            async move { buffer.await.unwrap() }
        };
        let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
            tree.read_with(cx, |tree, _| {
                tree.entry_for_path(path)
                    .expect(&format!("no entry for path {}", path))
                    .id
            })
        };

        let buffer2 = buffer_for_path("a/file2", &mut cx).await;
        let buffer3 = buffer_for_path("a/file3", &mut cx).await;
        let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
        let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;

        let file2_id = id_for_path("a/file2", &cx);
        let file3_id = id_for_path("a/file3", &cx);
        let file4_id = id_for_path("b/c/file4", &cx);

        // Wait for the initial scan.
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Create a remote copy of this worktree.
        let initial_snapshot = tree.read_with(&cx, |tree, _| tree.snapshot());
        let (remote, load_task) = cx.update(|cx| {
            Worktree::remote(
                1,
                1,
                initial_snapshot.to_proto(&Default::default(), Default::default()),
                rpc.clone(),
                cx,
            )
        });
        load_task.await;

        cx.read(|cx| {
            assert!(!buffer2.read(cx).is_dirty());
            assert!(!buffer3.read(cx).is_dirty());
            assert!(!buffer4.read(cx).is_dirty());
            assert!(!buffer5.read(cx).is_dirty());
        });

        // Rename and delete files and directories.
        tree.flush_fs_events(&cx).await;
        std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
        std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
        std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
        std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
        tree.flush_fs_events(&cx).await;

        let expected_paths = vec![
            "a",
            "a/file1",
            "a/file2.new",
            "b",
            "d",
            "d/file3",
            "d/file4",
        ];

        cx.read(|app| {
            assert_eq!(
                tree.read(app)
                    .paths()
                    .map(|p| p.to_str().unwrap())
                    .collect::<Vec<_>>(),
                expected_paths
            );

            // Entry ids survive renames.
            assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
            assert_eq!(id_for_path("d/file3", &cx), file3_id);
            assert_eq!(id_for_path("d/file4", &cx), file4_id);

            // Open buffers follow their files' new paths.
            assert_eq!(
                buffer2.read(app).file().unwrap().path().as_ref(),
                Path::new("a/file2.new")
            );
            assert_eq!(
                buffer3.read(app).file().unwrap().path().as_ref(),
                Path::new("d/file3")
            );
            assert_eq!(
                buffer4.read(app).file().unwrap().path().as_ref(),
                Path::new("d/file4")
            );
            assert_eq!(
                buffer5.read(app).file().unwrap().path().as_ref(),
                Path::new("b/c/file5")
            );

            assert!(!buffer2.read(app).file().unwrap().is_deleted());
            assert!(!buffer3.read(app).file().unwrap().is_deleted());
            assert!(!buffer4.read(app).file().unwrap().is_deleted());
            assert!(buffer5.read(app).file().unwrap().is_deleted());
        });

        // Update the remote worktree. Check that it becomes consistent with the
        // local worktree.
        remote.update(&mut cx, |remote, cx| {
            let update_message =
                tree.read(cx)
                    .snapshot()
                    .build_update(&initial_snapshot, 1, 1, true);
            remote
                .as_remote_mut()
                .unwrap()
                .snapshot
                .apply_remote_update(update_message)
                .unwrap();

            assert_eq!(
                remote
                    .paths()
                    .map(|p| p.to_str().unwrap())
                    .collect::<Vec<_>>(),
                expected_paths
            );
        });
    }
3090
    /// Verifies that concurrent and repeated `open_buffer` calls for the
    /// same path always resolve to a single shared buffer.
    #[gpui::test]
    async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
        let fs = Arc::new(FakeFs::new(cx.background()));
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.txt": "a-contents",
                "b.txt": "b-contents",
            }),
        )
        .await;

        let project = build_project(fs.clone(), &mut cx);
        let worktree_id = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", false, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(&cx, |tree, _| tree.id());

        // Spawn multiple tasks to open paths, repeating some paths.
        let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
            (
                p.open_buffer((worktree_id, "a.txt"), cx),
                p.open_buffer((worktree_id, "b.txt"), cx),
                p.open_buffer((worktree_id, "a.txt"), cx),
            )
        });

        let buffer_a_1 = buffer_a_1.await.unwrap();
        let buffer_a_2 = buffer_a_2.await.unwrap();
        let buffer_b = buffer_b.await.unwrap();
        assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
        assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");

        // There is only one buffer per path.
        let buffer_a_id = buffer_a_1.id();
        assert_eq!(buffer_a_2.id(), buffer_a_id);

        // Open the same path again while it is still open.
        drop(buffer_a_1);
        let buffer_a_3 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
            .await
            .unwrap();

        // There's still only one buffer per path.
        assert_eq!(buffer_a_3.id(), buffer_a_id);
    }
3142
    /// Tracks a buffer's dirty flag and emitted events through edits, saves,
    /// and on-disk deletion of its backing file.
    #[gpui::test]
    async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
        use std::fs;

        let dir = temp_tree(json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }));

        let project = build_project(Arc::new(RealFs), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());

        worktree.flush_fs_events(&cx).await;
        worktree
            .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let buffer1 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
            .await
            .unwrap();
        let events = Rc::new(RefCell::new(Vec::new()));

        // initially, the buffer isn't dirty.
        buffer1.update(&mut cx, |buffer, cx| {
            cx.subscribe(&buffer1, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();

            assert!(!buffer.is_dirty());
            assert!(events.borrow().is_empty());

            buffer.edit(vec![1..2], "", cx);
        });

        // after the first edit, the buffer is dirty, and emits a dirtied event.
        buffer1.update(&mut cx, |buffer, cx| {
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[language::Event::Edited, language::Event::Dirtied]
            );
            events.borrow_mut().clear();
            buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
        });

        // after saving, the buffer is not dirty, and emits a saved event.
        buffer1.update(&mut cx, |buffer, cx| {
            assert!(!buffer.is_dirty());
            assert_eq!(*events.borrow(), &[language::Event::Saved]);
            events.borrow_mut().clear();

            buffer.edit(vec![1..1], "B", cx);
            buffer.edit(vec![2..2], "D", cx);
        });

        // after editing again, the buffer is dirty, and emits another dirty event.
        buffer1.update(&mut cx, |buffer, cx| {
            assert!(buffer.text() == "aBDc");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[
                    language::Event::Edited,
                    language::Event::Dirtied,
                    language::Event::Edited,
                ],
            );
            events.borrow_mut().clear();

            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the is still considered dirty.
            buffer.edit([1..3], "", cx);
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
        });

        assert_eq!(*events.borrow(), &[language::Event::Edited]);

        // When a file is deleted, the buffer is considered dirty.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer2 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
            .await
            .unwrap();
        buffer2.update(&mut cx, |_, cx| {
            cx.subscribe(&buffer2, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        fs::remove_file(dir.path().join("file2")).unwrap();
        buffer2.condition(&cx, |b, _| b.is_dirty()).await;
        assert_eq!(
            *events.borrow(),
            &[language::Event::Dirtied, language::Event::FileHandleChanged]
        );

        // When a file is already dirty when deleted, we don't emit a Dirtied event.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer3 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
            .await
            .unwrap();
        buffer3.update(&mut cx, |_, cx| {
            cx.subscribe(&buffer3, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        worktree.flush_fs_events(&cx).await;
        buffer3.update(&mut cx, |buffer, cx| {
            buffer.edit(Some(0..0), "x", cx);
        });
        events.borrow_mut().clear();
        fs::remove_file(dir.path().join("file3")).unwrap();
        buffer3
            .condition(&cx, |_, _| !events.borrow().is_empty())
            .await;
        assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
        cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
    }
3279
    // Verifies how an open buffer reacts to its backing file changing on disk:
    // a clean buffer silently reloads the new contents, while a dirty buffer
    // keeps its in-memory edits and is flagged as conflicted instead.
    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
        use std::fs;

        let initial_contents = "aaa\nbbbbb\nc\n";
        let dir = temp_tree(json!({ "the-file": initial_contents }));

        // Uses the real filesystem (not FakeFs) so actual FS events drive the reloads.
        let project = build_project(Arc::new(RealFs), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        // Wait for the initial directory scan so the file entry exists before opening.
        worktree
            .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let abs_path = dir.path().join("the-file");
        let buffer = project
            .update(&mut cx, |p, cx| {
                p.open_buffer((worktree_id, "the-file"), cx)
            })
            .await
            .unwrap();

        // TODO
        // Add a cursor on each row.
        // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
        //     assert!(!buffer.is_dirty());
        //     buffer.add_selection_set(
        //         &(0..3)
        //             .map(|row| Selection {
        //                 id: row as usize,
        //                 start: Point::new(row, 1),
        //                 end: Point::new(row, 1),
        //                 reversed: false,
        //                 goal: SelectionGoal::None,
        //             })
        //             .collect::<Vec<_>>(),
        //         cx,
        //     )
        // });

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(&cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        // The reload must not leave the buffer dirty or conflicted.
        buffer.update(&mut cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer
        buffer.update(&mut cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }
3379
    // Verifies that LSP diagnostics linked via `related_information` are merged
    // into groups: each group shares one `group_id`, exactly one entry is the
    // primary, and `diagnostic_group` returns all entries of a group together.
    #[gpui::test]
    async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
        let fs = Arc::new(FakeFs::new(cx.background()));
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = build_project(fs.clone(), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        // Build an LSP publishDiagnostics payload in the style rust-analyzer emits:
        // two primary diagnostics ("error 1" and "error 2") whose related_information
        // points at their hints, plus hint diagnostics whose related_information
        // points back at the primary ("original diagnostic").
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        // Feed the payload through the project's diagnostic grouping logic.
        project
            .update(&mut cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());

        // All five entries, ordered by position: the duplicated hint/primary pairs
        // collapse into two groups (0 and 1), each with a single primary entry.
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        // Querying by group id returns exactly that group's entries.
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
3638
3639 fn build_project(fs: Arc<dyn Fs>, cx: &mut TestAppContext) -> ModelHandle<Project> {
3640 let languages = Arc::new(LanguageRegistry::new());
3641 let http_client = FakeHttpClient::with_404_response();
3642 let client = client::Client::new(http_client.clone());
3643 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
3644 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
3645 }
3646}