1pub mod fs;
2mod ignore;
3pub mod worktree;
4
5use anyhow::{anyhow, Result};
6use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
7use clock::ReplicaId;
8use collections::{hash_map, HashMap, HashSet};
9use futures::Future;
10use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
11use gpui::{
12 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
13 WeakModelHandle,
14};
15use language::{
16 point_from_lsp,
17 proto::{deserialize_anchor, serialize_anchor},
18 range_from_lsp, Bias, Buffer, CodeAction, Completion, CompletionLabel, Diagnostic,
19 DiagnosticEntry, File as _, Language, LanguageRegistry, PointUtf16, ToLspPosition,
20 ToPointUtf16, Transaction,
21};
22use lsp::{DiagnosticSeverity, LanguageServer};
23use postage::{prelude::Stream, watch};
24use smol::block_on;
25use std::{
26 convert::TryInto,
27 ops::Range,
28 path::{Path, PathBuf},
29 sync::{atomic::AtomicBool, Arc},
30 time::Instant,
31};
32use util::{post_inc, ResultExt, TryFutureExt as _};
33
34pub use fs::*;
35pub use worktree::*;
36
/// Root model for one open project: the set of worktrees being edited,
/// plus collaboration, language-server, and buffer-tracking state.
pub struct Project {
    // Worktrees may be held strongly or weakly (see `WorktreeHandle`);
    // dead weak handles are skipped when iterating via `worktrees()`.
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntry>,
    languages: Arc<LanguageRegistry>,
    // At most one running language server per (worktree, language name) pair.
    language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
    client: Arc<client::Client>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    // Local (host) vs. remote (guest) state; see `ProjectClientState`.
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    // Signed count rather than a bool — presumably several servers can run
    // disk-based diagnostics concurrently (mutated outside this view; confirm).
    language_servers_with_diagnostics_running: isize,
    // Keyed by buffer remote id; weak so dropped buffers can be pruned
    // (see `get_open_buffer`).
    open_buffers: HashMap<usize, WeakModelHandle<Buffer>>,
    // In-flight buffer loads, so concurrent `open_buffer` calls for the
    // same path share one load task and one resulting buffer.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // Buffers handed out to remote peers, kept alive per peer.
    shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
}
56
/// A worktree reference that is either kept alive by the project
/// (`Strong`) or allowed to drop once nothing else holds it (`Weak`).
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
61
/// Whether this project is hosted on this machine or joined from a peer.
enum ProjectClientState {
    Local {
        // True while the project is being shared with collaborators.
        is_shared: bool,
        // Publishes the server-assigned project id (`None` while offline).
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        // Keeps alive the background task that (re)registers the project
        // whenever the client's connection status changes (see `local`).
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        // Set once the host stops sharing; the project becomes read-only.
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
    },
}
75
/// Another user participating in this project, along with the replica id
/// under which their edits are applied.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
82
/// Events emitted by `Project` for observers to react to.
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntry>),
    WorktreeRemoved(WorktreeId),
    // Bracketing events around a disk-based diagnostics pass
    // (see `start_language_server`).
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    // Diagnostics changed for a specific file.
    DiagnosticsUpdated(ProjectPath),
}
92
/// A path to an entry, relative to the root of a particular worktree.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}
98
/// Per-file counts of primary diagnostics, bucketed by severity
/// (see `DiagnosticSummary::new`).
#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}
106
/// A go-to-definition result: the buffer containing the definition and
/// the range within it.
#[derive(Debug)]
pub struct Definition {
    pub target_buffer: ModelHandle<Buffer>,
    pub target_range: Range<language::Anchor>,
}
112
/// A collection of per-buffer transactions, keyed by the buffer each
/// transaction applies to.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
115
116impl DiagnosticSummary {
117 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
118 let mut this = Self {
119 error_count: 0,
120 warning_count: 0,
121 info_count: 0,
122 hint_count: 0,
123 };
124
125 for entry in diagnostics {
126 if entry.diagnostic.is_primary {
127 match entry.diagnostic.severity {
128 DiagnosticSeverity::ERROR => this.error_count += 1,
129 DiagnosticSeverity::WARNING => this.warning_count += 1,
130 DiagnosticSeverity::INFORMATION => this.info_count += 1,
131 DiagnosticSeverity::HINT => this.hint_count += 1,
132 _ => {}
133 }
134 }
135 }
136
137 this
138 }
139
140 pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
141 proto::DiagnosticSummary {
142 path: path.to_string_lossy().to_string(),
143 error_count: self.error_count as u32,
144 warning_count: self.warning_count as u32,
145 info_count: self.info_count as u32,
146 hint_count: self.hint_count as u32,
147 }
148 }
149}
150
/// Identifies a single entry within a worktree.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ProjectEntry {
    pub worktree_id: WorktreeId,
    pub entry_id: usize,
}
156
157impl Project {
    /// Creates a new local (host-side) project.
    ///
    /// Spawns a background task that watches the client's connection
    /// status: whenever the client is connected it registers the project
    /// with the server to obtain a project id and re-registers all
    /// current worktrees under that id; otherwise the id is cleared.
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            // Spawned weakly so this task doesn't keep the project alive.
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.recv().await {
                            if let Some(this) = this.upgrade(&cx) {
                                // Only a connected client can hold a project id.
                                let remote_id = if let client::Status::Connected { .. } = status {
                                    let response = rpc.request(proto::RegisterProject {}).await?;
                                    Some(response.project_id)
                                } else {
                                    None
                                };

                                if let Some(project_id) = remote_id {
                                    // Register every current worktree under the new
                                    // project id before publishing the id itself.
                                    let mut registrations = Vec::new();
                                    this.update(&mut cx, |this, cx| {
                                        for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                                            registrations.push(worktree.update(
                                                cx,
                                                |worktree, cx| {
                                                    let worktree = worktree.as_local_mut().unwrap();
                                                    worktree.register(project_id, cx)
                                                },
                                            ));
                                        }
                                    });
                                    for registration in registrations {
                                        registration.await?;
                                    }
                                }
                                // Publish the id (or clear it) to watchers.
                                this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                open_buffers: Default::default(),
                loading_buffers: Default::default(),
                shared_buffers: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
            }
        })
    }
230
    /// Joins an existing project hosted by another peer.
    ///
    /// Connects and authenticates the client, requests to join the
    /// project, builds remote worktrees from the server's response,
    /// pre-loads the collaborators' user records, and finally constructs
    /// the model with RPC subscriptions for project-scoped messages.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(&cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        // The server assigns this peer's replica id for collaborative editing.
        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            // Let each worktree finish loading in the background.
            load_task.detach();
        }

        // Fetch user records for all collaborators up front so
        // `Collaborator::from_proto` below can resolve them.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        Ok(cx.add_model(|cx| {
            let mut this = Self {
                worktrees: Vec::new(),
                open_buffers: Default::default(),
                loading_buffers: Default::default(),
                shared_buffers: Default::default(),
                active_entry: None,
                collaborators,
                languages,
                user_store,
                fs,
                // Subscribe to every project-scoped message a guest can receive;
                // dropping these subscriptions unsubscribes.
                subscriptions: vec![
                    client.subscribe_to_entity(remote_id, cx, Self::handle_unshare_project),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_add_collaborator),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_remove_collaborator),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_share_worktree),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_unregister_worktree),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_update_worktree),
                    client.subscribe_to_entity(
                        remote_id,
                        cx,
                        Self::handle_update_diagnostic_summary,
                    ),
                    client.subscribe_to_entity(
                        remote_id,
                        cx,
                        Self::handle_disk_based_diagnostics_updating,
                    ),
                    client.subscribe_to_entity(
                        remote_id,
                        cx,
                        Self::handle_disk_based_diagnostics_updated,
                    ),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer_file),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_reloaded),
                    client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved),
                ],
                client,
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        }))
    }
324
325 #[cfg(any(test, feature = "test-support"))]
326 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
327 let languages = Arc::new(LanguageRegistry::new());
328 let http_client = client::test::FakeHttpClient::with_404_response();
329 let client = client::Client::new(http_client.clone());
330 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
331 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
332 }
333
    /// Records the project's (possibly cleared) server-assigned id and
    /// rebuilds the RPC subscriptions keyed by that id.
    fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            // Publish to `remote_id_rx` watchers (e.g. `next_remote_id`).
            *remote_id_tx.borrow_mut() = remote_id;
        }

        // Dropping the old subscriptions unsubscribes them.
        self.subscriptions.clear();
        if let Some(remote_id) = remote_id {
            let client = &self.client;
            // Host-side message handlers for this project id.
            self.subscriptions.extend([
                client.subscribe_to_entity(remote_id, cx, Self::handle_open_buffer),
                client.subscribe_to_entity(remote_id, cx, Self::handle_close_buffer),
                client.subscribe_to_entity(remote_id, cx, Self::handle_add_collaborator),
                client.subscribe_to_entity(remote_id, cx, Self::handle_remove_collaborator),
                client.subscribe_to_entity(remote_id, cx, Self::handle_update_worktree),
                client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer),
                client.subscribe_to_entity(remote_id, cx, Self::handle_save_buffer),
                client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved),
                client.subscribe_to_entity(remote_id, cx, Self::handle_format_buffer),
                client.subscribe_to_entity(remote_id, cx, Self::handle_get_completions),
                client.subscribe_to_entity(
                    remote_id,
                    cx,
                    Self::handle_apply_additional_edits_for_completion,
                ),
                client.subscribe_to_entity(remote_id, cx, Self::handle_get_code_actions),
                client.subscribe_to_entity(remote_id, cx, Self::handle_apply_code_action),
                client.subscribe_to_entity(remote_id, cx, Self::handle_get_definition),
            ]);
        }
    }
364
365 pub fn remote_id(&self) -> Option<u64> {
366 match &self.client_state {
367 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
368 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
369 }
370 }
371
372 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
373 let mut id = None;
374 let mut watch = None;
375 match &self.client_state {
376 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
377 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
378 }
379
380 async move {
381 if let Some(id) = id {
382 return id;
383 }
384 let mut watch = watch.unwrap();
385 loop {
386 let id = *watch.borrow();
387 if let Some(id) = id {
388 return id;
389 }
390 watch.recv().await;
391 }
392 }
393 }
394
395 pub fn replica_id(&self) -> ReplicaId {
396 match &self.client_state {
397 ProjectClientState::Local { .. } => 0,
398 ProjectClientState::Remote { replica_id, .. } => *replica_id,
399 }
400 }
401
    /// The remote collaborators currently in this project, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
405
406 pub fn worktrees<'a>(
407 &'a self,
408 cx: &'a AppContext,
409 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
410 self.worktrees
411 .iter()
412 .filter_map(move |worktree| worktree.upgrade(cx))
413 }
414
415 pub fn worktree_for_id(
416 &self,
417 id: WorktreeId,
418 cx: &AppContext,
419 ) -> Option<ModelHandle<Worktree>> {
420 self.worktrees(cx)
421 .find(|worktree| worktree.read(cx).id() == id)
422 }
423
    /// Starts sharing this local project with collaborators.
    ///
    /// Marks the project shared, asks the server to share it, then shares
    /// every worktree under the project id and notifies observers. Fails
    /// if the project has no remote id yet or is a remote project.
    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, _| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;
                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;
            // Kick off all worktree shares inside the model update, then
            // await them outside it.
            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
460
461 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
462 let rpc = self.client.clone();
463 cx.spawn(|this, mut cx| async move {
464 let project_id = this.update(&mut cx, |this, _| {
465 if let ProjectClientState::Local {
466 is_shared,
467 remote_id_rx,
468 ..
469 } = &mut this.client_state
470 {
471 *is_shared = false;
472 remote_id_rx
473 .borrow()
474 .ok_or_else(|| anyhow!("no project id"))
475 } else {
476 Err(anyhow!("can't share a remote project"))
477 }
478 })?;
479
480 rpc.send(proto::UnshareProject { project_id })?;
481 this.update(&mut cx, |this, cx| {
482 this.collaborators.clear();
483 this.shared_buffers.clear();
484 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
485 worktree.update(cx, |worktree, _| {
486 worktree.as_local_mut().unwrap().unshare();
487 });
488 }
489 cx.notify()
490 });
491 Ok(())
492 })
493 }
494
495 pub fn is_read_only(&self) -> bool {
496 match &self.client_state {
497 ProjectClientState::Local { .. } => false,
498 ProjectClientState::Remote {
499 sharing_has_stopped,
500 ..
501 } => *sharing_has_stopped,
502 }
503 }
504
505 pub fn is_local(&self) -> bool {
506 match &self.client_state {
507 ProjectClientState::Local { .. } => true,
508 ProjectClientState::Remote { .. } => false,
509 }
510 }
511
    /// Opens the buffer for the given project path, reusing an existing
    /// open buffer or an in-flight load when possible so each path maps
    /// to at most one live buffer.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                // Local worktrees load from disk; remote ones fetch over RPC.
                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    // Broadcast the result (shared via Arc on error) to
                    // every waiter on the watch channel.
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait until the load task above publishes a result.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.recv().await;
            }
        })
    }
572
    /// Loads a buffer from disk via the local worktree, then registers it
    /// with the project so it is tracked and gets a language assigned.
    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        // Hold the worktree weakly while loading so it can still be dropped.
        let worktree = worktree.downgrade();
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            let worktree = worktree
                .upgrade(&cx)
                .ok_or_else(|| anyhow!("worktree was removed"))?;
            this.update(&mut cx, |this, cx| {
                this.register_buffer(&buffer, Some(&worktree), cx)
            })?;
            Ok(buffer)
        })
    }
595
    /// Requests a buffer's contents from the host over RPC and
    /// deserializes the response into a local replica.
    ///
    /// Panics (via `unwrap`) if called before the project has a remote id.
    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBuffer {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| {
                this.deserialize_remote_buffer(buffer, cx)
            })
        })
    }
621
    /// Opens a buffer for an absolute path reported by a language server
    /// (e.g. a go-to-definition target).
    ///
    /// If the path lies outside every current worktree, a new worktree is
    /// created for it (second argument `true` — presumably marks it as
    /// weak/transient; confirm against `create_local_worktree`) and the
    /// originating language server is associated with it for `lang_name`.
    fn open_local_buffer_from_lsp_path(
        &mut self,
        abs_path: lsp::Url,
        lang_name: String,
        lang_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, true, cx)
                    })
                    .await?;
                // Reuse the server that produced this path for the new worktree.
                this.update(&mut cx, |this, cx| {
                    this.language_servers
                        .insert((worktree.read(cx).id(), lang_name), lang_server);
                });
                // The file itself is the worktree root, so the relative path is empty.
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
658
    /// Saves `buffer` under a new absolute path, creating a local
    /// worktree for that path if necessary, then re-runs language
    /// detection since the file's name (and thus its language) may have
    /// changed.
    pub fn save_buffer_as(
        &self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
            });
            Ok(())
        })
    }
682
683 #[cfg(any(test, feature = "test-support"))]
684 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
685 let path = path.into();
686 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
687 self.open_buffers.iter().any(|(_, buffer)| {
688 if let Some(buffer) = buffer.upgrade(cx) {
689 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
690 if file.worktree == worktree && file.path() == &path.path {
691 return true;
692 }
693 }
694 }
695 false
696 })
697 } else {
698 false
699 }
700 }
701
    /// Returns the already-open buffer for `path`, if any.
    ///
    /// As a side effect, prunes entries whose buffers have been dropped
    /// (their weak handles no longer upgrade).
    fn get_open_buffer(
        &mut self,
        path: &ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Option<ModelHandle<Buffer>> {
        let mut result = None;
        let worktree = self.worktree_for_id(path.worktree_id, cx)?;
        self.open_buffers.retain(|_, buffer| {
            if let Some(buffer) = buffer.upgrade(cx) {
                if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                    if file.worktree == worktree && file.path() == &path.path {
                        result = Some(buffer);
                    }
                }
                true
            } else {
                // Buffer was dropped elsewhere; drop the stale weak handle.
                false
            }
        });
        result
    }
723
    /// Starts tracking a newly opened buffer and assigns it a language
    /// (and language server) based on its file.
    ///
    /// Errors if a buffer with the same remote id was already registered.
    /// NOTE(review): on that error path the new weak handle has already
    /// replaced the previous entry in `open_buffers` — confirm intended.
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        if self
            .open_buffers
            .insert(buffer.read(cx).remote_id() as usize, buffer.downgrade())
            .is_some()
        {
            return Err(anyhow!("registered the same buffer twice"));
        }
        self.assign_language_to_buffer(&buffer, worktree, cx);
        Ok(())
    }
740
    /// Detects the buffer's language from its full path and, for local
    /// worktrees, wires up the corresponding language server plus any
    /// diagnostics the worktree already holds for the file.
    ///
    /// Returns `None` when the buffer has no file; the return value is
    /// otherwise unused (the `?` on `file()` drives the early exit).
    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        let (path, full_path) = {
            let file = buffer.read(cx).file()?;
            (file.path().clone(), file.full_path(cx))
        };

        // If the buffer has a language, set it and start/assign the language server
        if let Some(language) = self.languages.select_language(&full_path) {
            buffer.update(cx, |buffer, cx| {
                buffer.set_language(Some(language.clone()), cx);
            });

            // For local worktrees, start a language server if needed.
            // Also assign the language server and any previously stored diagnostics to the buffer.
            if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
                let worktree_id = local_worktree.id();
                let worktree_abs_path = local_worktree.abs_path().clone();

                // Reuse a running server for this (worktree, language) pair,
                // or start one lazily on first use.
                let language_server = match self
                    .language_servers
                    .entry((worktree_id, language.name().to_string()))
                {
                    hash_map::Entry::Occupied(e) => Some(e.get().clone()),
                    hash_map::Entry::Vacant(e) => Self::start_language_server(
                        self.client.clone(),
                        language.clone(),
                        &worktree_abs_path,
                        cx,
                    )
                    .map(|server| e.insert(server).clone()),
                };

                buffer.update(cx, |buffer, cx| {
                    buffer.set_language_server(language_server, cx);
                });
            }
        }

        // Replay diagnostics the worktree collected before this buffer
        // was opened, if any.
        if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
            if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
                buffer.update(cx, |buffer, cx| {
                    buffer.update_diagnostics(diagnostics, None, cx).log_err();
                });
            }
        }

        None
    }
794
    /// Starts a language server for `language` rooted at `worktree_path`
    /// and wires its diagnostic notifications into the project.
    ///
    /// Returns `None` if no server could be started (start failure is
    /// logged and swallowed via `log_err`).
    ///
    /// Diagnostics flow through an internal channel as three event kinds
    /// (start / update / finish). When the language declares a disk-based
    /// diagnostics progress token, start/finish are derived from LSP
    /// `$/progress` notifications; otherwise every `publishDiagnostics`
    /// notification is bracketed by a synthetic start/finish pair.
    fn start_language_server(
        rpc: Arc<Client>,
        language: Arc<Language>,
        worktree_path: &Path,
        cx: &mut ModelContext<Self>,
    ) -> Option<Arc<LanguageServer>> {
        enum LspEvent {
            DiagnosticsStart,
            DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
            DiagnosticsFinish,
        }

        let language_server = language
            .start_server(worktree_path, cx)
            .log_err()
            .flatten()?;
        let disk_based_sources = language
            .disk_based_diagnostic_sources()
            .cloned()
            .unwrap_or_default();
        let disk_based_diagnostics_progress_token =
            language.disk_based_diagnostics_progress_token().cloned();
        let has_disk_based_diagnostic_progress_token =
            disk_based_diagnostics_progress_token.is_some();
        let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();

        // Listen for `PublishDiagnostics` notifications.
        language_server
            .on_notification::<lsp::notification::PublishDiagnostics, _>({
                let diagnostics_tx = diagnostics_tx.clone();
                move |params| {
                    // Without a progress token there is no other signal for
                    // when a diagnostics pass begins/ends, so synthesize one
                    // around each publish.
                    if !has_disk_based_diagnostic_progress_token {
                        block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                    }
                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params))).ok();
                    if !has_disk_based_diagnostic_progress_token {
                        block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                    }
                }
            })
            .detach();

        // Listen for `Progress` notifications. Send an event when the language server
        // transitions between running jobs and not running any jobs.
        let mut running_jobs_for_this_server: i32 = 0;
        language_server
            .on_notification::<lsp::notification::Progress, _>(move |params| {
                let token = match params.token {
                    lsp::NumberOrString::Number(_) => None,
                    lsp::NumberOrString::String(token) => Some(token),
                };

                if token == disk_based_diagnostics_progress_token {
                    match params.value {
                        lsp::ProgressParamsValue::WorkDone(progress) => match progress {
                            lsp::WorkDoneProgress::Begin(_) => {
                                // Only emit Start on the 0 -> 1 transition.
                                running_jobs_for_this_server += 1;
                                if running_jobs_for_this_server == 1 {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                                }
                            }
                            lsp::WorkDoneProgress::End(_) => {
                                // Only emit Finish on the 1 -> 0 transition.
                                running_jobs_for_this_server -= 1;
                                if running_jobs_for_this_server == 0 {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                                }
                            }
                            _ => {}
                        },
                    }
                }
            })
            .detach();

        // Process all the LSP events.
        cx.spawn_weak(|this, mut cx| async move {
            while let Ok(message) = diagnostics_rx.recv().await {
                // Stop processing once the project has been dropped.
                let this = cx.read(|cx| this.upgrade(cx))?;
                match message {
                    LspEvent::DiagnosticsStart => {
                        this.update(&mut cx, |this, cx| {
                            this.disk_based_diagnostics_started(cx);
                            // Forward the transition to collaborators.
                            if let Some(project_id) = this.remote_id() {
                                rpc.send(proto::DiskBasedDiagnosticsUpdating { project_id })
                                    .log_err();
                            }
                        });
                    }
                    LspEvent::DiagnosticsUpdate(mut params) => {
                        // Let the language massage server-specific output first.
                        language.process_diagnostics(&mut params);
                        this.update(&mut cx, |this, cx| {
                            this.update_diagnostics(params, &disk_based_sources, cx)
                                .log_err();
                        });
                    }
                    LspEvent::DiagnosticsFinish => {
                        this.update(&mut cx, |this, cx| {
                            this.disk_based_diagnostics_finished(cx);
                            if let Some(project_id) = this.remote_id() {
                                rpc.send(proto::DiskBasedDiagnosticsUpdated { project_id })
                                    .log_err();
                            }
                        });
                    }
                }
            }
            Some(())
        })
        .detach();

        Some(language_server)
    }
907
908 pub fn update_diagnostics(
909 &mut self,
910 params: lsp::PublishDiagnosticsParams,
911 disk_based_sources: &HashSet<String>,
912 cx: &mut ModelContext<Self>,
913 ) -> Result<()> {
914 let abs_path = params
915 .uri
916 .to_file_path()
917 .map_err(|_| anyhow!("URI is not a file"))?;
918 let mut next_group_id = 0;
919 let mut diagnostics = Vec::default();
920 let mut primary_diagnostic_group_ids = HashMap::default();
921 let mut sources_by_group_id = HashMap::default();
922 let mut supporting_diagnostic_severities = HashMap::default();
923 for diagnostic in ¶ms.diagnostics {
924 let source = diagnostic.source.as_ref();
925 let code = diagnostic.code.as_ref().map(|code| match code {
926 lsp::NumberOrString::Number(code) => code.to_string(),
927 lsp::NumberOrString::String(code) => code.clone(),
928 });
929 let range = range_from_lsp(diagnostic.range);
930 let is_supporting = diagnostic
931 .related_information
932 .as_ref()
933 .map_or(false, |infos| {
934 infos.iter().any(|info| {
935 primary_diagnostic_group_ids.contains_key(&(
936 source,
937 code.clone(),
938 range_from_lsp(info.location.range),
939 ))
940 })
941 });
942
943 if is_supporting {
944 if let Some(severity) = diagnostic.severity {
945 supporting_diagnostic_severities
946 .insert((source, code.clone(), range), severity);
947 }
948 } else {
949 let group_id = post_inc(&mut next_group_id);
950 let is_disk_based =
951 source.map_or(false, |source| disk_based_sources.contains(source));
952
953 sources_by_group_id.insert(group_id, source);
954 primary_diagnostic_group_ids
955 .insert((source, code.clone(), range.clone()), group_id);
956
957 diagnostics.push(DiagnosticEntry {
958 range,
959 diagnostic: Diagnostic {
960 code: code.clone(),
961 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
962 message: diagnostic.message.clone(),
963 group_id,
964 is_primary: true,
965 is_valid: true,
966 is_disk_based,
967 },
968 });
969 if let Some(infos) = &diagnostic.related_information {
970 for info in infos {
971 if info.location.uri == params.uri && !info.message.is_empty() {
972 let range = range_from_lsp(info.location.range);
973 diagnostics.push(DiagnosticEntry {
974 range,
975 diagnostic: Diagnostic {
976 code: code.clone(),
977 severity: DiagnosticSeverity::INFORMATION,
978 message: info.message.clone(),
979 group_id,
980 is_primary: false,
981 is_valid: true,
982 is_disk_based,
983 },
984 });
985 }
986 }
987 }
988 }
989 }
990
991 for entry in &mut diagnostics {
992 let diagnostic = &mut entry.diagnostic;
993 if !diagnostic.is_primary {
994 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
995 if let Some(&severity) = supporting_diagnostic_severities.get(&(
996 source,
997 diagnostic.code.clone(),
998 entry.range.clone(),
999 )) {
1000 diagnostic.severity = severity;
1001 }
1002 }
1003 }
1004
1005 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1006 Ok(())
1007 }
1008
    /// Stores converted diagnostics for a file: updates the open buffer
    /// for that path (if any), persists the entries on the local
    /// worktree, and emits `Event::DiagnosticsUpdated`.
    pub fn update_diagnostic_entries(
        &mut self,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };

        // Update the first open buffer whose file matches this path.
        // NOTE(review): the comparison checks only the relative path, not
        // the worktree, so a same-named file in another worktree could
        // match first — confirm whether that's acceptable here.
        for buffer in self.open_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| *file.path() == project_path.path)
                {
                    buffer.update(cx, |buffer, cx| {
                        buffer.update_diagnostics(diagnostics.clone(), version, cx)
                    })?;
                    break;
                }
            }
        }
        // Persist on the worktree so buffers opened later still see them.
        worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(project_path.path.clone(), diagnostics, cx)
        })?;
        cx.emit(Event::DiagnosticsUpdated(project_path));
        Ok(())
    }
1047
    /// Returns the definitions of the symbol at `position` in the given buffer.
    ///
    /// For a buffer in a local worktree, this issues a `textDocument/definition`
    /// request to the buffer's language server and opens a buffer for every
    /// returned location, converting each LSP range into buffer anchors. For a
    /// remote project, it forwards a `GetDefinition` RPC to the host and
    /// deserializes the buffers/anchors it returns.
    ///
    /// Fails immediately when the buffer has no file, no language, or no
    /// running language server (local case), or when the project has no remote
    /// id (remote case).
    pub fn definition<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Definition>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Err(anyhow!("buffer does not belong to any worktree")));
        };

        let position = position.to_point_utf16(source_buffer);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_name;
            let lang_server;
            if let Some(lang) = source_buffer.language() {
                lang_name = lang.name().to_string();
                // Language servers are keyed by (worktree, language name).
                if let Some(server) = self
                    .language_servers
                    .get(&(worktree.read(cx).id(), lang_name.clone()))
                {
                    lang_server = server.clone();
                } else {
                    return Task::ready(Err(anyhow!("buffer does not have a language server")));
                };
            } else {
                return Task::ready(Err(anyhow!("buffer does not have a language")));
            }

            cx.spawn(|this, mut cx| async move {
                let response = lang_server
                    .request::<lsp::request::GotoDefinition>(lsp::GotoDefinitionParams {
                        text_document_position_params: lsp::TextDocumentPositionParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                            ),
                            position: lsp::Position::new(position.row, position.column),
                        },
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await?;

                let mut definitions = Vec::new();
                if let Some(response) = response {
                    // The LSP spec allows three response shapes; flatten them
                    // all into (uri, range) pairs before opening buffers.
                    let mut unresolved_locations = Vec::new();
                    match response {
                        lsp::GotoDefinitionResponse::Scalar(loc) => {
                            unresolved_locations.push((loc.uri, loc.range));
                        }
                        lsp::GotoDefinitionResponse::Array(locs) => {
                            unresolved_locations.extend(locs.into_iter().map(|l| (l.uri, l.range)));
                        }
                        lsp::GotoDefinitionResponse::Link(links) => {
                            unresolved_locations.extend(
                                links
                                    .into_iter()
                                    .map(|l| (l.target_uri, l.target_selection_range)),
                            );
                        }
                    }

                    for (target_uri, target_range) in unresolved_locations {
                        let target_buffer_handle = this
                            .update(&mut cx, |this, cx| {
                                this.open_local_buffer_from_lsp_path(
                                    target_uri,
                                    lang_name.clone(),
                                    lang_server.clone(),
                                    cx,
                                )
                            })
                            .await?;

                        cx.read(|cx| {
                            let target_buffer = target_buffer_handle.read(cx);
                            // Clip the server-provided range to valid buffer
                            // positions before anchoring it.
                            let target_start = target_buffer
                                .clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
                            let target_end = target_buffer
                                .clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
                            definitions.push(Definition {
                                target_buffer: target_buffer_handle,
                                target_range: target_buffer.anchor_after(target_start)
                                    ..target_buffer.anchor_before(target_end),
                            });
                        });
                    }
                }

                Ok(definitions)
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::GetDefinition {
                project_id,
                buffer_id: source_buffer.remote_id(),
                position: Some(serialize_anchor(&source_buffer.anchor_before(position))),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client.request(request).await?;
                this.update(&mut cx, |this, cx| {
                    let mut definitions = Vec::new();
                    for definition in response.definitions {
                        let target_buffer = this.deserialize_remote_buffer(
                            definition.buffer.ok_or_else(|| anyhow!("missing buffer"))?,
                            cx,
                        )?;
                        let target_start = definition
                            .target_start
                            .and_then(deserialize_anchor)
                            .ok_or_else(|| anyhow!("missing target start"))?;
                        let target_end = definition
                            .target_end
                            .and_then(deserialize_anchor)
                            .ok_or_else(|| anyhow!("missing target end"))?;
                        definitions.push(Definition {
                            target_buffer,
                            target_range: target_start..target_end,
                        })
                    }

                    Ok(definitions)
                })
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
1184
    /// Returns the completions available at `position` in the given buffer.
    ///
    /// For a buffer in a local worktree, this sends `textDocument/completion`
    /// to the buffer's language server and converts each returned item into a
    /// `Completion` anchored in the buffer. Items carrying an
    /// `InsertAndReplace` text edit are dropped (unsupported), as are items
    /// whose edit range does not clip cleanly to the buffer's current
    /// contents. For a remote project, a `GetCompletions` RPC is forwarded to
    /// the host instead.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Err(anyhow!("buffer does not belong to any worktree")));
        };

        let position = position.to_point_utf16(source_buffer);
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
                server
            } else {
                return Task::ready(Err(anyhow!("buffer does not have a language server")));
            };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            position.to_lsp_position(),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await?;

                // The server may reply with a bare array or a `CompletionList`.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    Ok(completions.into_iter().filter_map(|lsp_completion| {
                        let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
                            lsp::CompletionTextEdit::Edit(edit) => (range_from_lsp(edit.range), edit.new_text.clone()),
                            lsp::CompletionTextEdit::InsertAndReplace(_) => {
                                log::info!("received an insert and replace completion but we don't yet support that");
                                return None
                            },
                        };

                        // Only accept edits whose range is already valid in
                        // the buffer's current contents.
                        let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                        let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left) ;
                        if clipped_start == old_range.start && clipped_end == old_range.end {
                            Some(Completion {
                                old_range: this.anchor_before(old_range.start)..this.anchor_after(old_range.end),
                                new_text,
                                label: language.as_ref().and_then(|l| l.label_for_completion(&lsp_completion)).unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
                                lsp_completion,
                            })
                        } else {
                            None
                        }
                    }).collect())
                })

            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            cx.foreground().spawn(async move {
                let response = rpc
                    .request(proto::GetCompletions {
                        project_id,
                        buffer_id,
                        position: Some(language::proto::serialize_anchor(&anchor)),
                    })
                    .await?;
                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
1287
    /// Resolves `completion` with the language server and applies any
    /// additional text edits attached by the server, returning the resulting
    /// transaction (or `None` when the buffer has no language server or the
    /// resolved completion carries no additional edits).
    ///
    /// When `push_to_history` is false the transaction is applied but removed
    /// from the buffer's undo history. On a remote project the request is
    /// forwarded to the host and the returned transaction is replayed locally.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let lang_server = if let Some(language_server) = buffer.language_server() {
                language_server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|_, mut cx| async move {
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(additional_edits) = resolved_completion.additional_text_edits {
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        // Group all additional edits into one transaction so
                        // they undo as a unit.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        buffer.apply_lsp_edits(additional_edits, None, cx).log_err();
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Wait for the host's edits to arrive before exposing the
                    // transaction to callers.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
1361
    /// Requests the code actions available at `position` in the given buffer.
    ///
    /// For a buffer in a local worktree this sends `textDocument/codeAction`
    /// to the buffer's language server, restricted to the quickfix and
    /// refactor action kinds, and keeps only full `CodeAction` results
    /// (bare `Command`s are dropped). For a remote project a `GetCodeActions`
    /// RPC is forwarded to the host instead.
    pub fn code_actions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<CodeAction>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Err(anyhow!("buffer does not belong to any worktree")));
        };

        let position = position.to_point_utf16(source_buffer);
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_name;
            let lang_server;
            if let Some(lang) = source_buffer.language() {
                lang_name = lang.name().to_string();
                // Language servers are keyed by (worktree, language name).
                if let Some(server) = self
                    .language_servers
                    .get(&(worktree.read(cx).id(), lang_name.clone()))
                {
                    lang_server = server.clone();
                } else {
                    return Task::ready(Err(anyhow!("buffer does not have a language server")));
                };
            } else {
                return Task::ready(Err(anyhow!("buffer does not have a language")));
            }

            cx.foreground().spawn(async move {
                let actions = lang_server
                    .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
                        text_document: lsp::TextDocumentIdentifier::new(
                            lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                        ),
                        // An empty range at the requested position.
                        range: lsp::Range::new(
                            position.to_lsp_position(),
                            position.to_lsp_position(),
                        ),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                        context: lsp::CodeActionContext {
                            diagnostics: Default::default(),
                            only: Some(vec![
                                lsp::CodeActionKind::QUICKFIX,
                                lsp::CodeActionKind::REFACTOR,
                                lsp::CodeActionKind::REFACTOR_EXTRACT,
                            ]),
                        },
                    })
                    .await?
                    .unwrap_or_default()
                    .into_iter()
                    .filter_map(|entry| {
                        if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
                            Some(CodeAction {
                                position: anchor.clone(),
                                lsp_action,
                            })
                        } else {
                            None
                        }
                    })
                    .collect();
                Ok(actions)
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            cx.foreground().spawn(async move {
                let response = rpc
                    .request(proto::GetCodeActions {
                        project_id,
                        buffer_id,
                        position: Some(language::proto::serialize_anchor(&anchor)),
                    })
                    .await?;
                response
                    .actions
                    .into_iter()
                    .map(language::proto::deserialize_code_action)
                    .collect()
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
1458
1459 pub fn apply_code_action(
1460 &self,
1461 buffer_handle: ModelHandle<Buffer>,
1462 mut action: CodeAction,
1463 push_to_history: bool,
1464 cx: &mut ModelContext<Self>,
1465 ) -> Task<Result<ProjectTransaction>> {
1466 if self.is_local() {
1467 let buffer = buffer_handle.read(cx);
1468 let lang_name = if let Some(lang) = buffer.language() {
1469 lang.name().to_string()
1470 } else {
1471 return Task::ready(Ok(Default::default()));
1472 };
1473 let lang_server = if let Some(language_server) = buffer.language_server() {
1474 language_server.clone()
1475 } else {
1476 return Task::ready(Ok(Default::default()));
1477 };
1478 let position = action.position.to_point_utf16(buffer).to_lsp_position();
1479 let fs = self.fs.clone();
1480
1481 cx.spawn(|this, mut cx| async move {
1482 if let Some(range) = action
1483 .lsp_action
1484 .data
1485 .as_mut()
1486 .and_then(|d| d.get_mut("codeActionParams"))
1487 .and_then(|d| d.get_mut("range"))
1488 {
1489 *range = serde_json::to_value(&lsp::Range::new(position, position)).unwrap();
1490 action.lsp_action = lang_server
1491 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1492 .await?;
1493 } else {
1494 let actions = this
1495 .update(&mut cx, |this, cx| {
1496 this.code_actions(&buffer_handle, action.position.clone(), cx)
1497 })
1498 .await?;
1499 action.lsp_action = actions
1500 .into_iter()
1501 .find(|a| a.lsp_action.title == action.lsp_action.title)
1502 .ok_or_else(|| anyhow!("code action is outdated"))?
1503 .lsp_action;
1504 }
1505
1506 let mut operations = Vec::new();
1507 if let Some(edit) = action.lsp_action.edit {
1508 if let Some(document_changes) = edit.document_changes {
1509 match document_changes {
1510 lsp::DocumentChanges::Edits(edits) => operations
1511 .extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit)),
1512 lsp::DocumentChanges::Operations(ops) => operations = ops,
1513 }
1514 } else if let Some(changes) = edit.changes {
1515 operations.extend(changes.into_iter().map(|(uri, edits)| {
1516 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1517 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1518 uri,
1519 version: None,
1520 },
1521 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1522 })
1523 }));
1524 }
1525 }
1526
1527 let mut project_transaction = ProjectTransaction::default();
1528 for operation in operations {
1529 match operation {
1530 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1531 let abs_path = op
1532 .uri
1533 .to_file_path()
1534 .map_err(|_| anyhow!("can't convert URI to path"))?;
1535
1536 if let Some(parent_path) = abs_path.parent() {
1537 fs.create_dir(parent_path).await?;
1538 }
1539 if abs_path.ends_with("/") {
1540 fs.create_dir(&abs_path).await?;
1541 } else {
1542 fs.create_file(
1543 &abs_path,
1544 op.options.map(Into::into).unwrap_or_default(),
1545 )
1546 .await?;
1547 }
1548 }
1549 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1550 let source_abs_path = op
1551 .old_uri
1552 .to_file_path()
1553 .map_err(|_| anyhow!("can't convert URI to path"))?;
1554 let target_abs_path = op
1555 .new_uri
1556 .to_file_path()
1557 .map_err(|_| anyhow!("can't convert URI to path"))?;
1558 fs.rename(
1559 &source_abs_path,
1560 &target_abs_path,
1561 op.options.map(Into::into).unwrap_or_default(),
1562 )
1563 .await?;
1564 }
1565 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1566 let abs_path = op
1567 .uri
1568 .to_file_path()
1569 .map_err(|_| anyhow!("can't convert URI to path"))?;
1570 let options = op.options.map(Into::into).unwrap_or_default();
1571 if abs_path.ends_with("/") {
1572 fs.remove_dir(&abs_path, options).await?;
1573 } else {
1574 fs.remove_file(&abs_path, options).await?;
1575 }
1576 }
1577 lsp::DocumentChangeOperation::Edit(op) => {
1578 let buffer_to_edit = this
1579 .update(&mut cx, |this, cx| {
1580 this.open_local_buffer_from_lsp_path(
1581 op.text_document.uri,
1582 lang_name.clone(),
1583 lang_server.clone(),
1584 cx,
1585 )
1586 })
1587 .await?;
1588 let transaction = buffer_to_edit.update(&mut cx, |buffer, cx| {
1589 let edits = op.edits.into_iter().map(|edit| match edit {
1590 lsp::OneOf::Left(edit) => edit,
1591 lsp::OneOf::Right(edit) => edit.text_edit,
1592 });
1593
1594 buffer.finalize_last_transaction();
1595 buffer.start_transaction();
1596 buffer
1597 .apply_lsp_edits(edits, op.text_document.version, cx)
1598 .log_err();
1599 let transaction = if buffer.end_transaction(cx).is_some() {
1600 let transaction =
1601 buffer.finalize_last_transaction().unwrap().clone();
1602 if !push_to_history {
1603 buffer.forget_transaction(transaction.id);
1604 }
1605 Some(transaction)
1606 } else {
1607 None
1608 };
1609
1610 transaction
1611 });
1612 if let Some(transaction) = transaction {
1613 project_transaction.0.insert(buffer_to_edit, transaction);
1614 }
1615 }
1616 }
1617 }
1618
1619 Ok(project_transaction)
1620 })
1621 } else if let Some(project_id) = self.remote_id() {
1622 let client = self.client.clone();
1623 let request = proto::ApplyCodeAction {
1624 project_id,
1625 buffer_id: buffer_handle.read(cx).remote_id(),
1626 action: Some(language::proto::serialize_code_action(&action)),
1627 };
1628 cx.spawn(|this, mut cx| async move {
1629 let response = client
1630 .request(request)
1631 .await?
1632 .transaction
1633 .ok_or_else(|| anyhow!("missing transaction"))?;
1634 let mut project_transaction = ProjectTransaction::default();
1635 for (buffer, transaction) in response.buffers.into_iter().zip(response.transactions)
1636 {
1637 let buffer = this.update(&mut cx, |this, cx| {
1638 this.deserialize_remote_buffer(buffer, cx)
1639 })?;
1640 let transaction = language::proto::deserialize_transaction(transaction)?;
1641
1642 buffer
1643 .update(&mut cx, |buffer, _| {
1644 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1645 })
1646 .await;
1647
1648 if push_to_history {
1649 buffer.update(&mut cx, |buffer, _| {
1650 buffer.push_transaction(transaction.clone(), Instant::now());
1651 });
1652 }
1653
1654 project_transaction.0.insert(buffer, transaction);
1655 }
1656 Ok(project_transaction)
1657 })
1658 } else {
1659 Task::ready(Err(anyhow!("project does not have a remote id")))
1660 }
1661 }
1662
1663 pub fn find_or_create_local_worktree(
1664 &self,
1665 abs_path: impl AsRef<Path>,
1666 weak: bool,
1667 cx: &mut ModelContext<Self>,
1668 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1669 let abs_path = abs_path.as_ref();
1670 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1671 Task::ready(Ok((tree.clone(), relative_path.into())))
1672 } else {
1673 let worktree = self.create_local_worktree(abs_path, weak, cx);
1674 cx.foreground()
1675 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1676 }
1677 }
1678
1679 fn find_local_worktree(
1680 &self,
1681 abs_path: &Path,
1682 cx: &AppContext,
1683 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1684 for tree in self.worktrees(cx) {
1685 if let Some(relative_path) = tree
1686 .read(cx)
1687 .as_local()
1688 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1689 {
1690 return Some((tree.clone(), relative_path.into()));
1691 }
1692 }
1693 None
1694 }
1695
1696 pub fn is_shared(&self) -> bool {
1697 match &self.client_state {
1698 ProjectClientState::Local { is_shared, .. } => *is_shared,
1699 ProjectClientState::Remote { .. } => false,
1700 }
1701 }
1702
    /// Builds a new local worktree rooted at `abs_path` and adds it to the
    /// project. If the project has a remote id, the worktree is registered
    /// with the server, and additionally shared when the project is currently
    /// shared.
    fn create_local_worktree(
        &self,
        abs_path: impl AsRef<Path>,
        weak: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let path = Arc::from(abs_path.as_ref());
        cx.spawn(|project, mut cx| async move {
            let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;

            // Read the remote id and sharing state in the same update that
            // adds the worktree, so they're consistent with each other.
            let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
                project.add_worktree(&worktree, cx);
                (project.remote_id(), project.is_shared())
            });

            if let Some(project_id) = remote_project_id {
                // Registration must complete before sharing.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_local_mut().unwrap().register(project_id, cx)
                    })
                    .await?;
                if is_shared {
                    worktree
                        .update(&mut cx, |worktree, cx| {
                            worktree.as_local_mut().unwrap().share(project_id, cx)
                        })
                        .await?;
                }
            }

            Ok(worktree)
        })
    }
1738
1739 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1740 self.worktrees.retain(|worktree| {
1741 worktree
1742 .upgrade(cx)
1743 .map_or(false, |w| w.read(cx).id() != id)
1744 });
1745 cx.notify();
1746 }
1747
1748 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
1749 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
1750 if worktree.read(cx).is_local() {
1751 cx.subscribe(&worktree, |this, worktree, _, cx| {
1752 this.update_local_worktree_buffers(worktree, cx);
1753 })
1754 .detach();
1755 }
1756
1757 let push_weak_handle = {
1758 let worktree = worktree.read(cx);
1759 worktree.is_local() && worktree.is_weak()
1760 };
1761 if push_weak_handle {
1762 cx.observe_release(&worktree, |this, cx| {
1763 this.worktrees
1764 .retain(|worktree| worktree.upgrade(cx).is_some());
1765 cx.notify();
1766 })
1767 .detach();
1768 self.worktrees
1769 .push(WorktreeHandle::Weak(worktree.downgrade()));
1770 } else {
1771 self.worktrees
1772 .push(WorktreeHandle::Strong(worktree.clone()));
1773 }
1774 cx.notify();
1775 }
1776
    /// Reconciles every open buffer belonging to `worktree_handle` with the
    /// worktree's latest snapshot.
    ///
    /// For each buffer, a replacement `File` is computed by first looking up
    /// the old entry id in the snapshot, then falling back to a lookup by
    /// path, and finally — when neither exists — to a file with no entry id.
    /// The new file is broadcast to remote collaborators (when connected) and
    /// installed on the buffer. Buffers whose models have been dropped are
    /// pruned from `open_buffers` afterwards.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        let mut buffers_to_delete = Vec::new();
        for (buffer_id, buffer) in &self.open_buffers {
            if let Some(buffer) = buffer.upgrade(cx) {
                buffer.update(cx, |buffer, cx| {
                    if let Some(old_file) = File::from_dyn(buffer.file()) {
                        // Skip buffers that live in a different worktree.
                        if old_file.worktree != worktree_handle {
                            return;
                        }

                        let new_file = if let Some(entry) = old_file
                            .entry_id
                            .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                        {
                            // Entry still exists (possibly moved); refresh
                            // path and mtime from the snapshot.
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else if let Some(entry) =
                            snapshot.entry_for_path(old_file.path().as_ref())
                        {
                            // Old entry id is gone, but something exists at
                            // the same path; adopt that entry.
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else {
                            // Nothing at this path anymore; keep the old path
                            // and mtime but drop the entry id.
                            File {
                                is_local: true,
                                entry_id: None,
                                path: old_file.path().clone(),
                                mtime: old_file.mtime(),
                                worktree: worktree_handle.clone(),
                            }
                        };

                        if let Some(project_id) = self.remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: *buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }
                        buffer.file_updated(Box::new(new_file), cx).detach();
                    }
                });
            } else {
                buffers_to_delete.push(*buffer_id);
            }
        }

        for buffer_id in buffers_to_delete {
            self.open_buffers.remove(&buffer_id);
        }
    }
1844
1845 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
1846 let new_active_entry = entry.and_then(|project_path| {
1847 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1848 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
1849 Some(ProjectEntry {
1850 worktree_id: project_path.worktree_id,
1851 entry_id: entry.id,
1852 })
1853 });
1854 if new_active_entry != self.active_entry {
1855 self.active_entry = new_active_entry;
1856 cx.emit(Event::ActiveEntryChanged(new_active_entry));
1857 }
1858 }
1859
    /// Whether any language server is currently producing disk-based
    /// diagnostics (tracked via the started/finished counter below).
    pub fn is_running_disk_based_diagnostics(&self) -> bool {
        self.language_servers_with_diagnostics_running > 0
    }
1863
1864 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
1865 let mut summary = DiagnosticSummary::default();
1866 for (_, path_summary) in self.diagnostic_summaries(cx) {
1867 summary.error_count += path_summary.error_count;
1868 summary.warning_count += path_summary.warning_count;
1869 summary.info_count += path_summary.info_count;
1870 summary.hint_count += path_summary.hint_count;
1871 }
1872 summary
1873 }
1874
    /// Iterates over the diagnostic summary of every path in every worktree,
    /// tagging each summary with its project-relative path.
    pub fn diagnostic_summaries<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
        self.worktrees(cx).flat_map(move |worktree| {
            let worktree = worktree.read(cx);
            let worktree_id = worktree.id();
            worktree
                .diagnostic_summaries()
                .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
        })
    }
1887
    /// Records that a language server started producing disk-based
    /// diagnostics, emitting `DiskBasedDiagnosticsStarted` only on the
    /// transition from zero to one running server.
    pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
        self.language_servers_with_diagnostics_running += 1;
        if self.language_servers_with_diagnostics_running == 1 {
            cx.emit(Event::DiskBasedDiagnosticsStarted);
        }
    }
1894
    /// Records that a language server finished producing disk-based
    /// diagnostics. `DiskBasedDiagnosticsUpdated` is emitted on every call,
    /// before the counter is decremented; `DiskBasedDiagnosticsFinished` is
    /// emitted only once the last running server finishes.
    pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
        cx.emit(Event::DiskBasedDiagnosticsUpdated);
        self.language_servers_with_diagnostics_running -= 1;
        if self.language_servers_with_diagnostics_running == 0 {
            cx.emit(Event::DiskBasedDiagnosticsFinished);
        }
    }
1902
    /// The entry most recently recorded via `set_active_path`, if any.
    pub fn active_entry(&self) -> Option<ProjectEntry> {
        self.active_entry
    }
1906
1907 // RPC message handlers
1908
    /// Handles the host stopping the share of a remote project: marks sharing
    /// as stopped and drops all collaborators.
    fn handle_unshare_project(
        &mut self,
        _: TypedEnvelope<proto::UnshareProject>,
        _: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        if let ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        } = &mut self.client_state
        {
            *sharing_has_stopped = true;
            self.collaborators.clear();
            cx.notify();
            Ok(())
        } else {
            // NOTE(review): assumes this message is never delivered to a
            // local project — a Local client state would panic here. Confirm
            // against how the handler is registered.
            unreachable!()
        }
    }
1928
    /// Handles a new collaborator joining the project.
    ///
    /// Collaborator details are resolved asynchronously through the user
    /// store before being inserted; the spawned task is detached and failures
    /// are logged rather than propagated.
    fn handle_add_collaborator(
        &mut self,
        mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
        _: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let user_store = self.user_store.clone();
        let collaborator = envelope
            .payload
            .collaborator
            .take()
            .ok_or_else(|| anyhow!("empty collaborator"))?;

        cx.spawn(|this, mut cx| {
            async move {
                let collaborator =
                    Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
                this.update(&mut cx, |this, cx| {
                    this.collaborators
                        .insert(collaborator.peer_id, collaborator);
                    cx.notify();
                });
                Ok(())
            }
            .log_err()
        })
        .detach();

        Ok(())
    }
1959
1960 fn handle_remove_collaborator(
1961 &mut self,
1962 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
1963 _: Arc<Client>,
1964 cx: &mut ModelContext<Self>,
1965 ) -> Result<()> {
1966 let peer_id = PeerId(envelope.payload.peer_id);
1967 let replica_id = self
1968 .collaborators
1969 .remove(&peer_id)
1970 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
1971 .replica_id;
1972 self.shared_buffers.remove(&peer_id);
1973 for (_, buffer) in &self.open_buffers {
1974 if let Some(buffer) = buffer.upgrade(cx) {
1975 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
1976 }
1977 }
1978 cx.notify();
1979 Ok(())
1980 }
1981
1982 fn handle_share_worktree(
1983 &mut self,
1984 envelope: TypedEnvelope<proto::ShareWorktree>,
1985 client: Arc<Client>,
1986 cx: &mut ModelContext<Self>,
1987 ) -> Result<()> {
1988 let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
1989 let replica_id = self.replica_id();
1990 let worktree = envelope
1991 .payload
1992 .worktree
1993 .ok_or_else(|| anyhow!("invalid worktree"))?;
1994 let (worktree, load_task) = Worktree::remote(remote_id, replica_id, worktree, client, cx);
1995 self.add_worktree(&worktree, cx);
1996 load_task.detach();
1997 Ok(())
1998 }
1999
2000 fn handle_unregister_worktree(
2001 &mut self,
2002 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2003 _: Arc<Client>,
2004 cx: &mut ModelContext<Self>,
2005 ) -> Result<()> {
2006 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2007 self.remove_worktree(worktree_id, cx);
2008 Ok(())
2009 }
2010
2011 fn handle_update_worktree(
2012 &mut self,
2013 envelope: TypedEnvelope<proto::UpdateWorktree>,
2014 _: Arc<Client>,
2015 cx: &mut ModelContext<Self>,
2016 ) -> Result<()> {
2017 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2018 if let Some(worktree) = self.worktree_for_id(worktree_id, cx) {
2019 worktree.update(cx, |worktree, cx| {
2020 let worktree = worktree.as_remote_mut().unwrap();
2021 worktree.update_from_remote(envelope, cx)
2022 })?;
2023 }
2024 Ok(())
2025 }
2026
    /// Applies a diagnostic summary update from the host to the matching
    /// remote worktree and re-emits it as a `DiagnosticsUpdated` event.
    /// Updates for unknown worktrees, or with no summary, are ignored.
    fn handle_update_diagnostic_summary(
        &mut self,
        envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
        _: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        if let Some(worktree) = self.worktree_for_id(worktree_id, cx) {
            if let Some(summary) = envelope.payload.summary {
                let project_path = ProjectPath {
                    worktree_id,
                    path: Path::new(&summary.path).into(),
                };
                worktree.update(cx, |worktree, _| {
                    worktree
                        .as_remote_mut()
                        .unwrap()
                        .update_diagnostic_summary(project_path.path.clone(), &summary);
                });
                cx.emit(Event::DiagnosticsUpdated(project_path));
            }
        }
        Ok(())
    }
2051
    /// Relays the host's "disk-based diagnostics started" notification into
    /// the local started counter/event.
    fn handle_disk_based_diagnostics_updating(
        &mut self,
        _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
        _: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        self.disk_based_diagnostics_started(cx);
        Ok(())
    }
2061
    /// Relays the host's "disk-based diagnostics finished" notification into
    /// the local finished counter/events.
    fn handle_disk_based_diagnostics_updated(
        &mut self,
        _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
        _: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        self.disk_based_diagnostics_finished(cx);
        Ok(())
    }
2071
2072 pub fn handle_update_buffer(
2073 &mut self,
2074 envelope: TypedEnvelope<proto::UpdateBuffer>,
2075 _: Arc<Client>,
2076 cx: &mut ModelContext<Self>,
2077 ) -> Result<()> {
2078 let payload = envelope.payload.clone();
2079 let buffer_id = payload.buffer_id as usize;
2080 let ops = payload
2081 .operations
2082 .into_iter()
2083 .map(|op| language::proto::deserialize_operation(op))
2084 .collect::<Result<Vec<_>, _>>()?;
2085 if let Some(buffer) = self.open_buffers.get_mut(&buffer_id) {
2086 if let Some(buffer) = buffer.upgrade(cx) {
2087 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2088 }
2089 }
2090 Ok(())
2091 }
2092
2093 pub fn handle_update_buffer_file(
2094 &mut self,
2095 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2096 _: Arc<Client>,
2097 cx: &mut ModelContext<Self>,
2098 ) -> Result<()> {
2099 let payload = envelope.payload.clone();
2100 let buffer_id = payload.buffer_id as usize;
2101 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2102 let worktree = self
2103 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2104 .ok_or_else(|| anyhow!("no such worktree"))?;
2105 let file = File::from_proto(file, worktree.clone(), cx)?;
2106 let buffer = self
2107 .open_buffers
2108 .get_mut(&buffer_id)
2109 .and_then(|b| b.upgrade(cx))
2110 .ok_or_else(|| anyhow!("no such buffer"))?;
2111 buffer.update(cx, |buffer, cx| {
2112 buffer.file_updated(Box::new(file), cx).detach();
2113 });
2114
2115 Ok(())
2116 }
2117
    /// Saves a shared buffer on behalf of a remote collaborator and replies
    /// with a `BufferSaved` message carrying the saved version and mtime.
    pub fn handle_save_buffer(
        &mut self,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        rpc: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let sender_id = envelope.original_sender_id()?;
        let project_id = self.remote_id().ok_or_else(|| anyhow!("not connected"))?;
        // Only buffers previously shared with this peer can be saved on its behalf.
        let buffer = self
            .shared_buffers
            .get(&sender_id)
            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
        let receipt = envelope.receipt();
        let buffer_id = envelope.payload.buffer_id;
        // Kick off the save on the foreground (model) context...
        let save = cx.spawn(|_, mut cx| async move {
            buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await
        });

        // ...and respond from a background task once it completes. Failures
        // are logged rather than propagated, since the request was accepted.
        cx.background()
            .spawn(
                async move {
                    let (version, mtime) = save.await?;

                    rpc.respond(
                        receipt,
                        proto::BufferSaved {
                            project_id,
                            buffer_id,
                            version: (&version).into(),
                            mtime: Some(mtime.into()),
                        },
                    )?;

                    Ok(())
                }
                .log_err(),
            )
            .detach();
        Ok(())
    }
2159
    /// Formats a shared buffer on behalf of a remote collaborator, replying
    /// with `Ack` on success or a protocol error on failure.
    pub fn handle_format_buffer(
        &mut self,
        envelope: TypedEnvelope<proto::FormatBuffer>,
        rpc: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let receipt = envelope.receipt();
        let sender_id = envelope.original_sender_id()?;
        // The buffer must already have been shared with the requesting peer.
        let buffer = self
            .shared_buffers
            .get(&sender_id)
            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
        cx.spawn(|_, mut cx| async move {
            let format = buffer.update(&mut cx, |buffer, cx| buffer.format(cx)).await;
            // We spawn here in order to enqueue the sending of `Ack` *after* transmission of edits
            // associated with formatting.
            cx.spawn(|_| async move {
                match format {
                    Ok(()) => rpc.respond(receipt, proto::Ack {})?,
                    Err(error) => rpc.respond_with_error(
                        receipt,
                        proto::Error {
                            message: error.to_string(),
                        },
                    )?,
                }
                Ok::<_, anyhow::Error>(())
            })
            .await
            .log_err();
        })
        .detach();
        Ok(())
    }
2195
    /// Computes completions at a position in a shared buffer on behalf of a
    /// remote collaborator and responds with the serialized results.
    fn handle_get_completions(
        &mut self,
        envelope: TypedEnvelope<proto::GetCompletions>,
        rpc: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let receipt = envelope.receipt();
        let sender_id = envelope.original_sender_id()?;
        // The buffer must already have been shared with the requesting peer.
        let buffer = self
            .shared_buffers
            .get(&sender_id)
            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
        // The position arrives as a serialized anchor within that buffer.
        let position = envelope
            .payload
            .position
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        cx.spawn(|this, mut cx| async move {
            match this
                .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
                .await
            {
                Ok(completions) => rpc.respond(
                    receipt,
                    proto::GetCompletionsResponse {
                        completions: completions
                            .iter()
                            .map(language::proto::serialize_completion)
                            .collect(),
                    },
                ),
                // Errors are reported back to the requester rather than only
                // logged locally.
                Err(error) => rpc.respond_with_error(
                    receipt,
                    proto::Error {
                        message: error.to_string(),
                    },
                ),
            }
        })
        .detach_and_log_err(cx);
        Ok(())
    }
2239
    /// Applies a completion's additional edits to a shared buffer on behalf of
    /// a remote collaborator and replies with the resulting transaction, if any.
    fn handle_apply_additional_edits_for_completion(
        &mut self,
        envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
        rpc: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let receipt = envelope.receipt();
        let sender_id = envelope.original_sender_id()?;
        let buffer = self
            .shared_buffers
            .get(&sender_id)
            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
        // The buffer's language is required to reconstruct the completion.
        let language = buffer.read(cx).language();
        let completion = language::proto::deserialize_completion(
            envelope
                .payload
                .completion
                .ok_or_else(|| anyhow!("invalid completion"))?,
            language,
        )?;
        cx.spawn(|this, mut cx| async move {
            match this
                .update(&mut cx, |this, cx| {
                    // NOTE(review): the `false` flag's meaning isn't visible here —
                    // confirm against `apply_additional_edits_for_completion`.
                    this.apply_additional_edits_for_completion(buffer, completion, false, cx)
                })
                .await
            {
                Ok(transaction) => rpc.respond(
                    receipt,
                    proto::ApplyCompletionAdditionalEditsResponse {
                        transaction: transaction
                            .as_ref()
                            .map(language::proto::serialize_transaction),
                    },
                ),
                Err(error) => rpc.respond_with_error(
                    receipt,
                    proto::Error {
                        message: error.to_string(),
                    },
                ),
            }
        })
        .detach_and_log_err(cx);
        Ok(())
    }
2287
2288 fn handle_get_code_actions(
2289 &mut self,
2290 envelope: TypedEnvelope<proto::GetCodeActions>,
2291 rpc: Arc<Client>,
2292 cx: &mut ModelContext<Self>,
2293 ) -> Result<()> {
2294 let receipt = envelope.receipt();
2295 let sender_id = envelope.original_sender_id()?;
2296 let buffer = self
2297 .shared_buffers
2298 .get(&sender_id)
2299 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2300 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2301 let position = envelope
2302 .payload
2303 .position
2304 .and_then(language::proto::deserialize_anchor)
2305 .ok_or_else(|| anyhow!("invalid position"))?;
2306 cx.spawn(|this, mut cx| async move {
2307 match this
2308 .update(&mut cx, |this, cx| this.code_actions(&buffer, position, cx))
2309 .await
2310 {
2311 Ok(completions) => rpc.respond(
2312 receipt,
2313 proto::GetCodeActionsResponse {
2314 actions: completions
2315 .iter()
2316 .map(language::proto::serialize_code_action)
2317 .collect(),
2318 },
2319 ),
2320 Err(error) => rpc.respond_with_error(
2321 receipt,
2322 proto::Error {
2323 message: error.to_string(),
2324 },
2325 ),
2326 }
2327 })
2328 .detach_and_log_err(cx);
2329 Ok(())
2330 }
2331
    /// Applies a code action on behalf of a remote collaborator.
    ///
    /// A code action may edit several buffers, so the response carries a
    /// `ProjectTransaction`: parallel lists of buffers (each serialized for
    /// the requesting peer) and their corresponding transactions.
    fn handle_apply_code_action(
        &mut self,
        envelope: TypedEnvelope<proto::ApplyCodeAction>,
        rpc: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let receipt = envelope.receipt();
        let sender_id = envelope.original_sender_id()?;
        let buffer = self
            .shared_buffers
            .get(&sender_id)
            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
        let action = language::proto::deserialize_code_action(
            envelope
                .payload
                .action
                .ok_or_else(|| anyhow!("invalid action"))?,
        )?;
        let apply_code_action = self.apply_code_action(buffer, action, false, cx);
        cx.spawn(|this, mut cx| async move {
            match apply_code_action.await {
                Ok(project_transaction) => this.update(&mut cx, |this, cx| {
                    let mut serialized_transaction = proto::ProjectTransaction {
                        buffers: Default::default(),
                        transactions: Default::default(),
                    };
                    // Serializing each buffer for the peer also shares it with
                    // that peer if it wasn't shared already.
                    for (buffer, transaction) in project_transaction.0 {
                        serialized_transaction
                            .buffers
                            .push(this.serialize_buffer_for_peer(&buffer, sender_id, cx));
                        serialized_transaction
                            .transactions
                            .push(language::proto::serialize_transaction(&transaction));
                    }
                    rpc.respond(
                        receipt,
                        proto::ApplyCodeActionResponse {
                            transaction: Some(serialized_transaction),
                        },
                    )
                }),
                Err(error) => rpc.respond_with_error(
                    receipt,
                    proto::Error {
                        message: error.to_string(),
                    },
                ),
            }
        })
        .detach_and_log_err(cx);
        Ok(())
    }
2385
    /// Resolves a go-to-definition request from a remote collaborator.
    ///
    /// Each resulting target buffer is serialized for the requesting peer
    /// (transmitting its full state if this is the first time it's sent),
    /// along with the target range expressed as anchors.
    pub fn handle_get_definition(
        &mut self,
        envelope: TypedEnvelope<proto::GetDefinition>,
        rpc: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let receipt = envelope.receipt();
        let sender_id = envelope.original_sender_id()?;
        let source_buffer = self
            .shared_buffers
            .get(&sender_id)
            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
        let position = envelope
            .payload
            .position
            .and_then(deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        // Guard against anchors the buffer can't resolve (presumably stale or
        // from another buffer) — see `Buffer::can_resolve`.
        if !source_buffer.read(cx).can_resolve(&position) {
            return Err(anyhow!("cannot resolve position"));
        }

        let definitions = self.definition(&source_buffer, position, cx);
        cx.spawn(|this, mut cx| async move {
            let definitions = definitions.await?;
            let mut response = proto::GetDefinitionResponse {
                definitions: Default::default(),
            };
            this.update(&mut cx, |this, cx| {
                for definition in definitions {
                    let buffer =
                        this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx);
                    response.definitions.push(proto::Definition {
                        target_start: Some(serialize_anchor(&definition.target_range.start)),
                        target_end: Some(serialize_anchor(&definition.target_range.end)),
                        buffer: Some(buffer),
                    });
                }
            });
            rpc.respond(receipt, response)?;
            Ok::<_, anyhow::Error>(())
        })
        .detach_and_log_err(cx);

        Ok(())
    }
2432
    /// Opens a buffer at a worktree-relative path on behalf of a remote peer
    /// and responds with the buffer serialized for that peer.
    pub fn handle_open_buffer(
        &mut self,
        envelope: TypedEnvelope<proto::OpenBuffer>,
        rpc: Arc<Client>,
        cx: &mut ModelContext<Self>,
    ) -> anyhow::Result<()> {
        let receipt = envelope.receipt();
        let peer_id = envelope.original_sender_id()?;
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let open_buffer = self.open_buffer(
            ProjectPath {
                worktree_id,
                path: PathBuf::from(envelope.payload.path).into(),
            },
            cx,
        );
        cx.spawn(|this, mut cx| {
            async move {
                let buffer = open_buffer.await?;
                // Serializing for the peer marks the buffer as shared with it,
                // so later id-only messages can refer to it.
                let buffer = this.update(&mut cx, |this, cx| {
                    this.serialize_buffer_for_peer(&buffer, peer_id, cx)
                });
                rpc.respond(
                    receipt,
                    proto::OpenBufferResponse {
                        buffer: Some(buffer),
                    },
                )
            }
            .log_err()
        })
        .detach();
        Ok(())
    }
2467
2468 fn serialize_buffer_for_peer(
2469 &mut self,
2470 buffer: &ModelHandle<Buffer>,
2471 peer_id: PeerId,
2472 cx: &AppContext,
2473 ) -> proto::Buffer {
2474 let buffer_id = buffer.read(cx).remote_id();
2475 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2476 match shared_buffers.entry(buffer_id) {
2477 hash_map::Entry::Occupied(_) => proto::Buffer {
2478 variant: Some(proto::buffer::Variant::Id(buffer_id)),
2479 },
2480 hash_map::Entry::Vacant(entry) => {
2481 entry.insert(buffer.clone());
2482 proto::Buffer {
2483 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
2484 }
2485 }
2486 }
2487 }
2488
    /// Materializes a buffer received over the wire.
    ///
    /// The protocol sends either just a buffer id (when this side is expected
    /// to already have the buffer open) or the buffer's full state.
    fn deserialize_remote_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Result<ModelHandle<Buffer>> {
        match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
            // Id-only: look it up among this project's open buffers.
            proto::buffer::Variant::Id(id) => self
                .open_buffers
                .get(&(id as usize))
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("no buffer exists for id {}", id)),
            // Full state: reconstruct the buffer, resolving its file against a
            // local worktree when one is attached.
            proto::buffer::Variant::State(mut buffer) => {
                let mut buffer_worktree = None;
                let mut buffer_file = None;
                if let Some(file) = buffer.file.take() {
                    let worktree_id = WorktreeId::from_proto(file.worktree_id);
                    let worktree = self
                        .worktree_for_id(worktree_id, cx)
                        .ok_or_else(|| anyhow!("no worktree found for id {}", file.worktree_id))?;
                    buffer_file = Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                        as Box<dyn language::File>);
                    buffer_worktree = Some(worktree);
                }

                let buffer = cx.add_model(|cx| {
                    Buffer::from_proto(self.replica_id(), buffer, buffer_file, cx).unwrap()
                });
                self.register_buffer(&buffer, buffer_worktree.as_ref(), cx)?;
                Ok(buffer)
            }
        }
    }
2521
2522 pub fn handle_close_buffer(
2523 &mut self,
2524 envelope: TypedEnvelope<proto::CloseBuffer>,
2525 _: Arc<Client>,
2526 cx: &mut ModelContext<Self>,
2527 ) -> anyhow::Result<()> {
2528 if let Some(shared_buffers) = self.shared_buffers.get_mut(&envelope.original_sender_id()?) {
2529 shared_buffers.remove(&envelope.payload.buffer_id);
2530 cx.notify();
2531 }
2532 Ok(())
2533 }
2534
2535 pub fn handle_buffer_saved(
2536 &mut self,
2537 envelope: TypedEnvelope<proto::BufferSaved>,
2538 _: Arc<Client>,
2539 cx: &mut ModelContext<Self>,
2540 ) -> Result<()> {
2541 let payload = envelope.payload.clone();
2542 let buffer = self
2543 .open_buffers
2544 .get(&(payload.buffer_id as usize))
2545 .and_then(|buffer| buffer.upgrade(cx));
2546 if let Some(buffer) = buffer {
2547 buffer.update(cx, |buffer, cx| {
2548 let version = payload.version.try_into()?;
2549 let mtime = payload
2550 .mtime
2551 .ok_or_else(|| anyhow!("missing mtime"))?
2552 .into();
2553 buffer.did_save(version, mtime, None, cx);
2554 Result::<_, anyhow::Error>::Ok(())
2555 })?;
2556 }
2557 Ok(())
2558 }
2559
2560 pub fn handle_buffer_reloaded(
2561 &mut self,
2562 envelope: TypedEnvelope<proto::BufferReloaded>,
2563 _: Arc<Client>,
2564 cx: &mut ModelContext<Self>,
2565 ) -> Result<()> {
2566 let payload = envelope.payload.clone();
2567 let buffer = self
2568 .open_buffers
2569 .get(&(payload.buffer_id as usize))
2570 .and_then(|buffer| buffer.upgrade(cx));
2571 if let Some(buffer) = buffer {
2572 buffer.update(cx, |buffer, cx| {
2573 let version = payload.version.try_into()?;
2574 let mtime = payload
2575 .mtime
2576 .ok_or_else(|| anyhow!("missing mtime"))?
2577 .into();
2578 buffer.did_reload(version, mtime, cx);
2579 Result::<_, anyhow::Error>::Ok(())
2580 })?;
2581 }
2582 Ok(())
2583 }
2584
2585 pub fn match_paths<'a>(
2586 &self,
2587 query: &'a str,
2588 include_ignored: bool,
2589 smart_case: bool,
2590 max_results: usize,
2591 cancel_flag: &'a AtomicBool,
2592 cx: &AppContext,
2593 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
2594 let worktrees = self
2595 .worktrees(cx)
2596 .filter(|worktree| !worktree.read(cx).is_weak())
2597 .collect::<Vec<_>>();
2598 let include_root_name = worktrees.len() > 1;
2599 let candidate_sets = worktrees
2600 .into_iter()
2601 .map(|worktree| CandidateSet {
2602 snapshot: worktree.read(cx).snapshot(),
2603 include_ignored,
2604 include_root_name,
2605 })
2606 .collect::<Vec<_>>();
2607
2608 let background = cx.background().clone();
2609 async move {
2610 fuzzy::match_paths(
2611 candidate_sets.as_slice(),
2612 query,
2613 smart_case,
2614 max_results,
2615 cancel_flag,
2616 background,
2617 )
2618 .await
2619 }
2620 }
2621}
2622
2623impl WorktreeHandle {
2624 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2625 match self {
2626 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2627 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2628 }
2629 }
2630}
2631
/// A worktree snapshot adapted for use as a fuzzy path-match candidate source.
struct CandidateSet {
    snapshot: Snapshot,
    // Whether gitignored files participate in matching.
    include_ignored: bool,
    // Whether match paths are prefixed with the worktree's root name.
    include_root_name: bool,
}
2637
2638impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
2639 type Candidates = CandidateSetIter<'a>;
2640
2641 fn id(&self) -> usize {
2642 self.snapshot.id().to_usize()
2643 }
2644
2645 fn len(&self) -> usize {
2646 if self.include_ignored {
2647 self.snapshot.file_count()
2648 } else {
2649 self.snapshot.visible_file_count()
2650 }
2651 }
2652
2653 fn prefix(&self) -> Arc<str> {
2654 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
2655 self.snapshot.root_name().into()
2656 } else if self.include_root_name {
2657 format!("{}/", self.snapshot.root_name()).into()
2658 } else {
2659 "".into()
2660 }
2661 }
2662
2663 fn candidates(&'a self, start: usize) -> Self::Candidates {
2664 CandidateSetIter {
2665 traversal: self.snapshot.files(self.include_ignored, start),
2666 }
2667 }
2668}
2669
/// Iterator adapter turning a worktree file traversal into fuzzy-match candidates.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
2673
2674impl<'a> Iterator for CandidateSetIter<'a> {
2675 type Item = PathMatchCandidate<'a>;
2676
2677 fn next(&mut self) -> Option<Self::Item> {
2678 self.traversal.next().map(|entry| {
2679 if let EntryKind::File(char_bag) = entry.kind {
2680 PathMatchCandidate {
2681 path: &entry.path,
2682 char_bag,
2683 }
2684 } else {
2685 unreachable!()
2686 }
2687 })
2688 }
2689}
2690
impl Entity for Project {
    type Event = Event;

    /// Notifies the server when the project is dropped: local (host) projects
    /// unregister themselves, remote (guest) projects leave.
    fn release(&mut self, _: &mut gpui::MutableAppContext) {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => {
                // Only unregister if the project was actually assigned a
                // remote id (i.e. it was registered with the server).
                if let Some(project_id) = *remote_id_rx.borrow() {
                    self.client
                        .send(proto::UnregisterProject { project_id })
                        .log_err();
                }
            }
            ProjectClientState::Remote { remote_id, .. } => {
                self.client
                    .send(proto::LeaveProject {
                        project_id: *remote_id,
                    })
                    .log_err();
            }
        }
    }

    /// Shuts down all language servers before the application quits, waiting
    /// for their shutdown futures to complete.
    fn app_will_quit(
        &mut self,
        _: &mut MutableAppContext,
    ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
        use futures::FutureExt;

        // `shutdown()` may return `None` (no shutdown work); such servers are
        // skipped.
        let shutdown_futures = self
            .language_servers
            .drain()
            .filter_map(|(_, server)| server.shutdown())
            .collect::<Vec<_>>();
        Some(
            async move {
                futures::future::join_all(shutdown_futures).await;
            }
            .boxed(),
        )
    }
}
2732
2733impl Collaborator {
2734 fn from_proto(
2735 message: proto::Collaborator,
2736 user_store: &ModelHandle<UserStore>,
2737 cx: &mut AsyncAppContext,
2738 ) -> impl Future<Output = Result<Self>> {
2739 let user = user_store.update(cx, |user_store, cx| {
2740 user_store.fetch_user(message.user_id, cx)
2741 });
2742
2743 async move {
2744 Ok(Self {
2745 peer_id: PeerId(message.peer_id),
2746 user: user.await?,
2747 replica_id: message.replica_id as ReplicaId,
2748 })
2749 }
2750 }
2751}
2752
2753impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
2754 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
2755 Self {
2756 worktree_id,
2757 path: path.as_ref().into(),
2758 }
2759 }
2760}
2761
2762impl From<lsp::CreateFileOptions> for fs::CreateOptions {
2763 fn from(options: lsp::CreateFileOptions) -> Self {
2764 Self {
2765 overwrite: options.overwrite.unwrap_or(false),
2766 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2767 }
2768 }
2769}
2770
2771impl From<lsp::RenameFileOptions> for fs::RenameOptions {
2772 fn from(options: lsp::RenameFileOptions) -> Self {
2773 Self {
2774 overwrite: options.overwrite.unwrap_or(false),
2775 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2776 }
2777 }
2778}
2779
2780impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
2781 fn from(options: lsp::DeleteFileOptions) -> Self {
2782 Self {
2783 recursive: options.recursive.unwrap_or(false),
2784 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
2785 }
2786 }
2787}
2788
2789#[cfg(test)]
2790mod tests {
2791 use super::{Event, *};
2792 use client::test::FakeHttpClient;
2793 use fs::RealFs;
2794 use futures::StreamExt;
2795 use gpui::test::subscribe;
2796 use language::{
2797 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageRegistry,
2798 LanguageServerConfig, Point,
2799 };
2800 use lsp::Url;
2801 use serde_json::json;
2802 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
2803 use unindent::Unindent as _;
2804 use util::test::temp_tree;
2805 use worktree::WorktreeHandle as _;
2806
    // Verifies that a worktree opened through a symlink is scanned correctly
    // (symlinked contents share inodes) and that fuzzy path search returns the
    // expected matches.
    #[gpui::test]
    async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        // One symlink to the root itself, one inside the tree.
        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = Project::test(Arc::new(RealFs), &mut cx);

        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(&root_link_path, false, cx)
            })
            .await
            .unwrap();

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;
        cx.read(|cx| {
            let tree = tree.read(cx);
            assert_eq!(tree.file_count(), 5);
            // Both paths must resolve to the same inode through the symlink.
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        let cancel_flag = Default::default();
        let results = project
            .read_with(&cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
2869
    // Drives a fake language server's progress notifications and published
    // diagnostics, asserting the project emits the corresponding events and
    // that diagnostics become queryable from the affected buffer.
    #[gpui::test]
    async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
        let (language_server_config, mut fake_server) = LanguageServerConfig::fake(&cx).await;
        let progress_token = language_server_config
            .disk_based_diagnostics_progress_token
            .clone()
            .unwrap();

        let mut languages = LanguageRegistry::new();
        languages.add(Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        )));

        let dir = temp_tree(json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }));

        let http_client = FakeHttpClient::with_404_response();
        let client = Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));

        let project = cx.update(|cx| {
            Project::local(
                client,
                user_store,
                Arc::new(languages),
                Arc::new(RealFs),
                cx,
            )
        });

        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(&mut cx, |project, cx| {
                project.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Path::new("b.rs").into(),
                    },
                    cx,
                )
            })
            .await
            .unwrap();

        let mut events = subscribe(&project, &mut cx);

        fake_server.start_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Overlapping progress spans: the next asserted event is the
        // diagnostics update, so no duplicate Started event may be emitted.
        fake_server.start_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        fake_server.start_progress(&progress_token).await;

        fake_server
            .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path(dir.path().join("a.rs")).unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            })
            .await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated(ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("a.rs"))
            })
        );

        // Updated/Finished are emitted only after all progress spans end.
        fake_server.end_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        // The published diagnostic is now queryable from the buffer snapshot.
        buffer.read_with(&cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });
    }
3001
    // A worktree containing only directories should yield no path-search
    // results (directories are not match candidates).
    #[gpui::test]
    async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "dir1": {},
                "dir2": {
                    "dir3": {}
                }
            }
        }));

        let project = Project::test(Arc::new(RealFs), &mut cx);
        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(&dir.path(), false, cx)
            })
            .await
            .unwrap();

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        let cancel_flag = Default::default();
        let results = project
            .read_with(&cx, |project, cx| {
                project.match_paths("dir", false, false, 10, &cancel_flag, cx)
            })
            .await;

        assert!(results.is_empty());
    }
3033
    // Go-to-definition whose target lies outside the open worktree: the target
    // file should be opened in a new *weak* worktree that is released when the
    // definition result is dropped.
    #[gpui::test]
    async fn test_definition(mut cx: gpui::TestAppContext) {
        let (language_server_config, mut fake_server) = LanguageServerConfig::fake(&cx).await;

        let mut languages = LanguageRegistry::new();
        languages.add(Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        )));

        let dir = temp_tree(json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }));

        let http_client = FakeHttpClient::with_404_response();
        let client = Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project = cx.update(|cx| {
            Project::local(
                client,
                user_store,
                Arc::new(languages),
                Arc::new(RealFs),
                cx,
            )
        });

        // Note: only b.rs is opened as a worktree; a.rs is outside it.
        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(dir.path().join("b.rs"), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Cause worktree to start the fake language server
        let buffer = project
            .update(&mut cx, |project, cx| {
                project.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Path::new("").into(),
                    },
                    cx,
                )
            })
            .await
            .unwrap();
        let definitions =
            project.update(&mut cx, |project, cx| project.definition(&buffer, 22, cx));
        let (request_id, request) = fake_server
            .receive_request::<lsp::request::GotoDefinition>()
            .await;
        let request_params = request.text_document_position_params;
        assert_eq!(
            request_params.text_document.uri.to_file_path().unwrap(),
            dir.path().join("b.rs")
        );
        assert_eq!(request_params.position, lsp::Position::new(0, 22));

        // The fake server points the definition at a.rs, outside the worktree.
        fake_server
            .respond(
                request_id,
                Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
                    lsp::Url::from_file_path(dir.path().join("a.rs")).unwrap(),
                    lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                ))),
            )
            .await;
        let mut definitions = definitions.await.unwrap();
        assert_eq!(definitions.len(), 1);
        let definition = definitions.pop().unwrap();
        cx.update(|cx| {
            let target_buffer = definition.target_buffer.read(cx);
            assert_eq!(
                target_buffer
                    .file()
                    .unwrap()
                    .as_local()
                    .unwrap()
                    .abs_path(cx),
                dir.path().join("a.rs")
            );
            assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
            // a.rs now appears as an additional weak worktree.
            assert_eq!(
                list_worktrees(&project, cx),
                [
                    (dir.path().join("b.rs"), false),
                    (dir.path().join("a.rs"), true)
                ]
            );

            drop(definition);
        });
        // Dropping the definition result releases the weak worktree.
        cx.read(|cx| {
            assert_eq!(
                list_worktrees(&project, cx),
                [(dir.path().join("b.rs"), false)]
            );
        });

        // Helper: (absolute path, is_weak) for each of the project's worktrees.
        fn list_worktrees(project: &ModelHandle<Project>, cx: &AppContext) -> Vec<(PathBuf, bool)> {
            project
                .read(cx)
                .worktrees(cx)
                .map(|worktree| {
                    let worktree = worktree.read(cx);
                    (
                        worktree.as_local().unwrap().abs_path().to_path_buf(),
                        worktree.is_weak(),
                    )
                })
                .collect::<Vec<_>>()
        }
    }
3157
    // Saving an edited buffer writes its full contents back through the Fs.
    #[gpui::test]
    async fn test_save_file(mut cx: gpui::TestAppContext) {
        let fs = Arc::new(FakeFs::new(cx.background()));
        fs.insert_tree(
            "/dir",
            json!({
                "file1": "the old contents",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), &mut cx);
        let worktree_id = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/dir", false, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
            .await
            .unwrap();
        buffer
            .update(&mut cx, |buffer, cx| {
                assert_eq!(buffer.text(), "the old contents");
                // Large edit (~160 KB) to exercise a non-trivial write.
                buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
                buffer.save(cx)
            })
            .await
            .unwrap();

        let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
        assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
    }
3195
    // Same as test_save_file, but the worktree root *is* the file itself, so
    // the buffer is opened at the empty relative path.
    #[gpui::test]
    async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
        let fs = Arc::new(FakeFs::new(cx.background()));
        fs.insert_tree(
            "/dir",
            json!({
                "file1": "the old contents",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), &mut cx);
        let worktree_id = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/dir/file1", false, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
            .await
            .unwrap();
        buffer
            .update(&mut cx, |buffer, cx| {
                buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
                buffer.save(cx)
            })
            .await
            .unwrap();

        let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
        assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
    }
3232
3233 #[gpui::test(retries = 5)]
3234 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3235 let dir = temp_tree(json!({
3236 "a": {
3237 "file1": "",
3238 "file2": "",
3239 "file3": "",
3240 },
3241 "b": {
3242 "c": {
3243 "file4": "",
3244 "file5": "",
3245 }
3246 }
3247 }));
3248
3249 let project = Project::test(Arc::new(RealFs), &mut cx);
3250 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3251
3252 let (tree, _) = project
3253 .update(&mut cx, |p, cx| {
3254 p.find_or_create_local_worktree(dir.path(), false, cx)
3255 })
3256 .await
3257 .unwrap();
3258 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3259
3260 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3261 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3262 async move { buffer.await.unwrap() }
3263 };
3264 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3265 tree.read_with(cx, |tree, _| {
3266 tree.entry_for_path(path)
3267 .expect(&format!("no entry for path {}", path))
3268 .id
3269 })
3270 };
3271
3272 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3273 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3274 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3275 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3276
3277 let file2_id = id_for_path("a/file2", &cx);
3278 let file3_id = id_for_path("a/file3", &cx);
3279 let file4_id = id_for_path("b/c/file4", &cx);
3280
3281 // Wait for the initial scan.
3282 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3283 .await;
3284
3285 // Create a remote copy of this worktree.
3286 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.snapshot());
3287 let (remote, load_task) = cx.update(|cx| {
3288 Worktree::remote(
3289 1,
3290 1,
3291 initial_snapshot.to_proto(&Default::default(), Default::default()),
3292 rpc.clone(),
3293 cx,
3294 )
3295 });
3296 load_task.await;
3297
3298 cx.read(|cx| {
3299 assert!(!buffer2.read(cx).is_dirty());
3300 assert!(!buffer3.read(cx).is_dirty());
3301 assert!(!buffer4.read(cx).is_dirty());
3302 assert!(!buffer5.read(cx).is_dirty());
3303 });
3304
3305 // Rename and delete files and directories.
3306 tree.flush_fs_events(&cx).await;
3307 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3308 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3309 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3310 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3311 tree.flush_fs_events(&cx).await;
3312
3313 let expected_paths = vec![
3314 "a",
3315 "a/file1",
3316 "a/file2.new",
3317 "b",
3318 "d",
3319 "d/file3",
3320 "d/file4",
3321 ];
3322
3323 cx.read(|app| {
3324 assert_eq!(
3325 tree.read(app)
3326 .paths()
3327 .map(|p| p.to_str().unwrap())
3328 .collect::<Vec<_>>(),
3329 expected_paths
3330 );
3331
3332 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3333 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3334 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3335
3336 assert_eq!(
3337 buffer2.read(app).file().unwrap().path().as_ref(),
3338 Path::new("a/file2.new")
3339 );
3340 assert_eq!(
3341 buffer3.read(app).file().unwrap().path().as_ref(),
3342 Path::new("d/file3")
3343 );
3344 assert_eq!(
3345 buffer4.read(app).file().unwrap().path().as_ref(),
3346 Path::new("d/file4")
3347 );
3348 assert_eq!(
3349 buffer5.read(app).file().unwrap().path().as_ref(),
3350 Path::new("b/c/file5")
3351 );
3352
3353 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3354 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3355 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3356 assert!(buffer5.read(app).file().unwrap().is_deleted());
3357 });
3358
3359 // Update the remote worktree. Check that it becomes consistent with the
3360 // local worktree.
3361 remote.update(&mut cx, |remote, cx| {
3362 let update_message =
3363 tree.read(cx)
3364 .snapshot()
3365 .build_update(&initial_snapshot, 1, 1, true);
3366 remote
3367 .as_remote_mut()
3368 .unwrap()
3369 .snapshot
3370 .apply_remote_update(update_message)
3371 .unwrap();
3372
3373 assert_eq!(
3374 remote
3375 .paths()
3376 .map(|p| p.to_str().unwrap())
3377 .collect::<Vec<_>>(),
3378 expected_paths
3379 );
3380 });
3381 }
3382
3383 #[gpui::test]
3384 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3385 let fs = Arc::new(FakeFs::new(cx.background()));
3386 fs.insert_tree(
3387 "/the-dir",
3388 json!({
3389 "a.txt": "a-contents",
3390 "b.txt": "b-contents",
3391 }),
3392 )
3393 .await;
3394
3395 let project = Project::test(fs.clone(), &mut cx);
3396 let worktree_id = project
3397 .update(&mut cx, |p, cx| {
3398 p.find_or_create_local_worktree("/the-dir", false, cx)
3399 })
3400 .await
3401 .unwrap()
3402 .0
3403 .read_with(&cx, |tree, _| tree.id());
3404
3405 // Spawn multiple tasks to open paths, repeating some paths.
3406 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3407 (
3408 p.open_buffer((worktree_id, "a.txt"), cx),
3409 p.open_buffer((worktree_id, "b.txt"), cx),
3410 p.open_buffer((worktree_id, "a.txt"), cx),
3411 )
3412 });
3413
3414 let buffer_a_1 = buffer_a_1.await.unwrap();
3415 let buffer_a_2 = buffer_a_2.await.unwrap();
3416 let buffer_b = buffer_b.await.unwrap();
3417 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3418 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3419
3420 // There is only one buffer per path.
3421 let buffer_a_id = buffer_a_1.id();
3422 assert_eq!(buffer_a_2.id(), buffer_a_id);
3423
3424 // Open the same path again while it is still open.
3425 drop(buffer_a_1);
3426 let buffer_a_3 = project
3427 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3428 .await
3429 .unwrap();
3430
3431 // There's still only one buffer per path.
3432 assert_eq!(buffer_a_3.id(), buffer_a_id);
3433 }
3434
    // Verifies the buffer's dirty-tracking state machine and the events it
    // emits: Dirtied on first edit, Saved after `did_save`, and
    // FileHandleChanged when the underlying file is deleted on disk.
    #[gpui::test]
    async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
        use std::fs;

        let dir = temp_tree(json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }));

        let project = Project::test(Arc::new(RealFs), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());

        // Drain any pending FS events and wait for the initial scan so later
        // event assertions aren't polluted by startup noise.
        worktree.flush_fs_events(&cx).await;
        worktree
            .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let buffer1 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
            .await
            .unwrap();
        // Collects every buffer event emitted after the subscription below.
        let events = Rc::new(RefCell::new(Vec::new()));

        // initially, the buffer isn't dirty.
        buffer1.update(&mut cx, |buffer, cx| {
            cx.subscribe(&buffer1, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();

            assert!(!buffer.is_dirty());
            assert!(events.borrow().is_empty());

            buffer.edit(vec![1..2], "", cx);
        });

        // after the first edit, the buffer is dirty, and emits a dirtied event.
        buffer1.update(&mut cx, |buffer, cx| {
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[language::Event::Edited, language::Event::Dirtied]
            );
            events.borrow_mut().clear();
            // Simulate a save by reporting the current version as persisted.
            buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
        });

        // after saving, the buffer is not dirty, and emits a saved event.
        buffer1.update(&mut cx, |buffer, cx| {
            assert!(!buffer.is_dirty());
            assert_eq!(*events.borrow(), &[language::Event::Saved]);
            events.borrow_mut().clear();

            buffer.edit(vec![1..1], "B", cx);
            buffer.edit(vec![2..2], "D", cx);
        });

        // after editing again, the buffer is dirty, and emits another dirty event.
        buffer1.update(&mut cx, |buffer, cx| {
            assert!(buffer.text() == "aBDc");
            assert!(buffer.is_dirty());
            // Note: only the first edit after a save emits Dirtied.
            assert_eq!(
                *events.borrow(),
                &[
                    language::Event::Edited,
                    language::Event::Dirtied,
                    language::Event::Edited,
                ],
            );
            events.borrow_mut().clear();

            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
            buffer.edit([1..3], "", cx);
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
        });

        assert_eq!(*events.borrow(), &[language::Event::Edited]);

        // When a file is deleted, the buffer is considered dirty.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer2 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
            .await
            .unwrap();
        buffer2.update(&mut cx, |_, cx| {
            cx.subscribe(&buffer2, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        fs::remove_file(dir.path().join("file2")).unwrap();
        buffer2.condition(&cx, |b, _| b.is_dirty()).await;
        assert_eq!(
            *events.borrow(),
            &[language::Event::Dirtied, language::Event::FileHandleChanged]
        );

        // When a file is already dirty when deleted, we don't emit a Dirtied event.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer3 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
            .await
            .unwrap();
        buffer3.update(&mut cx, |_, cx| {
            cx.subscribe(&buffer3, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        worktree.flush_fs_events(&cx).await;
        // Dirty the buffer *before* deleting its file.
        buffer3.update(&mut cx, |buffer, cx| {
            buffer.edit(Some(0..0), "x", cx);
        });
        events.borrow_mut().clear();
        fs::remove_file(dir.path().join("file3")).unwrap();
        buffer3
            .condition(&cx, |_, _| !events.borrow().is_empty())
            .await;
        assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
        cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
    }
3571
    // Verifies buffer behavior when the file changes on disk underneath it:
    // an unmodified buffer silently reloads, while a modified buffer keeps
    // its edits and is flagged as conflicted.
    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
        use std::fs;

        let initial_contents = "aaa\nbbbbb\nc\n";
        let dir = temp_tree(json!({ "the-file": initial_contents }));

        let project = Project::test(Arc::new(RealFs), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        // Wait for the initial scan before touching the file.
        worktree
            .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let abs_path = dir.path().join("the-file");
        let buffer = project
            .update(&mut cx, |p, cx| {
                p.open_buffer((worktree_id, "the-file"), cx)
            })
            .await
            .unwrap();

        // TODO
        // Add a cursor on each row.
        // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
        //     assert!(!buffer.is_dirty());
        //     buffer.add_selection_set(
        //         &(0..3)
        //             .map(|row| Selection {
        //                 id: row as usize,
        //                 start: Point::new(row, 1),
        //                 end: Point::new(row, 1),
        //                 reversed: false,
        //                 goal: SelectionGoal::None,
        //             })
        //             .collect::<Vec<_>>(),
        //         cx,
        //     )
        // });

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(&cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        // The reload must not leave the buffer dirty or conflicted.
        buffer.update(&mut cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer
        buffer.update(&mut cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }
3671
    // Verifies that `update_diagnostics` groups a primary LSP diagnostic with
    // its hint diagnostics (linked via `related_information`), assigning each
    // group a shared `group_id` and a single `is_primary` entry.
    #[gpui::test]
    async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
        let fs = Arc::new(FakeFs::new(cx.background()));
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        // Build a publish-diagnostics message in the shape rust-analyzer
        // produces: primaries point at their hints via related_information,
        // and each hint points back at its primary ("original diagnostic").
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                // Group 0: a warning with one hint.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                // Group 1: an error with two hints at a different range.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(&mut cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());

        // All entries are exposed in position order; hints carry the group id
        // of their primary diagnostic with is_primary == false.
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        // Each group can also be fetched as a unit by its group id.
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
3930}