1pub mod fs;
2mod ignore;
3pub mod worktree;
4
5use anyhow::{anyhow, Result};
6use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
7use clock::ReplicaId;
8use collections::{hash_map, HashMap, HashSet};
9use futures::Future;
10use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
11use gpui::{
12 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
13 UpgradeModelHandle, WeakModelHandle,
14};
15use language::{
16 point_from_lsp,
17 proto::{deserialize_anchor, serialize_anchor},
18 range_from_lsp, AnchorRangeExt, Bias, Buffer, CodeAction, Completion, CompletionLabel,
19 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
20 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
21};
22use lsp::{DiagnosticSeverity, LanguageServer};
23use postage::{broadcast, prelude::Stream, sink::Sink, watch};
24use smol::block_on;
25use std::{
26 convert::TryInto,
27 ops::Range,
28 path::{Path, PathBuf},
29 sync::{atomic::AtomicBool, Arc},
30 time::Instant,
31};
32use util::{post_inc, ResultExt, TryFutureExt as _};
33
34pub use fs::*;
35pub use worktree::*;
36
/// Root model of a collaborative project, either hosted locally or joined
/// remotely. Owns the worktrees, the set of open buffers, the per-worktree
/// language servers, and the collaborator roster.
pub struct Project {
    /// Worktrees in this project; held strongly or weakly (see `WorktreeHandle`).
    worktrees: Vec<WorktreeHandle>,
    /// The most recently activated entry, if any.
    active_entry: Option<ProjectEntry>,
    /// Registry used to pick a language (and server) for newly opened buffers.
    languages: Arc<LanguageRegistry>,
    /// Running language servers, keyed by worktree and language name.
    language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
    client: Arc<client::Client>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    /// Local-vs-remote state (shared flag, remote project id, replica id).
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    /// RPC subscriptions tied to the current remote project id.
    subscriptions: Vec<client::Subscription>,
    /// Count of language servers currently running disk-based diagnostics
    /// (incremented/decremented as start/finish events arrive).
    language_servers_with_diagnostics_running: isize,
    /// Buffers known to this project, keyed by buffer remote id.
    open_buffers: HashMap<u64, OpenBuffer>,
    /// Broadcast channel notified whenever a buffer finishes opening.
    opened_buffer: broadcast::Sender<()>,
    /// In-flight buffer loads, so concurrent opens of the same path share one task.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    /// Buffers shared with each remote peer, keyed by peer then buffer id.
    shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
}
57
/// State of a buffer tracked by the project: fully loaded (held weakly so it
/// can be dropped when unused), or still loading, with remote operations
/// queued until the load completes (applied in `register_buffer`).
enum OpenBuffer {
    Loaded(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}
62
/// A handle to a worktree, held strongly (keeps the worktree alive) or weakly
/// (allows the worktree to be dropped; `Project::worktrees` filters out
/// expired weak handles).
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
67
/// Client-side state distinguishing a locally hosted project from one joined
/// over RPC.
enum ProjectClientState {
    Local {
        /// Whether the local project is currently shared with collaborators.
        is_shared: bool,
        /// Sender/receiver for the server-assigned project id, which changes
        /// as the RPC connection comes and goes.
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        /// Background task that (re-)registers the project on reconnect;
        /// kept alive only by this handle.
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        /// Set when the host stops sharing; the project becomes read-only.
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
    },
}
81
/// A peer participating in this project, pairing their user identity with
/// their connection and CRDT replica ids.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
88
/// Events emitted by `Project` for observers (e.g. UI panes).
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntry>),
    WorktreeRemoved(WorktreeId),
    /// Disk-based diagnostics (e.g. an external checker) started running.
    DiskBasedDiagnosticsStarted,
    /// New disk-based diagnostics arrived while the checker is still running.
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    /// Diagnostics changed for the given path.
    DiagnosticsUpdated(ProjectPath),
}
98
/// A path within the project: a worktree plus a path relative to that
/// worktree's root.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}
104
/// Per-severity counts of primary diagnostics for a file (see
/// `DiagnosticSummary::new`).
#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}
112
/// Result of a go-to-definition request: the buffer containing the target and
/// the anchored range of the definition within it.
#[derive(Debug)]
pub struct Definition {
    pub target_buffer: ModelHandle<Buffer>,
    pub target_range: Range<language::Anchor>,
}
118
/// A set of edits spanning multiple buffers, mapping each affected buffer to
/// the transaction applied to it (used e.g. for multi-buffer format results).
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
121
122impl DiagnosticSummary {
123 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
124 let mut this = Self {
125 error_count: 0,
126 warning_count: 0,
127 info_count: 0,
128 hint_count: 0,
129 };
130
131 for entry in diagnostics {
132 if entry.diagnostic.is_primary {
133 match entry.diagnostic.severity {
134 DiagnosticSeverity::ERROR => this.error_count += 1,
135 DiagnosticSeverity::WARNING => this.warning_count += 1,
136 DiagnosticSeverity::INFORMATION => this.info_count += 1,
137 DiagnosticSeverity::HINT => this.hint_count += 1,
138 _ => {}
139 }
140 }
141 }
142
143 this
144 }
145
146 pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
147 proto::DiagnosticSummary {
148 path: path.to_string_lossy().to_string(),
149 error_count: self.error_count as u32,
150 warning_count: self.warning_count as u32,
151 info_count: self.info_count as u32,
152 hint_count: self.hint_count as u32,
153 }
154 }
155}
156
/// Identifies a single entry (file or directory) in a worktree by the
/// worktree's id and the entry's id within it.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ProjectEntry {
    pub worktree_id: WorktreeId,
    pub entry_id: usize,
}
162
163impl Project {
    /// Registers all of `Project`'s RPC message and request handlers on the
    /// client. Must be called once at startup before any project messages can
    /// be dispatched to project models.
    pub fn init(client: &Arc<Client>) {
        // Fire-and-forget messages.
        client.add_entity_message_handler(Self::handle_add_collaborator);
        client.add_entity_message_handler(Self::handle_buffer_reloaded);
        client.add_entity_message_handler(Self::handle_buffer_saved);
        client.add_entity_message_handler(Self::handle_close_buffer);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
        client.add_entity_message_handler(Self::handle_remove_collaborator);
        client.add_entity_message_handler(Self::handle_share_worktree);
        client.add_entity_message_handler(Self::handle_unregister_worktree);
        client.add_entity_message_handler(Self::handle_unshare_project);
        client.add_entity_message_handler(Self::handle_update_buffer_file);
        client.add_entity_message_handler(Self::handle_update_buffer);
        client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
        client.add_entity_message_handler(Self::handle_update_worktree);
        // Request/response handlers.
        client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_entity_request_handler(Self::handle_apply_code_action);
        client.add_entity_request_handler(Self::handle_format_buffers);
        client.add_entity_request_handler(Self::handle_get_code_actions);
        client.add_entity_request_handler(Self::handle_get_completions);
        client.add_entity_request_handler(Self::handle_get_definition);
        client.add_entity_request_handler(Self::handle_open_buffer);
        client.add_entity_request_handler(Self::handle_save_buffer);
    }
188
    /// Creates a new locally hosted project. Spawns a background task that
    /// watches the RPC connection status: on connect it registers the project
    /// (and each local worktree) with the server and records the assigned
    /// remote id; on disconnect it clears the remote id.
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            // Weakly-held task: it stops on its own once the project is dropped.
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.recv().await {
                            // Bail silently if the project has been dropped.
                            if let Some(this) = this.upgrade(&cx) {
                                // Connected: ask the server for a project id.
                                let remote_id = if let client::Status::Connected { .. } = status {
                                    let response = rpc.request(proto::RegisterProject {}).await?;
                                    Some(response.project_id)
                                } else {
                                    None
                                };

                                if let Some(project_id) = remote_id {
                                    // Register every current worktree under the
                                    // new project id. Registration futures are
                                    // collected first, then awaited outside the
                                    // model update.
                                    let mut registrations = Vec::new();
                                    this.update(&mut cx, |this, cx| {
                                        for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                                            registrations.push(worktree.update(
                                                cx,
                                                |worktree, cx| {
                                                    let worktree = worktree.as_local_mut().unwrap();
                                                    worktree.register(project_id, cx)
                                                },
                                            ));
                                        }
                                    });
                                    for registration in registrations {
                                        registration.await?;
                                    }
                                }
                                this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                open_buffers: Default::default(),
                loading_buffers: Default::default(),
                shared_buffers: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: broadcast::channel(1).0,
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
            }
        })
    }
262
    /// Joins an existing remote project by id. Authenticates and connects the
    /// client, requests to join the project, reconstructs its worktrees from
    /// the response, and loads the collaborator roster before returning the
    /// new project model.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(&cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let replica_id = response.replica_id as ReplicaId;

        // Build remote worktrees; their load tasks run in the background.
        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let this = cx.add_model(|cx| {
            let mut this = Self {
                worktrees: Vec::new(),
                open_buffers: Default::default(),
                loading_buffers: Default::default(),
                opened_buffer: broadcast::channel(1).0,
                shared_buffers: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                // Route incoming messages for this remote entity to the model.
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client,
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        // Fetch all collaborating users in one batch, then build the roster.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
337
    /// Test helper: builds a local project with an empty language registry and
    /// a fake HTTP client that answers 404 to everything.
    #[cfg(any(test, feature = "test-support"))]
    pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::new());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
    }
346
347 #[cfg(any(test, feature = "test-support"))]
348 pub fn shared_buffer(&self, peer_id: PeerId, remote_id: u64) -> Option<ModelHandle<Buffer>> {
349 let result = self
350 .shared_buffers
351 .get(&peer_id)
352 .and_then(|buffers| buffers.get(&remote_id))
353 .cloned();
354 if result.is_none() {
355 dbg!(&self.shared_buffers);
356 }
357 result
358 }
359
360 #[cfg(any(test, feature = "test-support"))]
361 pub fn has_buffered_operations(&self) -> bool {
362 self.open_buffers
363 .values()
364 .any(|buffer| matches!(buffer, OpenBuffer::Loading(_)))
365 }
366
    /// The filesystem implementation this project performs I/O through.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
370
    /// Records the server-assigned project id (or clears it on `None`) and
    /// re-creates the RPC subscription that routes messages for that id to
    /// this model. No-op on the watch channel for remote projects.
    fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            *remote_id_tx.borrow_mut() = remote_id;
        }

        // Drop any subscription tied to a previous id before subscribing anew.
        self.subscriptions.clear();
        if let Some(remote_id) = remote_id {
            self.subscriptions
                .push(self.client.add_model_for_remote_entity(remote_id, cx));
        }
    }
382
    /// The server-side project id: always known for remote projects, and known
    /// for local projects only while they are registered with the server.
    pub fn remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }
389
390 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
391 let mut id = None;
392 let mut watch = None;
393 match &self.client_state {
394 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
395 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
396 }
397
398 async move {
399 if let Some(id) = id {
400 return id;
401 }
402 let mut watch = watch.unwrap();
403 loop {
404 let id = *watch.borrow();
405 if let Some(id) = id {
406 return id;
407 }
408 watch.recv().await;
409 }
410 }
411 }
412
    /// This client's CRDT replica id: the host of a local project is always
    /// replica 0; remote participants use the id assigned on join.
    pub fn replica_id(&self) -> ReplicaId {
        match &self.client_state {
            ProjectClientState::Local { .. } => 0,
            ProjectClientState::Remote { replica_id, .. } => *replica_id,
        }
    }
419
    /// The current collaborators, keyed by their peer connection id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
423
    /// Iterates over the live worktrees, silently skipping weak handles whose
    /// worktree has already been dropped.
    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }
432
433 pub fn worktree_for_id(
434 &self,
435 id: WorktreeId,
436 cx: &AppContext,
437 ) -> Option<ModelHandle<Worktree>> {
438 self.worktrees(cx)
439 .find(|worktree| worktree.read(cx).id() == id)
440 }
441
    /// Starts sharing this local project with collaborators: marks it shared,
    /// tells the server, and shares every local worktree. Fails if the project
    /// is remote or has no remote id yet (i.e. is not registered).
    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, _| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    // NOTE(review): the shared flag is set before the server
                    // round-trip; if the RPC below fails the flag stays true.
                    *is_shared = true;
                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;
            // Collect the per-worktree share tasks inside the model update,
            // then await them outside it.
            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
478
479 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
480 let rpc = self.client.clone();
481 cx.spawn(|this, mut cx| async move {
482 let project_id = this.update(&mut cx, |this, _| {
483 if let ProjectClientState::Local {
484 is_shared,
485 remote_id_rx,
486 ..
487 } = &mut this.client_state
488 {
489 *is_shared = false;
490 remote_id_rx
491 .borrow()
492 .ok_or_else(|| anyhow!("no project id"))
493 } else {
494 Err(anyhow!("can't share a remote project"))
495 }
496 })?;
497
498 rpc.send(proto::UnshareProject { project_id })?;
499 this.update(&mut cx, |this, cx| {
500 this.collaborators.clear();
501 this.shared_buffers.clear();
502 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
503 worktree.update(cx, |worktree, _| {
504 worktree.as_local_mut().unwrap().unshare();
505 });
506 }
507 cx.notify()
508 });
509 Ok(())
510 })
511 }
512
513 pub fn is_read_only(&self) -> bool {
514 match &self.client_state {
515 ProjectClientState::Local { .. } => false,
516 ProjectClientState::Remote {
517 sharing_has_stopped,
518 ..
519 } => *sharing_has_stopped,
520 }
521 }
522
523 pub fn is_local(&self) -> bool {
524 match &self.client_state {
525 ProjectClientState::Local { .. } => true,
526 ProjectClientState::Remote { .. } => false,
527 }
528 }
529
    /// Whether this project was joined remotely (the inverse of `is_local`).
    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }
533
    /// Opens the buffer for the given project path, deduplicating concurrent
    /// requests: an already-open buffer is returned immediately, an in-flight
    /// load is awaited and shared, and otherwise a fresh load is started
    /// (locally or over RPC depending on the worktree).
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    // Publish the result to every waiter via the watch channel.
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        // Once no loads remain, drop `Loading` entries — their
                        // queued operations can no longer be applied.
                        if this.loading_buffers.is_empty() {
                            this.open_buffers
                                .retain(|_, buffer| matches!(buffer, OpenBuffer::Loaded(_)))
                        }

                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait on the watch until the load result (ours or a concurrent
        // requester's) is published.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.recv().await;
            }
        })
    }
599
    /// Loads a buffer from a local worktree and registers it with the project
    /// once loaded. Fails if the worktree is dropped while the load is in
    /// flight.
    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        // Hold the worktree weakly so the pending load doesn't keep it alive.
        let worktree = worktree.downgrade();
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            let worktree = worktree
                .upgrade(&cx)
                .ok_or_else(|| anyhow!("worktree was removed"))?;
            this.update(&mut cx, |this, cx| {
                this.register_buffer(&buffer, Some(&worktree), cx)
            })?;
            Ok(buffer)
        })
    }
622
    /// Requests a buffer from the host over RPC and deserializes it into a
    /// local buffer model.
    // NOTE(review): `self.remote_id().unwrap()` assumes this is only called
    // for remote projects (which always have an id) — confirm callers.
    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBuffer {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }
647
    /// Opens a buffer for a file URI received from a language server. If no
    /// existing worktree contains the path, a new (weak) worktree is created
    /// rooted at the file itself, and the given language server is associated
    /// with it so the buffer keeps its LSP features.
    fn open_local_buffer_from_lsp_path(
        &mut self,
        abs_path: lsp::Url,
        lang_name: String,
        lang_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                // No worktree covers this path: make one rooted at the file.
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, true, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers
                        .insert((worktree.read(cx).id(), lang_name), lang_server);
                });
                // The worktree root *is* the file, so the relative path is empty.
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
684
    /// Saves a buffer to a new absolute path ("save as"), finding or creating
    /// a local worktree for that path, then re-assigning the buffer's language
    /// based on its new file name.
    pub fn save_buffer_as(
        &self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            // The file (and possibly extension) changed: re-pick the language.
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
            });
            Ok(())
        })
    }
708
709 #[cfg(any(test, feature = "test-support"))]
710 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
711 let path = path.into();
712 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
713 self.open_buffers.iter().any(|(_, buffer)| {
714 if let Some(buffer) = buffer.upgrade(cx) {
715 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
716 if file.worktree == worktree && file.path() == &path.path {
717 return true;
718 }
719 }
720 }
721 false
722 })
723 } else {
724 false
725 }
726 }
727
    /// Returns the open buffer for the given path, if one exists. As a side
    /// effect, prunes entries whose buffer has been dropped (the `retain`
    /// removes any entry that fails to upgrade).
    fn get_open_buffer(
        &mut self,
        path: &ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Option<ModelHandle<Buffer>> {
        let mut result = None;
        let worktree = self.worktree_for_id(path.worktree_id, cx)?;
        self.open_buffers.retain(|_, buffer| {
            if let Some(buffer) = buffer.upgrade(cx) {
                if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                    if file.worktree == worktree && file.path() == &path.path {
                        result = Some(buffer);
                    }
                }
                // Keep live entries whether or not they match.
                true
            } else {
                // Buffer was dropped: discard its entry.
                false
            }
        });
        result
    }
749
750 fn register_buffer(
751 &mut self,
752 buffer: &ModelHandle<Buffer>,
753 worktree: Option<&ModelHandle<Worktree>>,
754 cx: &mut ModelContext<Self>,
755 ) -> Result<()> {
756 match self.open_buffers.insert(
757 buffer.read(cx).remote_id(),
758 OpenBuffer::Loaded(buffer.downgrade()),
759 ) {
760 None => {}
761 Some(OpenBuffer::Loading(operations)) => {
762 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
763 }
764 Some(OpenBuffer::Loaded(_)) => Err(anyhow!("registered the same buffer twice"))?,
765 }
766 self.assign_language_to_buffer(&buffer, worktree, cx);
767 Ok(())
768 }
769
    /// Picks a language for the buffer from its file name, sets it, and (for
    /// local worktrees) starts or reuses the matching language server and
    /// applies any diagnostics previously stored for the path.
    ///
    /// The `Option<()>` return exists only so `?` can be used on the buffer's
    /// file; the function always ends by returning `None` and callers ignore
    /// the value.
    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        let (path, full_path) = {
            let file = buffer.read(cx).file()?;
            (file.path().clone(), file.full_path(cx))
        };

        // If the buffer has a language, set it and start/assign the language server
        if let Some(language) = self.languages.select_language(&full_path) {
            buffer.update(cx, |buffer, cx| {
                buffer.set_language(Some(language.clone()), cx);
            });

            // For local worktrees, start a language server if needed.
            // Also assign the language server and any previously stored diagnostics to the buffer.
            if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
                let worktree_id = local_worktree.id();
                let worktree_abs_path = local_worktree.abs_path().clone();

                // One server per (worktree, language); reuse if already running.
                let language_server = match self
                    .language_servers
                    .entry((worktree_id, language.name().to_string()))
                {
                    hash_map::Entry::Occupied(e) => Some(e.get().clone()),
                    hash_map::Entry::Vacant(e) => Self::start_language_server(
                        self.client.clone(),
                        language.clone(),
                        &worktree_abs_path,
                        cx,
                    )
                    .map(|server| e.insert(server).clone()),
                };

                buffer.update(cx, |buffer, cx| {
                    buffer.set_language_server(language_server, cx);
                });
            }
        }

        // Diagnostics are applied even when no language was selected.
        if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
            if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
                buffer.update(cx, |buffer, cx| {
                    buffer.update_diagnostics(diagnostics, None, cx).log_err();
                });
            }
        }

        None
    }
823
    /// Starts a language server for `language` rooted at `worktree_path` and
    /// wires up its notifications: `PublishDiagnostics` and disk-based
    /// progress events are funneled through a channel into a task that
    /// updates project diagnostics and forwards start/finish notifications to
    /// collaborators. Returns `None` if the server fails to start.
    fn start_language_server(
        rpc: Arc<Client>,
        language: Arc<Language>,
        worktree_path: &Path,
        cx: &mut ModelContext<Self>,
    ) -> Option<Arc<LanguageServer>> {
        // Internal event type bridging LSP callbacks (which run off the main
        // thread) to the processing task below.
        enum LspEvent {
            DiagnosticsStart,
            DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
            DiagnosticsFinish,
        }

        let language_server = language
            .start_server(worktree_path, cx)
            .log_err()
            .flatten()?;
        let disk_based_sources = language
            .disk_based_diagnostic_sources()
            .cloned()
            .unwrap_or_default();
        let disk_based_diagnostics_progress_token =
            language.disk_based_diagnostics_progress_token().cloned();
        let has_disk_based_diagnostic_progress_token =
            disk_based_diagnostics_progress_token.is_some();
        let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();

        // Listen for `PublishDiagnostics` notifications.
        language_server
            .on_notification::<lsp::notification::PublishDiagnostics, _>({
                let diagnostics_tx = diagnostics_tx.clone();
                move |params| {
                    // Without a progress token there is no other start/finish
                    // signal, so bracket each update with synthetic ones.
                    if !has_disk_based_diagnostic_progress_token {
                        block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                    }
                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params))).ok();
                    if !has_disk_based_diagnostic_progress_token {
                        block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                    }
                }
            })
            .detach();

        // Listen for `Progress` notifications. Send an event when the language server
        // transitions between running jobs and not running any jobs.
        let mut running_jobs_for_this_server: i32 = 0;
        language_server
            .on_notification::<lsp::notification::Progress, _>(move |params| {
                let token = match params.token {
                    lsp::NumberOrString::Number(_) => None,
                    lsp::NumberOrString::String(token) => Some(token),
                };

                if token == disk_based_diagnostics_progress_token {
                    match params.value {
                        lsp::ProgressParamsValue::WorkDone(progress) => match progress {
                            lsp::WorkDoneProgress::Begin(_) => {
                                running_jobs_for_this_server += 1;
                                // Only the 0 -> 1 transition emits a start event.
                                if running_jobs_for_this_server == 1 {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                                }
                            }
                            lsp::WorkDoneProgress::End(_) => {
                                running_jobs_for_this_server -= 1;
                                // Only the 1 -> 0 transition emits a finish event.
                                if running_jobs_for_this_server == 0 {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                                }
                            }
                            _ => {}
                        },
                    }
                }
            })
            .detach();

        // Process all the LSP events.
        cx.spawn_weak(|this, mut cx| async move {
            while let Ok(message) = diagnostics_rx.recv().await {
                // Stop processing once the project has been dropped.
                let this = this.upgrade(&cx)?;
                match message {
                    LspEvent::DiagnosticsStart => {
                        this.update(&mut cx, |this, cx| {
                            this.disk_based_diagnostics_started(cx);
                            // Forward to collaborators when shared.
                            if let Some(project_id) = this.remote_id() {
                                rpc.send(proto::DiskBasedDiagnosticsUpdating { project_id })
                                    .log_err();
                            }
                        });
                    }
                    LspEvent::DiagnosticsUpdate(mut params) => {
                        language.process_diagnostics(&mut params);
                        this.update(&mut cx, |this, cx| {
                            this.update_diagnostics(params, &disk_based_sources, cx)
                                .log_err();
                        });
                    }
                    LspEvent::DiagnosticsFinish => {
                        this.update(&mut cx, |this, cx| {
                            this.disk_based_diagnostics_finished(cx);
                            if let Some(project_id) = this.remote_id() {
                                rpc.send(proto::DiskBasedDiagnosticsUpdated { project_id })
                                    .log_err();
                            }
                        });
                    }
                }
            }
            Some(())
        })
        .detach();

        Some(language_server)
    }
936
937 pub fn update_diagnostics(
938 &mut self,
939 params: lsp::PublishDiagnosticsParams,
940 disk_based_sources: &HashSet<String>,
941 cx: &mut ModelContext<Self>,
942 ) -> Result<()> {
943 let abs_path = params
944 .uri
945 .to_file_path()
946 .map_err(|_| anyhow!("URI is not a file"))?;
947 let mut next_group_id = 0;
948 let mut diagnostics = Vec::default();
949 let mut primary_diagnostic_group_ids = HashMap::default();
950 let mut sources_by_group_id = HashMap::default();
951 let mut supporting_diagnostic_severities = HashMap::default();
952 for diagnostic in ¶ms.diagnostics {
953 let source = diagnostic.source.as_ref();
954 let code = diagnostic.code.as_ref().map(|code| match code {
955 lsp::NumberOrString::Number(code) => code.to_string(),
956 lsp::NumberOrString::String(code) => code.clone(),
957 });
958 let range = range_from_lsp(diagnostic.range);
959 let is_supporting = diagnostic
960 .related_information
961 .as_ref()
962 .map_or(false, |infos| {
963 infos.iter().any(|info| {
964 primary_diagnostic_group_ids.contains_key(&(
965 source,
966 code.clone(),
967 range_from_lsp(info.location.range),
968 ))
969 })
970 });
971
972 if is_supporting {
973 if let Some(severity) = diagnostic.severity {
974 supporting_diagnostic_severities
975 .insert((source, code.clone(), range), severity);
976 }
977 } else {
978 let group_id = post_inc(&mut next_group_id);
979 let is_disk_based =
980 source.map_or(false, |source| disk_based_sources.contains(source));
981
982 sources_by_group_id.insert(group_id, source);
983 primary_diagnostic_group_ids
984 .insert((source, code.clone(), range.clone()), group_id);
985
986 diagnostics.push(DiagnosticEntry {
987 range,
988 diagnostic: Diagnostic {
989 code: code.clone(),
990 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
991 message: diagnostic.message.clone(),
992 group_id,
993 is_primary: true,
994 is_valid: true,
995 is_disk_based,
996 },
997 });
998 if let Some(infos) = &diagnostic.related_information {
999 for info in infos {
1000 if info.location.uri == params.uri && !info.message.is_empty() {
1001 let range = range_from_lsp(info.location.range);
1002 diagnostics.push(DiagnosticEntry {
1003 range,
1004 diagnostic: Diagnostic {
1005 code: code.clone(),
1006 severity: DiagnosticSeverity::INFORMATION,
1007 message: info.message.clone(),
1008 group_id,
1009 is_primary: false,
1010 is_valid: true,
1011 is_disk_based,
1012 },
1013 });
1014 }
1015 }
1016 }
1017 }
1018 }
1019
1020 for entry in &mut diagnostics {
1021 let diagnostic = &mut entry.diagnostic;
1022 if !diagnostic.is_primary {
1023 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1024 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1025 source,
1026 diagnostic.code.clone(),
1027 entry.range.clone(),
1028 )) {
1029 diagnostic.severity = severity;
1030 }
1031 }
1032 }
1033
1034 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1035 Ok(())
1036 }
1037
    /// Stores new diagnostics for a file: updates the matching open buffer (if
    /// any) and the owning local worktree, then emits `DiagnosticsUpdated`.
    ///
    /// Returns an error if no local worktree contains `abs_path` or the
    /// worktree is not local.
    pub fn update_diagnostic_entries(
        &mut self,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };

        // NOTE(review): buffers are matched by relative path only — the
        // worktree is not compared, so a buffer with the same relative path
        // in a different worktree could match; and only the first match is
        // updated (`break`). Confirm these assumptions hold for callers.
        for buffer in self.open_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| *file.path() == project_path.path)
                {
                    buffer.update(cx, |buffer, cx| {
                        buffer.update_diagnostics(diagnostics.clone(), version, cx)
                    })?;
                    break;
                }
            }
        }
        // Persist on the worktree so later-opened buffers receive them too.
        worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(project_path.path.clone(), diagnostics, cx)
        })?;
        cx.emit(Event::DiagnosticsUpdated(project_path));
        Ok(())
    }
1076
    /// Formats the given set of buffers, returning a `ProjectTransaction`
    /// mapping each edited buffer to the transaction its formatting produced.
    ///
    /// Local file-backed buffers are formatted by sending an LSP
    /// `textDocument/formatting` request to the language server registered
    /// for their (worktree, language) pair. Buffers from remote worktrees are
    /// batched into a single `FormatBuffers` RPC to the host and the returned
    /// project transaction is deserialized. If any buffer has no file, or any
    /// local buffer lacks a language or a running language server, the whole
    /// task resolves immediately with an empty transaction.
    ///
    /// When `push_to_history` is false, each captured transaction is removed
    /// from its buffer's undo history after being recorded.
    pub fn format(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<ProjectTransaction>> {
        // Partition buffers: local ones are formatted here via LSP; remote
        // ones are delegated to the host in one batched request.
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            let worktree;
            if let Some(file) = File::from_dyn(buffer.file()) {
                worktree = file.worktree.clone();
                if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
                    let lang_server;
                    if let Some(lang) = buffer.language() {
                        if let Some(server) = self
                            .language_servers
                            .get(&(worktree.read(cx).id(), lang.name().to_string()))
                        {
                            lang_server = server.clone();
                        } else {
                            // No running server for this buffer's language.
                            return Task::ready(Ok(Default::default()));
                        };
                    } else {
                        // Buffer has no language assigned.
                        return Task::ready(Ok(Default::default()));
                    }

                    local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
                } else {
                    remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                }
            } else {
                // Untitled buffers (no file) cannot be formatted.
                return Task::ready(Ok(Default::default()));
            }
        }

        // Remote buffers can only be formatted when the project has a remote
        // id; `zip` drops them otherwise.
        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::FormatBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            for (buffer, buffer_abs_path, lang_server) in local_buffers {
                let lsp_edits = lang_server
                    .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
                        text_document: lsp::TextDocumentIdentifier::new(
                            // NOTE(review): assumes the local absolute path is
                            // always convertible to a file URL.
                            lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                        ),
                        options: Default::default(),
                        work_done_progress_params: Default::default(),
                    })
                    .await?;

                if let Some(lsp_edits) = lsp_edits {
                    let edits = buffer
                        .update(&mut cx, |buffer, cx| {
                            buffer.edits_from_lsp(lsp_edits, None, cx)
                        })
                        .await?;
                    // Apply all edits inside one transaction so the format is
                    // a single undo step.
                    buffer.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            project_transaction.0.insert(cx.handle(), transaction);
                        }
                    });
                }
            }

            Ok(project_transaction)
        })
    }
1176
    /// Resolves go-to-definition targets for `position` in the given buffer.
    ///
    /// For buffers in a local worktree, a `textDocument/definition` request is
    /// sent to the buffer's language server; each returned location (scalar,
    /// array, or link form) is opened as a buffer and converted into a
    /// [`Definition`] whose range is anchored in the target buffer. For remote
    /// projects, a `GetDefinition` RPC is sent to the host and the returned
    /// buffers/anchors are deserialized. Buffers with no file, no language, or
    /// no running language server yield an empty result.
    pub fn definition<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Definition>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // Untitled buffers have nothing to look definitions up in.
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);

        if worktree.read(cx).as_local().is_some() {
            // Local path: `as_local()` succeeded, so the abs path is present.
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_name;
            let lang_server;
            if let Some(lang) = source_buffer.language() {
                lang_name = lang.name().to_string();
                if let Some(server) = self
                    .language_servers
                    .get(&(worktree.read(cx).id(), lang_name.clone()))
                {
                    lang_server = server.clone();
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            } else {
                return Task::ready(Ok(Default::default()));
            }

            cx.spawn(|this, mut cx| async move {
                let response = lang_server
                    .request::<lsp::request::GotoDefinition>(lsp::GotoDefinitionParams {
                        text_document_position_params: lsp::TextDocumentPositionParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                            ),
                            position: lsp::Position::new(position.row, position.column),
                        },
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await?;

                let mut definitions = Vec::new();
                if let Some(response) = response {
                    // Normalize the three LSP response shapes into a flat
                    // list of (uri, range) pairs.
                    let mut unresolved_locations = Vec::new();
                    match response {
                        lsp::GotoDefinitionResponse::Scalar(loc) => {
                            unresolved_locations.push((loc.uri, loc.range));
                        }
                        lsp::GotoDefinitionResponse::Array(locs) => {
                            unresolved_locations.extend(locs.into_iter().map(|l| (l.uri, l.range)));
                        }
                        lsp::GotoDefinitionResponse::Link(links) => {
                            unresolved_locations.extend(
                                links
                                    .into_iter()
                                    .map(|l| (l.target_uri, l.target_selection_range)),
                            );
                        }
                    }

                    for (target_uri, target_range) in unresolved_locations {
                        // Open (or retrieve) a buffer for the target file,
                        // associating it with the same language server.
                        let target_buffer_handle = this
                            .update(&mut cx, |this, cx| {
                                this.open_local_buffer_from_lsp_path(
                                    target_uri,
                                    lang_name.clone(),
                                    lang_server.clone(),
                                    cx,
                                )
                            })
                            .await?;

                        cx.read(|cx| {
                            let target_buffer = target_buffer_handle.read(cx);
                            // Clip the LSP range to valid positions before
                            // anchoring it in the target buffer.
                            let target_start = target_buffer
                                .clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
                            let target_end = target_buffer
                                .clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
                            definitions.push(Definition {
                                target_buffer: target_buffer_handle,
                                target_range: target_buffer.anchor_after(target_start)
                                    ..target_buffer.anchor_before(target_end),
                            });
                        });
                    }
                }

                Ok(definitions)
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote path: ask the host to resolve the definition.
            let client = self.client.clone();
            let request = proto::GetDefinition {
                project_id,
                buffer_id: source_buffer.remote_id(),
                position: Some(serialize_anchor(&source_buffer.anchor_before(position))),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client.request(request).await?;
                let mut definitions = Vec::new();
                for definition in response.definitions {
                    let buffer = definition.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let target_start = definition
                        .target_start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let target_end = definition
                        .target_end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    definitions.push(Definition {
                        target_buffer,
                        target_range: target_start..target_end,
                    })
                }

                Ok(definitions)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
1311
    /// Requests code completions at `position` in the given buffer.
    ///
    /// For buffers in a local worktree, a `textDocument/completion` request is
    /// sent to the buffer's language server and each LSP completion item is
    /// converted into a [`Completion`] anchored in the buffer. Items using
    /// insert/replace edits, or whose edit range doesn't clip to valid buffer
    /// positions, are dropped. For remote projects a `GetCompletions` RPC is
    /// sent to the host instead. Buffers with no file or no language server
    /// yield an empty result.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        // Anchor now so the remote request survives concurrent edits.
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            // Local worktree implies a local file, so the abs path is present.
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
                server
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            position.to_lsp_position(),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await?;

                // LSP may answer with a bare array or a (possibly incomplete)
                // list; we only care about the items.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
                                lsp::CompletionTextEdit::Edit(edit) => {
                                    (range_from_lsp(edit.range), edit.new_text.clone())
                                }
                                lsp::CompletionTextEdit::InsertAndReplace(_) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            // Discard completions whose edit range falls
                            // outside valid positions in the buffer.
                            let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                            let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
                            if clipped_start == old_range.start && clipped_end == old_range.end {
                                Some(Completion {
                                    old_range: this.anchor_before(old_range.start)
                                        ..this.anchor_after(old_range.end),
                                    new_text,
                                    label: language
                                        .as_ref()
                                        .and_then(|l| l.label_for_completion(&lsp_completion))
                                        .unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
                                    lsp_completion,
                                })
                            } else {
                                None
                            }
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: (&source_buffer.version()).into(),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;
                // Wait until this replica has seen the version the host
                // computed completions against before deserializing them.
                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(response.version.into())
                    })
                    .await;
                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
1427
    /// Applies the additional text edits attached to a confirmed completion
    /// (e.g. auto-inserted imports), returning the transaction that applied
    /// them, or `None` if the resolved completion had no additional edits.
    ///
    /// Locally, the completion is resolved via `completionItem/resolve` and
    /// its `additional_text_edits` are applied inside one transaction.
    /// Remotely, an `ApplyCompletionAdditionalEdits` RPC is sent to the host
    /// and this replica waits for the returned transaction's edits to arrive.
    /// When `push_to_history` is false, the transaction is not kept in the
    /// buffer's undo history.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let lang_server = if let Some(language_server) = buffer.language_server() {
                language_server.clone()
            } else {
                return Task::ready(Err(anyhow!("buffer does not have a language server")));
            };

            cx.spawn(|_, mut cx| async move {
                // Resolve the item: servers often omit additional edits until
                // explicitly asked via `completionItem/resolve`.
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = buffer_handle
                        .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        // Group all edits into one transaction (single undo).
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Block until the host's edits have been replicated to
                    // this buffer before reporting the transaction.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
1506
    /// Requests the code actions available for `range` in the given buffer.
    ///
    /// Locally, a `textDocument/codeAction` request is sent to the language
    /// server registered for the buffer's (worktree, language) pair, filtered
    /// to quickfix, refactor, and refactor-extract kinds; plain commands in
    /// the response are discarded. Remotely, a `GetCodeActions` RPC is sent to
    /// the host. Buffers with no file, no language, or no running server
    /// yield an empty result.
    pub fn code_actions<T: ToOffset>(
        &self,
        buffer_handle: &ModelHandle<Buffer>,
        range: Range<T>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<CodeAction>>> {
        let buffer_handle = buffer_handle.clone();
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Ok(Default::default()));
        };
        // Anchor the range so the resulting actions stay valid across edits.
        let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);

        if worktree.read(cx).as_local().is_some() {
            // Local worktree implies a local file, so the abs path is present.
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_name;
            let lang_server;
            if let Some(lang) = buffer.language() {
                lang_name = lang.name().to_string();
                if let Some(server) = self
                    .language_servers
                    .get(&(worktree.read(cx).id(), lang_name.clone()))
                {
                    lang_server = server.clone();
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            } else {
                return Task::ready(Ok(Default::default()));
            }

            let lsp_range = lsp::Range::new(
                range.start.to_point_utf16(buffer).to_lsp_position(),
                range.end.to_point_utf16(buffer).to_lsp_position(),
            );
            cx.foreground().spawn(async move {
                Ok(lang_server
                    .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
                        text_document: lsp::TextDocumentIdentifier::new(
                            lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                        ),
                        range: lsp_range,
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                        context: lsp::CodeActionContext {
                            diagnostics: Default::default(),
                            // Restrict the request to the action kinds the
                            // client can apply.
                            only: Some(vec![
                                lsp::CodeActionKind::QUICKFIX,
                                lsp::CodeActionKind::REFACTOR,
                                lsp::CodeActionKind::REFACTOR_EXTRACT,
                            ]),
                        },
                    })
                    .await?
                    .unwrap_or_default()
                    .into_iter()
                    .filter_map(|entry| {
                        // Keep full code actions; bare commands are dropped.
                        if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
                            Some(CodeAction {
                                range: range.clone(),
                                lsp_action,
                            })
                        } else {
                            None
                        }
                    })
                    .collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            cx.foreground().spawn(async move {
                let response = rpc
                    .request(proto::GetCodeActions {
                        project_id,
                        buffer_id,
                        start: Some(language::proto::serialize_anchor(&range.start)),
                        end: Some(language::proto::serialize_anchor(&range.end)),
                    })
                    .await?;
                response
                    .actions
                    .into_iter()
                    .map(language::proto::deserialize_code_action)
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
1602
1603 pub fn apply_code_action(
1604 &self,
1605 buffer_handle: ModelHandle<Buffer>,
1606 mut action: CodeAction,
1607 push_to_history: bool,
1608 cx: &mut ModelContext<Self>,
1609 ) -> Task<Result<ProjectTransaction>> {
1610 if self.is_local() {
1611 let buffer = buffer_handle.read(cx);
1612 let lang_name = if let Some(lang) = buffer.language() {
1613 lang.name().to_string()
1614 } else {
1615 return Task::ready(Ok(Default::default()));
1616 };
1617 let lang_server = if let Some(language_server) = buffer.language_server() {
1618 language_server.clone()
1619 } else {
1620 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1621 };
1622 let range = action.range.to_point_utf16(buffer);
1623 let fs = self.fs.clone();
1624
1625 cx.spawn(|this, mut cx| async move {
1626 if let Some(lsp_range) = action
1627 .lsp_action
1628 .data
1629 .as_mut()
1630 .and_then(|d| d.get_mut("codeActionParams"))
1631 .and_then(|d| d.get_mut("range"))
1632 {
1633 *lsp_range = serde_json::to_value(&lsp::Range::new(
1634 range.start.to_lsp_position(),
1635 range.end.to_lsp_position(),
1636 ))
1637 .unwrap();
1638 action.lsp_action = lang_server
1639 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1640 .await?;
1641 } else {
1642 let actions = this
1643 .update(&mut cx, |this, cx| {
1644 this.code_actions(&buffer_handle, action.range, cx)
1645 })
1646 .await?;
1647 action.lsp_action = actions
1648 .into_iter()
1649 .find(|a| a.lsp_action.title == action.lsp_action.title)
1650 .ok_or_else(|| anyhow!("code action is outdated"))?
1651 .lsp_action;
1652 }
1653
1654 let mut operations = Vec::new();
1655 if let Some(edit) = action.lsp_action.edit {
1656 if let Some(document_changes) = edit.document_changes {
1657 match document_changes {
1658 lsp::DocumentChanges::Edits(edits) => operations
1659 .extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit)),
1660 lsp::DocumentChanges::Operations(ops) => operations = ops,
1661 }
1662 } else if let Some(changes) = edit.changes {
1663 operations.extend(changes.into_iter().map(|(uri, edits)| {
1664 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1665 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1666 uri,
1667 version: None,
1668 },
1669 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1670 })
1671 }));
1672 }
1673 }
1674
1675 let mut project_transaction = ProjectTransaction::default();
1676 for operation in operations {
1677 match operation {
1678 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1679 let abs_path = op
1680 .uri
1681 .to_file_path()
1682 .map_err(|_| anyhow!("can't convert URI to path"))?;
1683
1684 if let Some(parent_path) = abs_path.parent() {
1685 fs.create_dir(parent_path).await?;
1686 }
1687 if abs_path.ends_with("/") {
1688 fs.create_dir(&abs_path).await?;
1689 } else {
1690 fs.create_file(
1691 &abs_path,
1692 op.options.map(Into::into).unwrap_or_default(),
1693 )
1694 .await?;
1695 }
1696 }
1697 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1698 let source_abs_path = op
1699 .old_uri
1700 .to_file_path()
1701 .map_err(|_| anyhow!("can't convert URI to path"))?;
1702 let target_abs_path = op
1703 .new_uri
1704 .to_file_path()
1705 .map_err(|_| anyhow!("can't convert URI to path"))?;
1706 fs.rename(
1707 &source_abs_path,
1708 &target_abs_path,
1709 op.options.map(Into::into).unwrap_or_default(),
1710 )
1711 .await?;
1712 }
1713 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1714 let abs_path = op
1715 .uri
1716 .to_file_path()
1717 .map_err(|_| anyhow!("can't convert URI to path"))?;
1718 let options = op.options.map(Into::into).unwrap_or_default();
1719 if abs_path.ends_with("/") {
1720 fs.remove_dir(&abs_path, options).await?;
1721 } else {
1722 fs.remove_file(&abs_path, options).await?;
1723 }
1724 }
1725 lsp::DocumentChangeOperation::Edit(op) => {
1726 let buffer_to_edit = this
1727 .update(&mut cx, |this, cx| {
1728 this.open_local_buffer_from_lsp_path(
1729 op.text_document.uri,
1730 lang_name.clone(),
1731 lang_server.clone(),
1732 cx,
1733 )
1734 })
1735 .await?;
1736
1737 let edits = buffer_to_edit
1738 .update(&mut cx, |buffer, cx| {
1739 let edits = op.edits.into_iter().map(|edit| match edit {
1740 lsp::OneOf::Left(edit) => edit,
1741 lsp::OneOf::Right(edit) => edit.text_edit,
1742 });
1743 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1744 })
1745 .await?;
1746
1747 let transaction = buffer_to_edit.update(&mut cx, |buffer, cx| {
1748 buffer.finalize_last_transaction();
1749 buffer.start_transaction();
1750 for (range, text) in edits {
1751 buffer.edit([range], text, cx);
1752 }
1753 let transaction = if buffer.end_transaction(cx).is_some() {
1754 let transaction =
1755 buffer.finalize_last_transaction().unwrap().clone();
1756 if !push_to_history {
1757 buffer.forget_transaction(transaction.id);
1758 }
1759 Some(transaction)
1760 } else {
1761 None
1762 };
1763
1764 transaction
1765 });
1766 if let Some(transaction) = transaction {
1767 project_transaction.0.insert(buffer_to_edit, transaction);
1768 }
1769 }
1770 }
1771 }
1772
1773 Ok(project_transaction)
1774 })
1775 } else if let Some(project_id) = self.remote_id() {
1776 let client = self.client.clone();
1777 let request = proto::ApplyCodeAction {
1778 project_id,
1779 buffer_id: buffer_handle.read(cx).remote_id(),
1780 action: Some(language::proto::serialize_code_action(&action)),
1781 };
1782 cx.spawn(|this, mut cx| async move {
1783 let response = client
1784 .request(request)
1785 .await?
1786 .transaction
1787 .ok_or_else(|| anyhow!("missing transaction"))?;
1788 this.update(&mut cx, |this, cx| {
1789 this.deserialize_project_transaction(response, push_to_history, cx)
1790 })
1791 .await
1792 })
1793 } else {
1794 Task::ready(Err(anyhow!("project does not have a remote id")))
1795 }
1796 }
1797
1798 pub fn find_or_create_local_worktree(
1799 &self,
1800 abs_path: impl AsRef<Path>,
1801 weak: bool,
1802 cx: &mut ModelContext<Self>,
1803 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1804 let abs_path = abs_path.as_ref();
1805 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1806 Task::ready(Ok((tree.clone(), relative_path.into())))
1807 } else {
1808 let worktree = self.create_local_worktree(abs_path, weak, cx);
1809 cx.foreground()
1810 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1811 }
1812 }
1813
1814 fn find_local_worktree(
1815 &self,
1816 abs_path: &Path,
1817 cx: &AppContext,
1818 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1819 for tree in self.worktrees(cx) {
1820 if let Some(relative_path) = tree
1821 .read(cx)
1822 .as_local()
1823 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1824 {
1825 return Some((tree.clone(), relative_path.into()));
1826 }
1827 }
1828 None
1829 }
1830
1831 pub fn is_shared(&self) -> bool {
1832 match &self.client_state {
1833 ProjectClientState::Local { is_shared, .. } => *is_shared,
1834 ProjectClientState::Remote { .. } => false,
1835 }
1836 }
1837
    /// Creates a new local worktree rooted at `abs_path` and adds it to the
    /// project.
    ///
    /// If the project has a remote id, the new worktree is registered with
    /// the server, and additionally shared if the project is currently
    /// shared, so collaborators see it immediately.
    fn create_local_worktree(
        &self,
        abs_path: impl AsRef<Path>,
        weak: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let path = Arc::from(abs_path.as_ref());
        cx.spawn(|project, mut cx| async move {
            let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;

            // Add the worktree first, then capture the sharing state so the
            // follow-up registration matches the project's current status.
            let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
                project.add_worktree(&worktree, cx);
                (project.remote_id(), project.is_shared())
            });

            if let Some(project_id) = remote_project_id {
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_local_mut().unwrap().register(project_id, cx)
                    })
                    .await?;
                if is_shared {
                    worktree
                        .update(&mut cx, |worktree, cx| {
                            worktree.as_local_mut().unwrap().share(project_id, cx)
                        })
                        .await?;
                }
            }

            Ok(worktree)
        })
    }
1873
1874 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1875 self.worktrees.retain(|worktree| {
1876 worktree
1877 .upgrade(cx)
1878 .map_or(false, |w| w.read(cx).id() != id)
1879 });
1880 cx.notify();
1881 }
1882
1883 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
1884 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
1885 if worktree.read(cx).is_local() {
1886 cx.subscribe(&worktree, |this, worktree, _, cx| {
1887 this.update_local_worktree_buffers(worktree, cx);
1888 })
1889 .detach();
1890 }
1891
1892 let push_weak_handle = {
1893 let worktree = worktree.read(cx);
1894 worktree.is_local() && worktree.is_weak()
1895 };
1896 if push_weak_handle {
1897 cx.observe_release(&worktree, |this, cx| {
1898 this.worktrees
1899 .retain(|worktree| worktree.upgrade(cx).is_some());
1900 cx.notify();
1901 })
1902 .detach();
1903 self.worktrees
1904 .push(WorktreeHandle::Weak(worktree.downgrade()));
1905 } else {
1906 self.worktrees
1907 .push(WorktreeHandle::Strong(worktree.clone()));
1908 }
1909 cx.notify();
1910 }
1911
1912 fn update_local_worktree_buffers(
1913 &mut self,
1914 worktree_handle: ModelHandle<Worktree>,
1915 cx: &mut ModelContext<Self>,
1916 ) {
1917 let snapshot = worktree_handle.read(cx).snapshot();
1918 let mut buffers_to_delete = Vec::new();
1919 for (buffer_id, buffer) in &self.open_buffers {
1920 if let Some(buffer) = buffer.upgrade(cx) {
1921 buffer.update(cx, |buffer, cx| {
1922 if let Some(old_file) = File::from_dyn(buffer.file()) {
1923 if old_file.worktree != worktree_handle {
1924 return;
1925 }
1926
1927 let new_file = if let Some(entry) = old_file
1928 .entry_id
1929 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
1930 {
1931 File {
1932 is_local: true,
1933 entry_id: Some(entry.id),
1934 mtime: entry.mtime,
1935 path: entry.path.clone(),
1936 worktree: worktree_handle.clone(),
1937 }
1938 } else if let Some(entry) =
1939 snapshot.entry_for_path(old_file.path().as_ref())
1940 {
1941 File {
1942 is_local: true,
1943 entry_id: Some(entry.id),
1944 mtime: entry.mtime,
1945 path: entry.path.clone(),
1946 worktree: worktree_handle.clone(),
1947 }
1948 } else {
1949 File {
1950 is_local: true,
1951 entry_id: None,
1952 path: old_file.path().clone(),
1953 mtime: old_file.mtime(),
1954 worktree: worktree_handle.clone(),
1955 }
1956 };
1957
1958 if let Some(project_id) = self.remote_id() {
1959 self.client
1960 .send(proto::UpdateBufferFile {
1961 project_id,
1962 buffer_id: *buffer_id as u64,
1963 file: Some(new_file.to_proto()),
1964 })
1965 .log_err();
1966 }
1967 buffer.file_updated(Box::new(new_file), cx).detach();
1968 }
1969 });
1970 } else {
1971 buffers_to_delete.push(*buffer_id);
1972 }
1973 }
1974
1975 for buffer_id in buffers_to_delete {
1976 self.open_buffers.remove(&buffer_id);
1977 }
1978 }
1979
1980 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
1981 let new_active_entry = entry.and_then(|project_path| {
1982 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1983 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
1984 Some(ProjectEntry {
1985 worktree_id: project_path.worktree_id,
1986 entry_id: entry.id,
1987 })
1988 });
1989 if new_active_entry != self.active_entry {
1990 self.active_entry = new_active_entry;
1991 cx.emit(Event::ActiveEntryChanged(new_active_entry));
1992 }
1993 }
1994
    /// Whether at least one language server is currently producing
    /// disk-based diagnostics (see `disk_based_diagnostics_started`/
    /// `_finished`, which maintain this counter).
    pub fn is_running_disk_based_diagnostics(&self) -> bool {
        self.language_servers_with_diagnostics_running > 0
    }
1998
1999 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2000 let mut summary = DiagnosticSummary::default();
2001 for (_, path_summary) in self.diagnostic_summaries(cx) {
2002 summary.error_count += path_summary.error_count;
2003 summary.warning_count += path_summary.warning_count;
2004 summary.info_count += path_summary.info_count;
2005 summary.hint_count += path_summary.hint_count;
2006 }
2007 summary
2008 }
2009
    /// Iterates over the diagnostic summary of every path in every worktree,
    /// pairing each path with its owning worktree id as a `ProjectPath`.
    pub fn diagnostic_summaries<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
        self.worktrees(cx).flat_map(move |worktree| {
            let worktree = worktree.read(cx);
            let worktree_id = worktree.id();
            worktree
                .diagnostic_summaries()
                .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
        })
    }
2022
    /// Records that a language server began producing disk-based diagnostics.
    /// Emits `DiskBasedDiagnosticsStarted` only on the 0 -> 1 transition so
    /// overlapping servers produce a single event.
    pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
        self.language_servers_with_diagnostics_running += 1;
        if self.language_servers_with_diagnostics_running == 1 {
            cx.emit(Event::DiskBasedDiagnosticsStarted);
        }
    }
2029
    /// Records that a language server finished its disk-based diagnostics.
    /// `DiskBasedDiagnosticsUpdated` is emitted on every call;
    /// `DiskBasedDiagnosticsFinished` only when the last running server
    /// finishes (counter reaches zero).
    pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
        cx.emit(Event::DiskBasedDiagnosticsUpdated);
        self.language_servers_with_diagnostics_running -= 1;
        if self.language_servers_with_diagnostics_running == 0 {
            cx.emit(Event::DiskBasedDiagnosticsFinished);
        }
    }
2037
    /// The currently active project entry, if any (see `set_active_path`).
    pub fn active_entry(&self) -> Option<ProjectEntry> {
        self.active_entry
    }
2041
2042 // RPC message handlers
2043
2044 async fn handle_unshare_project(
2045 this: ModelHandle<Self>,
2046 _: TypedEnvelope<proto::UnshareProject>,
2047 _: Arc<Client>,
2048 mut cx: AsyncAppContext,
2049 ) -> Result<()> {
2050 this.update(&mut cx, |this, cx| {
2051 if let ProjectClientState::Remote {
2052 sharing_has_stopped,
2053 ..
2054 } = &mut this.client_state
2055 {
2056 *sharing_has_stopped = true;
2057 this.collaborators.clear();
2058 cx.notify();
2059 } else {
2060 unreachable!()
2061 }
2062 });
2063
2064 Ok(())
2065 }
2066
    /// Handles a new collaborator joining the project: resolves their user
    /// info via the user store and records them by peer id.
    async fn handle_add_collaborator(
        this: ModelHandle<Self>,
        mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
        let collaborator = envelope
            .payload
            .collaborator
            .take()
            .ok_or_else(|| anyhow!("empty collaborator"))?;

        // Resolving the proto collaborator may fetch the user, hence await.
        let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
        this.update(&mut cx, |this, cx| {
            this.collaborators
                .insert(collaborator.peer_id, collaborator);
            cx.notify();
        });

        Ok(())
    }
2089
2090 async fn handle_remove_collaborator(
2091 this: ModelHandle<Self>,
2092 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2093 _: Arc<Client>,
2094 mut cx: AsyncAppContext,
2095 ) -> Result<()> {
2096 this.update(&mut cx, |this, cx| {
2097 let peer_id = PeerId(envelope.payload.peer_id);
2098 let replica_id = this
2099 .collaborators
2100 .remove(&peer_id)
2101 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2102 .replica_id;
2103 this.shared_buffers.remove(&peer_id);
2104 for (_, buffer) in &this.open_buffers {
2105 if let Some(buffer) = buffer.upgrade(cx) {
2106 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2107 }
2108 }
2109 cx.notify();
2110 Ok(())
2111 })
2112 }
2113
    /// Handles the host sharing a worktree with this (remote) project:
    /// constructs a remote worktree from the proto payload, adds it, and
    /// detaches its load task.
    async fn handle_share_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ShareWorktree>,
        client: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
            let replica_id = this.replica_id();
            let worktree = envelope
                .payload
                .worktree
                .ok_or_else(|| anyhow!("invalid worktree"))?;
            let (worktree, load_task) =
                Worktree::remote(remote_id, replica_id, worktree, client, cx);
            this.add_worktree(&worktree, cx);
            // The worktree keeps populating in the background.
            load_task.detach();
            Ok(())
        })
    }
2134
2135 async fn handle_unregister_worktree(
2136 this: ModelHandle<Self>,
2137 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2138 _: Arc<Client>,
2139 mut cx: AsyncAppContext,
2140 ) -> Result<()> {
2141 this.update(&mut cx, |this, cx| {
2142 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2143 this.remove_worktree(worktree_id, cx);
2144 Ok(())
2145 })
2146 }
2147
    /// Handles an `UpdateWorktree` message by forwarding the update to the
    /// corresponding remote worktree. Updates for unknown worktrees are
    /// silently ignored (the worktree may have been removed concurrently).
    async fn handle_update_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateWorktree>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                worktree.update(cx, |worktree, _| {
                    // Only remote worktrees receive this message; a local one
                    // here would be a protocol violation.
                    let worktree = worktree.as_remote_mut().unwrap();
                    worktree.update_from_remote(envelope)
                })?;
            }
            Ok(())
        })
    }
2165
    /// Handles an `UpdateDiagnosticSummary` message by recording the new
    /// per-path diagnostic summary on the corresponding remote worktree and
    /// emitting a `DiagnosticsUpdated` event for that path.
    async fn handle_update_diagnostic_summary(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                if let Some(summary) = envelope.payload.summary {
                    let project_path = ProjectPath {
                        worktree_id,
                        path: Path::new(&summary.path).into(),
                    };
                    worktree.update(cx, |worktree, _| {
                        // Only remote worktrees receive this message.
                        worktree
                            .as_remote_mut()
                            .unwrap()
                            .update_diagnostic_summary(project_path.path.clone(), &summary);
                    });
                    cx.emit(Event::DiagnosticsUpdated(project_path));
                }
            }
            Ok(())
        })
    }
2192
    /// Handles a `DiskBasedDiagnosticsUpdating` message by marking a
    /// disk-based diagnostics pass as started on this project.
    async fn handle_disk_based_diagnostics_updating(
        this: ModelHandle<Self>,
        _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
        Ok(())
    }
2202
    /// Handles a `DiskBasedDiagnosticsUpdated` message by marking a
    /// disk-based diagnostics pass as finished on this project.
    async fn handle_disk_based_diagnostics_updated(
        this: ModelHandle<Self>,
        _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
        Ok(())
    }
2212
2213 async fn handle_update_buffer(
2214 this: ModelHandle<Self>,
2215 envelope: TypedEnvelope<proto::UpdateBuffer>,
2216 _: Arc<Client>,
2217 mut cx: AsyncAppContext,
2218 ) -> Result<()> {
2219 this.update(&mut cx, |this, cx| {
2220 let payload = envelope.payload.clone();
2221 let buffer_id = payload.buffer_id;
2222 let ops = payload
2223 .operations
2224 .into_iter()
2225 .map(|op| language::proto::deserialize_operation(op))
2226 .collect::<Result<Vec<_>, _>>()?;
2227 let is_remote = this.is_remote();
2228 match this.open_buffers.entry(buffer_id) {
2229 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2230 OpenBuffer::Loaded(buffer) => {
2231 if let Some(buffer) = buffer.upgrade(cx) {
2232 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2233 }
2234 }
2235 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2236 },
2237 hash_map::Entry::Vacant(e) => {
2238 if is_remote && this.loading_buffers.len() > 0 {
2239 e.insert(OpenBuffer::Loading(ops));
2240 }
2241 }
2242 }
2243 Ok(())
2244 })
2245 }
2246
2247 async fn handle_update_buffer_file(
2248 this: ModelHandle<Self>,
2249 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2250 _: Arc<Client>,
2251 mut cx: AsyncAppContext,
2252 ) -> Result<()> {
2253 this.update(&mut cx, |this, cx| {
2254 let payload = envelope.payload.clone();
2255 let buffer_id = payload.buffer_id;
2256 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2257 let worktree = this
2258 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2259 .ok_or_else(|| anyhow!("no such worktree"))?;
2260 let file = File::from_proto(file, worktree.clone(), cx)?;
2261 let buffer = this
2262 .open_buffers
2263 .get_mut(&buffer_id)
2264 .and_then(|b| b.upgrade(cx))
2265 .ok_or_else(|| anyhow!("no such buffer"))?;
2266 buffer.update(cx, |buffer, cx| {
2267 buffer.file_updated(Box::new(file), cx).detach();
2268 });
2269 Ok(())
2270 })
2271 }
2272
    /// Handles a `SaveBuffer` request from a guest: waits until this host's
    /// copy of the buffer has caught up to the version the guest saw, saves
    /// it, and replies with the saved version and mtime.
    async fn handle_save_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = envelope.payload.buffer_id;
        let sender_id = envelope.original_sender_id()?;
        let requested_version = envelope.payload.version.try_into()?;

        let (project_id, buffer) = this.update(&mut cx, |this, _| {
            let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
            // Only buffers previously shared with this peer may be saved on
            // its behalf.
            let buffer = this
                .shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
            Ok::<_, anyhow::Error>((project_id, buffer))
        })?;

        // Don't save until all edits the guest had seen have been applied
        // locally, so the saved content includes them.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(requested_version)
            })
            .await;

        let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
        Ok(proto::BufferSaved {
            project_id,
            buffer_id,
            version: (&saved_version).into(),
            mtime: Some(mtime.into()),
        })
    }
2307
    /// Handles a `FormatBuffers` request from a guest: formats the requested
    /// set of shared buffers and replies with the resulting project
    /// transaction, serialized for the requesting peer.
    async fn handle_format_buffers(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::FormatBuffers>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::FormatBuffersResponse> {
        let sender_id = envelope.original_sender_id()?;
        let format = this.update(&mut cx, |this, cx| {
            let shared_buffers = this
                .shared_buffers
                .get(&sender_id)
                .ok_or_else(|| anyhow!("peer has no buffers"))?;
            // Every requested id must refer to a buffer already shared with
            // this peer; otherwise the request is rejected outright.
            let mut buffers = HashSet::default();
            for buffer_id in &envelope.payload.buffer_ids {
                buffers.insert(
                    shared_buffers
                        .get(buffer_id)
                        .cloned()
                        .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
                );
            }
            // `false`: don't push the transaction into the host's own history.
            Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
        })?;

        let project_transaction = format.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::FormatBuffersResponse {
            transaction: Some(project_transaction),
        })
    }
2340
    /// Handles a `GetCompletions` request from a guest: waits for the buffer
    /// to reach the guest's version, queries completions at the requested
    /// position, and replies with the serialized completions plus the buffer
    /// version they were computed against.
    async fn handle_get_completions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCompletions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCompletionsResponse> {
        let sender_id = envelope.original_sender_id()?;
        let position = envelope
            .payload
            .position
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        let version = clock::Global::from(envelope.payload.version);
        let buffer = this.read_with(&cx, |this, _| {
            this.shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        // Ensure the anchor the guest sent can be resolved against our copy.
        buffer
            .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
            .await;
        // Re-read the version after waiting: it may have advanced further, and
        // the guest needs the exact version these completions correspond to.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let completions = this
            .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
            .await?;

        Ok(proto::GetCompletionsResponse {
            completions: completions
                .iter()
                .map(language::proto::serialize_completion)
                .collect(),
            version: (&version).into(),
        })
    }
2376
    /// Handles an `ApplyCompletionAdditionalEdits` request from a guest:
    /// deserializes the completion against the buffer's language, applies its
    /// additional edits on the host, and replies with the resulting
    /// transaction (if any).
    async fn handle_apply_additional_edits_for_completion(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
        let sender_id = envelope.original_sender_id()?;
        let apply_additional_edits = this.update(&mut cx, |this, cx| {
            let buffer = this
                .shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // The buffer's language is needed to decode the completion label.
            let language = buffer.read(cx).language();
            let completion = language::proto::deserialize_completion(
                envelope
                    .payload
                    .completion
                    .ok_or_else(|| anyhow!("invalid completion"))?,
                language,
            )?;
            // `false`: don't push the transaction into the host's own history.
            Ok::<_, anyhow::Error>(
                this.apply_additional_edits_for_completion(buffer, completion, false, cx),
            )
        })?;

        Ok(proto::ApplyCompletionAdditionalEditsResponse {
            transaction: apply_additional_edits
                .await?
                .as_ref()
                .map(language::proto::serialize_transaction),
        })
    }
2410
    /// Handles a `GetCodeActions` request from a guest: waits until the
    /// anchors' creating edits are visible locally, then queries code actions
    /// for the requested range and replies with them serialized.
    async fn handle_get_code_actions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCodeActions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCodeActionsResponse> {
        let sender_id = envelope.original_sender_id()?;
        let start = envelope
            .payload
            .start
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = envelope
            .payload
            .end
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid end"))?;
        let buffer = this.update(&mut cx, |this, _| {
            this.shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        // Wait only for the specific edits that created these anchors, rather
        // than a full version vector.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version([start.timestamp, end.timestamp].into_iter().collect())
            })
            .await;
        let code_actions = this.update(&mut cx, |this, cx| {
            Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
        })?;

        Ok(proto::GetCodeActionsResponse {
            actions: code_actions
                .await?
                .iter()
                .map(language::proto::serialize_code_action)
                .collect(),
        })
    }
2451
    /// Handles an `ApplyCodeAction` request from a guest: applies the action
    /// to the shared buffer on the host and replies with the resulting
    /// project transaction, serialized for the requesting peer.
    async fn handle_apply_code_action(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCodeAction>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCodeActionResponse> {
        let sender_id = envelope.original_sender_id()?;
        let action = language::proto::deserialize_code_action(
            envelope
                .payload
                .action
                .ok_or_else(|| anyhow!("invalid action"))?,
        )?;
        let apply_code_action = this.update(&mut cx, |this, cx| {
            let buffer = this
                .shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // `false`: don't push the transaction into the host's own history.
            Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
        })?;

        let project_transaction = apply_code_action.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ApplyCodeActionResponse {
            transaction: Some(project_transaction),
        })
    }
2482
    /// Handles a `GetDefinition` request from a guest: resolves definitions
    /// for the given position and replies with each target range along with
    /// its buffer, serialized for the requesting peer (full state on first
    /// share, just an id afterwards).
    async fn handle_get_definition(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetDefinition>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetDefinitionResponse> {
        let sender_id = envelope.original_sender_id()?;
        let position = envelope
            .payload
            .position
            .and_then(deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        let definitions = this.update(&mut cx, |this, cx| {
            let source_buffer = this
                .shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // Reject anchors our copy of the buffer can't resolve yet, rather
            // than waiting; the guest can retry.
            if source_buffer.read(cx).can_resolve(&position) {
                Ok(this.definition(&source_buffer, position, cx))
            } else {
                Err(anyhow!("cannot resolve position"))
            }
        })?;

        let definitions = definitions.await?;

        this.update(&mut cx, |this, cx| {
            let mut response = proto::GetDefinitionResponse {
                definitions: Default::default(),
            };
            for definition in definitions {
                let buffer =
                    this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx);
                response.definitions.push(proto::Definition {
                    target_start: Some(serialize_anchor(&definition.target_range.start)),
                    target_end: Some(serialize_anchor(&definition.target_range.end)),
                    buffer: Some(buffer),
                });
            }
            Ok(response)
        })
    }
2526
    /// Handles an `OpenBuffer` request from a guest: opens the buffer at the
    /// requested project path on the host and replies with the buffer
    /// serialized for the requesting peer.
    async fn handle_open_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> anyhow::Result<proto::OpenBufferResponse> {
        let peer_id = envelope.original_sender_id()?;
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let open_buffer = this.update(&mut cx, |this, cx| {
            this.open_buffer(
                ProjectPath {
                    worktree_id,
                    path: PathBuf::from(envelope.payload.path).into(),
                },
                cx,
            )
        });

        let buffer = open_buffer.await?;
        this.update(&mut cx, |this, cx| {
            Ok(proto::OpenBufferResponse {
                // Records the buffer as shared with this peer so later
                // requests can refer to it by id.
                buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
            })
        })
    }
2552
2553 fn serialize_project_transaction_for_peer(
2554 &mut self,
2555 project_transaction: ProjectTransaction,
2556 peer_id: PeerId,
2557 cx: &AppContext,
2558 ) -> proto::ProjectTransaction {
2559 let mut serialized_transaction = proto::ProjectTransaction {
2560 buffers: Default::default(),
2561 transactions: Default::default(),
2562 };
2563 for (buffer, transaction) in project_transaction.0 {
2564 serialized_transaction
2565 .buffers
2566 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
2567 serialized_transaction
2568 .transactions
2569 .push(language::proto::serialize_transaction(&transaction));
2570 }
2571 serialized_transaction
2572 }
2573
    /// Reconstructs a `ProjectTransaction` from its wire representation.
    ///
    /// First resolves every buffer and transaction, then waits for each
    /// buffer to have received the edits the transaction refers to. When
    /// `push_to_history` is true, each transaction is also pushed onto its
    /// buffer's undo history.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            // `buffers` and `transactions` are parallel arrays on the wire.
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }
            // Don't return (or record) the transaction until its edits have
            // actually been applied to the local replicas.
            for (buffer, transaction) in &project_transaction.0 {
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
2606
    /// Serializes a buffer for transmission to a peer.
    ///
    /// The first time a buffer is sent to a given peer, its full state is
    /// serialized and a strong handle is retained in `shared_buffers` on the
    /// peer's behalf; subsequent sends transmit only the buffer id.
    fn serialize_buffer_for_peer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        peer_id: PeerId,
        cx: &AppContext,
    ) -> proto::Buffer {
        let buffer_id = buffer.read(cx).remote_id();
        let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
        match shared_buffers.entry(buffer_id) {
            // Already shared with this peer: the id is enough.
            hash_map::Entry::Occupied(_) => proto::Buffer {
                variant: Some(proto::buffer::Variant::Id(buffer_id)),
            },
            hash_map::Entry::Vacant(entry) => {
                entry.insert(buffer.clone());
                proto::Buffer {
                    variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
                }
            }
        }
    }
2627
    /// Resolves a wire-format buffer into a local buffer model.
    ///
    /// An `Id` variant refers to a buffer that was (or will be) sent with
    /// full state in some other message, so this waits until that buffer
    /// appears in `open_buffers`. A `State` variant is materialized into a
    /// new buffer model, registered with the project, and announced via the
    /// `opened_buffer` channel so concurrent `Id` resolutions can proceed.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        let mut opened_buffer_tx = self.opened_buffer.clone();
        let mut opened_buffer_rx = self.opened_buffer.subscribe();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    // Poll for the buffer, sleeping on the broadcast channel
                    // until another task announces a newly opened buffer.
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.open_buffers
                                .get(&id)
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        opened_buffer_rx
                            .recv()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    // Resolve the buffer's file against a local worktree, if
                    // the wire state carried one.
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Box<dyn language::File>);
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });
                    this.update(&mut cx, |this, cx| {
                        this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
                    })?;

                    // Wake any tasks waiting on this buffer by id; a closed
                    // channel just means no one is listening.
                    let _ = opened_buffer_tx.send(()).await;
                    Ok(buffer)
                }
            }
        })
    }
2687
2688 async fn handle_close_buffer(
2689 this: ModelHandle<Self>,
2690 envelope: TypedEnvelope<proto::CloseBuffer>,
2691 _: Arc<Client>,
2692 mut cx: AsyncAppContext,
2693 ) -> anyhow::Result<()> {
2694 this.update(&mut cx, |this, cx| {
2695 if let Some(shared_buffers) =
2696 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
2697 {
2698 shared_buffers.remove(&envelope.payload.buffer_id);
2699 cx.notify();
2700 }
2701 Ok(())
2702 })
2703 }
2704
    /// Handles a `BufferSaved` message by recording the saved version and
    /// mtime on the corresponding open buffer. Unknown or dropped buffers are
    /// ignored, since the buffer may have been closed concurrently.
    async fn handle_buffer_saved(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::BufferSaved>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let version = envelope.payload.version.try_into()?;
        let mtime = envelope
            .payload
            .mtime
            .ok_or_else(|| anyhow!("missing mtime"))?
            .into();

        this.update(&mut cx, |this, cx| {
            let buffer = this
                .open_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx));
            if let Some(buffer) = buffer {
                buffer.update(cx, |buffer, cx| {
                    // `None`: the remote save doesn't rename the file.
                    buffer.did_save(version, mtime, None, cx);
                });
            }
            Ok(())
        })
    }
2731
2732 async fn handle_buffer_reloaded(
2733 this: ModelHandle<Self>,
2734 envelope: TypedEnvelope<proto::BufferReloaded>,
2735 _: Arc<Client>,
2736 mut cx: AsyncAppContext,
2737 ) -> Result<()> {
2738 let payload = envelope.payload.clone();
2739 let version = payload.version.try_into()?;
2740 let mtime = payload
2741 .mtime
2742 .ok_or_else(|| anyhow!("missing mtime"))?
2743 .into();
2744 this.update(&mut cx, |this, cx| {
2745 let buffer = this
2746 .open_buffers
2747 .get(&payload.buffer_id)
2748 .and_then(|buffer| buffer.upgrade(cx));
2749 if let Some(buffer) = buffer {
2750 buffer.update(cx, |buffer, cx| {
2751 buffer.did_reload(version, mtime, cx);
2752 });
2753 }
2754 Ok(())
2755 })
2756 }
2757
2758 pub fn match_paths<'a>(
2759 &self,
2760 query: &'a str,
2761 include_ignored: bool,
2762 smart_case: bool,
2763 max_results: usize,
2764 cancel_flag: &'a AtomicBool,
2765 cx: &AppContext,
2766 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
2767 let worktrees = self
2768 .worktrees(cx)
2769 .filter(|worktree| !worktree.read(cx).is_weak())
2770 .collect::<Vec<_>>();
2771 let include_root_name = worktrees.len() > 1;
2772 let candidate_sets = worktrees
2773 .into_iter()
2774 .map(|worktree| CandidateSet {
2775 snapshot: worktree.read(cx).snapshot(),
2776 include_ignored,
2777 include_root_name,
2778 })
2779 .collect::<Vec<_>>();
2780
2781 let background = cx.background().clone();
2782 async move {
2783 fuzzy::match_paths(
2784 candidate_sets.as_slice(),
2785 query,
2786 smart_case,
2787 max_results,
2788 cancel_flag,
2789 background,
2790 )
2791 .await
2792 }
2793 }
2794}
2795
2796impl WorktreeHandle {
2797 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2798 match self {
2799 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2800 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2801 }
2802 }
2803}
2804
2805impl OpenBuffer {
2806 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
2807 match self {
2808 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
2809 OpenBuffer::Loading(_) => None,
2810 }
2811 }
2812}
2813
/// A single worktree's snapshot, adapted for fuzzy path matching.
struct CandidateSet {
    // Immutable snapshot of the worktree's entries.
    snapshot: Snapshot,
    // Whether gitignored files are offered as candidates.
    include_ignored: bool,
    // Whether paths are prefixed with the worktree's root name (used when the
    // project has more than one worktree).
    include_root_name: bool,
}
2819
impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
    type Candidates = CandidateSetIter<'a>;

    /// Identifies this set by the worktree id.
    fn id(&self) -> usize {
        self.snapshot.id().to_usize()
    }

    /// Number of candidate files, respecting the ignored-files setting.
    fn len(&self) -> usize {
        if self.include_ignored {
            self.snapshot.file_count()
        } else {
            self.snapshot.visible_file_count()
        }
    }

    /// Prefix prepended to every candidate path. A single-file worktree's
    /// "path" is empty, so the root name itself is the whole path; otherwise
    /// the root name is used as a directory prefix only when disambiguating
    /// multiple worktrees.
    fn prefix(&self) -> Arc<str> {
        if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
            self.snapshot.root_name().into()
        } else if self.include_root_name {
            format!("{}/", self.snapshot.root_name()).into()
        } else {
            "".into()
        }
    }

    /// Iterates candidates starting at index `start` (allows the matcher to
    /// partition work across threads).
    fn candidates(&'a self, start: usize) -> Self::Candidates {
        CandidateSetIter {
            traversal: self.snapshot.files(self.include_ignored, start),
        }
    }
}
2851
/// Iterator over a worktree snapshot's files as fuzzy-match candidates.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
2855
2856impl<'a> Iterator for CandidateSetIter<'a> {
2857 type Item = PathMatchCandidate<'a>;
2858
2859 fn next(&mut self) -> Option<Self::Item> {
2860 self.traversal.next().map(|entry| {
2861 if let EntryKind::File(char_bag) = entry.kind {
2862 PathMatchCandidate {
2863 path: &entry.path,
2864 char_bag,
2865 }
2866 } else {
2867 unreachable!()
2868 }
2869 })
2870 }
2871}
2872
impl Entity for Project {
    type Event = Event;

    /// On drop, tells the server this project is going away: a local project
    /// unregisters itself (if it was ever registered), a remote project
    /// leaves.
    fn release(&mut self, _: &mut gpui::MutableAppContext) {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => {
                if let Some(project_id) = *remote_id_rx.borrow() {
                    // Best effort: a send failure here is only logged.
                    self.client
                        .send(proto::UnregisterProject { project_id })
                        .log_err();
                }
            }
            ProjectClientState::Remote { remote_id, .. } => {
                self.client
                    .send(proto::LeaveProject {
                        project_id: *remote_id,
                    })
                    .log_err();
            }
        }
    }

    /// Before the app quits, shuts down all language servers and waits for
    /// the shutdowns to complete so servers aren't left orphaned.
    fn app_will_quit(
        &mut self,
        _: &mut MutableAppContext,
    ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
        use futures::FutureExt;

        let shutdown_futures = self
            .language_servers
            .drain()
            .filter_map(|(_, server)| server.shutdown())
            .collect::<Vec<_>>();
        Some(
            async move {
                futures::future::join_all(shutdown_futures).await;
            }
            .boxed(),
        )
    }
}
2914
2915impl Collaborator {
2916 fn from_proto(
2917 message: proto::Collaborator,
2918 user_store: &ModelHandle<UserStore>,
2919 cx: &mut AsyncAppContext,
2920 ) -> impl Future<Output = Result<Self>> {
2921 let user = user_store.update(cx, |user_store, cx| {
2922 user_store.fetch_user(message.user_id, cx)
2923 });
2924
2925 async move {
2926 Ok(Self {
2927 peer_id: PeerId(message.peer_id),
2928 user: user.await?,
2929 replica_id: message.replica_id as ReplicaId,
2930 })
2931 }
2932 }
2933}
2934
2935impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
2936 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
2937 Self {
2938 worktree_id,
2939 path: path.as_ref().into(),
2940 }
2941 }
2942}
2943
2944impl From<lsp::CreateFileOptions> for fs::CreateOptions {
2945 fn from(options: lsp::CreateFileOptions) -> Self {
2946 Self {
2947 overwrite: options.overwrite.unwrap_or(false),
2948 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2949 }
2950 }
2951}
2952
2953impl From<lsp::RenameFileOptions> for fs::RenameOptions {
2954 fn from(options: lsp::RenameFileOptions) -> Self {
2955 Self {
2956 overwrite: options.overwrite.unwrap_or(false),
2957 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2958 }
2959 }
2960}
2961
2962impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
2963 fn from(options: lsp::DeleteFileOptions) -> Self {
2964 Self {
2965 recursive: options.recursive.unwrap_or(false),
2966 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
2967 }
2968 }
2969}
2970
2971#[cfg(test)]
2972mod tests {
2973 use super::{Event, *};
2974 use client::test::FakeHttpClient;
2975 use fs::RealFs;
2976 use futures::StreamExt;
2977 use gpui::test::subscribe;
2978 use language::{
2979 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageRegistry,
2980 LanguageServerConfig, Point,
2981 };
2982 use lsp::Url;
2983 use serde_json::json;
2984 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
2985 use unindent::Unindent as _;
2986 use util::test::temp_tree;
2987 use worktree::WorktreeHandle as _;
2988
2989 #[gpui::test]
2990 async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
2991 let dir = temp_tree(json!({
2992 "root": {
2993 "apple": "",
2994 "banana": {
2995 "carrot": {
2996 "date": "",
2997 "endive": "",
2998 }
2999 },
3000 "fennel": {
3001 "grape": "",
3002 }
3003 }
3004 }));
3005
3006 let root_link_path = dir.path().join("root_link");
3007 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3008 unix::fs::symlink(
3009 &dir.path().join("root/fennel"),
3010 &dir.path().join("root/finnochio"),
3011 )
3012 .unwrap();
3013
3014 let project = Project::test(Arc::new(RealFs), &mut cx);
3015
3016 let (tree, _) = project
3017 .update(&mut cx, |project, cx| {
3018 project.find_or_create_local_worktree(&root_link_path, false, cx)
3019 })
3020 .await
3021 .unwrap();
3022
3023 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3024 .await;
3025 cx.read(|cx| {
3026 let tree = tree.read(cx);
3027 assert_eq!(tree.file_count(), 5);
3028 assert_eq!(
3029 tree.inode_for_path("fennel/grape"),
3030 tree.inode_for_path("finnochio/grape")
3031 );
3032 });
3033
3034 let cancel_flag = Default::default();
3035 let results = project
3036 .read_with(&cx, |project, cx| {
3037 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3038 })
3039 .await;
3040 assert_eq!(
3041 results
3042 .into_iter()
3043 .map(|result| result.path)
3044 .collect::<Vec<Arc<Path>>>(),
3045 vec![
3046 PathBuf::from("banana/carrot/date").into(),
3047 PathBuf::from("banana/carrot/endive").into(),
3048 ]
3049 );
3050 }
3051
    // Exercises the disk-based diagnostics lifecycle against a fake language
    // server: overlapping progress reports must collapse into a single
    // started/updated/finished event sequence, and published diagnostics must
    // surface on the affected buffer.
    #[gpui::test]
    async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
        let (language_server_config, mut fake_server) = LanguageServerConfig::fake(&cx).await;
        let progress_token = language_server_config
            .disk_based_diagnostics_progress_token
            .clone()
            .unwrap();

        let mut languages = LanguageRegistry::new();
        languages.add(Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        )));

        let dir = temp_tree(json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }));

        let http_client = FakeHttpClient::with_404_response();
        let client = Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));

        let project = cx.update(|cx| {
            Project::local(
                client,
                user_store,
                Arc::new(languages),
                Arc::new(RealFs),
                cx,
            )
        });

        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(&mut cx, |project, cx| {
                project.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Path::new("b.rs").into(),
                    },
                    cx,
                )
            })
            .await
            .unwrap();

        let mut events = subscribe(&project, &mut cx);

        fake_server.start_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Overlapping progress reports: the second start/end pair must not
        // produce additional started/finished events.
        fake_server.start_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        fake_server.start_progress(&progress_token).await;

        fake_server
            .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path(dir.path().join("a.rs")).unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            })
            .await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated(ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("a.rs"))
            })
        );

        // Only once the last outstanding progress report ends do the
        // updated/finished events fire.
        fake_server.end_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        // The published diagnostic must be visible on the buffer snapshot.
        buffer.read_with(&cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });
    }
3183
    // Fuzzy path search over a worktree containing only directories must
    // return no results, since only files are candidates.
    #[gpui::test]
    async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "dir1": {},
                "dir2": {
                    "dir3": {}
                }
            }
        }));

        let project = Project::test(Arc::new(RealFs), &mut cx);
        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(&dir.path(), false, cx)
            })
            .await
            .unwrap();

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        let cancel_flag = Default::default();
        let results = project
            .read_with(&cx, |project, cx| {
                project.match_paths("dir", false, false, 10, &cancel_flag, cx)
            })
            .await;

        assert!(results.is_empty());
    }
3215
    #[gpui::test]
    async fn test_definition(mut cx: gpui::TestAppContext) {
        // Go-to-definition: when the definition target lives outside the
        // current worktree, resolving it should add a new weak worktree for
        // the target file, and dropping the definition should release it.
        let (language_server_config, mut fake_server) = LanguageServerConfig::fake(&cx).await;

        let mut languages = LanguageRegistry::new();
        languages.add(Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        )));

        let dir = temp_tree(json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }));
        let dir_path = dir.path().to_path_buf();

        let http_client = FakeHttpClient::with_404_response();
        let client = Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project = cx.update(|cx| {
            Project::local(
                client,
                user_store,
                Arc::new(languages),
                Arc::new(RealFs),
                cx,
            )
        });

        // The worktree is rooted at b.rs itself, so a.rs is outside of it.
        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(dir.path().join("b.rs"), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // In a single-file worktree, the root entry has an empty relative path.
        let buffer = project
            .update(&mut cx, |project, cx| {
                project.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Path::new("").into(),
                    },
                    cx,
                )
            })
            .await
            .unwrap();

        // Have the fake language server answer the definition request with a
        // location inside a.rs, after checking the request's own coordinates.
        fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params| {
            let params = params.text_document_position_params;
            assert_eq!(
                params.text_document.uri.to_file_path().unwrap(),
                dir_path.join("b.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 22));

            Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
                lsp::Url::from_file_path(dir_path.join("a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            )))
        });

        let mut definitions = project
            .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
            .await
            .unwrap();

        assert_eq!(definitions.len(), 1);
        let definition = definitions.pop().unwrap();
        cx.update(|cx| {
            let target_buffer = definition.target_buffer.read(cx);
            assert_eq!(
                target_buffer
                    .file()
                    .unwrap()
                    .as_local()
                    .unwrap()
                    .abs_path(cx),
                dir.path().join("a.rs")
            );
            assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
            // Resolving the definition created a second worktree for a.rs,
            // marked as weak (`true`).
            assert_eq!(
                list_worktrees(&project, cx),
                [
                    (dir.path().join("b.rs"), false),
                    (dir.path().join("a.rs"), true)
                ]
            );

            drop(definition);
        });
        // Dropping the definition releases the weak worktree for a.rs.
        cx.read(|cx| {
            assert_eq!(
                list_worktrees(&project, cx),
                [(dir.path().join("b.rs"), false)]
            );
        });

        // Helper: list each worktree's absolute path and its weak flag.
        fn list_worktrees(project: &ModelHandle<Project>, cx: &AppContext) -> Vec<(PathBuf, bool)> {
            project
                .read(cx)
                .worktrees(cx)
                .map(|worktree| {
                    let worktree = worktree.read(cx);
                    (
                        worktree.as_local().unwrap().abs_path().to_path_buf(),
                        worktree.is_weak(),
                    )
                })
                .collect::<Vec<_>>()
        }
    }
3337
3338 #[gpui::test]
3339 async fn test_save_file(mut cx: gpui::TestAppContext) {
3340 let fs = Arc::new(FakeFs::new(cx.background()));
3341 fs.insert_tree(
3342 "/dir",
3343 json!({
3344 "file1": "the old contents",
3345 }),
3346 )
3347 .await;
3348
3349 let project = Project::test(fs.clone(), &mut cx);
3350 let worktree_id = project
3351 .update(&mut cx, |p, cx| {
3352 p.find_or_create_local_worktree("/dir", false, cx)
3353 })
3354 .await
3355 .unwrap()
3356 .0
3357 .read_with(&cx, |tree, _| tree.id());
3358
3359 let buffer = project
3360 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3361 .await
3362 .unwrap();
3363 buffer
3364 .update(&mut cx, |buffer, cx| {
3365 assert_eq!(buffer.text(), "the old contents");
3366 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3367 buffer.save(cx)
3368 })
3369 .await
3370 .unwrap();
3371
3372 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3373 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3374 }
3375
3376 #[gpui::test]
3377 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
3378 let fs = Arc::new(FakeFs::new(cx.background()));
3379 fs.insert_tree(
3380 "/dir",
3381 json!({
3382 "file1": "the old contents",
3383 }),
3384 )
3385 .await;
3386
3387 let project = Project::test(fs.clone(), &mut cx);
3388 let worktree_id = project
3389 .update(&mut cx, |p, cx| {
3390 p.find_or_create_local_worktree("/dir/file1", false, cx)
3391 })
3392 .await
3393 .unwrap()
3394 .0
3395 .read_with(&cx, |tree, _| tree.id());
3396
3397 let buffer = project
3398 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3399 .await
3400 .unwrap();
3401 buffer
3402 .update(&mut cx, |buffer, cx| {
3403 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3404 buffer.save(cx)
3405 })
3406 .await
3407 .unwrap();
3408
3409 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3410 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3411 }
3412
3413 #[gpui::test(retries = 5)]
3414 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3415 let dir = temp_tree(json!({
3416 "a": {
3417 "file1": "",
3418 "file2": "",
3419 "file3": "",
3420 },
3421 "b": {
3422 "c": {
3423 "file4": "",
3424 "file5": "",
3425 }
3426 }
3427 }));
3428
3429 let project = Project::test(Arc::new(RealFs), &mut cx);
3430 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3431
3432 let (tree, _) = project
3433 .update(&mut cx, |p, cx| {
3434 p.find_or_create_local_worktree(dir.path(), false, cx)
3435 })
3436 .await
3437 .unwrap();
3438 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3439
3440 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3441 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3442 async move { buffer.await.unwrap() }
3443 };
3444 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3445 tree.read_with(cx, |tree, _| {
3446 tree.entry_for_path(path)
3447 .expect(&format!("no entry for path {}", path))
3448 .id
3449 })
3450 };
3451
3452 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3453 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3454 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3455 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3456
3457 let file2_id = id_for_path("a/file2", &cx);
3458 let file3_id = id_for_path("a/file3", &cx);
3459 let file4_id = id_for_path("b/c/file4", &cx);
3460
3461 // Wait for the initial scan.
3462 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3463 .await;
3464
3465 // Create a remote copy of this worktree.
3466 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
3467 let (remote, load_task) = cx.update(|cx| {
3468 Worktree::remote(
3469 1,
3470 1,
3471 initial_snapshot.to_proto(&Default::default(), Default::default()),
3472 rpc.clone(),
3473 cx,
3474 )
3475 });
3476 load_task.await;
3477
3478 cx.read(|cx| {
3479 assert!(!buffer2.read(cx).is_dirty());
3480 assert!(!buffer3.read(cx).is_dirty());
3481 assert!(!buffer4.read(cx).is_dirty());
3482 assert!(!buffer5.read(cx).is_dirty());
3483 });
3484
3485 // Rename and delete files and directories.
3486 tree.flush_fs_events(&cx).await;
3487 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3488 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3489 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3490 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3491 tree.flush_fs_events(&cx).await;
3492
3493 let expected_paths = vec![
3494 "a",
3495 "a/file1",
3496 "a/file2.new",
3497 "b",
3498 "d",
3499 "d/file3",
3500 "d/file4",
3501 ];
3502
3503 cx.read(|app| {
3504 assert_eq!(
3505 tree.read(app)
3506 .paths()
3507 .map(|p| p.to_str().unwrap())
3508 .collect::<Vec<_>>(),
3509 expected_paths
3510 );
3511
3512 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3513 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3514 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3515
3516 assert_eq!(
3517 buffer2.read(app).file().unwrap().path().as_ref(),
3518 Path::new("a/file2.new")
3519 );
3520 assert_eq!(
3521 buffer3.read(app).file().unwrap().path().as_ref(),
3522 Path::new("d/file3")
3523 );
3524 assert_eq!(
3525 buffer4.read(app).file().unwrap().path().as_ref(),
3526 Path::new("d/file4")
3527 );
3528 assert_eq!(
3529 buffer5.read(app).file().unwrap().path().as_ref(),
3530 Path::new("b/c/file5")
3531 );
3532
3533 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3534 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3535 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3536 assert!(buffer5.read(app).file().unwrap().is_deleted());
3537 });
3538
3539 // Update the remote worktree. Check that it becomes consistent with the
3540 // local worktree.
3541 remote.update(&mut cx, |remote, cx| {
3542 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
3543 &initial_snapshot,
3544 1,
3545 1,
3546 0,
3547 true,
3548 );
3549 remote
3550 .as_remote_mut()
3551 .unwrap()
3552 .snapshot
3553 .apply_remote_update(update_message)
3554 .unwrap();
3555
3556 assert_eq!(
3557 remote
3558 .paths()
3559 .map(|p| p.to_str().unwrap())
3560 .collect::<Vec<_>>(),
3561 expected_paths
3562 );
3563 });
3564 }
3565
3566 #[gpui::test]
3567 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3568 let fs = Arc::new(FakeFs::new(cx.background()));
3569 fs.insert_tree(
3570 "/the-dir",
3571 json!({
3572 "a.txt": "a-contents",
3573 "b.txt": "b-contents",
3574 }),
3575 )
3576 .await;
3577
3578 let project = Project::test(fs.clone(), &mut cx);
3579 let worktree_id = project
3580 .update(&mut cx, |p, cx| {
3581 p.find_or_create_local_worktree("/the-dir", false, cx)
3582 })
3583 .await
3584 .unwrap()
3585 .0
3586 .read_with(&cx, |tree, _| tree.id());
3587
3588 // Spawn multiple tasks to open paths, repeating some paths.
3589 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3590 (
3591 p.open_buffer((worktree_id, "a.txt"), cx),
3592 p.open_buffer((worktree_id, "b.txt"), cx),
3593 p.open_buffer((worktree_id, "a.txt"), cx),
3594 )
3595 });
3596
3597 let buffer_a_1 = buffer_a_1.await.unwrap();
3598 let buffer_a_2 = buffer_a_2.await.unwrap();
3599 let buffer_b = buffer_b.await.unwrap();
3600 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3601 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3602
3603 // There is only one buffer per path.
3604 let buffer_a_id = buffer_a_1.id();
3605 assert_eq!(buffer_a_2.id(), buffer_a_id);
3606
3607 // Open the same path again while it is still open.
3608 drop(buffer_a_1);
3609 let buffer_a_3 = project
3610 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3611 .await
3612 .unwrap();
3613
3614 // There's still only one buffer per path.
3615 assert_eq!(buffer_a_3.id(), buffer_a_id);
3616 }
3617
    #[gpui::test]
    async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
        // Exercises the buffer's dirty-state tracking and the exact event
        // sequences it emits: editing marks it dirty, saving clears it, and
        // deleting the file on disk dirties it again.
        use std::fs;

        let dir = temp_tree(json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }));

        let project = Project::test(Arc::new(RealFs), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());

        worktree.flush_fs_events(&cx).await;
        worktree
            .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let buffer1 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
            .await
            .unwrap();
        // Record every buffer event so the emitted sequence can be asserted.
        let events = Rc::new(RefCell::new(Vec::new()));

        // initially, the buffer isn't dirty.
        buffer1.update(&mut cx, |buffer, cx| {
            cx.subscribe(&buffer1, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();

            assert!(!buffer.is_dirty());
            assert!(events.borrow().is_empty());

            buffer.edit(vec![1..2], "", cx);
        });

        // after the first edit, the buffer is dirty, and emits a dirtied event.
        buffer1.update(&mut cx, |buffer, cx| {
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[language::Event::Edited, language::Event::Dirtied]
            );
            events.borrow_mut().clear();
            buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
        });

        // after saving, the buffer is not dirty, and emits a saved event.
        buffer1.update(&mut cx, |buffer, cx| {
            assert!(!buffer.is_dirty());
            assert_eq!(*events.borrow(), &[language::Event::Saved]);
            events.borrow_mut().clear();

            buffer.edit(vec![1..1], "B", cx);
            buffer.edit(vec![2..2], "D", cx);
        });

        // after editing again, the buffer is dirty, and emits another dirty event.
        buffer1.update(&mut cx, |buffer, cx| {
            assert!(buffer.text() == "aBDc");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[
                    language::Event::Edited,
                    language::Event::Dirtied,
                    language::Event::Edited,
                ],
            );
            events.borrow_mut().clear();

            // TODO - currently, after restoring the buffer to its
            // previously-saved state, it is still considered dirty.
            buffer.edit([1..3], "", cx);
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
        });

        assert_eq!(*events.borrow(), &[language::Event::Edited]);

        // When a file is deleted, the buffer is considered dirty.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer2 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
            .await
            .unwrap();
        buffer2.update(&mut cx, |_, cx| {
            cx.subscribe(&buffer2, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        fs::remove_file(dir.path().join("file2")).unwrap();
        buffer2.condition(&cx, |b, _| b.is_dirty()).await;
        assert_eq!(
            *events.borrow(),
            &[language::Event::Dirtied, language::Event::FileHandleChanged]
        );

        // When a file is already dirty when deleted, we don't emit a Dirtied event.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer3 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
            .await
            .unwrap();
        buffer3.update(&mut cx, |_, cx| {
            cx.subscribe(&buffer3, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        worktree.flush_fs_events(&cx).await;
        buffer3.update(&mut cx, |buffer, cx| {
            buffer.edit(Some(0..0), "x", cx);
        });
        events.borrow_mut().clear();
        fs::remove_file(dir.path().join("file3")).unwrap();
        buffer3
            .condition(&cx, |_, _| !events.borrow().is_empty())
            .await;
        assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
        cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
    }
3754
    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
        // Verifies how a buffer reacts to its backing file changing on disk:
        // a clean buffer reloads the new contents, while a modified buffer
        // keeps its edits and is flagged as having a conflict instead.
        use std::fs;

        let initial_contents = "aaa\nbbbbb\nc\n";
        let dir = temp_tree(json!({ "the-file": initial_contents }));

        let project = Project::test(Arc::new(RealFs), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        worktree
            .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let abs_path = dir.path().join("the-file");
        let buffer = project
            .update(&mut cx, |p, cx| {
                p.open_buffer((worktree_id, "the-file"), cx)
            })
            .await
            .unwrap();

        // TODO
        // Add a cursor on each row.
        // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
        //     assert!(!buffer.is_dirty());
        //     buffer.add_selection_set(
        //         &(0..3)
        //             .map(|row| Selection {
        //                 id: row as usize,
        //                 start: Point::new(row, 1),
        //                 end: Point::new(row, 1),
        //                 reversed: false,
        //                 goal: SelectionGoal::None,
        //             })
        //             .collect::<Vec<_>>(),
        //         cx,
        //     )
        // });

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(&cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(&mut cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer
        buffer.update(&mut cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }
3854
    #[gpui::test]
    async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
        // Verifies that LSP diagnostics linked via `related_information` are
        // grouped: each primary diagnostic and its hints share a group_id, and
        // `diagnostic_group` returns all members of one group.
        let fs = Arc::new(FakeFs::new(cx.background()));
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        // Five raw LSP diagnostics: a warning ("error 1") with one hint, and
        // an error ("error 2") with two hints. Primaries and hints point at
        // each other through `related_information`.
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(&mut cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());

        // All diagnostics, ordered by position. Group 0 is "error 1" and its
        // hint; group 1 is "error 2" and its two hints.
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        // Querying a single group returns only that group's members.
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
4113}