pub mod fs;
mod ignore;
pub mod worktree;

use anyhow::{anyhow, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, HashMap, HashSet};
use futures::Future;
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
    UpgradeModelHandle, WeakModelHandle,
};
use language::{
    point_from_lsp,
    proto::{deserialize_anchor, serialize_anchor},
    range_from_lsp, AnchorRangeExt, Bias, Buffer, CodeAction, Completion, CompletionLabel,
    Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
    ToLspPosition, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{DiagnosticSeverity, LanguageServer};
use postage::{broadcast, prelude::Stream, sink::Sink, watch};
use smol::block_on;
use std::{
    convert::TryInto,
    ops::Range,
    path::{Path, PathBuf},
    sync::{atomic::AtomicBool, Arc},
    time::Instant,
};
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use fs::*;
pub use worktree::*;

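/// Per-project state: the set of worktrees being edited, the buffers that are
/// open in them, the language servers running per worktree and language,
/// collaborator metadata, and the client state that distinguishes a locally
/// hosted project from one joined over RPC.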
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntry>,
    languages: Arc<LanguageRegistry>,
    language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
    client: Arc<client::Client>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    language_servers_with_diagnostics_running: isize,
    open_buffers: HashMap<usize, OpenBuffer>,
    opened_buffer: broadcast::Sender<()>,
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
}

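/// A buffer the project knows about: either an already-loaded buffer (held
/// weakly, so it can be dropped when closed elsewhere), or a backlog of
/// operations received for a buffer that is not loaded yet, which are applied
/// once the buffer is registered (see `register_buffer`).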
enum OpenBuffer {
    Loaded(WeakModelHandle<Buffer>),
    Operations(Vec<Operation>),
}

enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

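/// Whether this project is hosted on this machine (and may be shared once it
/// has been assigned a remote id) or was joined over RPC as a replica of a
/// project hosted by another peer.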
enum ProjectClientState {
    Local {
        is_shared: bool,
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
    },
}

#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}

#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntry>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
}

#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}

#[derive(Debug)]
pub struct Definition {
    pub target_buffer: ModelHandle<Buffer>,
    pub target_range: Range<language::Anchor>,
}

#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
        let mut this = Self {
            error_count: 0,
            warning_count: 0,
            info_count: 0,
            hint_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    DiagnosticSeverity::INFORMATION => this.info_count += 1,
                    DiagnosticSeverity::HINT => this.hint_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
            info_count: self.info_count as u32,
            hint_count: self.hint_count as u32,
        }
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ProjectEntry {
    pub worktree_id: WorktreeId,
    pub entry_id: usize,
}

impl Project {
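    /// Create a project hosted on this machine. A background task watches the
    /// client's connection status and, whenever a connection is established,
    /// registers the project (and each of its worktrees) with the server and
    /// records the resulting remote id.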
164 pub fn local(
165 client: Arc<Client>,
166 user_store: ModelHandle<UserStore>,
167 languages: Arc<LanguageRegistry>,
168 fs: Arc<dyn Fs>,
169 cx: &mut MutableAppContext,
170 ) -> ModelHandle<Self> {
171 cx.add_model(|cx: &mut ModelContext<Self>| {
172 let (remote_id_tx, remote_id_rx) = watch::channel();
173 let _maintain_remote_id_task = cx.spawn_weak({
174 let rpc = client.clone();
175 move |this, mut cx| {
176 async move {
177 let mut status = rpc.status();
178 while let Some(status) = status.recv().await {
179 if let Some(this) = this.upgrade(&cx) {
180 let remote_id = if let client::Status::Connected { .. } = status {
181 let response = rpc.request(proto::RegisterProject {}).await?;
182 Some(response.project_id)
183 } else {
184 None
185 };
186
187 if let Some(project_id) = remote_id {
188 let mut registrations = Vec::new();
189 this.update(&mut cx, |this, cx| {
190 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
191 registrations.push(worktree.update(
192 cx,
193 |worktree, cx| {
194 let worktree = worktree.as_local_mut().unwrap();
195 worktree.register(project_id, cx)
196 },
197 ));
198 }
199 });
200 for registration in registrations {
201 registration.await?;
202 }
203 }
204 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
205 }
206 }
207 Ok(())
208 }
209 .log_err()
210 }
211 });
212
213 Self {
214 worktrees: Default::default(),
215 collaborators: Default::default(),
216 open_buffers: Default::default(),
217 loading_buffers: Default::default(),
218 shared_buffers: Default::default(),
219 client_state: ProjectClientState::Local {
220 is_shared: false,
221 remote_id_tx,
222 remote_id_rx,
223 _maintain_remote_id_task,
224 },
225 opened_buffer: broadcast::channel(1).0,
226 subscriptions: Vec::new(),
227 active_entry: None,
228 languages,
229 client,
230 user_store,
231 fs,
232 language_servers_with_diagnostics_running: 0,
233 language_servers: Default::default(),
234 }
235 })
236 }
237
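    /// Join a project hosted by another peer, identified by `remote_id`. This
    /// authenticates and connects, loads the project's worktrees and
    /// collaborators, and subscribes to the RPC messages needed to keep this
    /// replica up to date.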
238 pub async fn remote(
239 remote_id: u64,
240 client: Arc<Client>,
241 user_store: ModelHandle<UserStore>,
242 languages: Arc<LanguageRegistry>,
243 fs: Arc<dyn Fs>,
244 cx: &mut AsyncAppContext,
245 ) -> Result<ModelHandle<Self>> {
246 client.authenticate_and_connect(&cx).await?;
247
248 let response = client
249 .request(proto::JoinProject {
250 project_id: remote_id,
251 })
252 .await?;
253
254 let replica_id = response.replica_id as ReplicaId;
255
256 let mut worktrees = Vec::new();
257 for worktree in response.worktrees {
258 let (worktree, load_task) = cx
259 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
260 worktrees.push(worktree);
261 load_task.detach();
262 }
263
264 let user_ids = response
265 .collaborators
266 .iter()
267 .map(|peer| peer.user_id)
268 .collect();
269 user_store
270 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
271 .await?;
272 let mut collaborators = HashMap::default();
273 for message in response.collaborators {
274 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
275 collaborators.insert(collaborator.peer_id, collaborator);
276 }
277
278 Ok(cx.add_model(|cx| {
279 let mut this = Self {
280 worktrees: Vec::new(),
281 open_buffers: Default::default(),
282 loading_buffers: Default::default(),
283 opened_buffer: broadcast::channel(1).0,
284 shared_buffers: Default::default(),
285 active_entry: None,
286 collaborators,
287 languages,
288 user_store,
289 fs,
290 subscriptions: vec![
291 client.add_entity_message_handler(remote_id, cx, Self::handle_unshare_project),
292 client.add_entity_message_handler(remote_id, cx, Self::handle_add_collaborator),
293 client.add_entity_message_handler(
294 remote_id,
295 cx,
296 Self::handle_remove_collaborator,
297 ),
298 client.add_entity_message_handler(remote_id, cx, Self::handle_share_worktree),
299 client.add_entity_message_handler(
300 remote_id,
301 cx,
302 Self::handle_unregister_worktree,
303 ),
304 client.add_entity_message_handler(remote_id, cx, Self::handle_update_worktree),
305 client.add_entity_message_handler(
306 remote_id,
307 cx,
308 Self::handle_update_diagnostic_summary,
309 ),
310 client.add_entity_message_handler(
311 remote_id,
312 cx,
313 Self::handle_disk_based_diagnostics_updating,
314 ),
315 client.add_entity_message_handler(
316 remote_id,
317 cx,
318 Self::handle_disk_based_diagnostics_updated,
319 ),
320 client.add_entity_message_handler(remote_id, cx, Self::handle_update_buffer),
321 client.add_entity_message_handler(
322 remote_id,
323 cx,
324 Self::handle_update_buffer_file,
325 ),
326 client.add_entity_message_handler(remote_id, cx, Self::handle_buffer_reloaded),
327 client.add_entity_message_handler(remote_id, cx, Self::handle_buffer_saved),
328 ],
329 client,
330 client_state: ProjectClientState::Remote {
331 sharing_has_stopped: false,
332 remote_id,
333 replica_id,
334 },
335 language_servers_with_diagnostics_running: 0,
336 language_servers: Default::default(),
337 };
338 for worktree in worktrees {
339 this.add_worktree(&worktree, cx);
340 }
341 this
342 }))
343 }
344
345 #[cfg(any(test, feature = "test-support"))]
346 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
347 let languages = Arc::new(LanguageRegistry::new());
348 let http_client = client::test::FakeHttpClient::with_404_response();
349 let client = client::Client::new(http_client.clone());
350 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
351 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
352 }
353
354 pub fn fs(&self) -> &Arc<dyn Fs> {
355 &self.fs
356 }
357
358 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
359 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
360 *remote_id_tx.borrow_mut() = remote_id;
361 }
362
363 self.subscriptions.clear();
364 if let Some(remote_id) = remote_id {
365 let client = &self.client;
366 self.subscriptions.extend([
367 client.add_entity_request_handler(remote_id, cx, Self::handle_open_buffer),
368 client.add_entity_message_handler(remote_id, cx, Self::handle_close_buffer),
369 client.add_entity_message_handler(remote_id, cx, Self::handle_add_collaborator),
370 client.add_entity_message_handler(remote_id, cx, Self::handle_remove_collaborator),
371 client.add_entity_message_handler(remote_id, cx, Self::handle_update_worktree),
372 client.add_entity_message_handler(remote_id, cx, Self::handle_update_buffer),
373 client.add_entity_request_handler(remote_id, cx, Self::handle_save_buffer),
374 client.add_entity_message_handler(remote_id, cx, Self::handle_buffer_saved),
375 client.add_entity_request_handler(remote_id, cx, Self::handle_format_buffers),
376 client.add_entity_request_handler(remote_id, cx, Self::handle_get_completions),
377 client.add_entity_request_handler(
378 remote_id,
379 cx,
380 Self::handle_apply_additional_edits_for_completion,
381 ),
382 client.add_entity_request_handler(remote_id, cx, Self::handle_get_code_actions),
383 client.add_entity_request_handler(remote_id, cx, Self::handle_apply_code_action),
384 client.add_entity_request_handler(remote_id, cx, Self::handle_get_definition),
385 ]);
386 }
387 }
388
389 pub fn remote_id(&self) -> Option<u64> {
390 match &self.client_state {
391 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
392 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
393 }
394 }
395
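    /// Resolve to the project's remote id, waiting for one to be assigned if
    /// this is a local project that has not been registered with the server
    /// yet.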
396 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
397 let mut id = None;
398 let mut watch = None;
399 match &self.client_state {
400 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
401 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
402 }
403
404 async move {
405 if let Some(id) = id {
406 return id;
407 }
408 let mut watch = watch.unwrap();
409 loop {
410 let id = *watch.borrow();
411 if let Some(id) = id {
412 return id;
413 }
414 watch.recv().await;
415 }
416 }
417 }
418
419 pub fn replica_id(&self) -> ReplicaId {
420 match &self.client_state {
421 ProjectClientState::Local { .. } => 0,
422 ProjectClientState::Remote { replica_id, .. } => *replica_id,
423 }
424 }
425
426 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
427 &self.collaborators
428 }
429
430 pub fn worktrees<'a>(
431 &'a self,
432 cx: &'a AppContext,
433 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
434 self.worktrees
435 .iter()
436 .filter_map(move |worktree| worktree.upgrade(cx))
437 }
438
439 pub fn worktree_for_id(
440 &self,
441 id: WorktreeId,
442 cx: &AppContext,
443 ) -> Option<ModelHandle<Worktree>> {
444 self.worktrees(cx)
445 .find(|worktree| worktree.read(cx).id() == id)
446 }
447
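    /// Start sharing this local project with collaborators: mark it as shared,
    /// notify the server, and share each worktree under the project's remote
    /// id. Fails if the project has no remote id or is itself a remote
    /// project.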
448 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
449 let rpc = self.client.clone();
450 cx.spawn(|this, mut cx| async move {
451 let project_id = this.update(&mut cx, |this, _| {
452 if let ProjectClientState::Local {
453 is_shared,
454 remote_id_rx,
455 ..
456 } = &mut this.client_state
457 {
458 *is_shared = true;
459 remote_id_rx
460 .borrow()
461 .ok_or_else(|| anyhow!("no project id"))
462 } else {
463 Err(anyhow!("can't share a remote project"))
464 }
465 })?;
466
467 rpc.request(proto::ShareProject { project_id }).await?;
468 let mut tasks = Vec::new();
469 this.update(&mut cx, |this, cx| {
470 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
471 worktree.update(cx, |worktree, cx| {
472 let worktree = worktree.as_local_mut().unwrap();
473 tasks.push(worktree.share(project_id, cx));
474 });
475 }
476 });
477 for task in tasks {
478 task.await?;
479 }
480 this.update(&mut cx, |_, cx| cx.notify());
481 Ok(())
482 })
483 }
484
485 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
486 let rpc = self.client.clone();
487 cx.spawn(|this, mut cx| async move {
488 let project_id = this.update(&mut cx, |this, _| {
489 if let ProjectClientState::Local {
490 is_shared,
491 remote_id_rx,
492 ..
493 } = &mut this.client_state
494 {
495 *is_shared = false;
496 remote_id_rx
497 .borrow()
498 .ok_or_else(|| anyhow!("no project id"))
499 } else {
500 Err(anyhow!("can't share a remote project"))
501 }
502 })?;
503
504 rpc.send(proto::UnshareProject { project_id })?;
505 this.update(&mut cx, |this, cx| {
506 this.collaborators.clear();
507 this.shared_buffers.clear();
508 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
509 worktree.update(cx, |worktree, _| {
510 worktree.as_local_mut().unwrap().unshare();
511 });
512 }
513 cx.notify()
514 });
515 Ok(())
516 })
517 }
518
519 pub fn is_read_only(&self) -> bool {
520 match &self.client_state {
521 ProjectClientState::Local { .. } => false,
522 ProjectClientState::Remote {
523 sharing_has_stopped,
524 ..
525 } => *sharing_has_stopped,
526 }
527 }
528
529 pub fn is_local(&self) -> bool {
530 match &self.client_state {
531 ProjectClientState::Local { .. } => true,
532 ProjectClientState::Remote { .. } => false,
533 }
534 }
535
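    /// Open the buffer at the given project path, loading it if necessary. If
    /// the buffer is already open it is returned directly, and concurrent
    /// requests for a path that is still loading share the same load task.
    ///
    /// A minimal usage sketch (illustrative only; `project`, `worktree_id`,
    /// and `cx` are assumed to exist in the caller and are not defined in this
    /// file):
    ///
    /// ```ignore
    /// let open_task = project.update(cx, |project, cx| {
    ///     project.open_buffer(
    ///         ProjectPath {
    ///             worktree_id,
    ///             path: Path::new("src/lib.rs").into(),
    ///         },
    ///         cx,
    ///     )
    /// });
    /// ```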
536 pub fn open_buffer(
537 &mut self,
538 path: impl Into<ProjectPath>,
539 cx: &mut ModelContext<Self>,
540 ) -> Task<Result<ModelHandle<Buffer>>> {
541 let project_path = path.into();
542 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
543 worktree
544 } else {
545 return Task::ready(Err(anyhow!("no such worktree")));
546 };
547
548 // If there is already a buffer for the given path, then return it.
549 let existing_buffer = self.get_open_buffer(&project_path, cx);
550 if let Some(existing_buffer) = existing_buffer {
551 return Task::ready(Ok(existing_buffer));
552 }
553
554 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
555 // If the given path is already being loaded, then wait for that existing
556 // task to complete and return the same buffer.
557 hash_map::Entry::Occupied(e) => e.get().clone(),
558
559 // Otherwise, record the fact that this path is now being loaded.
560 hash_map::Entry::Vacant(entry) => {
561 let (mut tx, rx) = postage::watch::channel();
562 entry.insert(rx.clone());
563
564 let load_buffer = if worktree.read(cx).is_local() {
565 self.open_local_buffer(&project_path.path, &worktree, cx)
566 } else {
567 self.open_remote_buffer(&project_path.path, &worktree, cx)
568 };
569
570 cx.spawn(move |this, mut cx| async move {
571 let load_result = load_buffer.await;
572 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
573 // Record the fact that the buffer is no longer loading.
574 this.loading_buffers.remove(&project_path);
575 let buffer = load_result.map_err(Arc::new)?;
576 Ok(buffer)
577 }));
578 })
579 .detach();
580 rx
581 }
582 };
583
584 cx.foreground().spawn(async move {
585 loop {
586 if let Some(result) = loading_watch.borrow().as_ref() {
587 match result {
588 Ok(buffer) => return Ok(buffer.clone()),
589 Err(error) => return Err(anyhow!("{}", error)),
590 }
591 }
592 loading_watch.recv().await;
593 }
594 })
595 }
596
597 fn open_local_buffer(
598 &mut self,
599 path: &Arc<Path>,
600 worktree: &ModelHandle<Worktree>,
601 cx: &mut ModelContext<Self>,
602 ) -> Task<Result<ModelHandle<Buffer>>> {
603 let load_buffer = worktree.update(cx, |worktree, cx| {
604 let worktree = worktree.as_local_mut().unwrap();
605 worktree.load_buffer(path, cx)
606 });
607 let worktree = worktree.downgrade();
608 cx.spawn(|this, mut cx| async move {
609 let buffer = load_buffer.await?;
610 let worktree = worktree
611 .upgrade(&cx)
612 .ok_or_else(|| anyhow!("worktree was removed"))?;
613 this.update(&mut cx, |this, cx| {
614 this.register_buffer(&buffer, Some(&worktree), cx)
615 })?;
616 Ok(buffer)
617 })
618 }
619
620 fn open_remote_buffer(
621 &mut self,
622 path: &Arc<Path>,
623 worktree: &ModelHandle<Worktree>,
624 cx: &mut ModelContext<Self>,
625 ) -> Task<Result<ModelHandle<Buffer>>> {
626 let rpc = self.client.clone();
627 let project_id = self.remote_id().unwrap();
628 let remote_worktree_id = worktree.read(cx).id();
629 let path = path.clone();
630 let path_string = path.to_string_lossy().to_string();
631 cx.spawn(|this, mut cx| async move {
632 let response = rpc
633 .request(proto::OpenBuffer {
634 project_id,
635 worktree_id: remote_worktree_id.to_proto(),
636 path: path_string,
637 })
638 .await?;
639 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
640 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
641 .await
642 })
643 }
644
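    /// Open a buffer for a file URI reported by a language server. If the path
    /// is not covered by an existing worktree, a new weak worktree is created
    /// for it and the originating language server is reused for that worktree.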
645 fn open_local_buffer_from_lsp_path(
646 &mut self,
647 abs_path: lsp::Url,
648 lang_name: String,
649 lang_server: Arc<LanguageServer>,
650 cx: &mut ModelContext<Self>,
651 ) -> Task<Result<ModelHandle<Buffer>>> {
652 cx.spawn(|this, mut cx| async move {
653 let abs_path = abs_path
654 .to_file_path()
655 .map_err(|_| anyhow!("can't convert URI to path"))?;
656 let (worktree, relative_path) = if let Some(result) =
657 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
658 {
659 result
660 } else {
661 let worktree = this
662 .update(&mut cx, |this, cx| {
663 this.create_local_worktree(&abs_path, true, cx)
664 })
665 .await?;
666 this.update(&mut cx, |this, cx| {
667 this.language_servers
668 .insert((worktree.read(cx).id(), lang_name), lang_server);
669 });
670 (worktree, PathBuf::new())
671 };
672
673 let project_path = ProjectPath {
674 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
675 path: relative_path.into(),
676 };
677 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
678 .await
679 })
680 }
681
682 pub fn save_buffer_as(
683 &self,
684 buffer: ModelHandle<Buffer>,
685 abs_path: PathBuf,
686 cx: &mut ModelContext<Project>,
687 ) -> Task<Result<()>> {
688 let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
689 cx.spawn(|this, mut cx| async move {
690 let (worktree, path) = worktree_task.await?;
691 worktree
692 .update(&mut cx, |worktree, cx| {
693 worktree
694 .as_local_mut()
695 .unwrap()
696 .save_buffer_as(buffer.clone(), path, cx)
697 })
698 .await?;
699 this.update(&mut cx, |this, cx| {
700 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
701 });
702 Ok(())
703 })
704 }
705
706 #[cfg(any(test, feature = "test-support"))]
707 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
708 let path = path.into();
709 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
710 self.open_buffers.iter().any(|(_, buffer)| {
711 if let Some(buffer) = buffer.upgrade(cx) {
712 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
713 if file.worktree == worktree && file.path() == &path.path {
714 return true;
715 }
716 }
717 }
718 false
719 })
720 } else {
721 false
722 }
723 }
724
725 fn get_open_buffer(
726 &mut self,
727 path: &ProjectPath,
728 cx: &mut ModelContext<Self>,
729 ) -> Option<ModelHandle<Buffer>> {
730 let mut result = None;
731 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
732 self.open_buffers.retain(|_, buffer| {
733 if let Some(buffer) = buffer.upgrade(cx) {
734 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
735 if file.worktree == worktree && file.path() == &path.path {
736 result = Some(buffer);
737 }
738 }
739 true
740 } else {
741 false
742 }
743 });
744 result
745 }
746
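    /// Record a newly opened buffer in `open_buffers`. Any operations that
    /// were buffered for it while it was loading are applied now; registering
    /// the same buffer twice is an error.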
747 fn register_buffer(
748 &mut self,
749 buffer: &ModelHandle<Buffer>,
750 worktree: Option<&ModelHandle<Worktree>>,
751 cx: &mut ModelContext<Self>,
752 ) -> Result<()> {
753 match self.open_buffers.insert(
754 buffer.read(cx).remote_id() as usize,
755 OpenBuffer::Loaded(buffer.downgrade()),
756 ) {
757 None => {}
758 Some(OpenBuffer::Operations(operations)) => {
759 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
760 }
761 Some(OpenBuffer::Loaded(_)) => Err(anyhow!("registered the same buffer twice"))?,
762 }
763 self.assign_language_to_buffer(&buffer, worktree, cx);
764 Ok(())
765 }
766
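    /// Pick a language for the buffer based on its path, start (or reuse) the
    /// matching language server for local worktrees, and seed the buffer with
    /// any diagnostics the worktree already has for that path.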
767 fn assign_language_to_buffer(
768 &mut self,
769 buffer: &ModelHandle<Buffer>,
770 worktree: Option<&ModelHandle<Worktree>>,
771 cx: &mut ModelContext<Self>,
772 ) -> Option<()> {
773 let (path, full_path) = {
774 let file = buffer.read(cx).file()?;
775 (file.path().clone(), file.full_path(cx))
776 };
777
778 // If the buffer has a language, set it and start/assign the language server
779 if let Some(language) = self.languages.select_language(&full_path) {
780 buffer.update(cx, |buffer, cx| {
781 buffer.set_language(Some(language.clone()), cx);
782 });
783
784 // For local worktrees, start a language server if needed.
785 // Also assign the language server and any previously stored diagnostics to the buffer.
786 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
787 let worktree_id = local_worktree.id();
788 let worktree_abs_path = local_worktree.abs_path().clone();
789
790 let language_server = match self
791 .language_servers
792 .entry((worktree_id, language.name().to_string()))
793 {
794 hash_map::Entry::Occupied(e) => Some(e.get().clone()),
795 hash_map::Entry::Vacant(e) => Self::start_language_server(
796 self.client.clone(),
797 language.clone(),
798 &worktree_abs_path,
799 cx,
800 )
801 .map(|server| e.insert(server).clone()),
802 };
803
804 buffer.update(cx, |buffer, cx| {
805 buffer.set_language_server(language_server, cx);
806 });
807 }
808 }
809
810 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
811 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
812 buffer.update(cx, |buffer, cx| {
813 buffer.update_diagnostics(diagnostics, None, cx).log_err();
814 });
815 }
816 }
817
818 None
819 }
820
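    /// Start a language server for `language` rooted at `worktree_path` and
    /// wire up its notifications: `PublishDiagnostics` payloads and disk-based
    /// diagnostic progress transitions are forwarded through a channel and
    /// applied to the project (and relayed to collaborators when the project
    /// has a remote id) on the main thread.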
821 fn start_language_server(
822 rpc: Arc<Client>,
823 language: Arc<Language>,
824 worktree_path: &Path,
825 cx: &mut ModelContext<Self>,
826 ) -> Option<Arc<LanguageServer>> {
827 enum LspEvent {
828 DiagnosticsStart,
829 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
830 DiagnosticsFinish,
831 }
832
833 let language_server = language
834 .start_server(worktree_path, cx)
835 .log_err()
836 .flatten()?;
837 let disk_based_sources = language
838 .disk_based_diagnostic_sources()
839 .cloned()
840 .unwrap_or_default();
841 let disk_based_diagnostics_progress_token =
842 language.disk_based_diagnostics_progress_token().cloned();
843 let has_disk_based_diagnostic_progress_token =
844 disk_based_diagnostics_progress_token.is_some();
845 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
846
847 // Listen for `PublishDiagnostics` notifications.
848 language_server
849 .on_notification::<lsp::notification::PublishDiagnostics, _>({
850 let diagnostics_tx = diagnostics_tx.clone();
851 move |params| {
852 if !has_disk_based_diagnostic_progress_token {
853 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
854 }
855 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params))).ok();
856 if !has_disk_based_diagnostic_progress_token {
857 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
858 }
859 }
860 })
861 .detach();
862
863 // Listen for `Progress` notifications. Send an event when the language server
864 // transitions between running jobs and not running any jobs.
865 let mut running_jobs_for_this_server: i32 = 0;
866 language_server
867 .on_notification::<lsp::notification::Progress, _>(move |params| {
868 let token = match params.token {
869 lsp::NumberOrString::Number(_) => None,
870 lsp::NumberOrString::String(token) => Some(token),
871 };
872
873 if token == disk_based_diagnostics_progress_token {
874 match params.value {
875 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
876 lsp::WorkDoneProgress::Begin(_) => {
877 running_jobs_for_this_server += 1;
878 if running_jobs_for_this_server == 1 {
879 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
880 }
881 }
882 lsp::WorkDoneProgress::End(_) => {
883 running_jobs_for_this_server -= 1;
884 if running_jobs_for_this_server == 0 {
885 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
886 }
887 }
888 _ => {}
889 },
890 }
891 }
892 })
893 .detach();
894
895 // Process all the LSP events.
896 cx.spawn_weak(|this, mut cx| async move {
897 while let Ok(message) = diagnostics_rx.recv().await {
898 let this = this.upgrade(&cx)?;
899 match message {
900 LspEvent::DiagnosticsStart => {
901 this.update(&mut cx, |this, cx| {
902 this.disk_based_diagnostics_started(cx);
903 if let Some(project_id) = this.remote_id() {
904 rpc.send(proto::DiskBasedDiagnosticsUpdating { project_id })
905 .log_err();
906 }
907 });
908 }
909 LspEvent::DiagnosticsUpdate(mut params) => {
910 language.process_diagnostics(&mut params);
911 this.update(&mut cx, |this, cx| {
912 this.update_diagnostics(params, &disk_based_sources, cx)
913 .log_err();
914 });
915 }
916 LspEvent::DiagnosticsFinish => {
917 this.update(&mut cx, |this, cx| {
918 this.disk_based_diagnostics_finished(cx);
919 if let Some(project_id) = this.remote_id() {
920 rpc.send(proto::DiskBasedDiagnosticsUpdated { project_id })
921 .log_err();
922 }
923 });
924 }
925 }
926 }
927 Some(())
928 })
929 .detach();
930
931 Some(language_server)
932 }
933
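    /// Convert an LSP `PublishDiagnostics` notification into diagnostic
    /// entries. Related-information entries are grouped with their primary
    /// diagnostic under a shared group id, and supporting entries inherit any
    /// severity reported separately for the same source, code, and range.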
934 pub fn update_diagnostics(
935 &mut self,
936 params: lsp::PublishDiagnosticsParams,
937 disk_based_sources: &HashSet<String>,
938 cx: &mut ModelContext<Self>,
939 ) -> Result<()> {
940 let abs_path = params
941 .uri
942 .to_file_path()
943 .map_err(|_| anyhow!("URI is not a file"))?;
944 let mut next_group_id = 0;
945 let mut diagnostics = Vec::default();
946 let mut primary_diagnostic_group_ids = HashMap::default();
947 let mut sources_by_group_id = HashMap::default();
948 let mut supporting_diagnostic_severities = HashMap::default();
        for diagnostic in &params.diagnostics {
950 let source = diagnostic.source.as_ref();
951 let code = diagnostic.code.as_ref().map(|code| match code {
952 lsp::NumberOrString::Number(code) => code.to_string(),
953 lsp::NumberOrString::String(code) => code.clone(),
954 });
955 let range = range_from_lsp(diagnostic.range);
956 let is_supporting = diagnostic
957 .related_information
958 .as_ref()
959 .map_or(false, |infos| {
960 infos.iter().any(|info| {
961 primary_diagnostic_group_ids.contains_key(&(
962 source,
963 code.clone(),
964 range_from_lsp(info.location.range),
965 ))
966 })
967 });
968
969 if is_supporting {
970 if let Some(severity) = diagnostic.severity {
971 supporting_diagnostic_severities
972 .insert((source, code.clone(), range), severity);
973 }
974 } else {
975 let group_id = post_inc(&mut next_group_id);
976 let is_disk_based =
977 source.map_or(false, |source| disk_based_sources.contains(source));
978
979 sources_by_group_id.insert(group_id, source);
980 primary_diagnostic_group_ids
981 .insert((source, code.clone(), range.clone()), group_id);
982
983 diagnostics.push(DiagnosticEntry {
984 range,
985 diagnostic: Diagnostic {
986 code: code.clone(),
987 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
988 message: diagnostic.message.clone(),
989 group_id,
990 is_primary: true,
991 is_valid: true,
992 is_disk_based,
993 },
994 });
995 if let Some(infos) = &diagnostic.related_information {
996 for info in infos {
997 if info.location.uri == params.uri && !info.message.is_empty() {
998 let range = range_from_lsp(info.location.range);
999 diagnostics.push(DiagnosticEntry {
1000 range,
1001 diagnostic: Diagnostic {
1002 code: code.clone(),
1003 severity: DiagnosticSeverity::INFORMATION,
1004 message: info.message.clone(),
1005 group_id,
1006 is_primary: false,
1007 is_valid: true,
1008 is_disk_based,
1009 },
1010 });
1011 }
1012 }
1013 }
1014 }
1015 }
1016
1017 for entry in &mut diagnostics {
1018 let diagnostic = &mut entry.diagnostic;
1019 if !diagnostic.is_primary {
1020 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1021 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1022 source,
1023 diagnostic.code.clone(),
1024 entry.range.clone(),
1025 )) {
1026 diagnostic.severity = severity;
1027 }
1028 }
1029 }
1030
1031 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1032 Ok(())
1033 }
1034
1035 pub fn update_diagnostic_entries(
1036 &mut self,
1037 abs_path: PathBuf,
1038 version: Option<i32>,
1039 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1040 cx: &mut ModelContext<Project>,
1041 ) -> Result<(), anyhow::Error> {
1042 let (worktree, relative_path) = self
1043 .find_local_worktree(&abs_path, cx)
1044 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1045 let project_path = ProjectPath {
1046 worktree_id: worktree.read(cx).id(),
1047 path: relative_path.into(),
1048 };
1049
1050 for buffer in self.open_buffers.values() {
1051 if let Some(buffer) = buffer.upgrade(cx) {
1052 if buffer
1053 .read(cx)
1054 .file()
1055 .map_or(false, |file| *file.path() == project_path.path)
1056 {
1057 buffer.update(cx, |buffer, cx| {
1058 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1059 })?;
1060 break;
1061 }
1062 }
1063 }
1064 worktree.update(cx, |worktree, cx| {
1065 worktree
1066 .as_local_mut()
1067 .ok_or_else(|| anyhow!("not a local worktree"))?
1068 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1069 })?;
1070 cx.emit(Event::DiagnosticsUpdated(project_path));
1071 Ok(())
1072 }
1073
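    /// Format the given buffers. Buffers that live in a remote project are
    /// formatted via an RPC request to the host, while local buffers are
    /// formatted by their language server and the resulting edits are applied
    /// as a single transaction per buffer.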
1074 pub fn format(
1075 &self,
1076 buffers: HashSet<ModelHandle<Buffer>>,
1077 push_to_history: bool,
1078 cx: &mut ModelContext<Project>,
1079 ) -> Task<Result<ProjectTransaction>> {
1080 let mut local_buffers = Vec::new();
1081 let mut remote_buffers = None;
1082 for buffer_handle in buffers {
1083 let buffer = buffer_handle.read(cx);
1084 let worktree;
1085 if let Some(file) = File::from_dyn(buffer.file()) {
1086 worktree = file.worktree.clone();
1087 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1088 let lang_server;
1089 if let Some(lang) = buffer.language() {
1090 if let Some(server) = self
1091 .language_servers
1092 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1093 {
1094 lang_server = server.clone();
1095 } else {
1096 return Task::ready(Ok(Default::default()));
1097 };
1098 } else {
1099 return Task::ready(Ok(Default::default()));
1100 }
1101
1102 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1103 } else {
1104 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1105 }
1106 } else {
1107 return Task::ready(Ok(Default::default()));
1108 }
1109 }
1110
1111 let remote_buffers = self.remote_id().zip(remote_buffers);
1112 let client = self.client.clone();
1113
1114 cx.spawn(|this, mut cx| async move {
1115 let mut project_transaction = ProjectTransaction::default();
1116
1117 if let Some((project_id, remote_buffers)) = remote_buffers {
1118 let response = client
1119 .request(proto::FormatBuffers {
1120 project_id,
1121 buffer_ids: remote_buffers
1122 .iter()
1123 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1124 .collect(),
1125 })
1126 .await?
1127 .transaction
1128 .ok_or_else(|| anyhow!("missing transaction"))?;
1129 project_transaction = this
1130 .update(&mut cx, |this, cx| {
1131 this.deserialize_project_transaction(response, push_to_history, cx)
1132 })
1133 .await?;
1134 }
1135
1136 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1137 let lsp_edits = lang_server
1138 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1139 text_document: lsp::TextDocumentIdentifier::new(
1140 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1141 ),
1142 options: Default::default(),
1143 work_done_progress_params: Default::default(),
1144 })
1145 .await?;
1146
1147 if let Some(lsp_edits) = lsp_edits {
1148 let edits = buffer
1149 .update(&mut cx, |buffer, cx| {
1150 buffer.edits_from_lsp(lsp_edits, None, cx)
1151 })
1152 .await?;
1153 buffer.update(&mut cx, |buffer, cx| {
1154 buffer.finalize_last_transaction();
1155 buffer.start_transaction();
1156 for (range, text) in edits {
1157 buffer.edit([range], text, cx);
1158 }
1159 if buffer.end_transaction(cx).is_some() {
1160 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1161 if !push_to_history {
1162 buffer.forget_transaction(transaction.id);
1163 }
1164 project_transaction.0.insert(cx.handle(), transaction);
1165 }
1166 });
1167 }
1168 }
1169
1170 Ok(project_transaction)
1171 })
1172 }
1173
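    /// Find the definitions of the symbol at `position`. Locally this issues a
    /// `textDocument/definition` request and opens a buffer for each target
    /// location; on a remote project the request is forwarded to the host.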
1174 pub fn definition<T: ToPointUtf16>(
1175 &self,
1176 source_buffer_handle: &ModelHandle<Buffer>,
1177 position: T,
1178 cx: &mut ModelContext<Self>,
1179 ) -> Task<Result<Vec<Definition>>> {
1180 let source_buffer_handle = source_buffer_handle.clone();
1181 let source_buffer = source_buffer_handle.read(cx);
1182 let worktree;
1183 let buffer_abs_path;
1184 if let Some(file) = File::from_dyn(source_buffer.file()) {
1185 worktree = file.worktree.clone();
1186 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1187 } else {
1188 return Task::ready(Ok(Default::default()));
1189 };
1190
1191 let position = position.to_point_utf16(source_buffer);
1192
1193 if worktree.read(cx).as_local().is_some() {
1194 let buffer_abs_path = buffer_abs_path.unwrap();
1195 let lang_name;
1196 let lang_server;
1197 if let Some(lang) = source_buffer.language() {
1198 lang_name = lang.name().to_string();
1199 if let Some(server) = self
1200 .language_servers
1201 .get(&(worktree.read(cx).id(), lang_name.clone()))
1202 {
1203 lang_server = server.clone();
1204 } else {
1205 return Task::ready(Ok(Default::default()));
1206 };
1207 } else {
1208 return Task::ready(Ok(Default::default()));
1209 }
1210
1211 cx.spawn(|this, mut cx| async move {
1212 let response = lang_server
1213 .request::<lsp::request::GotoDefinition>(lsp::GotoDefinitionParams {
1214 text_document_position_params: lsp::TextDocumentPositionParams {
1215 text_document: lsp::TextDocumentIdentifier::new(
1216 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1217 ),
1218 position: lsp::Position::new(position.row, position.column),
1219 },
1220 work_done_progress_params: Default::default(),
1221 partial_result_params: Default::default(),
1222 })
1223 .await?;
1224
1225 let mut definitions = Vec::new();
1226 if let Some(response) = response {
1227 let mut unresolved_locations = Vec::new();
1228 match response {
1229 lsp::GotoDefinitionResponse::Scalar(loc) => {
1230 unresolved_locations.push((loc.uri, loc.range));
1231 }
1232 lsp::GotoDefinitionResponse::Array(locs) => {
1233 unresolved_locations.extend(locs.into_iter().map(|l| (l.uri, l.range)));
1234 }
1235 lsp::GotoDefinitionResponse::Link(links) => {
1236 unresolved_locations.extend(
1237 links
1238 .into_iter()
1239 .map(|l| (l.target_uri, l.target_selection_range)),
1240 );
1241 }
1242 }
1243
1244 for (target_uri, target_range) in unresolved_locations {
1245 let target_buffer_handle = this
1246 .update(&mut cx, |this, cx| {
1247 this.open_local_buffer_from_lsp_path(
1248 target_uri,
1249 lang_name.clone(),
1250 lang_server.clone(),
1251 cx,
1252 )
1253 })
1254 .await?;
1255
1256 cx.read(|cx| {
1257 let target_buffer = target_buffer_handle.read(cx);
1258 let target_start = target_buffer
1259 .clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
1260 let target_end = target_buffer
1261 .clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
1262 definitions.push(Definition {
1263 target_buffer: target_buffer_handle,
1264 target_range: target_buffer.anchor_after(target_start)
1265 ..target_buffer.anchor_before(target_end),
1266 });
1267 });
1268 }
1269 }
1270
1271 Ok(definitions)
1272 })
1273 } else if let Some(project_id) = self.remote_id() {
1274 let client = self.client.clone();
1275 let request = proto::GetDefinition {
1276 project_id,
1277 buffer_id: source_buffer.remote_id(),
1278 position: Some(serialize_anchor(&source_buffer.anchor_before(position))),
1279 };
1280 cx.spawn(|this, mut cx| async move {
1281 let response = client.request(request).await?;
1282 let mut definitions = Vec::new();
1283 for definition in response.definitions {
1284 let buffer = definition.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1285 let target_buffer = this
1286 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1287 .await?;
1288 let target_start = definition
1289 .target_start
1290 .and_then(deserialize_anchor)
1291 .ok_or_else(|| anyhow!("missing target start"))?;
1292 let target_end = definition
1293 .target_end
1294 .and_then(deserialize_anchor)
1295 .ok_or_else(|| anyhow!("missing target end"))?;
1296 definitions.push(Definition {
1297 target_buffer,
1298 target_range: target_start..target_end,
1299 })
1300 }
1301
1302 Ok(definitions)
1303 })
1304 } else {
1305 Task::ready(Ok(Default::default()))
1306 }
1307 }
1308
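    /// Request completions at `position` from the buffer's language server
    /// (or from the host, for remote projects), keeping only completions whose
    /// edit ranges land on valid positions in the current buffer contents.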
1309 pub fn completions<T: ToPointUtf16>(
1310 &self,
1311 source_buffer_handle: &ModelHandle<Buffer>,
1312 position: T,
1313 cx: &mut ModelContext<Self>,
1314 ) -> Task<Result<Vec<Completion>>> {
1315 let source_buffer_handle = source_buffer_handle.clone();
1316 let source_buffer = source_buffer_handle.read(cx);
1317 let buffer_id = source_buffer.remote_id();
1318 let language = source_buffer.language().cloned();
1319 let worktree;
1320 let buffer_abs_path;
1321 if let Some(file) = File::from_dyn(source_buffer.file()) {
1322 worktree = file.worktree.clone();
1323 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1324 } else {
1325 return Task::ready(Ok(Default::default()));
1326 };
1327
1328 let position = position.to_point_utf16(source_buffer);
1329 let anchor = source_buffer.anchor_after(position);
1330
1331 if worktree.read(cx).as_local().is_some() {
1332 let buffer_abs_path = buffer_abs_path.unwrap();
1333 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1334 server
1335 } else {
1336 return Task::ready(Ok(Default::default()));
1337 };
1338
1339 cx.spawn(|_, cx| async move {
1340 let completions = lang_server
1341 .request::<lsp::request::Completion>(lsp::CompletionParams {
1342 text_document_position: lsp::TextDocumentPositionParams::new(
1343 lsp::TextDocumentIdentifier::new(
1344 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1345 ),
1346 position.to_lsp_position(),
1347 ),
1348 context: Default::default(),
1349 work_done_progress_params: Default::default(),
1350 partial_result_params: Default::default(),
1351 })
1352 .await?;
1353
1354 let completions = if let Some(completions) = completions {
1355 match completions {
1356 lsp::CompletionResponse::Array(completions) => completions,
1357 lsp::CompletionResponse::List(list) => list.items,
1358 }
1359 } else {
1360 Default::default()
1361 };
1362
1363 source_buffer_handle.read_with(&cx, |this, _| {
1364 Ok(completions
1365 .into_iter()
1366 .filter_map(|lsp_completion| {
1367 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1368 lsp::CompletionTextEdit::Edit(edit) => {
1369 (range_from_lsp(edit.range), edit.new_text.clone())
1370 }
1371 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1372 log::info!("unsupported insert/replace completion");
1373 return None;
1374 }
1375 };
1376
1377 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1378 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1379 if clipped_start == old_range.start && clipped_end == old_range.end {
1380 Some(Completion {
1381 old_range: this.anchor_before(old_range.start)
1382 ..this.anchor_after(old_range.end),
1383 new_text,
1384 label: language
1385 .as_ref()
1386 .and_then(|l| l.label_for_completion(&lsp_completion))
1387 .unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
1388 lsp_completion,
1389 })
1390 } else {
1391 None
1392 }
1393 })
1394 .collect())
1395 })
1396 })
1397 } else if let Some(project_id) = self.remote_id() {
1398 let rpc = self.client.clone();
1399 let message = proto::GetCompletions {
1400 project_id,
1401 buffer_id,
1402 position: Some(language::proto::serialize_anchor(&anchor)),
1403 version: (&source_buffer.version()).into(),
1404 };
1405 cx.spawn_weak(|_, mut cx| async move {
1406 let response = rpc.request(message).await?;
1407 source_buffer_handle
1408 .update(&mut cx, |buffer, _| {
1409 buffer.wait_for_version(response.version.into())
1410 })
1411 .await;
1412 response
1413 .completions
1414 .into_iter()
1415 .map(|completion| {
1416 language::proto::deserialize_completion(completion, language.as_ref())
1417 })
1418 .collect()
1419 })
1420 } else {
1421 Task::ready(Ok(Default::default()))
1422 }
1423 }
1424
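    /// Resolve the given completion and apply any additional text edits it
    /// carries (for example, edits a server attaches for auto-imports),
    /// returning the resulting transaction. The transaction is kept out of the
    /// buffer's undo history unless `push_to_history` is true.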
1425 pub fn apply_additional_edits_for_completion(
1426 &self,
1427 buffer_handle: ModelHandle<Buffer>,
1428 completion: Completion,
1429 push_to_history: bool,
1430 cx: &mut ModelContext<Self>,
1431 ) -> Task<Result<Option<Transaction>>> {
1432 let buffer = buffer_handle.read(cx);
1433 let buffer_id = buffer.remote_id();
1434
1435 if self.is_local() {
1436 let lang_server = if let Some(language_server) = buffer.language_server() {
1437 language_server.clone()
1438 } else {
1439 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1440 };
1441
1442 cx.spawn(|_, mut cx| async move {
1443 let resolved_completion = lang_server
1444 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1445 .await?;
1446 if let Some(edits) = resolved_completion.additional_text_edits {
1447 let edits = buffer_handle
1448 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1449 .await?;
1450 buffer_handle.update(&mut cx, |buffer, cx| {
1451 buffer.finalize_last_transaction();
1452 buffer.start_transaction();
1453 for (range, text) in edits {
1454 buffer.edit([range], text, cx);
1455 }
1456 let transaction = if buffer.end_transaction(cx).is_some() {
1457 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1458 if !push_to_history {
1459 buffer.forget_transaction(transaction.id);
1460 }
1461 Some(transaction)
1462 } else {
1463 None
1464 };
1465 Ok(transaction)
1466 })
1467 } else {
1468 Ok(None)
1469 }
1470 })
1471 } else if let Some(project_id) = self.remote_id() {
1472 let client = self.client.clone();
1473 cx.spawn(|_, mut cx| async move {
1474 let response = client
1475 .request(proto::ApplyCompletionAdditionalEdits {
1476 project_id,
1477 buffer_id,
1478 completion: Some(language::proto::serialize_completion(&completion)),
1479 })
1480 .await?;
1481
1482 if let Some(transaction) = response.transaction {
1483 let transaction = language::proto::deserialize_transaction(transaction)?;
1484 buffer_handle
1485 .update(&mut cx, |buffer, _| {
1486 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1487 })
1488 .await;
1489 if push_to_history {
1490 buffer_handle.update(&mut cx, |buffer, _| {
1491 buffer.push_transaction(transaction.clone(), Instant::now());
1492 });
1493 }
1494 Ok(Some(transaction))
1495 } else {
1496 Ok(None)
1497 }
1498 })
1499 } else {
1500 Task::ready(Err(anyhow!("project does not have a remote id")))
1501 }
1502 }
1503
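    /// Request code actions for the given range, restricted to quickfix and
    /// refactor kinds. Remote projects forward the request to the host.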
1504 pub fn code_actions<T: ToOffset>(
1505 &self,
1506 buffer_handle: &ModelHandle<Buffer>,
1507 range: Range<T>,
1508 cx: &mut ModelContext<Self>,
1509 ) -> Task<Result<Vec<CodeAction>>> {
1510 let buffer_handle = buffer_handle.clone();
1511 let buffer = buffer_handle.read(cx);
1512 let buffer_id = buffer.remote_id();
1513 let worktree;
1514 let buffer_abs_path;
1515 if let Some(file) = File::from_dyn(buffer.file()) {
1516 worktree = file.worktree.clone();
1517 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1518 } else {
1519 return Task::ready(Ok(Default::default()));
1520 };
1521 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1522
1523 if worktree.read(cx).as_local().is_some() {
1524 let buffer_abs_path = buffer_abs_path.unwrap();
1525 let lang_name;
1526 let lang_server;
1527 if let Some(lang) = buffer.language() {
1528 lang_name = lang.name().to_string();
1529 if let Some(server) = self
1530 .language_servers
1531 .get(&(worktree.read(cx).id(), lang_name.clone()))
1532 {
1533 lang_server = server.clone();
1534 } else {
1535 return Task::ready(Ok(Default::default()));
1536 };
1537 } else {
1538 return Task::ready(Ok(Default::default()));
1539 }
1540
1541 let lsp_range = lsp::Range::new(
1542 range.start.to_point_utf16(buffer).to_lsp_position(),
1543 range.end.to_point_utf16(buffer).to_lsp_position(),
1544 );
1545 cx.foreground().spawn(async move {
1546 Ok(lang_server
1547 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1548 text_document: lsp::TextDocumentIdentifier::new(
1549 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1550 ),
1551 range: lsp_range,
1552 work_done_progress_params: Default::default(),
1553 partial_result_params: Default::default(),
1554 context: lsp::CodeActionContext {
1555 diagnostics: Default::default(),
1556 only: Some(vec![
1557 lsp::CodeActionKind::QUICKFIX,
1558 lsp::CodeActionKind::REFACTOR,
1559 lsp::CodeActionKind::REFACTOR_EXTRACT,
1560 ]),
1561 },
1562 })
1563 .await?
1564 .unwrap_or_default()
1565 .into_iter()
1566 .filter_map(|entry| {
1567 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1568 Some(CodeAction {
1569 range: range.clone(),
1570 lsp_action,
1571 })
1572 } else {
1573 None
1574 }
1575 })
1576 .collect())
1577 })
1578 } else if let Some(project_id) = self.remote_id() {
1579 let rpc = self.client.clone();
1580 cx.foreground().spawn(async move {
1581 let response = rpc
1582 .request(proto::GetCodeActions {
1583 project_id,
1584 buffer_id,
1585 start: Some(language::proto::serialize_anchor(&range.start)),
1586 end: Some(language::proto::serialize_anchor(&range.end)),
1587 })
1588 .await?;
1589 response
1590 .actions
1591 .into_iter()
1592 .map(language::proto::deserialize_code_action)
1593 .collect()
1594 })
1595 } else {
1596 Task::ready(Ok(Default::default()))
1597 }
1598 }
1599
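    /// Apply a code action: resolve it against the buffer's current state if
    /// needed, then carry out the resulting workspace edit, creating,
    /// renaming, or deleting files and editing buffers as requested. Returns
    /// the buffer transactions that were produced.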
1600 pub fn apply_code_action(
1601 &self,
1602 buffer_handle: ModelHandle<Buffer>,
1603 mut action: CodeAction,
1604 push_to_history: bool,
1605 cx: &mut ModelContext<Self>,
1606 ) -> Task<Result<ProjectTransaction>> {
1607 if self.is_local() {
1608 let buffer = buffer_handle.read(cx);
1609 let lang_name = if let Some(lang) = buffer.language() {
1610 lang.name().to_string()
1611 } else {
1612 return Task::ready(Ok(Default::default()));
1613 };
1614 let lang_server = if let Some(language_server) = buffer.language_server() {
1615 language_server.clone()
1616 } else {
1617 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1618 };
1619 let range = action.range.to_point_utf16(buffer);
1620 let fs = self.fs.clone();
1621
1622 cx.spawn(|this, mut cx| async move {
1623 if let Some(lsp_range) = action
1624 .lsp_action
1625 .data
1626 .as_mut()
1627 .and_then(|d| d.get_mut("codeActionParams"))
1628 .and_then(|d| d.get_mut("range"))
1629 {
1630 *lsp_range = serde_json::to_value(&lsp::Range::new(
1631 range.start.to_lsp_position(),
1632 range.end.to_lsp_position(),
1633 ))
1634 .unwrap();
1635 action.lsp_action = lang_server
1636 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1637 .await?;
1638 } else {
1639 let actions = this
1640 .update(&mut cx, |this, cx| {
1641 this.code_actions(&buffer_handle, action.range, cx)
1642 })
1643 .await?;
1644 action.lsp_action = actions
1645 .into_iter()
1646 .find(|a| a.lsp_action.title == action.lsp_action.title)
1647 .ok_or_else(|| anyhow!("code action is outdated"))?
1648 .lsp_action;
1649 }
1650
1651 let mut operations = Vec::new();
1652 if let Some(edit) = action.lsp_action.edit {
1653 if let Some(document_changes) = edit.document_changes {
1654 match document_changes {
1655 lsp::DocumentChanges::Edits(edits) => operations
1656 .extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit)),
1657 lsp::DocumentChanges::Operations(ops) => operations = ops,
1658 }
1659 } else if let Some(changes) = edit.changes {
1660 operations.extend(changes.into_iter().map(|(uri, edits)| {
1661 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1662 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1663 uri,
1664 version: None,
1665 },
1666 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1667 })
1668 }));
1669 }
1670 }
1671
1672 let mut project_transaction = ProjectTransaction::default();
1673 for operation in operations {
1674 match operation {
1675 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1676 let abs_path = op
1677 .uri
1678 .to_file_path()
1679 .map_err(|_| anyhow!("can't convert URI to path"))?;
1680
1681 if let Some(parent_path) = abs_path.parent() {
1682 fs.create_dir(parent_path).await?;
1683 }
1684 if abs_path.ends_with("/") {
1685 fs.create_dir(&abs_path).await?;
1686 } else {
1687 fs.create_file(
1688 &abs_path,
1689 op.options.map(Into::into).unwrap_or_default(),
1690 )
1691 .await?;
1692 }
1693 }
1694 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1695 let source_abs_path = op
1696 .old_uri
1697 .to_file_path()
1698 .map_err(|_| anyhow!("can't convert URI to path"))?;
1699 let target_abs_path = op
1700 .new_uri
1701 .to_file_path()
1702 .map_err(|_| anyhow!("can't convert URI to path"))?;
1703 fs.rename(
1704 &source_abs_path,
1705 &target_abs_path,
1706 op.options.map(Into::into).unwrap_or_default(),
1707 )
1708 .await?;
1709 }
1710 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1711 let abs_path = op
1712 .uri
1713 .to_file_path()
1714 .map_err(|_| anyhow!("can't convert URI to path"))?;
1715 let options = op.options.map(Into::into).unwrap_or_default();
1716 if abs_path.ends_with("/") {
1717 fs.remove_dir(&abs_path, options).await?;
1718 } else {
1719 fs.remove_file(&abs_path, options).await?;
1720 }
1721 }
1722 lsp::DocumentChangeOperation::Edit(op) => {
1723 let buffer_to_edit = this
1724 .update(&mut cx, |this, cx| {
1725 this.open_local_buffer_from_lsp_path(
1726 op.text_document.uri,
1727 lang_name.clone(),
1728 lang_server.clone(),
1729 cx,
1730 )
1731 })
1732 .await?;
1733
1734 let edits = buffer_to_edit
1735 .update(&mut cx, |buffer, cx| {
1736 let edits = op.edits.into_iter().map(|edit| match edit {
1737 lsp::OneOf::Left(edit) => edit,
1738 lsp::OneOf::Right(edit) => edit.text_edit,
1739 });
1740 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1741 })
1742 .await?;
1743
1744 let transaction = buffer_to_edit.update(&mut cx, |buffer, cx| {
1745 buffer.finalize_last_transaction();
1746 buffer.start_transaction();
1747 for (range, text) in edits {
1748 buffer.edit([range], text, cx);
1749 }
1750 let transaction = if buffer.end_transaction(cx).is_some() {
1751 let transaction =
1752 buffer.finalize_last_transaction().unwrap().clone();
1753 if !push_to_history {
1754 buffer.forget_transaction(transaction.id);
1755 }
1756 Some(transaction)
1757 } else {
1758 None
1759 };
1760
1761 transaction
1762 });
1763 if let Some(transaction) = transaction {
1764 project_transaction.0.insert(buffer_to_edit, transaction);
1765 }
1766 }
1767 }
1768 }
1769
1770 Ok(project_transaction)
1771 })
1772 } else if let Some(project_id) = self.remote_id() {
1773 let client = self.client.clone();
1774 let request = proto::ApplyCodeAction {
1775 project_id,
1776 buffer_id: buffer_handle.read(cx).remote_id(),
1777 action: Some(language::proto::serialize_code_action(&action)),
1778 };
1779 cx.spawn(|this, mut cx| async move {
1780 let response = client
1781 .request(request)
1782 .await?
1783 .transaction
1784 .ok_or_else(|| anyhow!("missing transaction"))?;
1785 this.update(&mut cx, |this, cx| {
1786 this.deserialize_project_transaction(response, push_to_history, cx)
1787 })
1788 .await
1789 })
1790 } else {
1791 Task::ready(Err(anyhow!("project does not have a remote id")))
1792 }
1793 }
1794
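    /// Return the worktree containing `abs_path` and the path relative to that
    /// worktree's root, creating a new worktree rooted at `abs_path` if none
    /// exists yet.
    ///
    /// A small usage sketch (illustrative only; `project` and `cx` are assumed
    /// to exist in the caller):
    ///
    /// ```ignore
    /// let task = project.update(cx, |project, cx| {
    ///     project.find_or_create_local_worktree("/path/to/file.rs", false, cx)
    /// });
    /// ```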
1795 pub fn find_or_create_local_worktree(
1796 &self,
1797 abs_path: impl AsRef<Path>,
1798 weak: bool,
1799 cx: &mut ModelContext<Self>,
1800 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1801 let abs_path = abs_path.as_ref();
1802 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1803 Task::ready(Ok((tree.clone(), relative_path.into())))
1804 } else {
1805 let worktree = self.create_local_worktree(abs_path, weak, cx);
1806 cx.foreground()
1807 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1808 }
1809 }
1810
1811 fn find_local_worktree(
1812 &self,
1813 abs_path: &Path,
1814 cx: &AppContext,
1815 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1816 for tree in self.worktrees(cx) {
1817 if let Some(relative_path) = tree
1818 .read(cx)
1819 .as_local()
1820 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1821 {
1822 return Some((tree.clone(), relative_path.into()));
1823 }
1824 }
1825 None
1826 }
1827
1828 pub fn is_shared(&self) -> bool {
1829 match &self.client_state {
1830 ProjectClientState::Local { is_shared, .. } => *is_shared,
1831 ProjectClientState::Remote { .. } => false,
1832 }
1833 }
1834
1835 fn create_local_worktree(
1836 &self,
1837 abs_path: impl AsRef<Path>,
1838 weak: bool,
1839 cx: &mut ModelContext<Self>,
1840 ) -> Task<Result<ModelHandle<Worktree>>> {
1841 let fs = self.fs.clone();
1842 let client = self.client.clone();
1843 let path = Arc::from(abs_path.as_ref());
1844 cx.spawn(|project, mut cx| async move {
1845 let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;
1846
1847 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
1848 project.add_worktree(&worktree, cx);
1849 (project.remote_id(), project.is_shared())
1850 });
1851
1852 if let Some(project_id) = remote_project_id {
1853 worktree
1854 .update(&mut cx, |worktree, cx| {
1855 worktree.as_local_mut().unwrap().register(project_id, cx)
1856 })
1857 .await?;
1858 if is_shared {
1859 worktree
1860 .update(&mut cx, |worktree, cx| {
1861 worktree.as_local_mut().unwrap().share(project_id, cx)
1862 })
1863 .await?;
1864 }
1865 }
1866
1867 Ok(worktree)
1868 })
1869 }
1870
1871 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1872 self.worktrees.retain(|worktree| {
1873 worktree
1874 .upgrade(cx)
1875 .map_or(false, |w| w.read(cx).id() != id)
1876 });
1877 cx.notify();
1878 }
1879
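    /// Track a new worktree: observe it for changes, keep local buffers' file
    /// metadata in sync with it, and hold weak worktrees weakly so they can be
    /// released once no longer referenced.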
1880 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
1881 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
1882 if worktree.read(cx).is_local() {
1883 cx.subscribe(&worktree, |this, worktree, _, cx| {
1884 this.update_local_worktree_buffers(worktree, cx);
1885 })
1886 .detach();
1887 }
1888
1889 let push_weak_handle = {
1890 let worktree = worktree.read(cx);
1891 worktree.is_local() && worktree.is_weak()
1892 };
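        // Weak local worktrees are held through weak handles so they can be
        // released when nothing else is using them; prune the list whenever one
        // of them is dropped.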
1893 if push_weak_handle {
1894 cx.observe_release(&worktree, |this, cx| {
1895 this.worktrees
1896 .retain(|worktree| worktree.upgrade(cx).is_some());
1897 cx.notify();
1898 })
1899 .detach();
1900 self.worktrees
1901 .push(WorktreeHandle::Weak(worktree.downgrade()));
1902 } else {
1903 self.worktrees
1904 .push(WorktreeHandle::Strong(worktree.clone()));
1905 }
1906 cx.notify();
1907 }
1908
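    // Re-resolves the file for every open buffer that belongs to this worktree
    // after its snapshot changes: first by entry id, then by path, and finally
    // as an entry-less file if it no longer exists on disk. Collaborators are
    // notified of the updated file metadata, and buffers whose handles have been
    // dropped are pruned from `open_buffers`.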
1909 fn update_local_worktree_buffers(
1910 &mut self,
1911 worktree_handle: ModelHandle<Worktree>,
1912 cx: &mut ModelContext<Self>,
1913 ) {
1914 let snapshot = worktree_handle.read(cx).snapshot();
1915 let mut buffers_to_delete = Vec::new();
1916 for (buffer_id, buffer) in &self.open_buffers {
1917 if let Some(buffer) = buffer.upgrade(cx) {
1918 buffer.update(cx, |buffer, cx| {
1919 if let Some(old_file) = File::from_dyn(buffer.file()) {
1920 if old_file.worktree != worktree_handle {
1921 return;
1922 }
1923
1924 let new_file = if let Some(entry) = old_file
1925 .entry_id
1926 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
1927 {
1928 File {
1929 is_local: true,
1930 entry_id: Some(entry.id),
1931 mtime: entry.mtime,
1932 path: entry.path.clone(),
1933 worktree: worktree_handle.clone(),
1934 }
1935 } else if let Some(entry) =
1936 snapshot.entry_for_path(old_file.path().as_ref())
1937 {
1938 File {
1939 is_local: true,
1940 entry_id: Some(entry.id),
1941 mtime: entry.mtime,
1942 path: entry.path.clone(),
1943 worktree: worktree_handle.clone(),
1944 }
1945 } else {
1946 File {
1947 is_local: true,
1948 entry_id: None,
1949 path: old_file.path().clone(),
1950 mtime: old_file.mtime(),
1951 worktree: worktree_handle.clone(),
1952 }
1953 };
1954
1955 if let Some(project_id) = self.remote_id() {
1956 self.client
1957 .send(proto::UpdateBufferFile {
1958 project_id,
1959 buffer_id: *buffer_id as u64,
1960 file: Some(new_file.to_proto()),
1961 })
1962 .log_err();
1963 }
1964 buffer.file_updated(Box::new(new_file), cx).detach();
1965 }
1966 });
1967 } else {
1968 buffers_to_delete.push(*buffer_id);
1969 }
1970 }
1971
1972 for buffer_id in buffers_to_delete {
1973 self.open_buffers.remove(&buffer_id);
1974 }
1975 }
1976
1977 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
1978 let new_active_entry = entry.and_then(|project_path| {
1979 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1980 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
1981 Some(ProjectEntry {
1982 worktree_id: project_path.worktree_id,
1983 entry_id: entry.id,
1984 })
1985 });
1986 if new_active_entry != self.active_entry {
1987 self.active_entry = new_active_entry;
1988 cx.emit(Event::ActiveEntryChanged(new_active_entry));
1989 }
1990 }
1991
1992 pub fn is_running_disk_based_diagnostics(&self) -> bool {
1993 self.language_servers_with_diagnostics_running > 0
1994 }
1995
1996 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
1997 let mut summary = DiagnosticSummary::default();
1998 for (_, path_summary) in self.diagnostic_summaries(cx) {
1999 summary.error_count += path_summary.error_count;
2000 summary.warning_count += path_summary.warning_count;
2001 summary.info_count += path_summary.info_count;
2002 summary.hint_count += path_summary.hint_count;
2003 }
2004 summary
2005 }
2006
2007 pub fn diagnostic_summaries<'a>(
2008 &'a self,
2009 cx: &'a AppContext,
2010 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2011 self.worktrees(cx).flat_map(move |worktree| {
2012 let worktree = worktree.read(cx);
2013 let worktree_id = worktree.id();
2014 worktree
2015 .diagnostic_summaries()
2016 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2017 })
2018 }
2019
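    // Several language servers may run disk-based diagnostics at once, so a
    // counter is used: only the first start emits DiskBasedDiagnosticsStarted
    // and only the last finish emits DiskBasedDiagnosticsFinished, while every
    // finish emits DiskBasedDiagnosticsUpdated.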
2020 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2021 self.language_servers_with_diagnostics_running += 1;
2022 if self.language_servers_with_diagnostics_running == 1 {
2023 cx.emit(Event::DiskBasedDiagnosticsStarted);
2024 }
2025 }
2026
2027 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2028 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2029 self.language_servers_with_diagnostics_running -= 1;
2030 if self.language_servers_with_diagnostics_running == 0 {
2031 cx.emit(Event::DiskBasedDiagnosticsFinished);
2032 }
2033 }
2034
2035 pub fn active_entry(&self) -> Option<ProjectEntry> {
2036 self.active_entry
2037 }
2038
2039 // RPC message handlers
2040
2041 async fn handle_unshare_project(
2042 this: ModelHandle<Self>,
2043 _: TypedEnvelope<proto::UnshareProject>,
2044 _: Arc<Client>,
2045 mut cx: AsyncAppContext,
2046 ) -> Result<()> {
2047 this.update(&mut cx, |this, cx| {
2048 if let ProjectClientState::Remote {
2049 sharing_has_stopped,
2050 ..
2051 } = &mut this.client_state
2052 {
2053 *sharing_has_stopped = true;
2054 this.collaborators.clear();
2055 cx.notify();
2056 } else {
2057 unreachable!()
2058 }
2059 });
2060
2061 Ok(())
2062 }
2063
2064 async fn handle_add_collaborator(
2065 this: ModelHandle<Self>,
2066 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2067 _: Arc<Client>,
2068 mut cx: AsyncAppContext,
2069 ) -> Result<()> {
2070 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2071 let collaborator = envelope
2072 .payload
2073 .collaborator
2074 .take()
2075 .ok_or_else(|| anyhow!("empty collaborator"))?;
2076
2077 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2078 this.update(&mut cx, |this, cx| {
2079 this.collaborators
2080 .insert(collaborator.peer_id, collaborator);
2081 cx.notify();
2082 });
2083
2084 Ok(())
2085 }
2086
2087 async fn handle_remove_collaborator(
2088 this: ModelHandle<Self>,
2089 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2090 _: Arc<Client>,
2091 mut cx: AsyncAppContext,
2092 ) -> Result<()> {
2093 this.update(&mut cx, |this, cx| {
2094 let peer_id = PeerId(envelope.payload.peer_id);
2095 let replica_id = this
2096 .collaborators
2097 .remove(&peer_id)
2098 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2099 .replica_id;
2100 this.shared_buffers.remove(&peer_id);
            for buffer in this.open_buffers.values() {
2102 if let Some(buffer) = buffer.upgrade(cx) {
2103 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2104 }
2105 }
2106 cx.notify();
2107 Ok(())
2108 })
2109 }
2110
2111 async fn handle_share_worktree(
2112 this: ModelHandle<Self>,
2113 envelope: TypedEnvelope<proto::ShareWorktree>,
2114 client: Arc<Client>,
2115 mut cx: AsyncAppContext,
2116 ) -> Result<()> {
2117 this.update(&mut cx, |this, cx| {
2118 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2119 let replica_id = this.replica_id();
2120 let worktree = envelope
2121 .payload
2122 .worktree
2123 .ok_or_else(|| anyhow!("invalid worktree"))?;
2124 let (worktree, load_task) =
2125 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2126 this.add_worktree(&worktree, cx);
2127 load_task.detach();
2128 Ok(())
2129 })
2130 }
2131
2132 async fn handle_unregister_worktree(
2133 this: ModelHandle<Self>,
2134 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2135 _: Arc<Client>,
2136 mut cx: AsyncAppContext,
2137 ) -> Result<()> {
2138 this.update(&mut cx, |this, cx| {
2139 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2140 this.remove_worktree(worktree_id, cx);
2141 Ok(())
2142 })
2143 }
2144
2145 async fn handle_update_worktree(
2146 this: ModelHandle<Self>,
2147 envelope: TypedEnvelope<proto::UpdateWorktree>,
2148 _: Arc<Client>,
2149 mut cx: AsyncAppContext,
2150 ) -> Result<()> {
2151 this.update(&mut cx, |this, cx| {
2152 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2153 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2154 worktree.update(cx, |worktree, _| {
2155 let worktree = worktree.as_remote_mut().unwrap();
2156 worktree.update_from_remote(envelope)
2157 })?;
2158 }
2159 Ok(())
2160 })
2161 }
2162
2163 async fn handle_update_diagnostic_summary(
2164 this: ModelHandle<Self>,
2165 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2166 _: Arc<Client>,
2167 mut cx: AsyncAppContext,
2168 ) -> Result<()> {
2169 this.update(&mut cx, |this, cx| {
2170 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2171 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2172 if let Some(summary) = envelope.payload.summary {
2173 let project_path = ProjectPath {
2174 worktree_id,
2175 path: Path::new(&summary.path).into(),
2176 };
2177 worktree.update(cx, |worktree, _| {
2178 worktree
2179 .as_remote_mut()
2180 .unwrap()
2181 .update_diagnostic_summary(project_path.path.clone(), &summary);
2182 });
2183 cx.emit(Event::DiagnosticsUpdated(project_path));
2184 }
2185 }
2186 Ok(())
2187 })
2188 }
2189
2190 async fn handle_disk_based_diagnostics_updating(
2191 this: ModelHandle<Self>,
2192 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2193 _: Arc<Client>,
2194 mut cx: AsyncAppContext,
2195 ) -> Result<()> {
2196 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2197 Ok(())
2198 }
2199
2200 async fn handle_disk_based_diagnostics_updated(
2201 this: ModelHandle<Self>,
2202 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2203 _: Arc<Client>,
2204 mut cx: AsyncAppContext,
2205 ) -> Result<()> {
2206 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2207 Ok(())
2208 }
2209
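    // Operations for a buffer can arrive from peers before that buffer has been
    // opened locally; in that case they are queued as `OpenBuffer::Operations`
    // rather than dropped.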
2210 async fn handle_update_buffer(
2211 this: ModelHandle<Self>,
2212 envelope: TypedEnvelope<proto::UpdateBuffer>,
2213 _: Arc<Client>,
2214 mut cx: AsyncAppContext,
2215 ) -> Result<()> {
2216 this.update(&mut cx, |this, cx| {
2217 let payload = envelope.payload.clone();
2218 let buffer_id = payload.buffer_id as usize;
2219 let ops = payload
2220 .operations
2221 .into_iter()
                .map(language::proto::deserialize_operation)
2223 .collect::<Result<Vec<_>, _>>()?;
2224 let buffer = this
2225 .open_buffers
2226 .entry(buffer_id)
2227 .or_insert_with(|| OpenBuffer::Operations(Vec::new()));
2228 match buffer {
2229 OpenBuffer::Loaded(buffer) => {
2230 if let Some(buffer) = buffer.upgrade(cx) {
2231 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2232 }
2233 }
2234 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
2235 }
2236 Ok(())
2237 })
2238 }
2239
2240 async fn handle_update_buffer_file(
2241 this: ModelHandle<Self>,
2242 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2243 _: Arc<Client>,
2244 mut cx: AsyncAppContext,
2245 ) -> Result<()> {
2246 this.update(&mut cx, |this, cx| {
2247 let payload = envelope.payload.clone();
2248 let buffer_id = payload.buffer_id as usize;
2249 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2250 let worktree = this
2251 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2252 .ok_or_else(|| anyhow!("no such worktree"))?;
2253 let file = File::from_proto(file, worktree.clone(), cx)?;
2254 let buffer = this
2255 .open_buffers
2256 .get_mut(&buffer_id)
2257 .and_then(|b| b.upgrade(cx))
2258 .ok_or_else(|| anyhow!("no such buffer"))?;
2259 buffer.update(cx, |buffer, cx| {
2260 buffer.file_updated(Box::new(file), cx).detach();
2261 });
2262 Ok(())
2263 })
2264 }
2265
2266 async fn handle_save_buffer(
2267 this: ModelHandle<Self>,
2268 envelope: TypedEnvelope<proto::SaveBuffer>,
2269 _: Arc<Client>,
2270 mut cx: AsyncAppContext,
2271 ) -> Result<proto::BufferSaved> {
2272 let buffer_id = envelope.payload.buffer_id;
2273 let sender_id = envelope.original_sender_id()?;
2274 let requested_version = envelope.payload.version.try_into()?;
2275
2276 let (project_id, buffer) = this.update(&mut cx, |this, _| {
2277 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2278 let buffer = this
2279 .shared_buffers
2280 .get(&sender_id)
2281 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2282 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2283 Ok::<_, anyhow::Error>((project_id, buffer))
2284 })?;
2285
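        // Don't save until this replica has seen all of the edits contained in
        // the version the requesting peer specified.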
2286 buffer
2287 .update(&mut cx, |buffer, _| {
2288 buffer.wait_for_version(requested_version)
2289 })
2290 .await;
2291
2292 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2293 Ok(proto::BufferSaved {
2294 project_id,
2295 buffer_id,
2296 version: (&saved_version).into(),
2297 mtime: Some(mtime.into()),
2298 })
2299 }
2300
2301 async fn handle_format_buffers(
2302 this: ModelHandle<Self>,
2303 envelope: TypedEnvelope<proto::FormatBuffers>,
2304 _: Arc<Client>,
2305 mut cx: AsyncAppContext,
2306 ) -> Result<proto::FormatBuffersResponse> {
2307 let sender_id = envelope.original_sender_id()?;
2308 let format = this.update(&mut cx, |this, cx| {
2309 let shared_buffers = this
2310 .shared_buffers
2311 .get(&sender_id)
2312 .ok_or_else(|| anyhow!("peer has no buffers"))?;
2313 let mut buffers = HashSet::default();
2314 for buffer_id in &envelope.payload.buffer_ids {
2315 buffers.insert(
2316 shared_buffers
2317 .get(buffer_id)
2318 .cloned()
2319 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2320 );
2321 }
2322 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2323 })?;
2324
2325 let project_transaction = format.await?;
2326 let project_transaction = this.update(&mut cx, |this, cx| {
2327 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2328 });
2329 Ok(proto::FormatBuffersResponse {
2330 transaction: Some(project_transaction),
2331 })
2332 }
2333
2334 async fn handle_get_completions(
2335 this: ModelHandle<Self>,
2336 envelope: TypedEnvelope<proto::GetCompletions>,
2337 _: Arc<Client>,
2338 mut cx: AsyncAppContext,
2339 ) -> Result<proto::GetCompletionsResponse> {
2340 let sender_id = envelope.original_sender_id()?;
2341 let position = envelope
2342 .payload
2343 .position
2344 .and_then(language::proto::deserialize_anchor)
2345 .ok_or_else(|| anyhow!("invalid position"))?;
2346 let version = clock::Global::from(envelope.payload.version);
2347 let buffer = this.read_with(&cx, |this, _| {
2348 this.shared_buffers
2349 .get(&sender_id)
2350 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2351 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2352 })?;
2353 buffer
2354 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
2355 .await;
2356 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2357 let completions = this
2358 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2359 .await?;
2360
2361 Ok(proto::GetCompletionsResponse {
2362 completions: completions
2363 .iter()
2364 .map(language::proto::serialize_completion)
2365 .collect(),
2366 version: (&version).into(),
2367 })
2368 }
2369
2370 async fn handle_apply_additional_edits_for_completion(
2371 this: ModelHandle<Self>,
2372 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2373 _: Arc<Client>,
2374 mut cx: AsyncAppContext,
2375 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2376 let sender_id = envelope.original_sender_id()?;
2377 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2378 let buffer = this
2379 .shared_buffers
2380 .get(&sender_id)
2381 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2382 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2383 let language = buffer.read(cx).language();
2384 let completion = language::proto::deserialize_completion(
2385 envelope
2386 .payload
2387 .completion
2388 .ok_or_else(|| anyhow!("invalid completion"))?,
2389 language,
2390 )?;
2391 Ok::<_, anyhow::Error>(
2392 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2393 )
2394 })?;
2395
2396 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2397 transaction: apply_additional_edits
2398 .await?
2399 .as_ref()
2400 .map(language::proto::serialize_transaction),
2401 })
2402 }
2403
2404 async fn handle_get_code_actions(
2405 this: ModelHandle<Self>,
2406 envelope: TypedEnvelope<proto::GetCodeActions>,
2407 _: Arc<Client>,
2408 mut cx: AsyncAppContext,
2409 ) -> Result<proto::GetCodeActionsResponse> {
2410 let sender_id = envelope.original_sender_id()?;
2411 let start = envelope
2412 .payload
2413 .start
2414 .and_then(language::proto::deserialize_anchor)
2415 .ok_or_else(|| anyhow!("invalid start"))?;
2416 let end = envelope
2417 .payload
2418 .end
2419 .and_then(language::proto::deserialize_anchor)
2420 .ok_or_else(|| anyhow!("invalid end"))?;
2421 let buffer = this.update(&mut cx, |this, _| {
2422 this.shared_buffers
2423 .get(&sender_id)
2424 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2425 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2426 })?;
2427 buffer
2428 .update(&mut cx, |buffer, _| {
2429 buffer.wait_for_version([start.timestamp, end.timestamp].into_iter().collect())
2430 })
2431 .await;
2432 let code_actions = this.update(&mut cx, |this, cx| {
2433 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2434 })?;
2435
2436 Ok(proto::GetCodeActionsResponse {
2437 actions: code_actions
2438 .await?
2439 .iter()
2440 .map(language::proto::serialize_code_action)
2441 .collect(),
2442 })
2443 }
2444
2445 async fn handle_apply_code_action(
2446 this: ModelHandle<Self>,
2447 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2448 _: Arc<Client>,
2449 mut cx: AsyncAppContext,
2450 ) -> Result<proto::ApplyCodeActionResponse> {
2451 let sender_id = envelope.original_sender_id()?;
2452 let action = language::proto::deserialize_code_action(
2453 envelope
2454 .payload
2455 .action
2456 .ok_or_else(|| anyhow!("invalid action"))?,
2457 )?;
2458 let apply_code_action = this.update(&mut cx, |this, cx| {
2459 let buffer = this
2460 .shared_buffers
2461 .get(&sender_id)
2462 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2463 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2464 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2465 })?;
2466
2467 let project_transaction = apply_code_action.await?;
2468 let project_transaction = this.update(&mut cx, |this, cx| {
2469 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2470 });
2471 Ok(proto::ApplyCodeActionResponse {
2472 transaction: Some(project_transaction),
2473 })
2474 }
2475
2476 async fn handle_get_definition(
2477 this: ModelHandle<Self>,
2478 envelope: TypedEnvelope<proto::GetDefinition>,
2479 _: Arc<Client>,
2480 mut cx: AsyncAppContext,
2481 ) -> Result<proto::GetDefinitionResponse> {
2482 let sender_id = envelope.original_sender_id()?;
2483 let position = envelope
2484 .payload
2485 .position
2486 .and_then(deserialize_anchor)
2487 .ok_or_else(|| anyhow!("invalid position"))?;
2488 let definitions = this.update(&mut cx, |this, cx| {
2489 let source_buffer = this
2490 .shared_buffers
2491 .get(&sender_id)
2492 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2493 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2494 if source_buffer.read(cx).can_resolve(&position) {
2495 Ok(this.definition(&source_buffer, position, cx))
2496 } else {
2497 Err(anyhow!("cannot resolve position"))
2498 }
2499 })?;
2500
2501 let definitions = definitions.await?;
2502
2503 this.update(&mut cx, |this, cx| {
2504 let mut response = proto::GetDefinitionResponse {
2505 definitions: Default::default(),
2506 };
2507 for definition in definitions {
2508 let buffer =
2509 this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx);
2510 response.definitions.push(proto::Definition {
2511 target_start: Some(serialize_anchor(&definition.target_range.start)),
2512 target_end: Some(serialize_anchor(&definition.target_range.end)),
2513 buffer: Some(buffer),
2514 });
2515 }
2516 Ok(response)
2517 })
2518 }
2519
2520 async fn handle_open_buffer(
2521 this: ModelHandle<Self>,
2522 envelope: TypedEnvelope<proto::OpenBuffer>,
2523 _: Arc<Client>,
2524 mut cx: AsyncAppContext,
2525 ) -> anyhow::Result<proto::OpenBufferResponse> {
2526 let peer_id = envelope.original_sender_id()?;
2527 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2528 let open_buffer = this.update(&mut cx, |this, cx| {
2529 this.open_buffer(
2530 ProjectPath {
2531 worktree_id,
2532 path: PathBuf::from(envelope.payload.path).into(),
2533 },
2534 cx,
2535 )
2536 });
2537
2538 let buffer = open_buffer.await?;
2539 this.update(&mut cx, |this, cx| {
2540 Ok(proto::OpenBufferResponse {
2541 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
2542 })
2543 })
2544 }
2545
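    // Converts a project transaction into its protobuf form for a specific
    // peer, serializing each affected buffer (full state the first time that
    // buffer is shared with the peer, just its id afterwards) alongside its
    // transaction.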
2546 fn serialize_project_transaction_for_peer(
2547 &mut self,
2548 project_transaction: ProjectTransaction,
2549 peer_id: PeerId,
2550 cx: &AppContext,
2551 ) -> proto::ProjectTransaction {
2552 let mut serialized_transaction = proto::ProjectTransaction {
2553 buffers: Default::default(),
2554 transactions: Default::default(),
2555 };
2556 for (buffer, transaction) in project_transaction.0 {
2557 serialized_transaction
2558 .buffers
2559 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
2560 serialized_transaction
2561 .transactions
2562 .push(language::proto::serialize_transaction(&transaction));
2563 }
2564 serialized_transaction
2565 }
2566
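    // Reconstructs a project transaction received from a peer: resolves each
    // buffer, waits for its edits to be applied locally, and optionally pushes
    // the transaction onto the buffer's undo history.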
2567 fn deserialize_project_transaction(
2568 &mut self,
2569 message: proto::ProjectTransaction,
2570 push_to_history: bool,
2571 cx: &mut ModelContext<Self>,
2572 ) -> Task<Result<ProjectTransaction>> {
2573 cx.spawn(|this, mut cx| async move {
2574 let mut project_transaction = ProjectTransaction::default();
2575 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
2576 let buffer = this
2577 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2578 .await?;
2579 let transaction = language::proto::deserialize_transaction(transaction)?;
2580 project_transaction.0.insert(buffer, transaction);
2581 }
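            // Wait for each transaction's edits to arrive before exposing the
            // transaction to the caller or recording it in the buffer's history.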
2582 for (buffer, transaction) in &project_transaction.0 {
2583 buffer
2584 .update(&mut cx, |buffer, _| {
2585 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2586 })
2587 .await;
2588
2589 if push_to_history {
2590 buffer.update(&mut cx, |buffer, _| {
2591 buffer.push_transaction(transaction.clone(), Instant::now());
2592 });
2593 }
2594 }
2595
2596 Ok(project_transaction)
2597 })
2598 }
2599
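    // Serializes a buffer for transmission to a peer. The full buffer state is
    // sent only the first time a given buffer is shared with that peer;
    // subsequent references send just the buffer id.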
2600 fn serialize_buffer_for_peer(
2601 &mut self,
2602 buffer: &ModelHandle<Buffer>,
2603 peer_id: PeerId,
2604 cx: &AppContext,
2605 ) -> proto::Buffer {
2606 let buffer_id = buffer.read(cx).remote_id();
2607 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2608 match shared_buffers.entry(buffer_id) {
2609 hash_map::Entry::Occupied(_) => proto::Buffer {
2610 variant: Some(proto::buffer::Variant::Id(buffer_id)),
2611 },
2612 hash_map::Entry::Vacant(entry) => {
2613 entry.insert(buffer.clone());
2614 proto::Buffer {
2615 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
2616 }
2617 }
2618 }
2619 }
2620
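    // Resolves a buffer received from a peer. An id variant waits until the
    // corresponding buffer has been opened locally, while a state variant
    // constructs the buffer (and its file, if any) and registers it with the
    // project.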
2621 fn deserialize_buffer(
2622 &mut self,
2623 buffer: proto::Buffer,
2624 cx: &mut ModelContext<Self>,
2625 ) -> Task<Result<ModelHandle<Buffer>>> {
2626 let replica_id = self.replica_id();
2627
2628 let mut opened_buffer_tx = self.opened_buffer.clone();
2629 let mut opened_buffer_rx = self.opened_buffer.subscribe();
2630 cx.spawn(|this, mut cx| async move {
2631 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
2632 proto::buffer::Variant::Id(id) => {
2633 let buffer = loop {
2634 let buffer = this.read_with(&cx, |this, cx| {
2635 this.open_buffers
2636 .get(&(id as usize))
2637 .and_then(|buffer| buffer.upgrade(cx))
2638 });
2639 if let Some(buffer) = buffer {
2640 break buffer;
2641 }
2642 opened_buffer_rx
2643 .recv()
2644 .await
2645 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
2646 };
2647 Ok(buffer)
2648 }
2649 proto::buffer::Variant::State(mut buffer) => {
2650 let mut buffer_worktree = None;
2651 let mut buffer_file = None;
2652 if let Some(file) = buffer.file.take() {
2653 this.read_with(&cx, |this, cx| {
2654 let worktree_id = WorktreeId::from_proto(file.worktree_id);
2655 let worktree =
2656 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
2657 anyhow!("no worktree found for id {}", file.worktree_id)
2658 })?;
2659 buffer_file =
2660 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
2661 as Box<dyn language::File>);
2662 buffer_worktree = Some(worktree);
2663 Ok::<_, anyhow::Error>(())
2664 })?;
2665 }
2666
2667 let buffer = cx.add_model(|cx| {
2668 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
2669 });
2670 this.update(&mut cx, |this, cx| {
2671 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
2672 })?;
2673
2674 let _ = opened_buffer_tx.send(()).await;
2675 Ok(buffer)
2676 }
2677 }
2678 })
2679 }
2680
2681 async fn handle_close_buffer(
2682 this: ModelHandle<Self>,
2683 envelope: TypedEnvelope<proto::CloseBuffer>,
2684 _: Arc<Client>,
2685 mut cx: AsyncAppContext,
2686 ) -> anyhow::Result<()> {
2687 this.update(&mut cx, |this, cx| {
2688 if let Some(shared_buffers) =
2689 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
2690 {
2691 shared_buffers.remove(&envelope.payload.buffer_id);
2692 cx.notify();
2693 }
2694 Ok(())
2695 })
2696 }
2697
2698 async fn handle_buffer_saved(
2699 this: ModelHandle<Self>,
2700 envelope: TypedEnvelope<proto::BufferSaved>,
2701 _: Arc<Client>,
2702 mut cx: AsyncAppContext,
2703 ) -> Result<()> {
2704 let version = envelope.payload.version.try_into()?;
2705 let mtime = envelope
2706 .payload
2707 .mtime
2708 .ok_or_else(|| anyhow!("missing mtime"))?
2709 .into();
2710
2711 this.update(&mut cx, |this, cx| {
2712 let buffer = this
2713 .open_buffers
2714 .get(&(envelope.payload.buffer_id as usize))
2715 .and_then(|buffer| buffer.upgrade(cx));
2716 if let Some(buffer) = buffer {
2717 buffer.update(cx, |buffer, cx| {
2718 buffer.did_save(version, mtime, None, cx);
2719 });
2720 }
2721 Ok(())
2722 })
2723 }
2724
2725 async fn handle_buffer_reloaded(
2726 this: ModelHandle<Self>,
2727 envelope: TypedEnvelope<proto::BufferReloaded>,
2728 _: Arc<Client>,
2729 mut cx: AsyncAppContext,
2730 ) -> Result<()> {
2731 let payload = envelope.payload.clone();
2732 let version = payload.version.try_into()?;
2733 let mtime = payload
2734 .mtime
2735 .ok_or_else(|| anyhow!("missing mtime"))?
2736 .into();
2737 this.update(&mut cx, |this, cx| {
2738 let buffer = this
2739 .open_buffers
2740 .get(&(payload.buffer_id as usize))
2741 .and_then(|buffer| buffer.upgrade(cx));
2742 if let Some(buffer) = buffer {
2743 buffer.update(cx, |buffer, cx| {
2744 buffer.did_reload(version, mtime, cx);
2745 });
2746 }
2747 Ok(())
2748 })
2749 }
2750
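    // Performs a fuzzy path search across all non-weak worktrees on the
    // background executor. Worktree root names are included in the candidates
    // only when more than one worktree is being searched.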
2751 pub fn match_paths<'a>(
2752 &self,
2753 query: &'a str,
2754 include_ignored: bool,
2755 smart_case: bool,
2756 max_results: usize,
2757 cancel_flag: &'a AtomicBool,
2758 cx: &AppContext,
2759 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
2760 let worktrees = self
2761 .worktrees(cx)
2762 .filter(|worktree| !worktree.read(cx).is_weak())
2763 .collect::<Vec<_>>();
2764 let include_root_name = worktrees.len() > 1;
2765 let candidate_sets = worktrees
2766 .into_iter()
2767 .map(|worktree| CandidateSet {
2768 snapshot: worktree.read(cx).snapshot(),
2769 include_ignored,
2770 include_root_name,
2771 })
2772 .collect::<Vec<_>>();
2773
2774 let background = cx.background().clone();
2775 async move {
2776 fuzzy::match_paths(
2777 candidate_sets.as_slice(),
2778 query,
2779 smart_case,
2780 max_results,
2781 cancel_flag,
2782 background,
2783 )
2784 .await
2785 }
2786 }
2787}
2788
2789impl WorktreeHandle {
2790 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2791 match self {
2792 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2793 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2794 }
2795 }
2796}
2797
2798impl OpenBuffer {
2799 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
2800 match self {
2801 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
2802 OpenBuffer::Operations(_) => None,
2803 }
2804 }
2805}
2806
2807struct CandidateSet {
2808 snapshot: Snapshot,
2809 include_ignored: bool,
2810 include_root_name: bool,
2811}
2812
2813impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
2814 type Candidates = CandidateSetIter<'a>;
2815
2816 fn id(&self) -> usize {
2817 self.snapshot.id().to_usize()
2818 }
2819
2820 fn len(&self) -> usize {
2821 if self.include_ignored {
2822 self.snapshot.file_count()
2823 } else {
2824 self.snapshot.visible_file_count()
2825 }
2826 }
2827
2828 fn prefix(&self) -> Arc<str> {
2829 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
2830 self.snapshot.root_name().into()
2831 } else if self.include_root_name {
2832 format!("{}/", self.snapshot.root_name()).into()
2833 } else {
2834 "".into()
2835 }
2836 }
2837
2838 fn candidates(&'a self, start: usize) -> Self::Candidates {
2839 CandidateSetIter {
2840 traversal: self.snapshot.files(self.include_ignored, start),
2841 }
2842 }
2843}
2844
2845struct CandidateSetIter<'a> {
2846 traversal: Traversal<'a>,
2847}
2848
2849impl<'a> Iterator for CandidateSetIter<'a> {
2850 type Item = PathMatchCandidate<'a>;
2851
2852 fn next(&mut self) -> Option<Self::Item> {
2853 self.traversal.next().map(|entry| {
2854 if let EntryKind::File(char_bag) = entry.kind {
2855 PathMatchCandidate {
2856 path: &entry.path,
2857 char_bag,
2858 }
2859 } else {
2860 unreachable!()
2861 }
2862 })
2863 }
2864}
2865
2866impl Entity for Project {
2867 type Event = Event;
2868
2869 fn release(&mut self, _: &mut gpui::MutableAppContext) {
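        // Notify the server when the project is released: the host unregisters
        // the project (if it was ever assigned a remote id), while a guest
        // leaves it.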
2870 match &self.client_state {
2871 ProjectClientState::Local { remote_id_rx, .. } => {
2872 if let Some(project_id) = *remote_id_rx.borrow() {
2873 self.client
2874 .send(proto::UnregisterProject { project_id })
2875 .log_err();
2876 }
2877 }
2878 ProjectClientState::Remote { remote_id, .. } => {
2879 self.client
2880 .send(proto::LeaveProject {
2881 project_id: *remote_id,
2882 })
2883 .log_err();
2884 }
2885 }
2886 }
2887
2888 fn app_will_quit(
2889 &mut self,
2890 _: &mut MutableAppContext,
2891 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
2892 use futures::FutureExt;
2893
2894 let shutdown_futures = self
2895 .language_servers
2896 .drain()
2897 .filter_map(|(_, server)| server.shutdown())
2898 .collect::<Vec<_>>();
2899 Some(
2900 async move {
2901 futures::future::join_all(shutdown_futures).await;
2902 }
2903 .boxed(),
2904 )
2905 }
2906}
2907
2908impl Collaborator {
2909 fn from_proto(
2910 message: proto::Collaborator,
2911 user_store: &ModelHandle<UserStore>,
2912 cx: &mut AsyncAppContext,
2913 ) -> impl Future<Output = Result<Self>> {
2914 let user = user_store.update(cx, |user_store, cx| {
2915 user_store.fetch_user(message.user_id, cx)
2916 });
2917
2918 async move {
2919 Ok(Self {
2920 peer_id: PeerId(message.peer_id),
2921 user: user.await?,
2922 replica_id: message.replica_id as ReplicaId,
2923 })
2924 }
2925 }
2926}
2927
2928impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
2929 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
2930 Self {
2931 worktree_id,
2932 path: path.as_ref().into(),
2933 }
2934 }
2935}
2936
2937impl From<lsp::CreateFileOptions> for fs::CreateOptions {
2938 fn from(options: lsp::CreateFileOptions) -> Self {
2939 Self {
2940 overwrite: options.overwrite.unwrap_or(false),
2941 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2942 }
2943 }
2944}
2945
2946impl From<lsp::RenameFileOptions> for fs::RenameOptions {
2947 fn from(options: lsp::RenameFileOptions) -> Self {
2948 Self {
2949 overwrite: options.overwrite.unwrap_or(false),
2950 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2951 }
2952 }
2953}
2954
2955impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
2956 fn from(options: lsp::DeleteFileOptions) -> Self {
2957 Self {
2958 recursive: options.recursive.unwrap_or(false),
2959 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
2960 }
2961 }
2962}
2963
2964#[cfg(test)]
2965mod tests {
2966 use super::{Event, *};
2967 use client::test::FakeHttpClient;
2968 use fs::RealFs;
2969 use futures::StreamExt;
2970 use gpui::test::subscribe;
2971 use language::{
2972 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageRegistry,
2973 LanguageServerConfig, Point,
2974 };
2975 use lsp::Url;
2976 use serde_json::json;
2977 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
2978 use unindent::Unindent as _;
2979 use util::test::temp_tree;
2980 use worktree::WorktreeHandle as _;
2981
2982 #[gpui::test]
2983 async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
2984 let dir = temp_tree(json!({
2985 "root": {
2986 "apple": "",
2987 "banana": {
2988 "carrot": {
2989 "date": "",
2990 "endive": "",
2991 }
2992 },
2993 "fennel": {
2994 "grape": "",
2995 }
2996 }
2997 }));
2998
2999 let root_link_path = dir.path().join("root_link");
3000 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3001 unix::fs::symlink(
3002 &dir.path().join("root/fennel"),
3003 &dir.path().join("root/finnochio"),
3004 )
3005 .unwrap();
3006
3007 let project = Project::test(Arc::new(RealFs), &mut cx);
3008
3009 let (tree, _) = project
3010 .update(&mut cx, |project, cx| {
3011 project.find_or_create_local_worktree(&root_link_path, false, cx)
3012 })
3013 .await
3014 .unwrap();
3015
3016 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3017 .await;
3018 cx.read(|cx| {
3019 let tree = tree.read(cx);
3020 assert_eq!(tree.file_count(), 5);
3021 assert_eq!(
3022 tree.inode_for_path("fennel/grape"),
3023 tree.inode_for_path("finnochio/grape")
3024 );
3025 });
3026
3027 let cancel_flag = Default::default();
3028 let results = project
3029 .read_with(&cx, |project, cx| {
3030 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3031 })
3032 .await;
3033 assert_eq!(
3034 results
3035 .into_iter()
3036 .map(|result| result.path)
3037 .collect::<Vec<Arc<Path>>>(),
3038 vec![
3039 PathBuf::from("banana/carrot/date").into(),
3040 PathBuf::from("banana/carrot/endive").into(),
3041 ]
3042 );
3043 }
3044
3045 #[gpui::test]
3046 async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
3047 let (language_server_config, mut fake_server) = LanguageServerConfig::fake(&cx).await;
3048 let progress_token = language_server_config
3049 .disk_based_diagnostics_progress_token
3050 .clone()
3051 .unwrap();
3052
3053 let mut languages = LanguageRegistry::new();
3054 languages.add(Arc::new(Language::new(
3055 LanguageConfig {
3056 name: "Rust".to_string(),
3057 path_suffixes: vec!["rs".to_string()],
3058 language_server: Some(language_server_config),
3059 ..Default::default()
3060 },
3061 Some(tree_sitter_rust::language()),
3062 )));
3063
3064 let dir = temp_tree(json!({
3065 "a.rs": "fn a() { A }",
3066 "b.rs": "const y: i32 = 1",
3067 }));
3068
3069 let http_client = FakeHttpClient::with_404_response();
3070 let client = Client::new(http_client.clone());
3071 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
3072
3073 let project = cx.update(|cx| {
3074 Project::local(
3075 client,
3076 user_store,
3077 Arc::new(languages),
3078 Arc::new(RealFs),
3079 cx,
3080 )
3081 });
3082
3083 let (tree, _) = project
3084 .update(&mut cx, |project, cx| {
3085 project.find_or_create_local_worktree(dir.path(), false, cx)
3086 })
3087 .await
3088 .unwrap();
3089 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3090
3091 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3092 .await;
3093
        // Cause the worktree to start the fake language server.
3095 let _buffer = project
3096 .update(&mut cx, |project, cx| {
3097 project.open_buffer(
3098 ProjectPath {
3099 worktree_id,
3100 path: Path::new("b.rs").into(),
3101 },
3102 cx,
3103 )
3104 })
3105 .await
3106 .unwrap();
3107
3108 let mut events = subscribe(&project, &mut cx);
3109
3110 fake_server.start_progress(&progress_token).await;
3111 assert_eq!(
3112 events.next().await.unwrap(),
3113 Event::DiskBasedDiagnosticsStarted
3114 );
3115
3116 fake_server.start_progress(&progress_token).await;
3117 fake_server.end_progress(&progress_token).await;
3118 fake_server.start_progress(&progress_token).await;
3119
3120 fake_server
3121 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3122 uri: Url::from_file_path(dir.path().join("a.rs")).unwrap(),
3123 version: None,
3124 diagnostics: vec![lsp::Diagnostic {
3125 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3126 severity: Some(lsp::DiagnosticSeverity::ERROR),
3127 message: "undefined variable 'A'".to_string(),
3128 ..Default::default()
3129 }],
3130 })
3131 .await;
3132 assert_eq!(
3133 events.next().await.unwrap(),
3134 Event::DiagnosticsUpdated(ProjectPath {
3135 worktree_id,
3136 path: Arc::from(Path::new("a.rs"))
3137 })
3138 );
3139
3140 fake_server.end_progress(&progress_token).await;
3141 fake_server.end_progress(&progress_token).await;
3142 assert_eq!(
3143 events.next().await.unwrap(),
3144 Event::DiskBasedDiagnosticsUpdated
3145 );
3146 assert_eq!(
3147 events.next().await.unwrap(),
3148 Event::DiskBasedDiagnosticsFinished
3149 );
3150
3151 let buffer = project
3152 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3153 .await
3154 .unwrap();
3155
3156 buffer.read_with(&cx, |buffer, _| {
3157 let snapshot = buffer.snapshot();
3158 let diagnostics = snapshot
3159 .diagnostics_in_range::<_, Point>(0..buffer.len())
3160 .collect::<Vec<_>>();
3161 assert_eq!(
3162 diagnostics,
3163 &[DiagnosticEntry {
3164 range: Point::new(0, 9)..Point::new(0, 10),
3165 diagnostic: Diagnostic {
3166 severity: lsp::DiagnosticSeverity::ERROR,
3167 message: "undefined variable 'A'".to_string(),
3168 group_id: 0,
3169 is_primary: true,
3170 ..Default::default()
3171 }
3172 }]
3173 )
3174 });
3175 }
3176
3177 #[gpui::test]
3178 async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
3179 let dir = temp_tree(json!({
3180 "root": {
3181 "dir1": {},
3182 "dir2": {
3183 "dir3": {}
3184 }
3185 }
3186 }));
3187
3188 let project = Project::test(Arc::new(RealFs), &mut cx);
3189 let (tree, _) = project
3190 .update(&mut cx, |project, cx| {
3191 project.find_or_create_local_worktree(&dir.path(), false, cx)
3192 })
3193 .await
3194 .unwrap();
3195
3196 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3197 .await;
3198
3199 let cancel_flag = Default::default();
3200 let results = project
3201 .read_with(&cx, |project, cx| {
3202 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3203 })
3204 .await;
3205
3206 assert!(results.is_empty());
3207 }
3208
3209 #[gpui::test]
3210 async fn test_definition(mut cx: gpui::TestAppContext) {
3211 let (language_server_config, mut fake_server) = LanguageServerConfig::fake(&cx).await;
3212
3213 let mut languages = LanguageRegistry::new();
3214 languages.add(Arc::new(Language::new(
3215 LanguageConfig {
3216 name: "Rust".to_string(),
3217 path_suffixes: vec!["rs".to_string()],
3218 language_server: Some(language_server_config),
3219 ..Default::default()
3220 },
3221 Some(tree_sitter_rust::language()),
3222 )));
3223
3224 let dir = temp_tree(json!({
3225 "a.rs": "const fn a() { A }",
3226 "b.rs": "const y: i32 = crate::a()",
3227 }));
3228 let dir_path = dir.path().to_path_buf();
3229
3230 let http_client = FakeHttpClient::with_404_response();
3231 let client = Client::new(http_client.clone());
3232 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
3233 let project = cx.update(|cx| {
3234 Project::local(
3235 client,
3236 user_store,
3237 Arc::new(languages),
3238 Arc::new(RealFs),
3239 cx,
3240 )
3241 });
3242
3243 let (tree, _) = project
3244 .update(&mut cx, |project, cx| {
3245 project.find_or_create_local_worktree(dir.path().join("b.rs"), false, cx)
3246 })
3247 .await
3248 .unwrap();
3249 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3250 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3251 .await;
3252
3253 let buffer = project
3254 .update(&mut cx, |project, cx| {
3255 project.open_buffer(
3256 ProjectPath {
3257 worktree_id,
3258 path: Path::new("").into(),
3259 },
3260 cx,
3261 )
3262 })
3263 .await
3264 .unwrap();
3265
3266 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params| {
3267 let params = params.text_document_position_params;
3268 assert_eq!(
3269 params.text_document.uri.to_file_path().unwrap(),
3270 dir_path.join("b.rs")
3271 );
3272 assert_eq!(params.position, lsp::Position::new(0, 22));
3273
3274 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3275 lsp::Url::from_file_path(dir_path.join("a.rs")).unwrap(),
3276 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3277 )))
3278 });
3279
3280 let mut definitions = project
3281 .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
3282 .await
3283 .unwrap();
3284
3285 assert_eq!(definitions.len(), 1);
3286 let definition = definitions.pop().unwrap();
3287 cx.update(|cx| {
3288 let target_buffer = definition.target_buffer.read(cx);
3289 assert_eq!(
3290 target_buffer
3291 .file()
3292 .unwrap()
3293 .as_local()
3294 .unwrap()
3295 .abs_path(cx),
3296 dir.path().join("a.rs")
3297 );
3298 assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
3299 assert_eq!(
3300 list_worktrees(&project, cx),
3301 [
3302 (dir.path().join("b.rs"), false),
3303 (dir.path().join("a.rs"), true)
3304 ]
3305 );
3306
3307 drop(definition);
3308 });
3309 cx.read(|cx| {
3310 assert_eq!(
3311 list_worktrees(&project, cx),
3312 [(dir.path().join("b.rs"), false)]
3313 );
3314 });
3315
3316 fn list_worktrees(project: &ModelHandle<Project>, cx: &AppContext) -> Vec<(PathBuf, bool)> {
3317 project
3318 .read(cx)
3319 .worktrees(cx)
3320 .map(|worktree| {
3321 let worktree = worktree.read(cx);
3322 (
3323 worktree.as_local().unwrap().abs_path().to_path_buf(),
3324 worktree.is_weak(),
3325 )
3326 })
3327 .collect::<Vec<_>>()
3328 }
3329 }
3330
3331 #[gpui::test]
3332 async fn test_save_file(mut cx: gpui::TestAppContext) {
3333 let fs = Arc::new(FakeFs::new(cx.background()));
3334 fs.insert_tree(
3335 "/dir",
3336 json!({
3337 "file1": "the old contents",
3338 }),
3339 )
3340 .await;
3341
3342 let project = Project::test(fs.clone(), &mut cx);
3343 let worktree_id = project
3344 .update(&mut cx, |p, cx| {
3345 p.find_or_create_local_worktree("/dir", false, cx)
3346 })
3347 .await
3348 .unwrap()
3349 .0
3350 .read_with(&cx, |tree, _| tree.id());
3351
3352 let buffer = project
3353 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3354 .await
3355 .unwrap();
3356 buffer
3357 .update(&mut cx, |buffer, cx| {
3358 assert_eq!(buffer.text(), "the old contents");
3359 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3360 buffer.save(cx)
3361 })
3362 .await
3363 .unwrap();
3364
3365 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3366 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3367 }
3368
3369 #[gpui::test]
3370 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
3371 let fs = Arc::new(FakeFs::new(cx.background()));
3372 fs.insert_tree(
3373 "/dir",
3374 json!({
3375 "file1": "the old contents",
3376 }),
3377 )
3378 .await;
3379
3380 let project = Project::test(fs.clone(), &mut cx);
3381 let worktree_id = project
3382 .update(&mut cx, |p, cx| {
3383 p.find_or_create_local_worktree("/dir/file1", false, cx)
3384 })
3385 .await
3386 .unwrap()
3387 .0
3388 .read_with(&cx, |tree, _| tree.id());
3389
3390 let buffer = project
3391 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3392 .await
3393 .unwrap();
3394 buffer
3395 .update(&mut cx, |buffer, cx| {
3396 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3397 buffer.save(cx)
3398 })
3399 .await
3400 .unwrap();
3401
3402 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3403 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3404 }
3405
3406 #[gpui::test(retries = 5)]
3407 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3408 let dir = temp_tree(json!({
3409 "a": {
3410 "file1": "",
3411 "file2": "",
3412 "file3": "",
3413 },
3414 "b": {
3415 "c": {
3416 "file4": "",
3417 "file5": "",
3418 }
3419 }
3420 }));
3421
3422 let project = Project::test(Arc::new(RealFs), &mut cx);
3423 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3424
3425 let (tree, _) = project
3426 .update(&mut cx, |p, cx| {
3427 p.find_or_create_local_worktree(dir.path(), false, cx)
3428 })
3429 .await
3430 .unwrap();
3431 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3432
3433 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3434 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3435 async move { buffer.await.unwrap() }
3436 };
3437 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3438 tree.read_with(cx, |tree, _| {
3439 tree.entry_for_path(path)
                    .unwrap_or_else(|| panic!("no entry for path {}", path))
3441 .id
3442 })
3443 };
3444
3445 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3446 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3447 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3448 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3449
3450 let file2_id = id_for_path("a/file2", &cx);
3451 let file3_id = id_for_path("a/file3", &cx);
3452 let file4_id = id_for_path("b/c/file4", &cx);
3453
3454 // Wait for the initial scan.
3455 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3456 .await;
3457
3458 // Create a remote copy of this worktree.
3459 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
3460 let (remote, load_task) = cx.update(|cx| {
3461 Worktree::remote(
3462 1,
3463 1,
3464 initial_snapshot.to_proto(&Default::default(), Default::default()),
3465 rpc.clone(),
3466 cx,
3467 )
3468 });
3469 load_task.await;
3470
3471 cx.read(|cx| {
3472 assert!(!buffer2.read(cx).is_dirty());
3473 assert!(!buffer3.read(cx).is_dirty());
3474 assert!(!buffer4.read(cx).is_dirty());
3475 assert!(!buffer5.read(cx).is_dirty());
3476 });
3477
3478 // Rename and delete files and directories.
3479 tree.flush_fs_events(&cx).await;
3480 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3481 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3482 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3483 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3484 tree.flush_fs_events(&cx).await;
3485
3486 let expected_paths = vec![
3487 "a",
3488 "a/file1",
3489 "a/file2.new",
3490 "b",
3491 "d",
3492 "d/file3",
3493 "d/file4",
3494 ];
3495
3496 cx.read(|app| {
3497 assert_eq!(
3498 tree.read(app)
3499 .paths()
3500 .map(|p| p.to_str().unwrap())
3501 .collect::<Vec<_>>(),
3502 expected_paths
3503 );
3504
3505 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3506 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3507 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3508
3509 assert_eq!(
3510 buffer2.read(app).file().unwrap().path().as_ref(),
3511 Path::new("a/file2.new")
3512 );
3513 assert_eq!(
3514 buffer3.read(app).file().unwrap().path().as_ref(),
3515 Path::new("d/file3")
3516 );
3517 assert_eq!(
3518 buffer4.read(app).file().unwrap().path().as_ref(),
3519 Path::new("d/file4")
3520 );
3521 assert_eq!(
3522 buffer5.read(app).file().unwrap().path().as_ref(),
3523 Path::new("b/c/file5")
3524 );
3525
3526 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3527 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3528 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3529 assert!(buffer5.read(app).file().unwrap().is_deleted());
3530 });
3531
3532 // Update the remote worktree. Check that it becomes consistent with the
3533 // local worktree.
3534 remote.update(&mut cx, |remote, cx| {
3535 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
3536 &initial_snapshot,
3537 1,
3538 1,
3539 0,
3540 true,
3541 );
3542 remote
3543 .as_remote_mut()
3544 .unwrap()
3545 .snapshot
3546 .apply_remote_update(update_message)
3547 .unwrap();
3548
3549 assert_eq!(
3550 remote
3551 .paths()
3552 .map(|p| p.to_str().unwrap())
3553 .collect::<Vec<_>>(),
3554 expected_paths
3555 );
3556 });
3557 }
3558
3559 #[gpui::test]
3560 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3561 let fs = Arc::new(FakeFs::new(cx.background()));
3562 fs.insert_tree(
3563 "/the-dir",
3564 json!({
3565 "a.txt": "a-contents",
3566 "b.txt": "b-contents",
3567 }),
3568 )
3569 .await;
3570
3571 let project = Project::test(fs.clone(), &mut cx);
3572 let worktree_id = project
3573 .update(&mut cx, |p, cx| {
3574 p.find_or_create_local_worktree("/the-dir", false, cx)
3575 })
3576 .await
3577 .unwrap()
3578 .0
3579 .read_with(&cx, |tree, _| tree.id());
3580
3581 // Spawn multiple tasks to open paths, repeating some paths.
3582 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3583 (
3584 p.open_buffer((worktree_id, "a.txt"), cx),
3585 p.open_buffer((worktree_id, "b.txt"), cx),
3586 p.open_buffer((worktree_id, "a.txt"), cx),
3587 )
3588 });
3589
3590 let buffer_a_1 = buffer_a_1.await.unwrap();
3591 let buffer_a_2 = buffer_a_2.await.unwrap();
3592 let buffer_b = buffer_b.await.unwrap();
3593 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3594 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3595
3596 // There is only one buffer per path.
3597 let buffer_a_id = buffer_a_1.id();
3598 assert_eq!(buffer_a_2.id(), buffer_a_id);
3599
3600 // Open the same path again while it is still open.
3601 drop(buffer_a_1);
3602 let buffer_a_3 = project
3603 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3604 .await
3605 .unwrap();
3606
3607 // There's still only one buffer per path.
3608 assert_eq!(buffer_a_3.id(), buffer_a_id);
3609 }
3610
3611 #[gpui::test]
3612 async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
3613 use std::fs;
3614
3615 let dir = temp_tree(json!({
3616 "file1": "abc",
3617 "file2": "def",
3618 "file3": "ghi",
3619 }));
3620
3621 let project = Project::test(Arc::new(RealFs), &mut cx);
3622 let (worktree, _) = project
3623 .update(&mut cx, |p, cx| {
3624 p.find_or_create_local_worktree(dir.path(), false, cx)
3625 })
3626 .await
3627 .unwrap();
3628 let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
3629
3630 worktree.flush_fs_events(&cx).await;
3631 worktree
3632 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3633 .await;
3634
3635 let buffer1 = project
3636 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3637 .await
3638 .unwrap();
3639 let events = Rc::new(RefCell::new(Vec::new()));
3640
3641 // initially, the buffer isn't dirty.
3642 buffer1.update(&mut cx, |buffer, cx| {
3643 cx.subscribe(&buffer1, {
3644 let events = events.clone();
3645 move |_, _, event, _| events.borrow_mut().push(event.clone())
3646 })
3647 .detach();
3648
3649 assert!(!buffer.is_dirty());
3650 assert!(events.borrow().is_empty());
3651
3652 buffer.edit(vec![1..2], "", cx);
3653 });
3654
3655 // after the first edit, the buffer is dirty, and emits a dirtied event.
3656 buffer1.update(&mut cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
3658 assert!(buffer.is_dirty());
3659 assert_eq!(
3660 *events.borrow(),
3661 &[language::Event::Edited, language::Event::Dirtied]
3662 );
3663 events.borrow_mut().clear();
3664 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
3665 });
3666
3667 // after saving, the buffer is not dirty, and emits a saved event.
3668 buffer1.update(&mut cx, |buffer, cx| {
3669 assert!(!buffer.is_dirty());
3670 assert_eq!(*events.borrow(), &[language::Event::Saved]);
3671 events.borrow_mut().clear();
3672
3673 buffer.edit(vec![1..1], "B", cx);
3674 buffer.edit(vec![2..2], "D", cx);
3675 });
3676
        // after editing again, the buffer is dirty, and emits another dirtied event.
3678 buffer1.update(&mut cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
3680 assert!(buffer.is_dirty());
3681 assert_eq!(
3682 *events.borrow(),
3683 &[
3684 language::Event::Edited,
3685 language::Event::Dirtied,
3686 language::Event::Edited,
3687 ],
3688 );
3689 events.borrow_mut().clear();
3690
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
            buffer.edit([1..3], "", cx);
            assert_eq!(buffer.text(), "ac");
3695 assert!(buffer.is_dirty());
3696 });
3697
3698 assert_eq!(*events.borrow(), &[language::Event::Edited]);
3699
3700 // When a file is deleted, the buffer is considered dirty.
3701 let events = Rc::new(RefCell::new(Vec::new()));
3702 let buffer2 = project
3703 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
3704 .await
3705 .unwrap();
3706 buffer2.update(&mut cx, |_, cx| {
3707 cx.subscribe(&buffer2, {
3708 let events = events.clone();
3709 move |_, _, event, _| events.borrow_mut().push(event.clone())
3710 })
3711 .detach();
3712 });
3713
3714 fs::remove_file(dir.path().join("file2")).unwrap();
3715 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
3716 assert_eq!(
3717 *events.borrow(),
3718 &[language::Event::Dirtied, language::Event::FileHandleChanged]
3719 );
3720
        // When a file that is already dirty is deleted, we don't emit another Dirtied event.
3722 let events = Rc::new(RefCell::new(Vec::new()));
3723 let buffer3 = project
3724 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
3725 .await
3726 .unwrap();
3727 buffer3.update(&mut cx, |_, cx| {
3728 cx.subscribe(&buffer3, {
3729 let events = events.clone();
3730 move |_, _, event, _| events.borrow_mut().push(event.clone())
3731 })
3732 .detach();
3733 });
3734
3735 worktree.flush_fs_events(&cx).await;
3736 buffer3.update(&mut cx, |buffer, cx| {
3737 buffer.edit(Some(0..0), "x", cx);
3738 });
3739 events.borrow_mut().clear();
3740 fs::remove_file(dir.path().join("file3")).unwrap();
3741 buffer3
3742 .condition(&cx, |_, _| !events.borrow().is_empty())
3743 .await;
3744 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
3745 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3746 }
3747
3748 #[gpui::test]
3749 async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
3750 use std::fs;
3751
3752 let initial_contents = "aaa\nbbbbb\nc\n";
3753 let dir = temp_tree(json!({ "the-file": initial_contents }));
3754
3755 let project = Project::test(Arc::new(RealFs), &mut cx);
3756 let (worktree, _) = project
3757 .update(&mut cx, |p, cx| {
3758 p.find_or_create_local_worktree(dir.path(), false, cx)
3759 })
3760 .await
3761 .unwrap();
3762 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
3763
3764 worktree
3765 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3766 .await;
3767
        let abs_path = dir.path().join("the-file");
        let buffer = project
            .update(&mut cx, |p, cx| {
                p.open_buffer((worktree_id, "the-file"), cx)
            })
            .await
            .unwrap();

        // TODO
        // Add a cursor on each row.
        // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
        //     assert!(!buffer.is_dirty());
        //     buffer.add_selection_set(
        //         &(0..3)
        //             .map(|row| Selection {
        //                 id: row as usize,
        //                 start: Point::new(row, 1),
        //                 end: Point::new(row, 1),
        //                 reversed: false,
        //                 goal: SelectionGoal::None,
        //             })
        //             .collect::<Vec<_>>(),
        //         cx,
        //     )
        // });

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(&cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(&mut cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer. This marks it dirty, but it doesn't conflict with the
        // file on disk yet.
        buffer.update(&mut cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }

    #[gpui::test]
    async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
        let fs = Arc::new(FakeFs::new(cx.background()));
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

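        // Create a project over the fake file system and open a buffer for a.rs.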
        let project = Project::test(fs.clone(), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

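        // Simulate a language server publishing diagnostics in which each hint's
        // related_information points back at its primary diagnostic, and each primary
        // diagnostic's related_information points at its hints. These relationships
        // drive the grouping asserted below.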
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
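                // The primary warning for the first diagnostic group.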
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
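                // A hint belonging to the first group; its related information points
                // back at the group's primary diagnostic.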
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
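                // The primary error for the second diagnostic group, pointing at its
                // two hints.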
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
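                // The first hint belonging to the second group.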
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
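                // The second hint belonging to the second group.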
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

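        // Ingest the published diagnostics and snapshot the buffer for the
        // assertions below.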
        project
            .update(&mut cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());

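        // Diagnostics are returned in position order, and each hint shares a group_id
        // with its primary diagnostic.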
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

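        // Each group can also be queried individually by its group id.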
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
}