1pub mod fs;
2mod ignore;
3pub mod worktree;
4
5use anyhow::{anyhow, Context, Result};
6use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
7use clock::ReplicaId;
8use collections::{hash_map, HashMap, HashSet};
9use futures::Future;
10use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
11use gpui::{
12 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
13 UpgradeModelHandle, WeakModelHandle,
14};
15use language::{
16 point_from_lsp,
17 proto::{deserialize_anchor, serialize_anchor},
18 range_from_lsp, AnchorRangeExt, Bias, Buffer, CodeAction, Completion, CompletionLabel,
19 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
20 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
21};
22use lsp::{DiagnosticSeverity, LanguageServer};
23use postage::{broadcast, prelude::Stream, sink::Sink, watch};
24use smol::block_on;
25use std::{
26 convert::TryInto,
27 ops::Range,
28 path::{Path, PathBuf},
29 sync::{atomic::AtomicBool, Arc},
30 time::Instant,
31};
32use util::{post_inc, ResultExt, TryFutureExt as _};
33
34pub use fs::*;
35pub use worktree::*;
36
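/// The in-memory state of a project: its worktrees, open buffers, language
/// servers, and (when collaborating) the peers that share it.
///
/// A project is either *local*, backed by the file system through [`Fs`], or
/// *remote*, mirroring a project hosted by another peer over the `client`
/// connection.
///
/// Illustrative sketch only (not a doctest); it assumes `client`, `user_store`,
/// `languages`, `fs`, and an existing `worktree_id` were set up elsewhere:
///
/// ```ignore
/// let project = Project::local(client, user_store, languages, fs, cx);
/// let open_task = project.update(cx, |project, cx| {
///     project.open_buffer(
///         ProjectPath {
///             worktree_id,
///             path: Path::new("src/main.rs").into(),
///         },
///         cx,
///     )
/// });
/// // `open_task` resolves to a `ModelHandle<Buffer>`.
/// ```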
37pub struct Project {
38 worktrees: Vec<WorktreeHandle>,
39 active_entry: Option<ProjectEntry>,
40 languages: Arc<LanguageRegistry>,
41 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
42 client: Arc<client::Client>,
43 user_store: ModelHandle<UserStore>,
44 fs: Arc<dyn Fs>,
45 client_state: ProjectClientState,
46 collaborators: HashMap<PeerId, Collaborator>,
47 subscriptions: Vec<client::Subscription>,
48 language_servers_with_diagnostics_running: isize,
49 open_buffers: HashMap<u64, OpenBuffer>,
50 opened_buffer: broadcast::Sender<()>,
51 loading_buffers: HashMap<
52 ProjectPath,
53 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
54 >,
55 shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
56}
57
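/// An entry in `Project::open_buffers`. While a buffer is still being opened,
/// any operations that arrive for it are queued in `Loading` and applied as
/// soon as the buffer is registered as `Loaded` (see `register_buffer`).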
58enum OpenBuffer {
59 Loaded(WeakModelHandle<Buffer>),
60 Loading(Vec<Operation>),
61}
62
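/// Worktrees are held strongly by default. "Weak" worktrees (for example, ones
/// created implicitly when following a definition into a file outside the
/// project) are only kept alive for as long as something else references them.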
63enum WorktreeHandle {
64 Strong(ModelHandle<Worktree>),
65 Weak(WeakModelHandle<Worktree>),
66}
67
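/// Distinguishes the host of a project from guests who have joined it. A
/// `Local` project may acquire a server-side id once the client connects (see
/// `Project::local`), while a `Remote` project always refers to an existing id
/// on the server.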
68enum ProjectClientState {
69 Local {
70 is_shared: bool,
71 remote_id_tx: watch::Sender<Option<u64>>,
72 remote_id_rx: watch::Receiver<Option<u64>>,
73 _maintain_remote_id_task: Task<Option<()>>,
74 },
75 Remote {
76 sharing_has_stopped: bool,
77 remote_id: u64,
78 replica_id: ReplicaId,
79 },
80}
81
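/// Another peer that has joined this project, along with the replica id that
/// identifies their edits.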
82#[derive(Clone, Debug)]
83pub struct Collaborator {
84 pub user: Arc<User>,
85 pub peer_id: PeerId,
86 pub replica_id: ReplicaId,
87}
88
89#[derive(Clone, Debug, PartialEq)]
90pub enum Event {
91 ActiveEntryChanged(Option<ProjectEntry>),
92 WorktreeRemoved(WorktreeId),
93 DiskBasedDiagnosticsStarted,
94 DiskBasedDiagnosticsUpdated,
95 DiskBasedDiagnosticsFinished,
96 DiagnosticsUpdated(ProjectPath),
97}
98
99#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
100pub struct ProjectPath {
101 pub worktree_id: WorktreeId,
102 pub path: Arc<Path>,
103}
104
105#[derive(Clone, Debug, Default, PartialEq)]
106pub struct DiagnosticSummary {
107 pub error_count: usize,
108 pub warning_count: usize,
109 pub info_count: usize,
110 pub hint_count: usize,
111}
112
113#[derive(Debug)]
114pub struct Definition {
115 pub target_buffer: ModelHandle<Buffer>,
116 pub target_range: Range<language::Anchor>,
117}
118
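/// The set of buffer transactions produced by a project-wide operation such as
/// `format` or `apply_code_action`, keyed by the buffers they apply to.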
119#[derive(Default)]
120pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
121
122impl DiagnosticSummary {
123 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
124 let mut this = Self {
125 error_count: 0,
126 warning_count: 0,
127 info_count: 0,
128 hint_count: 0,
129 };
130
131 for entry in diagnostics {
132 if entry.diagnostic.is_primary {
133 match entry.diagnostic.severity {
134 DiagnosticSeverity::ERROR => this.error_count += 1,
135 DiagnosticSeverity::WARNING => this.warning_count += 1,
136 DiagnosticSeverity::INFORMATION => this.info_count += 1,
137 DiagnosticSeverity::HINT => this.hint_count += 1,
138 _ => {}
139 }
140 }
141 }
142
143 this
144 }
145
146 pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
147 proto::DiagnosticSummary {
148 path: path.to_string_lossy().to_string(),
149 error_count: self.error_count as u32,
150 warning_count: self.warning_count as u32,
151 info_count: self.info_count as u32,
152 hint_count: self.hint_count as u32,
153 }
154 }
155}
156
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
158pub struct ProjectEntry {
159 pub worktree_id: WorktreeId,
160 pub entry_id: usize,
161}
162
163impl Project {
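    /// Registers the RPC message and request handlers that let this entity be
    /// driven by a remote peer.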
164 pub fn init(client: &Arc<Client>) {
165 client.add_entity_message_handler(Self::handle_add_collaborator);
166 client.add_entity_message_handler(Self::handle_buffer_reloaded);
167 client.add_entity_message_handler(Self::handle_buffer_saved);
168 client.add_entity_message_handler(Self::handle_close_buffer);
169 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
170 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
171 client.add_entity_message_handler(Self::handle_remove_collaborator);
172 client.add_entity_message_handler(Self::handle_share_worktree);
173 client.add_entity_message_handler(Self::handle_unregister_worktree);
174 client.add_entity_message_handler(Self::handle_unshare_project);
175 client.add_entity_message_handler(Self::handle_update_buffer_file);
176 client.add_entity_message_handler(Self::handle_update_buffer);
177 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
178 client.add_entity_message_handler(Self::handle_update_worktree);
179 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
180 client.add_entity_request_handler(Self::handle_apply_code_action);
181 client.add_entity_request_handler(Self::handle_format_buffers);
182 client.add_entity_request_handler(Self::handle_get_code_actions);
183 client.add_entity_request_handler(Self::handle_get_completions);
184 client.add_entity_request_handler(Self::handle_get_definition);
185 client.add_entity_request_handler(Self::handle_open_buffer);
186 client.add_entity_request_handler(Self::handle_save_buffer);
187 }
188
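    /// Creates a project backed by the local file system. A background task
    /// watches the client's connection status and, while connected, registers
    /// the project (and each of its worktrees) with the server so that it can
    /// later be shared.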
189 pub fn local(
190 client: Arc<Client>,
191 user_store: ModelHandle<UserStore>,
192 languages: Arc<LanguageRegistry>,
193 fs: Arc<dyn Fs>,
194 cx: &mut MutableAppContext,
195 ) -> ModelHandle<Self> {
196 cx.add_model(|cx: &mut ModelContext<Self>| {
197 let (remote_id_tx, remote_id_rx) = watch::channel();
198 let _maintain_remote_id_task = cx.spawn_weak({
199 let rpc = client.clone();
200 move |this, mut cx| {
201 async move {
202 let mut status = rpc.status();
203 while let Some(status) = status.recv().await {
204 if let Some(this) = this.upgrade(&cx) {
205 let remote_id = if let client::Status::Connected { .. } = status {
206 let response = rpc.request(proto::RegisterProject {}).await?;
207 Some(response.project_id)
208 } else {
209 None
210 };
211
212 if let Some(project_id) = remote_id {
213 let mut registrations = Vec::new();
214 this.update(&mut cx, |this, cx| {
215 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
216 registrations.push(worktree.update(
217 cx,
218 |worktree, cx| {
219 let worktree = worktree.as_local_mut().unwrap();
220 worktree.register(project_id, cx)
221 },
222 ));
223 }
224 });
225 for registration in registrations {
226 registration.await?;
227 }
228 }
229 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
230 }
231 }
232 Ok(())
233 }
234 .log_err()
235 }
236 });
237
238 Self {
239 worktrees: Default::default(),
240 collaborators: Default::default(),
241 open_buffers: Default::default(),
242 loading_buffers: Default::default(),
243 shared_buffers: Default::default(),
244 client_state: ProjectClientState::Local {
245 is_shared: false,
246 remote_id_tx,
247 remote_id_rx,
248 _maintain_remote_id_task,
249 },
250 opened_buffer: broadcast::channel(1).0,
251 subscriptions: Vec::new(),
252 active_entry: None,
253 languages,
254 client,
255 user_store,
256 fs,
257 language_servers_with_diagnostics_running: 0,
258 language_servers: Default::default(),
259 }
260 })
261 }
262
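    /// Joins a project hosted by another peer, replicating its worktrees and
    /// collaborators locally. The returned project becomes read-only once the
    /// host stops sharing (see `is_read_only`).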
263 pub async fn remote(
264 remote_id: u64,
265 client: Arc<Client>,
266 user_store: ModelHandle<UserStore>,
267 languages: Arc<LanguageRegistry>,
268 fs: Arc<dyn Fs>,
269 cx: &mut AsyncAppContext,
270 ) -> Result<ModelHandle<Self>> {
271 client.authenticate_and_connect(&cx).await?;
272
273 let response = client
274 .request(proto::JoinProject {
275 project_id: remote_id,
276 })
277 .await?;
278
279 let replica_id = response.replica_id as ReplicaId;
280
281 let mut worktrees = Vec::new();
282 for worktree in response.worktrees {
283 let (worktree, load_task) = cx
284 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
285 worktrees.push(worktree);
286 load_task.detach();
287 }
288
289 let this = cx.add_model(|cx| {
290 let mut this = Self {
291 worktrees: Vec::new(),
292 open_buffers: Default::default(),
293 loading_buffers: Default::default(),
294 opened_buffer: broadcast::channel(1).0,
295 shared_buffers: Default::default(),
296 active_entry: None,
297 collaborators: Default::default(),
298 languages,
299 user_store: user_store.clone(),
300 fs,
301 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
302 client,
303 client_state: ProjectClientState::Remote {
304 sharing_has_stopped: false,
305 remote_id,
306 replica_id,
307 },
308 language_servers_with_diagnostics_running: 0,
309 language_servers: Default::default(),
310 };
311 for worktree in worktrees {
312 this.add_worktree(&worktree, cx);
313 }
314 this
315 });
316
317 let user_ids = response
318 .collaborators
319 .iter()
320 .map(|peer| peer.user_id)
321 .collect();
322 user_store
323 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
324 .await?;
325 let mut collaborators = HashMap::default();
326 for message in response.collaborators {
327 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
328 collaborators.insert(collaborator.peer_id, collaborator);
329 }
330
331 this.update(cx, |this, _| {
332 this.collaborators = collaborators;
333 });
334
335 Ok(this)
336 }
337
338 #[cfg(any(test, feature = "test-support"))]
339 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
340 let languages = Arc::new(LanguageRegistry::new());
341 let http_client = client::test::FakeHttpClient::with_404_response();
342 let client = client::Client::new(http_client.clone());
343 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
344 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
345 }
346
347 #[cfg(any(test, feature = "test-support"))]
348 pub fn shared_buffer(&self, peer_id: PeerId, remote_id: u64) -> Option<ModelHandle<Buffer>> {
349 self.shared_buffers
350 .get(&peer_id)
351 .and_then(|buffers| buffers.get(&remote_id))
352 .cloned()
353 }
354
355 #[cfg(any(test, feature = "test-support"))]
356 pub fn has_buffered_operations(&self) -> bool {
357 self.open_buffers
358 .values()
359 .any(|buffer| matches!(buffer, OpenBuffer::Loading(_)))
360 }
361
362 pub fn fs(&self) -> &Arc<dyn Fs> {
363 &self.fs
364 }
365
366 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
367 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
368 *remote_id_tx.borrow_mut() = remote_id;
369 }
370
371 self.subscriptions.clear();
372 if let Some(remote_id) = remote_id {
373 self.subscriptions
374 .push(self.client.add_model_for_remote_entity(remote_id, cx));
375 }
376 }
377
378 pub fn remote_id(&self) -> Option<u64> {
379 match &self.client_state {
380 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
381 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
382 }
383 }
384
385 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
386 let mut id = None;
387 let mut watch = None;
388 match &self.client_state {
389 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
390 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
391 }
392
393 async move {
394 if let Some(id) = id {
395 return id;
396 }
397 let mut watch = watch.unwrap();
398 loop {
399 let id = *watch.borrow();
400 if let Some(id) = id {
401 return id;
402 }
403 watch.recv().await;
404 }
405 }
406 }
407
408 pub fn replica_id(&self) -> ReplicaId {
409 match &self.client_state {
410 ProjectClientState::Local { .. } => 0,
411 ProjectClientState::Remote { replica_id, .. } => *replica_id,
412 }
413 }
414
415 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
416 &self.collaborators
417 }
418
419 pub fn worktrees<'a>(
420 &'a self,
421 cx: &'a AppContext,
422 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
423 self.worktrees
424 .iter()
425 .filter_map(move |worktree| worktree.upgrade(cx))
426 }
427
428 pub fn worktree_for_id(
429 &self,
430 id: WorktreeId,
431 cx: &AppContext,
432 ) -> Option<ModelHandle<Worktree>> {
433 self.worktrees(cx)
434 .find(|worktree| worktree.read(cx).id() == id)
435 }
436
437 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
438 let rpc = self.client.clone();
439 cx.spawn(|this, mut cx| async move {
440 let project_id = this.update(&mut cx, |this, _| {
441 if let ProjectClientState::Local {
442 is_shared,
443 remote_id_rx,
444 ..
445 } = &mut this.client_state
446 {
447 *is_shared = true;
448 remote_id_rx
449 .borrow()
450 .ok_or_else(|| anyhow!("no project id"))
451 } else {
452 Err(anyhow!("can't share a remote project"))
453 }
454 })?;
455
456 rpc.request(proto::ShareProject { project_id }).await?;
457 let mut tasks = Vec::new();
458 this.update(&mut cx, |this, cx| {
459 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
460 worktree.update(cx, |worktree, cx| {
461 let worktree = worktree.as_local_mut().unwrap();
462 tasks.push(worktree.share(project_id, cx));
463 });
464 }
465 });
466 for task in tasks {
467 task.await?;
468 }
469 this.update(&mut cx, |_, cx| cx.notify());
470 Ok(())
471 })
472 }
473
474 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
475 let rpc = self.client.clone();
476 cx.spawn(|this, mut cx| async move {
477 let project_id = this.update(&mut cx, |this, _| {
478 if let ProjectClientState::Local {
479 is_shared,
480 remote_id_rx,
481 ..
482 } = &mut this.client_state
483 {
484 *is_shared = false;
485 remote_id_rx
486 .borrow()
487 .ok_or_else(|| anyhow!("no project id"))
488 } else {
                    Err(anyhow!("can't unshare a remote project"))

490 }
491 })?;
492
493 rpc.send(proto::UnshareProject { project_id })?;
494 this.update(&mut cx, |this, cx| {
495 this.collaborators.clear();
496 this.shared_buffers.clear();
497 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
498 worktree.update(cx, |worktree, _| {
499 worktree.as_local_mut().unwrap().unshare();
500 });
501 }
502 cx.notify()
503 });
504 Ok(())
505 })
506 }
507
508 pub fn is_read_only(&self) -> bool {
509 match &self.client_state {
510 ProjectClientState::Local { .. } => false,
511 ProjectClientState::Remote {
512 sharing_has_stopped,
513 ..
514 } => *sharing_has_stopped,
515 }
516 }
517
518 pub fn is_local(&self) -> bool {
519 match &self.client_state {
520 ProjectClientState::Local { .. } => true,
521 ProjectClientState::Remote { .. } => false,
522 }
523 }
524
525 pub fn is_remote(&self) -> bool {
526 !self.is_local()
527 }
528
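    /// Opens the buffer at the given project path, reusing an existing buffer
    /// or an in-flight load for the same path when possible, so concurrent
    /// callers end up sharing a single `ModelHandle<Buffer>`.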
529 pub fn open_buffer(
530 &mut self,
531 path: impl Into<ProjectPath>,
532 cx: &mut ModelContext<Self>,
533 ) -> Task<Result<ModelHandle<Buffer>>> {
534 let project_path = path.into();
535 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
536 worktree
537 } else {
538 return Task::ready(Err(anyhow!("no such worktree")));
539 };
540
541 // If there is already a buffer for the given path, then return it.
542 let existing_buffer = self.get_open_buffer(&project_path, cx);
543 if let Some(existing_buffer) = existing_buffer {
544 return Task::ready(Ok(existing_buffer));
545 }
546
547 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
548 // If the given path is already being loaded, then wait for that existing
549 // task to complete and return the same buffer.
550 hash_map::Entry::Occupied(e) => e.get().clone(),
551
552 // Otherwise, record the fact that this path is now being loaded.
553 hash_map::Entry::Vacant(entry) => {
554 let (mut tx, rx) = postage::watch::channel();
555 entry.insert(rx.clone());
556
557 let load_buffer = if worktree.read(cx).is_local() {
558 self.open_local_buffer(&project_path.path, &worktree, cx)
559 } else {
560 self.open_remote_buffer(&project_path.path, &worktree, cx)
561 };
562
563 cx.spawn(move |this, mut cx| async move {
564 let load_result = load_buffer.await;
565 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
566 // Record the fact that the buffer is no longer loading.
567 this.loading_buffers.remove(&project_path);
568 if this.loading_buffers.is_empty() {
569 this.open_buffers
570 .retain(|_, buffer| matches!(buffer, OpenBuffer::Loaded(_)))
571 }
572
573 let buffer = load_result.map_err(Arc::new)?;
574 Ok(buffer)
575 }));
576 })
577 .detach();
578 rx
579 }
580 };
581
582 cx.foreground().spawn(async move {
583 loop {
584 if let Some(result) = loading_watch.borrow().as_ref() {
585 match result {
586 Ok(buffer) => return Ok(buffer.clone()),
587 Err(error) => return Err(anyhow!("{}", error)),
588 }
589 }
590 loading_watch.recv().await;
591 }
592 })
593 }
594
595 fn open_local_buffer(
596 &mut self,
597 path: &Arc<Path>,
598 worktree: &ModelHandle<Worktree>,
599 cx: &mut ModelContext<Self>,
600 ) -> Task<Result<ModelHandle<Buffer>>> {
601 let load_buffer = worktree.update(cx, |worktree, cx| {
602 let worktree = worktree.as_local_mut().unwrap();
603 worktree.load_buffer(path, cx)
604 });
605 let worktree = worktree.downgrade();
606 cx.spawn(|this, mut cx| async move {
607 let buffer = load_buffer.await?;
608 let worktree = worktree
609 .upgrade(&cx)
610 .ok_or_else(|| anyhow!("worktree was removed"))?;
611 this.update(&mut cx, |this, cx| {
612 this.register_buffer(&buffer, Some(&worktree), cx)
613 })?;
614 Ok(buffer)
615 })
616 }
617
618 fn open_remote_buffer(
619 &mut self,
620 path: &Arc<Path>,
621 worktree: &ModelHandle<Worktree>,
622 cx: &mut ModelContext<Self>,
623 ) -> Task<Result<ModelHandle<Buffer>>> {
624 let rpc = self.client.clone();
625 let project_id = self.remote_id().unwrap();
626 let remote_worktree_id = worktree.read(cx).id();
627 let path = path.clone();
628 let path_string = path.to_string_lossy().to_string();
629 cx.spawn(|this, mut cx| async move {
630 let response = rpc
631 .request(proto::OpenBuffer {
632 project_id,
633 worktree_id: remote_worktree_id.to_proto(),
634 path: path_string,
635 })
636 .await?;
637 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
638 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
639 .await
640 })
641 }
642
643 fn open_local_buffer_from_lsp_path(
644 &mut self,
645 abs_path: lsp::Url,
646 lang_name: String,
647 lang_server: Arc<LanguageServer>,
648 cx: &mut ModelContext<Self>,
649 ) -> Task<Result<ModelHandle<Buffer>>> {
650 cx.spawn(|this, mut cx| async move {
651 let abs_path = abs_path
652 .to_file_path()
653 .map_err(|_| anyhow!("can't convert URI to path"))?;
654 let (worktree, relative_path) = if let Some(result) =
655 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
656 {
657 result
658 } else {
659 let worktree = this
660 .update(&mut cx, |this, cx| {
661 this.create_local_worktree(&abs_path, true, cx)
662 })
663 .await?;
664 this.update(&mut cx, |this, cx| {
665 this.language_servers
666 .insert((worktree.read(cx).id(), lang_name), lang_server);
667 });
668 (worktree, PathBuf::new())
669 };
670
671 let project_path = ProjectPath {
672 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
673 path: relative_path.into(),
674 };
675 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
676 .await
677 })
678 }
679
680 pub fn save_buffer_as(
681 &self,
682 buffer: ModelHandle<Buffer>,
683 abs_path: PathBuf,
684 cx: &mut ModelContext<Project>,
685 ) -> Task<Result<()>> {
686 let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
687 cx.spawn(|this, mut cx| async move {
688 let (worktree, path) = worktree_task.await?;
689 worktree
690 .update(&mut cx, |worktree, cx| {
691 worktree
692 .as_local_mut()
693 .unwrap()
694 .save_buffer_as(buffer.clone(), path, cx)
695 })
696 .await?;
697 this.update(&mut cx, |this, cx| {
698 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
699 });
700 Ok(())
701 })
702 }
703
704 #[cfg(any(test, feature = "test-support"))]
705 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
706 let path = path.into();
707 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
708 self.open_buffers.iter().any(|(_, buffer)| {
709 if let Some(buffer) = buffer.upgrade(cx) {
710 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
711 if file.worktree == worktree && file.path() == &path.path {
712 return true;
713 }
714 }
715 }
716 false
717 })
718 } else {
719 false
720 }
721 }
722
723 fn get_open_buffer(
724 &mut self,
725 path: &ProjectPath,
726 cx: &mut ModelContext<Self>,
727 ) -> Option<ModelHandle<Buffer>> {
728 let mut result = None;
729 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
730 self.open_buffers.retain(|_, buffer| {
731 if let Some(buffer) = buffer.upgrade(cx) {
732 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
733 if file.worktree == worktree && file.path() == &path.path {
734 result = Some(buffer);
735 }
736 }
737 true
738 } else {
739 false
740 }
741 });
742 result
743 }
744
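    /// Records a newly opened buffer, applies any operations that were queued
    /// for it while it was loading, and assigns it a language (and language
    /// server) based on its path.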
745 fn register_buffer(
746 &mut self,
747 buffer: &ModelHandle<Buffer>,
748 worktree: Option<&ModelHandle<Worktree>>,
749 cx: &mut ModelContext<Self>,
750 ) -> Result<()> {
751 match self.open_buffers.insert(
752 buffer.read(cx).remote_id(),
753 OpenBuffer::Loaded(buffer.downgrade()),
754 ) {
755 None => {}
756 Some(OpenBuffer::Loading(operations)) => {
757 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
758 }
759 Some(OpenBuffer::Loaded(_)) => Err(anyhow!("registered the same buffer twice"))?,
760 }
761 self.assign_language_to_buffer(&buffer, worktree, cx);
762 Ok(())
763 }
764
765 fn assign_language_to_buffer(
766 &mut self,
767 buffer: &ModelHandle<Buffer>,
768 worktree: Option<&ModelHandle<Worktree>>,
769 cx: &mut ModelContext<Self>,
770 ) -> Option<()> {
771 let (path, full_path) = {
772 let file = buffer.read(cx).file()?;
773 (file.path().clone(), file.full_path(cx))
774 };
775
776 // If the buffer has a language, set it and start/assign the language server
777 if let Some(language) = self.languages.select_language(&full_path) {
778 buffer.update(cx, |buffer, cx| {
779 buffer.set_language(Some(language.clone()), cx);
780 });
781
782 // For local worktrees, start a language server if needed.
783 // Also assign the language server and any previously stored diagnostics to the buffer.
784 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
785 let worktree_id = local_worktree.id();
786 let worktree_abs_path = local_worktree.abs_path().clone();
787
788 let language_server = match self
789 .language_servers
790 .entry((worktree_id, language.name().to_string()))
791 {
792 hash_map::Entry::Occupied(e) => Some(e.get().clone()),
793 hash_map::Entry::Vacant(e) => Self::start_language_server(
794 self.client.clone(),
795 language.clone(),
796 &worktree_abs_path,
797 cx,
798 )
799 .map(|server| e.insert(server).clone()),
800 };
801
802 buffer.update(cx, |buffer, cx| {
803 buffer.set_language_server(language_server, cx);
804 });
805 }
806 }
807
808 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
809 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
810 buffer.update(cx, |buffer, cx| {
811 buffer.update_diagnostics(diagnostics, None, cx).log_err();
812 });
813 }
814 }
815
816 None
817 }
818
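    /// Starts a language server for the given language rooted at the worktree
    /// path, and wires its `PublishDiagnostics` and `Progress` notifications
    /// into a channel that updates this project's diagnostics and forwards
    /// start/finish events to collaborators.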
819 fn start_language_server(
820 rpc: Arc<Client>,
821 language: Arc<Language>,
822 worktree_path: &Path,
823 cx: &mut ModelContext<Self>,
824 ) -> Option<Arc<LanguageServer>> {
825 enum LspEvent {
826 DiagnosticsStart,
827 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
828 DiagnosticsFinish,
829 }
830
831 let language_server = language
832 .start_server(worktree_path, cx)
833 .log_err()
834 .flatten()?;
835 let disk_based_sources = language
836 .disk_based_diagnostic_sources()
837 .cloned()
838 .unwrap_or_default();
839 let disk_based_diagnostics_progress_token =
840 language.disk_based_diagnostics_progress_token().cloned();
841 let has_disk_based_diagnostic_progress_token =
842 disk_based_diagnostics_progress_token.is_some();
843 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
844
845 // Listen for `PublishDiagnostics` notifications.
846 language_server
847 .on_notification::<lsp::notification::PublishDiagnostics, _>({
848 let diagnostics_tx = diagnostics_tx.clone();
849 move |params| {
850 if !has_disk_based_diagnostic_progress_token {
851 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
852 }
853 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params))).ok();
854 if !has_disk_based_diagnostic_progress_token {
855 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
856 }
857 }
858 })
859 .detach();
860
861 // Listen for `Progress` notifications. Send an event when the language server
862 // transitions between running jobs and not running any jobs.
863 let mut running_jobs_for_this_server: i32 = 0;
864 language_server
865 .on_notification::<lsp::notification::Progress, _>(move |params| {
866 let token = match params.token {
867 lsp::NumberOrString::Number(_) => None,
868 lsp::NumberOrString::String(token) => Some(token),
869 };
870
871 if token == disk_based_diagnostics_progress_token {
872 match params.value {
873 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
874 lsp::WorkDoneProgress::Begin(_) => {
875 running_jobs_for_this_server += 1;
876 if running_jobs_for_this_server == 1 {
877 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
878 }
879 }
880 lsp::WorkDoneProgress::End(_) => {
881 running_jobs_for_this_server -= 1;
882 if running_jobs_for_this_server == 0 {
883 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
884 }
885 }
886 _ => {}
887 },
888 }
889 }
890 })
891 .detach();
892
893 // Process all the LSP events.
894 cx.spawn_weak(|this, mut cx| async move {
895 while let Ok(message) = diagnostics_rx.recv().await {
896 let this = this.upgrade(&cx)?;
897 match message {
898 LspEvent::DiagnosticsStart => {
899 this.update(&mut cx, |this, cx| {
900 this.disk_based_diagnostics_started(cx);
901 if let Some(project_id) = this.remote_id() {
902 rpc.send(proto::DiskBasedDiagnosticsUpdating { project_id })
903 .log_err();
904 }
905 });
906 }
907 LspEvent::DiagnosticsUpdate(mut params) => {
908 language.process_diagnostics(&mut params);
909 this.update(&mut cx, |this, cx| {
910 this.update_diagnostics(params, &disk_based_sources, cx)
911 .log_err();
912 });
913 }
914 LspEvent::DiagnosticsFinish => {
915 this.update(&mut cx, |this, cx| {
916 this.disk_based_diagnostics_finished(cx);
917 if let Some(project_id) = this.remote_id() {
918 rpc.send(proto::DiskBasedDiagnosticsUpdated { project_id })
919 .log_err();
920 }
921 });
922 }
923 }
924 }
925 Some(())
926 })
927 .detach();
928
929 Some(language_server)
930 }
931
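    /// Converts a raw LSP `PublishDiagnostics` notification into diagnostic
    /// entries. Each diagnostic whose related information does not point back
    /// at an existing primary starts a new group, pulling its related
    /// locations in as non-primary entries; diagnostics that do point back at
    /// a primary only contribute a severity override for the matching
    /// (source, code, range) entry in that group.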
932 pub fn update_diagnostics(
933 &mut self,
934 params: lsp::PublishDiagnosticsParams,
935 disk_based_sources: &HashSet<String>,
936 cx: &mut ModelContext<Self>,
937 ) -> Result<()> {
938 let abs_path = params
939 .uri
940 .to_file_path()
941 .map_err(|_| anyhow!("URI is not a file"))?;
942 let mut next_group_id = 0;
943 let mut diagnostics = Vec::default();
944 let mut primary_diagnostic_group_ids = HashMap::default();
945 let mut sources_by_group_id = HashMap::default();
946 let mut supporting_diagnostic_severities = HashMap::default();
        for diagnostic in &params.diagnostics {
948 let source = diagnostic.source.as_ref();
949 let code = diagnostic.code.as_ref().map(|code| match code {
950 lsp::NumberOrString::Number(code) => code.to_string(),
951 lsp::NumberOrString::String(code) => code.clone(),
952 });
953 let range = range_from_lsp(diagnostic.range);
954 let is_supporting = diagnostic
955 .related_information
956 .as_ref()
957 .map_or(false, |infos| {
958 infos.iter().any(|info| {
959 primary_diagnostic_group_ids.contains_key(&(
960 source,
961 code.clone(),
962 range_from_lsp(info.location.range),
963 ))
964 })
965 });
966
967 if is_supporting {
968 if let Some(severity) = diagnostic.severity {
969 supporting_diagnostic_severities
970 .insert((source, code.clone(), range), severity);
971 }
972 } else {
973 let group_id = post_inc(&mut next_group_id);
974 let is_disk_based =
975 source.map_or(false, |source| disk_based_sources.contains(source));
976
977 sources_by_group_id.insert(group_id, source);
978 primary_diagnostic_group_ids
979 .insert((source, code.clone(), range.clone()), group_id);
980
981 diagnostics.push(DiagnosticEntry {
982 range,
983 diagnostic: Diagnostic {
984 code: code.clone(),
985 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
986 message: diagnostic.message.clone(),
987 group_id,
988 is_primary: true,
989 is_valid: true,
990 is_disk_based,
991 },
992 });
993 if let Some(infos) = &diagnostic.related_information {
994 for info in infos {
995 if info.location.uri == params.uri && !info.message.is_empty() {
996 let range = range_from_lsp(info.location.range);
997 diagnostics.push(DiagnosticEntry {
998 range,
999 diagnostic: Diagnostic {
1000 code: code.clone(),
1001 severity: DiagnosticSeverity::INFORMATION,
1002 message: info.message.clone(),
1003 group_id,
1004 is_primary: false,
1005 is_valid: true,
1006 is_disk_based,
1007 },
1008 });
1009 }
1010 }
1011 }
1012 }
1013 }
1014
1015 for entry in &mut diagnostics {
1016 let diagnostic = &mut entry.diagnostic;
1017 if !diagnostic.is_primary {
1018 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1019 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1020 source,
1021 diagnostic.code.clone(),
1022 entry.range.clone(),
1023 )) {
1024 diagnostic.severity = severity;
1025 }
1026 }
1027 }
1028
1029 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1030 Ok(())
1031 }
1032
1033 pub fn update_diagnostic_entries(
1034 &mut self,
1035 abs_path: PathBuf,
1036 version: Option<i32>,
1037 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1038 cx: &mut ModelContext<Project>,
1039 ) -> Result<(), anyhow::Error> {
1040 let (worktree, relative_path) = self
1041 .find_local_worktree(&abs_path, cx)
1042 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1043 let project_path = ProjectPath {
1044 worktree_id: worktree.read(cx).id(),
1045 path: relative_path.into(),
1046 };
1047
1048 for buffer in self.open_buffers.values() {
1049 if let Some(buffer) = buffer.upgrade(cx) {
1050 if buffer
1051 .read(cx)
1052 .file()
1053 .map_or(false, |file| *file.path() == project_path.path)
1054 {
1055 buffer.update(cx, |buffer, cx| {
1056 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1057 })?;
1058 break;
1059 }
1060 }
1061 }
1062 worktree.update(cx, |worktree, cx| {
1063 worktree
1064 .as_local_mut()
1065 .ok_or_else(|| anyhow!("not a local worktree"))?
1066 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1067 })?;
1068 cx.emit(Event::DiagnosticsUpdated(project_path));
1069 Ok(())
1070 }
1071
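    /// Formats the given buffers via their language servers, applying the
    /// resulting edits locally and delegating remote buffers to the host.
    /// Returns the per-buffer transactions that were applied.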
1072 pub fn format(
1073 &self,
1074 buffers: HashSet<ModelHandle<Buffer>>,
1075 push_to_history: bool,
1076 cx: &mut ModelContext<Project>,
1077 ) -> Task<Result<ProjectTransaction>> {
1078 let mut local_buffers = Vec::new();
1079 let mut remote_buffers = None;
1080 for buffer_handle in buffers {
1081 let buffer = buffer_handle.read(cx);
1082 let worktree;
1083 if let Some(file) = File::from_dyn(buffer.file()) {
1084 worktree = file.worktree.clone();
1085 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1086 let lang_server;
1087 if let Some(lang) = buffer.language() {
1088 if let Some(server) = self
1089 .language_servers
1090 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1091 {
1092 lang_server = server.clone();
1093 } else {
1094 return Task::ready(Ok(Default::default()));
1095 };
1096 } else {
1097 return Task::ready(Ok(Default::default()));
1098 }
1099
1100 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1101 } else {
1102 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1103 }
1104 } else {
1105 return Task::ready(Ok(Default::default()));
1106 }
1107 }
1108
1109 let remote_buffers = self.remote_id().zip(remote_buffers);
1110 let client = self.client.clone();
1111
1112 cx.spawn(|this, mut cx| async move {
1113 let mut project_transaction = ProjectTransaction::default();
1114
1115 if let Some((project_id, remote_buffers)) = remote_buffers {
1116 let response = client
1117 .request(proto::FormatBuffers {
1118 project_id,
1119 buffer_ids: remote_buffers
1120 .iter()
1121 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1122 .collect(),
1123 })
1124 .await?
1125 .transaction
1126 .ok_or_else(|| anyhow!("missing transaction"))?;
1127 project_transaction = this
1128 .update(&mut cx, |this, cx| {
1129 this.deserialize_project_transaction(response, push_to_history, cx)
1130 })
1131 .await?;
1132 }
1133
1134 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1135 let lsp_edits = lang_server
1136 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1137 text_document: lsp::TextDocumentIdentifier::new(
1138 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1139 ),
1140 options: Default::default(),
1141 work_done_progress_params: Default::default(),
1142 })
1143 .await?;
1144
1145 if let Some(lsp_edits) = lsp_edits {
1146 let edits = buffer
1147 .update(&mut cx, |buffer, cx| {
1148 buffer.edits_from_lsp(lsp_edits, None, cx)
1149 })
1150 .await?;
1151 buffer.update(&mut cx, |buffer, cx| {
1152 buffer.finalize_last_transaction();
1153 buffer.start_transaction();
1154 for (range, text) in edits {
1155 buffer.edit([range], text, cx);
1156 }
1157 if buffer.end_transaction(cx).is_some() {
1158 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1159 if !push_to_history {
1160 buffer.forget_transaction(transaction.id);
1161 }
1162 project_transaction.0.insert(cx.handle(), transaction);
1163 }
1164 });
1165 }
1166 }
1167
1168 Ok(project_transaction)
1169 })
1170 }
1171
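    /// Resolves the definition(s) of the symbol at `position`, either by
    /// querying the buffer's language server directly (local projects) or by
    /// asking the host over RPC (remote projects). Definitions that land
    /// outside the current worktrees open a new, weak worktree.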
1172 pub fn definition<T: ToPointUtf16>(
1173 &self,
1174 source_buffer_handle: &ModelHandle<Buffer>,
1175 position: T,
1176 cx: &mut ModelContext<Self>,
1177 ) -> Task<Result<Vec<Definition>>> {
1178 let source_buffer_handle = source_buffer_handle.clone();
1179 let source_buffer = source_buffer_handle.read(cx);
1180 let worktree;
1181 let buffer_abs_path;
1182 if let Some(file) = File::from_dyn(source_buffer.file()) {
1183 worktree = file.worktree.clone();
1184 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1185 } else {
1186 return Task::ready(Ok(Default::default()));
1187 };
1188
1189 let position = position.to_point_utf16(source_buffer);
1190
1191 if worktree.read(cx).as_local().is_some() {
1192 let buffer_abs_path = buffer_abs_path.unwrap();
1193 let lang_name;
1194 let lang_server;
1195 if let Some(lang) = source_buffer.language() {
1196 lang_name = lang.name().to_string();
1197 if let Some(server) = self
1198 .language_servers
1199 .get(&(worktree.read(cx).id(), lang_name.clone()))
1200 {
1201 lang_server = server.clone();
1202 } else {
1203 return Task::ready(Ok(Default::default()));
1204 };
1205 } else {
1206 return Task::ready(Ok(Default::default()));
1207 }
1208
1209 cx.spawn(|this, mut cx| async move {
1210 let response = lang_server
1211 .request::<lsp::request::GotoDefinition>(lsp::GotoDefinitionParams {
1212 text_document_position_params: lsp::TextDocumentPositionParams {
1213 text_document: lsp::TextDocumentIdentifier::new(
1214 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1215 ),
1216 position: lsp::Position::new(position.row, position.column),
1217 },
1218 work_done_progress_params: Default::default(),
1219 partial_result_params: Default::default(),
1220 })
1221 .await?;
1222
1223 let mut definitions = Vec::new();
1224 if let Some(response) = response {
1225 let mut unresolved_locations = Vec::new();
1226 match response {
1227 lsp::GotoDefinitionResponse::Scalar(loc) => {
1228 unresolved_locations.push((loc.uri, loc.range));
1229 }
1230 lsp::GotoDefinitionResponse::Array(locs) => {
1231 unresolved_locations.extend(locs.into_iter().map(|l| (l.uri, l.range)));
1232 }
1233 lsp::GotoDefinitionResponse::Link(links) => {
1234 unresolved_locations.extend(
1235 links
1236 .into_iter()
1237 .map(|l| (l.target_uri, l.target_selection_range)),
1238 );
1239 }
1240 }
1241
1242 for (target_uri, target_range) in unresolved_locations {
1243 let target_buffer_handle = this
1244 .update(&mut cx, |this, cx| {
1245 this.open_local_buffer_from_lsp_path(
1246 target_uri,
1247 lang_name.clone(),
1248 lang_server.clone(),
1249 cx,
1250 )
1251 })
1252 .await?;
1253
1254 cx.read(|cx| {
1255 let target_buffer = target_buffer_handle.read(cx);
1256 let target_start = target_buffer
1257 .clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
1258 let target_end = target_buffer
1259 .clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
1260 definitions.push(Definition {
1261 target_buffer: target_buffer_handle,
1262 target_range: target_buffer.anchor_after(target_start)
1263 ..target_buffer.anchor_before(target_end),
1264 });
1265 });
1266 }
1267 }
1268
1269 Ok(definitions)
1270 })
1271 } else if let Some(project_id) = self.remote_id() {
1272 let client = self.client.clone();
1273 let request = proto::GetDefinition {
1274 project_id,
1275 buffer_id: source_buffer.remote_id(),
1276 position: Some(serialize_anchor(&source_buffer.anchor_before(position))),
1277 };
1278 cx.spawn(|this, mut cx| async move {
1279 let response = client.request(request).await?;
1280 let mut definitions = Vec::new();
1281 for definition in response.definitions {
1282 let buffer = definition.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1283 let target_buffer = this
1284 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1285 .await?;
1286 let target_start = definition
1287 .target_start
1288 .and_then(deserialize_anchor)
1289 .ok_or_else(|| anyhow!("missing target start"))?;
1290 let target_end = definition
1291 .target_end
1292 .and_then(deserialize_anchor)
1293 .ok_or_else(|| anyhow!("missing target end"))?;
1294 definitions.push(Definition {
1295 target_buffer,
1296 target_range: target_start..target_end,
1297 })
1298 }
1299
1300 Ok(definitions)
1301 })
1302 } else {
1303 Task::ready(Ok(Default::default()))
1304 }
1305 }
1306
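    /// Requests completions at `position` from the buffer's language server
    /// (or from the host, for remote projects), discarding completions whose
    /// edit ranges no longer fit the current state of the buffer.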
1307 pub fn completions<T: ToPointUtf16>(
1308 &self,
1309 source_buffer_handle: &ModelHandle<Buffer>,
1310 position: T,
1311 cx: &mut ModelContext<Self>,
1312 ) -> Task<Result<Vec<Completion>>> {
1313 let source_buffer_handle = source_buffer_handle.clone();
1314 let source_buffer = source_buffer_handle.read(cx);
1315 let buffer_id = source_buffer.remote_id();
1316 let language = source_buffer.language().cloned();
1317 let worktree;
1318 let buffer_abs_path;
1319 if let Some(file) = File::from_dyn(source_buffer.file()) {
1320 worktree = file.worktree.clone();
1321 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1322 } else {
1323 return Task::ready(Ok(Default::default()));
1324 };
1325
1326 let position = position.to_point_utf16(source_buffer);
1327 let anchor = source_buffer.anchor_after(position);
1328
1329 if worktree.read(cx).as_local().is_some() {
1330 let buffer_abs_path = buffer_abs_path.unwrap();
1331 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1332 server
1333 } else {
1334 return Task::ready(Ok(Default::default()));
1335 };
1336
1337 cx.spawn(|_, cx| async move {
1338 let completions = lang_server
1339 .request::<lsp::request::Completion>(lsp::CompletionParams {
1340 text_document_position: lsp::TextDocumentPositionParams::new(
1341 lsp::TextDocumentIdentifier::new(
1342 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1343 ),
1344 position.to_lsp_position(),
1345 ),
1346 context: Default::default(),
1347 work_done_progress_params: Default::default(),
1348 partial_result_params: Default::default(),
1349 })
1350 .await
1351 .context("lsp completion request failed")?;
1352
1353 let completions = if let Some(completions) = completions {
1354 match completions {
1355 lsp::CompletionResponse::Array(completions) => completions,
1356 lsp::CompletionResponse::List(list) => list.items,
1357 }
1358 } else {
1359 Default::default()
1360 };
1361
1362 source_buffer_handle.read_with(&cx, |this, _| {
1363 Ok(completions
1364 .into_iter()
1365 .filter_map(|lsp_completion| {
1366 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1367 lsp::CompletionTextEdit::Edit(edit) => {
1368 (range_from_lsp(edit.range), edit.new_text.clone())
1369 }
1370 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1371 log::info!("unsupported insert/replace completion");
1372 return None;
1373 }
1374 };
1375
1376 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1377 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1378 if clipped_start == old_range.start && clipped_end == old_range.end {
1379 Some(Completion {
1380 old_range: this.anchor_before(old_range.start)
1381 ..this.anchor_after(old_range.end),
1382 new_text,
1383 label: language
1384 .as_ref()
1385 .and_then(|l| l.label_for_completion(&lsp_completion))
1386 .unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
1387 lsp_completion,
1388 })
1389 } else {
1390 None
1391 }
1392 })
1393 .collect())
1394 })
1395 })
1396 } else if let Some(project_id) = self.remote_id() {
1397 let rpc = self.client.clone();
1398 let message = proto::GetCompletions {
1399 project_id,
1400 buffer_id,
1401 position: Some(language::proto::serialize_anchor(&anchor)),
1402 version: (&source_buffer.version()).into(),
1403 };
1404 cx.spawn_weak(|_, mut cx| async move {
1405 let response = rpc.request(message).await?;
1406
1407 source_buffer_handle
1408 .update(&mut cx, |buffer, _| {
1409 buffer.wait_for_version(response.version.into())
1410 })
1411 .await;
1412
1413 response
1414 .completions
1415 .into_iter()
1416 .map(|completion| {
1417 language::proto::deserialize_completion(completion, language.as_ref())
1418 })
1419 .collect()
1420 })
1421 } else {
1422 Task::ready(Ok(Default::default()))
1423 }
1424 }
1425
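    /// Resolves the given completion with the language server and applies any
    /// `additional_text_edits` it carries, optionally pushing the resulting
    /// transaction onto the buffer's undo history.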
1426 pub fn apply_additional_edits_for_completion(
1427 &self,
1428 buffer_handle: ModelHandle<Buffer>,
1429 completion: Completion,
1430 push_to_history: bool,
1431 cx: &mut ModelContext<Self>,
1432 ) -> Task<Result<Option<Transaction>>> {
1433 let buffer = buffer_handle.read(cx);
1434 let buffer_id = buffer.remote_id();
1435
1436 if self.is_local() {
1437 let lang_server = if let Some(language_server) = buffer.language_server() {
1438 language_server.clone()
1439 } else {
1440 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1441 };
1442
1443 cx.spawn(|_, mut cx| async move {
1444 let resolved_completion = lang_server
1445 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1446 .await?;
1447 if let Some(edits) = resolved_completion.additional_text_edits {
1448 let edits = buffer_handle
1449 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1450 .await?;
1451 buffer_handle.update(&mut cx, |buffer, cx| {
1452 buffer.finalize_last_transaction();
1453 buffer.start_transaction();
1454 for (range, text) in edits {
1455 buffer.edit([range], text, cx);
1456 }
1457 let transaction = if buffer.end_transaction(cx).is_some() {
1458 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1459 if !push_to_history {
1460 buffer.forget_transaction(transaction.id);
1461 }
1462 Some(transaction)
1463 } else {
1464 None
1465 };
1466 Ok(transaction)
1467 })
1468 } else {
1469 Ok(None)
1470 }
1471 })
1472 } else if let Some(project_id) = self.remote_id() {
1473 let client = self.client.clone();
1474 cx.spawn(|_, mut cx| async move {
1475 let response = client
1476 .request(proto::ApplyCompletionAdditionalEdits {
1477 project_id,
1478 buffer_id,
1479 completion: Some(language::proto::serialize_completion(&completion)),
1480 })
1481 .await?;
1482
1483 if let Some(transaction) = response.transaction {
1484 let transaction = language::proto::deserialize_transaction(transaction)?;
1485 buffer_handle
1486 .update(&mut cx, |buffer, _| {
1487 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1488 })
1489 .await;
1490 if push_to_history {
1491 buffer_handle.update(&mut cx, |buffer, _| {
1492 buffer.push_transaction(transaction.clone(), Instant::now());
1493 });
1494 }
1495 Ok(Some(transaction))
1496 } else {
1497 Ok(None)
1498 }
1499 })
1500 } else {
1501 Task::ready(Err(anyhow!("project does not have a remote id")))
1502 }
1503 }
1504
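    /// Fetches the quickfix and refactor code actions that are available for
    /// the given range.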
1505 pub fn code_actions<T: ToOffset>(
1506 &self,
1507 buffer_handle: &ModelHandle<Buffer>,
1508 range: Range<T>,
1509 cx: &mut ModelContext<Self>,
1510 ) -> Task<Result<Vec<CodeAction>>> {
1511 let buffer_handle = buffer_handle.clone();
1512 let buffer = buffer_handle.read(cx);
1513 let buffer_id = buffer.remote_id();
1514 let worktree;
1515 let buffer_abs_path;
1516 if let Some(file) = File::from_dyn(buffer.file()) {
1517 worktree = file.worktree.clone();
1518 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1519 } else {
1520 return Task::ready(Ok(Default::default()));
1521 };
1522 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1523
1524 if worktree.read(cx).as_local().is_some() {
1525 let buffer_abs_path = buffer_abs_path.unwrap();
1526 let lang_name;
1527 let lang_server;
1528 if let Some(lang) = buffer.language() {
1529 lang_name = lang.name().to_string();
1530 if let Some(server) = self
1531 .language_servers
1532 .get(&(worktree.read(cx).id(), lang_name.clone()))
1533 {
1534 lang_server = server.clone();
1535 } else {
1536 return Task::ready(Ok(Default::default()));
1537 };
1538 } else {
1539 return Task::ready(Ok(Default::default()));
1540 }
1541
1542 let lsp_range = lsp::Range::new(
1543 range.start.to_point_utf16(buffer).to_lsp_position(),
1544 range.end.to_point_utf16(buffer).to_lsp_position(),
1545 );
1546 cx.foreground().spawn(async move {
1547 Ok(lang_server
1548 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1549 text_document: lsp::TextDocumentIdentifier::new(
1550 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1551 ),
1552 range: lsp_range,
1553 work_done_progress_params: Default::default(),
1554 partial_result_params: Default::default(),
1555 context: lsp::CodeActionContext {
1556 diagnostics: Default::default(),
1557 only: Some(vec![
1558 lsp::CodeActionKind::QUICKFIX,
1559 lsp::CodeActionKind::REFACTOR,
1560 lsp::CodeActionKind::REFACTOR_EXTRACT,
1561 ]),
1562 },
1563 })
1564 .await?
1565 .unwrap_or_default()
1566 .into_iter()
1567 .filter_map(|entry| {
1568 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1569 Some(CodeAction {
1570 range: range.clone(),
1571 lsp_action,
1572 })
1573 } else {
1574 None
1575 }
1576 })
1577 .collect())
1578 })
1579 } else if let Some(project_id) = self.remote_id() {
1580 let rpc = self.client.clone();
1581 cx.spawn_weak(|_, mut cx| async move {
1582 let response = rpc
1583 .request(proto::GetCodeActions {
1584 project_id,
1585 buffer_id,
1586 start: Some(language::proto::serialize_anchor(&range.start)),
1587 end: Some(language::proto::serialize_anchor(&range.end)),
1588 })
1589 .await?;
1590
1591 buffer_handle
1592 .update(&mut cx, |buffer, _| {
1593 buffer.wait_for_version(response.version.into())
1594 })
1595 .await;
1596
1597 response
1598 .actions
1599 .into_iter()
1600 .map(language::proto::deserialize_code_action)
1601 .collect()
1602 })
1603 } else {
1604 Task::ready(Ok(Default::default()))
1605 }
1606 }
1607
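    /// Resolves and applies a code action, performing any workspace edits it
    /// describes: creating, renaming, or deleting files and editing buffers.
    /// Returns the transactions applied to each affected buffer.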
1608 pub fn apply_code_action(
1609 &self,
1610 buffer_handle: ModelHandle<Buffer>,
1611 mut action: CodeAction,
1612 push_to_history: bool,
1613 cx: &mut ModelContext<Self>,
1614 ) -> Task<Result<ProjectTransaction>> {
1615 if self.is_local() {
1616 let buffer = buffer_handle.read(cx);
1617 let lang_name = if let Some(lang) = buffer.language() {
1618 lang.name().to_string()
1619 } else {
1620 return Task::ready(Ok(Default::default()));
1621 };
1622 let lang_server = if let Some(language_server) = buffer.language_server() {
1623 language_server.clone()
1624 } else {
1625 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1626 };
1627 let range = action.range.to_point_utf16(buffer);
1628 let fs = self.fs.clone();
1629
1630 cx.spawn(|this, mut cx| async move {
1631 if let Some(lsp_range) = action
1632 .lsp_action
1633 .data
1634 .as_mut()
1635 .and_then(|d| d.get_mut("codeActionParams"))
1636 .and_then(|d| d.get_mut("range"))
1637 {
1638 *lsp_range = serde_json::to_value(&lsp::Range::new(
1639 range.start.to_lsp_position(),
1640 range.end.to_lsp_position(),
1641 ))
1642 .unwrap();
1643 action.lsp_action = lang_server
1644 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1645 .await?;
1646 } else {
1647 let actions = this
1648 .update(&mut cx, |this, cx| {
1649 this.code_actions(&buffer_handle, action.range, cx)
1650 })
1651 .await?;
1652 action.lsp_action = actions
1653 .into_iter()
1654 .find(|a| a.lsp_action.title == action.lsp_action.title)
1655 .ok_or_else(|| anyhow!("code action is outdated"))?
1656 .lsp_action;
1657 }
1658
1659 let mut operations = Vec::new();
1660 if let Some(edit) = action.lsp_action.edit {
1661 if let Some(document_changes) = edit.document_changes {
1662 match document_changes {
1663 lsp::DocumentChanges::Edits(edits) => operations
1664 .extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit)),
1665 lsp::DocumentChanges::Operations(ops) => operations = ops,
1666 }
1667 } else if let Some(changes) = edit.changes {
1668 operations.extend(changes.into_iter().map(|(uri, edits)| {
1669 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1670 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1671 uri,
1672 version: None,
1673 },
1674 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1675 })
1676 }));
1677 }
1678 }
1679
1680 let mut project_transaction = ProjectTransaction::default();
1681 for operation in operations {
1682 match operation {
1683 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1684 let abs_path = op
1685 .uri
1686 .to_file_path()
1687 .map_err(|_| anyhow!("can't convert URI to path"))?;
1688
1689 if let Some(parent_path) = abs_path.parent() {
1690 fs.create_dir(parent_path).await?;
1691 }
1692 if abs_path.ends_with("/") {
1693 fs.create_dir(&abs_path).await?;
1694 } else {
1695 fs.create_file(
1696 &abs_path,
1697 op.options.map(Into::into).unwrap_or_default(),
1698 )
1699 .await?;
1700 }
1701 }
1702 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1703 let source_abs_path = op
1704 .old_uri
1705 .to_file_path()
1706 .map_err(|_| anyhow!("can't convert URI to path"))?;
1707 let target_abs_path = op
1708 .new_uri
1709 .to_file_path()
1710 .map_err(|_| anyhow!("can't convert URI to path"))?;
1711 fs.rename(
1712 &source_abs_path,
1713 &target_abs_path,
1714 op.options.map(Into::into).unwrap_or_default(),
1715 )
1716 .await?;
1717 }
1718 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1719 let abs_path = op
1720 .uri
1721 .to_file_path()
1722 .map_err(|_| anyhow!("can't convert URI to path"))?;
1723 let options = op.options.map(Into::into).unwrap_or_default();
1724 if abs_path.ends_with("/") {
1725 fs.remove_dir(&abs_path, options).await?;
1726 } else {
1727 fs.remove_file(&abs_path, options).await?;
1728 }
1729 }
1730 lsp::DocumentChangeOperation::Edit(op) => {
1731 let buffer_to_edit = this
1732 .update(&mut cx, |this, cx| {
1733 this.open_local_buffer_from_lsp_path(
1734 op.text_document.uri,
1735 lang_name.clone(),
1736 lang_server.clone(),
1737 cx,
1738 )
1739 })
1740 .await?;
1741
1742 let edits = buffer_to_edit
1743 .update(&mut cx, |buffer, cx| {
1744 let edits = op.edits.into_iter().map(|edit| match edit {
1745 lsp::OneOf::Left(edit) => edit,
1746 lsp::OneOf::Right(edit) => edit.text_edit,
1747 });
1748 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1749 })
1750 .await?;
1751
1752 let transaction = buffer_to_edit.update(&mut cx, |buffer, cx| {
1753 buffer.finalize_last_transaction();
1754 buffer.start_transaction();
1755 for (range, text) in edits {
1756 buffer.edit([range], text, cx);
1757 }
1758 let transaction = if buffer.end_transaction(cx).is_some() {
1759 let transaction =
1760 buffer.finalize_last_transaction().unwrap().clone();
1761 if !push_to_history {
1762 buffer.forget_transaction(transaction.id);
1763 }
1764 Some(transaction)
1765 } else {
1766 None
1767 };
1768
1769 transaction
1770 });
1771 if let Some(transaction) = transaction {
1772 project_transaction.0.insert(buffer_to_edit, transaction);
1773 }
1774 }
1775 }
1776 }
1777
1778 Ok(project_transaction)
1779 })
1780 } else if let Some(project_id) = self.remote_id() {
1781 let client = self.client.clone();
1782 let request = proto::ApplyCodeAction {
1783 project_id,
1784 buffer_id: buffer_handle.read(cx).remote_id(),
1785 action: Some(language::proto::serialize_code_action(&action)),
1786 };
1787 cx.spawn(|this, mut cx| async move {
1788 let response = client
1789 .request(request)
1790 .await?
1791 .transaction
1792 .ok_or_else(|| anyhow!("missing transaction"))?;
1793 this.update(&mut cx, |this, cx| {
1794 this.deserialize_project_transaction(response, push_to_history, cx)
1795 })
1796 .await
1797 })
1798 } else {
1799 Task::ready(Err(anyhow!("project does not have a remote id")))
1800 }
1801 }
1802
1803 pub fn find_or_create_local_worktree(
1804 &self,
1805 abs_path: impl AsRef<Path>,
1806 weak: bool,
1807 cx: &mut ModelContext<Self>,
1808 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1809 let abs_path = abs_path.as_ref();
1810 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1811 Task::ready(Ok((tree.clone(), relative_path.into())))
1812 } else {
1813 let worktree = self.create_local_worktree(abs_path, weak, cx);
1814 cx.foreground()
1815 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1816 }
1817 }
1818
1819 fn find_local_worktree(
1820 &self,
1821 abs_path: &Path,
1822 cx: &AppContext,
1823 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1824 for tree in self.worktrees(cx) {
1825 if let Some(relative_path) = tree
1826 .read(cx)
1827 .as_local()
1828 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1829 {
1830 return Some((tree.clone(), relative_path.into()));
1831 }
1832 }
1833 None
1834 }
1835
1836 pub fn is_shared(&self) -> bool {
1837 match &self.client_state {
1838 ProjectClientState::Local { is_shared, .. } => *is_shared,
1839 ProjectClientState::Remote { .. } => false,
1840 }
1841 }
1842
1843 fn create_local_worktree(
1844 &self,
1845 abs_path: impl AsRef<Path>,
1846 weak: bool,
1847 cx: &mut ModelContext<Self>,
1848 ) -> Task<Result<ModelHandle<Worktree>>> {
1849 let fs = self.fs.clone();
1850 let client = self.client.clone();
1851 let path = Arc::from(abs_path.as_ref());
1852 cx.spawn(|project, mut cx| async move {
1853 let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;
1854
1855 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
1856 project.add_worktree(&worktree, cx);
1857 (project.remote_id(), project.is_shared())
1858 });
1859
1860 if let Some(project_id) = remote_project_id {
1861 worktree
1862 .update(&mut cx, |worktree, cx| {
1863 worktree.as_local_mut().unwrap().register(project_id, cx)
1864 })
1865 .await?;
1866 if is_shared {
1867 worktree
1868 .update(&mut cx, |worktree, cx| {
1869 worktree.as_local_mut().unwrap().share(project_id, cx)
1870 })
1871 .await?;
1872 }
1873 }
1874
1875 Ok(worktree)
1876 })
1877 }
1878
1879 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1880 self.worktrees.retain(|worktree| {
1881 worktree
1882 .upgrade(cx)
1883 .map_or(false, |w| w.read(cx).id() != id)
1884 });
1885 cx.notify();
1886 }
1887
1888 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
1889 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
1890 if worktree.read(cx).is_local() {
1891 cx.subscribe(&worktree, |this, worktree, _, cx| {
1892 this.update_local_worktree_buffers(worktree, cx);
1893 })
1894 .detach();
1895 }
1896
1897 let push_weak_handle = {
1898 let worktree = worktree.read(cx);
1899 worktree.is_local() && worktree.is_weak()
1900 };
1901 if push_weak_handle {
1902 cx.observe_release(&worktree, |this, cx| {
1903 this.worktrees
1904 .retain(|worktree| worktree.upgrade(cx).is_some());
1905 cx.notify();
1906 })
1907 .detach();
1908 self.worktrees
1909 .push(WorktreeHandle::Weak(worktree.downgrade()));
1910 } else {
1911 self.worktrees
1912 .push(WorktreeHandle::Strong(worktree.clone()));
1913 }
1914 cx.notify();
1915 }
1916
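// Reconciles open buffers with the latest snapshot of a local worktree: each
// buffer's file is re-resolved first by entry id, then by path, and falls back to a
// file with no entry id when neither is found. When the project has a remote id,
// the updated file metadata is also forwarded to the server. Buffers that have been
// dropped are removed from `open_buffers`.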
1917 fn update_local_worktree_buffers(
1918 &mut self,
1919 worktree_handle: ModelHandle<Worktree>,
1920 cx: &mut ModelContext<Self>,
1921 ) {
1922 let snapshot = worktree_handle.read(cx).snapshot();
1923 let mut buffers_to_delete = Vec::new();
1924 for (buffer_id, buffer) in &self.open_buffers {
1925 if let Some(buffer) = buffer.upgrade(cx) {
1926 buffer.update(cx, |buffer, cx| {
1927 if let Some(old_file) = File::from_dyn(buffer.file()) {
1928 if old_file.worktree != worktree_handle {
1929 return;
1930 }
1931
1932 let new_file = if let Some(entry) = old_file
1933 .entry_id
1934 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
1935 {
1936 File {
1937 is_local: true,
1938 entry_id: Some(entry.id),
1939 mtime: entry.mtime,
1940 path: entry.path.clone(),
1941 worktree: worktree_handle.clone(),
1942 }
1943 } else if let Some(entry) =
1944 snapshot.entry_for_path(old_file.path().as_ref())
1945 {
1946 File {
1947 is_local: true,
1948 entry_id: Some(entry.id),
1949 mtime: entry.mtime,
1950 path: entry.path.clone(),
1951 worktree: worktree_handle.clone(),
1952 }
1953 } else {
1954 File {
1955 is_local: true,
1956 entry_id: None,
1957 path: old_file.path().clone(),
1958 mtime: old_file.mtime(),
1959 worktree: worktree_handle.clone(),
1960 }
1961 };
1962
1963 if let Some(project_id) = self.remote_id() {
1964 self.client
1965 .send(proto::UpdateBufferFile {
1966 project_id,
1967 buffer_id: *buffer_id as u64,
1968 file: Some(new_file.to_proto()),
1969 })
1970 .log_err();
1971 }
1972 buffer.file_updated(Box::new(new_file), cx).detach();
1973 }
1974 });
1975 } else {
1976 buffers_to_delete.push(*buffer_id);
1977 }
1978 }
1979
1980 for buffer_id in buffers_to_delete {
1981 self.open_buffers.remove(&buffer_id);
1982 }
1983 }
1984
1985 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
1986 let new_active_entry = entry.and_then(|project_path| {
1987 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1988 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
1989 Some(ProjectEntry {
1990 worktree_id: project_path.worktree_id,
1991 entry_id: entry.id,
1992 })
1993 });
1994 if new_active_entry != self.active_entry {
1995 self.active_entry = new_active_entry;
1996 cx.emit(Event::ActiveEntryChanged(new_active_entry));
1997 }
1998 }
1999
2000 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2001 self.language_servers_with_diagnostics_running > 0
2002 }
2003
2004 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2005 let mut summary = DiagnosticSummary::default();
2006 for (_, path_summary) in self.diagnostic_summaries(cx) {
2007 summary.error_count += path_summary.error_count;
2008 summary.warning_count += path_summary.warning_count;
2009 summary.info_count += path_summary.info_count;
2010 summary.hint_count += path_summary.hint_count;
2011 }
2012 summary
2013 }
2014
2015 pub fn diagnostic_summaries<'a>(
2016 &'a self,
2017 cx: &'a AppContext,
2018 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2019 self.worktrees(cx).flat_map(move |worktree| {
2020 let worktree = worktree.read(cx);
2021 let worktree_id = worktree.id();
2022 worktree
2023 .diagnostic_summaries()
2024 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2025 })
2026 }
2027
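// The counter below tracks how many language servers are currently producing
// disk-based diagnostics: `DiskBasedDiagnosticsStarted` is emitted when the first
// one begins, `DiskBasedDiagnosticsUpdated` each time one finishes, and
// `DiskBasedDiagnosticsFinished` once the last one completes.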
2028 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2029 self.language_servers_with_diagnostics_running += 1;
2030 if self.language_servers_with_diagnostics_running == 1 {
2031 cx.emit(Event::DiskBasedDiagnosticsStarted);
2032 }
2033 }
2034
2035 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2036 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2037 self.language_servers_with_diagnostics_running -= 1;
2038 if self.language_servers_with_diagnostics_running == 0 {
2039 cx.emit(Event::DiskBasedDiagnosticsFinished);
2040 }
2041 }
2042
2043 pub fn active_entry(&self) -> Option<ProjectEntry> {
2044 self.active_entry
2045 }
2046
2047 // RPC message handlers
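//
// Each handler below receives a typed message envelope from a peer and applies the
// requested change to this project, typically by calling back into the model with
// `this.update`.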
2048
2049 async fn handle_unshare_project(
2050 this: ModelHandle<Self>,
2051 _: TypedEnvelope<proto::UnshareProject>,
2052 _: Arc<Client>,
2053 mut cx: AsyncAppContext,
2054 ) -> Result<()> {
2055 this.update(&mut cx, |this, cx| {
2056 if let ProjectClientState::Remote {
2057 sharing_has_stopped,
2058 ..
2059 } = &mut this.client_state
2060 {
2061 *sharing_has_stopped = true;
2062 this.collaborators.clear();
2063 cx.notify();
2064 } else {
2065 unreachable!()
2066 }
2067 });
2068
2069 Ok(())
2070 }
2071
2072 async fn handle_add_collaborator(
2073 this: ModelHandle<Self>,
2074 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2075 _: Arc<Client>,
2076 mut cx: AsyncAppContext,
2077 ) -> Result<()> {
2078 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2079 let collaborator = envelope
2080 .payload
2081 .collaborator
2082 .take()
2083 .ok_or_else(|| anyhow!("empty collaborator"))?;
2084
2085 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2086 this.update(&mut cx, |this, cx| {
2087 this.collaborators
2088 .insert(collaborator.peer_id, collaborator);
2089 cx.notify();
2090 });
2091
2092 Ok(())
2093 }
2094
2095 async fn handle_remove_collaborator(
2096 this: ModelHandle<Self>,
2097 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2098 _: Arc<Client>,
2099 mut cx: AsyncAppContext,
2100 ) -> Result<()> {
2101 this.update(&mut cx, |this, cx| {
2102 let peer_id = PeerId(envelope.payload.peer_id);
2103 let replica_id = this
2104 .collaborators
2105 .remove(&peer_id)
2106 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2107 .replica_id;
2108 this.shared_buffers.remove(&peer_id);
2109 for (_, buffer) in &this.open_buffers {
2110 if let Some(buffer) = buffer.upgrade(cx) {
2111 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2112 }
2113 }
2114 cx.notify();
2115 Ok(())
2116 })
2117 }
2118
2119 async fn handle_share_worktree(
2120 this: ModelHandle<Self>,
2121 envelope: TypedEnvelope<proto::ShareWorktree>,
2122 client: Arc<Client>,
2123 mut cx: AsyncAppContext,
2124 ) -> Result<()> {
2125 this.update(&mut cx, |this, cx| {
2126 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2127 let replica_id = this.replica_id();
2128 let worktree = envelope
2129 .payload
2130 .worktree
2131 .ok_or_else(|| anyhow!("invalid worktree"))?;
2132 let (worktree, load_task) =
2133 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2134 this.add_worktree(&worktree, cx);
2135 load_task.detach();
2136 Ok(())
2137 })
2138 }
2139
2140 async fn handle_unregister_worktree(
2141 this: ModelHandle<Self>,
2142 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2143 _: Arc<Client>,
2144 mut cx: AsyncAppContext,
2145 ) -> Result<()> {
2146 this.update(&mut cx, |this, cx| {
2147 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2148 this.remove_worktree(worktree_id, cx);
2149 Ok(())
2150 })
2151 }
2152
2153 async fn handle_update_worktree(
2154 this: ModelHandle<Self>,
2155 envelope: TypedEnvelope<proto::UpdateWorktree>,
2156 _: Arc<Client>,
2157 mut cx: AsyncAppContext,
2158 ) -> Result<()> {
2159 this.update(&mut cx, |this, cx| {
2160 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2161 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2162 worktree.update(cx, |worktree, _| {
2163 let worktree = worktree.as_remote_mut().unwrap();
2164 worktree.update_from_remote(envelope)
2165 })?;
2166 }
2167 Ok(())
2168 })
2169 }
2170
2171 async fn handle_update_diagnostic_summary(
2172 this: ModelHandle<Self>,
2173 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2174 _: Arc<Client>,
2175 mut cx: AsyncAppContext,
2176 ) -> Result<()> {
2177 this.update(&mut cx, |this, cx| {
2178 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2179 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2180 if let Some(summary) = envelope.payload.summary {
2181 let project_path = ProjectPath {
2182 worktree_id,
2183 path: Path::new(&summary.path).into(),
2184 };
2185 worktree.update(cx, |worktree, _| {
2186 worktree
2187 .as_remote_mut()
2188 .unwrap()
2189 .update_diagnostic_summary(project_path.path.clone(), &summary);
2190 });
2191 cx.emit(Event::DiagnosticsUpdated(project_path));
2192 }
2193 }
2194 Ok(())
2195 })
2196 }
2197
2198 async fn handle_disk_based_diagnostics_updating(
2199 this: ModelHandle<Self>,
2200 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2201 _: Arc<Client>,
2202 mut cx: AsyncAppContext,
2203 ) -> Result<()> {
2204 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2205 Ok(())
2206 }
2207
2208 async fn handle_disk_based_diagnostics_updated(
2209 this: ModelHandle<Self>,
2210 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2211 _: Arc<Client>,
2212 mut cx: AsyncAppContext,
2213 ) -> Result<()> {
2214 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2215 Ok(())
2216 }
2217
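// Applies incoming buffer operations. Operations for an open buffer are applied
// directly; operations for a buffer that is still loading are accumulated. On a
// remote project with loads in flight, operations for not-yet-known buffers are
// queued so they are not lost before the buffer arrives.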
2218 async fn handle_update_buffer(
2219 this: ModelHandle<Self>,
2220 envelope: TypedEnvelope<proto::UpdateBuffer>,
2221 _: Arc<Client>,
2222 mut cx: AsyncAppContext,
2223 ) -> Result<()> {
2224 this.update(&mut cx, |this, cx| {
2225 let payload = envelope.payload.clone();
2226 let buffer_id = payload.buffer_id;
2227 let ops = payload
2228 .operations
2229 .into_iter()
2230 .map(|op| language::proto::deserialize_operation(op))
2231 .collect::<Result<Vec<_>, _>>()?;
2232 let is_remote = this.is_remote();
2233 match this.open_buffers.entry(buffer_id) {
2234 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2235 OpenBuffer::Loaded(buffer) => {
2236 if let Some(buffer) = buffer.upgrade(cx) {
2237 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2238 }
2239 }
2240 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2241 },
2242 hash_map::Entry::Vacant(e) => {
2243 if is_remote && !this.loading_buffers.is_empty() {
2244 e.insert(OpenBuffer::Loading(ops));
2245 }
2246 }
2247 }
2248 Ok(())
2249 })
2250 }
2251
2252 async fn handle_update_buffer_file(
2253 this: ModelHandle<Self>,
2254 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2255 _: Arc<Client>,
2256 mut cx: AsyncAppContext,
2257 ) -> Result<()> {
2258 this.update(&mut cx, |this, cx| {
2259 let payload = envelope.payload.clone();
2260 let buffer_id = payload.buffer_id;
2261 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2262 let worktree = this
2263 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2264 .ok_or_else(|| anyhow!("no such worktree"))?;
2265 let file = File::from_proto(file, worktree.clone(), cx)?;
2266 let buffer = this
2267 .open_buffers
2268 .get_mut(&buffer_id)
2269 .and_then(|b| b.upgrade(cx))
2270 .ok_or_else(|| anyhow!("no such buffer"))?;
2271 buffer.update(cx, |buffer, cx| {
2272 buffer.file_updated(Box::new(file), cx).detach();
2273 });
2274 Ok(())
2275 })
2276 }
2277
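// Saves a shared buffer on behalf of a remote peer, first verifying that every edit
// the requested version depends on has already been received.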
2278 async fn handle_save_buffer(
2279 this: ModelHandle<Self>,
2280 envelope: TypedEnvelope<proto::SaveBuffer>,
2281 _: Arc<Client>,
2282 mut cx: AsyncAppContext,
2283 ) -> Result<proto::BufferSaved> {
2284 let buffer_id = envelope.payload.buffer_id;
2285 let sender_id = envelope.original_sender_id()?;
2286 let requested_version = envelope.payload.version.try_into()?;
2287
2288 let (project_id, buffer) = this.update(&mut cx, |this, _| {
2289 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2290 let buffer = this
2291 .shared_buffers
2292 .get(&sender_id)
2293 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2294 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2295 Ok::<_, anyhow::Error>((project_id, buffer))
2296 })?;
2297
2298 if !buffer
2299 .read_with(&cx, |buffer, _| buffer.version())
2300 .observed_all(&requested_version)
2301 {
2302 Err(anyhow!("save request depends on unreceived edits"))?;
2303 }
2304
2305 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2306 Ok(proto::BufferSaved {
2307 project_id,
2308 buffer_id,
2309 version: (&saved_version).into(),
2310 mtime: Some(mtime.into()),
2311 })
2312 }
2313
2314 async fn handle_format_buffers(
2315 this: ModelHandle<Self>,
2316 envelope: TypedEnvelope<proto::FormatBuffers>,
2317 _: Arc<Client>,
2318 mut cx: AsyncAppContext,
2319 ) -> Result<proto::FormatBuffersResponse> {
2320 let sender_id = envelope.original_sender_id()?;
2321 let format = this.update(&mut cx, |this, cx| {
2322 let shared_buffers = this
2323 .shared_buffers
2324 .get(&sender_id)
2325 .ok_or_else(|| anyhow!("peer has no buffers"))?;
2326 let mut buffers = HashSet::default();
2327 for buffer_id in &envelope.payload.buffer_ids {
2328 buffers.insert(
2329 shared_buffers
2330 .get(buffer_id)
2331 .cloned()
2332 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2333 );
2334 }
2335 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2336 })?;
2337
2338 let project_transaction = format.await?;
2339 let project_transaction = this.update(&mut cx, |this, cx| {
2340 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2341 });
2342 Ok(proto::FormatBuffersResponse {
2343 transaction: Some(project_transaction),
2344 })
2345 }
2346
2347 async fn handle_get_completions(
2348 this: ModelHandle<Self>,
2349 envelope: TypedEnvelope<proto::GetCompletions>,
2350 _: Arc<Client>,
2351 mut cx: AsyncAppContext,
2352 ) -> Result<proto::GetCompletionsResponse> {
2353 let sender_id = envelope.original_sender_id()?;
2354 let position = envelope
2355 .payload
2356 .position
2357 .and_then(language::proto::deserialize_anchor)
2358 .ok_or_else(|| anyhow!("invalid position"))?;
2359 let version = clock::Global::from(envelope.payload.version);
2360 let buffer = this.read_with(&cx, |this, _| {
2361 this.shared_buffers
2362 .get(&sender_id)
2363 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2364 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2365 })?;
2366 if !buffer
2367 .read_with(&cx, |buffer, _| buffer.version())
2368 .observed_all(&version)
2369 {
2370 Err(anyhow!("completion request depends on unreceived edits"))?;
2371 }
2372 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2373 let completions = this
2374 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2375 .await?;
2376
2377 Ok(proto::GetCompletionsResponse {
2378 completions: completions
2379 .iter()
2380 .map(language::proto::serialize_completion)
2381 .collect(),
2382 version: (&version).into(),
2383 })
2384 }
2385
2386 async fn handle_apply_additional_edits_for_completion(
2387 this: ModelHandle<Self>,
2388 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2389 _: Arc<Client>,
2390 mut cx: AsyncAppContext,
2391 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2392 let sender_id = envelope.original_sender_id()?;
2393 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2394 let buffer = this
2395 .shared_buffers
2396 .get(&sender_id)
2397 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2398 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2399 let language = buffer.read(cx).language();
2400 let completion = language::proto::deserialize_completion(
2401 envelope
2402 .payload
2403 .completion
2404 .ok_or_else(|| anyhow!("invalid completion"))?,
2405 language,
2406 )?;
2407 Ok::<_, anyhow::Error>(
2408 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2409 )
2410 })?;
2411
2412 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2413 transaction: apply_additional_edits
2414 .await?
2415 .as_ref()
2416 .map(language::proto::serialize_transaction),
2417 })
2418 }
2419
2420 async fn handle_get_code_actions(
2421 this: ModelHandle<Self>,
2422 envelope: TypedEnvelope<proto::GetCodeActions>,
2423 _: Arc<Client>,
2424 mut cx: AsyncAppContext,
2425 ) -> Result<proto::GetCodeActionsResponse> {
2426 let sender_id = envelope.original_sender_id()?;
2427 let start = envelope
2428 .payload
2429 .start
2430 .and_then(language::proto::deserialize_anchor)
2431 .ok_or_else(|| anyhow!("invalid start"))?;
2432 let end = envelope
2433 .payload
2434 .end
2435 .and_then(language::proto::deserialize_anchor)
2436 .ok_or_else(|| anyhow!("invalid end"))?;
2437 let buffer = this.update(&mut cx, |this, _| {
2438 this.shared_buffers
2439 .get(&sender_id)
2440 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2441 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2442 })?;
2443 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2444 if !version.observed(start.timestamp) || !version.observed(end.timestamp) {
2445 Err(anyhow!("code action request references unreceived edits"))?;
2446 }
2447 let code_actions = this.update(&mut cx, |this, cx| {
2448 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2449 })?;
2450
2451 Ok(proto::GetCodeActionsResponse {
2452 actions: code_actions
2453 .await?
2454 .iter()
2455 .map(language::proto::serialize_code_action)
2456 .collect(),
2457 version: (&version).into(),
2458 })
2459 }
2460
2461 async fn handle_apply_code_action(
2462 this: ModelHandle<Self>,
2463 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2464 _: Arc<Client>,
2465 mut cx: AsyncAppContext,
2466 ) -> Result<proto::ApplyCodeActionResponse> {
2467 let sender_id = envelope.original_sender_id()?;
2468 let action = language::proto::deserialize_code_action(
2469 envelope
2470 .payload
2471 .action
2472 .ok_or_else(|| anyhow!("invalid action"))?,
2473 )?;
2474 let apply_code_action = this.update(&mut cx, |this, cx| {
2475 let buffer = this
2476 .shared_buffers
2477 .get(&sender_id)
2478 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2479 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2480 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2481 })?;
2482
2483 let project_transaction = apply_code_action.await?;
2484 let project_transaction = this.update(&mut cx, |this, cx| {
2485 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2486 });
2487 Ok(proto::ApplyCodeActionResponse {
2488 transaction: Some(project_transaction),
2489 })
2490 }
2491
2492 async fn handle_get_definition(
2493 this: ModelHandle<Self>,
2494 envelope: TypedEnvelope<proto::GetDefinition>,
2495 _: Arc<Client>,
2496 mut cx: AsyncAppContext,
2497 ) -> Result<proto::GetDefinitionResponse> {
2498 let sender_id = envelope.original_sender_id()?;
2499 let position = envelope
2500 .payload
2501 .position
2502 .and_then(deserialize_anchor)
2503 .ok_or_else(|| anyhow!("invalid position"))?;
2504 let definitions = this.update(&mut cx, |this, cx| {
2505 let source_buffer = this
2506 .shared_buffers
2507 .get(&sender_id)
2508 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2509 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2510 if source_buffer.read(cx).can_resolve(&position) {
2511 Ok(this.definition(&source_buffer, position, cx))
2512 } else {
2513 Err(anyhow!("cannot resolve position"))
2514 }
2515 })?;
2516
2517 let definitions = definitions.await?;
2518
2519 this.update(&mut cx, |this, cx| {
2520 let mut response = proto::GetDefinitionResponse {
2521 definitions: Default::default(),
2522 };
2523 for definition in definitions {
2524 let buffer =
2525 this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx);
2526 response.definitions.push(proto::Definition {
2527 target_start: Some(serialize_anchor(&definition.target_range.start)),
2528 target_end: Some(serialize_anchor(&definition.target_range.end)),
2529 buffer: Some(buffer),
2530 });
2531 }
2532 Ok(response)
2533 })
2534 }
2535
2536 async fn handle_open_buffer(
2537 this: ModelHandle<Self>,
2538 envelope: TypedEnvelope<proto::OpenBuffer>,
2539 _: Arc<Client>,
2540 mut cx: AsyncAppContext,
2541 ) -> anyhow::Result<proto::OpenBufferResponse> {
2542 let peer_id = envelope.original_sender_id()?;
2543 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2544 let open_buffer = this.update(&mut cx, |this, cx| {
2545 this.open_buffer(
2546 ProjectPath {
2547 worktree_id,
2548 path: PathBuf::from(envelope.payload.path).into(),
2549 },
2550 cx,
2551 )
2552 });
2553
2554 let buffer = open_buffer.await?;
2555 this.update(&mut cx, |this, cx| {
2556 Ok(proto::OpenBufferResponse {
2557 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
2558 })
2559 })
2560 }
2561
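// Converts a `ProjectTransaction` into its protobuf form, serializing each affected
// buffer for the given peer so that buffers the peer has not yet seen are sent as
// full state.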
2562 fn serialize_project_transaction_for_peer(
2563 &mut self,
2564 project_transaction: ProjectTransaction,
2565 peer_id: PeerId,
2566 cx: &AppContext,
2567 ) -> proto::ProjectTransaction {
2568 let mut serialized_transaction = proto::ProjectTransaction {
2569 buffers: Default::default(),
2570 transactions: Default::default(),
2571 };
2572 for (buffer, transaction) in project_transaction.0 {
2573 serialized_transaction
2574 .buffers
2575 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
2576 serialized_transaction
2577 .transactions
2578 .push(language::proto::serialize_transaction(&transaction));
2579 }
2580 serialized_transaction
2581 }
2582
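// Reconstructs a `ProjectTransaction` from its protobuf form, resolving each buffer,
// waiting for the referenced edits to arrive locally, and optionally pushing each
// transaction onto its buffer's history.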
2583 fn deserialize_project_transaction(
2584 &mut self,
2585 message: proto::ProjectTransaction,
2586 push_to_history: bool,
2587 cx: &mut ModelContext<Self>,
2588 ) -> Task<Result<ProjectTransaction>> {
2589 cx.spawn(|this, mut cx| async move {
2590 let mut project_transaction = ProjectTransaction::default();
2591 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
2592 let buffer = this
2593 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2594 .await?;
2595 let transaction = language::proto::deserialize_transaction(transaction)?;
2596 project_transaction.0.insert(buffer, transaction);
2597 }
2598 for (buffer, transaction) in &project_transaction.0 {
2599 buffer
2600 .update(&mut cx, |buffer, _| {
2601 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2602 })
2603 .await;
2604
2605 if push_to_history {
2606 buffer.update(&mut cx, |buffer, _| {
2607 buffer.push_transaction(transaction.clone(), Instant::now());
2608 });
2609 }
2610 }
2611
2612 Ok(project_transaction)
2613 })
2614 }
2615
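// Serializes a buffer for a given peer. The first time a buffer is sent to a peer
// its full state is included and the handle is recorded in `shared_buffers`;
// subsequent references carry only the buffer id.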
2616 fn serialize_buffer_for_peer(
2617 &mut self,
2618 buffer: &ModelHandle<Buffer>,
2619 peer_id: PeerId,
2620 cx: &AppContext,
2621 ) -> proto::Buffer {
2622 let buffer_id = buffer.read(cx).remote_id();
2623 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2624 match shared_buffers.entry(buffer_id) {
2625 hash_map::Entry::Occupied(_) => proto::Buffer {
2626 variant: Some(proto::buffer::Variant::Id(buffer_id)),
2627 },
2628 hash_map::Entry::Vacant(entry) => {
2629 entry.insert(buffer.clone());
2630 proto::Buffer {
2631 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
2632 }
2633 }
2634 }
2635 }
2636
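// Resolves a protobuf buffer: the id variant waits until the corresponding buffer
// has been opened locally, while the state variant constructs a new buffer (and its
// file, if present) and registers it with the project.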
2637 fn deserialize_buffer(
2638 &mut self,
2639 buffer: proto::Buffer,
2640 cx: &mut ModelContext<Self>,
2641 ) -> Task<Result<ModelHandle<Buffer>>> {
2642 let replica_id = self.replica_id();
2643
2644 let mut opened_buffer_tx = self.opened_buffer.clone();
2645 let mut opened_buffer_rx = self.opened_buffer.subscribe();
2646 cx.spawn(|this, mut cx| async move {
2647 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
2648 proto::buffer::Variant::Id(id) => {
2649 let buffer = loop {
2650 let buffer = this.read_with(&cx, |this, cx| {
2651 this.open_buffers
2652 .get(&id)
2653 .and_then(|buffer| buffer.upgrade(cx))
2654 });
2655 if let Some(buffer) = buffer {
2656 break buffer;
2657 }
2658 opened_buffer_rx
2659 .recv()
2660 .await
2661 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
2662 };
2663 Ok(buffer)
2664 }
2665 proto::buffer::Variant::State(mut buffer) => {
2666 let mut buffer_worktree = None;
2667 let mut buffer_file = None;
2668 if let Some(file) = buffer.file.take() {
2669 this.read_with(&cx, |this, cx| {
2670 let worktree_id = WorktreeId::from_proto(file.worktree_id);
2671 let worktree =
2672 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
2673 anyhow!("no worktree found for id {}", file.worktree_id)
2674 })?;
2675 buffer_file =
2676 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
2677 as Box<dyn language::File>);
2678 buffer_worktree = Some(worktree);
2679 Ok::<_, anyhow::Error>(())
2680 })?;
2681 }
2682
2683 let buffer = cx.add_model(|cx| {
2684 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
2685 });
2686 this.update(&mut cx, |this, cx| {
2687 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
2688 })?;
2689
2690 let _ = opened_buffer_tx.send(()).await;
2691 Ok(buffer)
2692 }
2693 }
2694 })
2695 }
2696
2697 async fn handle_close_buffer(
2698 this: ModelHandle<Self>,
2699 envelope: TypedEnvelope<proto::CloseBuffer>,
2700 _: Arc<Client>,
2701 mut cx: AsyncAppContext,
2702 ) -> anyhow::Result<()> {
2703 this.update(&mut cx, |this, cx| {
2704 if let Some(shared_buffers) =
2705 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
2706 {
2707 shared_buffers.remove(&envelope.payload.buffer_id);
2708 cx.notify();
2709 }
2710 Ok(())
2711 })
2712 }
2713
2714 async fn handle_buffer_saved(
2715 this: ModelHandle<Self>,
2716 envelope: TypedEnvelope<proto::BufferSaved>,
2717 _: Arc<Client>,
2718 mut cx: AsyncAppContext,
2719 ) -> Result<()> {
2720 let version = envelope.payload.version.try_into()?;
2721 let mtime = envelope
2722 .payload
2723 .mtime
2724 .ok_or_else(|| anyhow!("missing mtime"))?
2725 .into();
2726
2727 this.update(&mut cx, |this, cx| {
2728 let buffer = this
2729 .open_buffers
2730 .get(&envelope.payload.buffer_id)
2731 .and_then(|buffer| buffer.upgrade(cx));
2732 if let Some(buffer) = buffer {
2733 buffer.update(cx, |buffer, cx| {
2734 buffer.did_save(version, mtime, None, cx);
2735 });
2736 }
2737 Ok(())
2738 })
2739 }
2740
2741 async fn handle_buffer_reloaded(
2742 this: ModelHandle<Self>,
2743 envelope: TypedEnvelope<proto::BufferReloaded>,
2744 _: Arc<Client>,
2745 mut cx: AsyncAppContext,
2746 ) -> Result<()> {
2747 let payload = envelope.payload.clone();
2748 let version = payload.version.try_into()?;
2749 let mtime = payload
2750 .mtime
2751 .ok_or_else(|| anyhow!("missing mtime"))?
2752 .into();
2753 this.update(&mut cx, |this, cx| {
2754 let buffer = this
2755 .open_buffers
2756 .get(&payload.buffer_id)
2757 .and_then(|buffer| buffer.upgrade(cx));
2758 if let Some(buffer) = buffer {
2759 buffer.update(cx, |buffer, cx| {
2760 buffer.did_reload(version, mtime, cx);
2761 });
2762 }
2763 Ok(())
2764 })
2765 }
2766
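// Performs fuzzy path matching against the snapshots of all non-weak worktrees,
// running the actual matching on the background executor.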
2767 pub fn match_paths<'a>(
2768 &self,
2769 query: &'a str,
2770 include_ignored: bool,
2771 smart_case: bool,
2772 max_results: usize,
2773 cancel_flag: &'a AtomicBool,
2774 cx: &AppContext,
2775 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
2776 let worktrees = self
2777 .worktrees(cx)
2778 .filter(|worktree| !worktree.read(cx).is_weak())
2779 .collect::<Vec<_>>();
2780 let include_root_name = worktrees.len() > 1;
2781 let candidate_sets = worktrees
2782 .into_iter()
2783 .map(|worktree| CandidateSet {
2784 snapshot: worktree.read(cx).snapshot(),
2785 include_ignored,
2786 include_root_name,
2787 })
2788 .collect::<Vec<_>>();
2789
2790 let background = cx.background().clone();
2791 async move {
2792 fuzzy::match_paths(
2793 candidate_sets.as_slice(),
2794 query,
2795 smart_case,
2796 max_results,
2797 cancel_flag,
2798 background,
2799 )
2800 .await
2801 }
2802 }
2803}
2804
2805impl WorktreeHandle {
2806 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2807 match self {
2808 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2809 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2810 }
2811 }
2812}
2813
2814impl OpenBuffer {
2815 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
2816 match self {
2817 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
2818 OpenBuffer::Loading(_) => None,
2819 }
2820 }
2821}
2822
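// Adapts a worktree snapshot to the `PathMatchCandidateSet` interface used by the
// fuzzy matcher, optionally including ignored files in the candidate list.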
2823struct CandidateSet {
2824 snapshot: Snapshot,
2825 include_ignored: bool,
2826 include_root_name: bool,
2827}
2828
2829impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
2830 type Candidates = CandidateSetIter<'a>;
2831
2832 fn id(&self) -> usize {
2833 self.snapshot.id().to_usize()
2834 }
2835
2836 fn len(&self) -> usize {
2837 if self.include_ignored {
2838 self.snapshot.file_count()
2839 } else {
2840 self.snapshot.visible_file_count()
2841 }
2842 }
2843
2844 fn prefix(&self) -> Arc<str> {
2845 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
2846 self.snapshot.root_name().into()
2847 } else if self.include_root_name {
2848 format!("{}/", self.snapshot.root_name()).into()
2849 } else {
2850 "".into()
2851 }
2852 }
2853
2854 fn candidates(&'a self, start: usize) -> Self::Candidates {
2855 CandidateSetIter {
2856 traversal: self.snapshot.files(self.include_ignored, start),
2857 }
2858 }
2859}
2860
2861struct CandidateSetIter<'a> {
2862 traversal: Traversal<'a>,
2863}
2864
2865impl<'a> Iterator for CandidateSetIter<'a> {
2866 type Item = PathMatchCandidate<'a>;
2867
2868 fn next(&mut self) -> Option<Self::Item> {
2869 self.traversal.next().map(|entry| {
2870 if let EntryKind::File(char_bag) = entry.kind {
2871 PathMatchCandidate {
2872 path: &entry.path,
2873 char_bag,
2874 }
2875 } else {
2876 unreachable!()
2877 }
2878 })
2879 }
2880}
2881
2882impl Entity for Project {
2883 type Event = Event;
2884
2885 fn release(&mut self, _: &mut gpui::MutableAppContext) {
2886 match &self.client_state {
2887 ProjectClientState::Local { remote_id_rx, .. } => {
2888 if let Some(project_id) = *remote_id_rx.borrow() {
2889 self.client
2890 .send(proto::UnregisterProject { project_id })
2891 .log_err();
2892 }
2893 }
2894 ProjectClientState::Remote { remote_id, .. } => {
2895 self.client
2896 .send(proto::LeaveProject {
2897 project_id: *remote_id,
2898 })
2899 .log_err();
2900 }
2901 }
2902 }
2903
2904 fn app_will_quit(
2905 &mut self,
2906 _: &mut MutableAppContext,
2907 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
2908 use futures::FutureExt;
2909
2910 let shutdown_futures = self
2911 .language_servers
2912 .drain()
2913 .filter_map(|(_, server)| server.shutdown())
2914 .collect::<Vec<_>>();
2915 Some(
2916 async move {
2917 futures::future::join_all(shutdown_futures).await;
2918 }
2919 .boxed(),
2920 )
2921 }
2922}
2923
2924impl Collaborator {
2925 fn from_proto(
2926 message: proto::Collaborator,
2927 user_store: &ModelHandle<UserStore>,
2928 cx: &mut AsyncAppContext,
2929 ) -> impl Future<Output = Result<Self>> {
2930 let user = user_store.update(cx, |user_store, cx| {
2931 user_store.fetch_user(message.user_id, cx)
2932 });
2933
2934 async move {
2935 Ok(Self {
2936 peer_id: PeerId(message.peer_id),
2937 user: user.await?,
2938 replica_id: message.replica_id as ReplicaId,
2939 })
2940 }
2941 }
2942}
2943
2944impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
2945 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
2946 Self {
2947 worktree_id,
2948 path: path.as_ref().into(),
2949 }
2950 }
2951}
2952
2953impl From<lsp::CreateFileOptions> for fs::CreateOptions {
2954 fn from(options: lsp::CreateFileOptions) -> Self {
2955 Self {
2956 overwrite: options.overwrite.unwrap_or(false),
2957 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2958 }
2959 }
2960}
2961
2962impl From<lsp::RenameFileOptions> for fs::RenameOptions {
2963 fn from(options: lsp::RenameFileOptions) -> Self {
2964 Self {
2965 overwrite: options.overwrite.unwrap_or(false),
2966 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2967 }
2968 }
2969}
2970
2971impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
2972 fn from(options: lsp::DeleteFileOptions) -> Self {
2973 Self {
2974 recursive: options.recursive.unwrap_or(false),
2975 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
2976 }
2977 }
2978}
2979
2980#[cfg(test)]
2981mod tests {
2982 use super::{Event, *};
2983 use fs::RealFs;
2984 use futures::StreamExt;
2985 use gpui::test::subscribe;
2986 use language::{
2987 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
2988 };
2989 use lsp::Url;
2990 use serde_json::json;
2991 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
2992 use unindent::Unindent as _;
2993 use util::test::temp_tree;
2994 use worktree::WorktreeHandle as _;
2995
2996 #[gpui::test]
2997 async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
2998 let dir = temp_tree(json!({
2999 "root": {
3000 "apple": "",
3001 "banana": {
3002 "carrot": {
3003 "date": "",
3004 "endive": "",
3005 }
3006 },
3007 "fennel": {
3008 "grape": "",
3009 }
3010 }
3011 }));
3012
3013 let root_link_path = dir.path().join("root_link");
3014 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3015 unix::fs::symlink(
3016 &dir.path().join("root/fennel"),
3017 &dir.path().join("root/finnochio"),
3018 )
3019 .unwrap();
3020
3021 let project = Project::test(Arc::new(RealFs), &mut cx);
3022
3023 let (tree, _) = project
3024 .update(&mut cx, |project, cx| {
3025 project.find_or_create_local_worktree(&root_link_path, false, cx)
3026 })
3027 .await
3028 .unwrap();
3029
3030 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3031 .await;
3032 cx.read(|cx| {
3033 let tree = tree.read(cx);
3034 assert_eq!(tree.file_count(), 5);
3035 assert_eq!(
3036 tree.inode_for_path("fennel/grape"),
3037 tree.inode_for_path("finnochio/grape")
3038 );
3039 });
3040
3041 let cancel_flag = Default::default();
3042 let results = project
3043 .read_with(&cx, |project, cx| {
3044 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3045 })
3046 .await;
3047 assert_eq!(
3048 results
3049 .into_iter()
3050 .map(|result| result.path)
3051 .collect::<Vec<Arc<Path>>>(),
3052 vec![
3053 PathBuf::from("banana/carrot/date").into(),
3054 PathBuf::from("banana/carrot/endive").into(),
3055 ]
3056 );
3057 }
3058
3059 #[gpui::test]
3060 async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
3061 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3062 let progress_token = language_server_config
3063 .disk_based_diagnostics_progress_token
3064 .clone()
3065 .unwrap();
3066
3067 let language = Arc::new(Language::new(
3068 LanguageConfig {
3069 name: "Rust".to_string(),
3070 path_suffixes: vec!["rs".to_string()],
3071 language_server: Some(language_server_config),
3072 ..Default::default()
3073 },
3074 Some(tree_sitter_rust::language()),
3075 ));
3076
3077 let fs = FakeFs::new(cx.background());
3078 fs.insert_tree(
3079 "/dir",
3080 json!({
3081 "a.rs": "fn a() { A }",
3082 "b.rs": "const y: i32 = 1",
3083 }),
3084 )
3085 .await;
3086
3087 let project = Project::test(fs, &mut cx);
3088 project.update(&mut cx, |project, _| {
3089 Arc::get_mut(&mut project.languages).unwrap().add(language);
3090 });
3091
3092 let (tree, _) = project
3093 .update(&mut cx, |project, cx| {
3094 project.find_or_create_local_worktree("/dir", false, cx)
3095 })
3096 .await
3097 .unwrap();
3098 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3099
3100 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3101 .await;
3102
3103 // Cause worktree to start the fake language server
3104 let _buffer = project
3105 .update(&mut cx, |project, cx| {
3106 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3107 })
3108 .await
3109 .unwrap();
3110
3111 let mut events = subscribe(&project, &mut cx);
3112
3113 let mut fake_server = fake_servers.next().await.unwrap();
3114 fake_server.start_progress(&progress_token).await;
3115 assert_eq!(
3116 events.next().await.unwrap(),
3117 Event::DiskBasedDiagnosticsStarted
3118 );
3119
3120 fake_server.start_progress(&progress_token).await;
3121 fake_server.end_progress(&progress_token).await;
3122 fake_server.start_progress(&progress_token).await;
3123
3124 fake_server
3125 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3126 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3127 version: None,
3128 diagnostics: vec![lsp::Diagnostic {
3129 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3130 severity: Some(lsp::DiagnosticSeverity::ERROR),
3131 message: "undefined variable 'A'".to_string(),
3132 ..Default::default()
3133 }],
3134 })
3135 .await;
3136 assert_eq!(
3137 events.next().await.unwrap(),
3138 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3139 );
3140
3141 fake_server.end_progress(&progress_token).await;
3142 fake_server.end_progress(&progress_token).await;
3143 assert_eq!(
3144 events.next().await.unwrap(),
3145 Event::DiskBasedDiagnosticsUpdated
3146 );
3147 assert_eq!(
3148 events.next().await.unwrap(),
3149 Event::DiskBasedDiagnosticsFinished
3150 );
3151
3152 let buffer = project
3153 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3154 .await
3155 .unwrap();
3156
3157 buffer.read_with(&cx, |buffer, _| {
3158 let snapshot = buffer.snapshot();
3159 let diagnostics = snapshot
3160 .diagnostics_in_range::<_, Point>(0..buffer.len())
3161 .collect::<Vec<_>>();
3162 assert_eq!(
3163 diagnostics,
3164 &[DiagnosticEntry {
3165 range: Point::new(0, 9)..Point::new(0, 10),
3166 diagnostic: Diagnostic {
3167 severity: lsp::DiagnosticSeverity::ERROR,
3168 message: "undefined variable 'A'".to_string(),
3169 group_id: 0,
3170 is_primary: true,
3171 ..Default::default()
3172 }
3173 }]
3174 )
3175 });
3176 }
3177
3178 #[gpui::test]
3179 async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
3180 let dir = temp_tree(json!({
3181 "root": {
3182 "dir1": {},
3183 "dir2": {
3184 "dir3": {}
3185 }
3186 }
3187 }));
3188
3189 let project = Project::test(Arc::new(RealFs), &mut cx);
3190 let (tree, _) = project
3191 .update(&mut cx, |project, cx| {
3192 project.find_or_create_local_worktree(&dir.path(), false, cx)
3193 })
3194 .await
3195 .unwrap();
3196
3197 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3198 .await;
3199
3200 let cancel_flag = Default::default();
3201 let results = project
3202 .read_with(&cx, |project, cx| {
3203 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3204 })
3205 .await;
3206
3207 assert!(results.is_empty());
3208 }
3209
3210 #[gpui::test]
3211 async fn test_definition(mut cx: gpui::TestAppContext) {
3212 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3213 let language = Arc::new(Language::new(
3214 LanguageConfig {
3215 name: "Rust".to_string(),
3216 path_suffixes: vec!["rs".to_string()],
3217 language_server: Some(language_server_config),
3218 ..Default::default()
3219 },
3220 Some(tree_sitter_rust::language()),
3221 ));
3222
3223 let fs = FakeFs::new(cx.background());
3224 fs.insert_tree(
3225 "/dir",
3226 json!({
3227 "a.rs": "const fn a() { A }",
3228 "b.rs": "const y: i32 = crate::a()",
3229 }),
3230 )
3231 .await;
3232
3233 let project = Project::test(fs, &mut cx);
3234 project.update(&mut cx, |project, _| {
3235 Arc::get_mut(&mut project.languages).unwrap().add(language);
3236 });
3237
3238 let (tree, _) = project
3239 .update(&mut cx, |project, cx| {
3240 project.find_or_create_local_worktree("/dir/b.rs", false, cx)
3241 })
3242 .await
3243 .unwrap();
3244 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3245 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3246 .await;
3247
3248 let buffer = project
3249 .update(&mut cx, |project, cx| {
3250 project.open_buffer(
3251 ProjectPath {
3252 worktree_id,
3253 path: Path::new("").into(),
3254 },
3255 cx,
3256 )
3257 })
3258 .await
3259 .unwrap();
3260
3261 let mut fake_server = fake_servers.next().await.unwrap();
3262 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params| {
3263 let params = params.text_document_position_params;
3264 assert_eq!(
3265 params.text_document.uri.to_file_path().unwrap(),
3266 Path::new("/dir/b.rs"),
3267 );
3268 assert_eq!(params.position, lsp::Position::new(0, 22));
3269
3270 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3271 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
3272 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3273 )))
3274 });
3275
3276 let mut definitions = project
3277 .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
3278 .await
3279 .unwrap();
3280
3281 assert_eq!(definitions.len(), 1);
3282 let definition = definitions.pop().unwrap();
3283 cx.update(|cx| {
3284 let target_buffer = definition.target_buffer.read(cx);
3285 assert_eq!(
3286 target_buffer
3287 .file()
3288 .unwrap()
3289 .as_local()
3290 .unwrap()
3291 .abs_path(cx),
3292 Path::new("/dir/a.rs"),
3293 );
3294 assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
3295 assert_eq!(
3296 list_worktrees(&project, cx),
3297 [("/dir/b.rs".as_ref(), false), ("/dir/a.rs".as_ref(), true)]
3298 );
3299
3300 drop(definition);
3301 });
3302 cx.read(|cx| {
3303 assert_eq!(
3304 list_worktrees(&project, cx),
3305 [("/dir/b.rs".as_ref(), false)]
3306 );
3307 });
3308
3309 fn list_worktrees<'a>(
3310 project: &'a ModelHandle<Project>,
3311 cx: &'a AppContext,
3312 ) -> Vec<(&'a Path, bool)> {
3313 project
3314 .read(cx)
3315 .worktrees(cx)
3316 .map(|worktree| {
3317 let worktree = worktree.read(cx);
3318 (
3319 worktree.as_local().unwrap().abs_path().as_ref(),
3320 worktree.is_weak(),
3321 )
3322 })
3323 .collect::<Vec<_>>()
3324 }
3325 }
3326
3327 #[gpui::test]
3328 async fn test_save_file(mut cx: gpui::TestAppContext) {
3329 let fs = FakeFs::new(cx.background());
3330 fs.insert_tree(
3331 "/dir",
3332 json!({
3333 "file1": "the old contents",
3334 }),
3335 )
3336 .await;
3337
3338 let project = Project::test(fs.clone(), &mut cx);
3339 let worktree_id = project
3340 .update(&mut cx, |p, cx| {
3341 p.find_or_create_local_worktree("/dir", false, cx)
3342 })
3343 .await
3344 .unwrap()
3345 .0
3346 .read_with(&cx, |tree, _| tree.id());
3347
3348 let buffer = project
3349 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3350 .await
3351 .unwrap();
3352 buffer
3353 .update(&mut cx, |buffer, cx| {
3354 assert_eq!(buffer.text(), "the old contents");
3355 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3356 buffer.save(cx)
3357 })
3358 .await
3359 .unwrap();
3360
3361 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3362 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3363 }
3364
3365 #[gpui::test]
3366 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
3367 let fs = FakeFs::new(cx.background());
3368 fs.insert_tree(
3369 "/dir",
3370 json!({
3371 "file1": "the old contents",
3372 }),
3373 )
3374 .await;
3375
3376 let project = Project::test(fs.clone(), &mut cx);
3377 let worktree_id = project
3378 .update(&mut cx, |p, cx| {
3379 p.find_or_create_local_worktree("/dir/file1", false, cx)
3380 })
3381 .await
3382 .unwrap()
3383 .0
3384 .read_with(&cx, |tree, _| tree.id());
3385
3386 let buffer = project
3387 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3388 .await
3389 .unwrap();
3390 buffer
3391 .update(&mut cx, |buffer, cx| {
3392 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3393 buffer.save(cx)
3394 })
3395 .await
3396 .unwrap();
3397
3398 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3399 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3400 }
3401
3402 #[gpui::test(retries = 5)]
3403 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3404 let dir = temp_tree(json!({
3405 "a": {
3406 "file1": "",
3407 "file2": "",
3408 "file3": "",
3409 },
3410 "b": {
3411 "c": {
3412 "file4": "",
3413 "file5": "",
3414 }
3415 }
3416 }));
3417
3418 let project = Project::test(Arc::new(RealFs), &mut cx);
3419 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3420
3421 let (tree, _) = project
3422 .update(&mut cx, |p, cx| {
3423 p.find_or_create_local_worktree(dir.path(), false, cx)
3424 })
3425 .await
3426 .unwrap();
3427 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3428
3429 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3430 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3431 async move { buffer.await.unwrap() }
3432 };
3433 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3434 tree.read_with(cx, |tree, _| {
3435 tree.entry_for_path(path)
3436 .expect(&format!("no entry for path {}", path))
3437 .id
3438 })
3439 };
3440
3441 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3442 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3443 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3444 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3445
3446 let file2_id = id_for_path("a/file2", &cx);
3447 let file3_id = id_for_path("a/file3", &cx);
3448 let file4_id = id_for_path("b/c/file4", &cx);
3449
3450 // Wait for the initial scan.
3451 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3452 .await;
3453
3454 // Create a remote copy of this worktree.
3455 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
3456 let (remote, load_task) = cx.update(|cx| {
3457 Worktree::remote(
3458 1,
3459 1,
3460 initial_snapshot.to_proto(&Default::default(), Default::default()),
3461 rpc.clone(),
3462 cx,
3463 )
3464 });
3465 load_task.await;
3466
3467 cx.read(|cx| {
3468 assert!(!buffer2.read(cx).is_dirty());
3469 assert!(!buffer3.read(cx).is_dirty());
3470 assert!(!buffer4.read(cx).is_dirty());
3471 assert!(!buffer5.read(cx).is_dirty());
3472 });
3473
3474 // Rename and delete files and directories.
3475 tree.flush_fs_events(&cx).await;
3476 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3477 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3478 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3479 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3480 tree.flush_fs_events(&cx).await;
3481
3482 let expected_paths = vec![
3483 "a",
3484 "a/file1",
3485 "a/file2.new",
3486 "b",
3487 "d",
3488 "d/file3",
3489 "d/file4",
3490 ];
3491
3492 cx.read(|app| {
3493 assert_eq!(
3494 tree.read(app)
3495 .paths()
3496 .map(|p| p.to_str().unwrap())
3497 .collect::<Vec<_>>(),
3498 expected_paths
3499 );
3500
3501 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3502 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3503 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3504
3505 assert_eq!(
3506 buffer2.read(app).file().unwrap().path().as_ref(),
3507 Path::new("a/file2.new")
3508 );
3509 assert_eq!(
3510 buffer3.read(app).file().unwrap().path().as_ref(),
3511 Path::new("d/file3")
3512 );
3513 assert_eq!(
3514 buffer4.read(app).file().unwrap().path().as_ref(),
3515 Path::new("d/file4")
3516 );
3517 assert_eq!(
3518 buffer5.read(app).file().unwrap().path().as_ref(),
3519 Path::new("b/c/file5")
3520 );
3521
3522 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3523 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3524 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3525 assert!(buffer5.read(app).file().unwrap().is_deleted());
3526 });
3527
3528 // Update the remote worktree. Check that it becomes consistent with the
3529 // local worktree.
3530 remote.update(&mut cx, |remote, cx| {
3531 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
3532 &initial_snapshot,
3533 1,
3534 1,
3535 0,
3536 true,
3537 );
3538 remote
3539 .as_remote_mut()
3540 .unwrap()
3541 .snapshot
3542 .apply_remote_update(update_message)
3543 .unwrap();
3544
3545 assert_eq!(
3546 remote
3547 .paths()
3548 .map(|p| p.to_str().unwrap())
3549 .collect::<Vec<_>>(),
3550 expected_paths
3551 );
3552 });
3553 }
3554
3555 #[gpui::test]
3556 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3557 let fs = FakeFs::new(cx.background());
3558 fs.insert_tree(
3559 "/the-dir",
3560 json!({
3561 "a.txt": "a-contents",
3562 "b.txt": "b-contents",
3563 }),
3564 )
3565 .await;
3566
3567 let project = Project::test(fs.clone(), &mut cx);
3568 let worktree_id = project
3569 .update(&mut cx, |p, cx| {
3570 p.find_or_create_local_worktree("/the-dir", false, cx)
3571 })
3572 .await
3573 .unwrap()
3574 .0
3575 .read_with(&cx, |tree, _| tree.id());
3576
3577 // Spawn multiple tasks to open paths, repeating some paths.
3578 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3579 (
3580 p.open_buffer((worktree_id, "a.txt"), cx),
3581 p.open_buffer((worktree_id, "b.txt"), cx),
3582 p.open_buffer((worktree_id, "a.txt"), cx),
3583 )
3584 });
3585
3586 let buffer_a_1 = buffer_a_1.await.unwrap();
3587 let buffer_a_2 = buffer_a_2.await.unwrap();
3588 let buffer_b = buffer_b.await.unwrap();
3589 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3590 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3591
3592 // There is only one buffer per path.
3593 let buffer_a_id = buffer_a_1.id();
3594 assert_eq!(buffer_a_2.id(), buffer_a_id);
3595
3596 // Open the same path again while it is still open.
3597 drop(buffer_a_1);
3598 let buffer_a_3 = project
3599 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3600 .await
3601 .unwrap();
3602
3603 // There's still only one buffer per path.
3604 assert_eq!(buffer_a_3.id(), buffer_a_id);
3605 }
3606
3607 #[gpui::test]
3608 async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
3609 use std::fs;
3610
3611 let dir = temp_tree(json!({
3612 "file1": "abc",
3613 "file2": "def",
3614 "file3": "ghi",
3615 }));
3616
3617 let project = Project::test(Arc::new(RealFs), &mut cx);
3618 let (worktree, _) = project
3619 .update(&mut cx, |p, cx| {
3620 p.find_or_create_local_worktree(dir.path(), false, cx)
3621 })
3622 .await
3623 .unwrap();
3624 let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
3625
3626 worktree.flush_fs_events(&cx).await;
3627 worktree
3628 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3629 .await;
3630
3631 let buffer1 = project
3632 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3633 .await
3634 .unwrap();
3635 let events = Rc::new(RefCell::new(Vec::new()));
3636
3637 // initially, the buffer isn't dirty.
3638 buffer1.update(&mut cx, |buffer, cx| {
3639 cx.subscribe(&buffer1, {
3640 let events = events.clone();
3641 move |_, _, event, _| events.borrow_mut().push(event.clone())
3642 })
3643 .detach();
3644
3645 assert!(!buffer.is_dirty());
3646 assert!(events.borrow().is_empty());
3647
3648 buffer.edit(vec![1..2], "", cx);
3649 });
3650
3651 // after the first edit, the buffer is dirty, and emits a dirtied event.
3652 buffer1.update(&mut cx, |buffer, cx| {
3653 assert!(buffer.text() == "ac");
3654 assert!(buffer.is_dirty());
3655 assert_eq!(
3656 *events.borrow(),
3657 &[language::Event::Edited, language::Event::Dirtied]
3658 );
3659 events.borrow_mut().clear();
3660 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
3661 });
3662
3663 // after saving, the buffer is not dirty, and emits a saved event.
3664 buffer1.update(&mut cx, |buffer, cx| {
3665 assert!(!buffer.is_dirty());
3666 assert_eq!(*events.borrow(), &[language::Event::Saved]);
3667 events.borrow_mut().clear();
3668
3669 buffer.edit(vec![1..1], "B", cx);
3670 buffer.edit(vec![2..2], "D", cx);
3671 });
3672
3673 // after editing again, the buffer is dirty, and emits another dirty event.
3674 buffer1.update(&mut cx, |buffer, cx| {
3675 assert!(buffer.text() == "aBDc");
3676 assert!(buffer.is_dirty());
3677 assert_eq!(
3678 *events.borrow(),
3679 &[
3680 language::Event::Edited,
3681 language::Event::Dirtied,
3682 language::Event::Edited,
3683 ],
3684 );
3685 events.borrow_mut().clear();
3686
3687 // TODO - currently, after restoring the buffer to its
3688 // previously-saved state, the buffer is still considered dirty.
3689 buffer.edit([1..3], "", cx);
3690 assert!(buffer.text() == "ac");
3691 assert!(buffer.is_dirty());
3692 });
3693
3694 assert_eq!(*events.borrow(), &[language::Event::Edited]);
3695
3696 // When a file is deleted, the buffer is considered dirty.
3697 let events = Rc::new(RefCell::new(Vec::new()));
3698 let buffer2 = project
3699 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
3700 .await
3701 .unwrap();
3702 buffer2.update(&mut cx, |_, cx| {
3703 cx.subscribe(&buffer2, {
3704 let events = events.clone();
3705 move |_, _, event, _| events.borrow_mut().push(event.clone())
3706 })
3707 .detach();
3708 });
3709
3710 fs::remove_file(dir.path().join("file2")).unwrap();
3711 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
3712 assert_eq!(
3713 *events.borrow(),
3714 &[language::Event::Dirtied, language::Event::FileHandleChanged]
3715 );
3716
3717 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3718 let events = Rc::new(RefCell::new(Vec::new()));
3719 let buffer3 = project
3720 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
3721 .await
3722 .unwrap();
3723 buffer3.update(&mut cx, |_, cx| {
3724 cx.subscribe(&buffer3, {
3725 let events = events.clone();
3726 move |_, _, event, _| events.borrow_mut().push(event.clone())
3727 })
3728 .detach();
3729 });
3730
3731 worktree.flush_fs_events(&cx).await;
3732 buffer3.update(&mut cx, |buffer, cx| {
3733 buffer.edit(Some(0..0), "x", cx);
3734 });
3735 events.borrow_mut().clear();
3736 fs::remove_file(dir.path().join("file3")).unwrap();
3737 buffer3
3738 .condition(&cx, |_, _| !events.borrow().is_empty())
3739 .await;
3740 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
3741 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3742 }
3743
3744 #[gpui::test]
3745 async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
3746 use std::fs;
3747
3748 let initial_contents = "aaa\nbbbbb\nc\n";
3749 let dir = temp_tree(json!({ "the-file": initial_contents }));
3750
3751 let project = Project::test(Arc::new(RealFs), &mut cx);
3752 let (worktree, _) = project
3753 .update(&mut cx, |p, cx| {
3754 p.find_or_create_local_worktree(dir.path(), false, cx)
3755 })
3756 .await
3757 .unwrap();
3758 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
3759
3760 worktree
3761 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3762 .await;
3763
3764 let abs_path = dir.path().join("the-file");
3765 let buffer = project
3766 .update(&mut cx, |p, cx| {
3767 p.open_buffer((worktree_id, "the-file"), cx)
3768 })
3769 .await
3770 .unwrap();
3771
3772 // TODO
3773 // Add a cursor on each row.
3774 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
3775 // assert!(!buffer.is_dirty());
3776 // buffer.add_selection_set(
3777 // &(0..3)
3778 // .map(|row| Selection {
3779 // id: row as usize,
3780 // start: Point::new(row, 1),
3781 // end: Point::new(row, 1),
3782 // reversed: false,
3783 // goal: SelectionGoal::None,
3784 // })
3785 // .collect::<Vec<_>>(),
3786 // cx,
3787 // )
3788 // });
3789
3790 // Change the file on disk, adding two new lines of text, and removing
3791 // one line.
3792 buffer.read_with(&cx, |buffer, _| {
3793 assert!(!buffer.is_dirty());
3794 assert!(!buffer.has_conflict());
3795 });
3796 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3797 fs::write(&abs_path, new_contents).unwrap();
3798
        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(&mut cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer, which marks it dirty but does not create a conflict.
        buffer.update(&mut cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }

    #[gpui::test]
    async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

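        // Publish diagnostics in which each related-information entry is also
        // reported as its own HINT diagnostic pointing back at the primary
        // diagnostic.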
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

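        // Ingest the message; related diagnostics should end up in the same group.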
        project
            .update(&mut cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());

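        // All diagnostics, in position order. Hints derived from related
        // information share a group_id with their primary diagnostic and are
        // marked as non-primary.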
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

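        // Each group can also be queried on its own by group id.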
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
}