1pub mod fs;
2mod ignore;
3pub mod worktree;
4
5use anyhow::{anyhow, Result};
6use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
7use clock::ReplicaId;
8use collections::{hash_map, HashMap, HashSet};
9use futures::Future;
10use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
11use gpui::{
12 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
13 UpgradeModelHandle, WeakModelHandle,
14};
15use language::{
16 point_from_lsp,
17 proto::{deserialize_anchor, serialize_anchor},
18 range_from_lsp, AnchorRangeExt, Bias, Buffer, CodeAction, Completion, CompletionLabel,
19 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
20 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
21};
22use lsp::{DiagnosticSeverity, LanguageServer};
23use postage::{broadcast, prelude::Stream, sink::Sink, watch};
24use smol::block_on;
25use std::{
26 convert::TryInto,
27 ops::Range,
28 path::{Path, PathBuf},
29 sync::{atomic::AtomicBool, Arc},
30 time::Instant,
31};
32use util::{post_inc, ResultExt, TryFutureExt as _};
33
34pub use fs::*;
35pub use worktree::*;
36
/// Top-level model of an open project: the set of worktrees being edited
/// plus shared state for collaboration, open buffers, diagnostics, and
/// language servers.
pub struct Project {
    /// Worktrees in this project; held strong or weak (see `WorktreeHandle`)
    /// and upgraded on access, so dropped weak worktrees are skipped.
    worktrees: Vec<WorktreeHandle>,
    /// The most recently activated project entry, if any.
    active_entry: Option<ProjectEntry>,
    /// Registry used to pick a language for newly opened buffers.
    languages: Arc<LanguageRegistry>,
    /// Running language servers, keyed by (worktree, language name).
    language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
    client: Arc<client::Client>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    /// Whether this project is the local original or a remote replica.
    client_state: ProjectClientState,
    /// Connected collaborators, keyed by peer id.
    collaborators: HashMap<PeerId, Collaborator>,
    /// RPC subscriptions tied to the project's current remote id; cleared
    /// and re-created in `set_remote_id`.
    subscriptions: Vec<client::Subscription>,
    // Net count of disk-based-diagnostics start/finish events across all
    // language servers; `isize` presumably to tolerate transient
    // under/overflow from unbalanced events — TODO confirm.
    language_servers_with_diagnostics_running: isize,
    /// All buffers this project knows about, keyed by buffer remote id.
    open_buffers: HashMap<u64, OpenBuffer>,
    /// Broadcast fired whenever a buffer finishes opening.
    opened_buffer: broadcast::Sender<()>,
    /// In-flight buffer loads, so concurrent opens of the same path share
    /// one load (see `open_buffer`).
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    /// Buffers shared with each remote peer, keyed by buffer remote id.
    shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
}
57
/// Tracking state for a buffer the project knows about: either the loaded
/// buffer itself (held weakly so it can drop when unused), or the operations
/// that arrived while it was still loading and must be applied on load.
enum OpenBuffer {
    Loaded(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}
62
/// A worktree reference that is either kept alive by the project (strong)
/// or allowed to drop when no longer referenced elsewhere (weak).
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
67
/// Distinguishes the local original of a project from a remote replica.
enum ProjectClientState {
    Local {
        /// True while this project is being shared with collaborators.
        is_shared: bool,
        /// Server-assigned project id; `None` while disconnected.
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        /// Keeps alive the background task that (re)registers the project
        /// as the client connection status changes (see `Project::local`).
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        /// Set once the host stops sharing; the replica becomes read-only.
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
    },
}
81
/// A remote peer participating in this project: the user they are signed in
/// as, their connection, and the replica id attributed to their edits.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
88
/// Events emitted by a `Project` for observers (e.g. UI views).
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    /// The active entry changed (or was cleared).
    ActiveEntryChanged(Option<ProjectEntry>),
    WorktreeRemoved(WorktreeId),
    /// A language server started producing disk-based diagnostics.
    DiskBasedDiagnosticsStarted,
    /// New disk-based diagnostics arrived.
    DiskBasedDiagnosticsUpdated,
    /// Language servers finished producing disk-based diagnostics.
    DiskBasedDiagnosticsFinished,
    /// Diagnostics changed for the file at the given project path.
    DiagnosticsUpdated(ProjectPath),
}
98
/// A file or directory location expressed relative to one of the project's
/// worktrees.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    /// Path relative to the worktree root.
    pub path: Arc<Path>,
}
104
/// Counts of primary diagnostics for a file, bucketed by severity
/// (see `DiagnosticSummary::new`).
#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}
112
/// The result of a go-to-definition request: the buffer containing the
/// definition and the anchored range of the definition within it.
#[derive(Debug)]
pub struct Definition {
    pub target_buffer: ModelHandle<Buffer>,
    pub target_range: Range<language::Anchor>,
}
118
/// A set of edits spanning multiple buffers (e.g. produced by formatting or
/// a code action), mapping each affected buffer to its transaction.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
121
122impl DiagnosticSummary {
123 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
124 let mut this = Self {
125 error_count: 0,
126 warning_count: 0,
127 info_count: 0,
128 hint_count: 0,
129 };
130
131 for entry in diagnostics {
132 if entry.diagnostic.is_primary {
133 match entry.diagnostic.severity {
134 DiagnosticSeverity::ERROR => this.error_count += 1,
135 DiagnosticSeverity::WARNING => this.warning_count += 1,
136 DiagnosticSeverity::INFORMATION => this.info_count += 1,
137 DiagnosticSeverity::HINT => this.hint_count += 1,
138 _ => {}
139 }
140 }
141 }
142
143 this
144 }
145
146 pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
147 proto::DiagnosticSummary {
148 path: path.to_string_lossy().to_string(),
149 error_count: self.error_count as u32,
150 warning_count: self.warning_count as u32,
151 info_count: self.info_count as u32,
152 hint_count: self.hint_count as u32,
153 }
154 }
155}
156
/// Identifies a single entry (file or directory) within a specific worktree
/// by the worktree-local entry id.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ProjectEntry {
    pub worktree_id: WorktreeId,
    pub entry_id: usize,
}
162
163impl Project {
    /// Registers all of `Project`'s RPC handlers on the client. Call once at
    /// startup, before sharing or joining any project.
    pub fn init(client: &Arc<Client>) {
        // Fire-and-forget message handlers.
        client.add_entity_message_handler(Self::handle_add_collaborator);
        client.add_entity_message_handler(Self::handle_buffer_reloaded);
        client.add_entity_message_handler(Self::handle_buffer_saved);
        client.add_entity_message_handler(Self::handle_close_buffer);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
        client.add_entity_message_handler(Self::handle_remove_collaborator);
        client.add_entity_message_handler(Self::handle_share_worktree);
        client.add_entity_message_handler(Self::handle_unregister_worktree);
        client.add_entity_message_handler(Self::handle_unshare_project);
        client.add_entity_message_handler(Self::handle_update_buffer_file);
        client.add_entity_message_handler(Self::handle_update_buffer);
        client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
        client.add_entity_message_handler(Self::handle_update_worktree);
        // Request/response handlers.
        client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_entity_request_handler(Self::handle_apply_code_action);
        client.add_entity_request_handler(Self::handle_format_buffers);
        client.add_entity_request_handler(Self::handle_get_code_actions);
        client.add_entity_request_handler(Self::handle_get_completions);
        client.add_entity_request_handler(Self::handle_get_definition);
        client.add_entity_request_handler(Self::handle_open_buffer);
        client.add_entity_request_handler(Self::handle_save_buffer);
    }
188
    /// Creates a local project backed by the given filesystem.
    ///
    /// Spawns a background task (kept alive via `_maintain_remote_id_task`)
    /// that watches the client's connection status: on each (re)connect it
    /// registers the project with the server to obtain a remote id and
    /// registers every current worktree under that id; on disconnect it
    /// clears the remote id.
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.recv().await {
                            // Weak handle: stop quietly once the project is dropped.
                            if let Some(this) = this.upgrade(&cx) {
                                let remote_id = if let client::Status::Connected { .. } = status {
                                    let response = rpc.request(proto::RegisterProject {}).await?;
                                    Some(response.project_id)
                                } else {
                                    None
                                };

                                if let Some(project_id) = remote_id {
                                    // Register all worktrees under the new project id
                                    // before publishing the id via `set_remote_id`.
                                    let mut registrations = Vec::new();
                                    this.update(&mut cx, |this, cx| {
                                        for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                                            registrations.push(worktree.update(
                                                cx,
                                                |worktree, cx| {
                                                    let worktree = worktree.as_local_mut().unwrap();
                                                    worktree.register(project_id, cx)
                                                },
                                            ));
                                        }
                                    });
                                    for registration in registrations {
                                        registration.await?;
                                    }
                                }
                                this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                open_buffers: Default::default(),
                loading_buffers: Default::default(),
                shared_buffers: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: broadcast::channel(1).0,
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
            }
        })
    }
262
    /// Joins the project with the given server-side id as a remote replica.
    ///
    /// Connects and authenticates the client, sends `JoinProject`, builds a
    /// remote worktree for each worktree in the response (detaching their
    /// load tasks), then loads the collaborators' user records and stores
    /// them on the new model.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(&cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        // The server assigns this replica's id for attributing edits.
        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let this = cx.add_model(|cx| {
            let mut this = Self {
                worktrees: Vec::new(),
                open_buffers: Default::default(),
                loading_buffers: Default::default(),
                opened_buffer: broadcast::channel(1).0,
                shared_buffers: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client,
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        // Fetch all collaborators' user records before constructing the
        // `Collaborator` entries that reference them.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
337
    /// Builds a local project for tests, wired to a fake HTTP client that
    /// answers every request with a 404.
    #[cfg(any(test, feature = "test-support"))]
    pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::new());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
    }
346
347 #[cfg(any(test, feature = "test-support"))]
348 pub fn shared_buffer(&self, peer_id: PeerId, remote_id: u64) -> Option<ModelHandle<Buffer>> {
349 self.shared_buffers
350 .get(&peer_id)
351 .and_then(|buffers| buffers.get(&remote_id))
352 .cloned()
353 }
354
355 #[cfg(any(test, feature = "test-support"))]
356 pub fn has_buffered_operations(&self) -> bool {
357 self.open_buffers
358 .values()
359 .any(|buffer| matches!(buffer, OpenBuffer::Loading(_)))
360 }
361
    /// The filesystem implementation backing this project.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
365
366 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
367 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
368 *remote_id_tx.borrow_mut() = remote_id;
369 }
370
371 self.subscriptions.clear();
372 if let Some(remote_id) = remote_id {
373 self.subscriptions
374 .push(self.client.add_model_for_remote_entity(remote_id, cx));
375 }
376 }
377
378 pub fn remote_id(&self) -> Option<u64> {
379 match &self.client_state {
380 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
381 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
382 }
383 }
384
385 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
386 let mut id = None;
387 let mut watch = None;
388 match &self.client_state {
389 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
390 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
391 }
392
393 async move {
394 if let Some(id) = id {
395 return id;
396 }
397 let mut watch = watch.unwrap();
398 loop {
399 let id = *watch.borrow();
400 if let Some(id) = id {
401 return id;
402 }
403 watch.recv().await;
404 }
405 }
406 }
407
408 pub fn replica_id(&self) -> ReplicaId {
409 match &self.client_state {
410 ProjectClientState::Local { .. } => 0,
411 ProjectClientState::Remote { replica_id, .. } => *replica_id,
412 }
413 }
414
    /// The collaborators currently connected to this project, by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
418
419 pub fn worktrees<'a>(
420 &'a self,
421 cx: &'a AppContext,
422 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
423 self.worktrees
424 .iter()
425 .filter_map(move |worktree| worktree.upgrade(cx))
426 }
427
428 pub fn worktree_for_id(
429 &self,
430 id: WorktreeId,
431 cx: &AppContext,
432 ) -> Option<ModelHandle<Worktree>> {
433 self.worktrees(cx)
434 .find(|worktree| worktree.read(cx).id() == id)
435 }
436
    /// Starts sharing this local project with collaborators: marks it as
    /// shared, sends `ShareProject` to the server, then shares every local
    /// worktree under the project id.
    ///
    /// The returned task fails if the project has no remote id yet, or if it
    /// is a remote project (only local projects can be shared).
    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, _| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;
                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            // Tell the server first; only then share the worktrees under it.
            rpc.request(proto::ShareProject { project_id }).await?;
            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
473
474 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
475 let rpc = self.client.clone();
476 cx.spawn(|this, mut cx| async move {
477 let project_id = this.update(&mut cx, |this, _| {
478 if let ProjectClientState::Local {
479 is_shared,
480 remote_id_rx,
481 ..
482 } = &mut this.client_state
483 {
484 *is_shared = false;
485 remote_id_rx
486 .borrow()
487 .ok_or_else(|| anyhow!("no project id"))
488 } else {
489 Err(anyhow!("can't share a remote project"))
490 }
491 })?;
492
493 rpc.send(proto::UnshareProject { project_id })?;
494 this.update(&mut cx, |this, cx| {
495 this.collaborators.clear();
496 this.shared_buffers.clear();
497 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
498 worktree.update(cx, |worktree, _| {
499 worktree.as_local_mut().unwrap().unshare();
500 });
501 }
502 cx.notify()
503 });
504 Ok(())
505 })
506 }
507
508 pub fn is_read_only(&self) -> bool {
509 match &self.client_state {
510 ProjectClientState::Local { .. } => false,
511 ProjectClientState::Remote {
512 sharing_has_stopped,
513 ..
514 } => *sharing_has_stopped,
515 }
516 }
517
518 pub fn is_local(&self) -> bool {
519 match &self.client_state {
520 ProjectClientState::Local { .. } => true,
521 ProjectClientState::Remote { .. } => false,
522 }
523 }
524
525 pub fn is_remote(&self) -> bool {
526 !self.is_local()
527 }
528
    /// Opens the buffer at `path`, deduplicating against buffers that are
    /// already open and loads that are already in flight.
    ///
    /// Concurrent calls for the same path share a single load via a watch
    /// channel stored in `loading_buffers`; every caller resolves to the
    /// same `Buffer` handle (or the same error, re-wrapped per caller).
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        // Once no loads remain, drop `Loading` entries whose
                        // buffers never finished loading.
                        if this.loading_buffers.is_empty() {
                            this.open_buffers
                                .retain(|_, buffer| matches!(buffer, OpenBuffer::Loaded(_)))
                        }

                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait until the shared load publishes its result on the watch.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.recv().await;
            }
        })
    }
594
    /// Loads a buffer from a local worktree and registers it with the
    /// project once the load completes.
    ///
    /// Holds the worktree only weakly across the await; fails if the
    /// worktree is dropped before the buffer finishes loading.
    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        let worktree = worktree.downgrade();
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            let worktree = worktree
                .upgrade(&cx)
                .ok_or_else(|| anyhow!("worktree was removed"))?;
            this.update(&mut cx, |this, cx| {
                this.register_buffer(&buffer, Some(&worktree), cx)
            })?;
            Ok(buffer)
        })
    }
617
    /// Requests a buffer from the project host over RPC and deserializes it
    /// into a local replica.
    ///
    /// Assumes the project already has a remote id (`unwrap`): this is only
    /// reached for worktrees that are themselves remote.
    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBuffer {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }
642
    /// Opens the buffer for a file URI reported by a language server (e.g. a
    /// go-to-definition target), creating a worktree for it if the file lies
    /// outside every existing worktree.
    ///
    /// When a new worktree is created, the given language server is recorded
    /// for it so the buffer reuses the server instead of starting a new one.
    // NOTE(review): `create_local_worktree(..., true, ...)` — the `true` flag
    // presumably marks the worktree as weak/invisible; confirm at the callee.
    fn open_local_buffer_from_lsp_path(
        &mut self,
        abs_path: lsp::Url,
        lang_name: String,
        lang_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, true, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers
                        .insert((worktree.read(cx).id(), lang_name), lang_server);
                });
                // The new worktree is rooted at the file itself, so the
                // relative path is empty.
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
679
    /// Saves `buffer` to a new absolute path ("save as"), finding or
    /// creating a local worktree containing that path, then re-assigns the
    /// buffer's language based on its new file name.
    pub fn save_buffer_as(
        &self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            // The buffer's path (and possibly extension) changed, so pick
            // its language and language server again.
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
            });
            Ok(())
        })
    }
703
704 #[cfg(any(test, feature = "test-support"))]
705 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
706 let path = path.into();
707 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
708 self.open_buffers.iter().any(|(_, buffer)| {
709 if let Some(buffer) = buffer.upgrade(cx) {
710 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
711 if file.worktree == worktree && file.path() == &path.path {
712 return true;
713 }
714 }
715 }
716 false
717 })
718 } else {
719 false
720 }
721 }
722
    /// Returns the already-open buffer for `path`, if any.
    ///
    /// As a side effect, prunes `open_buffers` entries whose buffers have
    /// been dropped (their weak handles no longer upgrade) — hence the
    /// `retain` instead of a plain search.
    fn get_open_buffer(
        &mut self,
        path: &ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Option<ModelHandle<Buffer>> {
        let mut result = None;
        let worktree = self.worktree_for_id(path.worktree_id, cx)?;
        self.open_buffers.retain(|_, buffer| {
            if let Some(buffer) = buffer.upgrade(cx) {
                if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                    if file.worktree == worktree && file.path() == &path.path {
                        result = Some(buffer);
                    }
                }
                true
            } else {
                // Buffer was dropped; remove its stale entry.
                false
            }
        });
        result
    }
744
    /// Records a newly loaded buffer in `open_buffers`, applies any
    /// operations that arrived while it was still loading, and assigns it a
    /// language.
    ///
    /// Fails if a buffer with the same id was already registered as loaded,
    /// or if applying the queued operations fails.
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        match self.open_buffers.insert(
            buffer.read(cx).remote_id(),
            OpenBuffer::Loaded(buffer.downgrade()),
        ) {
            None => {}
            // Operations buffered while loading are applied now.
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            Some(OpenBuffer::Loaded(_)) => Err(anyhow!("registered the same buffer twice"))?,
        }
        self.assign_language_to_buffer(&buffer, worktree, cx);
        Ok(())
    }
764
    /// Selects a language for `buffer` based on its full path, and — for
    /// buffers in a local worktree — starts (or reuses) the matching
    /// language server and applies any diagnostics previously stored for
    /// the buffer's path.
    ///
    /// Returns `None` when the buffer has no file; the `Option` return is
    /// only used for `?`-style early exit and carries no other meaning.
    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        let (path, full_path) = {
            let file = buffer.read(cx).file()?;
            (file.path().clone(), file.full_path(cx))
        };

        // If the buffer has a language, set it and start/assign the language server
        if let Some(language) = self.languages.select_language(&full_path) {
            buffer.update(cx, |buffer, cx| {
                buffer.set_language(Some(language.clone()), cx);
            });

            // For local worktrees, start a language server if needed.
            // Also assign the language server and any previously stored diagnostics to the buffer.
            if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
                let worktree_id = local_worktree.id();
                let worktree_abs_path = local_worktree.abs_path().clone();

                // One server per (worktree, language); start it lazily on
                // first use.
                let language_server = match self
                    .language_servers
                    .entry((worktree_id, language.name().to_string()))
                {
                    hash_map::Entry::Occupied(e) => Some(e.get().clone()),
                    hash_map::Entry::Vacant(e) => Self::start_language_server(
                        self.client.clone(),
                        language.clone(),
                        &worktree_abs_path,
                        cx,
                    )
                    .map(|server| e.insert(server).clone()),
                };

                buffer.update(cx, |buffer, cx| {
                    buffer.set_language_server(language_server, cx);
                });
            }
        }

        // Diagnostics may exist even when no language matched.
        if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
            if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
                buffer.update(cx, |buffer, cx| {
                    buffer.update_diagnostics(diagnostics, None, cx).log_err();
                });
            }
        }

        None
    }
818
    /// Starts a language server for `language` rooted at `worktree_path` and
    /// wires its diagnostic notifications into this project.
    ///
    /// Returns `None` if the language has no server or it fails to start.
    /// Diagnostic events flow through an unbounded channel into a spawned
    /// loop that holds only a weak handle to the project, so processing
    /// stops when the project is dropped.
    fn start_language_server(
        rpc: Arc<Client>,
        language: Arc<Language>,
        worktree_path: &Path,
        cx: &mut ModelContext<Self>,
    ) -> Option<Arc<LanguageServer>> {
        // Internal event protocol between the LSP notification callbacks
        // (background) and the processing loop below (foreground).
        enum LspEvent {
            DiagnosticsStart,
            DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
            DiagnosticsFinish,
        }

        let language_server = language
            .start_server(worktree_path, cx)
            .log_err()
            .flatten()?;
        let disk_based_sources = language
            .disk_based_diagnostic_sources()
            .cloned()
            .unwrap_or_default();
        let disk_based_diagnostics_progress_token =
            language.disk_based_diagnostics_progress_token().cloned();
        let has_disk_based_diagnostic_progress_token =
            disk_based_diagnostics_progress_token.is_some();
        let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();

        // Listen for `PublishDiagnostics` notifications.
        language_server
            .on_notification::<lsp::notification::PublishDiagnostics, _>({
                let diagnostics_tx = diagnostics_tx.clone();
                move |params| {
                    // Without a progress token there is no other start/finish
                    // signal, so bracket each update with synthetic events.
                    if !has_disk_based_diagnostic_progress_token {
                        block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                    }
                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params))).ok();
                    if !has_disk_based_diagnostic_progress_token {
                        block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                    }
                }
            })
            .detach();

        // Listen for `Progress` notifications. Send an event when the language server
        // transitions between running jobs and not running any jobs.
        let mut running_jobs_for_this_server: i32 = 0;
        language_server
            .on_notification::<lsp::notification::Progress, _>(move |params| {
                let token = match params.token {
                    lsp::NumberOrString::Number(_) => None,
                    lsp::NumberOrString::String(token) => Some(token),
                };

                // Only the language's designated disk-based-diagnostics
                // token is of interest here.
                if token == disk_based_diagnostics_progress_token {
                    match params.value {
                        lsp::ProgressParamsValue::WorkDone(progress) => match progress {
                            lsp::WorkDoneProgress::Begin(_) => {
                                running_jobs_for_this_server += 1;
                                if running_jobs_for_this_server == 1 {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                                }
                            }
                            lsp::WorkDoneProgress::End(_) => {
                                running_jobs_for_this_server -= 1;
                                if running_jobs_for_this_server == 0 {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                                }
                            }
                            _ => {}
                        },
                    }
                }
            })
            .detach();

        // Process all the LSP events.
        cx.spawn_weak(|this, mut cx| async move {
            while let Ok(message) = diagnostics_rx.recv().await {
                // Weak handle: exit the loop once the project is gone.
                let this = this.upgrade(&cx)?;
                match message {
                    LspEvent::DiagnosticsStart => {
                        this.update(&mut cx, |this, cx| {
                            this.disk_based_diagnostics_started(cx);
                            // Relay progress to collaborators when shared.
                            if let Some(project_id) = this.remote_id() {
                                rpc.send(proto::DiskBasedDiagnosticsUpdating { project_id })
                                    .log_err();
                            }
                        });
                    }
                    LspEvent::DiagnosticsUpdate(mut params) => {
                        language.process_diagnostics(&mut params);
                        this.update(&mut cx, |this, cx| {
                            this.update_diagnostics(params, &disk_based_sources, cx)
                                .log_err();
                        });
                    }
                    LspEvent::DiagnosticsFinish => {
                        this.update(&mut cx, |this, cx| {
                            this.disk_based_diagnostics_finished(cx);
                            if let Some(project_id) = this.remote_id() {
                                rpc.send(proto::DiskBasedDiagnosticsUpdated { project_id })
                                    .log_err();
                            }
                        });
                    }
                }
            }
            Some(())
        })
        .detach();

        Some(language_server)
    }
931
932 pub fn update_diagnostics(
933 &mut self,
934 params: lsp::PublishDiagnosticsParams,
935 disk_based_sources: &HashSet<String>,
936 cx: &mut ModelContext<Self>,
937 ) -> Result<()> {
938 let abs_path = params
939 .uri
940 .to_file_path()
941 .map_err(|_| anyhow!("URI is not a file"))?;
942 let mut next_group_id = 0;
943 let mut diagnostics = Vec::default();
944 let mut primary_diagnostic_group_ids = HashMap::default();
945 let mut sources_by_group_id = HashMap::default();
946 let mut supporting_diagnostic_severities = HashMap::default();
947 for diagnostic in ¶ms.diagnostics {
948 let source = diagnostic.source.as_ref();
949 let code = diagnostic.code.as_ref().map(|code| match code {
950 lsp::NumberOrString::Number(code) => code.to_string(),
951 lsp::NumberOrString::String(code) => code.clone(),
952 });
953 let range = range_from_lsp(diagnostic.range);
954 let is_supporting = diagnostic
955 .related_information
956 .as_ref()
957 .map_or(false, |infos| {
958 infos.iter().any(|info| {
959 primary_diagnostic_group_ids.contains_key(&(
960 source,
961 code.clone(),
962 range_from_lsp(info.location.range),
963 ))
964 })
965 });
966
967 if is_supporting {
968 if let Some(severity) = diagnostic.severity {
969 supporting_diagnostic_severities
970 .insert((source, code.clone(), range), severity);
971 }
972 } else {
973 let group_id = post_inc(&mut next_group_id);
974 let is_disk_based =
975 source.map_or(false, |source| disk_based_sources.contains(source));
976
977 sources_by_group_id.insert(group_id, source);
978 primary_diagnostic_group_ids
979 .insert((source, code.clone(), range.clone()), group_id);
980
981 diagnostics.push(DiagnosticEntry {
982 range,
983 diagnostic: Diagnostic {
984 code: code.clone(),
985 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
986 message: diagnostic.message.clone(),
987 group_id,
988 is_primary: true,
989 is_valid: true,
990 is_disk_based,
991 },
992 });
993 if let Some(infos) = &diagnostic.related_information {
994 for info in infos {
995 if info.location.uri == params.uri && !info.message.is_empty() {
996 let range = range_from_lsp(info.location.range);
997 diagnostics.push(DiagnosticEntry {
998 range,
999 diagnostic: Diagnostic {
1000 code: code.clone(),
1001 severity: DiagnosticSeverity::INFORMATION,
1002 message: info.message.clone(),
1003 group_id,
1004 is_primary: false,
1005 is_valid: true,
1006 is_disk_based,
1007 },
1008 });
1009 }
1010 }
1011 }
1012 }
1013 }
1014
1015 for entry in &mut diagnostics {
1016 let diagnostic = &mut entry.diagnostic;
1017 if !diagnostic.is_primary {
1018 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1019 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1020 source,
1021 diagnostic.code.clone(),
1022 entry.range.clone(),
1023 )) {
1024 diagnostic.severity = severity;
1025 }
1026 }
1027 }
1028
1029 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1030 Ok(())
1031 }
1032
    /// Stores freshly computed diagnostics for the file at `abs_path`:
    /// updates the matching open buffer (if any), records the diagnostics on
    /// the owning local worktree, and emits `Event::DiagnosticsUpdated`.
    ///
    /// Fails if no local worktree contains `abs_path`, if the worktree is
    /// not local, or if the buffer/worktree updates fail.
    pub fn update_diagnostic_entries(
        &mut self,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };

        // NOTE(review): matches buffers on relative path only, not the
        // worktree, and stops at the first match — assumes a given relative
        // path is open in at most one buffer; confirm.
        for buffer in self.open_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| *file.path() == project_path.path)
                {
                    buffer.update(cx, |buffer, cx| {
                        buffer.update_diagnostics(diagnostics.clone(), version, cx)
                    })?;
                    break;
                }
            }
        }
        worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(project_path.path.clone(), diagnostics, cx)
        })?;
        cx.emit(Event::DiagnosticsUpdated(project_path));
        Ok(())
    }
1071
    /// Formats each of `buffers`.
    ///
    /// Buffers backed by local files are formatted by sending an LSP
    /// `textDocument/formatting` request to the language server registered for
    /// the buffer's (worktree, language) pair. Buffers belonging to remote
    /// worktrees are batched into a single `FormatBuffers` RPC request to the
    /// host.
    ///
    /// Returns a [`ProjectTransaction`] mapping each edited buffer to the
    /// transaction produced by applying its formatting edits. When
    /// `push_to_history` is false, each captured transaction is forgotten from
    /// the buffer's undo history.
    ///
    /// If any buffer has no file, no language, or no matching language server,
    /// the whole operation short-circuits and returns an empty transaction.
    pub fn format(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<ProjectTransaction>> {
        // Partition the buffers: local ones are formatted directly via LSP,
        // remote ones are forwarded to the host.
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            let worktree;
            if let Some(file) = File::from_dyn(buffer.file()) {
                worktree = file.worktree.clone();
                if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
                    let lang_server;
                    if let Some(lang) = buffer.language() {
                        if let Some(server) = self
                            .language_servers
                            .get(&(worktree.read(cx).id(), lang.name().to_string()))
                        {
                            lang_server = server.clone();
                        } else {
                            // No language server for this buffer; nothing to format.
                            return Task::ready(Ok(Default::default()));
                        };
                    } else {
                        return Task::ready(Ok(Default::default()));
                    }

                    local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
                } else {
                    remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                }
            } else {
                return Task::ready(Ok(Default::default()));
            }
        }

        // Remote formatting is only possible when the project has a remote id.
        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            // Ask the host to format all remote buffers in one request and
            // replay the resulting transaction locally.
            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::FormatBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            // Format each local buffer sequentially via its language server.
            for (buffer, buffer_abs_path, lang_server) in local_buffers {
                let lsp_edits = lang_server
                    .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
                        text_document: lsp::TextDocumentIdentifier::new(
                            lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                        ),
                        options: Default::default(),
                        work_done_progress_params: Default::default(),
                    })
                    .await?;

                if let Some(lsp_edits) = lsp_edits {
                    let edits = buffer
                        .update(&mut cx, |buffer, cx| {
                            buffer.edits_from_lsp(lsp_edits, None, cx)
                        })
                        .await?;
                    buffer.update(&mut cx, |buffer, cx| {
                        // Group all formatting edits into one dedicated
                        // transaction so they undo as a unit.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            project_transaction.0.insert(cx.handle(), transaction);
                        }
                    });
                }
            }

            Ok(project_transaction)
        })
    }
1171
    /// Finds the definition(s) of the symbol at `position` in the given
    /// buffer.
    ///
    /// For local worktrees, this sends a `textDocument/definition` request to
    /// the buffer's language server and opens a buffer for each returned
    /// target location. For remote worktrees, a `GetDefinition` RPC request is
    /// sent to the host. Returns an empty list when the buffer has no file,
    /// no language, or no language server, or when the project is neither
    /// local nor connected to a host.
    pub fn definition<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Definition>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);

        if worktree.read(cx).as_local().is_some() {
            // Local path: talk to the language server directly.
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_name;
            let lang_server;
            if let Some(lang) = source_buffer.language() {
                lang_name = lang.name().to_string();
                if let Some(server) = self
                    .language_servers
                    .get(&(worktree.read(cx).id(), lang_name.clone()))
                {
                    lang_server = server.clone();
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            } else {
                return Task::ready(Ok(Default::default()));
            }

            cx.spawn(|this, mut cx| async move {
                let response = lang_server
                    .request::<lsp::request::GotoDefinition>(lsp::GotoDefinitionParams {
                        text_document_position_params: lsp::TextDocumentPositionParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                            ),
                            position: lsp::Position::new(position.row, position.column),
                        },
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await?;

                let mut definitions = Vec::new();
                if let Some(response) = response {
                    // Normalize the three possible LSP response shapes into a
                    // flat list of (uri, range) pairs.
                    let mut unresolved_locations = Vec::new();
                    match response {
                        lsp::GotoDefinitionResponse::Scalar(loc) => {
                            unresolved_locations.push((loc.uri, loc.range));
                        }
                        lsp::GotoDefinitionResponse::Array(locs) => {
                            unresolved_locations.extend(locs.into_iter().map(|l| (l.uri, l.range)));
                        }
                        lsp::GotoDefinitionResponse::Link(links) => {
                            unresolved_locations.extend(
                                links
                                    .into_iter()
                                    .map(|l| (l.target_uri, l.target_selection_range)),
                            );
                        }
                    }

                    // Open a buffer for each target and anchor the range in it.
                    for (target_uri, target_range) in unresolved_locations {
                        let target_buffer_handle = this
                            .update(&mut cx, |this, cx| {
                                this.open_local_buffer_from_lsp_path(
                                    target_uri,
                                    lang_name.clone(),
                                    lang_server.clone(),
                                    cx,
                                )
                            })
                            .await?;

                        cx.read(|cx| {
                            let target_buffer = target_buffer_handle.read(cx);
                            // Clip in case the server's range is out of bounds
                            // for the buffer's current contents.
                            let target_start = target_buffer
                                .clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
                            let target_end = target_buffer
                                .clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
                            definitions.push(Definition {
                                target_buffer: target_buffer_handle,
                                target_range: target_buffer.anchor_after(target_start)
                                    ..target_buffer.anchor_before(target_end),
                            });
                        });
                    }
                }

                Ok(definitions)
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote path: ask the host and deserialize its answer.
            let client = self.client.clone();
            let request = proto::GetDefinition {
                project_id,
                buffer_id: source_buffer.remote_id(),
                position: Some(serialize_anchor(&source_buffer.anchor_before(position))),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client.request(request).await?;
                let mut definitions = Vec::new();
                for definition in response.definitions {
                    let buffer = definition.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let target_start = definition
                        .target_start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let target_end = definition
                        .target_end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    definitions.push(Definition {
                        target_buffer,
                        target_range: target_start..target_end,
                    })
                }

                Ok(definitions)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
1306
    /// Requests code completions at `position` in the given buffer.
    ///
    /// Locally, a `textDocument/completion` request is sent to the buffer's
    /// language server; completions whose primary edit range does not lie
    /// within the buffer, or that use insert/replace edits, are dropped.
    /// Remotely, a `GetCompletions` RPC request is sent to the host.
    /// Returns an empty list when the buffer has no file or no language
    /// server, or when the project is neither local nor connected to a host.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
                server
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            position.to_lsp_position(),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await?;

                // The server may reply with either a bare array or a
                // (possibly incomplete) completion list.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
                                lsp::CompletionTextEdit::Edit(edit) => {
                                    (range_from_lsp(edit.range), edit.new_text.clone())
                                }
                                lsp::CompletionTextEdit::InsertAndReplace(_) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            // Drop completions whose edit range falls outside
                            // the current buffer contents.
                            let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                            let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
                            if clipped_start == old_range.start && clipped_end == old_range.end {
                                Some(Completion {
                                    old_range: this.anchor_before(old_range.start)
                                        ..this.anchor_after(old_range.end),
                                    new_text,
                                    label: language
                                        .as_ref()
                                        .and_then(|l| l.label_for_completion(&lsp_completion))
                                        .unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
                                    lsp_completion,
                                })
                            } else {
                                None
                            }
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: (&source_buffer.version()).into(),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;
                // Wait until the local replica has caught up to the version
                // the host computed the completions against.
                // NOTE(review): the result of this await is discarded —
                // presumably `wait_for_version` yields no meaningful value;
                // confirm against the Buffer API.
                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(response.version.into())
                    })
                    .await;
                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
1422
    /// Applies the additional text edits (e.g. auto-imports) associated with a
    /// chosen completion.
    ///
    /// Locally, the completion is first resolved with the language server via
    /// `completionItem/resolve`; any `additional_text_edits` are then applied
    /// to the buffer in a single transaction. Remotely, the request is
    /// forwarded to the host and the returned transaction is awaited locally.
    ///
    /// Returns the transaction covering the applied edits, or `None` when the
    /// resolved completion carries no additional edits. When `push_to_history`
    /// is false the transaction is not kept in the buffer's undo history.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let lang_server = if let Some(language_server) = buffer.language_server() {
                language_server.clone()
            } else {
                return Task::ready(Err(anyhow!("buffer does not have a language server")));
            };

            cx.spawn(|_, mut cx| async move {
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = buffer_handle
                        .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        // Group all additional edits into one dedicated
                        // transaction so they undo as a unit.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            // The edits were empty or no-ops.
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Wait until the host's edits have replicated locally
                    // before exposing the transaction.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
1501
    /// Requests the code actions available for `range` in the given buffer.
    ///
    /// Locally, a `textDocument/codeAction` request is sent to the language
    /// server registered for the buffer's (worktree, language) pair,
    /// restricted to quickfix, refactor, and refactor-extract kinds; plain
    /// commands in the response are discarded. Remotely, a `GetCodeActions`
    /// RPC request is sent to the host. Returns an empty list when the buffer
    /// has no file, language, or language server, or when the project is
    /// neither local nor connected to a host.
    pub fn code_actions<T: ToOffset>(
        &self,
        buffer_handle: &ModelHandle<Buffer>,
        range: Range<T>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<CodeAction>>> {
        let buffer_handle = buffer_handle.clone();
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Ok(Default::default()));
        };
        // Anchor the range so it stays valid while the request is in flight.
        let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_name;
            let lang_server;
            if let Some(lang) = buffer.language() {
                lang_name = lang.name().to_string();
                if let Some(server) = self
                    .language_servers
                    .get(&(worktree.read(cx).id(), lang_name.clone()))
                {
                    lang_server = server.clone();
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            } else {
                return Task::ready(Ok(Default::default()));
            }

            let lsp_range = lsp::Range::new(
                range.start.to_point_utf16(buffer).to_lsp_position(),
                range.end.to_point_utf16(buffer).to_lsp_position(),
            );
            cx.foreground().spawn(async move {
                Ok(lang_server
                    .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
                        text_document: lsp::TextDocumentIdentifier::new(
                            lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                        ),
                        range: lsp_range,
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                        context: lsp::CodeActionContext {
                            diagnostics: Default::default(),
                            only: Some(vec![
                                lsp::CodeActionKind::QUICKFIX,
                                lsp::CodeActionKind::REFACTOR,
                                lsp::CodeActionKind::REFACTOR_EXTRACT,
                            ]),
                        },
                    })
                    .await?
                    .unwrap_or_default()
                    .into_iter()
                    .filter_map(|entry| {
                        // Keep full code actions; bare commands are not
                        // representable as a `CodeAction` here.
                        if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
                            Some(CodeAction {
                                range: range.clone(),
                                lsp_action,
                            })
                        } else {
                            None
                        }
                    })
                    .collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            cx.foreground().spawn(async move {
                let response = rpc
                    .request(proto::GetCodeActions {
                        project_id,
                        buffer_id,
                        start: Some(language::proto::serialize_anchor(&range.start)),
                        end: Some(language::proto::serialize_anchor(&range.end)),
                    })
                    .await?;
                response
                    .actions
                    .into_iter()
                    .map(language::proto::deserialize_code_action)
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
1597
1598 pub fn apply_code_action(
1599 &self,
1600 buffer_handle: ModelHandle<Buffer>,
1601 mut action: CodeAction,
1602 push_to_history: bool,
1603 cx: &mut ModelContext<Self>,
1604 ) -> Task<Result<ProjectTransaction>> {
1605 if self.is_local() {
1606 let buffer = buffer_handle.read(cx);
1607 let lang_name = if let Some(lang) = buffer.language() {
1608 lang.name().to_string()
1609 } else {
1610 return Task::ready(Ok(Default::default()));
1611 };
1612 let lang_server = if let Some(language_server) = buffer.language_server() {
1613 language_server.clone()
1614 } else {
1615 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1616 };
1617 let range = action.range.to_point_utf16(buffer);
1618 let fs = self.fs.clone();
1619
1620 cx.spawn(|this, mut cx| async move {
1621 if let Some(lsp_range) = action
1622 .lsp_action
1623 .data
1624 .as_mut()
1625 .and_then(|d| d.get_mut("codeActionParams"))
1626 .and_then(|d| d.get_mut("range"))
1627 {
1628 *lsp_range = serde_json::to_value(&lsp::Range::new(
1629 range.start.to_lsp_position(),
1630 range.end.to_lsp_position(),
1631 ))
1632 .unwrap();
1633 action.lsp_action = lang_server
1634 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1635 .await?;
1636 } else {
1637 let actions = this
1638 .update(&mut cx, |this, cx| {
1639 this.code_actions(&buffer_handle, action.range, cx)
1640 })
1641 .await?;
1642 action.lsp_action = actions
1643 .into_iter()
1644 .find(|a| a.lsp_action.title == action.lsp_action.title)
1645 .ok_or_else(|| anyhow!("code action is outdated"))?
1646 .lsp_action;
1647 }
1648
1649 let mut operations = Vec::new();
1650 if let Some(edit) = action.lsp_action.edit {
1651 if let Some(document_changes) = edit.document_changes {
1652 match document_changes {
1653 lsp::DocumentChanges::Edits(edits) => operations
1654 .extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit)),
1655 lsp::DocumentChanges::Operations(ops) => operations = ops,
1656 }
1657 } else if let Some(changes) = edit.changes {
1658 operations.extend(changes.into_iter().map(|(uri, edits)| {
1659 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1660 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1661 uri,
1662 version: None,
1663 },
1664 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1665 })
1666 }));
1667 }
1668 }
1669
1670 let mut project_transaction = ProjectTransaction::default();
1671 for operation in operations {
1672 match operation {
1673 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1674 let abs_path = op
1675 .uri
1676 .to_file_path()
1677 .map_err(|_| anyhow!("can't convert URI to path"))?;
1678
1679 if let Some(parent_path) = abs_path.parent() {
1680 fs.create_dir(parent_path).await?;
1681 }
1682 if abs_path.ends_with("/") {
1683 fs.create_dir(&abs_path).await?;
1684 } else {
1685 fs.create_file(
1686 &abs_path,
1687 op.options.map(Into::into).unwrap_or_default(),
1688 )
1689 .await?;
1690 }
1691 }
1692 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1693 let source_abs_path = op
1694 .old_uri
1695 .to_file_path()
1696 .map_err(|_| anyhow!("can't convert URI to path"))?;
1697 let target_abs_path = op
1698 .new_uri
1699 .to_file_path()
1700 .map_err(|_| anyhow!("can't convert URI to path"))?;
1701 fs.rename(
1702 &source_abs_path,
1703 &target_abs_path,
1704 op.options.map(Into::into).unwrap_or_default(),
1705 )
1706 .await?;
1707 }
1708 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1709 let abs_path = op
1710 .uri
1711 .to_file_path()
1712 .map_err(|_| anyhow!("can't convert URI to path"))?;
1713 let options = op.options.map(Into::into).unwrap_or_default();
1714 if abs_path.ends_with("/") {
1715 fs.remove_dir(&abs_path, options).await?;
1716 } else {
1717 fs.remove_file(&abs_path, options).await?;
1718 }
1719 }
1720 lsp::DocumentChangeOperation::Edit(op) => {
1721 let buffer_to_edit = this
1722 .update(&mut cx, |this, cx| {
1723 this.open_local_buffer_from_lsp_path(
1724 op.text_document.uri,
1725 lang_name.clone(),
1726 lang_server.clone(),
1727 cx,
1728 )
1729 })
1730 .await?;
1731
1732 let edits = buffer_to_edit
1733 .update(&mut cx, |buffer, cx| {
1734 let edits = op.edits.into_iter().map(|edit| match edit {
1735 lsp::OneOf::Left(edit) => edit,
1736 lsp::OneOf::Right(edit) => edit.text_edit,
1737 });
1738 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1739 })
1740 .await?;
1741
1742 let transaction = buffer_to_edit.update(&mut cx, |buffer, cx| {
1743 buffer.finalize_last_transaction();
1744 buffer.start_transaction();
1745 for (range, text) in edits {
1746 buffer.edit([range], text, cx);
1747 }
1748 let transaction = if buffer.end_transaction(cx).is_some() {
1749 let transaction =
1750 buffer.finalize_last_transaction().unwrap().clone();
1751 if !push_to_history {
1752 buffer.forget_transaction(transaction.id);
1753 }
1754 Some(transaction)
1755 } else {
1756 None
1757 };
1758
1759 transaction
1760 });
1761 if let Some(transaction) = transaction {
1762 project_transaction.0.insert(buffer_to_edit, transaction);
1763 }
1764 }
1765 }
1766 }
1767
1768 Ok(project_transaction)
1769 })
1770 } else if let Some(project_id) = self.remote_id() {
1771 let client = self.client.clone();
1772 let request = proto::ApplyCodeAction {
1773 project_id,
1774 buffer_id: buffer_handle.read(cx).remote_id(),
1775 action: Some(language::proto::serialize_code_action(&action)),
1776 };
1777 cx.spawn(|this, mut cx| async move {
1778 let response = client
1779 .request(request)
1780 .await?
1781 .transaction
1782 .ok_or_else(|| anyhow!("missing transaction"))?;
1783 this.update(&mut cx, |this, cx| {
1784 this.deserialize_project_transaction(response, push_to_history, cx)
1785 })
1786 .await
1787 })
1788 } else {
1789 Task::ready(Err(anyhow!("project does not have a remote id")))
1790 }
1791 }
1792
1793 pub fn find_or_create_local_worktree(
1794 &self,
1795 abs_path: impl AsRef<Path>,
1796 weak: bool,
1797 cx: &mut ModelContext<Self>,
1798 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1799 let abs_path = abs_path.as_ref();
1800 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1801 Task::ready(Ok((tree.clone(), relative_path.into())))
1802 } else {
1803 let worktree = self.create_local_worktree(abs_path, weak, cx);
1804 cx.foreground()
1805 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1806 }
1807 }
1808
1809 fn find_local_worktree(
1810 &self,
1811 abs_path: &Path,
1812 cx: &AppContext,
1813 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1814 for tree in self.worktrees(cx) {
1815 if let Some(relative_path) = tree
1816 .read(cx)
1817 .as_local()
1818 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1819 {
1820 return Some((tree.clone(), relative_path.into()));
1821 }
1822 }
1823 None
1824 }
1825
1826 pub fn is_shared(&self) -> bool {
1827 match &self.client_state {
1828 ProjectClientState::Local { is_shared, .. } => *is_shared,
1829 ProjectClientState::Remote { .. } => false,
1830 }
1831 }
1832
    /// Creates a new local worktree rooted at `abs_path` and adds it to the
    /// project.
    ///
    /// If the project has a remote id, the new worktree is registered with
    /// the server, and additionally shared when the project is currently
    /// shared, so collaborators see it immediately.
    fn create_local_worktree(
        &self,
        abs_path: impl AsRef<Path>,
        weak: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let path = Arc::from(abs_path.as_ref());
        cx.spawn(|project, mut cx| async move {
            let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;

            // Snapshot the project's sharing state while adding the worktree.
            let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
                project.add_worktree(&worktree, cx);
                (project.remote_id(), project.is_shared())
            });

            if let Some(project_id) = remote_project_id {
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_local_mut().unwrap().register(project_id, cx)
                    })
                    .await?;
                if is_shared {
                    worktree
                        .update(&mut cx, |worktree, cx| {
                            worktree.as_local_mut().unwrap().share(project_id, cx)
                        })
                        .await?;
                }
            }

            Ok(worktree)
        })
    }
1868
1869 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1870 self.worktrees.retain(|worktree| {
1871 worktree
1872 .upgrade(cx)
1873 .map_or(false, |w| w.read(cx).id() != id)
1874 });
1875 cx.notify();
1876 }
1877
1878 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
1879 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
1880 if worktree.read(cx).is_local() {
1881 cx.subscribe(&worktree, |this, worktree, _, cx| {
1882 this.update_local_worktree_buffers(worktree, cx);
1883 })
1884 .detach();
1885 }
1886
1887 let push_weak_handle = {
1888 let worktree = worktree.read(cx);
1889 worktree.is_local() && worktree.is_weak()
1890 };
1891 if push_weak_handle {
1892 cx.observe_release(&worktree, |this, cx| {
1893 this.worktrees
1894 .retain(|worktree| worktree.upgrade(cx).is_some());
1895 cx.notify();
1896 })
1897 .detach();
1898 self.worktrees
1899 .push(WorktreeHandle::Weak(worktree.downgrade()));
1900 } else {
1901 self.worktrees
1902 .push(WorktreeHandle::Strong(worktree.clone()));
1903 }
1904 cx.notify();
1905 }
1906
1907 fn update_local_worktree_buffers(
1908 &mut self,
1909 worktree_handle: ModelHandle<Worktree>,
1910 cx: &mut ModelContext<Self>,
1911 ) {
1912 let snapshot = worktree_handle.read(cx).snapshot();
1913 let mut buffers_to_delete = Vec::new();
1914 for (buffer_id, buffer) in &self.open_buffers {
1915 if let Some(buffer) = buffer.upgrade(cx) {
1916 buffer.update(cx, |buffer, cx| {
1917 if let Some(old_file) = File::from_dyn(buffer.file()) {
1918 if old_file.worktree != worktree_handle {
1919 return;
1920 }
1921
1922 let new_file = if let Some(entry) = old_file
1923 .entry_id
1924 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
1925 {
1926 File {
1927 is_local: true,
1928 entry_id: Some(entry.id),
1929 mtime: entry.mtime,
1930 path: entry.path.clone(),
1931 worktree: worktree_handle.clone(),
1932 }
1933 } else if let Some(entry) =
1934 snapshot.entry_for_path(old_file.path().as_ref())
1935 {
1936 File {
1937 is_local: true,
1938 entry_id: Some(entry.id),
1939 mtime: entry.mtime,
1940 path: entry.path.clone(),
1941 worktree: worktree_handle.clone(),
1942 }
1943 } else {
1944 File {
1945 is_local: true,
1946 entry_id: None,
1947 path: old_file.path().clone(),
1948 mtime: old_file.mtime(),
1949 worktree: worktree_handle.clone(),
1950 }
1951 };
1952
1953 if let Some(project_id) = self.remote_id() {
1954 self.client
1955 .send(proto::UpdateBufferFile {
1956 project_id,
1957 buffer_id: *buffer_id as u64,
1958 file: Some(new_file.to_proto()),
1959 })
1960 .log_err();
1961 }
1962 buffer.file_updated(Box::new(new_file), cx).detach();
1963 }
1964 });
1965 } else {
1966 buffers_to_delete.push(*buffer_id);
1967 }
1968 }
1969
1970 for buffer_id in buffers_to_delete {
1971 self.open_buffers.remove(&buffer_id);
1972 }
1973 }
1974
1975 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
1976 let new_active_entry = entry.and_then(|project_path| {
1977 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1978 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
1979 Some(ProjectEntry {
1980 worktree_id: project_path.worktree_id,
1981 entry_id: entry.id,
1982 })
1983 });
1984 if new_active_entry != self.active_entry {
1985 self.active_entry = new_active_entry;
1986 cx.emit(Event::ActiveEntryChanged(new_active_entry));
1987 }
1988 }
1989
1990 pub fn is_running_disk_based_diagnostics(&self) -> bool {
1991 self.language_servers_with_diagnostics_running > 0
1992 }
1993
1994 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
1995 let mut summary = DiagnosticSummary::default();
1996 for (_, path_summary) in self.diagnostic_summaries(cx) {
1997 summary.error_count += path_summary.error_count;
1998 summary.warning_count += path_summary.warning_count;
1999 summary.info_count += path_summary.info_count;
2000 summary.hint_count += path_summary.hint_count;
2001 }
2002 summary
2003 }
2004
2005 pub fn diagnostic_summaries<'a>(
2006 &'a self,
2007 cx: &'a AppContext,
2008 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2009 self.worktrees(cx).flat_map(move |worktree| {
2010 let worktree = worktree.read(cx);
2011 let worktree_id = worktree.id();
2012 worktree
2013 .diagnostic_summaries()
2014 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2015 })
2016 }
2017
2018 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2019 self.language_servers_with_diagnostics_running += 1;
2020 if self.language_servers_with_diagnostics_running == 1 {
2021 cx.emit(Event::DiskBasedDiagnosticsStarted);
2022 }
2023 }
2024
2025 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2026 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2027 self.language_servers_with_diagnostics_running -= 1;
2028 if self.language_servers_with_diagnostics_running == 0 {
2029 cx.emit(Event::DiskBasedDiagnosticsFinished);
2030 }
2031 }
2032
    /// The project entry most recently recorded via `set_active_path`, if any.
    pub fn active_entry(&self) -> Option<ProjectEntry> {
        self.active_entry
    }
2036
2037 // RPC message handlers
2038
2039 async fn handle_unshare_project(
2040 this: ModelHandle<Self>,
2041 _: TypedEnvelope<proto::UnshareProject>,
2042 _: Arc<Client>,
2043 mut cx: AsyncAppContext,
2044 ) -> Result<()> {
2045 this.update(&mut cx, |this, cx| {
2046 if let ProjectClientState::Remote {
2047 sharing_has_stopped,
2048 ..
2049 } = &mut this.client_state
2050 {
2051 *sharing_has_stopped = true;
2052 this.collaborators.clear();
2053 cx.notify();
2054 } else {
2055 unreachable!()
2056 }
2057 });
2058
2059 Ok(())
2060 }
2061
2062 async fn handle_add_collaborator(
2063 this: ModelHandle<Self>,
2064 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2065 _: Arc<Client>,
2066 mut cx: AsyncAppContext,
2067 ) -> Result<()> {
2068 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2069 let collaborator = envelope
2070 .payload
2071 .collaborator
2072 .take()
2073 .ok_or_else(|| anyhow!("empty collaborator"))?;
2074
2075 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2076 this.update(&mut cx, |this, cx| {
2077 this.collaborators
2078 .insert(collaborator.peer_id, collaborator);
2079 cx.notify();
2080 });
2081
2082 Ok(())
2083 }
2084
2085 async fn handle_remove_collaborator(
2086 this: ModelHandle<Self>,
2087 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2088 _: Arc<Client>,
2089 mut cx: AsyncAppContext,
2090 ) -> Result<()> {
2091 this.update(&mut cx, |this, cx| {
2092 let peer_id = PeerId(envelope.payload.peer_id);
2093 let replica_id = this
2094 .collaborators
2095 .remove(&peer_id)
2096 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2097 .replica_id;
2098 this.shared_buffers.remove(&peer_id);
2099 for (_, buffer) in &this.open_buffers {
2100 if let Some(buffer) = buffer.upgrade(cx) {
2101 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2102 }
2103 }
2104 cx.notify();
2105 Ok(())
2106 })
2107 }
2108
    /// Handles a request to share a worktree with this project: constructs a
    /// remote worktree from the snapshot in the message and adds it to the
    /// project.
    async fn handle_share_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ShareWorktree>,
        client: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            // A remote worktree only makes sense for a project that has been
            // assigned a remote id.
            let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
            let replica_id = this.replica_id();
            let worktree = envelope
                .payload
                .worktree
                .ok_or_else(|| anyhow!("invalid worktree"))?;
            let (worktree, load_task) =
                Worktree::remote(remote_id, replica_id, worktree, client, cx);
            this.add_worktree(&worktree, cx);
            // Let the worktree finish loading in the background.
            load_task.detach();
            Ok(())
        })
    }
2129
2130 async fn handle_unregister_worktree(
2131 this: ModelHandle<Self>,
2132 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2133 _: Arc<Client>,
2134 mut cx: AsyncAppContext,
2135 ) -> Result<()> {
2136 this.update(&mut cx, |this, cx| {
2137 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2138 this.remove_worktree(worktree_id, cx);
2139 Ok(())
2140 })
2141 }
2142
    /// Applies a worktree snapshot update to the matching worktree, if this
    /// project still contains it. Updates for unknown worktrees are ignored.
    async fn handle_update_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateWorktree>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                worktree.update(cx, |worktree, _| {
                    // This message only arrives for remote worktrees; a local
                    // worktree here would panic.
                    let worktree = worktree.as_remote_mut().unwrap();
                    worktree.update_from_remote(envelope)
                })?;
            }
            Ok(())
        })
    }
2160
    /// Records an updated diagnostic summary for a path in a remote worktree
    /// and emits `Event::DiagnosticsUpdated` for that path. Messages for
    /// unknown worktrees, or without a summary, are ignored.
    async fn handle_update_diagnostic_summary(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                if let Some(summary) = envelope.payload.summary {
                    let project_path = ProjectPath {
                        worktree_id,
                        path: Path::new(&summary.path).into(),
                    };
                    worktree.update(cx, |worktree, _| {
                        // This message only arrives for remote worktrees.
                        worktree
                            .as_remote_mut()
                            .unwrap()
                            .update_diagnostic_summary(project_path.path.clone(), &summary);
                    });
                    cx.emit(Event::DiagnosticsUpdated(project_path));
                }
            }
            Ok(())
        })
    }
2187
2188 async fn handle_disk_based_diagnostics_updating(
2189 this: ModelHandle<Self>,
2190 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2191 _: Arc<Client>,
2192 mut cx: AsyncAppContext,
2193 ) -> Result<()> {
2194 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2195 Ok(())
2196 }
2197
2198 async fn handle_disk_based_diagnostics_updated(
2199 this: ModelHandle<Self>,
2200 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2201 _: Arc<Client>,
2202 mut cx: AsyncAppContext,
2203 ) -> Result<()> {
2204 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2205 Ok(())
2206 }
2207
2208 async fn handle_update_buffer(
2209 this: ModelHandle<Self>,
2210 envelope: TypedEnvelope<proto::UpdateBuffer>,
2211 _: Arc<Client>,
2212 mut cx: AsyncAppContext,
2213 ) -> Result<()> {
2214 this.update(&mut cx, |this, cx| {
2215 let payload = envelope.payload.clone();
2216 let buffer_id = payload.buffer_id;
2217 let ops = payload
2218 .operations
2219 .into_iter()
2220 .map(|op| language::proto::deserialize_operation(op))
2221 .collect::<Result<Vec<_>, _>>()?;
2222 let is_remote = this.is_remote();
2223 match this.open_buffers.entry(buffer_id) {
2224 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2225 OpenBuffer::Loaded(buffer) => {
2226 if let Some(buffer) = buffer.upgrade(cx) {
2227 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2228 }
2229 }
2230 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2231 },
2232 hash_map::Entry::Vacant(e) => {
2233 if is_remote && this.loading_buffers.len() > 0 {
2234 e.insert(OpenBuffer::Loading(ops));
2235 }
2236 }
2237 }
2238 Ok(())
2239 })
2240 }
2241
2242 async fn handle_update_buffer_file(
2243 this: ModelHandle<Self>,
2244 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2245 _: Arc<Client>,
2246 mut cx: AsyncAppContext,
2247 ) -> Result<()> {
2248 this.update(&mut cx, |this, cx| {
2249 let payload = envelope.payload.clone();
2250 let buffer_id = payload.buffer_id;
2251 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2252 let worktree = this
2253 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2254 .ok_or_else(|| anyhow!("no such worktree"))?;
2255 let file = File::from_proto(file, worktree.clone(), cx)?;
2256 let buffer = this
2257 .open_buffers
2258 .get_mut(&buffer_id)
2259 .and_then(|b| b.upgrade(cx))
2260 .ok_or_else(|| anyhow!("no such buffer"))?;
2261 buffer.update(cx, |buffer, cx| {
2262 buffer.file_updated(Box::new(file), cx).detach();
2263 });
2264 Ok(())
2265 })
2266 }
2267
    /// Handles a peer's request to save a buffer this project has shared with
    /// them, replying with the saved version and mtime.
    async fn handle_save_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = envelope.payload.buffer_id;
        let sender_id = envelope.original_sender_id()?;
        let requested_version = envelope.payload.version.try_into()?;

        let (project_id, buffer) = this.update(&mut cx, |this, _| {
            let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
            let buffer = this
                .shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
            Ok::<_, anyhow::Error>((project_id, buffer))
        })?;

        // Don't save until this replica has seen all the edits contained in
        // the version the requester was at.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(requested_version)
            })
            .await;

        let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
        Ok(proto::BufferSaved {
            project_id,
            buffer_id,
            version: (&saved_version).into(),
            mtime: Some(mtime.into()),
        })
    }
2302
    /// Handles a peer's request to format a set of shared buffers, replying
    /// with the resulting project transaction serialized for that peer.
    async fn handle_format_buffers(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::FormatBuffers>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::FormatBuffersResponse> {
        let sender_id = envelope.original_sender_id()?;
        let format = this.update(&mut cx, |this, cx| {
            let shared_buffers = this
                .shared_buffers
                .get(&sender_id)
                .ok_or_else(|| anyhow!("peer has no buffers"))?;
            // Resolve every requested buffer id up front; fail the whole
            // request if any of them is unknown.
            let mut buffers = HashSet::default();
            for buffer_id in &envelope.payload.buffer_ids {
                buffers.insert(
                    shared_buffers
                        .get(buffer_id)
                        .cloned()
                        .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
                );
            }
            // NOTE(review): the `false` flag presumably suppresses pushing
            // the transaction to this replica's history — confirm against
            // `Project::format`.
            Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
        })?;

        let project_transaction = format.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::FormatBuffersResponse {
            transaction: Some(project_transaction),
        })
    }
2335
    /// Handles a peer's completion request on a shared buffer.
    ///
    /// Waits until this replica has caught up to the buffer version in the
    /// request, then computes completions and replies with them along with
    /// the version they were computed against.
    async fn handle_get_completions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCompletions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCompletionsResponse> {
        let sender_id = envelope.original_sender_id()?;
        let position = envelope
            .payload
            .position
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        let version = clock::Global::from(envelope.payload.version);
        let buffer = this.read_with(&cx, |this, _| {
            this.shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        buffer
            .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
            .await;
        // Capture the version at which the completions are computed, so the
        // response can report it alongside the results.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let completions = this
            .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
            .await?;

        Ok(proto::GetCompletionsResponse {
            completions: completions
                .iter()
                .map(language::proto::serialize_completion)
                .collect(),
            version: (&version).into(),
        })
    }
2371
    /// Handles a peer's request to apply a completion's additional edits to a
    /// shared buffer, replying with the resulting transaction, if any.
    async fn handle_apply_additional_edits_for_completion(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
        let sender_id = envelope.original_sender_id()?;
        let apply_additional_edits = this.update(&mut cx, |this, cx| {
            let buffer = this
                .shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // Deserializing a completion requires the buffer's language.
            let language = buffer.read(cx).language();
            let completion = language::proto::deserialize_completion(
                envelope
                    .payload
                    .completion
                    .ok_or_else(|| anyhow!("invalid completion"))?,
                language,
            )?;
            Ok::<_, anyhow::Error>(
                this.apply_additional_edits_for_completion(buffer, completion, false, cx),
            )
        })?;

        Ok(proto::ApplyCompletionAdditionalEditsResponse {
            transaction: apply_additional_edits
                .await?
                .as_ref()
                .map(language::proto::serialize_transaction),
        })
    }
2405
    /// Handles a peer's request for code actions in a range of a shared
    /// buffer, replying with the serialized actions.
    async fn handle_get_code_actions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCodeActions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCodeActionsResponse> {
        let sender_id = envelope.original_sender_id()?;
        let start = envelope
            .payload
            .start
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = envelope
            .payload
            .end
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid end"))?;
        let buffer = this.update(&mut cx, |this, _| {
            this.shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        // Wait only for the specific edits the two anchors depend on, built
        // from their timestamps.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version([start.timestamp, end.timestamp].into_iter().collect())
            })
            .await;
        let code_actions = this.update(&mut cx, |this, cx| {
            Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
        })?;

        Ok(proto::GetCodeActionsResponse {
            actions: code_actions
                .await?
                .iter()
                .map(language::proto::serialize_code_action)
                .collect(),
        })
    }
2446
    /// Handles a peer's request to apply a code action to a shared buffer,
    /// replying with the resulting project transaction serialized for that
    /// peer.
    async fn handle_apply_code_action(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCodeAction>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCodeActionResponse> {
        let sender_id = envelope.original_sender_id()?;
        let action = language::proto::deserialize_code_action(
            envelope
                .payload
                .action
                .ok_or_else(|| anyhow!("invalid action"))?,
        )?;
        let apply_code_action = this.update(&mut cx, |this, cx| {
            let buffer = this
                .shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
        })?;

        let project_transaction = apply_code_action.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ApplyCodeActionResponse {
            transaction: Some(project_transaction),
        })
    }
2477
    /// Handles a peer's go-to-definition request on a shared buffer, replying
    /// with each definition's target range and its target buffer serialized
    /// for the requesting peer.
    async fn handle_get_definition(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetDefinition>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetDefinitionResponse> {
        let sender_id = envelope.original_sender_id()?;
        let position = envelope
            .payload
            .position
            .and_then(deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        let definitions = this.update(&mut cx, |this, cx| {
            let source_buffer = this
                .shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // Reject anchors this replica can't resolve yet.
            if source_buffer.read(cx).can_resolve(&position) {
                Ok(this.definition(&source_buffer, position, cx))
            } else {
                Err(anyhow!("cannot resolve position"))
            }
        })?;

        let definitions = definitions.await?;

        this.update(&mut cx, |this, cx| {
            let mut response = proto::GetDefinitionResponse {
                definitions: Default::default(),
            };
            for definition in definitions {
                let buffer =
                    this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx);
                response.definitions.push(proto::Definition {
                    target_start: Some(serialize_anchor(&definition.target_range.start)),
                    target_end: Some(serialize_anchor(&definition.target_range.end)),
                    buffer: Some(buffer),
                });
            }
            Ok(response)
        })
    }
2521
    /// Handles a peer's request to open a buffer by project path, replying
    /// with the buffer serialized for that peer.
    async fn handle_open_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> anyhow::Result<proto::OpenBufferResponse> {
        let peer_id = envelope.original_sender_id()?;
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let open_buffer = this.update(&mut cx, |this, cx| {
            this.open_buffer(
                ProjectPath {
                    worktree_id,
                    path: PathBuf::from(envelope.payload.path).into(),
                },
                cx,
            )
        });

        let buffer = open_buffer.await?;
        this.update(&mut cx, |this, cx| {
            Ok(proto::OpenBufferResponse {
                buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
            })
        })
    }
2547
2548 fn serialize_project_transaction_for_peer(
2549 &mut self,
2550 project_transaction: ProjectTransaction,
2551 peer_id: PeerId,
2552 cx: &AppContext,
2553 ) -> proto::ProjectTransaction {
2554 let mut serialized_transaction = proto::ProjectTransaction {
2555 buffers: Default::default(),
2556 transactions: Default::default(),
2557 };
2558 for (buffer, transaction) in project_transaction.0 {
2559 serialized_transaction
2560 .buffers
2561 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
2562 serialized_transaction
2563 .transactions
2564 .push(language::proto::serialize_transaction(&transaction));
2565 }
2566 serialized_transaction
2567 }
2568
    /// Reconstructs a `ProjectTransaction` received from a peer.
    ///
    /// Each buffer in the message is resolved to a local buffer model, then
    /// the task waits until the edits referenced by each transaction have
    /// arrived locally before optionally pushing the transactions onto the
    /// buffers' histories.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            // `buffers` and `transactions` are parallel lists.
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }
            for (buffer, transaction) in &project_transaction.0 {
                // The transaction's edits may not have been replicated to
                // this buffer yet.
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
2601
    /// Serializes `buffer` for transmission to `peer_id`.
    ///
    /// The first time a buffer is sent to a given peer, its full state is
    /// serialized and the buffer is recorded in `shared_buffers`; on
    /// subsequent sends only the buffer id is transmitted.
    fn serialize_buffer_for_peer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        peer_id: PeerId,
        cx: &AppContext,
    ) -> proto::Buffer {
        let buffer_id = buffer.read(cx).remote_id();
        let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
        match shared_buffers.entry(buffer_id) {
            hash_map::Entry::Occupied(_) => proto::Buffer {
                variant: Some(proto::buffer::Variant::Id(buffer_id)),
            },
            hash_map::Entry::Vacant(entry) => {
                entry.insert(buffer.clone());
                proto::Buffer {
                    variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
                }
            }
        }
    }
2622
    /// Resolves a `proto::Buffer` received from a peer into a local buffer
    /// model.
    ///
    /// A `Variant::Id` refers to a buffer this project already knows about
    /// (possibly still opening); a `Variant::State` carries the buffer's full
    /// contents and creates a new model.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        let mut opened_buffer_tx = self.opened_buffer.clone();
        let mut opened_buffer_rx = self.opened_buffer.subscribe();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    // Only the id was sent, so the buffer should appear in
                    // `open_buffers` eventually. Re-check after every
                    // buffer-opened notification until it does.
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.open_buffers
                                .get(&id)
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        opened_buffer_rx
                            .recv()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    // If the buffer has an on-disk file, resolve its worktree
                    // and build the `File` before constructing the model.
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Box<dyn language::File>);
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });
                    this.update(&mut cx, |this, cx| {
                        this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
                    })?;

                    // Wake any tasks blocked in the `Variant::Id` loop above.
                    let _ = opened_buffer_tx.send(()).await;
                    Ok(buffer)
                }
            }
        })
    }
2682
2683 async fn handle_close_buffer(
2684 this: ModelHandle<Self>,
2685 envelope: TypedEnvelope<proto::CloseBuffer>,
2686 _: Arc<Client>,
2687 mut cx: AsyncAppContext,
2688 ) -> anyhow::Result<()> {
2689 this.update(&mut cx, |this, cx| {
2690 if let Some(shared_buffers) =
2691 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
2692 {
2693 shared_buffers.remove(&envelope.payload.buffer_id);
2694 cx.notify();
2695 }
2696 Ok(())
2697 })
2698 }
2699
2700 async fn handle_buffer_saved(
2701 this: ModelHandle<Self>,
2702 envelope: TypedEnvelope<proto::BufferSaved>,
2703 _: Arc<Client>,
2704 mut cx: AsyncAppContext,
2705 ) -> Result<()> {
2706 let version = envelope.payload.version.try_into()?;
2707 let mtime = envelope
2708 .payload
2709 .mtime
2710 .ok_or_else(|| anyhow!("missing mtime"))?
2711 .into();
2712
2713 this.update(&mut cx, |this, cx| {
2714 let buffer = this
2715 .open_buffers
2716 .get(&envelope.payload.buffer_id)
2717 .and_then(|buffer| buffer.upgrade(cx));
2718 if let Some(buffer) = buffer {
2719 buffer.update(cx, |buffer, cx| {
2720 buffer.did_save(version, mtime, None, cx);
2721 });
2722 }
2723 Ok(())
2724 })
2725 }
2726
2727 async fn handle_buffer_reloaded(
2728 this: ModelHandle<Self>,
2729 envelope: TypedEnvelope<proto::BufferReloaded>,
2730 _: Arc<Client>,
2731 mut cx: AsyncAppContext,
2732 ) -> Result<()> {
2733 let payload = envelope.payload.clone();
2734 let version = payload.version.try_into()?;
2735 let mtime = payload
2736 .mtime
2737 .ok_or_else(|| anyhow!("missing mtime"))?
2738 .into();
2739 this.update(&mut cx, |this, cx| {
2740 let buffer = this
2741 .open_buffers
2742 .get(&payload.buffer_id)
2743 .and_then(|buffer| buffer.upgrade(cx));
2744 if let Some(buffer) = buffer {
2745 buffer.update(cx, |buffer, cx| {
2746 buffer.did_reload(version, mtime, cx);
2747 });
2748 }
2749 Ok(())
2750 })
2751 }
2752
    /// Fuzzy-matches `query` against the paths of all non-weak worktrees,
    /// returning at most `max_results` matches.
    ///
    /// The worktree snapshots are captured up front, so the returned future
    /// can run the matching without touching app state. Matching can be
    /// aborted via `cancel_flag`.
    pub fn match_paths<'a>(
        &self,
        query: &'a str,
        include_ignored: bool,
        smart_case: bool,
        max_results: usize,
        cancel_flag: &'a AtomicBool,
        cx: &AppContext,
    ) -> impl 'a + Future<Output = Vec<PathMatch>> {
        let worktrees = self
            .worktrees(cx)
            .filter(|worktree| !worktree.read(cx).is_weak())
            .collect::<Vec<_>>();
        // Only prefix results with root names when there are multiple
        // worktrees to disambiguate.
        let include_root_name = worktrees.len() > 1;
        let candidate_sets = worktrees
            .into_iter()
            .map(|worktree| CandidateSet {
                snapshot: worktree.read(cx).snapshot(),
                include_ignored,
                include_root_name,
            })
            .collect::<Vec<_>>();

        let background = cx.background().clone();
        async move {
            fuzzy::match_paths(
                candidate_sets.as_slice(),
                query,
                smart_case,
                max_results,
                cancel_flag,
                background,
            )
            .await
        }
    }
2789}
2790
2791impl WorktreeHandle {
2792 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2793 match self {
2794 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2795 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2796 }
2797 }
2798}
2799
2800impl OpenBuffer {
2801 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
2802 match self {
2803 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
2804 OpenBuffer::Loading(_) => None,
2805 }
2806 }
2807}
2808
/// A worktree snapshot plus the options used when fuzzy-matching paths
/// against it.
struct CandidateSet {
    snapshot: Snapshot,
    // Whether ignored files are included among the candidates.
    include_ignored: bool,
    // Whether candidate paths are prefixed with the worktree's root name.
    include_root_name: bool,
}
2814
impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
    type Candidates = CandidateSetIter<'a>;

    fn id(&self) -> usize {
        self.snapshot.id().to_usize()
    }

    /// Number of candidate files; depends on whether ignored files are
    /// included.
    fn len(&self) -> usize {
        if self.include_ignored {
            self.snapshot.file_count()
        } else {
            self.snapshot.visible_file_count()
        }
    }

    /// The prefix prepended to every candidate path: the root name itself
    /// when the worktree's root is a single file, the root name plus a slash
    /// when root names are used for disambiguation, and nothing otherwise.
    fn prefix(&self) -> Arc<str> {
        if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
            self.snapshot.root_name().into()
        } else if self.include_root_name {
            format!("{}/", self.snapshot.root_name()).into()
        } else {
            "".into()
        }
    }

    /// Iterates the snapshot's files starting at index `start`.
    fn candidates(&'a self, start: usize) -> Self::Candidates {
        CandidateSetIter {
            traversal: self.snapshot.files(self.include_ignored, start),
        }
    }
}
2846
/// Iterator adapting a worktree file traversal into fuzzy-match candidates.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
2850
2851impl<'a> Iterator for CandidateSetIter<'a> {
2852 type Item = PathMatchCandidate<'a>;
2853
2854 fn next(&mut self) -> Option<Self::Item> {
2855 self.traversal.next().map(|entry| {
2856 if let EntryKind::File(char_bag) = entry.kind {
2857 PathMatchCandidate {
2858 path: &entry.path,
2859 char_bag,
2860 }
2861 } else {
2862 unreachable!()
2863 }
2864 })
2865 }
2866}
2867
impl Entity for Project {
    type Event = Event;

    /// On drop, tells the server this project is going away: local projects
    /// are unregistered (if they were ever assigned a remote id), remote
    /// projects are left. Send failures are logged and ignored.
    fn release(&mut self, _: &mut gpui::MutableAppContext) {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => {
                if let Some(project_id) = *remote_id_rx.borrow() {
                    self.client
                        .send(proto::UnregisterProject { project_id })
                        .log_err();
                }
            }
            ProjectClientState::Remote { remote_id, .. } => {
                self.client
                    .send(proto::LeaveProject {
                        project_id: *remote_id,
                    })
                    .log_err();
            }
        }
    }

    /// Before the app quits, shuts down all of this project's language
    /// servers concurrently and waits for them to finish.
    fn app_will_quit(
        &mut self,
        _: &mut MutableAppContext,
    ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
        use futures::FutureExt;

        let shutdown_futures = self
            .language_servers
            .drain()
            .filter_map(|(_, server)| server.shutdown())
            .collect::<Vec<_>>();
        Some(
            async move {
                futures::future::join_all(shutdown_futures).await;
            }
            .boxed(),
        )
    }
}
2909
impl Collaborator {
    /// Builds a `Collaborator` from its wire representation.
    ///
    /// The user record is requested from the `UserStore` synchronously, and
    /// awaited in the returned future.
    fn from_proto(
        message: proto::Collaborator,
        user_store: &ModelHandle<UserStore>,
        cx: &mut AsyncAppContext,
    ) -> impl Future<Output = Result<Self>> {
        let user = user_store.update(cx, |user_store, cx| {
            user_store.fetch_user(message.user_id, cx)
        });

        async move {
            Ok(Self {
                peer_id: PeerId(message.peer_id),
                user: user.await?,
                replica_id: message.replica_id as ReplicaId,
            })
        }
    }
}
2929
2930impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
2931 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
2932 Self {
2933 worktree_id,
2934 path: path.as_ref().into(),
2935 }
2936 }
2937}
2938
2939impl From<lsp::CreateFileOptions> for fs::CreateOptions {
2940 fn from(options: lsp::CreateFileOptions) -> Self {
2941 Self {
2942 overwrite: options.overwrite.unwrap_or(false),
2943 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2944 }
2945 }
2946}
2947
2948impl From<lsp::RenameFileOptions> for fs::RenameOptions {
2949 fn from(options: lsp::RenameFileOptions) -> Self {
2950 Self {
2951 overwrite: options.overwrite.unwrap_or(false),
2952 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2953 }
2954 }
2955}
2956
2957impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
2958 fn from(options: lsp::DeleteFileOptions) -> Self {
2959 Self {
2960 recursive: options.recursive.unwrap_or(false),
2961 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
2962 }
2963 }
2964}
2965
2966#[cfg(test)]
2967mod tests {
2968 use super::{Event, *};
2969 use client::test::FakeHttpClient;
2970 use fs::RealFs;
2971 use futures::StreamExt;
2972 use gpui::test::subscribe;
2973 use language::{
2974 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageRegistry,
2975 LanguageServerConfig, Point,
2976 };
2977 use lsp::Url;
2978 use serde_json::json;
2979 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
2980 use unindent::Unindent as _;
2981 use util::test::temp_tree;
2982 use worktree::WorktreeHandle as _;
2983
    /// Scans a worktree reached through symlinks and fuzzy-searches its
    /// paths.
    #[gpui::test]
    async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        // Open the tree through a symlink to its root, and add a symlinked
        // directory ("finnochio" -> "fennel") inside it.
        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = Project::test(Arc::new(RealFs), &mut cx);

        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(&root_link_path, false, cx)
            })
            .await
            .unwrap();

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;
        cx.read(|cx| {
            let tree = tree.read(cx);
            assert_eq!(tree.file_count(), 5);
            // A file reached via the symlinked directory resolves to the same
            // inode as the original.
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        // "bna" fuzzily matches only the paths under "banana".
        let cancel_flag = Default::default();
        let results = project
            .read_with(&cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
3046
    /// Exercises the disk-based-diagnostics event flow against a fake
    /// language server: progress start/end notifications and published
    /// diagnostics surface as project events and buffer diagnostics.
    #[gpui::test]
    async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let progress_token = language_server_config
            .disk_based_diagnostics_progress_token
            .clone()
            .unwrap();

        // Register a Rust language backed by the fake language server.
        let mut languages = LanguageRegistry::new();
        languages.add(Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        )));

        let dir = temp_tree(json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }));

        let http_client = FakeHttpClient::with_404_response();
        let client = Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));

        let project = cx.update(|cx| {
            Project::local(
                client,
                user_store,
                Arc::new(languages),
                Arc::new(RealFs),
                cx,
            )
        });

        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(&mut cx, |project, cx| {
                project.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Path::new("b.rs").into(),
                    },
                    cx,
                )
            })
            .await
            .unwrap();

        let mut events = subscribe(&project, &mut cx);

        // The first progress report for the token emits a "started" event.
        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Overlapping progress runs: start two more, end one.
        fake_server.start_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        fake_server.start_progress(&progress_token).await;

        // Publish a diagnostic for a file that isn't open yet.
        fake_server
            .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path(dir.path().join("a.rs")).unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            })
            .await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated(ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("a.rs"))
            })
        );

        // Ending the remaining progress runs emits "updated" then "finished".
        fake_server.end_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        // Opening the buffer surfaces the previously-published diagnostic.
        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        buffer.read_with(&cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });
    }
3179
    /// A worktree containing only directories yields no path-match results.
    #[gpui::test]
    async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "dir1": {},
                "dir2": {
                    "dir3": {}
                }
            }
        }));

        let project = Project::test(Arc::new(RealFs), &mut cx);
        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(&dir.path(), false, cx)
            })
            .await
            .unwrap();

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // "dir" matches the directory names, but only files are candidates.
        let cancel_flag = Default::default();
        let results = project
            .read_with(&cx, |project, cx| {
                project.match_paths("dir", false, false, 10, &cancel_flag, cx)
            })
            .await;

        assert!(results.is_empty());
    }
3211
    #[gpui::test]
    async fn test_definition(mut cx: gpui::TestAppContext) {
        // Go-to-definition whose target lies *outside* the open worktree: the
        // project must create a weak worktree for the target file on the fly,
        // and tear it down once the last reference to the definition is dropped.
        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();

        let mut languages = LanguageRegistry::new();
        languages.add(Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        )));

        // `b.rs` references a symbol whose definition lives in `a.rs`.
        let dir = temp_tree(json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }));
        let dir_path = dir.path().to_path_buf();

        let http_client = FakeHttpClient::with_404_response();
        let client = Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project = cx.update(|cx| {
            Project::local(
                client,
                user_store,
                Arc::new(languages),
                Arc::new(RealFs),
                cx,
            )
        });

        // Only `b.rs` becomes a (single-file) worktree; `a.rs` stays outside
        // the project.
        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(dir.path().join("b.rs"), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // A single-file worktree's sole buffer is addressed by the empty path.
        let buffer = project
            .update(&mut cx, |project, cx| {
                project.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Path::new("").into(),
                    },
                    cx,
                )
            })
            .await
            .unwrap();

        // Fake language server: validate the request coordinates, then answer
        // with a location inside `a.rs` — a file not yet in any worktree.
        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params| {
            let params = params.text_document_position_params;
            assert_eq!(
                params.text_document.uri.to_file_path().unwrap(),
                dir_path.join("b.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 22));

            Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
                lsp::Url::from_file_path(dir_path.join("a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            )))
        });

        let mut definitions = project
            .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
            .await
            .unwrap();

        assert_eq!(definitions.len(), 1);
        let definition = definitions.pop().unwrap();
        cx.update(|cx| {
            let target_buffer = definition.target_buffer.read(cx);
            assert_eq!(
                target_buffer
                    .file()
                    .unwrap()
                    .as_local()
                    .unwrap()
                    .abs_path(cx),
                dir.path().join("a.rs")
            );
            assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
            // A weak worktree (flag `true`) was created on the fly for `a.rs`.
            assert_eq!(
                list_worktrees(&project, cx),
                [
                    (dir.path().join("b.rs"), false),
                    (dir.path().join("a.rs"), true)
                ]
            );

            drop(definition);
        });
        // Dropping the definition released the weak worktree created for it.
        cx.read(|cx| {
            assert_eq!(
                list_worktrees(&project, cx),
                [(dir.path().join("b.rs"), false)]
            );
        });

        // Lists each worktree's absolute path paired with its weak-ness flag.
        fn list_worktrees(project: &ModelHandle<Project>, cx: &AppContext) -> Vec<(PathBuf, bool)> {
            project
                .read(cx)
                .worktrees(cx)
                .map(|worktree| {
                    let worktree = worktree.read(cx);
                    (
                        worktree.as_local().unwrap().abs_path().to_path_buf(),
                        worktree.is_weak(),
                    )
                })
                .collect::<Vec<_>>()
        }
    }
3334
3335 #[gpui::test]
3336 async fn test_save_file(mut cx: gpui::TestAppContext) {
3337 let fs = Arc::new(FakeFs::new(cx.background()));
3338 fs.insert_tree(
3339 "/dir",
3340 json!({
3341 "file1": "the old contents",
3342 }),
3343 )
3344 .await;
3345
3346 let project = Project::test(fs.clone(), &mut cx);
3347 let worktree_id = project
3348 .update(&mut cx, |p, cx| {
3349 p.find_or_create_local_worktree("/dir", false, cx)
3350 })
3351 .await
3352 .unwrap()
3353 .0
3354 .read_with(&cx, |tree, _| tree.id());
3355
3356 let buffer = project
3357 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3358 .await
3359 .unwrap();
3360 buffer
3361 .update(&mut cx, |buffer, cx| {
3362 assert_eq!(buffer.text(), "the old contents");
3363 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3364 buffer.save(cx)
3365 })
3366 .await
3367 .unwrap();
3368
3369 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3370 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3371 }
3372
3373 #[gpui::test]
3374 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
3375 let fs = Arc::new(FakeFs::new(cx.background()));
3376 fs.insert_tree(
3377 "/dir",
3378 json!({
3379 "file1": "the old contents",
3380 }),
3381 )
3382 .await;
3383
3384 let project = Project::test(fs.clone(), &mut cx);
3385 let worktree_id = project
3386 .update(&mut cx, |p, cx| {
3387 p.find_or_create_local_worktree("/dir/file1", false, cx)
3388 })
3389 .await
3390 .unwrap()
3391 .0
3392 .read_with(&cx, |tree, _| tree.id());
3393
3394 let buffer = project
3395 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3396 .await
3397 .unwrap();
3398 buffer
3399 .update(&mut cx, |buffer, cx| {
3400 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3401 buffer.save(cx)
3402 })
3403 .await
3404 .unwrap();
3405
3406 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3407 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3408 }
3409
3410 #[gpui::test(retries = 5)]
3411 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3412 let dir = temp_tree(json!({
3413 "a": {
3414 "file1": "",
3415 "file2": "",
3416 "file3": "",
3417 },
3418 "b": {
3419 "c": {
3420 "file4": "",
3421 "file5": "",
3422 }
3423 }
3424 }));
3425
3426 let project = Project::test(Arc::new(RealFs), &mut cx);
3427 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3428
3429 let (tree, _) = project
3430 .update(&mut cx, |p, cx| {
3431 p.find_or_create_local_worktree(dir.path(), false, cx)
3432 })
3433 .await
3434 .unwrap();
3435 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3436
3437 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3438 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3439 async move { buffer.await.unwrap() }
3440 };
3441 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3442 tree.read_with(cx, |tree, _| {
3443 tree.entry_for_path(path)
3444 .expect(&format!("no entry for path {}", path))
3445 .id
3446 })
3447 };
3448
3449 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3450 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3451 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3452 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3453
3454 let file2_id = id_for_path("a/file2", &cx);
3455 let file3_id = id_for_path("a/file3", &cx);
3456 let file4_id = id_for_path("b/c/file4", &cx);
3457
3458 // Wait for the initial scan.
3459 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3460 .await;
3461
3462 // Create a remote copy of this worktree.
3463 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
3464 let (remote, load_task) = cx.update(|cx| {
3465 Worktree::remote(
3466 1,
3467 1,
3468 initial_snapshot.to_proto(&Default::default(), Default::default()),
3469 rpc.clone(),
3470 cx,
3471 )
3472 });
3473 load_task.await;
3474
3475 cx.read(|cx| {
3476 assert!(!buffer2.read(cx).is_dirty());
3477 assert!(!buffer3.read(cx).is_dirty());
3478 assert!(!buffer4.read(cx).is_dirty());
3479 assert!(!buffer5.read(cx).is_dirty());
3480 });
3481
3482 // Rename and delete files and directories.
3483 tree.flush_fs_events(&cx).await;
3484 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3485 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3486 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3487 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3488 tree.flush_fs_events(&cx).await;
3489
3490 let expected_paths = vec![
3491 "a",
3492 "a/file1",
3493 "a/file2.new",
3494 "b",
3495 "d",
3496 "d/file3",
3497 "d/file4",
3498 ];
3499
3500 cx.read(|app| {
3501 assert_eq!(
3502 tree.read(app)
3503 .paths()
3504 .map(|p| p.to_str().unwrap())
3505 .collect::<Vec<_>>(),
3506 expected_paths
3507 );
3508
3509 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3510 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3511 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3512
3513 assert_eq!(
3514 buffer2.read(app).file().unwrap().path().as_ref(),
3515 Path::new("a/file2.new")
3516 );
3517 assert_eq!(
3518 buffer3.read(app).file().unwrap().path().as_ref(),
3519 Path::new("d/file3")
3520 );
3521 assert_eq!(
3522 buffer4.read(app).file().unwrap().path().as_ref(),
3523 Path::new("d/file4")
3524 );
3525 assert_eq!(
3526 buffer5.read(app).file().unwrap().path().as_ref(),
3527 Path::new("b/c/file5")
3528 );
3529
3530 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3531 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3532 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3533 assert!(buffer5.read(app).file().unwrap().is_deleted());
3534 });
3535
3536 // Update the remote worktree. Check that it becomes consistent with the
3537 // local worktree.
3538 remote.update(&mut cx, |remote, cx| {
3539 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
3540 &initial_snapshot,
3541 1,
3542 1,
3543 0,
3544 true,
3545 );
3546 remote
3547 .as_remote_mut()
3548 .unwrap()
3549 .snapshot
3550 .apply_remote_update(update_message)
3551 .unwrap();
3552
3553 assert_eq!(
3554 remote
3555 .paths()
3556 .map(|p| p.to_str().unwrap())
3557 .collect::<Vec<_>>(),
3558 expected_paths
3559 );
3560 });
3561 }
3562
3563 #[gpui::test]
3564 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3565 let fs = Arc::new(FakeFs::new(cx.background()));
3566 fs.insert_tree(
3567 "/the-dir",
3568 json!({
3569 "a.txt": "a-contents",
3570 "b.txt": "b-contents",
3571 }),
3572 )
3573 .await;
3574
3575 let project = Project::test(fs.clone(), &mut cx);
3576 let worktree_id = project
3577 .update(&mut cx, |p, cx| {
3578 p.find_or_create_local_worktree("/the-dir", false, cx)
3579 })
3580 .await
3581 .unwrap()
3582 .0
3583 .read_with(&cx, |tree, _| tree.id());
3584
3585 // Spawn multiple tasks to open paths, repeating some paths.
3586 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3587 (
3588 p.open_buffer((worktree_id, "a.txt"), cx),
3589 p.open_buffer((worktree_id, "b.txt"), cx),
3590 p.open_buffer((worktree_id, "a.txt"), cx),
3591 )
3592 });
3593
3594 let buffer_a_1 = buffer_a_1.await.unwrap();
3595 let buffer_a_2 = buffer_a_2.await.unwrap();
3596 let buffer_b = buffer_b.await.unwrap();
3597 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3598 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3599
3600 // There is only one buffer per path.
3601 let buffer_a_id = buffer_a_1.id();
3602 assert_eq!(buffer_a_2.id(), buffer_a_id);
3603
3604 // Open the same path again while it is still open.
3605 drop(buffer_a_1);
3606 let buffer_a_3 = project
3607 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3608 .await
3609 .unwrap();
3610
3611 // There's still only one buffer per path.
3612 assert_eq!(buffer_a_3.id(), buffer_a_id);
3613 }
3614
3615 #[gpui::test]
3616 async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
3617 use std::fs;
3618
3619 let dir = temp_tree(json!({
3620 "file1": "abc",
3621 "file2": "def",
3622 "file3": "ghi",
3623 }));
3624
3625 let project = Project::test(Arc::new(RealFs), &mut cx);
3626 let (worktree, _) = project
3627 .update(&mut cx, |p, cx| {
3628 p.find_or_create_local_worktree(dir.path(), false, cx)
3629 })
3630 .await
3631 .unwrap();
3632 let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
3633
3634 worktree.flush_fs_events(&cx).await;
3635 worktree
3636 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3637 .await;
3638
3639 let buffer1 = project
3640 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3641 .await
3642 .unwrap();
3643 let events = Rc::new(RefCell::new(Vec::new()));
3644
3645 // initially, the buffer isn't dirty.
3646 buffer1.update(&mut cx, |buffer, cx| {
3647 cx.subscribe(&buffer1, {
3648 let events = events.clone();
3649 move |_, _, event, _| events.borrow_mut().push(event.clone())
3650 })
3651 .detach();
3652
3653 assert!(!buffer.is_dirty());
3654 assert!(events.borrow().is_empty());
3655
3656 buffer.edit(vec![1..2], "", cx);
3657 });
3658
3659 // after the first edit, the buffer is dirty, and emits a dirtied event.
3660 buffer1.update(&mut cx, |buffer, cx| {
3661 assert!(buffer.text() == "ac");
3662 assert!(buffer.is_dirty());
3663 assert_eq!(
3664 *events.borrow(),
3665 &[language::Event::Edited, language::Event::Dirtied]
3666 );
3667 events.borrow_mut().clear();
3668 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
3669 });
3670
3671 // after saving, the buffer is not dirty, and emits a saved event.
3672 buffer1.update(&mut cx, |buffer, cx| {
3673 assert!(!buffer.is_dirty());
3674 assert_eq!(*events.borrow(), &[language::Event::Saved]);
3675 events.borrow_mut().clear();
3676
3677 buffer.edit(vec![1..1], "B", cx);
3678 buffer.edit(vec![2..2], "D", cx);
3679 });
3680
3681 // after editing again, the buffer is dirty, and emits another dirty event.
3682 buffer1.update(&mut cx, |buffer, cx| {
3683 assert!(buffer.text() == "aBDc");
3684 assert!(buffer.is_dirty());
3685 assert_eq!(
3686 *events.borrow(),
3687 &[
3688 language::Event::Edited,
3689 language::Event::Dirtied,
3690 language::Event::Edited,
3691 ],
3692 );
3693 events.borrow_mut().clear();
3694
3695 // TODO - currently, after restoring the buffer to its
3696 // previously-saved state, the is still considered dirty.
3697 buffer.edit([1..3], "", cx);
3698 assert!(buffer.text() == "ac");
3699 assert!(buffer.is_dirty());
3700 });
3701
3702 assert_eq!(*events.borrow(), &[language::Event::Edited]);
3703
3704 // When a file is deleted, the buffer is considered dirty.
3705 let events = Rc::new(RefCell::new(Vec::new()));
3706 let buffer2 = project
3707 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
3708 .await
3709 .unwrap();
3710 buffer2.update(&mut cx, |_, cx| {
3711 cx.subscribe(&buffer2, {
3712 let events = events.clone();
3713 move |_, _, event, _| events.borrow_mut().push(event.clone())
3714 })
3715 .detach();
3716 });
3717
3718 fs::remove_file(dir.path().join("file2")).unwrap();
3719 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
3720 assert_eq!(
3721 *events.borrow(),
3722 &[language::Event::Dirtied, language::Event::FileHandleChanged]
3723 );
3724
3725 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3726 let events = Rc::new(RefCell::new(Vec::new()));
3727 let buffer3 = project
3728 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
3729 .await
3730 .unwrap();
3731 buffer3.update(&mut cx, |_, cx| {
3732 cx.subscribe(&buffer3, {
3733 let events = events.clone();
3734 move |_, _, event, _| events.borrow_mut().push(event.clone())
3735 })
3736 .detach();
3737 });
3738
3739 worktree.flush_fs_events(&cx).await;
3740 buffer3.update(&mut cx, |buffer, cx| {
3741 buffer.edit(Some(0..0), "x", cx);
3742 });
3743 events.borrow_mut().clear();
3744 fs::remove_file(dir.path().join("file3")).unwrap();
3745 buffer3
3746 .condition(&cx, |_, _| !events.borrow().is_empty())
3747 .await;
3748 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
3749 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3750 }
3751
    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
        // A clean buffer silently reloads when its file changes on disk; a
        // dirty buffer keeps its edits and is flagged as conflicted instead.
        use std::fs;

        let initial_contents = "aaa\nbbbbb\nc\n";
        let dir = temp_tree(json!({ "the-file": initial_contents }));

        let project = Project::test(Arc::new(RealFs), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        worktree
            .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let abs_path = dir.path().join("the-file");
        let buffer = project
            .update(&mut cx, |p, cx| {
                p.open_buffer((worktree_id, "the-file"), cx)
            })
            .await
            .unwrap();

        // TODO
        // Add a cursor on each row.
        // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
        //     assert!(!buffer.is_dirty());
        //     buffer.add_selection_set(
        //         &(0..3)
        //             .map(|row| Selection {
        //                 id: row as usize,
        //                 start: Point::new(row, 1),
        //                 end: Point::new(row, 1),
        //                 reversed: false,
        //                 goal: SelectionGoal::None,
        //             })
        //             .collect::<Vec<_>>(),
        //         cx,
        //     )
        // });

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(&cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(&mut cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            // Reloading from disk leaves the buffer clean and conflict-free.
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer
        buffer.update(&mut cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }
3851
    #[gpui::test]
    async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
        // LSP diagnostics linked via `related_information` should be grouped:
        // each primary diagnostic and its supplemental hints share a group_id,
        // and `diagnostic_group` returns exactly the members of one group.
        let fs = Arc::new(FakeFs::new(cx.background()));
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        // Five diagnostics forming two groups: "error 1" with one hint, and
        // "error 2" with two hints. Hints point back at their primary via
        // related_information containing "original diagnostic".
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(&mut cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());

        // All diagnostics, ordered by position: group 0 is "error 1" plus its
        // hint; group 1 is "error 2" plus its two hints. Exactly one entry per
        // group has is_primary set.
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        // Querying a single group returns just that group's members, in order.
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
4110}