1pub mod fs;
2mod ignore;
3pub mod worktree;
4
5use anyhow::{anyhow, Result};
6use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
7use clock::ReplicaId;
8use collections::{hash_map, HashMap, HashSet};
9use futures::Future;
10use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
11use gpui::{
12 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
13 UpgradeModelHandle, WeakModelHandle,
14};
15use language::{
16 point_from_lsp,
17 proto::{deserialize_anchor, serialize_anchor},
18 range_from_lsp, AnchorRangeExt, Bias, Buffer, CodeAction, Completion, CompletionLabel,
19 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
20 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
21};
22use lsp::{DiagnosticSeverity, LanguageServer};
23use postage::{broadcast, prelude::Stream, sink::Sink, watch};
24use smol::block_on;
25use std::{
26 convert::TryInto,
27 ops::Range,
28 path::{Path, PathBuf},
29 sync::{atomic::AtomicBool, Arc},
30 time::Instant,
31};
32use util::{post_inc, ResultExt, TryFutureExt as _};
33
34pub use fs::*;
35pub use worktree::*;
36
/// Root model for a collection of worktrees and their open buffers, either
/// hosted locally in this process or joined as a remote replica over RPC.
pub struct Project {
    // Worktree handles may be strong or weak (see `WorktreeHandle`), so a
    // worktree can be dropped once nothing else references it.
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntry>,
    languages: Arc<LanguageRegistry>,
    // One language server instance per (worktree, language-name) pair.
    language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
    client: Arc<client::Client>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    // Running count of language servers currently performing a disk-based
    // diagnostics pass. Signed — presumably to tolerate transient
    // underflow from out-of-order start/finish events; confirm.
    language_servers_with_diagnostics_running: isize,
    // Keyed by buffer remote id: either a live (weak) buffer handle or
    // operations buffered for a buffer that hasn't finished opening yet.
    open_buffers: HashMap<usize, OpenBuffer>,
    // Notifies listeners each time a buffer finishes opening.
    opened_buffer: broadcast::Sender<()>,
    // In-flight buffer loads, so concurrent opens of the same path share a
    // single load task (see `open_buffer`).
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // Buffers handed out to each remote peer, held strongly on their behalf.
    shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
}
57
/// State of one slot in `Project::open_buffers`.
enum OpenBuffer {
    /// The buffer is open; held weakly so it can be dropped elsewhere.
    Loaded(WeakModelHandle<Buffer>),
    /// Operations that arrived before the buffer finished loading; they are
    /// applied when the buffer is registered (see `register_buffer`).
    Operations(Vec<Operation>),
}
62
/// A worktree reference that either keeps the worktree alive (`Strong`) or
/// allows it to be dropped when unused elsewhere (`Weak`).
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
67
/// Role-specific state: a project is either hosted locally (and possibly
/// shared with collaborators) or a remote replica of another peer's project.
enum ProjectClientState {
    Local {
        /// Whether `share` has been called (and not undone by `unshare`).
        is_shared: bool,
        /// Publishes the server-assigned project id; `None` while offline.
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        /// Background task that re-registers the project whenever the RPC
        /// connection status changes; kept alive by ownership here.
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        /// When true the project is treated as read-only (see `is_read_only`).
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
    },
}
81
/// Another participant connected to this project, identified by their peer
/// connection and the replica id their edits carry.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
88
/// Notifications emitted by the project for observers.
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntry>),
    WorktreeRemoved(WorktreeId),
    /// A language server began a disk-based diagnostics pass.
    DiskBasedDiagnosticsStarted,
    /// New disk-based diagnostics were received mid-pass.
    DiskBasedDiagnosticsUpdated,
    /// All running disk-based diagnostics passes have finished.
    DiskBasedDiagnosticsFinished,
    /// Diagnostics changed for the given path.
    DiagnosticsUpdated(ProjectPath),
}
98
/// Identifies a file across the whole project: a worktree id plus a path
/// relative to that worktree's root.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}
104
/// Counts of primary diagnostics, bucketed by severity.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}
112
/// The target of a definition lookup: a range within some buffer.
#[derive(Debug)]
pub struct Definition {
    pub target_buffer: ModelHandle<Buffer>,
    pub target_range: Range<language::Anchor>,
}
118
/// Per-buffer transactions produced by one project-wide operation, keyed by
/// the buffer each transaction applies to.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
121
122impl DiagnosticSummary {
123 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
124 let mut this = Self {
125 error_count: 0,
126 warning_count: 0,
127 info_count: 0,
128 hint_count: 0,
129 };
130
131 for entry in diagnostics {
132 if entry.diagnostic.is_primary {
133 match entry.diagnostic.severity {
134 DiagnosticSeverity::ERROR => this.error_count += 1,
135 DiagnosticSeverity::WARNING => this.warning_count += 1,
136 DiagnosticSeverity::INFORMATION => this.info_count += 1,
137 DiagnosticSeverity::HINT => this.hint_count += 1,
138 _ => {}
139 }
140 }
141 }
142
143 this
144 }
145
146 pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
147 proto::DiagnosticSummary {
148 path: path.to_string_lossy().to_string(),
149 error_count: self.error_count as u32,
150 warning_count: self.warning_count as u32,
151 info_count: self.info_count as u32,
152 hint_count: self.hint_count as u32,
153 }
154 }
155}
156
/// Identifies a single entry within a specific worktree.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ProjectEntry {
    pub worktree_id: WorktreeId,
    pub entry_id: usize,
}
162
163impl Project {
    /// Creates a project hosted in this process.
    ///
    /// Spawns a background task that watches the RPC connection status:
    /// whenever the client is connected, it registers the project with the
    /// server to obtain a remote id (re-registering existing worktrees under
    /// it), and clears the id while disconnected.
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.recv().await {
                            // Skip the update if the project was dropped.
                            if let Some(this) = this.upgrade(&cx) {
                                let remote_id = if let client::Status::Connected { .. } = status {
                                    let response = rpc.request(proto::RegisterProject {}).await?;
                                    Some(response.project_id)
                                } else {
                                    None
                                };

                                if let Some(project_id) = remote_id {
                                    // Register every current worktree under the
                                    // new project id before publishing it below.
                                    let mut registrations = Vec::new();
                                    this.update(&mut cx, |this, cx| {
                                        for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                                            registrations.push(worktree.update(
                                                cx,
                                                |worktree, cx| {
                                                    let worktree = worktree.as_local_mut().unwrap();
                                                    worktree.register(project_id, cx)
                                                },
                                            ));
                                        }
                                    });
                                    for registration in registrations {
                                        registration.await?;
                                    }
                                }
                                this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
                            }
                        }
                        Ok(())
                    }
                    // Errors from registration are logged, not propagated.
                    .log_err()
                }
            });

            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                open_buffers: Default::default(),
                loading_buffers: Default::default(),
                shared_buffers: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: broadcast::channel(1).0,
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
            }
        })
    }
237
    /// Joins an existing project hosted by another peer.
    ///
    /// Authenticates and connects the client, requests to join `remote_id`,
    /// builds remote replicas of the host's worktrees, loads collaborator
    /// user records, and wires up the RPC handlers that keep the replica in
    /// sync with the host.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(&cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let replica_id = response.replica_id as ReplicaId;

        // Create a remote replica for each of the host's worktrees; their
        // contents continue loading in detached background tasks.
        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        // Fetch user records for all collaborators up front so each
        // `Collaborator::from_proto` below can resolve its user.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        Ok(cx.add_model(|cx| {
            let mut this = Self {
                worktrees: Vec::new(),
                open_buffers: Default::default(),
                loading_buffers: Default::default(),
                opened_buffer: broadcast::channel(1).0,
                shared_buffers: Default::default(),
                active_entry: None,
                collaborators,
                languages,
                user_store,
                fs,
                // Handlers are dropped (unsubscribed) with the project.
                subscriptions: vec![
                    client.add_entity_message_handler(remote_id, cx, Self::handle_unshare_project),
                    client.add_entity_message_handler(remote_id, cx, Self::handle_add_collaborator),
                    client.add_entity_message_handler(
                        remote_id,
                        cx,
                        Self::handle_remove_collaborator,
                    ),
                    client.add_entity_message_handler(remote_id, cx, Self::handle_share_worktree),
                    client.add_entity_message_handler(
                        remote_id,
                        cx,
                        Self::handle_unregister_worktree,
                    ),
                    client.add_entity_message_handler(remote_id, cx, Self::handle_update_worktree),
                    client.add_entity_message_handler(
                        remote_id,
                        cx,
                        Self::handle_update_diagnostic_summary,
                    ),
                    client.add_entity_message_handler(
                        remote_id,
                        cx,
                        Self::handle_disk_based_diagnostics_updating,
                    ),
                    client.add_entity_message_handler(
                        remote_id,
                        cx,
                        Self::handle_disk_based_diagnostics_updated,
                    ),
                    client.add_entity_message_handler(remote_id, cx, Self::handle_update_buffer),
                    client.add_entity_message_handler(
                        remote_id,
                        cx,
                        Self::handle_update_buffer_file,
                    ),
                    client.add_entity_message_handler(remote_id, cx, Self::handle_buffer_reloaded),
                    client.add_entity_message_handler(remote_id, cx, Self::handle_buffer_saved),
                ],
                client,
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        }))
    }
344
345 #[cfg(any(test, feature = "test-support"))]
346 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
347 let languages = Arc::new(LanguageRegistry::new());
348 let http_client = client::test::FakeHttpClient::with_404_response();
349 let client = client::Client::new(http_client.clone());
350 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
351 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
352 }
353
    /// Publishes the (possibly cleared) server-assigned project id and
    /// re-registers all RPC request/message handlers under the new id.
    fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            *remote_id_tx.borrow_mut() = remote_id;
        }

        // Dropping the old subscriptions unregisters handlers bound to the
        // previous id before new ones are installed.
        self.subscriptions.clear();
        if let Some(remote_id) = remote_id {
            let client = &self.client;
            self.subscriptions.extend([
                client.add_entity_request_handler(remote_id, cx, Self::handle_open_buffer),
                client.add_entity_message_handler(remote_id, cx, Self::handle_close_buffer),
                client.add_entity_message_handler(remote_id, cx, Self::handle_add_collaborator),
                client.add_entity_message_handler(remote_id, cx, Self::handle_remove_collaborator),
                client.add_entity_message_handler(remote_id, cx, Self::handle_update_worktree),
                client.add_entity_message_handler(remote_id, cx, Self::handle_update_buffer),
                client.add_entity_request_handler(remote_id, cx, Self::handle_save_buffer),
                client.add_entity_message_handler(remote_id, cx, Self::handle_buffer_saved),
                client.add_entity_request_handler(remote_id, cx, Self::handle_format_buffers),
                client.add_entity_request_handler(remote_id, cx, Self::handle_get_completions),
                client.add_entity_request_handler(
                    remote_id,
                    cx,
                    Self::handle_apply_additional_edits_for_completion,
                ),
                client.add_entity_request_handler(remote_id, cx, Self::handle_get_code_actions),
                client.add_entity_request_handler(remote_id, cx, Self::handle_apply_code_action),
                client.add_entity_request_handler(remote_id, cx, Self::handle_get_definition),
            ]);
        }
    }
384
385 pub fn remote_id(&self) -> Option<u64> {
386 match &self.client_state {
387 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
388 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
389 }
390 }
391
392 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
393 let mut id = None;
394 let mut watch = None;
395 match &self.client_state {
396 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
397 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
398 }
399
400 async move {
401 if let Some(id) = id {
402 return id;
403 }
404 let mut watch = watch.unwrap();
405 loop {
406 let id = *watch.borrow();
407 if let Some(id) = id {
408 return id;
409 }
410 watch.recv().await;
411 }
412 }
413 }
414
415 pub fn replica_id(&self) -> ReplicaId {
416 match &self.client_state {
417 ProjectClientState::Local { .. } => 0,
418 ProjectClientState::Remote { replica_id, .. } => *replica_id,
419 }
420 }
421
    /// The other participants connected to this project, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
425
426 pub fn worktrees<'a>(
427 &'a self,
428 cx: &'a AppContext,
429 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
430 self.worktrees
431 .iter()
432 .filter_map(move |worktree| worktree.upgrade(cx))
433 }
434
435 pub fn worktree_for_id(
436 &self,
437 id: WorktreeId,
438 cx: &AppContext,
439 ) -> Option<ModelHandle<Worktree>> {
440 self.worktrees(cx)
441 .find(|worktree| worktree.read(cx).id() == id)
442 }
443
    /// Makes this local project available to collaborators.
    ///
    /// Fails if the project has no remote id yet (not registered with the
    /// server) or if called on a remote project. After the server accepts
    /// the share request, every local worktree is shared under the project
    /// id, and observers are notified.
    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, _| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    // Marked shared before the RPC round-trip completes.
                    *is_shared = true;
                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;
            // Start sharing every worktree, then await all of the tasks.
            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
480
481 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
482 let rpc = self.client.clone();
483 cx.spawn(|this, mut cx| async move {
484 let project_id = this.update(&mut cx, |this, _| {
485 if let ProjectClientState::Local {
486 is_shared,
487 remote_id_rx,
488 ..
489 } = &mut this.client_state
490 {
491 *is_shared = false;
492 remote_id_rx
493 .borrow()
494 .ok_or_else(|| anyhow!("no project id"))
495 } else {
496 Err(anyhow!("can't share a remote project"))
497 }
498 })?;
499
500 rpc.send(proto::UnshareProject { project_id })?;
501 this.update(&mut cx, |this, cx| {
502 this.collaborators.clear();
503 this.shared_buffers.clear();
504 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
505 worktree.update(cx, |worktree, _| {
506 worktree.as_local_mut().unwrap().unshare();
507 });
508 }
509 cx.notify()
510 });
511 Ok(())
512 })
513 }
514
515 pub fn is_read_only(&self) -> bool {
516 match &self.client_state {
517 ProjectClientState::Local { .. } => false,
518 ProjectClientState::Remote {
519 sharing_has_stopped,
520 ..
521 } => *sharing_has_stopped,
522 }
523 }
524
525 pub fn is_local(&self) -> bool {
526 match &self.client_state {
527 ProjectClientState::Local { .. } => true,
528 ProjectClientState::Remote { .. } => false,
529 }
530 }
531
    /// Opens the buffer for `path`, deduplicating concurrent and repeated
    /// opens: an already-open buffer is returned immediately, and if a load
    /// for the same path is in flight the caller waits on that load instead
    /// of starting a second one.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                // Local worktrees load from disk; remote ones fetch over RPC.
                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    // Publish the result (success or error) to all waiters.
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait until the watch yields a result, then clone it out.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        // Errors are shared via Arc; re-render as a fresh error.
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.recv().await;
            }
        })
    }
592
    /// Loads `path` from a local worktree, then registers the new buffer with
    /// the project (assigning language, server, and stored diagnostics).
    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        // Hold the worktree weakly across the await so loading doesn't keep
        // it alive; fail if it was dropped meanwhile.
        let worktree = worktree.downgrade();
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            let worktree = worktree
                .upgrade(&cx)
                .ok_or_else(|| anyhow!("worktree was removed"))?;
            this.update(&mut cx, |this, cx| {
                this.register_buffer(&buffer, Some(&worktree), cx)
            })?;
            Ok(buffer)
        })
    }
615
    /// Asks the host to open `path` in the given remote worktree, then builds
    /// a local buffer replica from the serialized state in the response.
    ///
    /// Panics (via `unwrap`) if the project has no remote id yet.
    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBuffer {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }
640
    /// Opens a buffer for a file `Url` provided by a language server.
    ///
    /// If the path falls inside an existing local worktree, the buffer is
    /// opened through it; otherwise a new local worktree rooted at the path
    /// is created, and the given language server is recorded for that
    /// worktree so the same server instance is reused.
    fn open_local_buffer_from_lsp_path(
        &mut self,
        abs_path: lsp::Url,
        lang_name: String,
        lang_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                // NOTE(review): the meaning of the `true` flag isn't visible
                // here — presumably it marks the worktree as transient/weak;
                // confirm against `create_local_worktree`.
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, true, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers
                        .insert((worktree.read(cx).id(), lang_name), lang_server);
                });
                // The new worktree is rooted at `abs_path` itself, so the
                // path relative to it is empty.
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
677
    /// Saves `buffer`'s contents to `abs_path`, finding or creating a local
    /// worktree containing that path, then re-detects the buffer's language
    /// (and server/diagnostics) based on its new file name.
    pub fn save_buffer_as(
        &self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
            });
            Ok(())
        })
    }
701
702 #[cfg(any(test, feature = "test-support"))]
703 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
704 let path = path.into();
705 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
706 self.open_buffers.iter().any(|(_, buffer)| {
707 if let Some(buffer) = buffer.upgrade(cx) {
708 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
709 if file.worktree == worktree && file.path() == &path.path {
710 return true;
711 }
712 }
713 }
714 false
715 })
716 } else {
717 false
718 }
719 }
720
    /// Returns the already-open buffer for `path`, if one exists.
    ///
    /// Also opportunistically prunes `open_buffers` while scanning: entries
    /// whose weak handles can no longer be upgraded are removed.
    fn get_open_buffer(
        &mut self,
        path: &ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Option<ModelHandle<Buffer>> {
        let mut result = None;
        let worktree = self.worktree_for_id(path.worktree_id, cx)?;
        self.open_buffers.retain(|_, buffer| {
            if let Some(buffer) = buffer.upgrade(cx) {
                if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                    if file.worktree == worktree && file.path() == &path.path {
                        result = Some(buffer);
                    }
                }
                true
            } else {
                // The buffer was dropped elsewhere; discard its slot.
                false
            }
        });
        result
    }
742
    /// Records a freshly-loaded buffer in `open_buffers` under its remote id.
    ///
    /// If operations arrived for this buffer before it finished loading
    /// (`OpenBuffer::Operations`), they are applied now. Afterwards the
    /// buffer's language, language server, and stored diagnostics are
    /// assigned.
    ///
    /// # Errors
    /// Fails if the same buffer id was already registered as loaded, or if
    /// applying the buffered operations fails.
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        match self.open_buffers.insert(
            buffer.read(cx).remote_id() as usize,
            OpenBuffer::Loaded(buffer.downgrade()),
        ) {
            None => {}
            Some(OpenBuffer::Operations(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            Some(OpenBuffer::Loaded(_)) => Err(anyhow!("registered the same buffer twice"))?,
        }
        self.assign_language_to_buffer(&buffer, worktree, cx);
        Ok(())
    }
762
    /// Detects the buffer's language from its full path and applies it; for
    /// local worktrees also wires up a language server (starting one lazily
    /// per worktree/language pair) and seeds the buffer with any diagnostics
    /// already stored for its path.
    ///
    /// Returns `None` when the buffer has no file (early `?`); the return
    /// value is otherwise unused by callers.
    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        let (path, full_path) = {
            let file = buffer.read(cx).file()?;
            (file.path().clone(), file.full_path(cx))
        };

        // If the buffer has a language, set it and start/assign the language server
        if let Some(language) = self.languages.select_language(&full_path) {
            buffer.update(cx, |buffer, cx| {
                buffer.set_language(Some(language.clone()), cx);
            });

            // For local worktrees, start a language server if needed.
            // Also assign the language server and any previously stored diagnostics to the buffer.
            if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
                let worktree_id = local_worktree.id();
                let worktree_abs_path = local_worktree.abs_path().clone();

                // One server per (worktree, language name), started lazily.
                let language_server = match self
                    .language_servers
                    .entry((worktree_id, language.name().to_string()))
                {
                    hash_map::Entry::Occupied(e) => Some(e.get().clone()),
                    hash_map::Entry::Vacant(e) => Self::start_language_server(
                        self.client.clone(),
                        language.clone(),
                        &worktree_abs_path,
                        cx,
                    )
                    .map(|server| e.insert(server).clone()),
                };

                buffer.update(cx, |buffer, cx| {
                    buffer.set_language_server(language_server, cx);
                });
            }
        }

        // Seed stored diagnostics even when no language was detected above.
        if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
            if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
                buffer.update(cx, |buffer, cx| {
                    buffer.update_diagnostics(diagnostics, None, cx).log_err();
                });
            }
        }

        None
    }
816
    /// Starts a language server for `language` rooted at `worktree_path` and
    /// spawns the plumbing that turns its diagnostic notifications into
    /// project state updates and collaboration messages.
    ///
    /// Returns `None` if the server fails to start (the error is logged).
    fn start_language_server(
        rpc: Arc<Client>,
        language: Arc<Language>,
        worktree_path: &Path,
        cx: &mut ModelContext<Self>,
    ) -> Option<Arc<LanguageServer>> {
        // Internal event type decoupling the LSP notification callbacks from
        // the model-update task below.
        enum LspEvent {
            DiagnosticsStart,
            DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
            DiagnosticsFinish,
        }

        let language_server = language
            .start_server(worktree_path, cx)
            .log_err()
            .flatten()?;
        let disk_based_sources = language
            .disk_based_diagnostic_sources()
            .cloned()
            .unwrap_or_default();
        let disk_based_diagnostics_progress_token =
            language.disk_based_diagnostics_progress_token().cloned();
        let has_disk_based_diagnostic_progress_token =
            disk_based_diagnostics_progress_token.is_some();
        let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();

        // Listen for `PublishDiagnostics` notifications.
        language_server
            .on_notification::<lsp::notification::PublishDiagnostics, _>({
                let diagnostics_tx = diagnostics_tx.clone();
                move |params| {
                    // Without a progress token, each publish is wrapped in
                    // its own start/finish pair.
                    if !has_disk_based_diagnostic_progress_token {
                        block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                    }
                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params))).ok();
                    if !has_disk_based_diagnostic_progress_token {
                        block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                    }
                }
            })
            .detach();

        // Listen for `Progress` notifications. Send an event when the language server
        // transitions between running jobs and not running any jobs.
        let mut running_jobs_for_this_server: i32 = 0;
        language_server
            .on_notification::<lsp::notification::Progress, _>(move |params| {
                let token = match params.token {
                    lsp::NumberOrString::Number(_) => None,
                    lsp::NumberOrString::String(token) => Some(token),
                };

                // Only the language's designated disk-based token is tracked.
                if token == disk_based_diagnostics_progress_token {
                    match params.value {
                        lsp::ProgressParamsValue::WorkDone(progress) => match progress {
                            lsp::WorkDoneProgress::Begin(_) => {
                                running_jobs_for_this_server += 1;
                                if running_jobs_for_this_server == 1 {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                                }
                            }
                            lsp::WorkDoneProgress::End(_) => {
                                running_jobs_for_this_server -= 1;
                                if running_jobs_for_this_server == 0 {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                                }
                            }
                            _ => {}
                        },
                    }
                }
            })
            .detach();

        // Process all the LSP events.
        cx.spawn_weak(|this, mut cx| async move {
            while let Ok(message) = diagnostics_rx.recv().await {
                // Stop once the project has been dropped.
                let this = this.upgrade(&cx)?;
                match message {
                    LspEvent::DiagnosticsStart => {
                        this.update(&mut cx, |this, cx| {
                            this.disk_based_diagnostics_started(cx);
                            // Mirror the transition to collaborators.
                            if let Some(project_id) = this.remote_id() {
                                rpc.send(proto::DiskBasedDiagnosticsUpdating { project_id })
                                    .log_err();
                            }
                        });
                    }
                    LspEvent::DiagnosticsUpdate(mut params) => {
                        // Let the language rewrite diagnostics before storage.
                        language.process_diagnostics(&mut params);
                        this.update(&mut cx, |this, cx| {
                            this.update_diagnostics(params, &disk_based_sources, cx)
                                .log_err();
                        });
                    }
                    LspEvent::DiagnosticsFinish => {
                        this.update(&mut cx, |this, cx| {
                            this.disk_based_diagnostics_finished(cx);
                            if let Some(project_id) = this.remote_id() {
                                rpc.send(proto::DiskBasedDiagnosticsUpdated { project_id })
                                    .log_err();
                            }
                        });
                    }
                }
            }
            Some(())
        })
        .detach();

        Some(language_server)
    }
929
930 pub fn update_diagnostics(
931 &mut self,
932 params: lsp::PublishDiagnosticsParams,
933 disk_based_sources: &HashSet<String>,
934 cx: &mut ModelContext<Self>,
935 ) -> Result<()> {
936 let abs_path = params
937 .uri
938 .to_file_path()
939 .map_err(|_| anyhow!("URI is not a file"))?;
940 let mut next_group_id = 0;
941 let mut diagnostics = Vec::default();
942 let mut primary_diagnostic_group_ids = HashMap::default();
943 let mut sources_by_group_id = HashMap::default();
944 let mut supporting_diagnostic_severities = HashMap::default();
945 for diagnostic in ¶ms.diagnostics {
946 let source = diagnostic.source.as_ref();
947 let code = diagnostic.code.as_ref().map(|code| match code {
948 lsp::NumberOrString::Number(code) => code.to_string(),
949 lsp::NumberOrString::String(code) => code.clone(),
950 });
951 let range = range_from_lsp(diagnostic.range);
952 let is_supporting = diagnostic
953 .related_information
954 .as_ref()
955 .map_or(false, |infos| {
956 infos.iter().any(|info| {
957 primary_diagnostic_group_ids.contains_key(&(
958 source,
959 code.clone(),
960 range_from_lsp(info.location.range),
961 ))
962 })
963 });
964
965 if is_supporting {
966 if let Some(severity) = diagnostic.severity {
967 supporting_diagnostic_severities
968 .insert((source, code.clone(), range), severity);
969 }
970 } else {
971 let group_id = post_inc(&mut next_group_id);
972 let is_disk_based =
973 source.map_or(false, |source| disk_based_sources.contains(source));
974
975 sources_by_group_id.insert(group_id, source);
976 primary_diagnostic_group_ids
977 .insert((source, code.clone(), range.clone()), group_id);
978
979 diagnostics.push(DiagnosticEntry {
980 range,
981 diagnostic: Diagnostic {
982 code: code.clone(),
983 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
984 message: diagnostic.message.clone(),
985 group_id,
986 is_primary: true,
987 is_valid: true,
988 is_disk_based,
989 },
990 });
991 if let Some(infos) = &diagnostic.related_information {
992 for info in infos {
993 if info.location.uri == params.uri && !info.message.is_empty() {
994 let range = range_from_lsp(info.location.range);
995 diagnostics.push(DiagnosticEntry {
996 range,
997 diagnostic: Diagnostic {
998 code: code.clone(),
999 severity: DiagnosticSeverity::INFORMATION,
1000 message: info.message.clone(),
1001 group_id,
1002 is_primary: false,
1003 is_valid: true,
1004 is_disk_based,
1005 },
1006 });
1007 }
1008 }
1009 }
1010 }
1011 }
1012
1013 for entry in &mut diagnostics {
1014 let diagnostic = &mut entry.diagnostic;
1015 if !diagnostic.is_primary {
1016 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1017 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1018 source,
1019 diagnostic.code.clone(),
1020 entry.range.clone(),
1021 )) {
1022 diagnostic.severity = severity;
1023 }
1024 }
1025 }
1026
1027 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1028 Ok(())
1029 }
1030
    /// Applies the given diagnostics for `abs_path` to the matching open
    /// buffer (if any) and to the owning local worktree, then emits
    /// `Event::DiagnosticsUpdated`.
    ///
    /// # Errors
    /// Fails when no local worktree contains `abs_path`, when the buffer or
    /// worktree rejects the update, or when the worktree is not local.
    pub fn update_diagnostic_entries(
        &mut self,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };

        // Update at most one matching open buffer (hence the `break`).
        // NOTE(review): the comparison checks only the relative path, not the
        // worktree id — a same-named file in another worktree could match
        // first. Confirm whether that's intended.
        for buffer in self.open_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| *file.path() == project_path.path)
                {
                    buffer.update(cx, |buffer, cx| {
                        buffer.update_diagnostics(diagnostics.clone(), version, cx)
                    })?;
                    break;
                }
            }
        }
        // Persist the diagnostics on the worktree so buffers opened later
        // can be seeded (see `assign_language_to_buffer`).
        worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(project_path.path.clone(), diagnostics, cx)
        })?;
        cx.emit(Event::DiagnosticsUpdated(project_path));
        Ok(())
    }
1069
    /// Formats each buffer in `buffers`, collecting all resulting edits into a
    /// single `ProjectTransaction`.
    ///
    /// Buffers backed by a local file are formatted by sending an LSP
    /// `textDocument/formatting` request to their language server; buffers
    /// without a local path are batched into one `FormatBuffers` RPC to the
    /// remote host. When `push_to_history` is false, each buffer's formatting
    /// transaction is forgotten so it does not enter the undo history.
    pub fn format(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<ProjectTransaction>> {
        // Partition the buffers into locally-formattable ones and ones that
        // must be forwarded to the remote host.
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            let worktree;
            if let Some(file) = File::from_dyn(buffer.file()) {
                worktree = file.worktree.clone();
                if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
                    let lang_server;
                    if let Some(lang) = buffer.language() {
                        if let Some(server) = self
                            .language_servers
                            .get(&(worktree.read(cx).id(), lang.name().to_string()))
                        {
                            lang_server = server.clone();
                        } else {
                            // No language server for this language: give up on
                            // the whole request with an empty transaction.
                            return Task::ready(Ok(Default::default()));
                        };
                    } else {
                        // Buffer has no language assigned.
                        return Task::ready(Ok(Default::default()));
                    }

                    local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
                } else {
                    // No local path: the buffer lives on the remote host.
                    remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                }
            } else {
                // Untitled buffers (no file) can't be formatted.
                return Task::ready(Ok(Default::default()));
            }
        }

        // Remote formatting is only possible when this project has a remote id
        // to address the request to; otherwise remote buffers are dropped.
        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            // First ask the host to format the remote buffers, then replay the
            // serialized transaction it returns into our local replicas.
            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::FormatBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            // Then format each local buffer through its language server.
            for (buffer, buffer_abs_path, lang_server) in local_buffers {
                let lsp_edits = lang_server
                    .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
                        text_document: lsp::TextDocumentIdentifier::new(
                            lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                        ),
                        options: Default::default(),
                        work_done_progress_params: Default::default(),
                    })
                    .await?;

                if let Some(lsp_edits) = lsp_edits {
                    let edits = buffer
                        .update(&mut cx, |buffer, cx| {
                            buffer.edits_from_lsp(lsp_edits, None, cx)
                        })
                        .await?;
                    buffer.update(&mut cx, |buffer, cx| {
                        // Group all formatting edits into a single transaction,
                        // kept separate from any transaction already in flight.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            project_transaction.0.insert(cx.handle(), transaction);
                        }
                    });
                }
            }

            Ok(project_transaction)
        })
    }
1169
    /// Resolves go-to-definition for `position` in the given source buffer.
    ///
    /// For buffers in a local worktree, the language server is queried with a
    /// `textDocument/definition` request and each returned location is opened
    /// as a buffer. For remote projects, the request is forwarded to the host
    /// via `GetDefinition`. Returns an empty list when no language server or
    /// remote id is available.
    pub fn definition<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Definition>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // Untitled buffers have no file, hence nothing to look up.
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);

        if worktree.read(cx).as_local().is_some() {
            // A file in a local worktree is assumed to have a local path;
            // the unwrap relies on that invariant.
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_name;
            let lang_server;
            if let Some(lang) = source_buffer.language() {
                lang_name = lang.name().to_string();
                if let Some(server) = self
                    .language_servers
                    .get(&(worktree.read(cx).id(), lang_name.clone()))
                {
                    lang_server = server.clone();
                } else {
                    // No running server for this language.
                    return Task::ready(Ok(Default::default()));
                };
            } else {
                return Task::ready(Ok(Default::default()));
            }

            cx.spawn(|this, mut cx| async move {
                let response = lang_server
                    .request::<lsp::request::GotoDefinition>(lsp::GotoDefinitionParams {
                        text_document_position_params: lsp::TextDocumentPositionParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                            ),
                            position: lsp::Position::new(position.row, position.column),
                        },
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await?;

                let mut definitions = Vec::new();
                if let Some(response) = response {
                    // Normalize the three LSP response shapes into plain
                    // (uri, range) pairs before resolving them to buffers.
                    let mut unresolved_locations = Vec::new();
                    match response {
                        lsp::GotoDefinitionResponse::Scalar(loc) => {
                            unresolved_locations.push((loc.uri, loc.range));
                        }
                        lsp::GotoDefinitionResponse::Array(locs) => {
                            unresolved_locations.extend(locs.into_iter().map(|l| (l.uri, l.range)));
                        }
                        lsp::GotoDefinitionResponse::Link(links) => {
                            unresolved_locations.extend(
                                links
                                    .into_iter()
                                    .map(|l| (l.target_uri, l.target_selection_range)),
                            );
                        }
                    }

                    for (target_uri, target_range) in unresolved_locations {
                        // Open (or reuse) a buffer for the target file so the
                        // definition can be expressed with buffer anchors.
                        let target_buffer_handle = this
                            .update(&mut cx, |this, cx| {
                                this.open_local_buffer_from_lsp_path(
                                    target_uri,
                                    lang_name.clone(),
                                    lang_server.clone(),
                                    cx,
                                )
                            })
                            .await?;

                        cx.read(|cx| {
                            // Clip the LSP range to valid positions in the
                            // target buffer before anchoring it.
                            let target_buffer = target_buffer_handle.read(cx);
                            let target_start = target_buffer
                                .clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
                            let target_end = target_buffer
                                .clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
                            definitions.push(Definition {
                                target_buffer: target_buffer_handle,
                                target_range: target_buffer.anchor_after(target_start)
                                    ..target_buffer.anchor_before(target_end),
                            });
                        });
                    }
                }

                Ok(definitions)
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: forward the request to the host and deserialize
            // the definitions (including their buffers) from the response.
            let client = self.client.clone();
            let request = proto::GetDefinition {
                project_id,
                buffer_id: source_buffer.remote_id(),
                position: Some(serialize_anchor(&source_buffer.anchor_before(position))),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client.request(request).await?;
                let mut definitions = Vec::new();
                for definition in response.definitions {
                    let buffer = definition.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let target_start = definition
                        .target_start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let target_end = definition
                        .target_end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    definitions.push(Definition {
                        target_buffer,
                        target_range: target_start..target_end,
                    })
                }

                Ok(definitions)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
1304
    /// Requests code completions at `position` in the given buffer.
    ///
    /// Local buffers query their language server directly; remote projects
    /// forward a `GetCompletions` RPC to the host. Completions whose text edit
    /// lies outside the buffer's current extent are discarded. Returns an
    /// empty list when no server or remote id is available.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // Untitled buffers can't provide completions.
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        // Anchor the position so the remote request stays valid across
        // concurrent edits.
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            // A file in a local worktree is assumed to have a local path.
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
                server
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            position.to_lsp_position(),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await?;

                // The LSP response is either a bare array or a (possibly
                // incomplete) list; we only care about the items.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    // Convert each LSP completion into an anchored `Completion`,
                    // dropping entries whose edit range no longer fits the
                    // buffer (i.e. the clipped range differs from the raw one).
                    Ok(completions.into_iter().filter_map(|lsp_completion| {
                        let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
                            lsp::CompletionTextEdit::Edit(edit) => (range_from_lsp(edit.range), edit.new_text.clone()),
                            lsp::CompletionTextEdit::InsertAndReplace(_) => {
                                log::info!("received an insert and replace completion but we don't yet support that");
                                return None
                            },
                        };

                        let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                        let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left) ;
                        if clipped_start == old_range.start && clipped_end == old_range.end {
                            Some(Completion {
                                old_range: this.anchor_before(old_range.start)..this.anchor_after(old_range.end),
                                new_text,
                                label: language.as_ref().and_then(|l| l.label_for_completion(&lsp_completion)).unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
                                lsp_completion,
                            })
                        } else {
                            None
                        }
                    }).collect())
                })

            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: ask the host and deserialize its completions.
            let rpc = self.client.clone();
            cx.foreground().spawn(async move {
                let response = rpc
                    .request(proto::GetCompletions {
                        project_id,
                        buffer_id,
                        position: Some(language::proto::serialize_anchor(&anchor)),
                    })
                    .await?;
                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
1407
    /// Applies the additional (non-primary) text edits attached to a
    /// completion, e.g. auto-inserted import statements.
    ///
    /// Locally the completion item is first resolved via
    /// `completionItem/resolve`; remotely an `ApplyCompletionAdditionalEdits`
    /// RPC is sent to the host. Returns the resulting transaction, or `None`
    /// when the completion carries no additional edits. When `push_to_history`
    /// is false the transaction is kept out of the buffer's undo history.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let lang_server = if let Some(language_server) = buffer.language_server() {
                language_server.clone()
            } else {
                return Task::ready(Err(anyhow!("buffer does not have a language server")));
            };

            cx.spawn(|_, mut cx| async move {
                // Resolving may populate `additional_text_edits` that the
                // original completion response omitted for performance.
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = buffer_handle
                        .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        // Apply all edits as one transaction, separate from
                        // any transaction already in progress.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Wait until the host's edit operations have been replayed
                    // into this replica before touching the history.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
1486
    /// Requests the code actions available for `range` in the given buffer.
    ///
    /// Local buffers send a `textDocument/codeAction` request (restricted to
    /// quickfix and refactoring kinds) to their language server; remote
    /// projects forward a `GetCodeActions` RPC to the host. Returns an empty
    /// list when no server or remote id is available.
    pub fn code_actions<T: ToOffset>(
        &self,
        buffer_handle: &ModelHandle<Buffer>,
        range: Range<T>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<CodeAction>>> {
        let buffer_handle = buffer_handle.clone();
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // Untitled buffers can't provide code actions.
            return Task::ready(Ok(Default::default()));
        };
        // Anchor the range so it stays valid across concurrent edits and can
        // be attached to the returned actions.
        let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);

        if worktree.read(cx).as_local().is_some() {
            // A file in a local worktree is assumed to have a local path.
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_name;
            let lang_server;
            if let Some(lang) = buffer.language() {
                lang_name = lang.name().to_string();
                if let Some(server) = self
                    .language_servers
                    .get(&(worktree.read(cx).id(), lang_name.clone()))
                {
                    lang_server = server.clone();
                } else {
                    // No running server for this language.
                    return Task::ready(Ok(Default::default()));
                };
            } else {
                return Task::ready(Ok(Default::default()));
            }

            let lsp_range = lsp::Range::new(
                range.start.to_point_utf16(buffer).to_lsp_position(),
                range.end.to_point_utf16(buffer).to_lsp_position(),
            );
            cx.foreground().spawn(async move {
                Ok(lang_server
                    .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
                        text_document: lsp::TextDocumentIdentifier::new(
                            lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                        ),
                        range: lsp_range,
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                        context: lsp::CodeActionContext {
                            diagnostics: Default::default(),
                            // Only surface fix/refactor actions; other kinds
                            // (e.g. source actions) are filtered out server-side.
                            only: Some(vec![
                                lsp::CodeActionKind::QUICKFIX,
                                lsp::CodeActionKind::REFACTOR,
                                lsp::CodeActionKind::REFACTOR_EXTRACT,
                            ]),
                        },
                    })
                    .await?
                    .unwrap_or_default()
                    .into_iter()
                    .filter_map(|entry| {
                        // Bare commands (the non-CodeAction variant) are not
                        // supported; keep only full code actions.
                        if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
                            Some(CodeAction {
                                range: range.clone(),
                                lsp_action,
                            })
                        } else {
                            None
                        }
                    })
                    .collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            cx.foreground().spawn(async move {
                let response = rpc
                    .request(proto::GetCodeActions {
                        project_id,
                        buffer_id,
                        start: Some(language::proto::serialize_anchor(&range.start)),
                        end: Some(language::proto::serialize_anchor(&range.end)),
                    })
                    .await?;
                response
                    .actions
                    .into_iter()
                    .map(language::proto::deserialize_code_action)
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
1582
1583 pub fn apply_code_action(
1584 &self,
1585 buffer_handle: ModelHandle<Buffer>,
1586 mut action: CodeAction,
1587 push_to_history: bool,
1588 cx: &mut ModelContext<Self>,
1589 ) -> Task<Result<ProjectTransaction>> {
1590 if self.is_local() {
1591 let buffer = buffer_handle.read(cx);
1592 let lang_name = if let Some(lang) = buffer.language() {
1593 lang.name().to_string()
1594 } else {
1595 return Task::ready(Ok(Default::default()));
1596 };
1597 let lang_server = if let Some(language_server) = buffer.language_server() {
1598 language_server.clone()
1599 } else {
1600 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1601 };
1602 let range = action.range.to_point_utf16(buffer);
1603 let fs = self.fs.clone();
1604
1605 cx.spawn(|this, mut cx| async move {
1606 if let Some(lsp_range) = action
1607 .lsp_action
1608 .data
1609 .as_mut()
1610 .and_then(|d| d.get_mut("codeActionParams"))
1611 .and_then(|d| d.get_mut("range"))
1612 {
1613 *lsp_range = serde_json::to_value(&lsp::Range::new(
1614 range.start.to_lsp_position(),
1615 range.end.to_lsp_position(),
1616 ))
1617 .unwrap();
1618 action.lsp_action = lang_server
1619 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1620 .await?;
1621 } else {
1622 let actions = this
1623 .update(&mut cx, |this, cx| {
1624 this.code_actions(&buffer_handle, action.range, cx)
1625 })
1626 .await?;
1627 action.lsp_action = actions
1628 .into_iter()
1629 .find(|a| a.lsp_action.title == action.lsp_action.title)
1630 .ok_or_else(|| anyhow!("code action is outdated"))?
1631 .lsp_action;
1632 }
1633
1634 let mut operations = Vec::new();
1635 if let Some(edit) = action.lsp_action.edit {
1636 if let Some(document_changes) = edit.document_changes {
1637 match document_changes {
1638 lsp::DocumentChanges::Edits(edits) => operations
1639 .extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit)),
1640 lsp::DocumentChanges::Operations(ops) => operations = ops,
1641 }
1642 } else if let Some(changes) = edit.changes {
1643 operations.extend(changes.into_iter().map(|(uri, edits)| {
1644 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1645 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1646 uri,
1647 version: None,
1648 },
1649 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1650 })
1651 }));
1652 }
1653 }
1654
1655 let mut project_transaction = ProjectTransaction::default();
1656 for operation in operations {
1657 match operation {
1658 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1659 let abs_path = op
1660 .uri
1661 .to_file_path()
1662 .map_err(|_| anyhow!("can't convert URI to path"))?;
1663
1664 if let Some(parent_path) = abs_path.parent() {
1665 fs.create_dir(parent_path).await?;
1666 }
1667 if abs_path.ends_with("/") {
1668 fs.create_dir(&abs_path).await?;
1669 } else {
1670 fs.create_file(
1671 &abs_path,
1672 op.options.map(Into::into).unwrap_or_default(),
1673 )
1674 .await?;
1675 }
1676 }
1677 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1678 let source_abs_path = op
1679 .old_uri
1680 .to_file_path()
1681 .map_err(|_| anyhow!("can't convert URI to path"))?;
1682 let target_abs_path = op
1683 .new_uri
1684 .to_file_path()
1685 .map_err(|_| anyhow!("can't convert URI to path"))?;
1686 fs.rename(
1687 &source_abs_path,
1688 &target_abs_path,
1689 op.options.map(Into::into).unwrap_or_default(),
1690 )
1691 .await?;
1692 }
1693 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1694 let abs_path = op
1695 .uri
1696 .to_file_path()
1697 .map_err(|_| anyhow!("can't convert URI to path"))?;
1698 let options = op.options.map(Into::into).unwrap_or_default();
1699 if abs_path.ends_with("/") {
1700 fs.remove_dir(&abs_path, options).await?;
1701 } else {
1702 fs.remove_file(&abs_path, options).await?;
1703 }
1704 }
1705 lsp::DocumentChangeOperation::Edit(op) => {
1706 let buffer_to_edit = this
1707 .update(&mut cx, |this, cx| {
1708 this.open_local_buffer_from_lsp_path(
1709 op.text_document.uri,
1710 lang_name.clone(),
1711 lang_server.clone(),
1712 cx,
1713 )
1714 })
1715 .await?;
1716
1717 let edits = buffer_to_edit
1718 .update(&mut cx, |buffer, cx| {
1719 let edits = op.edits.into_iter().map(|edit| match edit {
1720 lsp::OneOf::Left(edit) => edit,
1721 lsp::OneOf::Right(edit) => edit.text_edit,
1722 });
1723 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1724 })
1725 .await?;
1726
1727 let transaction = buffer_to_edit.update(&mut cx, |buffer, cx| {
1728 buffer.finalize_last_transaction();
1729 buffer.start_transaction();
1730 for (range, text) in edits {
1731 buffer.edit([range], text, cx);
1732 }
1733 let transaction = if buffer.end_transaction(cx).is_some() {
1734 let transaction =
1735 buffer.finalize_last_transaction().unwrap().clone();
1736 if !push_to_history {
1737 buffer.forget_transaction(transaction.id);
1738 }
1739 Some(transaction)
1740 } else {
1741 None
1742 };
1743
1744 transaction
1745 });
1746 if let Some(transaction) = transaction {
1747 project_transaction.0.insert(buffer_to_edit, transaction);
1748 }
1749 }
1750 }
1751 }
1752
1753 Ok(project_transaction)
1754 })
1755 } else if let Some(project_id) = self.remote_id() {
1756 let client = self.client.clone();
1757 let request = proto::ApplyCodeAction {
1758 project_id,
1759 buffer_id: buffer_handle.read(cx).remote_id(),
1760 action: Some(language::proto::serialize_code_action(&action)),
1761 };
1762 cx.spawn(|this, mut cx| async move {
1763 let response = client
1764 .request(request)
1765 .await?
1766 .transaction
1767 .ok_or_else(|| anyhow!("missing transaction"))?;
1768 this.update(&mut cx, |this, cx| {
1769 this.deserialize_project_transaction(response, push_to_history, cx)
1770 })
1771 .await
1772 })
1773 } else {
1774 Task::ready(Err(anyhow!("project does not have a remote id")))
1775 }
1776 }
1777
1778 pub fn find_or_create_local_worktree(
1779 &self,
1780 abs_path: impl AsRef<Path>,
1781 weak: bool,
1782 cx: &mut ModelContext<Self>,
1783 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1784 let abs_path = abs_path.as_ref();
1785 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1786 Task::ready(Ok((tree.clone(), relative_path.into())))
1787 } else {
1788 let worktree = self.create_local_worktree(abs_path, weak, cx);
1789 cx.foreground()
1790 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1791 }
1792 }
1793
1794 fn find_local_worktree(
1795 &self,
1796 abs_path: &Path,
1797 cx: &AppContext,
1798 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1799 for tree in self.worktrees(cx) {
1800 if let Some(relative_path) = tree
1801 .read(cx)
1802 .as_local()
1803 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1804 {
1805 return Some((tree.clone(), relative_path.into()));
1806 }
1807 }
1808 None
1809 }
1810
1811 pub fn is_shared(&self) -> bool {
1812 match &self.client_state {
1813 ProjectClientState::Local { is_shared, .. } => *is_shared,
1814 ProjectClientState::Remote { .. } => false,
1815 }
1816 }
1817
    /// Creates a new local worktree rooted at `abs_path`, adds it to the
    /// project, and — if the project already has a remote id — registers it
    /// with the server and shares it when the project is currently shared.
    fn create_local_worktree(
        &self,
        abs_path: impl AsRef<Path>,
        weak: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let path = Arc::from(abs_path.as_ref());
        cx.spawn(|project, mut cx| async move {
            let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;

            // Read the project's remote/shared state in the same update that
            // adds the worktree, so the registration below matches it.
            let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
                project.add_worktree(&worktree, cx);
                (project.remote_id(), project.is_shared())
            });

            if let Some(project_id) = remote_project_id {
                // Registration must complete before sharing.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_local_mut().unwrap().register(project_id, cx)
                    })
                    .await?;
                if is_shared {
                    worktree
                        .update(&mut cx, |worktree, cx| {
                            worktree.as_local_mut().unwrap().share(project_id, cx)
                        })
                        .await?;
                }
            }

            Ok(worktree)
        })
    }
1853
1854 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1855 self.worktrees.retain(|worktree| {
1856 worktree
1857 .upgrade(cx)
1858 .map_or(false, |w| w.read(cx).id() != id)
1859 });
1860 cx.notify();
1861 }
1862
1863 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
1864 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
1865 if worktree.read(cx).is_local() {
1866 cx.subscribe(&worktree, |this, worktree, _, cx| {
1867 this.update_local_worktree_buffers(worktree, cx);
1868 })
1869 .detach();
1870 }
1871
1872 let push_weak_handle = {
1873 let worktree = worktree.read(cx);
1874 worktree.is_local() && worktree.is_weak()
1875 };
1876 if push_weak_handle {
1877 cx.observe_release(&worktree, |this, cx| {
1878 this.worktrees
1879 .retain(|worktree| worktree.upgrade(cx).is_some());
1880 cx.notify();
1881 })
1882 .detach();
1883 self.worktrees
1884 .push(WorktreeHandle::Weak(worktree.downgrade()));
1885 } else {
1886 self.worktrees
1887 .push(WorktreeHandle::Strong(worktree.clone()));
1888 }
1889 cx.notify();
1890 }
1891
    /// Reconciles every open buffer belonging to `worktree_handle` with the
    /// worktree's latest snapshot after a filesystem change, rebinding each
    /// buffer's `File` and broadcasting the new metadata to collaborators.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        let mut buffers_to_delete = Vec::new();
        for (buffer_id, buffer) in &self.open_buffers {
            if let Some(buffer) = buffer.upgrade(cx) {
                buffer.update(cx, |buffer, cx| {
                    if let Some(old_file) = File::from_dyn(buffer.file()) {
                        // Only touch buffers that belong to this worktree.
                        if old_file.worktree != worktree_handle {
                            return;
                        }

                        // Re-resolve the buffer's file against the snapshot:
                        // first by entry id (survives renames), then by path
                        // (survives entry-id churn); if neither matches, the
                        // file has been deleted and loses its entry id.
                        let new_file = if let Some(entry) = old_file
                            .entry_id
                            .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else if let Some(entry) =
                            snapshot.entry_for_path(old_file.path().as_ref())
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else {
                            File {
                                is_local: true,
                                entry_id: None,
                                path: old_file.path().clone(),
                                mtime: old_file.mtime(),
                                worktree: worktree_handle.clone(),
                            }
                        };

                        // Let collaborators know the buffer's file changed;
                        // failures are logged but not fatal (best-effort).
                        if let Some(project_id) = self.remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: *buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }
                        buffer.file_updated(Box::new(new_file), cx).detach();
                    }
                });
            } else {
                // The buffer model was dropped; schedule its map entry for
                // removal (can't mutate the map while iterating it).
                buffers_to_delete.push(*buffer_id);
            }
        }

        for buffer_id in buffers_to_delete {
            self.open_buffers.remove(&buffer_id);
        }
    }
1959
1960 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
1961 let new_active_entry = entry.and_then(|project_path| {
1962 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1963 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
1964 Some(ProjectEntry {
1965 worktree_id: project_path.worktree_id,
1966 entry_id: entry.id,
1967 })
1968 });
1969 if new_active_entry != self.active_entry {
1970 self.active_entry = new_active_entry;
1971 cx.emit(Event::ActiveEntryChanged(new_active_entry));
1972 }
1973 }
1974
    /// Whether at least one language server currently has a disk-based
    /// diagnostics pass in flight (see `disk_based_diagnostics_started` /
    /// `disk_based_diagnostics_finished`).
    pub fn is_running_disk_based_diagnostics(&self) -> bool {
        self.language_servers_with_diagnostics_running > 0
    }
1978
1979 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
1980 let mut summary = DiagnosticSummary::default();
1981 for (_, path_summary) in self.diagnostic_summaries(cx) {
1982 summary.error_count += path_summary.error_count;
1983 summary.warning_count += path_summary.warning_count;
1984 summary.info_count += path_summary.info_count;
1985 summary.hint_count += path_summary.hint_count;
1986 }
1987 summary
1988 }
1989
    /// Iterates over every worktree's per-path diagnostic summaries, pairing
    /// each summary with its full `ProjectPath`.
    pub fn diagnostic_summaries<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
        self.worktrees(cx).flat_map(move |worktree| {
            let worktree = worktree.read(cx);
            let worktree_id = worktree.id();
            worktree
                .diagnostic_summaries()
                .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
        })
    }
2002
2003 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2004 self.language_servers_with_diagnostics_running += 1;
2005 if self.language_servers_with_diagnostics_running == 1 {
2006 cx.emit(Event::DiskBasedDiagnosticsStarted);
2007 }
2008 }
2009
2010 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2011 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2012 self.language_servers_with_diagnostics_running -= 1;
2013 if self.language_servers_with_diagnostics_running == 0 {
2014 cx.emit(Event::DiskBasedDiagnosticsFinished);
2015 }
2016 }
2017
    /// The currently active worktree entry, if any (see `set_active_path`).
    pub fn active_entry(&self) -> Option<ProjectEntry> {
        self.active_entry
    }
2021
2022 // RPC message handlers
2023
2024 async fn handle_unshare_project(
2025 this: ModelHandle<Self>,
2026 _: TypedEnvelope<proto::UnshareProject>,
2027 _: Arc<Client>,
2028 mut cx: AsyncAppContext,
2029 ) -> Result<()> {
2030 this.update(&mut cx, |this, cx| {
2031 if let ProjectClientState::Remote {
2032 sharing_has_stopped,
2033 ..
2034 } = &mut this.client_state
2035 {
2036 *sharing_has_stopped = true;
2037 this.collaborators.clear();
2038 cx.notify();
2039 } else {
2040 unreachable!()
2041 }
2042 });
2043
2044 Ok(())
2045 }
2046
2047 async fn handle_add_collaborator(
2048 this: ModelHandle<Self>,
2049 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2050 _: Arc<Client>,
2051 mut cx: AsyncAppContext,
2052 ) -> Result<()> {
2053 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2054 let collaborator = envelope
2055 .payload
2056 .collaborator
2057 .take()
2058 .ok_or_else(|| anyhow!("empty collaborator"))?;
2059
2060 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2061 this.update(&mut cx, |this, cx| {
2062 this.collaborators
2063 .insert(collaborator.peer_id, collaborator);
2064 cx.notify();
2065 });
2066
2067 Ok(())
2068 }
2069
2070 async fn handle_remove_collaborator(
2071 this: ModelHandle<Self>,
2072 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2073 _: Arc<Client>,
2074 mut cx: AsyncAppContext,
2075 ) -> Result<()> {
2076 this.update(&mut cx, |this, cx| {
2077 let peer_id = PeerId(envelope.payload.peer_id);
2078 let replica_id = this
2079 .collaborators
2080 .remove(&peer_id)
2081 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2082 .replica_id;
2083 this.shared_buffers.remove(&peer_id);
2084 for (_, buffer) in &this.open_buffers {
2085 if let Some(buffer) = buffer.upgrade(cx) {
2086 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2087 }
2088 }
2089 cx.notify();
2090 Ok(())
2091 })
2092 }
2093
2094 async fn handle_share_worktree(
2095 this: ModelHandle<Self>,
2096 envelope: TypedEnvelope<proto::ShareWorktree>,
2097 client: Arc<Client>,
2098 mut cx: AsyncAppContext,
2099 ) -> Result<()> {
2100 this.update(&mut cx, |this, cx| {
2101 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2102 let replica_id = this.replica_id();
2103 let worktree = envelope
2104 .payload
2105 .worktree
2106 .ok_or_else(|| anyhow!("invalid worktree"))?;
2107 let (worktree, load_task) =
2108 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2109 this.add_worktree(&worktree, cx);
2110 load_task.detach();
2111 Ok(())
2112 })
2113 }
2114
2115 async fn handle_unregister_worktree(
2116 this: ModelHandle<Self>,
2117 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2118 _: Arc<Client>,
2119 mut cx: AsyncAppContext,
2120 ) -> Result<()> {
2121 this.update(&mut cx, |this, cx| {
2122 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2123 this.remove_worktree(worktree_id, cx);
2124 Ok(())
2125 })
2126 }
2127
    /// Handles an incremental update to a remote worktree's snapshot.
    /// Updates for worktrees we no longer track are silently ignored.
    async fn handle_update_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateWorktree>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                worktree.update(cx, |worktree, _| {
                    // Only remote worktrees receive these updates; a local
                    // worktree here would be a protocol violation.
                    let worktree = worktree.as_remote_mut().unwrap();
                    worktree.update_from_remote(envelope)
                })?;
            }
            Ok(())
        })
    }
2145
    /// Handles a per-path diagnostic summary pushed by the host: records it on
    /// the corresponding remote worktree and emits `Event::DiagnosticsUpdated`
    /// so the UI can refresh.
    async fn handle_update_diagnostic_summary(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                if let Some(summary) = envelope.payload.summary {
                    let project_path = ProjectPath {
                        worktree_id,
                        path: Path::new(&summary.path).into(),
                    };
                    worktree.update(cx, |worktree, _| {
                        // Summaries only arrive for remote worktrees.
                        worktree
                            .as_remote_mut()
                            .unwrap()
                            .update_diagnostic_summary(project_path.path.clone(), &summary);
                    });
                    cx.emit(Event::DiagnosticsUpdated(project_path));
                }
            }
            Ok(())
        })
    }
2172
2173 async fn handle_disk_based_diagnostics_updating(
2174 this: ModelHandle<Self>,
2175 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2176 _: Arc<Client>,
2177 mut cx: AsyncAppContext,
2178 ) -> Result<()> {
2179 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2180 Ok(())
2181 }
2182
2183 async fn handle_disk_based_diagnostics_updated(
2184 this: ModelHandle<Self>,
2185 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2186 _: Arc<Client>,
2187 mut cx: AsyncAppContext,
2188 ) -> Result<()> {
2189 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2190 Ok(())
2191 }
2192
2193 async fn handle_update_buffer(
2194 this: ModelHandle<Self>,
2195 envelope: TypedEnvelope<proto::UpdateBuffer>,
2196 _: Arc<Client>,
2197 mut cx: AsyncAppContext,
2198 ) -> Result<()> {
2199 this.update(&mut cx, |this, cx| {
2200 let payload = envelope.payload.clone();
2201 let buffer_id = payload.buffer_id as usize;
2202 let ops = payload
2203 .operations
2204 .into_iter()
2205 .map(|op| language::proto::deserialize_operation(op))
2206 .collect::<Result<Vec<_>, _>>()?;
2207 let buffer = this
2208 .open_buffers
2209 .entry(buffer_id)
2210 .or_insert_with(|| OpenBuffer::Operations(Vec::new()));
2211 match buffer {
2212 OpenBuffer::Loaded(buffer) => {
2213 if let Some(buffer) = buffer.upgrade(cx) {
2214 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2215 }
2216 }
2217 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
2218 }
2219 Ok(())
2220 })
2221 }
2222
2223 async fn handle_update_buffer_file(
2224 this: ModelHandle<Self>,
2225 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2226 _: Arc<Client>,
2227 mut cx: AsyncAppContext,
2228 ) -> Result<()> {
2229 this.update(&mut cx, |this, cx| {
2230 let payload = envelope.payload.clone();
2231 let buffer_id = payload.buffer_id as usize;
2232 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2233 let worktree = this
2234 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2235 .ok_or_else(|| anyhow!("no such worktree"))?;
2236 let file = File::from_proto(file, worktree.clone(), cx)?;
2237 let buffer = this
2238 .open_buffers
2239 .get_mut(&buffer_id)
2240 .and_then(|b| b.upgrade(cx))
2241 .ok_or_else(|| anyhow!("no such buffer"))?;
2242 buffer.update(cx, |buffer, cx| {
2243 buffer.file_updated(Box::new(file), cx).detach();
2244 });
2245 Ok(())
2246 })
2247 }
2248
    /// Handles a guest's request to save one of the host's buffers.
    ///
    /// Waits until the local buffer has caught up to the version the guest
    /// observed, performs the save, and replies with the saved version and
    /// file mtime.
    async fn handle_save_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = envelope.payload.buffer_id;
        let sender_id = envelope.original_sender_id()?;
        let requested_version = envelope.payload.version.try_into()?;

        let (project_id, buffer) = this.update(&mut cx, |this, _| {
            let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
            // Only buffers previously shared with this peer may be saved on
            // its behalf.
            let buffer = this
                .shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
            Ok::<_, anyhow::Error>((project_id, buffer))
        })?;

        // Don't save until all edits the guest has seen have been applied
        // locally, so we never persist a stale state.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(requested_version)
            })
            .await;

        let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
        Ok(proto::BufferSaved {
            project_id,
            buffer_id,
            version: (&saved_version).into(),
            mtime: Some(mtime.into()),
        })
    }
2283
    /// Handles a guest's request to format a set of buffers on the host.
    ///
    /// Resolves the requested buffer ids against the buffers shared with the
    /// sender, runs the project-wide format, and replies with the resulting
    /// transaction serialized for that peer.
    async fn handle_format_buffers(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::FormatBuffers>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::FormatBuffersResponse> {
        let sender_id = envelope.original_sender_id()?;
        let format = this.update(&mut cx, |this, cx| {
            let shared_buffers = this
                .shared_buffers
                .get(&sender_id)
                .ok_or_else(|| anyhow!("peer has no buffers"))?;
            let mut buffers = HashSet::default();
            for buffer_id in &envelope.payload.buffer_ids {
                buffers.insert(
                    shared_buffers
                        .get(buffer_id)
                        .cloned()
                        .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
                );
            }
            // `false`: don't push the formatting edits into local undo history
            // on behalf of the guest.
            Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
        })?;

        let project_transaction = format.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::FormatBuffersResponse {
            transaction: Some(project_transaction),
        })
    }
2316
    /// Handles a guest's completion request: runs `Project::completions` at
    /// the given anchor position in the shared buffer and returns the results
    /// in wire format.
    async fn handle_get_completions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCompletions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCompletionsResponse> {
        let sender_id = envelope.original_sender_id()?;
        let position = envelope
            .payload
            .position
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        let completions = this.update(&mut cx, |this, cx| {
            let buffer = this
                .shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            Ok::<_, anyhow::Error>(this.completions(&buffer, position, cx))
        })?;

        Ok(proto::GetCompletionsResponse {
            completions: completions
                .await?
                .iter()
                .map(language::proto::serialize_completion)
                .collect(),
        })
    }
2346
    /// Handles a guest applying a completion's additional edits (e.g. an
    /// auto-import) on the host; replies with the resulting transaction, if
    /// any, so the guest can mirror it.
    async fn handle_apply_additional_edits_for_completion(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
        let sender_id = envelope.original_sender_id()?;
        let apply_additional_edits = this.update(&mut cx, |this, cx| {
            let buffer = this
                .shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // The buffer's language is needed to deserialize the completion's
            // label correctly.
            let language = buffer.read(cx).language();
            let completion = language::proto::deserialize_completion(
                envelope
                    .payload
                    .completion
                    .ok_or_else(|| anyhow!("invalid completion"))?,
                language,
            )?;
            // `false`: don't push the edits into the host's undo history.
            Ok::<_, anyhow::Error>(
                this.apply_additional_edits_for_completion(buffer, completion, false, cx),
            )
        })?;

        Ok(proto::ApplyCompletionAdditionalEditsResponse {
            transaction: apply_additional_edits
                .await?
                .as_ref()
                .map(language::proto::serialize_transaction),
        })
    }
2380
    /// Handles a guest's code-action request for an anchor range in a shared
    /// buffer, returning the available actions in wire format.
    async fn handle_get_code_actions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCodeActions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCodeActionsResponse> {
        let sender_id = envelope.original_sender_id()?;
        let start = envelope
            .payload
            .start
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = envelope
            .payload
            .end
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid end"))?;
        let buffer = this.update(&mut cx, |this, _| {
            this.shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        // Wait until the edits that created both anchors have arrived locally,
        // otherwise the anchors could not be resolved.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version([start.timestamp, end.timestamp].into_iter().collect())
            })
            .await;
        let code_actions = this.update(&mut cx, |this, cx| {
            Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
        })?;

        Ok(proto::GetCodeActionsResponse {
            actions: code_actions
                .await?
                .iter()
                .map(language::proto::serialize_code_action)
                .collect(),
        })
    }
2421
    /// Handles a guest applying a code action on the host; replies with the
    /// resulting project-wide transaction serialized for that peer.
    async fn handle_apply_code_action(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCodeAction>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCodeActionResponse> {
        let sender_id = envelope.original_sender_id()?;
        let action = language::proto::deserialize_code_action(
            envelope
                .payload
                .action
                .ok_or_else(|| anyhow!("invalid action"))?,
        )?;
        let apply_code_action = this.update(&mut cx, |this, cx| {
            let buffer = this
                .shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // `false`: don't push the action's edits into the host's history.
            Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
        })?;

        let project_transaction = apply_code_action.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ApplyCodeActionResponse {
            transaction: Some(project_transaction),
        })
    }
2452
    /// Handles a guest's go-to-definition request.
    ///
    /// Resolves the definitions on the host, then serializes each target
    /// buffer for the requesting peer (sharing it if it wasn't already) along
    /// with the target range anchors.
    async fn handle_get_definition(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetDefinition>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetDefinitionResponse> {
        let sender_id = envelope.original_sender_id()?;
        let position = envelope
            .payload
            .position
            .and_then(deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        let definitions = this.update(&mut cx, |this, cx| {
            let source_buffer = this
                .shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // The anchor may reference edits this replica hasn't seen yet; in
            // that case we can't service the request.
            if source_buffer.read(cx).can_resolve(&position) {
                Ok(this.definition(&source_buffer, position, cx))
            } else {
                Err(anyhow!("cannot resolve position"))
            }
        })?;

        let definitions = definitions.await?;

        this.update(&mut cx, |this, cx| {
            let mut response = proto::GetDefinitionResponse {
                definitions: Default::default(),
            };
            for definition in definitions {
                let buffer =
                    this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx);
                response.definitions.push(proto::Definition {
                    target_start: Some(serialize_anchor(&definition.target_range.start)),
                    target_end: Some(serialize_anchor(&definition.target_range.end)),
                    buffer: Some(buffer),
                });
            }
            Ok(response)
        })
    }
2496
    /// Handles a guest's request to open a buffer by project path: opens it on
    /// the host and replies with the buffer serialized for that peer (full
    /// state on first share, just the id thereafter).
    async fn handle_open_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> anyhow::Result<proto::OpenBufferResponse> {
        let peer_id = envelope.original_sender_id()?;
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let open_buffer = this.update(&mut cx, |this, cx| {
            this.open_buffer(
                ProjectPath {
                    worktree_id,
                    path: PathBuf::from(envelope.payload.path).into(),
                },
                cx,
            )
        });

        let buffer = open_buffer.await?;
        this.update(&mut cx, |this, cx| {
            Ok(proto::OpenBufferResponse {
                buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
            })
        })
    }
2522
2523 fn serialize_project_transaction_for_peer(
2524 &mut self,
2525 project_transaction: ProjectTransaction,
2526 peer_id: PeerId,
2527 cx: &AppContext,
2528 ) -> proto::ProjectTransaction {
2529 let mut serialized_transaction = proto::ProjectTransaction {
2530 buffers: Default::default(),
2531 transactions: Default::default(),
2532 };
2533 for (buffer, transaction) in project_transaction.0 {
2534 serialized_transaction
2535 .buffers
2536 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
2537 serialized_transaction
2538 .transactions
2539 .push(language::proto::serialize_transaction(&transaction));
2540 }
2541 serialized_transaction
2542 }
2543
    /// Reconstructs a project-wide transaction received from a peer.
    ///
    /// First resolves every referenced buffer (waiting for it to open if
    /// necessary) and deserializes the paired transactions; then waits for all
    /// of each transaction's edits to arrive locally, optionally pushing the
    /// transactions into local undo history.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            // `buffers` and `transactions` are parallel arrays on the wire.
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }
            for (buffer, transaction) in &project_transaction.0 {
                // The transaction's edits may still be in flight; wait until
                // they have all been applied to the local buffer replica.
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
2576
    /// Serializes a buffer for transmission to a peer.
    ///
    /// The first time a buffer is sent to a given peer its full state is
    /// included and the buffer is recorded in `shared_buffers`; subsequent
    /// sends transmit only the buffer id, since the peer already has the
    /// state.
    fn serialize_buffer_for_peer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        peer_id: PeerId,
        cx: &AppContext,
    ) -> proto::Buffer {
        let buffer_id = buffer.read(cx).remote_id();
        let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
        match shared_buffers.entry(buffer_id) {
            hash_map::Entry::Occupied(_) => proto::Buffer {
                variant: Some(proto::buffer::Variant::Id(buffer_id)),
            },
            hash_map::Entry::Vacant(entry) => {
                entry.insert(buffer.clone());
                proto::Buffer {
                    variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
                }
            }
        }
    }
2597
    /// Resolves a `proto::Buffer` received from a peer into a local buffer
    /// model.
    ///
    /// An `Id` variant refers to a buffer the peer believes we already have;
    /// if it isn't open yet, this waits on the `opened_buffer` channel until
    /// it appears. A `State` variant carries the full buffer contents, from
    /// which a new local replica is created and registered.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        // Subscribe before spawning so no "buffer opened" notification can be
        // missed between the lookup below and the wait.
        let mut opened_buffer_tx = self.opened_buffer.clone();
        let mut opened_buffer_rx = self.opened_buffer.subscribe();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.open_buffers
                                .get(&(id as usize))
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        // Not open yet; block until some buffer opens, then
                        // re-check.
                        opened_buffer_rx
                            .recv()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    // Resolve the buffer's file (if any) against our local
                    // worktrees before constructing the replica.
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Box<dyn language::File>);
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });
                    this.update(&mut cx, |this, cx| {
                        this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
                    })?;

                    // Wake any tasks waiting for this buffer id in the `Id`
                    // branch above.
                    let _ = opened_buffer_tx.send(()).await;
                    Ok(buffer)
                }
            }
        })
    }
2657
    /// Handles a peer closing a buffer: drops it from the set of buffers
    /// shared with that peer, so a future open re-sends the full state.
    async fn handle_close_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::CloseBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> anyhow::Result<()> {
        this.update(&mut cx, |this, cx| {
            if let Some(shared_buffers) =
                this.shared_buffers.get_mut(&envelope.original_sender_id()?)
            {
                shared_buffers.remove(&envelope.payload.buffer_id);
                cx.notify();
            }
            Ok(())
        })
    }
2674
    /// Handles notification that the host saved a buffer: records the saved
    /// version and mtime on our local replica, if it's still open.
    async fn handle_buffer_saved(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::BufferSaved>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let version = envelope.payload.version.try_into()?;
        let mtime = envelope
            .payload
            .mtime
            .ok_or_else(|| anyhow!("missing mtime"))?
            .into();

        this.update(&mut cx, |this, cx| {
            let buffer = this
                .open_buffers
                .get(&(envelope.payload.buffer_id as usize))
                .and_then(|buffer| buffer.upgrade(cx));
            // A save notification for a buffer we've since dropped is not an
            // error; just ignore it.
            if let Some(buffer) = buffer {
                buffer.update(cx, |buffer, cx| {
                    buffer.did_save(version, mtime, None, cx);
                });
            }
            Ok(())
        })
    }
2701
2702 async fn handle_buffer_reloaded(
2703 this: ModelHandle<Self>,
2704 envelope: TypedEnvelope<proto::BufferReloaded>,
2705 _: Arc<Client>,
2706 mut cx: AsyncAppContext,
2707 ) -> Result<()> {
2708 let payload = envelope.payload.clone();
2709 let version = payload.version.try_into()?;
2710 let mtime = payload
2711 .mtime
2712 .ok_or_else(|| anyhow!("missing mtime"))?
2713 .into();
2714 this.update(&mut cx, |this, cx| {
2715 let buffer = this
2716 .open_buffers
2717 .get(&(payload.buffer_id as usize))
2718 .and_then(|buffer| buffer.upgrade(cx));
2719 if let Some(buffer) = buffer {
2720 buffer.update(cx, |buffer, cx| {
2721 buffer.did_reload(version, mtime, cx);
2722 });
2723 }
2724 Ok(())
2725 })
2726 }
2727
2728 pub fn match_paths<'a>(
2729 &self,
2730 query: &'a str,
2731 include_ignored: bool,
2732 smart_case: bool,
2733 max_results: usize,
2734 cancel_flag: &'a AtomicBool,
2735 cx: &AppContext,
2736 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
2737 let worktrees = self
2738 .worktrees(cx)
2739 .filter(|worktree| !worktree.read(cx).is_weak())
2740 .collect::<Vec<_>>();
2741 let include_root_name = worktrees.len() > 1;
2742 let candidate_sets = worktrees
2743 .into_iter()
2744 .map(|worktree| CandidateSet {
2745 snapshot: worktree.read(cx).snapshot(),
2746 include_ignored,
2747 include_root_name,
2748 })
2749 .collect::<Vec<_>>();
2750
2751 let background = cx.background().clone();
2752 async move {
2753 fuzzy::match_paths(
2754 candidate_sets.as_slice(),
2755 query,
2756 smart_case,
2757 max_results,
2758 cancel_flag,
2759 background,
2760 )
2761 .await
2762 }
2763 }
2764}
2765
2766impl WorktreeHandle {
2767 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2768 match self {
2769 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2770 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2771 }
2772 }
2773}
2774
2775impl OpenBuffer {
2776 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
2777 match self {
2778 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
2779 OpenBuffer::Operations(_) => None,
2780 }
2781 }
2782}
2783
/// One worktree snapshot presented to the fuzzy matcher as a set of
/// path-match candidates.
struct CandidateSet {
    snapshot: Snapshot,
    // Whether ignored files are offered as candidates.
    include_ignored: bool,
    // Whether candidate paths are prefixed with the worktree root name
    // (used when the project contains more than one worktree).
    include_root_name: bool,
}
2789
impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
    type Candidates = CandidateSetIter<'a>;

    // Identify the set by its worktree id so matches can be attributed back.
    fn id(&self) -> usize {
        self.snapshot.id().to_usize()
    }

    // Candidate count depends on whether ignored files are included.
    fn len(&self) -> usize {
        if self.include_ignored {
            self.snapshot.file_count()
        } else {
            self.snapshot.visible_file_count()
        }
    }

    // Prefix prepended to every candidate path before matching: the root name
    // itself when the worktree root is a single file, "<root>/" when multiple
    // worktrees must be disambiguated, otherwise nothing.
    fn prefix(&self) -> Arc<str> {
        if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
            self.snapshot.root_name().into()
        } else if self.include_root_name {
            format!("{}/", self.snapshot.root_name()).into()
        } else {
            "".into()
        }
    }

    // Iterate the snapshot's files beginning at `start` (supports resuming).
    fn candidates(&'a self, start: usize) -> Self::Candidates {
        CandidateSetIter {
            traversal: self.snapshot.files(self.include_ignored, start),
        }
    }
}
2821
/// Iterator adapting a worktree file traversal into fuzzy-match candidates.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
2825
2826impl<'a> Iterator for CandidateSetIter<'a> {
2827 type Item = PathMatchCandidate<'a>;
2828
2829 fn next(&mut self) -> Option<Self::Item> {
2830 self.traversal.next().map(|entry| {
2831 if let EntryKind::File(char_bag) = entry.kind {
2832 PathMatchCandidate {
2833 path: &entry.path,
2834 char_bag,
2835 }
2836 } else {
2837 unreachable!()
2838 }
2839 })
2840 }
2841}
2842
impl Entity for Project {
    type Event = Event;

    // When the project model is dropped, tell the server we're done with it:
    // unregister it if we're the host (and it was registered), or leave it if
    // we're a guest. Send failures are logged, not propagated.
    fn release(&mut self, _: &mut gpui::MutableAppContext) {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => {
                if let Some(project_id) = *remote_id_rx.borrow() {
                    self.client
                        .send(proto::UnregisterProject { project_id })
                        .log_err();
                }
            }
            ProjectClientState::Remote { remote_id, .. } => {
                self.client
                    .send(proto::LeaveProject {
                        project_id: *remote_id,
                    })
                    .log_err();
            }
        }
    }

    // On app quit, gracefully shut down every running language server,
    // allowing all shutdowns to proceed concurrently before the app exits.
    fn app_will_quit(
        &mut self,
        _: &mut MutableAppContext,
    ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
        use futures::FutureExt;

        let shutdown_futures = self
            .language_servers
            .drain()
            .filter_map(|(_, server)| server.shutdown())
            .collect::<Vec<_>>();
        Some(
            async move {
                futures::future::join_all(shutdown_futures).await;
            }
            .boxed(),
        )
    }
}
2884
impl Collaborator {
    /// Builds a `Collaborator` from its wire representation, fetching the
    /// associated `User` through the user store (which may hit the network,
    /// hence the returned future).
    fn from_proto(
        message: proto::Collaborator,
        user_store: &ModelHandle<UserStore>,
        cx: &mut AsyncAppContext,
    ) -> impl Future<Output = Result<Self>> {
        // Kick off the user fetch synchronously so the returned future doesn't
        // need to borrow the user store.
        let user = user_store.update(cx, |user_store, cx| {
            user_store.fetch_user(message.user_id, cx)
        });

        async move {
            Ok(Self {
                peer_id: PeerId(message.peer_id),
                user: user.await?,
                replica_id: message.replica_id as ReplicaId,
            })
        }
    }
}
2904
2905impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
2906 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
2907 Self {
2908 worktree_id,
2909 path: path.as_ref().into(),
2910 }
2911 }
2912}
2913
2914impl From<lsp::CreateFileOptions> for fs::CreateOptions {
2915 fn from(options: lsp::CreateFileOptions) -> Self {
2916 Self {
2917 overwrite: options.overwrite.unwrap_or(false),
2918 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2919 }
2920 }
2921}
2922
2923impl From<lsp::RenameFileOptions> for fs::RenameOptions {
2924 fn from(options: lsp::RenameFileOptions) -> Self {
2925 Self {
2926 overwrite: options.overwrite.unwrap_or(false),
2927 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2928 }
2929 }
2930}
2931
2932impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
2933 fn from(options: lsp::DeleteFileOptions) -> Self {
2934 Self {
2935 recursive: options.recursive.unwrap_or(false),
2936 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
2937 }
2938 }
2939}
2940
2941#[cfg(test)]
2942mod tests {
2943 use super::{Event, *};
2944 use client::test::FakeHttpClient;
2945 use fs::RealFs;
2946 use futures::StreamExt;
2947 use gpui::test::subscribe;
2948 use language::{
2949 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageRegistry,
2950 LanguageServerConfig, Point,
2951 };
2952 use lsp::Url;
2953 use serde_json::json;
2954 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
2955 use unindent::Unindent as _;
2956 use util::test::temp_tree;
2957 use worktree::WorktreeHandle as _;
2958
    // Verifies that a worktree reached through symlinks is scanned correctly
    // (symlinked directories share inodes with their targets) and that fuzzy
    // path matching returns the expected files.
    #[gpui::test]
    async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = Project::test(Arc::new(RealFs), &mut cx);

        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(&root_link_path, false, cx)
            })
            .await
            .unwrap();

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;
        cx.read(|cx| {
            let tree = tree.read(cx);
            assert_eq!(tree.file_count(), 5);
            // The symlinked directory's files resolve to the same inodes as
            // the originals.
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        let cancel_flag = Default::default();
        let results = project
            .read_with(&cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
3021
    // Exercises the disk-based diagnostics lifecycle against a fake language
    // server: nested progress tokens, published diagnostics triggering
    // project events, and the diagnostics being visible on a buffer opened
    // afterwards.
    #[gpui::test]
    async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
        let (language_server_config, mut fake_server) = LanguageServerConfig::fake(&cx).await;
        let progress_token = language_server_config
            .disk_based_diagnostics_progress_token
            .clone()
            .unwrap();

        let mut languages = LanguageRegistry::new();
        languages.add(Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        )));

        let dir = temp_tree(json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }));

        let http_client = FakeHttpClient::with_404_response();
        let client = Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));

        let project = cx.update(|cx| {
            Project::local(
                client,
                user_store,
                Arc::new(languages),
                Arc::new(RealFs),
                cx,
            )
        });

        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(&mut cx, |project, cx| {
                project.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Path::new("b.rs").into(),
                    },
                    cx,
                )
            })
            .await
            .unwrap();

        let mut events = subscribe(&project, &mut cx);

        fake_server.start_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Start two more nested progress reports; only when the last one ends
        // should the "finished" event fire.
        fake_server.start_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        fake_server.start_progress(&progress_token).await;

        fake_server
            .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path(dir.path().join("a.rs")).unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            })
            .await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated(ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("a.rs"))
            })
        );

        fake_server.end_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        // A buffer opened after the diagnostics arrived should carry them.
        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        buffer.read_with(&cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });
    }
3153
    // Fuzzy path matching only considers files, so a worktree containing
    // nothing but directories must yield no results even when the query
    // matches directory names.
    #[gpui::test]
    async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "dir1": {},
                "dir2": {
                    "dir3": {}
                }
            }
        }));

        let project = Project::test(Arc::new(RealFs), &mut cx);
        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(&dir.path(), false, cx)
            })
            .await
            .unwrap();

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        let cancel_flag = Default::default();
        let results = project
            .read_with(&cx, |project, cx| {
                project.match_paths("dir", false, false, 10, &cancel_flag, cx)
            })
            .await;

        assert!(results.is_empty());
    }
3185
    #[gpui::test]
    async fn test_definition(mut cx: gpui::TestAppContext) {
        // Set up a fake language server so the test controls the
        // goto-definition response itself.
        let (language_server_config, mut fake_server) = LanguageServerConfig::fake(&cx).await;

        let mut languages = LanguageRegistry::new();
        languages.add(Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        )));

        // "b.rs" references a definition living in "a.rs". Only "b.rs" is
        // opened as a worktree below, so resolving the definition must pull
        // "a.rs" in from outside the project.
        let dir = temp_tree(json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }));
        let dir_path = dir.path().to_path_buf();

        let http_client = FakeHttpClient::with_404_response();
        let client = Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project = cx.update(|cx| {
            Project::local(
                client,
                user_store,
                Arc::new(languages),
                Arc::new(RealFs),
                cx,
            )
        });

        // Open a single-file worktree rooted at "b.rs".
        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(dir.path().join("b.rs"), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // In a single-file worktree, the file itself is addressed by the
        // empty relative path.
        let buffer = project
            .update(&mut cx, |project, cx| {
                project.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Path::new("").into(),
                    },
                    cx,
                )
            })
            .await
            .unwrap();

        // Answer the goto-definition request with a location inside "a.rs",
        // i.e. a file that is not part of any existing worktree.
        fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params| {
            let params = params.text_document_position_params;
            assert_eq!(
                params.text_document.uri.to_file_path().unwrap(),
                dir_path.join("b.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 22));

            Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
                lsp::Url::from_file_path(dir_path.join("a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            )))
        });

        let mut definitions = project
            .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
            .await
            .unwrap();

        assert_eq!(definitions.len(), 1);
        let definition = definitions.pop().unwrap();
        cx.update(|cx| {
            let target_buffer = definition.target_buffer.read(cx);
            assert_eq!(
                target_buffer
                    .file()
                    .unwrap()
                    .as_local()
                    .unwrap()
                    .abs_path(cx),
                dir.path().join("a.rs")
            );
            assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
            // Resolving the definition added "a.rs" as a second, weak
            // worktree alongside the original "b.rs" one.
            assert_eq!(
                list_worktrees(&project, cx),
                [
                    (dir.path().join("b.rs"), false),
                    (dir.path().join("a.rs"), true)
                ]
            );

            drop(definition);
        });
        // Once the definition (holding the last handle into "a.rs") is
        // dropped, the weak worktree goes away again.
        cx.read(|cx| {
            assert_eq!(
                list_worktrees(&project, cx),
                [(dir.path().join("b.rs"), false)]
            );
        });

        // Lists each worktree's absolute path along with whether it is weak.
        fn list_worktrees(project: &ModelHandle<Project>, cx: &AppContext) -> Vec<(PathBuf, bool)> {
            project
                .read(cx)
                .worktrees(cx)
                .map(|worktree| {
                    let worktree = worktree.read(cx);
                    (
                        worktree.as_local().unwrap().abs_path().to_path_buf(),
                        worktree.is_weak(),
                    )
                })
                .collect::<Vec<_>>()
        }
    }
3307
3308 #[gpui::test]
3309 async fn test_save_file(mut cx: gpui::TestAppContext) {
3310 let fs = Arc::new(FakeFs::new(cx.background()));
3311 fs.insert_tree(
3312 "/dir",
3313 json!({
3314 "file1": "the old contents",
3315 }),
3316 )
3317 .await;
3318
3319 let project = Project::test(fs.clone(), &mut cx);
3320 let worktree_id = project
3321 .update(&mut cx, |p, cx| {
3322 p.find_or_create_local_worktree("/dir", false, cx)
3323 })
3324 .await
3325 .unwrap()
3326 .0
3327 .read_with(&cx, |tree, _| tree.id());
3328
3329 let buffer = project
3330 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3331 .await
3332 .unwrap();
3333 buffer
3334 .update(&mut cx, |buffer, cx| {
3335 assert_eq!(buffer.text(), "the old contents");
3336 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3337 buffer.save(cx)
3338 })
3339 .await
3340 .unwrap();
3341
3342 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3343 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3344 }
3345
3346 #[gpui::test]
3347 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
3348 let fs = Arc::new(FakeFs::new(cx.background()));
3349 fs.insert_tree(
3350 "/dir",
3351 json!({
3352 "file1": "the old contents",
3353 }),
3354 )
3355 .await;
3356
3357 let project = Project::test(fs.clone(), &mut cx);
3358 let worktree_id = project
3359 .update(&mut cx, |p, cx| {
3360 p.find_or_create_local_worktree("/dir/file1", false, cx)
3361 })
3362 .await
3363 .unwrap()
3364 .0
3365 .read_with(&cx, |tree, _| tree.id());
3366
3367 let buffer = project
3368 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3369 .await
3370 .unwrap();
3371 buffer
3372 .update(&mut cx, |buffer, cx| {
3373 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3374 buffer.save(cx)
3375 })
3376 .await
3377 .unwrap();
3378
3379 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3380 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3381 }
3382
3383 #[gpui::test(retries = 5)]
3384 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3385 let dir = temp_tree(json!({
3386 "a": {
3387 "file1": "",
3388 "file2": "",
3389 "file3": "",
3390 },
3391 "b": {
3392 "c": {
3393 "file4": "",
3394 "file5": "",
3395 }
3396 }
3397 }));
3398
3399 let project = Project::test(Arc::new(RealFs), &mut cx);
3400 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3401
3402 let (tree, _) = project
3403 .update(&mut cx, |p, cx| {
3404 p.find_or_create_local_worktree(dir.path(), false, cx)
3405 })
3406 .await
3407 .unwrap();
3408 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3409
3410 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3411 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3412 async move { buffer.await.unwrap() }
3413 };
3414 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3415 tree.read_with(cx, |tree, _| {
3416 tree.entry_for_path(path)
3417 .expect(&format!("no entry for path {}", path))
3418 .id
3419 })
3420 };
3421
3422 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3423 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3424 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3425 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3426
3427 let file2_id = id_for_path("a/file2", &cx);
3428 let file3_id = id_for_path("a/file3", &cx);
3429 let file4_id = id_for_path("b/c/file4", &cx);
3430
3431 // Wait for the initial scan.
3432 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3433 .await;
3434
3435 // Create a remote copy of this worktree.
3436 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
3437 let (remote, load_task) = cx.update(|cx| {
3438 Worktree::remote(
3439 1,
3440 1,
3441 initial_snapshot.to_proto(&Default::default(), Default::default()),
3442 rpc.clone(),
3443 cx,
3444 )
3445 });
3446 load_task.await;
3447
3448 cx.read(|cx| {
3449 assert!(!buffer2.read(cx).is_dirty());
3450 assert!(!buffer3.read(cx).is_dirty());
3451 assert!(!buffer4.read(cx).is_dirty());
3452 assert!(!buffer5.read(cx).is_dirty());
3453 });
3454
3455 // Rename and delete files and directories.
3456 tree.flush_fs_events(&cx).await;
3457 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3458 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3459 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3460 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3461 tree.flush_fs_events(&cx).await;
3462
3463 let expected_paths = vec![
3464 "a",
3465 "a/file1",
3466 "a/file2.new",
3467 "b",
3468 "d",
3469 "d/file3",
3470 "d/file4",
3471 ];
3472
3473 cx.read(|app| {
3474 assert_eq!(
3475 tree.read(app)
3476 .paths()
3477 .map(|p| p.to_str().unwrap())
3478 .collect::<Vec<_>>(),
3479 expected_paths
3480 );
3481
3482 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3483 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3484 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3485
3486 assert_eq!(
3487 buffer2.read(app).file().unwrap().path().as_ref(),
3488 Path::new("a/file2.new")
3489 );
3490 assert_eq!(
3491 buffer3.read(app).file().unwrap().path().as_ref(),
3492 Path::new("d/file3")
3493 );
3494 assert_eq!(
3495 buffer4.read(app).file().unwrap().path().as_ref(),
3496 Path::new("d/file4")
3497 );
3498 assert_eq!(
3499 buffer5.read(app).file().unwrap().path().as_ref(),
3500 Path::new("b/c/file5")
3501 );
3502
3503 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3504 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3505 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3506 assert!(buffer5.read(app).file().unwrap().is_deleted());
3507 });
3508
3509 // Update the remote worktree. Check that it becomes consistent with the
3510 // local worktree.
3511 remote.update(&mut cx, |remote, cx| {
3512 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
3513 &initial_snapshot,
3514 1,
3515 1,
3516 0,
3517 true,
3518 );
3519 remote
3520 .as_remote_mut()
3521 .unwrap()
3522 .snapshot
3523 .apply_remote_update(update_message)
3524 .unwrap();
3525
3526 assert_eq!(
3527 remote
3528 .paths()
3529 .map(|p| p.to_str().unwrap())
3530 .collect::<Vec<_>>(),
3531 expected_paths
3532 );
3533 });
3534 }
3535
3536 #[gpui::test]
3537 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3538 let fs = Arc::new(FakeFs::new(cx.background()));
3539 fs.insert_tree(
3540 "/the-dir",
3541 json!({
3542 "a.txt": "a-contents",
3543 "b.txt": "b-contents",
3544 }),
3545 )
3546 .await;
3547
3548 let project = Project::test(fs.clone(), &mut cx);
3549 let worktree_id = project
3550 .update(&mut cx, |p, cx| {
3551 p.find_or_create_local_worktree("/the-dir", false, cx)
3552 })
3553 .await
3554 .unwrap()
3555 .0
3556 .read_with(&cx, |tree, _| tree.id());
3557
3558 // Spawn multiple tasks to open paths, repeating some paths.
3559 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3560 (
3561 p.open_buffer((worktree_id, "a.txt"), cx),
3562 p.open_buffer((worktree_id, "b.txt"), cx),
3563 p.open_buffer((worktree_id, "a.txt"), cx),
3564 )
3565 });
3566
3567 let buffer_a_1 = buffer_a_1.await.unwrap();
3568 let buffer_a_2 = buffer_a_2.await.unwrap();
3569 let buffer_b = buffer_b.await.unwrap();
3570 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3571 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3572
3573 // There is only one buffer per path.
3574 let buffer_a_id = buffer_a_1.id();
3575 assert_eq!(buffer_a_2.id(), buffer_a_id);
3576
3577 // Open the same path again while it is still open.
3578 drop(buffer_a_1);
3579 let buffer_a_3 = project
3580 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3581 .await
3582 .unwrap();
3583
3584 // There's still only one buffer per path.
3585 assert_eq!(buffer_a_3.id(), buffer_a_id);
3586 }
3587
    #[gpui::test]
    async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
        // Exercises the buffer's dirty-state tracking and the events emitted
        // through the edit/save/delete lifecycle.
        use std::fs;

        let dir = temp_tree(json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }));

        let project = Project::test(Arc::new(RealFs), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());

        worktree.flush_fs_events(&cx).await;
        worktree
            .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let buffer1 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
            .await
            .unwrap();
        // Record every buffer event so each phase below can assert on the
        // exact sequence emitted.
        let events = Rc::new(RefCell::new(Vec::new()));

        // initially, the buffer isn't dirty.
        buffer1.update(&mut cx, |buffer, cx| {
            cx.subscribe(&buffer1, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();

            assert!(!buffer.is_dirty());
            assert!(events.borrow().is_empty());

            buffer.edit(vec![1..2], "", cx);
        });

        // after the first edit, the buffer is dirty, and emits a dirtied event.
        buffer1.update(&mut cx, |buffer, cx| {
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[language::Event::Edited, language::Event::Dirtied]
            );
            events.borrow_mut().clear();
            // Simulate a save without going through the filesystem.
            buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
        });

        // after saving, the buffer is not dirty, and emits a saved event.
        buffer1.update(&mut cx, |buffer, cx| {
            assert!(!buffer.is_dirty());
            assert_eq!(*events.borrow(), &[language::Event::Saved]);
            events.borrow_mut().clear();

            buffer.edit(vec![1..1], "B", cx);
            buffer.edit(vec![2..2], "D", cx);
        });

        // after editing again, the buffer is dirty, and emits another dirty event.
        buffer1.update(&mut cx, |buffer, cx| {
            assert!(buffer.text() == "aBDc");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[
                    language::Event::Edited,
                    language::Event::Dirtied,
                    language::Event::Edited,
                ],
            );
            events.borrow_mut().clear();

            // TODO - currently, after restoring the buffer to its
            // previously-saved state, it is still considered dirty.
            buffer.edit([1..3], "", cx);
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
        });

        assert_eq!(*events.borrow(), &[language::Event::Edited]);

        // When a file is deleted, the buffer is considered dirty.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer2 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
            .await
            .unwrap();
        buffer2.update(&mut cx, |_, cx| {
            cx.subscribe(&buffer2, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        fs::remove_file(dir.path().join("file2")).unwrap();
        buffer2.condition(&cx, |b, _| b.is_dirty()).await;
        assert_eq!(
            *events.borrow(),
            &[language::Event::Dirtied, language::Event::FileHandleChanged]
        );

        // When a file is already dirty when deleted, we don't emit a Dirtied event.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer3 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
            .await
            .unwrap();
        buffer3.update(&mut cx, |_, cx| {
            cx.subscribe(&buffer3, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        worktree.flush_fs_events(&cx).await;
        // Dirty the buffer first, then delete its file.
        buffer3.update(&mut cx, |buffer, cx| {
            buffer.edit(Some(0..0), "x", cx);
        });
        events.borrow_mut().clear();
        fs::remove_file(dir.path().join("file3")).unwrap();
        buffer3
            .condition(&cx, |_, _| !events.borrow().is_empty())
            .await;
        assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
        cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
    }
3724
    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
        // Verifies how a buffer reacts to its file changing on disk: a clean
        // buffer reloads; a dirty buffer keeps its edits and is marked as
        // conflicted instead.
        use std::fs;

        let initial_contents = "aaa\nbbbbb\nc\n";
        let dir = temp_tree(json!({ "the-file": initial_contents }));

        let project = Project::test(Arc::new(RealFs), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        worktree
            .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let abs_path = dir.path().join("the-file");
        let buffer = project
            .update(&mut cx, |p, cx| {
                p.open_buffer((worktree_id, "the-file"), cx)
            })
            .await
            .unwrap();

        // TODO
        // Add a cursor on each row.
        // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
        //     assert!(!buffer.is_dirty());
        //     buffer.add_selection_set(
        //         &(0..3)
        //             .map(|row| Selection {
        //                 id: row as usize,
        //                 start: Point::new(row, 1),
        //                 end: Point::new(row, 1),
        //                 reversed: false,
        //                 goal: SelectionGoal::None,
        //             })
        //             .collect::<Vec<_>>(),
        //         cx,
        //     )
        // });

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(&cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        // The reload leaves the buffer clean and conflict-free.
        buffer.update(&mut cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer
        buffer.update(&mut cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }
3824
    #[gpui::test]
    async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
        // Verifies that LSP diagnostics connected through `relatedInformation`
        // are merged into groups: each group gets one primary entry plus its
        // supporting hints, all sharing a `group_id`.
        let fs = Arc::new(FakeFs::new(cx.background()));
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        // Build a publishDiagnostics payload containing two logical groups:
        // "error 1" (a warning with one hint) and "error 2" (an error with two
        // hints). Hints reference their primaries (and vice versa) via
        // `related_information`.
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(&mut cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());

        // All diagnostics, ordered by position. Group 0 is "error 1" and its
        // hint; group 1 is "error 2" and its two hints. Exactly one entry per
        // group has `is_primary: true`.
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        // Querying by group id returns only that group's entries.
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
4083}