1pub mod fs;
2mod ignore;
3pub mod worktree;
4
5use anyhow::{anyhow, Result};
6use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
7use clock::ReplicaId;
8use collections::{hash_map, HashMap, HashSet};
9use futures::Future;
10use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
11use gpui::{
12 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
13 UpgradeModelHandle, WeakModelHandle,
14};
15use language::{
16 point_from_lsp,
17 proto::{deserialize_anchor, serialize_anchor},
18 range_from_lsp, AnchorRangeExt, Bias, Buffer, CodeAction, Completion, CompletionLabel,
19 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
20 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
21};
22use lsp::{DiagnosticSeverity, LanguageServer};
23use postage::{broadcast, prelude::Stream, sink::Sink, watch};
24use smol::block_on;
25use std::{
26 convert::TryInto,
27 ops::Range,
28 path::{Path, PathBuf},
29 sync::{atomic::AtomicBool, Arc},
30 time::Instant,
31};
32use util::{post_inc, ResultExt, TryFutureExt as _};
33
34pub use fs::*;
35pub use worktree::*;
36
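/// The root model of an open workspace: it owns the worktrees, the buffers
/// that have been opened, the language servers started for them, and the
/// collaboration state. A project is either `Local` (this instance is the
/// host) or `Remote` (a guest joined over RPC).
///
/// Construction sketch (illustrative only; assumes `client`, `user_store`,
/// `languages`, and `fs` are already available):
///
/// ```ignore
/// let project = Project::local(client, user_store, languages, fs, cx);
/// // Worktrees are added afterwards, e.g. via `find_or_create_local_worktree`.
/// ```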
37pub struct Project {
38 worktrees: Vec<WorktreeHandle>,
39 active_entry: Option<ProjectEntry>,
40 languages: Arc<LanguageRegistry>,
41 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
42 client: Arc<client::Client>,
43 user_store: ModelHandle<UserStore>,
44 fs: Arc<dyn Fs>,
45 client_state: ProjectClientState,
46 collaborators: HashMap<PeerId, Collaborator>,
47 subscriptions: Vec<client::Subscription>,
48 language_servers_with_diagnostics_running: isize,
49 open_buffers: HashMap<usize, OpenBuffer>,
50 opened_buffer: broadcast::Sender<()>,
51 loading_buffers: HashMap<
52 ProjectPath,
53 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
54 >,
55 shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
56}
57
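/// State the project keeps for a buffer it knows about. Operations that
/// arrive (e.g. from collaborators) before the buffer has finished opening
/// are queued as `Operations` and applied when the buffer is registered.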
58enum OpenBuffer {
59 Loaded(WeakModelHandle<Buffer>),
60 Operations(Vec<Operation>),
61}
62
63enum WorktreeHandle {
64 Strong(ModelHandle<Worktree>),
65 Weak(WeakModelHandle<Worktree>),
66}
67
68enum ProjectClientState {
69 Local {
70 is_shared: bool,
71 remote_id_tx: watch::Sender<Option<u64>>,
72 remote_id_rx: watch::Receiver<Option<u64>>,
73 _maintain_remote_id_task: Task<Option<()>>,
74 },
75 Remote {
76 sharing_has_stopped: bool,
77 remote_id: u64,
78 replica_id: ReplicaId,
79 },
80}
81
82#[derive(Clone, Debug)]
83pub struct Collaborator {
84 pub user: Arc<User>,
85 pub peer_id: PeerId,
86 pub replica_id: ReplicaId,
87}
88
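/// Events emitted by a [`Project`] for observers such as the workspace UI.
///
/// Subscription sketch (illustrative only; shown from inside another model's
/// `ModelContext`, with `project: ModelHandle<Project>`):
///
/// ```ignore
/// cx.subscribe(&project, |_this, _project, event, _cx| match event {
///     Event::DiskBasedDiagnosticsFinished => {
///         // e.g. refresh a diagnostics panel
///     }
///     _ => {}
/// })
/// .detach();
/// ```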
89#[derive(Clone, Debug, PartialEq)]
90pub enum Event {
91 ActiveEntryChanged(Option<ProjectEntry>),
92 WorktreeRemoved(WorktreeId),
93 DiskBasedDiagnosticsStarted,
94 DiskBasedDiagnosticsUpdated,
95 DiskBasedDiagnosticsFinished,
96 DiagnosticsUpdated(ProjectPath),
97}
98
99#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
100pub struct ProjectPath {
101 pub worktree_id: WorktreeId,
102 pub path: Arc<Path>,
103}
104
105#[derive(Clone, Debug, Default, PartialEq)]
106pub struct DiagnosticSummary {
107 pub error_count: usize,
108 pub warning_count: usize,
109 pub info_count: usize,
110 pub hint_count: usize,
111}
112
113#[derive(Debug)]
114pub struct Definition {
115 pub target_buffer: ModelHandle<Buffer>,
116 pub target_range: Range<language::Anchor>,
117}
118
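/// The buffer transactions produced by an operation that can touch several
/// buffers at once (formatting, applying a code action), keyed by the buffer
/// each transaction belongs to, so the whole operation can be surfaced to the
/// caller at once.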
119#[derive(Default)]
120pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
121
122impl DiagnosticSummary {
123 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
124 let mut this = Self {
125 error_count: 0,
126 warning_count: 0,
127 info_count: 0,
128 hint_count: 0,
129 };
130
131 for entry in diagnostics {
132 if entry.diagnostic.is_primary {
133 match entry.diagnostic.severity {
134 DiagnosticSeverity::ERROR => this.error_count += 1,
135 DiagnosticSeverity::WARNING => this.warning_count += 1,
136 DiagnosticSeverity::INFORMATION => this.info_count += 1,
137 DiagnosticSeverity::HINT => this.hint_count += 1,
138 _ => {}
139 }
140 }
141 }
142
143 this
144 }
145
146 pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
147 proto::DiagnosticSummary {
148 path: path.to_string_lossy().to_string(),
149 error_count: self.error_count as u32,
150 warning_count: self.warning_count as u32,
151 info_count: self.info_count as u32,
152 hint_count: self.hint_count as u32,
153 }
154 }
155}
156
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
158pub struct ProjectEntry {
159 pub worktree_id: WorktreeId,
160 pub entry_id: usize,
161}
162
163impl Project {
164 pub fn local(
165 client: Arc<Client>,
166 user_store: ModelHandle<UserStore>,
167 languages: Arc<LanguageRegistry>,
168 fs: Arc<dyn Fs>,
169 cx: &mut MutableAppContext,
170 ) -> ModelHandle<Self> {
171 cx.add_model(|cx: &mut ModelContext<Self>| {
172 let (remote_id_tx, remote_id_rx) = watch::channel();
173 let _maintain_remote_id_task = cx.spawn_weak({
174 let rpc = client.clone();
175 move |this, mut cx| {
176 async move {
177 let mut status = rpc.status();
178 while let Some(status) = status.recv().await {
179 if let Some(this) = this.upgrade(&cx) {
180 let remote_id = if let client::Status::Connected { .. } = status {
181 let response = rpc.request(proto::RegisterProject {}).await?;
182 Some(response.project_id)
183 } else {
184 None
185 };
186
187 if let Some(project_id) = remote_id {
188 let mut registrations = Vec::new();
189 this.update(&mut cx, |this, cx| {
190 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
191 registrations.push(worktree.update(
192 cx,
193 |worktree, cx| {
194 let worktree = worktree.as_local_mut().unwrap();
195 worktree.register(project_id, cx)
196 },
197 ));
198 }
199 });
200 for registration in registrations {
201 registration.await?;
202 }
203 }
204 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
205 }
206 }
207 Ok(())
208 }
209 .log_err()
210 }
211 });
212
213 Self {
214 worktrees: Default::default(),
215 collaborators: Default::default(),
216 open_buffers: Default::default(),
217 loading_buffers: Default::default(),
218 shared_buffers: Default::default(),
219 client_state: ProjectClientState::Local {
220 is_shared: false,
221 remote_id_tx,
222 remote_id_rx,
223 _maintain_remote_id_task,
224 },
225 opened_buffer: broadcast::channel(1).0,
226 subscriptions: Vec::new(),
227 active_entry: None,
228 languages,
229 client,
230 user_store,
231 fs,
232 language_servers_with_diagnostics_running: 0,
233 language_servers: Default::default(),
234 }
235 })
236 }
237
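/// Join a project that another peer is hosting: connects to the server,
/// loads the host's worktrees and collaborators, and wires up the message
/// handlers that keep this replica in sync.
///
/// Usage sketch (illustrative only; assumes a known `project_id` and an
/// authenticated `client`):
///
/// ```ignore
/// let project = Project::remote(
///     project_id,
///     client,
///     user_store,
///     languages,
///     fs,
///     &mut cx,
/// )
/// .await?;
/// ```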
238 pub async fn remote(
239 remote_id: u64,
240 client: Arc<Client>,
241 user_store: ModelHandle<UserStore>,
242 languages: Arc<LanguageRegistry>,
243 fs: Arc<dyn Fs>,
244 cx: &mut AsyncAppContext,
245 ) -> Result<ModelHandle<Self>> {
246 client.authenticate_and_connect(&cx).await?;
247
248 let response = client
249 .request(proto::JoinProject {
250 project_id: remote_id,
251 })
252 .await?;
253
254 let replica_id = response.replica_id as ReplicaId;
255
256 let mut worktrees = Vec::new();
257 for worktree in response.worktrees {
258 let (worktree, load_task) = cx
259 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
260 worktrees.push(worktree);
261 load_task.detach();
262 }
263
264 let user_ids = response
265 .collaborators
266 .iter()
267 .map(|peer| peer.user_id)
268 .collect();
269 user_store
270 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
271 .await?;
272 let mut collaborators = HashMap::default();
273 for message in response.collaborators {
274 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
275 collaborators.insert(collaborator.peer_id, collaborator);
276 }
277
278 Ok(cx.add_model(|cx| {
279 let mut this = Self {
280 worktrees: Vec::new(),
281 open_buffers: Default::default(),
282 loading_buffers: Default::default(),
283 opened_buffer: broadcast::channel(1).0,
284 shared_buffers: Default::default(),
285 active_entry: None,
286 collaborators,
287 languages,
288 user_store,
289 fs,
290 subscriptions: vec![
291 client.add_entity_message_handler(remote_id, cx, Self::handle_unshare_project),
292 client.add_entity_message_handler(remote_id, cx, Self::handle_add_collaborator),
293 client.add_entity_message_handler(
294 remote_id,
295 cx,
296 Self::handle_remove_collaborator,
297 ),
298 client.add_entity_message_handler(remote_id, cx, Self::handle_share_worktree),
299 client.add_entity_message_handler(
300 remote_id,
301 cx,
302 Self::handle_unregister_worktree,
303 ),
304 client.add_entity_message_handler(remote_id, cx, Self::handle_update_worktree),
305 client.add_entity_message_handler(
306 remote_id,
307 cx,
308 Self::handle_update_diagnostic_summary,
309 ),
310 client.add_entity_message_handler(
311 remote_id,
312 cx,
313 Self::handle_disk_based_diagnostics_updating,
314 ),
315 client.add_entity_message_handler(
316 remote_id,
317 cx,
318 Self::handle_disk_based_diagnostics_updated,
319 ),
320 client.add_entity_message_handler(remote_id, cx, Self::handle_update_buffer),
321 client.add_entity_message_handler(
322 remote_id,
323 cx,
324 Self::handle_update_buffer_file,
325 ),
326 client.add_entity_message_handler(remote_id, cx, Self::handle_buffer_reloaded),
327 client.add_entity_message_handler(remote_id, cx, Self::handle_buffer_saved),
328 ],
329 client,
330 client_state: ProjectClientState::Remote {
331 sharing_has_stopped: false,
332 remote_id,
333 replica_id,
334 },
335 language_servers_with_diagnostics_running: 0,
336 language_servers: Default::default(),
337 };
338 for worktree in worktrees {
339 this.add_worktree(&worktree, cx);
340 }
341 this
342 }))
343 }
344
345 #[cfg(any(test, feature = "test-support"))]
346 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
347 let languages = Arc::new(LanguageRegistry::new());
348 let http_client = client::test::FakeHttpClient::with_404_response();
349 let client = client::Client::new(http_client.clone());
350 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
351 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
352 }
353
354 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
355 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
356 *remote_id_tx.borrow_mut() = remote_id;
357 }
358
359 self.subscriptions.clear();
360 if let Some(remote_id) = remote_id {
361 let client = &self.client;
362 self.subscriptions.extend([
363 client.add_entity_request_handler(remote_id, cx, Self::handle_open_buffer),
364 client.add_entity_message_handler(remote_id, cx, Self::handle_close_buffer),
365 client.add_entity_message_handler(remote_id, cx, Self::handle_add_collaborator),
366 client.add_entity_message_handler(remote_id, cx, Self::handle_remove_collaborator),
367 client.add_entity_message_handler(remote_id, cx, Self::handle_update_worktree),
368 client.add_entity_message_handler(remote_id, cx, Self::handle_update_buffer),
369 client.add_entity_request_handler(remote_id, cx, Self::handle_save_buffer),
370 client.add_entity_message_handler(remote_id, cx, Self::handle_buffer_saved),
371 client.add_entity_request_handler(remote_id, cx, Self::handle_format_buffers),
372 client.add_entity_request_handler(remote_id, cx, Self::handle_get_completions),
373 client.add_entity_request_handler(
374 remote_id,
375 cx,
376 Self::handle_apply_additional_edits_for_completion,
377 ),
378 client.add_entity_request_handler(remote_id, cx, Self::handle_get_code_actions),
379 client.add_entity_request_handler(remote_id, cx, Self::handle_apply_code_action),
380 client.add_entity_request_handler(remote_id, cx, Self::handle_get_definition),
381 ]);
382 }
383 }
384
385 pub fn remote_id(&self) -> Option<u64> {
386 match &self.client_state {
387 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
388 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
389 }
390 }
391
392 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
393 let mut id = None;
394 let mut watch = None;
395 match &self.client_state {
396 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
397 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
398 }
399
400 async move {
401 if let Some(id) = id {
402 return id;
403 }
404 let mut watch = watch.unwrap();
405 loop {
406 let id = *watch.borrow();
407 if let Some(id) = id {
408 return id;
409 }
410 watch.recv().await;
411 }
412 }
413 }
414
415 pub fn replica_id(&self) -> ReplicaId {
416 match &self.client_state {
417 ProjectClientState::Local { .. } => 0,
418 ProjectClientState::Remote { replica_id, .. } => *replica_id,
419 }
420 }
421
422 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
423 &self.collaborators
424 }
425
426 pub fn worktrees<'a>(
427 &'a self,
428 cx: &'a AppContext,
429 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
430 self.worktrees
431 .iter()
432 .filter_map(move |worktree| worktree.upgrade(cx))
433 }
434
435 pub fn worktree_for_id(
436 &self,
437 id: WorktreeId,
438 cx: &AppContext,
439 ) -> Option<ModelHandle<Worktree>> {
440 self.worktrees(cx)
441 .find(|worktree| worktree.read(cx).id() == id)
442 }
443
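/// Begin sharing this local project: registers the share with the server and
/// shares every worktree so collaborators can join. Fails on a project that
/// is itself remote.
///
/// ```ignore
/// // Illustrative only; assumes the project has already been assigned a
/// // remote id by the server.
/// let share = project.update(cx, |project, cx| project.share(cx));
/// share.await?;
/// ```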
444 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
445 let rpc = self.client.clone();
446 cx.spawn(|this, mut cx| async move {
447 let project_id = this.update(&mut cx, |this, _| {
448 if let ProjectClientState::Local {
449 is_shared,
450 remote_id_rx,
451 ..
452 } = &mut this.client_state
453 {
454 *is_shared = true;
455 remote_id_rx
456 .borrow()
457 .ok_or_else(|| anyhow!("no project id"))
458 } else {
459 Err(anyhow!("can't share a remote project"))
460 }
461 })?;
462
463 rpc.request(proto::ShareProject { project_id }).await?;
464 let mut tasks = Vec::new();
465 this.update(&mut cx, |this, cx| {
466 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
467 worktree.update(cx, |worktree, cx| {
468 let worktree = worktree.as_local_mut().unwrap();
469 tasks.push(worktree.share(project_id, cx));
470 });
471 }
472 });
473 for task in tasks {
474 task.await?;
475 }
476 this.update(&mut cx, |_, cx| cx.notify());
477 Ok(())
478 })
479 }
480
481 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
482 let rpc = self.client.clone();
483 cx.spawn(|this, mut cx| async move {
484 let project_id = this.update(&mut cx, |this, _| {
485 if let ProjectClientState::Local {
486 is_shared,
487 remote_id_rx,
488 ..
489 } = &mut this.client_state
490 {
491 *is_shared = false;
492 remote_id_rx
493 .borrow()
494 .ok_or_else(|| anyhow!("no project id"))
495 } else {
496 Err(anyhow!("can't share a remote project"))
497 }
498 })?;
499
500 rpc.send(proto::UnshareProject { project_id })?;
501 this.update(&mut cx, |this, cx| {
502 this.collaborators.clear();
503 this.shared_buffers.clear();
504 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
505 worktree.update(cx, |worktree, _| {
506 worktree.as_local_mut().unwrap().unshare();
507 });
508 }
509 cx.notify()
510 });
511 Ok(())
512 })
513 }
514
515 pub fn is_read_only(&self) -> bool {
516 match &self.client_state {
517 ProjectClientState::Local { .. } => false,
518 ProjectClientState::Remote {
519 sharing_has_stopped,
520 ..
521 } => *sharing_has_stopped,
522 }
523 }
524
525 pub fn is_local(&self) -> bool {
526 match &self.client_state {
527 ProjectClientState::Local { .. } => true,
528 ProjectClientState::Remote { .. } => false,
529 }
530 }
531
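/// Open the buffer at the given project path. Requests are de-duplicated: an
/// already-open buffer is returned immediately, and a path that is still
/// loading shares the in-flight load instead of starting a second one.
///
/// Usage sketch (illustrative only; `worktree_id` refers to an existing
/// worktree):
///
/// ```ignore
/// let open_task = project.update(cx, |project, cx| {
///     project.open_buffer(
///         ProjectPath {
///             worktree_id,
///             path: Path::new("src/main.rs").into(),
///         },
///         cx,
///     )
/// });
/// let buffer = open_task.await?;
/// ```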
532 pub fn open_buffer(
533 &mut self,
534 path: impl Into<ProjectPath>,
535 cx: &mut ModelContext<Self>,
536 ) -> Task<Result<ModelHandle<Buffer>>> {
537 let project_path = path.into();
538 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
539 worktree
540 } else {
541 return Task::ready(Err(anyhow!("no such worktree")));
542 };
543
544 // If there is already a buffer for the given path, then return it.
545 let existing_buffer = self.get_open_buffer(&project_path, cx);
546 if let Some(existing_buffer) = existing_buffer {
547 return Task::ready(Ok(existing_buffer));
548 }
549
550 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
551 // If the given path is already being loaded, then wait for that existing
552 // task to complete and return the same buffer.
553 hash_map::Entry::Occupied(e) => e.get().clone(),
554
555 // Otherwise, record the fact that this path is now being loaded.
556 hash_map::Entry::Vacant(entry) => {
557 let (mut tx, rx) = postage::watch::channel();
558 entry.insert(rx.clone());
559
560 let load_buffer = if worktree.read(cx).is_local() {
561 self.open_local_buffer(&project_path.path, &worktree, cx)
562 } else {
563 self.open_remote_buffer(&project_path.path, &worktree, cx)
564 };
565
566 cx.spawn(move |this, mut cx| async move {
567 let load_result = load_buffer.await;
568 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
569 // Record the fact that the buffer is no longer loading.
570 this.loading_buffers.remove(&project_path);
571 let buffer = load_result.map_err(Arc::new)?;
572 Ok(buffer)
573 }));
574 })
575 .detach();
576 rx
577 }
578 };
579
580 cx.foreground().spawn(async move {
581 loop {
582 if let Some(result) = loading_watch.borrow().as_ref() {
583 match result {
584 Ok(buffer) => return Ok(buffer.clone()),
585 Err(error) => return Err(anyhow!("{}", error)),
586 }
587 }
588 loading_watch.recv().await;
589 }
590 })
591 }
592
593 fn open_local_buffer(
594 &mut self,
595 path: &Arc<Path>,
596 worktree: &ModelHandle<Worktree>,
597 cx: &mut ModelContext<Self>,
598 ) -> Task<Result<ModelHandle<Buffer>>> {
599 let load_buffer = worktree.update(cx, |worktree, cx| {
600 let worktree = worktree.as_local_mut().unwrap();
601 worktree.load_buffer(path, cx)
602 });
603 let worktree = worktree.downgrade();
604 cx.spawn(|this, mut cx| async move {
605 let buffer = load_buffer.await?;
606 let worktree = worktree
607 .upgrade(&cx)
608 .ok_or_else(|| anyhow!("worktree was removed"))?;
609 this.update(&mut cx, |this, cx| {
610 this.register_buffer(&buffer, Some(&worktree), cx)
611 })?;
612 Ok(buffer)
613 })
614 }
615
616 fn open_remote_buffer(
617 &mut self,
618 path: &Arc<Path>,
619 worktree: &ModelHandle<Worktree>,
620 cx: &mut ModelContext<Self>,
621 ) -> Task<Result<ModelHandle<Buffer>>> {
622 let rpc = self.client.clone();
623 let project_id = self.remote_id().unwrap();
624 let remote_worktree_id = worktree.read(cx).id();
625 let path = path.clone();
626 let path_string = path.to_string_lossy().to_string();
627 cx.spawn(|this, mut cx| async move {
628 let response = rpc
629 .request(proto::OpenBuffer {
630 project_id,
631 worktree_id: remote_worktree_id.to_proto(),
632 path: path_string,
633 })
634 .await?;
635 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
636 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
637 .await
638 })
639 }
640
641 fn open_local_buffer_from_lsp_path(
642 &mut self,
643 abs_path: lsp::Url,
644 lang_name: String,
645 lang_server: Arc<LanguageServer>,
646 cx: &mut ModelContext<Self>,
647 ) -> Task<Result<ModelHandle<Buffer>>> {
648 cx.spawn(|this, mut cx| async move {
649 let abs_path = abs_path
650 .to_file_path()
651 .map_err(|_| anyhow!("can't convert URI to path"))?;
652 let (worktree, relative_path) = if let Some(result) =
653 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
654 {
655 result
656 } else {
657 let worktree = this
658 .update(&mut cx, |this, cx| {
659 this.create_local_worktree(&abs_path, true, cx)
660 })
661 .await?;
662 this.update(&mut cx, |this, cx| {
663 this.language_servers
664 .insert((worktree.read(cx).id(), lang_name), lang_server);
665 });
666 (worktree, PathBuf::new())
667 };
668
669 let project_path = ProjectPath {
670 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
671 path: relative_path.into(),
672 };
673 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
674 .await
675 })
676 }
677
678 pub fn save_buffer_as(
679 &self,
680 buffer: ModelHandle<Buffer>,
681 abs_path: PathBuf,
682 cx: &mut ModelContext<Project>,
683 ) -> Task<Result<()>> {
684 let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
685 cx.spawn(|this, mut cx| async move {
686 let (worktree, path) = worktree_task.await?;
687 worktree
688 .update(&mut cx, |worktree, cx| {
689 worktree
690 .as_local_mut()
691 .unwrap()
692 .save_buffer_as(buffer.clone(), path, cx)
693 })
694 .await?;
695 this.update(&mut cx, |this, cx| {
696 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
697 });
698 Ok(())
699 })
700 }
701
702 #[cfg(any(test, feature = "test-support"))]
703 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
704 let path = path.into();
705 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
706 self.open_buffers.iter().any(|(_, buffer)| {
707 if let Some(buffer) = buffer.upgrade(cx) {
708 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
709 if file.worktree == worktree && file.path() == &path.path {
710 return true;
711 }
712 }
713 }
714 false
715 })
716 } else {
717 false
718 }
719 }
720
721 fn get_open_buffer(
722 &mut self,
723 path: &ProjectPath,
724 cx: &mut ModelContext<Self>,
725 ) -> Option<ModelHandle<Buffer>> {
726 let mut result = None;
727 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
728 self.open_buffers.retain(|_, buffer| {
729 if let Some(buffer) = buffer.upgrade(cx) {
730 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
731 if file.worktree == worktree && file.path() == &path.path {
732 result = Some(buffer);
733 }
734 }
735 true
736 } else {
737 false
738 }
739 });
740 result
741 }
742
743 fn register_buffer(
744 &mut self,
745 buffer: &ModelHandle<Buffer>,
746 worktree: Option<&ModelHandle<Worktree>>,
747 cx: &mut ModelContext<Self>,
748 ) -> Result<()> {
749 match self.open_buffers.insert(
750 buffer.read(cx).remote_id() as usize,
751 OpenBuffer::Loaded(buffer.downgrade()),
752 ) {
753 None => {}
754 Some(OpenBuffer::Operations(operations)) => {
755 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
756 }
757 Some(OpenBuffer::Loaded(_)) => Err(anyhow!("registered the same buffer twice"))?,
758 }
759 self.assign_language_to_buffer(&buffer, worktree, cx);
760 Ok(())
761 }
762
763 fn assign_language_to_buffer(
764 &mut self,
765 buffer: &ModelHandle<Buffer>,
766 worktree: Option<&ModelHandle<Worktree>>,
767 cx: &mut ModelContext<Self>,
768 ) -> Option<()> {
769 let (path, full_path) = {
770 let file = buffer.read(cx).file()?;
771 (file.path().clone(), file.full_path(cx))
772 };
773
774 // If the buffer has a language, set it and start/assign the language server
775 if let Some(language) = self.languages.select_language(&full_path) {
776 buffer.update(cx, |buffer, cx| {
777 buffer.set_language(Some(language.clone()), cx);
778 });
779
780 // For local worktrees, start a language server if needed.
781 // Also assign the language server and any previously stored diagnostics to the buffer.
782 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
783 let worktree_id = local_worktree.id();
784 let worktree_abs_path = local_worktree.abs_path().clone();
785
786 let language_server = match self
787 .language_servers
788 .entry((worktree_id, language.name().to_string()))
789 {
790 hash_map::Entry::Occupied(e) => Some(e.get().clone()),
791 hash_map::Entry::Vacant(e) => Self::start_language_server(
792 self.client.clone(),
793 language.clone(),
794 &worktree_abs_path,
795 cx,
796 )
797 .map(|server| e.insert(server).clone()),
798 };
799
800 buffer.update(cx, |buffer, cx| {
801 buffer.set_language_server(language_server, cx);
802 });
803 }
804 }
805
806 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
807 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
808 buffer.update(cx, |buffer, cx| {
809 buffer.update_diagnostics(diagnostics, None, cx).log_err();
810 });
811 }
812 }
813
814 None
815 }
816
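/// Start a language server for `language` rooted at `worktree_path`, if the
/// language provides one. The server's `textDocument/publishDiagnostics` and
/// `$/progress` notifications are funneled through a channel and handled on a
/// spawned task, which updates local diagnostics and forwards
/// started/finished events to remote collaborators when the project is shared.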
817 fn start_language_server(
818 rpc: Arc<Client>,
819 language: Arc<Language>,
820 worktree_path: &Path,
821 cx: &mut ModelContext<Self>,
822 ) -> Option<Arc<LanguageServer>> {
823 enum LspEvent {
824 DiagnosticsStart,
825 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
826 DiagnosticsFinish,
827 }
828
829 let language_server = language
830 .start_server(worktree_path, cx)
831 .log_err()
832 .flatten()?;
833 let disk_based_sources = language
834 .disk_based_diagnostic_sources()
835 .cloned()
836 .unwrap_or_default();
837 let disk_based_diagnostics_progress_token =
838 language.disk_based_diagnostics_progress_token().cloned();
839 let has_disk_based_diagnostic_progress_token =
840 disk_based_diagnostics_progress_token.is_some();
841 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
842
843 // Listen for `PublishDiagnostics` notifications.
844 language_server
845 .on_notification::<lsp::notification::PublishDiagnostics, _>({
846 let diagnostics_tx = diagnostics_tx.clone();
847 move |params| {
848 if !has_disk_based_diagnostic_progress_token {
849 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
850 }
851 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params))).ok();
852 if !has_disk_based_diagnostic_progress_token {
853 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
854 }
855 }
856 })
857 .detach();
858
859 // Listen for `Progress` notifications. Send an event when the language server
860 // transitions between running jobs and not running any jobs.
861 let mut running_jobs_for_this_server: i32 = 0;
862 language_server
863 .on_notification::<lsp::notification::Progress, _>(move |params| {
864 let token = match params.token {
865 lsp::NumberOrString::Number(_) => None,
866 lsp::NumberOrString::String(token) => Some(token),
867 };
868
869 if token == disk_based_diagnostics_progress_token {
870 match params.value {
871 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
872 lsp::WorkDoneProgress::Begin(_) => {
873 running_jobs_for_this_server += 1;
874 if running_jobs_for_this_server == 1 {
875 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
876 }
877 }
878 lsp::WorkDoneProgress::End(_) => {
879 running_jobs_for_this_server -= 1;
880 if running_jobs_for_this_server == 0 {
881 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
882 }
883 }
884 _ => {}
885 },
886 }
887 }
888 })
889 .detach();
890
891 // Process all the LSP events.
892 cx.spawn_weak(|this, mut cx| async move {
893 while let Ok(message) = diagnostics_rx.recv().await {
894 let this = this.upgrade(&cx)?;
895 match message {
896 LspEvent::DiagnosticsStart => {
897 this.update(&mut cx, |this, cx| {
898 this.disk_based_diagnostics_started(cx);
899 if let Some(project_id) = this.remote_id() {
900 rpc.send(proto::DiskBasedDiagnosticsUpdating { project_id })
901 .log_err();
902 }
903 });
904 }
905 LspEvent::DiagnosticsUpdate(mut params) => {
906 language.process_diagnostics(&mut params);
907 this.update(&mut cx, |this, cx| {
908 this.update_diagnostics(params, &disk_based_sources, cx)
909 .log_err();
910 });
911 }
912 LspEvent::DiagnosticsFinish => {
913 this.update(&mut cx, |this, cx| {
914 this.disk_based_diagnostics_finished(cx);
915 if let Some(project_id) = this.remote_id() {
916 rpc.send(proto::DiskBasedDiagnosticsUpdated { project_id })
917 .log_err();
918 }
919 });
920 }
921 }
922 }
923 Some(())
924 })
925 .detach();
926
927 Some(language_server)
928 }
929
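/// Ingest a `textDocument/publishDiagnostics` notification. Each primary
/// diagnostic is assigned a fresh group id; related-information entries that
/// point back into the same file join that group as supporting diagnostics,
/// and a severity reported separately for a supporting entry overrides the
/// default it is given here.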
930 pub fn update_diagnostics(
931 &mut self,
932 params: lsp::PublishDiagnosticsParams,
933 disk_based_sources: &HashSet<String>,
934 cx: &mut ModelContext<Self>,
935 ) -> Result<()> {
936 let abs_path = params
937 .uri
938 .to_file_path()
939 .map_err(|_| anyhow!("URI is not a file"))?;
940 let mut next_group_id = 0;
941 let mut diagnostics = Vec::default();
942 let mut primary_diagnostic_group_ids = HashMap::default();
943 let mut sources_by_group_id = HashMap::default();
944 let mut supporting_diagnostic_severities = HashMap::default();
945 for diagnostic in &params.diagnostics {
946 let source = diagnostic.source.as_ref();
947 let code = diagnostic.code.as_ref().map(|code| match code {
948 lsp::NumberOrString::Number(code) => code.to_string(),
949 lsp::NumberOrString::String(code) => code.clone(),
950 });
951 let range = range_from_lsp(diagnostic.range);
952 let is_supporting = diagnostic
953 .related_information
954 .as_ref()
955 .map_or(false, |infos| {
956 infos.iter().any(|info| {
957 primary_diagnostic_group_ids.contains_key(&(
958 source,
959 code.clone(),
960 range_from_lsp(info.location.range),
961 ))
962 })
963 });
964
965 if is_supporting {
966 if let Some(severity) = diagnostic.severity {
967 supporting_diagnostic_severities
968 .insert((source, code.clone(), range), severity);
969 }
970 } else {
971 let group_id = post_inc(&mut next_group_id);
972 let is_disk_based =
973 source.map_or(false, |source| disk_based_sources.contains(source));
974
975 sources_by_group_id.insert(group_id, source);
976 primary_diagnostic_group_ids
977 .insert((source, code.clone(), range.clone()), group_id);
978
979 diagnostics.push(DiagnosticEntry {
980 range,
981 diagnostic: Diagnostic {
982 code: code.clone(),
983 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
984 message: diagnostic.message.clone(),
985 group_id,
986 is_primary: true,
987 is_valid: true,
988 is_disk_based,
989 },
990 });
991 if let Some(infos) = &diagnostic.related_information {
992 for info in infos {
993 if info.location.uri == params.uri && !info.message.is_empty() {
994 let range = range_from_lsp(info.location.range);
995 diagnostics.push(DiagnosticEntry {
996 range,
997 diagnostic: Diagnostic {
998 code: code.clone(),
999 severity: DiagnosticSeverity::INFORMATION,
1000 message: info.message.clone(),
1001 group_id,
1002 is_primary: false,
1003 is_valid: true,
1004 is_disk_based,
1005 },
1006 });
1007 }
1008 }
1009 }
1010 }
1011 }
1012
1013 for entry in &mut diagnostics {
1014 let diagnostic = &mut entry.diagnostic;
1015 if !diagnostic.is_primary {
1016 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1017 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1018 source,
1019 diagnostic.code.clone(),
1020 entry.range.clone(),
1021 )) {
1022 diagnostic.severity = severity;
1023 }
1024 }
1025 }
1026
1027 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1028 Ok(())
1029 }
1030
1031 pub fn update_diagnostic_entries(
1032 &mut self,
1033 abs_path: PathBuf,
1034 version: Option<i32>,
1035 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1036 cx: &mut ModelContext<Project>,
1037 ) -> Result<(), anyhow::Error> {
1038 let (worktree, relative_path) = self
1039 .find_local_worktree(&abs_path, cx)
1040 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1041 let project_path = ProjectPath {
1042 worktree_id: worktree.read(cx).id(),
1043 path: relative_path.into(),
1044 };
1045
1046 for buffer in self.open_buffers.values() {
1047 if let Some(buffer) = buffer.upgrade(cx) {
1048 if buffer
1049 .read(cx)
1050 .file()
1051 .map_or(false, |file| *file.path() == project_path.path)
1052 {
1053 buffer.update(cx, |buffer, cx| {
1054 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1055 })?;
1056 break;
1057 }
1058 }
1059 }
1060 worktree.update(cx, |worktree, cx| {
1061 worktree
1062 .as_local_mut()
1063 .ok_or_else(|| anyhow!("not a local worktree"))?
1064 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1065 })?;
1066 cx.emit(Event::DiagnosticsUpdated(project_path));
1067 Ok(())
1068 }
1069
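/// Format the given buffers and collect the resulting edits into a
/// [`ProjectTransaction`]. Buffers backed by local files are formatted via
/// their language server's `textDocument/formatting` request; buffers that
/// live on a remote host are formatted by the host over RPC.
///
/// ```ignore
/// // Illustrative only:
/// let format = project.update(cx, |project, cx| {
///     project.format(HashSet::from_iter([buffer.clone()]), true, cx)
/// });
/// let transaction = format.await?;
/// ```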
1070 pub fn format(
1071 &self,
1072 buffers: HashSet<ModelHandle<Buffer>>,
1073 push_to_history: bool,
1074 cx: &mut ModelContext<Project>,
1075 ) -> Task<Result<ProjectTransaction>> {
1076 let mut local_buffers = Vec::new();
1077 let mut remote_buffers = None;
1078 for buffer_handle in buffers {
1079 let buffer = buffer_handle.read(cx);
1080 let worktree;
1081 if let Some(file) = File::from_dyn(buffer.file()) {
1082 worktree = file.worktree.clone();
1083 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1084 let lang_server;
1085 if let Some(lang) = buffer.language() {
1086 if let Some(server) = self
1087 .language_servers
1088 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1089 {
1090 lang_server = server.clone();
1091 } else {
1092 return Task::ready(Ok(Default::default()));
1093 };
1094 } else {
1095 return Task::ready(Ok(Default::default()));
1096 }
1097
1098 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1099 } else {
1100 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1101 }
1102 } else {
1103 return Task::ready(Ok(Default::default()));
1104 }
1105 }
1106
1107 let remote_buffers = self.remote_id().zip(remote_buffers);
1108 let client = self.client.clone();
1109
1110 cx.spawn(|this, mut cx| async move {
1111 let mut project_transaction = ProjectTransaction::default();
1112
1113 if let Some((project_id, remote_buffers)) = remote_buffers {
1114 let response = client
1115 .request(proto::FormatBuffers {
1116 project_id,
1117 buffer_ids: remote_buffers
1118 .iter()
1119 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1120 .collect(),
1121 })
1122 .await?
1123 .transaction
1124 .ok_or_else(|| anyhow!("missing transaction"))?;
1125 project_transaction = this
1126 .update(&mut cx, |this, cx| {
1127 this.deserialize_project_transaction(response, push_to_history, cx)
1128 })
1129 .await?;
1130 }
1131
1132 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1133 let lsp_edits = lang_server
1134 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1135 text_document: lsp::TextDocumentIdentifier::new(
1136 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1137 ),
1138 options: Default::default(),
1139 work_done_progress_params: Default::default(),
1140 })
1141 .await?;
1142
1143 if let Some(lsp_edits) = lsp_edits {
1144 let edits = buffer
1145 .update(&mut cx, |buffer, cx| {
1146 buffer.edits_from_lsp(lsp_edits, None, cx)
1147 })
1148 .await?;
1149 buffer.update(&mut cx, |buffer, cx| {
1150 buffer.finalize_last_transaction();
1151 buffer.start_transaction();
1152 for (range, text) in edits {
1153 buffer.edit([range], text, cx);
1154 }
1155 if buffer.end_transaction(cx).is_some() {
1156 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1157 if !push_to_history {
1158 buffer.forget_transaction(transaction.id);
1159 }
1160 project_transaction.0.insert(cx.handle(), transaction);
1161 }
1162 });
1163 }
1164 }
1165
1166 Ok(project_transaction)
1167 })
1168 }
1169
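/// Find the definition(s) of the symbol at `position` in the given buffer.
/// Locally this sends `textDocument/definition` to the buffer's language
/// server and opens the target buffers; on a remote project the request is
/// forwarded to the host.
///
/// ```ignore
/// // Illustrative only; `position` is anything convertible to a UTF-16
/// // point, e.g. a `PointUtf16`.
/// let definitions = project
///     .update(cx, |project, cx| project.definition(&buffer, position, cx))
///     .await?;
/// for definition in definitions {
///     // Navigate to definition.target_buffer at definition.target_range.
/// }
/// ```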
1170 pub fn definition<T: ToPointUtf16>(
1171 &self,
1172 source_buffer_handle: &ModelHandle<Buffer>,
1173 position: T,
1174 cx: &mut ModelContext<Self>,
1175 ) -> Task<Result<Vec<Definition>>> {
1176 let source_buffer_handle = source_buffer_handle.clone();
1177 let source_buffer = source_buffer_handle.read(cx);
1178 let worktree;
1179 let buffer_abs_path;
1180 if let Some(file) = File::from_dyn(source_buffer.file()) {
1181 worktree = file.worktree.clone();
1182 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1183 } else {
1184 return Task::ready(Ok(Default::default()));
1185 };
1186
1187 let position = position.to_point_utf16(source_buffer);
1188
1189 if worktree.read(cx).as_local().is_some() {
1190 let buffer_abs_path = buffer_abs_path.unwrap();
1191 let lang_name;
1192 let lang_server;
1193 if let Some(lang) = source_buffer.language() {
1194 lang_name = lang.name().to_string();
1195 if let Some(server) = self
1196 .language_servers
1197 .get(&(worktree.read(cx).id(), lang_name.clone()))
1198 {
1199 lang_server = server.clone();
1200 } else {
1201 return Task::ready(Ok(Default::default()));
1202 };
1203 } else {
1204 return Task::ready(Ok(Default::default()));
1205 }
1206
1207 cx.spawn(|this, mut cx| async move {
1208 let response = lang_server
1209 .request::<lsp::request::GotoDefinition>(lsp::GotoDefinitionParams {
1210 text_document_position_params: lsp::TextDocumentPositionParams {
1211 text_document: lsp::TextDocumentIdentifier::new(
1212 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1213 ),
1214 position: lsp::Position::new(position.row, position.column),
1215 },
1216 work_done_progress_params: Default::default(),
1217 partial_result_params: Default::default(),
1218 })
1219 .await?;
1220
1221 let mut definitions = Vec::new();
1222 if let Some(response) = response {
1223 let mut unresolved_locations = Vec::new();
1224 match response {
1225 lsp::GotoDefinitionResponse::Scalar(loc) => {
1226 unresolved_locations.push((loc.uri, loc.range));
1227 }
1228 lsp::GotoDefinitionResponse::Array(locs) => {
1229 unresolved_locations.extend(locs.into_iter().map(|l| (l.uri, l.range)));
1230 }
1231 lsp::GotoDefinitionResponse::Link(links) => {
1232 unresolved_locations.extend(
1233 links
1234 .into_iter()
1235 .map(|l| (l.target_uri, l.target_selection_range)),
1236 );
1237 }
1238 }
1239
1240 for (target_uri, target_range) in unresolved_locations {
1241 let target_buffer_handle = this
1242 .update(&mut cx, |this, cx| {
1243 this.open_local_buffer_from_lsp_path(
1244 target_uri,
1245 lang_name.clone(),
1246 lang_server.clone(),
1247 cx,
1248 )
1249 })
1250 .await?;
1251
1252 cx.read(|cx| {
1253 let target_buffer = target_buffer_handle.read(cx);
1254 let target_start = target_buffer
1255 .clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
1256 let target_end = target_buffer
1257 .clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
1258 definitions.push(Definition {
1259 target_buffer: target_buffer_handle,
1260 target_range: target_buffer.anchor_after(target_start)
1261 ..target_buffer.anchor_before(target_end),
1262 });
1263 });
1264 }
1265 }
1266
1267 Ok(definitions)
1268 })
1269 } else if let Some(project_id) = self.remote_id() {
1270 let client = self.client.clone();
1271 let request = proto::GetDefinition {
1272 project_id,
1273 buffer_id: source_buffer.remote_id(),
1274 position: Some(serialize_anchor(&source_buffer.anchor_before(position))),
1275 };
1276 cx.spawn(|this, mut cx| async move {
1277 let response = client.request(request).await?;
1278 let mut definitions = Vec::new();
1279 for definition in response.definitions {
1280 let buffer = definition.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1281 let target_buffer = this
1282 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1283 .await?;
1284 let target_start = definition
1285 .target_start
1286 .and_then(deserialize_anchor)
1287 .ok_or_else(|| anyhow!("missing target start"))?;
1288 let target_end = definition
1289 .target_end
1290 .and_then(deserialize_anchor)
1291 .ok_or_else(|| anyhow!("missing target end"))?;
1292 definitions.push(Definition {
1293 target_buffer,
1294 target_range: target_start..target_end,
1295 })
1296 }
1297
1298 Ok(definitions)
1299 })
1300 } else {
1301 Task::ready(Ok(Default::default()))
1302 }
1303 }
1304
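/// Request completions at `position`. Locally this sends
/// `textDocument/completion` to the buffer's language server and anchors the
/// returned edits in the buffer; remotely it asks the host, waiting for the
/// buffer to catch up to the host's version before resolving.
///
/// ```ignore
/// // Illustrative only:
/// let completions = project
///     .update(cx, |project, cx| project.completions(&buffer, position, cx))
///     .await?;
/// ```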
1305 pub fn completions<T: ToPointUtf16>(
1306 &self,
1307 source_buffer_handle: &ModelHandle<Buffer>,
1308 position: T,
1309 cx: &mut ModelContext<Self>,
1310 ) -> Task<Result<Vec<Completion>>> {
1311 let source_buffer_handle = source_buffer_handle.clone();
1312 let source_buffer = source_buffer_handle.read(cx);
1313 let buffer_id = source_buffer.remote_id();
1314 let language = source_buffer.language().cloned();
1315 let worktree;
1316 let buffer_abs_path;
1317 if let Some(file) = File::from_dyn(source_buffer.file()) {
1318 worktree = file.worktree.clone();
1319 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1320 } else {
1321 return Task::ready(Ok(Default::default()));
1322 };
1323
1324 let position = position.to_point_utf16(source_buffer);
1325 let anchor = source_buffer.anchor_after(position);
1326
1327 if worktree.read(cx).as_local().is_some() {
1328 let buffer_abs_path = buffer_abs_path.unwrap();
1329 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1330 server
1331 } else {
1332 return Task::ready(Ok(Default::default()));
1333 };
1334
1335 cx.spawn(|_, cx| async move {
1336 let completions = lang_server
1337 .request::<lsp::request::Completion>(lsp::CompletionParams {
1338 text_document_position: lsp::TextDocumentPositionParams::new(
1339 lsp::TextDocumentIdentifier::new(
1340 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1341 ),
1342 position.to_lsp_position(),
1343 ),
1344 context: Default::default(),
1345 work_done_progress_params: Default::default(),
1346 partial_result_params: Default::default(),
1347 })
1348 .await?;
1349
1350 let completions = if let Some(completions) = completions {
1351 match completions {
1352 lsp::CompletionResponse::Array(completions) => completions,
1353 lsp::CompletionResponse::List(list) => list.items,
1354 }
1355 } else {
1356 Default::default()
1357 };
1358
1359 source_buffer_handle.read_with(&cx, |this, _| {
1360 Ok(completions
1361 .into_iter()
1362 .filter_map(|lsp_completion| {
1363 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1364 lsp::CompletionTextEdit::Edit(edit) => {
1365 (range_from_lsp(edit.range), edit.new_text.clone())
1366 }
1367 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1368 log::info!("unsupported insert/replace completion");
1369 return None;
1370 }
1371 };
1372
1373 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1374 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1375 if clipped_start == old_range.start && clipped_end == old_range.end {
1376 Some(Completion {
1377 old_range: this.anchor_before(old_range.start)
1378 ..this.anchor_after(old_range.end),
1379 new_text,
1380 label: language
1381 .as_ref()
1382 .and_then(|l| l.label_for_completion(&lsp_completion))
1383 .unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
1384 lsp_completion,
1385 })
1386 } else {
1387 None
1388 }
1389 })
1390 .collect())
1391 })
1392 })
1393 } else if let Some(project_id) = self.remote_id() {
1394 let rpc = self.client.clone();
1395 let message = proto::GetCompletions {
1396 project_id,
1397 buffer_id,
1398 position: Some(language::proto::serialize_anchor(&anchor)),
1399 version: (&source_buffer.version()).into(),
1400 };
1401 cx.spawn_weak(|_, mut cx| async move {
1402 let response = rpc.request(message).await?;
1403 source_buffer_handle
1404 .update(&mut cx, |buffer, _| {
1405 buffer.wait_for_version(response.version.into())
1406 })
1407 .await;
1408 response
1409 .completions
1410 .into_iter()
1411 .map(|completion| {
1412 language::proto::deserialize_completion(completion, language.as_ref())
1413 })
1414 .collect()
1415 })
1416 } else {
1417 Task::ready(Ok(Default::default()))
1418 }
1419 }
1420
1421 pub fn apply_additional_edits_for_completion(
1422 &self,
1423 buffer_handle: ModelHandle<Buffer>,
1424 completion: Completion,
1425 push_to_history: bool,
1426 cx: &mut ModelContext<Self>,
1427 ) -> Task<Result<Option<Transaction>>> {
1428 let buffer = buffer_handle.read(cx);
1429 let buffer_id = buffer.remote_id();
1430
1431 if self.is_local() {
1432 let lang_server = if let Some(language_server) = buffer.language_server() {
1433 language_server.clone()
1434 } else {
1435 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1436 };
1437
1438 cx.spawn(|_, mut cx| async move {
1439 let resolved_completion = lang_server
1440 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1441 .await?;
1442 if let Some(edits) = resolved_completion.additional_text_edits {
1443 let edits = buffer_handle
1444 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1445 .await?;
1446 buffer_handle.update(&mut cx, |buffer, cx| {
1447 buffer.finalize_last_transaction();
1448 buffer.start_transaction();
1449 for (range, text) in edits {
1450 buffer.edit([range], text, cx);
1451 }
1452 let transaction = if buffer.end_transaction(cx).is_some() {
1453 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1454 if !push_to_history {
1455 buffer.forget_transaction(transaction.id);
1456 }
1457 Some(transaction)
1458 } else {
1459 None
1460 };
1461 Ok(transaction)
1462 })
1463 } else {
1464 Ok(None)
1465 }
1466 })
1467 } else if let Some(project_id) = self.remote_id() {
1468 let client = self.client.clone();
1469 cx.spawn(|_, mut cx| async move {
1470 let response = client
1471 .request(proto::ApplyCompletionAdditionalEdits {
1472 project_id,
1473 buffer_id,
1474 completion: Some(language::proto::serialize_completion(&completion)),
1475 })
1476 .await?;
1477
1478 if let Some(transaction) = response.transaction {
1479 let transaction = language::proto::deserialize_transaction(transaction)?;
1480 buffer_handle
1481 .update(&mut cx, |buffer, _| {
1482 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1483 })
1484 .await;
1485 if push_to_history {
1486 buffer_handle.update(&mut cx, |buffer, _| {
1487 buffer.push_transaction(transaction.clone(), Instant::now());
1488 });
1489 }
1490 Ok(Some(transaction))
1491 } else {
1492 Ok(None)
1493 }
1494 })
1495 } else {
1496 Task::ready(Err(anyhow!("project does not have a remote id")))
1497 }
1498 }
1499
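/// Fetch the code actions available for `range` in the given buffer,
/// restricted to quick fixes and refactorings.
///
/// ```ignore
/// // Illustrative only; the range can be any offset-convertible range.
/// let actions = project
///     .update(cx, |project, cx| project.code_actions(&buffer, 0..4, cx))
///     .await?;
/// ```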
1500 pub fn code_actions<T: ToOffset>(
1501 &self,
1502 buffer_handle: &ModelHandle<Buffer>,
1503 range: Range<T>,
1504 cx: &mut ModelContext<Self>,
1505 ) -> Task<Result<Vec<CodeAction>>> {
1506 let buffer_handle = buffer_handle.clone();
1507 let buffer = buffer_handle.read(cx);
1508 let buffer_id = buffer.remote_id();
1509 let worktree;
1510 let buffer_abs_path;
1511 if let Some(file) = File::from_dyn(buffer.file()) {
1512 worktree = file.worktree.clone();
1513 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1514 } else {
1515 return Task::ready(Ok(Default::default()));
1516 };
1517 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1518
1519 if worktree.read(cx).as_local().is_some() {
1520 let buffer_abs_path = buffer_abs_path.unwrap();
1521 let lang_name;
1522 let lang_server;
1523 if let Some(lang) = buffer.language() {
1524 lang_name = lang.name().to_string();
1525 if let Some(server) = self
1526 .language_servers
1527 .get(&(worktree.read(cx).id(), lang_name.clone()))
1528 {
1529 lang_server = server.clone();
1530 } else {
1531 return Task::ready(Ok(Default::default()));
1532 };
1533 } else {
1534 return Task::ready(Ok(Default::default()));
1535 }
1536
1537 let lsp_range = lsp::Range::new(
1538 range.start.to_point_utf16(buffer).to_lsp_position(),
1539 range.end.to_point_utf16(buffer).to_lsp_position(),
1540 );
1541 cx.foreground().spawn(async move {
1542 Ok(lang_server
1543 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1544 text_document: lsp::TextDocumentIdentifier::new(
1545 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1546 ),
1547 range: lsp_range,
1548 work_done_progress_params: Default::default(),
1549 partial_result_params: Default::default(),
1550 context: lsp::CodeActionContext {
1551 diagnostics: Default::default(),
1552 only: Some(vec![
1553 lsp::CodeActionKind::QUICKFIX,
1554 lsp::CodeActionKind::REFACTOR,
1555 lsp::CodeActionKind::REFACTOR_EXTRACT,
1556 ]),
1557 },
1558 })
1559 .await?
1560 .unwrap_or_default()
1561 .into_iter()
1562 .filter_map(|entry| {
1563 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1564 Some(CodeAction {
1565 range: range.clone(),
1566 lsp_action,
1567 })
1568 } else {
1569 None
1570 }
1571 })
1572 .collect())
1573 })
1574 } else if let Some(project_id) = self.remote_id() {
1575 let rpc = self.client.clone();
1576 cx.foreground().spawn(async move {
1577 let response = rpc
1578 .request(proto::GetCodeActions {
1579 project_id,
1580 buffer_id,
1581 start: Some(language::proto::serialize_anchor(&range.start)),
1582 end: Some(language::proto::serialize_anchor(&range.end)),
1583 })
1584 .await?;
1585 response
1586 .actions
1587 .into_iter()
1588 .map(language::proto::deserialize_code_action)
1589 .collect()
1590 })
1591 } else {
1592 Task::ready(Ok(Default::default()))
1593 }
1594 }
1595
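/// Apply a previously fetched code action. The action is re-resolved (or
/// re-fetched if it cannot be resolved) so its edits are computed against the
/// buffer's current contents, and any resulting text edits, file creations,
/// renames, and deletions are applied.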
1596 pub fn apply_code_action(
1597 &self,
1598 buffer_handle: ModelHandle<Buffer>,
1599 mut action: CodeAction,
1600 push_to_history: bool,
1601 cx: &mut ModelContext<Self>,
1602 ) -> Task<Result<ProjectTransaction>> {
1603 if self.is_local() {
1604 let buffer = buffer_handle.read(cx);
1605 let lang_name = if let Some(lang) = buffer.language() {
1606 lang.name().to_string()
1607 } else {
1608 return Task::ready(Ok(Default::default()));
1609 };
1610 let lang_server = if let Some(language_server) = buffer.language_server() {
1611 language_server.clone()
1612 } else {
1613 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1614 };
1615 let range = action.range.to_point_utf16(buffer);
1616 let fs = self.fs.clone();
1617
1618 cx.spawn(|this, mut cx| async move {
1619 if let Some(lsp_range) = action
1620 .lsp_action
1621 .data
1622 .as_mut()
1623 .and_then(|d| d.get_mut("codeActionParams"))
1624 .and_then(|d| d.get_mut("range"))
1625 {
1626 *lsp_range = serde_json::to_value(&lsp::Range::new(
1627 range.start.to_lsp_position(),
1628 range.end.to_lsp_position(),
1629 ))
1630 .unwrap();
1631 action.lsp_action = lang_server
1632 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1633 .await?;
1634 } else {
1635 let actions = this
1636 .update(&mut cx, |this, cx| {
1637 this.code_actions(&buffer_handle, action.range, cx)
1638 })
1639 .await?;
1640 action.lsp_action = actions
1641 .into_iter()
1642 .find(|a| a.lsp_action.title == action.lsp_action.title)
1643 .ok_or_else(|| anyhow!("code action is outdated"))?
1644 .lsp_action;
1645 }
1646
1647 let mut operations = Vec::new();
1648 if let Some(edit) = action.lsp_action.edit {
1649 if let Some(document_changes) = edit.document_changes {
1650 match document_changes {
1651 lsp::DocumentChanges::Edits(edits) => operations
1652 .extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit)),
1653 lsp::DocumentChanges::Operations(ops) => operations = ops,
1654 }
1655 } else if let Some(changes) = edit.changes {
1656 operations.extend(changes.into_iter().map(|(uri, edits)| {
1657 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1658 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1659 uri,
1660 version: None,
1661 },
1662 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1663 })
1664 }));
1665 }
1666 }
1667
1668 let mut project_transaction = ProjectTransaction::default();
1669 for operation in operations {
1670 match operation {
1671 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1672 let abs_path = op
1673 .uri
1674 .to_file_path()
1675 .map_err(|_| anyhow!("can't convert URI to path"))?;
1676
1677 if let Some(parent_path) = abs_path.parent() {
1678 fs.create_dir(parent_path).await?;
1679 }
1680 if abs_path.ends_with("/") {
1681 fs.create_dir(&abs_path).await?;
1682 } else {
1683 fs.create_file(
1684 &abs_path,
1685 op.options.map(Into::into).unwrap_or_default(),
1686 )
1687 .await?;
1688 }
1689 }
1690 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1691 let source_abs_path = op
1692 .old_uri
1693 .to_file_path()
1694 .map_err(|_| anyhow!("can't convert URI to path"))?;
1695 let target_abs_path = op
1696 .new_uri
1697 .to_file_path()
1698 .map_err(|_| anyhow!("can't convert URI to path"))?;
1699 fs.rename(
1700 &source_abs_path,
1701 &target_abs_path,
1702 op.options.map(Into::into).unwrap_or_default(),
1703 )
1704 .await?;
1705 }
1706 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1707 let abs_path = op
1708 .uri
1709 .to_file_path()
1710 .map_err(|_| anyhow!("can't convert URI to path"))?;
1711 let options = op.options.map(Into::into).unwrap_or_default();
1712 if abs_path.ends_with("/") {
1713 fs.remove_dir(&abs_path, options).await?;
1714 } else {
1715 fs.remove_file(&abs_path, options).await?;
1716 }
1717 }
1718 lsp::DocumentChangeOperation::Edit(op) => {
1719 let buffer_to_edit = this
1720 .update(&mut cx, |this, cx| {
1721 this.open_local_buffer_from_lsp_path(
1722 op.text_document.uri,
1723 lang_name.clone(),
1724 lang_server.clone(),
1725 cx,
1726 )
1727 })
1728 .await?;
1729
1730 let edits = buffer_to_edit
1731 .update(&mut cx, |buffer, cx| {
1732 let edits = op.edits.into_iter().map(|edit| match edit {
1733 lsp::OneOf::Left(edit) => edit,
1734 lsp::OneOf::Right(edit) => edit.text_edit,
1735 });
1736 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1737 })
1738 .await?;
1739
1740 let transaction = buffer_to_edit.update(&mut cx, |buffer, cx| {
1741 buffer.finalize_last_transaction();
1742 buffer.start_transaction();
1743 for (range, text) in edits {
1744 buffer.edit([range], text, cx);
1745 }
1746 let transaction = if buffer.end_transaction(cx).is_some() {
1747 let transaction =
1748 buffer.finalize_last_transaction().unwrap().clone();
1749 if !push_to_history {
1750 buffer.forget_transaction(transaction.id);
1751 }
1752 Some(transaction)
1753 } else {
1754 None
1755 };
1756
1757 transaction
1758 });
1759 if let Some(transaction) = transaction {
1760 project_transaction.0.insert(buffer_to_edit, transaction);
1761 }
1762 }
1763 }
1764 }
1765
1766 Ok(project_transaction)
1767 })
1768 } else if let Some(project_id) = self.remote_id() {
1769 let client = self.client.clone();
1770 let request = proto::ApplyCodeAction {
1771 project_id,
1772 buffer_id: buffer_handle.read(cx).remote_id(),
1773 action: Some(language::proto::serialize_code_action(&action)),
1774 };
1775 cx.spawn(|this, mut cx| async move {
1776 let response = client
1777 .request(request)
1778 .await?
1779 .transaction
1780 .ok_or_else(|| anyhow!("missing transaction"))?;
1781 this.update(&mut cx, |this, cx| {
1782 this.deserialize_project_transaction(response, push_to_history, cx)
1783 })
1784 .await
1785 })
1786 } else {
1787 Task::ready(Err(anyhow!("project does not have a remote id")))
1788 }
1789 }
1790
1791 pub fn find_or_create_local_worktree(
1792 &self,
1793 abs_path: impl AsRef<Path>,
1794 weak: bool,
1795 cx: &mut ModelContext<Self>,
1796 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1797 let abs_path = abs_path.as_ref();
1798 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1799 Task::ready(Ok((tree.clone(), relative_path.into())))
1800 } else {
1801 let worktree = self.create_local_worktree(abs_path, weak, cx);
1802 cx.foreground()
1803 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1804 }
1805 }
1806
1807 fn find_local_worktree(
1808 &self,
1809 abs_path: &Path,
1810 cx: &AppContext,
1811 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1812 for tree in self.worktrees(cx) {
1813 if let Some(relative_path) = tree
1814 .read(cx)
1815 .as_local()
1816 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1817 {
1818 return Some((tree.clone(), relative_path.into()));
1819 }
1820 }
1821 None
1822 }
1823
1824 pub fn is_shared(&self) -> bool {
1825 match &self.client_state {
1826 ProjectClientState::Local { is_shared, .. } => *is_shared,
1827 ProjectClientState::Remote { .. } => false,
1828 }
1829 }
1830
1831 fn create_local_worktree(
1832 &self,
1833 abs_path: impl AsRef<Path>,
1834 weak: bool,
1835 cx: &mut ModelContext<Self>,
1836 ) -> Task<Result<ModelHandle<Worktree>>> {
1837 let fs = self.fs.clone();
1838 let client = self.client.clone();
1839 let path = Arc::from(abs_path.as_ref());
1840 cx.spawn(|project, mut cx| async move {
1841 let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;
1842
1843 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
1844 project.add_worktree(&worktree, cx);
1845 (project.remote_id(), project.is_shared())
1846 });
1847
1848 if let Some(project_id) = remote_project_id {
1849 worktree
1850 .update(&mut cx, |worktree, cx| {
1851 worktree.as_local_mut().unwrap().register(project_id, cx)
1852 })
1853 .await?;
1854 if is_shared {
1855 worktree
1856 .update(&mut cx, |worktree, cx| {
1857 worktree.as_local_mut().unwrap().share(project_id, cx)
1858 })
1859 .await?;
1860 }
1861 }
1862
1863 Ok(worktree)
1864 })
1865 }
1866
1867 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1868 self.worktrees.retain(|worktree| {
1869 worktree
1870 .upgrade(cx)
1871 .map_or(false, |w| w.read(cx).id() != id)
1872 });
1873 cx.notify();
1874 }
1875
1876 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
1877 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
1878 if worktree.read(cx).is_local() {
1879 cx.subscribe(&worktree, |this, worktree, _, cx| {
1880 this.update_local_worktree_buffers(worktree, cx);
1881 })
1882 .detach();
1883 }
1884
1885 let push_weak_handle = {
1886 let worktree = worktree.read(cx);
1887 worktree.is_local() && worktree.is_weak()
1888 };
1889 if push_weak_handle {
1890 cx.observe_release(&worktree, |this, cx| {
1891 this.worktrees
1892 .retain(|worktree| worktree.upgrade(cx).is_some());
1893 cx.notify();
1894 })
1895 .detach();
1896 self.worktrees
1897 .push(WorktreeHandle::Weak(worktree.downgrade()));
1898 } else {
1899 self.worktrees
1900 .push(WorktreeHandle::Strong(worktree.clone()));
1901 }
1902 cx.notify();
1903 }
1904
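// When a local worktree changes, refresh the `File` metadata (entry id, path,
// mtime) of every open buffer belonging to it, notify the remote project if there
// is one, and forget buffers that have been dropped.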
1905 fn update_local_worktree_buffers(
1906 &mut self,
1907 worktree_handle: ModelHandle<Worktree>,
1908 cx: &mut ModelContext<Self>,
1909 ) {
1910 let snapshot = worktree_handle.read(cx).snapshot();
1911 let mut buffers_to_delete = Vec::new();
1912 for (buffer_id, buffer) in &self.open_buffers {
1913 if let Some(buffer) = buffer.upgrade(cx) {
1914 buffer.update(cx, |buffer, cx| {
1915 if let Some(old_file) = File::from_dyn(buffer.file()) {
1916 if old_file.worktree != worktree_handle {
1917 return;
1918 }
1919
1920 let new_file = if let Some(entry) = old_file
1921 .entry_id
1922 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
1923 {
1924 File {
1925 is_local: true,
1926 entry_id: Some(entry.id),
1927 mtime: entry.mtime,
1928 path: entry.path.clone(),
1929 worktree: worktree_handle.clone(),
1930 }
1931 } else if let Some(entry) =
1932 snapshot.entry_for_path(old_file.path().as_ref())
1933 {
1934 File {
1935 is_local: true,
1936 entry_id: Some(entry.id),
1937 mtime: entry.mtime,
1938 path: entry.path.clone(),
1939 worktree: worktree_handle.clone(),
1940 }
1941 } else {
1942 File {
1943 is_local: true,
1944 entry_id: None,
1945 path: old_file.path().clone(),
1946 mtime: old_file.mtime(),
1947 worktree: worktree_handle.clone(),
1948 }
1949 };
1950
1951 if let Some(project_id) = self.remote_id() {
1952 self.client
1953 .send(proto::UpdateBufferFile {
1954 project_id,
1955 buffer_id: *buffer_id as u64,
1956 file: Some(new_file.to_proto()),
1957 })
1958 .log_err();
1959 }
1960 buffer.file_updated(Box::new(new_file), cx).detach();
1961 }
1962 });
1963 } else {
1964 buffers_to_delete.push(*buffer_id);
1965 }
1966 }
1967
1968 for buffer_id in buffers_to_delete {
1969 self.open_buffers.remove(&buffer_id);
1970 }
1971 }
1972
1973 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
1974 let new_active_entry = entry.and_then(|project_path| {
1975 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1976 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
1977 Some(ProjectEntry {
1978 worktree_id: project_path.worktree_id,
1979 entry_id: entry.id,
1980 })
1981 });
1982 if new_active_entry != self.active_entry {
1983 self.active_entry = new_active_entry;
1984 cx.emit(Event::ActiveEntryChanged(new_active_entry));
1985 }
1986 }
1987
1988 pub fn is_running_disk_based_diagnostics(&self) -> bool {
1989 self.language_servers_with_diagnostics_running > 0
1990 }
1991
1992 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
1993 let mut summary = DiagnosticSummary::default();
1994 for (_, path_summary) in self.diagnostic_summaries(cx) {
1995 summary.error_count += path_summary.error_count;
1996 summary.warning_count += path_summary.warning_count;
1997 summary.info_count += path_summary.info_count;
1998 summary.hint_count += path_summary.hint_count;
1999 }
2000 summary
2001 }
2002
2003 pub fn diagnostic_summaries<'a>(
2004 &'a self,
2005 cx: &'a AppContext,
2006 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2007 self.worktrees(cx).flat_map(move |worktree| {
2008 let worktree = worktree.read(cx);
2009 let worktree_id = worktree.id();
2010 worktree
2011 .diagnostic_summaries()
2012 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2013 })
2014 }
2015
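// The two methods below track how many language servers are currently producing
// disk-based diagnostics, emitting `DiskBasedDiagnosticsStarted` when the count
// rises from zero and `DiskBasedDiagnosticsFinished` when it returns to zero.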
2016 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2017 self.language_servers_with_diagnostics_running += 1;
2018 if self.language_servers_with_diagnostics_running == 1 {
2019 cx.emit(Event::DiskBasedDiagnosticsStarted);
2020 }
2021 }
2022
2023 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2024 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2025 self.language_servers_with_diagnostics_running -= 1;
2026 if self.language_servers_with_diagnostics_running == 0 {
2027 cx.emit(Event::DiskBasedDiagnosticsFinished);
2028 }
2029 }
2030
2031 pub fn active_entry(&self) -> Option<ProjectEntry> {
2032 self.active_entry
2033 }
2034
2035 // RPC message handlers
2036
2037 async fn handle_unshare_project(
2038 this: ModelHandle<Self>,
2039 _: TypedEnvelope<proto::UnshareProject>,
2040 _: Arc<Client>,
2041 mut cx: AsyncAppContext,
2042 ) -> Result<()> {
2043 this.update(&mut cx, |this, cx| {
2044 if let ProjectClientState::Remote {
2045 sharing_has_stopped,
2046 ..
2047 } = &mut this.client_state
2048 {
2049 *sharing_has_stopped = true;
2050 this.collaborators.clear();
2051 cx.notify();
2052 } else {
2053 unreachable!()
2054 }
2055 });
2056
2057 Ok(())
2058 }
2059
2060 async fn handle_add_collaborator(
2061 this: ModelHandle<Self>,
2062 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2063 _: Arc<Client>,
2064 mut cx: AsyncAppContext,
2065 ) -> Result<()> {
2066 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2067 let collaborator = envelope
2068 .payload
2069 .collaborator
2070 .take()
2071 .ok_or_else(|| anyhow!("empty collaborator"))?;
2072
2073 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2074 this.update(&mut cx, |this, cx| {
2075 this.collaborators
2076 .insert(collaborator.peer_id, collaborator);
2077 cx.notify();
2078 });
2079
2080 Ok(())
2081 }
2082
2083 async fn handle_remove_collaborator(
2084 this: ModelHandle<Self>,
2085 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2086 _: Arc<Client>,
2087 mut cx: AsyncAppContext,
2088 ) -> Result<()> {
2089 this.update(&mut cx, |this, cx| {
2090 let peer_id = PeerId(envelope.payload.peer_id);
2091 let replica_id = this
2092 .collaborators
2093 .remove(&peer_id)
2094 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2095 .replica_id;
2096 this.shared_buffers.remove(&peer_id);
2097 for (_, buffer) in &this.open_buffers {
2098 if let Some(buffer) = buffer.upgrade(cx) {
2099 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2100 }
2101 }
2102 cx.notify();
2103 Ok(())
2104 })
2105 }
2106
2107 async fn handle_share_worktree(
2108 this: ModelHandle<Self>,
2109 envelope: TypedEnvelope<proto::ShareWorktree>,
2110 client: Arc<Client>,
2111 mut cx: AsyncAppContext,
2112 ) -> Result<()> {
2113 this.update(&mut cx, |this, cx| {
2114 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2115 let replica_id = this.replica_id();
2116 let worktree = envelope
2117 .payload
2118 .worktree
2119 .ok_or_else(|| anyhow!("invalid worktree"))?;
2120 let (worktree, load_task) =
2121 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2122 this.add_worktree(&worktree, cx);
2123 load_task.detach();
2124 Ok(())
2125 })
2126 }
2127
2128 async fn handle_unregister_worktree(
2129 this: ModelHandle<Self>,
2130 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2131 _: Arc<Client>,
2132 mut cx: AsyncAppContext,
2133 ) -> Result<()> {
2134 this.update(&mut cx, |this, cx| {
2135 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2136 this.remove_worktree(worktree_id, cx);
2137 Ok(())
2138 })
2139 }
2140
2141 async fn handle_update_worktree(
2142 this: ModelHandle<Self>,
2143 envelope: TypedEnvelope<proto::UpdateWorktree>,
2144 _: Arc<Client>,
2145 mut cx: AsyncAppContext,
2146 ) -> Result<()> {
2147 this.update(&mut cx, |this, cx| {
2148 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2149 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2150 worktree.update(cx, |worktree, _| {
2151 let worktree = worktree.as_remote_mut().unwrap();
2152 worktree.update_from_remote(envelope)
2153 })?;
2154 }
2155 Ok(())
2156 })
2157 }
2158
2159 async fn handle_update_diagnostic_summary(
2160 this: ModelHandle<Self>,
2161 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2162 _: Arc<Client>,
2163 mut cx: AsyncAppContext,
2164 ) -> Result<()> {
2165 this.update(&mut cx, |this, cx| {
2166 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2167 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2168 if let Some(summary) = envelope.payload.summary {
2169 let project_path = ProjectPath {
2170 worktree_id,
2171 path: Path::new(&summary.path).into(),
2172 };
2173 worktree.update(cx, |worktree, _| {
2174 worktree
2175 .as_remote_mut()
2176 .unwrap()
2177 .update_diagnostic_summary(project_path.path.clone(), &summary);
2178 });
2179 cx.emit(Event::DiagnosticsUpdated(project_path));
2180 }
2181 }
2182 Ok(())
2183 })
2184 }
2185
2186 async fn handle_disk_based_diagnostics_updating(
2187 this: ModelHandle<Self>,
2188 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2189 _: Arc<Client>,
2190 mut cx: AsyncAppContext,
2191 ) -> Result<()> {
2192 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2193 Ok(())
2194 }
2195
2196 async fn handle_disk_based_diagnostics_updated(
2197 this: ModelHandle<Self>,
2198 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2199 _: Arc<Client>,
2200 mut cx: AsyncAppContext,
2201 ) -> Result<()> {
2202 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2203 Ok(())
2204 }
2205
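// Applies incoming operations to the corresponding buffer if it is open; otherwise
// they are queued in `OpenBuffer::Operations` so they can be applied once the
// buffer is loaded.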
2206 async fn handle_update_buffer(
2207 this: ModelHandle<Self>,
2208 envelope: TypedEnvelope<proto::UpdateBuffer>,
2209 _: Arc<Client>,
2210 mut cx: AsyncAppContext,
2211 ) -> Result<()> {
2212 this.update(&mut cx, |this, cx| {
2213 let payload = envelope.payload.clone();
2214 let buffer_id = payload.buffer_id as usize;
2215 let ops = payload
2216 .operations
2217 .into_iter()
2218 .map(language::proto::deserialize_operation)
2219 .collect::<Result<Vec<_>, _>>()?;
2220 let buffer = this
2221 .open_buffers
2222 .entry(buffer_id)
2223 .or_insert_with(|| OpenBuffer::Operations(Vec::new()));
2224 match buffer {
2225 OpenBuffer::Loaded(buffer) => {
2226 if let Some(buffer) = buffer.upgrade(cx) {
2227 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2228 }
2229 }
2230 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
2231 }
2232 Ok(())
2233 })
2234 }
2235
2236 async fn handle_update_buffer_file(
2237 this: ModelHandle<Self>,
2238 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2239 _: Arc<Client>,
2240 mut cx: AsyncAppContext,
2241 ) -> Result<()> {
2242 this.update(&mut cx, |this, cx| {
2243 let payload = envelope.payload.clone();
2244 let buffer_id = payload.buffer_id as usize;
2245 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2246 let worktree = this
2247 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2248 .ok_or_else(|| anyhow!("no such worktree"))?;
2249 let file = File::from_proto(file, worktree.clone(), cx)?;
2250 let buffer = this
2251 .open_buffers
2252 .get_mut(&buffer_id)
2253 .and_then(|b| b.upgrade(cx))
2254 .ok_or_else(|| anyhow!("no such buffer"))?;
2255 buffer.update(cx, |buffer, cx| {
2256 buffer.file_updated(Box::new(file), cx).detach();
2257 });
2258 Ok(())
2259 })
2260 }
2261
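// Saves a buffer on behalf of a remote peer, first waiting until this replica has
// seen the buffer version that the peer requested to save.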
2262 async fn handle_save_buffer(
2263 this: ModelHandle<Self>,
2264 envelope: TypedEnvelope<proto::SaveBuffer>,
2265 _: Arc<Client>,
2266 mut cx: AsyncAppContext,
2267 ) -> Result<proto::BufferSaved> {
2268 let buffer_id = envelope.payload.buffer_id;
2269 let sender_id = envelope.original_sender_id()?;
2270 let requested_version = envelope.payload.version.try_into()?;
2271
2272 let (project_id, buffer) = this.update(&mut cx, |this, _| {
2273 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2274 let buffer = this
2275 .shared_buffers
2276 .get(&sender_id)
2277 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2278 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2279 Ok::<_, anyhow::Error>((project_id, buffer))
2280 })?;
2281
2282 buffer
2283 .update(&mut cx, |buffer, _| {
2284 buffer.wait_for_version(requested_version)
2285 })
2286 .await;
2287
2288 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2289 Ok(proto::BufferSaved {
2290 project_id,
2291 buffer_id,
2292 version: (&saved_version).into(),
2293 mtime: Some(mtime.into()),
2294 })
2295 }
2296
2297 async fn handle_format_buffers(
2298 this: ModelHandle<Self>,
2299 envelope: TypedEnvelope<proto::FormatBuffers>,
2300 _: Arc<Client>,
2301 mut cx: AsyncAppContext,
2302 ) -> Result<proto::FormatBuffersResponse> {
2303 let sender_id = envelope.original_sender_id()?;
2304 let format = this.update(&mut cx, |this, cx| {
2305 let shared_buffers = this
2306 .shared_buffers
2307 .get(&sender_id)
2308 .ok_or_else(|| anyhow!("peer has no buffers"))?;
2309 let mut buffers = HashSet::default();
2310 for buffer_id in &envelope.payload.buffer_ids {
2311 buffers.insert(
2312 shared_buffers
2313 .get(buffer_id)
2314 .cloned()
2315 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2316 );
2317 }
2318 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2319 })?;
2320
2321 let project_transaction = format.await?;
2322 let project_transaction = this.update(&mut cx, |this, cx| {
2323 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2324 });
2325 Ok(proto::FormatBuffersResponse {
2326 transaction: Some(project_transaction),
2327 })
2328 }
2329
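// Computes completions for a peer at the given position, waiting for the peer's
// buffer version to arrive locally before running the request, and reporting the
// buffer version at which the completions were computed.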
2330 async fn handle_get_completions(
2331 this: ModelHandle<Self>,
2332 envelope: TypedEnvelope<proto::GetCompletions>,
2333 _: Arc<Client>,
2334 mut cx: AsyncAppContext,
2335 ) -> Result<proto::GetCompletionsResponse> {
2336 let sender_id = envelope.original_sender_id()?;
2337 let position = envelope
2338 .payload
2339 .position
2340 .and_then(language::proto::deserialize_anchor)
2341 .ok_or_else(|| anyhow!("invalid position"))?;
2342 let version = clock::Global::from(envelope.payload.version);
2343 let buffer = this.read_with(&cx, |this, _| {
2344 this.shared_buffers
2345 .get(&sender_id)
2346 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2347 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2348 })?;
2349 buffer
2350 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
2351 .await;
2352 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2353 let completions = this
2354 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2355 .await?;
2356
2357 Ok(proto::GetCompletionsResponse {
2358 completions: completions
2359 .iter()
2360 .map(language::proto::serialize_completion)
2361 .collect(),
2362 version: (&version).into(),
2363 })
2364 }
2365
2366 async fn handle_apply_additional_edits_for_completion(
2367 this: ModelHandle<Self>,
2368 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2369 _: Arc<Client>,
2370 mut cx: AsyncAppContext,
2371 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2372 let sender_id = envelope.original_sender_id()?;
2373 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2374 let buffer = this
2375 .shared_buffers
2376 .get(&sender_id)
2377 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2378 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2379 let language = buffer.read(cx).language();
2380 let completion = language::proto::deserialize_completion(
2381 envelope
2382 .payload
2383 .completion
2384 .ok_or_else(|| anyhow!("invalid completion"))?,
2385 language,
2386 )?;
2387 Ok::<_, anyhow::Error>(
2388 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2389 )
2390 })?;
2391
2392 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2393 transaction: apply_additional_edits
2394 .await?
2395 .as_ref()
2396 .map(language::proto::serialize_transaction),
2397 })
2398 }
2399
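// Computes code actions for a peer's anchor range, waiting until the edits
// referenced by the range's anchors are known locally before running the request.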
2400 async fn handle_get_code_actions(
2401 this: ModelHandle<Self>,
2402 envelope: TypedEnvelope<proto::GetCodeActions>,
2403 _: Arc<Client>,
2404 mut cx: AsyncAppContext,
2405 ) -> Result<proto::GetCodeActionsResponse> {
2406 let sender_id = envelope.original_sender_id()?;
2407 let start = envelope
2408 .payload
2409 .start
2410 .and_then(language::proto::deserialize_anchor)
2411 .ok_or_else(|| anyhow!("invalid start"))?;
2412 let end = envelope
2413 .payload
2414 .end
2415 .and_then(language::proto::deserialize_anchor)
2416 .ok_or_else(|| anyhow!("invalid end"))?;
2417 let buffer = this.update(&mut cx, |this, _| {
2418 this.shared_buffers
2419 .get(&sender_id)
2420 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2421 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2422 })?;
2423 buffer
2424 .update(&mut cx, |buffer, _| {
2425 buffer.wait_for_version([start.timestamp, end.timestamp].into_iter().collect())
2426 })
2427 .await;
2428 let code_actions = this.update(&mut cx, |this, cx| {
2429 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2430 })?;
2431
2432 Ok(proto::GetCodeActionsResponse {
2433 actions: code_actions
2434 .await?
2435 .iter()
2436 .map(language::proto::serialize_code_action)
2437 .collect(),
2438 })
2439 }
2440
2441 async fn handle_apply_code_action(
2442 this: ModelHandle<Self>,
2443 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2444 _: Arc<Client>,
2445 mut cx: AsyncAppContext,
2446 ) -> Result<proto::ApplyCodeActionResponse> {
2447 let sender_id = envelope.original_sender_id()?;
2448 let action = language::proto::deserialize_code_action(
2449 envelope
2450 .payload
2451 .action
2452 .ok_or_else(|| anyhow!("invalid action"))?,
2453 )?;
2454 let apply_code_action = this.update(&mut cx, |this, cx| {
2455 let buffer = this
2456 .shared_buffers
2457 .get(&sender_id)
2458 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2459 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2460 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2461 })?;
2462
2463 let project_transaction = apply_code_action.await?;
2464 let project_transaction = this.update(&mut cx, |this, cx| {
2465 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2466 });
2467 Ok(proto::ApplyCodeActionResponse {
2468 transaction: Some(project_transaction),
2469 })
2470 }
2471
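// Resolves go-to-definition requests from a peer, serializing each target buffer
// for that peer so the response can refer to buffers the peer has not opened yet.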
2472 async fn handle_get_definition(
2473 this: ModelHandle<Self>,
2474 envelope: TypedEnvelope<proto::GetDefinition>,
2475 _: Arc<Client>,
2476 mut cx: AsyncAppContext,
2477 ) -> Result<proto::GetDefinitionResponse> {
2478 let sender_id = envelope.original_sender_id()?;
2479 let position = envelope
2480 .payload
2481 .position
2482 .and_then(deserialize_anchor)
2483 .ok_or_else(|| anyhow!("invalid position"))?;
2484 let definitions = this.update(&mut cx, |this, cx| {
2485 let source_buffer = this
2486 .shared_buffers
2487 .get(&sender_id)
2488 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2489 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2490 if source_buffer.read(cx).can_resolve(&position) {
2491 Ok(this.definition(&source_buffer, position, cx))
2492 } else {
2493 Err(anyhow!("cannot resolve position"))
2494 }
2495 })?;
2496
2497 let definitions = definitions.await?;
2498
2499 this.update(&mut cx, |this, cx| {
2500 let mut response = proto::GetDefinitionResponse {
2501 definitions: Default::default(),
2502 };
2503 for definition in definitions {
2504 let buffer =
2505 this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx);
2506 response.definitions.push(proto::Definition {
2507 target_start: Some(serialize_anchor(&definition.target_range.start)),
2508 target_end: Some(serialize_anchor(&definition.target_range.end)),
2509 buffer: Some(buffer),
2510 });
2511 }
2512 Ok(response)
2513 })
2514 }
2515
2516 async fn handle_open_buffer(
2517 this: ModelHandle<Self>,
2518 envelope: TypedEnvelope<proto::OpenBuffer>,
2519 _: Arc<Client>,
2520 mut cx: AsyncAppContext,
2521 ) -> anyhow::Result<proto::OpenBufferResponse> {
2522 let peer_id = envelope.original_sender_id()?;
2523 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2524 let open_buffer = this.update(&mut cx, |this, cx| {
2525 this.open_buffer(
2526 ProjectPath {
2527 worktree_id,
2528 path: PathBuf::from(envelope.payload.path).into(),
2529 },
2530 cx,
2531 )
2532 });
2533
2534 let buffer = open_buffer.await?;
2535 this.update(&mut cx, |this, cx| {
2536 Ok(proto::OpenBufferResponse {
2537 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
2538 })
2539 })
2540 }
2541
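// Converts a `ProjectTransaction` into its protobuf form, serializing each affected
// buffer for the given peer so the peer can materialize any buffers it does not
// already have.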
2542 fn serialize_project_transaction_for_peer(
2543 &mut self,
2544 project_transaction: ProjectTransaction,
2545 peer_id: PeerId,
2546 cx: &AppContext,
2547 ) -> proto::ProjectTransaction {
2548 let mut serialized_transaction = proto::ProjectTransaction {
2549 buffers: Default::default(),
2550 transactions: Default::default(),
2551 };
2552 for (buffer, transaction) in project_transaction.0 {
2553 serialized_transaction
2554 .buffers
2555 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
2556 serialized_transaction
2557 .transactions
2558 .push(language::proto::serialize_transaction(&transaction));
2559 }
2560 serialized_transaction
2561 }
2562
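// Reconstructs a `ProjectTransaction` received from a peer: opens each referenced
// buffer, waits for the transaction's edits to arrive, and optionally pushes the
// transaction onto the buffer's undo history.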
2563 fn deserialize_project_transaction(
2564 &mut self,
2565 message: proto::ProjectTransaction,
2566 push_to_history: bool,
2567 cx: &mut ModelContext<Self>,
2568 ) -> Task<Result<ProjectTransaction>> {
2569 cx.spawn(|this, mut cx| async move {
2570 let mut project_transaction = ProjectTransaction::default();
2571 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
2572 let buffer = this
2573 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2574 .await?;
2575 let transaction = language::proto::deserialize_transaction(transaction)?;
2576 project_transaction.0.insert(buffer, transaction);
2577 }
2578 for (buffer, transaction) in &project_transaction.0 {
2579 buffer
2580 .update(&mut cx, |buffer, _| {
2581 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2582 })
2583 .await;
2584
2585 if push_to_history {
2586 buffer.update(&mut cx, |buffer, _| {
2587 buffer.push_transaction(transaction.clone(), Instant::now());
2588 });
2589 }
2590 }
2591
2592 Ok(project_transaction)
2593 })
2594 }
2595
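// The first time a buffer is sent to a given peer, its full state is serialized;
// on subsequent sends only the buffer id is included, since the peer already has
// the state.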
2596 fn serialize_buffer_for_peer(
2597 &mut self,
2598 buffer: &ModelHandle<Buffer>,
2599 peer_id: PeerId,
2600 cx: &AppContext,
2601 ) -> proto::Buffer {
2602 let buffer_id = buffer.read(cx).remote_id();
2603 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2604 match shared_buffers.entry(buffer_id) {
2605 hash_map::Entry::Occupied(_) => proto::Buffer {
2606 variant: Some(proto::buffer::Variant::Id(buffer_id)),
2607 },
2608 hash_map::Entry::Vacant(entry) => {
2609 entry.insert(buffer.clone());
2610 proto::Buffer {
2611 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
2612 }
2613 }
2614 }
2615 }
2616
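// Resolves a protobuf buffer into a model handle. An `Id` variant refers to a
// buffer that another task is opening, so we wait for it to appear in
// `open_buffers`; a `State` variant carries the full buffer contents and is
// constructed and registered directly.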
2617 fn deserialize_buffer(
2618 &mut self,
2619 buffer: proto::Buffer,
2620 cx: &mut ModelContext<Self>,
2621 ) -> Task<Result<ModelHandle<Buffer>>> {
2622 let replica_id = self.replica_id();
2623
2624 let mut opened_buffer_tx = self.opened_buffer.clone();
2625 let mut opened_buffer_rx = self.opened_buffer.subscribe();
2626 cx.spawn(|this, mut cx| async move {
2627 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
2628 proto::buffer::Variant::Id(id) => {
2629 let buffer = loop {
2630 let buffer = this.read_with(&cx, |this, cx| {
2631 this.open_buffers
2632 .get(&(id as usize))
2633 .and_then(|buffer| buffer.upgrade(cx))
2634 });
2635 if let Some(buffer) = buffer {
2636 break buffer;
2637 }
2638 opened_buffer_rx
2639 .recv()
2640 .await
2641 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
2642 };
2643 Ok(buffer)
2644 }
2645 proto::buffer::Variant::State(mut buffer) => {
2646 let mut buffer_worktree = None;
2647 let mut buffer_file = None;
2648 if let Some(file) = buffer.file.take() {
2649 this.read_with(&cx, |this, cx| {
2650 let worktree_id = WorktreeId::from_proto(file.worktree_id);
2651 let worktree =
2652 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
2653 anyhow!("no worktree found for id {}", file.worktree_id)
2654 })?;
2655 buffer_file =
2656 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
2657 as Box<dyn language::File>);
2658 buffer_worktree = Some(worktree);
2659 Ok::<_, anyhow::Error>(())
2660 })?;
2661 }
2662
2663 let buffer = cx.add_model(|cx| {
2664 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
2665 });
2666 this.update(&mut cx, |this, cx| {
2667 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
2668 })?;
2669
2670 let _ = opened_buffer_tx.send(()).await;
2671 Ok(buffer)
2672 }
2673 }
2674 })
2675 }
2676
2677 async fn handle_close_buffer(
2678 this: ModelHandle<Self>,
2679 envelope: TypedEnvelope<proto::CloseBuffer>,
2680 _: Arc<Client>,
2681 mut cx: AsyncAppContext,
2682 ) -> anyhow::Result<()> {
2683 this.update(&mut cx, |this, cx| {
2684 if let Some(shared_buffers) =
2685 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
2686 {
2687 shared_buffers.remove(&envelope.payload.buffer_id);
2688 cx.notify();
2689 }
2690 Ok(())
2691 })
2692 }
2693
2694 async fn handle_buffer_saved(
2695 this: ModelHandle<Self>,
2696 envelope: TypedEnvelope<proto::BufferSaved>,
2697 _: Arc<Client>,
2698 mut cx: AsyncAppContext,
2699 ) -> Result<()> {
2700 let version = envelope.payload.version.try_into()?;
2701 let mtime = envelope
2702 .payload
2703 .mtime
2704 .ok_or_else(|| anyhow!("missing mtime"))?
2705 .into();
2706
2707 this.update(&mut cx, |this, cx| {
2708 let buffer = this
2709 .open_buffers
2710 .get(&(envelope.payload.buffer_id as usize))
2711 .and_then(|buffer| buffer.upgrade(cx));
2712 if let Some(buffer) = buffer {
2713 buffer.update(cx, |buffer, cx| {
2714 buffer.did_save(version, mtime, None, cx);
2715 });
2716 }
2717 Ok(())
2718 })
2719 }
2720
2721 async fn handle_buffer_reloaded(
2722 this: ModelHandle<Self>,
2723 envelope: TypedEnvelope<proto::BufferReloaded>,
2724 _: Arc<Client>,
2725 mut cx: AsyncAppContext,
2726 ) -> Result<()> {
2727 let payload = envelope.payload.clone();
2728 let version = payload.version.try_into()?;
2729 let mtime = payload
2730 .mtime
2731 .ok_or_else(|| anyhow!("missing mtime"))?
2732 .into();
2733 this.update(&mut cx, |this, cx| {
2734 let buffer = this
2735 .open_buffers
2736 .get(&(payload.buffer_id as usize))
2737 .and_then(|buffer| buffer.upgrade(cx));
2738 if let Some(buffer) = buffer {
2739 buffer.update(cx, |buffer, cx| {
2740 buffer.did_reload(version, mtime, cx);
2741 });
2742 }
2743 Ok(())
2744 })
2745 }
2746
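// Fuzzy-matches `query` against the paths of all non-weak worktrees on the
// background executor. Worktree root names are included in the candidates only
// when the project contains more than one worktree.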
2747 pub fn match_paths<'a>(
2748 &self,
2749 query: &'a str,
2750 include_ignored: bool,
2751 smart_case: bool,
2752 max_results: usize,
2753 cancel_flag: &'a AtomicBool,
2754 cx: &AppContext,
2755 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
2756 let worktrees = self
2757 .worktrees(cx)
2758 .filter(|worktree| !worktree.read(cx).is_weak())
2759 .collect::<Vec<_>>();
2760 let include_root_name = worktrees.len() > 1;
2761 let candidate_sets = worktrees
2762 .into_iter()
2763 .map(|worktree| CandidateSet {
2764 snapshot: worktree.read(cx).snapshot(),
2765 include_ignored,
2766 include_root_name,
2767 })
2768 .collect::<Vec<_>>();
2769
2770 let background = cx.background().clone();
2771 async move {
2772 fuzzy::match_paths(
2773 candidate_sets.as_slice(),
2774 query,
2775 smart_case,
2776 max_results,
2777 cancel_flag,
2778 background,
2779 )
2780 .await
2781 }
2782 }
2783}
2784
2785impl WorktreeHandle {
2786 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2787 match self {
2788 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2789 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2790 }
2791 }
2792}
2793
2794impl OpenBuffer {
2795 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
2796 match self {
2797 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
2798 OpenBuffer::Operations(_) => None,
2799 }
2800 }
2801}
2802
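// Adapts a worktree snapshot to the fuzzy matcher's `PathMatchCandidateSet` trait,
// optionally including ignored files and prefixing candidates with the worktree's
// root name.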
2803struct CandidateSet {
2804 snapshot: Snapshot,
2805 include_ignored: bool,
2806 include_root_name: bool,
2807}
2808
2809impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
2810 type Candidates = CandidateSetIter<'a>;
2811
2812 fn id(&self) -> usize {
2813 self.snapshot.id().to_usize()
2814 }
2815
2816 fn len(&self) -> usize {
2817 if self.include_ignored {
2818 self.snapshot.file_count()
2819 } else {
2820 self.snapshot.visible_file_count()
2821 }
2822 }
2823
2824 fn prefix(&self) -> Arc<str> {
2825 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
2826 self.snapshot.root_name().into()
2827 } else if self.include_root_name {
2828 format!("{}/", self.snapshot.root_name()).into()
2829 } else {
2830 "".into()
2831 }
2832 }
2833
2834 fn candidates(&'a self, start: usize) -> Self::Candidates {
2835 CandidateSetIter {
2836 traversal: self.snapshot.files(self.include_ignored, start),
2837 }
2838 }
2839}
2840
2841struct CandidateSetIter<'a> {
2842 traversal: Traversal<'a>,
2843}
2844
2845impl<'a> Iterator for CandidateSetIter<'a> {
2846 type Item = PathMatchCandidate<'a>;
2847
2848 fn next(&mut self) -> Option<Self::Item> {
2849 self.traversal.next().map(|entry| {
2850 if let EntryKind::File(char_bag) = entry.kind {
2851 PathMatchCandidate {
2852 path: &entry.path,
2853 char_bag,
2854 }
2855 } else {
2856 unreachable!()
2857 }
2858 })
2859 }
2860}
2861
2862impl Entity for Project {
2863 type Event = Event;
2864
2865 fn release(&mut self, _: &mut gpui::MutableAppContext) {
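// When the project is released, tell the server to unregister it (if this replica
// was the host) or that this replica is leaving it (if it was a guest).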
2866 match &self.client_state {
2867 ProjectClientState::Local { remote_id_rx, .. } => {
2868 if let Some(project_id) = *remote_id_rx.borrow() {
2869 self.client
2870 .send(proto::UnregisterProject { project_id })
2871 .log_err();
2872 }
2873 }
2874 ProjectClientState::Remote { remote_id, .. } => {
2875 self.client
2876 .send(proto::LeaveProject {
2877 project_id: *remote_id,
2878 })
2879 .log_err();
2880 }
2881 }
2882 }
2883
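// Before the application quits, shut down all running language servers and wait
// for them to finish.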
2884 fn app_will_quit(
2885 &mut self,
2886 _: &mut MutableAppContext,
2887 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
2888 use futures::FutureExt;
2889
2890 let shutdown_futures = self
2891 .language_servers
2892 .drain()
2893 .filter_map(|(_, server)| server.shutdown())
2894 .collect::<Vec<_>>();
2895 Some(
2896 async move {
2897 futures::future::join_all(shutdown_futures).await;
2898 }
2899 .boxed(),
2900 )
2901 }
2902}
2903
2904impl Collaborator {
2905 fn from_proto(
2906 message: proto::Collaborator,
2907 user_store: &ModelHandle<UserStore>,
2908 cx: &mut AsyncAppContext,
2909 ) -> impl Future<Output = Result<Self>> {
2910 let user = user_store.update(cx, |user_store, cx| {
2911 user_store.fetch_user(message.user_id, cx)
2912 });
2913
2914 async move {
2915 Ok(Self {
2916 peer_id: PeerId(message.peer_id),
2917 user: user.await?,
2918 replica_id: message.replica_id as ReplicaId,
2919 })
2920 }
2921 }
2922}
2923
2924impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
2925 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
2926 Self {
2927 worktree_id,
2928 path: path.as_ref().into(),
2929 }
2930 }
2931}
2932
2933impl From<lsp::CreateFileOptions> for fs::CreateOptions {
2934 fn from(options: lsp::CreateFileOptions) -> Self {
2935 Self {
2936 overwrite: options.overwrite.unwrap_or(false),
2937 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2938 }
2939 }
2940}
2941
2942impl From<lsp::RenameFileOptions> for fs::RenameOptions {
2943 fn from(options: lsp::RenameFileOptions) -> Self {
2944 Self {
2945 overwrite: options.overwrite.unwrap_or(false),
2946 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2947 }
2948 }
2949}
2950
2951impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
2952 fn from(options: lsp::DeleteFileOptions) -> Self {
2953 Self {
2954 recursive: options.recursive.unwrap_or(false),
2955 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
2956 }
2957 }
2958}
2959
2960#[cfg(test)]
2961mod tests {
2962 use super::{Event, *};
2963 use client::test::FakeHttpClient;
2964 use fs::RealFs;
2965 use futures::StreamExt;
2966 use gpui::test::subscribe;
2967 use language::{
2968 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageRegistry,
2969 LanguageServerConfig, Point,
2970 };
2971 use lsp::Url;
2972 use serde_json::json;
2973 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
2974 use unindent::Unindent as _;
2975 use util::test::temp_tree;
2976 use worktree::WorktreeHandle as _;
2977
2978 #[gpui::test]
2979 async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
2980 let dir = temp_tree(json!({
2981 "root": {
2982 "apple": "",
2983 "banana": {
2984 "carrot": {
2985 "date": "",
2986 "endive": "",
2987 }
2988 },
2989 "fennel": {
2990 "grape": "",
2991 }
2992 }
2993 }));
2994
2995 let root_link_path = dir.path().join("root_link");
2996 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
2997 unix::fs::symlink(
2998 &dir.path().join("root/fennel"),
2999 &dir.path().join("root/finnochio"),
3000 )
3001 .unwrap();
3002
3003 let project = Project::test(Arc::new(RealFs), &mut cx);
3004
3005 let (tree, _) = project
3006 .update(&mut cx, |project, cx| {
3007 project.find_or_create_local_worktree(&root_link_path, false, cx)
3008 })
3009 .await
3010 .unwrap();
3011
3012 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3013 .await;
3014 cx.read(|cx| {
3015 let tree = tree.read(cx);
3016 assert_eq!(tree.file_count(), 5);
3017 assert_eq!(
3018 tree.inode_for_path("fennel/grape"),
3019 tree.inode_for_path("finnochio/grape")
3020 );
3021 });
3022
3023 let cancel_flag = Default::default();
3024 let results = project
3025 .read_with(&cx, |project, cx| {
3026 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3027 })
3028 .await;
3029 assert_eq!(
3030 results
3031 .into_iter()
3032 .map(|result| result.path)
3033 .collect::<Vec<Arc<Path>>>(),
3034 vec![
3035 PathBuf::from("banana/carrot/date").into(),
3036 PathBuf::from("banana/carrot/endive").into(),
3037 ]
3038 );
3039 }
3040
3041 #[gpui::test]
3042 async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
3043 let (language_server_config, mut fake_server) = LanguageServerConfig::fake(&cx).await;
3044 let progress_token = language_server_config
3045 .disk_based_diagnostics_progress_token
3046 .clone()
3047 .unwrap();
3048
3049 let mut languages = LanguageRegistry::new();
3050 languages.add(Arc::new(Language::new(
3051 LanguageConfig {
3052 name: "Rust".to_string(),
3053 path_suffixes: vec!["rs".to_string()],
3054 language_server: Some(language_server_config),
3055 ..Default::default()
3056 },
3057 Some(tree_sitter_rust::language()),
3058 )));
3059
3060 let dir = temp_tree(json!({
3061 "a.rs": "fn a() { A }",
3062 "b.rs": "const y: i32 = 1",
3063 }));
3064
3065 let http_client = FakeHttpClient::with_404_response();
3066 let client = Client::new(http_client.clone());
3067 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
3068
3069 let project = cx.update(|cx| {
3070 Project::local(
3071 client,
3072 user_store,
3073 Arc::new(languages),
3074 Arc::new(RealFs),
3075 cx,
3076 )
3077 });
3078
3079 let (tree, _) = project
3080 .update(&mut cx, |project, cx| {
3081 project.find_or_create_local_worktree(dir.path(), false, cx)
3082 })
3083 .await
3084 .unwrap();
3085 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3086
3087 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3088 .await;
3089
3090 // Cause worktree to start the fake language server
3091 let _buffer = project
3092 .update(&mut cx, |project, cx| {
3093 project.open_buffer(
3094 ProjectPath {
3095 worktree_id,
3096 path: Path::new("b.rs").into(),
3097 },
3098 cx,
3099 )
3100 })
3101 .await
3102 .unwrap();
3103
3104 let mut events = subscribe(&project, &mut cx);
3105
3106 fake_server.start_progress(&progress_token).await;
3107 assert_eq!(
3108 events.next().await.unwrap(),
3109 Event::DiskBasedDiagnosticsStarted
3110 );
3111
3112 fake_server.start_progress(&progress_token).await;
3113 fake_server.end_progress(&progress_token).await;
3114 fake_server.start_progress(&progress_token).await;
3115
3116 fake_server
3117 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3118 uri: Url::from_file_path(dir.path().join("a.rs")).unwrap(),
3119 version: None,
3120 diagnostics: vec![lsp::Diagnostic {
3121 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3122 severity: Some(lsp::DiagnosticSeverity::ERROR),
3123 message: "undefined variable 'A'".to_string(),
3124 ..Default::default()
3125 }],
3126 })
3127 .await;
3128 assert_eq!(
3129 events.next().await.unwrap(),
3130 Event::DiagnosticsUpdated(ProjectPath {
3131 worktree_id,
3132 path: Arc::from(Path::new("a.rs"))
3133 })
3134 );
3135
3136 fake_server.end_progress(&progress_token).await;
3137 fake_server.end_progress(&progress_token).await;
3138 assert_eq!(
3139 events.next().await.unwrap(),
3140 Event::DiskBasedDiagnosticsUpdated
3141 );
3142 assert_eq!(
3143 events.next().await.unwrap(),
3144 Event::DiskBasedDiagnosticsFinished
3145 );
3146
3147 let buffer = project
3148 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3149 .await
3150 .unwrap();
3151
3152 buffer.read_with(&cx, |buffer, _| {
3153 let snapshot = buffer.snapshot();
3154 let diagnostics = snapshot
3155 .diagnostics_in_range::<_, Point>(0..buffer.len())
3156 .collect::<Vec<_>>();
3157 assert_eq!(
3158 diagnostics,
3159 &[DiagnosticEntry {
3160 range: Point::new(0, 9)..Point::new(0, 10),
3161 diagnostic: Diagnostic {
3162 severity: lsp::DiagnosticSeverity::ERROR,
3163 message: "undefined variable 'A'".to_string(),
3164 group_id: 0,
3165 is_primary: true,
3166 ..Default::default()
3167 }
3168 }]
3169 )
3170 });
3171 }
3172
3173 #[gpui::test]
3174 async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
3175 let dir = temp_tree(json!({
3176 "root": {
3177 "dir1": {},
3178 "dir2": {
3179 "dir3": {}
3180 }
3181 }
3182 }));
3183
3184 let project = Project::test(Arc::new(RealFs), &mut cx);
3185 let (tree, _) = project
3186 .update(&mut cx, |project, cx| {
3187 project.find_or_create_local_worktree(&dir.path(), false, cx)
3188 })
3189 .await
3190 .unwrap();
3191
3192 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3193 .await;
3194
3195 let cancel_flag = Default::default();
3196 let results = project
3197 .read_with(&cx, |project, cx| {
3198 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3199 })
3200 .await;
3201
3202 assert!(results.is_empty());
3203 }
3204
3205 #[gpui::test]
3206 async fn test_definition(mut cx: gpui::TestAppContext) {
3207 let (language_server_config, mut fake_server) = LanguageServerConfig::fake(&cx).await;
3208
3209 let mut languages = LanguageRegistry::new();
3210 languages.add(Arc::new(Language::new(
3211 LanguageConfig {
3212 name: "Rust".to_string(),
3213 path_suffixes: vec!["rs".to_string()],
3214 language_server: Some(language_server_config),
3215 ..Default::default()
3216 },
3217 Some(tree_sitter_rust::language()),
3218 )));
3219
3220 let dir = temp_tree(json!({
3221 "a.rs": "const fn a() { A }",
3222 "b.rs": "const y: i32 = crate::a()",
3223 }));
3224 let dir_path = dir.path().to_path_buf();
3225
3226 let http_client = FakeHttpClient::with_404_response();
3227 let client = Client::new(http_client.clone());
3228 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
3229 let project = cx.update(|cx| {
3230 Project::local(
3231 client,
3232 user_store,
3233 Arc::new(languages),
3234 Arc::new(RealFs),
3235 cx,
3236 )
3237 });
3238
3239 let (tree, _) = project
3240 .update(&mut cx, |project, cx| {
3241 project.find_or_create_local_worktree(dir.path().join("b.rs"), false, cx)
3242 })
3243 .await
3244 .unwrap();
3245 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3246 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3247 .await;
3248
3249 let buffer = project
3250 .update(&mut cx, |project, cx| {
3251 project.open_buffer(
3252 ProjectPath {
3253 worktree_id,
3254 path: Path::new("").into(),
3255 },
3256 cx,
3257 )
3258 })
3259 .await
3260 .unwrap();
3261
3262 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params| {
3263 let params = params.text_document_position_params;
3264 assert_eq!(
3265 params.text_document.uri.to_file_path().unwrap(),
3266 dir_path.join("b.rs")
3267 );
3268 assert_eq!(params.position, lsp::Position::new(0, 22));
3269
3270 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3271 lsp::Url::from_file_path(dir_path.join("a.rs")).unwrap(),
3272 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3273 )))
3274 });
3275
3276 let mut definitions = project
3277 .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
3278 .await
3279 .unwrap();
3280
3281 assert_eq!(definitions.len(), 1);
3282 let definition = definitions.pop().unwrap();
3283 cx.update(|cx| {
3284 let target_buffer = definition.target_buffer.read(cx);
3285 assert_eq!(
3286 target_buffer
3287 .file()
3288 .unwrap()
3289 .as_local()
3290 .unwrap()
3291 .abs_path(cx),
3292 dir.path().join("a.rs")
3293 );
3294 assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
3295 assert_eq!(
3296 list_worktrees(&project, cx),
3297 [
3298 (dir.path().join("b.rs"), false),
3299 (dir.path().join("a.rs"), true)
3300 ]
3301 );
3302
3303 drop(definition);
3304 });
3305 cx.read(|cx| {
3306 assert_eq!(
3307 list_worktrees(&project, cx),
3308 [(dir.path().join("b.rs"), false)]
3309 );
3310 });
3311
3312 fn list_worktrees(project: &ModelHandle<Project>, cx: &AppContext) -> Vec<(PathBuf, bool)> {
3313 project
3314 .read(cx)
3315 .worktrees(cx)
3316 .map(|worktree| {
3317 let worktree = worktree.read(cx);
3318 (
3319 worktree.as_local().unwrap().abs_path().to_path_buf(),
3320 worktree.is_weak(),
3321 )
3322 })
3323 .collect::<Vec<_>>()
3324 }
3325 }
3326
3327 #[gpui::test]
3328 async fn test_save_file(mut cx: gpui::TestAppContext) {
3329 let fs = Arc::new(FakeFs::new(cx.background()));
3330 fs.insert_tree(
3331 "/dir",
3332 json!({
3333 "file1": "the old contents",
3334 }),
3335 )
3336 .await;
3337
3338 let project = Project::test(fs.clone(), &mut cx);
3339 let worktree_id = project
3340 .update(&mut cx, |p, cx| {
3341 p.find_or_create_local_worktree("/dir", false, cx)
3342 })
3343 .await
3344 .unwrap()
3345 .0
3346 .read_with(&cx, |tree, _| tree.id());
3347
3348 let buffer = project
3349 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3350 .await
3351 .unwrap();
3352 buffer
3353 .update(&mut cx, |buffer, cx| {
3354 assert_eq!(buffer.text(), "the old contents");
3355 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3356 buffer.save(cx)
3357 })
3358 .await
3359 .unwrap();
3360
3361 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3362 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3363 }
3364
3365 #[gpui::test]
3366 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
3367 let fs = Arc::new(FakeFs::new(cx.background()));
3368 fs.insert_tree(
3369 "/dir",
3370 json!({
3371 "file1": "the old contents",
3372 }),
3373 )
3374 .await;
3375
3376 let project = Project::test(fs.clone(), &mut cx);
3377 let worktree_id = project
3378 .update(&mut cx, |p, cx| {
3379 p.find_or_create_local_worktree("/dir/file1", false, cx)
3380 })
3381 .await
3382 .unwrap()
3383 .0
3384 .read_with(&cx, |tree, _| tree.id());
3385
3386 let buffer = project
3387 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3388 .await
3389 .unwrap();
3390 buffer
3391 .update(&mut cx, |buffer, cx| {
3392 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3393 buffer.save(cx)
3394 })
3395 .await
3396 .unwrap();
3397
3398 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3399 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3400 }
3401
3402 #[gpui::test(retries = 5)]
3403 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3404 let dir = temp_tree(json!({
3405 "a": {
3406 "file1": "",
3407 "file2": "",
3408 "file3": "",
3409 },
3410 "b": {
3411 "c": {
3412 "file4": "",
3413 "file5": "",
3414 }
3415 }
3416 }));
3417
3418 let project = Project::test(Arc::new(RealFs), &mut cx);
3419 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3420
3421 let (tree, _) = project
3422 .update(&mut cx, |p, cx| {
3423 p.find_or_create_local_worktree(dir.path(), false, cx)
3424 })
3425 .await
3426 .unwrap();
3427 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3428
3429 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3430 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3431 async move { buffer.await.unwrap() }
3432 };
3433 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3434 tree.read_with(cx, |tree, _| {
3435 tree.entry_for_path(path)
3436 .unwrap_or_else(|| panic!("no entry for path {}", path))
3437 .id
3438 })
3439 };
3440
3441 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3442 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3443 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3444 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3445
3446 let file2_id = id_for_path("a/file2", &cx);
3447 let file3_id = id_for_path("a/file3", &cx);
3448 let file4_id = id_for_path("b/c/file4", &cx);
3449
3450 // Wait for the initial scan.
3451 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3452 .await;
3453
3454 // Create a remote copy of this worktree.
3455 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
3456 let (remote, load_task) = cx.update(|cx| {
3457 Worktree::remote(
3458 1,
3459 1,
3460 initial_snapshot.to_proto(&Default::default(), Default::default()),
3461 rpc.clone(),
3462 cx,
3463 )
3464 });
3465 load_task.await;
3466
3467 cx.read(|cx| {
3468 assert!(!buffer2.read(cx).is_dirty());
3469 assert!(!buffer3.read(cx).is_dirty());
3470 assert!(!buffer4.read(cx).is_dirty());
3471 assert!(!buffer5.read(cx).is_dirty());
3472 });
3473
3474 // Rename and delete files and directories.
3475 tree.flush_fs_events(&cx).await;
3476 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3477 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3478 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3479 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3480 tree.flush_fs_events(&cx).await;
3481
3482 let expected_paths = vec![
3483 "a",
3484 "a/file1",
3485 "a/file2.new",
3486 "b",
3487 "d",
3488 "d/file3",
3489 "d/file4",
3490 ];
3491
3492 cx.read(|app| {
3493 assert_eq!(
3494 tree.read(app)
3495 .paths()
3496 .map(|p| p.to_str().unwrap())
3497 .collect::<Vec<_>>(),
3498 expected_paths
3499 );
3500
3501 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3502 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3503 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3504
3505 assert_eq!(
3506 buffer2.read(app).file().unwrap().path().as_ref(),
3507 Path::new("a/file2.new")
3508 );
3509 assert_eq!(
3510 buffer3.read(app).file().unwrap().path().as_ref(),
3511 Path::new("d/file3")
3512 );
3513 assert_eq!(
3514 buffer4.read(app).file().unwrap().path().as_ref(),
3515 Path::new("d/file4")
3516 );
3517 assert_eq!(
3518 buffer5.read(app).file().unwrap().path().as_ref(),
3519 Path::new("b/c/file5")
3520 );
3521
3522 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3523 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3524 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3525 assert!(buffer5.read(app).file().unwrap().is_deleted());
3526 });
3527
3528 // Update the remote worktree. Check that it becomes consistent with the
3529 // local worktree.
3530 remote.update(&mut cx, |remote, cx| {
3531 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
3532 &initial_snapshot,
3533 1,
3534 1,
3535 0,
3536 true,
3537 );
3538 remote
3539 .as_remote_mut()
3540 .unwrap()
3541 .snapshot
3542 .apply_remote_update(update_message)
3543 .unwrap();
3544
3545 assert_eq!(
3546 remote
3547 .paths()
3548 .map(|p| p.to_str().unwrap())
3549 .collect::<Vec<_>>(),
3550 expected_paths
3551 );
3552 });
3553 }
3554
3555 #[gpui::test]
3556 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3557 let fs = Arc::new(FakeFs::new(cx.background()));
3558 fs.insert_tree(
3559 "/the-dir",
3560 json!({
3561 "a.txt": "a-contents",
3562 "b.txt": "b-contents",
3563 }),
3564 )
3565 .await;
3566
3567 let project = Project::test(fs.clone(), &mut cx);
3568 let worktree_id = project
3569 .update(&mut cx, |p, cx| {
3570 p.find_or_create_local_worktree("/the-dir", false, cx)
3571 })
3572 .await
3573 .unwrap()
3574 .0
3575 .read_with(&cx, |tree, _| tree.id());
3576
3577 // Spawn multiple tasks to open paths, repeating some paths.
3578 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3579 (
3580 p.open_buffer((worktree_id, "a.txt"), cx),
3581 p.open_buffer((worktree_id, "b.txt"), cx),
3582 p.open_buffer((worktree_id, "a.txt"), cx),
3583 )
3584 });
3585
3586 let buffer_a_1 = buffer_a_1.await.unwrap();
3587 let buffer_a_2 = buffer_a_2.await.unwrap();
3588 let buffer_b = buffer_b.await.unwrap();
3589 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3590 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3591
3592 // There is only one buffer per path.
3593 let buffer_a_id = buffer_a_1.id();
3594 assert_eq!(buffer_a_2.id(), buffer_a_id);
3595
3596 // Open the same path again while it is still open.
3597 drop(buffer_a_1);
3598 let buffer_a_3 = project
3599 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3600 .await
3601 .unwrap();
3602
3603 // There's still only one buffer per path.
3604 assert_eq!(buffer_a_3.id(), buffer_a_id);
3605 }
3606
3607 #[gpui::test]
3608 async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
3609 use std::fs;
3610
3611 let dir = temp_tree(json!({
3612 "file1": "abc",
3613 "file2": "def",
3614 "file3": "ghi",
3615 }));
3616
3617 let project = Project::test(Arc::new(RealFs), &mut cx);
3618 let (worktree, _) = project
3619 .update(&mut cx, |p, cx| {
3620 p.find_or_create_local_worktree(dir.path(), false, cx)
3621 })
3622 .await
3623 .unwrap();
3624 let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
3625
3626 worktree.flush_fs_events(&cx).await;
3627 worktree
3628 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3629 .await;
3630
3631 let buffer1 = project
3632 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3633 .await
3634 .unwrap();
3635 let events = Rc::new(RefCell::new(Vec::new()));
3636
3637 // initially, the buffer isn't dirty.
3638 buffer1.update(&mut cx, |buffer, cx| {
3639 cx.subscribe(&buffer1, {
3640 let events = events.clone();
3641 move |_, _, event, _| events.borrow_mut().push(event.clone())
3642 })
3643 .detach();
3644
3645 assert!(!buffer.is_dirty());
3646 assert!(events.borrow().is_empty());
3647
3648 buffer.edit(vec![1..2], "", cx);
3649 });
3650
3651 // after the first edit, the buffer is dirty, and emits a dirtied event.
3652 buffer1.update(&mut cx, |buffer, cx| {
3653 assert!(buffer.text() == "ac");
3654 assert!(buffer.is_dirty());
3655 assert_eq!(
3656 *events.borrow(),
3657 &[language::Event::Edited, language::Event::Dirtied]
3658 );
3659 events.borrow_mut().clear();
3660 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
3661 });
3662
3663 // after saving, the buffer is not dirty, and emits a saved event.
3664 buffer1.update(&mut cx, |buffer, cx| {
3665 assert!(!buffer.is_dirty());
3666 assert_eq!(*events.borrow(), &[language::Event::Saved]);
3667 events.borrow_mut().clear();
3668
3669 buffer.edit(vec![1..1], "B", cx);
3670 buffer.edit(vec![2..2], "D", cx);
3671 });
3672
3673 // after editing again, the buffer is dirty, and emits another dirtied event.
3674 buffer1.update(&mut cx, |buffer, cx| {
3675 assert!(buffer.text() == "aBDc");
3676 assert!(buffer.is_dirty());
3677 assert_eq!(
3678 *events.borrow(),
3679 &[
3680 language::Event::Edited,
3681 language::Event::Dirtied,
3682 language::Event::Edited,
3683 ],
3684 );
3685 events.borrow_mut().clear();
3686
3687 // TODO - currently, after restoring the buffer to its
3688 // previously-saved state, the buffer is still considered dirty.
3689 buffer.edit([1..3], "", cx);
3690 assert!(buffer.text() == "ac");
3691 assert!(buffer.is_dirty());
3692 });
3693
3694 assert_eq!(*events.borrow(), &[language::Event::Edited]);
3695
3696 // When a file is deleted, the buffer is considered dirty.
3697 let events = Rc::new(RefCell::new(Vec::new()));
3698 let buffer2 = project
3699 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
3700 .await
3701 .unwrap();
3702 buffer2.update(&mut cx, |_, cx| {
3703 cx.subscribe(&buffer2, {
3704 let events = events.clone();
3705 move |_, _, event, _| events.borrow_mut().push(event.clone())
3706 })
3707 .detach();
3708 });
3709
3710 fs::remove_file(dir.path().join("file2")).unwrap();
3711 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
3712 assert_eq!(
3713 *events.borrow(),
3714 &[language::Event::Dirtied, language::Event::FileHandleChanged]
3715 );
3716
3717 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3718 let events = Rc::new(RefCell::new(Vec::new()));
3719 let buffer3 = project
3720 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
3721 .await
3722 .unwrap();
3723 buffer3.update(&mut cx, |_, cx| {
3724 cx.subscribe(&buffer3, {
3725 let events = events.clone();
3726 move |_, _, event, _| events.borrow_mut().push(event.clone())
3727 })
3728 .detach();
3729 });
3730
3731 worktree.flush_fs_events(&cx).await;
3732 buffer3.update(&mut cx, |buffer, cx| {
3733 buffer.edit(Some(0..0), "x", cx);
3734 });
3735 events.borrow_mut().clear();
3736 fs::remove_file(dir.path().join("file3")).unwrap();
3737 buffer3
3738 .condition(&cx, |_, _| !events.borrow().is_empty())
3739 .await;
3740 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
3741 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3742 }
3743
3744 #[gpui::test]
3745 async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
3746 use std::fs;
3747
3748 let initial_contents = "aaa\nbbbbb\nc\n";
3749 let dir = temp_tree(json!({ "the-file": initial_contents }));
3750
3751 let project = Project::test(Arc::new(RealFs), &mut cx);
3752 let (worktree, _) = project
3753 .update(&mut cx, |p, cx| {
3754 p.find_or_create_local_worktree(dir.path(), false, cx)
3755 })
3756 .await
3757 .unwrap();
3758 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
3759
3760 worktree
3761 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3762 .await;
3763
        let abs_path = dir.path().join("the-file");
        let buffer = project
            .update(&mut cx, |p, cx| {
                p.open_buffer((worktree_id, "the-file"), cx)
            })
            .await
            .unwrap();

        // TODO
        // Add a cursor on each row.
        // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
        //     assert!(!buffer.is_dirty());
        //     buffer.add_selection_set(
        //         &(0..3)
        //             .map(|row| Selection {
        //                 id: row as usize,
        //                 start: Point::new(row, 1),
        //                 end: Point::new(row, 1),
        //                 reversed: false,
        //                 goal: SelectionGoal::None,
        //             })
        //             .collect::<Vec<_>>(),
        //         cx,
        //     )
        // });

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(&cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(&mut cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer, making it dirty (but not yet in conflict with the file on disk).
        buffer.update(&mut cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }

    #[gpui::test]
    async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
        let fs = Arc::new(FakeFs::new(cx.background()));
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

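        // Publish a set of LSP diagnostics in which the two primary diagnostics
        // ("error 1" and "error 2") reference their supporting hints via
        // related_information, and each hint points back at its primary diagnostic.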
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(&mut cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());

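        // All diagnostic entries are returned in buffer order, and each hint carries
        // the group id of the primary diagnostic it belongs to.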
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

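        // Each group can also be queried on its own; it yields the primary entry
        // together with its supporting hints.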
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
}