1pub mod fs;
2mod ignore;
3pub mod worktree;
4
5use anyhow::{anyhow, Context, Result};
6use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
7use clock::ReplicaId;
8use collections::{hash_map, HashMap, HashSet};
9use futures::Future;
10use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
11use gpui::{
12 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
13 UpgradeModelHandle, WeakModelHandle,
14};
15use language::{
16 point_from_lsp,
17 proto::{deserialize_anchor, serialize_anchor},
18 range_from_lsp, AnchorRangeExt, Bias, Buffer, CodeAction, Completion, CompletionLabel,
19 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
20 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
21};
22use lsp::{DiagnosticSeverity, LanguageServer};
23use postage::{broadcast, prelude::Stream, sink::Sink, watch};
24use smol::block_on;
25use std::{
26 convert::TryInto,
27 ops::Range,
28 path::{Path, PathBuf},
29 sync::{atomic::AtomicBool, Arc},
30 time::Instant,
31};
32use util::{post_inc, ResultExt, TryFutureExt as _};
33
34pub use fs::*;
35pub use worktree::*;
36
37pub struct Project {
38 worktrees: Vec<WorktreeHandle>,
39 active_entry: Option<ProjectEntry>,
40 languages: Arc<LanguageRegistry>,
41 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
42 client: Arc<client::Client>,
43 user_store: ModelHandle<UserStore>,
44 fs: Arc<dyn Fs>,
45 client_state: ProjectClientState,
46 collaborators: HashMap<PeerId, Collaborator>,
47 subscriptions: Vec<client::Subscription>,
48 language_servers_with_diagnostics_running: isize,
49 open_buffers: HashMap<u64, OpenBuffer>,
50 opened_buffer: broadcast::Sender<()>,
51 loading_buffers: HashMap<
52 ProjectPath,
53 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
54 >,
55 shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
56}
57
58enum OpenBuffer {
59 Loaded(WeakModelHandle<Buffer>),
60 Loading(Vec<Operation>),
61}
62
63enum WorktreeHandle {
64 Strong(ModelHandle<Worktree>),
65 Weak(WeakModelHandle<Worktree>),
66}
67
68enum ProjectClientState {
69 Local {
70 is_shared: bool,
71 remote_id_tx: watch::Sender<Option<u64>>,
72 remote_id_rx: watch::Receiver<Option<u64>>,
73 _maintain_remote_id_task: Task<Option<()>>,
74 },
75 Remote {
76 sharing_has_stopped: bool,
77 remote_id: u64,
78 replica_id: ReplicaId,
79 },
80}
81
82#[derive(Clone, Debug)]
83pub struct Collaborator {
84 pub user: Arc<User>,
85 pub peer_id: PeerId,
86 pub replica_id: ReplicaId,
87}
88
89#[derive(Clone, Debug, PartialEq)]
90pub enum Event {
91 ActiveEntryChanged(Option<ProjectEntry>),
92 WorktreeRemoved(WorktreeId),
93 DiskBasedDiagnosticsStarted,
94 DiskBasedDiagnosticsUpdated,
95 DiskBasedDiagnosticsFinished,
96 DiagnosticsUpdated(ProjectPath),
97}
98
99#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
100pub struct ProjectPath {
101 pub worktree_id: WorktreeId,
102 pub path: Arc<Path>,
103}
104
105#[derive(Clone, Debug, Default, PartialEq)]
106pub struct DiagnosticSummary {
107 pub error_count: usize,
108 pub warning_count: usize,
109 pub info_count: usize,
110 pub hint_count: usize,
111}
112
113#[derive(Debug)]
114pub struct Definition {
115 pub target_buffer: ModelHandle<Buffer>,
116 pub target_range: Range<language::Anchor>,
117}
118
119#[derive(Default)]
120pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
121
122impl DiagnosticSummary {
123 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
124 let mut this = Self {
125 error_count: 0,
126 warning_count: 0,
127 info_count: 0,
128 hint_count: 0,
129 };
130
131 for entry in diagnostics {
132 if entry.diagnostic.is_primary {
133 match entry.diagnostic.severity {
134 DiagnosticSeverity::ERROR => this.error_count += 1,
135 DiagnosticSeverity::WARNING => this.warning_count += 1,
136 DiagnosticSeverity::INFORMATION => this.info_count += 1,
137 DiagnosticSeverity::HINT => this.hint_count += 1,
138 _ => {}
139 }
140 }
141 }
142
143 this
144 }
145
146 pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
147 proto::DiagnosticSummary {
148 path: path.to_string_lossy().to_string(),
149 error_count: self.error_count as u32,
150 warning_count: self.warning_count as u32,
151 info_count: self.info_count as u32,
152 hint_count: self.hint_count as u32,
153 }
154 }
155}
156
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
158pub struct ProjectEntry {
159 pub worktree_id: WorktreeId,
160 pub entry_id: usize,
161}
162
163impl Project {
164 pub fn init(client: &Arc<Client>) {
165 client.add_entity_message_handler(Self::handle_add_collaborator);
166 client.add_entity_message_handler(Self::handle_buffer_reloaded);
167 client.add_entity_message_handler(Self::handle_buffer_saved);
168 client.add_entity_message_handler(Self::handle_close_buffer);
169 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
170 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
171 client.add_entity_message_handler(Self::handle_remove_collaborator);
172 client.add_entity_message_handler(Self::handle_share_worktree);
173 client.add_entity_message_handler(Self::handle_unregister_worktree);
174 client.add_entity_message_handler(Self::handle_unshare_project);
175 client.add_entity_message_handler(Self::handle_update_buffer_file);
176 client.add_entity_message_handler(Self::handle_update_buffer);
177 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
178 client.add_entity_message_handler(Self::handle_update_worktree);
179 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
180 client.add_entity_request_handler(Self::handle_apply_code_action);
181 client.add_entity_request_handler(Self::handle_format_buffers);
182 client.add_entity_request_handler(Self::handle_get_code_actions);
183 client.add_entity_request_handler(Self::handle_get_completions);
184 client.add_entity_request_handler(Self::handle_get_definition);
185 client.add_entity_request_handler(Self::handle_open_buffer);
186 client.add_entity_request_handler(Self::handle_save_buffer);
187 }
188
189 pub fn local(
190 client: Arc<Client>,
191 user_store: ModelHandle<UserStore>,
192 languages: Arc<LanguageRegistry>,
193 fs: Arc<dyn Fs>,
194 cx: &mut MutableAppContext,
195 ) -> ModelHandle<Self> {
196 cx.add_model(|cx: &mut ModelContext<Self>| {
197 let (remote_id_tx, remote_id_rx) = watch::channel();
198 let _maintain_remote_id_task = cx.spawn_weak({
199 let rpc = client.clone();
200 move |this, mut cx| {
201 async move {
202 let mut status = rpc.status();
203 while let Some(status) = status.recv().await {
204 if let Some(this) = this.upgrade(&cx) {
205 let remote_id = if let client::Status::Connected { .. } = status {
206 let response = rpc.request(proto::RegisterProject {}).await?;
207 Some(response.project_id)
208 } else {
209 None
210 };
211
212 if let Some(project_id) = remote_id {
213 let mut registrations = Vec::new();
214 this.update(&mut cx, |this, cx| {
215 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
216 registrations.push(worktree.update(
217 cx,
218 |worktree, cx| {
219 let worktree = worktree.as_local_mut().unwrap();
220 worktree.register(project_id, cx)
221 },
222 ));
223 }
224 });
225 for registration in registrations {
226 registration.await?;
227 }
228 }
229 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
230 }
231 }
232 Ok(())
233 }
234 .log_err()
235 }
236 });
237
238 Self {
239 worktrees: Default::default(),
240 collaborators: Default::default(),
241 open_buffers: Default::default(),
242 loading_buffers: Default::default(),
243 shared_buffers: Default::default(),
244 client_state: ProjectClientState::Local {
245 is_shared: false,
246 remote_id_tx,
247 remote_id_rx,
248 _maintain_remote_id_task,
249 },
250 opened_buffer: broadcast::channel(1).0,
251 subscriptions: Vec::new(),
252 active_entry: None,
253 languages,
254 client,
255 user_store,
256 fs,
257 language_servers_with_diagnostics_running: 0,
258 language_servers: Default::default(),
259 }
260 })
261 }
262
263 pub async fn remote(
264 remote_id: u64,
265 client: Arc<Client>,
266 user_store: ModelHandle<UserStore>,
267 languages: Arc<LanguageRegistry>,
268 fs: Arc<dyn Fs>,
269 cx: &mut AsyncAppContext,
270 ) -> Result<ModelHandle<Self>> {
271 client.authenticate_and_connect(&cx).await?;
272
273 let response = client
274 .request(proto::JoinProject {
275 project_id: remote_id,
276 })
277 .await?;
278
279 let replica_id = response.replica_id as ReplicaId;
280
281 let mut worktrees = Vec::new();
282 for worktree in response.worktrees {
283 let (worktree, load_task) = cx
284 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
285 worktrees.push(worktree);
286 load_task.detach();
287 }
288
289 let this = cx.add_model(|cx| {
290 let mut this = Self {
291 worktrees: Vec::new(),
292 open_buffers: Default::default(),
293 loading_buffers: Default::default(),
294 opened_buffer: broadcast::channel(1).0,
295 shared_buffers: Default::default(),
296 active_entry: None,
297 collaborators: Default::default(),
298 languages,
299 user_store: user_store.clone(),
300 fs,
301 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
302 client,
303 client_state: ProjectClientState::Remote {
304 sharing_has_stopped: false,
305 remote_id,
306 replica_id,
307 },
308 language_servers_with_diagnostics_running: 0,
309 language_servers: Default::default(),
310 };
311 for worktree in worktrees {
312 this.add_worktree(&worktree, cx);
313 }
314 this
315 });
316
317 let user_ids = response
318 .collaborators
319 .iter()
320 .map(|peer| peer.user_id)
321 .collect();
322 user_store
323 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
324 .await?;
325 let mut collaborators = HashMap::default();
326 for message in response.collaborators {
327 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
328 collaborators.insert(collaborator.peer_id, collaborator);
329 }
330
331 this.update(cx, |this, _| {
332 this.collaborators = collaborators;
333 });
334
335 Ok(this)
336 }
337
338 #[cfg(any(test, feature = "test-support"))]
339 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
340 let languages = Arc::new(LanguageRegistry::new());
341 let http_client = client::test::FakeHttpClient::with_404_response();
342 let client = client::Client::new(http_client.clone());
343 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
344 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
345 }
346
347 #[cfg(any(test, feature = "test-support"))]
348 pub fn shared_buffer(&self, peer_id: PeerId, remote_id: u64) -> Option<ModelHandle<Buffer>> {
349 self.shared_buffers
350 .get(&peer_id)
351 .and_then(|buffers| buffers.get(&remote_id))
352 .cloned()
353 }
354
355 #[cfg(any(test, feature = "test-support"))]
356 pub fn has_buffered_operations(&self) -> bool {
357 self.open_buffers
358 .values()
359 .any(|buffer| matches!(buffer, OpenBuffer::Loading(_)))
360 }
361
362 pub fn fs(&self) -> &Arc<dyn Fs> {
363 &self.fs
364 }
365
366 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
367 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
368 *remote_id_tx.borrow_mut() = remote_id;
369 }
370
371 self.subscriptions.clear();
372 if let Some(remote_id) = remote_id {
373 self.subscriptions
374 .push(self.client.add_model_for_remote_entity(remote_id, cx));
375 }
376 }
377
378 pub fn remote_id(&self) -> Option<u64> {
379 match &self.client_state {
380 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
381 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
382 }
383 }
384
385 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
386 let mut id = None;
387 let mut watch = None;
388 match &self.client_state {
389 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
390 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
391 }
392
393 async move {
394 if let Some(id) = id {
395 return id;
396 }
397 let mut watch = watch.unwrap();
398 loop {
399 let id = *watch.borrow();
400 if let Some(id) = id {
401 return id;
402 }
403 watch.recv().await;
404 }
405 }
406 }
407
408 pub fn replica_id(&self) -> ReplicaId {
409 match &self.client_state {
410 ProjectClientState::Local { .. } => 0,
411 ProjectClientState::Remote { replica_id, .. } => *replica_id,
412 }
413 }
414
415 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
416 &self.collaborators
417 }
418
419 pub fn worktrees<'a>(
420 &'a self,
421 cx: &'a AppContext,
422 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
423 self.worktrees
424 .iter()
425 .filter_map(move |worktree| worktree.upgrade(cx))
426 }
427
428 pub fn worktree_for_id(
429 &self,
430 id: WorktreeId,
431 cx: &AppContext,
432 ) -> Option<ModelHandle<Worktree>> {
433 self.worktrees(cx)
434 .find(|worktree| worktree.read(cx).id() == id)
435 }
436
437 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
438 let rpc = self.client.clone();
439 cx.spawn(|this, mut cx| async move {
440 let project_id = this.update(&mut cx, |this, _| {
441 if let ProjectClientState::Local {
442 is_shared,
443 remote_id_rx,
444 ..
445 } = &mut this.client_state
446 {
447 *is_shared = true;
448 remote_id_rx
449 .borrow()
450 .ok_or_else(|| anyhow!("no project id"))
451 } else {
452 Err(anyhow!("can't share a remote project"))
453 }
454 })?;
455
456 rpc.request(proto::ShareProject { project_id }).await?;
457 let mut tasks = Vec::new();
458 this.update(&mut cx, |this, cx| {
459 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
460 worktree.update(cx, |worktree, cx| {
461 let worktree = worktree.as_local_mut().unwrap();
462 tasks.push(worktree.share(project_id, cx));
463 });
464 }
465 });
466 for task in tasks {
467 task.await?;
468 }
469 this.update(&mut cx, |_, cx| cx.notify());
470 Ok(())
471 })
472 }
473
474 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
475 let rpc = self.client.clone();
476 cx.spawn(|this, mut cx| async move {
477 let project_id = this.update(&mut cx, |this, _| {
478 if let ProjectClientState::Local {
479 is_shared,
480 remote_id_rx,
481 ..
482 } = &mut this.client_state
483 {
484 *is_shared = false;
485 remote_id_rx
486 .borrow()
487 .ok_or_else(|| anyhow!("no project id"))
488 } else {
489 Err(anyhow!("can't share a remote project"))
490 }
491 })?;
492
493 rpc.send(proto::UnshareProject { project_id })?;
494 this.update(&mut cx, |this, cx| {
495 this.collaborators.clear();
496 this.shared_buffers.clear();
497 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
498 worktree.update(cx, |worktree, _| {
499 worktree.as_local_mut().unwrap().unshare();
500 });
501 }
502 cx.notify()
503 });
504 Ok(())
505 })
506 }
507
508 pub fn is_read_only(&self) -> bool {
509 match &self.client_state {
510 ProjectClientState::Local { .. } => false,
511 ProjectClientState::Remote {
512 sharing_has_stopped,
513 ..
514 } => *sharing_has_stopped,
515 }
516 }
517
518 pub fn is_local(&self) -> bool {
519 match &self.client_state {
520 ProjectClientState::Local { .. } => true,
521 ProjectClientState::Remote { .. } => false,
522 }
523 }
524
525 pub fn is_remote(&self) -> bool {
526 !self.is_local()
527 }
528
529 pub fn open_buffer(
530 &mut self,
531 path: impl Into<ProjectPath>,
532 cx: &mut ModelContext<Self>,
533 ) -> Task<Result<ModelHandle<Buffer>>> {
534 let project_path = path.into();
535 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
536 worktree
537 } else {
538 return Task::ready(Err(anyhow!("no such worktree")));
539 };
540
541 // If there is already a buffer for the given path, then return it.
542 let existing_buffer = self.get_open_buffer(&project_path, cx);
543 if let Some(existing_buffer) = existing_buffer {
544 return Task::ready(Ok(existing_buffer));
545 }
546
547 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
548 // If the given path is already being loaded, then wait for that existing
549 // task to complete and return the same buffer.
550 hash_map::Entry::Occupied(e) => e.get().clone(),
551
552 // Otherwise, record the fact that this path is now being loaded.
553 hash_map::Entry::Vacant(entry) => {
554 let (mut tx, rx) = postage::watch::channel();
555 entry.insert(rx.clone());
556
557 let load_buffer = if worktree.read(cx).is_local() {
558 self.open_local_buffer(&project_path.path, &worktree, cx)
559 } else {
560 self.open_remote_buffer(&project_path.path, &worktree, cx)
561 };
562
563 cx.spawn(move |this, mut cx| async move {
564 let load_result = load_buffer.await;
565 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
566 // Record the fact that the buffer is no longer loading.
567 this.loading_buffers.remove(&project_path);
568 if this.loading_buffers.is_empty() {
569 this.open_buffers
570 .retain(|_, buffer| matches!(buffer, OpenBuffer::Loaded(_)))
571 }
572
573 let buffer = load_result.map_err(Arc::new)?;
574 Ok(buffer)
575 }));
576 })
577 .detach();
578 rx
579 }
580 };
581
582 cx.foreground().spawn(async move {
583 loop {
584 if let Some(result) = loading_watch.borrow().as_ref() {
585 match result {
586 Ok(buffer) => return Ok(buffer.clone()),
587 Err(error) => return Err(anyhow!("{}", error)),
588 }
589 }
590 loading_watch.recv().await;
591 }
592 })
593 }
594
595 fn open_local_buffer(
596 &mut self,
597 path: &Arc<Path>,
598 worktree: &ModelHandle<Worktree>,
599 cx: &mut ModelContext<Self>,
600 ) -> Task<Result<ModelHandle<Buffer>>> {
601 let load_buffer = worktree.update(cx, |worktree, cx| {
602 let worktree = worktree.as_local_mut().unwrap();
603 worktree.load_buffer(path, cx)
604 });
605 let worktree = worktree.downgrade();
606 cx.spawn(|this, mut cx| async move {
607 let buffer = load_buffer.await?;
608 let worktree = worktree
609 .upgrade(&cx)
610 .ok_or_else(|| anyhow!("worktree was removed"))?;
611 this.update(&mut cx, |this, cx| {
612 this.register_buffer(&buffer, Some(&worktree), cx)
613 })?;
614 Ok(buffer)
615 })
616 }
617
618 fn open_remote_buffer(
619 &mut self,
620 path: &Arc<Path>,
621 worktree: &ModelHandle<Worktree>,
622 cx: &mut ModelContext<Self>,
623 ) -> Task<Result<ModelHandle<Buffer>>> {
624 let rpc = self.client.clone();
625 let project_id = self.remote_id().unwrap();
626 let remote_worktree_id = worktree.read(cx).id();
627 let path = path.clone();
628 let path_string = path.to_string_lossy().to_string();
629 cx.spawn(|this, mut cx| async move {
630 let response = rpc
631 .request(proto::OpenBuffer {
632 project_id,
633 worktree_id: remote_worktree_id.to_proto(),
634 path: path_string,
635 })
636 .await?;
637 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
638 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
639 .await
640 })
641 }
642
643 fn open_local_buffer_from_lsp_path(
644 &mut self,
645 abs_path: lsp::Url,
646 lang_name: String,
647 lang_server: Arc<LanguageServer>,
648 cx: &mut ModelContext<Self>,
649 ) -> Task<Result<ModelHandle<Buffer>>> {
650 cx.spawn(|this, mut cx| async move {
651 let abs_path = abs_path
652 .to_file_path()
653 .map_err(|_| anyhow!("can't convert URI to path"))?;
654 let (worktree, relative_path) = if let Some(result) =
655 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
656 {
657 result
658 } else {
659 let worktree = this
660 .update(&mut cx, |this, cx| {
661 this.create_local_worktree(&abs_path, true, cx)
662 })
663 .await?;
664 this.update(&mut cx, |this, cx| {
665 this.language_servers
666 .insert((worktree.read(cx).id(), lang_name), lang_server);
667 });
668 (worktree, PathBuf::new())
669 };
670
671 let project_path = ProjectPath {
672 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
673 path: relative_path.into(),
674 };
675 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
676 .await
677 })
678 }
679
680 pub fn save_buffer_as(
681 &self,
682 buffer: ModelHandle<Buffer>,
683 abs_path: PathBuf,
684 cx: &mut ModelContext<Project>,
685 ) -> Task<Result<()>> {
686 let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
687 cx.spawn(|this, mut cx| async move {
688 let (worktree, path) = worktree_task.await?;
689 worktree
690 .update(&mut cx, |worktree, cx| {
691 worktree
692 .as_local_mut()
693 .unwrap()
694 .save_buffer_as(buffer.clone(), path, cx)
695 })
696 .await?;
697 this.update(&mut cx, |this, cx| {
698 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
699 });
700 Ok(())
701 })
702 }
703
704 #[cfg(any(test, feature = "test-support"))]
705 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
706 let path = path.into();
707 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
708 self.open_buffers.iter().any(|(_, buffer)| {
709 if let Some(buffer) = buffer.upgrade(cx) {
710 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
711 if file.worktree == worktree && file.path() == &path.path {
712 return true;
713 }
714 }
715 }
716 false
717 })
718 } else {
719 false
720 }
721 }
722
723 fn get_open_buffer(
724 &mut self,
725 path: &ProjectPath,
726 cx: &mut ModelContext<Self>,
727 ) -> Option<ModelHandle<Buffer>> {
728 let mut result = None;
729 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
730 self.open_buffers.retain(|_, buffer| {
731 if let Some(buffer) = buffer.upgrade(cx) {
732 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
733 if file.worktree == worktree && file.path() == &path.path {
734 result = Some(buffer);
735 }
736 }
737 true
738 } else {
739 false
740 }
741 });
742 result
743 }
744
745 fn register_buffer(
746 &mut self,
747 buffer: &ModelHandle<Buffer>,
748 worktree: Option<&ModelHandle<Worktree>>,
749 cx: &mut ModelContext<Self>,
750 ) -> Result<()> {
751 match self.open_buffers.insert(
752 buffer.read(cx).remote_id(),
753 OpenBuffer::Loaded(buffer.downgrade()),
754 ) {
755 None => {}
756 Some(OpenBuffer::Loading(operations)) => {
757 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
758 }
759 Some(OpenBuffer::Loaded(_)) => Err(anyhow!("registered the same buffer twice"))?,
760 }
761 self.assign_language_to_buffer(&buffer, worktree, cx);
762 Ok(())
763 }
764
765 fn assign_language_to_buffer(
766 &mut self,
767 buffer: &ModelHandle<Buffer>,
768 worktree: Option<&ModelHandle<Worktree>>,
769 cx: &mut ModelContext<Self>,
770 ) -> Option<()> {
771 let (path, full_path) = {
772 let file = buffer.read(cx).file()?;
773 (file.path().clone(), file.full_path(cx))
774 };
775
776 // If the buffer has a language, set it and start/assign the language server
777 if let Some(language) = self.languages.select_language(&full_path) {
778 buffer.update(cx, |buffer, cx| {
779 buffer.set_language(Some(language.clone()), cx);
780 });
781
782 // For local worktrees, start a language server if needed.
783 // Also assign the language server and any previously stored diagnostics to the buffer.
784 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
785 let worktree_id = local_worktree.id();
786 let worktree_abs_path = local_worktree.abs_path().clone();
787
788 let language_server = match self
789 .language_servers
790 .entry((worktree_id, language.name().to_string()))
791 {
792 hash_map::Entry::Occupied(e) => Some(e.get().clone()),
793 hash_map::Entry::Vacant(e) => Self::start_language_server(
794 self.client.clone(),
795 language.clone(),
796 &worktree_abs_path,
797 cx,
798 )
799 .map(|server| e.insert(server).clone()),
800 };
801
802 buffer.update(cx, |buffer, cx| {
803 buffer.set_language_server(language_server, cx);
804 });
805 }
806 }
807
808 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
809 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
810 buffer.update(cx, |buffer, cx| {
811 buffer.update_diagnostics(diagnostics, None, cx).log_err();
812 });
813 }
814 }
815
816 None
817 }
818
819 fn start_language_server(
820 rpc: Arc<Client>,
821 language: Arc<Language>,
822 worktree_path: &Path,
823 cx: &mut ModelContext<Self>,
824 ) -> Option<Arc<LanguageServer>> {
825 enum LspEvent {
826 DiagnosticsStart,
827 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
828 DiagnosticsFinish,
829 }
830
831 let language_server = language
832 .start_server(worktree_path, cx)
833 .log_err()
834 .flatten()?;
835 let disk_based_sources = language
836 .disk_based_diagnostic_sources()
837 .cloned()
838 .unwrap_or_default();
839 let disk_based_diagnostics_progress_token =
840 language.disk_based_diagnostics_progress_token().cloned();
841 let has_disk_based_diagnostic_progress_token =
842 disk_based_diagnostics_progress_token.is_some();
843 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
844
845 // Listen for `PublishDiagnostics` notifications.
846 language_server
847 .on_notification::<lsp::notification::PublishDiagnostics, _>({
848 let diagnostics_tx = diagnostics_tx.clone();
849 move |params| {
850 if !has_disk_based_diagnostic_progress_token {
851 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
852 }
853 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params))).ok();
854 if !has_disk_based_diagnostic_progress_token {
855 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
856 }
857 }
858 })
859 .detach();
860
861 // Listen for `Progress` notifications. Send an event when the language server
862 // transitions between running jobs and not running any jobs.
863 let mut running_jobs_for_this_server: i32 = 0;
864 language_server
865 .on_notification::<lsp::notification::Progress, _>(move |params| {
866 let token = match params.token {
867 lsp::NumberOrString::Number(_) => None,
868 lsp::NumberOrString::String(token) => Some(token),
869 };
870
871 if token == disk_based_diagnostics_progress_token {
872 match params.value {
873 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
874 lsp::WorkDoneProgress::Begin(_) => {
875 running_jobs_for_this_server += 1;
876 if running_jobs_for_this_server == 1 {
877 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
878 }
879 }
880 lsp::WorkDoneProgress::End(_) => {
881 running_jobs_for_this_server -= 1;
882 if running_jobs_for_this_server == 0 {
883 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
884 }
885 }
886 _ => {}
887 },
888 }
889 }
890 })
891 .detach();
892
893 // Process all the LSP events.
894 cx.spawn_weak(|this, mut cx| async move {
895 while let Ok(message) = diagnostics_rx.recv().await {
896 let this = this.upgrade(&cx)?;
897 match message {
898 LspEvent::DiagnosticsStart => {
899 this.update(&mut cx, |this, cx| {
900 this.disk_based_diagnostics_started(cx);
901 if let Some(project_id) = this.remote_id() {
902 rpc.send(proto::DiskBasedDiagnosticsUpdating { project_id })
903 .log_err();
904 }
905 });
906 }
907 LspEvent::DiagnosticsUpdate(mut params) => {
908 language.process_diagnostics(&mut params);
909 this.update(&mut cx, |this, cx| {
910 this.update_diagnostics(params, &disk_based_sources, cx)
911 .log_err();
912 });
913 }
914 LspEvent::DiagnosticsFinish => {
915 this.update(&mut cx, |this, cx| {
916 this.disk_based_diagnostics_finished(cx);
917 if let Some(project_id) = this.remote_id() {
918 rpc.send(proto::DiskBasedDiagnosticsUpdated { project_id })
919 .log_err();
920 }
921 });
922 }
923 }
924 }
925 Some(())
926 })
927 .detach();
928
929 Some(language_server)
930 }
931
932 pub fn update_diagnostics(
933 &mut self,
934 params: lsp::PublishDiagnosticsParams,
935 disk_based_sources: &HashSet<String>,
936 cx: &mut ModelContext<Self>,
937 ) -> Result<()> {
938 let abs_path = params
939 .uri
940 .to_file_path()
941 .map_err(|_| anyhow!("URI is not a file"))?;
942 let mut next_group_id = 0;
943 let mut diagnostics = Vec::default();
944 let mut primary_diagnostic_group_ids = HashMap::default();
945 let mut sources_by_group_id = HashMap::default();
946 let mut supporting_diagnostic_severities = HashMap::default();
947 for diagnostic in ¶ms.diagnostics {
948 let source = diagnostic.source.as_ref();
949 let code = diagnostic.code.as_ref().map(|code| match code {
950 lsp::NumberOrString::Number(code) => code.to_string(),
951 lsp::NumberOrString::String(code) => code.clone(),
952 });
953 let range = range_from_lsp(diagnostic.range);
954 let is_supporting = diagnostic
955 .related_information
956 .as_ref()
957 .map_or(false, |infos| {
958 infos.iter().any(|info| {
959 primary_diagnostic_group_ids.contains_key(&(
960 source,
961 code.clone(),
962 range_from_lsp(info.location.range),
963 ))
964 })
965 });
966
967 if is_supporting {
968 if let Some(severity) = diagnostic.severity {
969 supporting_diagnostic_severities
970 .insert((source, code.clone(), range), severity);
971 }
972 } else {
973 let group_id = post_inc(&mut next_group_id);
974 let is_disk_based =
975 source.map_or(false, |source| disk_based_sources.contains(source));
976
977 sources_by_group_id.insert(group_id, source);
978 primary_diagnostic_group_ids
979 .insert((source, code.clone(), range.clone()), group_id);
980
981 diagnostics.push(DiagnosticEntry {
982 range,
983 diagnostic: Diagnostic {
984 code: code.clone(),
985 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
986 message: diagnostic.message.clone(),
987 group_id,
988 is_primary: true,
989 is_valid: true,
990 is_disk_based,
991 },
992 });
993 if let Some(infos) = &diagnostic.related_information {
994 for info in infos {
995 if info.location.uri == params.uri && !info.message.is_empty() {
996 let range = range_from_lsp(info.location.range);
997 diagnostics.push(DiagnosticEntry {
998 range,
999 diagnostic: Diagnostic {
1000 code: code.clone(),
1001 severity: DiagnosticSeverity::INFORMATION,
1002 message: info.message.clone(),
1003 group_id,
1004 is_primary: false,
1005 is_valid: true,
1006 is_disk_based,
1007 },
1008 });
1009 }
1010 }
1011 }
1012 }
1013 }
1014
1015 for entry in &mut diagnostics {
1016 let diagnostic = &mut entry.diagnostic;
1017 if !diagnostic.is_primary {
1018 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1019 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1020 source,
1021 diagnostic.code.clone(),
1022 entry.range.clone(),
1023 )) {
1024 diagnostic.severity = severity;
1025 }
1026 }
1027 }
1028
1029 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1030 Ok(())
1031 }
1032
1033 pub fn update_diagnostic_entries(
1034 &mut self,
1035 abs_path: PathBuf,
1036 version: Option<i32>,
1037 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1038 cx: &mut ModelContext<Project>,
1039 ) -> Result<(), anyhow::Error> {
1040 let (worktree, relative_path) = self
1041 .find_local_worktree(&abs_path, cx)
1042 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1043 let project_path = ProjectPath {
1044 worktree_id: worktree.read(cx).id(),
1045 path: relative_path.into(),
1046 };
1047
1048 for buffer in self.open_buffers.values() {
1049 if let Some(buffer) = buffer.upgrade(cx) {
1050 if buffer
1051 .read(cx)
1052 .file()
1053 .map_or(false, |file| *file.path() == project_path.path)
1054 {
1055 buffer.update(cx, |buffer, cx| {
1056 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1057 })?;
1058 break;
1059 }
1060 }
1061 }
1062 worktree.update(cx, |worktree, cx| {
1063 worktree
1064 .as_local_mut()
1065 .ok_or_else(|| anyhow!("not a local worktree"))?
1066 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1067 })?;
1068 cx.emit(Event::DiagnosticsUpdated(project_path));
1069 Ok(())
1070 }
1071
1072 pub fn format(
1073 &self,
1074 buffers: HashSet<ModelHandle<Buffer>>,
1075 push_to_history: bool,
1076 cx: &mut ModelContext<Project>,
1077 ) -> Task<Result<ProjectTransaction>> {
1078 let mut local_buffers = Vec::new();
1079 let mut remote_buffers = None;
1080 for buffer_handle in buffers {
1081 let buffer = buffer_handle.read(cx);
1082 let worktree;
1083 if let Some(file) = File::from_dyn(buffer.file()) {
1084 worktree = file.worktree.clone();
1085 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1086 let lang_server;
1087 if let Some(lang) = buffer.language() {
1088 if let Some(server) = self
1089 .language_servers
1090 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1091 {
1092 lang_server = server.clone();
1093 } else {
1094 return Task::ready(Ok(Default::default()));
1095 };
1096 } else {
1097 return Task::ready(Ok(Default::default()));
1098 }
1099
1100 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1101 } else {
1102 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1103 }
1104 } else {
1105 return Task::ready(Ok(Default::default()));
1106 }
1107 }
1108
1109 let remote_buffers = self.remote_id().zip(remote_buffers);
1110 let client = self.client.clone();
1111
1112 cx.spawn(|this, mut cx| async move {
1113 let mut project_transaction = ProjectTransaction::default();
1114
1115 if let Some((project_id, remote_buffers)) = remote_buffers {
1116 let response = client
1117 .request(proto::FormatBuffers {
1118 project_id,
1119 buffer_ids: remote_buffers
1120 .iter()
1121 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1122 .collect(),
1123 })
1124 .await?
1125 .transaction
1126 .ok_or_else(|| anyhow!("missing transaction"))?;
1127 project_transaction = this
1128 .update(&mut cx, |this, cx| {
1129 this.deserialize_project_transaction(response, push_to_history, cx)
1130 })
1131 .await?;
1132 }
1133
1134 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1135 let lsp_edits = lang_server
1136 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1137 text_document: lsp::TextDocumentIdentifier::new(
1138 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1139 ),
1140 options: Default::default(),
1141 work_done_progress_params: Default::default(),
1142 })
1143 .await?;
1144
1145 if let Some(lsp_edits) = lsp_edits {
1146 let edits = buffer
1147 .update(&mut cx, |buffer, cx| {
1148 buffer.edits_from_lsp(lsp_edits, None, cx)
1149 })
1150 .await?;
1151 buffer.update(&mut cx, |buffer, cx| {
1152 buffer.finalize_last_transaction();
1153 buffer.start_transaction();
1154 for (range, text) in edits {
1155 buffer.edit([range], text, cx);
1156 }
1157 if buffer.end_transaction(cx).is_some() {
1158 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1159 if !push_to_history {
1160 buffer.forget_transaction(transaction.id);
1161 }
1162 project_transaction.0.insert(cx.handle(), transaction);
1163 }
1164 });
1165 }
1166 }
1167
1168 Ok(project_transaction)
1169 })
1170 }
1171
1172 pub fn definition<T: ToPointUtf16>(
1173 &self,
1174 source_buffer_handle: &ModelHandle<Buffer>,
1175 position: T,
1176 cx: &mut ModelContext<Self>,
1177 ) -> Task<Result<Vec<Definition>>> {
1178 let source_buffer_handle = source_buffer_handle.clone();
1179 let source_buffer = source_buffer_handle.read(cx);
1180 let worktree;
1181 let buffer_abs_path;
1182 if let Some(file) = File::from_dyn(source_buffer.file()) {
1183 worktree = file.worktree.clone();
1184 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1185 } else {
1186 return Task::ready(Ok(Default::default()));
1187 };
1188
1189 let position = position.to_point_utf16(source_buffer);
1190
1191 if worktree.read(cx).as_local().is_some() {
1192 let buffer_abs_path = buffer_abs_path.unwrap();
1193 let lang_name;
1194 let lang_server;
1195 if let Some(lang) = source_buffer.language() {
1196 lang_name = lang.name().to_string();
1197 if let Some(server) = self
1198 .language_servers
1199 .get(&(worktree.read(cx).id(), lang_name.clone()))
1200 {
1201 lang_server = server.clone();
1202 } else {
1203 return Task::ready(Ok(Default::default()));
1204 };
1205 } else {
1206 return Task::ready(Ok(Default::default()));
1207 }
1208
1209 cx.spawn(|this, mut cx| async move {
1210 let response = lang_server
1211 .request::<lsp::request::GotoDefinition>(lsp::GotoDefinitionParams {
1212 text_document_position_params: lsp::TextDocumentPositionParams {
1213 text_document: lsp::TextDocumentIdentifier::new(
1214 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1215 ),
1216 position: lsp::Position::new(position.row, position.column),
1217 },
1218 work_done_progress_params: Default::default(),
1219 partial_result_params: Default::default(),
1220 })
1221 .await?;
1222
1223 let mut definitions = Vec::new();
1224 if let Some(response) = response {
1225 let mut unresolved_locations = Vec::new();
1226 match response {
1227 lsp::GotoDefinitionResponse::Scalar(loc) => {
1228 unresolved_locations.push((loc.uri, loc.range));
1229 }
1230 lsp::GotoDefinitionResponse::Array(locs) => {
1231 unresolved_locations.extend(locs.into_iter().map(|l| (l.uri, l.range)));
1232 }
1233 lsp::GotoDefinitionResponse::Link(links) => {
1234 unresolved_locations.extend(
1235 links
1236 .into_iter()
1237 .map(|l| (l.target_uri, l.target_selection_range)),
1238 );
1239 }
1240 }
1241
1242 for (target_uri, target_range) in unresolved_locations {
1243 let target_buffer_handle = this
1244 .update(&mut cx, |this, cx| {
1245 this.open_local_buffer_from_lsp_path(
1246 target_uri,
1247 lang_name.clone(),
1248 lang_server.clone(),
1249 cx,
1250 )
1251 })
1252 .await?;
1253
1254 cx.read(|cx| {
1255 let target_buffer = target_buffer_handle.read(cx);
1256 let target_start = target_buffer
1257 .clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
1258 let target_end = target_buffer
1259 .clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
1260 definitions.push(Definition {
1261 target_buffer: target_buffer_handle,
1262 target_range: target_buffer.anchor_after(target_start)
1263 ..target_buffer.anchor_before(target_end),
1264 });
1265 });
1266 }
1267 }
1268
1269 Ok(definitions)
1270 })
1271 } else if let Some(project_id) = self.remote_id() {
1272 let client = self.client.clone();
1273 let request = proto::GetDefinition {
1274 project_id,
1275 buffer_id: source_buffer.remote_id(),
1276 position: Some(serialize_anchor(&source_buffer.anchor_before(position))),
1277 };
1278 cx.spawn(|this, mut cx| async move {
1279 let response = client.request(request).await?;
1280 let mut definitions = Vec::new();
1281 for definition in response.definitions {
1282 let buffer = definition.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1283 let target_buffer = this
1284 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1285 .await?;
1286 let target_start = definition
1287 .target_start
1288 .and_then(deserialize_anchor)
1289 .ok_or_else(|| anyhow!("missing target start"))?;
1290 let target_end = definition
1291 .target_end
1292 .and_then(deserialize_anchor)
1293 .ok_or_else(|| anyhow!("missing target end"))?;
1294 definitions.push(Definition {
1295 target_buffer,
1296 target_range: target_start..target_end,
1297 })
1298 }
1299
1300 Ok(definitions)
1301 })
1302 } else {
1303 Task::ready(Ok(Default::default()))
1304 }
1305 }
1306
1307 pub fn completions<T: ToPointUtf16>(
1308 &self,
1309 source_buffer_handle: &ModelHandle<Buffer>,
1310 position: T,
1311 cx: &mut ModelContext<Self>,
1312 ) -> Task<Result<Vec<Completion>>> {
1313 let source_buffer_handle = source_buffer_handle.clone();
1314 let source_buffer = source_buffer_handle.read(cx);
1315 let buffer_id = source_buffer.remote_id();
1316 let language = source_buffer.language().cloned();
1317 let worktree;
1318 let buffer_abs_path;
1319 if let Some(file) = File::from_dyn(source_buffer.file()) {
1320 worktree = file.worktree.clone();
1321 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1322 } else {
1323 return Task::ready(Ok(Default::default()));
1324 };
1325
1326 let position = position.to_point_utf16(source_buffer);
1327 let anchor = source_buffer.anchor_after(position);
1328
1329 if worktree.read(cx).as_local().is_some() {
1330 let buffer_abs_path = buffer_abs_path.unwrap();
1331 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1332 server
1333 } else {
1334 return Task::ready(Ok(Default::default()));
1335 };
1336
1337 cx.spawn(|_, cx| async move {
1338 let completions = lang_server
1339 .request::<lsp::request::Completion>(lsp::CompletionParams {
1340 text_document_position: lsp::TextDocumentPositionParams::new(
1341 lsp::TextDocumentIdentifier::new(
1342 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1343 ),
1344 position.to_lsp_position(),
1345 ),
1346 context: Default::default(),
1347 work_done_progress_params: Default::default(),
1348 partial_result_params: Default::default(),
1349 })
1350 .await
1351 .context("lsp completion request failed")?;
1352
1353 let completions = if let Some(completions) = completions {
1354 match completions {
1355 lsp::CompletionResponse::Array(completions) => completions,
1356 lsp::CompletionResponse::List(list) => list.items,
1357 }
1358 } else {
1359 Default::default()
1360 };
1361
1362 source_buffer_handle.read_with(&cx, |this, _| {
1363 Ok(completions
1364 .into_iter()
1365 .filter_map(|lsp_completion| {
1366 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1367 lsp::CompletionTextEdit::Edit(edit) => {
1368 (range_from_lsp(edit.range), edit.new_text.clone())
1369 }
1370 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1371 log::info!("unsupported insert/replace completion");
1372 return None;
1373 }
1374 };
1375
1376 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1377 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1378 if clipped_start == old_range.start && clipped_end == old_range.end {
1379 Some(Completion {
1380 old_range: this.anchor_before(old_range.start)
1381 ..this.anchor_after(old_range.end),
1382 new_text,
1383 label: language
1384 .as_ref()
1385 .and_then(|l| l.label_for_completion(&lsp_completion))
1386 .unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
1387 lsp_completion,
1388 })
1389 } else {
1390 None
1391 }
1392 })
1393 .collect())
1394 })
1395 })
1396 } else if let Some(project_id) = self.remote_id() {
1397 let rpc = self.client.clone();
1398 let message = proto::GetCompletions {
1399 project_id,
1400 buffer_id,
1401 position: Some(language::proto::serialize_anchor(&anchor)),
1402 version: (&source_buffer.version()).into(),
1403 };
1404 cx.spawn_weak(|_, mut cx| async move {
1405 let response = rpc.request(message).await?;
1406
1407 source_buffer_handle
1408 .update(&mut cx, |buffer, _| {
1409 buffer.wait_for_version(response.version.into())
1410 })
1411 .await;
1412
1413 response
1414 .completions
1415 .into_iter()
1416 .map(|completion| {
1417 language::proto::deserialize_completion(completion, language.as_ref())
1418 })
1419 .collect()
1420 })
1421 } else {
1422 Task::ready(Ok(Default::default()))
1423 }
1424 }
1425
1426 pub fn apply_additional_edits_for_completion(
1427 &self,
1428 buffer_handle: ModelHandle<Buffer>,
1429 completion: Completion,
1430 push_to_history: bool,
1431 cx: &mut ModelContext<Self>,
1432 ) -> Task<Result<Option<Transaction>>> {
1433 let buffer = buffer_handle.read(cx);
1434 let buffer_id = buffer.remote_id();
1435
1436 if self.is_local() {
1437 let lang_server = if let Some(language_server) = buffer.language_server() {
1438 language_server.clone()
1439 } else {
1440 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1441 };
1442
1443 cx.spawn(|_, mut cx| async move {
1444 let resolved_completion = lang_server
1445 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1446 .await?;
1447 if let Some(edits) = resolved_completion.additional_text_edits {
1448 let edits = buffer_handle
1449 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1450 .await?;
1451 buffer_handle.update(&mut cx, |buffer, cx| {
1452 buffer.finalize_last_transaction();
1453 buffer.start_transaction();
1454 for (range, text) in edits {
1455 buffer.edit([range], text, cx);
1456 }
1457 let transaction = if buffer.end_transaction(cx).is_some() {
1458 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1459 if !push_to_history {
1460 buffer.forget_transaction(transaction.id);
1461 }
1462 Some(transaction)
1463 } else {
1464 None
1465 };
1466 Ok(transaction)
1467 })
1468 } else {
1469 Ok(None)
1470 }
1471 })
1472 } else if let Some(project_id) = self.remote_id() {
1473 let client = self.client.clone();
1474 cx.spawn(|_, mut cx| async move {
1475 let response = client
1476 .request(proto::ApplyCompletionAdditionalEdits {
1477 project_id,
1478 buffer_id,
1479 completion: Some(language::proto::serialize_completion(&completion)),
1480 })
1481 .await?;
1482
1483 if let Some(transaction) = response.transaction {
1484 let transaction = language::proto::deserialize_transaction(transaction)?;
1485 buffer_handle
1486 .update(&mut cx, |buffer, _| {
1487 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1488 })
1489 .await;
1490 if push_to_history {
1491 buffer_handle.update(&mut cx, |buffer, _| {
1492 buffer.push_transaction(transaction.clone(), Instant::now());
1493 });
1494 }
1495 Ok(Some(transaction))
1496 } else {
1497 Ok(None)
1498 }
1499 })
1500 } else {
1501 Task::ready(Err(anyhow!("project does not have a remote id")))
1502 }
1503 }
1504
1505 pub fn code_actions<T: ToOffset>(
1506 &self,
1507 buffer_handle: &ModelHandle<Buffer>,
1508 range: Range<T>,
1509 cx: &mut ModelContext<Self>,
1510 ) -> Task<Result<Vec<CodeAction>>> {
1511 let buffer_handle = buffer_handle.clone();
1512 let buffer = buffer_handle.read(cx);
1513 let buffer_id = buffer.remote_id();
1514 let worktree;
1515 let buffer_abs_path;
1516 if let Some(file) = File::from_dyn(buffer.file()) {
1517 worktree = file.worktree.clone();
1518 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1519 } else {
1520 return Task::ready(Ok(Default::default()));
1521 };
1522 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1523
1524 if worktree.read(cx).as_local().is_some() {
1525 let buffer_abs_path = buffer_abs_path.unwrap();
1526 let lang_name;
1527 let lang_server;
1528 if let Some(lang) = buffer.language() {
1529 lang_name = lang.name().to_string();
1530 if let Some(server) = self
1531 .language_servers
1532 .get(&(worktree.read(cx).id(), lang_name.clone()))
1533 {
1534 lang_server = server.clone();
1535 } else {
1536 return Task::ready(Ok(Default::default()));
1537 };
1538 } else {
1539 return Task::ready(Ok(Default::default()));
1540 }
1541
1542 let lsp_range = lsp::Range::new(
1543 range.start.to_point_utf16(buffer).to_lsp_position(),
1544 range.end.to_point_utf16(buffer).to_lsp_position(),
1545 );
1546 cx.foreground().spawn(async move {
1547 Ok(lang_server
1548 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1549 text_document: lsp::TextDocumentIdentifier::new(
1550 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1551 ),
1552 range: lsp_range,
1553 work_done_progress_params: Default::default(),
1554 partial_result_params: Default::default(),
1555 context: lsp::CodeActionContext {
1556 diagnostics: Default::default(),
1557 only: Some(vec![
1558 lsp::CodeActionKind::QUICKFIX,
1559 lsp::CodeActionKind::REFACTOR,
1560 lsp::CodeActionKind::REFACTOR_EXTRACT,
1561 ]),
1562 },
1563 })
1564 .await?
1565 .unwrap_or_default()
1566 .into_iter()
1567 .filter_map(|entry| {
1568 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1569 Some(CodeAction {
1570 range: range.clone(),
1571 lsp_action,
1572 })
1573 } else {
1574 None
1575 }
1576 })
1577 .collect())
1578 })
1579 } else if let Some(project_id) = self.remote_id() {
1580 let rpc = self.client.clone();
1581 cx.spawn_weak(|_, mut cx| async move {
1582 let response = rpc
1583 .request(proto::GetCodeActions {
1584 project_id,
1585 buffer_id,
1586 start: Some(language::proto::serialize_anchor(&range.start)),
1587 end: Some(language::proto::serialize_anchor(&range.end)),
1588 })
1589 .await?;
1590
1591 buffer_handle
1592 .update(&mut cx, |buffer, _| {
1593 buffer.wait_for_version(response.version.into())
1594 })
1595 .await;
1596
1597 response
1598 .actions
1599 .into_iter()
1600 .map(language::proto::deserialize_code_action)
1601 .collect()
1602 })
1603 } else {
1604 Task::ready(Ok(Default::default()))
1605 }
1606 }
1607
1608 pub fn apply_code_action(
1609 &self,
1610 buffer_handle: ModelHandle<Buffer>,
1611 mut action: CodeAction,
1612 push_to_history: bool,
1613 cx: &mut ModelContext<Self>,
1614 ) -> Task<Result<ProjectTransaction>> {
1615 if self.is_local() {
1616 let buffer = buffer_handle.read(cx);
1617 let lang_name = if let Some(lang) = buffer.language() {
1618 lang.name().to_string()
1619 } else {
1620 return Task::ready(Ok(Default::default()));
1621 };
1622 let lang_server = if let Some(language_server) = buffer.language_server() {
1623 language_server.clone()
1624 } else {
1625 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1626 };
1627 let range = action.range.to_point_utf16(buffer);
1628 let fs = self.fs.clone();
1629
1630 cx.spawn(|this, mut cx| async move {
1631 if let Some(lsp_range) = action
1632 .lsp_action
1633 .data
1634 .as_mut()
1635 .and_then(|d| d.get_mut("codeActionParams"))
1636 .and_then(|d| d.get_mut("range"))
1637 {
1638 *lsp_range = serde_json::to_value(&lsp::Range::new(
1639 range.start.to_lsp_position(),
1640 range.end.to_lsp_position(),
1641 ))
1642 .unwrap();
1643 action.lsp_action = lang_server
1644 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1645 .await?;
1646 } else {
1647 let actions = this
1648 .update(&mut cx, |this, cx| {
1649 this.code_actions(&buffer_handle, action.range, cx)
1650 })
1651 .await?;
1652 action.lsp_action = actions
1653 .into_iter()
1654 .find(|a| a.lsp_action.title == action.lsp_action.title)
1655 .ok_or_else(|| anyhow!("code action is outdated"))?
1656 .lsp_action;
1657 }
1658
1659 let mut operations = Vec::new();
1660 if let Some(edit) = action.lsp_action.edit {
1661 if let Some(document_changes) = edit.document_changes {
1662 match document_changes {
1663 lsp::DocumentChanges::Edits(edits) => operations
1664 .extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit)),
1665 lsp::DocumentChanges::Operations(ops) => operations = ops,
1666 }
1667 } else if let Some(changes) = edit.changes {
1668 operations.extend(changes.into_iter().map(|(uri, edits)| {
1669 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1670 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1671 uri,
1672 version: None,
1673 },
1674 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1675 })
1676 }));
1677 }
1678 }
1679
1680 let mut project_transaction = ProjectTransaction::default();
1681 for operation in operations {
1682 match operation {
1683 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1684 let abs_path = op
1685 .uri
1686 .to_file_path()
1687 .map_err(|_| anyhow!("can't convert URI to path"))?;
1688
1689 if let Some(parent_path) = abs_path.parent() {
1690 fs.create_dir(parent_path).await?;
1691 }
1692 if abs_path.ends_with("/") {
1693 fs.create_dir(&abs_path).await?;
1694 } else {
1695 fs.create_file(
1696 &abs_path,
1697 op.options.map(Into::into).unwrap_or_default(),
1698 )
1699 .await?;
1700 }
1701 }
1702 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1703 let source_abs_path = op
1704 .old_uri
1705 .to_file_path()
1706 .map_err(|_| anyhow!("can't convert URI to path"))?;
1707 let target_abs_path = op
1708 .new_uri
1709 .to_file_path()
1710 .map_err(|_| anyhow!("can't convert URI to path"))?;
1711 fs.rename(
1712 &source_abs_path,
1713 &target_abs_path,
1714 op.options.map(Into::into).unwrap_or_default(),
1715 )
1716 .await?;
1717 }
1718 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1719 let abs_path = op
1720 .uri
1721 .to_file_path()
1722 .map_err(|_| anyhow!("can't convert URI to path"))?;
1723 let options = op.options.map(Into::into).unwrap_or_default();
1724 if abs_path.ends_with("/") {
1725 fs.remove_dir(&abs_path, options).await?;
1726 } else {
1727 fs.remove_file(&abs_path, options).await?;
1728 }
1729 }
1730 lsp::DocumentChangeOperation::Edit(op) => {
1731 let buffer_to_edit = this
1732 .update(&mut cx, |this, cx| {
1733 this.open_local_buffer_from_lsp_path(
1734 op.text_document.uri,
1735 lang_name.clone(),
1736 lang_server.clone(),
1737 cx,
1738 )
1739 })
1740 .await?;
1741
1742 let edits = buffer_to_edit
1743 .update(&mut cx, |buffer, cx| {
1744 let edits = op.edits.into_iter().map(|edit| match edit {
1745 lsp::OneOf::Left(edit) => edit,
1746 lsp::OneOf::Right(edit) => edit.text_edit,
1747 });
1748 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1749 })
1750 .await?;
1751
1752 let transaction = buffer_to_edit.update(&mut cx, |buffer, cx| {
1753 buffer.finalize_last_transaction();
1754 buffer.start_transaction();
1755 for (range, text) in edits {
1756 buffer.edit([range], text, cx);
1757 }
1758 let transaction = if buffer.end_transaction(cx).is_some() {
1759 let transaction =
1760 buffer.finalize_last_transaction().unwrap().clone();
1761 if !push_to_history {
1762 buffer.forget_transaction(transaction.id);
1763 }
1764 Some(transaction)
1765 } else {
1766 None
1767 };
1768
1769 transaction
1770 });
1771 if let Some(transaction) = transaction {
1772 project_transaction.0.insert(buffer_to_edit, transaction);
1773 }
1774 }
1775 }
1776 }
1777
1778 Ok(project_transaction)
1779 })
1780 } else if let Some(project_id) = self.remote_id() {
1781 let client = self.client.clone();
1782 let request = proto::ApplyCodeAction {
1783 project_id,
1784 buffer_id: buffer_handle.read(cx).remote_id(),
1785 action: Some(language::proto::serialize_code_action(&action)),
1786 };
1787 cx.spawn(|this, mut cx| async move {
1788 let response = client
1789 .request(request)
1790 .await?
1791 .transaction
1792 .ok_or_else(|| anyhow!("missing transaction"))?;
1793 this.update(&mut cx, |this, cx| {
1794 this.deserialize_project_transaction(response, push_to_history, cx)
1795 })
1796 .await
1797 })
1798 } else {
1799 Task::ready(Err(anyhow!("project does not have a remote id")))
1800 }
1801 }
1802
1803 pub fn find_or_create_local_worktree(
1804 &self,
1805 abs_path: impl AsRef<Path>,
1806 weak: bool,
1807 cx: &mut ModelContext<Self>,
1808 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1809 let abs_path = abs_path.as_ref();
1810 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1811 Task::ready(Ok((tree.clone(), relative_path.into())))
1812 } else {
1813 let worktree = self.create_local_worktree(abs_path, weak, cx);
1814 cx.foreground()
1815 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1816 }
1817 }
1818
1819 fn find_local_worktree(
1820 &self,
1821 abs_path: &Path,
1822 cx: &AppContext,
1823 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1824 for tree in self.worktrees(cx) {
1825 if let Some(relative_path) = tree
1826 .read(cx)
1827 .as_local()
1828 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1829 {
1830 return Some((tree.clone(), relative_path.into()));
1831 }
1832 }
1833 None
1834 }
1835
1836 pub fn is_shared(&self) -> bool {
1837 match &self.client_state {
1838 ProjectClientState::Local { is_shared, .. } => *is_shared,
1839 ProjectClientState::Remote { .. } => false,
1840 }
1841 }
1842
1843 fn create_local_worktree(
1844 &self,
1845 abs_path: impl AsRef<Path>,
1846 weak: bool,
1847 cx: &mut ModelContext<Self>,
1848 ) -> Task<Result<ModelHandle<Worktree>>> {
1849 let fs = self.fs.clone();
1850 let client = self.client.clone();
1851 let path = Arc::from(abs_path.as_ref());
1852 cx.spawn(|project, mut cx| async move {
1853 let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;
1854
1855 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
1856 project.add_worktree(&worktree, cx);
1857 (project.remote_id(), project.is_shared())
1858 });
1859
1860 if let Some(project_id) = remote_project_id {
1861 worktree
1862 .update(&mut cx, |worktree, cx| {
1863 worktree.as_local_mut().unwrap().register(project_id, cx)
1864 })
1865 .await?;
1866 if is_shared {
1867 worktree
1868 .update(&mut cx, |worktree, cx| {
1869 worktree.as_local_mut().unwrap().share(project_id, cx)
1870 })
1871 .await?;
1872 }
1873 }
1874
1875 Ok(worktree)
1876 })
1877 }
1878
1879 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1880 self.worktrees.retain(|worktree| {
1881 worktree
1882 .upgrade(cx)
1883 .map_or(false, |w| w.read(cx).id() != id)
1884 });
1885 cx.notify();
1886 }
1887
1888 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
1889 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
1890 if worktree.read(cx).is_local() {
1891 cx.subscribe(&worktree, |this, worktree, _, cx| {
1892 this.update_local_worktree_buffers(worktree, cx);
1893 })
1894 .detach();
1895 }
1896
1897 let push_weak_handle = {
1898 let worktree = worktree.read(cx);
1899 worktree.is_local() && worktree.is_weak()
1900 };
1901 if push_weak_handle {
1902 cx.observe_release(&worktree, |this, cx| {
1903 this.worktrees
1904 .retain(|worktree| worktree.upgrade(cx).is_some());
1905 cx.notify();
1906 })
1907 .detach();
1908 self.worktrees
1909 .push(WorktreeHandle::Weak(worktree.downgrade()));
1910 } else {
1911 self.worktrees
1912 .push(WorktreeHandle::Strong(worktree.clone()));
1913 }
1914 cx.notify();
1915 }
1916
1917 fn update_local_worktree_buffers(
1918 &mut self,
1919 worktree_handle: ModelHandle<Worktree>,
1920 cx: &mut ModelContext<Self>,
1921 ) {
1922 let snapshot = worktree_handle.read(cx).snapshot();
1923 let mut buffers_to_delete = Vec::new();
1924 for (buffer_id, buffer) in &self.open_buffers {
1925 if let Some(buffer) = buffer.upgrade(cx) {
1926 buffer.update(cx, |buffer, cx| {
1927 if let Some(old_file) = File::from_dyn(buffer.file()) {
1928 if old_file.worktree != worktree_handle {
1929 return;
1930 }
1931
1932 let new_file = if let Some(entry) = old_file
1933 .entry_id
1934 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
1935 {
1936 File {
1937 is_local: true,
1938 entry_id: Some(entry.id),
1939 mtime: entry.mtime,
1940 path: entry.path.clone(),
1941 worktree: worktree_handle.clone(),
1942 }
1943 } else if let Some(entry) =
1944 snapshot.entry_for_path(old_file.path().as_ref())
1945 {
1946 File {
1947 is_local: true,
1948 entry_id: Some(entry.id),
1949 mtime: entry.mtime,
1950 path: entry.path.clone(),
1951 worktree: worktree_handle.clone(),
1952 }
1953 } else {
1954 File {
1955 is_local: true,
1956 entry_id: None,
1957 path: old_file.path().clone(),
1958 mtime: old_file.mtime(),
1959 worktree: worktree_handle.clone(),
1960 }
1961 };
1962
1963 if let Some(project_id) = self.remote_id() {
1964 self.client
1965 .send(proto::UpdateBufferFile {
1966 project_id,
1967 buffer_id: *buffer_id as u64,
1968 file: Some(new_file.to_proto()),
1969 })
1970 .log_err();
1971 }
1972 buffer.file_updated(Box::new(new_file), cx).detach();
1973 }
1974 });
1975 } else {
1976 buffers_to_delete.push(*buffer_id);
1977 }
1978 }
1979
1980 for buffer_id in buffers_to_delete {
1981 self.open_buffers.remove(&buffer_id);
1982 }
1983 }
1984
1985 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
1986 let new_active_entry = entry.and_then(|project_path| {
1987 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1988 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
1989 Some(ProjectEntry {
1990 worktree_id: project_path.worktree_id,
1991 entry_id: entry.id,
1992 })
1993 });
1994 if new_active_entry != self.active_entry {
1995 self.active_entry = new_active_entry;
1996 cx.emit(Event::ActiveEntryChanged(new_active_entry));
1997 }
1998 }
1999
2000 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2001 self.language_servers_with_diagnostics_running > 0
2002 }
2003
2004 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2005 let mut summary = DiagnosticSummary::default();
2006 for (_, path_summary) in self.diagnostic_summaries(cx) {
2007 summary.error_count += path_summary.error_count;
2008 summary.warning_count += path_summary.warning_count;
2009 summary.info_count += path_summary.info_count;
2010 summary.hint_count += path_summary.hint_count;
2011 }
2012 summary
2013 }
2014
2015 pub fn diagnostic_summaries<'a>(
2016 &'a self,
2017 cx: &'a AppContext,
2018 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2019 self.worktrees(cx).flat_map(move |worktree| {
2020 let worktree = worktree.read(cx);
2021 let worktree_id = worktree.id();
2022 worktree
2023 .diagnostic_summaries()
2024 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2025 })
2026 }
2027
2028 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2029 self.language_servers_with_diagnostics_running += 1;
2030 if self.language_servers_with_diagnostics_running == 1 {
2031 cx.emit(Event::DiskBasedDiagnosticsStarted);
2032 }
2033 }
2034
2035 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2036 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2037 self.language_servers_with_diagnostics_running -= 1;
2038 if self.language_servers_with_diagnostics_running == 0 {
2039 cx.emit(Event::DiskBasedDiagnosticsFinished);
2040 }
2041 }
2042
2043 pub fn active_entry(&self) -> Option<ProjectEntry> {
2044 self.active_entry
2045 }
2046
2047 // RPC message handlers
2048
2049 async fn handle_unshare_project(
2050 this: ModelHandle<Self>,
2051 _: TypedEnvelope<proto::UnshareProject>,
2052 _: Arc<Client>,
2053 mut cx: AsyncAppContext,
2054 ) -> Result<()> {
2055 this.update(&mut cx, |this, cx| {
2056 if let ProjectClientState::Remote {
2057 sharing_has_stopped,
2058 ..
2059 } = &mut this.client_state
2060 {
2061 *sharing_has_stopped = true;
2062 this.collaborators.clear();
2063 cx.notify();
2064 } else {
2065 unreachable!()
2066 }
2067 });
2068
2069 Ok(())
2070 }
2071
2072 async fn handle_add_collaborator(
2073 this: ModelHandle<Self>,
2074 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2075 _: Arc<Client>,
2076 mut cx: AsyncAppContext,
2077 ) -> Result<()> {
2078 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2079 let collaborator = envelope
2080 .payload
2081 .collaborator
2082 .take()
2083 .ok_or_else(|| anyhow!("empty collaborator"))?;
2084
2085 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2086 this.update(&mut cx, |this, cx| {
2087 this.collaborators
2088 .insert(collaborator.peer_id, collaborator);
2089 cx.notify();
2090 });
2091
2092 Ok(())
2093 }
2094
2095 async fn handle_remove_collaborator(
2096 this: ModelHandle<Self>,
2097 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2098 _: Arc<Client>,
2099 mut cx: AsyncAppContext,
2100 ) -> Result<()> {
2101 this.update(&mut cx, |this, cx| {
2102 let peer_id = PeerId(envelope.payload.peer_id);
2103 let replica_id = this
2104 .collaborators
2105 .remove(&peer_id)
2106 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2107 .replica_id;
2108 this.shared_buffers.remove(&peer_id);
2109 for (_, buffer) in &this.open_buffers {
2110 if let Some(buffer) = buffer.upgrade(cx) {
2111 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2112 }
2113 }
2114 cx.notify();
2115 Ok(())
2116 })
2117 }
2118
2119 async fn handle_share_worktree(
2120 this: ModelHandle<Self>,
2121 envelope: TypedEnvelope<proto::ShareWorktree>,
2122 client: Arc<Client>,
2123 mut cx: AsyncAppContext,
2124 ) -> Result<()> {
2125 this.update(&mut cx, |this, cx| {
2126 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2127 let replica_id = this.replica_id();
2128 let worktree = envelope
2129 .payload
2130 .worktree
2131 .ok_or_else(|| anyhow!("invalid worktree"))?;
2132 let (worktree, load_task) =
2133 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2134 this.add_worktree(&worktree, cx);
2135 load_task.detach();
2136 Ok(())
2137 })
2138 }
2139
2140 async fn handle_unregister_worktree(
2141 this: ModelHandle<Self>,
2142 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2143 _: Arc<Client>,
2144 mut cx: AsyncAppContext,
2145 ) -> Result<()> {
2146 this.update(&mut cx, |this, cx| {
2147 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2148 this.remove_worktree(worktree_id, cx);
2149 Ok(())
2150 })
2151 }
2152
2153 async fn handle_update_worktree(
2154 this: ModelHandle<Self>,
2155 envelope: TypedEnvelope<proto::UpdateWorktree>,
2156 _: Arc<Client>,
2157 mut cx: AsyncAppContext,
2158 ) -> Result<()> {
2159 this.update(&mut cx, |this, cx| {
2160 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2161 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2162 worktree.update(cx, |worktree, _| {
2163 let worktree = worktree.as_remote_mut().unwrap();
2164 worktree.update_from_remote(envelope)
2165 })?;
2166 }
2167 Ok(())
2168 })
2169 }
2170
2171 async fn handle_update_diagnostic_summary(
2172 this: ModelHandle<Self>,
2173 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2174 _: Arc<Client>,
2175 mut cx: AsyncAppContext,
2176 ) -> Result<()> {
2177 this.update(&mut cx, |this, cx| {
2178 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2179 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2180 if let Some(summary) = envelope.payload.summary {
2181 let project_path = ProjectPath {
2182 worktree_id,
2183 path: Path::new(&summary.path).into(),
2184 };
2185 worktree.update(cx, |worktree, _| {
2186 worktree
2187 .as_remote_mut()
2188 .unwrap()
2189 .update_diagnostic_summary(project_path.path.clone(), &summary);
2190 });
2191 cx.emit(Event::DiagnosticsUpdated(project_path));
2192 }
2193 }
2194 Ok(())
2195 })
2196 }
2197
2198 async fn handle_disk_based_diagnostics_updating(
2199 this: ModelHandle<Self>,
2200 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2201 _: Arc<Client>,
2202 mut cx: AsyncAppContext,
2203 ) -> Result<()> {
2204 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2205 Ok(())
2206 }
2207
2208 async fn handle_disk_based_diagnostics_updated(
2209 this: ModelHandle<Self>,
2210 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2211 _: Arc<Client>,
2212 mut cx: AsyncAppContext,
2213 ) -> Result<()> {
2214 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2215 Ok(())
2216 }
2217
2218 async fn handle_update_buffer(
2219 this: ModelHandle<Self>,
2220 envelope: TypedEnvelope<proto::UpdateBuffer>,
2221 _: Arc<Client>,
2222 mut cx: AsyncAppContext,
2223 ) -> Result<()> {
2224 this.update(&mut cx, |this, cx| {
2225 let payload = envelope.payload.clone();
2226 let buffer_id = payload.buffer_id;
2227 let ops = payload
2228 .operations
2229 .into_iter()
2230 .map(|op| language::proto::deserialize_operation(op))
2231 .collect::<Result<Vec<_>, _>>()?;
2232 let is_remote = this.is_remote();
2233 match this.open_buffers.entry(buffer_id) {
2234 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2235 OpenBuffer::Loaded(buffer) => {
2236 if let Some(buffer) = buffer.upgrade(cx) {
2237 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2238 }
2239 }
2240 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2241 },
2242 hash_map::Entry::Vacant(e) => {
2243 if is_remote && this.loading_buffers.len() > 0 {
2244 e.insert(OpenBuffer::Loading(ops));
2245 }
2246 }
2247 }
2248 Ok(())
2249 })
2250 }
2251
2252 async fn handle_update_buffer_file(
2253 this: ModelHandle<Self>,
2254 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2255 _: Arc<Client>,
2256 mut cx: AsyncAppContext,
2257 ) -> Result<()> {
2258 this.update(&mut cx, |this, cx| {
2259 let payload = envelope.payload.clone();
2260 let buffer_id = payload.buffer_id;
2261 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2262 let worktree = this
2263 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2264 .ok_or_else(|| anyhow!("no such worktree"))?;
2265 let file = File::from_proto(file, worktree.clone(), cx)?;
2266 let buffer = this
2267 .open_buffers
2268 .get_mut(&buffer_id)
2269 .and_then(|b| b.upgrade(cx))
2270 .ok_or_else(|| anyhow!("no such buffer"))?;
2271 buffer.update(cx, |buffer, cx| {
2272 buffer.file_updated(Box::new(file), cx).detach();
2273 });
2274 Ok(())
2275 })
2276 }
2277
2278 async fn handle_save_buffer(
2279 this: ModelHandle<Self>,
2280 envelope: TypedEnvelope<proto::SaveBuffer>,
2281 _: Arc<Client>,
2282 mut cx: AsyncAppContext,
2283 ) -> Result<proto::BufferSaved> {
2284 let buffer_id = envelope.payload.buffer_id;
2285 let sender_id = envelope.original_sender_id()?;
2286 let requested_version = envelope.payload.version.try_into()?;
2287
2288 let (project_id, buffer) = this.update(&mut cx, |this, _| {
2289 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2290 let buffer = this
2291 .shared_buffers
2292 .get(&sender_id)
2293 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2294 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2295 Ok::<_, anyhow::Error>((project_id, buffer))
2296 })?;
2297
2298 if !buffer
2299 .read_with(&cx, |buffer, _| buffer.version())
2300 .observed_all(&requested_version)
2301 {
2302 Err(anyhow!("save request depends on unreceived edits"))?;
2303 }
2304
2305 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2306 Ok(proto::BufferSaved {
2307 project_id,
2308 buffer_id,
2309 version: (&saved_version).into(),
2310 mtime: Some(mtime.into()),
2311 })
2312 }
2313
2314 async fn handle_format_buffers(
2315 this: ModelHandle<Self>,
2316 envelope: TypedEnvelope<proto::FormatBuffers>,
2317 _: Arc<Client>,
2318 mut cx: AsyncAppContext,
2319 ) -> Result<proto::FormatBuffersResponse> {
2320 let sender_id = envelope.original_sender_id()?;
2321 let format = this.update(&mut cx, |this, cx| {
2322 let shared_buffers = this
2323 .shared_buffers
2324 .get(&sender_id)
2325 .ok_or_else(|| anyhow!("peer has no buffers"))?;
2326 let mut buffers = HashSet::default();
2327 for buffer_id in &envelope.payload.buffer_ids {
2328 buffers.insert(
2329 shared_buffers
2330 .get(buffer_id)
2331 .cloned()
2332 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2333 );
2334 }
2335 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2336 })?;
2337
2338 let project_transaction = format.await?;
2339 let project_transaction = this.update(&mut cx, |this, cx| {
2340 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2341 });
2342 Ok(proto::FormatBuffersResponse {
2343 transaction: Some(project_transaction),
2344 })
2345 }
2346
2347 async fn handle_get_completions(
2348 this: ModelHandle<Self>,
2349 envelope: TypedEnvelope<proto::GetCompletions>,
2350 _: Arc<Client>,
2351 mut cx: AsyncAppContext,
2352 ) -> Result<proto::GetCompletionsResponse> {
2353 let sender_id = envelope.original_sender_id()?;
2354 let position = envelope
2355 .payload
2356 .position
2357 .and_then(language::proto::deserialize_anchor)
2358 .ok_or_else(|| anyhow!("invalid position"))?;
2359 let version = clock::Global::from(envelope.payload.version);
2360 let buffer = this.read_with(&cx, |this, _| {
2361 this.shared_buffers
2362 .get(&sender_id)
2363 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2364 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2365 })?;
2366 if !buffer
2367 .read_with(&cx, |buffer, _| buffer.version())
2368 .observed_all(&version)
2369 {
2370 Err(anyhow!("completion request depends on unreceived edits"))?;
2371 }
2372 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2373 let completions = this
2374 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2375 .await?;
2376
2377 Ok(proto::GetCompletionsResponse {
2378 completions: completions
2379 .iter()
2380 .map(language::proto::serialize_completion)
2381 .collect(),
2382 version: (&version).into(),
2383 })
2384 }
2385
2386 async fn handle_apply_additional_edits_for_completion(
2387 this: ModelHandle<Self>,
2388 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2389 _: Arc<Client>,
2390 mut cx: AsyncAppContext,
2391 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2392 let sender_id = envelope.original_sender_id()?;
2393 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2394 let buffer = this
2395 .shared_buffers
2396 .get(&sender_id)
2397 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2398 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2399 let language = buffer.read(cx).language();
2400 let completion = language::proto::deserialize_completion(
2401 envelope
2402 .payload
2403 .completion
2404 .ok_or_else(|| anyhow!("invalid completion"))?,
2405 language,
2406 )?;
2407 Ok::<_, anyhow::Error>(
2408 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2409 )
2410 })?;
2411
2412 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2413 transaction: apply_additional_edits
2414 .await?
2415 .as_ref()
2416 .map(language::proto::serialize_transaction),
2417 })
2418 }
2419
2420 async fn handle_get_code_actions(
2421 this: ModelHandle<Self>,
2422 envelope: TypedEnvelope<proto::GetCodeActions>,
2423 _: Arc<Client>,
2424 mut cx: AsyncAppContext,
2425 ) -> Result<proto::GetCodeActionsResponse> {
2426 let sender_id = envelope.original_sender_id()?;
2427 let start = envelope
2428 .payload
2429 .start
2430 .and_then(language::proto::deserialize_anchor)
2431 .ok_or_else(|| anyhow!("invalid start"))?;
2432 let end = envelope
2433 .payload
2434 .end
2435 .and_then(language::proto::deserialize_anchor)
2436 .ok_or_else(|| anyhow!("invalid end"))?;
2437 let buffer = this.update(&mut cx, |this, _| {
2438 this.shared_buffers
2439 .get(&sender_id)
2440 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2441 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2442 })?;
2443 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2444 if !version.observed(start.timestamp) || !version.observed(end.timestamp) {
2445 Err(anyhow!("code action request references unreceived edits"))?;
2446 }
2447 let code_actions = this.update(&mut cx, |this, cx| {
2448 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2449 })?;
2450
2451 Ok(proto::GetCodeActionsResponse {
2452 actions: code_actions
2453 .await?
2454 .iter()
2455 .map(language::proto::serialize_code_action)
2456 .collect(),
2457 version: (&version).into(),
2458 })
2459 }
2460
2461 async fn handle_apply_code_action(
2462 this: ModelHandle<Self>,
2463 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2464 _: Arc<Client>,
2465 mut cx: AsyncAppContext,
2466 ) -> Result<proto::ApplyCodeActionResponse> {
2467 let sender_id = envelope.original_sender_id()?;
2468 let action = language::proto::deserialize_code_action(
2469 envelope
2470 .payload
2471 .action
2472 .ok_or_else(|| anyhow!("invalid action"))?,
2473 )?;
2474 let apply_code_action = this.update(&mut cx, |this, cx| {
2475 let buffer = this
2476 .shared_buffers
2477 .get(&sender_id)
2478 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2479 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2480 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2481 })?;
2482
2483 let project_transaction = apply_code_action.await?;
2484 let project_transaction = this.update(&mut cx, |this, cx| {
2485 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2486 });
2487 Ok(proto::ApplyCodeActionResponse {
2488 transaction: Some(project_transaction),
2489 })
2490 }
2491
2492 async fn handle_get_definition(
2493 this: ModelHandle<Self>,
2494 envelope: TypedEnvelope<proto::GetDefinition>,
2495 _: Arc<Client>,
2496 mut cx: AsyncAppContext,
2497 ) -> Result<proto::GetDefinitionResponse> {
2498 let sender_id = envelope.original_sender_id()?;
2499 let position = envelope
2500 .payload
2501 .position
2502 .and_then(deserialize_anchor)
2503 .ok_or_else(|| anyhow!("invalid position"))?;
2504 let definitions = this.update(&mut cx, |this, cx| {
2505 let source_buffer = this
2506 .shared_buffers
2507 .get(&sender_id)
2508 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2509 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2510 if source_buffer.read(cx).can_resolve(&position) {
2511 Ok(this.definition(&source_buffer, position, cx))
2512 } else {
2513 Err(anyhow!("cannot resolve position"))
2514 }
2515 })?;
2516
2517 let definitions = definitions.await?;
2518
2519 this.update(&mut cx, |this, cx| {
2520 let mut response = proto::GetDefinitionResponse {
2521 definitions: Default::default(),
2522 };
2523 for definition in definitions {
2524 let buffer =
2525 this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx);
2526 response.definitions.push(proto::Definition {
2527 target_start: Some(serialize_anchor(&definition.target_range.start)),
2528 target_end: Some(serialize_anchor(&definition.target_range.end)),
2529 buffer: Some(buffer),
2530 });
2531 }
2532 Ok(response)
2533 })
2534 }
2535
2536 async fn handle_open_buffer(
2537 this: ModelHandle<Self>,
2538 envelope: TypedEnvelope<proto::OpenBuffer>,
2539 _: Arc<Client>,
2540 mut cx: AsyncAppContext,
2541 ) -> anyhow::Result<proto::OpenBufferResponse> {
2542 let peer_id = envelope.original_sender_id()?;
2543 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2544 let open_buffer = this.update(&mut cx, |this, cx| {
2545 this.open_buffer(
2546 ProjectPath {
2547 worktree_id,
2548 path: PathBuf::from(envelope.payload.path).into(),
2549 },
2550 cx,
2551 )
2552 });
2553
2554 let buffer = open_buffer.await?;
2555 this.update(&mut cx, |this, cx| {
2556 Ok(proto::OpenBufferResponse {
2557 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
2558 })
2559 })
2560 }
2561
2562 fn serialize_project_transaction_for_peer(
2563 &mut self,
2564 project_transaction: ProjectTransaction,
2565 peer_id: PeerId,
2566 cx: &AppContext,
2567 ) -> proto::ProjectTransaction {
2568 let mut serialized_transaction = proto::ProjectTransaction {
2569 buffers: Default::default(),
2570 transactions: Default::default(),
2571 };
2572 for (buffer, transaction) in project_transaction.0 {
2573 serialized_transaction
2574 .buffers
2575 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
2576 serialized_transaction
2577 .transactions
2578 .push(language::proto::serialize_transaction(&transaction));
2579 }
2580 serialized_transaction
2581 }
2582
2583 fn deserialize_project_transaction(
2584 &mut self,
2585 message: proto::ProjectTransaction,
2586 push_to_history: bool,
2587 cx: &mut ModelContext<Self>,
2588 ) -> Task<Result<ProjectTransaction>> {
2589 cx.spawn(|this, mut cx| async move {
2590 let mut project_transaction = ProjectTransaction::default();
2591 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
2592 let buffer = this
2593 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2594 .await?;
2595 let transaction = language::proto::deserialize_transaction(transaction)?;
2596 project_transaction.0.insert(buffer, transaction);
2597 }
2598 for (buffer, transaction) in &project_transaction.0 {
2599 buffer
2600 .update(&mut cx, |buffer, _| {
2601 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2602 })
2603 .await;
2604
2605 if push_to_history {
2606 buffer.update(&mut cx, |buffer, _| {
2607 buffer.push_transaction(transaction.clone(), Instant::now());
2608 });
2609 }
2610 }
2611
2612 Ok(project_transaction)
2613 })
2614 }
2615
2616 fn serialize_buffer_for_peer(
2617 &mut self,
2618 buffer: &ModelHandle<Buffer>,
2619 peer_id: PeerId,
2620 cx: &AppContext,
2621 ) -> proto::Buffer {
2622 let buffer_id = buffer.read(cx).remote_id();
2623 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2624 match shared_buffers.entry(buffer_id) {
2625 hash_map::Entry::Occupied(_) => proto::Buffer {
2626 variant: Some(proto::buffer::Variant::Id(buffer_id)),
2627 },
2628 hash_map::Entry::Vacant(entry) => {
2629 entry.insert(buffer.clone());
2630 proto::Buffer {
2631 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
2632 }
2633 }
2634 }
2635 }
2636
2637 fn deserialize_buffer(
2638 &mut self,
2639 buffer: proto::Buffer,
2640 cx: &mut ModelContext<Self>,
2641 ) -> Task<Result<ModelHandle<Buffer>>> {
2642 let replica_id = self.replica_id();
2643
2644 let mut opened_buffer_tx = self.opened_buffer.clone();
2645 let mut opened_buffer_rx = self.opened_buffer.subscribe();
2646 cx.spawn(|this, mut cx| async move {
2647 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
2648 proto::buffer::Variant::Id(id) => {
2649 let buffer = loop {
2650 let buffer = this.read_with(&cx, |this, cx| {
2651 this.open_buffers
2652 .get(&id)
2653 .and_then(|buffer| buffer.upgrade(cx))
2654 });
2655 if let Some(buffer) = buffer {
2656 break buffer;
2657 }
2658 opened_buffer_rx
2659 .recv()
2660 .await
2661 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
2662 };
2663 Ok(buffer)
2664 }
2665 proto::buffer::Variant::State(mut buffer) => {
2666 let mut buffer_worktree = None;
2667 let mut buffer_file = None;
2668 if let Some(file) = buffer.file.take() {
2669 this.read_with(&cx, |this, cx| {
2670 let worktree_id = WorktreeId::from_proto(file.worktree_id);
2671 let worktree =
2672 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
2673 anyhow!("no worktree found for id {}", file.worktree_id)
2674 })?;
2675 buffer_file =
2676 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
2677 as Box<dyn language::File>);
2678 buffer_worktree = Some(worktree);
2679 Ok::<_, anyhow::Error>(())
2680 })?;
2681 }
2682
2683 let buffer = cx.add_model(|cx| {
2684 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
2685 });
2686 this.update(&mut cx, |this, cx| {
2687 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
2688 })?;
2689
2690 let _ = opened_buffer_tx.send(()).await;
2691 Ok(buffer)
2692 }
2693 }
2694 })
2695 }
2696
2697 async fn handle_close_buffer(
2698 this: ModelHandle<Self>,
2699 envelope: TypedEnvelope<proto::CloseBuffer>,
2700 _: Arc<Client>,
2701 mut cx: AsyncAppContext,
2702 ) -> anyhow::Result<()> {
2703 this.update(&mut cx, |this, cx| {
2704 if let Some(shared_buffers) =
2705 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
2706 {
2707 shared_buffers.remove(&envelope.payload.buffer_id);
2708 cx.notify();
2709 }
2710 Ok(())
2711 })
2712 }
2713
2714 async fn handle_buffer_saved(
2715 this: ModelHandle<Self>,
2716 envelope: TypedEnvelope<proto::BufferSaved>,
2717 _: Arc<Client>,
2718 mut cx: AsyncAppContext,
2719 ) -> Result<()> {
2720 let version = envelope.payload.version.try_into()?;
2721 let mtime = envelope
2722 .payload
2723 .mtime
2724 .ok_or_else(|| anyhow!("missing mtime"))?
2725 .into();
2726
2727 this.update(&mut cx, |this, cx| {
2728 let buffer = this
2729 .open_buffers
2730 .get(&envelope.payload.buffer_id)
2731 .and_then(|buffer| buffer.upgrade(cx));
2732 if let Some(buffer) = buffer {
2733 buffer.update(cx, |buffer, cx| {
2734 buffer.did_save(version, mtime, None, cx);
2735 });
2736 }
2737 Ok(())
2738 })
2739 }
2740
2741 async fn handle_buffer_reloaded(
2742 this: ModelHandle<Self>,
2743 envelope: TypedEnvelope<proto::BufferReloaded>,
2744 _: Arc<Client>,
2745 mut cx: AsyncAppContext,
2746 ) -> Result<()> {
2747 let payload = envelope.payload.clone();
2748 let version = payload.version.try_into()?;
2749 let mtime = payload
2750 .mtime
2751 .ok_or_else(|| anyhow!("missing mtime"))?
2752 .into();
2753 this.update(&mut cx, |this, cx| {
2754 let buffer = this
2755 .open_buffers
2756 .get(&payload.buffer_id)
2757 .and_then(|buffer| buffer.upgrade(cx));
2758 if let Some(buffer) = buffer {
2759 buffer.update(cx, |buffer, cx| {
2760 buffer.did_reload(version, mtime, cx);
2761 });
2762 }
2763 Ok(())
2764 })
2765 }
2766
2767 pub fn match_paths<'a>(
2768 &self,
2769 query: &'a str,
2770 include_ignored: bool,
2771 smart_case: bool,
2772 max_results: usize,
2773 cancel_flag: &'a AtomicBool,
2774 cx: &AppContext,
2775 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
2776 let worktrees = self
2777 .worktrees(cx)
2778 .filter(|worktree| !worktree.read(cx).is_weak())
2779 .collect::<Vec<_>>();
2780 let include_root_name = worktrees.len() > 1;
2781 let candidate_sets = worktrees
2782 .into_iter()
2783 .map(|worktree| CandidateSet {
2784 snapshot: worktree.read(cx).snapshot(),
2785 include_ignored,
2786 include_root_name,
2787 })
2788 .collect::<Vec<_>>();
2789
2790 let background = cx.background().clone();
2791 async move {
2792 fuzzy::match_paths(
2793 candidate_sets.as_slice(),
2794 query,
2795 smart_case,
2796 max_results,
2797 cancel_flag,
2798 background,
2799 )
2800 .await
2801 }
2802 }
2803}
2804
2805impl WorktreeHandle {
2806 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2807 match self {
2808 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2809 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2810 }
2811 }
2812}
2813
2814impl OpenBuffer {
2815 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
2816 match self {
2817 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
2818 OpenBuffer::Loading(_) => None,
2819 }
2820 }
2821}
2822
2823struct CandidateSet {
2824 snapshot: Snapshot,
2825 include_ignored: bool,
2826 include_root_name: bool,
2827}
2828
2829impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
2830 type Candidates = CandidateSetIter<'a>;
2831
2832 fn id(&self) -> usize {
2833 self.snapshot.id().to_usize()
2834 }
2835
2836 fn len(&self) -> usize {
2837 if self.include_ignored {
2838 self.snapshot.file_count()
2839 } else {
2840 self.snapshot.visible_file_count()
2841 }
2842 }
2843
2844 fn prefix(&self) -> Arc<str> {
2845 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
2846 self.snapshot.root_name().into()
2847 } else if self.include_root_name {
2848 format!("{}/", self.snapshot.root_name()).into()
2849 } else {
2850 "".into()
2851 }
2852 }
2853
2854 fn candidates(&'a self, start: usize) -> Self::Candidates {
2855 CandidateSetIter {
2856 traversal: self.snapshot.files(self.include_ignored, start),
2857 }
2858 }
2859}
2860
2861struct CandidateSetIter<'a> {
2862 traversal: Traversal<'a>,
2863}
2864
2865impl<'a> Iterator for CandidateSetIter<'a> {
2866 type Item = PathMatchCandidate<'a>;
2867
2868 fn next(&mut self) -> Option<Self::Item> {
2869 self.traversal.next().map(|entry| {
2870 if let EntryKind::File(char_bag) = entry.kind {
2871 PathMatchCandidate {
2872 path: &entry.path,
2873 char_bag,
2874 }
2875 } else {
2876 unreachable!()
2877 }
2878 })
2879 }
2880}
2881
2882impl Entity for Project {
2883 type Event = Event;
2884
2885 fn release(&mut self, _: &mut gpui::MutableAppContext) {
2886 match &self.client_state {
2887 ProjectClientState::Local { remote_id_rx, .. } => {
2888 if let Some(project_id) = *remote_id_rx.borrow() {
2889 self.client
2890 .send(proto::UnregisterProject { project_id })
2891 .log_err();
2892 }
2893 }
2894 ProjectClientState::Remote { remote_id, .. } => {
2895 self.client
2896 .send(proto::LeaveProject {
2897 project_id: *remote_id,
2898 })
2899 .log_err();
2900 }
2901 }
2902 }
2903
2904 fn app_will_quit(
2905 &mut self,
2906 _: &mut MutableAppContext,
2907 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
2908 use futures::FutureExt;
2909
2910 let shutdown_futures = self
2911 .language_servers
2912 .drain()
2913 .filter_map(|(_, server)| server.shutdown())
2914 .collect::<Vec<_>>();
2915 Some(
2916 async move {
2917 futures::future::join_all(shutdown_futures).await;
2918 }
2919 .boxed(),
2920 )
2921 }
2922}
2923
2924impl Collaborator {
2925 fn from_proto(
2926 message: proto::Collaborator,
2927 user_store: &ModelHandle<UserStore>,
2928 cx: &mut AsyncAppContext,
2929 ) -> impl Future<Output = Result<Self>> {
2930 let user = user_store.update(cx, |user_store, cx| {
2931 user_store.fetch_user(message.user_id, cx)
2932 });
2933
2934 async move {
2935 Ok(Self {
2936 peer_id: PeerId(message.peer_id),
2937 user: user.await?,
2938 replica_id: message.replica_id as ReplicaId,
2939 })
2940 }
2941 }
2942}
2943
2944impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
2945 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
2946 Self {
2947 worktree_id,
2948 path: path.as_ref().into(),
2949 }
2950 }
2951}
2952
2953impl From<lsp::CreateFileOptions> for fs::CreateOptions {
2954 fn from(options: lsp::CreateFileOptions) -> Self {
2955 Self {
2956 overwrite: options.overwrite.unwrap_or(false),
2957 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2958 }
2959 }
2960}
2961
2962impl From<lsp::RenameFileOptions> for fs::RenameOptions {
2963 fn from(options: lsp::RenameFileOptions) -> Self {
2964 Self {
2965 overwrite: options.overwrite.unwrap_or(false),
2966 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2967 }
2968 }
2969}
2970
2971impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
2972 fn from(options: lsp::DeleteFileOptions) -> Self {
2973 Self {
2974 recursive: options.recursive.unwrap_or(false),
2975 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
2976 }
2977 }
2978}
2979
2980#[cfg(test)]
2981mod tests {
2982 use super::{Event, *};
2983 use client::test::FakeHttpClient;
2984 use fs::RealFs;
2985 use futures::StreamExt;
2986 use gpui::test::subscribe;
2987 use language::{
2988 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageRegistry,
2989 LanguageServerConfig, Point,
2990 };
2991 use lsp::Url;
2992 use serde_json::json;
2993 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
2994 use unindent::Unindent as _;
2995 use util::test::temp_tree;
2996 use worktree::WorktreeHandle as _;
2997
2998 #[gpui::test]
2999 async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
3000 let dir = temp_tree(json!({
3001 "root": {
3002 "apple": "",
3003 "banana": {
3004 "carrot": {
3005 "date": "",
3006 "endive": "",
3007 }
3008 },
3009 "fennel": {
3010 "grape": "",
3011 }
3012 }
3013 }));
3014
3015 let root_link_path = dir.path().join("root_link");
3016 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3017 unix::fs::symlink(
3018 &dir.path().join("root/fennel"),
3019 &dir.path().join("root/finnochio"),
3020 )
3021 .unwrap();
3022
3023 let project = Project::test(Arc::new(RealFs), &mut cx);
3024
3025 let (tree, _) = project
3026 .update(&mut cx, |project, cx| {
3027 project.find_or_create_local_worktree(&root_link_path, false, cx)
3028 })
3029 .await
3030 .unwrap();
3031
3032 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3033 .await;
3034 cx.read(|cx| {
3035 let tree = tree.read(cx);
3036 assert_eq!(tree.file_count(), 5);
3037 assert_eq!(
3038 tree.inode_for_path("fennel/grape"),
3039 tree.inode_for_path("finnochio/grape")
3040 );
3041 });
3042
3043 let cancel_flag = Default::default();
3044 let results = project
3045 .read_with(&cx, |project, cx| {
3046 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3047 })
3048 .await;
3049 assert_eq!(
3050 results
3051 .into_iter()
3052 .map(|result| result.path)
3053 .collect::<Vec<Arc<Path>>>(),
3054 vec![
3055 PathBuf::from("banana/carrot/date").into(),
3056 PathBuf::from("banana/carrot/endive").into(),
3057 ]
3058 );
3059 }
3060
3061 #[gpui::test]
3062 async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
3063 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3064 let progress_token = language_server_config
3065 .disk_based_diagnostics_progress_token
3066 .clone()
3067 .unwrap();
3068
3069 let mut languages = LanguageRegistry::new();
3070 languages.add(Arc::new(Language::new(
3071 LanguageConfig {
3072 name: "Rust".to_string(),
3073 path_suffixes: vec!["rs".to_string()],
3074 language_server: Some(language_server_config),
3075 ..Default::default()
3076 },
3077 Some(tree_sitter_rust::language()),
3078 )));
3079
3080 let dir = temp_tree(json!({
3081 "a.rs": "fn a() { A }",
3082 "b.rs": "const y: i32 = 1",
3083 }));
3084
3085 let http_client = FakeHttpClient::with_404_response();
3086 let client = Client::new(http_client.clone());
3087 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
3088
3089 let project = cx.update(|cx| {
3090 Project::local(
3091 client,
3092 user_store,
3093 Arc::new(languages),
3094 Arc::new(RealFs),
3095 cx,
3096 )
3097 });
3098
3099 let (tree, _) = project
3100 .update(&mut cx, |project, cx| {
3101 project.find_or_create_local_worktree(dir.path(), false, cx)
3102 })
3103 .await
3104 .unwrap();
3105 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3106
3107 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3108 .await;
3109
3110 // Cause worktree to start the fake language server
3111 let _buffer = project
3112 .update(&mut cx, |project, cx| {
3113 project.open_buffer(
3114 ProjectPath {
3115 worktree_id,
3116 path: Path::new("b.rs").into(),
3117 },
3118 cx,
3119 )
3120 })
3121 .await
3122 .unwrap();
3123
3124 let mut events = subscribe(&project, &mut cx);
3125
3126 let mut fake_server = fake_servers.next().await.unwrap();
3127 fake_server.start_progress(&progress_token).await;
3128 assert_eq!(
3129 events.next().await.unwrap(),
3130 Event::DiskBasedDiagnosticsStarted
3131 );
3132
3133 fake_server.start_progress(&progress_token).await;
3134 fake_server.end_progress(&progress_token).await;
3135 fake_server.start_progress(&progress_token).await;
3136
3137 fake_server
3138 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3139 uri: Url::from_file_path(dir.path().join("a.rs")).unwrap(),
3140 version: None,
3141 diagnostics: vec![lsp::Diagnostic {
3142 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3143 severity: Some(lsp::DiagnosticSeverity::ERROR),
3144 message: "undefined variable 'A'".to_string(),
3145 ..Default::default()
3146 }],
3147 })
3148 .await;
3149 assert_eq!(
3150 events.next().await.unwrap(),
3151 Event::DiagnosticsUpdated(ProjectPath {
3152 worktree_id,
3153 path: Arc::from(Path::new("a.rs"))
3154 })
3155 );
3156
3157 fake_server.end_progress(&progress_token).await;
3158 fake_server.end_progress(&progress_token).await;
3159 assert_eq!(
3160 events.next().await.unwrap(),
3161 Event::DiskBasedDiagnosticsUpdated
3162 );
3163 assert_eq!(
3164 events.next().await.unwrap(),
3165 Event::DiskBasedDiagnosticsFinished
3166 );
3167
3168 let buffer = project
3169 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3170 .await
3171 .unwrap();
3172
3173 buffer.read_with(&cx, |buffer, _| {
3174 let snapshot = buffer.snapshot();
3175 let diagnostics = snapshot
3176 .diagnostics_in_range::<_, Point>(0..buffer.len())
3177 .collect::<Vec<_>>();
3178 assert_eq!(
3179 diagnostics,
3180 &[DiagnosticEntry {
3181 range: Point::new(0, 9)..Point::new(0, 10),
3182 diagnostic: Diagnostic {
3183 severity: lsp::DiagnosticSeverity::ERROR,
3184 message: "undefined variable 'A'".to_string(),
3185 group_id: 0,
3186 is_primary: true,
3187 ..Default::default()
3188 }
3189 }]
3190 )
3191 });
3192 }
3193
3194 #[gpui::test]
3195 async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
3196 let dir = temp_tree(json!({
3197 "root": {
3198 "dir1": {},
3199 "dir2": {
3200 "dir3": {}
3201 }
3202 }
3203 }));
3204
3205 let project = Project::test(Arc::new(RealFs), &mut cx);
3206 let (tree, _) = project
3207 .update(&mut cx, |project, cx| {
3208 project.find_or_create_local_worktree(&dir.path(), false, cx)
3209 })
3210 .await
3211 .unwrap();
3212
3213 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3214 .await;
3215
3216 let cancel_flag = Default::default();
3217 let results = project
3218 .read_with(&cx, |project, cx| {
3219 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3220 })
3221 .await;
3222
3223 assert!(results.is_empty());
3224 }
3225
3226 #[gpui::test]
3227 async fn test_definition(mut cx: gpui::TestAppContext) {
3228 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3229
3230 let mut languages = LanguageRegistry::new();
3231 languages.add(Arc::new(Language::new(
3232 LanguageConfig {
3233 name: "Rust".to_string(),
3234 path_suffixes: vec!["rs".to_string()],
3235 language_server: Some(language_server_config),
3236 ..Default::default()
3237 },
3238 Some(tree_sitter_rust::language()),
3239 )));
3240
3241 let dir = temp_tree(json!({
3242 "a.rs": "const fn a() { A }",
3243 "b.rs": "const y: i32 = crate::a()",
3244 }));
3245 let dir_path = dir.path().to_path_buf();
3246
3247 let http_client = FakeHttpClient::with_404_response();
3248 let client = Client::new(http_client.clone());
3249 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
3250 let project = cx.update(|cx| {
3251 Project::local(
3252 client,
3253 user_store,
3254 Arc::new(languages),
3255 Arc::new(RealFs),
3256 cx,
3257 )
3258 });
3259
3260 let (tree, _) = project
3261 .update(&mut cx, |project, cx| {
3262 project.find_or_create_local_worktree(dir.path().join("b.rs"), false, cx)
3263 })
3264 .await
3265 .unwrap();
3266 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3267 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3268 .await;
3269
3270 let buffer = project
3271 .update(&mut cx, |project, cx| {
3272 project.open_buffer(
3273 ProjectPath {
3274 worktree_id,
3275 path: Path::new("").into(),
3276 },
3277 cx,
3278 )
3279 })
3280 .await
3281 .unwrap();
3282
3283 let mut fake_server = fake_servers.next().await.unwrap();
3284 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params| {
3285 let params = params.text_document_position_params;
3286 assert_eq!(
3287 params.text_document.uri.to_file_path().unwrap(),
3288 dir_path.join("b.rs")
3289 );
3290 assert_eq!(params.position, lsp::Position::new(0, 22));
3291
3292 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3293 lsp::Url::from_file_path(dir_path.join("a.rs")).unwrap(),
3294 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3295 )))
3296 });
3297
3298 let mut definitions = project
3299 .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
3300 .await
3301 .unwrap();
3302
3303 assert_eq!(definitions.len(), 1);
3304 let definition = definitions.pop().unwrap();
3305 cx.update(|cx| {
3306 let target_buffer = definition.target_buffer.read(cx);
3307 assert_eq!(
3308 target_buffer
3309 .file()
3310 .unwrap()
3311 .as_local()
3312 .unwrap()
3313 .abs_path(cx),
3314 dir.path().join("a.rs")
3315 );
3316 assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
3317 assert_eq!(
3318 list_worktrees(&project, cx),
3319 [
3320 (dir.path().join("b.rs"), false),
3321 (dir.path().join("a.rs"), true)
3322 ]
3323 );
3324
3325 drop(definition);
3326 });
3327 cx.read(|cx| {
3328 assert_eq!(
3329 list_worktrees(&project, cx),
3330 [(dir.path().join("b.rs"), false)]
3331 );
3332 });
3333
3334 fn list_worktrees(project: &ModelHandle<Project>, cx: &AppContext) -> Vec<(PathBuf, bool)> {
3335 project
3336 .read(cx)
3337 .worktrees(cx)
3338 .map(|worktree| {
3339 let worktree = worktree.read(cx);
3340 (
3341 worktree.as_local().unwrap().abs_path().to_path_buf(),
3342 worktree.is_weak(),
3343 )
3344 })
3345 .collect::<Vec<_>>()
3346 }
3347 }
3348
3349 #[gpui::test]
3350 async fn test_save_file(mut cx: gpui::TestAppContext) {
3351 let fs = Arc::new(FakeFs::new(cx.background()));
3352 fs.insert_tree(
3353 "/dir",
3354 json!({
3355 "file1": "the old contents",
3356 }),
3357 )
3358 .await;
3359
3360 let project = Project::test(fs.clone(), &mut cx);
3361 let worktree_id = project
3362 .update(&mut cx, |p, cx| {
3363 p.find_or_create_local_worktree("/dir", false, cx)
3364 })
3365 .await
3366 .unwrap()
3367 .0
3368 .read_with(&cx, |tree, _| tree.id());
3369
3370 let buffer = project
3371 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3372 .await
3373 .unwrap();
3374 buffer
3375 .update(&mut cx, |buffer, cx| {
3376 assert_eq!(buffer.text(), "the old contents");
3377 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3378 buffer.save(cx)
3379 })
3380 .await
3381 .unwrap();
3382
3383 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3384 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3385 }
3386
3387 #[gpui::test]
3388 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
3389 let fs = Arc::new(FakeFs::new(cx.background()));
3390 fs.insert_tree(
3391 "/dir",
3392 json!({
3393 "file1": "the old contents",
3394 }),
3395 )
3396 .await;
3397
3398 let project = Project::test(fs.clone(), &mut cx);
3399 let worktree_id = project
3400 .update(&mut cx, |p, cx| {
3401 p.find_or_create_local_worktree("/dir/file1", false, cx)
3402 })
3403 .await
3404 .unwrap()
3405 .0
3406 .read_with(&cx, |tree, _| tree.id());
3407
3408 let buffer = project
3409 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3410 .await
3411 .unwrap();
3412 buffer
3413 .update(&mut cx, |buffer, cx| {
3414 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3415 buffer.save(cx)
3416 })
3417 .await
3418 .unwrap();
3419
3420 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3421 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3422 }
3423
3424 #[gpui::test(retries = 5)]
3425 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3426 let dir = temp_tree(json!({
3427 "a": {
3428 "file1": "",
3429 "file2": "",
3430 "file3": "",
3431 },
3432 "b": {
3433 "c": {
3434 "file4": "",
3435 "file5": "",
3436 }
3437 }
3438 }));
3439
3440 let project = Project::test(Arc::new(RealFs), &mut cx);
3441 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3442
3443 let (tree, _) = project
3444 .update(&mut cx, |p, cx| {
3445 p.find_or_create_local_worktree(dir.path(), false, cx)
3446 })
3447 .await
3448 .unwrap();
3449 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3450
3451 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3452 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3453 async move { buffer.await.unwrap() }
3454 };
3455 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3456 tree.read_with(cx, |tree, _| {
3457 tree.entry_for_path(path)
3458 .expect(&format!("no entry for path {}", path))
3459 .id
3460 })
3461 };
3462
3463 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3464 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3465 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3466 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3467
3468 let file2_id = id_for_path("a/file2", &cx);
3469 let file3_id = id_for_path("a/file3", &cx);
3470 let file4_id = id_for_path("b/c/file4", &cx);
3471
3472 // Wait for the initial scan.
3473 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3474 .await;
3475
3476 // Create a remote copy of this worktree.
3477 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
3478 let (remote, load_task) = cx.update(|cx| {
3479 Worktree::remote(
3480 1,
3481 1,
3482 initial_snapshot.to_proto(&Default::default(), Default::default()),
3483 rpc.clone(),
3484 cx,
3485 )
3486 });
3487 load_task.await;
3488
3489 cx.read(|cx| {
3490 assert!(!buffer2.read(cx).is_dirty());
3491 assert!(!buffer3.read(cx).is_dirty());
3492 assert!(!buffer4.read(cx).is_dirty());
3493 assert!(!buffer5.read(cx).is_dirty());
3494 });
3495
3496 // Rename and delete files and directories.
3497 tree.flush_fs_events(&cx).await;
3498 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3499 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3500 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3501 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3502 tree.flush_fs_events(&cx).await;
3503
3504 let expected_paths = vec![
3505 "a",
3506 "a/file1",
3507 "a/file2.new",
3508 "b",
3509 "d",
3510 "d/file3",
3511 "d/file4",
3512 ];
3513
3514 cx.read(|app| {
3515 assert_eq!(
3516 tree.read(app)
3517 .paths()
3518 .map(|p| p.to_str().unwrap())
3519 .collect::<Vec<_>>(),
3520 expected_paths
3521 );
3522
3523 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3524 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3525 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3526
3527 assert_eq!(
3528 buffer2.read(app).file().unwrap().path().as_ref(),
3529 Path::new("a/file2.new")
3530 );
3531 assert_eq!(
3532 buffer3.read(app).file().unwrap().path().as_ref(),
3533 Path::new("d/file3")
3534 );
3535 assert_eq!(
3536 buffer4.read(app).file().unwrap().path().as_ref(),
3537 Path::new("d/file4")
3538 );
3539 assert_eq!(
3540 buffer5.read(app).file().unwrap().path().as_ref(),
3541 Path::new("b/c/file5")
3542 );
3543
3544 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3545 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3546 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3547 assert!(buffer5.read(app).file().unwrap().is_deleted());
3548 });
3549
3550 // Update the remote worktree. Check that it becomes consistent with the
3551 // local worktree.
3552 remote.update(&mut cx, |remote, cx| {
3553 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
3554 &initial_snapshot,
3555 1,
3556 1,
3557 0,
3558 true,
3559 );
3560 remote
3561 .as_remote_mut()
3562 .unwrap()
3563 .snapshot
3564 .apply_remote_update(update_message)
3565 .unwrap();
3566
3567 assert_eq!(
3568 remote
3569 .paths()
3570 .map(|p| p.to_str().unwrap())
3571 .collect::<Vec<_>>(),
3572 expected_paths
3573 );
3574 });
3575 }
3576
3577 #[gpui::test]
3578 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3579 let fs = Arc::new(FakeFs::new(cx.background()));
3580 fs.insert_tree(
3581 "/the-dir",
3582 json!({
3583 "a.txt": "a-contents",
3584 "b.txt": "b-contents",
3585 }),
3586 )
3587 .await;
3588
3589 let project = Project::test(fs.clone(), &mut cx);
3590 let worktree_id = project
3591 .update(&mut cx, |p, cx| {
3592 p.find_or_create_local_worktree("/the-dir", false, cx)
3593 })
3594 .await
3595 .unwrap()
3596 .0
3597 .read_with(&cx, |tree, _| tree.id());
3598
3599 // Spawn multiple tasks to open paths, repeating some paths.
3600 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3601 (
3602 p.open_buffer((worktree_id, "a.txt"), cx),
3603 p.open_buffer((worktree_id, "b.txt"), cx),
3604 p.open_buffer((worktree_id, "a.txt"), cx),
3605 )
3606 });
3607
3608 let buffer_a_1 = buffer_a_1.await.unwrap();
3609 let buffer_a_2 = buffer_a_2.await.unwrap();
3610 let buffer_b = buffer_b.await.unwrap();
3611 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3612 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3613
3614 // There is only one buffer per path.
3615 let buffer_a_id = buffer_a_1.id();
3616 assert_eq!(buffer_a_2.id(), buffer_a_id);
3617
3618 // Open the same path again while it is still open.
3619 drop(buffer_a_1);
3620 let buffer_a_3 = project
3621 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3622 .await
3623 .unwrap();
3624
3625 // There's still only one buffer per path.
3626 assert_eq!(buffer_a_3.id(), buffer_a_id);
3627 }
3628
3629 #[gpui::test]
3630 async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
3631 use std::fs;
3632
3633 let dir = temp_tree(json!({
3634 "file1": "abc",
3635 "file2": "def",
3636 "file3": "ghi",
3637 }));
3638
3639 let project = Project::test(Arc::new(RealFs), &mut cx);
3640 let (worktree, _) = project
3641 .update(&mut cx, |p, cx| {
3642 p.find_or_create_local_worktree(dir.path(), false, cx)
3643 })
3644 .await
3645 .unwrap();
3646 let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
3647
3648 worktree.flush_fs_events(&cx).await;
3649 worktree
3650 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3651 .await;
3652
3653 let buffer1 = project
3654 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3655 .await
3656 .unwrap();
3657 let events = Rc::new(RefCell::new(Vec::new()));
3658
3659 // initially, the buffer isn't dirty.
3660 buffer1.update(&mut cx, |buffer, cx| {
3661 cx.subscribe(&buffer1, {
3662 let events = events.clone();
3663 move |_, _, event, _| events.borrow_mut().push(event.clone())
3664 })
3665 .detach();
3666
3667 assert!(!buffer.is_dirty());
3668 assert!(events.borrow().is_empty());
3669
3670 buffer.edit(vec![1..2], "", cx);
3671 });
3672
3673 // after the first edit, the buffer is dirty, and emits a dirtied event.
3674 buffer1.update(&mut cx, |buffer, cx| {
3675 assert!(buffer.text() == "ac");
3676 assert!(buffer.is_dirty());
3677 assert_eq!(
3678 *events.borrow(),
3679 &[language::Event::Edited, language::Event::Dirtied]
3680 );
3681 events.borrow_mut().clear();
3682 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
3683 });
3684
3685 // after saving, the buffer is not dirty, and emits a saved event.
3686 buffer1.update(&mut cx, |buffer, cx| {
3687 assert!(!buffer.is_dirty());
3688 assert_eq!(*events.borrow(), &[language::Event::Saved]);
3689 events.borrow_mut().clear();
3690
3691 buffer.edit(vec![1..1], "B", cx);
3692 buffer.edit(vec![2..2], "D", cx);
3693 });
3694
3695 // after editing again, the buffer is dirty, and emits another dirty event.
3696 buffer1.update(&mut cx, |buffer, cx| {
3697 assert!(buffer.text() == "aBDc");
3698 assert!(buffer.is_dirty());
3699 assert_eq!(
3700 *events.borrow(),
3701 &[
3702 language::Event::Edited,
3703 language::Event::Dirtied,
3704 language::Event::Edited,
3705 ],
3706 );
3707 events.borrow_mut().clear();
3708
3709 // TODO - currently, after restoring the buffer to its
3710 // previously-saved state, the is still considered dirty.
3711 buffer.edit([1..3], "", cx);
3712 assert!(buffer.text() == "ac");
3713 assert!(buffer.is_dirty());
3714 });
3715
3716 assert_eq!(*events.borrow(), &[language::Event::Edited]);
3717
3718 // When a file is deleted, the buffer is considered dirty.
3719 let events = Rc::new(RefCell::new(Vec::new()));
3720 let buffer2 = project
3721 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
3722 .await
3723 .unwrap();
3724 buffer2.update(&mut cx, |_, cx| {
3725 cx.subscribe(&buffer2, {
3726 let events = events.clone();
3727 move |_, _, event, _| events.borrow_mut().push(event.clone())
3728 })
3729 .detach();
3730 });
3731
3732 fs::remove_file(dir.path().join("file2")).unwrap();
3733 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
3734 assert_eq!(
3735 *events.borrow(),
3736 &[language::Event::Dirtied, language::Event::FileHandleChanged]
3737 );
3738
3739 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3740 let events = Rc::new(RefCell::new(Vec::new()));
3741 let buffer3 = project
3742 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
3743 .await
3744 .unwrap();
3745 buffer3.update(&mut cx, |_, cx| {
3746 cx.subscribe(&buffer3, {
3747 let events = events.clone();
3748 move |_, _, event, _| events.borrow_mut().push(event.clone())
3749 })
3750 .detach();
3751 });
3752
3753 worktree.flush_fs_events(&cx).await;
3754 buffer3.update(&mut cx, |buffer, cx| {
3755 buffer.edit(Some(0..0), "x", cx);
3756 });
3757 events.borrow_mut().clear();
3758 fs::remove_file(dir.path().join("file3")).unwrap();
3759 buffer3
3760 .condition(&cx, |_, _| !events.borrow().is_empty())
3761 .await;
3762 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
3763 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3764 }
3765
3766 #[gpui::test]
3767 async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
3768 use std::fs;
3769
3770 let initial_contents = "aaa\nbbbbb\nc\n";
3771 let dir = temp_tree(json!({ "the-file": initial_contents }));
3772
3773 let project = Project::test(Arc::new(RealFs), &mut cx);
3774 let (worktree, _) = project
3775 .update(&mut cx, |p, cx| {
3776 p.find_or_create_local_worktree(dir.path(), false, cx)
3777 })
3778 .await
3779 .unwrap();
3780 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
3781
3782 worktree
3783 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3784 .await;
3785
3786 let abs_path = dir.path().join("the-file");
3787 let buffer = project
3788 .update(&mut cx, |p, cx| {
3789 p.open_buffer((worktree_id, "the-file"), cx)
3790 })
3791 .await
3792 .unwrap();
3793
3794 // TODO
3795 // Add a cursor on each row.
3796 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
3797 // assert!(!buffer.is_dirty());
3798 // buffer.add_selection_set(
3799 // &(0..3)
3800 // .map(|row| Selection {
3801 // id: row as usize,
3802 // start: Point::new(row, 1),
3803 // end: Point::new(row, 1),
3804 // reversed: false,
3805 // goal: SelectionGoal::None,
3806 // })
3807 // .collect::<Vec<_>>(),
3808 // cx,
3809 // )
3810 // });
3811
3812 // Change the file on disk, adding two new lines of text, and removing
3813 // one line.
3814 buffer.read_with(&cx, |buffer, _| {
3815 assert!(!buffer.is_dirty());
3816 assert!(!buffer.has_conflict());
3817 });
3818 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3819 fs::write(&abs_path, new_contents).unwrap();
3820
3821 // Because the buffer was not modified, it is reloaded from disk. Its
3822 // contents are edited according to the diff between the old and new
3823 // file contents.
3824 buffer
3825 .condition(&cx, |buffer, _| buffer.text() == new_contents)
3826 .await;
3827
3828 buffer.update(&mut cx, |buffer, _| {
3829 assert_eq!(buffer.text(), new_contents);
3830 assert!(!buffer.is_dirty());
3831 assert!(!buffer.has_conflict());
3832
3833 // TODO
3834 // let cursor_positions = buffer
3835 // .selection_set(selection_set_id)
3836 // .unwrap()
3837 // .selections::<Point>(&*buffer)
3838 // .map(|selection| {
3839 // assert_eq!(selection.start, selection.end);
3840 // selection.start
3841 // })
3842 // .collect::<Vec<_>>();
3843 // assert_eq!(
3844 // cursor_positions,
3845 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
3846 // );
3847 });
3848
3849 // Modify the buffer
3850 buffer.update(&mut cx, |buffer, cx| {
3851 buffer.edit(vec![0..0], " ", cx);
3852 assert!(buffer.is_dirty());
3853 assert!(!buffer.has_conflict());
3854 });
3855
3856 // Change the file on disk again, adding blank lines to the beginning.
3857 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
3858
3859 // Because the buffer is modified, it doesn't reload from disk, but is
3860 // marked as having a conflict.
3861 buffer
3862 .condition(&cx, |buffer, _| buffer.has_conflict())
3863 .await;
3864 }
3865
3866 #[gpui::test]
3867 async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
3868 let fs = Arc::new(FakeFs::new(cx.background()));
3869 fs.insert_tree(
3870 "/the-dir",
3871 json!({
3872 "a.rs": "
3873 fn foo(mut v: Vec<usize>) {
3874 for x in &v {
3875 v.push(1);
3876 }
3877 }
3878 "
3879 .unindent(),
3880 }),
3881 )
3882 .await;
3883
3884 let project = Project::test(fs.clone(), &mut cx);
3885 let (worktree, _) = project
3886 .update(&mut cx, |p, cx| {
3887 p.find_or_create_local_worktree("/the-dir", false, cx)
3888 })
3889 .await
3890 .unwrap();
3891 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
3892
3893 let buffer = project
3894 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3895 .await
3896 .unwrap();
3897
3898 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3899 let message = lsp::PublishDiagnosticsParams {
3900 uri: buffer_uri.clone(),
3901 diagnostics: vec![
3902 lsp::Diagnostic {
3903 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3904 severity: Some(DiagnosticSeverity::WARNING),
3905 message: "error 1".to_string(),
3906 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3907 location: lsp::Location {
3908 uri: buffer_uri.clone(),
3909 range: lsp::Range::new(
3910 lsp::Position::new(1, 8),
3911 lsp::Position::new(1, 9),
3912 ),
3913 },
3914 message: "error 1 hint 1".to_string(),
3915 }]),
3916 ..Default::default()
3917 },
3918 lsp::Diagnostic {
3919 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3920 severity: Some(DiagnosticSeverity::HINT),
3921 message: "error 1 hint 1".to_string(),
3922 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3923 location: lsp::Location {
3924 uri: buffer_uri.clone(),
3925 range: lsp::Range::new(
3926 lsp::Position::new(1, 8),
3927 lsp::Position::new(1, 9),
3928 ),
3929 },
3930 message: "original diagnostic".to_string(),
3931 }]),
3932 ..Default::default()
3933 },
3934 lsp::Diagnostic {
3935 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3936 severity: Some(DiagnosticSeverity::ERROR),
3937 message: "error 2".to_string(),
3938 related_information: Some(vec![
3939 lsp::DiagnosticRelatedInformation {
3940 location: lsp::Location {
3941 uri: buffer_uri.clone(),
3942 range: lsp::Range::new(
3943 lsp::Position::new(1, 13),
3944 lsp::Position::new(1, 15),
3945 ),
3946 },
3947 message: "error 2 hint 1".to_string(),
3948 },
3949 lsp::DiagnosticRelatedInformation {
3950 location: lsp::Location {
3951 uri: buffer_uri.clone(),
3952 range: lsp::Range::new(
3953 lsp::Position::new(1, 13),
3954 lsp::Position::new(1, 15),
3955 ),
3956 },
3957 message: "error 2 hint 2".to_string(),
3958 },
3959 ]),
3960 ..Default::default()
3961 },
3962 lsp::Diagnostic {
3963 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3964 severity: Some(DiagnosticSeverity::HINT),
3965 message: "error 2 hint 1".to_string(),
3966 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3967 location: lsp::Location {
3968 uri: buffer_uri.clone(),
3969 range: lsp::Range::new(
3970 lsp::Position::new(2, 8),
3971 lsp::Position::new(2, 17),
3972 ),
3973 },
3974 message: "original diagnostic".to_string(),
3975 }]),
3976 ..Default::default()
3977 },
3978 lsp::Diagnostic {
3979 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3980 severity: Some(DiagnosticSeverity::HINT),
3981 message: "error 2 hint 2".to_string(),
3982 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3983 location: lsp::Location {
3984 uri: buffer_uri.clone(),
3985 range: lsp::Range::new(
3986 lsp::Position::new(2, 8),
3987 lsp::Position::new(2, 17),
3988 ),
3989 },
3990 message: "original diagnostic".to_string(),
3991 }]),
3992 ..Default::default()
3993 },
3994 ],
3995 version: None,
3996 };
3997
3998 project
3999 .update(&mut cx, |p, cx| {
4000 p.update_diagnostics(message, &Default::default(), cx)
4001 })
4002 .unwrap();
4003 let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
4004
4005 assert_eq!(
4006 buffer
4007 .diagnostics_in_range::<_, Point>(0..buffer.len())
4008 .collect::<Vec<_>>(),
4009 &[
4010 DiagnosticEntry {
4011 range: Point::new(1, 8)..Point::new(1, 9),
4012 diagnostic: Diagnostic {
4013 severity: DiagnosticSeverity::WARNING,
4014 message: "error 1".to_string(),
4015 group_id: 0,
4016 is_primary: true,
4017 ..Default::default()
4018 }
4019 },
4020 DiagnosticEntry {
4021 range: Point::new(1, 8)..Point::new(1, 9),
4022 diagnostic: Diagnostic {
4023 severity: DiagnosticSeverity::HINT,
4024 message: "error 1 hint 1".to_string(),
4025 group_id: 0,
4026 is_primary: false,
4027 ..Default::default()
4028 }
4029 },
4030 DiagnosticEntry {
4031 range: Point::new(1, 13)..Point::new(1, 15),
4032 diagnostic: Diagnostic {
4033 severity: DiagnosticSeverity::HINT,
4034 message: "error 2 hint 1".to_string(),
4035 group_id: 1,
4036 is_primary: false,
4037 ..Default::default()
4038 }
4039 },
4040 DiagnosticEntry {
4041 range: Point::new(1, 13)..Point::new(1, 15),
4042 diagnostic: Diagnostic {
4043 severity: DiagnosticSeverity::HINT,
4044 message: "error 2 hint 2".to_string(),
4045 group_id: 1,
4046 is_primary: false,
4047 ..Default::default()
4048 }
4049 },
4050 DiagnosticEntry {
4051 range: Point::new(2, 8)..Point::new(2, 17),
4052 diagnostic: Diagnostic {
4053 severity: DiagnosticSeverity::ERROR,
4054 message: "error 2".to_string(),
4055 group_id: 1,
4056 is_primary: true,
4057 ..Default::default()
4058 }
4059 }
4060 ]
4061 );
4062
4063 assert_eq!(
4064 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4065 &[
4066 DiagnosticEntry {
4067 range: Point::new(1, 8)..Point::new(1, 9),
4068 diagnostic: Diagnostic {
4069 severity: DiagnosticSeverity::WARNING,
4070 message: "error 1".to_string(),
4071 group_id: 0,
4072 is_primary: true,
4073 ..Default::default()
4074 }
4075 },
4076 DiagnosticEntry {
4077 range: Point::new(1, 8)..Point::new(1, 9),
4078 diagnostic: Diagnostic {
4079 severity: DiagnosticSeverity::HINT,
4080 message: "error 1 hint 1".to_string(),
4081 group_id: 0,
4082 is_primary: false,
4083 ..Default::default()
4084 }
4085 },
4086 ]
4087 );
4088 assert_eq!(
4089 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4090 &[
4091 DiagnosticEntry {
4092 range: Point::new(1, 13)..Point::new(1, 15),
4093 diagnostic: Diagnostic {
4094 severity: DiagnosticSeverity::HINT,
4095 message: "error 2 hint 1".to_string(),
4096 group_id: 1,
4097 is_primary: false,
4098 ..Default::default()
4099 }
4100 },
4101 DiagnosticEntry {
4102 range: Point::new(1, 13)..Point::new(1, 15),
4103 diagnostic: Diagnostic {
4104 severity: DiagnosticSeverity::HINT,
4105 message: "error 2 hint 2".to_string(),
4106 group_id: 1,
4107 is_primary: false,
4108 ..Default::default()
4109 }
4110 },
4111 DiagnosticEntry {
4112 range: Point::new(2, 8)..Point::new(2, 17),
4113 diagnostic: Diagnostic {
4114 severity: DiagnosticSeverity::ERROR,
4115 message: "error 2".to_string(),
4116 group_id: 1,
4117 is_primary: true,
4118 ..Default::default()
4119 }
4120 }
4121 ]
4122 );
4123 }
4124}