1pub mod fs;
2mod ignore;
3pub mod worktree;
4
5use anyhow::{anyhow, Context, Result};
6use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
7use clock::ReplicaId;
8use collections::{hash_map, HashMap, HashSet};
9use futures::Future;
10use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
11use gpui::{
12 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
13 UpgradeModelHandle, WeakModelHandle,
14};
15use language::{
16 point_from_lsp,
17 proto::{deserialize_anchor, serialize_anchor},
18 range_from_lsp, AnchorRangeExt, Bias, Buffer, CodeAction, Completion, CompletionLabel,
19 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
20 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
21};
22use lsp::{DiagnosticSeverity, LanguageServer};
23use postage::{broadcast, prelude::Stream, sink::Sink, watch};
24use smol::block_on;
25use std::{
26 convert::TryInto,
27 ops::Range,
28 path::{Path, PathBuf},
29 sync::{atomic::AtomicBool, Arc},
30 time::Instant,
31};
32use util::{post_inc, ResultExt, TryFutureExt as _};
33
34pub use fs::*;
35pub use worktree::*;
36
/// The in-memory representation of an open project: a set of worktrees plus
/// the buffers, language servers, and collaboration state associated with
/// them. A project is either local (backed by the filesystem) or remote (a
/// replica of a collaborator's project, synchronized over RPC).
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    // The project entry most recently activated in the UI, if any.
    active_entry: Option<ProjectEntry>,
    languages: Arc<LanguageRegistry>,
    // Running language servers, keyed by (worktree, language name).
    language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
    client: Arc<client::Client>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    // Local vs. remote role, plus the state specific to each role.
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    // Net count of language servers currently reporting disk-based
    // diagnostics; signed — presumably to tolerate unbalanced
    // start/finish notifications without underflowing (TODO confirm).
    language_servers_with_diagnostics_running: isize,
    // Every buffer this project has opened, keyed by the buffer's remote id.
    open_buffers: HashMap<u64, OpenBuffer>,
    // Notifies subscribers each time a buffer finishes opening.
    opened_buffer: broadcast::Sender<()>,
    // In-flight buffer loads, so concurrent opens of the same path share a
    // single load task and receive the same result.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // Strong handles to buffers shared with each peer, keyed by peer and
    // buffer remote id.
    shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
}
57
/// A buffer tracked by the project. While a buffer's initial state is still
/// being loaded, operations that arrive for it are accumulated so they can be
/// applied once the buffer exists (see `register_buffer`).
enum OpenBuffer {
    Loaded(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}
62
/// A handle to a worktree that is either strong (keeps the worktree alive) or
/// weak (allows it to be dropped when nothing else references it).
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
67
/// Collaboration state, depending on whether this replica hosts the project.
enum ProjectClientState {
    /// This instance owns the project on the local filesystem.
    Local {
        is_shared: bool,
        // Publishes the server-assigned project id (None while disconnected).
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        // Background task that re-registers the project whenever the client
        // reconnects, keeping the remote id current (see `Project::local`).
        _maintain_remote_id_task: Task<Option<()>>,
    },
    /// This instance is a guest replica of a project hosted elsewhere.
    Remote {
        // Set when the host stops sharing; the project becomes read-only
        // (see `is_read_only`).
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
    },
}
81
/// Another user participating in this project, along with the replica id that
/// tags their buffer operations.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
88
/// Events emitted by a `Project` for observers (e.g. the workspace UI).
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntry>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
}
98
/// A path to a file or directory, expressed relative to the root of a
/// specific worktree.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}
104
/// Counts of primary diagnostics for a single file, bucketed by LSP severity.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}
112
/// The result of a go-to-definition request: the buffer containing the
/// definition and the anchored range it occupies there.
#[derive(Debug)]
pub struct Definition {
    pub target_buffer: ModelHandle<Buffer>,
    pub target_range: Range<language::Anchor>,
}
118
/// A group of edits spanning multiple buffers, stored as one `Transaction`
/// per buffer so the whole group can be handled together.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
121
122impl DiagnosticSummary {
123 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
124 let mut this = Self {
125 error_count: 0,
126 warning_count: 0,
127 info_count: 0,
128 hint_count: 0,
129 };
130
131 for entry in diagnostics {
132 if entry.diagnostic.is_primary {
133 match entry.diagnostic.severity {
134 DiagnosticSeverity::ERROR => this.error_count += 1,
135 DiagnosticSeverity::WARNING => this.warning_count += 1,
136 DiagnosticSeverity::INFORMATION => this.info_count += 1,
137 DiagnosticSeverity::HINT => this.hint_count += 1,
138 _ => {}
139 }
140 }
141 }
142
143 this
144 }
145
146 pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
147 proto::DiagnosticSummary {
148 path: path.to_string_lossy().to_string(),
149 error_count: self.error_count as u32,
150 warning_count: self.warning_count as u32,
151 info_count: self.info_count as u32,
152 hint_count: self.hint_count as u32,
153 }
154 }
155}
156
/// Identifies a single entry (file or directory) within a worktree.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ProjectEntry {
    pub worktree_id: WorktreeId,
    pub entry_id: usize,
}
162
163impl Project {
    /// Registers all of the project's RPC message and request handlers on the
    /// client so incoming collaboration traffic is routed to the appropriate
    /// `Project` entity.
    pub fn init(client: &Arc<Client>) {
        client.add_entity_message_handler(Self::handle_add_collaborator);
        client.add_entity_message_handler(Self::handle_buffer_reloaded);
        client.add_entity_message_handler(Self::handle_buffer_saved);
        client.add_entity_message_handler(Self::handle_close_buffer);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
        client.add_entity_message_handler(Self::handle_remove_collaborator);
        client.add_entity_message_handler(Self::handle_share_worktree);
        client.add_entity_message_handler(Self::handle_unregister_worktree);
        client.add_entity_message_handler(Self::handle_unshare_project);
        client.add_entity_message_handler(Self::handle_update_buffer_file);
        client.add_entity_message_handler(Self::handle_update_buffer);
        client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
        client.add_entity_message_handler(Self::handle_update_worktree);
        client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_entity_request_handler(Self::handle_apply_code_action);
        client.add_entity_request_handler(Self::handle_format_buffers);
        client.add_entity_request_handler(Self::handle_get_code_actions);
        client.add_entity_request_handler(Self::handle_get_completions);
        client.add_entity_request_handler(Self::handle_get_definition);
        client.add_entity_request_handler(Self::handle_open_buffer);
        client.add_entity_request_handler(Self::handle_save_buffer);
    }
188
    /// Creates a new local project. Spawns a background task that watches the
    /// client's connection status: on every (re)connection it registers the
    /// project with the server, re-registers all worktrees under the new
    /// project id, and publishes the id via `set_remote_id`; on disconnection
    /// it clears the id.
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            // Weak spawn: the task must not keep the project alive; it exits
            // once the project is dropped (upgrade fails) or the status
            // stream ends.
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.recv().await {
                            if let Some(this) = this.upgrade(&cx) {
                                // Only a connected client can hold a project id.
                                let remote_id = if let client::Status::Connected { .. } = status {
                                    let response = rpc.request(proto::RegisterProject {}).await?;
                                    Some(response.project_id)
                                } else {
                                    None
                                };

                                if let Some(project_id) = remote_id {
                                    // Re-register every worktree under the
                                    // freshly assigned project id.
                                    let mut registrations = Vec::new();
                                    this.update(&mut cx, |this, cx| {
                                        for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                                            registrations.push(worktree.update(
                                                cx,
                                                |worktree, cx| {
                                                    let worktree = worktree.as_local_mut().unwrap();
                                                    worktree.register(project_id, cx)
                                                },
                                            ));
                                        }
                                    });
                                    for registration in registrations {
                                        registration.await?;
                                    }
                                }
                                this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                open_buffers: Default::default(),
                loading_buffers: Default::default(),
                shared_buffers: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: broadcast::channel(1).0,
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
            }
        })
    }
262
    /// Joins a project hosted by another user and builds a local replica:
    /// connects the client, fetches the project's worktrees (loading their
    /// contents in the background), and populates the collaborator list from
    /// the join response.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(&cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let replica_id = response.replica_id as ReplicaId;

        // Instantiate a remote worktree for each worktree in the response and
        // let their contents stream in without blocking the join.
        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let this = cx.add_model(|cx| {
            let mut this = Self {
                worktrees: Vec::new(),
                open_buffers: Default::default(),
                loading_buffers: Default::default(),
                opened_buffer: broadcast::channel(1).0,
                shared_buffers: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                // Route incoming messages for this remote entity to the model.
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client,
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        // Load the collaborators' user records before constructing them.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
337
338 #[cfg(any(test, feature = "test-support"))]
339 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
340 let languages = Arc::new(LanguageRegistry::new());
341 let http_client = client::test::FakeHttpClient::with_404_response();
342 let client = client::Client::new(http_client.clone());
343 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
344 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
345 }
346
347 #[cfg(any(test, feature = "test-support"))]
348 pub fn shared_buffer(&self, peer_id: PeerId, remote_id: u64) -> Option<ModelHandle<Buffer>> {
349 self.shared_buffers
350 .get(&peer_id)
351 .and_then(|buffers| buffers.get(&remote_id))
352 .cloned()
353 }
354
355 #[cfg(any(test, feature = "test-support"))]
356 pub fn has_buffered_operations(&self) -> bool {
357 self.open_buffers
358 .values()
359 .any(|buffer| matches!(buffer, OpenBuffer::Loading(_)))
360 }
361
    /// The filesystem implementation this project reads and writes through.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
365
366 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
367 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
368 *remote_id_tx.borrow_mut() = remote_id;
369 }
370
371 self.subscriptions.clear();
372 if let Some(remote_id) = remote_id {
373 self.subscriptions
374 .push(self.client.add_model_for_remote_entity(remote_id, cx));
375 }
376 }
377
378 pub fn remote_id(&self) -> Option<u64> {
379 match &self.client_state {
380 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
381 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
382 }
383 }
384
385 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
386 let mut id = None;
387 let mut watch = None;
388 match &self.client_state {
389 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
390 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
391 }
392
393 async move {
394 if let Some(id) = id {
395 return id;
396 }
397 let mut watch = watch.unwrap();
398 loop {
399 let id = *watch.borrow();
400 if let Some(id) = id {
401 return id;
402 }
403 watch.recv().await;
404 }
405 }
406 }
407
408 pub fn replica_id(&self) -> ReplicaId {
409 match &self.client_state {
410 ProjectClientState::Local { .. } => 0,
411 ProjectClientState::Remote { replica_id, .. } => *replica_id,
412 }
413 }
414
    /// The users currently collaborating on this project, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
418
    /// Iterates over the project's live worktrees, silently skipping weak
    /// handles whose worktrees have already been dropped.
    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }
427
428 pub fn worktree_for_id(
429 &self,
430 id: WorktreeId,
431 cx: &AppContext,
432 ) -> Option<ModelHandle<Worktree>> {
433 self.worktrees(cx)
434 .find(|worktree| worktree.read(cx).id() == id)
435 }
436
    /// Makes this local project visible to other users: marks it shared,
    /// announces it to the server, then shares every worktree's contents.
    /// Fails if the project has no server-assigned id yet (client not
    /// connected) or if called on a remote project.
    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            // Flip the shared flag and grab the current project id.
            let project_id = this.update(&mut cx, |this, _| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;
                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;
            // Kick off sharing for each worktree, then await them all.
            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
473
474 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
475 let rpc = self.client.clone();
476 cx.spawn(|this, mut cx| async move {
477 let project_id = this.update(&mut cx, |this, _| {
478 if let ProjectClientState::Local {
479 is_shared,
480 remote_id_rx,
481 ..
482 } = &mut this.client_state
483 {
484 *is_shared = false;
485 remote_id_rx
486 .borrow()
487 .ok_or_else(|| anyhow!("no project id"))
488 } else {
489 Err(anyhow!("can't share a remote project"))
490 }
491 })?;
492
493 rpc.send(proto::UnshareProject { project_id })?;
494 this.update(&mut cx, |this, cx| {
495 this.collaborators.clear();
496 this.shared_buffers.clear();
497 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
498 worktree.update(cx, |worktree, _| {
499 worktree.as_local_mut().unwrap().unshare();
500 });
501 }
502 cx.notify()
503 });
504 Ok(())
505 })
506 }
507
508 pub fn is_read_only(&self) -> bool {
509 match &self.client_state {
510 ProjectClientState::Local { .. } => false,
511 ProjectClientState::Remote {
512 sharing_has_stopped,
513 ..
514 } => *sharing_has_stopped,
515 }
516 }
517
518 pub fn is_local(&self) -> bool {
519 match &self.client_state {
520 ProjectClientState::Local { .. } => true,
521 ProjectClientState::Remote { .. } => false,
522 }
523 }
524
    /// Whether this instance is a guest replica of a project hosted elsewhere.
    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }
528
    /// Opens (or returns an already-open) buffer for the given project path.
    /// Concurrent opens of the same path are deduplicated: they all wait on a
    /// single load task and receive the same buffer or the same error.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                // Local worktrees load from disk; remote ones ask the host.
                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    // Publish the result to every waiter through the watch.
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        // With no loads in flight, drop any `Loading` entries
                        // that never resolved to an actual buffer.
                        if this.loading_buffers.is_empty() {
                            this.open_buffers
                                .retain(|_, buffer| matches!(buffer, OpenBuffer::Loaded(_)))
                        }

                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait until the watch yields the load result (shared by all waiters).
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.recv().await;
            }
        })
    }
594
    /// Loads a buffer for `path` from a local worktree and registers it with
    /// the project (assigning a language, language server, and any stored
    /// diagnostics via `register_buffer`).
    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        // Hold only a weak handle while loading, so the worktree can still be
        // removed in the meantime; that case surfaces as an error below.
        let worktree = worktree.downgrade();
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            let worktree = worktree
                .upgrade(&cx)
                .ok_or_else(|| anyhow!("worktree was removed"))?;
            this.update(&mut cx, |this, cx| {
                this.register_buffer(&buffer, Some(&worktree), cx)
            })?;
            Ok(buffer)
        })
    }
617
    /// Asks the project host to open a buffer for `path` in a remote worktree,
    /// then reconstructs the buffer locally from the returned state.
    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        // NOTE(review): unwrap presumes a remote project always has an id —
        // true for `ProjectClientState::Remote`, which this path implies.
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBuffer {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }
642
    /// Opens a buffer for a file referenced by a language server (e.g. a
    /// go-to-definition target outside the project), given its `file://` URI.
    /// If no existing worktree contains the path, a new worktree rooted at the
    /// file is created and the language server is associated with it too.
    fn open_local_buffer_from_lsp_path(
        &mut self,
        abs_path: lsp::Url,
        lang_name: String,
        lang_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                // No worktree contains this path; create one rooted at it.
                // (The `true` flag's meaning comes from `create_local_worktree`
                // — presumably marks the worktree weak/ephemeral; confirm.)
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, true, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers
                        .insert((worktree.read(cx).id(), lang_name), lang_server);
                });
                // The worktree is rooted at the file itself, so the relative
                // path is empty.
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
679
    /// Saves `buffer` under a new absolute path, creating a local worktree
    /// for that location if necessary, then re-derives the buffer's language
    /// from its new file name.
    pub fn save_buffer_as(
        &self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            // The file name may have changed, so re-assign the language.
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
            });
            Ok(())
        })
    }
703
704 #[cfg(any(test, feature = "test-support"))]
705 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
706 let path = path.into();
707 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
708 self.open_buffers.iter().any(|(_, buffer)| {
709 if let Some(buffer) = buffer.upgrade(cx) {
710 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
711 if file.worktree == worktree && file.path() == &path.path {
712 return true;
713 }
714 }
715 }
716 false
717 })
718 } else {
719 false
720 }
721 }
722
    /// Returns the open buffer for `path`, if any. As a side effect, prunes
    /// `open_buffers` entries whose buffers have been dropped.
    fn get_open_buffer(
        &mut self,
        path: &ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Option<ModelHandle<Buffer>> {
        let mut result = None;
        let worktree = self.worktree_for_id(path.worktree_id, cx)?;
        self.open_buffers.retain(|_, buffer| {
            if let Some(buffer) = buffer.upgrade(cx) {
                if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                    if file.worktree == worktree && file.path() == &path.path {
                        result = Some(buffer);
                    }
                }
                true
            } else {
                // The buffer was dropped; discard the stale entry.
                false
            }
        });
        result
    }
744
745 fn register_buffer(
746 &mut self,
747 buffer: &ModelHandle<Buffer>,
748 worktree: Option<&ModelHandle<Worktree>>,
749 cx: &mut ModelContext<Self>,
750 ) -> Result<()> {
751 match self.open_buffers.insert(
752 buffer.read(cx).remote_id(),
753 OpenBuffer::Loaded(buffer.downgrade()),
754 ) {
755 None => {}
756 Some(OpenBuffer::Loading(operations)) => {
757 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
758 }
759 Some(OpenBuffer::Loaded(_)) => Err(anyhow!("registered the same buffer twice"))?,
760 }
761 self.assign_language_to_buffer(&buffer, worktree, cx);
762 Ok(())
763 }
764
    /// Derives the buffer's language from its file name and, for local
    /// worktrees, wires up a language server (starting one if none is running
    /// for this worktree/language pair) plus any diagnostics previously
    /// recorded for the path. The `Option<()>` return exists only so `?` can
    /// be used for the early exit when the buffer has no file.
    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        let (path, full_path) = {
            let file = buffer.read(cx).file()?;
            (file.path().clone(), file.full_path(cx))
        };

        // If the buffer has a language, set it and start/assign the language server
        if let Some(language) = self.languages.select_language(&full_path) {
            buffer.update(cx, |buffer, cx| {
                buffer.set_language(Some(language.clone()), cx);
            });

            // For local worktrees, start a language server if needed.
            // Also assign the language server and any previously stored diagnostics to the buffer.
            if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
                let worktree_id = local_worktree.id();
                let worktree_abs_path = local_worktree.abs_path().clone();

                // Reuse a running server for this (worktree, language) pair,
                // otherwise try to start a fresh one and cache it.
                let language_server = match self
                    .language_servers
                    .entry((worktree_id, language.name().to_string()))
                {
                    hash_map::Entry::Occupied(e) => Some(e.get().clone()),
                    hash_map::Entry::Vacant(e) => Self::start_language_server(
                        self.client.clone(),
                        language.clone(),
                        &worktree_abs_path,
                        cx,
                    )
                    .map(|server| e.insert(server).clone()),
                };

                buffer.update(cx, |buffer, cx| {
                    buffer.set_language_server(language_server, cx);
                });
            }
        }

        // Apply any diagnostics the worktree already holds for this path.
        if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
            if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
                buffer.update(cx, |buffer, cx| {
                    buffer.update_diagnostics(diagnostics, None, cx).log_err();
                });
            }
        }

        None
    }
818
    /// Starts a language server for `language` rooted at `worktree_path` and
    /// spawns tasks that translate its notifications into project state:
    /// `PublishDiagnostics` payloads update diagnostics, and `$/progress`
    /// notifications matching the language's disk-based-diagnostics token
    /// drive started/finished events (relayed to collaborators when the
    /// project is shared). Returns `None` if the server fails to start.
    fn start_language_server(
        rpc: Arc<Client>,
        language: Arc<Language>,
        worktree_path: &Path,
        cx: &mut ModelContext<Self>,
    ) -> Option<Arc<LanguageServer>> {
        // Internal event type decoupling LSP callback threads from the model
        // update loop via a channel.
        enum LspEvent {
            DiagnosticsStart,
            DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
            DiagnosticsFinish,
        }

        let language_server = language
            .start_server(worktree_path, cx)
            .log_err()
            .flatten()?;
        let disk_based_sources = language
            .disk_based_diagnostic_sources()
            .cloned()
            .unwrap_or_default();
        let disk_based_diagnostics_progress_token =
            language.disk_based_diagnostics_progress_token().cloned();
        let has_disk_based_diagnostic_progress_token =
            disk_based_diagnostics_progress_token.is_some();
        let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();

        // Listen for `PublishDiagnostics` notifications.
        language_server
            .on_notification::<lsp::notification::PublishDiagnostics, _>({
                let diagnostics_tx = diagnostics_tx.clone();
                move |params| {
                    // Without a progress token there is no other signal for
                    // when disk-based diagnostics start/finish, so bracket
                    // each update with synthetic start/finish events.
                    if !has_disk_based_diagnostic_progress_token {
                        block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                    }
                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params))).ok();
                    if !has_disk_based_diagnostic_progress_token {
                        block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                    }
                }
            })
            .detach();

        // Listen for `Progress` notifications. Send an event when the language server
        // transitions between running jobs and not running any jobs.
        let mut running_jobs_for_this_server: i32 = 0;
        language_server
            .on_notification::<lsp::notification::Progress, _>(move |params| {
                let token = match params.token {
                    lsp::NumberOrString::Number(_) => None,
                    lsp::NumberOrString::String(token) => Some(token),
                };

                if token == disk_based_diagnostics_progress_token {
                    match params.value {
                        lsp::ProgressParamsValue::WorkDone(progress) => match progress {
                            lsp::WorkDoneProgress::Begin(_) => {
                                // Only the 0 -> 1 transition emits a start event.
                                running_jobs_for_this_server += 1;
                                if running_jobs_for_this_server == 1 {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                                }
                            }
                            lsp::WorkDoneProgress::End(_) => {
                                // Only the 1 -> 0 transition emits a finish event.
                                running_jobs_for_this_server -= 1;
                                if running_jobs_for_this_server == 0 {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                                }
                            }
                            _ => {}
                        },
                    }
                }
            })
            .detach();

        // Process all the LSP events.
        cx.spawn_weak(|this, mut cx| async move {
            while let Ok(message) = diagnostics_rx.recv().await {
                // Stop processing once the project itself has been dropped.
                let this = this.upgrade(&cx)?;
                match message {
                    LspEvent::DiagnosticsStart => {
                        this.update(&mut cx, |this, cx| {
                            this.disk_based_diagnostics_started(cx);
                            if let Some(project_id) = this.remote_id() {
                                rpc.send(proto::DiskBasedDiagnosticsUpdating { project_id })
                                    .log_err();
                            }
                        });
                    }
                    LspEvent::DiagnosticsUpdate(mut params) => {
                        // Let the language normalize server-specific quirks
                        // before the diagnostics are ingested.
                        language.process_diagnostics(&mut params);
                        this.update(&mut cx, |this, cx| {
                            this.update_diagnostics(params, &disk_based_sources, cx)
                                .log_err();
                        });
                    }
                    LspEvent::DiagnosticsFinish => {
                        this.update(&mut cx, |this, cx| {
                            this.disk_based_diagnostics_finished(cx);
                            if let Some(project_id) = this.remote_id() {
                                rpc.send(proto::DiskBasedDiagnosticsUpdated { project_id })
                                    .log_err();
                            }
                        });
                    }
                }
            }
            Some(())
        })
        .detach();

        Some(language_server)
    }
931
932 pub fn update_diagnostics(
933 &mut self,
934 params: lsp::PublishDiagnosticsParams,
935 disk_based_sources: &HashSet<String>,
936 cx: &mut ModelContext<Self>,
937 ) -> Result<()> {
938 let abs_path = params
939 .uri
940 .to_file_path()
941 .map_err(|_| anyhow!("URI is not a file"))?;
942 let mut next_group_id = 0;
943 let mut diagnostics = Vec::default();
944 let mut primary_diagnostic_group_ids = HashMap::default();
945 let mut sources_by_group_id = HashMap::default();
946 let mut supporting_diagnostic_severities = HashMap::default();
947 for diagnostic in ¶ms.diagnostics {
948 let source = diagnostic.source.as_ref();
949 let code = diagnostic.code.as_ref().map(|code| match code {
950 lsp::NumberOrString::Number(code) => code.to_string(),
951 lsp::NumberOrString::String(code) => code.clone(),
952 });
953 let range = range_from_lsp(diagnostic.range);
954 let is_supporting = diagnostic
955 .related_information
956 .as_ref()
957 .map_or(false, |infos| {
958 infos.iter().any(|info| {
959 primary_diagnostic_group_ids.contains_key(&(
960 source,
961 code.clone(),
962 range_from_lsp(info.location.range),
963 ))
964 })
965 });
966
967 if is_supporting {
968 if let Some(severity) = diagnostic.severity {
969 supporting_diagnostic_severities
970 .insert((source, code.clone(), range), severity);
971 }
972 } else {
973 let group_id = post_inc(&mut next_group_id);
974 let is_disk_based =
975 source.map_or(false, |source| disk_based_sources.contains(source));
976
977 sources_by_group_id.insert(group_id, source);
978 primary_diagnostic_group_ids
979 .insert((source, code.clone(), range.clone()), group_id);
980
981 diagnostics.push(DiagnosticEntry {
982 range,
983 diagnostic: Diagnostic {
984 code: code.clone(),
985 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
986 message: diagnostic.message.clone(),
987 group_id,
988 is_primary: true,
989 is_valid: true,
990 is_disk_based,
991 },
992 });
993 if let Some(infos) = &diagnostic.related_information {
994 for info in infos {
995 if info.location.uri == params.uri && !info.message.is_empty() {
996 let range = range_from_lsp(info.location.range);
997 diagnostics.push(DiagnosticEntry {
998 range,
999 diagnostic: Diagnostic {
1000 code: code.clone(),
1001 severity: DiagnosticSeverity::INFORMATION,
1002 message: info.message.clone(),
1003 group_id,
1004 is_primary: false,
1005 is_valid: true,
1006 is_disk_based,
1007 },
1008 });
1009 }
1010 }
1011 }
1012 }
1013 }
1014
1015 for entry in &mut diagnostics {
1016 let diagnostic = &mut entry.diagnostic;
1017 if !diagnostic.is_primary {
1018 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1019 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1020 source,
1021 diagnostic.code.clone(),
1022 entry.range.clone(),
1023 )) {
1024 diagnostic.severity = severity;
1025 }
1026 }
1027 }
1028
1029 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1030 Ok(())
1031 }
1032
    /// Replaces the stored diagnostics for `abs_path`: pushes them into the
    /// matching open buffer (if any), records them on the owning local
    /// worktree, and emits `Event::DiagnosticsUpdated`.
    pub fn update_diagnostic_entries(
        &mut self,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };

        // NOTE(review): buffers are matched by relative path only (the
        // worktree id is not compared) and the loop stops at the first hit —
        // a same-named file in another worktree could be picked up instead.
        // Confirm whether that is intended.
        for buffer in self.open_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| *file.path() == project_path.path)
                {
                    buffer.update(cx, |buffer, cx| {
                        buffer.update_diagnostics(diagnostics.clone(), version, cx)
                    })?;
                    break;
                }
            }
        }
        worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(project_path.path.clone(), diagnostics, cx)
        })?;
        cx.emit(Event::DiagnosticsUpdated(project_path));
        Ok(())
    }
1071
    /// Formats each of the given buffers via its language server, collecting
    /// the per-buffer undo transactions into one `ProjectTransaction`.
    ///
    /// - Buffers backed by local files are formatted by sending
    ///   `textDocument/formatting` to the worktree's language server.
    /// - Buffers without a local path are batched into a single
    ///   `FormatBuffers` RPC to the host (requires a remote project id).
    /// - If any buffer has no file, or a local buffer lacks a language or a
    ///   registered language server, the task resolves to an empty
    ///   transaction.
    ///
    /// When `push_to_history` is false, each format transaction is removed
    /// from its buffer's undo history after being captured.
    pub fn format(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<ProjectTransaction>> {
        // Partition buffers into locally-formattable ones and ones that the
        // remote host must format for us.
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            let worktree;
            if let Some(file) = File::from_dyn(buffer.file()) {
                worktree = file.worktree.clone();
                if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
                    let lang_server;
                    if let Some(lang) = buffer.language() {
                        if let Some(server) = self
                            .language_servers
                            .get(&(worktree.read(cx).id(), lang.name().to_string()))
                        {
                            lang_server = server.clone();
                        } else {
                            return Task::ready(Ok(Default::default()));
                        };
                    } else {
                        return Task::ready(Ok(Default::default()));
                    }

                    local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
                } else {
                    remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                }
            } else {
                return Task::ready(Ok(Default::default()));
            }
        }

        // Remote formatting is only possible when this project has a remote id.
        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            // First ask the host to format all remote buffers in one request,
            // then replay the returned transaction locally.
            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::FormatBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            // Then format each local buffer sequentially with its own server.
            for (buffer, buffer_abs_path, lang_server) in local_buffers {
                let lsp_edits = lang_server
                    .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
                        text_document: lsp::TextDocumentIdentifier::new(
                            lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                        ),
                        options: Default::default(),
                        work_done_progress_params: Default::default(),
                    })
                    .await?;

                if let Some(lsp_edits) = lsp_edits {
                    let edits = buffer
                        .update(&mut cx, |buffer, cx| {
                            buffer.edits_from_lsp(lsp_edits, None, cx)
                        })
                        .await?;
                    // Group all edits into one transaction so the whole format
                    // can be undone atomically.
                    buffer.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            project_transaction.0.insert(cx.handle(), transaction);
                        }
                    });
                }
            }

            Ok(project_transaction)
        })
    }
1171
    /// Resolves the definition(s) of the symbol at `position` in the given
    /// buffer.
    ///
    /// For buffers in a local worktree, sends `textDocument/definition` to
    /// the buffer's language server, opens each target file as a buffer, and
    /// anchors the target ranges. For remote projects, forwards a
    /// `GetDefinition` request to the host. Resolves to an empty list when
    /// the buffer has no file or no language server is available.
    pub fn definition<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Definition>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);

        if worktree.read(cx).as_local().is_some() {
            // Local worktree files always have an absolute path.
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_name;
            let lang_server;
            if let Some(lang) = source_buffer.language() {
                lang_name = lang.name().to_string();
                if let Some(server) = self
                    .language_servers
                    .get(&(worktree.read(cx).id(), lang_name.clone()))
                {
                    lang_server = server.clone();
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            } else {
                return Task::ready(Ok(Default::default()));
            }

            cx.spawn(|this, mut cx| async move {
                let response = lang_server
                    .request::<lsp::request::GotoDefinition>(lsp::GotoDefinitionParams {
                        text_document_position_params: lsp::TextDocumentPositionParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                            ),
                            position: lsp::Position::new(position.row, position.column),
                        },
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await?;

                let mut definitions = Vec::new();
                if let Some(response) = response {
                    // Normalize the three possible response shapes into a flat
                    // list of (uri, range) pairs.
                    let mut unresolved_locations = Vec::new();
                    match response {
                        lsp::GotoDefinitionResponse::Scalar(loc) => {
                            unresolved_locations.push((loc.uri, loc.range));
                        }
                        lsp::GotoDefinitionResponse::Array(locs) => {
                            unresolved_locations.extend(locs.into_iter().map(|l| (l.uri, l.range)));
                        }
                        lsp::GotoDefinitionResponse::Link(links) => {
                            unresolved_locations.extend(
                                links
                                    .into_iter()
                                    .map(|l| (l.target_uri, l.target_selection_range)),
                            );
                        }
                    }

                    // Open each target file and convert its LSP range into
                    // buffer anchors.
                    for (target_uri, target_range) in unresolved_locations {
                        let target_buffer_handle = this
                            .update(&mut cx, |this, cx| {
                                this.open_local_buffer_from_lsp_path(
                                    target_uri,
                                    lang_name.clone(),
                                    lang_server.clone(),
                                    cx,
                                )
                            })
                            .await?;

                        cx.read(|cx| {
                            let target_buffer = target_buffer_handle.read(cx);
                            // Clip in case the server's range is stale relative
                            // to the buffer contents.
                            let target_start = target_buffer
                                .clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
                            let target_end = target_buffer
                                .clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
                            definitions.push(Definition {
                                target_buffer: target_buffer_handle,
                                target_range: target_buffer.anchor_after(target_start)
                                    ..target_buffer.anchor_before(target_end),
                            });
                        });
                    }
                }

                Ok(definitions)
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: ask the host to resolve the definition.
            let client = self.client.clone();
            let request = proto::GetDefinition {
                project_id,
                buffer_id: source_buffer.remote_id(),
                position: Some(serialize_anchor(&source_buffer.anchor_before(position))),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client.request(request).await?;
                let mut definitions = Vec::new();
                for definition in response.definitions {
                    let buffer = definition.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let target_start = definition
                        .target_start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let target_end = definition
                        .target_end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    definitions.push(Definition {
                        target_buffer,
                        target_range: target_start..target_end,
                    })
                }

                Ok(definitions)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
1306
    /// Requests code completions at `position` in the given buffer.
    ///
    /// Local buffers issue a `textDocument/completion` request to the
    /// buffer's language server; completions whose edit range doesn't lie on
    /// valid buffer positions are discarded. Remote projects forward a
    /// `GetCompletions` RPC to the host and fail if the response depends on
    /// edits this replica hasn't received yet. Resolves to an empty list when
    /// the buffer has no file or no language server.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
                server
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            position.to_lsp_position(),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                // Servers may answer with either a flat array or a
                // `CompletionList` wrapper.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            // Only plain text edits are supported;
                            // insert/replace edits are skipped.
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
                                lsp::CompletionTextEdit::Edit(edit) => {
                                    (range_from_lsp(edit.range), edit.new_text.clone())
                                }
                                lsp::CompletionTextEdit::InsertAndReplace(_) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            // Drop completions whose edit range would be
                            // clipped by the current buffer contents.
                            let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                            let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
                            if clipped_start == old_range.start && clipped_end == old_range.end {
                                Some(Completion {
                                    old_range: this.anchor_before(old_range.start)
                                        ..this.anchor_after(old_range.end),
                                    new_text,
                                    label: language
                                        .as_ref()
                                        .and_then(|l| l.label_for_completion(&lsp_completion))
                                        .unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
                                    lsp_completion,
                                })
                            } else {
                                None
                            }
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: (&source_buffer.version()).into(),
            };
            cx.spawn_weak(|_, cx| async move {
                let response = rpc.request(message).await?;

                // The host's completions are anchored to its buffer state;
                // bail out if we haven't caught up to that version yet.
                if !source_buffer_handle
                    .read_with(&cx, |buffer, _| buffer.version())
                    .observed_all(&response.version.into())
                {
                    Err(anyhow!("completion response depends on unreceived edits"))?;
                }

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
1426
    /// Applies a completion's additional text edits (e.g. auto-inserted
    /// imports) after the completion itself has been confirmed.
    ///
    /// Locally, the completion is resolved via `completionItem/resolve` and
    /// any `additional_text_edits` are applied as a single transaction.
    /// Remotely, the request is forwarded to the host and the returned
    /// transaction is awaited locally. Returns the applied transaction, or
    /// `None` if there were no additional edits.
    ///
    /// When `push_to_history` is false the transaction is removed from (or
    /// never pushed onto) the buffer's undo history.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let lang_server = if let Some(language_server) = buffer.language_server() {
                language_server.clone()
            } else {
                return Task::ready(Err(anyhow!("buffer does not have a language server")));
            };

            cx.spawn(|_, mut cx| async move {
                // Resolve the item first: servers often omit the additional
                // edits until asked.
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = buffer_handle
                        .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Wait until the host's edits have replicated to this
                    // buffer before reporting the transaction.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
1505
    /// Requests the code actions available for `range` in the given buffer.
    ///
    /// Local buffers send `textDocument/codeAction` to the worktree's
    /// language server, requesting only quickfix and refactor kinds; bare
    /// `Command` results are dropped. Remote projects forward a
    /// `GetCodeActions` RPC to the host. Resolves to an empty list when the
    /// buffer has no file, language, or language server.
    pub fn code_actions<T: ToOffset>(
        &self,
        buffer_handle: &ModelHandle<Buffer>,
        range: Range<T>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<CodeAction>>> {
        let buffer_handle = buffer_handle.clone();
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Ok(Default::default()));
        };
        // Anchor the range so it survives concurrent edits.
        let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_name;
            let lang_server;
            if let Some(lang) = buffer.language() {
                lang_name = lang.name().to_string();
                if let Some(server) = self
                    .language_servers
                    .get(&(worktree.read(cx).id(), lang_name.clone()))
                {
                    lang_server = server.clone();
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            } else {
                return Task::ready(Ok(Default::default()));
            }

            let lsp_range = lsp::Range::new(
                range.start.to_point_utf16(buffer).to_lsp_position(),
                range.end.to_point_utf16(buffer).to_lsp_position(),
            );
            cx.foreground().spawn(async move {
                Ok(lang_server
                    .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
                        text_document: lsp::TextDocumentIdentifier::new(
                            lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                        ),
                        range: lsp_range,
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                        context: lsp::CodeActionContext {
                            diagnostics: Default::default(),
                            only: Some(vec![
                                lsp::CodeActionKind::QUICKFIX,
                                lsp::CodeActionKind::REFACTOR,
                                lsp::CodeActionKind::REFACTOR_EXTRACT,
                            ]),
                        },
                    })
                    .await?
                    .unwrap_or_default()
                    .into_iter()
                    .filter_map(|entry| {
                        // Only full code actions are supported; plain commands
                        // are filtered out.
                        if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
                            Some(CodeAction {
                                range: range.clone(),
                                lsp_action,
                            })
                        } else {
                            None
                        }
                    })
                    .collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            cx.foreground().spawn(async move {
                // Presumably keeps the buffer alive for the duration of the
                // request — TODO confirm this clone is still needed.
                let _buffer = buffer_handle.clone();
                let response = rpc
                    .request(proto::GetCodeActions {
                        project_id,
                        buffer_id,
                        start: Some(language::proto::serialize_anchor(&range.start)),
                        end: Some(language::proto::serialize_anchor(&range.end)),
                    })
                    .await?;
                response
                    .actions
                    .into_iter()
                    .map(language::proto::deserialize_code_action)
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
1602
    /// Applies a previously-fetched code action, returning the resulting
    /// transactions as a `ProjectTransaction`.
    ///
    /// Locally this (1) resolves the action — either via
    /// `codeAction/resolve` after patching the stored range into the action's
    /// `data`, or by re-querying code actions and matching on title — then
    /// (2) executes the action's workspace edit: file create/rename/delete
    /// operations via `fs`, and text edits by opening each target buffer and
    /// applying the edits as one transaction per buffer. Remotely the action
    /// is serialized and forwarded to the host.
    ///
    /// When `push_to_history` is false, each captured transaction is removed
    /// from its buffer's undo history.
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            let lang_name = if let Some(lang) = buffer.language() {
                lang.name().to_string()
            } else {
                return Task::ready(Ok(Default::default()));
            };
            let lang_server = if let Some(language_server) = buffer.language_server() {
                language_server.clone()
            } else {
                return Task::ready(Err(anyhow!("buffer does not have a language server")));
            };
            // Resolve the anchored range against the current buffer state.
            let range = action.range.to_point_utf16(buffer);
            let fs = self.fs.clone();

            cx.spawn(|this, mut cx| async move {
                // If the server attached resolve `data`, refresh the stored
                // range inside it and ask the server to resolve the action.
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    *lsp_range = serde_json::to_value(&lsp::Range::new(
                        range.start.to_lsp_position(),
                        range.end.to_lsp_position(),
                    ))
                    .unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    // No resolve data: re-fetch the actions for this range and
                    // pick the one with the same title.
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                // Normalize the workspace edit into a flat list of document
                // change operations (`document_changes` takes precedence over
                // the plain `changes` map).
                let mut operations = Vec::new();
                if let Some(edit) = action.lsp_action.edit {
                    if let Some(document_changes) = edit.document_changes {
                        match document_changes {
                            lsp::DocumentChanges::Edits(edits) => operations
                                .extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit)),
                            lsp::DocumentChanges::Operations(ops) => operations = ops,
                        }
                    } else if let Some(changes) = edit.changes {
                        operations.extend(changes.into_iter().map(|(uri, edits)| {
                            lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
                                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                                    uri,
                                    version: None,
                                },
                                edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
                            })
                        }));
                    }
                }

                let mut project_transaction = ProjectTransaction::default();
                for operation in operations {
                    match operation {
                        lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
                            let abs_path = op
                                .uri
                                .to_file_path()
                                .map_err(|_| anyhow!("can't convert URI to path"))?;

                            if let Some(parent_path) = abs_path.parent() {
                                fs.create_dir(parent_path).await?;
                            }
                            // NOTE(review): `Path::ends_with` matches whole
                            // components, not string suffixes, so this is only
                            // true for the root path — probably meant to test
                            // the URI for a trailing '/'. Verify.
                            if abs_path.ends_with("/") {
                                fs.create_dir(&abs_path).await?;
                            } else {
                                fs.create_file(
                                    &abs_path,
                                    op.options.map(Into::into).unwrap_or_default(),
                                )
                                .await?;
                            }
                        }
                        lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
                            let source_abs_path = op
                                .old_uri
                                .to_file_path()
                                .map_err(|_| anyhow!("can't convert URI to path"))?;
                            let target_abs_path = op
                                .new_uri
                                .to_file_path()
                                .map_err(|_| anyhow!("can't convert URI to path"))?;
                            fs.rename(
                                &source_abs_path,
                                &target_abs_path,
                                op.options.map(Into::into).unwrap_or_default(),
                            )
                            .await?;
                        }
                        lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
                            let abs_path = op
                                .uri
                                .to_file_path()
                                .map_err(|_| anyhow!("can't convert URI to path"))?;
                            let options = op.options.map(Into::into).unwrap_or_default();
                            // NOTE(review): same component-matching caveat as
                            // in the Create branch above.
                            if abs_path.ends_with("/") {
                                fs.remove_dir(&abs_path, options).await?;
                            } else {
                                fs.remove_file(&abs_path, options).await?;
                            }
                        }
                        lsp::DocumentChangeOperation::Edit(op) => {
                            let buffer_to_edit = this
                                .update(&mut cx, |this, cx| {
                                    this.open_local_buffer_from_lsp_path(
                                        op.text_document.uri,
                                        lang_name.clone(),
                                        lang_server.clone(),
                                        cx,
                                    )
                                })
                                .await?;

                            let edits = buffer_to_edit
                                .update(&mut cx, |buffer, cx| {
                                    let edits = op.edits.into_iter().map(|edit| match edit {
                                        lsp::OneOf::Left(edit) => edit,
                                        lsp::OneOf::Right(edit) => edit.text_edit,
                                    });
                                    buffer.edits_from_lsp(edits, op.text_document.version, cx)
                                })
                                .await?;

                            // Apply all edits for this buffer as a single
                            // undoable transaction.
                            let transaction = buffer_to_edit.update(&mut cx, |buffer, cx| {
                                buffer.finalize_last_transaction();
                                buffer.start_transaction();
                                for (range, text) in edits {
                                    buffer.edit([range], text, cx);
                                }
                                let transaction = if buffer.end_transaction(cx).is_some() {
                                    let transaction =
                                        buffer.finalize_last_transaction().unwrap().clone();
                                    if !push_to_history {
                                        buffer.forget_transaction(transaction.id);
                                    }
                                    Some(transaction)
                                } else {
                                    None
                                };

                                transaction
                            });
                            if let Some(transaction) = transaction {
                                project_transaction.0.insert(buffer_to_edit, transaction);
                            }
                        }
                    }
                }

                Ok(project_transaction)
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
1797
1798 pub fn find_or_create_local_worktree(
1799 &self,
1800 abs_path: impl AsRef<Path>,
1801 weak: bool,
1802 cx: &mut ModelContext<Self>,
1803 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1804 let abs_path = abs_path.as_ref();
1805 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1806 Task::ready(Ok((tree.clone(), relative_path.into())))
1807 } else {
1808 let worktree = self.create_local_worktree(abs_path, weak, cx);
1809 cx.foreground()
1810 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1811 }
1812 }
1813
1814 fn find_local_worktree(
1815 &self,
1816 abs_path: &Path,
1817 cx: &AppContext,
1818 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1819 for tree in self.worktrees(cx) {
1820 if let Some(relative_path) = tree
1821 .read(cx)
1822 .as_local()
1823 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1824 {
1825 return Some((tree.clone(), relative_path.into()));
1826 }
1827 }
1828 None
1829 }
1830
1831 pub fn is_shared(&self) -> bool {
1832 match &self.client_state {
1833 ProjectClientState::Local { is_shared, .. } => *is_shared,
1834 ProjectClientState::Remote { .. } => false,
1835 }
1836 }
1837
    /// Creates a new local worktree rooted at `abs_path` and adds it to this
    /// project.
    ///
    /// If the project has a remote id, the worktree is also registered with
    /// the server, and additionally shared when the project is currently
    /// shared, so existing collaborators can see it.
    fn create_local_worktree(
        &self,
        abs_path: impl AsRef<Path>,
        weak: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let path = Arc::from(abs_path.as_ref());
        cx.spawn(|project, mut cx| async move {
            let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;

            // Add the worktree to the project before any server round-trips so
            // local consumers see it immediately.
            let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
                project.add_worktree(&worktree, cx);
                (project.remote_id(), project.is_shared())
            });

            if let Some(project_id) = remote_project_id {
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_local_mut().unwrap().register(project_id, cx)
                    })
                    .await?;
                if is_shared {
                    worktree
                        .update(&mut cx, |worktree, cx| {
                            worktree.as_local_mut().unwrap().share(project_id, cx)
                        })
                        .await?;
                }
            }

            Ok(worktree)
        })
    }
1873
1874 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1875 self.worktrees.retain(|worktree| {
1876 worktree
1877 .upgrade(cx)
1878 .map_or(false, |w| w.read(cx).id() != id)
1879 });
1880 cx.notify();
1881 }
1882
    /// Wires a newly created or received worktree into the project:
    /// observation for re-rendering, buffer/file reconciliation for local
    /// trees, and storage in `self.worktrees`.
    ///
    /// Weak local worktrees are stored as weak handles so they can be dropped
    /// once nothing else references them; all others are held strongly for
    /// the project's lifetime.
    fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
        cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
        if worktree.read(cx).is_local() {
            // Keep open buffers' file metadata in sync as the tree changes on
            // disk.
            cx.subscribe(&worktree, |this, worktree, _, cx| {
                this.update_local_worktree_buffers(worktree, cx);
            })
            .detach();
        }

        let push_weak_handle = {
            let worktree = worktree.read(cx);
            worktree.is_local() && worktree.is_weak()
        };
        if push_weak_handle {
            // Prune dead weak handles whenever this worktree is released.
            cx.observe_release(&worktree, |this, cx| {
                this.worktrees
                    .retain(|worktree| worktree.upgrade(cx).is_some());
                cx.notify();
            })
            .detach();
            self.worktrees
                .push(WorktreeHandle::Weak(worktree.downgrade()));
        } else {
            self.worktrees
                .push(WorktreeHandle::Strong(worktree.clone()));
        }
        cx.notify();
    }
1911
    /// Reconciles every open buffer's `File` with the latest snapshot of a
    /// changed local worktree, and prunes entries for dropped buffers.
    ///
    /// For each buffer belonging to this worktree, a fresh `File` is built by
    /// looking the entry up by id first, then by path; if neither is found
    /// the file is treated as deleted (no entry id, previous mtime). When the
    /// project has a remote id, the new file metadata is also broadcast to
    /// collaborators.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        let mut buffers_to_delete = Vec::new();
        for (buffer_id, buffer) in &self.open_buffers {
            if let Some(buffer) = buffer.upgrade(cx) {
                buffer.update(cx, |buffer, cx| {
                    if let Some(old_file) = File::from_dyn(buffer.file()) {
                        // Skip buffers that belong to other worktrees.
                        if old_file.worktree != worktree_handle {
                            return;
                        }

                        // Prefer the stable entry id; fall back to the path in
                        // case the entry was replaced.
                        let new_file = if let Some(entry) = old_file
                            .entry_id
                            .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else if let Some(entry) =
                            snapshot.entry_for_path(old_file.path().as_ref())
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else {
                            // The entry is gone: keep the old path/mtime but
                            // drop the entry id to mark the file as deleted.
                            File {
                                is_local: true,
                                entry_id: None,
                                path: old_file.path().clone(),
                                mtime: old_file.mtime(),
                                worktree: worktree_handle.clone(),
                            }
                        };

                        if let Some(project_id) = self.remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: *buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }
                        buffer.file_updated(Box::new(new_file), cx).detach();
                    }
                });
            } else {
                // The buffer has been dropped; remove its entry after the loop
                // to avoid mutating the map while iterating.
                buffers_to_delete.push(*buffer_id);
            }
        }

        for buffer_id in buffers_to_delete {
            self.open_buffers.remove(&buffer_id);
        }
    }
1979
1980 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
1981 let new_active_entry = entry.and_then(|project_path| {
1982 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1983 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
1984 Some(ProjectEntry {
1985 worktree_id: project_path.worktree_id,
1986 entry_id: entry.id,
1987 })
1988 });
1989 if new_active_entry != self.active_entry {
1990 self.active_entry = new_active_entry;
1991 cx.emit(Event::ActiveEntryChanged(new_active_entry));
1992 }
1993 }
1994
    /// Whether any language server is currently running a disk-based
    /// diagnostics pass (tracked via `disk_based_diagnostics_started` /
    /// `disk_based_diagnostics_finished`).
    pub fn is_running_disk_based_diagnostics(&self) -> bool {
        self.language_servers_with_diagnostics_running > 0
    }
1998
1999 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2000 let mut summary = DiagnosticSummary::default();
2001 for (_, path_summary) in self.diagnostic_summaries(cx) {
2002 summary.error_count += path_summary.error_count;
2003 summary.warning_count += path_summary.warning_count;
2004 summary.info_count += path_summary.info_count;
2005 summary.hint_count += path_summary.hint_count;
2006 }
2007 summary
2008 }
2009
2010 pub fn diagnostic_summaries<'a>(
2011 &'a self,
2012 cx: &'a AppContext,
2013 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2014 self.worktrees(cx).flat_map(move |worktree| {
2015 let worktree = worktree.read(cx);
2016 let worktree_id = worktree.id();
2017 worktree
2018 .diagnostic_summaries()
2019 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2020 })
2021 }
2022
2023 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2024 self.language_servers_with_diagnostics_running += 1;
2025 if self.language_servers_with_diagnostics_running == 1 {
2026 cx.emit(Event::DiskBasedDiagnosticsStarted);
2027 }
2028 }
2029
2030 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2031 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2032 self.language_servers_with_diagnostics_running -= 1;
2033 if self.language_servers_with_diagnostics_running == 0 {
2034 cx.emit(Event::DiskBasedDiagnosticsFinished);
2035 }
2036 }
2037
    /// The project entry most recently activated via `set_active_path`, if
    /// any.
    pub fn active_entry(&self) -> Option<ProjectEntry> {
        self.active_entry
    }
2041
2042 // RPC message handlers
2043
2044 async fn handle_unshare_project(
2045 this: ModelHandle<Self>,
2046 _: TypedEnvelope<proto::UnshareProject>,
2047 _: Arc<Client>,
2048 mut cx: AsyncAppContext,
2049 ) -> Result<()> {
2050 this.update(&mut cx, |this, cx| {
2051 if let ProjectClientState::Remote {
2052 sharing_has_stopped,
2053 ..
2054 } = &mut this.client_state
2055 {
2056 *sharing_has_stopped = true;
2057 this.collaborators.clear();
2058 cx.notify();
2059 } else {
2060 unreachable!()
2061 }
2062 });
2063
2064 Ok(())
2065 }
2066
2067 async fn handle_add_collaborator(
2068 this: ModelHandle<Self>,
2069 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2070 _: Arc<Client>,
2071 mut cx: AsyncAppContext,
2072 ) -> Result<()> {
2073 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2074 let collaborator = envelope
2075 .payload
2076 .collaborator
2077 .take()
2078 .ok_or_else(|| anyhow!("empty collaborator"))?;
2079
2080 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2081 this.update(&mut cx, |this, cx| {
2082 this.collaborators
2083 .insert(collaborator.peer_id, collaborator);
2084 cx.notify();
2085 });
2086
2087 Ok(())
2088 }
2089
2090 async fn handle_remove_collaborator(
2091 this: ModelHandle<Self>,
2092 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2093 _: Arc<Client>,
2094 mut cx: AsyncAppContext,
2095 ) -> Result<()> {
2096 this.update(&mut cx, |this, cx| {
2097 let peer_id = PeerId(envelope.payload.peer_id);
2098 let replica_id = this
2099 .collaborators
2100 .remove(&peer_id)
2101 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2102 .replica_id;
2103 this.shared_buffers.remove(&peer_id);
2104 for (_, buffer) in &this.open_buffers {
2105 if let Some(buffer) = buffer.upgrade(cx) {
2106 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2107 }
2108 }
2109 cx.notify();
2110 Ok(())
2111 })
2112 }
2113
2114 async fn handle_share_worktree(
2115 this: ModelHandle<Self>,
2116 envelope: TypedEnvelope<proto::ShareWorktree>,
2117 client: Arc<Client>,
2118 mut cx: AsyncAppContext,
2119 ) -> Result<()> {
2120 this.update(&mut cx, |this, cx| {
2121 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2122 let replica_id = this.replica_id();
2123 let worktree = envelope
2124 .payload
2125 .worktree
2126 .ok_or_else(|| anyhow!("invalid worktree"))?;
2127 let (worktree, load_task) =
2128 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2129 this.add_worktree(&worktree, cx);
2130 load_task.detach();
2131 Ok(())
2132 })
2133 }
2134
2135 async fn handle_unregister_worktree(
2136 this: ModelHandle<Self>,
2137 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2138 _: Arc<Client>,
2139 mut cx: AsyncAppContext,
2140 ) -> Result<()> {
2141 this.update(&mut cx, |this, cx| {
2142 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2143 this.remove_worktree(worktree_id, cx);
2144 Ok(())
2145 })
2146 }
2147
    /// Forwards an incoming `UpdateWorktree` message to the matching remote
    /// worktree, if it is still open.
    async fn handle_update_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateWorktree>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            // Silently ignore updates for worktrees that have been removed.
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                worktree.update(cx, |worktree, _| {
                    // Only remote worktrees receive this message; a local
                    // worktree here would indicate a protocol violation.
                    let worktree = worktree.as_remote_mut().unwrap();
                    worktree.update_from_remote(envelope)
                })?;
            }
            Ok(())
        })
    }
2165
    /// Records an updated diagnostic summary for a path in a remote worktree
    /// and emits a `DiagnosticsUpdated` event for it.
    async fn handle_update_diagnostic_summary(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            // Ignore summaries for worktrees that no longer exist.
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                if let Some(summary) = envelope.payload.summary {
                    let project_path = ProjectPath {
                        worktree_id,
                        path: Path::new(&summary.path).into(),
                    };
                    worktree.update(cx, |worktree, _| {
                        worktree
                            .as_remote_mut()
                            .unwrap()
                            .update_diagnostic_summary(project_path.path.clone(), &summary);
                    });
                    // Notify subscribers after the worktree state is updated.
                    cx.emit(Event::DiagnosticsUpdated(project_path));
                }
            }
            Ok(())
        })
    }
2192
2193 async fn handle_disk_based_diagnostics_updating(
2194 this: ModelHandle<Self>,
2195 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2196 _: Arc<Client>,
2197 mut cx: AsyncAppContext,
2198 ) -> Result<()> {
2199 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2200 Ok(())
2201 }
2202
2203 async fn handle_disk_based_diagnostics_updated(
2204 this: ModelHandle<Self>,
2205 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2206 _: Arc<Client>,
2207 mut cx: AsyncAppContext,
2208 ) -> Result<()> {
2209 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2210 Ok(())
2211 }
2212
2213 async fn handle_update_buffer(
2214 this: ModelHandle<Self>,
2215 envelope: TypedEnvelope<proto::UpdateBuffer>,
2216 _: Arc<Client>,
2217 mut cx: AsyncAppContext,
2218 ) -> Result<()> {
2219 this.update(&mut cx, |this, cx| {
2220 let payload = envelope.payload.clone();
2221 let buffer_id = payload.buffer_id;
2222 let ops = payload
2223 .operations
2224 .into_iter()
2225 .map(|op| language::proto::deserialize_operation(op))
2226 .collect::<Result<Vec<_>, _>>()?;
2227 let is_remote = this.is_remote();
2228 match this.open_buffers.entry(buffer_id) {
2229 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2230 OpenBuffer::Loaded(buffer) => {
2231 if let Some(buffer) = buffer.upgrade(cx) {
2232 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2233 }
2234 }
2235 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2236 },
2237 hash_map::Entry::Vacant(e) => {
2238 if is_remote && this.loading_buffers.len() > 0 {
2239 e.insert(OpenBuffer::Loading(ops));
2240 }
2241 }
2242 }
2243 Ok(())
2244 })
2245 }
2246
2247 async fn handle_update_buffer_file(
2248 this: ModelHandle<Self>,
2249 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2250 _: Arc<Client>,
2251 mut cx: AsyncAppContext,
2252 ) -> Result<()> {
2253 this.update(&mut cx, |this, cx| {
2254 let payload = envelope.payload.clone();
2255 let buffer_id = payload.buffer_id;
2256 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2257 let worktree = this
2258 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2259 .ok_or_else(|| anyhow!("no such worktree"))?;
2260 let file = File::from_proto(file, worktree.clone(), cx)?;
2261 let buffer = this
2262 .open_buffers
2263 .get_mut(&buffer_id)
2264 .and_then(|b| b.upgrade(cx))
2265 .ok_or_else(|| anyhow!("no such buffer"))?;
2266 buffer.update(cx, |buffer, cx| {
2267 buffer.file_updated(Box::new(file), cx).detach();
2268 });
2269 Ok(())
2270 })
2271 }
2272
2273 async fn handle_save_buffer(
2274 this: ModelHandle<Self>,
2275 envelope: TypedEnvelope<proto::SaveBuffer>,
2276 _: Arc<Client>,
2277 mut cx: AsyncAppContext,
2278 ) -> Result<proto::BufferSaved> {
2279 let buffer_id = envelope.payload.buffer_id;
2280 let sender_id = envelope.original_sender_id()?;
2281 let requested_version = envelope.payload.version.try_into()?;
2282
2283 let (project_id, buffer) = this.update(&mut cx, |this, _| {
2284 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2285 let buffer = this
2286 .shared_buffers
2287 .get(&sender_id)
2288 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2289 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2290 Ok::<_, anyhow::Error>((project_id, buffer))
2291 })?;
2292
2293 if !buffer
2294 .read_with(&cx, |buffer, _| buffer.version())
2295 .observed_all(&requested_version)
2296 {
2297 Err(anyhow!("save request depends on unreceived edits"))?;
2298 }
2299
2300 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2301 Ok(proto::BufferSaved {
2302 project_id,
2303 buffer_id,
2304 version: (&saved_version).into(),
2305 mtime: Some(mtime.into()),
2306 })
2307 }
2308
2309 async fn handle_format_buffers(
2310 this: ModelHandle<Self>,
2311 envelope: TypedEnvelope<proto::FormatBuffers>,
2312 _: Arc<Client>,
2313 mut cx: AsyncAppContext,
2314 ) -> Result<proto::FormatBuffersResponse> {
2315 let sender_id = envelope.original_sender_id()?;
2316 let format = this.update(&mut cx, |this, cx| {
2317 let shared_buffers = this
2318 .shared_buffers
2319 .get(&sender_id)
2320 .ok_or_else(|| anyhow!("peer has no buffers"))?;
2321 let mut buffers = HashSet::default();
2322 for buffer_id in &envelope.payload.buffer_ids {
2323 buffers.insert(
2324 shared_buffers
2325 .get(buffer_id)
2326 .cloned()
2327 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2328 );
2329 }
2330 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2331 })?;
2332
2333 let project_transaction = format.await?;
2334 let project_transaction = this.update(&mut cx, |this, cx| {
2335 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2336 });
2337 Ok(proto::FormatBuffersResponse {
2338 transaction: Some(project_transaction),
2339 })
2340 }
2341
    /// Computes completions at a position in a shared buffer on behalf of a
    /// guest.
    async fn handle_get_completions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCompletions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCompletionsResponse> {
        let sender_id = envelope.original_sender_id()?;
        let position = envelope
            .payload
            .position
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        let version = clock::Global::from(envelope.payload.version);
        let buffer = this.read_with(&cx, |this, _| {
            this.shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        // The request is only valid once every edit the requester had
        // observed has arrived here.
        if !buffer
            .read_with(&cx, |buffer, _| buffer.version())
            .observed_all(&version)
        {
            Err(anyhow!("completion request depends on unreceived edits"))?;
        }
        // Report back the buffer version as of this request, so the guest
        // can interpret the returned completions against it.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let completions = this
            .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
            .await?;

        Ok(proto::GetCompletionsResponse {
            completions: completions
                .iter()
                .map(language::proto::serialize_completion)
                .collect(),
            version: (&version).into(),
        })
    }
2380
    /// Applies the additional edits associated with a completion to a shared
    /// buffer on behalf of a guest, replying with the resulting transaction.
    async fn handle_apply_additional_edits_for_completion(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
        let sender_id = envelope.original_sender_id()?;
        let apply_additional_edits = this.update(&mut cx, |this, cx| {
            let buffer = this
                .shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // Deserialize the completion in the context of the buffer's
            // language.
            let language = buffer.read(cx).language();
            let completion = language::proto::deserialize_completion(
                envelope
                    .payload
                    .completion
                    .ok_or_else(|| anyhow!("invalid completion"))?,
                language,
            )?;
            // NOTE(review): the `false` flag presumably skips pushing the
            // transaction to the host's undo history — confirm against the
            // definition of `apply_additional_edits_for_completion`.
            Ok::<_, anyhow::Error>(
                this.apply_additional_edits_for_completion(buffer, completion, false, cx),
            )
        })?;

        Ok(proto::ApplyCompletionAdditionalEditsResponse {
            transaction: apply_additional_edits
                .await?
                .as_ref()
                .map(language::proto::serialize_transaction),
        })
    }
2414
    /// Computes the available code actions for a range in a shared buffer on
    /// behalf of a guest.
    async fn handle_get_code_actions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCodeActions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCodeActionsResponse> {
        let sender_id = envelope.original_sender_id()?;
        let start = envelope
            .payload
            .start
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = envelope
            .payload
            .end
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid end"))?;
        let buffer = this.update(&mut cx, |this, _| {
            this.shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        // Unlike the completion handler, only the two anchors' own edit
        // timestamps need to have been observed, not a full version vector.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        if !version.observed(start.timestamp) || !version.observed(end.timestamp) {
            Err(anyhow!("code action request references unreceived edits"))?;
        }
        let code_actions = this.update(&mut cx, |this, cx| {
            Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
        })?;

        Ok(proto::GetCodeActionsResponse {
            actions: code_actions
                .await?
                .iter()
                .map(language::proto::serialize_code_action)
                .collect(),
        })
    }
2454
    /// Applies a code action to a shared buffer on behalf of a guest and
    /// responds with the resulting project transaction.
    async fn handle_apply_code_action(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCodeAction>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCodeActionResponse> {
        let sender_id = envelope.original_sender_id()?;
        let action = language::proto::deserialize_code_action(
            envelope
                .payload
                .action
                .ok_or_else(|| anyhow!("invalid action"))?,
        )?;
        let apply_code_action = this.update(&mut cx, |this, cx| {
            let buffer = this
                .shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // NOTE(review): the `false` flag presumably skips pushing the
            // transaction to the host's undo history — confirm against
            // `apply_code_action`'s definition.
            Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
        })?;

        let project_transaction = apply_code_action.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            // Registers any newly touched buffers as shared with the sender.
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ApplyCodeActionResponse {
            transaction: Some(project_transaction),
        })
    }
2485
    /// Resolves go-to-definition for a position in a shared buffer on behalf
    /// of a guest, serializing any target buffers for the requesting peer.
    async fn handle_get_definition(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetDefinition>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetDefinitionResponse> {
        let sender_id = envelope.original_sender_id()?;
        let position = envelope
            .payload
            .position
            .and_then(deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        let definitions = this.update(&mut cx, |this, cx| {
            let source_buffer = this
                .shared_buffers
                .get(&sender_id)
                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // The anchor can only be resolved if its edits have arrived.
            if source_buffer.read(cx).can_resolve(&position) {
                Ok(this.definition(&source_buffer, position, cx))
            } else {
                Err(anyhow!("cannot resolve position"))
            }
        })?;

        let definitions = definitions.await?;

        this.update(&mut cx, |this, cx| {
            let mut response = proto::GetDefinitionResponse {
                definitions: Default::default(),
            };
            for definition in definitions {
                // Each target buffer is registered as shared with the peer
                // (full state the first time, just the id afterwards).
                let buffer =
                    this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx);
                response.definitions.push(proto::Definition {
                    target_start: Some(serialize_anchor(&definition.target_range.start)),
                    target_end: Some(serialize_anchor(&definition.target_range.end)),
                    buffer: Some(buffer),
                });
            }
            Ok(response)
        })
    }
2529
2530 async fn handle_open_buffer(
2531 this: ModelHandle<Self>,
2532 envelope: TypedEnvelope<proto::OpenBuffer>,
2533 _: Arc<Client>,
2534 mut cx: AsyncAppContext,
2535 ) -> anyhow::Result<proto::OpenBufferResponse> {
2536 let peer_id = envelope.original_sender_id()?;
2537 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2538 let open_buffer = this.update(&mut cx, |this, cx| {
2539 this.open_buffer(
2540 ProjectPath {
2541 worktree_id,
2542 path: PathBuf::from(envelope.payload.path).into(),
2543 },
2544 cx,
2545 )
2546 });
2547
2548 let buffer = open_buffer.await?;
2549 this.update(&mut cx, |this, cx| {
2550 Ok(proto::OpenBufferResponse {
2551 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
2552 })
2553 })
2554 }
2555
2556 fn serialize_project_transaction_for_peer(
2557 &mut self,
2558 project_transaction: ProjectTransaction,
2559 peer_id: PeerId,
2560 cx: &AppContext,
2561 ) -> proto::ProjectTransaction {
2562 let mut serialized_transaction = proto::ProjectTransaction {
2563 buffers: Default::default(),
2564 transactions: Default::default(),
2565 };
2566 for (buffer, transaction) in project_transaction.0 {
2567 serialized_transaction
2568 .buffers
2569 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
2570 serialized_transaction
2571 .transactions
2572 .push(language::proto::serialize_transaction(&transaction));
2573 }
2574 serialized_transaction
2575 }
2576
    /// Reconstructs a `ProjectTransaction` from its wire form.
    ///
    /// Returns a task because the referenced buffers may still be arriving;
    /// the task resolves once every buffer exists locally and has observed
    /// all of its transaction's edits.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            // `buffers` and `transactions` are parallel vectors.
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }
            // Wait for each transaction's edits to be applied before handing
            // the transaction back to the caller.
            for (buffer, transaction) in &project_transaction.0 {
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                // Optionally record the transaction so the local user can
                // undo it.
                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
2609
2610 fn serialize_buffer_for_peer(
2611 &mut self,
2612 buffer: &ModelHandle<Buffer>,
2613 peer_id: PeerId,
2614 cx: &AppContext,
2615 ) -> proto::Buffer {
2616 let buffer_id = buffer.read(cx).remote_id();
2617 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2618 match shared_buffers.entry(buffer_id) {
2619 hash_map::Entry::Occupied(_) => proto::Buffer {
2620 variant: Some(proto::buffer::Variant::Id(buffer_id)),
2621 },
2622 hash_map::Entry::Vacant(entry) => {
2623 entry.insert(buffer.clone());
2624 proto::Buffer {
2625 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
2626 }
2627 }
2628 }
2629 }
2630
    /// Resolves a wire-format buffer into a local buffer model.
    ///
    /// An `Id` variant refers to a buffer we should already have (or will
    /// have shortly); a `State` variant carries the full buffer contents and
    /// creates a new model.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        // The broadcast channel is used both to wake other pending
        // `deserialize_buffer` calls (tx) and to wait for buffers opened by
        // concurrent calls (rx). Subscribe before spawning so no
        // notification is missed.
        let mut opened_buffer_tx = self.opened_buffer.clone();
        let mut opened_buffer_rx = self.opened_buffer.subscribe();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    // Poll for the buffer, parking on the broadcast channel
                    // between attempts until some other task opens it.
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.open_buffers
                                .get(&id)
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        opened_buffer_rx
                            .recv()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    // Resolve the buffer's file (if any) to a worktree that
                    // this project knows about.
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Box<dyn language::File>);
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });
                    this.update(&mut cx, |this, cx| {
                        this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
                    })?;

                    // Wake any tasks waiting on this buffer by id.
                    let _ = opened_buffer_tx.send(()).await;
                    Ok(buffer)
                }
            }
        })
    }
2690
2691 async fn handle_close_buffer(
2692 this: ModelHandle<Self>,
2693 envelope: TypedEnvelope<proto::CloseBuffer>,
2694 _: Arc<Client>,
2695 mut cx: AsyncAppContext,
2696 ) -> anyhow::Result<()> {
2697 this.update(&mut cx, |this, cx| {
2698 if let Some(shared_buffers) =
2699 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
2700 {
2701 shared_buffers.remove(&envelope.payload.buffer_id);
2702 cx.notify();
2703 }
2704 Ok(())
2705 })
2706 }
2707
2708 async fn handle_buffer_saved(
2709 this: ModelHandle<Self>,
2710 envelope: TypedEnvelope<proto::BufferSaved>,
2711 _: Arc<Client>,
2712 mut cx: AsyncAppContext,
2713 ) -> Result<()> {
2714 let version = envelope.payload.version.try_into()?;
2715 let mtime = envelope
2716 .payload
2717 .mtime
2718 .ok_or_else(|| anyhow!("missing mtime"))?
2719 .into();
2720
2721 this.update(&mut cx, |this, cx| {
2722 let buffer = this
2723 .open_buffers
2724 .get(&envelope.payload.buffer_id)
2725 .and_then(|buffer| buffer.upgrade(cx));
2726 if let Some(buffer) = buffer {
2727 buffer.update(cx, |buffer, cx| {
2728 buffer.did_save(version, mtime, None, cx);
2729 });
2730 }
2731 Ok(())
2732 })
2733 }
2734
2735 async fn handle_buffer_reloaded(
2736 this: ModelHandle<Self>,
2737 envelope: TypedEnvelope<proto::BufferReloaded>,
2738 _: Arc<Client>,
2739 mut cx: AsyncAppContext,
2740 ) -> Result<()> {
2741 let payload = envelope.payload.clone();
2742 let version = payload.version.try_into()?;
2743 let mtime = payload
2744 .mtime
2745 .ok_or_else(|| anyhow!("missing mtime"))?
2746 .into();
2747 this.update(&mut cx, |this, cx| {
2748 let buffer = this
2749 .open_buffers
2750 .get(&payload.buffer_id)
2751 .and_then(|buffer| buffer.upgrade(cx));
2752 if let Some(buffer) = buffer {
2753 buffer.update(cx, |buffer, cx| {
2754 buffer.did_reload(version, mtime, cx);
2755 });
2756 }
2757 Ok(())
2758 })
2759 }
2760
    /// Fuzzy-matches `query` against the file paths of all non-weak
    /// worktrees.
    ///
    /// The returned future runs the matching on the background thread pool
    /// and can be interrupted via `cancel_flag`.
    pub fn match_paths<'a>(
        &self,
        query: &'a str,
        include_ignored: bool,
        smart_case: bool,
        max_results: usize,
        cancel_flag: &'a AtomicBool,
        cx: &AppContext,
    ) -> impl 'a + Future<Output = Vec<PathMatch>> {
        let worktrees = self
            .worktrees(cx)
            .filter(|worktree| !worktree.read(cx).is_weak())
            .collect::<Vec<_>>();
        // Prefix candidates with their worktree root name only when the
        // search spans more than one worktree.
        let include_root_name = worktrees.len() > 1;
        // Snapshot each worktree so the matching can run off the main thread
        // without holding any model references.
        let candidate_sets = worktrees
            .into_iter()
            .map(|worktree| CandidateSet {
                snapshot: worktree.read(cx).snapshot(),
                include_ignored,
                include_root_name,
            })
            .collect::<Vec<_>>();

        let background = cx.background().clone();
        async move {
            fuzzy::match_paths(
                candidate_sets.as_slice(),
                query,
                smart_case,
                max_results,
                cancel_flag,
                background,
            )
            .await
        }
    }
2797}
2798
2799impl WorktreeHandle {
2800 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2801 match self {
2802 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2803 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2804 }
2805 }
2806}
2807
2808impl OpenBuffer {
2809 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
2810 match self {
2811 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
2812 OpenBuffer::Loading(_) => None,
2813 }
2814 }
2815}
2816
/// A worktree snapshot plus matching options, used as one candidate source
/// for fuzzy path matching.
struct CandidateSet {
    snapshot: Snapshot,
    // Whether ignored files participate in matching.
    include_ignored: bool,
    // Whether the worktree root name is prepended to candidate paths.
    include_root_name: bool,
}
2822
2823impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
2824 type Candidates = CandidateSetIter<'a>;
2825
2826 fn id(&self) -> usize {
2827 self.snapshot.id().to_usize()
2828 }
2829
2830 fn len(&self) -> usize {
2831 if self.include_ignored {
2832 self.snapshot.file_count()
2833 } else {
2834 self.snapshot.visible_file_count()
2835 }
2836 }
2837
2838 fn prefix(&self) -> Arc<str> {
2839 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
2840 self.snapshot.root_name().into()
2841 } else if self.include_root_name {
2842 format!("{}/", self.snapshot.root_name()).into()
2843 } else {
2844 "".into()
2845 }
2846 }
2847
2848 fn candidates(&'a self, start: usize) -> Self::Candidates {
2849 CandidateSetIter {
2850 traversal: self.snapshot.files(self.include_ignored, start),
2851 }
2852 }
2853}
2854
/// Iterator over a `CandidateSet`'s file entries, yielding one fuzzy-match
/// candidate per file.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
2858
2859impl<'a> Iterator for CandidateSetIter<'a> {
2860 type Item = PathMatchCandidate<'a>;
2861
2862 fn next(&mut self) -> Option<Self::Item> {
2863 self.traversal.next().map(|entry| {
2864 if let EntryKind::File(char_bag) = entry.kind {
2865 PathMatchCandidate {
2866 path: &entry.path,
2867 char_bag,
2868 }
2869 } else {
2870 unreachable!()
2871 }
2872 })
2873 }
2874}
2875
impl Entity for Project {
    type Event = Event;

    /// Notifies the server when this project model is dropped, so the
    /// project can be unregistered (if hosted locally) or left (if joined
    /// remotely).
    fn release(&mut self, _: &mut gpui::MutableAppContext) {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => {
                // Only unregister if the project was actually given a remote
                // id (i.e. it was shared).
                if let Some(project_id) = *remote_id_rx.borrow() {
                    self.client
                        .send(proto::UnregisterProject { project_id })
                        .log_err();
                }
            }
            ProjectClientState::Remote { remote_id, .. } => {
                self.client
                    .send(proto::LeaveProject {
                        project_id: *remote_id,
                    })
                    .log_err();
            }
        }
    }

    /// Shuts down every language server before the application exits,
    /// returning a future the app waits on.
    fn app_will_quit(
        &mut self,
        _: &mut MutableAppContext,
    ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
        use futures::FutureExt;

        // `shutdown` may return `None` for servers with nothing to do; those
        // are skipped.
        let shutdown_futures = self
            .language_servers
            .drain()
            .filter_map(|(_, server)| server.shutdown())
            .collect::<Vec<_>>();
        Some(
            async move {
                futures::future::join_all(shutdown_futures).await;
            }
            .boxed(),
        )
    }
}
2917
2918impl Collaborator {
2919 fn from_proto(
2920 message: proto::Collaborator,
2921 user_store: &ModelHandle<UserStore>,
2922 cx: &mut AsyncAppContext,
2923 ) -> impl Future<Output = Result<Self>> {
2924 let user = user_store.update(cx, |user_store, cx| {
2925 user_store.fetch_user(message.user_id, cx)
2926 });
2927
2928 async move {
2929 Ok(Self {
2930 peer_id: PeerId(message.peer_id),
2931 user: user.await?,
2932 replica_id: message.replica_id as ReplicaId,
2933 })
2934 }
2935 }
2936}
2937
2938impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
2939 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
2940 Self {
2941 worktree_id,
2942 path: path.as_ref().into(),
2943 }
2944 }
2945}
2946
2947impl From<lsp::CreateFileOptions> for fs::CreateOptions {
2948 fn from(options: lsp::CreateFileOptions) -> Self {
2949 Self {
2950 overwrite: options.overwrite.unwrap_or(false),
2951 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2952 }
2953 }
2954}
2955
2956impl From<lsp::RenameFileOptions> for fs::RenameOptions {
2957 fn from(options: lsp::RenameFileOptions) -> Self {
2958 Self {
2959 overwrite: options.overwrite.unwrap_or(false),
2960 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2961 }
2962 }
2963}
2964
2965impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
2966 fn from(options: lsp::DeleteFileOptions) -> Self {
2967 Self {
2968 recursive: options.recursive.unwrap_or(false),
2969 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
2970 }
2971 }
2972}
2973
2974#[cfg(test)]
2975mod tests {
2976 use super::{Event, *};
2977 use client::test::FakeHttpClient;
2978 use fs::RealFs;
2979 use futures::StreamExt;
2980 use gpui::test::subscribe;
2981 use language::{
2982 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageRegistry,
2983 LanguageServerConfig, Point,
2984 };
2985 use lsp::Url;
2986 use serde_json::json;
2987 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
2988 use unindent::Unindent as _;
2989 use util::test::temp_tree;
2990 use worktree::WorktreeHandle as _;
2991
    // Verifies that a worktree opened through a symlink scans correctly and
    // that fuzzy path matching returns the expected entries.
    #[gpui::test]
    async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        // Open the tree through a symlink to the root, and add a symlinked
        // directory inside it, to exercise symlink handling during the scan.
        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = Project::test(Arc::new(RealFs), &mut cx);

        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(&root_link_path, false, cx)
            })
            .await
            .unwrap();

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;
        cx.read(|cx| {
            let tree = tree.read(cx);
            assert_eq!(tree.file_count(), 5);
            // The symlinked directory resolves to the same underlying files.
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        // Fuzzy query "bna" should match only the two banana/carrot entries.
        let cancel_flag = Default::default();
        let results = project
            .read_with(&cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
3054
    // Exercises the diagnostics lifecycle against a fake language server:
    // progress events, published diagnostics, and the resulting buffer state.
    #[gpui::test]
    async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let progress_token = language_server_config
            .disk_based_diagnostics_progress_token
            .clone()
            .unwrap();

        let mut languages = LanguageRegistry::new();
        languages.add(Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        )));

        let dir = temp_tree(json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }));

        let http_client = FakeHttpClient::with_404_response();
        let client = Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));

        let project = cx.update(|cx| {
            Project::local(
                client,
                user_store,
                Arc::new(languages),
                Arc::new(RealFs),
                cx,
            )
        });

        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(&mut cx, |project, cx| {
                project.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Path::new("b.rs").into(),
                    },
                    cx,
                )
            })
            .await
            .unwrap();

        let mut events = subscribe(&project, &mut cx);

        // The first progress start should surface as a "started" event.
        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Overlapping progress tokens: start, end, start again, so one pass
        // is still outstanding.
        fake_server.start_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        fake_server.start_progress(&progress_token).await;

        // Publish a diagnostic for a.rs and expect a per-path event.
        fake_server
            .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path(dir.path().join("a.rs")).unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            })
            .await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated(ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("a.rs"))
            })
        );

        // Ending the remaining progress tokens finishes the pass.
        fake_server.end_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        // The published diagnostic should be reflected in the buffer.
        buffer.read_with(&cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });
    }
3187
    // A worktree containing only directories has no path-match candidates,
    // so even a query matching directory names returns nothing.
    #[gpui::test]
    async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "dir1": {},
                "dir2": {
                    "dir3": {}
                }
            }
        }));

        let project = Project::test(Arc::new(RealFs), &mut cx);
        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(&dir.path(), false, cx)
            })
            .await
            .unwrap();

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        let cancel_flag = Default::default();
        let results = project
            .read_with(&cx, |project, cx| {
                project.match_paths("dir", false, false, 10, &cancel_flag, cx)
            })
            .await;

        assert!(results.is_empty());
    }
3219
    #[gpui::test]
    async fn test_definition(mut cx: gpui::TestAppContext) {
        // Go-to-definition across files: the definition target lives outside
        // the opened worktree, so the project must create a second (weak)
        // worktree for it and release that worktree when the result is dropped.
        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();

        // Register a Rust language backed by the fake language server.
        let mut languages = LanguageRegistry::new();
        languages.add(Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        )));

        let dir = temp_tree(json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }));
        let dir_path = dir.path().to_path_buf();

        let http_client = FakeHttpClient::with_404_response();
        let client = Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project = cx.update(|cx| {
            Project::local(
                client,
                user_store,
                Arc::new(languages),
                Arc::new(RealFs),
                cx,
            )
        });

        // Open only b.rs as a (non-weak) single-file worktree.
        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(dir.path().join("b.rs"), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // In a single-file worktree the file itself is addressed by the
        // empty relative path.
        let buffer = project
            .update(&mut cx, |project, cx| {
                project.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Path::new("").into(),
                    },
                    cx,
                )
            })
            .await
            .unwrap();

        // Have the fake server answer GotoDefinition with a location in a.rs,
        // a file that is NOT part of any open worktree yet.
        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params| {
            let params = params.text_document_position_params;
            assert_eq!(
                params.text_document.uri.to_file_path().unwrap(),
                dir_path.join("b.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 22));

            Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
                lsp::Url::from_file_path(dir_path.join("a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            )))
        });

        let mut definitions = project
            .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
            .await
            .unwrap();

        assert_eq!(definitions.len(), 1);
        let definition = definitions.pop().unwrap();
        cx.update(|cx| {
            let target_buffer = definition.target_buffer.read(cx);
            assert_eq!(
                target_buffer
                    .file()
                    .unwrap()
                    .as_local()
                    .unwrap()
                    .abs_path(cx),
                dir.path().join("a.rs")
            );
            assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
            // A second, weak worktree was created for a.rs to host the target.
            assert_eq!(
                list_worktrees(&project, cx),
                [
                    (dir.path().join("b.rs"), false),
                    (dir.path().join("a.rs"), true)
                ]
            );

            // Dropping the definition releases the last reference into the
            // weak worktree...
            drop(definition);
        });
        // ...so afterwards only the original worktree remains.
        cx.read(|cx| {
            assert_eq!(
                list_worktrees(&project, cx),
                [(dir.path().join("b.rs"), false)]
            );
        });

        // Helper: each open worktree's absolute path paired with its weak flag.
        fn list_worktrees(project: &ModelHandle<Project>, cx: &AppContext) -> Vec<(PathBuf, bool)> {
            project
                .read(cx)
                .worktrees(cx)
                .map(|worktree| {
                    let worktree = worktree.read(cx);
                    (
                        worktree.as_local().unwrap().abs_path().to_path_buf(),
                        worktree.is_weak(),
                    )
                })
                .collect::<Vec<_>>()
        }
    }
3342
3343 #[gpui::test]
3344 async fn test_save_file(mut cx: gpui::TestAppContext) {
3345 let fs = Arc::new(FakeFs::new(cx.background()));
3346 fs.insert_tree(
3347 "/dir",
3348 json!({
3349 "file1": "the old contents",
3350 }),
3351 )
3352 .await;
3353
3354 let project = Project::test(fs.clone(), &mut cx);
3355 let worktree_id = project
3356 .update(&mut cx, |p, cx| {
3357 p.find_or_create_local_worktree("/dir", false, cx)
3358 })
3359 .await
3360 .unwrap()
3361 .0
3362 .read_with(&cx, |tree, _| tree.id());
3363
3364 let buffer = project
3365 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3366 .await
3367 .unwrap();
3368 buffer
3369 .update(&mut cx, |buffer, cx| {
3370 assert_eq!(buffer.text(), "the old contents");
3371 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3372 buffer.save(cx)
3373 })
3374 .await
3375 .unwrap();
3376
3377 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3378 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3379 }
3380
3381 #[gpui::test]
3382 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
3383 let fs = Arc::new(FakeFs::new(cx.background()));
3384 fs.insert_tree(
3385 "/dir",
3386 json!({
3387 "file1": "the old contents",
3388 }),
3389 )
3390 .await;
3391
3392 let project = Project::test(fs.clone(), &mut cx);
3393 let worktree_id = project
3394 .update(&mut cx, |p, cx| {
3395 p.find_or_create_local_worktree("/dir/file1", false, cx)
3396 })
3397 .await
3398 .unwrap()
3399 .0
3400 .read_with(&cx, |tree, _| tree.id());
3401
3402 let buffer = project
3403 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3404 .await
3405 .unwrap();
3406 buffer
3407 .update(&mut cx, |buffer, cx| {
3408 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3409 buffer.save(cx)
3410 })
3411 .await
3412 .unwrap();
3413
3414 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3415 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3416 }
3417
3418 #[gpui::test(retries = 5)]
3419 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3420 let dir = temp_tree(json!({
3421 "a": {
3422 "file1": "",
3423 "file2": "",
3424 "file3": "",
3425 },
3426 "b": {
3427 "c": {
3428 "file4": "",
3429 "file5": "",
3430 }
3431 }
3432 }));
3433
3434 let project = Project::test(Arc::new(RealFs), &mut cx);
3435 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3436
3437 let (tree, _) = project
3438 .update(&mut cx, |p, cx| {
3439 p.find_or_create_local_worktree(dir.path(), false, cx)
3440 })
3441 .await
3442 .unwrap();
3443 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3444
3445 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3446 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3447 async move { buffer.await.unwrap() }
3448 };
3449 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3450 tree.read_with(cx, |tree, _| {
3451 tree.entry_for_path(path)
3452 .expect(&format!("no entry for path {}", path))
3453 .id
3454 })
3455 };
3456
3457 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3458 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3459 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3460 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3461
3462 let file2_id = id_for_path("a/file2", &cx);
3463 let file3_id = id_for_path("a/file3", &cx);
3464 let file4_id = id_for_path("b/c/file4", &cx);
3465
3466 // Wait for the initial scan.
3467 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3468 .await;
3469
3470 // Create a remote copy of this worktree.
3471 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
3472 let (remote, load_task) = cx.update(|cx| {
3473 Worktree::remote(
3474 1,
3475 1,
3476 initial_snapshot.to_proto(&Default::default(), Default::default()),
3477 rpc.clone(),
3478 cx,
3479 )
3480 });
3481 load_task.await;
3482
3483 cx.read(|cx| {
3484 assert!(!buffer2.read(cx).is_dirty());
3485 assert!(!buffer3.read(cx).is_dirty());
3486 assert!(!buffer4.read(cx).is_dirty());
3487 assert!(!buffer5.read(cx).is_dirty());
3488 });
3489
3490 // Rename and delete files and directories.
3491 tree.flush_fs_events(&cx).await;
3492 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3493 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3494 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3495 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3496 tree.flush_fs_events(&cx).await;
3497
3498 let expected_paths = vec![
3499 "a",
3500 "a/file1",
3501 "a/file2.new",
3502 "b",
3503 "d",
3504 "d/file3",
3505 "d/file4",
3506 ];
3507
3508 cx.read(|app| {
3509 assert_eq!(
3510 tree.read(app)
3511 .paths()
3512 .map(|p| p.to_str().unwrap())
3513 .collect::<Vec<_>>(),
3514 expected_paths
3515 );
3516
3517 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3518 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3519 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3520
3521 assert_eq!(
3522 buffer2.read(app).file().unwrap().path().as_ref(),
3523 Path::new("a/file2.new")
3524 );
3525 assert_eq!(
3526 buffer3.read(app).file().unwrap().path().as_ref(),
3527 Path::new("d/file3")
3528 );
3529 assert_eq!(
3530 buffer4.read(app).file().unwrap().path().as_ref(),
3531 Path::new("d/file4")
3532 );
3533 assert_eq!(
3534 buffer5.read(app).file().unwrap().path().as_ref(),
3535 Path::new("b/c/file5")
3536 );
3537
3538 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3539 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3540 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3541 assert!(buffer5.read(app).file().unwrap().is_deleted());
3542 });
3543
3544 // Update the remote worktree. Check that it becomes consistent with the
3545 // local worktree.
3546 remote.update(&mut cx, |remote, cx| {
3547 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
3548 &initial_snapshot,
3549 1,
3550 1,
3551 0,
3552 true,
3553 );
3554 remote
3555 .as_remote_mut()
3556 .unwrap()
3557 .snapshot
3558 .apply_remote_update(update_message)
3559 .unwrap();
3560
3561 assert_eq!(
3562 remote
3563 .paths()
3564 .map(|p| p.to_str().unwrap())
3565 .collect::<Vec<_>>(),
3566 expected_paths
3567 );
3568 });
3569 }
3570
3571 #[gpui::test]
3572 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3573 let fs = Arc::new(FakeFs::new(cx.background()));
3574 fs.insert_tree(
3575 "/the-dir",
3576 json!({
3577 "a.txt": "a-contents",
3578 "b.txt": "b-contents",
3579 }),
3580 )
3581 .await;
3582
3583 let project = Project::test(fs.clone(), &mut cx);
3584 let worktree_id = project
3585 .update(&mut cx, |p, cx| {
3586 p.find_or_create_local_worktree("/the-dir", false, cx)
3587 })
3588 .await
3589 .unwrap()
3590 .0
3591 .read_with(&cx, |tree, _| tree.id());
3592
3593 // Spawn multiple tasks to open paths, repeating some paths.
3594 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3595 (
3596 p.open_buffer((worktree_id, "a.txt"), cx),
3597 p.open_buffer((worktree_id, "b.txt"), cx),
3598 p.open_buffer((worktree_id, "a.txt"), cx),
3599 )
3600 });
3601
3602 let buffer_a_1 = buffer_a_1.await.unwrap();
3603 let buffer_a_2 = buffer_a_2.await.unwrap();
3604 let buffer_b = buffer_b.await.unwrap();
3605 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3606 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3607
3608 // There is only one buffer per path.
3609 let buffer_a_id = buffer_a_1.id();
3610 assert_eq!(buffer_a_2.id(), buffer_a_id);
3611
3612 // Open the same path again while it is still open.
3613 drop(buffer_a_1);
3614 let buffer_a_3 = project
3615 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3616 .await
3617 .unwrap();
3618
3619 // There's still only one buffer per path.
3620 assert_eq!(buffer_a_3.id(), buffer_a_id);
3621 }
3622
3623 #[gpui::test]
3624 async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
3625 use std::fs;
3626
3627 let dir = temp_tree(json!({
3628 "file1": "abc",
3629 "file2": "def",
3630 "file3": "ghi",
3631 }));
3632
3633 let project = Project::test(Arc::new(RealFs), &mut cx);
3634 let (worktree, _) = project
3635 .update(&mut cx, |p, cx| {
3636 p.find_or_create_local_worktree(dir.path(), false, cx)
3637 })
3638 .await
3639 .unwrap();
3640 let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
3641
3642 worktree.flush_fs_events(&cx).await;
3643 worktree
3644 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3645 .await;
3646
3647 let buffer1 = project
3648 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3649 .await
3650 .unwrap();
3651 let events = Rc::new(RefCell::new(Vec::new()));
3652
3653 // initially, the buffer isn't dirty.
3654 buffer1.update(&mut cx, |buffer, cx| {
3655 cx.subscribe(&buffer1, {
3656 let events = events.clone();
3657 move |_, _, event, _| events.borrow_mut().push(event.clone())
3658 })
3659 .detach();
3660
3661 assert!(!buffer.is_dirty());
3662 assert!(events.borrow().is_empty());
3663
3664 buffer.edit(vec![1..2], "", cx);
3665 });
3666
3667 // after the first edit, the buffer is dirty, and emits a dirtied event.
3668 buffer1.update(&mut cx, |buffer, cx| {
3669 assert!(buffer.text() == "ac");
3670 assert!(buffer.is_dirty());
3671 assert_eq!(
3672 *events.borrow(),
3673 &[language::Event::Edited, language::Event::Dirtied]
3674 );
3675 events.borrow_mut().clear();
3676 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
3677 });
3678
3679 // after saving, the buffer is not dirty, and emits a saved event.
3680 buffer1.update(&mut cx, |buffer, cx| {
3681 assert!(!buffer.is_dirty());
3682 assert_eq!(*events.borrow(), &[language::Event::Saved]);
3683 events.borrow_mut().clear();
3684
3685 buffer.edit(vec![1..1], "B", cx);
3686 buffer.edit(vec![2..2], "D", cx);
3687 });
3688
3689 // after editing again, the buffer is dirty, and emits another dirty event.
3690 buffer1.update(&mut cx, |buffer, cx| {
3691 assert!(buffer.text() == "aBDc");
3692 assert!(buffer.is_dirty());
3693 assert_eq!(
3694 *events.borrow(),
3695 &[
3696 language::Event::Edited,
3697 language::Event::Dirtied,
3698 language::Event::Edited,
3699 ],
3700 );
3701 events.borrow_mut().clear();
3702
3703 // TODO - currently, after restoring the buffer to its
3704 // previously-saved state, the is still considered dirty.
3705 buffer.edit([1..3], "", cx);
3706 assert!(buffer.text() == "ac");
3707 assert!(buffer.is_dirty());
3708 });
3709
3710 assert_eq!(*events.borrow(), &[language::Event::Edited]);
3711
3712 // When a file is deleted, the buffer is considered dirty.
3713 let events = Rc::new(RefCell::new(Vec::new()));
3714 let buffer2 = project
3715 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
3716 .await
3717 .unwrap();
3718 buffer2.update(&mut cx, |_, cx| {
3719 cx.subscribe(&buffer2, {
3720 let events = events.clone();
3721 move |_, _, event, _| events.borrow_mut().push(event.clone())
3722 })
3723 .detach();
3724 });
3725
3726 fs::remove_file(dir.path().join("file2")).unwrap();
3727 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
3728 assert_eq!(
3729 *events.borrow(),
3730 &[language::Event::Dirtied, language::Event::FileHandleChanged]
3731 );
3732
3733 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3734 let events = Rc::new(RefCell::new(Vec::new()));
3735 let buffer3 = project
3736 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
3737 .await
3738 .unwrap();
3739 buffer3.update(&mut cx, |_, cx| {
3740 cx.subscribe(&buffer3, {
3741 let events = events.clone();
3742 move |_, _, event, _| events.borrow_mut().push(event.clone())
3743 })
3744 .detach();
3745 });
3746
3747 worktree.flush_fs_events(&cx).await;
3748 buffer3.update(&mut cx, |buffer, cx| {
3749 buffer.edit(Some(0..0), "x", cx);
3750 });
3751 events.borrow_mut().clear();
3752 fs::remove_file(dir.path().join("file3")).unwrap();
3753 buffer3
3754 .condition(&cx, |_, _| !events.borrow().is_empty())
3755 .await;
3756 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
3757 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3758 }
3759
    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
        // When the file changes on disk: a clean buffer silently reloads;
        // a dirty buffer keeps its edits and is flagged as conflicted.
        use std::fs;

        let initial_contents = "aaa\nbbbbb\nc\n";
        let dir = temp_tree(json!({ "the-file": initial_contents }));

        let project = Project::test(Arc::new(RealFs), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        // Wait for the initial scan so the file entry exists.
        worktree
            .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let abs_path = dir.path().join("the-file");
        let buffer = project
            .update(&mut cx, |p, cx| {
                p.open_buffer((worktree_id, "the-file"), cx)
            })
            .await
            .unwrap();

        // TODO
        // Add a cursor on each row.
        // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
        //     assert!(!buffer.is_dirty());
        //     buffer.add_selection_set(
        //         &(0..3)
        //             .map(|row| Selection {
        //                 id: row as usize,
        //                 start: Point::new(row, 1),
        //                 end: Point::new(row, 1),
        //                 reversed: false,
        //                 goal: SelectionGoal::None,
        //             })
        //             .collect::<Vec<_>>(),
        //         cx,
        //     )
        // });

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(&cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(&mut cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            // Reloading from disk must leave the buffer clean and unconflicted.
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer
        buffer.update(&mut cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }
3859
    #[gpui::test]
    async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
        // Diagnostics connected through `related_information` must be grouped:
        // each group gets one primary entry plus its supporting hints, all
        // sharing a `group_id`.
        let fs = Arc::new(FakeFs::new(cx.background()));
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        // Two diagnostic groups, all in the same file:
        // - group 0: "error 1" (warning) + one hint at the same range
        // - group 1: "error 2" (error) + two hints, cross-linked via
        //   related_information in both directions
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(&mut cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());

        // All entries, ordered by position; group ids and primary flags
        // reflect the grouping above.
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        // Querying a single group returns exactly its members.
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
4118}