1pub mod fs;
2mod ignore;
3pub mod worktree;
4
5use anyhow::{anyhow, Result};
6use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
7use clock::ReplicaId;
8use collections::{hash_map, HashMap, HashSet};
9use futures::Future;
10use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
11use gpui::{
12 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
13 UpgradeModelHandle, WeakModelHandle,
14};
15use language::{
16 point_from_lsp,
17 proto::{deserialize_anchor, serialize_anchor},
18 range_from_lsp, AnchorRangeExt, Bias, Buffer, CodeAction, Completion, CompletionLabel,
19 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
20 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
21};
22use lsp::{DiagnosticSeverity, LanguageServer};
23use postage::{broadcast, prelude::Stream, sink::Sink, watch};
24use smol::block_on;
25use std::{
26 convert::TryInto,
27 ops::Range,
28 path::{Path, PathBuf},
29 sync::{atomic::AtomicBool, Arc},
30 time::Instant,
31};
32use util::{post_inc, ResultExt, TryFutureExt as _};
33
34pub use fs::*;
35pub use worktree::*;
36
37pub struct Project {
38 worktrees: Vec<WorktreeHandle>,
39 active_entry: Option<ProjectEntry>,
40 languages: Arc<LanguageRegistry>,
41 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
42 client: Arc<client::Client>,
43 user_store: ModelHandle<UserStore>,
44 fs: Arc<dyn Fs>,
45 client_state: ProjectClientState,
46 collaborators: HashMap<PeerId, Collaborator>,
47 subscriptions: Vec<client::Subscription>,
48 language_servers_with_diagnostics_running: isize,
49 open_buffers: HashMap<u64, OpenBuffer>,
50 opened_buffer: broadcast::Sender<()>,
51 loading_buffers: HashMap<
52 ProjectPath,
53 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
54 >,
55 shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
56}
57
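// The project's record of a buffer: either the loaded buffer itself (held weakly),
// or a backlog of operations that arrived before the buffer finished opening and
// will be applied to it once it loads.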
58enum OpenBuffer {
59 Loaded(WeakModelHandle<Buffer>),
60 Operations(Vec<Operation>),
61}
62
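// Worktrees are normally held strongly; "weak" local worktrees (for example, ones
// created implicitly when opening a buffer from an LSP path) are only weakly
// retained and are pruned once nothing else holds them.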
63enum WorktreeHandle {
64 Strong(ModelHandle<Worktree>),
65 Weak(WeakModelHandle<Worktree>),
66}
67
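// A project is either local (backed by worktrees on this machine, optionally shared
// once it has been assigned a remote id) or remote (a replica of another peer's
// shared project, joined by that id).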
68enum ProjectClientState {
69 Local {
70 is_shared: bool,
71 remote_id_tx: watch::Sender<Option<u64>>,
72 remote_id_rx: watch::Receiver<Option<u64>>,
73 _maintain_remote_id_task: Task<Option<()>>,
74 },
75 Remote {
76 sharing_has_stopped: bool,
77 remote_id: u64,
78 replica_id: ReplicaId,
79 },
80}
81
82#[derive(Clone, Debug)]
83pub struct Collaborator {
84 pub user: Arc<User>,
85 pub peer_id: PeerId,
86 pub replica_id: ReplicaId,
87}
88
89#[derive(Clone, Debug, PartialEq)]
90pub enum Event {
91 ActiveEntryChanged(Option<ProjectEntry>),
92 WorktreeRemoved(WorktreeId),
93 DiskBasedDiagnosticsStarted,
94 DiskBasedDiagnosticsUpdated,
95 DiskBasedDiagnosticsFinished,
96 DiagnosticsUpdated(ProjectPath),
97}
98
99#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
100pub struct ProjectPath {
101 pub worktree_id: WorktreeId,
102 pub path: Arc<Path>,
103}
104
105#[derive(Clone, Debug, Default, PartialEq)]
106pub struct DiagnosticSummary {
107 pub error_count: usize,
108 pub warning_count: usize,
109 pub info_count: usize,
110 pub hint_count: usize,
111}
112
113#[derive(Debug)]
114pub struct Definition {
115 pub target_buffer: ModelHandle<Buffer>,
116 pub target_range: Range<language::Anchor>,
117}
118
119#[derive(Default)]
120pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
121
122impl DiagnosticSummary {
123 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
124 let mut this = Self {
125 error_count: 0,
126 warning_count: 0,
127 info_count: 0,
128 hint_count: 0,
129 };
130
131 for entry in diagnostics {
132 if entry.diagnostic.is_primary {
133 match entry.diagnostic.severity {
134 DiagnosticSeverity::ERROR => this.error_count += 1,
135 DiagnosticSeverity::WARNING => this.warning_count += 1,
136 DiagnosticSeverity::INFORMATION => this.info_count += 1,
137 DiagnosticSeverity::HINT => this.hint_count += 1,
138 _ => {}
139 }
140 }
141 }
142
143 this
144 }
145
146 pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
147 proto::DiagnosticSummary {
148 path: path.to_string_lossy().to_string(),
149 error_count: self.error_count as u32,
150 warning_count: self.warning_count as u32,
151 info_count: self.info_count as u32,
152 hint_count: self.hint_count as u32,
153 }
154 }
155}
156
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
158pub struct ProjectEntry {
159 pub worktree_id: WorktreeId,
160 pub entry_id: usize,
161}
162
163impl Project {
164 pub fn init(client: &Arc<Client>) {
165 client.add_entity_message_handler(Self::handle_add_collaborator);
166 client.add_entity_message_handler(Self::handle_buffer_reloaded);
167 client.add_entity_message_handler(Self::handle_buffer_saved);
168 client.add_entity_message_handler(Self::handle_close_buffer);
169 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
170 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
171 client.add_entity_message_handler(Self::handle_remove_collaborator);
172 client.add_entity_message_handler(Self::handle_share_worktree);
173 client.add_entity_message_handler(Self::handle_unregister_worktree);
174 client.add_entity_message_handler(Self::handle_unshare_project);
175 client.add_entity_message_handler(Self::handle_update_buffer_file);
176 client.add_entity_message_handler(Self::handle_update_buffer);
177 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
178 client.add_entity_message_handler(Self::handle_update_worktree);
179 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
180 client.add_entity_request_handler(Self::handle_apply_code_action);
181 client.add_entity_request_handler(Self::handle_format_buffers);
182 client.add_entity_request_handler(Self::handle_get_code_actions);
183 client.add_entity_request_handler(Self::handle_get_completions);
184 client.add_entity_request_handler(Self::handle_get_definition);
185 client.add_entity_request_handler(Self::handle_open_buffer);
186 client.add_entity_request_handler(Self::handle_save_buffer);
187 }
188
189 pub fn local(
190 client: Arc<Client>,
191 user_store: ModelHandle<UserStore>,
192 languages: Arc<LanguageRegistry>,
193 fs: Arc<dyn Fs>,
194 cx: &mut MutableAppContext,
195 ) -> ModelHandle<Self> {
196 cx.add_model(|cx: &mut ModelContext<Self>| {
197 let (remote_id_tx, remote_id_rx) = watch::channel();
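            // Whenever the client (re)connects, register this project with the server,
            // re-register its worktrees, and publish the new remote id; while
            // disconnected, the remote id is cleared.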
198 let _maintain_remote_id_task = cx.spawn_weak({
199 let rpc = client.clone();
200 move |this, mut cx| {
201 async move {
202 let mut status = rpc.status();
203 while let Some(status) = status.recv().await {
204 if let Some(this) = this.upgrade(&cx) {
205 let remote_id = if let client::Status::Connected { .. } = status {
206 let response = rpc.request(proto::RegisterProject {}).await?;
207 Some(response.project_id)
208 } else {
209 None
210 };
211
212 if let Some(project_id) = remote_id {
213 let mut registrations = Vec::new();
214 this.update(&mut cx, |this, cx| {
215 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
216 registrations.push(worktree.update(
217 cx,
218 |worktree, cx| {
219 let worktree = worktree.as_local_mut().unwrap();
220 worktree.register(project_id, cx)
221 },
222 ));
223 }
224 });
225 for registration in registrations {
226 registration.await?;
227 }
228 }
229 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
230 }
231 }
232 Ok(())
233 }
234 .log_err()
235 }
236 });
237
238 Self {
239 worktrees: Default::default(),
240 collaborators: Default::default(),
241 open_buffers: Default::default(),
242 loading_buffers: Default::default(),
243 shared_buffers: Default::default(),
244 client_state: ProjectClientState::Local {
245 is_shared: false,
246 remote_id_tx,
247 remote_id_rx,
248 _maintain_remote_id_task,
249 },
250 opened_buffer: broadcast::channel(1).0,
251 subscriptions: Vec::new(),
252 active_entry: None,
253 languages,
254 client,
255 user_store,
256 fs,
257 language_servers_with_diagnostics_running: 0,
258 language_servers: Default::default(),
259 }
260 })
261 }
262
263 pub async fn remote(
264 remote_id: u64,
265 client: Arc<Client>,
266 user_store: ModelHandle<UserStore>,
267 languages: Arc<LanguageRegistry>,
268 fs: Arc<dyn Fs>,
269 cx: &mut AsyncAppContext,
270 ) -> Result<ModelHandle<Self>> {
271 client.authenticate_and_connect(&cx).await?;
272
273 let response = client
274 .request(proto::JoinProject {
275 project_id: remote_id,
276 })
277 .await?;
278
279 let replica_id = response.replica_id as ReplicaId;
280
281 let mut worktrees = Vec::new();
282 for worktree in response.worktrees {
283 let (worktree, load_task) = cx
284 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
285 worktrees.push(worktree);
286 load_task.detach();
287 }
288
289 let this = cx.add_model(|cx| {
290 let mut this = Self {
291 worktrees: Vec::new(),
292 open_buffers: Default::default(),
293 loading_buffers: Default::default(),
294 opened_buffer: broadcast::channel(1).0,
295 shared_buffers: Default::default(),
296 active_entry: None,
297 collaborators: Default::default(),
298 languages,
299 user_store: user_store.clone(),
300 fs,
301 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
302 client,
303 client_state: ProjectClientState::Remote {
304 sharing_has_stopped: false,
305 remote_id,
306 replica_id,
307 },
308 language_servers_with_diagnostics_running: 0,
309 language_servers: Default::default(),
310 };
311 for worktree in worktrees {
312 this.add_worktree(&worktree, cx);
313 }
314 this
315 });
316
317 let user_ids = response
318 .collaborators
319 .iter()
320 .map(|peer| peer.user_id)
321 .collect();
322 user_store
323 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
324 .await?;
325 let mut collaborators = HashMap::default();
326 for message in response.collaborators {
327 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
328 collaborators.insert(collaborator.peer_id, collaborator);
329 }
330
331 this.update(cx, |this, _| {
332 this.collaborators = collaborators;
333 });
334
335 Ok(this)
336 }
337
338 #[cfg(any(test, feature = "test-support"))]
339 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
340 let languages = Arc::new(LanguageRegistry::new());
341 let http_client = client::test::FakeHttpClient::with_404_response();
342 let client = client::Client::new(http_client.clone());
343 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
344 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
345 }
346
347 pub fn fs(&self) -> &Arc<dyn Fs> {
348 &self.fs
349 }
350
351 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
352 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
353 *remote_id_tx.borrow_mut() = remote_id;
354 }
355
356 self.subscriptions.clear();
357 if let Some(remote_id) = remote_id {
358 self.subscriptions
359 .push(self.client.add_model_for_remote_entity(remote_id, cx));
360 }
361 }
362
363 pub fn remote_id(&self) -> Option<u64> {
364 match &self.client_state {
365 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
366 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
367 }
368 }
369
370 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
371 let mut id = None;
372 let mut watch = None;
373 match &self.client_state {
374 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
375 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
376 }
377
378 async move {
379 if let Some(id) = id {
380 return id;
381 }
382 let mut watch = watch.unwrap();
383 loop {
384 let id = *watch.borrow();
385 if let Some(id) = id {
386 return id;
387 }
388 watch.recv().await;
389 }
390 }
391 }
392
393 pub fn replica_id(&self) -> ReplicaId {
394 match &self.client_state {
395 ProjectClientState::Local { .. } => 0,
396 ProjectClientState::Remote { replica_id, .. } => *replica_id,
397 }
398 }
399
400 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
401 &self.collaborators
402 }
403
404 pub fn worktrees<'a>(
405 &'a self,
406 cx: &'a AppContext,
407 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
408 self.worktrees
409 .iter()
410 .filter_map(move |worktree| worktree.upgrade(cx))
411 }
412
413 pub fn worktree_for_id(
414 &self,
415 id: WorktreeId,
416 cx: &AppContext,
417 ) -> Option<ModelHandle<Worktree>> {
418 self.worktrees(cx)
419 .find(|worktree| worktree.read(cx).id() == id)
420 }
421
422 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
423 let rpc = self.client.clone();
424 cx.spawn(|this, mut cx| async move {
425 let project_id = this.update(&mut cx, |this, _| {
426 if let ProjectClientState::Local {
427 is_shared,
428 remote_id_rx,
429 ..
430 } = &mut this.client_state
431 {
432 *is_shared = true;
433 remote_id_rx
434 .borrow()
435 .ok_or_else(|| anyhow!("no project id"))
436 } else {
437 Err(anyhow!("can't share a remote project"))
438 }
439 })?;
440
441 rpc.request(proto::ShareProject { project_id }).await?;
442 let mut tasks = Vec::new();
443 this.update(&mut cx, |this, cx| {
444 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
445 worktree.update(cx, |worktree, cx| {
446 let worktree = worktree.as_local_mut().unwrap();
447 tasks.push(worktree.share(project_id, cx));
448 });
449 }
450 });
451 for task in tasks {
452 task.await?;
453 }
454 this.update(&mut cx, |_, cx| cx.notify());
455 Ok(())
456 })
457 }
458
459 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
460 let rpc = self.client.clone();
461 cx.spawn(|this, mut cx| async move {
462 let project_id = this.update(&mut cx, |this, _| {
463 if let ProjectClientState::Local {
464 is_shared,
465 remote_id_rx,
466 ..
467 } = &mut this.client_state
468 {
469 *is_shared = false;
470 remote_id_rx
471 .borrow()
472 .ok_or_else(|| anyhow!("no project id"))
473 } else {
                    Err(anyhow!("can't unshare a remote project"))
475 }
476 })?;
477
478 rpc.send(proto::UnshareProject { project_id })?;
479 this.update(&mut cx, |this, cx| {
480 this.collaborators.clear();
481 this.shared_buffers.clear();
482 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
483 worktree.update(cx, |worktree, _| {
484 worktree.as_local_mut().unwrap().unshare();
485 });
486 }
487 cx.notify()
488 });
489 Ok(())
490 })
491 }
492
493 pub fn is_read_only(&self) -> bool {
494 match &self.client_state {
495 ProjectClientState::Local { .. } => false,
496 ProjectClientState::Remote {
497 sharing_has_stopped,
498 ..
499 } => *sharing_has_stopped,
500 }
501 }
502
503 pub fn is_local(&self) -> bool {
504 match &self.client_state {
505 ProjectClientState::Local { .. } => true,
506 ProjectClientState::Remote { .. } => false,
507 }
508 }
509
510 pub fn open_buffer(
511 &mut self,
512 path: impl Into<ProjectPath>,
513 cx: &mut ModelContext<Self>,
514 ) -> Task<Result<ModelHandle<Buffer>>> {
515 let project_path = path.into();
516 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
517 worktree
518 } else {
519 return Task::ready(Err(anyhow!("no such worktree")));
520 };
521
522 // If there is already a buffer for the given path, then return it.
523 let existing_buffer = self.get_open_buffer(&project_path, cx);
524 if let Some(existing_buffer) = existing_buffer {
525 return Task::ready(Ok(existing_buffer));
526 }
527
528 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
529 // If the given path is already being loaded, then wait for that existing
530 // task to complete and return the same buffer.
531 hash_map::Entry::Occupied(e) => e.get().clone(),
532
533 // Otherwise, record the fact that this path is now being loaded.
534 hash_map::Entry::Vacant(entry) => {
535 let (mut tx, rx) = postage::watch::channel();
536 entry.insert(rx.clone());
537
538 let load_buffer = if worktree.read(cx).is_local() {
539 self.open_local_buffer(&project_path.path, &worktree, cx)
540 } else {
541 self.open_remote_buffer(&project_path.path, &worktree, cx)
542 };
543
544 cx.spawn(move |this, mut cx| async move {
545 let load_result = load_buffer.await;
546 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
547 // Record the fact that the buffer is no longer loading.
548 this.loading_buffers.remove(&project_path);
549 let buffer = load_result.map_err(Arc::new)?;
550 Ok(buffer)
551 }));
552 })
553 .detach();
554 rx
555 }
556 };
557
558 cx.foreground().spawn(async move {
559 loop {
560 if let Some(result) = loading_watch.borrow().as_ref() {
561 match result {
562 Ok(buffer) => return Ok(buffer.clone()),
563 Err(error) => return Err(anyhow!("{}", error)),
564 }
565 }
566 loading_watch.recv().await;
567 }
568 })
569 }
570
571 fn open_local_buffer(
572 &mut self,
573 path: &Arc<Path>,
574 worktree: &ModelHandle<Worktree>,
575 cx: &mut ModelContext<Self>,
576 ) -> Task<Result<ModelHandle<Buffer>>> {
577 let load_buffer = worktree.update(cx, |worktree, cx| {
578 let worktree = worktree.as_local_mut().unwrap();
579 worktree.load_buffer(path, cx)
580 });
581 let worktree = worktree.downgrade();
582 cx.spawn(|this, mut cx| async move {
583 let buffer = load_buffer.await?;
584 let worktree = worktree
585 .upgrade(&cx)
586 .ok_or_else(|| anyhow!("worktree was removed"))?;
587 this.update(&mut cx, |this, cx| {
588 this.register_buffer(&buffer, Some(&worktree), cx)
589 })?;
590 Ok(buffer)
591 })
592 }
593
594 fn open_remote_buffer(
595 &mut self,
596 path: &Arc<Path>,
597 worktree: &ModelHandle<Worktree>,
598 cx: &mut ModelContext<Self>,
599 ) -> Task<Result<ModelHandle<Buffer>>> {
600 let rpc = self.client.clone();
601 let project_id = self.remote_id().unwrap();
602 let remote_worktree_id = worktree.read(cx).id();
603 let path = path.clone();
604 let path_string = path.to_string_lossy().to_string();
605 cx.spawn(|this, mut cx| async move {
606 let response = rpc
607 .request(proto::OpenBuffer {
608 project_id,
609 worktree_id: remote_worktree_id.to_proto(),
610 path: path_string,
611 })
612 .await?;
613 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
614 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
615 .await
616 })
617 }
618
619 fn open_local_buffer_from_lsp_path(
620 &mut self,
621 abs_path: lsp::Url,
622 lang_name: String,
623 lang_server: Arc<LanguageServer>,
624 cx: &mut ModelContext<Self>,
625 ) -> Task<Result<ModelHandle<Buffer>>> {
626 cx.spawn(|this, mut cx| async move {
627 let abs_path = abs_path
628 .to_file_path()
629 .map_err(|_| anyhow!("can't convert URI to path"))?;
630 let (worktree, relative_path) = if let Some(result) =
631 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
632 {
633 result
634 } else {
635 let worktree = this
636 .update(&mut cx, |this, cx| {
637 this.create_local_worktree(&abs_path, true, cx)
638 })
639 .await?;
640 this.update(&mut cx, |this, cx| {
641 this.language_servers
642 .insert((worktree.read(cx).id(), lang_name), lang_server);
643 });
644 (worktree, PathBuf::new())
645 };
646
647 let project_path = ProjectPath {
648 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
649 path: relative_path.into(),
650 };
651 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
652 .await
653 })
654 }
655
656 pub fn save_buffer_as(
657 &self,
658 buffer: ModelHandle<Buffer>,
659 abs_path: PathBuf,
660 cx: &mut ModelContext<Project>,
661 ) -> Task<Result<()>> {
662 let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
663 cx.spawn(|this, mut cx| async move {
664 let (worktree, path) = worktree_task.await?;
665 worktree
666 .update(&mut cx, |worktree, cx| {
667 worktree
668 .as_local_mut()
669 .unwrap()
670 .save_buffer_as(buffer.clone(), path, cx)
671 })
672 .await?;
673 this.update(&mut cx, |this, cx| {
674 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
675 });
676 Ok(())
677 })
678 }
679
680 #[cfg(any(test, feature = "test-support"))]
681 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
682 let path = path.into();
683 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
684 self.open_buffers.iter().any(|(_, buffer)| {
685 if let Some(buffer) = buffer.upgrade(cx) {
686 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
687 if file.worktree == worktree && file.path() == &path.path {
688 return true;
689 }
690 }
691 }
692 false
693 })
694 } else {
695 false
696 }
697 }
698
699 fn get_open_buffer(
700 &mut self,
701 path: &ProjectPath,
702 cx: &mut ModelContext<Self>,
703 ) -> Option<ModelHandle<Buffer>> {
704 let mut result = None;
705 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
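        // Scan the open buffers for one whose file matches the requested path, pruning
        // any entries whose buffers have since been dropped.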
706 self.open_buffers.retain(|_, buffer| {
707 if let Some(buffer) = buffer.upgrade(cx) {
708 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
709 if file.worktree == worktree && file.path() == &path.path {
710 result = Some(buffer);
711 }
712 }
713 true
714 } else {
715 false
716 }
717 });
718 result
719 }
720
721 fn register_buffer(
722 &mut self,
723 buffer: &ModelHandle<Buffer>,
724 worktree: Option<&ModelHandle<Worktree>>,
725 cx: &mut ModelContext<Self>,
726 ) -> Result<()> {
727 match self.open_buffers.insert(
728 buffer.read(cx).remote_id(),
729 OpenBuffer::Loaded(buffer.downgrade()),
730 ) {
731 None => {}
732 Some(OpenBuffer::Operations(operations)) => {
733 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
734 }
735 Some(OpenBuffer::Loaded(_)) => Err(anyhow!("registered the same buffer twice"))?,
736 }
737 self.assign_language_to_buffer(&buffer, worktree, cx);
738 Ok(())
739 }
740
741 fn assign_language_to_buffer(
742 &mut self,
743 buffer: &ModelHandle<Buffer>,
744 worktree: Option<&ModelHandle<Worktree>>,
745 cx: &mut ModelContext<Self>,
746 ) -> Option<()> {
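        // Returns `Option<()>` only so that `?` can be used on the buffer's file below;
        // callers ignore the result.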
747 let (path, full_path) = {
748 let file = buffer.read(cx).file()?;
749 (file.path().clone(), file.full_path(cx))
750 };
751
752 // If the buffer has a language, set it and start/assign the language server
753 if let Some(language) = self.languages.select_language(&full_path) {
754 buffer.update(cx, |buffer, cx| {
755 buffer.set_language(Some(language.clone()), cx);
756 });
757
            // For local worktrees, start a language server for this buffer's language
            // if one isn't already running, and assign it to the buffer.
760 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
761 let worktree_id = local_worktree.id();
762 let worktree_abs_path = local_worktree.abs_path().clone();
763
764 let language_server = match self
765 .language_servers
766 .entry((worktree_id, language.name().to_string()))
767 {
768 hash_map::Entry::Occupied(e) => Some(e.get().clone()),
769 hash_map::Entry::Vacant(e) => Self::start_language_server(
770 self.client.clone(),
771 language.clone(),
772 &worktree_abs_path,
773 cx,
774 )
775 .map(|server| e.insert(server).clone()),
776 };
777
778 buffer.update(cx, |buffer, cx| {
779 buffer.set_language_server(language_server, cx);
780 });
781 }
782 }
783
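        // Whether or not a language was found, restore any diagnostics the worktree has
        // previously stored for this path.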
784 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
785 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
786 buffer.update(cx, |buffer, cx| {
787 buffer.update_diagnostics(diagnostics, None, cx).log_err();
788 });
789 }
790 }
791
792 None
793 }
794
795 fn start_language_server(
796 rpc: Arc<Client>,
797 language: Arc<Language>,
798 worktree_path: &Path,
799 cx: &mut ModelContext<Self>,
800 ) -> Option<Arc<LanguageServer>> {
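        // Internal events used to funnel all diagnostics activity through one task. For
        // servers that don't report a disk-based diagnostics progress token, each
        // `PublishDiagnostics` notification is bracketed by synthetic start/finish events.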
801 enum LspEvent {
802 DiagnosticsStart,
803 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
804 DiagnosticsFinish,
805 }
806
807 let language_server = language
808 .start_server(worktree_path, cx)
809 .log_err()
810 .flatten()?;
811 let disk_based_sources = language
812 .disk_based_diagnostic_sources()
813 .cloned()
814 .unwrap_or_default();
815 let disk_based_diagnostics_progress_token =
816 language.disk_based_diagnostics_progress_token().cloned();
817 let has_disk_based_diagnostic_progress_token =
818 disk_based_diagnostics_progress_token.is_some();
819 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
820
821 // Listen for `PublishDiagnostics` notifications.
822 language_server
823 .on_notification::<lsp::notification::PublishDiagnostics, _>({
824 let diagnostics_tx = diagnostics_tx.clone();
825 move |params| {
826 if !has_disk_based_diagnostic_progress_token {
827 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
828 }
829 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params))).ok();
830 if !has_disk_based_diagnostic_progress_token {
831 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
832 }
833 }
834 })
835 .detach();
836
837 // Listen for `Progress` notifications. Send an event when the language server
838 // transitions between running jobs and not running any jobs.
839 let mut running_jobs_for_this_server: i32 = 0;
840 language_server
841 .on_notification::<lsp::notification::Progress, _>(move |params| {
842 let token = match params.token {
843 lsp::NumberOrString::Number(_) => None,
844 lsp::NumberOrString::String(token) => Some(token),
845 };
846
847 if token == disk_based_diagnostics_progress_token {
848 match params.value {
849 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
850 lsp::WorkDoneProgress::Begin(_) => {
851 running_jobs_for_this_server += 1;
852 if running_jobs_for_this_server == 1 {
853 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
854 }
855 }
856 lsp::WorkDoneProgress::End(_) => {
857 running_jobs_for_this_server -= 1;
858 if running_jobs_for_this_server == 0 {
859 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
860 }
861 }
862 _ => {}
863 },
864 }
865 }
866 })
867 .detach();
868
869 // Process all the LSP events.
870 cx.spawn_weak(|this, mut cx| async move {
871 while let Ok(message) = diagnostics_rx.recv().await {
872 let this = this.upgrade(&cx)?;
873 match message {
874 LspEvent::DiagnosticsStart => {
875 this.update(&mut cx, |this, cx| {
876 this.disk_based_diagnostics_started(cx);
877 if let Some(project_id) = this.remote_id() {
878 rpc.send(proto::DiskBasedDiagnosticsUpdating { project_id })
879 .log_err();
880 }
881 });
882 }
883 LspEvent::DiagnosticsUpdate(mut params) => {
884 language.process_diagnostics(&mut params);
885 this.update(&mut cx, |this, cx| {
886 this.update_diagnostics(params, &disk_based_sources, cx)
887 .log_err();
888 });
889 }
890 LspEvent::DiagnosticsFinish => {
891 this.update(&mut cx, |this, cx| {
892 this.disk_based_diagnostics_finished(cx);
893 if let Some(project_id) = this.remote_id() {
894 rpc.send(proto::DiskBasedDiagnosticsUpdated { project_id })
895 .log_err();
896 }
897 });
898 }
899 }
900 }
901 Some(())
902 })
903 .detach();
904
905 Some(language_server)
906 }
907
908 pub fn update_diagnostics(
909 &mut self,
910 params: lsp::PublishDiagnosticsParams,
911 disk_based_sources: &HashSet<String>,
912 cx: &mut ModelContext<Self>,
913 ) -> Result<()> {
914 let abs_path = params
915 .uri
916 .to_file_path()
917 .map_err(|_| anyhow!("URI is not a file"))?;
918 let mut next_group_id = 0;
919 let mut diagnostics = Vec::default();
920 let mut primary_diagnostic_group_ids = HashMap::default();
921 let mut sources_by_group_id = HashMap::default();
922 let mut supporting_diagnostic_severities = HashMap::default();
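        // First pass: assign a group id to each primary diagnostic and record its
        // related-information entries; diagnostics whose related information points back
        // at an already-seen (source, code, range) are treated as "supporting" and only
        // contribute a severity override.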
        for diagnostic in &params.diagnostics {
924 let source = diagnostic.source.as_ref();
925 let code = diagnostic.code.as_ref().map(|code| match code {
926 lsp::NumberOrString::Number(code) => code.to_string(),
927 lsp::NumberOrString::String(code) => code.clone(),
928 });
929 let range = range_from_lsp(diagnostic.range);
930 let is_supporting = diagnostic
931 .related_information
932 .as_ref()
933 .map_or(false, |infos| {
934 infos.iter().any(|info| {
935 primary_diagnostic_group_ids.contains_key(&(
936 source,
937 code.clone(),
938 range_from_lsp(info.location.range),
939 ))
940 })
941 });
942
943 if is_supporting {
944 if let Some(severity) = diagnostic.severity {
945 supporting_diagnostic_severities
946 .insert((source, code.clone(), range), severity);
947 }
948 } else {
949 let group_id = post_inc(&mut next_group_id);
950 let is_disk_based =
951 source.map_or(false, |source| disk_based_sources.contains(source));
952
953 sources_by_group_id.insert(group_id, source);
954 primary_diagnostic_group_ids
955 .insert((source, code.clone(), range.clone()), group_id);
956
957 diagnostics.push(DiagnosticEntry {
958 range,
959 diagnostic: Diagnostic {
960 code: code.clone(),
961 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
962 message: diagnostic.message.clone(),
963 group_id,
964 is_primary: true,
965 is_valid: true,
966 is_disk_based,
967 },
968 });
969 if let Some(infos) = &diagnostic.related_information {
970 for info in infos {
971 if info.location.uri == params.uri && !info.message.is_empty() {
972 let range = range_from_lsp(info.location.range);
973 diagnostics.push(DiagnosticEntry {
974 range,
975 diagnostic: Diagnostic {
976 code: code.clone(),
977 severity: DiagnosticSeverity::INFORMATION,
978 message: info.message.clone(),
979 group_id,
980 is_primary: false,
981 is_valid: true,
982 is_disk_based,
983 },
984 });
985 }
986 }
987 }
988 }
989 }
990
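        // Second pass: if a non-primary entry also arrived as its own supporting
        // diagnostic, prefer that diagnostic's reported severity over the INFORMATION
        // default assigned above.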
991 for entry in &mut diagnostics {
992 let diagnostic = &mut entry.diagnostic;
993 if !diagnostic.is_primary {
994 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
995 if let Some(&severity) = supporting_diagnostic_severities.get(&(
996 source,
997 diagnostic.code.clone(),
998 entry.range.clone(),
999 )) {
1000 diagnostic.severity = severity;
1001 }
1002 }
1003 }
1004
1005 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1006 Ok(())
1007 }
1008
1009 pub fn update_diagnostic_entries(
1010 &mut self,
1011 abs_path: PathBuf,
1012 version: Option<i32>,
1013 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1014 cx: &mut ModelContext<Project>,
1015 ) -> Result<(), anyhow::Error> {
1016 let (worktree, relative_path) = self
1017 .find_local_worktree(&abs_path, cx)
1018 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1019 let project_path = ProjectPath {
1020 worktree_id: worktree.read(cx).id(),
1021 path: relative_path.into(),
1022 };
1023
1024 for buffer in self.open_buffers.values() {
1025 if let Some(buffer) = buffer.upgrade(cx) {
1026 if buffer
1027 .read(cx)
1028 .file()
1029 .map_or(false, |file| *file.path() == project_path.path)
1030 {
1031 buffer.update(cx, |buffer, cx| {
1032 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1033 })?;
1034 break;
1035 }
1036 }
1037 }
1038 worktree.update(cx, |worktree, cx| {
1039 worktree
1040 .as_local_mut()
1041 .ok_or_else(|| anyhow!("not a local worktree"))?
1042 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1043 })?;
1044 cx.emit(Event::DiagnosticsUpdated(project_path));
1045 Ok(())
1046 }
1047
1048 pub fn format(
1049 &self,
1050 buffers: HashSet<ModelHandle<Buffer>>,
1051 push_to_history: bool,
1052 cx: &mut ModelContext<Project>,
1053 ) -> Task<Result<ProjectTransaction>> {
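        // Partition the buffers: remote buffers are formatted by the host in a single
        // FormatBuffers request, while each local buffer queries its language server for
        // formatting edits and applies them in a local transaction.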
1054 let mut local_buffers = Vec::new();
1055 let mut remote_buffers = None;
1056 for buffer_handle in buffers {
1057 let buffer = buffer_handle.read(cx);
1058 let worktree;
1059 if let Some(file) = File::from_dyn(buffer.file()) {
1060 worktree = file.worktree.clone();
1061 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1062 let lang_server;
1063 if let Some(lang) = buffer.language() {
1064 if let Some(server) = self
1065 .language_servers
1066 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1067 {
1068 lang_server = server.clone();
1069 } else {
1070 return Task::ready(Ok(Default::default()));
1071 };
1072 } else {
1073 return Task::ready(Ok(Default::default()));
1074 }
1075
1076 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1077 } else {
1078 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1079 }
1080 } else {
1081 return Task::ready(Ok(Default::default()));
1082 }
1083 }
1084
1085 let remote_buffers = self.remote_id().zip(remote_buffers);
1086 let client = self.client.clone();
1087
1088 cx.spawn(|this, mut cx| async move {
1089 let mut project_transaction = ProjectTransaction::default();
1090
1091 if let Some((project_id, remote_buffers)) = remote_buffers {
1092 let response = client
1093 .request(proto::FormatBuffers {
1094 project_id,
1095 buffer_ids: remote_buffers
1096 .iter()
1097 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1098 .collect(),
1099 })
1100 .await?
1101 .transaction
1102 .ok_or_else(|| anyhow!("missing transaction"))?;
1103 project_transaction = this
1104 .update(&mut cx, |this, cx| {
1105 this.deserialize_project_transaction(response, push_to_history, cx)
1106 })
1107 .await?;
1108 }
1109
1110 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1111 let lsp_edits = lang_server
1112 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1113 text_document: lsp::TextDocumentIdentifier::new(
1114 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1115 ),
1116 options: Default::default(),
1117 work_done_progress_params: Default::default(),
1118 })
1119 .await?;
1120
1121 if let Some(lsp_edits) = lsp_edits {
1122 let edits = buffer
1123 .update(&mut cx, |buffer, cx| {
1124 buffer.edits_from_lsp(lsp_edits, None, cx)
1125 })
1126 .await?;
1127 buffer.update(&mut cx, |buffer, cx| {
1128 buffer.finalize_last_transaction();
1129 buffer.start_transaction();
1130 for (range, text) in edits {
1131 buffer.edit([range], text, cx);
1132 }
1133 if buffer.end_transaction(cx).is_some() {
1134 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1135 if !push_to_history {
1136 buffer.forget_transaction(transaction.id);
1137 }
1138 project_transaction.0.insert(cx.handle(), transaction);
1139 }
1140 });
1141 }
1142 }
1143
1144 Ok(project_transaction)
1145 })
1146 }
1147
1148 pub fn definition<T: ToPointUtf16>(
1149 &self,
1150 source_buffer_handle: &ModelHandle<Buffer>,
1151 position: T,
1152 cx: &mut ModelContext<Self>,
1153 ) -> Task<Result<Vec<Definition>>> {
1154 let source_buffer_handle = source_buffer_handle.clone();
1155 let source_buffer = source_buffer_handle.read(cx);
1156 let worktree;
1157 let buffer_abs_path;
1158 if let Some(file) = File::from_dyn(source_buffer.file()) {
1159 worktree = file.worktree.clone();
1160 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1161 } else {
1162 return Task::ready(Ok(Default::default()));
1163 };
1164
1165 let position = position.to_point_utf16(source_buffer);
1166
1167 if worktree.read(cx).as_local().is_some() {
1168 let buffer_abs_path = buffer_abs_path.unwrap();
1169 let lang_name;
1170 let lang_server;
1171 if let Some(lang) = source_buffer.language() {
1172 lang_name = lang.name().to_string();
1173 if let Some(server) = self
1174 .language_servers
1175 .get(&(worktree.read(cx).id(), lang_name.clone()))
1176 {
1177 lang_server = server.clone();
1178 } else {
1179 return Task::ready(Ok(Default::default()));
1180 };
1181 } else {
1182 return Task::ready(Ok(Default::default()));
1183 }
1184
1185 cx.spawn(|this, mut cx| async move {
1186 let response = lang_server
1187 .request::<lsp::request::GotoDefinition>(lsp::GotoDefinitionParams {
1188 text_document_position_params: lsp::TextDocumentPositionParams {
1189 text_document: lsp::TextDocumentIdentifier::new(
1190 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1191 ),
1192 position: lsp::Position::new(position.row, position.column),
1193 },
1194 work_done_progress_params: Default::default(),
1195 partial_result_params: Default::default(),
1196 })
1197 .await?;
1198
1199 let mut definitions = Vec::new();
1200 if let Some(response) = response {
1201 let mut unresolved_locations = Vec::new();
1202 match response {
1203 lsp::GotoDefinitionResponse::Scalar(loc) => {
1204 unresolved_locations.push((loc.uri, loc.range));
1205 }
1206 lsp::GotoDefinitionResponse::Array(locs) => {
1207 unresolved_locations.extend(locs.into_iter().map(|l| (l.uri, l.range)));
1208 }
1209 lsp::GotoDefinitionResponse::Link(links) => {
1210 unresolved_locations.extend(
1211 links
1212 .into_iter()
1213 .map(|l| (l.target_uri, l.target_selection_range)),
1214 );
1215 }
1216 }
1217
1218 for (target_uri, target_range) in unresolved_locations {
1219 let target_buffer_handle = this
1220 .update(&mut cx, |this, cx| {
1221 this.open_local_buffer_from_lsp_path(
1222 target_uri,
1223 lang_name.clone(),
1224 lang_server.clone(),
1225 cx,
1226 )
1227 })
1228 .await?;
1229
1230 cx.read(|cx| {
1231 let target_buffer = target_buffer_handle.read(cx);
1232 let target_start = target_buffer
1233 .clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
1234 let target_end = target_buffer
1235 .clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
1236 definitions.push(Definition {
1237 target_buffer: target_buffer_handle,
1238 target_range: target_buffer.anchor_after(target_start)
1239 ..target_buffer.anchor_before(target_end),
1240 });
1241 });
1242 }
1243 }
1244
1245 Ok(definitions)
1246 })
1247 } else if let Some(project_id) = self.remote_id() {
1248 let client = self.client.clone();
1249 let request = proto::GetDefinition {
1250 project_id,
1251 buffer_id: source_buffer.remote_id(),
1252 position: Some(serialize_anchor(&source_buffer.anchor_before(position))),
1253 };
1254 cx.spawn(|this, mut cx| async move {
1255 let response = client.request(request).await?;
1256 let mut definitions = Vec::new();
1257 for definition in response.definitions {
1258 let buffer = definition.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1259 let target_buffer = this
1260 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1261 .await?;
1262 let target_start = definition
1263 .target_start
1264 .and_then(deserialize_anchor)
1265 .ok_or_else(|| anyhow!("missing target start"))?;
1266 let target_end = definition
1267 .target_end
1268 .and_then(deserialize_anchor)
1269 .ok_or_else(|| anyhow!("missing target end"))?;
1270 definitions.push(Definition {
1271 target_buffer,
1272 target_range: target_start..target_end,
1273 })
1274 }
1275
1276 Ok(definitions)
1277 })
1278 } else {
1279 Task::ready(Ok(Default::default()))
1280 }
1281 }
1282
1283 pub fn completions<T: ToPointUtf16>(
1284 &self,
1285 source_buffer_handle: &ModelHandle<Buffer>,
1286 position: T,
1287 cx: &mut ModelContext<Self>,
1288 ) -> Task<Result<Vec<Completion>>> {
1289 let source_buffer_handle = source_buffer_handle.clone();
1290 let source_buffer = source_buffer_handle.read(cx);
1291 let buffer_id = source_buffer.remote_id();
1292 let language = source_buffer.language().cloned();
1293 let worktree;
1294 let buffer_abs_path;
1295 if let Some(file) = File::from_dyn(source_buffer.file()) {
1296 worktree = file.worktree.clone();
1297 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1298 } else {
1299 return Task::ready(Ok(Default::default()));
1300 };
1301
1302 let position = position.to_point_utf16(source_buffer);
1303 let anchor = source_buffer.anchor_after(position);
1304
1305 if worktree.read(cx).as_local().is_some() {
1306 let buffer_abs_path = buffer_abs_path.unwrap();
1307 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1308 server
1309 } else {
1310 return Task::ready(Ok(Default::default()));
1311 };
1312
1313 cx.spawn(|_, cx| async move {
1314 let completions = lang_server
1315 .request::<lsp::request::Completion>(lsp::CompletionParams {
1316 text_document_position: lsp::TextDocumentPositionParams::new(
1317 lsp::TextDocumentIdentifier::new(
1318 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1319 ),
1320 position.to_lsp_position(),
1321 ),
1322 context: Default::default(),
1323 work_done_progress_params: Default::default(),
1324 partial_result_params: Default::default(),
1325 })
1326 .await?;
1327
1328 let completions = if let Some(completions) = completions {
1329 match completions {
1330 lsp::CompletionResponse::Array(completions) => completions,
1331 lsp::CompletionResponse::List(list) => list.items,
1332 }
1333 } else {
1334 Default::default()
1335 };
1336
1337 source_buffer_handle.read_with(&cx, |this, _| {
1338 Ok(completions
1339 .into_iter()
1340 .filter_map(|lsp_completion| {
1341 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1342 lsp::CompletionTextEdit::Edit(edit) => {
1343 (range_from_lsp(edit.range), edit.new_text.clone())
1344 }
1345 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1346 log::info!("unsupported insert/replace completion");
1347 return None;
1348 }
1349 };
1350
1351 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1352 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1353 if clipped_start == old_range.start && clipped_end == old_range.end {
1354 Some(Completion {
1355 old_range: this.anchor_before(old_range.start)
1356 ..this.anchor_after(old_range.end),
1357 new_text,
1358 label: language
1359 .as_ref()
1360 .and_then(|l| l.label_for_completion(&lsp_completion))
1361 .unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
1362 lsp_completion,
1363 })
1364 } else {
1365 None
1366 }
1367 })
1368 .collect())
1369 })
1370 })
1371 } else if let Some(project_id) = self.remote_id() {
1372 let rpc = self.client.clone();
1373 let message = proto::GetCompletions {
1374 project_id,
1375 buffer_id,
1376 position: Some(language::proto::serialize_anchor(&anchor)),
1377 version: (&source_buffer.version()).into(),
1378 };
1379 cx.spawn_weak(|_, mut cx| async move {
1380 let response = rpc.request(message).await?;
1381 source_buffer_handle
1382 .update(&mut cx, |buffer, _| {
1383 buffer.wait_for_version(response.version.into())
1384 })
1385 .await;
1386 response
1387 .completions
1388 .into_iter()
1389 .map(|completion| {
1390 language::proto::deserialize_completion(completion, language.as_ref())
1391 })
1392 .collect()
1393 })
1394 } else {
1395 Task::ready(Ok(Default::default()))
1396 }
1397 }
1398
1399 pub fn apply_additional_edits_for_completion(
1400 &self,
1401 buffer_handle: ModelHandle<Buffer>,
1402 completion: Completion,
1403 push_to_history: bool,
1404 cx: &mut ModelContext<Self>,
1405 ) -> Task<Result<Option<Transaction>>> {
1406 let buffer = buffer_handle.read(cx);
1407 let buffer_id = buffer.remote_id();
1408
1409 if self.is_local() {
1410 let lang_server = if let Some(language_server) = buffer.language_server() {
1411 language_server.clone()
1412 } else {
1413 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1414 };
1415
1416 cx.spawn(|_, mut cx| async move {
1417 let resolved_completion = lang_server
1418 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1419 .await?;
1420 if let Some(edits) = resolved_completion.additional_text_edits {
1421 let edits = buffer_handle
1422 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1423 .await?;
1424 buffer_handle.update(&mut cx, |buffer, cx| {
1425 buffer.finalize_last_transaction();
1426 buffer.start_transaction();
1427 for (range, text) in edits {
1428 buffer.edit([range], text, cx);
1429 }
1430 let transaction = if buffer.end_transaction(cx).is_some() {
1431 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1432 if !push_to_history {
1433 buffer.forget_transaction(transaction.id);
1434 }
1435 Some(transaction)
1436 } else {
1437 None
1438 };
1439 Ok(transaction)
1440 })
1441 } else {
1442 Ok(None)
1443 }
1444 })
1445 } else if let Some(project_id) = self.remote_id() {
1446 let client = self.client.clone();
1447 cx.spawn(|_, mut cx| async move {
1448 let response = client
1449 .request(proto::ApplyCompletionAdditionalEdits {
1450 project_id,
1451 buffer_id,
1452 completion: Some(language::proto::serialize_completion(&completion)),
1453 })
1454 .await?;
1455
1456 if let Some(transaction) = response.transaction {
1457 let transaction = language::proto::deserialize_transaction(transaction)?;
1458 buffer_handle
1459 .update(&mut cx, |buffer, _| {
1460 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1461 })
1462 .await;
1463 if push_to_history {
1464 buffer_handle.update(&mut cx, |buffer, _| {
1465 buffer.push_transaction(transaction.clone(), Instant::now());
1466 });
1467 }
1468 Ok(Some(transaction))
1469 } else {
1470 Ok(None)
1471 }
1472 })
1473 } else {
1474 Task::ready(Err(anyhow!("project does not have a remote id")))
1475 }
1476 }
1477
1478 pub fn code_actions<T: ToOffset>(
1479 &self,
1480 buffer_handle: &ModelHandle<Buffer>,
1481 range: Range<T>,
1482 cx: &mut ModelContext<Self>,
1483 ) -> Task<Result<Vec<CodeAction>>> {
1484 let buffer_handle = buffer_handle.clone();
1485 let buffer = buffer_handle.read(cx);
1486 let buffer_id = buffer.remote_id();
1487 let worktree;
1488 let buffer_abs_path;
1489 if let Some(file) = File::from_dyn(buffer.file()) {
1490 worktree = file.worktree.clone();
1491 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1492 } else {
1493 return Task::ready(Ok(Default::default()));
1494 };
1495 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1496
1497 if worktree.read(cx).as_local().is_some() {
1498 let buffer_abs_path = buffer_abs_path.unwrap();
1499 let lang_name;
1500 let lang_server;
1501 if let Some(lang) = buffer.language() {
1502 lang_name = lang.name().to_string();
1503 if let Some(server) = self
1504 .language_servers
1505 .get(&(worktree.read(cx).id(), lang_name.clone()))
1506 {
1507 lang_server = server.clone();
1508 } else {
1509 return Task::ready(Ok(Default::default()));
1510 };
1511 } else {
1512 return Task::ready(Ok(Default::default()));
1513 }
1514
1515 let lsp_range = lsp::Range::new(
1516 range.start.to_point_utf16(buffer).to_lsp_position(),
1517 range.end.to_point_utf16(buffer).to_lsp_position(),
1518 );
1519 cx.foreground().spawn(async move {
1520 Ok(lang_server
1521 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1522 text_document: lsp::TextDocumentIdentifier::new(
1523 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1524 ),
1525 range: lsp_range,
1526 work_done_progress_params: Default::default(),
1527 partial_result_params: Default::default(),
1528 context: lsp::CodeActionContext {
1529 diagnostics: Default::default(),
1530 only: Some(vec![
1531 lsp::CodeActionKind::QUICKFIX,
1532 lsp::CodeActionKind::REFACTOR,
1533 lsp::CodeActionKind::REFACTOR_EXTRACT,
1534 ]),
1535 },
1536 })
1537 .await?
1538 .unwrap_or_default()
1539 .into_iter()
1540 .filter_map(|entry| {
1541 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1542 Some(CodeAction {
1543 range: range.clone(),
1544 lsp_action,
1545 })
1546 } else {
1547 None
1548 }
1549 })
1550 .collect())
1551 })
1552 } else if let Some(project_id) = self.remote_id() {
1553 let rpc = self.client.clone();
1554 cx.foreground().spawn(async move {
1555 let response = rpc
1556 .request(proto::GetCodeActions {
1557 project_id,
1558 buffer_id,
1559 start: Some(language::proto::serialize_anchor(&range.start)),
1560 end: Some(language::proto::serialize_anchor(&range.end)),
1561 })
1562 .await?;
1563 response
1564 .actions
1565 .into_iter()
1566 .map(language::proto::deserialize_code_action)
1567 .collect()
1568 })
1569 } else {
1570 Task::ready(Ok(Default::default()))
1571 }
1572 }
1573
1574 pub fn apply_code_action(
1575 &self,
1576 buffer_handle: ModelHandle<Buffer>,
1577 mut action: CodeAction,
1578 push_to_history: bool,
1579 cx: &mut ModelContext<Self>,
1580 ) -> Task<Result<ProjectTransaction>> {
1581 if self.is_local() {
1582 let buffer = buffer_handle.read(cx);
1583 let lang_name = if let Some(lang) = buffer.language() {
1584 lang.name().to_string()
1585 } else {
1586 return Task::ready(Ok(Default::default()));
1587 };
1588 let lang_server = if let Some(language_server) = buffer.language_server() {
1589 language_server.clone()
1590 } else {
1591 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1592 };
1593 let range = action.range.to_point_utf16(buffer);
1594 let fs = self.fs.clone();
1595
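            // Re-resolve the action first: if the server attached `data`, refresh the
            // range and send a resolve request; otherwise re-request code actions for the
            // range and match the stale action by title. Then apply the workspace edit,
            // collecting one transaction per edited buffer.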
1596 cx.spawn(|this, mut cx| async move {
1597 if let Some(lsp_range) = action
1598 .lsp_action
1599 .data
1600 .as_mut()
1601 .and_then(|d| d.get_mut("codeActionParams"))
1602 .and_then(|d| d.get_mut("range"))
1603 {
1604 *lsp_range = serde_json::to_value(&lsp::Range::new(
1605 range.start.to_lsp_position(),
1606 range.end.to_lsp_position(),
1607 ))
1608 .unwrap();
1609 action.lsp_action = lang_server
1610 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1611 .await?;
1612 } else {
1613 let actions = this
1614 .update(&mut cx, |this, cx| {
1615 this.code_actions(&buffer_handle, action.range, cx)
1616 })
1617 .await?;
1618 action.lsp_action = actions
1619 .into_iter()
1620 .find(|a| a.lsp_action.title == action.lsp_action.title)
1621 .ok_or_else(|| anyhow!("code action is outdated"))?
1622 .lsp_action;
1623 }
1624
1625 let mut operations = Vec::new();
1626 if let Some(edit) = action.lsp_action.edit {
1627 if let Some(document_changes) = edit.document_changes {
1628 match document_changes {
1629 lsp::DocumentChanges::Edits(edits) => operations
1630 .extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit)),
1631 lsp::DocumentChanges::Operations(ops) => operations = ops,
1632 }
1633 } else if let Some(changes) = edit.changes {
1634 operations.extend(changes.into_iter().map(|(uri, edits)| {
1635 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1636 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1637 uri,
1638 version: None,
1639 },
1640 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1641 })
1642 }));
1643 }
1644 }
1645
1646 let mut project_transaction = ProjectTransaction::default();
1647 for operation in operations {
1648 match operation {
1649 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1650 let abs_path = op
1651 .uri
1652 .to_file_path()
1653 .map_err(|_| anyhow!("can't convert URI to path"))?;
1654
1655 if let Some(parent_path) = abs_path.parent() {
1656 fs.create_dir(parent_path).await?;
1657 }
1658 if abs_path.ends_with("/") {
1659 fs.create_dir(&abs_path).await?;
1660 } else {
1661 fs.create_file(
1662 &abs_path,
1663 op.options.map(Into::into).unwrap_or_default(),
1664 )
1665 .await?;
1666 }
1667 }
1668 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1669 let source_abs_path = op
1670 .old_uri
1671 .to_file_path()
1672 .map_err(|_| anyhow!("can't convert URI to path"))?;
1673 let target_abs_path = op
1674 .new_uri
1675 .to_file_path()
1676 .map_err(|_| anyhow!("can't convert URI to path"))?;
1677 fs.rename(
1678 &source_abs_path,
1679 &target_abs_path,
1680 op.options.map(Into::into).unwrap_or_default(),
1681 )
1682 .await?;
1683 }
1684 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1685 let abs_path = op
1686 .uri
1687 .to_file_path()
1688 .map_err(|_| anyhow!("can't convert URI to path"))?;
1689 let options = op.options.map(Into::into).unwrap_or_default();
1690 if abs_path.ends_with("/") {
1691 fs.remove_dir(&abs_path, options).await?;
1692 } else {
1693 fs.remove_file(&abs_path, options).await?;
1694 }
1695 }
1696 lsp::DocumentChangeOperation::Edit(op) => {
1697 let buffer_to_edit = this
1698 .update(&mut cx, |this, cx| {
1699 this.open_local_buffer_from_lsp_path(
1700 op.text_document.uri,
1701 lang_name.clone(),
1702 lang_server.clone(),
1703 cx,
1704 )
1705 })
1706 .await?;
1707
1708 let edits = buffer_to_edit
1709 .update(&mut cx, |buffer, cx| {
1710 let edits = op.edits.into_iter().map(|edit| match edit {
1711 lsp::OneOf::Left(edit) => edit,
1712 lsp::OneOf::Right(edit) => edit.text_edit,
1713 });
1714 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1715 })
1716 .await?;
1717
1718 let transaction = buffer_to_edit.update(&mut cx, |buffer, cx| {
1719 buffer.finalize_last_transaction();
1720 buffer.start_transaction();
1721 for (range, text) in edits {
1722 buffer.edit([range], text, cx);
1723 }
1724 let transaction = if buffer.end_transaction(cx).is_some() {
1725 let transaction =
1726 buffer.finalize_last_transaction().unwrap().clone();
1727 if !push_to_history {
1728 buffer.forget_transaction(transaction.id);
1729 }
1730 Some(transaction)
1731 } else {
1732 None
1733 };
1734
1735 transaction
1736 });
1737 if let Some(transaction) = transaction {
1738 project_transaction.0.insert(buffer_to_edit, transaction);
1739 }
1740 }
1741 }
1742 }
1743
1744 Ok(project_transaction)
1745 })
1746 } else if let Some(project_id) = self.remote_id() {
1747 let client = self.client.clone();
1748 let request = proto::ApplyCodeAction {
1749 project_id,
1750 buffer_id: buffer_handle.read(cx).remote_id(),
1751 action: Some(language::proto::serialize_code_action(&action)),
1752 };
1753 cx.spawn(|this, mut cx| async move {
1754 let response = client
1755 .request(request)
1756 .await?
1757 .transaction
1758 .ok_or_else(|| anyhow!("missing transaction"))?;
1759 this.update(&mut cx, |this, cx| {
1760 this.deserialize_project_transaction(response, push_to_history, cx)
1761 })
1762 .await
1763 })
1764 } else {
1765 Task::ready(Err(anyhow!("project does not have a remote id")))
1766 }
1767 }
1768
1769 pub fn find_or_create_local_worktree(
1770 &self,
1771 abs_path: impl AsRef<Path>,
1772 weak: bool,
1773 cx: &mut ModelContext<Self>,
1774 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1775 let abs_path = abs_path.as_ref();
1776 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1777 Task::ready(Ok((tree.clone(), relative_path.into())))
1778 } else {
1779 let worktree = self.create_local_worktree(abs_path, weak, cx);
1780 cx.foreground()
1781 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1782 }
1783 }
1784
1785 fn find_local_worktree(
1786 &self,
1787 abs_path: &Path,
1788 cx: &AppContext,
1789 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1790 for tree in self.worktrees(cx) {
1791 if let Some(relative_path) = tree
1792 .read(cx)
1793 .as_local()
1794 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1795 {
1796 return Some((tree.clone(), relative_path.into()));
1797 }
1798 }
1799 None
1800 }
1801
1802 pub fn is_shared(&self) -> bool {
1803 match &self.client_state {
1804 ProjectClientState::Local { is_shared, .. } => *is_shared,
1805 ProjectClientState::Remote { .. } => false,
1806 }
1807 }
1808
1809 fn create_local_worktree(
1810 &self,
1811 abs_path: impl AsRef<Path>,
1812 weak: bool,
1813 cx: &mut ModelContext<Self>,
1814 ) -> Task<Result<ModelHandle<Worktree>>> {
1815 let fs = self.fs.clone();
1816 let client = self.client.clone();
1817 let path = Arc::from(abs_path.as_ref());
1818 cx.spawn(|project, mut cx| async move {
1819 let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;
1820
1821 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
1822 project.add_worktree(&worktree, cx);
1823 (project.remote_id(), project.is_shared())
1824 });
1825
1826 if let Some(project_id) = remote_project_id {
1827 worktree
1828 .update(&mut cx, |worktree, cx| {
1829 worktree.as_local_mut().unwrap().register(project_id, cx)
1830 })
1831 .await?;
1832 if is_shared {
1833 worktree
1834 .update(&mut cx, |worktree, cx| {
1835 worktree.as_local_mut().unwrap().share(project_id, cx)
1836 })
1837 .await?;
1838 }
1839 }
1840
1841 Ok(worktree)
1842 })
1843 }
1844
1845 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1846 self.worktrees.retain(|worktree| {
1847 worktree
1848 .upgrade(cx)
1849 .map_or(false, |w| w.read(cx).id() != id)
1850 });
1851 cx.notify();
1852 }
1853
1854 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
1855 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
1856 if worktree.read(cx).is_local() {
1857 cx.subscribe(&worktree, |this, worktree, _, cx| {
1858 this.update_local_worktree_buffers(worktree, cx);
1859 })
1860 .detach();
1861 }
1862
1863 let push_weak_handle = {
1864 let worktree = worktree.read(cx);
1865 worktree.is_local() && worktree.is_weak()
1866 };
1867 if push_weak_handle {
1868 cx.observe_release(&worktree, |this, cx| {
1869 this.worktrees
1870 .retain(|worktree| worktree.upgrade(cx).is_some());
1871 cx.notify();
1872 })
1873 .detach();
1874 self.worktrees
1875 .push(WorktreeHandle::Weak(worktree.downgrade()));
1876 } else {
1877 self.worktrees
1878 .push(WorktreeHandle::Strong(worktree.clone()));
1879 }
1880 cx.notify();
1881 }
1882
1883 fn update_local_worktree_buffers(
1884 &mut self,
1885 worktree_handle: ModelHandle<Worktree>,
1886 cx: &mut ModelContext<Self>,
1887 ) {
1888 let snapshot = worktree_handle.read(cx).snapshot();
1889 let mut buffers_to_delete = Vec::new();
1890 for (buffer_id, buffer) in &self.open_buffers {
1891 if let Some(buffer) = buffer.upgrade(cx) {
1892 buffer.update(cx, |buffer, cx| {
1893 if let Some(old_file) = File::from_dyn(buffer.file()) {
1894 if old_file.worktree != worktree_handle {
1895 return;
1896 }
1897
1898 let new_file = if let Some(entry) = old_file
1899 .entry_id
1900 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
1901 {
1902 File {
1903 is_local: true,
1904 entry_id: Some(entry.id),
1905 mtime: entry.mtime,
1906 path: entry.path.clone(),
1907 worktree: worktree_handle.clone(),
1908 }
1909 } else if let Some(entry) =
1910 snapshot.entry_for_path(old_file.path().as_ref())
1911 {
1912 File {
1913 is_local: true,
1914 entry_id: Some(entry.id),
1915 mtime: entry.mtime,
1916 path: entry.path.clone(),
1917 worktree: worktree_handle.clone(),
1918 }
1919 } else {
1920 File {
1921 is_local: true,
1922 entry_id: None,
1923 path: old_file.path().clone(),
1924 mtime: old_file.mtime(),
1925 worktree: worktree_handle.clone(),
1926 }
1927 };
1928
1929 if let Some(project_id) = self.remote_id() {
1930 self.client
1931 .send(proto::UpdateBufferFile {
1932 project_id,
1933 buffer_id: *buffer_id as u64,
1934 file: Some(new_file.to_proto()),
1935 })
1936 .log_err();
1937 }
1938 buffer.file_updated(Box::new(new_file), cx).detach();
1939 }
1940 });
1941 } else {
1942 buffers_to_delete.push(*buffer_id);
1943 }
1944 }
1945
1946 for buffer_id in buffers_to_delete {
1947 self.open_buffers.remove(&buffer_id);
1948 }
1949 }
1950
1951 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
1952 let new_active_entry = entry.and_then(|project_path| {
1953 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1954 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
1955 Some(ProjectEntry {
1956 worktree_id: project_path.worktree_id,
1957 entry_id: entry.id,
1958 })
1959 });
1960 if new_active_entry != self.active_entry {
1961 self.active_entry = new_active_entry;
1962 cx.emit(Event::ActiveEntryChanged(new_active_entry));
1963 }
1964 }
1965
1966 pub fn is_running_disk_based_diagnostics(&self) -> bool {
1967 self.language_servers_with_diagnostics_running > 0
1968 }
1969
1970 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
1971 let mut summary = DiagnosticSummary::default();
1972 for (_, path_summary) in self.diagnostic_summaries(cx) {
1973 summary.error_count += path_summary.error_count;
1974 summary.warning_count += path_summary.warning_count;
1975 summary.info_count += path_summary.info_count;
1976 summary.hint_count += path_summary.hint_count;
1977 }
1978 summary
1979 }
1980
1981 pub fn diagnostic_summaries<'a>(
1982 &'a self,
1983 cx: &'a AppContext,
1984 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
1985 self.worktrees(cx).flat_map(move |worktree| {
1986 let worktree = worktree.read(cx);
1987 let worktree_id = worktree.id();
1988 worktree
1989 .diagnostic_summaries()
1990 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
1991 })
1992 }
1993
1994 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
1995 self.language_servers_with_diagnostics_running += 1;
1996 if self.language_servers_with_diagnostics_running == 1 {
1997 cx.emit(Event::DiskBasedDiagnosticsStarted);
1998 }
1999 }
2000
2001 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2002 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2003 self.language_servers_with_diagnostics_running -= 1;
2004 if self.language_servers_with_diagnostics_running == 0 {
2005 cx.emit(Event::DiskBasedDiagnosticsFinished);
2006 }
2007 }
2008
2009 pub fn active_entry(&self) -> Option<ProjectEntry> {
2010 self.active_entry
2011 }
2012
2013 // RPC message handlers
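    //
    // Each handler below deserializes an incoming proto message and applies it to the
    // project via the async app context, building a proto response for request/response
    // messages.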
2014
2015 async fn handle_unshare_project(
2016 this: ModelHandle<Self>,
2017 _: TypedEnvelope<proto::UnshareProject>,
2018 _: Arc<Client>,
2019 mut cx: AsyncAppContext,
2020 ) -> Result<()> {
2021 this.update(&mut cx, |this, cx| {
2022 if let ProjectClientState::Remote {
2023 sharing_has_stopped,
2024 ..
2025 } = &mut this.client_state
2026 {
2027 *sharing_has_stopped = true;
2028 this.collaborators.clear();
2029 cx.notify();
2030 } else {
2031 unreachable!()
2032 }
2033 });
2034
2035 Ok(())
2036 }
2037
2038 async fn handle_add_collaborator(
2039 this: ModelHandle<Self>,
2040 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2041 _: Arc<Client>,
2042 mut cx: AsyncAppContext,
2043 ) -> Result<()> {
2044 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2045 let collaborator = envelope
2046 .payload
2047 .collaborator
2048 .take()
2049 .ok_or_else(|| anyhow!("empty collaborator"))?;
2050
2051 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2052 this.update(&mut cx, |this, cx| {
2053 this.collaborators
2054 .insert(collaborator.peer_id, collaborator);
2055 cx.notify();
2056 });
2057
2058 Ok(())
2059 }
2060
2061 async fn handle_remove_collaborator(
2062 this: ModelHandle<Self>,
2063 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2064 _: Arc<Client>,
2065 mut cx: AsyncAppContext,
2066 ) -> Result<()> {
2067 this.update(&mut cx, |this, cx| {
2068 let peer_id = PeerId(envelope.payload.peer_id);
2069 let replica_id = this
2070 .collaborators
2071 .remove(&peer_id)
2072 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2073 .replica_id;
2074 this.shared_buffers.remove(&peer_id);
2075             for buffer in this.open_buffers.values() {
2076 if let Some(buffer) = buffer.upgrade(cx) {
2077 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2078 }
2079 }
2080 cx.notify();
2081 Ok(())
2082 })
2083 }
2084
2085 async fn handle_share_worktree(
2086 this: ModelHandle<Self>,
2087 envelope: TypedEnvelope<proto::ShareWorktree>,
2088 client: Arc<Client>,
2089 mut cx: AsyncAppContext,
2090 ) -> Result<()> {
2091 this.update(&mut cx, |this, cx| {
2092 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2093 let replica_id = this.replica_id();
2094 let worktree = envelope
2095 .payload
2096 .worktree
2097 .ok_or_else(|| anyhow!("invalid worktree"))?;
2098 let (worktree, load_task) =
2099 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2100 this.add_worktree(&worktree, cx);
2101 load_task.detach();
2102 Ok(())
2103 })
2104 }
2105
2106 async fn handle_unregister_worktree(
2107 this: ModelHandle<Self>,
2108 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2109 _: Arc<Client>,
2110 mut cx: AsyncAppContext,
2111 ) -> Result<()> {
2112 this.update(&mut cx, |this, cx| {
2113 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2114 this.remove_worktree(worktree_id, cx);
2115 Ok(())
2116 })
2117 }
2118
2119 async fn handle_update_worktree(
2120 this: ModelHandle<Self>,
2121 envelope: TypedEnvelope<proto::UpdateWorktree>,
2122 _: Arc<Client>,
2123 mut cx: AsyncAppContext,
2124 ) -> Result<()> {
2125 this.update(&mut cx, |this, cx| {
2126 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2127 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2128 worktree.update(cx, |worktree, _| {
2129 let worktree = worktree.as_remote_mut().unwrap();
2130 worktree.update_from_remote(envelope)
2131 })?;
2132 }
2133 Ok(())
2134 })
2135 }
2136
2137 async fn handle_update_diagnostic_summary(
2138 this: ModelHandle<Self>,
2139 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2140 _: Arc<Client>,
2141 mut cx: AsyncAppContext,
2142 ) -> Result<()> {
2143 this.update(&mut cx, |this, cx| {
2144 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2145 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2146 if let Some(summary) = envelope.payload.summary {
2147 let project_path = ProjectPath {
2148 worktree_id,
2149 path: Path::new(&summary.path).into(),
2150 };
2151 worktree.update(cx, |worktree, _| {
2152 worktree
2153 .as_remote_mut()
2154 .unwrap()
2155 .update_diagnostic_summary(project_path.path.clone(), &summary);
2156 });
2157 cx.emit(Event::DiagnosticsUpdated(project_path));
2158 }
2159 }
2160 Ok(())
2161 })
2162 }
2163
2164 async fn handle_disk_based_diagnostics_updating(
2165 this: ModelHandle<Self>,
2166 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2167 _: Arc<Client>,
2168 mut cx: AsyncAppContext,
2169 ) -> Result<()> {
2170 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2171 Ok(())
2172 }
2173
2174 async fn handle_disk_based_diagnostics_updated(
2175 this: ModelHandle<Self>,
2176 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2177 _: Arc<Client>,
2178 mut cx: AsyncAppContext,
2179 ) -> Result<()> {
2180 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2181 Ok(())
2182 }
2183
2184 async fn handle_update_buffer(
2185 this: ModelHandle<Self>,
2186 envelope: TypedEnvelope<proto::UpdateBuffer>,
2187 _: Arc<Client>,
2188 mut cx: AsyncAppContext,
2189 ) -> Result<()> {
2190 this.update(&mut cx, |this, cx| {
2191 let payload = envelope.payload.clone();
2192 let buffer_id = payload.buffer_id;
2193 let ops = payload
2194 .operations
2195 .into_iter()
2196                 .map(language::proto::deserialize_operation)
2197 .collect::<Result<Vec<_>, _>>()?;
2198 let buffer = this
2199 .open_buffers
2200 .entry(buffer_id)
2201 .or_insert_with(|| OpenBuffer::Operations(Vec::new()));
2202 match buffer {
2203 OpenBuffer::Loaded(buffer) => {
2204 if let Some(buffer) = buffer.upgrade(cx) {
2205 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2206 }
2207 }
2208 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
2209 }
2210 Ok(())
2211 })
2212 }
2213
2214 async fn handle_update_buffer_file(
2215 this: ModelHandle<Self>,
2216 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2217 _: Arc<Client>,
2218 mut cx: AsyncAppContext,
2219 ) -> Result<()> {
2220 this.update(&mut cx, |this, cx| {
2221 let payload = envelope.payload.clone();
2222 let buffer_id = payload.buffer_id;
2223 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2224 let worktree = this
2225 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2226 .ok_or_else(|| anyhow!("no such worktree"))?;
2227 let file = File::from_proto(file, worktree.clone(), cx)?;
2228 let buffer = this
2229 .open_buffers
2230 .get_mut(&buffer_id)
2231 .and_then(|b| b.upgrade(cx))
2232 .ok_or_else(|| anyhow!("no such buffer"))?;
2233 buffer.update(cx, |buffer, cx| {
2234 buffer.file_updated(Box::new(file), cx).detach();
2235 });
2236 Ok(())
2237 })
2238 }
2239
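    // Saves a buffer on behalf of a guest, waiting for the buffer to reach the version
    // the guest requested before writing it to disk.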
2240 async fn handle_save_buffer(
2241 this: ModelHandle<Self>,
2242 envelope: TypedEnvelope<proto::SaveBuffer>,
2243 _: Arc<Client>,
2244 mut cx: AsyncAppContext,
2245 ) -> Result<proto::BufferSaved> {
2246 let buffer_id = envelope.payload.buffer_id;
2247 let sender_id = envelope.original_sender_id()?;
2248 let requested_version = envelope.payload.version.try_into()?;
2249
2250 let (project_id, buffer) = this.update(&mut cx, |this, _| {
2251 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2252 let buffer = this
2253 .shared_buffers
2254 .get(&sender_id)
2255 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2256 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2257 Ok::<_, anyhow::Error>((project_id, buffer))
2258 })?;
2259
2260 buffer
2261 .update(&mut cx, |buffer, _| {
2262 buffer.wait_for_version(requested_version)
2263 })
2264 .await;
2265
2266 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2267 Ok(proto::BufferSaved {
2268 project_id,
2269 buffer_id,
2270 version: (&saved_version).into(),
2271 mtime: Some(mtime.into()),
2272 })
2273 }
2274
2275 async fn handle_format_buffers(
2276 this: ModelHandle<Self>,
2277 envelope: TypedEnvelope<proto::FormatBuffers>,
2278 _: Arc<Client>,
2279 mut cx: AsyncAppContext,
2280 ) -> Result<proto::FormatBuffersResponse> {
2281 let sender_id = envelope.original_sender_id()?;
2282 let format = this.update(&mut cx, |this, cx| {
2283 let shared_buffers = this
2284 .shared_buffers
2285 .get(&sender_id)
2286 .ok_or_else(|| anyhow!("peer has no buffers"))?;
2287 let mut buffers = HashSet::default();
2288 for buffer_id in &envelope.payload.buffer_ids {
2289 buffers.insert(
2290 shared_buffers
2291 .get(buffer_id)
2292 .cloned()
2293 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2294 );
2295 }
2296 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2297 })?;
2298
2299 let project_transaction = format.await?;
2300 let project_transaction = this.update(&mut cx, |this, cx| {
2301 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2302 });
2303 Ok(proto::FormatBuffersResponse {
2304 transaction: Some(project_transaction),
2305 })
2306 }
2307
2308 async fn handle_get_completions(
2309 this: ModelHandle<Self>,
2310 envelope: TypedEnvelope<proto::GetCompletions>,
2311 _: Arc<Client>,
2312 mut cx: AsyncAppContext,
2313 ) -> Result<proto::GetCompletionsResponse> {
2314 let sender_id = envelope.original_sender_id()?;
2315 let position = envelope
2316 .payload
2317 .position
2318 .and_then(language::proto::deserialize_anchor)
2319 .ok_or_else(|| anyhow!("invalid position"))?;
2320 let version = clock::Global::from(envelope.payload.version);
2321 let buffer = this.read_with(&cx, |this, _| {
2322 this.shared_buffers
2323 .get(&sender_id)
2324 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2325 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2326 })?;
2327 buffer
2328 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
2329 .await;
2330 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2331 let completions = this
2332 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2333 .await?;
2334
2335 Ok(proto::GetCompletionsResponse {
2336 completions: completions
2337 .iter()
2338 .map(language::proto::serialize_completion)
2339 .collect(),
2340 version: (&version).into(),
2341 })
2342 }
2343
2344 async fn handle_apply_additional_edits_for_completion(
2345 this: ModelHandle<Self>,
2346 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2347 _: Arc<Client>,
2348 mut cx: AsyncAppContext,
2349 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2350 let sender_id = envelope.original_sender_id()?;
2351 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2352 let buffer = this
2353 .shared_buffers
2354 .get(&sender_id)
2355 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2356 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2357 let language = buffer.read(cx).language();
2358 let completion = language::proto::deserialize_completion(
2359 envelope
2360 .payload
2361 .completion
2362 .ok_or_else(|| anyhow!("invalid completion"))?,
2363 language,
2364 )?;
2365 Ok::<_, anyhow::Error>(
2366 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2367 )
2368 })?;
2369
2370 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2371 transaction: apply_additional_edits
2372 .await?
2373 .as_ref()
2374 .map(language::proto::serialize_transaction),
2375 })
2376 }
2377
2378 async fn handle_get_code_actions(
2379 this: ModelHandle<Self>,
2380 envelope: TypedEnvelope<proto::GetCodeActions>,
2381 _: Arc<Client>,
2382 mut cx: AsyncAppContext,
2383 ) -> Result<proto::GetCodeActionsResponse> {
2384 let sender_id = envelope.original_sender_id()?;
2385 let start = envelope
2386 .payload
2387 .start
2388 .and_then(language::proto::deserialize_anchor)
2389 .ok_or_else(|| anyhow!("invalid start"))?;
2390 let end = envelope
2391 .payload
2392 .end
2393 .and_then(language::proto::deserialize_anchor)
2394 .ok_or_else(|| anyhow!("invalid end"))?;
2395 let buffer = this.update(&mut cx, |this, _| {
2396 this.shared_buffers
2397 .get(&sender_id)
2398 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2399 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2400 })?;
2401 buffer
2402 .update(&mut cx, |buffer, _| {
2403 buffer.wait_for_version([start.timestamp, end.timestamp].into_iter().collect())
2404 })
2405 .await;
2406 let code_actions = this.update(&mut cx, |this, cx| {
2407 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2408 })?;
2409
2410 Ok(proto::GetCodeActionsResponse {
2411 actions: code_actions
2412 .await?
2413 .iter()
2414 .map(language::proto::serialize_code_action)
2415 .collect(),
2416 })
2417 }
2418
2419 async fn handle_apply_code_action(
2420 this: ModelHandle<Self>,
2421 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2422 _: Arc<Client>,
2423 mut cx: AsyncAppContext,
2424 ) -> Result<proto::ApplyCodeActionResponse> {
2425 let sender_id = envelope.original_sender_id()?;
2426 let action = language::proto::deserialize_code_action(
2427 envelope
2428 .payload
2429 .action
2430 .ok_or_else(|| anyhow!("invalid action"))?,
2431 )?;
2432 let apply_code_action = this.update(&mut cx, |this, cx| {
2433 let buffer = this
2434 .shared_buffers
2435 .get(&sender_id)
2436 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2437 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2438 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2439 })?;
2440
2441 let project_transaction = apply_code_action.await?;
2442 let project_transaction = this.update(&mut cx, |this, cx| {
2443 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2444 });
2445 Ok(proto::ApplyCodeActionResponse {
2446 transaction: Some(project_transaction),
2447 })
2448 }
2449
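    // Resolves a definition request from a guest, serializing each target buffer for the
    // requesting peer so that it can open the definitions locally.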
2450 async fn handle_get_definition(
2451 this: ModelHandle<Self>,
2452 envelope: TypedEnvelope<proto::GetDefinition>,
2453 _: Arc<Client>,
2454 mut cx: AsyncAppContext,
2455 ) -> Result<proto::GetDefinitionResponse> {
2456 let sender_id = envelope.original_sender_id()?;
2457 let position = envelope
2458 .payload
2459 .position
2460 .and_then(deserialize_anchor)
2461 .ok_or_else(|| anyhow!("invalid position"))?;
2462 let definitions = this.update(&mut cx, |this, cx| {
2463 let source_buffer = this
2464 .shared_buffers
2465 .get(&sender_id)
2466 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2467 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2468 if source_buffer.read(cx).can_resolve(&position) {
2469 Ok(this.definition(&source_buffer, position, cx))
2470 } else {
2471 Err(anyhow!("cannot resolve position"))
2472 }
2473 })?;
2474
2475 let definitions = definitions.await?;
2476
2477 this.update(&mut cx, |this, cx| {
2478 let mut response = proto::GetDefinitionResponse {
2479 definitions: Default::default(),
2480 };
2481 for definition in definitions {
2482 let buffer =
2483 this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx);
2484 response.definitions.push(proto::Definition {
2485 target_start: Some(serialize_anchor(&definition.target_range.start)),
2486 target_end: Some(serialize_anchor(&definition.target_range.end)),
2487 buffer: Some(buffer),
2488 });
2489 }
2490 Ok(response)
2491 })
2492 }
2493
2494 async fn handle_open_buffer(
2495 this: ModelHandle<Self>,
2496 envelope: TypedEnvelope<proto::OpenBuffer>,
2497 _: Arc<Client>,
2498 mut cx: AsyncAppContext,
2499 ) -> anyhow::Result<proto::OpenBufferResponse> {
2500 let peer_id = envelope.original_sender_id()?;
2501 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2502 let open_buffer = this.update(&mut cx, |this, cx| {
2503 this.open_buffer(
2504 ProjectPath {
2505 worktree_id,
2506 path: PathBuf::from(envelope.payload.path).into(),
2507 },
2508 cx,
2509 )
2510 });
2511
2512 let buffer = open_buffer.await?;
2513 this.update(&mut cx, |this, cx| {
2514 Ok(proto::OpenBufferResponse {
2515 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
2516 })
2517 })
2518 }
2519
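    // Converts a project transaction into its protobuf representation for a given peer,
    // serializing each affected buffer for that peer alongside its transaction.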
2520 fn serialize_project_transaction_for_peer(
2521 &mut self,
2522 project_transaction: ProjectTransaction,
2523 peer_id: PeerId,
2524 cx: &AppContext,
2525 ) -> proto::ProjectTransaction {
2526 let mut serialized_transaction = proto::ProjectTransaction {
2527 buffers: Default::default(),
2528 transactions: Default::default(),
2529 };
2530 for (buffer, transaction) in project_transaction.0 {
2531 serialized_transaction
2532 .buffers
2533 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
2534 serialized_transaction
2535 .transactions
2536 .push(language::proto::serialize_transaction(&transaction));
2537 }
2538 serialized_transaction
2539 }
2540
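    // Reconstructs a project transaction received from a peer: each referenced buffer is
    // resolved locally, the transaction's edits are awaited, and the transaction is
    // optionally pushed onto the buffer's undo history.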
2541 fn deserialize_project_transaction(
2542 &mut self,
2543 message: proto::ProjectTransaction,
2544 push_to_history: bool,
2545 cx: &mut ModelContext<Self>,
2546 ) -> Task<Result<ProjectTransaction>> {
2547 cx.spawn(|this, mut cx| async move {
2548 let mut project_transaction = ProjectTransaction::default();
2549 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
2550 let buffer = this
2551 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2552 .await?;
2553 let transaction = language::proto::deserialize_transaction(transaction)?;
2554 project_transaction.0.insert(buffer, transaction);
2555 }
2556 for (buffer, transaction) in &project_transaction.0 {
2557 buffer
2558 .update(&mut cx, |buffer, _| {
2559 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2560 })
2561 .await;
2562
2563 if push_to_history {
2564 buffer.update(&mut cx, |buffer, _| {
2565 buffer.push_transaction(transaction.clone(), Instant::now());
2566 });
2567 }
2568 }
2569
2570 Ok(project_transaction)
2571 })
2572 }
2573
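    // The first time a buffer is sent to a given peer, its full state is serialized and
    // the buffer is recorded in `shared_buffers`; afterwards, the buffer is referred to
    // by id only.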
2574 fn serialize_buffer_for_peer(
2575 &mut self,
2576 buffer: &ModelHandle<Buffer>,
2577 peer_id: PeerId,
2578 cx: &AppContext,
2579 ) -> proto::Buffer {
2580 let buffer_id = buffer.read(cx).remote_id();
2581 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2582 match shared_buffers.entry(buffer_id) {
2583 hash_map::Entry::Occupied(_) => proto::Buffer {
2584 variant: Some(proto::buffer::Variant::Id(buffer_id)),
2585 },
2586 hash_map::Entry::Vacant(entry) => {
2587 entry.insert(buffer.clone());
2588 proto::Buffer {
2589 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
2590 }
2591 }
2592 }
2593 }
2594
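    // Resolves a proto::Buffer into a local buffer handle. An `Id` variant refers to a
    // buffer this project already knows about, while a `State` variant carries the full
    // buffer contents and is used to construct and register a new buffer.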
2595 fn deserialize_buffer(
2596 &mut self,
2597 buffer: proto::Buffer,
2598 cx: &mut ModelContext<Self>,
2599 ) -> Task<Result<ModelHandle<Buffer>>> {
2600 let replica_id = self.replica_id();
2601
2602 let mut opened_buffer_tx = self.opened_buffer.clone();
2603 let mut opened_buffer_rx = self.opened_buffer.subscribe();
2604 cx.spawn(|this, mut cx| async move {
2605 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
2606 proto::buffer::Variant::Id(id) => {
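                    // Wait until the referenced buffer appears in `open_buffers`, waking
                    // whenever any buffer finishes opening.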
2607 let buffer = loop {
2608 let buffer = this.read_with(&cx, |this, cx| {
2609 this.open_buffers
2610 .get(&id)
2611 .and_then(|buffer| buffer.upgrade(cx))
2612 });
2613 if let Some(buffer) = buffer {
2614 break buffer;
2615 }
2616 opened_buffer_rx
2617 .recv()
2618 .await
2619 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
2620 };
2621 Ok(buffer)
2622 }
2623 proto::buffer::Variant::State(mut buffer) => {
2624 let mut buffer_worktree = None;
2625 let mut buffer_file = None;
2626 if let Some(file) = buffer.file.take() {
2627 this.read_with(&cx, |this, cx| {
2628 let worktree_id = WorktreeId::from_proto(file.worktree_id);
2629 let worktree =
2630 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
2631 anyhow!("no worktree found for id {}", file.worktree_id)
2632 })?;
2633 buffer_file =
2634 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
2635 as Box<dyn language::File>);
2636 buffer_worktree = Some(worktree);
2637 Ok::<_, anyhow::Error>(())
2638 })?;
2639 }
2640
2641 let buffer = cx.add_model(|cx| {
2642 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
2643 });
2644 this.update(&mut cx, |this, cx| {
2645 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
2646 })?;
2647
2648 let _ = opened_buffer_tx.send(()).await;
2649 Ok(buffer)
2650 }
2651 }
2652 })
2653 }
2654
2655 async fn handle_close_buffer(
2656 this: ModelHandle<Self>,
2657 envelope: TypedEnvelope<proto::CloseBuffer>,
2658 _: Arc<Client>,
2659 mut cx: AsyncAppContext,
2660 ) -> anyhow::Result<()> {
2661 this.update(&mut cx, |this, cx| {
2662 if let Some(shared_buffers) =
2663 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
2664 {
2665 shared_buffers.remove(&envelope.payload.buffer_id);
2666 cx.notify();
2667 }
2668 Ok(())
2669 })
2670 }
2671
2672 async fn handle_buffer_saved(
2673 this: ModelHandle<Self>,
2674 envelope: TypedEnvelope<proto::BufferSaved>,
2675 _: Arc<Client>,
2676 mut cx: AsyncAppContext,
2677 ) -> Result<()> {
2678 let version = envelope.payload.version.try_into()?;
2679 let mtime = envelope
2680 .payload
2681 .mtime
2682 .ok_or_else(|| anyhow!("missing mtime"))?
2683 .into();
2684
2685 this.update(&mut cx, |this, cx| {
2686 let buffer = this
2687 .open_buffers
2688 .get(&envelope.payload.buffer_id)
2689 .and_then(|buffer| buffer.upgrade(cx));
2690 if let Some(buffer) = buffer {
2691 buffer.update(cx, |buffer, cx| {
2692 buffer.did_save(version, mtime, None, cx);
2693 });
2694 }
2695 Ok(())
2696 })
2697 }
2698
2699 async fn handle_buffer_reloaded(
2700 this: ModelHandle<Self>,
2701 envelope: TypedEnvelope<proto::BufferReloaded>,
2702 _: Arc<Client>,
2703 mut cx: AsyncAppContext,
2704 ) -> Result<()> {
2705 let payload = envelope.payload.clone();
2706 let version = payload.version.try_into()?;
2707 let mtime = payload
2708 .mtime
2709 .ok_or_else(|| anyhow!("missing mtime"))?
2710 .into();
2711 this.update(&mut cx, |this, cx| {
2712 let buffer = this
2713 .open_buffers
2714 .get(&payload.buffer_id)
2715 .and_then(|buffer| buffer.upgrade(cx));
2716 if let Some(buffer) = buffer {
2717 buffer.update(cx, |buffer, cx| {
2718 buffer.did_reload(version, mtime, cx);
2719 });
2720 }
2721 Ok(())
2722 })
2723 }
2724
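    // Fuzzy-matches `query` against file paths across all non-weak worktrees. Snapshots
    // are captured up front so the matching itself can run on the background executor.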
2725 pub fn match_paths<'a>(
2726 &self,
2727 query: &'a str,
2728 include_ignored: bool,
2729 smart_case: bool,
2730 max_results: usize,
2731 cancel_flag: &'a AtomicBool,
2732 cx: &AppContext,
2733 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
2734 let worktrees = self
2735 .worktrees(cx)
2736 .filter(|worktree| !worktree.read(cx).is_weak())
2737 .collect::<Vec<_>>();
2738 let include_root_name = worktrees.len() > 1;
2739 let candidate_sets = worktrees
2740 .into_iter()
2741 .map(|worktree| CandidateSet {
2742 snapshot: worktree.read(cx).snapshot(),
2743 include_ignored,
2744 include_root_name,
2745 })
2746 .collect::<Vec<_>>();
2747
2748 let background = cx.background().clone();
2749 async move {
2750 fuzzy::match_paths(
2751 candidate_sets.as_slice(),
2752 query,
2753 smart_case,
2754 max_results,
2755 cancel_flag,
2756 background,
2757 )
2758 .await
2759 }
2760 }
2761}
2762
2763impl WorktreeHandle {
2764 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2765 match self {
2766 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2767 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2768 }
2769 }
2770}
2771
2772impl OpenBuffer {
2773 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
2774 match self {
2775 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
2776 OpenBuffer::Operations(_) => None,
2777 }
2778 }
2779}
2780
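// Adapts a worktree snapshot into a set of path-match candidates for the fuzzy matcher,
// optionally including ignored files and prefixing candidates with the worktree's root name.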
2781struct CandidateSet {
2782 snapshot: Snapshot,
2783 include_ignored: bool,
2784 include_root_name: bool,
2785}
2786
2787impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
2788 type Candidates = CandidateSetIter<'a>;
2789
2790 fn id(&self) -> usize {
2791 self.snapshot.id().to_usize()
2792 }
2793
2794 fn len(&self) -> usize {
2795 if self.include_ignored {
2796 self.snapshot.file_count()
2797 } else {
2798 self.snapshot.visible_file_count()
2799 }
2800 }
2801
2802 fn prefix(&self) -> Arc<str> {
2803 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
2804 self.snapshot.root_name().into()
2805 } else if self.include_root_name {
2806 format!("{}/", self.snapshot.root_name()).into()
2807 } else {
2808 "".into()
2809 }
2810 }
2811
2812 fn candidates(&'a self, start: usize) -> Self::Candidates {
2813 CandidateSetIter {
2814 traversal: self.snapshot.files(self.include_ignored, start),
2815 }
2816 }
2817}
2818
2819struct CandidateSetIter<'a> {
2820 traversal: Traversal<'a>,
2821}
2822
2823impl<'a> Iterator for CandidateSetIter<'a> {
2824 type Item = PathMatchCandidate<'a>;
2825
2826 fn next(&mut self) -> Option<Self::Item> {
2827 self.traversal.next().map(|entry| {
2828 if let EntryKind::File(char_bag) = entry.kind {
2829 PathMatchCandidate {
2830 path: &entry.path,
2831 char_bag,
2832 }
2833 } else {
2834 unreachable!()
2835 }
2836 })
2837 }
2838}
2839
2840impl Entity for Project {
2841 type Event = Event;
2842
2843 fn release(&mut self, _: &mut gpui::MutableAppContext) {
2844 match &self.client_state {
2845 ProjectClientState::Local { remote_id_rx, .. } => {
2846 if let Some(project_id) = *remote_id_rx.borrow() {
2847 self.client
2848 .send(proto::UnregisterProject { project_id })
2849 .log_err();
2850 }
2851 }
2852 ProjectClientState::Remote { remote_id, .. } => {
2853 self.client
2854 .send(proto::LeaveProject {
2855 project_id: *remote_id,
2856 })
2857 .log_err();
2858 }
2859 }
2860 }
2861
2862 fn app_will_quit(
2863 &mut self,
2864 _: &mut MutableAppContext,
2865 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
2866 use futures::FutureExt;
2867
2868 let shutdown_futures = self
2869 .language_servers
2870 .drain()
2871 .filter_map(|(_, server)| server.shutdown())
2872 .collect::<Vec<_>>();
2873 Some(
2874 async move {
2875 futures::future::join_all(shutdown_futures).await;
2876 }
2877 .boxed(),
2878 )
2879 }
2880}
2881
2882impl Collaborator {
2883 fn from_proto(
2884 message: proto::Collaborator,
2885 user_store: &ModelHandle<UserStore>,
2886 cx: &mut AsyncAppContext,
2887 ) -> impl Future<Output = Result<Self>> {
2888 let user = user_store.update(cx, |user_store, cx| {
2889 user_store.fetch_user(message.user_id, cx)
2890 });
2891
2892 async move {
2893 Ok(Self {
2894 peer_id: PeerId(message.peer_id),
2895 user: user.await?,
2896 replica_id: message.replica_id as ReplicaId,
2897 })
2898 }
2899 }
2900}
2901
2902impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
2903 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
2904 Self {
2905 worktree_id,
2906 path: path.as_ref().into(),
2907 }
2908 }
2909}
2910
2911impl From<lsp::CreateFileOptions> for fs::CreateOptions {
2912 fn from(options: lsp::CreateFileOptions) -> Self {
2913 Self {
2914 overwrite: options.overwrite.unwrap_or(false),
2915 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2916 }
2917 }
2918}
2919
2920impl From<lsp::RenameFileOptions> for fs::RenameOptions {
2921 fn from(options: lsp::RenameFileOptions) -> Self {
2922 Self {
2923 overwrite: options.overwrite.unwrap_or(false),
2924 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2925 }
2926 }
2927}
2928
2929impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
2930 fn from(options: lsp::DeleteFileOptions) -> Self {
2931 Self {
2932 recursive: options.recursive.unwrap_or(false),
2933 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
2934 }
2935 }
2936}
2937
2938#[cfg(test)]
2939mod tests {
2940 use super::{Event, *};
2941 use client::test::FakeHttpClient;
2942 use fs::RealFs;
2943 use futures::StreamExt;
2944 use gpui::test::subscribe;
2945 use language::{
2946 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageRegistry,
2947 LanguageServerConfig, Point,
2948 };
2949 use lsp::Url;
2950 use serde_json::json;
2951 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
2952 use unindent::Unindent as _;
2953 use util::test::temp_tree;
2954 use worktree::WorktreeHandle as _;
2955
2956 #[gpui::test]
2957 async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
2958 let dir = temp_tree(json!({
2959 "root": {
2960 "apple": "",
2961 "banana": {
2962 "carrot": {
2963 "date": "",
2964 "endive": "",
2965 }
2966 },
2967 "fennel": {
2968 "grape": "",
2969 }
2970 }
2971 }));
2972
2973 let root_link_path = dir.path().join("root_link");
2974 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
2975 unix::fs::symlink(
2976 &dir.path().join("root/fennel"),
2977 &dir.path().join("root/finnochio"),
2978 )
2979 .unwrap();
2980
2981 let project = Project::test(Arc::new(RealFs), &mut cx);
2982
2983 let (tree, _) = project
2984 .update(&mut cx, |project, cx| {
2985 project.find_or_create_local_worktree(&root_link_path, false, cx)
2986 })
2987 .await
2988 .unwrap();
2989
2990 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2991 .await;
2992 cx.read(|cx| {
2993 let tree = tree.read(cx);
2994 assert_eq!(tree.file_count(), 5);
2995 assert_eq!(
2996 tree.inode_for_path("fennel/grape"),
2997 tree.inode_for_path("finnochio/grape")
2998 );
2999 });
3000
3001 let cancel_flag = Default::default();
3002 let results = project
3003 .read_with(&cx, |project, cx| {
3004 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3005 })
3006 .await;
3007 assert_eq!(
3008 results
3009 .into_iter()
3010 .map(|result| result.path)
3011 .collect::<Vec<Arc<Path>>>(),
3012 vec![
3013 PathBuf::from("banana/carrot/date").into(),
3014 PathBuf::from("banana/carrot/endive").into(),
3015 ]
3016 );
3017 }
3018
3019 #[gpui::test]
3020 async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
3021 let (language_server_config, mut fake_server) = LanguageServerConfig::fake(&cx).await;
3022 let progress_token = language_server_config
3023 .disk_based_diagnostics_progress_token
3024 .clone()
3025 .unwrap();
3026
3027 let mut languages = LanguageRegistry::new();
3028 languages.add(Arc::new(Language::new(
3029 LanguageConfig {
3030 name: "Rust".to_string(),
3031 path_suffixes: vec!["rs".to_string()],
3032 language_server: Some(language_server_config),
3033 ..Default::default()
3034 },
3035 Some(tree_sitter_rust::language()),
3036 )));
3037
3038 let dir = temp_tree(json!({
3039 "a.rs": "fn a() { A }",
3040 "b.rs": "const y: i32 = 1",
3041 }));
3042
3043 let http_client = FakeHttpClient::with_404_response();
3044 let client = Client::new(http_client.clone());
3045 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
3046
3047 let project = cx.update(|cx| {
3048 Project::local(
3049 client,
3050 user_store,
3051 Arc::new(languages),
3052 Arc::new(RealFs),
3053 cx,
3054 )
3055 });
3056
3057 let (tree, _) = project
3058 .update(&mut cx, |project, cx| {
3059 project.find_or_create_local_worktree(dir.path(), false, cx)
3060 })
3061 .await
3062 .unwrap();
3063 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3064
3065 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3066 .await;
3067
3068 // Cause worktree to start the fake language server
3069 let _buffer = project
3070 .update(&mut cx, |project, cx| {
3071 project.open_buffer(
3072 ProjectPath {
3073 worktree_id,
3074 path: Path::new("b.rs").into(),
3075 },
3076 cx,
3077 )
3078 })
3079 .await
3080 .unwrap();
3081
3082 let mut events = subscribe(&project, &mut cx);
3083
3084 fake_server.start_progress(&progress_token).await;
3085 assert_eq!(
3086 events.next().await.unwrap(),
3087 Event::DiskBasedDiagnosticsStarted
3088 );
3089
3090 fake_server.start_progress(&progress_token).await;
3091 fake_server.end_progress(&progress_token).await;
3092 fake_server.start_progress(&progress_token).await;
3093
3094 fake_server
3095 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3096 uri: Url::from_file_path(dir.path().join("a.rs")).unwrap(),
3097 version: None,
3098 diagnostics: vec![lsp::Diagnostic {
3099 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3100 severity: Some(lsp::DiagnosticSeverity::ERROR),
3101 message: "undefined variable 'A'".to_string(),
3102 ..Default::default()
3103 }],
3104 })
3105 .await;
3106 assert_eq!(
3107 events.next().await.unwrap(),
3108 Event::DiagnosticsUpdated(ProjectPath {
3109 worktree_id,
3110 path: Arc::from(Path::new("a.rs"))
3111 })
3112 );
3113
3114 fake_server.end_progress(&progress_token).await;
3115 fake_server.end_progress(&progress_token).await;
3116 assert_eq!(
3117 events.next().await.unwrap(),
3118 Event::DiskBasedDiagnosticsUpdated
3119 );
3120 assert_eq!(
3121 events.next().await.unwrap(),
3122 Event::DiskBasedDiagnosticsFinished
3123 );
3124
3125 let buffer = project
3126 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3127 .await
3128 .unwrap();
3129
3130 buffer.read_with(&cx, |buffer, _| {
3131 let snapshot = buffer.snapshot();
3132 let diagnostics = snapshot
3133 .diagnostics_in_range::<_, Point>(0..buffer.len())
3134 .collect::<Vec<_>>();
3135 assert_eq!(
3136 diagnostics,
3137 &[DiagnosticEntry {
3138 range: Point::new(0, 9)..Point::new(0, 10),
3139 diagnostic: Diagnostic {
3140 severity: lsp::DiagnosticSeverity::ERROR,
3141 message: "undefined variable 'A'".to_string(),
3142 group_id: 0,
3143 is_primary: true,
3144 ..Default::default()
3145 }
3146 }]
3147 )
3148 });
3149 }
3150
3151 #[gpui::test]
3152 async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
3153 let dir = temp_tree(json!({
3154 "root": {
3155 "dir1": {},
3156 "dir2": {
3157 "dir3": {}
3158 }
3159 }
3160 }));
3161
3162 let project = Project::test(Arc::new(RealFs), &mut cx);
3163 let (tree, _) = project
3164 .update(&mut cx, |project, cx| {
3165 project.find_or_create_local_worktree(&dir.path(), false, cx)
3166 })
3167 .await
3168 .unwrap();
3169
3170 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3171 .await;
3172
3173 let cancel_flag = Default::default();
3174 let results = project
3175 .read_with(&cx, |project, cx| {
3176 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3177 })
3178 .await;
3179
3180 assert!(results.is_empty());
3181 }
3182
3183 #[gpui::test]
3184 async fn test_definition(mut cx: gpui::TestAppContext) {
3185 let (language_server_config, mut fake_server) = LanguageServerConfig::fake(&cx).await;
3186
3187 let mut languages = LanguageRegistry::new();
3188 languages.add(Arc::new(Language::new(
3189 LanguageConfig {
3190 name: "Rust".to_string(),
3191 path_suffixes: vec!["rs".to_string()],
3192 language_server: Some(language_server_config),
3193 ..Default::default()
3194 },
3195 Some(tree_sitter_rust::language()),
3196 )));
3197
3198 let dir = temp_tree(json!({
3199 "a.rs": "const fn a() { A }",
3200 "b.rs": "const y: i32 = crate::a()",
3201 }));
3202 let dir_path = dir.path().to_path_buf();
3203
3204 let http_client = FakeHttpClient::with_404_response();
3205 let client = Client::new(http_client.clone());
3206 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
3207 let project = cx.update(|cx| {
3208 Project::local(
3209 client,
3210 user_store,
3211 Arc::new(languages),
3212 Arc::new(RealFs),
3213 cx,
3214 )
3215 });
3216
3217 let (tree, _) = project
3218 .update(&mut cx, |project, cx| {
3219 project.find_or_create_local_worktree(dir.path().join("b.rs"), false, cx)
3220 })
3221 .await
3222 .unwrap();
3223 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3224 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3225 .await;
3226
3227 let buffer = project
3228 .update(&mut cx, |project, cx| {
3229 project.open_buffer(
3230 ProjectPath {
3231 worktree_id,
3232 path: Path::new("").into(),
3233 },
3234 cx,
3235 )
3236 })
3237 .await
3238 .unwrap();
3239
3240 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params| {
3241 let params = params.text_document_position_params;
3242 assert_eq!(
3243 params.text_document.uri.to_file_path().unwrap(),
3244 dir_path.join("b.rs")
3245 );
3246 assert_eq!(params.position, lsp::Position::new(0, 22));
3247
3248 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3249 lsp::Url::from_file_path(dir_path.join("a.rs")).unwrap(),
3250 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3251 )))
3252 });
3253
3254 let mut definitions = project
3255 .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
3256 .await
3257 .unwrap();
3258
3259 assert_eq!(definitions.len(), 1);
3260 let definition = definitions.pop().unwrap();
3261 cx.update(|cx| {
3262 let target_buffer = definition.target_buffer.read(cx);
3263 assert_eq!(
3264 target_buffer
3265 .file()
3266 .unwrap()
3267 .as_local()
3268 .unwrap()
3269 .abs_path(cx),
3270 dir.path().join("a.rs")
3271 );
3272 assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
3273 assert_eq!(
3274 list_worktrees(&project, cx),
3275 [
3276 (dir.path().join("b.rs"), false),
3277 (dir.path().join("a.rs"), true)
3278 ]
3279 );
3280
3281 drop(definition);
3282 });
3283 cx.read(|cx| {
3284 assert_eq!(
3285 list_worktrees(&project, cx),
3286 [(dir.path().join("b.rs"), false)]
3287 );
3288 });
3289
3290 fn list_worktrees(project: &ModelHandle<Project>, cx: &AppContext) -> Vec<(PathBuf, bool)> {
3291 project
3292 .read(cx)
3293 .worktrees(cx)
3294 .map(|worktree| {
3295 let worktree = worktree.read(cx);
3296 (
3297 worktree.as_local().unwrap().abs_path().to_path_buf(),
3298 worktree.is_weak(),
3299 )
3300 })
3301 .collect::<Vec<_>>()
3302 }
3303 }
3304
3305 #[gpui::test]
3306 async fn test_save_file(mut cx: gpui::TestAppContext) {
3307 let fs = Arc::new(FakeFs::new(cx.background()));
3308 fs.insert_tree(
3309 "/dir",
3310 json!({
3311 "file1": "the old contents",
3312 }),
3313 )
3314 .await;
3315
3316 let project = Project::test(fs.clone(), &mut cx);
3317 let worktree_id = project
3318 .update(&mut cx, |p, cx| {
3319 p.find_or_create_local_worktree("/dir", false, cx)
3320 })
3321 .await
3322 .unwrap()
3323 .0
3324 .read_with(&cx, |tree, _| tree.id());
3325
3326 let buffer = project
3327 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3328 .await
3329 .unwrap();
3330 buffer
3331 .update(&mut cx, |buffer, cx| {
3332 assert_eq!(buffer.text(), "the old contents");
3333 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3334 buffer.save(cx)
3335 })
3336 .await
3337 .unwrap();
3338
3339 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3340 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3341 }
3342
3343 #[gpui::test]
3344 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
3345 let fs = Arc::new(FakeFs::new(cx.background()));
3346 fs.insert_tree(
3347 "/dir",
3348 json!({
3349 "file1": "the old contents",
3350 }),
3351 )
3352 .await;
3353
3354 let project = Project::test(fs.clone(), &mut cx);
3355 let worktree_id = project
3356 .update(&mut cx, |p, cx| {
3357 p.find_or_create_local_worktree("/dir/file1", false, cx)
3358 })
3359 .await
3360 .unwrap()
3361 .0
3362 .read_with(&cx, |tree, _| tree.id());
3363
3364 let buffer = project
3365 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3366 .await
3367 .unwrap();
3368 buffer
3369 .update(&mut cx, |buffer, cx| {
3370 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3371 buffer.save(cx)
3372 })
3373 .await
3374 .unwrap();
3375
3376 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3377 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3378 }
3379
3380 #[gpui::test(retries = 5)]
3381 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3382 let dir = temp_tree(json!({
3383 "a": {
3384 "file1": "",
3385 "file2": "",
3386 "file3": "",
3387 },
3388 "b": {
3389 "c": {
3390 "file4": "",
3391 "file5": "",
3392 }
3393 }
3394 }));
3395
3396 let project = Project::test(Arc::new(RealFs), &mut cx);
3397 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3398
3399 let (tree, _) = project
3400 .update(&mut cx, |p, cx| {
3401 p.find_or_create_local_worktree(dir.path(), false, cx)
3402 })
3403 .await
3404 .unwrap();
3405 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3406
3407 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3408 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3409 async move { buffer.await.unwrap() }
3410 };
3411 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3412 tree.read_with(cx, |tree, _| {
3413 tree.entry_for_path(path)
3414 .expect(&format!("no entry for path {}", path))
3415 .id
3416 })
3417 };
3418
3419 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3420 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3421 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3422 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3423
3424 let file2_id = id_for_path("a/file2", &cx);
3425 let file3_id = id_for_path("a/file3", &cx);
3426 let file4_id = id_for_path("b/c/file4", &cx);
3427
3428 // Wait for the initial scan.
3429 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3430 .await;
3431
3432 // Create a remote copy of this worktree.
3433 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
3434 let (remote, load_task) = cx.update(|cx| {
3435 Worktree::remote(
3436 1,
3437 1,
3438 initial_snapshot.to_proto(&Default::default(), Default::default()),
3439 rpc.clone(),
3440 cx,
3441 )
3442 });
3443 load_task.await;
3444
3445 cx.read(|cx| {
3446 assert!(!buffer2.read(cx).is_dirty());
3447 assert!(!buffer3.read(cx).is_dirty());
3448 assert!(!buffer4.read(cx).is_dirty());
3449 assert!(!buffer5.read(cx).is_dirty());
3450 });
3451
3452 // Rename and delete files and directories.
3453 tree.flush_fs_events(&cx).await;
3454 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3455 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3456 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3457 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3458 tree.flush_fs_events(&cx).await;
3459
3460 let expected_paths = vec![
3461 "a",
3462 "a/file1",
3463 "a/file2.new",
3464 "b",
3465 "d",
3466 "d/file3",
3467 "d/file4",
3468 ];
3469
3470 cx.read(|app| {
3471 assert_eq!(
3472 tree.read(app)
3473 .paths()
3474 .map(|p| p.to_str().unwrap())
3475 .collect::<Vec<_>>(),
3476 expected_paths
3477 );
3478
3479 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3480 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3481 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3482
3483 assert_eq!(
3484 buffer2.read(app).file().unwrap().path().as_ref(),
3485 Path::new("a/file2.new")
3486 );
3487 assert_eq!(
3488 buffer3.read(app).file().unwrap().path().as_ref(),
3489 Path::new("d/file3")
3490 );
3491 assert_eq!(
3492 buffer4.read(app).file().unwrap().path().as_ref(),
3493 Path::new("d/file4")
3494 );
3495 assert_eq!(
3496 buffer5.read(app).file().unwrap().path().as_ref(),
3497 Path::new("b/c/file5")
3498 );
3499
3500 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3501 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3502 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3503 assert!(buffer5.read(app).file().unwrap().is_deleted());
3504 });
3505
3506 // Update the remote worktree. Check that it becomes consistent with the
3507 // local worktree.
3508 remote.update(&mut cx, |remote, cx| {
3509 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
3510 &initial_snapshot,
3511 1,
3512 1,
3513 0,
3514 true,
3515 );
3516 remote
3517 .as_remote_mut()
3518 .unwrap()
3519 .snapshot
3520 .apply_remote_update(update_message)
3521 .unwrap();
3522
3523 assert_eq!(
3524 remote
3525 .paths()
3526 .map(|p| p.to_str().unwrap())
3527 .collect::<Vec<_>>(),
3528 expected_paths
3529 );
3530 });
3531 }
3532
3533 #[gpui::test]
3534 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3535 let fs = Arc::new(FakeFs::new(cx.background()));
3536 fs.insert_tree(
3537 "/the-dir",
3538 json!({
3539 "a.txt": "a-contents",
3540 "b.txt": "b-contents",
3541 }),
3542 )
3543 .await;
3544
3545 let project = Project::test(fs.clone(), &mut cx);
3546 let worktree_id = project
3547 .update(&mut cx, |p, cx| {
3548 p.find_or_create_local_worktree("/the-dir", false, cx)
3549 })
3550 .await
3551 .unwrap()
3552 .0
3553 .read_with(&cx, |tree, _| tree.id());
3554
3555 // Spawn multiple tasks to open paths, repeating some paths.
3556 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3557 (
3558 p.open_buffer((worktree_id, "a.txt"), cx),
3559 p.open_buffer((worktree_id, "b.txt"), cx),
3560 p.open_buffer((worktree_id, "a.txt"), cx),
3561 )
3562 });
3563
3564 let buffer_a_1 = buffer_a_1.await.unwrap();
3565 let buffer_a_2 = buffer_a_2.await.unwrap();
3566 let buffer_b = buffer_b.await.unwrap();
3567 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3568 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3569
3570 // There is only one buffer per path.
3571 let buffer_a_id = buffer_a_1.id();
3572 assert_eq!(buffer_a_2.id(), buffer_a_id);
3573
3574 // Open the same path again while it is still open.
3575 drop(buffer_a_1);
3576 let buffer_a_3 = project
3577 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3578 .await
3579 .unwrap();
3580
3581 // There's still only one buffer per path.
3582 assert_eq!(buffer_a_3.id(), buffer_a_id);
3583 }
3584
3585 #[gpui::test]
3586 async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
3587 use std::fs;
3588
3589 let dir = temp_tree(json!({
3590 "file1": "abc",
3591 "file2": "def",
3592 "file3": "ghi",
3593 }));
3594
3595 let project = Project::test(Arc::new(RealFs), &mut cx);
3596 let (worktree, _) = project
3597 .update(&mut cx, |p, cx| {
3598 p.find_or_create_local_worktree(dir.path(), false, cx)
3599 })
3600 .await
3601 .unwrap();
3602 let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
3603
3604 worktree.flush_fs_events(&cx).await;
3605 worktree
3606 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3607 .await;
3608
3609 let buffer1 = project
3610 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3611 .await
3612 .unwrap();
3613 let events = Rc::new(RefCell::new(Vec::new()));
3614
3615 // initially, the buffer isn't dirty.
3616 buffer1.update(&mut cx, |buffer, cx| {
3617 cx.subscribe(&buffer1, {
3618 let events = events.clone();
3619 move |_, _, event, _| events.borrow_mut().push(event.clone())
3620 })
3621 .detach();
3622
3623 assert!(!buffer.is_dirty());
3624 assert!(events.borrow().is_empty());
3625
3626 buffer.edit(vec![1..2], "", cx);
3627 });
3628
3629 // after the first edit, the buffer is dirty, and emits a dirtied event.
3630 buffer1.update(&mut cx, |buffer, cx| {
3631             assert_eq!(buffer.text(), "ac");
3632 assert!(buffer.is_dirty());
3633 assert_eq!(
3634 *events.borrow(),
3635 &[language::Event::Edited, language::Event::Dirtied]
3636 );
3637 events.borrow_mut().clear();
3638 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
3639 });
3640
3641 // after saving, the buffer is not dirty, and emits a saved event.
3642 buffer1.update(&mut cx, |buffer, cx| {
3643 assert!(!buffer.is_dirty());
3644 assert_eq!(*events.borrow(), &[language::Event::Saved]);
3645 events.borrow_mut().clear();
3646
3647 buffer.edit(vec![1..1], "B", cx);
3648 buffer.edit(vec![2..2], "D", cx);
3649 });
3650
3651 // after editing again, the buffer is dirty, and emits another dirty event.
3652 buffer1.update(&mut cx, |buffer, cx| {
3653             assert_eq!(buffer.text(), "aBDc");
3654 assert!(buffer.is_dirty());
3655 assert_eq!(
3656 *events.borrow(),
3657 &[
3658 language::Event::Edited,
3659 language::Event::Dirtied,
3660 language::Event::Edited,
3661 ],
3662 );
3663 events.borrow_mut().clear();
3664
3665             // TODO - currently, after restoring the buffer to its
3666             // previously-saved state, the buffer is still considered dirty.
3667 buffer.edit([1..3], "", cx);
3668             assert_eq!(buffer.text(), "ac");
3669 assert!(buffer.is_dirty());
3670 });
3671
3672 assert_eq!(*events.borrow(), &[language::Event::Edited]);
3673
3674 // When a file is deleted, the buffer is considered dirty.
3675 let events = Rc::new(RefCell::new(Vec::new()));
3676 let buffer2 = project
3677 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
3678 .await
3679 .unwrap();
3680 buffer2.update(&mut cx, |_, cx| {
3681 cx.subscribe(&buffer2, {
3682 let events = events.clone();
3683 move |_, _, event, _| events.borrow_mut().push(event.clone())
3684 })
3685 .detach();
3686 });
3687
3688 fs::remove_file(dir.path().join("file2")).unwrap();
3689 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
3690 assert_eq!(
3691 *events.borrow(),
3692 &[language::Event::Dirtied, language::Event::FileHandleChanged]
3693 );
3694
3695         // When a file that is already dirty is deleted, no additional Dirtied event is emitted.
3696 let events = Rc::new(RefCell::new(Vec::new()));
3697 let buffer3 = project
3698 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
3699 .await
3700 .unwrap();
3701 buffer3.update(&mut cx, |_, cx| {
3702 cx.subscribe(&buffer3, {
3703 let events = events.clone();
3704 move |_, _, event, _| events.borrow_mut().push(event.clone())
3705 })
3706 .detach();
3707 });
3708
3709 worktree.flush_fs_events(&cx).await;
3710 buffer3.update(&mut cx, |buffer, cx| {
3711 buffer.edit(Some(0..0), "x", cx);
3712 });
3713 events.borrow_mut().clear();
3714 fs::remove_file(dir.path().join("file3")).unwrap();
3715 buffer3
3716 .condition(&cx, |_, _| !events.borrow().is_empty())
3717 .await;
3718 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
3719 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3720 }
3721
3722 #[gpui::test]
3723 async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
3724 use std::fs;
3725
3726 let initial_contents = "aaa\nbbbbb\nc\n";
3727 let dir = temp_tree(json!({ "the-file": initial_contents }));
3728
3729 let project = Project::test(Arc::new(RealFs), &mut cx);
3730 let (worktree, _) = project
3731 .update(&mut cx, |p, cx| {
3732 p.find_or_create_local_worktree(dir.path(), false, cx)
3733 })
3734 .await
3735 .unwrap();
3736 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
3737
        worktree
            .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let abs_path = dir.path().join("the-file");
        let buffer = project
            .update(&mut cx, |p, cx| {
                p.open_buffer((worktree_id, "the-file"), cx)
            })
            .await
            .unwrap();

        // TODO
        // Add a cursor on each row.
        // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
        //     assert!(!buffer.is_dirty());
        //     buffer.add_selection_set(
        //         &(0..3)
        //             .map(|row| Selection {
        //                 id: row as usize,
        //                 start: Point::new(row, 1),
        //                 end: Point::new(row, 1),
        //                 reversed: false,
        //                 goal: SelectionGoal::None,
        //             })
        //             .collect::<Vec<_>>(),
        //         cx,
        //     )
        // });

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(&cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(&mut cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer
        buffer.update(&mut cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }

    #[gpui::test]
    async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
        let fs = Arc::new(FakeFs::new(cx.background()));
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

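        // Build a `publishDiagnostics` message in which hint-severity diagnostics and
        // their primary diagnostics reference one another via `relatedInformation`,
        // so the project should group them together.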
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

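        // Feed the message to the project as though a language server had published it.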
        project
            .update(&mut cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());

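        // All diagnostics appear in the buffer, and related hints share a group id with
        // their primary diagnostic.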
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

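        // Requesting an individual group returns its primary diagnostic along with the
        // related hint entries.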
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
}