1pub mod fs;
2mod ignore;
3pub mod worktree;
4
5use anyhow::{anyhow, Result};
6use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
7use clock::ReplicaId;
8use collections::{hash_map, HashMap, HashSet};
9use futures::Future;
10use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
11use gpui::{
12 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
13 WeakModelHandle,
14};
15use language::{
16 point_from_lsp,
17 proto::{deserialize_anchor, serialize_anchor},
18 range_from_lsp, Bias, Buffer, CodeAction, Completion, CompletionLabel, Diagnostic,
19 DiagnosticEntry, File as _, Language, LanguageRegistry, PointUtf16, ToLspPosition,
20 ToPointUtf16, Transaction,
21};
22use lsp::{DiagnosticSeverity, LanguageServer};
23use postage::{prelude::Stream, watch};
24use smol::block_on;
25use std::{
26 convert::TryInto,
27 ops::Range,
28 path::{Path, PathBuf},
29 sync::{atomic::AtomicBool, Arc},
30 time::Instant,
31};
32use util::{post_inc, ResultExt, TryFutureExt as _};
33
34pub use fs::*;
35pub use worktree::*;
36
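/// The state for a single open project: the worktrees being edited, the
/// buffers that are currently open, the language servers started for those
/// worktrees, and the collaboration state shared over the RPC `client`. A
/// project is either local (backed by this machine's file system) or remote
/// (a replica of a project hosted by another peer).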
37pub struct Project {
38 worktrees: Vec<WorktreeHandle>,
39 active_entry: Option<ProjectEntry>,
40 languages: Arc<LanguageRegistry>,
41 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
42 client: Arc<client::Client>,
43 user_store: ModelHandle<UserStore>,
44 fs: Arc<dyn Fs>,
45 client_state: ProjectClientState,
46 collaborators: HashMap<PeerId, Collaborator>,
47 subscriptions: Vec<client::Subscription>,
48 language_servers_with_diagnostics_running: isize,
49 open_buffers: HashMap<usize, WeakModelHandle<Buffer>>,
50 loading_buffers: HashMap<
51 ProjectPath,
52 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
53 >,
54 shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
55}
56
57enum WorktreeHandle {
58 Strong(ModelHandle<Worktree>),
59 Weak(WeakModelHandle<Worktree>),
60}
61
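/// Per-project client state. `Local` tracks whether the project is currently
/// shared and the remote id assigned by the server, if any; `Remote` records
/// the host's project id, this peer's replica id, and whether the host has
/// stopped sharing.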
62enum ProjectClientState {
63 Local {
64 is_shared: bool,
65 remote_id_tx: watch::Sender<Option<u64>>,
66 remote_id_rx: watch::Receiver<Option<u64>>,
67 _maintain_remote_id_task: Task<Option<()>>,
68 },
69 Remote {
70 sharing_has_stopped: bool,
71 remote_id: u64,
72 replica_id: ReplicaId,
73 },
74}
75
76#[derive(Clone, Debug)]
77pub struct Collaborator {
78 pub user: Arc<User>,
79 pub peer_id: PeerId,
80 pub replica_id: ReplicaId,
81}
82
83#[derive(Clone, Debug, PartialEq)]
84pub enum Event {
85 ActiveEntryChanged(Option<ProjectEntry>),
86 WorktreeRemoved(WorktreeId),
87 DiskBasedDiagnosticsStarted,
88 DiskBasedDiagnosticsUpdated,
89 DiskBasedDiagnosticsFinished,
90 DiagnosticsUpdated(ProjectPath),
91}
92
93#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
94pub struct ProjectPath {
95 pub worktree_id: WorktreeId,
96 pub path: Arc<Path>,
97}
98
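/// Aggregate diagnostic counts, bucketed by severity. Only primary
/// diagnostics are counted; supporting entries that merely point back at a
/// primary diagnostic are skipped.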
99#[derive(Clone, Debug, Default, PartialEq)]
100pub struct DiagnosticSummary {
101 pub error_count: usize,
102 pub warning_count: usize,
103 pub info_count: usize,
104 pub hint_count: usize,
105}
106
107#[derive(Debug)]
108pub struct Definition {
109 pub target_buffer: ModelHandle<Buffer>,
110 pub target_range: Range<language::Anchor>,
111}
112
113#[derive(Default)]
114pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
115
116impl DiagnosticSummary {
117 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
118 let mut this = Self {
119 error_count: 0,
120 warning_count: 0,
121 info_count: 0,
122 hint_count: 0,
123 };
124
125 for entry in diagnostics {
126 if entry.diagnostic.is_primary {
127 match entry.diagnostic.severity {
128 DiagnosticSeverity::ERROR => this.error_count += 1,
129 DiagnosticSeverity::WARNING => this.warning_count += 1,
130 DiagnosticSeverity::INFORMATION => this.info_count += 1,
131 DiagnosticSeverity::HINT => this.hint_count += 1,
132 _ => {}
133 }
134 }
135 }
136
137 this
138 }
139
140 pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
141 proto::DiagnosticSummary {
142 path: path.to_string_lossy().to_string(),
143 error_count: self.error_count as u32,
144 warning_count: self.warning_count as u32,
145 info_count: self.info_count as u32,
146 hint_count: self.hint_count as u32,
147 }
148 }
149}
150
151#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
152pub struct ProjectEntry {
153 pub worktree_id: WorktreeId,
154 pub entry_id: usize,
155}
156
157impl Project {
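    /// Creates a project backed by the local file system.
    ///
    /// The returned model owns a background task that registers the project
    /// with the server whenever the client (re)connects, so the project can
    /// later be shared with collaborators.
    ///
    /// A rough usage sketch, assuming `client`, `user_store`, `languages`,
    /// and `fs` handles have already been constructed and `cx` is a
    /// `&mut MutableAppContext`:
    ///
    /// ```ignore
    /// let project = Project::local(client, user_store, languages, fs, cx);
    /// ```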
158 pub fn local(
159 client: Arc<Client>,
160 user_store: ModelHandle<UserStore>,
161 languages: Arc<LanguageRegistry>,
162 fs: Arc<dyn Fs>,
163 cx: &mut MutableAppContext,
164 ) -> ModelHandle<Self> {
165 cx.add_model(|cx: &mut ModelContext<Self>| {
166 let (remote_id_tx, remote_id_rx) = watch::channel();
167 let _maintain_remote_id_task = cx.spawn_weak({
168 let rpc = client.clone();
169 move |this, mut cx| {
170 async move {
171 let mut status = rpc.status();
172 while let Some(status) = status.recv().await {
173 if let Some(this) = this.upgrade(&cx) {
174 let remote_id = if let client::Status::Connected { .. } = status {
175 let response = rpc.request(proto::RegisterProject {}).await?;
176 Some(response.project_id)
177 } else {
178 None
179 };
180
181 if let Some(project_id) = remote_id {
182 let mut registrations = Vec::new();
183 this.update(&mut cx, |this, cx| {
184 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
185 registrations.push(worktree.update(
186 cx,
187 |worktree, cx| {
188 let worktree = worktree.as_local_mut().unwrap();
189 worktree.register(project_id, cx)
190 },
191 ));
192 }
193 });
194 for registration in registrations {
195 registration.await?;
196 }
197 }
198 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
199 }
200 }
201 Ok(())
202 }
203 .log_err()
204 }
205 });
206
207 Self {
208 worktrees: Default::default(),
209 collaborators: Default::default(),
210 open_buffers: Default::default(),
211 loading_buffers: Default::default(),
212 shared_buffers: Default::default(),
213 client_state: ProjectClientState::Local {
214 is_shared: false,
215 remote_id_tx,
216 remote_id_rx,
217 _maintain_remote_id_task,
218 },
219 subscriptions: Vec::new(),
220 active_entry: None,
221 languages,
222 client,
223 user_store,
224 fs,
225 language_servers_with_diagnostics_running: 0,
226 language_servers: Default::default(),
227 }
228 })
229 }
230
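    /// Joins a project that another peer is sharing.
    ///
    /// Authenticates with the server, joins the project identified by
    /// `remote_id`, mirrors the host's worktrees and collaborators locally,
    /// and registers message handlers so that subsequent updates from the
    /// host keep this replica in sync.
    ///
    /// A rough sketch, assuming `project_id` was obtained out of band (for
    /// example, from a collaboration invite):
    ///
    /// ```ignore
    /// let project =
    ///     Project::remote(project_id, client, user_store, languages, fs, &mut cx).await?;
    /// ```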
231 pub async fn remote(
232 remote_id: u64,
233 client: Arc<Client>,
234 user_store: ModelHandle<UserStore>,
235 languages: Arc<LanguageRegistry>,
236 fs: Arc<dyn Fs>,
237 cx: &mut AsyncAppContext,
238 ) -> Result<ModelHandle<Self>> {
239 client.authenticate_and_connect(&cx).await?;
240
241 let response = client
242 .request(proto::JoinProject {
243 project_id: remote_id,
244 })
245 .await?;
246
247 let replica_id = response.replica_id as ReplicaId;
248
249 let mut worktrees = Vec::new();
250 for worktree in response.worktrees {
251 let (worktree, load_task) = cx
252 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
253 worktrees.push(worktree);
254 load_task.detach();
255 }
256
257 let user_ids = response
258 .collaborators
259 .iter()
260 .map(|peer| peer.user_id)
261 .collect();
262 user_store
263 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
264 .await?;
265 let mut collaborators = HashMap::default();
266 for message in response.collaborators {
267 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
268 collaborators.insert(collaborator.peer_id, collaborator);
269 }
270
271 Ok(cx.add_model(|cx| {
272 let mut this = Self {
273 worktrees: Vec::new(),
274 open_buffers: Default::default(),
275 loading_buffers: Default::default(),
276 shared_buffers: Default::default(),
277 active_entry: None,
278 collaborators,
279 languages,
280 user_store,
281 fs,
282 subscriptions: vec![
283 client.add_entity_message_handler(remote_id, cx, Self::handle_unshare_project),
284 client.add_entity_message_handler(remote_id, cx, Self::handle_add_collaborator),
285 client.add_entity_message_handler(
286 remote_id,
287 cx,
288 Self::handle_remove_collaborator,
289 ),
290 client.add_entity_message_handler(remote_id, cx, Self::handle_share_worktree),
291 client.add_entity_message_handler(
292 remote_id,
293 cx,
294 Self::handle_unregister_worktree,
295 ),
296 client.add_entity_message_handler(remote_id, cx, Self::handle_update_worktree),
297 client.add_entity_message_handler(
298 remote_id,
299 cx,
300 Self::handle_update_diagnostic_summary,
301 ),
302 client.add_entity_message_handler(
303 remote_id,
304 cx,
305 Self::handle_disk_based_diagnostics_updating,
306 ),
307 client.add_entity_message_handler(
308 remote_id,
309 cx,
310 Self::handle_disk_based_diagnostics_updated,
311 ),
312 client.add_entity_message_handler(remote_id, cx, Self::handle_update_buffer),
313 client.add_entity_message_handler(
314 remote_id,
315 cx,
316 Self::handle_update_buffer_file,
317 ),
318 client.add_entity_message_handler(remote_id, cx, Self::handle_buffer_reloaded),
319 client.add_entity_message_handler(remote_id, cx, Self::handle_buffer_saved),
320 ],
321 client,
322 client_state: ProjectClientState::Remote {
323 sharing_has_stopped: false,
324 remote_id,
325 replica_id,
326 },
327 language_servers_with_diagnostics_running: 0,
328 language_servers: Default::default(),
329 };
330 for worktree in worktrees {
331 this.add_worktree(&worktree, cx);
332 }
333 this
334 }))
335 }
336
337 #[cfg(any(test, feature = "test-support"))]
338 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
339 let languages = Arc::new(LanguageRegistry::new());
340 let http_client = client::test::FakeHttpClient::with_404_response();
341 let client = client::Client::new(http_client.clone());
342 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
343 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
344 }
345
346 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
347 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
348 *remote_id_tx.borrow_mut() = remote_id;
349 }
350
351 self.subscriptions.clear();
352 if let Some(remote_id) = remote_id {
353 let client = &self.client;
354 self.subscriptions.extend([
355 client.add_entity_request_handler(remote_id, cx, Self::handle_open_buffer),
356 client.add_entity_message_handler(remote_id, cx, Self::handle_close_buffer),
357 client.add_entity_message_handler(remote_id, cx, Self::handle_add_collaborator),
358 client.add_entity_message_handler(remote_id, cx, Self::handle_remove_collaborator),
359 client.add_entity_message_handler(remote_id, cx, Self::handle_update_worktree),
360 client.add_entity_message_handler(remote_id, cx, Self::handle_update_buffer),
361 client.add_entity_request_handler(remote_id, cx, Self::handle_save_buffer),
362 client.add_entity_message_handler(remote_id, cx, Self::handle_buffer_saved),
363 client.add_entity_request_handler(remote_id, cx, Self::handle_format_buffers),
364 client.add_entity_request_handler(remote_id, cx, Self::handle_get_completions),
365 client.add_entity_request_handler(
366 remote_id,
367 cx,
368 Self::handle_apply_additional_edits_for_completion,
369 ),
370 client.add_entity_request_handler(remote_id, cx, Self::handle_get_code_actions),
371 client.add_entity_request_handler(remote_id, cx, Self::handle_apply_code_action),
372 client.add_entity_request_handler(remote_id, cx, Self::handle_get_definition),
373 ]);
374 }
375 }
376
377 pub fn remote_id(&self) -> Option<u64> {
378 match &self.client_state {
379 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
380 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
381 }
382 }
383
384 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
385 let mut id = None;
386 let mut watch = None;
387 match &self.client_state {
388 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
389 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
390 }
391
392 async move {
393 if let Some(id) = id {
394 return id;
395 }
396 let mut watch = watch.unwrap();
397 loop {
398 let id = *watch.borrow();
399 if let Some(id) = id {
400 return id;
401 }
402 watch.recv().await;
403 }
404 }
405 }
406
407 pub fn replica_id(&self) -> ReplicaId {
408 match &self.client_state {
409 ProjectClientState::Local { .. } => 0,
410 ProjectClientState::Remote { replica_id, .. } => *replica_id,
411 }
412 }
413
414 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
415 &self.collaborators
416 }
417
418 pub fn worktrees<'a>(
419 &'a self,
420 cx: &'a AppContext,
421 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
422 self.worktrees
423 .iter()
424 .filter_map(move |worktree| worktree.upgrade(cx))
425 }
426
427 pub fn worktree_for_id(
428 &self,
429 id: WorktreeId,
430 cx: &AppContext,
431 ) -> Option<ModelHandle<Worktree>> {
432 self.worktrees(cx)
433 .find(|worktree| worktree.read(cx).id() == id)
434 }
435
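    /// Makes a local project visible to collaborators by marking it as shared
    /// and sharing every worktree under the project's remote id. The returned
    /// task fails if the project has not been assigned a remote id yet, or if
    /// this is a remote project, which cannot be re-shared from this side.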
436 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
437 let rpc = self.client.clone();
438 cx.spawn(|this, mut cx| async move {
439 let project_id = this.update(&mut cx, |this, _| {
440 if let ProjectClientState::Local {
441 is_shared,
442 remote_id_rx,
443 ..
444 } = &mut this.client_state
445 {
446 *is_shared = true;
447 remote_id_rx
448 .borrow()
449 .ok_or_else(|| anyhow!("no project id"))
450 } else {
451 Err(anyhow!("can't share a remote project"))
452 }
453 })?;
454
455 rpc.request(proto::ShareProject { project_id }).await?;
456 let mut tasks = Vec::new();
457 this.update(&mut cx, |this, cx| {
458 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
459 worktree.update(cx, |worktree, cx| {
460 let worktree = worktree.as_local_mut().unwrap();
461 tasks.push(worktree.share(project_id, cx));
462 });
463 }
464 });
465 for task in tasks {
466 task.await?;
467 }
468 this.update(&mut cx, |_, cx| cx.notify());
469 Ok(())
470 })
471 }
472
473 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
474 let rpc = self.client.clone();
475 cx.spawn(|this, mut cx| async move {
476 let project_id = this.update(&mut cx, |this, _| {
477 if let ProjectClientState::Local {
478 is_shared,
479 remote_id_rx,
480 ..
481 } = &mut this.client_state
482 {
483 *is_shared = false;
484 remote_id_rx
485 .borrow()
486 .ok_or_else(|| anyhow!("no project id"))
487 } else {
488 Err(anyhow!("can't share a remote project"))
489 }
490 })?;
491
492 rpc.send(proto::UnshareProject { project_id })?;
493 this.update(&mut cx, |this, cx| {
494 this.collaborators.clear();
495 this.shared_buffers.clear();
496 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
497 worktree.update(cx, |worktree, _| {
498 worktree.as_local_mut().unwrap().unshare();
499 });
500 }
501 cx.notify()
502 });
503 Ok(())
504 })
505 }
506
507 pub fn is_read_only(&self) -> bool {
508 match &self.client_state {
509 ProjectClientState::Local { .. } => false,
510 ProjectClientState::Remote {
511 sharing_has_stopped,
512 ..
513 } => *sharing_has_stopped,
514 }
515 }
516
517 pub fn is_local(&self) -> bool {
518 match &self.client_state {
519 ProjectClientState::Local { .. } => true,
520 ProjectClientState::Remote { .. } => false,
521 }
522 }
523
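    /// Opens the buffer at the given project path, reusing an already-open
    /// buffer when possible and deduplicating concurrent loads: if the same
    /// path is requested while a load is in flight, every caller receives the
    /// same buffer once it finishes.
    ///
    /// A rough usage sketch; the worktree id and relative path are
    /// placeholders:
    ///
    /// ```ignore
    /// let path = ProjectPath {
    ///     worktree_id,
    ///     path: Path::new("src/main.rs").into(),
    /// };
    /// let buffer = project
    ///     .update(cx, |project, cx| project.open_buffer(path, cx))
    ///     .await?;
    /// ```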
524 pub fn open_buffer(
525 &mut self,
526 path: impl Into<ProjectPath>,
527 cx: &mut ModelContext<Self>,
528 ) -> Task<Result<ModelHandle<Buffer>>> {
529 let project_path = path.into();
530 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
531 worktree
532 } else {
533 return Task::ready(Err(anyhow!("no such worktree")));
534 };
535
536 // If there is already a buffer for the given path, then return it.
537 let existing_buffer = self.get_open_buffer(&project_path, cx);
538 if let Some(existing_buffer) = existing_buffer {
539 return Task::ready(Ok(existing_buffer));
540 }
541
542 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
543 // If the given path is already being loaded, then wait for that existing
544 // task to complete and return the same buffer.
545 hash_map::Entry::Occupied(e) => e.get().clone(),
546
547 // Otherwise, record the fact that this path is now being loaded.
548 hash_map::Entry::Vacant(entry) => {
549 let (mut tx, rx) = postage::watch::channel();
550 entry.insert(rx.clone());
551
552 let load_buffer = if worktree.read(cx).is_local() {
553 self.open_local_buffer(&project_path.path, &worktree, cx)
554 } else {
555 self.open_remote_buffer(&project_path.path, &worktree, cx)
556 };
557
558 cx.spawn(move |this, mut cx| async move {
559 let load_result = load_buffer.await;
560 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
561 // Record the fact that the buffer is no longer loading.
562 this.loading_buffers.remove(&project_path);
563 let buffer = load_result.map_err(Arc::new)?;
564 Ok(buffer)
565 }));
566 })
567 .detach();
568 rx
569 }
570 };
571
572 cx.foreground().spawn(async move {
573 loop {
574 if let Some(result) = loading_watch.borrow().as_ref() {
575 match result {
576 Ok(buffer) => return Ok(buffer.clone()),
577 Err(error) => return Err(anyhow!("{}", error)),
578 }
579 }
580 loading_watch.recv().await;
581 }
582 })
583 }
584
585 fn open_local_buffer(
586 &mut self,
587 path: &Arc<Path>,
588 worktree: &ModelHandle<Worktree>,
589 cx: &mut ModelContext<Self>,
590 ) -> Task<Result<ModelHandle<Buffer>>> {
591 let load_buffer = worktree.update(cx, |worktree, cx| {
592 let worktree = worktree.as_local_mut().unwrap();
593 worktree.load_buffer(path, cx)
594 });
595 let worktree = worktree.downgrade();
596 cx.spawn(|this, mut cx| async move {
597 let buffer = load_buffer.await?;
598 let worktree = worktree
599 .upgrade(&cx)
600 .ok_or_else(|| anyhow!("worktree was removed"))?;
601 this.update(&mut cx, |this, cx| {
602 this.register_buffer(&buffer, Some(&worktree), cx)
603 })?;
604 Ok(buffer)
605 })
606 }
607
608 fn open_remote_buffer(
609 &mut self,
610 path: &Arc<Path>,
611 worktree: &ModelHandle<Worktree>,
612 cx: &mut ModelContext<Self>,
613 ) -> Task<Result<ModelHandle<Buffer>>> {
614 let rpc = self.client.clone();
615 let project_id = self.remote_id().unwrap();
616 let remote_worktree_id = worktree.read(cx).id();
617 let path = path.clone();
618 let path_string = path.to_string_lossy().to_string();
619 cx.spawn(|this, mut cx| async move {
620 let response = rpc
621 .request(proto::OpenBuffer {
622 project_id,
623 worktree_id: remote_worktree_id.to_proto(),
624 path: path_string,
625 })
626 .await?;
627 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
628 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
629 })
630 }
631
632 fn open_local_buffer_from_lsp_path(
633 &mut self,
634 abs_path: lsp::Url,
635 lang_name: String,
636 lang_server: Arc<LanguageServer>,
637 cx: &mut ModelContext<Self>,
638 ) -> Task<Result<ModelHandle<Buffer>>> {
639 cx.spawn(|this, mut cx| async move {
640 let abs_path = abs_path
641 .to_file_path()
642 .map_err(|_| anyhow!("can't convert URI to path"))?;
643 let (worktree, relative_path) = if let Some(result) =
644 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
645 {
646 result
647 } else {
648 let worktree = this
649 .update(&mut cx, |this, cx| {
650 this.create_local_worktree(&abs_path, true, cx)
651 })
652 .await?;
653 this.update(&mut cx, |this, cx| {
654 this.language_servers
655 .insert((worktree.read(cx).id(), lang_name), lang_server);
656 });
657 (worktree, PathBuf::new())
658 };
659
660 let project_path = ProjectPath {
661 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
662 path: relative_path.into(),
663 };
664 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
665 .await
666 })
667 }
668
669 pub fn save_buffer_as(
670 &self,
671 buffer: ModelHandle<Buffer>,
672 abs_path: PathBuf,
673 cx: &mut ModelContext<Project>,
674 ) -> Task<Result<()>> {
675 let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
676 cx.spawn(|this, mut cx| async move {
677 let (worktree, path) = worktree_task.await?;
678 worktree
679 .update(&mut cx, |worktree, cx| {
680 worktree
681 .as_local_mut()
682 .unwrap()
683 .save_buffer_as(buffer.clone(), path, cx)
684 })
685 .await?;
686 this.update(&mut cx, |this, cx| {
687 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
688 });
689 Ok(())
690 })
691 }
692
693 #[cfg(any(test, feature = "test-support"))]
694 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
695 let path = path.into();
696 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
697 self.open_buffers.iter().any(|(_, buffer)| {
698 if let Some(buffer) = buffer.upgrade(cx) {
699 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
700 if file.worktree == worktree && file.path() == &path.path {
701 return true;
702 }
703 }
704 }
705 false
706 })
707 } else {
708 false
709 }
710 }
711
712 fn get_open_buffer(
713 &mut self,
714 path: &ProjectPath,
715 cx: &mut ModelContext<Self>,
716 ) -> Option<ModelHandle<Buffer>> {
717 let mut result = None;
718 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
719 self.open_buffers.retain(|_, buffer| {
720 if let Some(buffer) = buffer.upgrade(cx) {
721 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
722 if file.worktree == worktree && file.path() == &path.path {
723 result = Some(buffer);
724 }
725 }
726 true
727 } else {
728 false
729 }
730 });
731 result
732 }
733
734 fn register_buffer(
735 &mut self,
736 buffer: &ModelHandle<Buffer>,
737 worktree: Option<&ModelHandle<Worktree>>,
738 cx: &mut ModelContext<Self>,
739 ) -> Result<()> {
740 if self
741 .open_buffers
742 .insert(buffer.read(cx).remote_id() as usize, buffer.downgrade())
743 .is_some()
744 {
745 return Err(anyhow!("registered the same buffer twice"));
746 }
747 self.assign_language_to_buffer(&buffer, worktree, cx);
748 Ok(())
749 }
750
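    /// Detects the buffer's language from its file path and, for local
    /// worktrees, ensures a language server is running for that
    /// (worktree, language) pair, assigning the server and any previously
    /// reported diagnostics to the buffer. The `Option` return value only
    /// exists to allow early exit via `?` when the buffer has no file; it
    /// carries no meaning.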
751 fn assign_language_to_buffer(
752 &mut self,
753 buffer: &ModelHandle<Buffer>,
754 worktree: Option<&ModelHandle<Worktree>>,
755 cx: &mut ModelContext<Self>,
756 ) -> Option<()> {
757 let (path, full_path) = {
758 let file = buffer.read(cx).file()?;
759 (file.path().clone(), file.full_path(cx))
760 };
761
762 // If the buffer has a language, set it and start/assign the language server
763 if let Some(language) = self.languages.select_language(&full_path) {
764 buffer.update(cx, |buffer, cx| {
765 buffer.set_language(Some(language.clone()), cx);
766 });
767
768 // For local worktrees, start a language server if needed.
769 // Also assign the language server and any previously stored diagnostics to the buffer.
770 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
771 let worktree_id = local_worktree.id();
772 let worktree_abs_path = local_worktree.abs_path().clone();
773
774 let language_server = match self
775 .language_servers
776 .entry((worktree_id, language.name().to_string()))
777 {
778 hash_map::Entry::Occupied(e) => Some(e.get().clone()),
779 hash_map::Entry::Vacant(e) => Self::start_language_server(
780 self.client.clone(),
781 language.clone(),
782 &worktree_abs_path,
783 cx,
784 )
785 .map(|server| e.insert(server).clone()),
786 };
787
788 buffer.update(cx, |buffer, cx| {
789 buffer.set_language_server(language_server, cx);
790 });
791 }
792 }
793
794 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
795 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
796 buffer.update(cx, |buffer, cx| {
797 buffer.update_diagnostics(diagnostics, None, cx).log_err();
798 });
799 }
800 }
801
802 None
803 }
804
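    /// Starts a language server for `language` rooted at `worktree_path`, and
    /// spawns a task that translates the server's notifications into project
    /// state: `PublishDiagnostics` notifications become diagnostic updates,
    /// and progress notifications carrying the language's disk-based
    /// diagnostics token toggle the "diagnostics running" indicator. For
    /// servers without such a token, each diagnostics publish is bracketed by
    /// synthetic start/finish events instead.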
805 fn start_language_server(
806 rpc: Arc<Client>,
807 language: Arc<Language>,
808 worktree_path: &Path,
809 cx: &mut ModelContext<Self>,
810 ) -> Option<Arc<LanguageServer>> {
811 enum LspEvent {
812 DiagnosticsStart,
813 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
814 DiagnosticsFinish,
815 }
816
817 let language_server = language
818 .start_server(worktree_path, cx)
819 .log_err()
820 .flatten()?;
821 let disk_based_sources = language
822 .disk_based_diagnostic_sources()
823 .cloned()
824 .unwrap_or_default();
825 let disk_based_diagnostics_progress_token =
826 language.disk_based_diagnostics_progress_token().cloned();
827 let has_disk_based_diagnostic_progress_token =
828 disk_based_diagnostics_progress_token.is_some();
829 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
830
831 // Listen for `PublishDiagnostics` notifications.
832 language_server
833 .on_notification::<lsp::notification::PublishDiagnostics, _>({
834 let diagnostics_tx = diagnostics_tx.clone();
835 move |params| {
836 if !has_disk_based_diagnostic_progress_token {
837 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
838 }
839 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params))).ok();
840 if !has_disk_based_diagnostic_progress_token {
841 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
842 }
843 }
844 })
845 .detach();
846
847 // Listen for `Progress` notifications. Send an event when the language server
848 // transitions between running jobs and not running any jobs.
849 let mut running_jobs_for_this_server: i32 = 0;
850 language_server
851 .on_notification::<lsp::notification::Progress, _>(move |params| {
852 let token = match params.token {
853 lsp::NumberOrString::Number(_) => None,
854 lsp::NumberOrString::String(token) => Some(token),
855 };
856
857 if token == disk_based_diagnostics_progress_token {
858 match params.value {
859 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
860 lsp::WorkDoneProgress::Begin(_) => {
861 running_jobs_for_this_server += 1;
862 if running_jobs_for_this_server == 1 {
863 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
864 }
865 }
866 lsp::WorkDoneProgress::End(_) => {
867 running_jobs_for_this_server -= 1;
868 if running_jobs_for_this_server == 0 {
869 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
870 }
871 }
872 _ => {}
873 },
874 }
875 }
876 })
877 .detach();
878
879 // Process all the LSP events.
880 cx.spawn_weak(|this, mut cx| async move {
881 while let Ok(message) = diagnostics_rx.recv().await {
882 let this = this.upgrade(&cx)?;
883 match message {
884 LspEvent::DiagnosticsStart => {
885 this.update(&mut cx, |this, cx| {
886 this.disk_based_diagnostics_started(cx);
887 if let Some(project_id) = this.remote_id() {
888 rpc.send(proto::DiskBasedDiagnosticsUpdating { project_id })
889 .log_err();
890 }
891 });
892 }
893 LspEvent::DiagnosticsUpdate(mut params) => {
894 language.process_diagnostics(&mut params);
895 this.update(&mut cx, |this, cx| {
896 this.update_diagnostics(params, &disk_based_sources, cx)
897 .log_err();
898 });
899 }
900 LspEvent::DiagnosticsFinish => {
901 this.update(&mut cx, |this, cx| {
902 this.disk_based_diagnostics_finished(cx);
903 if let Some(project_id) = this.remote_id() {
904 rpc.send(proto::DiskBasedDiagnosticsUpdated { project_id })
905 .log_err();
906 }
907 });
908 }
909 }
910 }
911 Some(())
912 })
913 .detach();
914
915 Some(language_server)
916 }
917
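    /// Converts an LSP `PublishDiagnostics` payload into this project's
    /// diagnostic entries. Each primary diagnostic gets a fresh group id, and
    /// related-information entries that refer back to an existing primary are
    /// folded into that group as supporting diagnostics (optionally adopting
    /// the severity the server reported for them). `disk_based_sources` names
    /// the diagnostic sources produced by whole-project tools (e.g. a full
    /// build) rather than live single-file analysis.
    ///
    /// A minimal sketch of feeding one diagnostic through this method,
    /// assuming the `lsp` crate re-exports the `lsp_types` constructors used
    /// below; the path and message are placeholders:
    ///
    /// ```ignore
    /// let params = lsp::PublishDiagnosticsParams {
    ///     uri: lsp::Url::from_file_path("/path/to/file.rs").unwrap(),
    ///     diagnostics: vec![lsp::Diagnostic::new_simple(
    ///         lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
    ///         "unused variable".to_string(),
    ///     )],
    ///     version: None,
    /// };
    /// project.update_diagnostics(params, &Default::default(), cx)?;
    /// ```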
918 pub fn update_diagnostics(
919 &mut self,
920 params: lsp::PublishDiagnosticsParams,
921 disk_based_sources: &HashSet<String>,
922 cx: &mut ModelContext<Self>,
923 ) -> Result<()> {
924 let abs_path = params
925 .uri
926 .to_file_path()
927 .map_err(|_| anyhow!("URI is not a file"))?;
928 let mut next_group_id = 0;
929 let mut diagnostics = Vec::default();
930 let mut primary_diagnostic_group_ids = HashMap::default();
931 let mut sources_by_group_id = HashMap::default();
932 let mut supporting_diagnostic_severities = HashMap::default();
        for diagnostic in &params.diagnostics {
934 let source = diagnostic.source.as_ref();
935 let code = diagnostic.code.as_ref().map(|code| match code {
936 lsp::NumberOrString::Number(code) => code.to_string(),
937 lsp::NumberOrString::String(code) => code.clone(),
938 });
939 let range = range_from_lsp(diagnostic.range);
940 let is_supporting = diagnostic
941 .related_information
942 .as_ref()
943 .map_or(false, |infos| {
944 infos.iter().any(|info| {
945 primary_diagnostic_group_ids.contains_key(&(
946 source,
947 code.clone(),
948 range_from_lsp(info.location.range),
949 ))
950 })
951 });
952
953 if is_supporting {
954 if let Some(severity) = diagnostic.severity {
955 supporting_diagnostic_severities
956 .insert((source, code.clone(), range), severity);
957 }
958 } else {
959 let group_id = post_inc(&mut next_group_id);
960 let is_disk_based =
961 source.map_or(false, |source| disk_based_sources.contains(source));
962
963 sources_by_group_id.insert(group_id, source);
964 primary_diagnostic_group_ids
965 .insert((source, code.clone(), range.clone()), group_id);
966
967 diagnostics.push(DiagnosticEntry {
968 range,
969 diagnostic: Diagnostic {
970 code: code.clone(),
971 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
972 message: diagnostic.message.clone(),
973 group_id,
974 is_primary: true,
975 is_valid: true,
976 is_disk_based,
977 },
978 });
979 if let Some(infos) = &diagnostic.related_information {
980 for info in infos {
981 if info.location.uri == params.uri && !info.message.is_empty() {
982 let range = range_from_lsp(info.location.range);
983 diagnostics.push(DiagnosticEntry {
984 range,
985 diagnostic: Diagnostic {
986 code: code.clone(),
987 severity: DiagnosticSeverity::INFORMATION,
988 message: info.message.clone(),
989 group_id,
990 is_primary: false,
991 is_valid: true,
992 is_disk_based,
993 },
994 });
995 }
996 }
997 }
998 }
999 }
1000
1001 for entry in &mut diagnostics {
1002 let diagnostic = &mut entry.diagnostic;
1003 if !diagnostic.is_primary {
1004 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1005 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1006 source,
1007 diagnostic.code.clone(),
1008 entry.range.clone(),
1009 )) {
1010 diagnostic.severity = severity;
1011 }
1012 }
1013 }
1014
1015 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1016 Ok(())
1017 }
1018
1019 pub fn update_diagnostic_entries(
1020 &mut self,
1021 abs_path: PathBuf,
1022 version: Option<i32>,
1023 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1024 cx: &mut ModelContext<Project>,
1025 ) -> Result<(), anyhow::Error> {
1026 let (worktree, relative_path) = self
1027 .find_local_worktree(&abs_path, cx)
1028 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1029 let project_path = ProjectPath {
1030 worktree_id: worktree.read(cx).id(),
1031 path: relative_path.into(),
1032 };
1033
1034 for buffer in self.open_buffers.values() {
1035 if let Some(buffer) = buffer.upgrade(cx) {
1036 if buffer
1037 .read(cx)
1038 .file()
1039 .map_or(false, |file| *file.path() == project_path.path)
1040 {
1041 buffer.update(cx, |buffer, cx| {
1042 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1043 })?;
1044 break;
1045 }
1046 }
1047 }
1048 worktree.update(cx, |worktree, cx| {
1049 worktree
1050 .as_local_mut()
1051 .ok_or_else(|| anyhow!("not a local worktree"))?
1052 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1053 })?;
1054 cx.emit(Event::DiagnosticsUpdated(project_path));
1055 Ok(())
1056 }
1057
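    /// Formats the given buffers. Remote buffers are formatted by the host
    /// via a single `FormatBuffers` request, while local buffers ask their
    /// language server for document-formatting edits and apply them in one
    /// transaction per buffer. All resulting transactions are collected into
    /// a `ProjectTransaction`, optionally pushed onto each buffer's history.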
1058 pub fn format(
1059 &self,
1060 buffers: HashSet<ModelHandle<Buffer>>,
1061 push_to_history: bool,
1062 cx: &mut ModelContext<Project>,
1063 ) -> Task<Result<ProjectTransaction>> {
1064 let mut local_buffers = Vec::new();
1065 let mut remote_buffers = None;
1066 for buffer_handle in buffers {
1067 let buffer = buffer_handle.read(cx);
1068 let worktree;
1069 if let Some(file) = File::from_dyn(buffer.file()) {
1070 worktree = file.worktree.clone();
1071 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1072 let lang_server;
1073 if let Some(lang) = buffer.language() {
1074 if let Some(server) = self
1075 .language_servers
1076 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1077 {
1078 lang_server = server.clone();
1079 } else {
1080 return Task::ready(Ok(Default::default()));
1081 };
1082 } else {
1083 return Task::ready(Ok(Default::default()));
1084 }
1085
1086 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1087 } else {
1088 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1089 }
1090 } else {
1091 return Task::ready(Ok(Default::default()));
1092 }
1093 }
1094
1095 let remote_buffers = self.remote_id().zip(remote_buffers);
1096 let client = self.client.clone();
1097
1098 cx.spawn(|this, mut cx| async move {
1099 let mut project_transaction = ProjectTransaction::default();
1100
1101 if let Some((project_id, remote_buffers)) = remote_buffers {
1102 let response = client
1103 .request(proto::FormatBuffers {
1104 project_id,
1105 buffer_ids: remote_buffers
1106 .iter()
1107 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1108 .collect(),
1109 })
1110 .await?
1111 .transaction
1112 .ok_or_else(|| anyhow!("missing transaction"))?;
1113 project_transaction = this
1114 .update(&mut cx, |this, cx| {
1115 this.deserialize_project_transaction(response, push_to_history, cx)
1116 })
1117 .await?;
1118 }
1119
1120 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1121 let lsp_edits = lang_server
1122 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1123 text_document: lsp::TextDocumentIdentifier::new(
1124 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1125 ),
1126 options: Default::default(),
1127 work_done_progress_params: Default::default(),
1128 })
1129 .await?;
1130
1131 if let Some(lsp_edits) = lsp_edits {
1132 let edits = buffer
1133 .update(&mut cx, |buffer, cx| {
1134 buffer.edits_from_lsp(lsp_edits, None, cx)
1135 })
1136 .await?;
1137 buffer.update(&mut cx, |buffer, cx| {
1138 buffer.finalize_last_transaction();
1139 buffer.start_transaction();
1140 for (range, text) in edits {
1141 buffer.edit([range], text, cx);
1142 }
1143 if buffer.end_transaction(cx).is_some() {
1144 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1145 if !push_to_history {
1146 buffer.forget_transaction(transaction.id);
1147 }
1148 project_transaction.0.insert(cx.handle(), transaction);
1149 }
1150 });
1151 }
1152 }
1153
1154 Ok(project_transaction)
1155 })
1156 }
1157
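    /// Resolves go-to-definition for `position` in the given buffer. For
    /// local buffers this sends `textDocument/definition` to the buffer's
    /// language server and opens whatever buffers the results point into; for
    /// remote buffers the request is proxied to the host. Each result is
    /// returned as a `Definition` anchored into its target buffer.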
1158 pub fn definition<T: ToPointUtf16>(
1159 &self,
1160 source_buffer_handle: &ModelHandle<Buffer>,
1161 position: T,
1162 cx: &mut ModelContext<Self>,
1163 ) -> Task<Result<Vec<Definition>>> {
1164 let source_buffer_handle = source_buffer_handle.clone();
1165 let source_buffer = source_buffer_handle.read(cx);
1166 let worktree;
1167 let buffer_abs_path;
1168 if let Some(file) = File::from_dyn(source_buffer.file()) {
1169 worktree = file.worktree.clone();
1170 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1171 } else {
1172 return Task::ready(Ok(Default::default()));
1173 };
1174
1175 let position = position.to_point_utf16(source_buffer);
1176
1177 if worktree.read(cx).as_local().is_some() {
1178 let buffer_abs_path = buffer_abs_path.unwrap();
1179 let lang_name;
1180 let lang_server;
1181 if let Some(lang) = source_buffer.language() {
1182 lang_name = lang.name().to_string();
1183 if let Some(server) = self
1184 .language_servers
1185 .get(&(worktree.read(cx).id(), lang_name.clone()))
1186 {
1187 lang_server = server.clone();
1188 } else {
1189 return Task::ready(Ok(Default::default()));
1190 };
1191 } else {
1192 return Task::ready(Ok(Default::default()));
1193 }
1194
1195 cx.spawn(|this, mut cx| async move {
1196 let response = lang_server
1197 .request::<lsp::request::GotoDefinition>(lsp::GotoDefinitionParams {
1198 text_document_position_params: lsp::TextDocumentPositionParams {
1199 text_document: lsp::TextDocumentIdentifier::new(
1200 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1201 ),
1202 position: lsp::Position::new(position.row, position.column),
1203 },
1204 work_done_progress_params: Default::default(),
1205 partial_result_params: Default::default(),
1206 })
1207 .await?;
1208
1209 let mut definitions = Vec::new();
1210 if let Some(response) = response {
1211 let mut unresolved_locations = Vec::new();
1212 match response {
1213 lsp::GotoDefinitionResponse::Scalar(loc) => {
1214 unresolved_locations.push((loc.uri, loc.range));
1215 }
1216 lsp::GotoDefinitionResponse::Array(locs) => {
1217 unresolved_locations.extend(locs.into_iter().map(|l| (l.uri, l.range)));
1218 }
1219 lsp::GotoDefinitionResponse::Link(links) => {
1220 unresolved_locations.extend(
1221 links
1222 .into_iter()
1223 .map(|l| (l.target_uri, l.target_selection_range)),
1224 );
1225 }
1226 }
1227
1228 for (target_uri, target_range) in unresolved_locations {
1229 let target_buffer_handle = this
1230 .update(&mut cx, |this, cx| {
1231 this.open_local_buffer_from_lsp_path(
1232 target_uri,
1233 lang_name.clone(),
1234 lang_server.clone(),
1235 cx,
1236 )
1237 })
1238 .await?;
1239
1240 cx.read(|cx| {
1241 let target_buffer = target_buffer_handle.read(cx);
1242 let target_start = target_buffer
1243 .clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
1244 let target_end = target_buffer
1245 .clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
1246 definitions.push(Definition {
1247 target_buffer: target_buffer_handle,
1248 target_range: target_buffer.anchor_after(target_start)
1249 ..target_buffer.anchor_before(target_end),
1250 });
1251 });
1252 }
1253 }
1254
1255 Ok(definitions)
1256 })
1257 } else if let Some(project_id) = self.remote_id() {
1258 let client = self.client.clone();
1259 let request = proto::GetDefinition {
1260 project_id,
1261 buffer_id: source_buffer.remote_id(),
1262 position: Some(serialize_anchor(&source_buffer.anchor_before(position))),
1263 };
1264 cx.spawn(|this, mut cx| async move {
1265 let response = client.request(request).await?;
1266 this.update(&mut cx, |this, cx| {
1267 let mut definitions = Vec::new();
1268 for definition in response.definitions {
1269 let target_buffer = this.deserialize_buffer(
1270 definition.buffer.ok_or_else(|| anyhow!("missing buffer"))?,
1271 cx,
1272 )?;
1273 let target_start = definition
1274 .target_start
1275 .and_then(deserialize_anchor)
1276 .ok_or_else(|| anyhow!("missing target start"))?;
1277 let target_end = definition
1278 .target_end
1279 .and_then(deserialize_anchor)
1280 .ok_or_else(|| anyhow!("missing target end"))?;
1281 definitions.push(Definition {
1282 target_buffer,
1283 target_range: target_start..target_end,
1284 })
1285 }
1286
1287 Ok(definitions)
1288 })
1289 })
1290 } else {
1291 Task::ready(Ok(Default::default()))
1292 }
1293 }
1294
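    /// Requests completions at `position`. Local buffers query their language
    /// server directly, keeping only completions whose edit range lies inside
    /// the buffer (insert-and-replace edits are not yet supported); remote
    /// buffers proxy the request to the host and deserialize the results.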
1295 pub fn completions<T: ToPointUtf16>(
1296 &self,
1297 source_buffer_handle: &ModelHandle<Buffer>,
1298 position: T,
1299 cx: &mut ModelContext<Self>,
1300 ) -> Task<Result<Vec<Completion>>> {
1301 let source_buffer_handle = source_buffer_handle.clone();
1302 let source_buffer = source_buffer_handle.read(cx);
1303 let buffer_id = source_buffer.remote_id();
1304 let language = source_buffer.language().cloned();
1305 let worktree;
1306 let buffer_abs_path;
1307 if let Some(file) = File::from_dyn(source_buffer.file()) {
1308 worktree = file.worktree.clone();
1309 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1310 } else {
1311 return Task::ready(Ok(Default::default()));
1312 };
1313
1314 let position = position.to_point_utf16(source_buffer);
1315 let anchor = source_buffer.anchor_after(position);
1316
1317 if worktree.read(cx).as_local().is_some() {
1318 let buffer_abs_path = buffer_abs_path.unwrap();
1319 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1320 server
1321 } else {
1322 return Task::ready(Ok(Default::default()));
1323 };
1324
1325 cx.spawn(|_, cx| async move {
1326 let completions = lang_server
1327 .request::<lsp::request::Completion>(lsp::CompletionParams {
1328 text_document_position: lsp::TextDocumentPositionParams::new(
1329 lsp::TextDocumentIdentifier::new(
1330 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1331 ),
1332 position.to_lsp_position(),
1333 ),
1334 context: Default::default(),
1335 work_done_progress_params: Default::default(),
1336 partial_result_params: Default::default(),
1337 })
1338 .await?;
1339
1340 let completions = if let Some(completions) = completions {
1341 match completions {
1342 lsp::CompletionResponse::Array(completions) => completions,
1343 lsp::CompletionResponse::List(list) => list.items,
1344 }
1345 } else {
1346 Default::default()
1347 };
1348
1349 source_buffer_handle.read_with(&cx, |this, _| {
1350 Ok(completions.into_iter().filter_map(|lsp_completion| {
1351 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1352 lsp::CompletionTextEdit::Edit(edit) => (range_from_lsp(edit.range), edit.new_text.clone()),
1353 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1354 log::info!("received an insert and replace completion but we don't yet support that");
1355 return None
1356 },
1357 };
1358
1359 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                        let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1361 if clipped_start == old_range.start && clipped_end == old_range.end {
1362 Some(Completion {
1363 old_range: this.anchor_before(old_range.start)..this.anchor_after(old_range.end),
1364 new_text,
1365 label: language.as_ref().and_then(|l| l.label_for_completion(&lsp_completion)).unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
1366 lsp_completion,
1367 })
1368 } else {
1369 None
1370 }
1371 }).collect())
1372 })
1373
1374 })
1375 } else if let Some(project_id) = self.remote_id() {
1376 let rpc = self.client.clone();
1377 cx.foreground().spawn(async move {
1378 let response = rpc
1379 .request(proto::GetCompletions {
1380 project_id,
1381 buffer_id,
1382 position: Some(language::proto::serialize_anchor(&anchor)),
1383 })
1384 .await?;
1385 response
1386 .completions
1387 .into_iter()
1388 .map(|completion| {
1389 language::proto::deserialize_completion(completion, language.as_ref())
1390 })
1391 .collect()
1392 })
1393 } else {
1394 Task::ready(Ok(Default::default()))
1395 }
1396 }
1397
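    /// Applies the additional text edits attached to a completion (for
    /// example, an auto-inserted import) after the completion has been
    /// confirmed. Locally the completion item is resolved with the language
    /// server and its `additional_text_edits` are applied in a single
    /// transaction; remotely the host applies the edits and this replica
    /// waits for the corresponding operations before optionally recording the
    /// transaction in the buffer's history.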
1398 pub fn apply_additional_edits_for_completion(
1399 &self,
1400 buffer_handle: ModelHandle<Buffer>,
1401 completion: Completion,
1402 push_to_history: bool,
1403 cx: &mut ModelContext<Self>,
1404 ) -> Task<Result<Option<Transaction>>> {
1405 let buffer = buffer_handle.read(cx);
1406 let buffer_id = buffer.remote_id();
1407
1408 if self.is_local() {
1409 let lang_server = if let Some(language_server) = buffer.language_server() {
1410 language_server.clone()
1411 } else {
1412 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1413 };
1414
1415 cx.spawn(|_, mut cx| async move {
1416 let resolved_completion = lang_server
1417 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1418 .await?;
1419 if let Some(edits) = resolved_completion.additional_text_edits {
1420 let edits = buffer_handle
1421 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1422 .await?;
1423 buffer_handle.update(&mut cx, |buffer, cx| {
1424 buffer.finalize_last_transaction();
1425 buffer.start_transaction();
1426 for (range, text) in edits {
1427 buffer.edit([range], text, cx);
1428 }
1429 let transaction = if buffer.end_transaction(cx).is_some() {
1430 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1431 if !push_to_history {
1432 buffer.forget_transaction(transaction.id);
1433 }
1434 Some(transaction)
1435 } else {
1436 None
1437 };
1438 Ok(transaction)
1439 })
1440 } else {
1441 Ok(None)
1442 }
1443 })
1444 } else if let Some(project_id) = self.remote_id() {
1445 let client = self.client.clone();
1446 cx.spawn(|_, mut cx| async move {
1447 let response = client
1448 .request(proto::ApplyCompletionAdditionalEdits {
1449 project_id,
1450 buffer_id,
1451 completion: Some(language::proto::serialize_completion(&completion)),
1452 })
1453 .await?;
1454
1455 if let Some(transaction) = response.transaction {
1456 let transaction = language::proto::deserialize_transaction(transaction)?;
1457 buffer_handle
1458 .update(&mut cx, |buffer, _| {
1459 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1460 })
1461 .await;
1462 if push_to_history {
1463 buffer_handle.update(&mut cx, |buffer, _| {
1464 buffer.push_transaction(transaction.clone(), Instant::now());
1465 });
1466 }
1467 Ok(Some(transaction))
1468 } else {
1469 Ok(None)
1470 }
1471 })
1472 } else {
1473 Task::ready(Err(anyhow!("project does not have a remote id")))
1474 }
1475 }
1476
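    /// Fetches the code actions available at `position`, restricted to quick
    /// fixes and refactorings. Bare commands returned by the server are
    /// dropped; only full code actions are kept.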
1477 pub fn code_actions<T: ToPointUtf16>(
1478 &self,
1479 source_buffer_handle: &ModelHandle<Buffer>,
1480 position: T,
1481 cx: &mut ModelContext<Self>,
1482 ) -> Task<Result<Vec<CodeAction>>> {
1483 let source_buffer_handle = source_buffer_handle.clone();
1484 let source_buffer = source_buffer_handle.read(cx);
1485 let buffer_id = source_buffer.remote_id();
1486 let worktree;
1487 let buffer_abs_path;
1488 if let Some(file) = File::from_dyn(source_buffer.file()) {
1489 worktree = file.worktree.clone();
1490 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1491 } else {
1492 return Task::ready(Ok(Default::default()));
1493 };
1494
1495 let position = position.to_point_utf16(source_buffer);
1496 let anchor = source_buffer.anchor_after(position);
1497
1498 if worktree.read(cx).as_local().is_some() {
1499 let buffer_abs_path = buffer_abs_path.unwrap();
1500 let lang_name;
1501 let lang_server;
1502 if let Some(lang) = source_buffer.language() {
1503 lang_name = lang.name().to_string();
1504 if let Some(server) = self
1505 .language_servers
1506 .get(&(worktree.read(cx).id(), lang_name.clone()))
1507 {
1508 lang_server = server.clone();
1509 } else {
1510 return Task::ready(Ok(Default::default()));
1511 };
1512 } else {
1513 return Task::ready(Ok(Default::default()));
1514 }
1515
1516 cx.foreground().spawn(async move {
1517 let actions = lang_server
1518 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1519 text_document: lsp::TextDocumentIdentifier::new(
1520 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1521 ),
1522 range: lsp::Range::new(
1523 position.to_lsp_position(),
1524 position.to_lsp_position(),
1525 ),
1526 work_done_progress_params: Default::default(),
1527 partial_result_params: Default::default(),
1528 context: lsp::CodeActionContext {
1529 diagnostics: Default::default(),
1530 only: Some(vec![
1531 lsp::CodeActionKind::QUICKFIX,
1532 lsp::CodeActionKind::REFACTOR,
1533 lsp::CodeActionKind::REFACTOR_EXTRACT,
1534 ]),
1535 },
1536 })
1537 .await?
1538 .unwrap_or_default()
1539 .into_iter()
1540 .filter_map(|entry| {
1541 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1542 Some(CodeAction {
1543 position: anchor.clone(),
1544 lsp_action,
1545 })
1546 } else {
1547 None
1548 }
1549 })
1550 .collect();
1551 Ok(actions)
1552 })
1553 } else if let Some(project_id) = self.remote_id() {
1554 let rpc = self.client.clone();
1555 cx.foreground().spawn(async move {
1556 let response = rpc
1557 .request(proto::GetCodeActions {
1558 project_id,
1559 buffer_id,
1560 position: Some(language::proto::serialize_anchor(&anchor)),
1561 })
1562 .await?;
1563 response
1564 .actions
1565 .into_iter()
1566 .map(language::proto::deserialize_code_action)
1567 .collect()
1568 })
1569 } else {
1570 Task::ready(Ok(Default::default()))
1571 }
1572 }
1573
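    /// Applies a previously fetched code action. The action is first
    /// re-resolved with the language server (or re-fetched when it carries no
    /// resolve data), then its workspace edit is replayed: resource
    /// operations create, rename, or delete files through the project's
    /// `Fs`, and text document edits are applied to freshly opened buffers,
    /// all collected into a single `ProjectTransaction`.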
1574 pub fn apply_code_action(
1575 &self,
1576 buffer_handle: ModelHandle<Buffer>,
1577 mut action: CodeAction,
1578 push_to_history: bool,
1579 cx: &mut ModelContext<Self>,
1580 ) -> Task<Result<ProjectTransaction>> {
1581 if self.is_local() {
1582 let buffer = buffer_handle.read(cx);
1583 let lang_name = if let Some(lang) = buffer.language() {
1584 lang.name().to_string()
1585 } else {
1586 return Task::ready(Ok(Default::default()));
1587 };
1588 let lang_server = if let Some(language_server) = buffer.language_server() {
1589 language_server.clone()
1590 } else {
1591 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1592 };
1593 let position = action.position.to_point_utf16(buffer).to_lsp_position();
1594 let fs = self.fs.clone();
1595
1596 cx.spawn(|this, mut cx| async move {
1597 if let Some(range) = action
1598 .lsp_action
1599 .data
1600 .as_mut()
1601 .and_then(|d| d.get_mut("codeActionParams"))
1602 .and_then(|d| d.get_mut("range"))
1603 {
1604 *range = serde_json::to_value(&lsp::Range::new(position, position)).unwrap();
1605 action.lsp_action = lang_server
1606 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1607 .await?;
1608 } else {
1609 let actions = this
1610 .update(&mut cx, |this, cx| {
1611 this.code_actions(&buffer_handle, action.position.clone(), cx)
1612 })
1613 .await?;
1614 action.lsp_action = actions
1615 .into_iter()
1616 .find(|a| a.lsp_action.title == action.lsp_action.title)
1617 .ok_or_else(|| anyhow!("code action is outdated"))?
1618 .lsp_action;
1619 }
1620
1621 let mut operations = Vec::new();
1622 if let Some(edit) = action.lsp_action.edit {
1623 if let Some(document_changes) = edit.document_changes {
1624 match document_changes {
1625 lsp::DocumentChanges::Edits(edits) => operations
1626 .extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit)),
1627 lsp::DocumentChanges::Operations(ops) => operations = ops,
1628 }
1629 } else if let Some(changes) = edit.changes {
1630 operations.extend(changes.into_iter().map(|(uri, edits)| {
1631 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1632 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1633 uri,
1634 version: None,
1635 },
1636 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1637 })
1638 }));
1639 }
1640 }
1641
1642 let mut project_transaction = ProjectTransaction::default();
1643 for operation in operations {
1644 match operation {
1645 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1646 let abs_path = op
1647 .uri
1648 .to_file_path()
1649 .map_err(|_| anyhow!("can't convert URI to path"))?;
1650
1651 if let Some(parent_path) = abs_path.parent() {
1652 fs.create_dir(parent_path).await?;
1653 }
                            // Note: `Path::ends_with` compares whole path components, so it
                            // can't detect a trailing slash; check the string form instead.
                            if abs_path.to_string_lossy().ends_with('/') {
                                fs.create_dir(&abs_path).await?;
                            } else {
                                fs.create_file(
                                    &abs_path,
                                    op.options.map(Into::into).unwrap_or_default(),
                                )
                                .await?;
                            }
1663 }
1664 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1665 let source_abs_path = op
1666 .old_uri
1667 .to_file_path()
1668 .map_err(|_| anyhow!("can't convert URI to path"))?;
1669 let target_abs_path = op
1670 .new_uri
1671 .to_file_path()
1672 .map_err(|_| anyhow!("can't convert URI to path"))?;
1673 fs.rename(
1674 &source_abs_path,
1675 &target_abs_path,
1676 op.options.map(Into::into).unwrap_or_default(),
1677 )
1678 .await?;
1679 }
1680 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1681 let abs_path = op
1682 .uri
1683 .to_file_path()
1684 .map_err(|_| anyhow!("can't convert URI to path"))?;
1685 let options = op.options.map(Into::into).unwrap_or_default();
                            if abs_path.to_string_lossy().ends_with('/') {
                                fs.remove_dir(&abs_path, options).await?;
                            } else {
                                fs.remove_file(&abs_path, options).await?;
                            }
1691 }
1692 lsp::DocumentChangeOperation::Edit(op) => {
1693 let buffer_to_edit = this
1694 .update(&mut cx, |this, cx| {
1695 this.open_local_buffer_from_lsp_path(
1696 op.text_document.uri,
1697 lang_name.clone(),
1698 lang_server.clone(),
1699 cx,
1700 )
1701 })
1702 .await?;
1703
1704 let edits = buffer_to_edit
1705 .update(&mut cx, |buffer, cx| {
1706 let edits = op.edits.into_iter().map(|edit| match edit {
1707 lsp::OneOf::Left(edit) => edit,
1708 lsp::OneOf::Right(edit) => edit.text_edit,
1709 });
1710 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1711 })
1712 .await?;
1713
1714 let transaction = buffer_to_edit.update(&mut cx, |buffer, cx| {
1715 buffer.finalize_last_transaction();
1716 buffer.start_transaction();
1717 for (range, text) in edits {
1718 buffer.edit([range], text, cx);
1719 }
1720 let transaction = if buffer.end_transaction(cx).is_some() {
1721 let transaction =
1722 buffer.finalize_last_transaction().unwrap().clone();
1723 if !push_to_history {
1724 buffer.forget_transaction(transaction.id);
1725 }
1726 Some(transaction)
1727 } else {
1728 None
1729 };
1730
1731 transaction
1732 });
1733 if let Some(transaction) = transaction {
1734 project_transaction.0.insert(buffer_to_edit, transaction);
1735 }
1736 }
1737 }
1738 }
1739
1740 Ok(project_transaction)
1741 })
1742 } else if let Some(project_id) = self.remote_id() {
1743 let client = self.client.clone();
1744 let request = proto::ApplyCodeAction {
1745 project_id,
1746 buffer_id: buffer_handle.read(cx).remote_id(),
1747 action: Some(language::proto::serialize_code_action(&action)),
1748 };
1749 cx.spawn(|this, mut cx| async move {
1750 let response = client
1751 .request(request)
1752 .await?
1753 .transaction
1754 .ok_or_else(|| anyhow!("missing transaction"))?;
1755 this.update(&mut cx, |this, cx| {
1756 this.deserialize_project_transaction(response, push_to_history, cx)
1757 })
1758 .await
1759 })
1760 } else {
1761 Task::ready(Err(anyhow!("project does not have a remote id")))
1762 }
1763 }
1764
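    /// Returns the local worktree containing `abs_path` along with the path
    /// relative to that worktree's root, creating a new worktree (and
    /// registering and sharing it, if the project is shared) when no existing
    /// worktree contains the path.
    ///
    /// A rough usage sketch; the path is a placeholder:
    ///
    /// ```ignore
    /// let task = project.update(cx, |project, cx| {
    ///     project.find_or_create_local_worktree("/path/to/repo", false, cx)
    /// });
    /// let (worktree, relative_path) = task.await?;
    /// ```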
1765 pub fn find_or_create_local_worktree(
1766 &self,
1767 abs_path: impl AsRef<Path>,
1768 weak: bool,
1769 cx: &mut ModelContext<Self>,
1770 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1771 let abs_path = abs_path.as_ref();
1772 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1773 Task::ready(Ok((tree.clone(), relative_path.into())))
1774 } else {
1775 let worktree = self.create_local_worktree(abs_path, weak, cx);
1776 cx.foreground()
1777 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1778 }
1779 }
1780
1781 fn find_local_worktree(
1782 &self,
1783 abs_path: &Path,
1784 cx: &AppContext,
1785 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1786 for tree in self.worktrees(cx) {
1787 if let Some(relative_path) = tree
1788 .read(cx)
1789 .as_local()
1790 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1791 {
1792 return Some((tree.clone(), relative_path.into()));
1793 }
1794 }
1795 None
1796 }
1797
1798 pub fn is_shared(&self) -> bool {
1799 match &self.client_state {
1800 ProjectClientState::Local { is_shared, .. } => *is_shared,
1801 ProjectClientState::Remote { .. } => false,
1802 }
1803 }
1804
1805 fn create_local_worktree(
1806 &self,
1807 abs_path: impl AsRef<Path>,
1808 weak: bool,
1809 cx: &mut ModelContext<Self>,
1810 ) -> Task<Result<ModelHandle<Worktree>>> {
1811 let fs = self.fs.clone();
1812 let client = self.client.clone();
1813 let path = Arc::from(abs_path.as_ref());
1814 cx.spawn(|project, mut cx| async move {
1815 let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;
1816
1817 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
1818 project.add_worktree(&worktree, cx);
1819 (project.remote_id(), project.is_shared())
1820 });
1821
1822 if let Some(project_id) = remote_project_id {
1823 worktree
1824 .update(&mut cx, |worktree, cx| {
1825 worktree.as_local_mut().unwrap().register(project_id, cx)
1826 })
1827 .await?;
1828 if is_shared {
1829 worktree
1830 .update(&mut cx, |worktree, cx| {
1831 worktree.as_local_mut().unwrap().share(project_id, cx)
1832 })
1833 .await?;
1834 }
1835 }
1836
1837 Ok(worktree)
1838 })
1839 }
1840
1841 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1842 self.worktrees.retain(|worktree| {
1843 worktree
1844 .upgrade(cx)
1845 .map_or(false, |w| w.read(cx).id() != id)
1846 });
1847 cx.notify();
1848 }
1849
1850 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
1851 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
1852 if worktree.read(cx).is_local() {
1853 cx.subscribe(&worktree, |this, worktree, _, cx| {
1854 this.update_local_worktree_buffers(worktree, cx);
1855 })
1856 .detach();
1857 }
1858
1859 let push_weak_handle = {
1860 let worktree = worktree.read(cx);
1861 worktree.is_local() && worktree.is_weak()
1862 };
1863 if push_weak_handle {
1864 cx.observe_release(&worktree, |this, cx| {
1865 this.worktrees
1866 .retain(|worktree| worktree.upgrade(cx).is_some());
1867 cx.notify();
1868 })
1869 .detach();
1870 self.worktrees
1871 .push(WorktreeHandle::Weak(worktree.downgrade()));
1872 } else {
1873 self.worktrees
1874 .push(WorktreeHandle::Strong(worktree.clone()));
1875 }
1876 cx.notify();
1877 }
1878
1879 fn update_local_worktree_buffers(
1880 &mut self,
1881 worktree_handle: ModelHandle<Worktree>,
1882 cx: &mut ModelContext<Self>,
1883 ) {
1884 let snapshot = worktree_handle.read(cx).snapshot();
1885 let mut buffers_to_delete = Vec::new();
1886 for (buffer_id, buffer) in &self.open_buffers {
1887 if let Some(buffer) = buffer.upgrade(cx) {
1888 buffer.update(cx, |buffer, cx| {
1889 if let Some(old_file) = File::from_dyn(buffer.file()) {
1890 if old_file.worktree != worktree_handle {
1891 return;
1892 }
1893
1894 let new_file = if let Some(entry) = old_file
1895 .entry_id
1896 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
1897 {
1898 File {
1899 is_local: true,
1900 entry_id: Some(entry.id),
1901 mtime: entry.mtime,
1902 path: entry.path.clone(),
1903 worktree: worktree_handle.clone(),
1904 }
1905 } else if let Some(entry) =
1906 snapshot.entry_for_path(old_file.path().as_ref())
1907 {
1908 File {
1909 is_local: true,
1910 entry_id: Some(entry.id),
1911 mtime: entry.mtime,
1912 path: entry.path.clone(),
1913 worktree: worktree_handle.clone(),
1914 }
1915 } else {
1916 File {
1917 is_local: true,
1918 entry_id: None,
1919 path: old_file.path().clone(),
1920 mtime: old_file.mtime(),
1921 worktree: worktree_handle.clone(),
1922 }
1923 };
1924
1925 if let Some(project_id) = self.remote_id() {
1926 self.client
1927 .send(proto::UpdateBufferFile {
1928 project_id,
1929 buffer_id: *buffer_id as u64,
1930 file: Some(new_file.to_proto()),
1931 })
1932 .log_err();
1933 }
1934 buffer.file_updated(Box::new(new_file), cx).detach();
1935 }
1936 });
1937 } else {
1938 buffers_to_delete.push(*buffer_id);
1939 }
1940 }
1941
1942 for buffer_id in buffers_to_delete {
1943 self.open_buffers.remove(&buffer_id);
1944 }
1945 }
1946
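    /// Sets the project's active entry to the entry at the given path, if any,
    /// emitting `ActiveEntryChanged` when the active entry actually changes.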
1947 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
1948 let new_active_entry = entry.and_then(|project_path| {
1949 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1950 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
1951 Some(ProjectEntry {
1952 worktree_id: project_path.worktree_id,
1953 entry_id: entry.id,
1954 })
1955 });
1956 if new_active_entry != self.active_entry {
1957 self.active_entry = new_active_entry;
1958 cx.emit(Event::ActiveEntryChanged(new_active_entry));
1959 }
1960 }
1961
1962 pub fn is_running_disk_based_diagnostics(&self) -> bool {
1963 self.language_servers_with_diagnostics_running > 0
1964 }
1965
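    /// Returns a project-wide diagnostic summary, summing the per-path
    /// summaries of every worktree.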
1966 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
1967 let mut summary = DiagnosticSummary::default();
1968 for (_, path_summary) in self.diagnostic_summaries(cx) {
1969 summary.error_count += path_summary.error_count;
1970 summary.warning_count += path_summary.warning_count;
1971 summary.info_count += path_summary.info_count;
1972 summary.hint_count += path_summary.hint_count;
1973 }
1974 summary
1975 }
1976
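    /// Iterates over the per-path diagnostic summaries of all worktrees.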
1977 pub fn diagnostic_summaries<'a>(
1978 &'a self,
1979 cx: &'a AppContext,
1980 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
1981 self.worktrees(cx).flat_map(move |worktree| {
1982 let worktree = worktree.read(cx);
1983 let worktree_id = worktree.id();
1984 worktree
1985 .diagnostic_summaries()
1986 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
1987 })
1988 }
1989
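    /// Records that a language server has started a disk-based diagnostics
    /// pass. `DiskBasedDiagnosticsStarted` is only emitted when the number of
    /// running passes goes from zero to one.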
1990 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
1991 self.language_servers_with_diagnostics_running += 1;
1992 if self.language_servers_with_diagnostics_running == 1 {
1993 cx.emit(Event::DiskBasedDiagnosticsStarted);
1994 }
1995 }
1996
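    /// Records that a language server has finished a disk-based diagnostics
    /// pass. `DiskBasedDiagnosticsUpdated` is emitted for every completed pass,
    /// while `DiskBasedDiagnosticsFinished` only fires once no passes remain.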
1997 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
1998 cx.emit(Event::DiskBasedDiagnosticsUpdated);
1999 self.language_servers_with_diagnostics_running -= 1;
2000 if self.language_servers_with_diagnostics_running == 0 {
2001 cx.emit(Event::DiskBasedDiagnosticsFinished);
2002 }
2003 }
2004
2005 pub fn active_entry(&self) -> Option<ProjectEntry> {
2006 self.active_entry
2007 }
2008
2009 // RPC message handlers
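    //
    // Each handler below deserializes an incoming envelope, applies it to the
    // project on the main thread via `this.update`, and returns either `()` or
    // a response message to send back to the requesting peer.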
2010
2011 async fn handle_unshare_project(
2012 this: ModelHandle<Self>,
2013 _: TypedEnvelope<proto::UnshareProject>,
2014 _: Arc<Client>,
2015 mut cx: AsyncAppContext,
2016 ) -> Result<()> {
2017 this.update(&mut cx, |this, cx| {
2018 if let ProjectClientState::Remote {
2019 sharing_has_stopped,
2020 ..
2021 } = &mut this.client_state
2022 {
2023 *sharing_has_stopped = true;
2024 this.collaborators.clear();
2025 cx.notify();
2026 } else {
2027 unreachable!()
2028 }
2029 });
2030
2031 Ok(())
2032 }
2033
2034 async fn handle_add_collaborator(
2035 this: ModelHandle<Self>,
2036 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2037 _: Arc<Client>,
2038 mut cx: AsyncAppContext,
2039 ) -> Result<()> {
2040 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2041 let collaborator = envelope
2042 .payload
2043 .collaborator
2044 .take()
2045 .ok_or_else(|| anyhow!("empty collaborator"))?;
2046
2047 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2048 this.update(&mut cx, |this, cx| {
2049 this.collaborators
2050 .insert(collaborator.peer_id, collaborator);
2051 cx.notify();
2052 });
2053
2054 Ok(())
2055 }
2056
2057 async fn handle_remove_collaborator(
2058 this: ModelHandle<Self>,
2059 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2060 _: Arc<Client>,
2061 mut cx: AsyncAppContext,
2062 ) -> Result<()> {
2063 this.update(&mut cx, |this, cx| {
2064 let peer_id = PeerId(envelope.payload.peer_id);
2065 let replica_id = this
2066 .collaborators
2067 .remove(&peer_id)
2068 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2069 .replica_id;
2070 this.shared_buffers.remove(&peer_id);
2071 for (_, buffer) in &this.open_buffers {
2072 if let Some(buffer) = buffer.upgrade(cx) {
2073 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2074 }
2075 }
2076 cx.notify();
2077 Ok(())
2078 })
2079 }
2080
2081 async fn handle_share_worktree(
2082 this: ModelHandle<Self>,
2083 envelope: TypedEnvelope<proto::ShareWorktree>,
2084 client: Arc<Client>,
2085 mut cx: AsyncAppContext,
2086 ) -> Result<()> {
2087 this.update(&mut cx, |this, cx| {
2088 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2089 let replica_id = this.replica_id();
2090 let worktree = envelope
2091 .payload
2092 .worktree
2093 .ok_or_else(|| anyhow!("invalid worktree"))?;
2094 let (worktree, load_task) =
2095 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2096 this.add_worktree(&worktree, cx);
2097 load_task.detach();
2098 Ok(())
2099 })
2100 }
2101
2102 async fn handle_unregister_worktree(
2103 this: ModelHandle<Self>,
2104 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2105 _: Arc<Client>,
2106 mut cx: AsyncAppContext,
2107 ) -> Result<()> {
2108 this.update(&mut cx, |this, cx| {
2109 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2110 this.remove_worktree(worktree_id, cx);
2111 Ok(())
2112 })
2113 }
2114
2115 async fn handle_update_worktree(
2116 this: ModelHandle<Self>,
2117 envelope: TypedEnvelope<proto::UpdateWorktree>,
2118 _: Arc<Client>,
2119 mut cx: AsyncAppContext,
2120 ) -> Result<()> {
2121 this.update(&mut cx, |this, cx| {
2122 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2123 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2124 worktree.update(cx, |worktree, cx| {
2125 let worktree = worktree.as_remote_mut().unwrap();
2126 worktree.update_from_remote(envelope, cx)
2127 })?;
2128 }
2129 Ok(())
2130 })
2131 }
2132
2133 async fn handle_update_diagnostic_summary(
2134 this: ModelHandle<Self>,
2135 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2136 _: Arc<Client>,
2137 mut cx: AsyncAppContext,
2138 ) -> Result<()> {
2139 this.update(&mut cx, |this, cx| {
2140 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2141 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2142 if let Some(summary) = envelope.payload.summary {
2143 let project_path = ProjectPath {
2144 worktree_id,
2145 path: Path::new(&summary.path).into(),
2146 };
2147 worktree.update(cx, |worktree, _| {
2148 worktree
2149 .as_remote_mut()
2150 .unwrap()
2151 .update_diagnostic_summary(project_path.path.clone(), &summary);
2152 });
2153 cx.emit(Event::DiagnosticsUpdated(project_path));
2154 }
2155 }
2156 Ok(())
2157 })
2158 }
2159
2160 async fn handle_disk_based_diagnostics_updating(
2161 this: ModelHandle<Self>,
2162 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2163 _: Arc<Client>,
2164 mut cx: AsyncAppContext,
2165 ) -> Result<()> {
2166 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2167 Ok(())
2168 }
2169
2170 async fn handle_disk_based_diagnostics_updated(
2171 this: ModelHandle<Self>,
2172 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2173 _: Arc<Client>,
2174 mut cx: AsyncAppContext,
2175 ) -> Result<()> {
2176 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2177 Ok(())
2178 }
2179
2180 async fn handle_update_buffer(
2181 this: ModelHandle<Self>,
2182 envelope: TypedEnvelope<proto::UpdateBuffer>,
2183 _: Arc<Client>,
2184 mut cx: AsyncAppContext,
2185 ) -> Result<()> {
2186 this.update(&mut cx, |this, cx| {
2187 let payload = envelope.payload.clone();
2188 let buffer_id = payload.buffer_id as usize;
2189 let ops = payload
2190 .operations
2191 .into_iter()
                .map(language::proto::deserialize_operation)
2193 .collect::<Result<Vec<_>, _>>()?;
2194 if let Some(buffer) = this.open_buffers.get_mut(&buffer_id) {
2195 if let Some(buffer) = buffer.upgrade(cx) {
2196 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2197 }
2198 }
2199 Ok(())
2200 })
2201 }
2202
2203 async fn handle_update_buffer_file(
2204 this: ModelHandle<Self>,
2205 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2206 _: Arc<Client>,
2207 mut cx: AsyncAppContext,
2208 ) -> Result<()> {
2209 this.update(&mut cx, |this, cx| {
2210 let payload = envelope.payload.clone();
2211 let buffer_id = payload.buffer_id as usize;
2212 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2213 let worktree = this
2214 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2215 .ok_or_else(|| anyhow!("no such worktree"))?;
2216 let file = File::from_proto(file, worktree.clone(), cx)?;
2217 let buffer = this
2218 .open_buffers
2219 .get_mut(&buffer_id)
2220 .and_then(|b| b.upgrade(cx))
2221 .ok_or_else(|| anyhow!("no such buffer"))?;
2222 buffer.update(cx, |buffer, cx| {
2223 buffer.file_updated(Box::new(file), cx).detach();
2224 });
2225 Ok(())
2226 })
2227 }
2228
2229 async fn handle_save_buffer(
2230 this: ModelHandle<Self>,
2231 envelope: TypedEnvelope<proto::SaveBuffer>,
2232 _: Arc<Client>,
2233 mut cx: AsyncAppContext,
2234 ) -> Result<proto::BufferSaved> {
2235 let buffer_id = envelope.payload.buffer_id;
2236 let sender_id = envelope.original_sender_id()?;
2237 let (project_id, save) = this.update(&mut cx, |this, cx| {
2238 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2239 let buffer = this
2240 .shared_buffers
2241 .get(&sender_id)
2242 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2243 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2244 Ok::<_, anyhow::Error>((project_id, buffer.update(cx, |buffer, cx| buffer.save(cx))))
2245 })?;
2246
2247 let (version, mtime) = save.await?;
2248 Ok(proto::BufferSaved {
2249 project_id,
2250 buffer_id,
2251 version: (&version).into(),
2252 mtime: Some(mtime.into()),
2253 })
2254 }
2255
2256 async fn handle_format_buffers(
2257 this: ModelHandle<Self>,
2258 envelope: TypedEnvelope<proto::FormatBuffers>,
2259 _: Arc<Client>,
2260 mut cx: AsyncAppContext,
2261 ) -> Result<proto::FormatBuffersResponse> {
2262 let sender_id = envelope.original_sender_id()?;
2263 let format = this.update(&mut cx, |this, cx| {
2264 let shared_buffers = this
2265 .shared_buffers
2266 .get(&sender_id)
2267 .ok_or_else(|| anyhow!("peer has no buffers"))?;
2268 let mut buffers = HashSet::default();
2269 for buffer_id in &envelope.payload.buffer_ids {
2270 buffers.insert(
2271 shared_buffers
2272 .get(buffer_id)
2273 .cloned()
2274 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2275 );
2276 }
2277 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2278 })?;
2279
2280 let project_transaction = format.await?;
2281 let project_transaction = this.update(&mut cx, |this, cx| {
2282 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2283 });
2284 Ok(proto::FormatBuffersResponse {
2285 transaction: Some(project_transaction),
2286 })
2287 }
2288
2289 async fn handle_get_completions(
2290 this: ModelHandle<Self>,
2291 envelope: TypedEnvelope<proto::GetCompletions>,
2292 _: Arc<Client>,
2293 mut cx: AsyncAppContext,
2294 ) -> Result<proto::GetCompletionsResponse> {
2295 let sender_id = envelope.original_sender_id()?;
2296 let position = envelope
2297 .payload
2298 .position
2299 .and_then(language::proto::deserialize_anchor)
2300 .ok_or_else(|| anyhow!("invalid position"))?;
2301 let completions = this.update(&mut cx, |this, cx| {
2302 let buffer = this
2303 .shared_buffers
2304 .get(&sender_id)
2305 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2306 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2307 Ok::<_, anyhow::Error>(this.completions(&buffer, position, cx))
2308 })?;
2309
2310 Ok(proto::GetCompletionsResponse {
2311 completions: completions
2312 .await?
2313 .iter()
2314 .map(language::proto::serialize_completion)
2315 .collect(),
2316 })
2317 }
2318
2319 async fn handle_apply_additional_edits_for_completion(
2320 this: ModelHandle<Self>,
2321 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2322 _: Arc<Client>,
2323 mut cx: AsyncAppContext,
2324 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2325 let sender_id = envelope.original_sender_id()?;
2326 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2327 let buffer = this
2328 .shared_buffers
2329 .get(&sender_id)
2330 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2331 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2332 let language = buffer.read(cx).language();
2333 let completion = language::proto::deserialize_completion(
2334 envelope
2335 .payload
2336 .completion
2337 .ok_or_else(|| anyhow!("invalid completion"))?,
2338 language,
2339 )?;
2340 Ok::<_, anyhow::Error>(
2341 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2342 )
2343 })?;
2344
2345 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2346 transaction: apply_additional_edits
2347 .await?
2348 .as_ref()
2349 .map(language::proto::serialize_transaction),
2350 })
2351 }
2352
2353 async fn handle_get_code_actions(
2354 this: ModelHandle<Self>,
2355 envelope: TypedEnvelope<proto::GetCodeActions>,
2356 _: Arc<Client>,
2357 mut cx: AsyncAppContext,
2358 ) -> Result<proto::GetCodeActionsResponse> {
2359 let sender_id = envelope.original_sender_id()?;
2360 let position = envelope
2361 .payload
2362 .position
2363 .and_then(language::proto::deserialize_anchor)
2364 .ok_or_else(|| anyhow!("invalid position"))?;
2365 let code_actions = this.update(&mut cx, |this, cx| {
2366 let buffer = this
2367 .shared_buffers
2368 .get(&sender_id)
2369 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2370 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2371 Ok::<_, anyhow::Error>(this.code_actions(&buffer, position, cx))
2372 })?;
2373
2374 Ok(proto::GetCodeActionsResponse {
2375 actions: code_actions
2376 .await?
2377 .iter()
2378 .map(language::proto::serialize_code_action)
2379 .collect(),
2380 })
2381 }
2382
2383 async fn handle_apply_code_action(
2384 this: ModelHandle<Self>,
2385 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2386 _: Arc<Client>,
2387 mut cx: AsyncAppContext,
2388 ) -> Result<proto::ApplyCodeActionResponse> {
2389 let sender_id = envelope.original_sender_id()?;
2390 let action = language::proto::deserialize_code_action(
2391 envelope
2392 .payload
2393 .action
2394 .ok_or_else(|| anyhow!("invalid action"))?,
2395 )?;
2396 let apply_code_action = this.update(&mut cx, |this, cx| {
2397 let buffer = this
2398 .shared_buffers
2399 .get(&sender_id)
2400 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2401 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2402 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2403 })?;
2404
2405 let project_transaction = apply_code_action.await?;
2406 let project_transaction = this.update(&mut cx, |this, cx| {
2407 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2408 });
2409 Ok(proto::ApplyCodeActionResponse {
2410 transaction: Some(project_transaction),
2411 })
2412 }
2413
2414 async fn handle_get_definition(
2415 this: ModelHandle<Self>,
2416 envelope: TypedEnvelope<proto::GetDefinition>,
2417 _: Arc<Client>,
2418 mut cx: AsyncAppContext,
2419 ) -> Result<proto::GetDefinitionResponse> {
2420 let sender_id = envelope.original_sender_id()?;
2421 let position = envelope
2422 .payload
2423 .position
2424 .and_then(deserialize_anchor)
2425 .ok_or_else(|| anyhow!("invalid position"))?;
2426 let definitions = this.update(&mut cx, |this, cx| {
2427 let source_buffer = this
2428 .shared_buffers
2429 .get(&sender_id)
2430 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2431 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2432 if source_buffer.read(cx).can_resolve(&position) {
2433 Ok(this.definition(&source_buffer, position, cx))
2434 } else {
2435 Err(anyhow!("cannot resolve position"))
2436 }
2437 })?;
2438
2439 let definitions = definitions.await?;
2440
2441 this.update(&mut cx, |this, cx| {
2442 let mut response = proto::GetDefinitionResponse {
2443 definitions: Default::default(),
2444 };
2445 for definition in definitions {
2446 let buffer =
2447 this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx);
2448 response.definitions.push(proto::Definition {
2449 target_start: Some(serialize_anchor(&definition.target_range.start)),
2450 target_end: Some(serialize_anchor(&definition.target_range.end)),
2451 buffer: Some(buffer),
2452 });
2453 }
2454 Ok(response)
2455 })
2456 }
2457
2458 async fn handle_open_buffer(
2459 this: ModelHandle<Self>,
2460 envelope: TypedEnvelope<proto::OpenBuffer>,
2461 _: Arc<Client>,
2462 mut cx: AsyncAppContext,
2463 ) -> anyhow::Result<proto::OpenBufferResponse> {
2464 let peer_id = envelope.original_sender_id()?;
2465 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2466 let open_buffer = this.update(&mut cx, |this, cx| {
2467 this.open_buffer(
2468 ProjectPath {
2469 worktree_id,
2470 path: PathBuf::from(envelope.payload.path).into(),
2471 },
2472 cx,
2473 )
2474 });
2475
2476 let buffer = open_buffer.await?;
2477 this.update(&mut cx, |this, cx| {
2478 Ok(proto::OpenBufferResponse {
2479 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
2480 })
2481 })
2482 }
2483
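    /// Converts a `ProjectTransaction` into its wire representation for the
    /// given peer, pairing each serialized buffer with its serialized
    /// transaction.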
2484 fn serialize_project_transaction_for_peer(
2485 &mut self,
2486 project_transaction: ProjectTransaction,
2487 peer_id: PeerId,
2488 cx: &AppContext,
2489 ) -> proto::ProjectTransaction {
2490 let mut serialized_transaction = proto::ProjectTransaction {
2491 buffers: Default::default(),
2492 transactions: Default::default(),
2493 };
2494 for (buffer, transaction) in project_transaction.0 {
2495 serialized_transaction
2496 .buffers
2497 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
2498 serialized_transaction
2499 .transactions
2500 .push(language::proto::serialize_transaction(&transaction));
2501 }
2502 serialized_transaction
2503 }
2504
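    /// Rebuilds a `ProjectTransaction` from its wire representation, resolving
    /// each buffer, waiting for the referenced edits to arrive, and optionally
    /// pushing each transaction onto its buffer's history.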
2505 fn deserialize_project_transaction(
2506 &mut self,
2507 message: proto::ProjectTransaction,
2508 push_to_history: bool,
2509 cx: &mut ModelContext<Self>,
2510 ) -> Task<Result<ProjectTransaction>> {
2511 let mut project_transaction = ProjectTransaction::default();
2512 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
2513 let buffer = match self.deserialize_buffer(buffer, cx) {
2514 Ok(buffer) => buffer,
2515 Err(error) => return Task::ready(Err(error)),
2516 };
2517 let transaction = match language::proto::deserialize_transaction(transaction) {
2518 Ok(transaction) => transaction,
2519 Err(error) => return Task::ready(Err(error)),
2520 };
2521 project_transaction.0.insert(buffer, transaction);
2522 }
2523
2524 cx.spawn_weak(|_, mut cx| async move {
2525 for (buffer, transaction) in &project_transaction.0 {
2526 buffer
2527 .update(&mut cx, |buffer, _| {
2528 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2529 })
2530 .await;
2531
2532 if push_to_history {
2533 buffer.update(&mut cx, |buffer, _| {
2534 buffer.push_transaction(transaction.clone(), Instant::now());
2535 });
2536 }
2537 }
2538
2539 Ok(project_transaction)
2540 })
2541 }
2542
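    /// Serializes a buffer for transmission to a peer. The first time a buffer
    /// is sent to a given peer, its full state is included and the buffer is
    /// recorded in `shared_buffers`; afterwards it is referenced by id only.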
2543 fn serialize_buffer_for_peer(
2544 &mut self,
2545 buffer: &ModelHandle<Buffer>,
2546 peer_id: PeerId,
2547 cx: &AppContext,
2548 ) -> proto::Buffer {
2549 let buffer_id = buffer.read(cx).remote_id();
2550 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2551 match shared_buffers.entry(buffer_id) {
2552 hash_map::Entry::Occupied(_) => proto::Buffer {
2553 variant: Some(proto::buffer::Variant::Id(buffer_id)),
2554 },
2555 hash_map::Entry::Vacant(entry) => {
2556 entry.insert(buffer.clone());
2557 proto::Buffer {
2558 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
2559 }
2560 }
2561 }
2562 }
2563
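    /// Resolves a `proto::Buffer` into a buffer model. An id variant must refer
    /// to a buffer that is already open, while a state variant creates a new
    /// buffer (along with its file, if any) and registers it with the project.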
2564 fn deserialize_buffer(
2565 &mut self,
2566 buffer: proto::Buffer,
2567 cx: &mut ModelContext<Self>,
2568 ) -> Result<ModelHandle<Buffer>> {
2569 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
2570 proto::buffer::Variant::Id(id) => self
2571 .open_buffers
2572 .get(&(id as usize))
2573 .and_then(|buffer| buffer.upgrade(cx))
2574 .ok_or_else(|| anyhow!("no buffer exists for id {}", id)),
2575 proto::buffer::Variant::State(mut buffer) => {
2576 let mut buffer_worktree = None;
2577 let mut buffer_file = None;
2578 if let Some(file) = buffer.file.take() {
2579 let worktree_id = WorktreeId::from_proto(file.worktree_id);
2580 let worktree = self
2581 .worktree_for_id(worktree_id, cx)
2582 .ok_or_else(|| anyhow!("no worktree found for id {}", file.worktree_id))?;
2583 buffer_file = Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
2584 as Box<dyn language::File>);
2585 buffer_worktree = Some(worktree);
2586 }
2587
2588 let buffer = cx.add_model(|cx| {
2589 Buffer::from_proto(self.replica_id(), buffer, buffer_file, cx).unwrap()
2590 });
2591 self.register_buffer(&buffer, buffer_worktree.as_ref(), cx)?;
2592 Ok(buffer)
2593 }
2594 }
2595 }
2596
2597 async fn handle_close_buffer(
2598 this: ModelHandle<Self>,
2599 envelope: TypedEnvelope<proto::CloseBuffer>,
2600 _: Arc<Client>,
2601 mut cx: AsyncAppContext,
2602 ) -> anyhow::Result<()> {
2603 this.update(&mut cx, |this, cx| {
2604 if let Some(shared_buffers) =
2605 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
2606 {
2607 shared_buffers.remove(&envelope.payload.buffer_id);
2608 cx.notify();
2609 }
2610 Ok(())
2611 })
2612 }
2613
2614 async fn handle_buffer_saved(
2615 this: ModelHandle<Self>,
2616 envelope: TypedEnvelope<proto::BufferSaved>,
2617 _: Arc<Client>,
2618 mut cx: AsyncAppContext,
2619 ) -> Result<()> {
2620 let version = envelope.payload.version.try_into()?;
2621 let mtime = envelope
2622 .payload
2623 .mtime
2624 .ok_or_else(|| anyhow!("missing mtime"))?
2625 .into();
2626
2627 this.update(&mut cx, |this, cx| {
2628 let buffer = this
2629 .open_buffers
2630 .get(&(envelope.payload.buffer_id as usize))
2631 .and_then(|buffer| buffer.upgrade(cx));
2632 if let Some(buffer) = buffer {
2633 buffer.update(cx, |buffer, cx| {
2634 buffer.did_save(version, mtime, None, cx);
2635 });
2636 }
2637 Ok(())
2638 })
2639 }
2640
2641 async fn handle_buffer_reloaded(
2642 this: ModelHandle<Self>,
2643 envelope: TypedEnvelope<proto::BufferReloaded>,
2644 _: Arc<Client>,
2645 mut cx: AsyncAppContext,
2646 ) -> Result<()> {
2647 let payload = envelope.payload.clone();
2648 let version = payload.version.try_into()?;
2649 let mtime = payload
2650 .mtime
2651 .ok_or_else(|| anyhow!("missing mtime"))?
2652 .into();
2653 this.update(&mut cx, |this, cx| {
2654 let buffer = this
2655 .open_buffers
2656 .get(&(payload.buffer_id as usize))
2657 .and_then(|buffer| buffer.upgrade(cx));
2658 if let Some(buffer) = buffer {
2659 buffer.update(cx, |buffer, cx| {
2660 buffer.did_reload(version, mtime, cx);
2661 });
2662 }
2663 Ok(())
2664 })
2665 }
2666
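    /// Performs a fuzzy search over the paths of all non-weak worktrees,
    /// returning up to `max_results` matches. Worktree root names are included
    /// in the searched text only when more than one worktree is involved.
    ///
    /// A minimal usage sketch, mirroring the tests below (assumes a `gpui`
    /// test context):
    ///
    /// ```ignore
    /// let cancel_flag = std::sync::atomic::AtomicBool::new(false);
    /// let matches = project
    ///     .read_with(&cx, |project, cx| {
    ///         project.match_paths("bna", false, false, 10, &cancel_flag, cx)
    ///     })
    ///     .await;
    /// ```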
2667 pub fn match_paths<'a>(
2668 &self,
2669 query: &'a str,
2670 include_ignored: bool,
2671 smart_case: bool,
2672 max_results: usize,
2673 cancel_flag: &'a AtomicBool,
2674 cx: &AppContext,
2675 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
2676 let worktrees = self
2677 .worktrees(cx)
2678 .filter(|worktree| !worktree.read(cx).is_weak())
2679 .collect::<Vec<_>>();
2680 let include_root_name = worktrees.len() > 1;
2681 let candidate_sets = worktrees
2682 .into_iter()
2683 .map(|worktree| CandidateSet {
2684 snapshot: worktree.read(cx).snapshot(),
2685 include_ignored,
2686 include_root_name,
2687 })
2688 .collect::<Vec<_>>();
2689
2690 let background = cx.background().clone();
2691 async move {
2692 fuzzy::match_paths(
2693 candidate_sets.as_slice(),
2694 query,
2695 smart_case,
2696 max_results,
2697 cancel_flag,
2698 background,
2699 )
2700 .await
2701 }
2702 }
2703}
2704
2705impl WorktreeHandle {
2706 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2707 match self {
2708 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2709 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2710 }
2711 }
2712}
2713
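/// Adapts a worktree snapshot into a set of fuzzy path-match candidates,
/// optionally including ignored files and the worktree's root name as a path
/// prefix.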
2714struct CandidateSet {
2715 snapshot: Snapshot,
2716 include_ignored: bool,
2717 include_root_name: bool,
2718}
2719
2720impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
2721 type Candidates = CandidateSetIter<'a>;
2722
2723 fn id(&self) -> usize {
2724 self.snapshot.id().to_usize()
2725 }
2726
2727 fn len(&self) -> usize {
2728 if self.include_ignored {
2729 self.snapshot.file_count()
2730 } else {
2731 self.snapshot.visible_file_count()
2732 }
2733 }
2734
2735 fn prefix(&self) -> Arc<str> {
2736 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
2737 self.snapshot.root_name().into()
2738 } else if self.include_root_name {
2739 format!("{}/", self.snapshot.root_name()).into()
2740 } else {
2741 "".into()
2742 }
2743 }
2744
2745 fn candidates(&'a self, start: usize) -> Self::Candidates {
2746 CandidateSetIter {
2747 traversal: self.snapshot.files(self.include_ignored, start),
2748 }
2749 }
2750}
2751
2752struct CandidateSetIter<'a> {
2753 traversal: Traversal<'a>,
2754}
2755
2756impl<'a> Iterator for CandidateSetIter<'a> {
2757 type Item = PathMatchCandidate<'a>;
2758
2759 fn next(&mut self) -> Option<Self::Item> {
2760 self.traversal.next().map(|entry| {
2761 if let EntryKind::File(char_bag) = entry.kind {
2762 PathMatchCandidate {
2763 path: &entry.path,
2764 char_bag,
2765 }
2766 } else {
2767 unreachable!()
2768 }
2769 })
2770 }
2771}
2772
2773impl Entity for Project {
2774 type Event = Event;
2775
2776 fn release(&mut self, _: &mut gpui::MutableAppContext) {
2777 match &self.client_state {
2778 ProjectClientState::Local { remote_id_rx, .. } => {
2779 if let Some(project_id) = *remote_id_rx.borrow() {
2780 self.client
2781 .send(proto::UnregisterProject { project_id })
2782 .log_err();
2783 }
2784 }
2785 ProjectClientState::Remote { remote_id, .. } => {
2786 self.client
2787 .send(proto::LeaveProject {
2788 project_id: *remote_id,
2789 })
2790 .log_err();
2791 }
2792 }
2793 }
2794
2795 fn app_will_quit(
2796 &mut self,
2797 _: &mut MutableAppContext,
2798 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
2799 use futures::FutureExt;
2800
2801 let shutdown_futures = self
2802 .language_servers
2803 .drain()
2804 .filter_map(|(_, server)| server.shutdown())
2805 .collect::<Vec<_>>();
2806 Some(
2807 async move {
2808 futures::future::join_all(shutdown_futures).await;
2809 }
2810 .boxed(),
2811 )
2812 }
2813}
2814
2815impl Collaborator {
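    /// Builds a `Collaborator` from its wire representation, resolving the user
    /// through the `UserStore`.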
2816 fn from_proto(
2817 message: proto::Collaborator,
2818 user_store: &ModelHandle<UserStore>,
2819 cx: &mut AsyncAppContext,
2820 ) -> impl Future<Output = Result<Self>> {
2821 let user = user_store.update(cx, |user_store, cx| {
2822 user_store.fetch_user(message.user_id, cx)
2823 });
2824
2825 async move {
2826 Ok(Self {
2827 peer_id: PeerId(message.peer_id),
2828 user: user.await?,
2829 replica_id: message.replica_id as ReplicaId,
2830 })
2831 }
2832 }
2833}
2834
2835impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
2836 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
2837 Self {
2838 worktree_id,
2839 path: path.as_ref().into(),
2840 }
2841 }
2842}
2843
2844impl From<lsp::CreateFileOptions> for fs::CreateOptions {
2845 fn from(options: lsp::CreateFileOptions) -> Self {
2846 Self {
2847 overwrite: options.overwrite.unwrap_or(false),
2848 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2849 }
2850 }
2851}
2852
2853impl From<lsp::RenameFileOptions> for fs::RenameOptions {
2854 fn from(options: lsp::RenameFileOptions) -> Self {
2855 Self {
2856 overwrite: options.overwrite.unwrap_or(false),
2857 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2858 }
2859 }
2860}
2861
2862impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
2863 fn from(options: lsp::DeleteFileOptions) -> Self {
2864 Self {
2865 recursive: options.recursive.unwrap_or(false),
2866 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
2867 }
2868 }
2869}
2870
2871#[cfg(test)]
2872mod tests {
2873 use super::{Event, *};
2874 use client::test::FakeHttpClient;
2875 use fs::RealFs;
2876 use futures::StreamExt;
2877 use gpui::test::subscribe;
2878 use language::{
2879 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageRegistry,
2880 LanguageServerConfig, Point,
2881 };
2882 use lsp::Url;
2883 use serde_json::json;
2884 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
2885 use unindent::Unindent as _;
2886 use util::test::temp_tree;
2887 use worktree::WorktreeHandle as _;
2888
2889 #[gpui::test]
2890 async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
2891 let dir = temp_tree(json!({
2892 "root": {
2893 "apple": "",
2894 "banana": {
2895 "carrot": {
2896 "date": "",
2897 "endive": "",
2898 }
2899 },
2900 "fennel": {
2901 "grape": "",
2902 }
2903 }
2904 }));
2905
2906 let root_link_path = dir.path().join("root_link");
2907 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
2908 unix::fs::symlink(
2909 &dir.path().join("root/fennel"),
2910 &dir.path().join("root/finnochio"),
2911 )
2912 .unwrap();
2913
2914 let project = Project::test(Arc::new(RealFs), &mut cx);
2915
2916 let (tree, _) = project
2917 .update(&mut cx, |project, cx| {
2918 project.find_or_create_local_worktree(&root_link_path, false, cx)
2919 })
2920 .await
2921 .unwrap();
2922
2923 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2924 .await;
2925 cx.read(|cx| {
2926 let tree = tree.read(cx);
2927 assert_eq!(tree.file_count(), 5);
2928 assert_eq!(
2929 tree.inode_for_path("fennel/grape"),
2930 tree.inode_for_path("finnochio/grape")
2931 );
2932 });
2933
2934 let cancel_flag = Default::default();
2935 let results = project
2936 .read_with(&cx, |project, cx| {
2937 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
2938 })
2939 .await;
2940 assert_eq!(
2941 results
2942 .into_iter()
2943 .map(|result| result.path)
2944 .collect::<Vec<Arc<Path>>>(),
2945 vec![
2946 PathBuf::from("banana/carrot/date").into(),
2947 PathBuf::from("banana/carrot/endive").into(),
2948 ]
2949 );
2950 }
2951
2952 #[gpui::test]
2953 async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
2954 let (language_server_config, mut fake_server) = LanguageServerConfig::fake(&cx).await;
2955 let progress_token = language_server_config
2956 .disk_based_diagnostics_progress_token
2957 .clone()
2958 .unwrap();
2959
2960 let mut languages = LanguageRegistry::new();
2961 languages.add(Arc::new(Language::new(
2962 LanguageConfig {
2963 name: "Rust".to_string(),
2964 path_suffixes: vec!["rs".to_string()],
2965 language_server: Some(language_server_config),
2966 ..Default::default()
2967 },
2968 Some(tree_sitter_rust::language()),
2969 )));
2970
2971 let dir = temp_tree(json!({
2972 "a.rs": "fn a() { A }",
2973 "b.rs": "const y: i32 = 1",
2974 }));
2975
2976 let http_client = FakeHttpClient::with_404_response();
2977 let client = Client::new(http_client.clone());
2978 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
2979
2980 let project = cx.update(|cx| {
2981 Project::local(
2982 client,
2983 user_store,
2984 Arc::new(languages),
2985 Arc::new(RealFs),
2986 cx,
2987 )
2988 });
2989
2990 let (tree, _) = project
2991 .update(&mut cx, |project, cx| {
2992 project.find_or_create_local_worktree(dir.path(), false, cx)
2993 })
2994 .await
2995 .unwrap();
2996 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
2997
2998 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2999 .await;
3000
        // Cause the worktree to start the fake language server
3002 let _buffer = project
3003 .update(&mut cx, |project, cx| {
3004 project.open_buffer(
3005 ProjectPath {
3006 worktree_id,
3007 path: Path::new("b.rs").into(),
3008 },
3009 cx,
3010 )
3011 })
3012 .await
3013 .unwrap();
3014
3015 let mut events = subscribe(&project, &mut cx);
3016
3017 fake_server.start_progress(&progress_token).await;
3018 assert_eq!(
3019 events.next().await.unwrap(),
3020 Event::DiskBasedDiagnosticsStarted
3021 );
3022
3023 fake_server.start_progress(&progress_token).await;
3024 fake_server.end_progress(&progress_token).await;
3025 fake_server.start_progress(&progress_token).await;
3026
3027 fake_server
3028 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3029 uri: Url::from_file_path(dir.path().join("a.rs")).unwrap(),
3030 version: None,
3031 diagnostics: vec![lsp::Diagnostic {
3032 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3033 severity: Some(lsp::DiagnosticSeverity::ERROR),
3034 message: "undefined variable 'A'".to_string(),
3035 ..Default::default()
3036 }],
3037 })
3038 .await;
3039 assert_eq!(
3040 events.next().await.unwrap(),
3041 Event::DiagnosticsUpdated(ProjectPath {
3042 worktree_id,
3043 path: Arc::from(Path::new("a.rs"))
3044 })
3045 );
3046
3047 fake_server.end_progress(&progress_token).await;
3048 fake_server.end_progress(&progress_token).await;
3049 assert_eq!(
3050 events.next().await.unwrap(),
3051 Event::DiskBasedDiagnosticsUpdated
3052 );
3053 assert_eq!(
3054 events.next().await.unwrap(),
3055 Event::DiskBasedDiagnosticsFinished
3056 );
3057
3058 let buffer = project
3059 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3060 .await
3061 .unwrap();
3062
3063 buffer.read_with(&cx, |buffer, _| {
3064 let snapshot = buffer.snapshot();
3065 let diagnostics = snapshot
3066 .diagnostics_in_range::<_, Point>(0..buffer.len())
3067 .collect::<Vec<_>>();
3068 assert_eq!(
3069 diagnostics,
3070 &[DiagnosticEntry {
3071 range: Point::new(0, 9)..Point::new(0, 10),
3072 diagnostic: Diagnostic {
3073 severity: lsp::DiagnosticSeverity::ERROR,
3074 message: "undefined variable 'A'".to_string(),
3075 group_id: 0,
3076 is_primary: true,
3077 ..Default::default()
3078 }
3079 }]
3080 )
3081 });
3082 }
3083
3084 #[gpui::test]
3085 async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
3086 let dir = temp_tree(json!({
3087 "root": {
3088 "dir1": {},
3089 "dir2": {
3090 "dir3": {}
3091 }
3092 }
3093 }));
3094
3095 let project = Project::test(Arc::new(RealFs), &mut cx);
3096 let (tree, _) = project
3097 .update(&mut cx, |project, cx| {
3098 project.find_or_create_local_worktree(&dir.path(), false, cx)
3099 })
3100 .await
3101 .unwrap();
3102
3103 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3104 .await;
3105
3106 let cancel_flag = Default::default();
3107 let results = project
3108 .read_with(&cx, |project, cx| {
3109 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3110 })
3111 .await;
3112
3113 assert!(results.is_empty());
3114 }
3115
3116 #[gpui::test]
3117 async fn test_definition(mut cx: gpui::TestAppContext) {
3118 let (language_server_config, mut fake_server) = LanguageServerConfig::fake(&cx).await;
3119
3120 let mut languages = LanguageRegistry::new();
3121 languages.add(Arc::new(Language::new(
3122 LanguageConfig {
3123 name: "Rust".to_string(),
3124 path_suffixes: vec!["rs".to_string()],
3125 language_server: Some(language_server_config),
3126 ..Default::default()
3127 },
3128 Some(tree_sitter_rust::language()),
3129 )));
3130
3131 let dir = temp_tree(json!({
3132 "a.rs": "const fn a() { A }",
3133 "b.rs": "const y: i32 = crate::a()",
3134 }));
3135 let dir_path = dir.path().to_path_buf();
3136
3137 let http_client = FakeHttpClient::with_404_response();
3138 let client = Client::new(http_client.clone());
3139 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
3140 let project = cx.update(|cx| {
3141 Project::local(
3142 client,
3143 user_store,
3144 Arc::new(languages),
3145 Arc::new(RealFs),
3146 cx,
3147 )
3148 });
3149
3150 let (tree, _) = project
3151 .update(&mut cx, |project, cx| {
3152 project.find_or_create_local_worktree(dir.path().join("b.rs"), false, cx)
3153 })
3154 .await
3155 .unwrap();
3156 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3157 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3158 .await;
3159
3160 let buffer = project
3161 .update(&mut cx, |project, cx| {
3162 project.open_buffer(
3163 ProjectPath {
3164 worktree_id,
3165 path: Path::new("").into(),
3166 },
3167 cx,
3168 )
3169 })
3170 .await
3171 .unwrap();
3172
3173 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params| {
3174 let params = params.text_document_position_params;
3175 assert_eq!(
3176 params.text_document.uri.to_file_path().unwrap(),
3177 dir_path.join("b.rs")
3178 );
3179 assert_eq!(params.position, lsp::Position::new(0, 22));
3180
3181 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3182 lsp::Url::from_file_path(dir_path.join("a.rs")).unwrap(),
3183 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3184 )))
3185 });
3186
3187 let mut definitions = project
3188 .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
3189 .await
3190 .unwrap();
3191
3192 assert_eq!(definitions.len(), 1);
3193 let definition = definitions.pop().unwrap();
3194 cx.update(|cx| {
3195 let target_buffer = definition.target_buffer.read(cx);
3196 assert_eq!(
3197 target_buffer
3198 .file()
3199 .unwrap()
3200 .as_local()
3201 .unwrap()
3202 .abs_path(cx),
3203 dir.path().join("a.rs")
3204 );
3205 assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
3206 assert_eq!(
3207 list_worktrees(&project, cx),
3208 [
3209 (dir.path().join("b.rs"), false),
3210 (dir.path().join("a.rs"), true)
3211 ]
3212 );
3213
3214 drop(definition);
3215 });
3216 cx.read(|cx| {
3217 assert_eq!(
3218 list_worktrees(&project, cx),
3219 [(dir.path().join("b.rs"), false)]
3220 );
3221 });
3222
3223 fn list_worktrees(project: &ModelHandle<Project>, cx: &AppContext) -> Vec<(PathBuf, bool)> {
3224 project
3225 .read(cx)
3226 .worktrees(cx)
3227 .map(|worktree| {
3228 let worktree = worktree.read(cx);
3229 (
3230 worktree.as_local().unwrap().abs_path().to_path_buf(),
3231 worktree.is_weak(),
3232 )
3233 })
3234 .collect::<Vec<_>>()
3235 }
3236 }
3237
3238 #[gpui::test]
3239 async fn test_save_file(mut cx: gpui::TestAppContext) {
3240 let fs = Arc::new(FakeFs::new(cx.background()));
3241 fs.insert_tree(
3242 "/dir",
3243 json!({
3244 "file1": "the old contents",
3245 }),
3246 )
3247 .await;
3248
3249 let project = Project::test(fs.clone(), &mut cx);
3250 let worktree_id = project
3251 .update(&mut cx, |p, cx| {
3252 p.find_or_create_local_worktree("/dir", false, cx)
3253 })
3254 .await
3255 .unwrap()
3256 .0
3257 .read_with(&cx, |tree, _| tree.id());
3258
3259 let buffer = project
3260 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3261 .await
3262 .unwrap();
3263 buffer
3264 .update(&mut cx, |buffer, cx| {
3265 assert_eq!(buffer.text(), "the old contents");
3266 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3267 buffer.save(cx)
3268 })
3269 .await
3270 .unwrap();
3271
3272 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3273 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3274 }
3275
3276 #[gpui::test]
3277 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
3278 let fs = Arc::new(FakeFs::new(cx.background()));
3279 fs.insert_tree(
3280 "/dir",
3281 json!({
3282 "file1": "the old contents",
3283 }),
3284 )
3285 .await;
3286
3287 let project = Project::test(fs.clone(), &mut cx);
3288 let worktree_id = project
3289 .update(&mut cx, |p, cx| {
3290 p.find_or_create_local_worktree("/dir/file1", false, cx)
3291 })
3292 .await
3293 .unwrap()
3294 .0
3295 .read_with(&cx, |tree, _| tree.id());
3296
3297 let buffer = project
3298 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3299 .await
3300 .unwrap();
3301 buffer
3302 .update(&mut cx, |buffer, cx| {
3303 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3304 buffer.save(cx)
3305 })
3306 .await
3307 .unwrap();
3308
3309 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3310 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3311 }
3312
3313 #[gpui::test(retries = 5)]
3314 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3315 let dir = temp_tree(json!({
3316 "a": {
3317 "file1": "",
3318 "file2": "",
3319 "file3": "",
3320 },
3321 "b": {
3322 "c": {
3323 "file4": "",
3324 "file5": "",
3325 }
3326 }
3327 }));
3328
3329 let project = Project::test(Arc::new(RealFs), &mut cx);
3330 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3331
3332 let (tree, _) = project
3333 .update(&mut cx, |p, cx| {
3334 p.find_or_create_local_worktree(dir.path(), false, cx)
3335 })
3336 .await
3337 .unwrap();
3338 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3339
3340 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3341 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3342 async move { buffer.await.unwrap() }
3343 };
3344 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3345 tree.read_with(cx, |tree, _| {
3346 tree.entry_for_path(path)
3347 .expect(&format!("no entry for path {}", path))
3348 .id
3349 })
3350 };
3351
3352 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3353 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3354 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3355 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3356
3357 let file2_id = id_for_path("a/file2", &cx);
3358 let file3_id = id_for_path("a/file3", &cx);
3359 let file4_id = id_for_path("b/c/file4", &cx);
3360
3361 // Wait for the initial scan.
3362 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3363 .await;
3364
3365 // Create a remote copy of this worktree.
3366 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.snapshot());
3367 let (remote, load_task) = cx.update(|cx| {
3368 Worktree::remote(
3369 1,
3370 1,
3371 initial_snapshot.to_proto(&Default::default(), Default::default()),
3372 rpc.clone(),
3373 cx,
3374 )
3375 });
3376 load_task.await;
3377
3378 cx.read(|cx| {
3379 assert!(!buffer2.read(cx).is_dirty());
3380 assert!(!buffer3.read(cx).is_dirty());
3381 assert!(!buffer4.read(cx).is_dirty());
3382 assert!(!buffer5.read(cx).is_dirty());
3383 });
3384
3385 // Rename and delete files and directories.
3386 tree.flush_fs_events(&cx).await;
3387 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3388 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3389 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3390 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3391 tree.flush_fs_events(&cx).await;
3392
3393 let expected_paths = vec![
3394 "a",
3395 "a/file1",
3396 "a/file2.new",
3397 "b",
3398 "d",
3399 "d/file3",
3400 "d/file4",
3401 ];
3402
3403 cx.read(|app| {
3404 assert_eq!(
3405 tree.read(app)
3406 .paths()
3407 .map(|p| p.to_str().unwrap())
3408 .collect::<Vec<_>>(),
3409 expected_paths
3410 );
3411
3412 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3413 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3414 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3415
3416 assert_eq!(
3417 buffer2.read(app).file().unwrap().path().as_ref(),
3418 Path::new("a/file2.new")
3419 );
3420 assert_eq!(
3421 buffer3.read(app).file().unwrap().path().as_ref(),
3422 Path::new("d/file3")
3423 );
3424 assert_eq!(
3425 buffer4.read(app).file().unwrap().path().as_ref(),
3426 Path::new("d/file4")
3427 );
3428 assert_eq!(
3429 buffer5.read(app).file().unwrap().path().as_ref(),
3430 Path::new("b/c/file5")
3431 );
3432
3433 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3434 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3435 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3436 assert!(buffer5.read(app).file().unwrap().is_deleted());
3437 });
3438
3439 // Update the remote worktree. Check that it becomes consistent with the
3440 // local worktree.
3441 remote.update(&mut cx, |remote, cx| {
3442 let update_message =
3443 tree.read(cx)
3444 .snapshot()
3445 .build_update(&initial_snapshot, 1, 1, true);
3446 remote
3447 .as_remote_mut()
3448 .unwrap()
3449 .snapshot
3450 .apply_remote_update(update_message)
3451 .unwrap();
3452
3453 assert_eq!(
3454 remote
3455 .paths()
3456 .map(|p| p.to_str().unwrap())
3457 .collect::<Vec<_>>(),
3458 expected_paths
3459 );
3460 });
3461 }
3462
3463 #[gpui::test]
3464 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3465 let fs = Arc::new(FakeFs::new(cx.background()));
3466 fs.insert_tree(
3467 "/the-dir",
3468 json!({
3469 "a.txt": "a-contents",
3470 "b.txt": "b-contents",
3471 }),
3472 )
3473 .await;
3474
3475 let project = Project::test(fs.clone(), &mut cx);
3476 let worktree_id = project
3477 .update(&mut cx, |p, cx| {
3478 p.find_or_create_local_worktree("/the-dir", false, cx)
3479 })
3480 .await
3481 .unwrap()
3482 .0
3483 .read_with(&cx, |tree, _| tree.id());
3484
3485 // Spawn multiple tasks to open paths, repeating some paths.
3486 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3487 (
3488 p.open_buffer((worktree_id, "a.txt"), cx),
3489 p.open_buffer((worktree_id, "b.txt"), cx),
3490 p.open_buffer((worktree_id, "a.txt"), cx),
3491 )
3492 });
3493
3494 let buffer_a_1 = buffer_a_1.await.unwrap();
3495 let buffer_a_2 = buffer_a_2.await.unwrap();
3496 let buffer_b = buffer_b.await.unwrap();
3497 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3498 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3499
3500 // There is only one buffer per path.
3501 let buffer_a_id = buffer_a_1.id();
3502 assert_eq!(buffer_a_2.id(), buffer_a_id);
3503
3504 // Open the same path again while it is still open.
3505 drop(buffer_a_1);
3506 let buffer_a_3 = project
3507 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3508 .await
3509 .unwrap();
3510
3511 // There's still only one buffer per path.
3512 assert_eq!(buffer_a_3.id(), buffer_a_id);
3513 }
3514
3515 #[gpui::test]
3516 async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
3517 use std::fs;
3518
3519 let dir = temp_tree(json!({
3520 "file1": "abc",
3521 "file2": "def",
3522 "file3": "ghi",
3523 }));
3524
3525 let project = Project::test(Arc::new(RealFs), &mut cx);
3526 let (worktree, _) = project
3527 .update(&mut cx, |p, cx| {
3528 p.find_or_create_local_worktree(dir.path(), false, cx)
3529 })
3530 .await
3531 .unwrap();
3532 let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
3533
3534 worktree.flush_fs_events(&cx).await;
3535 worktree
3536 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3537 .await;
3538
3539 let buffer1 = project
3540 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3541 .await
3542 .unwrap();
3543 let events = Rc::new(RefCell::new(Vec::new()));
3544
3545 // initially, the buffer isn't dirty.
3546 buffer1.update(&mut cx, |buffer, cx| {
3547 cx.subscribe(&buffer1, {
3548 let events = events.clone();
3549 move |_, _, event, _| events.borrow_mut().push(event.clone())
3550 })
3551 .detach();
3552
3553 assert!(!buffer.is_dirty());
3554 assert!(events.borrow().is_empty());
3555
3556 buffer.edit(vec![1..2], "", cx);
3557 });
3558
3559 // after the first edit, the buffer is dirty, and emits a dirtied event.
3560 buffer1.update(&mut cx, |buffer, cx| {
3561 assert!(buffer.text() == "ac");
3562 assert!(buffer.is_dirty());
3563 assert_eq!(
3564 *events.borrow(),
3565 &[language::Event::Edited, language::Event::Dirtied]
3566 );
3567 events.borrow_mut().clear();
3568 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
3569 });
3570
3571 // after saving, the buffer is not dirty, and emits a saved event.
3572 buffer1.update(&mut cx, |buffer, cx| {
3573 assert!(!buffer.is_dirty());
3574 assert_eq!(*events.borrow(), &[language::Event::Saved]);
3575 events.borrow_mut().clear();
3576
3577 buffer.edit(vec![1..1], "B", cx);
3578 buffer.edit(vec![2..2], "D", cx);
3579 });
3580
        // after editing again, the buffer is dirty, and emits another dirtied event.
3582 buffer1.update(&mut cx, |buffer, cx| {
3583 assert!(buffer.text() == "aBDc");
3584 assert!(buffer.is_dirty());
3585 assert_eq!(
3586 *events.borrow(),
3587 &[
3588 language::Event::Edited,
3589 language::Event::Dirtied,
3590 language::Event::Edited,
3591 ],
3592 );
3593 events.borrow_mut().clear();
3594
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
3597 buffer.edit([1..3], "", cx);
3598 assert!(buffer.text() == "ac");
3599 assert!(buffer.is_dirty());
3600 });
3601
3602 assert_eq!(*events.borrow(), &[language::Event::Edited]);
3603
3604 // When a file is deleted, the buffer is considered dirty.
3605 let events = Rc::new(RefCell::new(Vec::new()));
3606 let buffer2 = project
3607 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
3608 .await
3609 .unwrap();
3610 buffer2.update(&mut cx, |_, cx| {
3611 cx.subscribe(&buffer2, {
3612 let events = events.clone();
3613 move |_, _, event, _| events.borrow_mut().push(event.clone())
3614 })
3615 .detach();
3616 });
3617
3618 fs::remove_file(dir.path().join("file2")).unwrap();
3619 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
3620 assert_eq!(
3621 *events.borrow(),
3622 &[language::Event::Dirtied, language::Event::FileHandleChanged]
3623 );
3624
3625 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3626 let events = Rc::new(RefCell::new(Vec::new()));
3627 let buffer3 = project
3628 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
3629 .await
3630 .unwrap();
3631 buffer3.update(&mut cx, |_, cx| {
3632 cx.subscribe(&buffer3, {
3633 let events = events.clone();
3634 move |_, _, event, _| events.borrow_mut().push(event.clone())
3635 })
3636 .detach();
3637 });
3638
3639 worktree.flush_fs_events(&cx).await;
3640 buffer3.update(&mut cx, |buffer, cx| {
3641 buffer.edit(Some(0..0), "x", cx);
3642 });
3643 events.borrow_mut().clear();
3644 fs::remove_file(dir.path().join("file3")).unwrap();
3645 buffer3
3646 .condition(&cx, |_, _| !events.borrow().is_empty())
3647 .await;
3648 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
3649 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3650 }
3651
3652 #[gpui::test]
3653 async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
3654 use std::fs;
3655
3656 let initial_contents = "aaa\nbbbbb\nc\n";
3657 let dir = temp_tree(json!({ "the-file": initial_contents }));
3658
3659 let project = Project::test(Arc::new(RealFs), &mut cx);
3660 let (worktree, _) = project
3661 .update(&mut cx, |p, cx| {
3662 p.find_or_create_local_worktree(dir.path(), false, cx)
3663 })
3664 .await
3665 .unwrap();
3666 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
3667
3668 worktree
3669 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3670 .await;
3671
3672 let abs_path = dir.path().join("the-file");
3673 let buffer = project
3674 .update(&mut cx, |p, cx| {
3675 p.open_buffer((worktree_id, "the-file"), cx)
3676 })
3677 .await
3678 .unwrap();
3679
3680 // TODO
3681 // Add a cursor on each row.
3682 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
3683 // assert!(!buffer.is_dirty());
3684 // buffer.add_selection_set(
3685 // &(0..3)
3686 // .map(|row| Selection {
3687 // id: row as usize,
3688 // start: Point::new(row, 1),
3689 // end: Point::new(row, 1),
3690 // reversed: false,
3691 // goal: SelectionGoal::None,
3692 // })
3693 // .collect::<Vec<_>>(),
3694 // cx,
3695 // )
3696 // });
3697
3698 // Change the file on disk, adding two new lines of text, and removing
3699 // one line.
3700 buffer.read_with(&cx, |buffer, _| {
3701 assert!(!buffer.is_dirty());
3702 assert!(!buffer.has_conflict());
3703 });
3704 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3705 fs::write(&abs_path, new_contents).unwrap();
3706
3707 // Because the buffer was not modified, it is reloaded from disk. Its
3708 // contents are edited according to the diff between the old and new
3709 // file contents.
3710 buffer
3711 .condition(&cx, |buffer, _| buffer.text() == new_contents)
3712 .await;
3713
3714 buffer.update(&mut cx, |buffer, _| {
3715 assert_eq!(buffer.text(), new_contents);
3716 assert!(!buffer.is_dirty());
3717 assert!(!buffer.has_conflict());
3718
3719 // TODO
3720 // let cursor_positions = buffer
3721 // .selection_set(selection_set_id)
3722 // .unwrap()
3723 // .selections::<Point>(&*buffer)
3724 // .map(|selection| {
3725 // assert_eq!(selection.start, selection.end);
3726 // selection.start
3727 // })
3728 // .collect::<Vec<_>>();
3729 // assert_eq!(
3730 // cursor_positions,
3731 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
3732 // );
3733 });
3734
3735 // Modify the buffer
3736 buffer.update(&mut cx, |buffer, cx| {
3737 buffer.edit(vec![0..0], " ", cx);
3738 assert!(buffer.is_dirty());
3739 assert!(!buffer.has_conflict());
3740 });
3741
3742 // Change the file on disk again, adding blank lines to the beginning.
3743 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
3744
3745 // Because the buffer is modified, it doesn't reload from disk, but is
3746 // marked as having a conflict.
3747 buffer
3748 .condition(&cx, |buffer, _| buffer.has_conflict())
3749 .await;
3750 }
3751
3752 #[gpui::test]
3753 async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
3754 let fs = Arc::new(FakeFs::new(cx.background()));
3755 fs.insert_tree(
3756 "/the-dir",
3757 json!({
3758 "a.rs": "
3759 fn foo(mut v: Vec<usize>) {
3760 for x in &v {
3761 v.push(1);
3762 }
3763 }
3764 "
3765 .unindent(),
3766 }),
3767 )
3768 .await;
3769
3770 let project = Project::test(fs.clone(), &mut cx);
3771 let (worktree, _) = project
3772 .update(&mut cx, |p, cx| {
3773 p.find_or_create_local_worktree("/the-dir", false, cx)
3774 })
3775 .await
3776 .unwrap();
3777 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
3778
3779 let buffer = project
3780 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3781 .await
3782 .unwrap();
3783
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

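        // Ingest the message. Entries that refer to one another through related
        // information end up in the same diagnostic group.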
        project
            .update(&mut cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());

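        // All entries are returned in range order, and every hint carries the group id of
        // its primary diagnostic.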
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

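        // Each group can also be queried individually: group 0 is the warning plus its hint.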
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
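        // Group 1 is the error plus its two hints.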
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
}