1pub mod fs;
2mod ignore;
3pub mod worktree;
4
5use anyhow::{anyhow, Result};
6use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
7use clock::ReplicaId;
8use collections::{hash_map, HashMap, HashSet};
9use futures::Future;
10use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
11use gpui::{
12 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
13 WeakModelHandle,
14};
15use language::{
16 point_from_lsp,
17 proto::{deserialize_anchor, serialize_anchor},
18 range_from_lsp, AnchorRangeExt, Bias, Buffer, CodeAction, Completion, CompletionLabel,
19 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, PointUtf16, ToLspPosition,
20 ToOffset, ToPointUtf16, Transaction,
21};
22use lsp::{DiagnosticSeverity, LanguageServer};
23use postage::{prelude::Stream, watch};
24use smol::block_on;
25use std::{
26 convert::TryInto,
27 ops::Range,
28 path::{Path, PathBuf},
29 sync::{atomic::AtomicBool, Arc},
30 time::Instant,
31};
32use util::{post_inc, ResultExt, TryFutureExt as _};
33
34pub use fs::*;
35pub use worktree::*;
36
/// A project: a set of worktrees plus the state needed to collaborate on
/// them over RPC and to talk to language servers.
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    // The project entry most recently activated, if any.
    active_entry: Option<ProjectEntry>,
    languages: Arc<LanguageRegistry>,
    // One language server per (worktree id, language name) pair.
    language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
    client: Arc<client::Client>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    // Whether this project is hosted locally or joined from another machine.
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    // Signed count rather than a flag — presumably so overlapping start/finish
    // events from several servers balance out; see the calls to
    // disk_based_diagnostics_started/finished below. TODO confirm.
    language_servers_with_diagnostics_running: isize,
    // Weak handles keyed by buffer remote id; dead entries are pruned lazily
    // (see `get_open_buffer`).
    open_buffers: HashMap<usize, WeakModelHandle<Buffer>>,
    // In-flight loads, so concurrent opens of the same path share one task.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // Strong handles for buffers shared with each remote peer.
    shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
}
56
/// A worktree reference that either keeps the model alive (`Strong`) or lets
/// it drop when nothing else holds it (`Weak`).
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
61
/// Whether this project is hosted on this machine (`Local`) or joined from
/// another machine (`Remote`).
enum ProjectClientState {
    Local {
        /// Whether collaborators currently have access (see `share`/`unshare`).
        is_shared: bool,
        /// Publishes the server-assigned project id; `None` while disconnected.
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        /// Background task that re-registers the project across reconnects;
        /// created in `Project::local`.
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        /// Set once the host stops sharing; the replica becomes read-only.
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
    },
}
75
/// Another participant connected to this project.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
82
/// Notifications emitted by [`Project`] for observers (e.g. the UI).
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntry>),
    WorktreeRemoved(WorktreeId),
    /// A disk-based diagnostics pass (e.g. an external build tool) started.
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    /// Diagnostics changed for the given path.
    DiagnosticsUpdated(ProjectPath),
}
92
/// A path relative to the root of a particular worktree.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}
98
/// Per-severity counts of the primary diagnostics for a file
/// (see `DiagnosticSummary::new`).
#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}
106
/// The resolved target of a go-to-definition request.
#[derive(Debug)]
pub struct Definition {
    pub target_buffer: ModelHandle<Buffer>,
    pub target_range: Range<language::Anchor>,
}
112
/// A group of per-buffer transactions produced by one project-wide operation
/// (e.g. a format or code action), keyed by the buffer they apply to.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
115
116impl DiagnosticSummary {
117 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
118 let mut this = Self {
119 error_count: 0,
120 warning_count: 0,
121 info_count: 0,
122 hint_count: 0,
123 };
124
125 for entry in diagnostics {
126 if entry.diagnostic.is_primary {
127 match entry.diagnostic.severity {
128 DiagnosticSeverity::ERROR => this.error_count += 1,
129 DiagnosticSeverity::WARNING => this.warning_count += 1,
130 DiagnosticSeverity::INFORMATION => this.info_count += 1,
131 DiagnosticSeverity::HINT => this.hint_count += 1,
132 _ => {}
133 }
134 }
135 }
136
137 this
138 }
139
140 pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
141 proto::DiagnosticSummary {
142 path: path.to_string_lossy().to_string(),
143 error_count: self.error_count as u32,
144 warning_count: self.warning_count as u32,
145 info_count: self.info_count as u32,
146 hint_count: self.hint_count as u32,
147 }
148 }
149}
150
/// Identifies a single worktree entry (file or directory) within the project.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ProjectEntry {
    pub worktree_id: WorktreeId,
    pub entry_id: usize,
}
156
157impl Project {
    /// Creates a project hosted on this machine.
    ///
    /// Spawns a background task that watches the client's connection status:
    /// while connected it registers the project (and all current worktrees)
    /// with the server and publishes the assigned id; on disconnect it clears
    /// the id again via `set_remote_id`.
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        // React to every connection-status change for the
                        // project's lifetime (weak handle: stop once dropped).
                        let mut status = rpc.status();
                        while let Some(status) = status.recv().await {
                            if let Some(this) = this.upgrade(&cx) {
                                // Connected => ask the server for a project id;
                                // otherwise the project has no remote id.
                                let remote_id = if let client::Status::Connected { .. } = status {
                                    let response = rpc.request(proto::RegisterProject {}).await?;
                                    Some(response.project_id)
                                } else {
                                    None
                                };

                                if let Some(project_id) = remote_id {
                                    // Register every current worktree under the
                                    // new id *before* publishing the id itself.
                                    let mut registrations = Vec::new();
                                    this.update(&mut cx, |this, cx| {
                                        for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                                            registrations.push(worktree.update(
                                                cx,
                                                |worktree, cx| {
                                                    let worktree = worktree.as_local_mut().unwrap();
                                                    worktree.register(project_id, cx)
                                                },
                                            ));
                                        }
                                    });
                                    for registration in registrations {
                                        registration.await?;
                                    }
                                }
                                this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
                            }
                        }
                        Ok(())
                    }
                    // Errors from registration are logged, not propagated.
                    .log_err()
                }
            });

            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                open_buffers: Default::default(),
                loading_buffers: Default::default(),
                shared_buffers: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
            }
        })
    }
230
    /// Joins a project hosted by another collaborator.
    ///
    /// Authenticates and connects, downloads the project's worktrees and
    /// collaborator list, then builds a replica that stays in sync via the
    /// registered RPC message handlers.
    ///
    /// # Errors
    /// Fails if authentication, the join request, loading collaborator user
    /// records, or collaborator deserialization fails.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(&cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        // The server assigns this client's replica id for collaborative editing.
        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            // Each worktree finishes loading in the background.
            load_task.detach();
        }

        // Load user records for every collaborator before constructing them.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        Ok(cx.add_model(|cx| {
            let mut this = Self {
                worktrees: Vec::new(),
                open_buffers: Default::default(),
                loading_buffers: Default::default(),
                shared_buffers: Default::default(),
                active_entry: None,
                collaborators,
                languages,
                user_store,
                fs,
                // Subscribe to all server messages that keep this replica in
                // sync; dropping these subscriptions unregisters the handlers.
                subscriptions: vec![
                    client.add_entity_message_handler(remote_id, cx, Self::handle_unshare_project),
                    client.add_entity_message_handler(remote_id, cx, Self::handle_add_collaborator),
                    client.add_entity_message_handler(
                        remote_id,
                        cx,
                        Self::handle_remove_collaborator,
                    ),
                    client.add_entity_message_handler(remote_id, cx, Self::handle_share_worktree),
                    client.add_entity_message_handler(
                        remote_id,
                        cx,
                        Self::handle_unregister_worktree,
                    ),
                    client.add_entity_message_handler(remote_id, cx, Self::handle_update_worktree),
                    client.add_entity_message_handler(
                        remote_id,
                        cx,
                        Self::handle_update_diagnostic_summary,
                    ),
                    client.add_entity_message_handler(
                        remote_id,
                        cx,
                        Self::handle_disk_based_diagnostics_updating,
                    ),
                    client.add_entity_message_handler(
                        remote_id,
                        cx,
                        Self::handle_disk_based_diagnostics_updated,
                    ),
                    client.add_entity_message_handler(remote_id, cx, Self::handle_update_buffer),
                    client.add_entity_message_handler(
                        remote_id,
                        cx,
                        Self::handle_update_buffer_file,
                    ),
                    client.add_entity_message_handler(remote_id, cx, Self::handle_buffer_reloaded),
                    client.add_entity_message_handler(remote_id, cx, Self::handle_buffer_saved),
                ],
                client,
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        }))
    }
336
    /// Builds a local project backed by `fs`, with a fake HTTP client that
    /// answers every request with a 404. For tests only.
    #[cfg(any(test, feature = "test-support"))]
    pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::new());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
    }
345
    /// Publishes `remote_id` to watchers (for a local project) and reinstalls
    /// the host-side RPC handlers under the new id. Passing `None` just tears
    /// the handlers down.
    fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            *remote_id_tx.borrow_mut() = remote_id;
        }

        // Dropping the old subscriptions unregisters any previous handlers.
        self.subscriptions.clear();
        if let Some(remote_id) = remote_id {
            let client = &self.client;
            self.subscriptions.extend([
                client.add_entity_request_handler(remote_id, cx, Self::handle_open_buffer),
                client.add_entity_message_handler(remote_id, cx, Self::handle_close_buffer),
                client.add_entity_message_handler(remote_id, cx, Self::handle_add_collaborator),
                client.add_entity_message_handler(remote_id, cx, Self::handle_remove_collaborator),
                client.add_entity_message_handler(remote_id, cx, Self::handle_update_worktree),
                client.add_entity_message_handler(remote_id, cx, Self::handle_update_buffer),
                client.add_entity_request_handler(remote_id, cx, Self::handle_save_buffer),
                client.add_entity_message_handler(remote_id, cx, Self::handle_buffer_saved),
                client.add_entity_request_handler(remote_id, cx, Self::handle_format_buffers),
                client.add_entity_request_handler(remote_id, cx, Self::handle_get_completions),
                client.add_entity_request_handler(
                    remote_id,
                    cx,
                    Self::handle_apply_additional_edits_for_completion,
                ),
                client.add_entity_request_handler(remote_id, cx, Self::handle_get_code_actions),
                client.add_entity_request_handler(remote_id, cx, Self::handle_apply_code_action),
                client.add_entity_request_handler(remote_id, cx, Self::handle_get_definition),
            ]);
        }
    }
376
377 pub fn remote_id(&self) -> Option<u64> {
378 match &self.client_state {
379 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
380 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
381 }
382 }
383
384 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
385 let mut id = None;
386 let mut watch = None;
387 match &self.client_state {
388 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
389 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
390 }
391
392 async move {
393 if let Some(id) = id {
394 return id;
395 }
396 let mut watch = watch.unwrap();
397 loop {
398 let id = *watch.borrow();
399 if let Some(id) = id {
400 return id;
401 }
402 watch.recv().await;
403 }
404 }
405 }
406
407 pub fn replica_id(&self) -> ReplicaId {
408 match &self.client_state {
409 ProjectClientState::Local { .. } => 0,
410 ProjectClientState::Remote { replica_id, .. } => *replica_id,
411 }
412 }
413
    /// The other connected participants in this project, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
417
418 pub fn worktrees<'a>(
419 &'a self,
420 cx: &'a AppContext,
421 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
422 self.worktrees
423 .iter()
424 .filter_map(move |worktree| worktree.upgrade(cx))
425 }
426
427 pub fn worktree_for_id(
428 &self,
429 id: WorktreeId,
430 cx: &AppContext,
431 ) -> Option<ModelHandle<Worktree>> {
432 self.worktrees(cx)
433 .find(|worktree| worktree.read(cx).id() == id)
434 }
435
    /// Starts sharing this (local) project with collaborators.
    ///
    /// Marks the project shared, sends `ShareProject` to the server, then
    /// shares every worktree under the project id before notifying observers.
    ///
    /// # Errors
    /// Fails if the project has no remote id yet, or if this is a remote
    /// project (only the host can share).
    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, _| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    // Note: the flag is set even if we fail below for lack of
                    // a project id.
                    *is_shared = true;
                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;
            // Kick off sharing for every worktree, then await them all.
            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
472
473 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
474 let rpc = self.client.clone();
475 cx.spawn(|this, mut cx| async move {
476 let project_id = this.update(&mut cx, |this, _| {
477 if let ProjectClientState::Local {
478 is_shared,
479 remote_id_rx,
480 ..
481 } = &mut this.client_state
482 {
483 *is_shared = false;
484 remote_id_rx
485 .borrow()
486 .ok_or_else(|| anyhow!("no project id"))
487 } else {
488 Err(anyhow!("can't share a remote project"))
489 }
490 })?;
491
492 rpc.send(proto::UnshareProject { project_id })?;
493 this.update(&mut cx, |this, cx| {
494 this.collaborators.clear();
495 this.shared_buffers.clear();
496 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
497 worktree.update(cx, |worktree, _| {
498 worktree.as_local_mut().unwrap().unshare();
499 });
500 }
501 cx.notify()
502 });
503 Ok(())
504 })
505 }
506
507 pub fn is_read_only(&self) -> bool {
508 match &self.client_state {
509 ProjectClientState::Local { .. } => false,
510 ProjectClientState::Remote {
511 sharing_has_stopped,
512 ..
513 } => *sharing_has_stopped,
514 }
515 }
516
517 pub fn is_local(&self) -> bool {
518 match &self.client_state {
519 ProjectClientState::Local { .. } => true,
520 ProjectClientState::Remote { .. } => false,
521 }
522 }
523
    /// Opens the buffer at `path`, loading it if necessary.
    ///
    /// Requests are de-duplicated three ways: an already-open buffer is
    /// returned directly; an in-flight load of the same path is awaited and
    /// shared; otherwise a new load (local or remote, depending on the
    /// worktree) is started and its result broadcast to all waiters.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    // Publish the result (success or error) to every waiter.
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait until the load (ours or a previous caller's) completes.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.recv().await;
            }
        })
    }
584
    /// Loads `path` from a local worktree and registers the resulting buffer
    /// with the project (open-buffer table, language assignment, diagnostics).
    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        // Hold only a weak handle while loading, so the worktree can be
        // released in the meantime; fail gracefully if it was.
        let worktree = worktree.downgrade();
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            let worktree = worktree
                .upgrade(&cx)
                .ok_or_else(|| anyhow!("worktree was removed"))?;
            this.update(&mut cx, |this, cx| {
                this.register_buffer(&buffer, Some(&worktree), cx)
            })?;
            Ok(buffer)
        })
    }
607
    /// Asks the host to open `path` in `worktree` and deserializes the buffer
    /// state it sends back.
    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        // NOTE(review): assumes a remote project always has an id — the
        // unwrap panics otherwise; confirm callers uphold this.
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBuffer {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
        })
    }
631
    /// Opens a buffer for a file URI reported by a language server (e.g. a
    /// go-to-definition target), creating a new worktree for it when the path
    /// lies outside every existing worktree.
    fn open_local_buffer_from_lsp_path(
        &mut self,
        abs_path: lsp::Url,
        lang_name: String,
        lang_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                // No worktree contains this path: create one rooted at it.
                // (Second argument presumably marks the worktree as
                // weak/transient — confirm against create_local_worktree.)
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, true, cx)
                    })
                    .await?;
                // Reuse the existing server for the new worktree so the buffer
                // is associated with it immediately.
                this.update(&mut cx, |this, cx| {
                    this.language_servers
                        .insert((worktree.read(cx).id(), lang_name), lang_server);
                });
                // The worktree root *is* the file, so its relative path is empty.
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
668
    /// Saves `buffer` to a new absolute path, finding or creating a local
    /// worktree that contains it, then re-runs language detection since the
    /// buffer's file (and therefore its extension) may have changed.
    pub fn save_buffer_as(
        &self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
            });
            Ok(())
        })
    }
692
693 #[cfg(any(test, feature = "test-support"))]
694 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
695 let path = path.into();
696 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
697 self.open_buffers.iter().any(|(_, buffer)| {
698 if let Some(buffer) = buffer.upgrade(cx) {
699 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
700 if file.worktree == worktree && file.path() == &path.path {
701 return true;
702 }
703 }
704 }
705 false
706 })
707 } else {
708 false
709 }
710 }
711
    /// Returns the already-open buffer for `path`, if any.
    ///
    /// As a side effect, prunes `open_buffers` entries whose weak handles no
    /// longer upgrade (the buffers were dropped).
    fn get_open_buffer(
        &mut self,
        path: &ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Option<ModelHandle<Buffer>> {
        let mut result = None;
        let worktree = self.worktree_for_id(path.worktree_id, cx)?;
        self.open_buffers.retain(|_, buffer| {
            if let Some(buffer) = buffer.upgrade(cx) {
                if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                    if file.worktree == worktree && file.path() == &path.path {
                        result = Some(buffer);
                    }
                }
                true
            } else {
                // The buffer was dropped; remove the stale entry.
                false
            }
        });
        result
    }
733
734 fn register_buffer(
735 &mut self,
736 buffer: &ModelHandle<Buffer>,
737 worktree: Option<&ModelHandle<Worktree>>,
738 cx: &mut ModelContext<Self>,
739 ) -> Result<()> {
740 if self
741 .open_buffers
742 .insert(buffer.read(cx).remote_id() as usize, buffer.downgrade())
743 .is_some()
744 {
745 return Err(anyhow!("registered the same buffer twice"));
746 }
747 self.assign_language_to_buffer(&buffer, worktree, cx);
748 Ok(())
749 }
750
    /// Detects the buffer's language from its full path and, for local
    /// worktrees, attaches a language server (starting one if needed) and any
    /// diagnostics previously stored for the path.
    ///
    /// Returns `Option<()>` only so `?` can be used to bail out when the
    /// buffer has no file; the value itself carries no meaning.
    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        let (path, full_path) = {
            let file = buffer.read(cx).file()?;
            (file.path().clone(), file.full_path(cx))
        };

        // If the buffer has a language, set it and start/assign the language server
        if let Some(language) = self.languages.select_language(&full_path) {
            buffer.update(cx, |buffer, cx| {
                buffer.set_language(Some(language.clone()), cx);
            });

            // For local worktrees, start a language server if needed.
            // Also assign the language server and any previously stored diagnostics to the buffer.
            if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
                let worktree_id = local_worktree.id();
                let worktree_abs_path = local_worktree.abs_path().clone();

                // Reuse the server cached for this (worktree, language) pair,
                // or start one and cache it.
                let language_server = match self
                    .language_servers
                    .entry((worktree_id, language.name().to_string()))
                {
                    hash_map::Entry::Occupied(e) => Some(e.get().clone()),
                    hash_map::Entry::Vacant(e) => Self::start_language_server(
                        self.client.clone(),
                        language.clone(),
                        &worktree_abs_path,
                        cx,
                    )
                    .map(|server| e.insert(server).clone()),
                };

                buffer.update(cx, |buffer, cx| {
                    buffer.set_language_server(language_server, cx);
                });
            }
        }

        // Independently of language detection, restore any diagnostics the
        // worktree already holds for this path.
        if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
            if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
                buffer.update(cx, |buffer, cx| {
                    buffer.update_diagnostics(diagnostics, None, cx).log_err();
                });
            }
        }

        None
    }
804
    /// Starts a language server for `language` rooted at `worktree_path` and
    /// spawns a task that forwards its diagnostics into the project (and to
    /// collaborators over RPC when the project has a remote id).
    ///
    /// Returns `None` if the server fails to start (the error is logged).
    fn start_language_server(
        rpc: Arc<Client>,
        language: Arc<Language>,
        worktree_path: &Path,
        cx: &mut ModelContext<Self>,
    ) -> Option<Arc<LanguageServer>> {
        // Internal events decoupling LSP notification callbacks from project
        // updates, which need the app context on the main task below.
        enum LspEvent {
            DiagnosticsStart,
            DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
            DiagnosticsFinish,
        }

        let language_server = language
            .start_server(worktree_path, cx)
            .log_err()
            .flatten()?;
        let disk_based_sources = language
            .disk_based_diagnostic_sources()
            .cloned()
            .unwrap_or_default();
        let disk_based_diagnostics_progress_token =
            language.disk_based_diagnostics_progress_token().cloned();
        let has_disk_based_diagnostic_progress_token =
            disk_based_diagnostics_progress_token.is_some();
        let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();

        // Listen for `PublishDiagnostics` notifications.
        language_server
            .on_notification::<lsp::notification::PublishDiagnostics, _>({
                let diagnostics_tx = diagnostics_tx.clone();
                move |params| {
                    // Without a progress token there is no other signal for
                    // when a diagnostics pass starts/ends, so wrap each
                    // publish in synthetic start/finish events.
                    if !has_disk_based_diagnostic_progress_token {
                        block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                    }
                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params))).ok();
                    if !has_disk_based_diagnostic_progress_token {
                        block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                    }
                }
            })
            .detach();

        // Listen for `Progress` notifications. Send an event when the language server
        // transitions between running jobs and not running any jobs.
        let mut running_jobs_for_this_server: i32 = 0;
        language_server
            .on_notification::<lsp::notification::Progress, _>(move |params| {
                let token = match params.token {
                    lsp::NumberOrString::Number(_) => None,
                    lsp::NumberOrString::String(token) => Some(token),
                };

                // Only the language's designated disk-based-diagnostics token
                // is tracked; other progress streams are ignored.
                if token == disk_based_diagnostics_progress_token {
                    match params.value {
                        lsp::ProgressParamsValue::WorkDone(progress) => match progress {
                            lsp::WorkDoneProgress::Begin(_) => {
                                running_jobs_for_this_server += 1;
                                if running_jobs_for_this_server == 1 {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                                }
                            }
                            lsp::WorkDoneProgress::End(_) => {
                                running_jobs_for_this_server -= 1;
                                if running_jobs_for_this_server == 0 {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                                }
                            }
                            _ => {}
                        },
                    }
                }
            })
            .detach();

        // Process all the LSP events.
        cx.spawn_weak(|this, mut cx| async move {
            while let Ok(message) = diagnostics_rx.recv().await {
                // Stop once the project is dropped.
                let this = this.upgrade(&cx)?;
                match message {
                    LspEvent::DiagnosticsStart => {
                        this.update(&mut cx, |this, cx| {
                            this.disk_based_diagnostics_started(cx);
                            if let Some(project_id) = this.remote_id() {
                                rpc.send(proto::DiskBasedDiagnosticsUpdating { project_id })
                                    .log_err();
                            }
                        });
                    }
                    LspEvent::DiagnosticsUpdate(mut params) => {
                        // Give the language a chance to normalize/filter the
                        // raw server output before storing it.
                        language.process_diagnostics(&mut params);
                        this.update(&mut cx, |this, cx| {
                            this.update_diagnostics(params, &disk_based_sources, cx)
                                .log_err();
                        });
                    }
                    LspEvent::DiagnosticsFinish => {
                        this.update(&mut cx, |this, cx| {
                            this.disk_based_diagnostics_finished(cx);
                            if let Some(project_id) = this.remote_id() {
                                rpc.send(proto::DiskBasedDiagnosticsUpdated { project_id })
                                    .log_err();
                            }
                        });
                    }
                }
            }
            Some(())
        })
        .detach();

        Some(language_server)
    }
917
918 pub fn update_diagnostics(
919 &mut self,
920 params: lsp::PublishDiagnosticsParams,
921 disk_based_sources: &HashSet<String>,
922 cx: &mut ModelContext<Self>,
923 ) -> Result<()> {
924 let abs_path = params
925 .uri
926 .to_file_path()
927 .map_err(|_| anyhow!("URI is not a file"))?;
928 let mut next_group_id = 0;
929 let mut diagnostics = Vec::default();
930 let mut primary_diagnostic_group_ids = HashMap::default();
931 let mut sources_by_group_id = HashMap::default();
932 let mut supporting_diagnostic_severities = HashMap::default();
933 for diagnostic in ¶ms.diagnostics {
934 let source = diagnostic.source.as_ref();
935 let code = diagnostic.code.as_ref().map(|code| match code {
936 lsp::NumberOrString::Number(code) => code.to_string(),
937 lsp::NumberOrString::String(code) => code.clone(),
938 });
939 let range = range_from_lsp(diagnostic.range);
940 let is_supporting = diagnostic
941 .related_information
942 .as_ref()
943 .map_or(false, |infos| {
944 infos.iter().any(|info| {
945 primary_diagnostic_group_ids.contains_key(&(
946 source,
947 code.clone(),
948 range_from_lsp(info.location.range),
949 ))
950 })
951 });
952
953 if is_supporting {
954 if let Some(severity) = diagnostic.severity {
955 supporting_diagnostic_severities
956 .insert((source, code.clone(), range), severity);
957 }
958 } else {
959 let group_id = post_inc(&mut next_group_id);
960 let is_disk_based =
961 source.map_or(false, |source| disk_based_sources.contains(source));
962
963 sources_by_group_id.insert(group_id, source);
964 primary_diagnostic_group_ids
965 .insert((source, code.clone(), range.clone()), group_id);
966
967 diagnostics.push(DiagnosticEntry {
968 range,
969 diagnostic: Diagnostic {
970 code: code.clone(),
971 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
972 message: diagnostic.message.clone(),
973 group_id,
974 is_primary: true,
975 is_valid: true,
976 is_disk_based,
977 },
978 });
979 if let Some(infos) = &diagnostic.related_information {
980 for info in infos {
981 if info.location.uri == params.uri && !info.message.is_empty() {
982 let range = range_from_lsp(info.location.range);
983 diagnostics.push(DiagnosticEntry {
984 range,
985 diagnostic: Diagnostic {
986 code: code.clone(),
987 severity: DiagnosticSeverity::INFORMATION,
988 message: info.message.clone(),
989 group_id,
990 is_primary: false,
991 is_valid: true,
992 is_disk_based,
993 },
994 });
995 }
996 }
997 }
998 }
999 }
1000
1001 for entry in &mut diagnostics {
1002 let diagnostic = &mut entry.diagnostic;
1003 if !diagnostic.is_primary {
1004 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1005 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1006 source,
1007 diagnostic.code.clone(),
1008 entry.range.clone(),
1009 )) {
1010 diagnostic.severity = severity;
1011 }
1012 }
1013 }
1014
1015 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1016 Ok(())
1017 }
1018
    /// Stores freshly computed diagnostics for `abs_path` on both the open
    /// buffer for that path (if any) and its local worktree, then emits
    /// [`Event::DiagnosticsUpdated`].
    ///
    /// `version` is the document version the diagnostics were computed
    /// against, when the language server supplied one.
    ///
    /// # Errors
    /// Fails if no local worktree contains `abs_path`, if the worktree is not
    /// local, or if applying the diagnostics fails.
    pub fn update_diagnostic_entries(
        &mut self,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };

        // NOTE(review): buffers are matched by relative path only (not by
        // worktree), and only the first match is updated — confirm this is
        // intended when multiple worktrees contain files at the same path.
        for buffer in self.open_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| *file.path() == project_path.path)
                {
                    buffer.update(cx, |buffer, cx| {
                        buffer.update_diagnostics(diagnostics.clone(), version, cx)
                    })?;
                    break;
                }
            }
        }
        // Persist on the worktree so buffers opened later can pick them up
        // (see `assign_language_to_buffer`).
        worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(project_path.path.clone(), diagnostics, cx)
        })?;
        cx.emit(Event::DiagnosticsUpdated(project_path));
        Ok(())
    }
1057
    /// Formats each buffer in `buffers` and returns the combined edits as a
    /// [`ProjectTransaction`]. Buffers in local worktrees are formatted via their
    /// language server; buffers in remote worktrees are forwarded to the host
    /// over RPC.
    ///
    /// When `push_to_history` is false, each resulting transaction is removed
    /// from its buffer's undo history after being captured.
    pub fn format(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<ProjectTransaction>> {
        // Partition the requested buffers into local (format here) and remote
        // (ask the host) sets.
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            let worktree;
            if let Some(file) = File::from_dyn(buffer.file()) {
                worktree = file.worktree.clone();
                if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
                    let lang_server;
                    if let Some(lang) = buffer.language() {
                        if let Some(server) = self
                            .language_servers
                            .get(&(worktree.read(cx).id(), lang.name().to_string()))
                        {
                            lang_server = server.clone();
                        } else {
                            // No language server for this buffer's language.
                            // NOTE(review): bailing here drops formatting for buffers
                            // already collected in this loop — confirm intended.
                            return Task::ready(Ok(Default::default()));
                        };
                    } else {
                        return Task::ready(Ok(Default::default()));
                    }

                    local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
                } else {
                    // The file lives in a remote worktree; the host must format it.
                    remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                }
            } else {
                // Untitled (file-less) buffers cannot be formatted.
                return Task::ready(Ok(Default::default()));
            }
        }

        // Remote formatting requires a remote project id; otherwise drop that set.
        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            // Ask the host to format the remote buffers and replay its transaction.
            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::FormatBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            // Format local buffers sequentially through their language servers.
            for (buffer, buffer_abs_path, lang_server) in local_buffers {
                let lsp_edits = lang_server
                    .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
                        text_document: lsp::TextDocumentIdentifier::new(
                            lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                        ),
                        options: Default::default(),
                        work_done_progress_params: Default::default(),
                    })
                    .await?;

                if let Some(lsp_edits) = lsp_edits {
                    let edits = buffer
                        .update(&mut cx, |buffer, cx| {
                            buffer.edits_from_lsp(lsp_edits, None, cx)
                        })
                        .await?;
                    // Apply all of a buffer's edits as one transaction so they undo
                    // atomically.
                    buffer.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            project_transaction.0.insert(cx.handle(), transaction);
                        }
                    });
                }
            }

            Ok(project_transaction)
        })
    }
1157
    /// Returns the definitions of the symbol at `position` in the given buffer.
    ///
    /// For buffers in local worktrees, the buffer's language server is queried
    /// directly and a buffer is opened for each returned location; for remote
    /// projects the request is proxied to the host over RPC.
    pub fn definition<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Definition>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // File-less buffers have no language server or host to ask.
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_name;
            let lang_server;
            if let Some(lang) = source_buffer.language() {
                lang_name = lang.name().to_string();
                if let Some(server) = self
                    .language_servers
                    .get(&(worktree.read(cx).id(), lang_name.clone()))
                {
                    lang_server = server.clone();
                } else {
                    // No running server for this language: no definitions.
                    return Task::ready(Ok(Default::default()));
                };
            } else {
                return Task::ready(Ok(Default::default()));
            }

            cx.spawn(|this, mut cx| async move {
                let response = lang_server
                    .request::<lsp::request::GotoDefinition>(lsp::GotoDefinitionParams {
                        text_document_position_params: lsp::TextDocumentPositionParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                            ),
                            position: lsp::Position::new(position.row, position.column),
                        },
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await?;

                let mut definitions = Vec::new();
                if let Some(response) = response {
                    // Flatten the three LSP response shapes into (uri, range) pairs.
                    let mut unresolved_locations = Vec::new();
                    match response {
                        lsp::GotoDefinitionResponse::Scalar(loc) => {
                            unresolved_locations.push((loc.uri, loc.range));
                        }
                        lsp::GotoDefinitionResponse::Array(locs) => {
                            unresolved_locations.extend(locs.into_iter().map(|l| (l.uri, l.range)));
                        }
                        lsp::GotoDefinitionResponse::Link(links) => {
                            unresolved_locations.extend(
                                links
                                    .into_iter()
                                    .map(|l| (l.target_uri, l.target_selection_range)),
                            );
                        }
                    }

                    for (target_uri, target_range) in unresolved_locations {
                        // Open (or reuse) a buffer for each target so the caller can
                        // jump straight into it.
                        let target_buffer_handle = this
                            .update(&mut cx, |this, cx| {
                                this.open_local_buffer_from_lsp_path(
                                    target_uri,
                                    lang_name.clone(),
                                    lang_server.clone(),
                                    cx,
                                )
                            })
                            .await?;

                        cx.read(|cx| {
                            let target_buffer = target_buffer_handle.read(cx);
                            // Clip in case the server's range is stale relative to
                            // the buffer's current contents.
                            let target_start = target_buffer
                                .clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
                            let target_end = target_buffer
                                .clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
                            definitions.push(Definition {
                                target_buffer: target_buffer_handle,
                                target_range: target_buffer.anchor_after(target_start)
                                    ..target_buffer.anchor_before(target_end),
                            });
                        });
                    }
                }

                Ok(definitions)
            })
        } else if let Some(project_id) = self.remote_id() {
            // Guest path: ask the host and deserialize its answer.
            let client = self.client.clone();
            let request = proto::GetDefinition {
                project_id,
                buffer_id: source_buffer.remote_id(),
                position: Some(serialize_anchor(&source_buffer.anchor_before(position))),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client.request(request).await?;
                this.update(&mut cx, |this, cx| {
                    let mut definitions = Vec::new();
                    for definition in response.definitions {
                        let target_buffer = this.deserialize_buffer(
                            definition.buffer.ok_or_else(|| anyhow!("missing buffer"))?,
                            cx,
                        )?;
                        let target_start = definition
                            .target_start
                            .and_then(deserialize_anchor)
                            .ok_or_else(|| anyhow!("missing target start"))?;
                        let target_end = definition
                            .target_end
                            .and_then(deserialize_anchor)
                            .ok_or_else(|| anyhow!("missing target end"))?;
                        definitions.push(Definition {
                            target_buffer,
                            target_range: target_start..target_end,
                        })
                    }

                    Ok(definitions)
                })
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
1294
    /// Requests completions at `position` in the given buffer.
    ///
    /// Locally this queries the buffer's language server; for remote projects it
    /// asks the host over RPC. Completions whose edit ranges do not match the
    /// buffer's current contents (i.e. would be clipped) are dropped.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // File-less buffers have no language server or host to ask.
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
                server
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            position.to_lsp_position(),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await?;

                // Both response shapes boil down to a flat list of items.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                // Convert LSP items to our Completion type, anchored in the buffer.
                source_buffer_handle.read_with(&cx, |this, _| {
                    Ok(completions.into_iter().filter_map(|lsp_completion| {
                        let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
                            lsp::CompletionTextEdit::Edit(edit) => (range_from_lsp(edit.range), edit.new_text.clone()),
                            lsp::CompletionTextEdit::InsertAndReplace(_) => {
                                log::info!("received an insert and replace completion but we don't yet support that");
                                return None
                            },
                        };

                        // Drop completions whose ranges no longer fit the buffer.
                        let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                        let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left) ;
                        if clipped_start == old_range.start && clipped_end == old_range.end {
                            Some(Completion {
                                old_range: this.anchor_before(old_range.start)..this.anchor_after(old_range.end),
                                new_text,
                                label: language.as_ref().and_then(|l| l.label_for_completion(&lsp_completion)).unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
                                lsp_completion,
                            })
                        } else {
                            None
                        }
                    }).collect())
                })

            })
        } else if let Some(project_id) = self.remote_id() {
            // Guest path: the host computes completions and serializes them back.
            let rpc = self.client.clone();
            cx.foreground().spawn(async move {
                let response = rpc
                    .request(proto::GetCompletions {
                        project_id,
                        buffer_id,
                        position: Some(language::proto::serialize_anchor(&anchor)),
                    })
                    .await?;
                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
1397
    /// Applies the additional text edits (e.g. auto-imports) attached to an
    /// accepted completion, returning the resulting transaction if any edits
    /// were applied.
    ///
    /// Locally the completion is resolved via the language server; remotely the
    /// request is forwarded to the host and the returned transaction is awaited
    /// in the local buffer replica.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let lang_server = if let Some(language_server) = buffer.language_server() {
                language_server.clone()
            } else {
                return Task::ready(Err(anyhow!("buffer does not have a language server")));
            };

            cx.spawn(|_, mut cx| async move {
                // Resolve first: servers often omit additional_text_edits until
                // the item is resolved.
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = buffer_handle
                        .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
                        .await?;
                    // Group the edits into a single transaction so they undo
                    // atomically.
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Wait until the host's edit operations have replicated into
                    // this buffer before exposing the transaction.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
1476
    /// Requests code actions available for `range` in the given buffer.
    ///
    /// Locally this queries the buffer's language server (restricted to the
    /// quickfix/refactor kinds below); remotely the request is forwarded to the
    /// host. Plain LSP commands (non-CodeAction entries) are filtered out.
    pub fn code_actions<T: ToOffset>(
        &self,
        buffer_handle: &ModelHandle<Buffer>,
        range: Range<T>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<CodeAction>>> {
        let buffer_handle = buffer_handle.clone();
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // File-less buffers have no language server or host to ask.
            return Task::ready(Ok(Default::default()));
        };
        // Anchor the range so it survives concurrent edits to the buffer.
        let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_name;
            let lang_server;
            if let Some(lang) = buffer.language() {
                lang_name = lang.name().to_string();
                if let Some(server) = self
                    .language_servers
                    .get(&(worktree.read(cx).id(), lang_name.clone()))
                {
                    lang_server = server.clone();
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            } else {
                return Task::ready(Ok(Default::default()));
            }

            // Build the request now (while we can read the buffer) and await it
            // on the foreground executor.
            let actions =
                lang_server.request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
                    text_document: lsp::TextDocumentIdentifier::new(
                        lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                    ),
                    range: lsp::Range::new(
                        range.start.to_point_utf16(buffer).to_lsp_position(),
                        range.end.to_point_utf16(buffer).to_lsp_position(),
                    ),
                    work_done_progress_params: Default::default(),
                    partial_result_params: Default::default(),
                    context: lsp::CodeActionContext {
                        diagnostics: Default::default(),
                        only: Some(vec![
                            lsp::CodeActionKind::QUICKFIX,
                            lsp::CodeActionKind::REFACTOR,
                            lsp::CodeActionKind::REFACTOR_EXTRACT,
                        ]),
                    },
                });
            cx.foreground().spawn(async move {
                Ok(actions
                    .await?
                    .unwrap_or_default()
                    .into_iter()
                    .filter_map(|entry| {
                        if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
                            Some(CodeAction {
                                range: range.clone(),
                                lsp_action,
                            })
                        } else {
                            // Bare commands are not supported; drop them.
                            None
                        }
                    })
                    .collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            cx.foreground().spawn(async move {
                let response = rpc
                    .request(proto::GetCodeActions {
                        project_id,
                        buffer_id,
                        start: Some(language::proto::serialize_anchor(&range.start)),
                        end: Some(language::proto::serialize_anchor(&range.end)),
                    })
                    .await?;
                response
                    .actions
                    .into_iter()
                    .map(language::proto::deserialize_code_action)
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
1572
1573 pub fn apply_code_action(
1574 &self,
1575 buffer_handle: ModelHandle<Buffer>,
1576 mut action: CodeAction,
1577 push_to_history: bool,
1578 cx: &mut ModelContext<Self>,
1579 ) -> Task<Result<ProjectTransaction>> {
1580 if self.is_local() {
1581 let buffer = buffer_handle.read(cx);
1582 let lang_name = if let Some(lang) = buffer.language() {
1583 lang.name().to_string()
1584 } else {
1585 return Task::ready(Ok(Default::default()));
1586 };
1587 let lang_server = if let Some(language_server) = buffer.language_server() {
1588 language_server.clone()
1589 } else {
1590 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1591 };
1592 let range = action.range.to_point_utf16(buffer);
1593 let fs = self.fs.clone();
1594
1595 cx.spawn(|this, mut cx| async move {
1596 if let Some(lsp_range) = action
1597 .lsp_action
1598 .data
1599 .as_mut()
1600 .and_then(|d| d.get_mut("codeActionParams"))
1601 .and_then(|d| d.get_mut("range"))
1602 {
1603 *lsp_range = serde_json::to_value(&lsp::Range::new(
1604 range.start.to_lsp_position(),
1605 range.end.to_lsp_position(),
1606 ))
1607 .unwrap();
1608 action.lsp_action = lang_server
1609 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1610 .await?;
1611 } else {
1612 let actions = this
1613 .update(&mut cx, |this, cx| {
1614 this.code_actions(&buffer_handle, action.range, cx)
1615 })
1616 .await?;
1617 action.lsp_action = actions
1618 .into_iter()
1619 .find(|a| a.lsp_action.title == action.lsp_action.title)
1620 .ok_or_else(|| anyhow!("code action is outdated"))?
1621 .lsp_action;
1622 }
1623
1624 let mut operations = Vec::new();
1625 if let Some(edit) = action.lsp_action.edit {
1626 if let Some(document_changes) = edit.document_changes {
1627 match document_changes {
1628 lsp::DocumentChanges::Edits(edits) => operations
1629 .extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit)),
1630 lsp::DocumentChanges::Operations(ops) => operations = ops,
1631 }
1632 } else if let Some(changes) = edit.changes {
1633 operations.extend(changes.into_iter().map(|(uri, edits)| {
1634 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1635 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1636 uri,
1637 version: None,
1638 },
1639 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1640 })
1641 }));
1642 }
1643 }
1644
1645 let mut project_transaction = ProjectTransaction::default();
1646 for operation in operations {
1647 match operation {
1648 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1649 let abs_path = op
1650 .uri
1651 .to_file_path()
1652 .map_err(|_| anyhow!("can't convert URI to path"))?;
1653
1654 if let Some(parent_path) = abs_path.parent() {
1655 fs.create_dir(parent_path).await?;
1656 }
1657 if abs_path.ends_with("/") {
1658 fs.create_dir(&abs_path).await?;
1659 } else {
1660 fs.create_file(
1661 &abs_path,
1662 op.options.map(Into::into).unwrap_or_default(),
1663 )
1664 .await?;
1665 }
1666 }
1667 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1668 let source_abs_path = op
1669 .old_uri
1670 .to_file_path()
1671 .map_err(|_| anyhow!("can't convert URI to path"))?;
1672 let target_abs_path = op
1673 .new_uri
1674 .to_file_path()
1675 .map_err(|_| anyhow!("can't convert URI to path"))?;
1676 fs.rename(
1677 &source_abs_path,
1678 &target_abs_path,
1679 op.options.map(Into::into).unwrap_or_default(),
1680 )
1681 .await?;
1682 }
1683 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1684 let abs_path = op
1685 .uri
1686 .to_file_path()
1687 .map_err(|_| anyhow!("can't convert URI to path"))?;
1688 let options = op.options.map(Into::into).unwrap_or_default();
1689 if abs_path.ends_with("/") {
1690 fs.remove_dir(&abs_path, options).await?;
1691 } else {
1692 fs.remove_file(&abs_path, options).await?;
1693 }
1694 }
1695 lsp::DocumentChangeOperation::Edit(op) => {
1696 let buffer_to_edit = this
1697 .update(&mut cx, |this, cx| {
1698 this.open_local_buffer_from_lsp_path(
1699 op.text_document.uri,
1700 lang_name.clone(),
1701 lang_server.clone(),
1702 cx,
1703 )
1704 })
1705 .await?;
1706
1707 let edits = buffer_to_edit
1708 .update(&mut cx, |buffer, cx| {
1709 let edits = op.edits.into_iter().map(|edit| match edit {
1710 lsp::OneOf::Left(edit) => edit,
1711 lsp::OneOf::Right(edit) => edit.text_edit,
1712 });
1713 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1714 })
1715 .await?;
1716
1717 let transaction = buffer_to_edit.update(&mut cx, |buffer, cx| {
1718 buffer.finalize_last_transaction();
1719 buffer.start_transaction();
1720 for (range, text) in edits {
1721 buffer.edit([range], text, cx);
1722 }
1723 let transaction = if buffer.end_transaction(cx).is_some() {
1724 let transaction =
1725 buffer.finalize_last_transaction().unwrap().clone();
1726 if !push_to_history {
1727 buffer.forget_transaction(transaction.id);
1728 }
1729 Some(transaction)
1730 } else {
1731 None
1732 };
1733
1734 transaction
1735 });
1736 if let Some(transaction) = transaction {
1737 project_transaction.0.insert(buffer_to_edit, transaction);
1738 }
1739 }
1740 }
1741 }
1742
1743 Ok(project_transaction)
1744 })
1745 } else if let Some(project_id) = self.remote_id() {
1746 let client = self.client.clone();
1747 let request = proto::ApplyCodeAction {
1748 project_id,
1749 buffer_id: buffer_handle.read(cx).remote_id(),
1750 action: Some(language::proto::serialize_code_action(&action)),
1751 };
1752 cx.spawn(|this, mut cx| async move {
1753 let response = client
1754 .request(request)
1755 .await?
1756 .transaction
1757 .ok_or_else(|| anyhow!("missing transaction"))?;
1758 this.update(&mut cx, |this, cx| {
1759 this.deserialize_project_transaction(response, push_to_history, cx)
1760 })
1761 .await
1762 })
1763 } else {
1764 Task::ready(Err(anyhow!("project does not have a remote id")))
1765 }
1766 }
1767
1768 pub fn find_or_create_local_worktree(
1769 &self,
1770 abs_path: impl AsRef<Path>,
1771 weak: bool,
1772 cx: &mut ModelContext<Self>,
1773 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1774 let abs_path = abs_path.as_ref();
1775 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1776 Task::ready(Ok((tree.clone(), relative_path.into())))
1777 } else {
1778 let worktree = self.create_local_worktree(abs_path, weak, cx);
1779 cx.foreground()
1780 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1781 }
1782 }
1783
1784 fn find_local_worktree(
1785 &self,
1786 abs_path: &Path,
1787 cx: &AppContext,
1788 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1789 for tree in self.worktrees(cx) {
1790 if let Some(relative_path) = tree
1791 .read(cx)
1792 .as_local()
1793 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1794 {
1795 return Some((tree.clone(), relative_path.into()));
1796 }
1797 }
1798 None
1799 }
1800
1801 pub fn is_shared(&self) -> bool {
1802 match &self.client_state {
1803 ProjectClientState::Local { is_shared, .. } => *is_shared,
1804 ProjectClientState::Remote { .. } => false,
1805 }
1806 }
1807
    /// Creates a new local worktree rooted at `abs_path`, adds it to the
    /// project, and — if the project is already remote — registers it with the
    /// server (and shares it too when the project is currently shared).
    fn create_local_worktree(
        &self,
        abs_path: impl AsRef<Path>,
        weak: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let path = Arc::from(abs_path.as_ref());
        cx.spawn(|project, mut cx| async move {
            let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;

            // Capture the project's remote state while adding the worktree so the
            // register/share steps below see a consistent snapshot.
            let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
                project.add_worktree(&worktree, cx);
                (project.remote_id(), project.is_shared())
            });

            if let Some(project_id) = remote_project_id {
                // Register first; sharing requires the worktree to be registered.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_local_mut().unwrap().register(project_id, cx)
                    })
                    .await?;
                if is_shared {
                    worktree
                        .update(&mut cx, |worktree, cx| {
                            worktree.as_local_mut().unwrap().share(project_id, cx)
                        })
                        .await?;
                }
            }

            Ok(worktree)
        })
    }
1843
1844 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1845 self.worktrees.retain(|worktree| {
1846 worktree
1847 .upgrade(cx)
1848 .map_or(false, |w| w.read(cx).id() != id)
1849 });
1850 cx.notify();
1851 }
1852
    /// Registers `worktree` with the project: wires up change observation (and,
    /// for local worktrees, buffer reconciliation on events), stores a strong or
    /// weak handle, and notifies observers.
    fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
        cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
        if worktree.read(cx).is_local() {
            // Keep open buffers' file metadata in sync with local fs changes.
            cx.subscribe(&worktree, |this, worktree, _, cx| {
                this.update_local_worktree_buffers(worktree, cx);
            })
            .detach();
        }

        // Weak local worktrees are held weakly so they can be released once no
        // longer referenced elsewhere; dead handles are pruned on release.
        let push_weak_handle = {
            let worktree = worktree.read(cx);
            worktree.is_local() && worktree.is_weak()
        };
        if push_weak_handle {
            cx.observe_release(&worktree, |this, cx| {
                this.worktrees
                    .retain(|worktree| worktree.upgrade(cx).is_some());
                cx.notify();
            })
            .detach();
            self.worktrees
                .push(WorktreeHandle::Weak(worktree.downgrade()));
        } else {
            self.worktrees
                .push(WorktreeHandle::Strong(worktree.clone()));
        }
        cx.notify();
    }
1881
1882 fn update_local_worktree_buffers(
1883 &mut self,
1884 worktree_handle: ModelHandle<Worktree>,
1885 cx: &mut ModelContext<Self>,
1886 ) {
1887 let snapshot = worktree_handle.read(cx).snapshot();
1888 let mut buffers_to_delete = Vec::new();
1889 for (buffer_id, buffer) in &self.open_buffers {
1890 if let Some(buffer) = buffer.upgrade(cx) {
1891 buffer.update(cx, |buffer, cx| {
1892 if let Some(old_file) = File::from_dyn(buffer.file()) {
1893 if old_file.worktree != worktree_handle {
1894 return;
1895 }
1896
1897 let new_file = if let Some(entry) = old_file
1898 .entry_id
1899 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
1900 {
1901 File {
1902 is_local: true,
1903 entry_id: Some(entry.id),
1904 mtime: entry.mtime,
1905 path: entry.path.clone(),
1906 worktree: worktree_handle.clone(),
1907 }
1908 } else if let Some(entry) =
1909 snapshot.entry_for_path(old_file.path().as_ref())
1910 {
1911 File {
1912 is_local: true,
1913 entry_id: Some(entry.id),
1914 mtime: entry.mtime,
1915 path: entry.path.clone(),
1916 worktree: worktree_handle.clone(),
1917 }
1918 } else {
1919 File {
1920 is_local: true,
1921 entry_id: None,
1922 path: old_file.path().clone(),
1923 mtime: old_file.mtime(),
1924 worktree: worktree_handle.clone(),
1925 }
1926 };
1927
1928 if let Some(project_id) = self.remote_id() {
1929 self.client
1930 .send(proto::UpdateBufferFile {
1931 project_id,
1932 buffer_id: *buffer_id as u64,
1933 file: Some(new_file.to_proto()),
1934 })
1935 .log_err();
1936 }
1937 buffer.file_updated(Box::new(new_file), cx).detach();
1938 }
1939 });
1940 } else {
1941 buffers_to_delete.push(*buffer_id);
1942 }
1943 }
1944
1945 for buffer_id in buffers_to_delete {
1946 self.open_buffers.remove(&buffer_id);
1947 }
1948 }
1949
1950 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
1951 let new_active_entry = entry.and_then(|project_path| {
1952 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1953 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
1954 Some(ProjectEntry {
1955 worktree_id: project_path.worktree_id,
1956 entry_id: entry.id,
1957 })
1958 });
1959 if new_active_entry != self.active_entry {
1960 self.active_entry = new_active_entry;
1961 cx.emit(Event::ActiveEntryChanged(new_active_entry));
1962 }
1963 }
1964
1965 pub fn is_running_disk_based_diagnostics(&self) -> bool {
1966 self.language_servers_with_diagnostics_running > 0
1967 }
1968
1969 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
1970 let mut summary = DiagnosticSummary::default();
1971 for (_, path_summary) in self.diagnostic_summaries(cx) {
1972 summary.error_count += path_summary.error_count;
1973 summary.warning_count += path_summary.warning_count;
1974 summary.info_count += path_summary.info_count;
1975 summary.hint_count += path_summary.hint_count;
1976 }
1977 summary
1978 }
1979
    /// Iterates over every (path, diagnostic summary) pair across all of the
    /// project's worktrees, tagging each path with its worktree id.
    pub fn diagnostic_summaries<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
        self.worktrees(cx).flat_map(move |worktree| {
            let worktree = worktree.read(cx);
            let worktree_id = worktree.id();
            worktree
                .diagnostic_summaries()
                .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
        })
    }
1992
1993 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
1994 self.language_servers_with_diagnostics_running += 1;
1995 if self.language_servers_with_diagnostics_running == 1 {
1996 cx.emit(Event::DiskBasedDiagnosticsStarted);
1997 }
1998 }
1999
2000 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2001 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2002 self.language_servers_with_diagnostics_running -= 1;
2003 if self.language_servers_with_diagnostics_running == 0 {
2004 cx.emit(Event::DiskBasedDiagnosticsFinished);
2005 }
2006 }
2007
    /// The project entry (worktree id + entry id) that currently has focus,
    /// if any.
    pub fn active_entry(&self) -> Option<ProjectEntry> {
        self.active_entry
    }
2011
2012 // RPC message handlers
2013
2014 async fn handle_unshare_project(
2015 this: ModelHandle<Self>,
2016 _: TypedEnvelope<proto::UnshareProject>,
2017 _: Arc<Client>,
2018 mut cx: AsyncAppContext,
2019 ) -> Result<()> {
2020 this.update(&mut cx, |this, cx| {
2021 if let ProjectClientState::Remote {
2022 sharing_has_stopped,
2023 ..
2024 } = &mut this.client_state
2025 {
2026 *sharing_has_stopped = true;
2027 this.collaborators.clear();
2028 cx.notify();
2029 } else {
2030 unreachable!()
2031 }
2032 });
2033
2034 Ok(())
2035 }
2036
2037 async fn handle_add_collaborator(
2038 this: ModelHandle<Self>,
2039 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2040 _: Arc<Client>,
2041 mut cx: AsyncAppContext,
2042 ) -> Result<()> {
2043 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2044 let collaborator = envelope
2045 .payload
2046 .collaborator
2047 .take()
2048 .ok_or_else(|| anyhow!("empty collaborator"))?;
2049
2050 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2051 this.update(&mut cx, |this, cx| {
2052 this.collaborators
2053 .insert(collaborator.peer_id, collaborator);
2054 cx.notify();
2055 });
2056
2057 Ok(())
2058 }
2059
2060 async fn handle_remove_collaborator(
2061 this: ModelHandle<Self>,
2062 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2063 _: Arc<Client>,
2064 mut cx: AsyncAppContext,
2065 ) -> Result<()> {
2066 this.update(&mut cx, |this, cx| {
2067 let peer_id = PeerId(envelope.payload.peer_id);
2068 let replica_id = this
2069 .collaborators
2070 .remove(&peer_id)
2071 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2072 .replica_id;
2073 this.shared_buffers.remove(&peer_id);
2074 for (_, buffer) in &this.open_buffers {
2075 if let Some(buffer) = buffer.upgrade(cx) {
2076 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2077 }
2078 }
2079 cx.notify();
2080 Ok(())
2081 })
2082 }
2083
2084 async fn handle_share_worktree(
2085 this: ModelHandle<Self>,
2086 envelope: TypedEnvelope<proto::ShareWorktree>,
2087 client: Arc<Client>,
2088 mut cx: AsyncAppContext,
2089 ) -> Result<()> {
2090 this.update(&mut cx, |this, cx| {
2091 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2092 let replica_id = this.replica_id();
2093 let worktree = envelope
2094 .payload
2095 .worktree
2096 .ok_or_else(|| anyhow!("invalid worktree"))?;
2097 let (worktree, load_task) =
2098 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2099 this.add_worktree(&worktree, cx);
2100 load_task.detach();
2101 Ok(())
2102 })
2103 }
2104
2105 async fn handle_unregister_worktree(
2106 this: ModelHandle<Self>,
2107 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2108 _: Arc<Client>,
2109 mut cx: AsyncAppContext,
2110 ) -> Result<()> {
2111 this.update(&mut cx, |this, cx| {
2112 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2113 this.remove_worktree(worktree_id, cx);
2114 Ok(())
2115 })
2116 }
2117
2118 async fn handle_update_worktree(
2119 this: ModelHandle<Self>,
2120 envelope: TypedEnvelope<proto::UpdateWorktree>,
2121 _: Arc<Client>,
2122 mut cx: AsyncAppContext,
2123 ) -> Result<()> {
2124 this.update(&mut cx, |this, cx| {
2125 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2126 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2127 worktree.update(cx, |worktree, cx| {
2128 let worktree = worktree.as_remote_mut().unwrap();
2129 worktree.update_from_remote(envelope, cx)
2130 })?;
2131 }
2132 Ok(())
2133 })
2134 }
2135
2136 async fn handle_update_diagnostic_summary(
2137 this: ModelHandle<Self>,
2138 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2139 _: Arc<Client>,
2140 mut cx: AsyncAppContext,
2141 ) -> Result<()> {
2142 this.update(&mut cx, |this, cx| {
2143 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2144 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2145 if let Some(summary) = envelope.payload.summary {
2146 let project_path = ProjectPath {
2147 worktree_id,
2148 path: Path::new(&summary.path).into(),
2149 };
2150 worktree.update(cx, |worktree, _| {
2151 worktree
2152 .as_remote_mut()
2153 .unwrap()
2154 .update_diagnostic_summary(project_path.path.clone(), &summary);
2155 });
2156 cx.emit(Event::DiagnosticsUpdated(project_path));
2157 }
2158 }
2159 Ok(())
2160 })
2161 }
2162
2163 async fn handle_disk_based_diagnostics_updating(
2164 this: ModelHandle<Self>,
2165 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2166 _: Arc<Client>,
2167 mut cx: AsyncAppContext,
2168 ) -> Result<()> {
2169 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2170 Ok(())
2171 }
2172
2173 async fn handle_disk_based_diagnostics_updated(
2174 this: ModelHandle<Self>,
2175 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2176 _: Arc<Client>,
2177 mut cx: AsyncAppContext,
2178 ) -> Result<()> {
2179 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2180 Ok(())
2181 }
2182
2183 async fn handle_update_buffer(
2184 this: ModelHandle<Self>,
2185 envelope: TypedEnvelope<proto::UpdateBuffer>,
2186 _: Arc<Client>,
2187 mut cx: AsyncAppContext,
2188 ) -> Result<()> {
2189 this.update(&mut cx, |this, cx| {
2190 let payload = envelope.payload.clone();
2191 let buffer_id = payload.buffer_id as usize;
2192 let ops = payload
2193 .operations
2194 .into_iter()
2195 .map(|op| language::proto::deserialize_operation(op))
2196 .collect::<Result<Vec<_>, _>>()?;
2197 if let Some(buffer) = this.open_buffers.get_mut(&buffer_id) {
2198 if let Some(buffer) = buffer.upgrade(cx) {
2199 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2200 }
2201 }
2202 Ok(())
2203 })
2204 }
2205
2206 async fn handle_update_buffer_file(
2207 this: ModelHandle<Self>,
2208 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2209 _: Arc<Client>,
2210 mut cx: AsyncAppContext,
2211 ) -> Result<()> {
2212 this.update(&mut cx, |this, cx| {
2213 let payload = envelope.payload.clone();
2214 let buffer_id = payload.buffer_id as usize;
2215 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2216 let worktree = this
2217 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2218 .ok_or_else(|| anyhow!("no such worktree"))?;
2219 let file = File::from_proto(file, worktree.clone(), cx)?;
2220 let buffer = this
2221 .open_buffers
2222 .get_mut(&buffer_id)
2223 .and_then(|b| b.upgrade(cx))
2224 .ok_or_else(|| anyhow!("no such buffer"))?;
2225 buffer.update(cx, |buffer, cx| {
2226 buffer.file_updated(Box::new(file), cx).detach();
2227 });
2228 Ok(())
2229 })
2230 }
2231
2232 async fn handle_save_buffer(
2233 this: ModelHandle<Self>,
2234 envelope: TypedEnvelope<proto::SaveBuffer>,
2235 _: Arc<Client>,
2236 mut cx: AsyncAppContext,
2237 ) -> Result<proto::BufferSaved> {
2238 let buffer_id = envelope.payload.buffer_id;
2239 let sender_id = envelope.original_sender_id()?;
2240 let (project_id, save) = this.update(&mut cx, |this, cx| {
2241 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2242 let buffer = this
2243 .shared_buffers
2244 .get(&sender_id)
2245 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2246 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2247 Ok::<_, anyhow::Error>((project_id, buffer.update(cx, |buffer, cx| buffer.save(cx))))
2248 })?;
2249
2250 let (version, mtime) = save.await?;
2251 Ok(proto::BufferSaved {
2252 project_id,
2253 buffer_id,
2254 version: (&version).into(),
2255 mtime: Some(mtime.into()),
2256 })
2257 }
2258
2259 async fn handle_format_buffers(
2260 this: ModelHandle<Self>,
2261 envelope: TypedEnvelope<proto::FormatBuffers>,
2262 _: Arc<Client>,
2263 mut cx: AsyncAppContext,
2264 ) -> Result<proto::FormatBuffersResponse> {
2265 let sender_id = envelope.original_sender_id()?;
2266 let format = this.update(&mut cx, |this, cx| {
2267 let shared_buffers = this
2268 .shared_buffers
2269 .get(&sender_id)
2270 .ok_or_else(|| anyhow!("peer has no buffers"))?;
2271 let mut buffers = HashSet::default();
2272 for buffer_id in &envelope.payload.buffer_ids {
2273 buffers.insert(
2274 shared_buffers
2275 .get(buffer_id)
2276 .cloned()
2277 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2278 );
2279 }
2280 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2281 })?;
2282
2283 let project_transaction = format.await?;
2284 let project_transaction = this.update(&mut cx, |this, cx| {
2285 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2286 });
2287 Ok(proto::FormatBuffersResponse {
2288 transaction: Some(project_transaction),
2289 })
2290 }
2291
2292 async fn handle_get_completions(
2293 this: ModelHandle<Self>,
2294 envelope: TypedEnvelope<proto::GetCompletions>,
2295 _: Arc<Client>,
2296 mut cx: AsyncAppContext,
2297 ) -> Result<proto::GetCompletionsResponse> {
2298 let sender_id = envelope.original_sender_id()?;
2299 let position = envelope
2300 .payload
2301 .position
2302 .and_then(language::proto::deserialize_anchor)
2303 .ok_or_else(|| anyhow!("invalid position"))?;
2304 let completions = this.update(&mut cx, |this, cx| {
2305 let buffer = this
2306 .shared_buffers
2307 .get(&sender_id)
2308 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2309 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2310 Ok::<_, anyhow::Error>(this.completions(&buffer, position, cx))
2311 })?;
2312
2313 Ok(proto::GetCompletionsResponse {
2314 completions: completions
2315 .await?
2316 .iter()
2317 .map(language::proto::serialize_completion)
2318 .collect(),
2319 })
2320 }
2321
2322 async fn handle_apply_additional_edits_for_completion(
2323 this: ModelHandle<Self>,
2324 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2325 _: Arc<Client>,
2326 mut cx: AsyncAppContext,
2327 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2328 let sender_id = envelope.original_sender_id()?;
2329 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2330 let buffer = this
2331 .shared_buffers
2332 .get(&sender_id)
2333 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2334 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2335 let language = buffer.read(cx).language();
2336 let completion = language::proto::deserialize_completion(
2337 envelope
2338 .payload
2339 .completion
2340 .ok_or_else(|| anyhow!("invalid completion"))?,
2341 language,
2342 )?;
2343 Ok::<_, anyhow::Error>(
2344 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2345 )
2346 })?;
2347
2348 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2349 transaction: apply_additional_edits
2350 .await?
2351 .as_ref()
2352 .map(language::proto::serialize_transaction),
2353 })
2354 }
2355
2356 async fn handle_get_code_actions(
2357 this: ModelHandle<Self>,
2358 envelope: TypedEnvelope<proto::GetCodeActions>,
2359 _: Arc<Client>,
2360 mut cx: AsyncAppContext,
2361 ) -> Result<proto::GetCodeActionsResponse> {
2362 let sender_id = envelope.original_sender_id()?;
2363 let start = envelope
2364 .payload
2365 .start
2366 .and_then(language::proto::deserialize_anchor)
2367 .ok_or_else(|| anyhow!("invalid start"))?;
2368 let end = envelope
2369 .payload
2370 .end
2371 .and_then(language::proto::deserialize_anchor)
2372 .ok_or_else(|| anyhow!("invalid end"))?;
2373 let code_actions = this.update(&mut cx, |this, cx| {
2374 let buffer = this
2375 .shared_buffers
2376 .get(&sender_id)
2377 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2378 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2379 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2380 })?;
2381
2382 Ok(proto::GetCodeActionsResponse {
2383 actions: code_actions
2384 .await?
2385 .iter()
2386 .map(language::proto::serialize_code_action)
2387 .collect(),
2388 })
2389 }
2390
2391 async fn handle_apply_code_action(
2392 this: ModelHandle<Self>,
2393 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2394 _: Arc<Client>,
2395 mut cx: AsyncAppContext,
2396 ) -> Result<proto::ApplyCodeActionResponse> {
2397 let sender_id = envelope.original_sender_id()?;
2398 let action = language::proto::deserialize_code_action(
2399 envelope
2400 .payload
2401 .action
2402 .ok_or_else(|| anyhow!("invalid action"))?,
2403 )?;
2404 let apply_code_action = this.update(&mut cx, |this, cx| {
2405 let buffer = this
2406 .shared_buffers
2407 .get(&sender_id)
2408 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2409 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2410 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2411 })?;
2412
2413 let project_transaction = apply_code_action.await?;
2414 let project_transaction = this.update(&mut cx, |this, cx| {
2415 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2416 });
2417 Ok(proto::ApplyCodeActionResponse {
2418 transaction: Some(project_transaction),
2419 })
2420 }
2421
2422 async fn handle_get_definition(
2423 this: ModelHandle<Self>,
2424 envelope: TypedEnvelope<proto::GetDefinition>,
2425 _: Arc<Client>,
2426 mut cx: AsyncAppContext,
2427 ) -> Result<proto::GetDefinitionResponse> {
2428 let sender_id = envelope.original_sender_id()?;
2429 let position = envelope
2430 .payload
2431 .position
2432 .and_then(deserialize_anchor)
2433 .ok_or_else(|| anyhow!("invalid position"))?;
2434 let definitions = this.update(&mut cx, |this, cx| {
2435 let source_buffer = this
2436 .shared_buffers
2437 .get(&sender_id)
2438 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2439 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2440 if source_buffer.read(cx).can_resolve(&position) {
2441 Ok(this.definition(&source_buffer, position, cx))
2442 } else {
2443 Err(anyhow!("cannot resolve position"))
2444 }
2445 })?;
2446
2447 let definitions = definitions.await?;
2448
2449 this.update(&mut cx, |this, cx| {
2450 let mut response = proto::GetDefinitionResponse {
2451 definitions: Default::default(),
2452 };
2453 for definition in definitions {
2454 let buffer =
2455 this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx);
2456 response.definitions.push(proto::Definition {
2457 target_start: Some(serialize_anchor(&definition.target_range.start)),
2458 target_end: Some(serialize_anchor(&definition.target_range.end)),
2459 buffer: Some(buffer),
2460 });
2461 }
2462 Ok(response)
2463 })
2464 }
2465
2466 async fn handle_open_buffer(
2467 this: ModelHandle<Self>,
2468 envelope: TypedEnvelope<proto::OpenBuffer>,
2469 _: Arc<Client>,
2470 mut cx: AsyncAppContext,
2471 ) -> anyhow::Result<proto::OpenBufferResponse> {
2472 let peer_id = envelope.original_sender_id()?;
2473 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2474 let open_buffer = this.update(&mut cx, |this, cx| {
2475 this.open_buffer(
2476 ProjectPath {
2477 worktree_id,
2478 path: PathBuf::from(envelope.payload.path).into(),
2479 },
2480 cx,
2481 )
2482 });
2483
2484 let buffer = open_buffer.await?;
2485 this.update(&mut cx, |this, cx| {
2486 Ok(proto::OpenBufferResponse {
2487 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
2488 })
2489 })
2490 }
2491
2492 fn serialize_project_transaction_for_peer(
2493 &mut self,
2494 project_transaction: ProjectTransaction,
2495 peer_id: PeerId,
2496 cx: &AppContext,
2497 ) -> proto::ProjectTransaction {
2498 let mut serialized_transaction = proto::ProjectTransaction {
2499 buffers: Default::default(),
2500 transactions: Default::default(),
2501 };
2502 for (buffer, transaction) in project_transaction.0 {
2503 serialized_transaction
2504 .buffers
2505 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
2506 serialized_transaction
2507 .transactions
2508 .push(language::proto::serialize_transaction(&transaction));
2509 }
2510 serialized_transaction
2511 }
2512
    /// Reconstructs a `ProjectTransaction` received from a remote peer.
    ///
    /// Buffers and transactions arrive as parallel lists. Each buffer is
    /// resolved (or instantiated) synchronously; the returned task then waits
    /// for the edits referenced by each transaction to actually arrive before
    /// resolving, optionally pushing each transaction onto its buffer's
    /// history so it can be undone locally.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        let mut project_transaction = ProjectTransaction::default();
        for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
            // Fail fast on malformed input before spawning the async wait.
            let buffer = match self.deserialize_buffer(buffer, cx) {
                Ok(buffer) => buffer,
                Err(error) => return Task::ready(Err(error)),
            };
            let transaction = match language::proto::deserialize_transaction(transaction) {
                Ok(transaction) => transaction,
                Err(error) => return Task::ready(Err(error)),
            };
            project_transaction.0.insert(buffer, transaction);
        }

        cx.spawn_weak(|_, mut cx| async move {
            for (buffer, transaction) in &project_transaction.0 {
                // The transaction may reference operations that haven't been
                // streamed to us yet; wait until the buffer has them all.
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
2550
2551 fn serialize_buffer_for_peer(
2552 &mut self,
2553 buffer: &ModelHandle<Buffer>,
2554 peer_id: PeerId,
2555 cx: &AppContext,
2556 ) -> proto::Buffer {
2557 let buffer_id = buffer.read(cx).remote_id();
2558 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2559 match shared_buffers.entry(buffer_id) {
2560 hash_map::Entry::Occupied(_) => proto::Buffer {
2561 variant: Some(proto::buffer::Variant::Id(buffer_id)),
2562 },
2563 hash_map::Entry::Vacant(entry) => {
2564 entry.insert(buffer.clone());
2565 proto::Buffer {
2566 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
2567 }
2568 }
2569 }
2570 }
2571
    /// Resolves a `proto::Buffer` into a local buffer handle.
    ///
    /// The message either names an already-open buffer by id, or carries full
    /// buffer state, in which case a new buffer model is created and
    /// registered, along with its file/worktree association if present.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Result<ModelHandle<Buffer>> {
        match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
            proto::buffer::Variant::Id(id) => self
                .open_buffers
                .get(&(id as usize))
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("no buffer exists for id {}", id)),
            proto::buffer::Variant::State(mut buffer) => {
                let mut buffer_worktree = None;
                let mut buffer_file = None;
                // A buffer may be untitled, in which case it has no file and
                // no associated worktree.
                if let Some(file) = buffer.file.take() {
                    let worktree_id = WorktreeId::from_proto(file.worktree_id);
                    let worktree = self
                        .worktree_for_id(worktree_id, cx)
                        .ok_or_else(|| anyhow!("no worktree found for id {}", file.worktree_id))?;
                    buffer_file = Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                        as Box<dyn language::File>);
                    buffer_worktree = Some(worktree);
                }

                let buffer = cx.add_model(|cx| {
                    Buffer::from_proto(self.replica_id(), buffer, buffer_file, cx).unwrap()
                });
                self.register_buffer(&buffer, buffer_worktree.as_ref(), cx)?;
                Ok(buffer)
            }
        }
    }
2604
2605 async fn handle_close_buffer(
2606 this: ModelHandle<Self>,
2607 envelope: TypedEnvelope<proto::CloseBuffer>,
2608 _: Arc<Client>,
2609 mut cx: AsyncAppContext,
2610 ) -> anyhow::Result<()> {
2611 this.update(&mut cx, |this, cx| {
2612 if let Some(shared_buffers) =
2613 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
2614 {
2615 shared_buffers.remove(&envelope.payload.buffer_id);
2616 cx.notify();
2617 }
2618 Ok(())
2619 })
2620 }
2621
2622 async fn handle_buffer_saved(
2623 this: ModelHandle<Self>,
2624 envelope: TypedEnvelope<proto::BufferSaved>,
2625 _: Arc<Client>,
2626 mut cx: AsyncAppContext,
2627 ) -> Result<()> {
2628 let version = envelope.payload.version.try_into()?;
2629 let mtime = envelope
2630 .payload
2631 .mtime
2632 .ok_or_else(|| anyhow!("missing mtime"))?
2633 .into();
2634
2635 this.update(&mut cx, |this, cx| {
2636 let buffer = this
2637 .open_buffers
2638 .get(&(envelope.payload.buffer_id as usize))
2639 .and_then(|buffer| buffer.upgrade(cx));
2640 if let Some(buffer) = buffer {
2641 buffer.update(cx, |buffer, cx| {
2642 buffer.did_save(version, mtime, None, cx);
2643 });
2644 }
2645 Ok(())
2646 })
2647 }
2648
2649 async fn handle_buffer_reloaded(
2650 this: ModelHandle<Self>,
2651 envelope: TypedEnvelope<proto::BufferReloaded>,
2652 _: Arc<Client>,
2653 mut cx: AsyncAppContext,
2654 ) -> Result<()> {
2655 let payload = envelope.payload.clone();
2656 let version = payload.version.try_into()?;
2657 let mtime = payload
2658 .mtime
2659 .ok_or_else(|| anyhow!("missing mtime"))?
2660 .into();
2661 this.update(&mut cx, |this, cx| {
2662 let buffer = this
2663 .open_buffers
2664 .get(&(payload.buffer_id as usize))
2665 .and_then(|buffer| buffer.upgrade(cx));
2666 if let Some(buffer) = buffer {
2667 buffer.update(cx, |buffer, cx| {
2668 buffer.did_reload(version, mtime, cx);
2669 });
2670 }
2671 Ok(())
2672 })
2673 }
2674
2675 pub fn match_paths<'a>(
2676 &self,
2677 query: &'a str,
2678 include_ignored: bool,
2679 smart_case: bool,
2680 max_results: usize,
2681 cancel_flag: &'a AtomicBool,
2682 cx: &AppContext,
2683 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
2684 let worktrees = self
2685 .worktrees(cx)
2686 .filter(|worktree| !worktree.read(cx).is_weak())
2687 .collect::<Vec<_>>();
2688 let include_root_name = worktrees.len() > 1;
2689 let candidate_sets = worktrees
2690 .into_iter()
2691 .map(|worktree| CandidateSet {
2692 snapshot: worktree.read(cx).snapshot(),
2693 include_ignored,
2694 include_root_name,
2695 })
2696 .collect::<Vec<_>>();
2697
2698 let background = cx.background().clone();
2699 async move {
2700 fuzzy::match_paths(
2701 candidate_sets.as_slice(),
2702 query,
2703 smart_case,
2704 max_results,
2705 cancel_flag,
2706 background,
2707 )
2708 .await
2709 }
2710 }
2711}
2712
2713impl WorktreeHandle {
2714 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2715 match self {
2716 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2717 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2718 }
2719 }
2720}
2721
/// A single worktree's snapshot, adapted to the fuzzy matcher's
/// `PathMatchCandidateSet` interface.
struct CandidateSet {
    snapshot: Snapshot,
    // Whether ignored files participate in matching.
    include_ignored: bool,
    // Whether matched paths are prefixed with the worktree's root name
    // (used to disambiguate when the project has multiple worktrees).
    include_root_name: bool,
}
2727
2728impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
2729 type Candidates = CandidateSetIter<'a>;
2730
2731 fn id(&self) -> usize {
2732 self.snapshot.id().to_usize()
2733 }
2734
2735 fn len(&self) -> usize {
2736 if self.include_ignored {
2737 self.snapshot.file_count()
2738 } else {
2739 self.snapshot.visible_file_count()
2740 }
2741 }
2742
2743 fn prefix(&self) -> Arc<str> {
2744 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
2745 self.snapshot.root_name().into()
2746 } else if self.include_root_name {
2747 format!("{}/", self.snapshot.root_name()).into()
2748 } else {
2749 "".into()
2750 }
2751 }
2752
2753 fn candidates(&'a self, start: usize) -> Self::Candidates {
2754 CandidateSetIter {
2755 traversal: self.snapshot.files(self.include_ignored, start),
2756 }
2757 }
2758}
2759
/// Iterator over a worktree snapshot's file entries, yielding fuzzy-match
/// candidates.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
2763
2764impl<'a> Iterator for CandidateSetIter<'a> {
2765 type Item = PathMatchCandidate<'a>;
2766
2767 fn next(&mut self) -> Option<Self::Item> {
2768 self.traversal.next().map(|entry| {
2769 if let EntryKind::File(char_bag) = entry.kind {
2770 PathMatchCandidate {
2771 path: &entry.path,
2772 char_bag,
2773 }
2774 } else {
2775 unreachable!()
2776 }
2777 })
2778 }
2779}
2780
impl Entity for Project {
    type Event = Event;

    /// Called when the project model is dropped: tells the server to stop
    /// advertising the project (local) or that we are leaving it (remote).
    fn release(&mut self, _: &mut gpui::MutableAppContext) {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => {
                // Only unregister if the project was actually shared, i.e. it
                // received a server-assigned id.
                if let Some(project_id) = *remote_id_rx.borrow() {
                    self.client
                        .send(proto::UnregisterProject { project_id })
                        .log_err();
                }
            }
            ProjectClientState::Remote { remote_id, .. } => {
                self.client
                    .send(proto::LeaveProject {
                        project_id: *remote_id,
                    })
                    .log_err();
            }
        }
    }

    /// Gives all language servers a chance to shut down cleanly before the
    /// application exits.
    fn app_will_quit(
        &mut self,
        _: &mut MutableAppContext,
    ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
        use futures::FutureExt;

        // Draining removes the servers from the map so no further requests
        // are issued while they shut down.
        let shutdown_futures = self
            .language_servers
            .drain()
            .filter_map(|(_, server)| server.shutdown())
            .collect::<Vec<_>>();
        Some(
            async move {
                futures::future::join_all(shutdown_futures).await;
            }
            .boxed(),
        )
    }
}
2822
2823impl Collaborator {
2824 fn from_proto(
2825 message: proto::Collaborator,
2826 user_store: &ModelHandle<UserStore>,
2827 cx: &mut AsyncAppContext,
2828 ) -> impl Future<Output = Result<Self>> {
2829 let user = user_store.update(cx, |user_store, cx| {
2830 user_store.fetch_user(message.user_id, cx)
2831 });
2832
2833 async move {
2834 Ok(Self {
2835 peer_id: PeerId(message.peer_id),
2836 user: user.await?,
2837 replica_id: message.replica_id as ReplicaId,
2838 })
2839 }
2840 }
2841}
2842
2843impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
2844 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
2845 Self {
2846 worktree_id,
2847 path: path.as_ref().into(),
2848 }
2849 }
2850}
2851
2852impl From<lsp::CreateFileOptions> for fs::CreateOptions {
2853 fn from(options: lsp::CreateFileOptions) -> Self {
2854 Self {
2855 overwrite: options.overwrite.unwrap_or(false),
2856 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2857 }
2858 }
2859}
2860
2861impl From<lsp::RenameFileOptions> for fs::RenameOptions {
2862 fn from(options: lsp::RenameFileOptions) -> Self {
2863 Self {
2864 overwrite: options.overwrite.unwrap_or(false),
2865 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2866 }
2867 }
2868}
2869
2870impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
2871 fn from(options: lsp::DeleteFileOptions) -> Self {
2872 Self {
2873 recursive: options.recursive.unwrap_or(false),
2874 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
2875 }
2876 }
2877}
2878
2879#[cfg(test)]
2880mod tests {
2881 use super::{Event, *};
2882 use client::test::FakeHttpClient;
2883 use fs::RealFs;
2884 use futures::StreamExt;
2885 use gpui::test::subscribe;
2886 use language::{
2887 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageRegistry,
2888 LanguageServerConfig, Point,
2889 };
2890 use lsp::Url;
2891 use serde_json::json;
2892 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
2893 use unindent::Unindent as _;
2894 use util::test::temp_tree;
2895 use worktree::WorktreeHandle as _;
2896
    /// Verifies that scanning a worktree reached through a symlink populates
    /// entries correctly, and that fuzzy path search matches against them.
    #[gpui::test]
    async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
        // Lay out a small directory tree to scan.
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        // Access the tree through a symlink, and add a symlinked directory
        // inside it, to exercise symlink handling during the scan.
        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = Project::test(Arc::new(RealFs), &mut cx);

        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(&root_link_path, false, cx)
            })
            .await
            .unwrap();

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;
        cx.read(|cx| {
            let tree = tree.read(cx);
            assert_eq!(tree.file_count(), 5);
            // The symlinked directory resolves to the same inodes as its
            // target.
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        // Fuzzy-match "bna" against every path in the worktree.
        let cancel_flag = Default::default();
        let results = project
            .read_with(&cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
2959
    /// Exercises the full path from a (fake) language server publishing
    /// diagnostics to the project emitting events and buffers exposing the
    /// resulting diagnostic entries. Also checks that nested disk-based
    /// diagnostics progress tokens are tracked as a single running pass.
    #[gpui::test]
    async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
        let (language_server_config, mut fake_server) = LanguageServerConfig::fake(&cx).await;
        let progress_token = language_server_config
            .disk_based_diagnostics_progress_token
            .clone()
            .unwrap();

        // Register a Rust language backed by the fake server.
        let mut languages = LanguageRegistry::new();
        languages.add(Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        )));

        let dir = temp_tree(json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }));

        let http_client = FakeHttpClient::with_404_response();
        let client = Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));

        let project = cx.update(|cx| {
            Project::local(
                client,
                user_store,
                Arc::new(languages),
                Arc::new(RealFs),
                cx,
            )
        });

        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(&mut cx, |project, cx| {
                project.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Path::new("b.rs").into(),
                    },
                    cx,
                )
            })
            .await
            .unwrap();

        let mut events = subscribe(&project, &mut cx);

        // The first progress start announces the diagnostics pass.
        fake_server.start_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Nested start/end pairs while a pass is running should not produce
        // additional started/finished events.
        fake_server.start_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        fake_server.start_progress(&progress_token).await;

        // Publish a diagnostic for a.rs while the pass is still in flight.
        fake_server
            .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path(dir.path().join("a.rs")).unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            })
            .await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated(ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("a.rs"))
            })
        );

        // Ending the remaining progress tokens finishes the pass.
        fake_server.end_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        // Opening the diagnosed file exposes the published diagnostic.
        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        buffer.read_with(&cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });
    }
3091
    /// A worktree containing only directories should yield no path-search
    /// results (directories are not candidates).
    #[gpui::test]
    async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "dir1": {},
                "dir2": {
                    "dir3": {}
                }
            }
        }));

        let project = Project::test(Arc::new(RealFs), &mut cx);
        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(&dir.path(), false, cx)
            })
            .await
            .unwrap();

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // "dir" matches the directory names, but only files are searchable.
        let cancel_flag = Default::default();
        let results = project
            .read_with(&cx, |project, cx| {
                project.match_paths("dir", false, false, 10, &cancel_flag, cx)
            })
            .await;

        assert!(results.is_empty());
    }
3123
    /// Verifies go-to-definition across worktree boundaries: resolving a
    /// definition in a file outside the project creates a weak worktree for
    /// the target, which is dropped again once the definition handle goes
    /// away.
    #[gpui::test]
    async fn test_definition(mut cx: gpui::TestAppContext) {
        let (language_server_config, mut fake_server) = LanguageServerConfig::fake(&cx).await;

        // Register a Rust language backed by the fake server.
        let mut languages = LanguageRegistry::new();
        languages.add(Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        )));

        let dir = temp_tree(json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }));
        let dir_path = dir.path().to_path_buf();

        let http_client = FakeHttpClient::with_404_response();
        let client = Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project = cx.update(|cx| {
            Project::local(
                client,
                user_store,
                Arc::new(languages),
                Arc::new(RealFs),
                cx,
            )
        });

        // The project's only (strong) worktree is the single file b.rs.
        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree(dir.path().join("b.rs"), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        let buffer = project
            .update(&mut cx, |project, cx| {
                project.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Path::new("").into(),
                    },
                    cx,
                )
            })
            .await
            .unwrap();

        // The fake server reports the definition as living in a.rs, which is
        // outside the project's worktree.
        fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params| {
            let params = params.text_document_position_params;
            assert_eq!(
                params.text_document.uri.to_file_path().unwrap(),
                dir_path.join("b.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 22));

            Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
                lsp::Url::from_file_path(dir_path.join("a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            )))
        });

        let mut definitions = project
            .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
            .await
            .unwrap();

        assert_eq!(definitions.len(), 1);
        let definition = definitions.pop().unwrap();
        cx.update(|cx| {
            let target_buffer = definition.target_buffer.read(cx);
            assert_eq!(
                target_buffer
                    .file()
                    .unwrap()
                    .as_local()
                    .unwrap()
                    .abs_path(cx),
                dir.path().join("a.rs")
            );
            assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
            // A weak worktree was created for the out-of-project target file.
            assert_eq!(
                list_worktrees(&project, cx),
                [
                    (dir.path().join("b.rs"), false),
                    (dir.path().join("a.rs"), true)
                ]
            );

            drop(definition);
        });
        // Dropping the definition releases the weak worktree.
        cx.read(|cx| {
            assert_eq!(
                list_worktrees(&project, cx),
                [(dir.path().join("b.rs"), false)]
            );
        });

        // Lists each worktree's absolute path along with its weak flag.
        fn list_worktrees(project: &ModelHandle<Project>, cx: &AppContext) -> Vec<(PathBuf, bool)> {
            project
                .read(cx)
                .worktrees(cx)
                .map(|worktree| {
                    let worktree = worktree.read(cx);
                    (
                        worktree.as_local().unwrap().abs_path().to_path_buf(),
                        worktree.is_weak(),
                    )
                })
                .collect::<Vec<_>>()
        }
    }
3245
3246 #[gpui::test]
3247 async fn test_save_file(mut cx: gpui::TestAppContext) {
3248 let fs = Arc::new(FakeFs::new(cx.background()));
3249 fs.insert_tree(
3250 "/dir",
3251 json!({
3252 "file1": "the old contents",
3253 }),
3254 )
3255 .await;
3256
3257 let project = Project::test(fs.clone(), &mut cx);
3258 let worktree_id = project
3259 .update(&mut cx, |p, cx| {
3260 p.find_or_create_local_worktree("/dir", false, cx)
3261 })
3262 .await
3263 .unwrap()
3264 .0
3265 .read_with(&cx, |tree, _| tree.id());
3266
3267 let buffer = project
3268 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3269 .await
3270 .unwrap();
3271 buffer
3272 .update(&mut cx, |buffer, cx| {
3273 assert_eq!(buffer.text(), "the old contents");
3274 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3275 buffer.save(cx)
3276 })
3277 .await
3278 .unwrap();
3279
3280 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3281 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3282 }
3283
3284 #[gpui::test]
3285 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
3286 let fs = Arc::new(FakeFs::new(cx.background()));
3287 fs.insert_tree(
3288 "/dir",
3289 json!({
3290 "file1": "the old contents",
3291 }),
3292 )
3293 .await;
3294
3295 let project = Project::test(fs.clone(), &mut cx);
3296 let worktree_id = project
3297 .update(&mut cx, |p, cx| {
3298 p.find_or_create_local_worktree("/dir/file1", false, cx)
3299 })
3300 .await
3301 .unwrap()
3302 .0
3303 .read_with(&cx, |tree, _| tree.id());
3304
3305 let buffer = project
3306 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3307 .await
3308 .unwrap();
3309 buffer
3310 .update(&mut cx, |buffer, cx| {
3311 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3312 buffer.save(cx)
3313 })
3314 .await
3315 .unwrap();
3316
3317 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3318 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3319 }
3320
3321 #[gpui::test(retries = 5)]
3322 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3323 let dir = temp_tree(json!({
3324 "a": {
3325 "file1": "",
3326 "file2": "",
3327 "file3": "",
3328 },
3329 "b": {
3330 "c": {
3331 "file4": "",
3332 "file5": "",
3333 }
3334 }
3335 }));
3336
3337 let project = Project::test(Arc::new(RealFs), &mut cx);
3338 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3339
3340 let (tree, _) = project
3341 .update(&mut cx, |p, cx| {
3342 p.find_or_create_local_worktree(dir.path(), false, cx)
3343 })
3344 .await
3345 .unwrap();
3346 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3347
3348 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3349 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3350 async move { buffer.await.unwrap() }
3351 };
3352 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3353 tree.read_with(cx, |tree, _| {
3354 tree.entry_for_path(path)
3355 .expect(&format!("no entry for path {}", path))
3356 .id
3357 })
3358 };
3359
3360 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3361 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3362 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3363 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3364
3365 let file2_id = id_for_path("a/file2", &cx);
3366 let file3_id = id_for_path("a/file3", &cx);
3367 let file4_id = id_for_path("b/c/file4", &cx);
3368
3369 // Wait for the initial scan.
3370 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3371 .await;
3372
3373 // Create a remote copy of this worktree.
3374 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.snapshot());
3375 let (remote, load_task) = cx.update(|cx| {
3376 Worktree::remote(
3377 1,
3378 1,
3379 initial_snapshot.to_proto(&Default::default(), Default::default()),
3380 rpc.clone(),
3381 cx,
3382 )
3383 });
3384 load_task.await;
3385
3386 cx.read(|cx| {
3387 assert!(!buffer2.read(cx).is_dirty());
3388 assert!(!buffer3.read(cx).is_dirty());
3389 assert!(!buffer4.read(cx).is_dirty());
3390 assert!(!buffer5.read(cx).is_dirty());
3391 });
3392
3393 // Rename and delete files and directories.
3394 tree.flush_fs_events(&cx).await;
3395 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3396 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3397 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3398 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3399 tree.flush_fs_events(&cx).await;
3400
3401 let expected_paths = vec![
3402 "a",
3403 "a/file1",
3404 "a/file2.new",
3405 "b",
3406 "d",
3407 "d/file3",
3408 "d/file4",
3409 ];
3410
3411 cx.read(|app| {
3412 assert_eq!(
3413 tree.read(app)
3414 .paths()
3415 .map(|p| p.to_str().unwrap())
3416 .collect::<Vec<_>>(),
3417 expected_paths
3418 );
3419
3420 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3421 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3422 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3423
3424 assert_eq!(
3425 buffer2.read(app).file().unwrap().path().as_ref(),
3426 Path::new("a/file2.new")
3427 );
3428 assert_eq!(
3429 buffer3.read(app).file().unwrap().path().as_ref(),
3430 Path::new("d/file3")
3431 );
3432 assert_eq!(
3433 buffer4.read(app).file().unwrap().path().as_ref(),
3434 Path::new("d/file4")
3435 );
3436 assert_eq!(
3437 buffer5.read(app).file().unwrap().path().as_ref(),
3438 Path::new("b/c/file5")
3439 );
3440
3441 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3442 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3443 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3444 assert!(buffer5.read(app).file().unwrap().is_deleted());
3445 });
3446
3447 // Update the remote worktree. Check that it becomes consistent with the
3448 // local worktree.
3449 remote.update(&mut cx, |remote, cx| {
3450 let update_message =
3451 tree.read(cx)
3452 .snapshot()
3453 .build_update(&initial_snapshot, 1, 1, true);
3454 remote
3455 .as_remote_mut()
3456 .unwrap()
3457 .snapshot
3458 .apply_remote_update(update_message)
3459 .unwrap();
3460
3461 assert_eq!(
3462 remote
3463 .paths()
3464 .map(|p| p.to_str().unwrap())
3465 .collect::<Vec<_>>(),
3466 expected_paths
3467 );
3468 });
3469 }
3470
3471 #[gpui::test]
3472 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3473 let fs = Arc::new(FakeFs::new(cx.background()));
3474 fs.insert_tree(
3475 "/the-dir",
3476 json!({
3477 "a.txt": "a-contents",
3478 "b.txt": "b-contents",
3479 }),
3480 )
3481 .await;
3482
3483 let project = Project::test(fs.clone(), &mut cx);
3484 let worktree_id = project
3485 .update(&mut cx, |p, cx| {
3486 p.find_or_create_local_worktree("/the-dir", false, cx)
3487 })
3488 .await
3489 .unwrap()
3490 .0
3491 .read_with(&cx, |tree, _| tree.id());
3492
3493 // Spawn multiple tasks to open paths, repeating some paths.
3494 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3495 (
3496 p.open_buffer((worktree_id, "a.txt"), cx),
3497 p.open_buffer((worktree_id, "b.txt"), cx),
3498 p.open_buffer((worktree_id, "a.txt"), cx),
3499 )
3500 });
3501
3502 let buffer_a_1 = buffer_a_1.await.unwrap();
3503 let buffer_a_2 = buffer_a_2.await.unwrap();
3504 let buffer_b = buffer_b.await.unwrap();
3505 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3506 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3507
3508 // There is only one buffer per path.
3509 let buffer_a_id = buffer_a_1.id();
3510 assert_eq!(buffer_a_2.id(), buffer_a_id);
3511
3512 // Open the same path again while it is still open.
3513 drop(buffer_a_1);
3514 let buffer_a_3 = project
3515 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3516 .await
3517 .unwrap();
3518
3519 // There's still only one buffer per path.
3520 assert_eq!(buffer_a_3.id(), buffer_a_id);
3521 }
3522
    #[gpui::test]
    async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
        // Verifies how a buffer's dirty flag and its emitted events respond to
        // edits, saves, and deletion of the underlying file on disk.
        use std::fs;

        let dir = temp_tree(json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }));

        let project = Project::test(Arc::new(RealFs), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());

        // Drain pending fs events and wait for the initial scan so that later
        // events can be attributed to the mutations made below.
        worktree.flush_fs_events(&cx).await;
        worktree
            .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let buffer1 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
            .await
            .unwrap();
        // Collects every event the buffer emits, for the assertions below.
        let events = Rc::new(RefCell::new(Vec::new()));

        // initially, the buffer isn't dirty.
        buffer1.update(&mut cx, |buffer, cx| {
            cx.subscribe(&buffer1, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();

            assert!(!buffer.is_dirty());
            assert!(events.borrow().is_empty());

            buffer.edit(vec![1..2], "", cx);
        });

        // after the first edit, the buffer is dirty, and emits a dirtied event.
        buffer1.update(&mut cx, |buffer, cx| {
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[language::Event::Edited, language::Event::Dirtied]
            );
            events.borrow_mut().clear();
            // Simulate a save by reporting the current version as persisted.
            buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
        });

        // after saving, the buffer is not dirty, and emits a saved event.
        buffer1.update(&mut cx, |buffer, cx| {
            assert!(!buffer.is_dirty());
            assert_eq!(*events.borrow(), &[language::Event::Saved]);
            events.borrow_mut().clear();

            buffer.edit(vec![1..1], "B", cx);
            buffer.edit(vec![2..2], "D", cx);
        });

        // after editing again, the buffer is dirty, and emits another dirty event.
        buffer1.update(&mut cx, |buffer, cx| {
            assert!(buffer.text() == "aBDc");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[
                    language::Event::Edited,
                    language::Event::Dirtied,
                    language::Event::Edited,
                ],
            );
            events.borrow_mut().clear();

            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
            buffer.edit([1..3], "", cx);
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
        });

        assert_eq!(*events.borrow(), &[language::Event::Edited]);

        // When a file is deleted, the buffer is considered dirty.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer2 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
            .await
            .unwrap();
        buffer2.update(&mut cx, |_, cx| {
            cx.subscribe(&buffer2, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        fs::remove_file(dir.path().join("file2")).unwrap();
        buffer2.condition(&cx, |b, _| b.is_dirty()).await;
        assert_eq!(
            *events.borrow(),
            &[language::Event::Dirtied, language::Event::FileHandleChanged]
        );

        // When a file is already dirty when deleted, we don't emit a Dirtied event.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer3 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
            .await
            .unwrap();
        buffer3.update(&mut cx, |_, cx| {
            cx.subscribe(&buffer3, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        // Dirty the buffer *before* deleting its file, then confirm only a
        // FileHandleChanged event (no second Dirtied) is emitted.
        worktree.flush_fs_events(&cx).await;
        buffer3.update(&mut cx, |buffer, cx| {
            buffer.edit(Some(0..0), "x", cx);
        });
        events.borrow_mut().clear();
        fs::remove_file(dir.path().join("file3")).unwrap();
        buffer3
            .condition(&cx, |_, _| !events.borrow().is_empty())
            .await;
        assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
        cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
    }
3659
    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
        // Verifies reload-versus-conflict behavior when the file underlying a
        // buffer changes on disk: an unmodified buffer reloads in place, while
        // a modified buffer keeps its edits and is flagged as conflicted.
        use std::fs;

        let initial_contents = "aaa\nbbbbb\nc\n";
        let dir = temp_tree(json!({ "the-file": initial_contents }));

        let project = Project::test(Arc::new(RealFs), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        // Wait for the initial scan before mutating the file on disk.
        worktree
            .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let abs_path = dir.path().join("the-file");
        let buffer = project
            .update(&mut cx, |p, cx| {
                p.open_buffer((worktree_id, "the-file"), cx)
            })
            .await
            .unwrap();

        // TODO
        // Add a cursor on each row.
        // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
        //     assert!(!buffer.is_dirty());
        //     buffer.add_selection_set(
        //         &(0..3)
        //             .map(|row| Selection {
        //                 id: row as usize,
        //                 start: Point::new(row, 1),
        //                 end: Point::new(row, 1),
        //                 reversed: false,
        //                 goal: SelectionGoal::None,
        //             })
        //             .collect::<Vec<_>>(),
        //         cx,
        //     )
        // });

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(&cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(&mut cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer
        buffer.update(&mut cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }
3759
3760 #[gpui::test]
3761 async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
3762 let fs = Arc::new(FakeFs::new(cx.background()));
3763 fs.insert_tree(
3764 "/the-dir",
3765 json!({
3766 "a.rs": "
3767 fn foo(mut v: Vec<usize>) {
3768 for x in &v {
3769 v.push(1);
3770 }
3771 }
3772 "
3773 .unindent(),
3774 }),
3775 )
3776 .await;
3777
3778 let project = Project::test(fs.clone(), &mut cx);
3779 let (worktree, _) = project
3780 .update(&mut cx, |p, cx| {
3781 p.find_or_create_local_worktree("/the-dir", false, cx)
3782 })
3783 .await
3784 .unwrap();
3785 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
3786
3787 let buffer = project
3788 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3789 .await
3790 .unwrap();
3791
3792 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3793 let message = lsp::PublishDiagnosticsParams {
3794 uri: buffer_uri.clone(),
3795 diagnostics: vec![
3796 lsp::Diagnostic {
3797 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3798 severity: Some(DiagnosticSeverity::WARNING),
3799 message: "error 1".to_string(),
3800 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3801 location: lsp::Location {
3802 uri: buffer_uri.clone(),
3803 range: lsp::Range::new(
3804 lsp::Position::new(1, 8),
3805 lsp::Position::new(1, 9),
3806 ),
3807 },
3808 message: "error 1 hint 1".to_string(),
3809 }]),
3810 ..Default::default()
3811 },
3812 lsp::Diagnostic {
3813 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3814 severity: Some(DiagnosticSeverity::HINT),
3815 message: "error 1 hint 1".to_string(),
3816 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3817 location: lsp::Location {
3818 uri: buffer_uri.clone(),
3819 range: lsp::Range::new(
3820 lsp::Position::new(1, 8),
3821 lsp::Position::new(1, 9),
3822 ),
3823 },
3824 message: "original diagnostic".to_string(),
3825 }]),
3826 ..Default::default()
3827 },
3828 lsp::Diagnostic {
3829 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3830 severity: Some(DiagnosticSeverity::ERROR),
3831 message: "error 2".to_string(),
3832 related_information: Some(vec![
3833 lsp::DiagnosticRelatedInformation {
3834 location: lsp::Location {
3835 uri: buffer_uri.clone(),
3836 range: lsp::Range::new(
3837 lsp::Position::new(1, 13),
3838 lsp::Position::new(1, 15),
3839 ),
3840 },
3841 message: "error 2 hint 1".to_string(),
3842 },
3843 lsp::DiagnosticRelatedInformation {
3844 location: lsp::Location {
3845 uri: buffer_uri.clone(),
3846 range: lsp::Range::new(
3847 lsp::Position::new(1, 13),
3848 lsp::Position::new(1, 15),
3849 ),
3850 },
3851 message: "error 2 hint 2".to_string(),
3852 },
3853 ]),
3854 ..Default::default()
3855 },
3856 lsp::Diagnostic {
3857 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3858 severity: Some(DiagnosticSeverity::HINT),
3859 message: "error 2 hint 1".to_string(),
3860 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3861 location: lsp::Location {
3862 uri: buffer_uri.clone(),
3863 range: lsp::Range::new(
3864 lsp::Position::new(2, 8),
3865 lsp::Position::new(2, 17),
3866 ),
3867 },
3868 message: "original diagnostic".to_string(),
3869 }]),
3870 ..Default::default()
3871 },
3872 lsp::Diagnostic {
3873 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3874 severity: Some(DiagnosticSeverity::HINT),
3875 message: "error 2 hint 2".to_string(),
3876 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3877 location: lsp::Location {
3878 uri: buffer_uri.clone(),
3879 range: lsp::Range::new(
3880 lsp::Position::new(2, 8),
3881 lsp::Position::new(2, 17),
3882 ),
3883 },
3884 message: "original diagnostic".to_string(),
3885 }]),
3886 ..Default::default()
3887 },
3888 ],
3889 version: None,
3890 };
3891
3892 project
3893 .update(&mut cx, |p, cx| {
3894 p.update_diagnostics(message, &Default::default(), cx)
3895 })
3896 .unwrap();
3897 let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3898
3899 assert_eq!(
3900 buffer
3901 .diagnostics_in_range::<_, Point>(0..buffer.len())
3902 .collect::<Vec<_>>(),
3903 &[
3904 DiagnosticEntry {
3905 range: Point::new(1, 8)..Point::new(1, 9),
3906 diagnostic: Diagnostic {
3907 severity: DiagnosticSeverity::WARNING,
3908 message: "error 1".to_string(),
3909 group_id: 0,
3910 is_primary: true,
3911 ..Default::default()
3912 }
3913 },
3914 DiagnosticEntry {
3915 range: Point::new(1, 8)..Point::new(1, 9),
3916 diagnostic: Diagnostic {
3917 severity: DiagnosticSeverity::HINT,
3918 message: "error 1 hint 1".to_string(),
3919 group_id: 0,
3920 is_primary: false,
3921 ..Default::default()
3922 }
3923 },
3924 DiagnosticEntry {
3925 range: Point::new(1, 13)..Point::new(1, 15),
3926 diagnostic: Diagnostic {
3927 severity: DiagnosticSeverity::HINT,
3928 message: "error 2 hint 1".to_string(),
3929 group_id: 1,
3930 is_primary: false,
3931 ..Default::default()
3932 }
3933 },
3934 DiagnosticEntry {
3935 range: Point::new(1, 13)..Point::new(1, 15),
3936 diagnostic: Diagnostic {
3937 severity: DiagnosticSeverity::HINT,
3938 message: "error 2 hint 2".to_string(),
3939 group_id: 1,
3940 is_primary: false,
3941 ..Default::default()
3942 }
3943 },
3944 DiagnosticEntry {
3945 range: Point::new(2, 8)..Point::new(2, 17),
3946 diagnostic: Diagnostic {
3947 severity: DiagnosticSeverity::ERROR,
3948 message: "error 2".to_string(),
3949 group_id: 1,
3950 is_primary: true,
3951 ..Default::default()
3952 }
3953 }
3954 ]
3955 );
3956
3957 assert_eq!(
3958 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3959 &[
3960 DiagnosticEntry {
3961 range: Point::new(1, 8)..Point::new(1, 9),
3962 diagnostic: Diagnostic {
3963 severity: DiagnosticSeverity::WARNING,
3964 message: "error 1".to_string(),
3965 group_id: 0,
3966 is_primary: true,
3967 ..Default::default()
3968 }
3969 },
3970 DiagnosticEntry {
3971 range: Point::new(1, 8)..Point::new(1, 9),
3972 diagnostic: Diagnostic {
3973 severity: DiagnosticSeverity::HINT,
3974 message: "error 1 hint 1".to_string(),
3975 group_id: 0,
3976 is_primary: false,
3977 ..Default::default()
3978 }
3979 },
3980 ]
3981 );
3982 assert_eq!(
3983 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3984 &[
3985 DiagnosticEntry {
3986 range: Point::new(1, 13)..Point::new(1, 15),
3987 diagnostic: Diagnostic {
3988 severity: DiagnosticSeverity::HINT,
3989 message: "error 2 hint 1".to_string(),
3990 group_id: 1,
3991 is_primary: false,
3992 ..Default::default()
3993 }
3994 },
3995 DiagnosticEntry {
3996 range: Point::new(1, 13)..Point::new(1, 15),
3997 diagnostic: Diagnostic {
3998 severity: DiagnosticSeverity::HINT,
3999 message: "error 2 hint 2".to_string(),
4000 group_id: 1,
4001 is_primary: false,
4002 ..Default::default()
4003 }
4004 },
4005 DiagnosticEntry {
4006 range: Point::new(2, 8)..Point::new(2, 17),
4007 diagnostic: Diagnostic {
4008 severity: DiagnosticSeverity::ERROR,
4009 message: "error 2".to_string(),
4010 group_id: 1,
4011 is_primary: true,
4012 ..Default::default()
4013 }
4014 }
4015 ]
4016 );
4017 }
4018}