1pub mod fs;
2mod ignore;
3pub mod worktree;
4
5use anyhow::{anyhow, Result};
6use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
7use clock::ReplicaId;
8use collections::{hash_map, HashMap, HashSet};
9use futures::Future;
10use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
11use gpui::{
12 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
13 UpgradeModelHandle, WeakModelHandle,
14};
15use language::{
16 point_from_lsp,
17 proto::{deserialize_anchor, serialize_anchor},
18 range_from_lsp, AnchorRangeExt, Bias, Buffer, CodeAction, Completion, CompletionLabel,
19 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
20 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
21};
22use lsp::{DiagnosticSeverity, LanguageServer};
23use postage::{broadcast, prelude::Stream, sink::Sink, watch};
24use smol::block_on;
25use std::{
26 convert::TryInto,
27 ops::Range,
28 path::{Path, PathBuf},
29 sync::{atomic::AtomicBool, Arc},
30 time::Instant,
31};
32use util::{post_inc, ResultExt, TryFutureExt as _};
33
34pub use fs::*;
35pub use worktree::*;
36
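/// Tracks the worktrees, open buffers, collaborators, and language servers
/// belonging to a single project, which is either hosted locally or joined
/// remotely over RPC.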
37pub struct Project {
38 worktrees: Vec<WorktreeHandle>,
39 active_entry: Option<ProjectEntry>,
40 languages: Arc<LanguageRegistry>,
41 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
42 client: Arc<client::Client>,
43 user_store: ModelHandle<UserStore>,
44 fs: Arc<dyn Fs>,
45 client_state: ProjectClientState,
46 collaborators: HashMap<PeerId, Collaborator>,
47 subscriptions: Vec<client::Subscription>,
48 language_servers_with_diagnostics_running: isize,
49 open_buffers: HashMap<u64, OpenBuffer>,
50 opened_buffer: broadcast::Sender<()>,
51 loading_buffers: HashMap<
52 ProjectPath,
53 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
54 >,
55 shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
56}
57
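/// A buffer known to the project: either fully loaded, or a queue of
/// operations received over RPC for a buffer that is still being opened.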
58enum OpenBuffer {
59 Loaded(WeakModelHandle<Buffer>),
60 Operations(Vec<Operation>),
61}
62
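/// Worktrees are normally held strongly; weak handles let a worktree be
/// dropped once nothing else references it.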
63enum WorktreeHandle {
64 Strong(ModelHandle<Worktree>),
65 Weak(WeakModelHandle<Worktree>),
66}
67
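/// Whether this project is the local authority (and may be shared) or a
/// remote replica joined from another peer.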
68enum ProjectClientState {
69 Local {
70 is_shared: bool,
71 remote_id_tx: watch::Sender<Option<u64>>,
72 remote_id_rx: watch::Receiver<Option<u64>>,
73 _maintain_remote_id_task: Task<Option<()>>,
74 },
75 Remote {
76 sharing_has_stopped: bool,
77 remote_id: u64,
78 replica_id: ReplicaId,
79 },
80}
81
82#[derive(Clone, Debug)]
83pub struct Collaborator {
84 pub user: Arc<User>,
85 pub peer_id: PeerId,
86 pub replica_id: ReplicaId,
87}
88
89#[derive(Clone, Debug, PartialEq)]
90pub enum Event {
91 ActiveEntryChanged(Option<ProjectEntry>),
92 WorktreeRemoved(WorktreeId),
93 DiskBasedDiagnosticsStarted,
94 DiskBasedDiagnosticsUpdated,
95 DiskBasedDiagnosticsFinished,
96 DiagnosticsUpdated(ProjectPath),
97}
98
99#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
100pub struct ProjectPath {
101 pub worktree_id: WorktreeId,
102 pub path: Arc<Path>,
103}
104
105#[derive(Clone, Debug, Default, PartialEq)]
106pub struct DiagnosticSummary {
107 pub error_count: usize,
108 pub warning_count: usize,
109 pub info_count: usize,
110 pub hint_count: usize,
111}
112
113#[derive(Debug)]
114pub struct Definition {
115 pub target_buffer: ModelHandle<Buffer>,
116 pub target_range: Range<language::Anchor>,
117}
118
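/// The per-buffer transactions produced by a multi-buffer operation such as
/// formatting or applying a code action.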
119#[derive(Default)]
120pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
121
122impl DiagnosticSummary {
123 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
124 let mut this = Self {
125 error_count: 0,
126 warning_count: 0,
127 info_count: 0,
128 hint_count: 0,
129 };
130
131 for entry in diagnostics {
132 if entry.diagnostic.is_primary {
133 match entry.diagnostic.severity {
134 DiagnosticSeverity::ERROR => this.error_count += 1,
135 DiagnosticSeverity::WARNING => this.warning_count += 1,
136 DiagnosticSeverity::INFORMATION => this.info_count += 1,
137 DiagnosticSeverity::HINT => this.hint_count += 1,
138 _ => {}
139 }
140 }
141 }
142
143 this
144 }
145
146 pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
147 proto::DiagnosticSummary {
148 path: path.to_string_lossy().to_string(),
149 error_count: self.error_count as u32,
150 warning_count: self.warning_count as u32,
151 info_count: self.info_count as u32,
152 hint_count: self.hint_count as u32,
153 }
154 }
155}
156
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
158pub struct ProjectEntry {
159 pub worktree_id: WorktreeId,
160 pub entry_id: usize,
161}
162
163impl Project {
164 pub fn init(client: &Arc<Client>) {
165 client.add_entity_message_handler(Self::handle_add_collaborator);
166 client.add_entity_message_handler(Self::handle_buffer_reloaded);
167 client.add_entity_message_handler(Self::handle_buffer_saved);
168 client.add_entity_message_handler(Self::handle_close_buffer);
169 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
170 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
171 client.add_entity_message_handler(Self::handle_remove_collaborator);
172 client.add_entity_message_handler(Self::handle_share_worktree);
173 client.add_entity_message_handler(Self::handle_unregister_worktree);
174 client.add_entity_message_handler(Self::handle_unshare_project);
175 client.add_entity_message_handler(Self::handle_update_buffer_file);
176 client.add_entity_message_handler(Self::handle_update_buffer);
177 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
178 client.add_entity_message_handler(Self::handle_update_worktree);
179 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
180 client.add_entity_request_handler(Self::handle_apply_code_action);
181 client.add_entity_request_handler(Self::handle_format_buffers);
182 client.add_entity_request_handler(Self::handle_get_code_actions);
183 client.add_entity_request_handler(Self::handle_get_completions);
184 client.add_entity_request_handler(Self::handle_get_definition);
185 client.add_entity_request_handler(Self::handle_open_buffer);
186 client.add_entity_request_handler(Self::handle_save_buffer);
187 }
188
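/// Creates a project hosted on this machine, re-registering it with the
/// server whenever the client connection is (re)established.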
189 pub fn local(
190 client: Arc<Client>,
191 user_store: ModelHandle<UserStore>,
192 languages: Arc<LanguageRegistry>,
193 fs: Arc<dyn Fs>,
194 cx: &mut MutableAppContext,
195 ) -> ModelHandle<Self> {
196 cx.add_model(|cx: &mut ModelContext<Self>| {
197 let (remote_id_tx, remote_id_rx) = watch::channel();
198 let _maintain_remote_id_task = cx.spawn_weak({
199 let rpc = client.clone();
200 move |this, mut cx| {
201 async move {
202 let mut status = rpc.status();
203 while let Some(status) = status.recv().await {
204 if let Some(this) = this.upgrade(&cx) {
205 let remote_id = if let client::Status::Connected { .. } = status {
206 let response = rpc.request(proto::RegisterProject {}).await?;
207 Some(response.project_id)
208 } else {
209 None
210 };
211
212 if let Some(project_id) = remote_id {
213 let mut registrations = Vec::new();
214 this.update(&mut cx, |this, cx| {
215 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
216 registrations.push(worktree.update(
217 cx,
218 |worktree, cx| {
219 let worktree = worktree.as_local_mut().unwrap();
220 worktree.register(project_id, cx)
221 },
222 ));
223 }
224 });
225 for registration in registrations {
226 registration.await?;
227 }
228 }
229 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
230 }
231 }
232 Ok(())
233 }
234 .log_err()
235 }
236 });
237
238 Self {
239 worktrees: Default::default(),
240 collaborators: Default::default(),
241 open_buffers: Default::default(),
242 loading_buffers: Default::default(),
243 shared_buffers: Default::default(),
244 client_state: ProjectClientState::Local {
245 is_shared: false,
246 remote_id_tx,
247 remote_id_rx,
248 _maintain_remote_id_task,
249 },
250 opened_buffer: broadcast::channel(1).0,
251 subscriptions: Vec::new(),
252 active_entry: None,
253 languages,
254 client,
255 user_store,
256 fs,
257 language_servers_with_diagnostics_running: 0,
258 language_servers: Default::default(),
259 }
260 })
261 }
262
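/// Joins a project hosted by another peer, loading its worktrees and
/// collaborators from the `JoinProject` response.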
263 pub async fn remote(
264 remote_id: u64,
265 client: Arc<Client>,
266 user_store: ModelHandle<UserStore>,
267 languages: Arc<LanguageRegistry>,
268 fs: Arc<dyn Fs>,
269 cx: &mut AsyncAppContext,
270 ) -> Result<ModelHandle<Self>> {
271 client.authenticate_and_connect(&cx).await?;
272
273 let response = client
274 .request(proto::JoinProject {
275 project_id: remote_id,
276 })
277 .await?;
278
279 let replica_id = response.replica_id as ReplicaId;
280
281 let mut worktrees = Vec::new();
282 for worktree in response.worktrees {
283 let (worktree, load_task) = cx
284 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
285 worktrees.push(worktree);
286 load_task.detach();
287 }
288
289 let user_ids = response
290 .collaborators
291 .iter()
292 .map(|peer| peer.user_id)
293 .collect();
294 user_store
295 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
296 .await?;
297 let mut collaborators = HashMap::default();
298 for message in response.collaborators {
299 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
300 collaborators.insert(collaborator.peer_id, collaborator);
301 }
302
303 Ok(cx.add_model(|cx| {
304 let mut this = Self {
305 worktrees: Vec::new(),
306 open_buffers: Default::default(),
307 loading_buffers: Default::default(),
308 opened_buffer: broadcast::channel(1).0,
309 shared_buffers: Default::default(),
310 active_entry: None,
311 collaborators,
312 languages,
313 user_store,
314 fs,
315 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
316 client,
317 client_state: ProjectClientState::Remote {
318 sharing_has_stopped: false,
319 remote_id,
320 replica_id,
321 },
322 language_servers_with_diagnostics_running: 0,
323 language_servers: Default::default(),
324 };
325 for worktree in worktrees {
326 this.add_worktree(&worktree, cx);
327 }
328 this
329 }))
330 }
331
332 #[cfg(any(test, feature = "test-support"))]
333 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
334 let languages = Arc::new(LanguageRegistry::new());
335 let http_client = client::test::FakeHttpClient::with_404_response();
336 let client = client::Client::new(http_client.clone());
337 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
338 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
339 }
340
341 pub fn fs(&self) -> &Arc<dyn Fs> {
342 &self.fs
343 }
344
345 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
346 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
347 *remote_id_tx.borrow_mut() = remote_id;
348 }
349
350 self.subscriptions.clear();
351 if let Some(remote_id) = remote_id {
352 self.subscriptions
353 .push(self.client.add_model_for_remote_entity(remote_id, cx));
354 }
355 }
356
357 pub fn remote_id(&self) -> Option<u64> {
358 match &self.client_state {
359 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
360 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
361 }
362 }
363
364 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
365 let mut id = None;
366 let mut watch = None;
367 match &self.client_state {
368 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
369 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
370 }
371
372 async move {
373 if let Some(id) = id {
374 return id;
375 }
376 let mut watch = watch.unwrap();
377 loop {
378 let id = *watch.borrow();
379 if let Some(id) = id {
380 return id;
381 }
382 watch.recv().await;
383 }
384 }
385 }
386
387 pub fn replica_id(&self) -> ReplicaId {
388 match &self.client_state {
389 ProjectClientState::Local { .. } => 0,
390 ProjectClientState::Remote { replica_id, .. } => *replica_id,
391 }
392 }
393
394 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
395 &self.collaborators
396 }
397
398 pub fn worktrees<'a>(
399 &'a self,
400 cx: &'a AppContext,
401 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
402 self.worktrees
403 .iter()
404 .filter_map(move |worktree| worktree.upgrade(cx))
405 }
406
407 pub fn worktree_for_id(
408 &self,
409 id: WorktreeId,
410 cx: &AppContext,
411 ) -> Option<ModelHandle<Worktree>> {
412 self.worktrees(cx)
413 .find(|worktree| worktree.read(cx).id() == id)
414 }
415
416 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
417 let rpc = self.client.clone();
418 cx.spawn(|this, mut cx| async move {
419 let project_id = this.update(&mut cx, |this, _| {
420 if let ProjectClientState::Local {
421 is_shared,
422 remote_id_rx,
423 ..
424 } = &mut this.client_state
425 {
426 *is_shared = true;
427 remote_id_rx
428 .borrow()
429 .ok_or_else(|| anyhow!("no project id"))
430 } else {
431 Err(anyhow!("can't share a remote project"))
432 }
433 })?;
434
435 rpc.request(proto::ShareProject { project_id }).await?;
436 let mut tasks = Vec::new();
437 this.update(&mut cx, |this, cx| {
438 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
439 worktree.update(cx, |worktree, cx| {
440 let worktree = worktree.as_local_mut().unwrap();
441 tasks.push(worktree.share(project_id, cx));
442 });
443 }
444 });
445 for task in tasks {
446 task.await?;
447 }
448 this.update(&mut cx, |_, cx| cx.notify());
449 Ok(())
450 })
451 }
452
453 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
454 let rpc = self.client.clone();
455 cx.spawn(|this, mut cx| async move {
456 let project_id = this.update(&mut cx, |this, _| {
457 if let ProjectClientState::Local {
458 is_shared,
459 remote_id_rx,
460 ..
461 } = &mut this.client_state
462 {
463 *is_shared = false;
464 remote_id_rx
465 .borrow()
466 .ok_or_else(|| anyhow!("no project id"))
467 } else {
468 Err(anyhow!("can't unshare a remote project"))
469 }
470 })?;
471
472 rpc.send(proto::UnshareProject { project_id })?;
473 this.update(&mut cx, |this, cx| {
474 this.collaborators.clear();
475 this.shared_buffers.clear();
476 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
477 worktree.update(cx, |worktree, _| {
478 worktree.as_local_mut().unwrap().unshare();
479 });
480 }
481 cx.notify()
482 });
483 Ok(())
484 })
485 }
486
487 pub fn is_read_only(&self) -> bool {
488 match &self.client_state {
489 ProjectClientState::Local { .. } => false,
490 ProjectClientState::Remote {
491 sharing_has_stopped,
492 ..
493 } => *sharing_has_stopped,
494 }
495 }
496
497 pub fn is_local(&self) -> bool {
498 match &self.client_state {
499 ProjectClientState::Local { .. } => true,
500 ProjectClientState::Remote { .. } => false,
501 }
502 }
503
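/// Opens (or reuses) the buffer for a path within one of the project's
/// worktrees. A rough usage sketch, assuming a `project` handle, a known
/// `worktree_id`, and an async context are in scope (the path is illustrative):
///
/// ```ignore
/// let open_task = project.update(cx, |project, cx| {
///     project.open_buffer(
///         ProjectPath {
///             worktree_id,
///             path: std::path::Path::new("src/main.rs").into(),
///         },
///         cx,
///     )
/// });
/// let buffer = open_task.await?;
/// ```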
504 pub fn open_buffer(
505 &mut self,
506 path: impl Into<ProjectPath>,
507 cx: &mut ModelContext<Self>,
508 ) -> Task<Result<ModelHandle<Buffer>>> {
509 let project_path = path.into();
510 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
511 worktree
512 } else {
513 return Task::ready(Err(anyhow!("no such worktree")));
514 };
515
516 // If there is already a buffer for the given path, then return it.
517 let existing_buffer = self.get_open_buffer(&project_path, cx);
518 if let Some(existing_buffer) = existing_buffer {
519 return Task::ready(Ok(existing_buffer));
520 }
521
522 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
523 // If the given path is already being loaded, then wait for that existing
524 // task to complete and return the same buffer.
525 hash_map::Entry::Occupied(e) => e.get().clone(),
526
527 // Otherwise, record the fact that this path is now being loaded.
528 hash_map::Entry::Vacant(entry) => {
529 let (mut tx, rx) = postage::watch::channel();
530 entry.insert(rx.clone());
531
532 let load_buffer = if worktree.read(cx).is_local() {
533 self.open_local_buffer(&project_path.path, &worktree, cx)
534 } else {
535 self.open_remote_buffer(&project_path.path, &worktree, cx)
536 };
537
538 cx.spawn(move |this, mut cx| async move {
539 let load_result = load_buffer.await;
540 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
541 // Record the fact that the buffer is no longer loading.
542 this.loading_buffers.remove(&project_path);
543 let buffer = load_result.map_err(Arc::new)?;
544 Ok(buffer)
545 }));
546 })
547 .detach();
548 rx
549 }
550 };
551
552 cx.foreground().spawn(async move {
553 loop {
554 if let Some(result) = loading_watch.borrow().as_ref() {
555 match result {
556 Ok(buffer) => return Ok(buffer.clone()),
557 Err(error) => return Err(anyhow!("{}", error)),
558 }
559 }
560 loading_watch.recv().await;
561 }
562 })
563 }
564
565 fn open_local_buffer(
566 &mut self,
567 path: &Arc<Path>,
568 worktree: &ModelHandle<Worktree>,
569 cx: &mut ModelContext<Self>,
570 ) -> Task<Result<ModelHandle<Buffer>>> {
571 let load_buffer = worktree.update(cx, |worktree, cx| {
572 let worktree = worktree.as_local_mut().unwrap();
573 worktree.load_buffer(path, cx)
574 });
575 let worktree = worktree.downgrade();
576 cx.spawn(|this, mut cx| async move {
577 let buffer = load_buffer.await?;
578 let worktree = worktree
579 .upgrade(&cx)
580 .ok_or_else(|| anyhow!("worktree was removed"))?;
581 this.update(&mut cx, |this, cx| {
582 this.register_buffer(&buffer, Some(&worktree), cx)
583 })?;
584 Ok(buffer)
585 })
586 }
587
588 fn open_remote_buffer(
589 &mut self,
590 path: &Arc<Path>,
591 worktree: &ModelHandle<Worktree>,
592 cx: &mut ModelContext<Self>,
593 ) -> Task<Result<ModelHandle<Buffer>>> {
594 let rpc = self.client.clone();
595 let project_id = self.remote_id().unwrap();
596 let remote_worktree_id = worktree.read(cx).id();
597 let path = path.clone();
598 let path_string = path.to_string_lossy().to_string();
599 cx.spawn(|this, mut cx| async move {
600 let response = rpc
601 .request(proto::OpenBuffer {
602 project_id,
603 worktree_id: remote_worktree_id.to_proto(),
604 path: path_string,
605 })
606 .await?;
607 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
608 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
609 .await
610 })
611 }
612
613 fn open_local_buffer_from_lsp_path(
614 &mut self,
615 abs_path: lsp::Url,
616 lang_name: String,
617 lang_server: Arc<LanguageServer>,
618 cx: &mut ModelContext<Self>,
619 ) -> Task<Result<ModelHandle<Buffer>>> {
620 cx.spawn(|this, mut cx| async move {
621 let abs_path = abs_path
622 .to_file_path()
623 .map_err(|_| anyhow!("can't convert URI to path"))?;
624 let (worktree, relative_path) = if let Some(result) =
625 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
626 {
627 result
628 } else {
629 let worktree = this
630 .update(&mut cx, |this, cx| {
631 this.create_local_worktree(&abs_path, true, cx)
632 })
633 .await?;
634 this.update(&mut cx, |this, cx| {
635 this.language_servers
636 .insert((worktree.read(cx).id(), lang_name), lang_server);
637 });
638 (worktree, PathBuf::new())
639 };
640
641 let project_path = ProjectPath {
642 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
643 path: relative_path.into(),
644 };
645 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
646 .await
647 })
648 }
649
650 pub fn save_buffer_as(
651 &self,
652 buffer: ModelHandle<Buffer>,
653 abs_path: PathBuf,
654 cx: &mut ModelContext<Project>,
655 ) -> Task<Result<()>> {
656 let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
657 cx.spawn(|this, mut cx| async move {
658 let (worktree, path) = worktree_task.await?;
659 worktree
660 .update(&mut cx, |worktree, cx| {
661 worktree
662 .as_local_mut()
663 .unwrap()
664 .save_buffer_as(buffer.clone(), path, cx)
665 })
666 .await?;
667 this.update(&mut cx, |this, cx| {
668 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
669 });
670 Ok(())
671 })
672 }
673
674 #[cfg(any(test, feature = "test-support"))]
675 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
676 let path = path.into();
677 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
678 self.open_buffers.iter().any(|(_, buffer)| {
679 if let Some(buffer) = buffer.upgrade(cx) {
680 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
681 if file.worktree == worktree && file.path() == &path.path {
682 return true;
683 }
684 }
685 }
686 false
687 })
688 } else {
689 false
690 }
691 }
692
693 fn get_open_buffer(
694 &mut self,
695 path: &ProjectPath,
696 cx: &mut ModelContext<Self>,
697 ) -> Option<ModelHandle<Buffer>> {
698 let mut result = None;
699 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
700 self.open_buffers.retain(|_, buffer| {
701 if let Some(buffer) = buffer.upgrade(cx) {
702 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
703 if file.worktree == worktree && file.path() == &path.path {
704 result = Some(buffer);
705 }
706 }
707 true
708 } else {
709 false
710 }
711 });
712 result
713 }
714
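/// Records a newly opened buffer, applying any operations that arrived over
/// RPC before it finished loading, then assigns its language.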
715 fn register_buffer(
716 &mut self,
717 buffer: &ModelHandle<Buffer>,
718 worktree: Option<&ModelHandle<Worktree>>,
719 cx: &mut ModelContext<Self>,
720 ) -> Result<()> {
721 match self.open_buffers.insert(
722 buffer.read(cx).remote_id(),
723 OpenBuffer::Loaded(buffer.downgrade()),
724 ) {
725 None => {}
726 Some(OpenBuffer::Operations(operations)) => {
727 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
728 }
729 Some(OpenBuffer::Loaded(_)) => Err(anyhow!("registered the same buffer twice"))?,
730 }
731 self.assign_language_to_buffer(&buffer, worktree, cx);
732 Ok(())
733 }
734
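/// Detects the buffer's language from its path and, for local worktrees,
/// attaches (starting if necessary) the matching language server and any
/// previously stored diagnostics.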
735 fn assign_language_to_buffer(
736 &mut self,
737 buffer: &ModelHandle<Buffer>,
738 worktree: Option<&ModelHandle<Worktree>>,
739 cx: &mut ModelContext<Self>,
740 ) -> Option<()> {
741 let (path, full_path) = {
742 let file = buffer.read(cx).file()?;
743 (file.path().clone(), file.full_path(cx))
744 };
745
746 // If the buffer has a language, set it and start or assign the appropriate language server.
747 if let Some(language) = self.languages.select_language(&full_path) {
748 buffer.update(cx, |buffer, cx| {
749 buffer.set_language(Some(language.clone()), cx);
750 });
751
752 // For local worktrees, start a language server if needed.
753 // Also assign the language server and any previously stored diagnostics to the buffer.
754 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
755 let worktree_id = local_worktree.id();
756 let worktree_abs_path = local_worktree.abs_path().clone();
757
758 let language_server = match self
759 .language_servers
760 .entry((worktree_id, language.name().to_string()))
761 {
762 hash_map::Entry::Occupied(e) => Some(e.get().clone()),
763 hash_map::Entry::Vacant(e) => Self::start_language_server(
764 self.client.clone(),
765 language.clone(),
766 &worktree_abs_path,
767 cx,
768 )
769 .map(|server| e.insert(server).clone()),
770 };
771
772 buffer.update(cx, |buffer, cx| {
773 buffer.set_language_server(language_server, cx);
774 });
775 }
776 }
777
778 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
779 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
780 buffer.update(cx, |buffer, cx| {
781 buffer.update_diagnostics(diagnostics, None, cx).log_err();
782 });
783 }
784 }
785
786 None
787 }
788
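/// Starts a language server for `language` rooted at `worktree_path` and
/// wires its diagnostics and progress notifications back into the project.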
789 fn start_language_server(
790 rpc: Arc<Client>,
791 language: Arc<Language>,
792 worktree_path: &Path,
793 cx: &mut ModelContext<Self>,
794 ) -> Option<Arc<LanguageServer>> {
795 enum LspEvent {
796 DiagnosticsStart,
797 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
798 DiagnosticsFinish,
799 }
800
801 let language_server = language
802 .start_server(worktree_path, cx)
803 .log_err()
804 .flatten()?;
805 let disk_based_sources = language
806 .disk_based_diagnostic_sources()
807 .cloned()
808 .unwrap_or_default();
809 let disk_based_diagnostics_progress_token =
810 language.disk_based_diagnostics_progress_token().cloned();
811 let has_disk_based_diagnostic_progress_token =
812 disk_based_diagnostics_progress_token.is_some();
813 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
814
815 // Listen for `PublishDiagnostics` notifications.
816 language_server
817 .on_notification::<lsp::notification::PublishDiagnostics, _>({
818 let diagnostics_tx = diagnostics_tx.clone();
819 move |params| {
820 if !has_disk_based_diagnostic_progress_token {
821 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
822 }
823 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params))).ok();
824 if !has_disk_based_diagnostic_progress_token {
825 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
826 }
827 }
828 })
829 .detach();
830
831 // Listen for `Progress` notifications. Send an event when the language server
832 // transitions between running jobs and not running any jobs.
833 let mut running_jobs_for_this_server: i32 = 0;
834 language_server
835 .on_notification::<lsp::notification::Progress, _>(move |params| {
836 let token = match params.token {
837 lsp::NumberOrString::Number(_) => None,
838 lsp::NumberOrString::String(token) => Some(token),
839 };
840
841 if token == disk_based_diagnostics_progress_token {
842 match params.value {
843 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
844 lsp::WorkDoneProgress::Begin(_) => {
845 running_jobs_for_this_server += 1;
846 if running_jobs_for_this_server == 1 {
847 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
848 }
849 }
850 lsp::WorkDoneProgress::End(_) => {
851 running_jobs_for_this_server -= 1;
852 if running_jobs_for_this_server == 0 {
853 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
854 }
855 }
856 _ => {}
857 },
858 }
859 }
860 })
861 .detach();
862
863 // Process all the LSP events.
864 cx.spawn_weak(|this, mut cx| async move {
865 while let Ok(message) = diagnostics_rx.recv().await {
866 let this = this.upgrade(&cx)?;
867 match message {
868 LspEvent::DiagnosticsStart => {
869 this.update(&mut cx, |this, cx| {
870 this.disk_based_diagnostics_started(cx);
871 if let Some(project_id) = this.remote_id() {
872 rpc.send(proto::DiskBasedDiagnosticsUpdating { project_id })
873 .log_err();
874 }
875 });
876 }
877 LspEvent::DiagnosticsUpdate(mut params) => {
878 language.process_diagnostics(&mut params);
879 this.update(&mut cx, |this, cx| {
880 this.update_diagnostics(params, &disk_based_sources, cx)
881 .log_err();
882 });
883 }
884 LspEvent::DiagnosticsFinish => {
885 this.update(&mut cx, |this, cx| {
886 this.disk_based_diagnostics_finished(cx);
887 if let Some(project_id) = this.remote_id() {
888 rpc.send(proto::DiskBasedDiagnosticsUpdated { project_id })
889 .log_err();
890 }
891 });
892 }
893 }
894 }
895 Some(())
896 })
897 .detach();
898
899 Some(language_server)
900 }
901
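/// Converts an LSP `PublishDiagnostics` notification into diagnostic entries,
/// grouping related information with its primary diagnostic, then stores the
/// result on the affected buffer and worktree.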
902 pub fn update_diagnostics(
903 &mut self,
904 params: lsp::PublishDiagnosticsParams,
905 disk_based_sources: &HashSet<String>,
906 cx: &mut ModelContext<Self>,
907 ) -> Result<()> {
908 let abs_path = params
909 .uri
910 .to_file_path()
911 .map_err(|_| anyhow!("URI is not a file"))?;
912 let mut next_group_id = 0;
913 let mut diagnostics = Vec::default();
914 let mut primary_diagnostic_group_ids = HashMap::default();
915 let mut sources_by_group_id = HashMap::default();
916 let mut supporting_diagnostic_severities = HashMap::default();
917 for diagnostic in &params.diagnostics {
918 let source = diagnostic.source.as_ref();
919 let code = diagnostic.code.as_ref().map(|code| match code {
920 lsp::NumberOrString::Number(code) => code.to_string(),
921 lsp::NumberOrString::String(code) => code.clone(),
922 });
923 let range = range_from_lsp(diagnostic.range);
924 let is_supporting = diagnostic
925 .related_information
926 .as_ref()
927 .map_or(false, |infos| {
928 infos.iter().any(|info| {
929 primary_diagnostic_group_ids.contains_key(&(
930 source,
931 code.clone(),
932 range_from_lsp(info.location.range),
933 ))
934 })
935 });
936
937 if is_supporting {
938 if let Some(severity) = diagnostic.severity {
939 supporting_diagnostic_severities
940 .insert((source, code.clone(), range), severity);
941 }
942 } else {
943 let group_id = post_inc(&mut next_group_id);
944 let is_disk_based =
945 source.map_or(false, |source| disk_based_sources.contains(source));
946
947 sources_by_group_id.insert(group_id, source);
948 primary_diagnostic_group_ids
949 .insert((source, code.clone(), range.clone()), group_id);
950
951 diagnostics.push(DiagnosticEntry {
952 range,
953 diagnostic: Diagnostic {
954 code: code.clone(),
955 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
956 message: diagnostic.message.clone(),
957 group_id,
958 is_primary: true,
959 is_valid: true,
960 is_disk_based,
961 },
962 });
963 if let Some(infos) = &diagnostic.related_information {
964 for info in infos {
965 if info.location.uri == params.uri && !info.message.is_empty() {
966 let range = range_from_lsp(info.location.range);
967 diagnostics.push(DiagnosticEntry {
968 range,
969 diagnostic: Diagnostic {
970 code: code.clone(),
971 severity: DiagnosticSeverity::INFORMATION,
972 message: info.message.clone(),
973 group_id,
974 is_primary: false,
975 is_valid: true,
976 is_disk_based,
977 },
978 });
979 }
980 }
981 }
982 }
983 }
984
985 for entry in &mut diagnostics {
986 let diagnostic = &mut entry.diagnostic;
987 if !diagnostic.is_primary {
988 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
989 if let Some(&severity) = supporting_diagnostic_severities.get(&(
990 source,
991 diagnostic.code.clone(),
992 entry.range.clone(),
993 )) {
994 diagnostic.severity = severity;
995 }
996 }
997 }
998
999 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1000 Ok(())
1001 }
1002
1003 pub fn update_diagnostic_entries(
1004 &mut self,
1005 abs_path: PathBuf,
1006 version: Option<i32>,
1007 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1008 cx: &mut ModelContext<Project>,
1009 ) -> Result<(), anyhow::Error> {
1010 let (worktree, relative_path) = self
1011 .find_local_worktree(&abs_path, cx)
1012 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1013 let project_path = ProjectPath {
1014 worktree_id: worktree.read(cx).id(),
1015 path: relative_path.into(),
1016 };
1017
1018 for buffer in self.open_buffers.values() {
1019 if let Some(buffer) = buffer.upgrade(cx) {
1020 if buffer
1021 .read(cx)
1022 .file()
1023 .map_or(false, |file| *file.path() == project_path.path)
1024 {
1025 buffer.update(cx, |buffer, cx| {
1026 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1027 })?;
1028 break;
1029 }
1030 }
1031 }
1032 worktree.update(cx, |worktree, cx| {
1033 worktree
1034 .as_local_mut()
1035 .ok_or_else(|| anyhow!("not a local worktree"))?
1036 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1037 })?;
1038 cx.emit(Event::DiagnosticsUpdated(project_path));
1039 Ok(())
1040 }
1041
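/// Formats the given buffers, forwarding remote buffers to the host project
/// and issuing the LSP formatting request for local ones.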
1042 pub fn format(
1043 &self,
1044 buffers: HashSet<ModelHandle<Buffer>>,
1045 push_to_history: bool,
1046 cx: &mut ModelContext<Project>,
1047 ) -> Task<Result<ProjectTransaction>> {
1048 let mut local_buffers = Vec::new();
1049 let mut remote_buffers = None;
1050 for buffer_handle in buffers {
1051 let buffer = buffer_handle.read(cx);
1052 let worktree;
1053 if let Some(file) = File::from_dyn(buffer.file()) {
1054 worktree = file.worktree.clone();
1055 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1056 let lang_server;
1057 if let Some(lang) = buffer.language() {
1058 if let Some(server) = self
1059 .language_servers
1060 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1061 {
1062 lang_server = server.clone();
1063 } else {
1064 return Task::ready(Ok(Default::default()));
1065 };
1066 } else {
1067 return Task::ready(Ok(Default::default()));
1068 }
1069
1070 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1071 } else {
1072 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1073 }
1074 } else {
1075 return Task::ready(Ok(Default::default()));
1076 }
1077 }
1078
1079 let remote_buffers = self.remote_id().zip(remote_buffers);
1080 let client = self.client.clone();
1081
1082 cx.spawn(|this, mut cx| async move {
1083 let mut project_transaction = ProjectTransaction::default();
1084
1085 if let Some((project_id, remote_buffers)) = remote_buffers {
1086 let response = client
1087 .request(proto::FormatBuffers {
1088 project_id,
1089 buffer_ids: remote_buffers
1090 .iter()
1091 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1092 .collect(),
1093 })
1094 .await?
1095 .transaction
1096 .ok_or_else(|| anyhow!("missing transaction"))?;
1097 project_transaction = this
1098 .update(&mut cx, |this, cx| {
1099 this.deserialize_project_transaction(response, push_to_history, cx)
1100 })
1101 .await?;
1102 }
1103
1104 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1105 let lsp_edits = lang_server
1106 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1107 text_document: lsp::TextDocumentIdentifier::new(
1108 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1109 ),
1110 options: Default::default(),
1111 work_done_progress_params: Default::default(),
1112 })
1113 .await?;
1114
1115 if let Some(lsp_edits) = lsp_edits {
1116 let edits = buffer
1117 .update(&mut cx, |buffer, cx| {
1118 buffer.edits_from_lsp(lsp_edits, None, cx)
1119 })
1120 .await?;
1121 buffer.update(&mut cx, |buffer, cx| {
1122 buffer.finalize_last_transaction();
1123 buffer.start_transaction();
1124 for (range, text) in edits {
1125 buffer.edit([range], text, cx);
1126 }
1127 if buffer.end_transaction(cx).is_some() {
1128 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1129 if !push_to_history {
1130 buffer.forget_transaction(transaction.id);
1131 }
1132 project_transaction.0.insert(cx.handle(), transaction);
1133 }
1134 });
1135 }
1136 }
1137
1138 Ok(project_transaction)
1139 })
1140 }
1141
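/// Resolves go-to-definition at `position`, either via the buffer's language
/// server (local) or by forwarding the request to the host project (remote).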
1142 pub fn definition<T: ToPointUtf16>(
1143 &self,
1144 source_buffer_handle: &ModelHandle<Buffer>,
1145 position: T,
1146 cx: &mut ModelContext<Self>,
1147 ) -> Task<Result<Vec<Definition>>> {
1148 let source_buffer_handle = source_buffer_handle.clone();
1149 let source_buffer = source_buffer_handle.read(cx);
1150 let worktree;
1151 let buffer_abs_path;
1152 if let Some(file) = File::from_dyn(source_buffer.file()) {
1153 worktree = file.worktree.clone();
1154 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1155 } else {
1156 return Task::ready(Ok(Default::default()));
1157 };
1158
1159 let position = position.to_point_utf16(source_buffer);
1160
1161 if worktree.read(cx).as_local().is_some() {
1162 let buffer_abs_path = buffer_abs_path.unwrap();
1163 let lang_name;
1164 let lang_server;
1165 if let Some(lang) = source_buffer.language() {
1166 lang_name = lang.name().to_string();
1167 if let Some(server) = self
1168 .language_servers
1169 .get(&(worktree.read(cx).id(), lang_name.clone()))
1170 {
1171 lang_server = server.clone();
1172 } else {
1173 return Task::ready(Ok(Default::default()));
1174 };
1175 } else {
1176 return Task::ready(Ok(Default::default()));
1177 }
1178
1179 cx.spawn(|this, mut cx| async move {
1180 let response = lang_server
1181 .request::<lsp::request::GotoDefinition>(lsp::GotoDefinitionParams {
1182 text_document_position_params: lsp::TextDocumentPositionParams {
1183 text_document: lsp::TextDocumentIdentifier::new(
1184 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1185 ),
1186 position: lsp::Position::new(position.row, position.column),
1187 },
1188 work_done_progress_params: Default::default(),
1189 partial_result_params: Default::default(),
1190 })
1191 .await?;
1192
1193 let mut definitions = Vec::new();
1194 if let Some(response) = response {
1195 let mut unresolved_locations = Vec::new();
1196 match response {
1197 lsp::GotoDefinitionResponse::Scalar(loc) => {
1198 unresolved_locations.push((loc.uri, loc.range));
1199 }
1200 lsp::GotoDefinitionResponse::Array(locs) => {
1201 unresolved_locations.extend(locs.into_iter().map(|l| (l.uri, l.range)));
1202 }
1203 lsp::GotoDefinitionResponse::Link(links) => {
1204 unresolved_locations.extend(
1205 links
1206 .into_iter()
1207 .map(|l| (l.target_uri, l.target_selection_range)),
1208 );
1209 }
1210 }
1211
1212 for (target_uri, target_range) in unresolved_locations {
1213 let target_buffer_handle = this
1214 .update(&mut cx, |this, cx| {
1215 this.open_local_buffer_from_lsp_path(
1216 target_uri,
1217 lang_name.clone(),
1218 lang_server.clone(),
1219 cx,
1220 )
1221 })
1222 .await?;
1223
1224 cx.read(|cx| {
1225 let target_buffer = target_buffer_handle.read(cx);
1226 let target_start = target_buffer
1227 .clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
1228 let target_end = target_buffer
1229 .clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
1230 definitions.push(Definition {
1231 target_buffer: target_buffer_handle,
1232 target_range: target_buffer.anchor_after(target_start)
1233 ..target_buffer.anchor_before(target_end),
1234 });
1235 });
1236 }
1237 }
1238
1239 Ok(definitions)
1240 })
1241 } else if let Some(project_id) = self.remote_id() {
1242 let client = self.client.clone();
1243 let request = proto::GetDefinition {
1244 project_id,
1245 buffer_id: source_buffer.remote_id(),
1246 position: Some(serialize_anchor(&source_buffer.anchor_before(position))),
1247 };
1248 cx.spawn(|this, mut cx| async move {
1249 let response = client.request(request).await?;
1250 let mut definitions = Vec::new();
1251 for definition in response.definitions {
1252 let buffer = definition.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1253 let target_buffer = this
1254 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1255 .await?;
1256 let target_start = definition
1257 .target_start
1258 .and_then(deserialize_anchor)
1259 .ok_or_else(|| anyhow!("missing target start"))?;
1260 let target_end = definition
1261 .target_end
1262 .and_then(deserialize_anchor)
1263 .ok_or_else(|| anyhow!("missing target end"))?;
1264 definitions.push(Definition {
1265 target_buffer,
1266 target_range: target_start..target_end,
1267 })
1268 }
1269
1270 Ok(definitions)
1271 })
1272 } else {
1273 Task::ready(Ok(Default::default()))
1274 }
1275 }
1276
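/// Requests completions at `position`, translating LSP text edits into anchor
/// ranges; remote projects forward the request to the host.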
1277 pub fn completions<T: ToPointUtf16>(
1278 &self,
1279 source_buffer_handle: &ModelHandle<Buffer>,
1280 position: T,
1281 cx: &mut ModelContext<Self>,
1282 ) -> Task<Result<Vec<Completion>>> {
1283 let source_buffer_handle = source_buffer_handle.clone();
1284 let source_buffer = source_buffer_handle.read(cx);
1285 let buffer_id = source_buffer.remote_id();
1286 let language = source_buffer.language().cloned();
1287 let worktree;
1288 let buffer_abs_path;
1289 if let Some(file) = File::from_dyn(source_buffer.file()) {
1290 worktree = file.worktree.clone();
1291 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1292 } else {
1293 return Task::ready(Ok(Default::default()));
1294 };
1295
1296 let position = position.to_point_utf16(source_buffer);
1297 let anchor = source_buffer.anchor_after(position);
1298
1299 if worktree.read(cx).as_local().is_some() {
1300 let buffer_abs_path = buffer_abs_path.unwrap();
1301 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1302 server
1303 } else {
1304 return Task::ready(Ok(Default::default()));
1305 };
1306
1307 cx.spawn(|_, cx| async move {
1308 let completions = lang_server
1309 .request::<lsp::request::Completion>(lsp::CompletionParams {
1310 text_document_position: lsp::TextDocumentPositionParams::new(
1311 lsp::TextDocumentIdentifier::new(
1312 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1313 ),
1314 position.to_lsp_position(),
1315 ),
1316 context: Default::default(),
1317 work_done_progress_params: Default::default(),
1318 partial_result_params: Default::default(),
1319 })
1320 .await?;
1321
1322 let completions = if let Some(completions) = completions {
1323 match completions {
1324 lsp::CompletionResponse::Array(completions) => completions,
1325 lsp::CompletionResponse::List(list) => list.items,
1326 }
1327 } else {
1328 Default::default()
1329 };
1330
1331 source_buffer_handle.read_with(&cx, |this, _| {
1332 Ok(completions
1333 .into_iter()
1334 .filter_map(|lsp_completion| {
1335 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1336 lsp::CompletionTextEdit::Edit(edit) => {
1337 (range_from_lsp(edit.range), edit.new_text.clone())
1338 }
1339 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1340 log::info!("unsupported insert/replace completion");
1341 return None;
1342 }
1343 };
1344
1345 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1346 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1347 if clipped_start == old_range.start && clipped_end == old_range.end {
1348 Some(Completion {
1349 old_range: this.anchor_before(old_range.start)
1350 ..this.anchor_after(old_range.end),
1351 new_text,
1352 label: language
1353 .as_ref()
1354 .and_then(|l| l.label_for_completion(&lsp_completion))
1355 .unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
1356 lsp_completion,
1357 })
1358 } else {
1359 None
1360 }
1361 })
1362 .collect())
1363 })
1364 })
1365 } else if let Some(project_id) = self.remote_id() {
1366 let rpc = self.client.clone();
1367 let message = proto::GetCompletions {
1368 project_id,
1369 buffer_id,
1370 position: Some(language::proto::serialize_anchor(&anchor)),
1371 version: (&source_buffer.version()).into(),
1372 };
1373 cx.spawn_weak(|_, mut cx| async move {
1374 let response = rpc.request(message).await?;
1375 source_buffer_handle
1376 .update(&mut cx, |buffer, _| {
1377 buffer.wait_for_version(response.version.into())
1378 })
1379 .await;
1380 response
1381 .completions
1382 .into_iter()
1383 .map(|completion| {
1384 language::proto::deserialize_completion(completion, language.as_ref())
1385 })
1386 .collect()
1387 })
1388 } else {
1389 Task::ready(Ok(Default::default()))
1390 }
1391 }
1392
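/// Resolves the chosen completion and applies any additional text edits it
/// carries (such as auto-imports) as a single transaction.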
1393 pub fn apply_additional_edits_for_completion(
1394 &self,
1395 buffer_handle: ModelHandle<Buffer>,
1396 completion: Completion,
1397 push_to_history: bool,
1398 cx: &mut ModelContext<Self>,
1399 ) -> Task<Result<Option<Transaction>>> {
1400 let buffer = buffer_handle.read(cx);
1401 let buffer_id = buffer.remote_id();
1402
1403 if self.is_local() {
1404 let lang_server = if let Some(language_server) = buffer.language_server() {
1405 language_server.clone()
1406 } else {
1407 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1408 };
1409
1410 cx.spawn(|_, mut cx| async move {
1411 let resolved_completion = lang_server
1412 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1413 .await?;
1414 if let Some(edits) = resolved_completion.additional_text_edits {
1415 let edits = buffer_handle
1416 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1417 .await?;
1418 buffer_handle.update(&mut cx, |buffer, cx| {
1419 buffer.finalize_last_transaction();
1420 buffer.start_transaction();
1421 for (range, text) in edits {
1422 buffer.edit([range], text, cx);
1423 }
1424 let transaction = if buffer.end_transaction(cx).is_some() {
1425 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1426 if !push_to_history {
1427 buffer.forget_transaction(transaction.id);
1428 }
1429 Some(transaction)
1430 } else {
1431 None
1432 };
1433 Ok(transaction)
1434 })
1435 } else {
1436 Ok(None)
1437 }
1438 })
1439 } else if let Some(project_id) = self.remote_id() {
1440 let client = self.client.clone();
1441 cx.spawn(|_, mut cx| async move {
1442 let response = client
1443 .request(proto::ApplyCompletionAdditionalEdits {
1444 project_id,
1445 buffer_id,
1446 completion: Some(language::proto::serialize_completion(&completion)),
1447 })
1448 .await?;
1449
1450 if let Some(transaction) = response.transaction {
1451 let transaction = language::proto::deserialize_transaction(transaction)?;
1452 buffer_handle
1453 .update(&mut cx, |buffer, _| {
1454 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1455 })
1456 .await;
1457 if push_to_history {
1458 buffer_handle.update(&mut cx, |buffer, _| {
1459 buffer.push_transaction(transaction.clone(), Instant::now());
1460 });
1461 }
1462 Ok(Some(transaction))
1463 } else {
1464 Ok(None)
1465 }
1466 })
1467 } else {
1468 Task::ready(Err(anyhow!("project does not have a remote id")))
1469 }
1470 }
1471
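/// Fetches the code actions intersecting `range`, restricted to quickfix and
/// refactor kinds.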
1472 pub fn code_actions<T: ToOffset>(
1473 &self,
1474 buffer_handle: &ModelHandle<Buffer>,
1475 range: Range<T>,
1476 cx: &mut ModelContext<Self>,
1477 ) -> Task<Result<Vec<CodeAction>>> {
1478 let buffer_handle = buffer_handle.clone();
1479 let buffer = buffer_handle.read(cx);
1480 let buffer_id = buffer.remote_id();
1481 let worktree;
1482 let buffer_abs_path;
1483 if let Some(file) = File::from_dyn(buffer.file()) {
1484 worktree = file.worktree.clone();
1485 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1486 } else {
1487 return Task::ready(Ok(Default::default()));
1488 };
1489 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1490
1491 if worktree.read(cx).as_local().is_some() {
1492 let buffer_abs_path = buffer_abs_path.unwrap();
1493 let lang_name;
1494 let lang_server;
1495 if let Some(lang) = buffer.language() {
1496 lang_name = lang.name().to_string();
1497 if let Some(server) = self
1498 .language_servers
1499 .get(&(worktree.read(cx).id(), lang_name.clone()))
1500 {
1501 lang_server = server.clone();
1502 } else {
1503 return Task::ready(Ok(Default::default()));
1504 };
1505 } else {
1506 return Task::ready(Ok(Default::default()));
1507 }
1508
1509 let lsp_range = lsp::Range::new(
1510 range.start.to_point_utf16(buffer).to_lsp_position(),
1511 range.end.to_point_utf16(buffer).to_lsp_position(),
1512 );
1513 cx.foreground().spawn(async move {
1514 Ok(lang_server
1515 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1516 text_document: lsp::TextDocumentIdentifier::new(
1517 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1518 ),
1519 range: lsp_range,
1520 work_done_progress_params: Default::default(),
1521 partial_result_params: Default::default(),
1522 context: lsp::CodeActionContext {
1523 diagnostics: Default::default(),
1524 only: Some(vec![
1525 lsp::CodeActionKind::QUICKFIX,
1526 lsp::CodeActionKind::REFACTOR,
1527 lsp::CodeActionKind::REFACTOR_EXTRACT,
1528 ]),
1529 },
1530 })
1531 .await?
1532 .unwrap_or_default()
1533 .into_iter()
1534 .filter_map(|entry| {
1535 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1536 Some(CodeAction {
1537 range: range.clone(),
1538 lsp_action,
1539 })
1540 } else {
1541 None
1542 }
1543 })
1544 .collect())
1545 })
1546 } else if let Some(project_id) = self.remote_id() {
1547 let rpc = self.client.clone();
1548 cx.foreground().spawn(async move {
1549 let response = rpc
1550 .request(proto::GetCodeActions {
1551 project_id,
1552 buffer_id,
1553 start: Some(language::proto::serialize_anchor(&range.start)),
1554 end: Some(language::proto::serialize_anchor(&range.end)),
1555 })
1556 .await?;
1557 response
1558 .actions
1559 .into_iter()
1560 .map(language::proto::deserialize_code_action)
1561 .collect()
1562 })
1563 } else {
1564 Task::ready(Ok(Default::default()))
1565 }
1566 }
1567
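/// Applies a code action: the action is re-resolved (or re-fetched when the
/// server provides no resolve data), and its workspace edit (file creations,
/// renames, deletions, and text edits) is applied, returning the resulting
/// per-buffer transactions.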
1568 pub fn apply_code_action(
1569 &self,
1570 buffer_handle: ModelHandle<Buffer>,
1571 mut action: CodeAction,
1572 push_to_history: bool,
1573 cx: &mut ModelContext<Self>,
1574 ) -> Task<Result<ProjectTransaction>> {
1575 if self.is_local() {
1576 let buffer = buffer_handle.read(cx);
1577 let lang_name = if let Some(lang) = buffer.language() {
1578 lang.name().to_string()
1579 } else {
1580 return Task::ready(Ok(Default::default()));
1581 };
1582 let lang_server = if let Some(language_server) = buffer.language_server() {
1583 language_server.clone()
1584 } else {
1585 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1586 };
1587 let range = action.range.to_point_utf16(buffer);
1588 let fs = self.fs.clone();
1589
1590 cx.spawn(|this, mut cx| async move {
1591 if let Some(lsp_range) = action
1592 .lsp_action
1593 .data
1594 .as_mut()
1595 .and_then(|d| d.get_mut("codeActionParams"))
1596 .and_then(|d| d.get_mut("range"))
1597 {
1598 *lsp_range = serde_json::to_value(&lsp::Range::new(
1599 range.start.to_lsp_position(),
1600 range.end.to_lsp_position(),
1601 ))
1602 .unwrap();
1603 action.lsp_action = lang_server
1604 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1605 .await?;
1606 } else {
1607 let actions = this
1608 .update(&mut cx, |this, cx| {
1609 this.code_actions(&buffer_handle, action.range, cx)
1610 })
1611 .await?;
1612 action.lsp_action = actions
1613 .into_iter()
1614 .find(|a| a.lsp_action.title == action.lsp_action.title)
1615 .ok_or_else(|| anyhow!("code action is outdated"))?
1616 .lsp_action;
1617 }
1618
1619 let mut operations = Vec::new();
1620 if let Some(edit) = action.lsp_action.edit {
1621 if let Some(document_changes) = edit.document_changes {
1622 match document_changes {
1623 lsp::DocumentChanges::Edits(edits) => operations
1624 .extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit)),
1625 lsp::DocumentChanges::Operations(ops) => operations = ops,
1626 }
1627 } else if let Some(changes) = edit.changes {
1628 operations.extend(changes.into_iter().map(|(uri, edits)| {
1629 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1630 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1631 uri,
1632 version: None,
1633 },
1634 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1635 })
1636 }));
1637 }
1638 }
1639
1640 let mut project_transaction = ProjectTransaction::default();
1641 for operation in operations {
1642 match operation {
1643 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1644 let abs_path = op
1645 .uri
1646 .to_file_path()
1647 .map_err(|_| anyhow!("can't convert URI to path"))?;
1648
1649 if let Some(parent_path) = abs_path.parent() {
1650 fs.create_dir(parent_path).await?;
1651 }
1652 if abs_path.ends_with("/") {
1653 fs.create_dir(&abs_path).await?;
1654 } else {
1655 fs.create_file(
1656 &abs_path,
1657 op.options.map(Into::into).unwrap_or_default(),
1658 )
1659 .await?;
1660 }
1661 }
1662 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1663 let source_abs_path = op
1664 .old_uri
1665 .to_file_path()
1666 .map_err(|_| anyhow!("can't convert URI to path"))?;
1667 let target_abs_path = op
1668 .new_uri
1669 .to_file_path()
1670 .map_err(|_| anyhow!("can't convert URI to path"))?;
1671 fs.rename(
1672 &source_abs_path,
1673 &target_abs_path,
1674 op.options.map(Into::into).unwrap_or_default(),
1675 )
1676 .await?;
1677 }
1678 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1679 let abs_path = op
1680 .uri
1681 .to_file_path()
1682 .map_err(|_| anyhow!("can't convert URI to path"))?;
1683 let options = op.options.map(Into::into).unwrap_or_default();
1684 if abs_path.ends_with("/") {
1685 fs.remove_dir(&abs_path, options).await?;
1686 } else {
1687 fs.remove_file(&abs_path, options).await?;
1688 }
1689 }
1690 lsp::DocumentChangeOperation::Edit(op) => {
1691 let buffer_to_edit = this
1692 .update(&mut cx, |this, cx| {
1693 this.open_local_buffer_from_lsp_path(
1694 op.text_document.uri,
1695 lang_name.clone(),
1696 lang_server.clone(),
1697 cx,
1698 )
1699 })
1700 .await?;
1701
1702 let edits = buffer_to_edit
1703 .update(&mut cx, |buffer, cx| {
1704 let edits = op.edits.into_iter().map(|edit| match edit {
1705 lsp::OneOf::Left(edit) => edit,
1706 lsp::OneOf::Right(edit) => edit.text_edit,
1707 });
1708 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1709 })
1710 .await?;
1711
1712 let transaction = buffer_to_edit.update(&mut cx, |buffer, cx| {
1713 buffer.finalize_last_transaction();
1714 buffer.start_transaction();
1715 for (range, text) in edits {
1716 buffer.edit([range], text, cx);
1717 }
1718 let transaction = if buffer.end_transaction(cx).is_some() {
1719 let transaction =
1720 buffer.finalize_last_transaction().unwrap().clone();
1721 if !push_to_history {
1722 buffer.forget_transaction(transaction.id);
1723 }
1724 Some(transaction)
1725 } else {
1726 None
1727 };
1728
1729 transaction
1730 });
1731 if let Some(transaction) = transaction {
1732 project_transaction.0.insert(buffer_to_edit, transaction);
1733 }
1734 }
1735 }
1736 }
1737
1738 Ok(project_transaction)
1739 })
1740 } else if let Some(project_id) = self.remote_id() {
1741 let client = self.client.clone();
1742 let request = proto::ApplyCodeAction {
1743 project_id,
1744 buffer_id: buffer_handle.read(cx).remote_id(),
1745 action: Some(language::proto::serialize_code_action(&action)),
1746 };
1747 cx.spawn(|this, mut cx| async move {
1748 let response = client
1749 .request(request)
1750 .await?
1751 .transaction
1752 .ok_or_else(|| anyhow!("missing transaction"))?;
1753 this.update(&mut cx, |this, cx| {
1754 this.deserialize_project_transaction(response, push_to_history, cx)
1755 })
1756 .await
1757 })
1758 } else {
1759 Task::ready(Err(anyhow!("project does not have a remote id")))
1760 }
1761 }
1762
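/// Returns the worktree containing `abs_path` and the path relative to it,
/// creating a new local worktree when none contains it.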
1763 pub fn find_or_create_local_worktree(
1764 &self,
1765 abs_path: impl AsRef<Path>,
1766 weak: bool,
1767 cx: &mut ModelContext<Self>,
1768 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1769 let abs_path = abs_path.as_ref();
1770 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1771 Task::ready(Ok((tree.clone(), relative_path.into())))
1772 } else {
1773 let worktree = self.create_local_worktree(abs_path, weak, cx);
1774 cx.foreground()
1775 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1776 }
1777 }
1778
1779 fn find_local_worktree(
1780 &self,
1781 abs_path: &Path,
1782 cx: &AppContext,
1783 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1784 for tree in self.worktrees(cx) {
1785 if let Some(relative_path) = tree
1786 .read(cx)
1787 .as_local()
1788 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1789 {
1790 return Some((tree.clone(), relative_path.into()));
1791 }
1792 }
1793 None
1794 }
1795
1796 pub fn is_shared(&self) -> bool {
1797 match &self.client_state {
1798 ProjectClientState::Local { is_shared, .. } => *is_shared,
1799 ProjectClientState::Remote { .. } => false,
1800 }
1801 }
1802
1803 fn create_local_worktree(
1804 &self,
1805 abs_path: impl AsRef<Path>,
1806 weak: bool,
1807 cx: &mut ModelContext<Self>,
1808 ) -> Task<Result<ModelHandle<Worktree>>> {
1809 let fs = self.fs.clone();
1810 let client = self.client.clone();
1811 let path = Arc::from(abs_path.as_ref());
1812 cx.spawn(|project, mut cx| async move {
1813 let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;
1814
1815 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
1816 project.add_worktree(&worktree, cx);
1817 (project.remote_id(), project.is_shared())
1818 });
1819
1820 if let Some(project_id) = remote_project_id {
1821 worktree
1822 .update(&mut cx, |worktree, cx| {
1823 worktree.as_local_mut().unwrap().register(project_id, cx)
1824 })
1825 .await?;
1826 if is_shared {
1827 worktree
1828 .update(&mut cx, |worktree, cx| {
1829 worktree.as_local_mut().unwrap().share(project_id, cx)
1830 })
1831 .await?;
1832 }
1833 }
1834
1835 Ok(worktree)
1836 })
1837 }
1838
1839 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1840 self.worktrees.retain(|worktree| {
1841 worktree
1842 .upgrade(cx)
1843 .map_or(false, |w| w.read(cx).id() != id)
1844 });
1845 cx.notify();
1846 }
1847
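/// Registers a worktree with the project, subscribing to its updates and
/// holding it weakly or strongly depending on how it was opened.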
1848 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
1849 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
1850 if worktree.read(cx).is_local() {
1851 cx.subscribe(&worktree, |this, worktree, _, cx| {
1852 this.update_local_worktree_buffers(worktree, cx);
1853 })
1854 .detach();
1855 }
1856
1857 let push_weak_handle = {
1858 let worktree = worktree.read(cx);
1859 worktree.is_local() && worktree.is_weak()
1860 };
1861 if push_weak_handle {
1862 cx.observe_release(&worktree, |this, cx| {
1863 this.worktrees
1864 .retain(|worktree| worktree.upgrade(cx).is_some());
1865 cx.notify();
1866 })
1867 .detach();
1868 self.worktrees
1869 .push(WorktreeHandle::Weak(worktree.downgrade()));
1870 } else {
1871 self.worktrees
1872 .push(WorktreeHandle::Strong(worktree.clone()));
1873 }
1874 cx.notify();
1875 }
1876
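/// When a local worktree's snapshot changes, refreshes the `File` associated
/// with each open buffer and notifies collaborators of the new file metadata.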
1877 fn update_local_worktree_buffers(
1878 &mut self,
1879 worktree_handle: ModelHandle<Worktree>,
1880 cx: &mut ModelContext<Self>,
1881 ) {
1882 let snapshot = worktree_handle.read(cx).snapshot();
1883 let mut buffers_to_delete = Vec::new();
1884 for (buffer_id, buffer) in &self.open_buffers {
1885 if let Some(buffer) = buffer.upgrade(cx) {
1886 buffer.update(cx, |buffer, cx| {
1887 if let Some(old_file) = File::from_dyn(buffer.file()) {
1888 if old_file.worktree != worktree_handle {
1889 return;
1890 }
1891
1892 let new_file = if let Some(entry) = old_file
1893 .entry_id
1894 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
1895 {
1896 File {
1897 is_local: true,
1898 entry_id: Some(entry.id),
1899 mtime: entry.mtime,
1900 path: entry.path.clone(),
1901 worktree: worktree_handle.clone(),
1902 }
1903 } else if let Some(entry) =
1904 snapshot.entry_for_path(old_file.path().as_ref())
1905 {
1906 File {
1907 is_local: true,
1908 entry_id: Some(entry.id),
1909 mtime: entry.mtime,
1910 path: entry.path.clone(),
1911 worktree: worktree_handle.clone(),
1912 }
1913 } else {
1914 File {
1915 is_local: true,
1916 entry_id: None,
1917 path: old_file.path().clone(),
1918 mtime: old_file.mtime(),
1919 worktree: worktree_handle.clone(),
1920 }
1921 };
1922
1923 if let Some(project_id) = self.remote_id() {
1924 self.client
1925 .send(proto::UpdateBufferFile {
1926 project_id,
1927 buffer_id: *buffer_id as u64,
1928 file: Some(new_file.to_proto()),
1929 })
1930 .log_err();
1931 }
1932 buffer.file_updated(Box::new(new_file), cx).detach();
1933 }
1934 });
1935 } else {
1936 buffers_to_delete.push(*buffer_id);
1937 }
1938 }
1939
1940 for buffer_id in buffers_to_delete {
1941 self.open_buffers.remove(&buffer_id);
1942 }
1943 }
1944
1945 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
1946 let new_active_entry = entry.and_then(|project_path| {
1947 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1948 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
1949 Some(ProjectEntry {
1950 worktree_id: project_path.worktree_id,
1951 entry_id: entry.id,
1952 })
1953 });
1954 if new_active_entry != self.active_entry {
1955 self.active_entry = new_active_entry;
1956 cx.emit(Event::ActiveEntryChanged(new_active_entry));
1957 }
1958 }
1959
1960 pub fn is_running_disk_based_diagnostics(&self) -> bool {
1961 self.language_servers_with_diagnostics_running > 0
1962 }
1963
1964 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
1965 let mut summary = DiagnosticSummary::default();
1966 for (_, path_summary) in self.diagnostic_summaries(cx) {
1967 summary.error_count += path_summary.error_count;
1968 summary.warning_count += path_summary.warning_count;
1969 summary.info_count += path_summary.info_count;
1970 summary.hint_count += path_summary.hint_count;
1971 }
1972 summary
1973 }
1974
1975 pub fn diagnostic_summaries<'a>(
1976 &'a self,
1977 cx: &'a AppContext,
1978 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
1979 self.worktrees(cx).flat_map(move |worktree| {
1980 let worktree = worktree.read(cx);
1981 let worktree_id = worktree.id();
1982 worktree
1983 .diagnostic_summaries()
1984 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
1985 })
1986 }
1987
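    // `DiskBasedDiagnosticsStarted` is emitted only when the first language server
    // begins running disk-based diagnostics, and `DiskBasedDiagnosticsFinished` only
    // when the last one finishes.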
1988 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
1989 self.language_servers_with_diagnostics_running += 1;
1990 if self.language_servers_with_diagnostics_running == 1 {
1991 cx.emit(Event::DiskBasedDiagnosticsStarted);
1992 }
1993 }
1994
1995 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
1996 cx.emit(Event::DiskBasedDiagnosticsUpdated);
1997 self.language_servers_with_diagnostics_running -= 1;
1998 if self.language_servers_with_diagnostics_running == 0 {
1999 cx.emit(Event::DiskBasedDiagnosticsFinished);
2000 }
2001 }
2002
2003 pub fn active_entry(&self) -> Option<ProjectEntry> {
2004 self.active_entry
2005 }
2006
2007 // RPC message handlers
2008
2009 async fn handle_unshare_project(
2010 this: ModelHandle<Self>,
2011 _: TypedEnvelope<proto::UnshareProject>,
2012 _: Arc<Client>,
2013 mut cx: AsyncAppContext,
2014 ) -> Result<()> {
2015 this.update(&mut cx, |this, cx| {
2016 if let ProjectClientState::Remote {
2017 sharing_has_stopped,
2018 ..
2019 } = &mut this.client_state
2020 {
2021 *sharing_has_stopped = true;
2022 this.collaborators.clear();
2023 cx.notify();
2024 } else {
2025 unreachable!()
2026 }
2027 });
2028
2029 Ok(())
2030 }
2031
2032 async fn handle_add_collaborator(
2033 this: ModelHandle<Self>,
2034 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2035 _: Arc<Client>,
2036 mut cx: AsyncAppContext,
2037 ) -> Result<()> {
2038 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2039 let collaborator = envelope
2040 .payload
2041 .collaborator
2042 .take()
2043 .ok_or_else(|| anyhow!("empty collaborator"))?;
2044
2045 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2046 this.update(&mut cx, |this, cx| {
2047 this.collaborators
2048 .insert(collaborator.peer_id, collaborator);
2049 cx.notify();
2050 });
2051
2052 Ok(())
2053 }
2054
2055 async fn handle_remove_collaborator(
2056 this: ModelHandle<Self>,
2057 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2058 _: Arc<Client>,
2059 mut cx: AsyncAppContext,
2060 ) -> Result<()> {
2061 this.update(&mut cx, |this, cx| {
2062 let peer_id = PeerId(envelope.payload.peer_id);
2063 let replica_id = this
2064 .collaborators
2065 .remove(&peer_id)
2066 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2067 .replica_id;
2068 this.shared_buffers.remove(&peer_id);
2069 for (_, buffer) in &this.open_buffers {
2070 if let Some(buffer) = buffer.upgrade(cx) {
2071 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2072 }
2073 }
2074 cx.notify();
2075 Ok(())
2076 })
2077 }
2078
2079 async fn handle_share_worktree(
2080 this: ModelHandle<Self>,
2081 envelope: TypedEnvelope<proto::ShareWorktree>,
2082 client: Arc<Client>,
2083 mut cx: AsyncAppContext,
2084 ) -> Result<()> {
2085 this.update(&mut cx, |this, cx| {
2086 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2087 let replica_id = this.replica_id();
2088 let worktree = envelope
2089 .payload
2090 .worktree
2091 .ok_or_else(|| anyhow!("invalid worktree"))?;
2092 let (worktree, load_task) =
2093 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2094 this.add_worktree(&worktree, cx);
2095 load_task.detach();
2096 Ok(())
2097 })
2098 }
2099
2100 async fn handle_unregister_worktree(
2101 this: ModelHandle<Self>,
2102 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2103 _: Arc<Client>,
2104 mut cx: AsyncAppContext,
2105 ) -> Result<()> {
2106 this.update(&mut cx, |this, cx| {
2107 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2108 this.remove_worktree(worktree_id, cx);
2109 Ok(())
2110 })
2111 }
2112
2113 async fn handle_update_worktree(
2114 this: ModelHandle<Self>,
2115 envelope: TypedEnvelope<proto::UpdateWorktree>,
2116 _: Arc<Client>,
2117 mut cx: AsyncAppContext,
2118 ) -> Result<()> {
2119 this.update(&mut cx, |this, cx| {
2120 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2121 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2122 worktree.update(cx, |worktree, _| {
2123 let worktree = worktree.as_remote_mut().unwrap();
2124 worktree.update_from_remote(envelope)
2125 })?;
2126 }
2127 Ok(())
2128 })
2129 }
2130
2131 async fn handle_update_diagnostic_summary(
2132 this: ModelHandle<Self>,
2133 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2134 _: Arc<Client>,
2135 mut cx: AsyncAppContext,
2136 ) -> Result<()> {
2137 this.update(&mut cx, |this, cx| {
2138 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2139 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2140 if let Some(summary) = envelope.payload.summary {
2141 let project_path = ProjectPath {
2142 worktree_id,
2143 path: Path::new(&summary.path).into(),
2144 };
2145 worktree.update(cx, |worktree, _| {
2146 worktree
2147 .as_remote_mut()
2148 .unwrap()
2149 .update_diagnostic_summary(project_path.path.clone(), &summary);
2150 });
2151 cx.emit(Event::DiagnosticsUpdated(project_path));
2152 }
2153 }
2154 Ok(())
2155 })
2156 }
2157
2158 async fn handle_disk_based_diagnostics_updating(
2159 this: ModelHandle<Self>,
2160 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2161 _: Arc<Client>,
2162 mut cx: AsyncAppContext,
2163 ) -> Result<()> {
2164 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2165 Ok(())
2166 }
2167
2168 async fn handle_disk_based_diagnostics_updated(
2169 this: ModelHandle<Self>,
2170 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2171 _: Arc<Client>,
2172 mut cx: AsyncAppContext,
2173 ) -> Result<()> {
2174 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2175 Ok(())
2176 }
2177
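    // Operations for a buffer that hasn't finished loading are queued in an
    // `OpenBuffer::Operations` entry; operations for loaded buffers are applied
    // immediately.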
2178 async fn handle_update_buffer(
2179 this: ModelHandle<Self>,
2180 envelope: TypedEnvelope<proto::UpdateBuffer>,
2181 _: Arc<Client>,
2182 mut cx: AsyncAppContext,
2183 ) -> Result<()> {
2184 this.update(&mut cx, |this, cx| {
2185 let payload = envelope.payload.clone();
2186 let buffer_id = payload.buffer_id;
2187 let ops = payload
2188 .operations
2189 .into_iter()
2190 .map(|op| language::proto::deserialize_operation(op))
2191 .collect::<Result<Vec<_>, _>>()?;
2192 let buffer = this
2193 .open_buffers
2194 .entry(buffer_id)
2195 .or_insert_with(|| OpenBuffer::Operations(Vec::new()));
2196 match buffer {
2197 OpenBuffer::Loaded(buffer) => {
2198 if let Some(buffer) = buffer.upgrade(cx) {
2199 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2200 }
2201 }
2202 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
2203 }
2204 Ok(())
2205 })
2206 }
2207
2208 async fn handle_update_buffer_file(
2209 this: ModelHandle<Self>,
2210 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2211 _: Arc<Client>,
2212 mut cx: AsyncAppContext,
2213 ) -> Result<()> {
2214 this.update(&mut cx, |this, cx| {
2215 let payload = envelope.payload.clone();
2216 let buffer_id = payload.buffer_id;
2217 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2218 let worktree = this
2219 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2220 .ok_or_else(|| anyhow!("no such worktree"))?;
2221 let file = File::from_proto(file, worktree.clone(), cx)?;
2222 let buffer = this
2223 .open_buffers
2224 .get_mut(&buffer_id)
2225 .and_then(|b| b.upgrade(cx))
2226 .ok_or_else(|| anyhow!("no such buffer"))?;
2227 buffer.update(cx, |buffer, cx| {
2228 buffer.file_updated(Box::new(file), cx).detach();
2229 });
2230 Ok(())
2231 })
2232 }
2233
2234 async fn handle_save_buffer(
2235 this: ModelHandle<Self>,
2236 envelope: TypedEnvelope<proto::SaveBuffer>,
2237 _: Arc<Client>,
2238 mut cx: AsyncAppContext,
2239 ) -> Result<proto::BufferSaved> {
2240 let buffer_id = envelope.payload.buffer_id;
2241 let sender_id = envelope.original_sender_id()?;
2242 let requested_version = envelope.payload.version.try_into()?;
2243
2244 let (project_id, buffer) = this.update(&mut cx, |this, _| {
2245 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2246 let buffer = this
2247 .shared_buffers
2248 .get(&sender_id)
2249 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2250 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2251 Ok::<_, anyhow::Error>((project_id, buffer))
2252 })?;
2253
2254 buffer
2255 .update(&mut cx, |buffer, _| {
2256 buffer.wait_for_version(requested_version)
2257 })
2258 .await;
2259
2260 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2261 Ok(proto::BufferSaved {
2262 project_id,
2263 buffer_id,
2264 version: (&saved_version).into(),
2265 mtime: Some(mtime.into()),
2266 })
2267 }
2268
2269 async fn handle_format_buffers(
2270 this: ModelHandle<Self>,
2271 envelope: TypedEnvelope<proto::FormatBuffers>,
2272 _: Arc<Client>,
2273 mut cx: AsyncAppContext,
2274 ) -> Result<proto::FormatBuffersResponse> {
2275 let sender_id = envelope.original_sender_id()?;
2276 let format = this.update(&mut cx, |this, cx| {
2277 let shared_buffers = this
2278 .shared_buffers
2279 .get(&sender_id)
2280 .ok_or_else(|| anyhow!("peer has no buffers"))?;
2281 let mut buffers = HashSet::default();
2282 for buffer_id in &envelope.payload.buffer_ids {
2283 buffers.insert(
2284 shared_buffers
2285 .get(buffer_id)
2286 .cloned()
2287 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2288 );
2289 }
2290 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2291 })?;
2292
2293 let project_transaction = format.await?;
2294 let project_transaction = this.update(&mut cx, |this, cx| {
2295 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2296 });
2297 Ok(proto::FormatBuffersResponse {
2298 transaction: Some(project_transaction),
2299 })
2300 }
2301
2302 async fn handle_get_completions(
2303 this: ModelHandle<Self>,
2304 envelope: TypedEnvelope<proto::GetCompletions>,
2305 _: Arc<Client>,
2306 mut cx: AsyncAppContext,
2307 ) -> Result<proto::GetCompletionsResponse> {
2308 let sender_id = envelope.original_sender_id()?;
2309 let position = envelope
2310 .payload
2311 .position
2312 .and_then(language::proto::deserialize_anchor)
2313 .ok_or_else(|| anyhow!("invalid position"))?;
2314 let version = clock::Global::from(envelope.payload.version);
2315 let buffer = this.read_with(&cx, |this, _| {
2316 this.shared_buffers
2317 .get(&sender_id)
2318 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2319 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2320 })?;
2321 buffer
2322 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
2323 .await;
2324 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2325 let completions = this
2326 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2327 .await?;
2328
2329 Ok(proto::GetCompletionsResponse {
2330 completions: completions
2331 .iter()
2332 .map(language::proto::serialize_completion)
2333 .collect(),
2334 version: (&version).into(),
2335 })
2336 }
2337
2338 async fn handle_apply_additional_edits_for_completion(
2339 this: ModelHandle<Self>,
2340 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2341 _: Arc<Client>,
2342 mut cx: AsyncAppContext,
2343 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2344 let sender_id = envelope.original_sender_id()?;
2345 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2346 let buffer = this
2347 .shared_buffers
2348 .get(&sender_id)
2349 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2350 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2351 let language = buffer.read(cx).language();
2352 let completion = language::proto::deserialize_completion(
2353 envelope
2354 .payload
2355 .completion
2356 .ok_or_else(|| anyhow!("invalid completion"))?,
2357 language,
2358 )?;
2359 Ok::<_, anyhow::Error>(
2360 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2361 )
2362 })?;
2363
2364 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2365 transaction: apply_additional_edits
2366 .await?
2367 .as_ref()
2368 .map(language::proto::serialize_transaction),
2369 })
2370 }
2371
2372 async fn handle_get_code_actions(
2373 this: ModelHandle<Self>,
2374 envelope: TypedEnvelope<proto::GetCodeActions>,
2375 _: Arc<Client>,
2376 mut cx: AsyncAppContext,
2377 ) -> Result<proto::GetCodeActionsResponse> {
2378 let sender_id = envelope.original_sender_id()?;
2379 let start = envelope
2380 .payload
2381 .start
2382 .and_then(language::proto::deserialize_anchor)
2383 .ok_or_else(|| anyhow!("invalid start"))?;
2384 let end = envelope
2385 .payload
2386 .end
2387 .and_then(language::proto::deserialize_anchor)
2388 .ok_or_else(|| anyhow!("invalid end"))?;
2389 let buffer = this.update(&mut cx, |this, _| {
2390 this.shared_buffers
2391 .get(&sender_id)
2392 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2393 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2394 })?;
2395 buffer
2396 .update(&mut cx, |buffer, _| {
2397 buffer.wait_for_version([start.timestamp, end.timestamp].into_iter().collect())
2398 })
2399 .await;
2400 let code_actions = this.update(&mut cx, |this, cx| {
2401 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2402 })?;
2403
2404 Ok(proto::GetCodeActionsResponse {
2405 actions: code_actions
2406 .await?
2407 .iter()
2408 .map(language::proto::serialize_code_action)
2409 .collect(),
2410 })
2411 }
2412
2413 async fn handle_apply_code_action(
2414 this: ModelHandle<Self>,
2415 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2416 _: Arc<Client>,
2417 mut cx: AsyncAppContext,
2418 ) -> Result<proto::ApplyCodeActionResponse> {
2419 let sender_id = envelope.original_sender_id()?;
2420 let action = language::proto::deserialize_code_action(
2421 envelope
2422 .payload
2423 .action
2424 .ok_or_else(|| anyhow!("invalid action"))?,
2425 )?;
2426 let apply_code_action = this.update(&mut cx, |this, cx| {
2427 let buffer = this
2428 .shared_buffers
2429 .get(&sender_id)
2430 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2431 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2432 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2433 })?;
2434
2435 let project_transaction = apply_code_action.await?;
2436 let project_transaction = this.update(&mut cx, |this, cx| {
2437 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2438 });
2439 Ok(proto::ApplyCodeActionResponse {
2440 transaction: Some(project_transaction),
2441 })
2442 }
2443
2444 async fn handle_get_definition(
2445 this: ModelHandle<Self>,
2446 envelope: TypedEnvelope<proto::GetDefinition>,
2447 _: Arc<Client>,
2448 mut cx: AsyncAppContext,
2449 ) -> Result<proto::GetDefinitionResponse> {
2450 let sender_id = envelope.original_sender_id()?;
2451 let position = envelope
2452 .payload
2453 .position
2454 .and_then(deserialize_anchor)
2455 .ok_or_else(|| anyhow!("invalid position"))?;
2456 let definitions = this.update(&mut cx, |this, cx| {
2457 let source_buffer = this
2458 .shared_buffers
2459 .get(&sender_id)
2460 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2461 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2462 if source_buffer.read(cx).can_resolve(&position) {
2463 Ok(this.definition(&source_buffer, position, cx))
2464 } else {
2465 Err(anyhow!("cannot resolve position"))
2466 }
2467 })?;
2468
2469 let definitions = definitions.await?;
2470
2471 this.update(&mut cx, |this, cx| {
2472 let mut response = proto::GetDefinitionResponse {
2473 definitions: Default::default(),
2474 };
2475 for definition in definitions {
2476 let buffer =
2477 this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx);
2478 response.definitions.push(proto::Definition {
2479 target_start: Some(serialize_anchor(&definition.target_range.start)),
2480 target_end: Some(serialize_anchor(&definition.target_range.end)),
2481 buffer: Some(buffer),
2482 });
2483 }
2484 Ok(response)
2485 })
2486 }
2487
2488 async fn handle_open_buffer(
2489 this: ModelHandle<Self>,
2490 envelope: TypedEnvelope<proto::OpenBuffer>,
2491 _: Arc<Client>,
2492 mut cx: AsyncAppContext,
2493 ) -> anyhow::Result<proto::OpenBufferResponse> {
2494 let peer_id = envelope.original_sender_id()?;
2495 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2496 let open_buffer = this.update(&mut cx, |this, cx| {
2497 this.open_buffer(
2498 ProjectPath {
2499 worktree_id,
2500 path: PathBuf::from(envelope.payload.path).into(),
2501 },
2502 cx,
2503 )
2504 });
2505
2506 let buffer = open_buffer.await?;
2507 this.update(&mut cx, |this, cx| {
2508 Ok(proto::OpenBufferResponse {
2509 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
2510 })
2511 })
2512 }
2513
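    // Converts a `ProjectTransaction` into its protobuf form, serializing each buffer
    // for the given peer alongside its transaction.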
2514 fn serialize_project_transaction_for_peer(
2515 &mut self,
2516 project_transaction: ProjectTransaction,
2517 peer_id: PeerId,
2518 cx: &AppContext,
2519 ) -> proto::ProjectTransaction {
2520 let mut serialized_transaction = proto::ProjectTransaction {
2521 buffers: Default::default(),
2522 transactions: Default::default(),
2523 };
2524 for (buffer, transaction) in project_transaction.0 {
2525 serialized_transaction
2526 .buffers
2527 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
2528 serialized_transaction
2529 .transactions
2530 .push(language::proto::serialize_transaction(&transaction));
2531 }
2532 serialized_transaction
2533 }
2534
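    // Reconstructs a `ProjectTransaction` from its protobuf form: resolves each buffer,
    // waits for the edits referenced by each transaction to arrive, and optionally
    // pushes the transactions onto the buffers' undo histories.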
2535 fn deserialize_project_transaction(
2536 &mut self,
2537 message: proto::ProjectTransaction,
2538 push_to_history: bool,
2539 cx: &mut ModelContext<Self>,
2540 ) -> Task<Result<ProjectTransaction>> {
2541 cx.spawn(|this, mut cx| async move {
2542 let mut project_transaction = ProjectTransaction::default();
2543 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
2544 let buffer = this
2545 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2546 .await?;
2547 let transaction = language::proto::deserialize_transaction(transaction)?;
2548 project_transaction.0.insert(buffer, transaction);
2549 }
2550 for (buffer, transaction) in &project_transaction.0 {
2551 buffer
2552 .update(&mut cx, |buffer, _| {
2553 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2554 })
2555 .await;
2556
2557 if push_to_history {
2558 buffer.update(&mut cx, |buffer, _| {
2559 buffer.push_transaction(transaction.clone(), Instant::now());
2560 });
2561 }
2562 }
2563
2564 Ok(project_transaction)
2565 })
2566 }
2567
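    // The first time a buffer is shared with a peer, its full state is sent and the
    // buffer is recorded in `shared_buffers`; subsequent messages refer to it by id.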
2568 fn serialize_buffer_for_peer(
2569 &mut self,
2570 buffer: &ModelHandle<Buffer>,
2571 peer_id: PeerId,
2572 cx: &AppContext,
2573 ) -> proto::Buffer {
2574 let buffer_id = buffer.read(cx).remote_id();
2575 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2576 match shared_buffers.entry(buffer_id) {
2577 hash_map::Entry::Occupied(_) => proto::Buffer {
2578 variant: Some(proto::buffer::Variant::Id(buffer_id)),
2579 },
2580 hash_map::Entry::Vacant(entry) => {
2581 entry.insert(buffer.clone());
2582 proto::Buffer {
2583 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
2584 }
2585 }
2586 }
2587 }
2588
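    // Resolves a protobuf buffer. The id variant waits, via the `opened_buffer`
    // channel, until a buffer with that id has been opened locally. The state variant
    // constructs a new buffer, resolving its file against the corresponding worktree,
    // and registers it with the project.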
2589 fn deserialize_buffer(
2590 &mut self,
2591 buffer: proto::Buffer,
2592 cx: &mut ModelContext<Self>,
2593 ) -> Task<Result<ModelHandle<Buffer>>> {
2594 let replica_id = self.replica_id();
2595
2596 let mut opened_buffer_tx = self.opened_buffer.clone();
2597 let mut opened_buffer_rx = self.opened_buffer.subscribe();
2598 cx.spawn(|this, mut cx| async move {
2599 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
2600 proto::buffer::Variant::Id(id) => {
2601 let buffer = loop {
2602 let buffer = this.read_with(&cx, |this, cx| {
2603 this.open_buffers
2604 .get(&id)
2605 .and_then(|buffer| buffer.upgrade(cx))
2606 });
2607 if let Some(buffer) = buffer {
2608 break buffer;
2609 }
2610 opened_buffer_rx
2611 .recv()
2612 .await
2613 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
2614 };
2615 Ok(buffer)
2616 }
2617 proto::buffer::Variant::State(mut buffer) => {
2618 let mut buffer_worktree = None;
2619 let mut buffer_file = None;
2620 if let Some(file) = buffer.file.take() {
2621 this.read_with(&cx, |this, cx| {
2622 let worktree_id = WorktreeId::from_proto(file.worktree_id);
2623 let worktree =
2624 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
2625 anyhow!("no worktree found for id {}", file.worktree_id)
2626 })?;
2627 buffer_file =
2628 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
2629 as Box<dyn language::File>);
2630 buffer_worktree = Some(worktree);
2631 Ok::<_, anyhow::Error>(())
2632 })?;
2633 }
2634
2635 let buffer = cx.add_model(|cx| {
2636 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
2637 });
2638 this.update(&mut cx, |this, cx| {
2639 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
2640 })?;
2641
2642 let _ = opened_buffer_tx.send(()).await;
2643 Ok(buffer)
2644 }
2645 }
2646 })
2647 }
2648
2649 async fn handle_close_buffer(
2650 this: ModelHandle<Self>,
2651 envelope: TypedEnvelope<proto::CloseBuffer>,
2652 _: Arc<Client>,
2653 mut cx: AsyncAppContext,
2654 ) -> anyhow::Result<()> {
2655 this.update(&mut cx, |this, cx| {
2656 if let Some(shared_buffers) =
2657 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
2658 {
2659 shared_buffers.remove(&envelope.payload.buffer_id);
2660 cx.notify();
2661 }
2662 Ok(())
2663 })
2664 }
2665
2666 async fn handle_buffer_saved(
2667 this: ModelHandle<Self>,
2668 envelope: TypedEnvelope<proto::BufferSaved>,
2669 _: Arc<Client>,
2670 mut cx: AsyncAppContext,
2671 ) -> Result<()> {
2672 let version = envelope.payload.version.try_into()?;
2673 let mtime = envelope
2674 .payload
2675 .mtime
2676 .ok_or_else(|| anyhow!("missing mtime"))?
2677 .into();
2678
2679 this.update(&mut cx, |this, cx| {
2680 let buffer = this
2681 .open_buffers
2682 .get(&envelope.payload.buffer_id)
2683 .and_then(|buffer| buffer.upgrade(cx));
2684 if let Some(buffer) = buffer {
2685 buffer.update(cx, |buffer, cx| {
2686 buffer.did_save(version, mtime, None, cx);
2687 });
2688 }
2689 Ok(())
2690 })
2691 }
2692
2693 async fn handle_buffer_reloaded(
2694 this: ModelHandle<Self>,
2695 envelope: TypedEnvelope<proto::BufferReloaded>,
2696 _: Arc<Client>,
2697 mut cx: AsyncAppContext,
2698 ) -> Result<()> {
2699 let payload = envelope.payload.clone();
2700 let version = payload.version.try_into()?;
2701 let mtime = payload
2702 .mtime
2703 .ok_or_else(|| anyhow!("missing mtime"))?
2704 .into();
2705 this.update(&mut cx, |this, cx| {
2706 let buffer = this
2707 .open_buffers
2708 .get(&payload.buffer_id)
2709 .and_then(|buffer| buffer.upgrade(cx));
2710 if let Some(buffer) = buffer {
2711 buffer.update(cx, |buffer, cx| {
2712 buffer.did_reload(version, mtime, cx);
2713 });
2714 }
2715 Ok(())
2716 })
2717 }
2718
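    // Fuzzy-matches `query` against the paths of all non-weak worktrees, running the
    // search on the background thread pool.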
2719 pub fn match_paths<'a>(
2720 &self,
2721 query: &'a str,
2722 include_ignored: bool,
2723 smart_case: bool,
2724 max_results: usize,
2725 cancel_flag: &'a AtomicBool,
2726 cx: &AppContext,
2727 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
2728 let worktrees = self
2729 .worktrees(cx)
2730 .filter(|worktree| !worktree.read(cx).is_weak())
2731 .collect::<Vec<_>>();
2732 let include_root_name = worktrees.len() > 1;
2733 let candidate_sets = worktrees
2734 .into_iter()
2735 .map(|worktree| CandidateSet {
2736 snapshot: worktree.read(cx).snapshot(),
2737 include_ignored,
2738 include_root_name,
2739 })
2740 .collect::<Vec<_>>();
2741
2742 let background = cx.background().clone();
2743 async move {
2744 fuzzy::match_paths(
2745 candidate_sets.as_slice(),
2746 query,
2747 smart_case,
2748 max_results,
2749 cancel_flag,
2750 background,
2751 )
2752 .await
2753 }
2754 }
2755}
2756
2757impl WorktreeHandle {
2758 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2759 match self {
2760 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2761 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2762 }
2763 }
2764}
2765
2766impl OpenBuffer {
2767 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
2768 match self {
2769 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
2770 OpenBuffer::Operations(_) => None,
2771 }
2772 }
2773}
2774
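// Adapts a worktree snapshot to the fuzzy matcher's `PathMatchCandidateSet` interface.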
2775struct CandidateSet {
2776 snapshot: Snapshot,
2777 include_ignored: bool,
2778 include_root_name: bool,
2779}
2780
2781impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
2782 type Candidates = CandidateSetIter<'a>;
2783
2784 fn id(&self) -> usize {
2785 self.snapshot.id().to_usize()
2786 }
2787
2788 fn len(&self) -> usize {
2789 if self.include_ignored {
2790 self.snapshot.file_count()
2791 } else {
2792 self.snapshot.visible_file_count()
2793 }
2794 }
2795
2796 fn prefix(&self) -> Arc<str> {
2797 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
2798 self.snapshot.root_name().into()
2799 } else if self.include_root_name {
2800 format!("{}/", self.snapshot.root_name()).into()
2801 } else {
2802 "".into()
2803 }
2804 }
2805
2806 fn candidates(&'a self, start: usize) -> Self::Candidates {
2807 CandidateSetIter {
2808 traversal: self.snapshot.files(self.include_ignored, start),
2809 }
2810 }
2811}
2812
2813struct CandidateSetIter<'a> {
2814 traversal: Traversal<'a>,
2815}
2816
2817impl<'a> Iterator for CandidateSetIter<'a> {
2818 type Item = PathMatchCandidate<'a>;
2819
2820 fn next(&mut self) -> Option<Self::Item> {
2821 self.traversal.next().map(|entry| {
2822 if let EntryKind::File(char_bag) = entry.kind {
2823 PathMatchCandidate {
2824 path: &entry.path,
2825 char_bag,
2826 }
2827 } else {
2828 unreachable!()
2829 }
2830 })
2831 }
2832}
2833
2834impl Entity for Project {
2835 type Event = Event;
2836
2837 fn release(&mut self, _: &mut gpui::MutableAppContext) {
2838 match &self.client_state {
2839 ProjectClientState::Local { remote_id_rx, .. } => {
2840 if let Some(project_id) = *remote_id_rx.borrow() {
2841 self.client
2842 .send(proto::UnregisterProject { project_id })
2843 .log_err();
2844 }
2845 }
2846 ProjectClientState::Remote { remote_id, .. } => {
2847 self.client
2848 .send(proto::LeaveProject {
2849 project_id: *remote_id,
2850 })
2851 .log_err();
2852 }
2853 }
2854 }
2855
2856 fn app_will_quit(
2857 &mut self,
2858 _: &mut MutableAppContext,
2859 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
2860 use futures::FutureExt;
2861
2862 let shutdown_futures = self
2863 .language_servers
2864 .drain()
2865 .filter_map(|(_, server)| server.shutdown())
2866 .collect::<Vec<_>>();
2867 Some(
2868 async move {
2869 futures::future::join_all(shutdown_futures).await;
2870 }
2871 .boxed(),
2872 )
2873 }
2874}
2875
2876impl Collaborator {
2877 fn from_proto(
2878 message: proto::Collaborator,
2879 user_store: &ModelHandle<UserStore>,
2880 cx: &mut AsyncAppContext,
2881 ) -> impl Future<Output = Result<Self>> {
2882 let user = user_store.update(cx, |user_store, cx| {
2883 user_store.fetch_user(message.user_id, cx)
2884 });
2885
2886 async move {
2887 Ok(Self {
2888 peer_id: PeerId(message.peer_id),
2889 user: user.await?,
2890 replica_id: message.replica_id as ReplicaId,
2891 })
2892 }
2893 }
2894}
2895
2896impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
2897 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
2898 Self {
2899 worktree_id,
2900 path: path.as_ref().into(),
2901 }
2902 }
2903}
2904
2905impl From<lsp::CreateFileOptions> for fs::CreateOptions {
2906 fn from(options: lsp::CreateFileOptions) -> Self {
2907 Self {
2908 overwrite: options.overwrite.unwrap_or(false),
2909 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2910 }
2911 }
2912}
2913
2914impl From<lsp::RenameFileOptions> for fs::RenameOptions {
2915 fn from(options: lsp::RenameFileOptions) -> Self {
2916 Self {
2917 overwrite: options.overwrite.unwrap_or(false),
2918 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2919 }
2920 }
2921}
2922
2923impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
2924 fn from(options: lsp::DeleteFileOptions) -> Self {
2925 Self {
2926 recursive: options.recursive.unwrap_or(false),
2927 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
2928 }
2929 }
2930}
2931
2932#[cfg(test)]
2933mod tests {
2934 use super::{Event, *};
2935 use client::test::FakeHttpClient;
2936 use fs::RealFs;
2937 use futures::StreamExt;
2938 use gpui::test::subscribe;
2939 use language::{
2940 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageRegistry,
2941 LanguageServerConfig, Point,
2942 };
2943 use lsp::Url;
2944 use serde_json::json;
2945 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
2946 use unindent::Unindent as _;
2947 use util::test::temp_tree;
2948 use worktree::WorktreeHandle as _;
2949
2950 #[gpui::test]
2951 async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
2952 let dir = temp_tree(json!({
2953 "root": {
2954 "apple": "",
2955 "banana": {
2956 "carrot": {
2957 "date": "",
2958 "endive": "",
2959 }
2960 },
2961 "fennel": {
2962 "grape": "",
2963 }
2964 }
2965 }));
2966
2967 let root_link_path = dir.path().join("root_link");
2968 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
2969 unix::fs::symlink(
2970 &dir.path().join("root/fennel"),
2971 &dir.path().join("root/finnochio"),
2972 )
2973 .unwrap();
2974
2975 let project = Project::test(Arc::new(RealFs), &mut cx);
2976
2977 let (tree, _) = project
2978 .update(&mut cx, |project, cx| {
2979 project.find_or_create_local_worktree(&root_link_path, false, cx)
2980 })
2981 .await
2982 .unwrap();
2983
2984 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2985 .await;
2986 cx.read(|cx| {
2987 let tree = tree.read(cx);
2988 assert_eq!(tree.file_count(), 5);
2989 assert_eq!(
2990 tree.inode_for_path("fennel/grape"),
2991 tree.inode_for_path("finnochio/grape")
2992 );
2993 });
2994
2995 let cancel_flag = Default::default();
2996 let results = project
2997 .read_with(&cx, |project, cx| {
2998 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
2999 })
3000 .await;
3001 assert_eq!(
3002 results
3003 .into_iter()
3004 .map(|result| result.path)
3005 .collect::<Vec<Arc<Path>>>(),
3006 vec![
3007 PathBuf::from("banana/carrot/date").into(),
3008 PathBuf::from("banana/carrot/endive").into(),
3009 ]
3010 );
3011 }
3012
3013 #[gpui::test]
3014 async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
3015 let (language_server_config, mut fake_server) = LanguageServerConfig::fake(&cx).await;
3016 let progress_token = language_server_config
3017 .disk_based_diagnostics_progress_token
3018 .clone()
3019 .unwrap();
3020
3021 let mut languages = LanguageRegistry::new();
3022 languages.add(Arc::new(Language::new(
3023 LanguageConfig {
3024 name: "Rust".to_string(),
3025 path_suffixes: vec!["rs".to_string()],
3026 language_server: Some(language_server_config),
3027 ..Default::default()
3028 },
3029 Some(tree_sitter_rust::language()),
3030 )));
3031
3032 let dir = temp_tree(json!({
3033 "a.rs": "fn a() { A }",
3034 "b.rs": "const y: i32 = 1",
3035 }));
3036
3037 let http_client = FakeHttpClient::with_404_response();
3038 let client = Client::new(http_client.clone());
3039 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
3040
3041 let project = cx.update(|cx| {
3042 Project::local(
3043 client,
3044 user_store,
3045 Arc::new(languages),
3046 Arc::new(RealFs),
3047 cx,
3048 )
3049 });
3050
3051 let (tree, _) = project
3052 .update(&mut cx, |project, cx| {
3053 project.find_or_create_local_worktree(dir.path(), false, cx)
3054 })
3055 .await
3056 .unwrap();
3057 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3058
3059 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3060 .await;
3061
3062 // Cause worktree to start the fake language server
3063 let _buffer = project
3064 .update(&mut cx, |project, cx| {
3065 project.open_buffer(
3066 ProjectPath {
3067 worktree_id,
3068 path: Path::new("b.rs").into(),
3069 },
3070 cx,
3071 )
3072 })
3073 .await
3074 .unwrap();
3075
3076 let mut events = subscribe(&project, &mut cx);
3077
3078 fake_server.start_progress(&progress_token).await;
3079 assert_eq!(
3080 events.next().await.unwrap(),
3081 Event::DiskBasedDiagnosticsStarted
3082 );
3083
3084 fake_server.start_progress(&progress_token).await;
3085 fake_server.end_progress(&progress_token).await;
3086 fake_server.start_progress(&progress_token).await;
3087
3088 fake_server
3089 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3090 uri: Url::from_file_path(dir.path().join("a.rs")).unwrap(),
3091 version: None,
3092 diagnostics: vec![lsp::Diagnostic {
3093 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3094 severity: Some(lsp::DiagnosticSeverity::ERROR),
3095 message: "undefined variable 'A'".to_string(),
3096 ..Default::default()
3097 }],
3098 })
3099 .await;
3100 assert_eq!(
3101 events.next().await.unwrap(),
3102 Event::DiagnosticsUpdated(ProjectPath {
3103 worktree_id,
3104 path: Arc::from(Path::new("a.rs"))
3105 })
3106 );
3107
3108 fake_server.end_progress(&progress_token).await;
3109 fake_server.end_progress(&progress_token).await;
3110 assert_eq!(
3111 events.next().await.unwrap(),
3112 Event::DiskBasedDiagnosticsUpdated
3113 );
3114 assert_eq!(
3115 events.next().await.unwrap(),
3116 Event::DiskBasedDiagnosticsFinished
3117 );
3118
3119 let buffer = project
3120 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3121 .await
3122 .unwrap();
3123
3124 buffer.read_with(&cx, |buffer, _| {
3125 let snapshot = buffer.snapshot();
3126 let diagnostics = snapshot
3127 .diagnostics_in_range::<_, Point>(0..buffer.len())
3128 .collect::<Vec<_>>();
3129 assert_eq!(
3130 diagnostics,
3131 &[DiagnosticEntry {
3132 range: Point::new(0, 9)..Point::new(0, 10),
3133 diagnostic: Diagnostic {
3134 severity: lsp::DiagnosticSeverity::ERROR,
3135 message: "undefined variable 'A'".to_string(),
3136 group_id: 0,
3137 is_primary: true,
3138 ..Default::default()
3139 }
3140 }]
3141 )
3142 });
3143 }
3144
3145 #[gpui::test]
3146 async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
3147 let dir = temp_tree(json!({
3148 "root": {
3149 "dir1": {},
3150 "dir2": {
3151 "dir3": {}
3152 }
3153 }
3154 }));
3155
3156 let project = Project::test(Arc::new(RealFs), &mut cx);
3157 let (tree, _) = project
3158 .update(&mut cx, |project, cx| {
3159 project.find_or_create_local_worktree(&dir.path(), false, cx)
3160 })
3161 .await
3162 .unwrap();
3163
3164 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3165 .await;
3166
3167 let cancel_flag = Default::default();
3168 let results = project
3169 .read_with(&cx, |project, cx| {
3170 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3171 })
3172 .await;
3173
3174 assert!(results.is_empty());
3175 }
3176
3177 #[gpui::test]
3178 async fn test_definition(mut cx: gpui::TestAppContext) {
3179 let (language_server_config, mut fake_server) = LanguageServerConfig::fake(&cx).await;
3180
3181 let mut languages = LanguageRegistry::new();
3182 languages.add(Arc::new(Language::new(
3183 LanguageConfig {
3184 name: "Rust".to_string(),
3185 path_suffixes: vec!["rs".to_string()],
3186 language_server: Some(language_server_config),
3187 ..Default::default()
3188 },
3189 Some(tree_sitter_rust::language()),
3190 )));
3191
3192 let dir = temp_tree(json!({
3193 "a.rs": "const fn a() { A }",
3194 "b.rs": "const y: i32 = crate::a()",
3195 }));
3196 let dir_path = dir.path().to_path_buf();
3197
3198 let http_client = FakeHttpClient::with_404_response();
3199 let client = Client::new(http_client.clone());
3200 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
3201 let project = cx.update(|cx| {
3202 Project::local(
3203 client,
3204 user_store,
3205 Arc::new(languages),
3206 Arc::new(RealFs),
3207 cx,
3208 )
3209 });
3210
3211 let (tree, _) = project
3212 .update(&mut cx, |project, cx| {
3213 project.find_or_create_local_worktree(dir.path().join("b.rs"), false, cx)
3214 })
3215 .await
3216 .unwrap();
3217 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3218 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3219 .await;
3220
3221 let buffer = project
3222 .update(&mut cx, |project, cx| {
3223 project.open_buffer(
3224 ProjectPath {
3225 worktree_id,
3226 path: Path::new("").into(),
3227 },
3228 cx,
3229 )
3230 })
3231 .await
3232 .unwrap();
3233
3234 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params| {
3235 let params = params.text_document_position_params;
3236 assert_eq!(
3237 params.text_document.uri.to_file_path().unwrap(),
3238 dir_path.join("b.rs")
3239 );
3240 assert_eq!(params.position, lsp::Position::new(0, 22));
3241
3242 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3243 lsp::Url::from_file_path(dir_path.join("a.rs")).unwrap(),
3244 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3245 )))
3246 });
3247
3248 let mut definitions = project
3249 .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
3250 .await
3251 .unwrap();
3252
3253 assert_eq!(definitions.len(), 1);
3254 let definition = definitions.pop().unwrap();
3255 cx.update(|cx| {
3256 let target_buffer = definition.target_buffer.read(cx);
3257 assert_eq!(
3258 target_buffer
3259 .file()
3260 .unwrap()
3261 .as_local()
3262 .unwrap()
3263 .abs_path(cx),
3264 dir.path().join("a.rs")
3265 );
3266 assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
3267 assert_eq!(
3268 list_worktrees(&project, cx),
3269 [
3270 (dir.path().join("b.rs"), false),
3271 (dir.path().join("a.rs"), true)
3272 ]
3273 );
3274
3275 drop(definition);
3276 });
3277 cx.read(|cx| {
3278 assert_eq!(
3279 list_worktrees(&project, cx),
3280 [(dir.path().join("b.rs"), false)]
3281 );
3282 });
3283
3284 fn list_worktrees(project: &ModelHandle<Project>, cx: &AppContext) -> Vec<(PathBuf, bool)> {
3285 project
3286 .read(cx)
3287 .worktrees(cx)
3288 .map(|worktree| {
3289 let worktree = worktree.read(cx);
3290 (
3291 worktree.as_local().unwrap().abs_path().to_path_buf(),
3292 worktree.is_weak(),
3293 )
3294 })
3295 .collect::<Vec<_>>()
3296 }
3297 }
3298
3299 #[gpui::test]
3300 async fn test_save_file(mut cx: gpui::TestAppContext) {
3301 let fs = Arc::new(FakeFs::new(cx.background()));
3302 fs.insert_tree(
3303 "/dir",
3304 json!({
3305 "file1": "the old contents",
3306 }),
3307 )
3308 .await;
3309
3310 let project = Project::test(fs.clone(), &mut cx);
3311 let worktree_id = project
3312 .update(&mut cx, |p, cx| {
3313 p.find_or_create_local_worktree("/dir", false, cx)
3314 })
3315 .await
3316 .unwrap()
3317 .0
3318 .read_with(&cx, |tree, _| tree.id());
3319
3320 let buffer = project
3321 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3322 .await
3323 .unwrap();
3324 buffer
3325 .update(&mut cx, |buffer, cx| {
3326 assert_eq!(buffer.text(), "the old contents");
3327 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3328 buffer.save(cx)
3329 })
3330 .await
3331 .unwrap();
3332
3333 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3334 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3335 }
3336
3337 #[gpui::test]
3338 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
3339 let fs = Arc::new(FakeFs::new(cx.background()));
3340 fs.insert_tree(
3341 "/dir",
3342 json!({
3343 "file1": "the old contents",
3344 }),
3345 )
3346 .await;
3347
3348 let project = Project::test(fs.clone(), &mut cx);
3349 let worktree_id = project
3350 .update(&mut cx, |p, cx| {
3351 p.find_or_create_local_worktree("/dir/file1", false, cx)
3352 })
3353 .await
3354 .unwrap()
3355 .0
3356 .read_with(&cx, |tree, _| tree.id());
3357
3358 let buffer = project
3359 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3360 .await
3361 .unwrap();
3362 buffer
3363 .update(&mut cx, |buffer, cx| {
3364 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3365 buffer.save(cx)
3366 })
3367 .await
3368 .unwrap();
3369
3370 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3371 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3372 }
3373
3374 #[gpui::test(retries = 5)]
3375 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3376 let dir = temp_tree(json!({
3377 "a": {
3378 "file1": "",
3379 "file2": "",
3380 "file3": "",
3381 },
3382 "b": {
3383 "c": {
3384 "file4": "",
3385 "file5": "",
3386 }
3387 }
3388 }));
3389
3390 let project = Project::test(Arc::new(RealFs), &mut cx);
3391 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3392
3393 let (tree, _) = project
3394 .update(&mut cx, |p, cx| {
3395 p.find_or_create_local_worktree(dir.path(), false, cx)
3396 })
3397 .await
3398 .unwrap();
3399 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3400
3401 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3402 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3403 async move { buffer.await.unwrap() }
3404 };
3405 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3406 tree.read_with(cx, |tree, _| {
3407 tree.entry_for_path(path)
3408 .expect(&format!("no entry for path {}", path))
3409 .id
3410 })
3411 };
3412
3413 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3414 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3415 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3416 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3417
3418 let file2_id = id_for_path("a/file2", &cx);
3419 let file3_id = id_for_path("a/file3", &cx);
3420 let file4_id = id_for_path("b/c/file4", &cx);
3421
3422 // Wait for the initial scan.
3423 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3424 .await;
3425
3426 // Create a remote copy of this worktree.
3427 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
3428 let (remote, load_task) = cx.update(|cx| {
3429 Worktree::remote(
3430 1,
3431 1,
3432 initial_snapshot.to_proto(&Default::default(), Default::default()),
3433 rpc.clone(),
3434 cx,
3435 )
3436 });
3437 load_task.await;
3438
3439 cx.read(|cx| {
3440 assert!(!buffer2.read(cx).is_dirty());
3441 assert!(!buffer3.read(cx).is_dirty());
3442 assert!(!buffer4.read(cx).is_dirty());
3443 assert!(!buffer5.read(cx).is_dirty());
3444 });
3445
3446 // Rename and delete files and directories.
3447 tree.flush_fs_events(&cx).await;
3448 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3449 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3450 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3451 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3452 tree.flush_fs_events(&cx).await;
3453
3454 let expected_paths = vec![
3455 "a",
3456 "a/file1",
3457 "a/file2.new",
3458 "b",
3459 "d",
3460 "d/file3",
3461 "d/file4",
3462 ];
3463
3464 cx.read(|app| {
3465 assert_eq!(
3466 tree.read(app)
3467 .paths()
3468 .map(|p| p.to_str().unwrap())
3469 .collect::<Vec<_>>(),
3470 expected_paths
3471 );
3472
3473 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3474 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3475 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3476
3477 assert_eq!(
3478 buffer2.read(app).file().unwrap().path().as_ref(),
3479 Path::new("a/file2.new")
3480 );
3481 assert_eq!(
3482 buffer3.read(app).file().unwrap().path().as_ref(),
3483 Path::new("d/file3")
3484 );
3485 assert_eq!(
3486 buffer4.read(app).file().unwrap().path().as_ref(),
3487 Path::new("d/file4")
3488 );
3489 assert_eq!(
3490 buffer5.read(app).file().unwrap().path().as_ref(),
3491 Path::new("b/c/file5")
3492 );
3493
3494 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3495 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3496 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3497 assert!(buffer5.read(app).file().unwrap().is_deleted());
3498 });
3499
3500 // Update the remote worktree. Check that it becomes consistent with the
3501 // local worktree.
3502 remote.update(&mut cx, |remote, cx| {
3503 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
3504 &initial_snapshot,
3505 1,
3506 1,
3507 0,
3508 true,
3509 );
3510 remote
3511 .as_remote_mut()
3512 .unwrap()
3513 .snapshot
3514 .apply_remote_update(update_message)
3515 .unwrap();
3516
3517 assert_eq!(
3518 remote
3519 .paths()
3520 .map(|p| p.to_str().unwrap())
3521 .collect::<Vec<_>>(),
3522 expected_paths
3523 );
3524 });
3525 }
3526
3527 #[gpui::test]
3528 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3529 let fs = Arc::new(FakeFs::new(cx.background()));
3530 fs.insert_tree(
3531 "/the-dir",
3532 json!({
3533 "a.txt": "a-contents",
3534 "b.txt": "b-contents",
3535 }),
3536 )
3537 .await;
3538
3539 let project = Project::test(fs.clone(), &mut cx);
3540 let worktree_id = project
3541 .update(&mut cx, |p, cx| {
3542 p.find_or_create_local_worktree("/the-dir", false, cx)
3543 })
3544 .await
3545 .unwrap()
3546 .0
3547 .read_with(&cx, |tree, _| tree.id());
3548
3549 // Spawn multiple tasks to open paths, repeating some paths.
3550 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3551 (
3552 p.open_buffer((worktree_id, "a.txt"), cx),
3553 p.open_buffer((worktree_id, "b.txt"), cx),
3554 p.open_buffer((worktree_id, "a.txt"), cx),
3555 )
3556 });
3557
3558 let buffer_a_1 = buffer_a_1.await.unwrap();
3559 let buffer_a_2 = buffer_a_2.await.unwrap();
3560 let buffer_b = buffer_b.await.unwrap();
3561 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3562 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3563
3564 // There is only one buffer per path.
3565 let buffer_a_id = buffer_a_1.id();
3566 assert_eq!(buffer_a_2.id(), buffer_a_id);
3567
3568 // Open the same path again while it is still open.
3569 drop(buffer_a_1);
3570 let buffer_a_3 = project
3571 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3572 .await
3573 .unwrap();
3574
3575 // There's still only one buffer per path.
3576 assert_eq!(buffer_a_3.id(), buffer_a_id);
3577 }
3578
3579 #[gpui::test]
3580 async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
3581 use std::fs;
3582
3583 let dir = temp_tree(json!({
3584 "file1": "abc",
3585 "file2": "def",
3586 "file3": "ghi",
3587 }));
3588
3589 let project = Project::test(Arc::new(RealFs), &mut cx);
3590 let (worktree, _) = project
3591 .update(&mut cx, |p, cx| {
3592 p.find_or_create_local_worktree(dir.path(), false, cx)
3593 })
3594 .await
3595 .unwrap();
3596 let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
3597
3598 worktree.flush_fs_events(&cx).await;
3599 worktree
3600 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3601 .await;
3602
3603 let buffer1 = project
3604 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3605 .await
3606 .unwrap();
3607 let events = Rc::new(RefCell::new(Vec::new()));
3608
3609 // initially, the buffer isn't dirty.
3610 buffer1.update(&mut cx, |buffer, cx| {
3611 cx.subscribe(&buffer1, {
3612 let events = events.clone();
3613 move |_, _, event, _| events.borrow_mut().push(event.clone())
3614 })
3615 .detach();
3616
3617 assert!(!buffer.is_dirty());
3618 assert!(events.borrow().is_empty());
3619
3620 buffer.edit(vec![1..2], "", cx);
3621 });
3622
3623 // after the first edit, the buffer is dirty, and emits a dirtied event.
3624 buffer1.update(&mut cx, |buffer, cx| {
3625 assert!(buffer.text() == "ac");
3626 assert!(buffer.is_dirty());
3627 assert_eq!(
3628 *events.borrow(),
3629 &[language::Event::Edited, language::Event::Dirtied]
3630 );
3631 events.borrow_mut().clear();
3632 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
3633 });
3634
3635 // after saving, the buffer is not dirty, and emits a saved event.
3636 buffer1.update(&mut cx, |buffer, cx| {
3637 assert!(!buffer.is_dirty());
3638 assert_eq!(*events.borrow(), &[language::Event::Saved]);
3639 events.borrow_mut().clear();
3640
3641 buffer.edit(vec![1..1], "B", cx);
3642 buffer.edit(vec![2..2], "D", cx);
3643 });
3644
3645 // after editing again, the buffer is dirty, and emits another dirty event.
3646 buffer1.update(&mut cx, |buffer, cx| {
3647 assert!(buffer.text() == "aBDc");
3648 assert!(buffer.is_dirty());
3649 assert_eq!(
3650 *events.borrow(),
3651 &[
3652 language::Event::Edited,
3653 language::Event::Dirtied,
3654 language::Event::Edited,
3655 ],
3656 );
3657 events.borrow_mut().clear();
3658
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
3661 buffer.edit([1..3], "", cx);
3662 assert!(buffer.text() == "ac");
3663 assert!(buffer.is_dirty());
3664 });
3665
3666 assert_eq!(*events.borrow(), &[language::Event::Edited]);
3667
3668 // When a file is deleted, the buffer is considered dirty.
3669 let events = Rc::new(RefCell::new(Vec::new()));
3670 let buffer2 = project
3671 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
3672 .await
3673 .unwrap();
3674 buffer2.update(&mut cx, |_, cx| {
3675 cx.subscribe(&buffer2, {
3676 let events = events.clone();
3677 move |_, _, event, _| events.borrow_mut().push(event.clone())
3678 })
3679 .detach();
3680 });
3681
3682 fs::remove_file(dir.path().join("file2")).unwrap();
3683 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
3684 assert_eq!(
3685 *events.borrow(),
3686 &[language::Event::Dirtied, language::Event::FileHandleChanged]
3687 );
3688
3689 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3690 let events = Rc::new(RefCell::new(Vec::new()));
3691 let buffer3 = project
3692 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
3693 .await
3694 .unwrap();
3695 buffer3.update(&mut cx, |_, cx| {
3696 cx.subscribe(&buffer3, {
3697 let events = events.clone();
3698 move |_, _, event, _| events.borrow_mut().push(event.clone())
3699 })
3700 .detach();
3701 });
3702
3703 worktree.flush_fs_events(&cx).await;
3704 buffer3.update(&mut cx, |buffer, cx| {
3705 buffer.edit(Some(0..0), "x", cx);
3706 });
3707 events.borrow_mut().clear();
3708 fs::remove_file(dir.path().join("file3")).unwrap();
3709 buffer3
3710 .condition(&cx, |_, _| !events.borrow().is_empty())
3711 .await;
3712 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
3713 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3714 }
3715
3716 #[gpui::test]
3717 async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
3718 use std::fs;
3719
3720 let initial_contents = "aaa\nbbbbb\nc\n";
3721 let dir = temp_tree(json!({ "the-file": initial_contents }));
3722
3723 let project = Project::test(Arc::new(RealFs), &mut cx);
3724 let (worktree, _) = project
3725 .update(&mut cx, |p, cx| {
3726 p.find_or_create_local_worktree(dir.path(), false, cx)
3727 })
3728 .await
3729 .unwrap();
3730 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
3731
3732 worktree
3733 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3734 .await;
3735
3736 let abs_path = dir.path().join("the-file");
3737 let buffer = project
3738 .update(&mut cx, |p, cx| {
3739 p.open_buffer((worktree_id, "the-file"), cx)
3740 })
3741 .await
3742 .unwrap();
3743
3744 // TODO
3745 // Add a cursor on each row.
3746 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
3747 // assert!(!buffer.is_dirty());
3748 // buffer.add_selection_set(
3749 // &(0..3)
3750 // .map(|row| Selection {
3751 // id: row as usize,
3752 // start: Point::new(row, 1),
3753 // end: Point::new(row, 1),
3754 // reversed: false,
3755 // goal: SelectionGoal::None,
3756 // })
3757 // .collect::<Vec<_>>(),
3758 // cx,
3759 // )
3760 // });
3761
3762 // Change the file on disk, adding two new lines of text, and removing
3763 // one line.
3764 buffer.read_with(&cx, |buffer, _| {
3765 assert!(!buffer.is_dirty());
3766 assert!(!buffer.has_conflict());
3767 });
3768 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3769 fs::write(&abs_path, new_contents).unwrap();
3770
3771 // Because the buffer was not modified, it is reloaded from disk. Its
3772 // contents are edited according to the diff between the old and new
3773 // file contents.
3774 buffer
3775 .condition(&cx, |buffer, _| buffer.text() == new_contents)
3776 .await;
3777
3778 buffer.update(&mut cx, |buffer, _| {
3779 assert_eq!(buffer.text(), new_contents);
3780 assert!(!buffer.is_dirty());
3781 assert!(!buffer.has_conflict());
3782
3783 // TODO
3784 // let cursor_positions = buffer
3785 // .selection_set(selection_set_id)
3786 // .unwrap()
3787 // .selections::<Point>(&*buffer)
3788 // .map(|selection| {
3789 // assert_eq!(selection.start, selection.end);
3790 // selection.start
3791 // })
3792 // .collect::<Vec<_>>();
3793 // assert_eq!(
3794 // cursor_positions,
3795 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
3796 // );
3797 });
3798
3799 // Modify the buffer
        buffer.update(&mut cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }

    #[gpui::test]
    async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
        let fs = Arc::new(FakeFs::new(cx.background()));
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
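
        // Construct a publishDiagnostics payload containing two primary
        // diagnostics (a warning and an error), plus hint-severity entries
        // whose related_information points back at the primaries. The project
        // should fold these into two diagnostic groups.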
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

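        // Hand the diagnostics to the project, which translates the LSP
        // ranges into buffer coordinates and groups each primary diagnostic
        // with its supporting hints.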
        project
            .update(&mut cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());

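        // Entries come back in position order. Each group has exactly one
        // primary entry (`is_primary: true`); its related hints share the same
        // `group_id`.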
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

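        // `diagnostic_group` yields only the entries that belong to the given
        // group id.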
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
}