1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod worktree;
5
6use anyhow::{anyhow, Context, Result};
7use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
8use clock::ReplicaId;
9use collections::{hash_map, HashMap, HashSet};
10use futures::Future;
11use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
12use gpui::{
13 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
14 UpgradeModelHandle, WeakModelHandle,
15};
16use language::{
17 point_from_lsp,
18 proto::{deserialize_anchor, serialize_anchor},
19 range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, Completion, CompletionLabel,
20 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
21 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
22};
23use lsp::{DiagnosticSeverity, LanguageServer};
24use lsp_command::*;
25use postage::{broadcast, prelude::Stream, sink::Sink, watch};
26use smol::block_on;
27use std::{
28 convert::TryInto,
29 ops::Range,
30 path::{Path, PathBuf},
31 sync::{atomic::AtomicBool, Arc},
32 time::Instant,
33};
34use util::{post_inc, ResultExt, TryFutureExt as _};
35
36pub use fs::*;
37pub use worktree::*;
38
39pub struct Project {
40 worktrees: Vec<WorktreeHandle>,
41 active_entry: Option<ProjectEntry>,
42 languages: Arc<LanguageRegistry>,
43 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
44 client: Arc<client::Client>,
45 user_store: ModelHandle<UserStore>,
46 fs: Arc<dyn Fs>,
47 client_state: ProjectClientState,
48 collaborators: HashMap<PeerId, Collaborator>,
49 subscriptions: Vec<client::Subscription>,
50 language_servers_with_diagnostics_running: isize,
51 open_buffers: HashMap<u64, OpenBuffer>,
52 opened_buffer: broadcast::Sender<()>,
53 loading_buffers: HashMap<
54 ProjectPath,
55 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
56 >,
57 shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
58}
59
60enum OpenBuffer {
61 Loaded(WeakModelHandle<Buffer>),
62 Loading(Vec<Operation>),
63}
64
65enum WorktreeHandle {
66 Strong(ModelHandle<Worktree>),
67 Weak(WeakModelHandle<Worktree>),
68}
69
70enum ProjectClientState {
71 Local {
72 is_shared: bool,
73 remote_id_tx: watch::Sender<Option<u64>>,
74 remote_id_rx: watch::Receiver<Option<u64>>,
75 _maintain_remote_id_task: Task<Option<()>>,
76 },
77 Remote {
78 sharing_has_stopped: bool,
79 remote_id: u64,
80 replica_id: ReplicaId,
81 },
82}
83
84#[derive(Clone, Debug)]
85pub struct Collaborator {
86 pub user: Arc<User>,
87 pub peer_id: PeerId,
88 pub replica_id: ReplicaId,
89}
90
91#[derive(Clone, Debug, PartialEq)]
92pub enum Event {
93 ActiveEntryChanged(Option<ProjectEntry>),
94 WorktreeRemoved(WorktreeId),
95 DiskBasedDiagnosticsStarted,
96 DiskBasedDiagnosticsUpdated,
97 DiskBasedDiagnosticsFinished,
98 DiagnosticsUpdated(ProjectPath),
99}
100
101#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
102pub struct ProjectPath {
103 pub worktree_id: WorktreeId,
104 pub path: Arc<Path>,
105}
106
107#[derive(Clone, Debug, Default, PartialEq)]
108pub struct DiagnosticSummary {
109 pub error_count: usize,
110 pub warning_count: usize,
111 pub info_count: usize,
112 pub hint_count: usize,
113}
114
115#[derive(Debug)]
116pub struct Definition {
117 pub target_buffer: ModelHandle<Buffer>,
118 pub target_range: Range<language::Anchor>,
119}
120
121#[derive(Default)]
122pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
123
124impl DiagnosticSummary {
125 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
126 let mut this = Self {
127 error_count: 0,
128 warning_count: 0,
129 info_count: 0,
130 hint_count: 0,
131 };
132
133 for entry in diagnostics {
134 if entry.diagnostic.is_primary {
135 match entry.diagnostic.severity {
136 DiagnosticSeverity::ERROR => this.error_count += 1,
137 DiagnosticSeverity::WARNING => this.warning_count += 1,
138 DiagnosticSeverity::INFORMATION => this.info_count += 1,
139 DiagnosticSeverity::HINT => this.hint_count += 1,
140 _ => {}
141 }
142 }
143 }
144
145 this
146 }
147
148 pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
149 proto::DiagnosticSummary {
150 path: path.to_string_lossy().to_string(),
151 error_count: self.error_count as u32,
152 warning_count: self.warning_count as u32,
153 info_count: self.info_count as u32,
154 hint_count: self.hint_count as u32,
155 }
156 }
157}
158
159#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
160pub struct ProjectEntry {
161 pub worktree_id: WorktreeId,
162 pub entry_id: usize,
163}
164
165impl Project {
166 pub fn init(client: &Arc<Client>) {
167 client.add_entity_message_handler(Self::handle_add_collaborator);
168 client.add_entity_message_handler(Self::handle_buffer_reloaded);
169 client.add_entity_message_handler(Self::handle_buffer_saved);
170 client.add_entity_message_handler(Self::handle_close_buffer);
171 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
172 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
173 client.add_entity_message_handler(Self::handle_remove_collaborator);
174 client.add_entity_message_handler(Self::handle_share_worktree);
175 client.add_entity_message_handler(Self::handle_unregister_worktree);
176 client.add_entity_message_handler(Self::handle_unshare_project);
177 client.add_entity_message_handler(Self::handle_update_buffer_file);
178 client.add_entity_message_handler(Self::handle_update_buffer);
179 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
180 client.add_entity_message_handler(Self::handle_update_worktree);
181 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
182 client.add_entity_request_handler(Self::handle_apply_code_action);
183 client.add_entity_request_handler(Self::handle_format_buffers);
184 client.add_entity_request_handler(Self::handle_get_code_actions);
185 client.add_entity_request_handler(Self::handle_get_completions);
186 client.add_entity_request_handler(Self::handle_get_definition);
187 client.add_entity_request_handler(Self::handle_open_buffer);
188 client.add_entity_request_handler(Self::handle_save_buffer);
189 }
190
191 pub fn local(
192 client: Arc<Client>,
193 user_store: ModelHandle<UserStore>,
194 languages: Arc<LanguageRegistry>,
195 fs: Arc<dyn Fs>,
196 cx: &mut MutableAppContext,
197 ) -> ModelHandle<Self> {
198 cx.add_model(|cx: &mut ModelContext<Self>| {
199 let (remote_id_tx, remote_id_rx) = watch::channel();
200 let _maintain_remote_id_task = cx.spawn_weak({
201 let rpc = client.clone();
202 move |this, mut cx| {
203 async move {
204 let mut status = rpc.status();
205 while let Some(status) = status.recv().await {
206 if let Some(this) = this.upgrade(&cx) {
207 let remote_id = if let client::Status::Connected { .. } = status {
208 let response = rpc.request(proto::RegisterProject {}).await?;
209 Some(response.project_id)
210 } else {
211 None
212 };
213
214 if let Some(project_id) = remote_id {
215 let mut registrations = Vec::new();
216 this.update(&mut cx, |this, cx| {
217 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
218 registrations.push(worktree.update(
219 cx,
220 |worktree, cx| {
221 let worktree = worktree.as_local_mut().unwrap();
222 worktree.register(project_id, cx)
223 },
224 ));
225 }
226 });
227 for registration in registrations {
228 registration.await?;
229 }
230 }
231 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
232 }
233 }
234 Ok(())
235 }
236 .log_err()
237 }
238 });
239
240 Self {
241 worktrees: Default::default(),
242 collaborators: Default::default(),
243 open_buffers: Default::default(),
244 loading_buffers: Default::default(),
245 shared_buffers: Default::default(),
246 client_state: ProjectClientState::Local {
247 is_shared: false,
248 remote_id_tx,
249 remote_id_rx,
250 _maintain_remote_id_task,
251 },
252 opened_buffer: broadcast::channel(1).0,
253 subscriptions: Vec::new(),
254 active_entry: None,
255 languages,
256 client,
257 user_store,
258 fs,
259 language_servers_with_diagnostics_running: 0,
260 language_servers: Default::default(),
261 }
262 })
263 }
264
265 pub async fn remote(
266 remote_id: u64,
267 client: Arc<Client>,
268 user_store: ModelHandle<UserStore>,
269 languages: Arc<LanguageRegistry>,
270 fs: Arc<dyn Fs>,
271 cx: &mut AsyncAppContext,
272 ) -> Result<ModelHandle<Self>> {
273 client.authenticate_and_connect(&cx).await?;
274
275 let response = client
276 .request(proto::JoinProject {
277 project_id: remote_id,
278 })
279 .await?;
280
281 let replica_id = response.replica_id as ReplicaId;
282
283 let mut worktrees = Vec::new();
284 for worktree in response.worktrees {
285 let (worktree, load_task) = cx
286 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
287 worktrees.push(worktree);
288 load_task.detach();
289 }
290
291 let this = cx.add_model(|cx| {
292 let mut this = Self {
293 worktrees: Vec::new(),
294 open_buffers: Default::default(),
295 loading_buffers: Default::default(),
296 opened_buffer: broadcast::channel(1).0,
297 shared_buffers: Default::default(),
298 active_entry: None,
299 collaborators: Default::default(),
300 languages,
301 user_store: user_store.clone(),
302 fs,
303 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
304 client,
305 client_state: ProjectClientState::Remote {
306 sharing_has_stopped: false,
307 remote_id,
308 replica_id,
309 },
310 language_servers_with_diagnostics_running: 0,
311 language_servers: Default::default(),
312 };
313 for worktree in worktrees {
314 this.add_worktree(&worktree, cx);
315 }
316 this
317 });
318
319 let user_ids = response
320 .collaborators
321 .iter()
322 .map(|peer| peer.user_id)
323 .collect();
324 user_store
325 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
326 .await?;
327 let mut collaborators = HashMap::default();
328 for message in response.collaborators {
329 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
330 collaborators.insert(collaborator.peer_id, collaborator);
331 }
332
333 this.update(cx, |this, _| {
334 this.collaborators = collaborators;
335 });
336
337 Ok(this)
338 }
339
340 #[cfg(any(test, feature = "test-support"))]
341 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
342 let languages = Arc::new(LanguageRegistry::new());
343 let http_client = client::test::FakeHttpClient::with_404_response();
344 let client = client::Client::new(http_client.clone());
345 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
346 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
347 }
348
349 #[cfg(any(test, feature = "test-support"))]
350 pub fn shared_buffer(&self, peer_id: PeerId, remote_id: u64) -> Option<ModelHandle<Buffer>> {
351 self.shared_buffers
352 .get(&peer_id)
353 .and_then(|buffers| buffers.get(&remote_id))
354 .cloned()
355 }
356
357 #[cfg(any(test, feature = "test-support"))]
358 pub fn has_buffered_operations(&self) -> bool {
359 self.open_buffers
360 .values()
361 .any(|buffer| matches!(buffer, OpenBuffer::Loading(_)))
362 }
363
364 pub fn fs(&self) -> &Arc<dyn Fs> {
365 &self.fs
366 }
367
368 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
369 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
370 *remote_id_tx.borrow_mut() = remote_id;
371 }
372
373 self.subscriptions.clear();
374 if let Some(remote_id) = remote_id {
375 self.subscriptions
376 .push(self.client.add_model_for_remote_entity(remote_id, cx));
377 }
378 }
379
380 pub fn remote_id(&self) -> Option<u64> {
381 match &self.client_state {
382 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
383 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
384 }
385 }
386
387 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
388 let mut id = None;
389 let mut watch = None;
390 match &self.client_state {
391 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
392 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
393 }
394
395 async move {
396 if let Some(id) = id {
397 return id;
398 }
399 let mut watch = watch.unwrap();
400 loop {
401 let id = *watch.borrow();
402 if let Some(id) = id {
403 return id;
404 }
405 watch.recv().await;
406 }
407 }
408 }
409
410 pub fn replica_id(&self) -> ReplicaId {
411 match &self.client_state {
412 ProjectClientState::Local { .. } => 0,
413 ProjectClientState::Remote { replica_id, .. } => *replica_id,
414 }
415 }
416
417 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
418 &self.collaborators
419 }
420
421 pub fn worktrees<'a>(
422 &'a self,
423 cx: &'a AppContext,
424 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
425 self.worktrees
426 .iter()
427 .filter_map(move |worktree| worktree.upgrade(cx))
428 }
429
430 pub fn worktree_for_id(
431 &self,
432 id: WorktreeId,
433 cx: &AppContext,
434 ) -> Option<ModelHandle<Worktree>> {
435 self.worktrees(cx)
436 .find(|worktree| worktree.read(cx).id() == id)
437 }
438
439 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
440 let rpc = self.client.clone();
441 cx.spawn(|this, mut cx| async move {
442 let project_id = this.update(&mut cx, |this, _| {
443 if let ProjectClientState::Local {
444 is_shared,
445 remote_id_rx,
446 ..
447 } = &mut this.client_state
448 {
449 *is_shared = true;
450 remote_id_rx
451 .borrow()
452 .ok_or_else(|| anyhow!("no project id"))
453 } else {
454 Err(anyhow!("can't share a remote project"))
455 }
456 })?;
457
458 rpc.request(proto::ShareProject { project_id }).await?;
459 let mut tasks = Vec::new();
460 this.update(&mut cx, |this, cx| {
461 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
462 worktree.update(cx, |worktree, cx| {
463 let worktree = worktree.as_local_mut().unwrap();
464 tasks.push(worktree.share(project_id, cx));
465 });
466 }
467 });
468 for task in tasks {
469 task.await?;
470 }
471 this.update(&mut cx, |_, cx| cx.notify());
472 Ok(())
473 })
474 }
475
476 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
477 let rpc = self.client.clone();
478 cx.spawn(|this, mut cx| async move {
479 let project_id = this.update(&mut cx, |this, _| {
480 if let ProjectClientState::Local {
481 is_shared,
482 remote_id_rx,
483 ..
484 } = &mut this.client_state
485 {
486 *is_shared = false;
487 remote_id_rx
488 .borrow()
489 .ok_or_else(|| anyhow!("no project id"))
490 } else {
491 Err(anyhow!("can't share a remote project"))
492 }
493 })?;
494
495 rpc.send(proto::UnshareProject { project_id })?;
496 this.update(&mut cx, |this, cx| {
497 this.collaborators.clear();
498 this.shared_buffers.clear();
499 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
500 worktree.update(cx, |worktree, _| {
501 worktree.as_local_mut().unwrap().unshare();
502 });
503 }
504 cx.notify()
505 });
506 Ok(())
507 })
508 }
509
510 pub fn is_read_only(&self) -> bool {
511 match &self.client_state {
512 ProjectClientState::Local { .. } => false,
513 ProjectClientState::Remote {
514 sharing_has_stopped,
515 ..
516 } => *sharing_has_stopped,
517 }
518 }
519
520 pub fn is_local(&self) -> bool {
521 match &self.client_state {
522 ProjectClientState::Local { .. } => true,
523 ProjectClientState::Remote { .. } => false,
524 }
525 }
526
527 pub fn is_remote(&self) -> bool {
528 !self.is_local()
529 }
530
531 pub fn open_buffer(
532 &mut self,
533 path: impl Into<ProjectPath>,
534 cx: &mut ModelContext<Self>,
535 ) -> Task<Result<ModelHandle<Buffer>>> {
536 let project_path = path.into();
537 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
538 worktree
539 } else {
540 return Task::ready(Err(anyhow!("no such worktree")));
541 };
542
543 // If there is already a buffer for the given path, then return it.
544 let existing_buffer = self.get_open_buffer(&project_path, cx);
545 if let Some(existing_buffer) = existing_buffer {
546 return Task::ready(Ok(existing_buffer));
547 }
548
549 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
550 // If the given path is already being loaded, then wait for that existing
551 // task to complete and return the same buffer.
552 hash_map::Entry::Occupied(e) => e.get().clone(),
553
554 // Otherwise, record the fact that this path is now being loaded.
555 hash_map::Entry::Vacant(entry) => {
556 let (mut tx, rx) = postage::watch::channel();
557 entry.insert(rx.clone());
558
559 let load_buffer = if worktree.read(cx).is_local() {
560 self.open_local_buffer(&project_path.path, &worktree, cx)
561 } else {
562 self.open_remote_buffer(&project_path.path, &worktree, cx)
563 };
564
565 cx.spawn(move |this, mut cx| async move {
566 let load_result = load_buffer.await;
567 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
568 // Record the fact that the buffer is no longer loading.
569 this.loading_buffers.remove(&project_path);
570 if this.loading_buffers.is_empty() {
571 this.open_buffers
572 .retain(|_, buffer| matches!(buffer, OpenBuffer::Loaded(_)))
573 }
574
575 let buffer = load_result.map_err(Arc::new)?;
576 Ok(buffer)
577 }));
578 })
579 .detach();
580 rx
581 }
582 };
583
584 cx.foreground().spawn(async move {
585 loop {
586 if let Some(result) = loading_watch.borrow().as_ref() {
587 match result {
588 Ok(buffer) => return Ok(buffer.clone()),
589 Err(error) => return Err(anyhow!("{}", error)),
590 }
591 }
592 loading_watch.recv().await;
593 }
594 })
595 }
596
597 fn open_local_buffer(
598 &mut self,
599 path: &Arc<Path>,
600 worktree: &ModelHandle<Worktree>,
601 cx: &mut ModelContext<Self>,
602 ) -> Task<Result<ModelHandle<Buffer>>> {
603 let load_buffer = worktree.update(cx, |worktree, cx| {
604 let worktree = worktree.as_local_mut().unwrap();
605 worktree.load_buffer(path, cx)
606 });
607 let worktree = worktree.downgrade();
608 cx.spawn(|this, mut cx| async move {
609 let buffer = load_buffer.await?;
610 let worktree = worktree
611 .upgrade(&cx)
612 .ok_or_else(|| anyhow!("worktree was removed"))?;
613 this.update(&mut cx, |this, cx| {
614 this.register_buffer(&buffer, Some(&worktree), cx)
615 })?;
616 Ok(buffer)
617 })
618 }
619
620 fn open_remote_buffer(
621 &mut self,
622 path: &Arc<Path>,
623 worktree: &ModelHandle<Worktree>,
624 cx: &mut ModelContext<Self>,
625 ) -> Task<Result<ModelHandle<Buffer>>> {
626 let rpc = self.client.clone();
627 let project_id = self.remote_id().unwrap();
628 let remote_worktree_id = worktree.read(cx).id();
629 let path = path.clone();
630 let path_string = path.to_string_lossy().to_string();
631 cx.spawn(|this, mut cx| async move {
632 let response = rpc
633 .request(proto::OpenBuffer {
634 project_id,
635 worktree_id: remote_worktree_id.to_proto(),
636 path: path_string,
637 })
638 .await?;
639 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
640 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
641 .await
642 })
643 }
644
645 fn open_local_buffer_from_lsp_path(
646 &mut self,
647 abs_path: lsp::Url,
648 lang_name: String,
649 lang_server: Arc<LanguageServer>,
650 cx: &mut ModelContext<Self>,
651 ) -> Task<Result<ModelHandle<Buffer>>> {
652 cx.spawn(|this, mut cx| async move {
653 let abs_path = abs_path
654 .to_file_path()
655 .map_err(|_| anyhow!("can't convert URI to path"))?;
656 let (worktree, relative_path) = if let Some(result) =
657 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
658 {
659 result
660 } else {
661 let worktree = this
662 .update(&mut cx, |this, cx| {
663 this.create_local_worktree(&abs_path, true, cx)
664 })
665 .await?;
666 this.update(&mut cx, |this, cx| {
667 this.language_servers
668 .insert((worktree.read(cx).id(), lang_name), lang_server);
669 });
670 (worktree, PathBuf::new())
671 };
672
673 let project_path = ProjectPath {
674 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
675 path: relative_path.into(),
676 };
677 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
678 .await
679 })
680 }
681
682 pub fn save_buffer_as(
683 &self,
684 buffer: ModelHandle<Buffer>,
685 abs_path: PathBuf,
686 cx: &mut ModelContext<Project>,
687 ) -> Task<Result<()>> {
688 let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
689 cx.spawn(|this, mut cx| async move {
690 let (worktree, path) = worktree_task.await?;
691 worktree
692 .update(&mut cx, |worktree, cx| {
693 worktree
694 .as_local_mut()
695 .unwrap()
696 .save_buffer_as(buffer.clone(), path, cx)
697 })
698 .await?;
699 this.update(&mut cx, |this, cx| {
700 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
701 });
702 Ok(())
703 })
704 }
705
706 #[cfg(any(test, feature = "test-support"))]
707 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
708 let path = path.into();
709 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
710 self.open_buffers.iter().any(|(_, buffer)| {
711 if let Some(buffer) = buffer.upgrade(cx) {
712 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
713 if file.worktree == worktree && file.path() == &path.path {
714 return true;
715 }
716 }
717 }
718 false
719 })
720 } else {
721 false
722 }
723 }
724
725 fn get_open_buffer(
726 &mut self,
727 path: &ProjectPath,
728 cx: &mut ModelContext<Self>,
729 ) -> Option<ModelHandle<Buffer>> {
730 let mut result = None;
731 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
732 self.open_buffers.retain(|_, buffer| {
733 if let Some(buffer) = buffer.upgrade(cx) {
734 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
735 if file.worktree == worktree && file.path() == &path.path {
736 result = Some(buffer);
737 }
738 }
739 true
740 } else {
741 false
742 }
743 });
744 result
745 }
746
747 fn register_buffer(
748 &mut self,
749 buffer: &ModelHandle<Buffer>,
750 worktree: Option<&ModelHandle<Worktree>>,
751 cx: &mut ModelContext<Self>,
752 ) -> Result<()> {
753 match self.open_buffers.insert(
754 buffer.read(cx).remote_id(),
755 OpenBuffer::Loaded(buffer.downgrade()),
756 ) {
757 None => {}
758 Some(OpenBuffer::Loading(operations)) => {
759 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
760 }
761 Some(OpenBuffer::Loaded(_)) => Err(anyhow!("registered the same buffer twice"))?,
762 }
763 self.assign_language_to_buffer(&buffer, worktree, cx);
764 Ok(())
765 }
766
767 fn assign_language_to_buffer(
768 &mut self,
769 buffer: &ModelHandle<Buffer>,
770 worktree: Option<&ModelHandle<Worktree>>,
771 cx: &mut ModelContext<Self>,
772 ) -> Option<()> {
773 let (path, full_path) = {
774 let file = buffer.read(cx).file()?;
775 (file.path().clone(), file.full_path(cx))
776 };
777
778 // If the buffer has a language, set it and start/assign the language server
779 if let Some(language) = self.languages.select_language(&full_path) {
780 buffer.update(cx, |buffer, cx| {
781 buffer.set_language(Some(language.clone()), cx);
782 });
783
784 // For local worktrees, start a language server if needed.
785 // Also assign the language server and any previously stored diagnostics to the buffer.
786 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
787 let worktree_id = local_worktree.id();
788 let worktree_abs_path = local_worktree.abs_path().clone();
789
790 let language_server = match self
791 .language_servers
792 .entry((worktree_id, language.name().to_string()))
793 {
794 hash_map::Entry::Occupied(e) => Some(e.get().clone()),
795 hash_map::Entry::Vacant(e) => Self::start_language_server(
796 self.client.clone(),
797 language.clone(),
798 &worktree_abs_path,
799 cx,
800 )
801 .map(|server| e.insert(server).clone()),
802 };
803
804 buffer.update(cx, |buffer, cx| {
805 buffer.set_language_server(language_server, cx);
806 });
807 }
808 }
809
810 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
811 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
812 buffer.update(cx, |buffer, cx| {
813 buffer.update_diagnostics(diagnostics, None, cx).log_err();
814 });
815 }
816 }
817
818 None
819 }
820
821 fn start_language_server(
822 rpc: Arc<Client>,
823 language: Arc<Language>,
824 worktree_path: &Path,
825 cx: &mut ModelContext<Self>,
826 ) -> Option<Arc<LanguageServer>> {
827 enum LspEvent {
828 DiagnosticsStart,
829 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
830 DiagnosticsFinish,
831 }
832
833 let language_server = language
834 .start_server(worktree_path, cx)
835 .log_err()
836 .flatten()?;
837 let disk_based_sources = language
838 .disk_based_diagnostic_sources()
839 .cloned()
840 .unwrap_or_default();
841 let disk_based_diagnostics_progress_token =
842 language.disk_based_diagnostics_progress_token().cloned();
843 let has_disk_based_diagnostic_progress_token =
844 disk_based_diagnostics_progress_token.is_some();
845 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
846
847 // Listen for `PublishDiagnostics` notifications.
848 language_server
849 .on_notification::<lsp::notification::PublishDiagnostics, _>({
850 let diagnostics_tx = diagnostics_tx.clone();
851 move |params| {
852 if !has_disk_based_diagnostic_progress_token {
853 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
854 }
855 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params))).ok();
856 if !has_disk_based_diagnostic_progress_token {
857 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
858 }
859 }
860 })
861 .detach();
862
863 // Listen for `Progress` notifications. Send an event when the language server
864 // transitions between running jobs and not running any jobs.
865 let mut running_jobs_for_this_server: i32 = 0;
866 language_server
867 .on_notification::<lsp::notification::Progress, _>(move |params| {
868 let token = match params.token {
869 lsp::NumberOrString::Number(_) => None,
870 lsp::NumberOrString::String(token) => Some(token),
871 };
872
873 if token == disk_based_diagnostics_progress_token {
874 match params.value {
875 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
876 lsp::WorkDoneProgress::Begin(_) => {
877 running_jobs_for_this_server += 1;
878 if running_jobs_for_this_server == 1 {
879 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
880 }
881 }
882 lsp::WorkDoneProgress::End(_) => {
883 running_jobs_for_this_server -= 1;
884 if running_jobs_for_this_server == 0 {
885 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
886 }
887 }
888 _ => {}
889 },
890 }
891 }
892 })
893 .detach();
894
895 // Process all the LSP events.
896 cx.spawn_weak(|this, mut cx| async move {
897 while let Ok(message) = diagnostics_rx.recv().await {
898 let this = this.upgrade(&cx)?;
899 match message {
900 LspEvent::DiagnosticsStart => {
901 this.update(&mut cx, |this, cx| {
902 this.disk_based_diagnostics_started(cx);
903 if let Some(project_id) = this.remote_id() {
904 rpc.send(proto::DiskBasedDiagnosticsUpdating { project_id })
905 .log_err();
906 }
907 });
908 }
909 LspEvent::DiagnosticsUpdate(mut params) => {
910 language.process_diagnostics(&mut params);
911 this.update(&mut cx, |this, cx| {
912 this.update_diagnostics(params, &disk_based_sources, cx)
913 .log_err();
914 });
915 }
916 LspEvent::DiagnosticsFinish => {
917 this.update(&mut cx, |this, cx| {
918 this.disk_based_diagnostics_finished(cx);
919 if let Some(project_id) = this.remote_id() {
920 rpc.send(proto::DiskBasedDiagnosticsUpdated { project_id })
921 .log_err();
922 }
923 });
924 }
925 }
926 }
927 Some(())
928 })
929 .detach();
930
931 Some(language_server)
932 }
933
934 pub fn update_diagnostics(
935 &mut self,
936 params: lsp::PublishDiagnosticsParams,
937 disk_based_sources: &HashSet<String>,
938 cx: &mut ModelContext<Self>,
939 ) -> Result<()> {
940 let abs_path = params
941 .uri
942 .to_file_path()
943 .map_err(|_| anyhow!("URI is not a file"))?;
944 let mut next_group_id = 0;
945 let mut diagnostics = Vec::default();
946 let mut primary_diagnostic_group_ids = HashMap::default();
947 let mut sources_by_group_id = HashMap::default();
948 let mut supporting_diagnostic_severities = HashMap::default();
949 for diagnostic in ¶ms.diagnostics {
950 let source = diagnostic.source.as_ref();
951 let code = diagnostic.code.as_ref().map(|code| match code {
952 lsp::NumberOrString::Number(code) => code.to_string(),
953 lsp::NumberOrString::String(code) => code.clone(),
954 });
955 let range = range_from_lsp(diagnostic.range);
956 let is_supporting = diagnostic
957 .related_information
958 .as_ref()
959 .map_or(false, |infos| {
960 infos.iter().any(|info| {
961 primary_diagnostic_group_ids.contains_key(&(
962 source,
963 code.clone(),
964 range_from_lsp(info.location.range),
965 ))
966 })
967 });
968
969 if is_supporting {
970 if let Some(severity) = diagnostic.severity {
971 supporting_diagnostic_severities
972 .insert((source, code.clone(), range), severity);
973 }
974 } else {
975 let group_id = post_inc(&mut next_group_id);
976 let is_disk_based =
977 source.map_or(false, |source| disk_based_sources.contains(source));
978
979 sources_by_group_id.insert(group_id, source);
980 primary_diagnostic_group_ids
981 .insert((source, code.clone(), range.clone()), group_id);
982
983 diagnostics.push(DiagnosticEntry {
984 range,
985 diagnostic: Diagnostic {
986 code: code.clone(),
987 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
988 message: diagnostic.message.clone(),
989 group_id,
990 is_primary: true,
991 is_valid: true,
992 is_disk_based,
993 },
994 });
995 if let Some(infos) = &diagnostic.related_information {
996 for info in infos {
997 if info.location.uri == params.uri && !info.message.is_empty() {
998 let range = range_from_lsp(info.location.range);
999 diagnostics.push(DiagnosticEntry {
1000 range,
1001 diagnostic: Diagnostic {
1002 code: code.clone(),
1003 severity: DiagnosticSeverity::INFORMATION,
1004 message: info.message.clone(),
1005 group_id,
1006 is_primary: false,
1007 is_valid: true,
1008 is_disk_based,
1009 },
1010 });
1011 }
1012 }
1013 }
1014 }
1015 }
1016
1017 for entry in &mut diagnostics {
1018 let diagnostic = &mut entry.diagnostic;
1019 if !diagnostic.is_primary {
1020 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1021 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1022 source,
1023 diagnostic.code.clone(),
1024 entry.range.clone(),
1025 )) {
1026 diagnostic.severity = severity;
1027 }
1028 }
1029 }
1030
1031 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1032 Ok(())
1033 }
1034
1035 pub fn update_diagnostic_entries(
1036 &mut self,
1037 abs_path: PathBuf,
1038 version: Option<i32>,
1039 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1040 cx: &mut ModelContext<Project>,
1041 ) -> Result<(), anyhow::Error> {
1042 let (worktree, relative_path) = self
1043 .find_local_worktree(&abs_path, cx)
1044 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1045 let project_path = ProjectPath {
1046 worktree_id: worktree.read(cx).id(),
1047 path: relative_path.into(),
1048 };
1049
1050 for buffer in self.open_buffers.values() {
1051 if let Some(buffer) = buffer.upgrade(cx) {
1052 if buffer
1053 .read(cx)
1054 .file()
1055 .map_or(false, |file| *file.path() == project_path.path)
1056 {
1057 buffer.update(cx, |buffer, cx| {
1058 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1059 })?;
1060 break;
1061 }
1062 }
1063 }
1064 worktree.update(cx, |worktree, cx| {
1065 worktree
1066 .as_local_mut()
1067 .ok_or_else(|| anyhow!("not a local worktree"))?
1068 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1069 })?;
1070 cx.emit(Event::DiagnosticsUpdated(project_path));
1071 Ok(())
1072 }
1073
1074 pub fn format(
1075 &self,
1076 buffers: HashSet<ModelHandle<Buffer>>,
1077 push_to_history: bool,
1078 cx: &mut ModelContext<Project>,
1079 ) -> Task<Result<ProjectTransaction>> {
1080 let mut local_buffers = Vec::new();
1081 let mut remote_buffers = None;
1082 for buffer_handle in buffers {
1083 let buffer = buffer_handle.read(cx);
1084 let worktree;
1085 if let Some(file) = File::from_dyn(buffer.file()) {
1086 worktree = file.worktree.clone();
1087 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1088 let lang_server;
1089 if let Some(lang) = buffer.language() {
1090 if let Some(server) = self
1091 .language_servers
1092 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1093 {
1094 lang_server = server.clone();
1095 } else {
1096 return Task::ready(Ok(Default::default()));
1097 };
1098 } else {
1099 return Task::ready(Ok(Default::default()));
1100 }
1101
1102 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1103 } else {
1104 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1105 }
1106 } else {
1107 return Task::ready(Ok(Default::default()));
1108 }
1109 }
1110
1111 let remote_buffers = self.remote_id().zip(remote_buffers);
1112 let client = self.client.clone();
1113
1114 cx.spawn(|this, mut cx| async move {
1115 let mut project_transaction = ProjectTransaction::default();
1116
1117 if let Some((project_id, remote_buffers)) = remote_buffers {
1118 let response = client
1119 .request(proto::FormatBuffers {
1120 project_id,
1121 buffer_ids: remote_buffers
1122 .iter()
1123 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1124 .collect(),
1125 })
1126 .await?
1127 .transaction
1128 .ok_or_else(|| anyhow!("missing transaction"))?;
1129 project_transaction = this
1130 .update(&mut cx, |this, cx| {
1131 this.deserialize_project_transaction(response, push_to_history, cx)
1132 })
1133 .await?;
1134 }
1135
1136 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1137 let lsp_edits = lang_server
1138 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1139 text_document: lsp::TextDocumentIdentifier::new(
1140 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1141 ),
1142 options: Default::default(),
1143 work_done_progress_params: Default::default(),
1144 })
1145 .await?;
1146
1147 if let Some(lsp_edits) = lsp_edits {
1148 let edits = buffer
1149 .update(&mut cx, |buffer, cx| {
1150 buffer.edits_from_lsp(lsp_edits, None, cx)
1151 })
1152 .await?;
1153 buffer.update(&mut cx, |buffer, cx| {
1154 buffer.finalize_last_transaction();
1155 buffer.start_transaction();
1156 for (range, text) in edits {
1157 buffer.edit([range], text, cx);
1158 }
1159 if buffer.end_transaction(cx).is_some() {
1160 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1161 if !push_to_history {
1162 buffer.forget_transaction(transaction.id);
1163 }
1164 project_transaction.0.insert(cx.handle(), transaction);
1165 }
1166 });
1167 }
1168 }
1169
1170 Ok(project_transaction)
1171 })
1172 }
1173
1174 pub fn definition<T: ToPointUtf16>(
1175 &self,
1176 source_buffer_handle: &ModelHandle<Buffer>,
1177 position: T,
1178 cx: &mut ModelContext<Self>,
1179 ) -> Task<Result<Vec<Definition>>> {
1180 let source_buffer_handle = source_buffer_handle.clone();
1181 let source_buffer = source_buffer_handle.read(cx);
1182 let worktree;
1183 let buffer_abs_path;
1184 if let Some(file) = File::from_dyn(source_buffer.file()) {
1185 worktree = file.worktree.clone();
1186 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1187 } else {
1188 return Task::ready(Ok(Default::default()));
1189 };
1190
1191 let position = position.to_point_utf16(source_buffer);
1192
1193 if worktree.read(cx).as_local().is_some() {
1194 let buffer_abs_path = buffer_abs_path.unwrap();
1195 let lang_name;
1196 let lang_server;
1197 if let Some(lang) = source_buffer.language() {
1198 lang_name = lang.name().to_string();
1199 if let Some(server) = self
1200 .language_servers
1201 .get(&(worktree.read(cx).id(), lang_name.clone()))
1202 {
1203 lang_server = server.clone();
1204 } else {
1205 return Task::ready(Ok(Default::default()));
1206 };
1207 } else {
1208 return Task::ready(Ok(Default::default()));
1209 }
1210
1211 cx.spawn(|this, mut cx| async move {
1212 let response = lang_server
1213 .request::<lsp::request::GotoDefinition>(lsp::GotoDefinitionParams {
1214 text_document_position_params: lsp::TextDocumentPositionParams {
1215 text_document: lsp::TextDocumentIdentifier::new(
1216 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1217 ),
1218 position: lsp::Position::new(position.row, position.column),
1219 },
1220 work_done_progress_params: Default::default(),
1221 partial_result_params: Default::default(),
1222 })
1223 .await?;
1224
1225 let mut definitions = Vec::new();
1226 if let Some(response) = response {
1227 let mut unresolved_locations = Vec::new();
1228 match response {
1229 lsp::GotoDefinitionResponse::Scalar(loc) => {
1230 unresolved_locations.push((loc.uri, loc.range));
1231 }
1232 lsp::GotoDefinitionResponse::Array(locs) => {
1233 unresolved_locations.extend(locs.into_iter().map(|l| (l.uri, l.range)));
1234 }
1235 lsp::GotoDefinitionResponse::Link(links) => {
1236 unresolved_locations.extend(
1237 links
1238 .into_iter()
1239 .map(|l| (l.target_uri, l.target_selection_range)),
1240 );
1241 }
1242 }
1243
1244 for (target_uri, target_range) in unresolved_locations {
1245 let target_buffer_handle = this
1246 .update(&mut cx, |this, cx| {
1247 this.open_local_buffer_from_lsp_path(
1248 target_uri,
1249 lang_name.clone(),
1250 lang_server.clone(),
1251 cx,
1252 )
1253 })
1254 .await?;
1255
1256 cx.read(|cx| {
1257 let target_buffer = target_buffer_handle.read(cx);
1258 let target_start = target_buffer
1259 .clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
1260 let target_end = target_buffer
1261 .clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
1262 definitions.push(Definition {
1263 target_buffer: target_buffer_handle,
1264 target_range: target_buffer.anchor_after(target_start)
1265 ..target_buffer.anchor_before(target_end),
1266 });
1267 });
1268 }
1269 }
1270
1271 Ok(definitions)
1272 })
1273 } else if let Some(project_id) = self.remote_id() {
1274 let client = self.client.clone();
1275 let request = proto::GetDefinition {
1276 project_id,
1277 buffer_id: source_buffer.remote_id(),
1278 position: Some(serialize_anchor(&source_buffer.anchor_before(position))),
1279 };
1280 cx.spawn(|this, mut cx| async move {
1281 let response = client.request(request).await?;
1282 let mut definitions = Vec::new();
1283 for definition in response.definitions {
1284 let buffer = definition.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1285 let target_buffer = this
1286 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1287 .await?;
1288 let target_start = definition
1289 .target_start
1290 .and_then(deserialize_anchor)
1291 .ok_or_else(|| anyhow!("missing target start"))?;
1292 let target_end = definition
1293 .target_end
1294 .and_then(deserialize_anchor)
1295 .ok_or_else(|| anyhow!("missing target end"))?;
1296 definitions.push(Definition {
1297 target_buffer,
1298 target_range: target_start..target_end,
1299 })
1300 }
1301
1302 Ok(definitions)
1303 })
1304 } else {
1305 Task::ready(Ok(Default::default()))
1306 }
1307 }
1308
1309 pub fn completions<T: ToPointUtf16>(
1310 &self,
1311 source_buffer_handle: &ModelHandle<Buffer>,
1312 position: T,
1313 cx: &mut ModelContext<Self>,
1314 ) -> Task<Result<Vec<Completion>>> {
1315 let source_buffer_handle = source_buffer_handle.clone();
1316 let source_buffer = source_buffer_handle.read(cx);
1317 let buffer_id = source_buffer.remote_id();
1318 let language = source_buffer.language().cloned();
1319 let worktree;
1320 let buffer_abs_path;
1321 if let Some(file) = File::from_dyn(source_buffer.file()) {
1322 worktree = file.worktree.clone();
1323 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1324 } else {
1325 return Task::ready(Ok(Default::default()));
1326 };
1327
1328 let position = position.to_point_utf16(source_buffer);
1329 let anchor = source_buffer.anchor_after(position);
1330
1331 if worktree.read(cx).as_local().is_some() {
1332 let buffer_abs_path = buffer_abs_path.unwrap();
1333 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1334 server
1335 } else {
1336 return Task::ready(Ok(Default::default()));
1337 };
1338
1339 cx.spawn(|_, cx| async move {
1340 let completions = lang_server
1341 .request::<lsp::request::Completion>(lsp::CompletionParams {
1342 text_document_position: lsp::TextDocumentPositionParams::new(
1343 lsp::TextDocumentIdentifier::new(
1344 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1345 ),
1346 position.to_lsp_position(),
1347 ),
1348 context: Default::default(),
1349 work_done_progress_params: Default::default(),
1350 partial_result_params: Default::default(),
1351 })
1352 .await
1353 .context("lsp completion request failed")?;
1354
1355 let completions = if let Some(completions) = completions {
1356 match completions {
1357 lsp::CompletionResponse::Array(completions) => completions,
1358 lsp::CompletionResponse::List(list) => list.items,
1359 }
1360 } else {
1361 Default::default()
1362 };
1363
1364 source_buffer_handle.read_with(&cx, |this, _| {
1365 Ok(completions
1366 .into_iter()
1367 .filter_map(|lsp_completion| {
1368 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1369 lsp::CompletionTextEdit::Edit(edit) => {
1370 (range_from_lsp(edit.range), edit.new_text.clone())
1371 }
1372 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1373 log::info!("unsupported insert/replace completion");
1374 return None;
1375 }
1376 };
1377
1378 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1379 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1380 if clipped_start == old_range.start && clipped_end == old_range.end {
1381 Some(Completion {
1382 old_range: this.anchor_before(old_range.start)
1383 ..this.anchor_after(old_range.end),
1384 new_text,
1385 label: language
1386 .as_ref()
1387 .and_then(|l| l.label_for_completion(&lsp_completion))
1388 .unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
1389 lsp_completion,
1390 })
1391 } else {
1392 None
1393 }
1394 })
1395 .collect())
1396 })
1397 })
1398 } else if let Some(project_id) = self.remote_id() {
1399 let rpc = self.client.clone();
1400 let message = proto::GetCompletions {
1401 project_id,
1402 buffer_id,
1403 position: Some(language::proto::serialize_anchor(&anchor)),
1404 version: (&source_buffer.version()).into(),
1405 };
1406 cx.spawn_weak(|_, mut cx| async move {
1407 let response = rpc.request(message).await?;
1408
1409 source_buffer_handle
1410 .update(&mut cx, |buffer, _| {
1411 buffer.wait_for_version(response.version.into())
1412 })
1413 .await;
1414
1415 response
1416 .completions
1417 .into_iter()
1418 .map(|completion| {
1419 language::proto::deserialize_completion(completion, language.as_ref())
1420 })
1421 .collect()
1422 })
1423 } else {
1424 Task::ready(Ok(Default::default()))
1425 }
1426 }
1427
1428 pub fn apply_additional_edits_for_completion(
1429 &self,
1430 buffer_handle: ModelHandle<Buffer>,
1431 completion: Completion,
1432 push_to_history: bool,
1433 cx: &mut ModelContext<Self>,
1434 ) -> Task<Result<Option<Transaction>>> {
1435 let buffer = buffer_handle.read(cx);
1436 let buffer_id = buffer.remote_id();
1437
1438 if self.is_local() {
1439 let lang_server = if let Some(language_server) = buffer.language_server() {
1440 language_server.clone()
1441 } else {
1442 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1443 };
1444
1445 cx.spawn(|_, mut cx| async move {
1446 let resolved_completion = lang_server
1447 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1448 .await?;
1449 if let Some(edits) = resolved_completion.additional_text_edits {
1450 let edits = buffer_handle
1451 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1452 .await?;
1453 buffer_handle.update(&mut cx, |buffer, cx| {
1454 buffer.finalize_last_transaction();
1455 buffer.start_transaction();
1456 for (range, text) in edits {
1457 buffer.edit([range], text, cx);
1458 }
1459 let transaction = if buffer.end_transaction(cx).is_some() {
1460 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1461 if !push_to_history {
1462 buffer.forget_transaction(transaction.id);
1463 }
1464 Some(transaction)
1465 } else {
1466 None
1467 };
1468 Ok(transaction)
1469 })
1470 } else {
1471 Ok(None)
1472 }
1473 })
1474 } else if let Some(project_id) = self.remote_id() {
1475 let client = self.client.clone();
1476 cx.spawn(|_, mut cx| async move {
1477 let response = client
1478 .request(proto::ApplyCompletionAdditionalEdits {
1479 project_id,
1480 buffer_id,
1481 completion: Some(language::proto::serialize_completion(&completion)),
1482 })
1483 .await?;
1484
1485 if let Some(transaction) = response.transaction {
1486 let transaction = language::proto::deserialize_transaction(transaction)?;
1487 buffer_handle
1488 .update(&mut cx, |buffer, _| {
1489 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1490 })
1491 .await;
1492 if push_to_history {
1493 buffer_handle.update(&mut cx, |buffer, _| {
1494 buffer.push_transaction(transaction.clone(), Instant::now());
1495 });
1496 }
1497 Ok(Some(transaction))
1498 } else {
1499 Ok(None)
1500 }
1501 })
1502 } else {
1503 Task::ready(Err(anyhow!("project does not have a remote id")))
1504 }
1505 }
1506
1507 pub fn code_actions<T: ToOffset>(
1508 &self,
1509 buffer_handle: &ModelHandle<Buffer>,
1510 range: Range<T>,
1511 cx: &mut ModelContext<Self>,
1512 ) -> Task<Result<Vec<CodeAction>>> {
1513 let buffer_handle = buffer_handle.clone();
1514 let buffer = buffer_handle.read(cx);
1515 let buffer_id = buffer.remote_id();
1516 let worktree;
1517 let buffer_abs_path;
1518 if let Some(file) = File::from_dyn(buffer.file()) {
1519 worktree = file.worktree.clone();
1520 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1521 } else {
1522 return Task::ready(Ok(Default::default()));
1523 };
1524 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1525
1526 if worktree.read(cx).as_local().is_some() {
1527 let buffer_abs_path = buffer_abs_path.unwrap();
1528 let lang_name;
1529 let lang_server;
1530 if let Some(lang) = buffer.language() {
1531 lang_name = lang.name().to_string();
1532 if let Some(server) = self
1533 .language_servers
1534 .get(&(worktree.read(cx).id(), lang_name.clone()))
1535 {
1536 lang_server = server.clone();
1537 } else {
1538 return Task::ready(Ok(Default::default()));
1539 };
1540 } else {
1541 return Task::ready(Ok(Default::default()));
1542 }
1543
1544 let lsp_range = lsp::Range::new(
1545 range.start.to_point_utf16(buffer).to_lsp_position(),
1546 range.end.to_point_utf16(buffer).to_lsp_position(),
1547 );
1548 cx.foreground().spawn(async move {
1549 Ok(lang_server
1550 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1551 text_document: lsp::TextDocumentIdentifier::new(
1552 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1553 ),
1554 range: lsp_range,
1555 work_done_progress_params: Default::default(),
1556 partial_result_params: Default::default(),
1557 context: lsp::CodeActionContext {
1558 diagnostics: Default::default(),
1559 only: Some(vec![
1560 lsp::CodeActionKind::QUICKFIX,
1561 lsp::CodeActionKind::REFACTOR,
1562 lsp::CodeActionKind::REFACTOR_EXTRACT,
1563 ]),
1564 },
1565 })
1566 .await?
1567 .unwrap_or_default()
1568 .into_iter()
1569 .filter_map(|entry| {
1570 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1571 Some(CodeAction {
1572 range: range.clone(),
1573 lsp_action,
1574 })
1575 } else {
1576 None
1577 }
1578 })
1579 .collect())
1580 })
1581 } else if let Some(project_id) = self.remote_id() {
1582 let rpc = self.client.clone();
1583 cx.spawn_weak(|_, mut cx| async move {
1584 let response = rpc
1585 .request(proto::GetCodeActions {
1586 project_id,
1587 buffer_id,
1588 start: Some(language::proto::serialize_anchor(&range.start)),
1589 end: Some(language::proto::serialize_anchor(&range.end)),
1590 })
1591 .await?;
1592
1593 buffer_handle
1594 .update(&mut cx, |buffer, _| {
1595 buffer.wait_for_version(response.version.into())
1596 })
1597 .await;
1598
1599 response
1600 .actions
1601 .into_iter()
1602 .map(language::proto::deserialize_code_action)
1603 .collect()
1604 })
1605 } else {
1606 Task::ready(Ok(Default::default()))
1607 }
1608 }
1609
1610 pub fn apply_code_action(
1611 &self,
1612 buffer_handle: ModelHandle<Buffer>,
1613 mut action: CodeAction,
1614 push_to_history: bool,
1615 cx: &mut ModelContext<Self>,
1616 ) -> Task<Result<ProjectTransaction>> {
1617 if self.is_local() {
1618 let buffer = buffer_handle.read(cx);
1619 let lang_name = if let Some(lang) = buffer.language() {
1620 lang.name().to_string()
1621 } else {
1622 return Task::ready(Ok(Default::default()));
1623 };
1624 let lang_server = if let Some(language_server) = buffer.language_server() {
1625 language_server.clone()
1626 } else {
1627 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1628 };
1629 let range = action.range.to_point_utf16(buffer);
1630
1631 cx.spawn(|this, mut cx| async move {
1632 if let Some(lsp_range) = action
1633 .lsp_action
1634 .data
1635 .as_mut()
1636 .and_then(|d| d.get_mut("codeActionParams"))
1637 .and_then(|d| d.get_mut("range"))
1638 {
1639 *lsp_range = serde_json::to_value(&lsp::Range::new(
1640 range.start.to_lsp_position(),
1641 range.end.to_lsp_position(),
1642 ))
1643 .unwrap();
1644 action.lsp_action = lang_server
1645 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1646 .await?;
1647 } else {
1648 let actions = this
1649 .update(&mut cx, |this, cx| {
1650 this.code_actions(&buffer_handle, action.range, cx)
1651 })
1652 .await?;
1653 action.lsp_action = actions
1654 .into_iter()
1655 .find(|a| a.lsp_action.title == action.lsp_action.title)
1656 .ok_or_else(|| anyhow!("code action is outdated"))?
1657 .lsp_action;
1658 }
1659
1660 if let Some(edit) = action.lsp_action.edit {
1661 Self::deserialize_workspace_edit(
1662 this,
1663 edit,
1664 push_to_history,
1665 lang_name,
1666 lang_server,
1667 &mut cx,
1668 )
1669 .await
1670 } else {
1671 Ok(ProjectTransaction::default())
1672 }
1673 })
1674 } else if let Some(project_id) = self.remote_id() {
1675 let client = self.client.clone();
1676 let request = proto::ApplyCodeAction {
1677 project_id,
1678 buffer_id: buffer_handle.read(cx).remote_id(),
1679 action: Some(language::proto::serialize_code_action(&action)),
1680 };
1681 cx.spawn(|this, mut cx| async move {
1682 let response = client
1683 .request(request)
1684 .await?
1685 .transaction
1686 .ok_or_else(|| anyhow!("missing transaction"))?;
1687 this.update(&mut cx, |this, cx| {
1688 this.deserialize_project_transaction(response, push_to_history, cx)
1689 })
1690 .await
1691 })
1692 } else {
1693 Task::ready(Err(anyhow!("project does not have a remote id")))
1694 }
1695 }
1696
1697 async fn deserialize_workspace_edit(
1698 this: ModelHandle<Self>,
1699 edit: lsp::WorkspaceEdit,
1700 push_to_history: bool,
1701 language_name: String,
1702 language_server: Arc<LanguageServer>,
1703 cx: &mut AsyncAppContext,
1704 ) -> Result<ProjectTransaction> {
1705 let fs = this.read_with(cx, |this, _| this.fs.clone());
1706 let mut operations = Vec::new();
1707 if let Some(document_changes) = edit.document_changes {
1708 match document_changes {
1709 lsp::DocumentChanges::Edits(edits) => {
1710 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
1711 }
1712 lsp::DocumentChanges::Operations(ops) => operations = ops,
1713 }
1714 } else if let Some(changes) = edit.changes {
1715 operations.extend(changes.into_iter().map(|(uri, edits)| {
1716 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1717 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1718 uri,
1719 version: None,
1720 },
1721 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1722 })
1723 }));
1724 }
1725
1726 let mut project_transaction = ProjectTransaction::default();
1727 for operation in operations {
1728 match operation {
1729 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1730 let abs_path = op
1731 .uri
1732 .to_file_path()
1733 .map_err(|_| anyhow!("can't convert URI to path"))?;
1734
1735 if let Some(parent_path) = abs_path.parent() {
1736 fs.create_dir(parent_path).await?;
1737 }
1738 if abs_path.ends_with("/") {
1739 fs.create_dir(&abs_path).await?;
1740 } else {
1741 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
1742 .await?;
1743 }
1744 }
1745 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1746 let source_abs_path = op
1747 .old_uri
1748 .to_file_path()
1749 .map_err(|_| anyhow!("can't convert URI to path"))?;
1750 let target_abs_path = op
1751 .new_uri
1752 .to_file_path()
1753 .map_err(|_| anyhow!("can't convert URI to path"))?;
1754 fs.rename(
1755 &source_abs_path,
1756 &target_abs_path,
1757 op.options.map(Into::into).unwrap_or_default(),
1758 )
1759 .await?;
1760 }
1761 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1762 let abs_path = op
1763 .uri
1764 .to_file_path()
1765 .map_err(|_| anyhow!("can't convert URI to path"))?;
1766 let options = op.options.map(Into::into).unwrap_or_default();
1767 if abs_path.ends_with("/") {
1768 fs.remove_dir(&abs_path, options).await?;
1769 } else {
1770 fs.remove_file(&abs_path, options).await?;
1771 }
1772 }
1773 lsp::DocumentChangeOperation::Edit(op) => {
1774 let buffer_to_edit = this
1775 .update(cx, |this, cx| {
1776 this.open_local_buffer_from_lsp_path(
1777 op.text_document.uri,
1778 language_name.clone(),
1779 language_server.clone(),
1780 cx,
1781 )
1782 })
1783 .await?;
1784
1785 let edits = buffer_to_edit
1786 .update(cx, |buffer, cx| {
1787 let edits = op.edits.into_iter().map(|edit| match edit {
1788 lsp::OneOf::Left(edit) => edit,
1789 lsp::OneOf::Right(edit) => edit.text_edit,
1790 });
1791 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1792 })
1793 .await?;
1794
1795 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
1796 buffer.finalize_last_transaction();
1797 buffer.start_transaction();
1798 for (range, text) in edits {
1799 buffer.edit([range], text, cx);
1800 }
1801 let transaction = if buffer.end_transaction(cx).is_some() {
1802 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1803 if !push_to_history {
1804 buffer.forget_transaction(transaction.id);
1805 }
1806 Some(transaction)
1807 } else {
1808 None
1809 };
1810
1811 transaction
1812 });
1813 if let Some(transaction) = transaction {
1814 project_transaction.0.insert(buffer_to_edit, transaction);
1815 }
1816 }
1817 }
1818 }
1819
1820 Ok(project_transaction)
1821 }
1822
1823 pub fn prepare_rename<T: ToPointUtf16>(
1824 &self,
1825 buffer: ModelHandle<Buffer>,
1826 position: T,
1827 cx: &mut ModelContext<Self>,
1828 ) -> Task<Result<Option<Range<Anchor>>>> {
1829 let position = position.to_point_utf16(buffer.read(cx));
1830 self.request_lsp(buffer.clone(), PrepareRename { buffer, position }, cx)
1831 }
1832
1833 pub fn perform_rename<T: ToPointUtf16>(
1834 &self,
1835 buffer: ModelHandle<Buffer>,
1836 position: T,
1837 new_name: String,
1838 cx: &mut ModelContext<Self>,
1839 ) -> Task<Result<ProjectTransaction>> {
1840 let position = position.to_point_utf16(buffer.read(cx));
1841 self.request_lsp(
1842 buffer.clone(),
1843 PerformRename {
1844 buffer,
1845 position,
1846 new_name,
1847 },
1848 cx,
1849 )
1850 }
1851
1852 fn request_lsp<R: LspCommand>(
1853 &self,
1854 buffer_handle: ModelHandle<Buffer>,
1855 request: R,
1856 cx: &mut ModelContext<Self>,
1857 ) -> Task<Result<R::Response>>
1858 where
1859 <R::LspRequest as lsp::request::Request>::Result: Send,
1860 {
1861 let buffer = buffer_handle.read(cx);
1862 if self.is_local() {
1863 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
1864 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
1865 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
1866 return cx.spawn(|this, cx| async move {
1867 let response = language_server
1868 .request::<R::LspRequest>(lsp_params)
1869 .await
1870 .context("lsp request failed")?;
1871 request.response_from_lsp(response, this, cx).await
1872 });
1873 }
1874 } else if let Some(project_id) = self.remote_id() {
1875 let rpc = self.client.clone();
1876 let message = request.to_proto(project_id, cx);
1877 return cx.spawn(|this, cx| async move {
1878 let response = rpc.request(message).await?;
1879 request.response_from_proto(response, this, cx).await
1880 });
1881 }
1882 Task::ready(Ok(Default::default()))
1883 }
1884
1885 pub fn find_or_create_local_worktree(
1886 &self,
1887 abs_path: impl AsRef<Path>,
1888 weak: bool,
1889 cx: &mut ModelContext<Self>,
1890 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1891 let abs_path = abs_path.as_ref();
1892 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1893 Task::ready(Ok((tree.clone(), relative_path.into())))
1894 } else {
1895 let worktree = self.create_local_worktree(abs_path, weak, cx);
1896 cx.foreground()
1897 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1898 }
1899 }
1900
1901 fn find_local_worktree(
1902 &self,
1903 abs_path: &Path,
1904 cx: &AppContext,
1905 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1906 for tree in self.worktrees(cx) {
1907 if let Some(relative_path) = tree
1908 .read(cx)
1909 .as_local()
1910 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1911 {
1912 return Some((tree.clone(), relative_path.into()));
1913 }
1914 }
1915 None
1916 }
1917
1918 pub fn is_shared(&self) -> bool {
1919 match &self.client_state {
1920 ProjectClientState::Local { is_shared, .. } => *is_shared,
1921 ProjectClientState::Remote { .. } => false,
1922 }
1923 }
1924
1925 fn create_local_worktree(
1926 &self,
1927 abs_path: impl AsRef<Path>,
1928 weak: bool,
1929 cx: &mut ModelContext<Self>,
1930 ) -> Task<Result<ModelHandle<Worktree>>> {
1931 let fs = self.fs.clone();
1932 let client = self.client.clone();
1933 let path = Arc::from(abs_path.as_ref());
1934 cx.spawn(|project, mut cx| async move {
1935 let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;
1936
1937 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
1938 project.add_worktree(&worktree, cx);
1939 (project.remote_id(), project.is_shared())
1940 });
1941
1942 if let Some(project_id) = remote_project_id {
1943 worktree
1944 .update(&mut cx, |worktree, cx| {
1945 worktree.as_local_mut().unwrap().register(project_id, cx)
1946 })
1947 .await?;
1948 if is_shared {
1949 worktree
1950 .update(&mut cx, |worktree, cx| {
1951 worktree.as_local_mut().unwrap().share(project_id, cx)
1952 })
1953 .await?;
1954 }
1955 }
1956
1957 Ok(worktree)
1958 })
1959 }
1960
1961 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1962 self.worktrees.retain(|worktree| {
1963 worktree
1964 .upgrade(cx)
1965 .map_or(false, |w| w.read(cx).id() != id)
1966 });
1967 cx.notify();
1968 }
1969
1970 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
1971 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
1972 if worktree.read(cx).is_local() {
1973 cx.subscribe(&worktree, |this, worktree, _, cx| {
1974 this.update_local_worktree_buffers(worktree, cx);
1975 })
1976 .detach();
1977 }
1978
1979 let push_weak_handle = {
1980 let worktree = worktree.read(cx);
1981 worktree.is_local() && worktree.is_weak()
1982 };
1983 if push_weak_handle {
1984 cx.observe_release(&worktree, |this, cx| {
1985 this.worktrees
1986 .retain(|worktree| worktree.upgrade(cx).is_some());
1987 cx.notify();
1988 })
1989 .detach();
1990 self.worktrees
1991 .push(WorktreeHandle::Weak(worktree.downgrade()));
1992 } else {
1993 self.worktrees
1994 .push(WorktreeHandle::Strong(worktree.clone()));
1995 }
1996 cx.notify();
1997 }
1998
1999 fn update_local_worktree_buffers(
2000 &mut self,
2001 worktree_handle: ModelHandle<Worktree>,
2002 cx: &mut ModelContext<Self>,
2003 ) {
2004 let snapshot = worktree_handle.read(cx).snapshot();
2005 let mut buffers_to_delete = Vec::new();
2006 for (buffer_id, buffer) in &self.open_buffers {
2007 if let Some(buffer) = buffer.upgrade(cx) {
2008 buffer.update(cx, |buffer, cx| {
2009 if let Some(old_file) = File::from_dyn(buffer.file()) {
2010 if old_file.worktree != worktree_handle {
2011 return;
2012 }
2013
2014 let new_file = if let Some(entry) = old_file
2015 .entry_id
2016 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
2017 {
2018 File {
2019 is_local: true,
2020 entry_id: Some(entry.id),
2021 mtime: entry.mtime,
2022 path: entry.path.clone(),
2023 worktree: worktree_handle.clone(),
2024 }
2025 } else if let Some(entry) =
2026 snapshot.entry_for_path(old_file.path().as_ref())
2027 {
2028 File {
2029 is_local: true,
2030 entry_id: Some(entry.id),
2031 mtime: entry.mtime,
2032 path: entry.path.clone(),
2033 worktree: worktree_handle.clone(),
2034 }
2035 } else {
2036 File {
2037 is_local: true,
2038 entry_id: None,
2039 path: old_file.path().clone(),
2040 mtime: old_file.mtime(),
2041 worktree: worktree_handle.clone(),
2042 }
2043 };
2044
2045 if let Some(project_id) = self.remote_id() {
2046 self.client
2047 .send(proto::UpdateBufferFile {
2048 project_id,
2049 buffer_id: *buffer_id as u64,
2050 file: Some(new_file.to_proto()),
2051 })
2052 .log_err();
2053 }
2054 buffer.file_updated(Box::new(new_file), cx).detach();
2055 }
2056 });
2057 } else {
2058 buffers_to_delete.push(*buffer_id);
2059 }
2060 }
2061
2062 for buffer_id in buffers_to_delete {
2063 self.open_buffers.remove(&buffer_id);
2064 }
2065 }
2066
2067 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2068 let new_active_entry = entry.and_then(|project_path| {
2069 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2070 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2071 Some(ProjectEntry {
2072 worktree_id: project_path.worktree_id,
2073 entry_id: entry.id,
2074 })
2075 });
2076 if new_active_entry != self.active_entry {
2077 self.active_entry = new_active_entry;
2078 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2079 }
2080 }
2081
2082 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2083 self.language_servers_with_diagnostics_running > 0
2084 }
2085
2086 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2087 let mut summary = DiagnosticSummary::default();
2088 for (_, path_summary) in self.diagnostic_summaries(cx) {
2089 summary.error_count += path_summary.error_count;
2090 summary.warning_count += path_summary.warning_count;
2091 summary.info_count += path_summary.info_count;
2092 summary.hint_count += path_summary.hint_count;
2093 }
2094 summary
2095 }
2096
2097 pub fn diagnostic_summaries<'a>(
2098 &'a self,
2099 cx: &'a AppContext,
2100 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2101 self.worktrees(cx).flat_map(move |worktree| {
2102 let worktree = worktree.read(cx);
2103 let worktree_id = worktree.id();
2104 worktree
2105 .diagnostic_summaries()
2106 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2107 })
2108 }
2109
2110 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2111 self.language_servers_with_diagnostics_running += 1;
2112 if self.language_servers_with_diagnostics_running == 1 {
2113 cx.emit(Event::DiskBasedDiagnosticsStarted);
2114 }
2115 }
2116
2117 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2118 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2119 self.language_servers_with_diagnostics_running -= 1;
2120 if self.language_servers_with_diagnostics_running == 0 {
2121 cx.emit(Event::DiskBasedDiagnosticsFinished);
2122 }
2123 }
2124
2125 pub fn active_entry(&self) -> Option<ProjectEntry> {
2126 self.active_entry
2127 }
2128
2129 // RPC message handlers
2130
2131 async fn handle_unshare_project(
2132 this: ModelHandle<Self>,
2133 _: TypedEnvelope<proto::UnshareProject>,
2134 _: Arc<Client>,
2135 mut cx: AsyncAppContext,
2136 ) -> Result<()> {
2137 this.update(&mut cx, |this, cx| {
2138 if let ProjectClientState::Remote {
2139 sharing_has_stopped,
2140 ..
2141 } = &mut this.client_state
2142 {
2143 *sharing_has_stopped = true;
2144 this.collaborators.clear();
2145 cx.notify();
2146 } else {
2147 unreachable!()
2148 }
2149 });
2150
2151 Ok(())
2152 }
2153
2154 async fn handle_add_collaborator(
2155 this: ModelHandle<Self>,
2156 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2157 _: Arc<Client>,
2158 mut cx: AsyncAppContext,
2159 ) -> Result<()> {
2160 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2161 let collaborator = envelope
2162 .payload
2163 .collaborator
2164 .take()
2165 .ok_or_else(|| anyhow!("empty collaborator"))?;
2166
2167 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2168 this.update(&mut cx, |this, cx| {
2169 this.collaborators
2170 .insert(collaborator.peer_id, collaborator);
2171 cx.notify();
2172 });
2173
2174 Ok(())
2175 }
2176
2177 async fn handle_remove_collaborator(
2178 this: ModelHandle<Self>,
2179 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2180 _: Arc<Client>,
2181 mut cx: AsyncAppContext,
2182 ) -> Result<()> {
2183 this.update(&mut cx, |this, cx| {
2184 let peer_id = PeerId(envelope.payload.peer_id);
2185 let replica_id = this
2186 .collaborators
2187 .remove(&peer_id)
2188 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2189 .replica_id;
2190 this.shared_buffers.remove(&peer_id);
2191 for (_, buffer) in &this.open_buffers {
2192 if let Some(buffer) = buffer.upgrade(cx) {
2193 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2194 }
2195 }
2196 cx.notify();
2197 Ok(())
2198 })
2199 }
2200
2201 async fn handle_share_worktree(
2202 this: ModelHandle<Self>,
2203 envelope: TypedEnvelope<proto::ShareWorktree>,
2204 client: Arc<Client>,
2205 mut cx: AsyncAppContext,
2206 ) -> Result<()> {
2207 this.update(&mut cx, |this, cx| {
2208 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2209 let replica_id = this.replica_id();
2210 let worktree = envelope
2211 .payload
2212 .worktree
2213 .ok_or_else(|| anyhow!("invalid worktree"))?;
2214 let (worktree, load_task) =
2215 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2216 this.add_worktree(&worktree, cx);
2217 load_task.detach();
2218 Ok(())
2219 })
2220 }
2221
2222 async fn handle_unregister_worktree(
2223 this: ModelHandle<Self>,
2224 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2225 _: Arc<Client>,
2226 mut cx: AsyncAppContext,
2227 ) -> Result<()> {
2228 this.update(&mut cx, |this, cx| {
2229 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2230 this.remove_worktree(worktree_id, cx);
2231 Ok(())
2232 })
2233 }
2234
2235 async fn handle_update_worktree(
2236 this: ModelHandle<Self>,
2237 envelope: TypedEnvelope<proto::UpdateWorktree>,
2238 _: Arc<Client>,
2239 mut cx: AsyncAppContext,
2240 ) -> Result<()> {
2241 this.update(&mut cx, |this, cx| {
2242 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2243 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2244 worktree.update(cx, |worktree, _| {
2245 let worktree = worktree.as_remote_mut().unwrap();
2246 worktree.update_from_remote(envelope)
2247 })?;
2248 }
2249 Ok(())
2250 })
2251 }
2252
2253 async fn handle_update_diagnostic_summary(
2254 this: ModelHandle<Self>,
2255 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2256 _: Arc<Client>,
2257 mut cx: AsyncAppContext,
2258 ) -> Result<()> {
2259 this.update(&mut cx, |this, cx| {
2260 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2261 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2262 if let Some(summary) = envelope.payload.summary {
2263 let project_path = ProjectPath {
2264 worktree_id,
2265 path: Path::new(&summary.path).into(),
2266 };
2267 worktree.update(cx, |worktree, _| {
2268 worktree
2269 .as_remote_mut()
2270 .unwrap()
2271 .update_diagnostic_summary(project_path.path.clone(), &summary);
2272 });
2273 cx.emit(Event::DiagnosticsUpdated(project_path));
2274 }
2275 }
2276 Ok(())
2277 })
2278 }
2279
2280 async fn handle_disk_based_diagnostics_updating(
2281 this: ModelHandle<Self>,
2282 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2283 _: Arc<Client>,
2284 mut cx: AsyncAppContext,
2285 ) -> Result<()> {
2286 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2287 Ok(())
2288 }
2289
2290 async fn handle_disk_based_diagnostics_updated(
2291 this: ModelHandle<Self>,
2292 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2293 _: Arc<Client>,
2294 mut cx: AsyncAppContext,
2295 ) -> Result<()> {
2296 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2297 Ok(())
2298 }
2299
2300 async fn handle_update_buffer(
2301 this: ModelHandle<Self>,
2302 envelope: TypedEnvelope<proto::UpdateBuffer>,
2303 _: Arc<Client>,
2304 mut cx: AsyncAppContext,
2305 ) -> Result<()> {
2306 this.update(&mut cx, |this, cx| {
2307 let payload = envelope.payload.clone();
2308 let buffer_id = payload.buffer_id;
2309 let ops = payload
2310 .operations
2311 .into_iter()
2312 .map(|op| language::proto::deserialize_operation(op))
2313 .collect::<Result<Vec<_>, _>>()?;
2314 let is_remote = this.is_remote();
2315 match this.open_buffers.entry(buffer_id) {
2316 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2317 OpenBuffer::Loaded(buffer) => {
2318 if let Some(buffer) = buffer.upgrade(cx) {
2319 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2320 }
2321 }
2322 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2323 },
2324 hash_map::Entry::Vacant(e) => {
2325 if is_remote && this.loading_buffers.len() > 0 {
2326 e.insert(OpenBuffer::Loading(ops));
2327 }
2328 }
2329 }
2330 Ok(())
2331 })
2332 }
2333
2334 async fn handle_update_buffer_file(
2335 this: ModelHandle<Self>,
2336 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2337 _: Arc<Client>,
2338 mut cx: AsyncAppContext,
2339 ) -> Result<()> {
2340 this.update(&mut cx, |this, cx| {
2341 let payload = envelope.payload.clone();
2342 let buffer_id = payload.buffer_id;
2343 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2344 let worktree = this
2345 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2346 .ok_or_else(|| anyhow!("no such worktree"))?;
2347 let file = File::from_proto(file, worktree.clone(), cx)?;
2348 let buffer = this
2349 .open_buffers
2350 .get_mut(&buffer_id)
2351 .and_then(|b| b.upgrade(cx))
2352 .ok_or_else(|| anyhow!("no such buffer"))?;
2353 buffer.update(cx, |buffer, cx| {
2354 buffer.file_updated(Box::new(file), cx).detach();
2355 });
2356 Ok(())
2357 })
2358 }
2359
2360 async fn handle_save_buffer(
2361 this: ModelHandle<Self>,
2362 envelope: TypedEnvelope<proto::SaveBuffer>,
2363 _: Arc<Client>,
2364 mut cx: AsyncAppContext,
2365 ) -> Result<proto::BufferSaved> {
2366 let buffer_id = envelope.payload.buffer_id;
2367 let sender_id = envelope.original_sender_id()?;
2368 let requested_version = envelope.payload.version.try_into()?;
2369
2370 let (project_id, buffer) = this.update(&mut cx, |this, _| {
2371 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2372 let buffer = this
2373 .shared_buffers
2374 .get(&sender_id)
2375 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2376 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2377 Ok::<_, anyhow::Error>((project_id, buffer))
2378 })?;
2379
2380 if !buffer
2381 .read_with(&cx, |buffer, _| buffer.version())
2382 .observed_all(&requested_version)
2383 {
2384 Err(anyhow!("save request depends on unreceived edits"))?;
2385 }
2386
2387 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2388 Ok(proto::BufferSaved {
2389 project_id,
2390 buffer_id,
2391 version: (&saved_version).into(),
2392 mtime: Some(mtime.into()),
2393 })
2394 }
2395
2396 async fn handle_format_buffers(
2397 this: ModelHandle<Self>,
2398 envelope: TypedEnvelope<proto::FormatBuffers>,
2399 _: Arc<Client>,
2400 mut cx: AsyncAppContext,
2401 ) -> Result<proto::FormatBuffersResponse> {
2402 let sender_id = envelope.original_sender_id()?;
2403 let format = this.update(&mut cx, |this, cx| {
2404 let shared_buffers = this
2405 .shared_buffers
2406 .get(&sender_id)
2407 .ok_or_else(|| anyhow!("peer has no buffers"))?;
2408 let mut buffers = HashSet::default();
2409 for buffer_id in &envelope.payload.buffer_ids {
2410 buffers.insert(
2411 shared_buffers
2412 .get(buffer_id)
2413 .cloned()
2414 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2415 );
2416 }
2417 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2418 })?;
2419
2420 let project_transaction = format.await?;
2421 let project_transaction = this.update(&mut cx, |this, cx| {
2422 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2423 });
2424 Ok(proto::FormatBuffersResponse {
2425 transaction: Some(project_transaction),
2426 })
2427 }
2428
2429 async fn handle_get_completions(
2430 this: ModelHandle<Self>,
2431 envelope: TypedEnvelope<proto::GetCompletions>,
2432 _: Arc<Client>,
2433 mut cx: AsyncAppContext,
2434 ) -> Result<proto::GetCompletionsResponse> {
2435 let sender_id = envelope.original_sender_id()?;
2436 let position = envelope
2437 .payload
2438 .position
2439 .and_then(language::proto::deserialize_anchor)
2440 .ok_or_else(|| anyhow!("invalid position"))?;
2441 let version = clock::Global::from(envelope.payload.version);
2442 let buffer = this.read_with(&cx, |this, _| {
2443 this.shared_buffers
2444 .get(&sender_id)
2445 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2446 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2447 })?;
2448 if !buffer
2449 .read_with(&cx, |buffer, _| buffer.version())
2450 .observed_all(&version)
2451 {
2452 Err(anyhow!("completion request depends on unreceived edits"))?;
2453 }
2454 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2455 let completions = this
2456 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2457 .await?;
2458
2459 Ok(proto::GetCompletionsResponse {
2460 completions: completions
2461 .iter()
2462 .map(language::proto::serialize_completion)
2463 .collect(),
2464 version: (&version).into(),
2465 })
2466 }
2467
2468 async fn handle_apply_additional_edits_for_completion(
2469 this: ModelHandle<Self>,
2470 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2471 _: Arc<Client>,
2472 mut cx: AsyncAppContext,
2473 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2474 let sender_id = envelope.original_sender_id()?;
2475 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2476 let buffer = this
2477 .shared_buffers
2478 .get(&sender_id)
2479 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2480 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2481 let language = buffer.read(cx).language();
2482 let completion = language::proto::deserialize_completion(
2483 envelope
2484 .payload
2485 .completion
2486 .ok_or_else(|| anyhow!("invalid completion"))?,
2487 language,
2488 )?;
2489 Ok::<_, anyhow::Error>(
2490 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2491 )
2492 })?;
2493
2494 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2495 transaction: apply_additional_edits
2496 .await?
2497 .as_ref()
2498 .map(language::proto::serialize_transaction),
2499 })
2500 }
2501
2502 async fn handle_get_code_actions(
2503 this: ModelHandle<Self>,
2504 envelope: TypedEnvelope<proto::GetCodeActions>,
2505 _: Arc<Client>,
2506 mut cx: AsyncAppContext,
2507 ) -> Result<proto::GetCodeActionsResponse> {
2508 let sender_id = envelope.original_sender_id()?;
2509 let start = envelope
2510 .payload
2511 .start
2512 .and_then(language::proto::deserialize_anchor)
2513 .ok_or_else(|| anyhow!("invalid start"))?;
2514 let end = envelope
2515 .payload
2516 .end
2517 .and_then(language::proto::deserialize_anchor)
2518 .ok_or_else(|| anyhow!("invalid end"))?;
2519 let buffer = this.update(&mut cx, |this, _| {
2520 this.shared_buffers
2521 .get(&sender_id)
2522 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2523 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2524 })?;
2525 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2526 if !version.observed(start.timestamp) || !version.observed(end.timestamp) {
2527 Err(anyhow!("code action request references unreceived edits"))?;
2528 }
2529 let code_actions = this.update(&mut cx, |this, cx| {
2530 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2531 })?;
2532
2533 Ok(proto::GetCodeActionsResponse {
2534 actions: code_actions
2535 .await?
2536 .iter()
2537 .map(language::proto::serialize_code_action)
2538 .collect(),
2539 version: (&version).into(),
2540 })
2541 }
2542
2543 async fn handle_apply_code_action(
2544 this: ModelHandle<Self>,
2545 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2546 _: Arc<Client>,
2547 mut cx: AsyncAppContext,
2548 ) -> Result<proto::ApplyCodeActionResponse> {
2549 let sender_id = envelope.original_sender_id()?;
2550 let action = language::proto::deserialize_code_action(
2551 envelope
2552 .payload
2553 .action
2554 .ok_or_else(|| anyhow!("invalid action"))?,
2555 )?;
2556 let apply_code_action = this.update(&mut cx, |this, cx| {
2557 let buffer = this
2558 .shared_buffers
2559 .get(&sender_id)
2560 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2561 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2562 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2563 })?;
2564
2565 let project_transaction = apply_code_action.await?;
2566 let project_transaction = this.update(&mut cx, |this, cx| {
2567 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2568 });
2569 Ok(proto::ApplyCodeActionResponse {
2570 transaction: Some(project_transaction),
2571 })
2572 }
2573
2574 async fn handle_get_definition(
2575 this: ModelHandle<Self>,
2576 envelope: TypedEnvelope<proto::GetDefinition>,
2577 _: Arc<Client>,
2578 mut cx: AsyncAppContext,
2579 ) -> Result<proto::GetDefinitionResponse> {
2580 let sender_id = envelope.original_sender_id()?;
2581 let position = envelope
2582 .payload
2583 .position
2584 .and_then(deserialize_anchor)
2585 .ok_or_else(|| anyhow!("invalid position"))?;
2586 let definitions = this.update(&mut cx, |this, cx| {
2587 let source_buffer = this
2588 .shared_buffers
2589 .get(&sender_id)
2590 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2591 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2592 if source_buffer.read(cx).can_resolve(&position) {
2593 Ok(this.definition(&source_buffer, position, cx))
2594 } else {
2595 Err(anyhow!("cannot resolve position"))
2596 }
2597 })?;
2598
2599 let definitions = definitions.await?;
2600
2601 this.update(&mut cx, |this, cx| {
2602 let mut response = proto::GetDefinitionResponse {
2603 definitions: Default::default(),
2604 };
2605 for definition in definitions {
2606 let buffer =
2607 this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx);
2608 response.definitions.push(proto::Definition {
2609 target_start: Some(serialize_anchor(&definition.target_range.start)),
2610 target_end: Some(serialize_anchor(&definition.target_range.end)),
2611 buffer: Some(buffer),
2612 });
2613 }
2614 Ok(response)
2615 })
2616 }
2617
2618 async fn handle_open_buffer(
2619 this: ModelHandle<Self>,
2620 envelope: TypedEnvelope<proto::OpenBuffer>,
2621 _: Arc<Client>,
2622 mut cx: AsyncAppContext,
2623 ) -> anyhow::Result<proto::OpenBufferResponse> {
2624 let peer_id = envelope.original_sender_id()?;
2625 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2626 let open_buffer = this.update(&mut cx, |this, cx| {
2627 this.open_buffer(
2628 ProjectPath {
2629 worktree_id,
2630 path: PathBuf::from(envelope.payload.path).into(),
2631 },
2632 cx,
2633 )
2634 });
2635
2636 let buffer = open_buffer.await?;
2637 this.update(&mut cx, |this, cx| {
2638 Ok(proto::OpenBufferResponse {
2639 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
2640 })
2641 })
2642 }
2643
2644 fn serialize_project_transaction_for_peer(
2645 &mut self,
2646 project_transaction: ProjectTransaction,
2647 peer_id: PeerId,
2648 cx: &AppContext,
2649 ) -> proto::ProjectTransaction {
2650 let mut serialized_transaction = proto::ProjectTransaction {
2651 buffers: Default::default(),
2652 transactions: Default::default(),
2653 };
2654 for (buffer, transaction) in project_transaction.0 {
2655 serialized_transaction
2656 .buffers
2657 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
2658 serialized_transaction
2659 .transactions
2660 .push(language::proto::serialize_transaction(&transaction));
2661 }
2662 serialized_transaction
2663 }
2664
2665 fn deserialize_project_transaction(
2666 &mut self,
2667 message: proto::ProjectTransaction,
2668 push_to_history: bool,
2669 cx: &mut ModelContext<Self>,
2670 ) -> Task<Result<ProjectTransaction>> {
2671 cx.spawn(|this, mut cx| async move {
2672 let mut project_transaction = ProjectTransaction::default();
2673 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
2674 let buffer = this
2675 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2676 .await?;
2677 let transaction = language::proto::deserialize_transaction(transaction)?;
2678 project_transaction.0.insert(buffer, transaction);
2679 }
2680 for (buffer, transaction) in &project_transaction.0 {
2681 buffer
2682 .update(&mut cx, |buffer, _| {
2683 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2684 })
2685 .await;
2686
2687 if push_to_history {
2688 buffer.update(&mut cx, |buffer, _| {
2689 buffer.push_transaction(transaction.clone(), Instant::now());
2690 });
2691 }
2692 }
2693
2694 Ok(project_transaction)
2695 })
2696 }
2697
2698 fn serialize_buffer_for_peer(
2699 &mut self,
2700 buffer: &ModelHandle<Buffer>,
2701 peer_id: PeerId,
2702 cx: &AppContext,
2703 ) -> proto::Buffer {
2704 let buffer_id = buffer.read(cx).remote_id();
2705 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2706 match shared_buffers.entry(buffer_id) {
2707 hash_map::Entry::Occupied(_) => proto::Buffer {
2708 variant: Some(proto::buffer::Variant::Id(buffer_id)),
2709 },
2710 hash_map::Entry::Vacant(entry) => {
2711 entry.insert(buffer.clone());
2712 proto::Buffer {
2713 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
2714 }
2715 }
2716 }
2717 }
2718
2719 fn deserialize_buffer(
2720 &mut self,
2721 buffer: proto::Buffer,
2722 cx: &mut ModelContext<Self>,
2723 ) -> Task<Result<ModelHandle<Buffer>>> {
2724 let replica_id = self.replica_id();
2725
2726 let mut opened_buffer_tx = self.opened_buffer.clone();
2727 let mut opened_buffer_rx = self.opened_buffer.subscribe();
2728 cx.spawn(|this, mut cx| async move {
2729 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
2730 proto::buffer::Variant::Id(id) => {
2731 let buffer = loop {
2732 let buffer = this.read_with(&cx, |this, cx| {
2733 this.open_buffers
2734 .get(&id)
2735 .and_then(|buffer| buffer.upgrade(cx))
2736 });
2737 if let Some(buffer) = buffer {
2738 break buffer;
2739 }
2740 opened_buffer_rx
2741 .recv()
2742 .await
2743 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
2744 };
2745 Ok(buffer)
2746 }
2747 proto::buffer::Variant::State(mut buffer) => {
2748 let mut buffer_worktree = None;
2749 let mut buffer_file = None;
2750 if let Some(file) = buffer.file.take() {
2751 this.read_with(&cx, |this, cx| {
2752 let worktree_id = WorktreeId::from_proto(file.worktree_id);
2753 let worktree =
2754 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
2755 anyhow!("no worktree found for id {}", file.worktree_id)
2756 })?;
2757 buffer_file =
2758 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
2759 as Box<dyn language::File>);
2760 buffer_worktree = Some(worktree);
2761 Ok::<_, anyhow::Error>(())
2762 })?;
2763 }
2764
2765 let buffer = cx.add_model(|cx| {
2766 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
2767 });
2768 this.update(&mut cx, |this, cx| {
2769 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
2770 })?;
2771
2772 let _ = opened_buffer_tx.send(()).await;
2773 Ok(buffer)
2774 }
2775 }
2776 })
2777 }
2778
2779 async fn handle_close_buffer(
2780 this: ModelHandle<Self>,
2781 envelope: TypedEnvelope<proto::CloseBuffer>,
2782 _: Arc<Client>,
2783 mut cx: AsyncAppContext,
2784 ) -> anyhow::Result<()> {
2785 this.update(&mut cx, |this, cx| {
2786 if let Some(shared_buffers) =
2787 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
2788 {
2789 shared_buffers.remove(&envelope.payload.buffer_id);
2790 cx.notify();
2791 }
2792 Ok(())
2793 })
2794 }
2795
2796 async fn handle_buffer_saved(
2797 this: ModelHandle<Self>,
2798 envelope: TypedEnvelope<proto::BufferSaved>,
2799 _: Arc<Client>,
2800 mut cx: AsyncAppContext,
2801 ) -> Result<()> {
2802 let version = envelope.payload.version.try_into()?;
2803 let mtime = envelope
2804 .payload
2805 .mtime
2806 .ok_or_else(|| anyhow!("missing mtime"))?
2807 .into();
2808
2809 this.update(&mut cx, |this, cx| {
2810 let buffer = this
2811 .open_buffers
2812 .get(&envelope.payload.buffer_id)
2813 .and_then(|buffer| buffer.upgrade(cx));
2814 if let Some(buffer) = buffer {
2815 buffer.update(cx, |buffer, cx| {
2816 buffer.did_save(version, mtime, None, cx);
2817 });
2818 }
2819 Ok(())
2820 })
2821 }
2822
2823 async fn handle_buffer_reloaded(
2824 this: ModelHandle<Self>,
2825 envelope: TypedEnvelope<proto::BufferReloaded>,
2826 _: Arc<Client>,
2827 mut cx: AsyncAppContext,
2828 ) -> Result<()> {
2829 let payload = envelope.payload.clone();
2830 let version = payload.version.try_into()?;
2831 let mtime = payload
2832 .mtime
2833 .ok_or_else(|| anyhow!("missing mtime"))?
2834 .into();
2835 this.update(&mut cx, |this, cx| {
2836 let buffer = this
2837 .open_buffers
2838 .get(&payload.buffer_id)
2839 .and_then(|buffer| buffer.upgrade(cx));
2840 if let Some(buffer) = buffer {
2841 buffer.update(cx, |buffer, cx| {
2842 buffer.did_reload(version, mtime, cx);
2843 });
2844 }
2845 Ok(())
2846 })
2847 }
2848
2849 pub fn match_paths<'a>(
2850 &self,
2851 query: &'a str,
2852 include_ignored: bool,
2853 smart_case: bool,
2854 max_results: usize,
2855 cancel_flag: &'a AtomicBool,
2856 cx: &AppContext,
2857 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
2858 let worktrees = self
2859 .worktrees(cx)
2860 .filter(|worktree| !worktree.read(cx).is_weak())
2861 .collect::<Vec<_>>();
2862 let include_root_name = worktrees.len() > 1;
2863 let candidate_sets = worktrees
2864 .into_iter()
2865 .map(|worktree| CandidateSet {
2866 snapshot: worktree.read(cx).snapshot(),
2867 include_ignored,
2868 include_root_name,
2869 })
2870 .collect::<Vec<_>>();
2871
2872 let background = cx.background().clone();
2873 async move {
2874 fuzzy::match_paths(
2875 candidate_sets.as_slice(),
2876 query,
2877 smart_case,
2878 max_results,
2879 cancel_flag,
2880 background,
2881 )
2882 .await
2883 }
2884 }
2885}
2886
2887impl WorktreeHandle {
2888 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2889 match self {
2890 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2891 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2892 }
2893 }
2894}
2895
2896impl OpenBuffer {
2897 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
2898 match self {
2899 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
2900 OpenBuffer::Loading(_) => None,
2901 }
2902 }
2903}
2904
2905struct CandidateSet {
2906 snapshot: Snapshot,
2907 include_ignored: bool,
2908 include_root_name: bool,
2909}
2910
2911impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
2912 type Candidates = CandidateSetIter<'a>;
2913
2914 fn id(&self) -> usize {
2915 self.snapshot.id().to_usize()
2916 }
2917
2918 fn len(&self) -> usize {
2919 if self.include_ignored {
2920 self.snapshot.file_count()
2921 } else {
2922 self.snapshot.visible_file_count()
2923 }
2924 }
2925
2926 fn prefix(&self) -> Arc<str> {
2927 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
2928 self.snapshot.root_name().into()
2929 } else if self.include_root_name {
2930 format!("{}/", self.snapshot.root_name()).into()
2931 } else {
2932 "".into()
2933 }
2934 }
2935
2936 fn candidates(&'a self, start: usize) -> Self::Candidates {
2937 CandidateSetIter {
2938 traversal: self.snapshot.files(self.include_ignored, start),
2939 }
2940 }
2941}
2942
2943struct CandidateSetIter<'a> {
2944 traversal: Traversal<'a>,
2945}
2946
2947impl<'a> Iterator for CandidateSetIter<'a> {
2948 type Item = PathMatchCandidate<'a>;
2949
2950 fn next(&mut self) -> Option<Self::Item> {
2951 self.traversal.next().map(|entry| {
2952 if let EntryKind::File(char_bag) = entry.kind {
2953 PathMatchCandidate {
2954 path: &entry.path,
2955 char_bag,
2956 }
2957 } else {
2958 unreachable!()
2959 }
2960 })
2961 }
2962}
2963
2964impl Entity for Project {
2965 type Event = Event;
2966
2967 fn release(&mut self, _: &mut gpui::MutableAppContext) {
2968 match &self.client_state {
2969 ProjectClientState::Local { remote_id_rx, .. } => {
2970 if let Some(project_id) = *remote_id_rx.borrow() {
2971 self.client
2972 .send(proto::UnregisterProject { project_id })
2973 .log_err();
2974 }
2975 }
2976 ProjectClientState::Remote { remote_id, .. } => {
2977 self.client
2978 .send(proto::LeaveProject {
2979 project_id: *remote_id,
2980 })
2981 .log_err();
2982 }
2983 }
2984 }
2985
2986 fn app_will_quit(
2987 &mut self,
2988 _: &mut MutableAppContext,
2989 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
2990 use futures::FutureExt;
2991
2992 let shutdown_futures = self
2993 .language_servers
2994 .drain()
2995 .filter_map(|(_, server)| server.shutdown())
2996 .collect::<Vec<_>>();
2997 Some(
2998 async move {
2999 futures::future::join_all(shutdown_futures).await;
3000 }
3001 .boxed(),
3002 )
3003 }
3004}
3005
3006impl Collaborator {
3007 fn from_proto(
3008 message: proto::Collaborator,
3009 user_store: &ModelHandle<UserStore>,
3010 cx: &mut AsyncAppContext,
3011 ) -> impl Future<Output = Result<Self>> {
3012 let user = user_store.update(cx, |user_store, cx| {
3013 user_store.fetch_user(message.user_id, cx)
3014 });
3015
3016 async move {
3017 Ok(Self {
3018 peer_id: PeerId(message.peer_id),
3019 user: user.await?,
3020 replica_id: message.replica_id as ReplicaId,
3021 })
3022 }
3023 }
3024}
3025
3026impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
3027 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
3028 Self {
3029 worktree_id,
3030 path: path.as_ref().into(),
3031 }
3032 }
3033}
3034
3035impl From<lsp::CreateFileOptions> for fs::CreateOptions {
3036 fn from(options: lsp::CreateFileOptions) -> Self {
3037 Self {
3038 overwrite: options.overwrite.unwrap_or(false),
3039 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3040 }
3041 }
3042}
3043
3044impl From<lsp::RenameFileOptions> for fs::RenameOptions {
3045 fn from(options: lsp::RenameFileOptions) -> Self {
3046 Self {
3047 overwrite: options.overwrite.unwrap_or(false),
3048 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3049 }
3050 }
3051}
3052
3053impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
3054 fn from(options: lsp::DeleteFileOptions) -> Self {
3055 Self {
3056 recursive: options.recursive.unwrap_or(false),
3057 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
3058 }
3059 }
3060}
3061
3062#[cfg(test)]
3063mod tests {
3064 use super::{Event, *};
3065 use fs::RealFs;
3066 use futures::StreamExt;
3067 use gpui::test::subscribe;
3068 use language::{
3069 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
3070 };
3071 use lsp::Url;
3072 use serde_json::json;
3073 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
3074 use unindent::Unindent as _;
3075 use util::test::temp_tree;
3076 use worktree::WorktreeHandle as _;
3077
3078 #[gpui::test]
3079 async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
3080 let dir = temp_tree(json!({
3081 "root": {
3082 "apple": "",
3083 "banana": {
3084 "carrot": {
3085 "date": "",
3086 "endive": "",
3087 }
3088 },
3089 "fennel": {
3090 "grape": "",
3091 }
3092 }
3093 }));
3094
3095 let root_link_path = dir.path().join("root_link");
3096 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3097 unix::fs::symlink(
3098 &dir.path().join("root/fennel"),
3099 &dir.path().join("root/finnochio"),
3100 )
3101 .unwrap();
3102
3103 let project = Project::test(Arc::new(RealFs), &mut cx);
3104
3105 let (tree, _) = project
3106 .update(&mut cx, |project, cx| {
3107 project.find_or_create_local_worktree(&root_link_path, false, cx)
3108 })
3109 .await
3110 .unwrap();
3111
3112 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3113 .await;
3114 cx.read(|cx| {
3115 let tree = tree.read(cx);
3116 assert_eq!(tree.file_count(), 5);
3117 assert_eq!(
3118 tree.inode_for_path("fennel/grape"),
3119 tree.inode_for_path("finnochio/grape")
3120 );
3121 });
3122
3123 let cancel_flag = Default::default();
3124 let results = project
3125 .read_with(&cx, |project, cx| {
3126 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3127 })
3128 .await;
3129 assert_eq!(
3130 results
3131 .into_iter()
3132 .map(|result| result.path)
3133 .collect::<Vec<Arc<Path>>>(),
3134 vec![
3135 PathBuf::from("banana/carrot/date").into(),
3136 PathBuf::from("banana/carrot/endive").into(),
3137 ]
3138 );
3139 }
3140
3141 #[gpui::test]
3142 async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
3143 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3144 let progress_token = language_server_config
3145 .disk_based_diagnostics_progress_token
3146 .clone()
3147 .unwrap();
3148
3149 let language = Arc::new(Language::new(
3150 LanguageConfig {
3151 name: "Rust".to_string(),
3152 path_suffixes: vec!["rs".to_string()],
3153 language_server: Some(language_server_config),
3154 ..Default::default()
3155 },
3156 Some(tree_sitter_rust::language()),
3157 ));
3158
3159 let fs = FakeFs::new(cx.background());
3160 fs.insert_tree(
3161 "/dir",
3162 json!({
3163 "a.rs": "fn a() { A }",
3164 "b.rs": "const y: i32 = 1",
3165 }),
3166 )
3167 .await;
3168
3169 let project = Project::test(fs, &mut cx);
3170 project.update(&mut cx, |project, _| {
3171 Arc::get_mut(&mut project.languages).unwrap().add(language);
3172 });
3173
3174 let (tree, _) = project
3175 .update(&mut cx, |project, cx| {
3176 project.find_or_create_local_worktree("/dir", false, cx)
3177 })
3178 .await
3179 .unwrap();
3180 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3181
3182 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3183 .await;
3184
3185 // Cause worktree to start the fake language server
3186 let _buffer = project
3187 .update(&mut cx, |project, cx| {
3188 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3189 })
3190 .await
3191 .unwrap();
3192
3193 let mut events = subscribe(&project, &mut cx);
3194
3195 let mut fake_server = fake_servers.next().await.unwrap();
3196 fake_server.start_progress(&progress_token).await;
3197 assert_eq!(
3198 events.next().await.unwrap(),
3199 Event::DiskBasedDiagnosticsStarted
3200 );
3201
3202 fake_server.start_progress(&progress_token).await;
3203 fake_server.end_progress(&progress_token).await;
3204 fake_server.start_progress(&progress_token).await;
3205
3206 fake_server
3207 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3208 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3209 version: None,
3210 diagnostics: vec![lsp::Diagnostic {
3211 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3212 severity: Some(lsp::DiagnosticSeverity::ERROR),
3213 message: "undefined variable 'A'".to_string(),
3214 ..Default::default()
3215 }],
3216 })
3217 .await;
3218 assert_eq!(
3219 events.next().await.unwrap(),
3220 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3221 );
3222
3223 fake_server.end_progress(&progress_token).await;
3224 fake_server.end_progress(&progress_token).await;
3225 assert_eq!(
3226 events.next().await.unwrap(),
3227 Event::DiskBasedDiagnosticsUpdated
3228 );
3229 assert_eq!(
3230 events.next().await.unwrap(),
3231 Event::DiskBasedDiagnosticsFinished
3232 );
3233
3234 let buffer = project
3235 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3236 .await
3237 .unwrap();
3238
3239 buffer.read_with(&cx, |buffer, _| {
3240 let snapshot = buffer.snapshot();
3241 let diagnostics = snapshot
3242 .diagnostics_in_range::<_, Point>(0..buffer.len())
3243 .collect::<Vec<_>>();
3244 assert_eq!(
3245 diagnostics,
3246 &[DiagnosticEntry {
3247 range: Point::new(0, 9)..Point::new(0, 10),
3248 diagnostic: Diagnostic {
3249 severity: lsp::DiagnosticSeverity::ERROR,
3250 message: "undefined variable 'A'".to_string(),
3251 group_id: 0,
3252 is_primary: true,
3253 ..Default::default()
3254 }
3255 }]
3256 )
3257 });
3258 }
3259
3260 #[gpui::test]
3261 async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
3262 let dir = temp_tree(json!({
3263 "root": {
3264 "dir1": {},
3265 "dir2": {
3266 "dir3": {}
3267 }
3268 }
3269 }));
3270
3271 let project = Project::test(Arc::new(RealFs), &mut cx);
3272 let (tree, _) = project
3273 .update(&mut cx, |project, cx| {
3274 project.find_or_create_local_worktree(&dir.path(), false, cx)
3275 })
3276 .await
3277 .unwrap();
3278
3279 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3280 .await;
3281
3282 let cancel_flag = Default::default();
3283 let results = project
3284 .read_with(&cx, |project, cx| {
3285 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3286 })
3287 .await;
3288
3289 assert!(results.is_empty());
3290 }
3291
3292 #[gpui::test]
3293 async fn test_definition(mut cx: gpui::TestAppContext) {
3294 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3295 let language = Arc::new(Language::new(
3296 LanguageConfig {
3297 name: "Rust".to_string(),
3298 path_suffixes: vec!["rs".to_string()],
3299 language_server: Some(language_server_config),
3300 ..Default::default()
3301 },
3302 Some(tree_sitter_rust::language()),
3303 ));
3304
3305 let fs = FakeFs::new(cx.background());
3306 fs.insert_tree(
3307 "/dir",
3308 json!({
3309 "a.rs": "const fn a() { A }",
3310 "b.rs": "const y: i32 = crate::a()",
3311 }),
3312 )
3313 .await;
3314
3315 let project = Project::test(fs, &mut cx);
3316 project.update(&mut cx, |project, _| {
3317 Arc::get_mut(&mut project.languages).unwrap().add(language);
3318 });
3319
3320 let (tree, _) = project
3321 .update(&mut cx, |project, cx| {
3322 project.find_or_create_local_worktree("/dir/b.rs", false, cx)
3323 })
3324 .await
3325 .unwrap();
3326 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3327 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3328 .await;
3329
3330 let buffer = project
3331 .update(&mut cx, |project, cx| {
3332 project.open_buffer(
3333 ProjectPath {
3334 worktree_id,
3335 path: Path::new("").into(),
3336 },
3337 cx,
3338 )
3339 })
3340 .await
3341 .unwrap();
3342
3343 let mut fake_server = fake_servers.next().await.unwrap();
3344 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params| {
3345 let params = params.text_document_position_params;
3346 assert_eq!(
3347 params.text_document.uri.to_file_path().unwrap(),
3348 Path::new("/dir/b.rs"),
3349 );
3350 assert_eq!(params.position, lsp::Position::new(0, 22));
3351
3352 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3353 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
3354 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3355 )))
3356 });
3357
3358 let mut definitions = project
3359 .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
3360 .await
3361 .unwrap();
3362
3363 assert_eq!(definitions.len(), 1);
3364 let definition = definitions.pop().unwrap();
3365 cx.update(|cx| {
3366 let target_buffer = definition.target_buffer.read(cx);
3367 assert_eq!(
3368 target_buffer
3369 .file()
3370 .unwrap()
3371 .as_local()
3372 .unwrap()
3373 .abs_path(cx),
3374 Path::new("/dir/a.rs"),
3375 );
3376 assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
3377 assert_eq!(
3378 list_worktrees(&project, cx),
3379 [("/dir/b.rs".as_ref(), false), ("/dir/a.rs".as_ref(), true)]
3380 );
3381
3382 drop(definition);
3383 });
3384 cx.read(|cx| {
3385 assert_eq!(
3386 list_worktrees(&project, cx),
3387 [("/dir/b.rs".as_ref(), false)]
3388 );
3389 });
3390
3391 fn list_worktrees<'a>(
3392 project: &'a ModelHandle<Project>,
3393 cx: &'a AppContext,
3394 ) -> Vec<(&'a Path, bool)> {
3395 project
3396 .read(cx)
3397 .worktrees(cx)
3398 .map(|worktree| {
3399 let worktree = worktree.read(cx);
3400 (
3401 worktree.as_local().unwrap().abs_path().as_ref(),
3402 worktree.is_weak(),
3403 )
3404 })
3405 .collect::<Vec<_>>()
3406 }
3407 }
3408
3409 #[gpui::test]
3410 async fn test_save_file(mut cx: gpui::TestAppContext) {
3411 let fs = FakeFs::new(cx.background());
3412 fs.insert_tree(
3413 "/dir",
3414 json!({
3415 "file1": "the old contents",
3416 }),
3417 )
3418 .await;
3419
3420 let project = Project::test(fs.clone(), &mut cx);
3421 let worktree_id = project
3422 .update(&mut cx, |p, cx| {
3423 p.find_or_create_local_worktree("/dir", false, cx)
3424 })
3425 .await
3426 .unwrap()
3427 .0
3428 .read_with(&cx, |tree, _| tree.id());
3429
3430 let buffer = project
3431 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3432 .await
3433 .unwrap();
3434 buffer
3435 .update(&mut cx, |buffer, cx| {
3436 assert_eq!(buffer.text(), "the old contents");
3437 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3438 buffer.save(cx)
3439 })
3440 .await
3441 .unwrap();
3442
3443 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3444 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3445 }
3446
3447 #[gpui::test]
3448 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
3449 let fs = FakeFs::new(cx.background());
3450 fs.insert_tree(
3451 "/dir",
3452 json!({
3453 "file1": "the old contents",
3454 }),
3455 )
3456 .await;
3457
3458 let project = Project::test(fs.clone(), &mut cx);
3459 let worktree_id = project
3460 .update(&mut cx, |p, cx| {
3461 p.find_or_create_local_worktree("/dir/file1", false, cx)
3462 })
3463 .await
3464 .unwrap()
3465 .0
3466 .read_with(&cx, |tree, _| tree.id());
3467
3468 let buffer = project
3469 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3470 .await
3471 .unwrap();
3472 buffer
3473 .update(&mut cx, |buffer, cx| {
3474 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3475 buffer.save(cx)
3476 })
3477 .await
3478 .unwrap();
3479
3480 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3481 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3482 }
3483
3484 #[gpui::test(retries = 5)]
3485 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3486 let dir = temp_tree(json!({
3487 "a": {
3488 "file1": "",
3489 "file2": "",
3490 "file3": "",
3491 },
3492 "b": {
3493 "c": {
3494 "file4": "",
3495 "file5": "",
3496 }
3497 }
3498 }));
3499
3500 let project = Project::test(Arc::new(RealFs), &mut cx);
3501 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3502
3503 let (tree, _) = project
3504 .update(&mut cx, |p, cx| {
3505 p.find_or_create_local_worktree(dir.path(), false, cx)
3506 })
3507 .await
3508 .unwrap();
3509 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3510
3511 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3512 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3513 async move { buffer.await.unwrap() }
3514 };
3515 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3516 tree.read_with(cx, |tree, _| {
3517 tree.entry_for_path(path)
3518 .expect(&format!("no entry for path {}", path))
3519 .id
3520 })
3521 };
3522
3523 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3524 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3525 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3526 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3527
3528 let file2_id = id_for_path("a/file2", &cx);
3529 let file3_id = id_for_path("a/file3", &cx);
3530 let file4_id = id_for_path("b/c/file4", &cx);
3531
3532 // Wait for the initial scan.
3533 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3534 .await;
3535
3536 // Create a remote copy of this worktree.
3537 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
3538 let (remote, load_task) = cx.update(|cx| {
3539 Worktree::remote(
3540 1,
3541 1,
3542 initial_snapshot.to_proto(&Default::default(), Default::default()),
3543 rpc.clone(),
3544 cx,
3545 )
3546 });
3547 load_task.await;
3548
3549 cx.read(|cx| {
3550 assert!(!buffer2.read(cx).is_dirty());
3551 assert!(!buffer3.read(cx).is_dirty());
3552 assert!(!buffer4.read(cx).is_dirty());
3553 assert!(!buffer5.read(cx).is_dirty());
3554 });
3555
3556 // Rename and delete files and directories.
3557 tree.flush_fs_events(&cx).await;
3558 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3559 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3560 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3561 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3562 tree.flush_fs_events(&cx).await;
3563
3564 let expected_paths = vec![
3565 "a",
3566 "a/file1",
3567 "a/file2.new",
3568 "b",
3569 "d",
3570 "d/file3",
3571 "d/file4",
3572 ];
3573
3574 cx.read(|app| {
3575 assert_eq!(
3576 tree.read(app)
3577 .paths()
3578 .map(|p| p.to_str().unwrap())
3579 .collect::<Vec<_>>(),
3580 expected_paths
3581 );
3582
3583 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3584 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3585 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3586
3587 assert_eq!(
3588 buffer2.read(app).file().unwrap().path().as_ref(),
3589 Path::new("a/file2.new")
3590 );
3591 assert_eq!(
3592 buffer3.read(app).file().unwrap().path().as_ref(),
3593 Path::new("d/file3")
3594 );
3595 assert_eq!(
3596 buffer4.read(app).file().unwrap().path().as_ref(),
3597 Path::new("d/file4")
3598 );
3599 assert_eq!(
3600 buffer5.read(app).file().unwrap().path().as_ref(),
3601 Path::new("b/c/file5")
3602 );
3603
3604 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3605 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3606 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3607 assert!(buffer5.read(app).file().unwrap().is_deleted());
3608 });
3609
3610 // Update the remote worktree. Check that it becomes consistent with the
3611 // local worktree.
3612 remote.update(&mut cx, |remote, cx| {
3613 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
3614 &initial_snapshot,
3615 1,
3616 1,
3617 0,
3618 true,
3619 );
3620 remote
3621 .as_remote_mut()
3622 .unwrap()
3623 .snapshot
3624 .apply_remote_update(update_message)
3625 .unwrap();
3626
3627 assert_eq!(
3628 remote
3629 .paths()
3630 .map(|p| p.to_str().unwrap())
3631 .collect::<Vec<_>>(),
3632 expected_paths
3633 );
3634 });
3635 }
3636
3637 #[gpui::test]
3638 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3639 let fs = FakeFs::new(cx.background());
3640 fs.insert_tree(
3641 "/the-dir",
3642 json!({
3643 "a.txt": "a-contents",
3644 "b.txt": "b-contents",
3645 }),
3646 )
3647 .await;
3648
3649 let project = Project::test(fs.clone(), &mut cx);
3650 let worktree_id = project
3651 .update(&mut cx, |p, cx| {
3652 p.find_or_create_local_worktree("/the-dir", false, cx)
3653 })
3654 .await
3655 .unwrap()
3656 .0
3657 .read_with(&cx, |tree, _| tree.id());
3658
3659 // Spawn multiple tasks to open paths, repeating some paths.
3660 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3661 (
3662 p.open_buffer((worktree_id, "a.txt"), cx),
3663 p.open_buffer((worktree_id, "b.txt"), cx),
3664 p.open_buffer((worktree_id, "a.txt"), cx),
3665 )
3666 });
3667
3668 let buffer_a_1 = buffer_a_1.await.unwrap();
3669 let buffer_a_2 = buffer_a_2.await.unwrap();
3670 let buffer_b = buffer_b.await.unwrap();
3671 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3672 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3673
3674 // There is only one buffer per path.
3675 let buffer_a_id = buffer_a_1.id();
3676 assert_eq!(buffer_a_2.id(), buffer_a_id);
3677
3678 // Open the same path again while it is still open.
3679 drop(buffer_a_1);
3680 let buffer_a_3 = project
3681 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3682 .await
3683 .unwrap();
3684
3685 // There's still only one buffer per path.
3686 assert_eq!(buffer_a_3.id(), buffer_a_id);
3687 }
3688
3689 #[gpui::test]
3690 async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
3691 use std::fs;
3692
3693 let dir = temp_tree(json!({
3694 "file1": "abc",
3695 "file2": "def",
3696 "file3": "ghi",
3697 }));
3698
3699 let project = Project::test(Arc::new(RealFs), &mut cx);
3700 let (worktree, _) = project
3701 .update(&mut cx, |p, cx| {
3702 p.find_or_create_local_worktree(dir.path(), false, cx)
3703 })
3704 .await
3705 .unwrap();
3706 let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
3707
3708 worktree.flush_fs_events(&cx).await;
3709 worktree
3710 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3711 .await;
3712
3713 let buffer1 = project
3714 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3715 .await
3716 .unwrap();
3717 let events = Rc::new(RefCell::new(Vec::new()));
3718
3719 // initially, the buffer isn't dirty.
3720 buffer1.update(&mut cx, |buffer, cx| {
3721 cx.subscribe(&buffer1, {
3722 let events = events.clone();
3723 move |_, _, event, _| events.borrow_mut().push(event.clone())
3724 })
3725 .detach();
3726
3727 assert!(!buffer.is_dirty());
3728 assert!(events.borrow().is_empty());
3729
3730 buffer.edit(vec![1..2], "", cx);
3731 });
3732
3733 // after the first edit, the buffer is dirty, and emits a dirtied event.
3734 buffer1.update(&mut cx, |buffer, cx| {
3735 assert!(buffer.text() == "ac");
3736 assert!(buffer.is_dirty());
3737 assert_eq!(
3738 *events.borrow(),
3739 &[language::Event::Edited, language::Event::Dirtied]
3740 );
3741 events.borrow_mut().clear();
3742 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
3743 });
3744
3745 // after saving, the buffer is not dirty, and emits a saved event.
3746 buffer1.update(&mut cx, |buffer, cx| {
3747 assert!(!buffer.is_dirty());
3748 assert_eq!(*events.borrow(), &[language::Event::Saved]);
3749 events.borrow_mut().clear();
3750
3751 buffer.edit(vec![1..1], "B", cx);
3752 buffer.edit(vec![2..2], "D", cx);
3753 });
3754
3755 // after editing again, the buffer is dirty, and emits another dirty event.
3756 buffer1.update(&mut cx, |buffer, cx| {
3757 assert!(buffer.text() == "aBDc");
3758 assert!(buffer.is_dirty());
3759 assert_eq!(
3760 *events.borrow(),
3761 &[
3762 language::Event::Edited,
3763 language::Event::Dirtied,
3764 language::Event::Edited,
3765 ],
3766 );
3767 events.borrow_mut().clear();
3768
3769 // TODO - currently, after restoring the buffer to its
3770 // previously-saved state, the is still considered dirty.
3771 buffer.edit([1..3], "", cx);
3772 assert!(buffer.text() == "ac");
3773 assert!(buffer.is_dirty());
3774 });
3775
3776 assert_eq!(*events.borrow(), &[language::Event::Edited]);
3777
3778 // When a file is deleted, the buffer is considered dirty.
3779 let events = Rc::new(RefCell::new(Vec::new()));
3780 let buffer2 = project
3781 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
3782 .await
3783 .unwrap();
3784 buffer2.update(&mut cx, |_, cx| {
3785 cx.subscribe(&buffer2, {
3786 let events = events.clone();
3787 move |_, _, event, _| events.borrow_mut().push(event.clone())
3788 })
3789 .detach();
3790 });
3791
3792 fs::remove_file(dir.path().join("file2")).unwrap();
3793 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
3794 assert_eq!(
3795 *events.borrow(),
3796 &[language::Event::Dirtied, language::Event::FileHandleChanged]
3797 );
3798
3799 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3800 let events = Rc::new(RefCell::new(Vec::new()));
3801 let buffer3 = project
3802 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
3803 .await
3804 .unwrap();
3805 buffer3.update(&mut cx, |_, cx| {
3806 cx.subscribe(&buffer3, {
3807 let events = events.clone();
3808 move |_, _, event, _| events.borrow_mut().push(event.clone())
3809 })
3810 .detach();
3811 });
3812
3813 worktree.flush_fs_events(&cx).await;
3814 buffer3.update(&mut cx, |buffer, cx| {
3815 buffer.edit(Some(0..0), "x", cx);
3816 });
3817 events.borrow_mut().clear();
3818 fs::remove_file(dir.path().join("file3")).unwrap();
3819 buffer3
3820 .condition(&cx, |_, _| !events.borrow().is_empty())
3821 .await;
3822 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
3823 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3824 }
3825
3826 #[gpui::test]
3827 async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
3828 use std::fs;
3829
3830 let initial_contents = "aaa\nbbbbb\nc\n";
3831 let dir = temp_tree(json!({ "the-file": initial_contents }));
3832
3833 let project = Project::test(Arc::new(RealFs), &mut cx);
3834 let (worktree, _) = project
3835 .update(&mut cx, |p, cx| {
3836 p.find_or_create_local_worktree(dir.path(), false, cx)
3837 })
3838 .await
3839 .unwrap();
3840 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
3841
3842 worktree
3843 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3844 .await;
3845
3846 let abs_path = dir.path().join("the-file");
3847 let buffer = project
3848 .update(&mut cx, |p, cx| {
3849 p.open_buffer((worktree_id, "the-file"), cx)
3850 })
3851 .await
3852 .unwrap();
3853
3854 // TODO
3855 // Add a cursor on each row.
3856 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
3857 // assert!(!buffer.is_dirty());
3858 // buffer.add_selection_set(
3859 // &(0..3)
3860 // .map(|row| Selection {
3861 // id: row as usize,
3862 // start: Point::new(row, 1),
3863 // end: Point::new(row, 1),
3864 // reversed: false,
3865 // goal: SelectionGoal::None,
3866 // })
3867 // .collect::<Vec<_>>(),
3868 // cx,
3869 // )
3870 // });
3871
3872 // Change the file on disk, adding two new lines of text, and removing
3873 // one line.
3874 buffer.read_with(&cx, |buffer, _| {
3875 assert!(!buffer.is_dirty());
3876 assert!(!buffer.has_conflict());
3877 });
3878 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3879 fs::write(&abs_path, new_contents).unwrap();
3880
3881 // Because the buffer was not modified, it is reloaded from disk. Its
3882 // contents are edited according to the diff between the old and new
3883 // file contents.
3884 buffer
3885 .condition(&cx, |buffer, _| buffer.text() == new_contents)
3886 .await;
3887
3888 buffer.update(&mut cx, |buffer, _| {
3889 assert_eq!(buffer.text(), new_contents);
3890 assert!(!buffer.is_dirty());
3891 assert!(!buffer.has_conflict());
3892
3893 // TODO
3894 // let cursor_positions = buffer
3895 // .selection_set(selection_set_id)
3896 // .unwrap()
3897 // .selections::<Point>(&*buffer)
3898 // .map(|selection| {
3899 // assert_eq!(selection.start, selection.end);
3900 // selection.start
3901 // })
3902 // .collect::<Vec<_>>();
3903 // assert_eq!(
3904 // cursor_positions,
3905 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
3906 // );
3907 });
3908
3909 // Modify the buffer
3910 buffer.update(&mut cx, |buffer, cx| {
3911 buffer.edit(vec![0..0], " ", cx);
3912 assert!(buffer.is_dirty());
3913 assert!(!buffer.has_conflict());
3914 });
3915
3916 // Change the file on disk again, adding blank lines to the beginning.
3917 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
3918
3919 // Because the buffer is modified, it doesn't reload from disk, but is
3920 // marked as having a conflict.
3921 buffer
3922 .condition(&cx, |buffer, _| buffer.has_conflict())
3923 .await;
3924 }
3925
3926 #[gpui::test]
3927 async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
3928 let fs = FakeFs::new(cx.background());
3929 fs.insert_tree(
3930 "/the-dir",
3931 json!({
3932 "a.rs": "
3933 fn foo(mut v: Vec<usize>) {
3934 for x in &v {
3935 v.push(1);
3936 }
3937 }
3938 "
3939 .unindent(),
3940 }),
3941 )
3942 .await;
3943
3944 let project = Project::test(fs.clone(), &mut cx);
3945 let (worktree, _) = project
3946 .update(&mut cx, |p, cx| {
3947 p.find_or_create_local_worktree("/the-dir", false, cx)
3948 })
3949 .await
3950 .unwrap();
3951 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
3952
3953 let buffer = project
3954 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3955 .await
3956 .unwrap();
3957
3958 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3959 let message = lsp::PublishDiagnosticsParams {
3960 uri: buffer_uri.clone(),
3961 diagnostics: vec![
3962 lsp::Diagnostic {
3963 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3964 severity: Some(DiagnosticSeverity::WARNING),
3965 message: "error 1".to_string(),
3966 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3967 location: lsp::Location {
3968 uri: buffer_uri.clone(),
3969 range: lsp::Range::new(
3970 lsp::Position::new(1, 8),
3971 lsp::Position::new(1, 9),
3972 ),
3973 },
3974 message: "error 1 hint 1".to_string(),
3975 }]),
3976 ..Default::default()
3977 },
3978 lsp::Diagnostic {
3979 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3980 severity: Some(DiagnosticSeverity::HINT),
3981 message: "error 1 hint 1".to_string(),
3982 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3983 location: lsp::Location {
3984 uri: buffer_uri.clone(),
3985 range: lsp::Range::new(
3986 lsp::Position::new(1, 8),
3987 lsp::Position::new(1, 9),
3988 ),
3989 },
3990 message: "original diagnostic".to_string(),
3991 }]),
3992 ..Default::default()
3993 },
3994 lsp::Diagnostic {
3995 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3996 severity: Some(DiagnosticSeverity::ERROR),
3997 message: "error 2".to_string(),
3998 related_information: Some(vec![
3999 lsp::DiagnosticRelatedInformation {
4000 location: lsp::Location {
4001 uri: buffer_uri.clone(),
4002 range: lsp::Range::new(
4003 lsp::Position::new(1, 13),
4004 lsp::Position::new(1, 15),
4005 ),
4006 },
4007 message: "error 2 hint 1".to_string(),
4008 },
4009 lsp::DiagnosticRelatedInformation {
4010 location: lsp::Location {
4011 uri: buffer_uri.clone(),
4012 range: lsp::Range::new(
4013 lsp::Position::new(1, 13),
4014 lsp::Position::new(1, 15),
4015 ),
4016 },
4017 message: "error 2 hint 2".to_string(),
4018 },
4019 ]),
4020 ..Default::default()
4021 },
4022 lsp::Diagnostic {
4023 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4024 severity: Some(DiagnosticSeverity::HINT),
4025 message: "error 2 hint 1".to_string(),
4026 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4027 location: lsp::Location {
4028 uri: buffer_uri.clone(),
4029 range: lsp::Range::new(
4030 lsp::Position::new(2, 8),
4031 lsp::Position::new(2, 17),
4032 ),
4033 },
4034 message: "original diagnostic".to_string(),
4035 }]),
4036 ..Default::default()
4037 },
4038 lsp::Diagnostic {
4039 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4040 severity: Some(DiagnosticSeverity::HINT),
4041 message: "error 2 hint 2".to_string(),
4042 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4043 location: lsp::Location {
4044 uri: buffer_uri.clone(),
4045 range: lsp::Range::new(
4046 lsp::Position::new(2, 8),
4047 lsp::Position::new(2, 17),
4048 ),
4049 },
4050 message: "original diagnostic".to_string(),
4051 }]),
4052 ..Default::default()
4053 },
4054 ],
4055 version: None,
4056 };
4057
4058 project
4059 .update(&mut cx, |p, cx| {
4060 p.update_diagnostics(message, &Default::default(), cx)
4061 })
4062 .unwrap();
4063 let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
4064
4065 assert_eq!(
4066 buffer
4067 .diagnostics_in_range::<_, Point>(0..buffer.len())
4068 .collect::<Vec<_>>(),
4069 &[
4070 DiagnosticEntry {
4071 range: Point::new(1, 8)..Point::new(1, 9),
4072 diagnostic: Diagnostic {
4073 severity: DiagnosticSeverity::WARNING,
4074 message: "error 1".to_string(),
4075 group_id: 0,
4076 is_primary: true,
4077 ..Default::default()
4078 }
4079 },
4080 DiagnosticEntry {
4081 range: Point::new(1, 8)..Point::new(1, 9),
4082 diagnostic: Diagnostic {
4083 severity: DiagnosticSeverity::HINT,
4084 message: "error 1 hint 1".to_string(),
4085 group_id: 0,
4086 is_primary: false,
4087 ..Default::default()
4088 }
4089 },
4090 DiagnosticEntry {
4091 range: Point::new(1, 13)..Point::new(1, 15),
4092 diagnostic: Diagnostic {
4093 severity: DiagnosticSeverity::HINT,
4094 message: "error 2 hint 1".to_string(),
4095 group_id: 1,
4096 is_primary: false,
4097 ..Default::default()
4098 }
4099 },
4100 DiagnosticEntry {
4101 range: Point::new(1, 13)..Point::new(1, 15),
4102 diagnostic: Diagnostic {
4103 severity: DiagnosticSeverity::HINT,
4104 message: "error 2 hint 2".to_string(),
4105 group_id: 1,
4106 is_primary: false,
4107 ..Default::default()
4108 }
4109 },
4110 DiagnosticEntry {
4111 range: Point::new(2, 8)..Point::new(2, 17),
4112 diagnostic: Diagnostic {
4113 severity: DiagnosticSeverity::ERROR,
4114 message: "error 2".to_string(),
4115 group_id: 1,
4116 is_primary: true,
4117 ..Default::default()
4118 }
4119 }
4120 ]
4121 );
4122
4123 assert_eq!(
4124 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4125 &[
4126 DiagnosticEntry {
4127 range: Point::new(1, 8)..Point::new(1, 9),
4128 diagnostic: Diagnostic {
4129 severity: DiagnosticSeverity::WARNING,
4130 message: "error 1".to_string(),
4131 group_id: 0,
4132 is_primary: true,
4133 ..Default::default()
4134 }
4135 },
4136 DiagnosticEntry {
4137 range: Point::new(1, 8)..Point::new(1, 9),
4138 diagnostic: Diagnostic {
4139 severity: DiagnosticSeverity::HINT,
4140 message: "error 1 hint 1".to_string(),
4141 group_id: 0,
4142 is_primary: false,
4143 ..Default::default()
4144 }
4145 },
4146 ]
4147 );
4148 assert_eq!(
4149 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4150 &[
4151 DiagnosticEntry {
4152 range: Point::new(1, 13)..Point::new(1, 15),
4153 diagnostic: Diagnostic {
4154 severity: DiagnosticSeverity::HINT,
4155 message: "error 2 hint 1".to_string(),
4156 group_id: 1,
4157 is_primary: false,
4158 ..Default::default()
4159 }
4160 },
4161 DiagnosticEntry {
4162 range: Point::new(1, 13)..Point::new(1, 15),
4163 diagnostic: Diagnostic {
4164 severity: DiagnosticSeverity::HINT,
4165 message: "error 2 hint 2".to_string(),
4166 group_id: 1,
4167 is_primary: false,
4168 ..Default::default()
4169 }
4170 },
4171 DiagnosticEntry {
4172 range: Point::new(2, 8)..Point::new(2, 17),
4173 diagnostic: Diagnostic {
4174 severity: DiagnosticSeverity::ERROR,
4175 message: "error 2".to_string(),
4176 group_id: 1,
4177 is_primary: true,
4178 ..Default::default()
4179 }
4180 }
4181 ]
4182 );
4183 }
4184
4185 #[gpui::test]
4186 async fn test_rename(mut cx: gpui::TestAppContext) {
4187 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4188 let language = Arc::new(Language::new(
4189 LanguageConfig {
4190 name: "Rust".to_string(),
4191 path_suffixes: vec!["rs".to_string()],
4192 language_server: Some(language_server_config),
4193 ..Default::default()
4194 },
4195 Some(tree_sitter_rust::language()),
4196 ));
4197
4198 let fs = FakeFs::new(cx.background());
4199 fs.insert_tree(
4200 "/dir",
4201 json!({
4202 "one.rs": "const ONE: usize = 1;",
4203 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4204 }),
4205 )
4206 .await;
4207
4208 let project = Project::test(fs.clone(), &mut cx);
4209 project.update(&mut cx, |project, _| {
4210 Arc::get_mut(&mut project.languages).unwrap().add(language);
4211 });
4212
4213 let (tree, _) = project
4214 .update(&mut cx, |project, cx| {
4215 project.find_or_create_local_worktree("/dir", false, cx)
4216 })
4217 .await
4218 .unwrap();
4219 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
4220 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4221 .await;
4222
4223 let buffer = project
4224 .update(&mut cx, |project, cx| {
4225 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
4226 })
4227 .await
4228 .unwrap();
4229
4230 let mut fake_server = fake_servers.next().await.unwrap();
4231
4232 let response = project.update(&mut cx, |project, cx| {
4233 project.prepare_rename(buffer.clone(), 7, cx)
4234 });
4235 fake_server
4236 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params| {
4237 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
4238 assert_eq!(params.position, lsp::Position::new(0, 7));
4239 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4240 lsp::Position::new(0, 6),
4241 lsp::Position::new(0, 9),
4242 )))
4243 })
4244 .next()
4245 .await
4246 .unwrap();
4247 let range = response.await.unwrap().unwrap();
4248 let range = buffer.read_with(&cx, |buffer, _| range.to_offset(buffer));
4249 assert_eq!(range, 6..9);
4250 }
4251}