pub mod fs;
mod ignore;
mod lsp_command;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, HashMap, HashSet};
use futures::Future;
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
    UpgradeModelHandle, WeakModelHandle,
};
use language::{
    point_from_lsp,
    proto::{deserialize_anchor, serialize_anchor},
    range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, Completion, CompletionLabel,
    Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
    ToLspPosition, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{DiagnosticSeverity, LanguageServer};
use lsp_command::*;
use postage::{broadcast, prelude::Stream, sink::Sink, watch};
use smol::block_on;
use std::{
    convert::TryInto,
    ops::Range,
    path::{Path, PathBuf},
    sync::{atomic::AtomicBool, Arc},
    time::Instant,
};
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use fs::*;
pub use worktree::*;

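/// A collection of worktrees (local or remote), together with the buffers,
/// language servers, diagnostics, and collaborator state associated with them.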
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntry>,
    languages: Arc<LanguageRegistry>,
    language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
    client: Arc<client::Client>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    language_servers_with_diagnostics_running: isize,
    open_buffers: HashMap<u64, OpenBuffer>,
    opened_buffer: broadcast::Sender<()>,
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
}

enum OpenBuffer {
    Loaded(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}

enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

enum ProjectClientState {
    Local {
        is_shared: bool,
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
    },
}

#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}

#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntry>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
}

#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

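/// Aggregate counts of primary diagnostics for a path, grouped by severity.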
#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}

#[derive(Debug)]
pub struct Definition {
    pub target_buffer: ModelHandle<Buffer>,
    pub target_range: Range<language::Anchor>,
}

#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
        let mut this = Self {
            error_count: 0,
            warning_count: 0,
            info_count: 0,
            hint_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    DiagnosticSeverity::INFORMATION => this.info_count += 1,
                    DiagnosticSeverity::HINT => this.hint_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
            info_count: self.info_count as u32,
            hint_count: self.hint_count as u32,
        }
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ProjectEntry {
    pub worktree_id: WorktreeId,
    pub entry_id: usize,
}

impl Project {
    pub fn init(client: &Arc<Client>) {
        client.add_entity_message_handler(Self::handle_add_collaborator);
        client.add_entity_message_handler(Self::handle_buffer_reloaded);
        client.add_entity_message_handler(Self::handle_buffer_saved);
        client.add_entity_message_handler(Self::handle_close_buffer);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
        client.add_entity_message_handler(Self::handle_remove_collaborator);
        client.add_entity_message_handler(Self::handle_share_worktree);
        client.add_entity_message_handler(Self::handle_unregister_worktree);
        client.add_entity_message_handler(Self::handle_unshare_project);
        client.add_entity_message_handler(Self::handle_update_buffer_file);
        client.add_entity_message_handler(Self::handle_update_buffer);
        client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
        client.add_entity_message_handler(Self::handle_update_worktree);
        client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_entity_request_handler(Self::handle_apply_code_action);
        client.add_entity_request_handler(Self::handle_format_buffers);
        client.add_entity_request_handler(Self::handle_get_code_actions);
        client.add_entity_request_handler(Self::handle_get_completions);
        client.add_entity_request_handler(Self::handle_get_definition);
        client.add_entity_request_handler(Self::handle_lsp_command::<lsp_command::PrepareRename>);
        client.add_entity_request_handler(Self::handle_lsp_command::<lsp_command::PerformRename>);
        client.add_entity_request_handler(Self::handle_open_buffer);
        client.add_entity_request_handler(Self::handle_save_buffer);
    }

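    /// Creates a project that operates directly on the local filesystem.
    ///
    /// A minimal usage sketch, assuming previously constructed `client`,
    /// `user_store`, `languages`, and `fs` handles and a
    /// `cx: &mut MutableAppContext` are in scope:
    ///
    /// ```ignore
    /// let project = Project::local(client, user_store, languages, fs, cx);
    /// ```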
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.recv().await {
                            if let Some(this) = this.upgrade(&cx) {
                                let remote_id = if let client::Status::Connected { .. } = status {
                                    let response = rpc.request(proto::RegisterProject {}).await?;
                                    Some(response.project_id)
                                } else {
                                    None
                                };

                                if let Some(project_id) = remote_id {
                                    let mut registrations = Vec::new();
                                    this.update(&mut cx, |this, cx| {
                                        for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                                            registrations.push(worktree.update(
                                                cx,
                                                |worktree, cx| {
                                                    let worktree = worktree.as_local_mut().unwrap();
                                                    worktree.register(project_id, cx)
                                                },
                                            ));
                                        }
                                    });
                                    for registration in registrations {
                                        registration.await?;
                                    }
                                }
                                this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                open_buffers: Default::default(),
                loading_buffers: Default::default(),
                shared_buffers: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: broadcast::channel(1).0,
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
            }
        })
    }

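    /// Joins a project that another peer is sharing, replicating its worktrees
    /// over the RPC connection.
    ///
    /// A minimal sketch, assuming `cx: &mut AsyncAppContext` and a known
    /// `project_id` are in scope:
    ///
    /// ```ignore
    /// let project = Project::remote(project_id, client, user_store, languages, fs, cx).await?;
    /// ```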
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(&cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let this = cx.add_model(|cx| {
            let mut this = Self {
                worktrees: Vec::new(),
                open_buffers: Default::default(),
                loading_buffers: Default::default(),
                opened_buffer: broadcast::channel(1).0,
                shared_buffers: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client,
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::new());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn shared_buffer(&self, peer_id: PeerId, remote_id: u64) -> Option<ModelHandle<Buffer>> {
        self.shared_buffers
            .get(&peer_id)
            .and_then(|buffers| buffers.get(&remote_id))
            .cloned()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn has_buffered_operations(&self) -> bool {
        self.open_buffers
            .values()
            .any(|buffer| matches!(buffer, OpenBuffer::Loading(_)))
    }

    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }

    fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            *remote_id_tx.borrow_mut() = remote_id;
        }

        self.subscriptions.clear();
        if let Some(remote_id) = remote_id {
            self.subscriptions
                .push(self.client.add_model_for_remote_entity(remote_id, cx));
        }
    }

    pub fn remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }

    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.recv().await;
            }
        }
    }

    pub fn replica_id(&self) -> ReplicaId {
        match &self.client_state {
            ProjectClientState::Local { .. } => 0,
            ProjectClientState::Remote { replica_id, .. } => *replica_id,
        }
    }

    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }

    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }

    pub fn worktree_for_id(
        &self,
        id: WorktreeId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).id() == id)
    }

    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, _| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;
                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;
            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }

    pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, _| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = false;
                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't unshare a remote project"))
                }
            })?;

            rpc.send(proto::UnshareProject { project_id })?;
            this.update(&mut cx, |this, cx| {
                this.collaborators.clear();
                this.shared_buffers.clear();
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                    });
                }
                cx.notify()
            });
            Ok(())
        })
    }

    pub fn is_read_only(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => false,
            ProjectClientState::Remote {
                sharing_has_stopped,
                ..
            } => *sharing_has_stopped,
        }
    }

    pub fn is_local(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => true,
            ProjectClientState::Remote { .. } => false,
        }
    }

    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }

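    /// Opens the buffer for the given path, reusing an already-open buffer or
    /// an in-flight load for the same path when one exists. Local worktrees
    /// load the file from disk; remote worktrees request it from the host.
    ///
    /// A minimal usage sketch, assuming `project` is a `ModelHandle<Project>`,
    /// `project_path` names a file in one of its worktrees, and the caller is
    /// in an async context that can update models:
    ///
    /// ```ignore
    /// let open_task = project.update(cx, |project, cx| project.open_buffer(project_path, cx));
    /// let buffer = open_task.await?;
    /// ```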
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        if this.loading_buffers.is_empty() {
                            this.open_buffers
                                .retain(|_, buffer| matches!(buffer, OpenBuffer::Loaded(_)))
                        }

                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.recv().await;
            }
        })
    }

    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        let worktree = worktree.downgrade();
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            let worktree = worktree
                .upgrade(&cx)
                .ok_or_else(|| anyhow!("worktree was removed"))?;
            this.update(&mut cx, |this, cx| {
                this.register_buffer(&buffer, Some(&worktree), cx)
            })?;
            Ok(buffer)
        })
    }

    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBuffer {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }

    fn open_local_buffer_from_lsp_path(
        &mut self,
        abs_path: lsp::Url,
        lang_name: String,
        lang_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, true, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers
                        .insert((worktree.read(cx).id(), lang_name), lang_server);
                });
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }

    pub fn save_buffer_as(
        &self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
            });
            Ok(())
        })
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
        let path = path.into();
        if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
            self.open_buffers.iter().any(|(_, buffer)| {
                if let Some(buffer) = buffer.upgrade(cx) {
                    if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                        if file.worktree == worktree && file.path() == &path.path {
                            return true;
                        }
                    }
                }
                false
            })
        } else {
            false
        }
    }

    fn get_open_buffer(
        &mut self,
        path: &ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Option<ModelHandle<Buffer>> {
        let mut result = None;
        let worktree = self.worktree_for_id(path.worktree_id, cx)?;
        self.open_buffers.retain(|_, buffer| {
            if let Some(buffer) = buffer.upgrade(cx) {
                if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                    if file.worktree == worktree && file.path() == &path.path {
                        result = Some(buffer);
                    }
                }
                true
            } else {
                false
            }
        });
        result
    }

    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        match self.open_buffers.insert(
            buffer.read(cx).remote_id(),
            OpenBuffer::Loaded(buffer.downgrade()),
        ) {
            None => {}
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            Some(OpenBuffer::Loaded(_)) => Err(anyhow!("registered the same buffer twice"))?,
        }
        self.assign_language_to_buffer(&buffer, worktree, cx);
        Ok(())
    }

    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        let (path, full_path) = {
            let file = buffer.read(cx).file()?;
            (file.path().clone(), file.full_path(cx))
        };

        // If the buffer has a language, set it and start/assign the language server
        if let Some(language) = self.languages.select_language(&full_path) {
            buffer.update(cx, |buffer, cx| {
                buffer.set_language(Some(language.clone()), cx);
            });

            // For local worktrees, start a language server if needed.
            // Also assign the language server and any previously stored diagnostics to the buffer.
            if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
                let worktree_id = local_worktree.id();
                let worktree_abs_path = local_worktree.abs_path().clone();

                let language_server = match self
                    .language_servers
                    .entry((worktree_id, language.name().to_string()))
                {
                    hash_map::Entry::Occupied(e) => Some(e.get().clone()),
                    hash_map::Entry::Vacant(e) => Self::start_language_server(
                        self.client.clone(),
                        language.clone(),
                        &worktree_abs_path,
                        cx,
                    )
                    .map(|server| e.insert(server).clone()),
                };

                buffer.update(cx, |buffer, cx| {
                    buffer.set_language_server(language_server, cx);
                });
            }
        }

        if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
            if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
                buffer.update(cx, |buffer, cx| {
                    buffer.update_diagnostics(diagnostics, None, cx).log_err();
                });
            }
        }

        None
    }

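    /// Starts a language server for the given language in a worktree and wires
    /// up its `PublishDiagnostics` and `Progress` notifications so diagnostics
    /// and disk-based-diagnostic status events flow back into the project.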
    fn start_language_server(
        rpc: Arc<Client>,
        language: Arc<Language>,
        worktree_path: &Path,
        cx: &mut ModelContext<Self>,
    ) -> Option<Arc<LanguageServer>> {
        enum LspEvent {
            DiagnosticsStart,
            DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
            DiagnosticsFinish,
        }

        let language_server = language
            .start_server(worktree_path, cx)
            .log_err()
            .flatten()?;
        let disk_based_sources = language
            .disk_based_diagnostic_sources()
            .cloned()
            .unwrap_or_default();
        let disk_based_diagnostics_progress_token =
            language.disk_based_diagnostics_progress_token().cloned();
        let has_disk_based_diagnostic_progress_token =
            disk_based_diagnostics_progress_token.is_some();
        let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();

        // Listen for `PublishDiagnostics` notifications.
        language_server
            .on_notification::<lsp::notification::PublishDiagnostics, _>({
                let diagnostics_tx = diagnostics_tx.clone();
                move |params| {
                    if !has_disk_based_diagnostic_progress_token {
                        block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                    }
                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params))).ok();
                    if !has_disk_based_diagnostic_progress_token {
                        block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                    }
                }
            })
            .detach();

        // Listen for `Progress` notifications. Send an event when the language server
        // transitions between running jobs and not running any jobs.
        let mut running_jobs_for_this_server: i32 = 0;
        language_server
            .on_notification::<lsp::notification::Progress, _>(move |params| {
                let token = match params.token {
                    lsp::NumberOrString::Number(_) => None,
                    lsp::NumberOrString::String(token) => Some(token),
                };

                if token == disk_based_diagnostics_progress_token {
                    match params.value {
                        lsp::ProgressParamsValue::WorkDone(progress) => match progress {
                            lsp::WorkDoneProgress::Begin(_) => {
                                running_jobs_for_this_server += 1;
                                if running_jobs_for_this_server == 1 {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                                }
                            }
                            lsp::WorkDoneProgress::End(_) => {
                                running_jobs_for_this_server -= 1;
                                if running_jobs_for_this_server == 0 {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                                }
                            }
                            _ => {}
                        },
                    }
                }
            })
            .detach();

        // Process all the LSP events.
        cx.spawn_weak(|this, mut cx| async move {
            while let Ok(message) = diagnostics_rx.recv().await {
                let this = this.upgrade(&cx)?;
                match message {
                    LspEvent::DiagnosticsStart => {
                        this.update(&mut cx, |this, cx| {
                            this.disk_based_diagnostics_started(cx);
                            if let Some(project_id) = this.remote_id() {
                                rpc.send(proto::DiskBasedDiagnosticsUpdating { project_id })
                                    .log_err();
                            }
                        });
                    }
                    LspEvent::DiagnosticsUpdate(mut params) => {
                        language.process_diagnostics(&mut params);
                        this.update(&mut cx, |this, cx| {
                            this.update_diagnostics(params, &disk_based_sources, cx)
                                .log_err();
                        });
                    }
                    LspEvent::DiagnosticsFinish => {
                        this.update(&mut cx, |this, cx| {
                            this.disk_based_diagnostics_finished(cx);
                            if let Some(project_id) = this.remote_id() {
                                rpc.send(proto::DiskBasedDiagnosticsUpdated { project_id })
                                    .log_err();
                            }
                        });
                    }
                }
            }
            Some(())
        })
        .detach();

        Some(language_server)
    }

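    /// Converts an LSP `PublishDiagnostics` notification into diagnostic
    /// entries, grouping each primary diagnostic with its related-information
    /// entries and marking diagnostics that come from disk-based sources, then
    /// stores the result on the open buffer (if any) and its worktree.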
    pub fn update_diagnostics(
        &mut self,
        params: lsp::PublishDiagnosticsParams,
        disk_based_sources: &HashSet<String>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let abs_path = params
            .uri
            .to_file_path()
            .map_err(|_| anyhow!("URI is not a file"))?;
        let mut next_group_id = 0;
        let mut diagnostics = Vec::default();
        let mut primary_diagnostic_group_ids = HashMap::default();
        let mut sources_by_group_id = HashMap::default();
        let mut supporting_diagnostic_severities = HashMap::default();
        for diagnostic in &params.diagnostics {
            let source = diagnostic.source.as_ref();
            let code = diagnostic.code.as_ref().map(|code| match code {
                lsp::NumberOrString::Number(code) => code.to_string(),
                lsp::NumberOrString::String(code) => code.clone(),
            });
            let range = range_from_lsp(diagnostic.range);
            let is_supporting = diagnostic
                .related_information
                .as_ref()
                .map_or(false, |infos| {
                    infos.iter().any(|info| {
                        primary_diagnostic_group_ids.contains_key(&(
                            source,
                            code.clone(),
                            range_from_lsp(info.location.range),
                        ))
                    })
                });

            if is_supporting {
                if let Some(severity) = diagnostic.severity {
                    supporting_diagnostic_severities
                        .insert((source, code.clone(), range), severity);
                }
            } else {
                let group_id = post_inc(&mut next_group_id);
                let is_disk_based =
                    source.map_or(false, |source| disk_based_sources.contains(source));

                sources_by_group_id.insert(group_id, source);
                primary_diagnostic_group_ids
                    .insert((source, code.clone(), range.clone()), group_id);

                diagnostics.push(DiagnosticEntry {
                    range,
                    diagnostic: Diagnostic {
                        code: code.clone(),
                        severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
                        message: diagnostic.message.clone(),
                        group_id,
                        is_primary: true,
                        is_valid: true,
                        is_disk_based,
                    },
                });
                if let Some(infos) = &diagnostic.related_information {
                    for info in infos {
                        if info.location.uri == params.uri && !info.message.is_empty() {
                            let range = range_from_lsp(info.location.range);
                            diagnostics.push(DiagnosticEntry {
                                range,
                                diagnostic: Diagnostic {
                                    code: code.clone(),
                                    severity: DiagnosticSeverity::INFORMATION,
                                    message: info.message.clone(),
                                    group_id,
                                    is_primary: false,
                                    is_valid: true,
                                    is_disk_based,
                                },
                            });
                        }
                    }
                }
            }
        }

        for entry in &mut diagnostics {
            let diagnostic = &mut entry.diagnostic;
            if !diagnostic.is_primary {
                let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
                if let Some(&severity) = supporting_diagnostic_severities.get(&(
                    source,
                    diagnostic.code.clone(),
                    entry.range.clone(),
                )) {
                    diagnostic.severity = severity;
                }
            }
        }

        self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
        Ok(())
    }

    pub fn update_diagnostic_entries(
        &mut self,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };

        for buffer in self.open_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| *file.path() == project_path.path)
                {
                    buffer.update(cx, |buffer, cx| {
                        buffer.update_diagnostics(diagnostics.clone(), version, cx)
                    })?;
                    break;
                }
            }
        }
        worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(project_path.path.clone(), diagnostics, cx)
        })?;
        cx.emit(Event::DiagnosticsUpdated(project_path));
        Ok(())
    }

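    /// Formats the given buffers: local buffers are formatted via their
    /// language server, while remote buffers are delegated to the host through
    /// `proto::FormatBuffers`. Returns the resulting transaction per buffer.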
    pub fn format(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<ProjectTransaction>> {
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            let worktree;
            if let Some(file) = File::from_dyn(buffer.file()) {
                worktree = file.worktree.clone();
                if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
                    let lang_server;
                    if let Some(lang) = buffer.language() {
                        if let Some(server) = self
                            .language_servers
                            .get(&(worktree.read(cx).id(), lang.name().to_string()))
                        {
                            lang_server = server.clone();
                        } else {
                            return Task::ready(Ok(Default::default()));
                        };
                    } else {
                        return Task::ready(Ok(Default::default()));
                    }

                    local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
                } else {
                    remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                }
            } else {
                return Task::ready(Ok(Default::default()));
            }
        }

        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::FormatBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            for (buffer, buffer_abs_path, lang_server) in local_buffers {
                let lsp_edits = lang_server
                    .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
                        text_document: lsp::TextDocumentIdentifier::new(
                            lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                        ),
                        options: Default::default(),
                        work_done_progress_params: Default::default(),
                    })
                    .await?;

                if let Some(lsp_edits) = lsp_edits {
                    let edits = buffer
                        .update(&mut cx, |buffer, cx| {
                            buffer.edits_from_lsp(lsp_edits, None, cx)
                        })
                        .await?;
                    buffer.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            project_transaction.0.insert(cx.handle(), transaction);
                        }
                    });
                }
            }

            Ok(project_transaction)
        })
    }

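    /// Returns the definitions of the symbol at the given position, querying
    /// the buffer's language server locally or forwarding the request to the
    /// host when the project is remote.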
    pub fn definition<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Definition>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_name;
            let lang_server;
            if let Some(lang) = source_buffer.language() {
                lang_name = lang.name().to_string();
                if let Some(server) = self
                    .language_servers
                    .get(&(worktree.read(cx).id(), lang_name.clone()))
                {
                    lang_server = server.clone();
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            } else {
                return Task::ready(Ok(Default::default()));
            }

            cx.spawn(|this, mut cx| async move {
                let response = lang_server
                    .request::<lsp::request::GotoDefinition>(lsp::GotoDefinitionParams {
                        text_document_position_params: lsp::TextDocumentPositionParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                            ),
                            position: lsp::Position::new(position.row, position.column),
                        },
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await?;

                let mut definitions = Vec::new();
                if let Some(response) = response {
                    let mut unresolved_locations = Vec::new();
                    match response {
                        lsp::GotoDefinitionResponse::Scalar(loc) => {
                            unresolved_locations.push((loc.uri, loc.range));
                        }
                        lsp::GotoDefinitionResponse::Array(locs) => {
                            unresolved_locations.extend(locs.into_iter().map(|l| (l.uri, l.range)));
                        }
                        lsp::GotoDefinitionResponse::Link(links) => {
                            unresolved_locations.extend(
                                links
                                    .into_iter()
                                    .map(|l| (l.target_uri, l.target_selection_range)),
                            );
                        }
                    }

                    for (target_uri, target_range) in unresolved_locations {
                        let target_buffer_handle = this
                            .update(&mut cx, |this, cx| {
                                this.open_local_buffer_from_lsp_path(
                                    target_uri,
                                    lang_name.clone(),
                                    lang_server.clone(),
                                    cx,
                                )
                            })
                            .await?;

                        cx.read(|cx| {
                            let target_buffer = target_buffer_handle.read(cx);
                            let target_start = target_buffer
                                .clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
                            let target_end = target_buffer
                                .clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
                            definitions.push(Definition {
                                target_buffer: target_buffer_handle,
                                target_range: target_buffer.anchor_after(target_start)
                                    ..target_buffer.anchor_before(target_end),
                            });
                        });
                    }
                }

                Ok(definitions)
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::GetDefinition {
                project_id,
                buffer_id: source_buffer.remote_id(),
                position: Some(serialize_anchor(&source_buffer.anchor_before(position))),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client.request(request).await?;
                let mut definitions = Vec::new();
                for definition in response.definitions {
                    let buffer = definition.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let target_start = definition
                        .target_start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let target_end = definition
                        .target_end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    definitions.push(Definition {
                        target_buffer,
                        target_range: target_start..target_end,
                    })
                }

                Ok(definitions)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }

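    /// Requests completions at the given position from the buffer's language
    /// server, or from the host via `proto::GetCompletions` for remote
    /// projects.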
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
                server
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            position.to_lsp_position(),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
                                lsp::CompletionTextEdit::Edit(edit) => {
                                    (range_from_lsp(edit.range), edit.new_text.clone())
                                }
                                lsp::CompletionTextEdit::InsertAndReplace(_) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                            let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
                            if clipped_start == old_range.start && clipped_end == old_range.end {
                                Some(Completion {
                                    old_range: this.anchor_before(old_range.start)
                                        ..this.anchor_after(old_range.end),
                                    new_text,
                                    label: language
                                        .as_ref()
                                        .and_then(|l| l.label_for_completion(&lsp_completion))
                                        .unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
                                    lsp_completion,
                                })
                            } else {
                                None
                            }
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: (&source_buffer.version()).into(),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;

                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(response.version.into())
                    })
                    .await;

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }

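    /// Resolves the given completion and applies any additional text edits it
    /// carries, returning the resulting transaction if an edit was made.
    /// Remote projects forward the request to the host.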
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let lang_server = if let Some(language_server) = buffer.language_server() {
                language_server.clone()
            } else {
                return Task::ready(Err(anyhow!("buffer does not have a language server")));
            };

            cx.spawn(|_, mut cx| async move {
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = buffer_handle
                        .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }

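    /// Fetches the code actions (quick fixes and refactorings) available for
    /// the given range, locally via LSP or remotely via the host.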
    pub fn code_actions<T: ToOffset>(
        &self,
        buffer_handle: &ModelHandle<Buffer>,
        range: Range<T>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<CodeAction>>> {
        let buffer_handle = buffer_handle.clone();
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Ok(Default::default()));
        };
        let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_name;
            let lang_server;
            if let Some(lang) = buffer.language() {
                lang_name = lang.name().to_string();
                if let Some(server) = self
                    .language_servers
                    .get(&(worktree.read(cx).id(), lang_name.clone()))
                {
                    lang_server = server.clone();
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            } else {
                return Task::ready(Ok(Default::default()));
            }

            let lsp_range = lsp::Range::new(
                range.start.to_point_utf16(buffer).to_lsp_position(),
                range.end.to_point_utf16(buffer).to_lsp_position(),
            );
            cx.foreground().spawn(async move {
                Ok(lang_server
                    .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
                        text_document: lsp::TextDocumentIdentifier::new(
                            lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                        ),
                        range: lsp_range,
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                        context: lsp::CodeActionContext {
                            diagnostics: Default::default(),
                            only: Some(vec![
                                lsp::CodeActionKind::QUICKFIX,
                                lsp::CodeActionKind::REFACTOR,
                                lsp::CodeActionKind::REFACTOR_EXTRACT,
                            ]),
                        },
                    })
                    .await?
                    .unwrap_or_default()
                    .into_iter()
                    .filter_map(|entry| {
                        if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
                            Some(CodeAction {
                                range: range.clone(),
                                lsp_action,
                            })
                        } else {
                            None
                        }
                    })
                    .collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc
                    .request(proto::GetCodeActions {
                        project_id,
                        buffer_id,
                        start: Some(language::proto::serialize_anchor(&range.start)),
                        end: Some(language::proto::serialize_anchor(&range.end)),
                    })
                    .await?;

                buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(response.version.into())
                    })
                    .await;

                response
                    .actions
                    .into_iter()
                    .map(language::proto::deserialize_code_action)
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }

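    /// Applies a code action, resolving it with the language server first when
    /// needed, and returns the per-buffer transactions produced by its
    /// workspace edit. Remote projects forward the request to the host.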
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            let lang_name = if let Some(lang) = buffer.language() {
                lang.name().to_string()
            } else {
                return Task::ready(Ok(Default::default()));
            };
            let lang_server = if let Some(language_server) = buffer.language_server() {
                language_server.clone()
            } else {
                return Task::ready(Err(anyhow!("buffer does not have a language server")));
            };
            let range = action.range.to_point_utf16(buffer);

            cx.spawn(|this, mut cx| async move {
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    *lsp_range = serde_json::to_value(&lsp::Range::new(
                        range.start.to_lsp_position(),
                        range.end.to_lsp_position(),
                    ))
                    .unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                if let Some(edit) = action.lsp_action.edit {
                    Self::deserialize_workspace_edit(
                        this,
                        edit,
                        push_to_history,
                        lang_name,
                        lang_server,
                        &mut cx,
                    )
                    .await
                } else {
                    Ok(ProjectTransaction::default())
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }

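    /// Applies an LSP `WorkspaceEdit`: create/rename/delete resource operations
    /// go through the project's `Fs`, and text edits are applied to buffers
    /// opened via their LSP URIs, collecting one transaction per edited buffer.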
    async fn deserialize_workspace_edit(
        this: ModelHandle<Self>,
        edit: lsp::WorkspaceEdit,
        push_to_history: bool,
        language_name: String,
        language_server: Arc<LanguageServer>,
        cx: &mut AsyncAppContext,
    ) -> Result<ProjectTransaction> {
        let fs = this.read_with(cx, |this, _| this.fs.clone());
        let mut operations = Vec::new();
        if let Some(document_changes) = edit.document_changes {
            match document_changes {
                lsp::DocumentChanges::Edits(edits) => {
                    operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
                }
                lsp::DocumentChanges::Operations(ops) => operations = ops,
            }
        } else if let Some(changes) = edit.changes {
            operations.extend(changes.into_iter().map(|(uri, edits)| {
                lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
                    text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                        uri,
                        version: None,
                    },
                    edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
                })
            }));
        }

        let mut project_transaction = ProjectTransaction::default();
        for operation in operations {
            match operation {
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;

                    if let Some(parent_path) = abs_path.parent() {
                        fs.create_dir(parent_path).await?;
                    }
                    if abs_path.ends_with("/") {
                        fs.create_dir(&abs_path).await?;
                    } else {
                        fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
                            .await?;
                    }
                }
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
                    let source_abs_path = op
                        .old_uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    let target_abs_path = op
                        .new_uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    fs.rename(
                        &source_abs_path,
                        &target_abs_path,
                        op.options.map(Into::into).unwrap_or_default(),
                    )
                    .await?;
                }
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    let options = op.options.map(Into::into).unwrap_or_default();
                    if abs_path.ends_with("/") {
                        fs.remove_dir(&abs_path, options).await?;
                    } else {
                        fs.remove_file(&abs_path, options).await?;
                    }
                }
                lsp::DocumentChangeOperation::Edit(op) => {
                    let buffer_to_edit = this
                        .update(cx, |this, cx| {
                            this.open_local_buffer_from_lsp_path(
                                op.text_document.uri,
                                language_name.clone(),
                                language_server.clone(),
                                cx,
                            )
                        })
                        .await?;

                    let edits = buffer_to_edit
                        .update(cx, |buffer, cx| {
                            let edits = op.edits.into_iter().map(|edit| match edit {
                                lsp::OneOf::Left(edit) => edit,
                                lsp::OneOf::Right(edit) => edit.text_edit,
                            });
                            buffer.edits_from_lsp(edits, op.text_document.version, cx)
                        })
                        .await?;

                    let transaction = buffer_to_edit.update(cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };

                        transaction
                    });
                    if let Some(transaction) = transaction {
                        project_transaction.0.insert(buffer_to_edit, transaction);
                    }
                }
            }
        }

        Ok(project_transaction)
    }

    pub fn prepare_rename<T: ToPointUtf16>(
        &self,
        buffer: ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Range<Anchor>>>> {
        let position = position.to_point_utf16(buffer.read(cx));
        self.request_lsp(buffer, PrepareRename { position }, cx)
    }

    pub fn perform_rename<T: ToPointUtf16>(
        &self,
        buffer: ModelHandle<Buffer>,
        position: T,
        new_name: String,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        let position = position.to_point_utf16(buffer.read(cx));
        self.request_lsp(
            buffer,
            PerformRename {
                position,
                new_name,
                push_to_history,
            },
            cx,
        )
    }

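    /// Dispatches an `LspCommand`, sending it to the buffer's language server
    /// for local projects or serializing it to the host over RPC for remote
    /// ones.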
1855 fn request_lsp<R: LspCommand>(
1856 &self,
1857 buffer_handle: ModelHandle<Buffer>,
1858 request: R,
1859 cx: &mut ModelContext<Self>,
1860 ) -> Task<Result<R::Response>>
1861 where
1862 <R::LspRequest as lsp::request::Request>::Result: Send,
1863 {
1864 if self.is_local() {
1865 let buffer = buffer_handle.read(cx);
1866 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
1867 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
1868 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
1869 return cx.spawn(|this, cx| async move {
1870 let response = language_server
1871 .request::<R::LspRequest>(lsp_params)
1872 .await
1873 .context("lsp request failed")?;
1874 request
1875 .response_from_lsp(response, this, buffer_handle, cx)
1876 .await
1877 });
1878 }
1879 } else if let Some(project_id) = self.remote_id() {
1880 let rpc = self.client.clone();
1881 let message = request.to_proto(project_id, &buffer_handle, cx);
1882 return cx.spawn(|this, cx| async move {
1883 let response = rpc.request(message).await?;
1884 request
1885 .response_from_proto(response, this, buffer_handle, cx)
1886 .await
1887 });
1888 }
1889 Task::ready(Ok(Default::default()))
1890 }
1891
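    // Returns the local worktree containing `abs_path` along with the path relative to that
    // worktree's root, creating a new worktree rooted at `abs_path` (with an empty relative
    // path) when no existing worktree contains it.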
1892 pub fn find_or_create_local_worktree(
1893 &self,
1894 abs_path: impl AsRef<Path>,
1895 weak: bool,
1896 cx: &mut ModelContext<Self>,
1897 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1898 let abs_path = abs_path.as_ref();
1899 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1900             Task::ready(Ok((tree, relative_path)))
1901 } else {
1902 let worktree = self.create_local_worktree(abs_path, weak, cx);
1903 cx.foreground()
1904 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1905 }
1906 }
1907
1908 fn find_local_worktree(
1909 &self,
1910 abs_path: &Path,
1911 cx: &AppContext,
1912 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1913 for tree in self.worktrees(cx) {
1914 if let Some(relative_path) = tree
1915 .read(cx)
1916 .as_local()
1917 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1918 {
1919 return Some((tree.clone(), relative_path.into()));
1920 }
1921 }
1922 None
1923 }
1924
1925 pub fn is_shared(&self) -> bool {
1926 match &self.client_state {
1927 ProjectClientState::Local { is_shared, .. } => *is_shared,
1928 ProjectClientState::Remote { .. } => false,
1929 }
1930 }
1931
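    // Builds a local worktree for `abs_path` and adds it to the project. If the project has a
    // remote id, the new worktree is also registered with the server, and shared when the
    // project itself is currently shared.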
1932 fn create_local_worktree(
1933 &self,
1934 abs_path: impl AsRef<Path>,
1935 weak: bool,
1936 cx: &mut ModelContext<Self>,
1937 ) -> Task<Result<ModelHandle<Worktree>>> {
1938 let fs = self.fs.clone();
1939 let client = self.client.clone();
1940 let path = Arc::from(abs_path.as_ref());
1941 cx.spawn(|project, mut cx| async move {
1942 let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;
1943
1944 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
1945 project.add_worktree(&worktree, cx);
1946 (project.remote_id(), project.is_shared())
1947 });
1948
1949 if let Some(project_id) = remote_project_id {
1950 worktree
1951 .update(&mut cx, |worktree, cx| {
1952 worktree.as_local_mut().unwrap().register(project_id, cx)
1953 })
1954 .await?;
1955 if is_shared {
1956 worktree
1957 .update(&mut cx, |worktree, cx| {
1958 worktree.as_local_mut().unwrap().share(project_id, cx)
1959 })
1960 .await?;
1961 }
1962 }
1963
1964 Ok(worktree)
1965 })
1966 }
1967
1968 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1969 self.worktrees.retain(|worktree| {
1970 worktree
1971 .upgrade(cx)
1972 .map_or(false, |w| w.read(cx).id() != id)
1973 });
1974 cx.notify();
1975 }
1976
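    // Starts tracking a worktree: local worktrees are subscribed to so open buffers can be
    // updated after rescans, weak local worktrees are held via weak handles and pruned when
    // released, and all other worktrees are retained strongly.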
1977 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
1978 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
1979 if worktree.read(cx).is_local() {
1980 cx.subscribe(&worktree, |this, worktree, _, cx| {
1981 this.update_local_worktree_buffers(worktree, cx);
1982 })
1983 .detach();
1984 }
1985
1986 let push_weak_handle = {
1987 let worktree = worktree.read(cx);
1988 worktree.is_local() && worktree.is_weak()
1989 };
1990 if push_weak_handle {
1991 cx.observe_release(&worktree, |this, cx| {
1992 this.worktrees
1993 .retain(|worktree| worktree.upgrade(cx).is_some());
1994 cx.notify();
1995 })
1996 .detach();
1997 self.worktrees
1998 .push(WorktreeHandle::Weak(worktree.downgrade()));
1999 } else {
2000 self.worktrees
2001 .push(WorktreeHandle::Strong(worktree.clone()));
2002 }
2003 cx.notify();
2004 }
2005
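    // Called whenever a local worktree reports changes: rebuilds the `File` for every open
    // buffer in that worktree (following renames, or dropping the entry id if the file is
    // gone), pushes the new metadata to the server when the project has a remote id, and
    // prunes buffers that have been dropped.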
2006 fn update_local_worktree_buffers(
2007 &mut self,
2008 worktree_handle: ModelHandle<Worktree>,
2009 cx: &mut ModelContext<Self>,
2010 ) {
2011 let snapshot = worktree_handle.read(cx).snapshot();
2012 let mut buffers_to_delete = Vec::new();
2013 for (buffer_id, buffer) in &self.open_buffers {
2014 if let Some(buffer) = buffer.upgrade(cx) {
2015 buffer.update(cx, |buffer, cx| {
2016 if let Some(old_file) = File::from_dyn(buffer.file()) {
2017 if old_file.worktree != worktree_handle {
2018 return;
2019 }
2020
2021 let new_file = if let Some(entry) = old_file
2022 .entry_id
2023 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
2024 {
2025 File {
2026 is_local: true,
2027 entry_id: Some(entry.id),
2028 mtime: entry.mtime,
2029 path: entry.path.clone(),
2030 worktree: worktree_handle.clone(),
2031 }
2032 } else if let Some(entry) =
2033 snapshot.entry_for_path(old_file.path().as_ref())
2034 {
2035 File {
2036 is_local: true,
2037 entry_id: Some(entry.id),
2038 mtime: entry.mtime,
2039 path: entry.path.clone(),
2040 worktree: worktree_handle.clone(),
2041 }
2042 } else {
2043 File {
2044 is_local: true,
2045 entry_id: None,
2046 path: old_file.path().clone(),
2047 mtime: old_file.mtime(),
2048 worktree: worktree_handle.clone(),
2049 }
2050 };
2051
2052 if let Some(project_id) = self.remote_id() {
2053 self.client
2054 .send(proto::UpdateBufferFile {
2055 project_id,
2056 buffer_id: *buffer_id as u64,
2057 file: Some(new_file.to_proto()),
2058 })
2059 .log_err();
2060 }
2061 buffer.file_updated(Box::new(new_file), cx).detach();
2062 }
2063 });
2064 } else {
2065 buffers_to_delete.push(*buffer_id);
2066 }
2067 }
2068
2069 for buffer_id in buffers_to_delete {
2070 self.open_buffers.remove(&buffer_id);
2071 }
2072 }
2073
2074 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2075 let new_active_entry = entry.and_then(|project_path| {
2076 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2077 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2078 Some(ProjectEntry {
2079 worktree_id: project_path.worktree_id,
2080 entry_id: entry.id,
2081 })
2082 });
2083 if new_active_entry != self.active_entry {
2084 self.active_entry = new_active_entry;
2085 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2086 }
2087 }
2088
2089 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2090 self.language_servers_with_diagnostics_running > 0
2091 }
2092
2093 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2094 let mut summary = DiagnosticSummary::default();
2095 for (_, path_summary) in self.diagnostic_summaries(cx) {
2096 summary.error_count += path_summary.error_count;
2097 summary.warning_count += path_summary.warning_count;
2098 summary.info_count += path_summary.info_count;
2099 summary.hint_count += path_summary.hint_count;
2100 }
2101 summary
2102 }
2103
2104 pub fn diagnostic_summaries<'a>(
2105 &'a self,
2106 cx: &'a AppContext,
2107 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2108 self.worktrees(cx).flat_map(move |worktree| {
2109 let worktree = worktree.read(cx);
2110 let worktree_id = worktree.id();
2111 worktree
2112 .diagnostic_summaries()
2113 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2114 })
2115 }
2116
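    // Tracks how many language servers are currently producing disk-based diagnostics. The
    // first call emits `DiskBasedDiagnosticsStarted`; the matching calls to
    // `disk_based_diagnostics_finished` emit `DiskBasedDiagnosticsFinished` once the count
    // drops back to zero.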
2117 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2118 self.language_servers_with_diagnostics_running += 1;
2119 if self.language_servers_with_diagnostics_running == 1 {
2120 cx.emit(Event::DiskBasedDiagnosticsStarted);
2121 }
2122 }
2123
2124 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2125 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2126 self.language_servers_with_diagnostics_running -= 1;
2127 if self.language_servers_with_diagnostics_running == 0 {
2128 cx.emit(Event::DiskBasedDiagnosticsFinished);
2129 }
2130 }
2131
2132 pub fn active_entry(&self) -> Option<ProjectEntry> {
2133 self.active_entry
2134 }
2135
2136 // RPC message handlers
2137
2138 async fn handle_unshare_project(
2139 this: ModelHandle<Self>,
2140 _: TypedEnvelope<proto::UnshareProject>,
2141 _: Arc<Client>,
2142 mut cx: AsyncAppContext,
2143 ) -> Result<()> {
2144 this.update(&mut cx, |this, cx| {
2145 if let ProjectClientState::Remote {
2146 sharing_has_stopped,
2147 ..
2148 } = &mut this.client_state
2149 {
2150 *sharing_has_stopped = true;
2151 this.collaborators.clear();
2152 cx.notify();
2153 } else {
2154 unreachable!()
2155 }
2156 });
2157
2158 Ok(())
2159 }
2160
2161 async fn handle_add_collaborator(
2162 this: ModelHandle<Self>,
2163 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2164 _: Arc<Client>,
2165 mut cx: AsyncAppContext,
2166 ) -> Result<()> {
2167 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2168 let collaborator = envelope
2169 .payload
2170 .collaborator
2171 .take()
2172 .ok_or_else(|| anyhow!("empty collaborator"))?;
2173
2174 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2175 this.update(&mut cx, |this, cx| {
2176 this.collaborators
2177 .insert(collaborator.peer_id, collaborator);
2178 cx.notify();
2179 });
2180
2181 Ok(())
2182 }
2183
2184 async fn handle_remove_collaborator(
2185 this: ModelHandle<Self>,
2186 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2187 _: Arc<Client>,
2188 mut cx: AsyncAppContext,
2189 ) -> Result<()> {
2190 this.update(&mut cx, |this, cx| {
2191 let peer_id = PeerId(envelope.payload.peer_id);
2192 let replica_id = this
2193 .collaborators
2194 .remove(&peer_id)
2195 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2196 .replica_id;
2197 this.shared_buffers.remove(&peer_id);
2198             for buffer in this.open_buffers.values() {
2199 if let Some(buffer) = buffer.upgrade(cx) {
2200 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2201 }
2202 }
2203 cx.notify();
2204 Ok(())
2205 })
2206 }
2207
2208 async fn handle_share_worktree(
2209 this: ModelHandle<Self>,
2210 envelope: TypedEnvelope<proto::ShareWorktree>,
2211 client: Arc<Client>,
2212 mut cx: AsyncAppContext,
2213 ) -> Result<()> {
2214 this.update(&mut cx, |this, cx| {
2215 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2216 let replica_id = this.replica_id();
2217 let worktree = envelope
2218 .payload
2219 .worktree
2220 .ok_or_else(|| anyhow!("invalid worktree"))?;
2221 let (worktree, load_task) =
2222 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2223 this.add_worktree(&worktree, cx);
2224 load_task.detach();
2225 Ok(())
2226 })
2227 }
2228
2229 async fn handle_unregister_worktree(
2230 this: ModelHandle<Self>,
2231 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2232 _: Arc<Client>,
2233 mut cx: AsyncAppContext,
2234 ) -> Result<()> {
2235 this.update(&mut cx, |this, cx| {
2236 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2237 this.remove_worktree(worktree_id, cx);
2238 Ok(())
2239 })
2240 }
2241
2242 async fn handle_update_worktree(
2243 this: ModelHandle<Self>,
2244 envelope: TypedEnvelope<proto::UpdateWorktree>,
2245 _: Arc<Client>,
2246 mut cx: AsyncAppContext,
2247 ) -> Result<()> {
2248 this.update(&mut cx, |this, cx| {
2249 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2250 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2251 worktree.update(cx, |worktree, _| {
2252 let worktree = worktree.as_remote_mut().unwrap();
2253 worktree.update_from_remote(envelope)
2254 })?;
2255 }
2256 Ok(())
2257 })
2258 }
2259
2260 async fn handle_update_diagnostic_summary(
2261 this: ModelHandle<Self>,
2262 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2263 _: Arc<Client>,
2264 mut cx: AsyncAppContext,
2265 ) -> Result<()> {
2266 this.update(&mut cx, |this, cx| {
2267 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2268 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2269 if let Some(summary) = envelope.payload.summary {
2270 let project_path = ProjectPath {
2271 worktree_id,
2272 path: Path::new(&summary.path).into(),
2273 };
2274 worktree.update(cx, |worktree, _| {
2275 worktree
2276 .as_remote_mut()
2277 .unwrap()
2278 .update_diagnostic_summary(project_path.path.clone(), &summary);
2279 });
2280 cx.emit(Event::DiagnosticsUpdated(project_path));
2281 }
2282 }
2283 Ok(())
2284 })
2285 }
2286
2287 async fn handle_disk_based_diagnostics_updating(
2288 this: ModelHandle<Self>,
2289 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2290 _: Arc<Client>,
2291 mut cx: AsyncAppContext,
2292 ) -> Result<()> {
2293 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2294 Ok(())
2295 }
2296
2297 async fn handle_disk_based_diagnostics_updated(
2298 this: ModelHandle<Self>,
2299 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2300 _: Arc<Client>,
2301 mut cx: AsyncAppContext,
2302 ) -> Result<()> {
2303 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2304 Ok(())
2305 }
2306
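    // Applies buffer operations received from collaborators. On guests, operations that arrive
    // while a buffer is still being loaded are stashed in `OpenBuffer::Loading` rather than
    // applied immediately.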
2307 async fn handle_update_buffer(
2308 this: ModelHandle<Self>,
2309 envelope: TypedEnvelope<proto::UpdateBuffer>,
2310 _: Arc<Client>,
2311 mut cx: AsyncAppContext,
2312 ) -> Result<()> {
2313 this.update(&mut cx, |this, cx| {
2314 let payload = envelope.payload.clone();
2315 let buffer_id = payload.buffer_id;
2316 let ops = payload
2317 .operations
2318 .into_iter()
2319                 .map(language::proto::deserialize_operation)
2320 .collect::<Result<Vec<_>, _>>()?;
2321 let is_remote = this.is_remote();
2322 match this.open_buffers.entry(buffer_id) {
2323 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2324 OpenBuffer::Loaded(buffer) => {
2325 if let Some(buffer) = buffer.upgrade(cx) {
2326 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2327 }
2328 }
2329 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2330 },
2331 hash_map::Entry::Vacant(e) => {
2332                     if is_remote && !this.loading_buffers.is_empty() {
2333 e.insert(OpenBuffer::Loading(ops));
2334 }
2335 }
2336 }
2337 Ok(())
2338 })
2339 }
2340
2341 async fn handle_update_buffer_file(
2342 this: ModelHandle<Self>,
2343 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2344 _: Arc<Client>,
2345 mut cx: AsyncAppContext,
2346 ) -> Result<()> {
2347 this.update(&mut cx, |this, cx| {
2348 let payload = envelope.payload.clone();
2349 let buffer_id = payload.buffer_id;
2350 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2351 let worktree = this
2352 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2353 .ok_or_else(|| anyhow!("no such worktree"))?;
2354 let file = File::from_proto(file, worktree.clone(), cx)?;
2355 let buffer = this
2356 .open_buffers
2357 .get_mut(&buffer_id)
2358 .and_then(|b| b.upgrade(cx))
2359 .ok_or_else(|| anyhow!("no such buffer"))?;
2360 buffer.update(cx, |buffer, cx| {
2361 buffer.file_updated(Box::new(file), cx).detach();
2362 });
2363 Ok(())
2364 })
2365 }
2366
2367 async fn handle_save_buffer(
2368 this: ModelHandle<Self>,
2369 envelope: TypedEnvelope<proto::SaveBuffer>,
2370 _: Arc<Client>,
2371 mut cx: AsyncAppContext,
2372 ) -> Result<proto::BufferSaved> {
2373 let buffer_id = envelope.payload.buffer_id;
2374 let sender_id = envelope.original_sender_id()?;
2375 let requested_version = envelope.payload.version.try_into()?;
2376
2377 let (project_id, buffer) = this.update(&mut cx, |this, _| {
2378 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2379 let buffer = this
2380 .shared_buffers
2381 .get(&sender_id)
2382 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2383 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2384 Ok::<_, anyhow::Error>((project_id, buffer))
2385 })?;
2386
2387 if !buffer
2388 .read_with(&cx, |buffer, _| buffer.version())
2389 .observed_all(&requested_version)
2390 {
2391 Err(anyhow!("save request depends on unreceived edits"))?;
2392 }
2393
2394 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2395 Ok(proto::BufferSaved {
2396 project_id,
2397 buffer_id,
2398 version: (&saved_version).into(),
2399 mtime: Some(mtime.into()),
2400 })
2401 }
2402
2403 async fn handle_format_buffers(
2404 this: ModelHandle<Self>,
2405 envelope: TypedEnvelope<proto::FormatBuffers>,
2406 _: Arc<Client>,
2407 mut cx: AsyncAppContext,
2408 ) -> Result<proto::FormatBuffersResponse> {
2409 let sender_id = envelope.original_sender_id()?;
2410 let format = this.update(&mut cx, |this, cx| {
2411 let shared_buffers = this
2412 .shared_buffers
2413 .get(&sender_id)
2414 .ok_or_else(|| anyhow!("peer has no buffers"))?;
2415 let mut buffers = HashSet::default();
2416 for buffer_id in &envelope.payload.buffer_ids {
2417 buffers.insert(
2418 shared_buffers
2419 .get(buffer_id)
2420 .cloned()
2421 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2422 );
2423 }
2424 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2425 })?;
2426
2427 let project_transaction = format.await?;
2428 let project_transaction = this.update(&mut cx, |this, cx| {
2429 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2430 });
2431 Ok(proto::FormatBuffersResponse {
2432 transaction: Some(project_transaction),
2433 })
2434 }
2435
2436 async fn handle_get_completions(
2437 this: ModelHandle<Self>,
2438 envelope: TypedEnvelope<proto::GetCompletions>,
2439 _: Arc<Client>,
2440 mut cx: AsyncAppContext,
2441 ) -> Result<proto::GetCompletionsResponse> {
2442 let sender_id = envelope.original_sender_id()?;
2443 let position = envelope
2444 .payload
2445 .position
2446 .and_then(language::proto::deserialize_anchor)
2447 .ok_or_else(|| anyhow!("invalid position"))?;
2448 let version = clock::Global::from(envelope.payload.version);
2449 let buffer = this.read_with(&cx, |this, _| {
2450 this.shared_buffers
2451 .get(&sender_id)
2452 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2453 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2454 })?;
2455 if !buffer
2456 .read_with(&cx, |buffer, _| buffer.version())
2457 .observed_all(&version)
2458 {
2459 Err(anyhow!("completion request depends on unreceived edits"))?;
2460 }
2461 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2462 let completions = this
2463 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2464 .await?;
2465
2466 Ok(proto::GetCompletionsResponse {
2467 completions: completions
2468 .iter()
2469 .map(language::proto::serialize_completion)
2470 .collect(),
2471 version: (&version).into(),
2472 })
2473 }
2474
2475 async fn handle_apply_additional_edits_for_completion(
2476 this: ModelHandle<Self>,
2477 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2478 _: Arc<Client>,
2479 mut cx: AsyncAppContext,
2480 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2481 let sender_id = envelope.original_sender_id()?;
2482 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2483 let buffer = this
2484 .shared_buffers
2485 .get(&sender_id)
2486 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2487 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2488 let language = buffer.read(cx).language();
2489 let completion = language::proto::deserialize_completion(
2490 envelope
2491 .payload
2492 .completion
2493 .ok_or_else(|| anyhow!("invalid completion"))?,
2494 language,
2495 )?;
2496 Ok::<_, anyhow::Error>(
2497 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2498 )
2499 })?;
2500
2501 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2502 transaction: apply_additional_edits
2503 .await?
2504 .as_ref()
2505 .map(language::proto::serialize_transaction),
2506 })
2507 }
2508
2509 async fn handle_get_code_actions(
2510 this: ModelHandle<Self>,
2511 envelope: TypedEnvelope<proto::GetCodeActions>,
2512 _: Arc<Client>,
2513 mut cx: AsyncAppContext,
2514 ) -> Result<proto::GetCodeActionsResponse> {
2515 let sender_id = envelope.original_sender_id()?;
2516 let start = envelope
2517 .payload
2518 .start
2519 .and_then(language::proto::deserialize_anchor)
2520 .ok_or_else(|| anyhow!("invalid start"))?;
2521 let end = envelope
2522 .payload
2523 .end
2524 .and_then(language::proto::deserialize_anchor)
2525 .ok_or_else(|| anyhow!("invalid end"))?;
2526 let buffer = this.update(&mut cx, |this, _| {
2527 this.shared_buffers
2528 .get(&sender_id)
2529 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2530 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2531 })?;
2532 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2533 if !version.observed(start.timestamp) || !version.observed(end.timestamp) {
2534 Err(anyhow!("code action request references unreceived edits"))?;
2535 }
2536 let code_actions = this.update(&mut cx, |this, cx| {
2537 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2538 })?;
2539
2540 Ok(proto::GetCodeActionsResponse {
2541 actions: code_actions
2542 .await?
2543 .iter()
2544 .map(language::proto::serialize_code_action)
2545 .collect(),
2546 version: (&version).into(),
2547 })
2548 }
2549
2550 async fn handle_apply_code_action(
2551 this: ModelHandle<Self>,
2552 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2553 _: Arc<Client>,
2554 mut cx: AsyncAppContext,
2555 ) -> Result<proto::ApplyCodeActionResponse> {
2556 let sender_id = envelope.original_sender_id()?;
2557 let action = language::proto::deserialize_code_action(
2558 envelope
2559 .payload
2560 .action
2561 .ok_or_else(|| anyhow!("invalid action"))?,
2562 )?;
2563 let apply_code_action = this.update(&mut cx, |this, cx| {
2564 let buffer = this
2565 .shared_buffers
2566 .get(&sender_id)
2567 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2568 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2569 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2570 })?;
2571
2572 let project_transaction = apply_code_action.await?;
2573 let project_transaction = this.update(&mut cx, |this, cx| {
2574 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2575 });
2576 Ok(proto::ApplyCodeActionResponse {
2577 transaction: Some(project_transaction),
2578 })
2579 }
2580
2581 async fn handle_get_definition(
2582 this: ModelHandle<Self>,
2583 envelope: TypedEnvelope<proto::GetDefinition>,
2584 _: Arc<Client>,
2585 mut cx: AsyncAppContext,
2586 ) -> Result<proto::GetDefinitionResponse> {
2587 let sender_id = envelope.original_sender_id()?;
2588 let position = envelope
2589 .payload
2590 .position
2591 .and_then(deserialize_anchor)
2592 .ok_or_else(|| anyhow!("invalid position"))?;
2593 let definitions = this.update(&mut cx, |this, cx| {
2594 let source_buffer = this
2595 .shared_buffers
2596 .get(&sender_id)
2597 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2598 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2599 if source_buffer.read(cx).can_resolve(&position) {
2600 Ok(this.definition(&source_buffer, position, cx))
2601 } else {
2602 Err(anyhow!("cannot resolve position"))
2603 }
2604 })?;
2605
2606 let definitions = definitions.await?;
2607
2608 this.update(&mut cx, |this, cx| {
2609 let mut response = proto::GetDefinitionResponse {
2610 definitions: Default::default(),
2611 };
2612 for definition in definitions {
2613 let buffer =
2614 this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx);
2615 response.definitions.push(proto::Definition {
2616 target_start: Some(serialize_anchor(&definition.target_range.start)),
2617 target_end: Some(serialize_anchor(&definition.target_range.end)),
2618 buffer: Some(buffer),
2619 });
2620 }
2621 Ok(response)
2622 })
2623 }
2624
2625 async fn handle_lsp_command<T: LspCommand>(
2626 this: ModelHandle<Self>,
2627 envelope: TypedEnvelope<T::ProtoRequest>,
2628 _: Arc<Client>,
2629 mut cx: AsyncAppContext,
2630 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
2631 where
2632 <T::LspRequest as lsp::request::Request>::Result: Send,
2633 {
2634 let sender_id = envelope.original_sender_id()?;
2635 let (request, buffer_version) = this.update(&mut cx, |this, cx| {
2636 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
2637 let buffer_handle = this
2638 .shared_buffers
2639 .get(&sender_id)
2640 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2641 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2642 let buffer_version = buffer_handle.read(cx).version();
2643 let request = T::from_proto(envelope.payload, this, &buffer_handle, cx)?;
2644 Ok::<_, anyhow::Error>((this.request_lsp(buffer_handle, request, cx), buffer_version))
2645 })?;
2646 let response = request.await?;
2647 this.update(&mut cx, |this, cx| {
2648 Ok(T::response_to_proto(
2649 response,
2650 this,
2651 sender_id,
2652 &buffer_version,
2653 cx,
2654 ))
2655 })
2656 }
2657
2658 async fn handle_open_buffer(
2659 this: ModelHandle<Self>,
2660 envelope: TypedEnvelope<proto::OpenBuffer>,
2661 _: Arc<Client>,
2662 mut cx: AsyncAppContext,
2663 ) -> anyhow::Result<proto::OpenBufferResponse> {
2664 let peer_id = envelope.original_sender_id()?;
2665 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2666 let open_buffer = this.update(&mut cx, |this, cx| {
2667 this.open_buffer(
2668 ProjectPath {
2669 worktree_id,
2670 path: PathBuf::from(envelope.payload.path).into(),
2671 },
2672 cx,
2673 )
2674 });
2675
2676 let buffer = open_buffer.await?;
2677 this.update(&mut cx, |this, cx| {
2678 Ok(proto::OpenBufferResponse {
2679 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
2680 })
2681 })
2682 }
2683
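    // Serializes a `ProjectTransaction` for `peer_id`, pairing each affected buffer (as that
    // peer should receive it) with its serialized transaction.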
2684 fn serialize_project_transaction_for_peer(
2685 &mut self,
2686 project_transaction: ProjectTransaction,
2687 peer_id: PeerId,
2688 cx: &AppContext,
2689 ) -> proto::ProjectTransaction {
2690 let mut serialized_transaction = proto::ProjectTransaction {
2691 buffers: Default::default(),
2692 transactions: Default::default(),
2693 };
2694 for (buffer, transaction) in project_transaction.0 {
2695 serialized_transaction
2696 .buffers
2697 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
2698 serialized_transaction
2699 .transactions
2700 .push(language::proto::serialize_transaction(&transaction));
2701 }
2702 serialized_transaction
2703 }
2704
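    // Reconstructs a `ProjectTransaction` received over RPC: resolves each serialized buffer,
    // waits for the transaction's edits to arrive, and optionally pushes the transaction onto
    // each buffer's undo history.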
2705 fn deserialize_project_transaction(
2706 &mut self,
2707 message: proto::ProjectTransaction,
2708 push_to_history: bool,
2709 cx: &mut ModelContext<Self>,
2710 ) -> Task<Result<ProjectTransaction>> {
2711 cx.spawn(|this, mut cx| async move {
2712 let mut project_transaction = ProjectTransaction::default();
2713 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
2714 let buffer = this
2715 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2716 .await?;
2717 let transaction = language::proto::deserialize_transaction(transaction)?;
2718 project_transaction.0.insert(buffer, transaction);
2719 }
2720 for (buffer, transaction) in &project_transaction.0 {
2721 buffer
2722 .update(&mut cx, |buffer, _| {
2723 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2724 })
2725 .await;
2726
2727 if push_to_history {
2728 buffer.update(&mut cx, |buffer, _| {
2729 buffer.push_transaction(transaction.clone(), Instant::now());
2730 });
2731 }
2732 }
2733
2734 Ok(project_transaction)
2735 })
2736 }
2737
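    // The first time a buffer is sent to a given peer its full state is serialized and the
    // buffer is recorded in `shared_buffers`; afterwards only the buffer id is sent.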
2738 fn serialize_buffer_for_peer(
2739 &mut self,
2740 buffer: &ModelHandle<Buffer>,
2741 peer_id: PeerId,
2742 cx: &AppContext,
2743 ) -> proto::Buffer {
2744 let buffer_id = buffer.read(cx).remote_id();
2745 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2746 match shared_buffers.entry(buffer_id) {
2747 hash_map::Entry::Occupied(_) => proto::Buffer {
2748 variant: Some(proto::buffer::Variant::Id(buffer_id)),
2749 },
2750 hash_map::Entry::Vacant(entry) => {
2751 entry.insert(buffer.clone());
2752 proto::Buffer {
2753 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
2754 }
2755 }
2756 }
2757 }
2758
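    // Resolves a serialized buffer: an `Id` refers to an already-open buffer (waiting for it
    // to finish opening if necessary), while a `State` payload constructs a new buffer,
    // registers it with the project, and signals `opened_buffer`.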
2759 fn deserialize_buffer(
2760 &mut self,
2761 buffer: proto::Buffer,
2762 cx: &mut ModelContext<Self>,
2763 ) -> Task<Result<ModelHandle<Buffer>>> {
2764 let replica_id = self.replica_id();
2765
2766 let mut opened_buffer_tx = self.opened_buffer.clone();
2767 let mut opened_buffer_rx = self.opened_buffer.subscribe();
2768 cx.spawn(|this, mut cx| async move {
2769 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
2770 proto::buffer::Variant::Id(id) => {
2771 let buffer = loop {
2772 let buffer = this.read_with(&cx, |this, cx| {
2773 this.open_buffers
2774 .get(&id)
2775 .and_then(|buffer| buffer.upgrade(cx))
2776 });
2777 if let Some(buffer) = buffer {
2778 break buffer;
2779 }
2780 opened_buffer_rx
2781 .recv()
2782 .await
2783 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
2784 };
2785 Ok(buffer)
2786 }
2787 proto::buffer::Variant::State(mut buffer) => {
2788 let mut buffer_worktree = None;
2789 let mut buffer_file = None;
2790 if let Some(file) = buffer.file.take() {
2791 this.read_with(&cx, |this, cx| {
2792 let worktree_id = WorktreeId::from_proto(file.worktree_id);
2793 let worktree =
2794 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
2795 anyhow!("no worktree found for id {}", file.worktree_id)
2796 })?;
2797 buffer_file =
2798 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
2799 as Box<dyn language::File>);
2800 buffer_worktree = Some(worktree);
2801 Ok::<_, anyhow::Error>(())
2802 })?;
2803 }
2804
2805 let buffer = cx.add_model(|cx| {
2806 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
2807 });
2808 this.update(&mut cx, |this, cx| {
2809 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
2810 })?;
2811
2812 let _ = opened_buffer_tx.send(()).await;
2813 Ok(buffer)
2814 }
2815 }
2816 })
2817 }
2818
2819 async fn handle_close_buffer(
2820 this: ModelHandle<Self>,
2821 envelope: TypedEnvelope<proto::CloseBuffer>,
2822 _: Arc<Client>,
2823 mut cx: AsyncAppContext,
2824 ) -> anyhow::Result<()> {
2825 this.update(&mut cx, |this, cx| {
2826 if let Some(shared_buffers) =
2827 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
2828 {
2829 shared_buffers.remove(&envelope.payload.buffer_id);
2830 cx.notify();
2831 }
2832 Ok(())
2833 })
2834 }
2835
2836 async fn handle_buffer_saved(
2837 this: ModelHandle<Self>,
2838 envelope: TypedEnvelope<proto::BufferSaved>,
2839 _: Arc<Client>,
2840 mut cx: AsyncAppContext,
2841 ) -> Result<()> {
2842 let version = envelope.payload.version.try_into()?;
2843 let mtime = envelope
2844 .payload
2845 .mtime
2846 .ok_or_else(|| anyhow!("missing mtime"))?
2847 .into();
2848
2849 this.update(&mut cx, |this, cx| {
2850 let buffer = this
2851 .open_buffers
2852 .get(&envelope.payload.buffer_id)
2853 .and_then(|buffer| buffer.upgrade(cx));
2854 if let Some(buffer) = buffer {
2855 buffer.update(cx, |buffer, cx| {
2856 buffer.did_save(version, mtime, None, cx);
2857 });
2858 }
2859 Ok(())
2860 })
2861 }
2862
2863 async fn handle_buffer_reloaded(
2864 this: ModelHandle<Self>,
2865 envelope: TypedEnvelope<proto::BufferReloaded>,
2866 _: Arc<Client>,
2867 mut cx: AsyncAppContext,
2868 ) -> Result<()> {
2869 let payload = envelope.payload.clone();
2870 let version = payload.version.try_into()?;
2871 let mtime = payload
2872 .mtime
2873 .ok_or_else(|| anyhow!("missing mtime"))?
2874 .into();
2875 this.update(&mut cx, |this, cx| {
2876 let buffer = this
2877 .open_buffers
2878 .get(&payload.buffer_id)
2879 .and_then(|buffer| buffer.upgrade(cx));
2880 if let Some(buffer) = buffer {
2881 buffer.update(cx, |buffer, cx| {
2882 buffer.did_reload(version, mtime, cx);
2883 });
2884 }
2885 Ok(())
2886 })
2887 }
2888
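    // Fuzzy-matches `query` against the paths of all non-weak worktrees, producing up to
    // `max_results` matches on the background executor.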
2889 pub fn match_paths<'a>(
2890 &self,
2891 query: &'a str,
2892 include_ignored: bool,
2893 smart_case: bool,
2894 max_results: usize,
2895 cancel_flag: &'a AtomicBool,
2896 cx: &AppContext,
2897 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
2898 let worktrees = self
2899 .worktrees(cx)
2900 .filter(|worktree| !worktree.read(cx).is_weak())
2901 .collect::<Vec<_>>();
2902 let include_root_name = worktrees.len() > 1;
2903 let candidate_sets = worktrees
2904 .into_iter()
2905 .map(|worktree| CandidateSet {
2906 snapshot: worktree.read(cx).snapshot(),
2907 include_ignored,
2908 include_root_name,
2909 })
2910 .collect::<Vec<_>>();
2911
2912 let background = cx.background().clone();
2913 async move {
2914 fuzzy::match_paths(
2915 candidate_sets.as_slice(),
2916 query,
2917 smart_case,
2918 max_results,
2919 cancel_flag,
2920 background,
2921 )
2922 .await
2923 }
2924 }
2925}
2926
2927impl WorktreeHandle {
2928 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2929 match self {
2930 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2931 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2932 }
2933 }
2934}
2935
2936impl OpenBuffer {
2937 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
2938 match self {
2939 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
2940 OpenBuffer::Loading(_) => None,
2941 }
2942 }
2943}
2944
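// Adapts a worktree `Snapshot` to the fuzzy matcher by exposing its files as
// `PathMatchCandidate`s.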
2945struct CandidateSet {
2946 snapshot: Snapshot,
2947 include_ignored: bool,
2948 include_root_name: bool,
2949}
2950
2951impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
2952 type Candidates = CandidateSetIter<'a>;
2953
2954 fn id(&self) -> usize {
2955 self.snapshot.id().to_usize()
2956 }
2957
2958 fn len(&self) -> usize {
2959 if self.include_ignored {
2960 self.snapshot.file_count()
2961 } else {
2962 self.snapshot.visible_file_count()
2963 }
2964 }
2965
2966 fn prefix(&self) -> Arc<str> {
2967 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
2968 self.snapshot.root_name().into()
2969 } else if self.include_root_name {
2970 format!("{}/", self.snapshot.root_name()).into()
2971 } else {
2972 "".into()
2973 }
2974 }
2975
2976 fn candidates(&'a self, start: usize) -> Self::Candidates {
2977 CandidateSetIter {
2978 traversal: self.snapshot.files(self.include_ignored, start),
2979 }
2980 }
2981}
2982
2983struct CandidateSetIter<'a> {
2984 traversal: Traversal<'a>,
2985}
2986
2987impl<'a> Iterator for CandidateSetIter<'a> {
2988 type Item = PathMatchCandidate<'a>;
2989
2990 fn next(&mut self) -> Option<Self::Item> {
2991 self.traversal.next().map(|entry| {
2992 if let EntryKind::File(char_bag) = entry.kind {
2993 PathMatchCandidate {
2994 path: &entry.path,
2995 char_bag,
2996 }
2997 } else {
2998 unreachable!()
2999 }
3000 })
3001 }
3002}
3003
3004impl Entity for Project {
3005 type Event = Event;
3006
3007 fn release(&mut self, _: &mut gpui::MutableAppContext) {
3008 match &self.client_state {
3009 ProjectClientState::Local { remote_id_rx, .. } => {
3010 if let Some(project_id) = *remote_id_rx.borrow() {
3011 self.client
3012 .send(proto::UnregisterProject { project_id })
3013 .log_err();
3014 }
3015 }
3016 ProjectClientState::Remote { remote_id, .. } => {
3017 self.client
3018 .send(proto::LeaveProject {
3019 project_id: *remote_id,
3020 })
3021 .log_err();
3022 }
3023 }
3024 }
3025
3026 fn app_will_quit(
3027 &mut self,
3028 _: &mut MutableAppContext,
3029 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
3030 use futures::FutureExt;
3031
3032 let shutdown_futures = self
3033 .language_servers
3034 .drain()
3035 .filter_map(|(_, server)| server.shutdown())
3036 .collect::<Vec<_>>();
3037 Some(
3038 async move {
3039 futures::future::join_all(shutdown_futures).await;
3040 }
3041 .boxed(),
3042 )
3043 }
3044}
3045
3046impl Collaborator {
3047 fn from_proto(
3048 message: proto::Collaborator,
3049 user_store: &ModelHandle<UserStore>,
3050 cx: &mut AsyncAppContext,
3051 ) -> impl Future<Output = Result<Self>> {
3052 let user = user_store.update(cx, |user_store, cx| {
3053 user_store.fetch_user(message.user_id, cx)
3054 });
3055
3056 async move {
3057 Ok(Self {
3058 peer_id: PeerId(message.peer_id),
3059 user: user.await?,
3060 replica_id: message.replica_id as ReplicaId,
3061 })
3062 }
3063 }
3064}
3065
3066impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
3067 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
3068 Self {
3069 worktree_id,
3070 path: path.as_ref().into(),
3071 }
3072 }
3073}
3074
3075impl From<lsp::CreateFileOptions> for fs::CreateOptions {
3076 fn from(options: lsp::CreateFileOptions) -> Self {
3077 Self {
3078 overwrite: options.overwrite.unwrap_or(false),
3079 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3080 }
3081 }
3082}
3083
3084impl From<lsp::RenameFileOptions> for fs::RenameOptions {
3085 fn from(options: lsp::RenameFileOptions) -> Self {
3086 Self {
3087 overwrite: options.overwrite.unwrap_or(false),
3088 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3089 }
3090 }
3091}
3092
3093impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
3094 fn from(options: lsp::DeleteFileOptions) -> Self {
3095 Self {
3096 recursive: options.recursive.unwrap_or(false),
3097 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
3098 }
3099 }
3100}
3101
3102#[cfg(test)]
3103mod tests {
3104 use super::{Event, *};
3105 use fs::RealFs;
3106 use futures::StreamExt;
3107 use gpui::test::subscribe;
3108 use language::{
3109 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
3110 };
3111 use lsp::Url;
3112 use serde_json::json;
3113 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
3114 use unindent::Unindent as _;
3115 use util::test::temp_tree;
3116 use worktree::WorktreeHandle as _;
3117
3118 #[gpui::test]
3119 async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
3120 let dir = temp_tree(json!({
3121 "root": {
3122 "apple": "",
3123 "banana": {
3124 "carrot": {
3125 "date": "",
3126 "endive": "",
3127 }
3128 },
3129 "fennel": {
3130 "grape": "",
3131 }
3132 }
3133 }));
3134
3135 let root_link_path = dir.path().join("root_link");
3136 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3137 unix::fs::symlink(
3138 &dir.path().join("root/fennel"),
3139 &dir.path().join("root/finnochio"),
3140 )
3141 .unwrap();
3142
3143 let project = Project::test(Arc::new(RealFs), &mut cx);
3144
3145 let (tree, _) = project
3146 .update(&mut cx, |project, cx| {
3147 project.find_or_create_local_worktree(&root_link_path, false, cx)
3148 })
3149 .await
3150 .unwrap();
3151
3152 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3153 .await;
3154 cx.read(|cx| {
3155 let tree = tree.read(cx);
3156 assert_eq!(tree.file_count(), 5);
3157 assert_eq!(
3158 tree.inode_for_path("fennel/grape"),
3159 tree.inode_for_path("finnochio/grape")
3160 );
3161 });
3162
3163 let cancel_flag = Default::default();
3164 let results = project
3165 .read_with(&cx, |project, cx| {
3166 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3167 })
3168 .await;
3169 assert_eq!(
3170 results
3171 .into_iter()
3172 .map(|result| result.path)
3173 .collect::<Vec<Arc<Path>>>(),
3174 vec![
3175 PathBuf::from("banana/carrot/date").into(),
3176 PathBuf::from("banana/carrot/endive").into(),
3177 ]
3178 );
3179 }
3180
3181 #[gpui::test]
3182 async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
3183 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3184 let progress_token = language_server_config
3185 .disk_based_diagnostics_progress_token
3186 .clone()
3187 .unwrap();
3188
3189 let language = Arc::new(Language::new(
3190 LanguageConfig {
3191 name: "Rust".to_string(),
3192 path_suffixes: vec!["rs".to_string()],
3193 language_server: Some(language_server_config),
3194 ..Default::default()
3195 },
3196 Some(tree_sitter_rust::language()),
3197 ));
3198
3199 let fs = FakeFs::new(cx.background());
3200 fs.insert_tree(
3201 "/dir",
3202 json!({
3203 "a.rs": "fn a() { A }",
3204 "b.rs": "const y: i32 = 1",
3205 }),
3206 )
3207 .await;
3208
3209 let project = Project::test(fs, &mut cx);
3210 project.update(&mut cx, |project, _| {
3211 Arc::get_mut(&mut project.languages).unwrap().add(language);
3212 });
3213
3214 let (tree, _) = project
3215 .update(&mut cx, |project, cx| {
3216 project.find_or_create_local_worktree("/dir", false, cx)
3217 })
3218 .await
3219 .unwrap();
3220 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3221
3222 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3223 .await;
3224
3225 // Cause worktree to start the fake language server
3226 let _buffer = project
3227 .update(&mut cx, |project, cx| {
3228 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3229 })
3230 .await
3231 .unwrap();
3232
3233 let mut events = subscribe(&project, &mut cx);
3234
3235 let mut fake_server = fake_servers.next().await.unwrap();
3236 fake_server.start_progress(&progress_token).await;
3237 assert_eq!(
3238 events.next().await.unwrap(),
3239 Event::DiskBasedDiagnosticsStarted
3240 );
3241
3242 fake_server.start_progress(&progress_token).await;
3243 fake_server.end_progress(&progress_token).await;
3244 fake_server.start_progress(&progress_token).await;
3245
3246 fake_server
3247 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3248 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3249 version: None,
3250 diagnostics: vec![lsp::Diagnostic {
3251 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3252 severity: Some(lsp::DiagnosticSeverity::ERROR),
3253 message: "undefined variable 'A'".to_string(),
3254 ..Default::default()
3255 }],
3256 })
3257 .await;
3258 assert_eq!(
3259 events.next().await.unwrap(),
3260 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3261 );
3262
3263 fake_server.end_progress(&progress_token).await;
3264 fake_server.end_progress(&progress_token).await;
3265 assert_eq!(
3266 events.next().await.unwrap(),
3267 Event::DiskBasedDiagnosticsUpdated
3268 );
3269 assert_eq!(
3270 events.next().await.unwrap(),
3271 Event::DiskBasedDiagnosticsFinished
3272 );
3273
3274 let buffer = project
3275 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3276 .await
3277 .unwrap();
3278
3279 buffer.read_with(&cx, |buffer, _| {
3280 let snapshot = buffer.snapshot();
3281 let diagnostics = snapshot
3282 .diagnostics_in_range::<_, Point>(0..buffer.len())
3283 .collect::<Vec<_>>();
3284 assert_eq!(
3285 diagnostics,
3286 &[DiagnosticEntry {
3287 range: Point::new(0, 9)..Point::new(0, 10),
3288 diagnostic: Diagnostic {
3289 severity: lsp::DiagnosticSeverity::ERROR,
3290 message: "undefined variable 'A'".to_string(),
3291 group_id: 0,
3292 is_primary: true,
3293 ..Default::default()
3294 }
3295 }]
3296 )
3297 });
3298 }
3299
3300 #[gpui::test]
3301 async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
3302 let dir = temp_tree(json!({
3303 "root": {
3304 "dir1": {},
3305 "dir2": {
3306 "dir3": {}
3307 }
3308 }
3309 }));
3310
3311 let project = Project::test(Arc::new(RealFs), &mut cx);
3312 let (tree, _) = project
3313 .update(&mut cx, |project, cx| {
3314 project.find_or_create_local_worktree(&dir.path(), false, cx)
3315 })
3316 .await
3317 .unwrap();
3318
3319 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3320 .await;
3321
3322 let cancel_flag = Default::default();
3323 let results = project
3324 .read_with(&cx, |project, cx| {
3325 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3326 })
3327 .await;
3328
3329 assert!(results.is_empty());
3330 }
3331
3332 #[gpui::test]
3333 async fn test_definition(mut cx: gpui::TestAppContext) {
3334 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3335 let language = Arc::new(Language::new(
3336 LanguageConfig {
3337 name: "Rust".to_string(),
3338 path_suffixes: vec!["rs".to_string()],
3339 language_server: Some(language_server_config),
3340 ..Default::default()
3341 },
3342 Some(tree_sitter_rust::language()),
3343 ));
3344
3345 let fs = FakeFs::new(cx.background());
3346 fs.insert_tree(
3347 "/dir",
3348 json!({
3349 "a.rs": "const fn a() { A }",
3350 "b.rs": "const y: i32 = crate::a()",
3351 }),
3352 )
3353 .await;
3354
3355 let project = Project::test(fs, &mut cx);
3356 project.update(&mut cx, |project, _| {
3357 Arc::get_mut(&mut project.languages).unwrap().add(language);
3358 });
3359
3360 let (tree, _) = project
3361 .update(&mut cx, |project, cx| {
3362 project.find_or_create_local_worktree("/dir/b.rs", false, cx)
3363 })
3364 .await
3365 .unwrap();
3366 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3367 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3368 .await;
3369
3370 let buffer = project
3371 .update(&mut cx, |project, cx| {
3372 project.open_buffer(
3373 ProjectPath {
3374 worktree_id,
3375 path: Path::new("").into(),
3376 },
3377 cx,
3378 )
3379 })
3380 .await
3381 .unwrap();
3382
3383 let mut fake_server = fake_servers.next().await.unwrap();
3384 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params| {
3385 let params = params.text_document_position_params;
3386 assert_eq!(
3387 params.text_document.uri.to_file_path().unwrap(),
3388 Path::new("/dir/b.rs"),
3389 );
3390 assert_eq!(params.position, lsp::Position::new(0, 22));
3391
3392 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3393 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
3394 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3395 )))
3396 });
3397
3398 let mut definitions = project
3399 .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
3400 .await
3401 .unwrap();
3402
3403 assert_eq!(definitions.len(), 1);
3404 let definition = definitions.pop().unwrap();
3405 cx.update(|cx| {
3406 let target_buffer = definition.target_buffer.read(cx);
3407 assert_eq!(
3408 target_buffer
3409 .file()
3410 .unwrap()
3411 .as_local()
3412 .unwrap()
3413 .abs_path(cx),
3414 Path::new("/dir/a.rs"),
3415 );
3416 assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
3417 assert_eq!(
3418 list_worktrees(&project, cx),
3419 [("/dir/b.rs".as_ref(), false), ("/dir/a.rs".as_ref(), true)]
3420 );
3421
3422 drop(definition);
3423 });
3424 cx.read(|cx| {
3425 assert_eq!(
3426 list_worktrees(&project, cx),
3427 [("/dir/b.rs".as_ref(), false)]
3428 );
3429 });
3430
3431 fn list_worktrees<'a>(
3432 project: &'a ModelHandle<Project>,
3433 cx: &'a AppContext,
3434 ) -> Vec<(&'a Path, bool)> {
3435 project
3436 .read(cx)
3437 .worktrees(cx)
3438 .map(|worktree| {
3439 let worktree = worktree.read(cx);
3440 (
3441 worktree.as_local().unwrap().abs_path().as_ref(),
3442 worktree.is_weak(),
3443 )
3444 })
3445 .collect::<Vec<_>>()
3446 }
3447 }
3448
3449 #[gpui::test]
3450 async fn test_save_file(mut cx: gpui::TestAppContext) {
3451 let fs = FakeFs::new(cx.background());
3452 fs.insert_tree(
3453 "/dir",
3454 json!({
3455 "file1": "the old contents",
3456 }),
3457 )
3458 .await;
3459
3460 let project = Project::test(fs.clone(), &mut cx);
3461 let worktree_id = project
3462 .update(&mut cx, |p, cx| {
3463 p.find_or_create_local_worktree("/dir", false, cx)
3464 })
3465 .await
3466 .unwrap()
3467 .0
3468 .read_with(&cx, |tree, _| tree.id());
3469
3470 let buffer = project
3471 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3472 .await
3473 .unwrap();
3474 buffer
3475 .update(&mut cx, |buffer, cx| {
3476 assert_eq!(buffer.text(), "the old contents");
3477 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3478 buffer.save(cx)
3479 })
3480 .await
3481 .unwrap();
3482
3483 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3484 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3485 }
3486
3487 #[gpui::test]
3488 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
3489 let fs = FakeFs::new(cx.background());
3490 fs.insert_tree(
3491 "/dir",
3492 json!({
3493 "file1": "the old contents",
3494 }),
3495 )
3496 .await;
3497
3498 let project = Project::test(fs.clone(), &mut cx);
3499 let worktree_id = project
3500 .update(&mut cx, |p, cx| {
3501 p.find_or_create_local_worktree("/dir/file1", false, cx)
3502 })
3503 .await
3504 .unwrap()
3505 .0
3506 .read_with(&cx, |tree, _| tree.id());
3507
3508 let buffer = project
3509 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3510 .await
3511 .unwrap();
3512 buffer
3513 .update(&mut cx, |buffer, cx| {
3514 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3515 buffer.save(cx)
3516 })
3517 .await
3518 .unwrap();
3519
3520 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3521 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3522 }
3523
3524 #[gpui::test(retries = 5)]
3525 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3526 let dir = temp_tree(json!({
3527 "a": {
3528 "file1": "",
3529 "file2": "",
3530 "file3": "",
3531 },
3532 "b": {
3533 "c": {
3534 "file4": "",
3535 "file5": "",
3536 }
3537 }
3538 }));
3539
3540 let project = Project::test(Arc::new(RealFs), &mut cx);
3541 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3542
3543 let (tree, _) = project
3544 .update(&mut cx, |p, cx| {
3545 p.find_or_create_local_worktree(dir.path(), false, cx)
3546 })
3547 .await
3548 .unwrap();
3549 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3550
3551 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3552 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3553 async move { buffer.await.unwrap() }
3554 };
3555 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3556 tree.read_with(cx, |tree, _| {
3557 tree.entry_for_path(path)
3558                     .unwrap_or_else(|| panic!("no entry for path {}", path))
3559 .id
3560 })
3561 };
3562
3563 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3564 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3565 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3566 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3567
3568 let file2_id = id_for_path("a/file2", &cx);
3569 let file3_id = id_for_path("a/file3", &cx);
3570 let file4_id = id_for_path("b/c/file4", &cx);
3571
3572 // Wait for the initial scan.
3573 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3574 .await;
3575
3576 // Create a remote copy of this worktree.
3577 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
3578 let (remote, load_task) = cx.update(|cx| {
3579 Worktree::remote(
3580 1,
3581 1,
3582 initial_snapshot.to_proto(&Default::default(), Default::default()),
3583 rpc.clone(),
3584 cx,
3585 )
3586 });
3587 load_task.await;
3588
3589 cx.read(|cx| {
3590 assert!(!buffer2.read(cx).is_dirty());
3591 assert!(!buffer3.read(cx).is_dirty());
3592 assert!(!buffer4.read(cx).is_dirty());
3593 assert!(!buffer5.read(cx).is_dirty());
3594 });
3595
3596 // Rename and delete files and directories.
3597 tree.flush_fs_events(&cx).await;
3598 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3599 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3600 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3601 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3602 tree.flush_fs_events(&cx).await;
3603
3604 let expected_paths = vec![
3605 "a",
3606 "a/file1",
3607 "a/file2.new",
3608 "b",
3609 "d",
3610 "d/file3",
3611 "d/file4",
3612 ];
3613
3614 cx.read(|app| {
3615 assert_eq!(
3616 tree.read(app)
3617 .paths()
3618 .map(|p| p.to_str().unwrap())
3619 .collect::<Vec<_>>(),
3620 expected_paths
3621 );
3622
3623 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3624 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3625 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3626
3627 assert_eq!(
3628 buffer2.read(app).file().unwrap().path().as_ref(),
3629 Path::new("a/file2.new")
3630 );
3631 assert_eq!(
3632 buffer3.read(app).file().unwrap().path().as_ref(),
3633 Path::new("d/file3")
3634 );
3635 assert_eq!(
3636 buffer4.read(app).file().unwrap().path().as_ref(),
3637 Path::new("d/file4")
3638 );
3639 assert_eq!(
3640 buffer5.read(app).file().unwrap().path().as_ref(),
3641 Path::new("b/c/file5")
3642 );
3643
3644 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3645 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3646 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3647 assert!(buffer5.read(app).file().unwrap().is_deleted());
3648 });
3649
3650 // Update the remote worktree. Check that it becomes consistent with the
3651 // local worktree.
3652 remote.update(&mut cx, |remote, cx| {
3653 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
3654 &initial_snapshot,
3655 1,
3656 1,
3657 0,
3658 true,
3659 );
3660 remote
3661 .as_remote_mut()
3662 .unwrap()
3663 .snapshot
3664 .apply_remote_update(update_message)
3665 .unwrap();
3666
3667 assert_eq!(
3668 remote
3669 .paths()
3670 .map(|p| p.to_str().unwrap())
3671 .collect::<Vec<_>>(),
3672 expected_paths
3673 );
3674 });
3675 }
3676
3677 #[gpui::test]
3678 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3679 let fs = FakeFs::new(cx.background());
3680 fs.insert_tree(
3681 "/the-dir",
3682 json!({
3683 "a.txt": "a-contents",
3684 "b.txt": "b-contents",
3685 }),
3686 )
3687 .await;
3688
3689 let project = Project::test(fs.clone(), &mut cx);
3690 let worktree_id = project
3691 .update(&mut cx, |p, cx| {
3692 p.find_or_create_local_worktree("/the-dir", false, cx)
3693 })
3694 .await
3695 .unwrap()
3696 .0
3697 .read_with(&cx, |tree, _| tree.id());
3698
3699 // Spawn multiple tasks to open paths, repeating some paths.
3700 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3701 (
3702 p.open_buffer((worktree_id, "a.txt"), cx),
3703 p.open_buffer((worktree_id, "b.txt"), cx),
3704 p.open_buffer((worktree_id, "a.txt"), cx),
3705 )
3706 });
3707
3708 let buffer_a_1 = buffer_a_1.await.unwrap();
3709 let buffer_a_2 = buffer_a_2.await.unwrap();
3710 let buffer_b = buffer_b.await.unwrap();
3711 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3712 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3713
3714 // There is only one buffer per path.
3715 let buffer_a_id = buffer_a_1.id();
3716 assert_eq!(buffer_a_2.id(), buffer_a_id);
3717
3718 // Open the same path again while it is still open.
3719 drop(buffer_a_1);
3720 let buffer_a_3 = project
3721 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3722 .await
3723 .unwrap();
3724
3725 // There's still only one buffer per path.
3726 assert_eq!(buffer_a_3.id(), buffer_a_id);
3727 }
3728
3729 #[gpui::test]
3730 async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
3731 use std::fs;
3732
3733 let dir = temp_tree(json!({
3734 "file1": "abc",
3735 "file2": "def",
3736 "file3": "ghi",
3737 }));
3738
3739 let project = Project::test(Arc::new(RealFs), &mut cx);
3740 let (worktree, _) = project
3741 .update(&mut cx, |p, cx| {
3742 p.find_or_create_local_worktree(dir.path(), false, cx)
3743 })
3744 .await
3745 .unwrap();
3746 let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
3747
3748 worktree.flush_fs_events(&cx).await;
3749 worktree
3750 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3751 .await;
3752
3753 let buffer1 = project
3754 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3755 .await
3756 .unwrap();
3757 let events = Rc::new(RefCell::new(Vec::new()));
3758
3759 // initially, the buffer isn't dirty.
3760 buffer1.update(&mut cx, |buffer, cx| {
3761 cx.subscribe(&buffer1, {
3762 let events = events.clone();
3763 move |_, _, event, _| events.borrow_mut().push(event.clone())
3764 })
3765 .detach();
3766
3767 assert!(!buffer.is_dirty());
3768 assert!(events.borrow().is_empty());
3769
3770 buffer.edit(vec![1..2], "", cx);
3771 });
3772
3773 // after the first edit, the buffer is dirty, and emits a dirtied event.
3774 buffer1.update(&mut cx, |buffer, cx| {
3775             assert_eq!(buffer.text(), "ac");
3776 assert!(buffer.is_dirty());
3777 assert_eq!(
3778 *events.borrow(),
3779 &[language::Event::Edited, language::Event::Dirtied]
3780 );
3781 events.borrow_mut().clear();
3782 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
3783 });
3784
3785 // after saving, the buffer is not dirty, and emits a saved event.
3786 buffer1.update(&mut cx, |buffer, cx| {
3787 assert!(!buffer.is_dirty());
3788 assert_eq!(*events.borrow(), &[language::Event::Saved]);
3789 events.borrow_mut().clear();
3790
3791 buffer.edit(vec![1..1], "B", cx);
3792 buffer.edit(vec![2..2], "D", cx);
3793 });
3794
3795 // after editing again, the buffer is dirty, and emits another dirty event.
3796 buffer1.update(&mut cx, |buffer, cx| {
3797             assert_eq!(buffer.text(), "aBDc");
3798 assert!(buffer.is_dirty());
3799 assert_eq!(
3800 *events.borrow(),
3801 &[
3802 language::Event::Edited,
3803 language::Event::Dirtied,
3804 language::Event::Edited,
3805 ],
3806 );
3807 events.borrow_mut().clear();
3808
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
3811 buffer.edit([1..3], "", cx);
            assert_eq!(buffer.text(), "ac");
3813 assert!(buffer.is_dirty());
3814 });
3815
3816 assert_eq!(*events.borrow(), &[language::Event::Edited]);
3817
3818 // When a file is deleted, the buffer is considered dirty.
3819 let events = Rc::new(RefCell::new(Vec::new()));
3820 let buffer2 = project
3821 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
3822 .await
3823 .unwrap();
3824 buffer2.update(&mut cx, |_, cx| {
3825 cx.subscribe(&buffer2, {
3826 let events = events.clone();
3827 move |_, _, event, _| events.borrow_mut().push(event.clone())
3828 })
3829 .detach();
3830 });
3831
3832 fs::remove_file(dir.path().join("file2")).unwrap();
3833 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
3834 assert_eq!(
3835 *events.borrow(),
3836 &[language::Event::Dirtied, language::Event::FileHandleChanged]
3837 );
3838
        // When a file that is already dirty is deleted, we don't emit a Dirtied event.
3840 let events = Rc::new(RefCell::new(Vec::new()));
3841 let buffer3 = project
3842 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
3843 .await
3844 .unwrap();
3845 buffer3.update(&mut cx, |_, cx| {
3846 cx.subscribe(&buffer3, {
3847 let events = events.clone();
3848 move |_, _, event, _| events.borrow_mut().push(event.clone())
3849 })
3850 .detach();
3851 });
3852
3853 worktree.flush_fs_events(&cx).await;
3854 buffer3.update(&mut cx, |buffer, cx| {
3855 buffer.edit(Some(0..0), "x", cx);
3856 });
3857 events.borrow_mut().clear();
3858 fs::remove_file(dir.path().join("file3")).unwrap();
3859 buffer3
3860 .condition(&cx, |_, _| !events.borrow().is_empty())
3861 .await;
3862 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
3863 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3864 }
3865
3866 #[gpui::test]
3867 async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
3868 use std::fs;
3869
3870 let initial_contents = "aaa\nbbbbb\nc\n";
3871 let dir = temp_tree(json!({ "the-file": initial_contents }));
3872
3873 let project = Project::test(Arc::new(RealFs), &mut cx);
3874 let (worktree, _) = project
3875 .update(&mut cx, |p, cx| {
3876 p.find_or_create_local_worktree(dir.path(), false, cx)
3877 })
3878 .await
3879 .unwrap();
3880 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
3881
3882 worktree
3883 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3884 .await;
3885
3886 let abs_path = dir.path().join("the-file");
3887 let buffer = project
3888 .update(&mut cx, |p, cx| {
3889 p.open_buffer((worktree_id, "the-file"), cx)
3890 })
3891 .await
3892 .unwrap();
3893
3894 // TODO
3895 // Add a cursor on each row.
3896 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
3897 // assert!(!buffer.is_dirty());
3898 // buffer.add_selection_set(
3899 // &(0..3)
3900 // .map(|row| Selection {
3901 // id: row as usize,
3902 // start: Point::new(row, 1),
3903 // end: Point::new(row, 1),
3904 // reversed: false,
3905 // goal: SelectionGoal::None,
3906 // })
3907 // .collect::<Vec<_>>(),
3908 // cx,
3909 // )
3910 // });
3911
3912 // Change the file on disk, adding two new lines of text, and removing
3913 // one line.
3914 buffer.read_with(&cx, |buffer, _| {
3915 assert!(!buffer.is_dirty());
3916 assert!(!buffer.has_conflict());
3917 });
3918 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3919 fs::write(&abs_path, new_contents).unwrap();
3920
3921 // Because the buffer was not modified, it is reloaded from disk. Its
3922 // contents are edited according to the diff between the old and new
3923 // file contents.
3924 buffer
3925 .condition(&cx, |buffer, _| buffer.text() == new_contents)
3926 .await;
3927
3928 buffer.update(&mut cx, |buffer, _| {
3929 assert_eq!(buffer.text(), new_contents);
3930 assert!(!buffer.is_dirty());
3931 assert!(!buffer.has_conflict());
3932
3933 // TODO
3934 // let cursor_positions = buffer
3935 // .selection_set(selection_set_id)
3936 // .unwrap()
3937 // .selections::<Point>(&*buffer)
3938 // .map(|selection| {
3939 // assert_eq!(selection.start, selection.end);
3940 // selection.start
3941 // })
3942 // .collect::<Vec<_>>();
3943 // assert_eq!(
3944 // cursor_positions,
3945 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
3946 // );
3947 });
3948
        // Modify the buffer, making it dirty.
3950 buffer.update(&mut cx, |buffer, cx| {
3951 buffer.edit(vec![0..0], " ", cx);
3952 assert!(buffer.is_dirty());
3953 assert!(!buffer.has_conflict());
3954 });
3955
3956 // Change the file on disk again, adding blank lines to the beginning.
3957 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
3958
3959 // Because the buffer is modified, it doesn't reload from disk, but is
3960 // marked as having a conflict.
3961 buffer
3962 .condition(&cx, |buffer, _| buffer.has_conflict())
3963 .await;
3964 }
3965
3966 #[gpui::test]
3967 async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
3968 let fs = FakeFs::new(cx.background());
3969 fs.insert_tree(
3970 "/the-dir",
3971 json!({
3972 "a.rs": "
3973 fn foo(mut v: Vec<usize>) {
3974 for x in &v {
3975 v.push(1);
3976 }
3977 }
3978 "
3979 .unindent(),
3980 }),
3981 )
3982 .await;
3983
3984 let project = Project::test(fs.clone(), &mut cx);
3985 let (worktree, _) = project
3986 .update(&mut cx, |p, cx| {
3987 p.find_or_create_local_worktree("/the-dir", false, cx)
3988 })
3989 .await
3990 .unwrap();
3991 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
3992
3993 let buffer = project
3994 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3995 .await
3996 .unwrap();
3997
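        // Publish diagnostics whose `related_information` entries point back
        // to a primary error or warning; the assertions below verify that the
        // hints end up grouped with their primary diagnostic.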
3998 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3999 let message = lsp::PublishDiagnosticsParams {
4000 uri: buffer_uri.clone(),
4001 diagnostics: vec![
4002 lsp::Diagnostic {
4003 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4004 severity: Some(DiagnosticSeverity::WARNING),
4005 message: "error 1".to_string(),
4006 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4007 location: lsp::Location {
4008 uri: buffer_uri.clone(),
4009 range: lsp::Range::new(
4010 lsp::Position::new(1, 8),
4011 lsp::Position::new(1, 9),
4012 ),
4013 },
4014 message: "error 1 hint 1".to_string(),
4015 }]),
4016 ..Default::default()
4017 },
4018 lsp::Diagnostic {
4019 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4020 severity: Some(DiagnosticSeverity::HINT),
4021 message: "error 1 hint 1".to_string(),
4022 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4023 location: lsp::Location {
4024 uri: buffer_uri.clone(),
4025 range: lsp::Range::new(
4026 lsp::Position::new(1, 8),
4027 lsp::Position::new(1, 9),
4028 ),
4029 },
4030 message: "original diagnostic".to_string(),
4031 }]),
4032 ..Default::default()
4033 },
4034 lsp::Diagnostic {
4035 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4036 severity: Some(DiagnosticSeverity::ERROR),
4037 message: "error 2".to_string(),
4038 related_information: Some(vec![
4039 lsp::DiagnosticRelatedInformation {
4040 location: lsp::Location {
4041 uri: buffer_uri.clone(),
4042 range: lsp::Range::new(
4043 lsp::Position::new(1, 13),
4044 lsp::Position::new(1, 15),
4045 ),
4046 },
4047 message: "error 2 hint 1".to_string(),
4048 },
4049 lsp::DiagnosticRelatedInformation {
4050 location: lsp::Location {
4051 uri: buffer_uri.clone(),
4052 range: lsp::Range::new(
4053 lsp::Position::new(1, 13),
4054 lsp::Position::new(1, 15),
4055 ),
4056 },
4057 message: "error 2 hint 2".to_string(),
4058 },
4059 ]),
4060 ..Default::default()
4061 },
4062 lsp::Diagnostic {
4063 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4064 severity: Some(DiagnosticSeverity::HINT),
4065 message: "error 2 hint 1".to_string(),
4066 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4067 location: lsp::Location {
4068 uri: buffer_uri.clone(),
4069 range: lsp::Range::new(
4070 lsp::Position::new(2, 8),
4071 lsp::Position::new(2, 17),
4072 ),
4073 },
4074 message: "original diagnostic".to_string(),
4075 }]),
4076 ..Default::default()
4077 },
4078 lsp::Diagnostic {
4079 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4080 severity: Some(DiagnosticSeverity::HINT),
4081 message: "error 2 hint 2".to_string(),
4082 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4083 location: lsp::Location {
4084 uri: buffer_uri.clone(),
4085 range: lsp::Range::new(
4086 lsp::Position::new(2, 8),
4087 lsp::Position::new(2, 17),
4088 ),
4089 },
4090 message: "original diagnostic".to_string(),
4091 }]),
4092 ..Default::default()
4093 },
4094 ],
4095 version: None,
4096 };
4097
4098 project
4099 .update(&mut cx, |p, cx| {
4100 p.update_diagnostics(message, &Default::default(), cx)
4101 })
4102 .unwrap();
4103 let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
4104
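        // All diagnostics in the buffer, ordered by position: the warning and
        // the error are the primary entries of groups 0 and 1, and the hints
        // derived from `related_information` share their group's id.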
4105 assert_eq!(
4106 buffer
4107 .diagnostics_in_range::<_, Point>(0..buffer.len())
4108 .collect::<Vec<_>>(),
4109 &[
4110 DiagnosticEntry {
4111 range: Point::new(1, 8)..Point::new(1, 9),
4112 diagnostic: Diagnostic {
4113 severity: DiagnosticSeverity::WARNING,
4114 message: "error 1".to_string(),
4115 group_id: 0,
4116 is_primary: true,
4117 ..Default::default()
4118 }
4119 },
4120 DiagnosticEntry {
4121 range: Point::new(1, 8)..Point::new(1, 9),
4122 diagnostic: Diagnostic {
4123 severity: DiagnosticSeverity::HINT,
4124 message: "error 1 hint 1".to_string(),
4125 group_id: 0,
4126 is_primary: false,
4127 ..Default::default()
4128 }
4129 },
4130 DiagnosticEntry {
4131 range: Point::new(1, 13)..Point::new(1, 15),
4132 diagnostic: Diagnostic {
4133 severity: DiagnosticSeverity::HINT,
4134 message: "error 2 hint 1".to_string(),
4135 group_id: 1,
4136 is_primary: false,
4137 ..Default::default()
4138 }
4139 },
4140 DiagnosticEntry {
4141 range: Point::new(1, 13)..Point::new(1, 15),
4142 diagnostic: Diagnostic {
4143 severity: DiagnosticSeverity::HINT,
4144 message: "error 2 hint 2".to_string(),
4145 group_id: 1,
4146 is_primary: false,
4147 ..Default::default()
4148 }
4149 },
4150 DiagnosticEntry {
4151 range: Point::new(2, 8)..Point::new(2, 17),
4152 diagnostic: Diagnostic {
4153 severity: DiagnosticSeverity::ERROR,
4154 message: "error 2".to_string(),
4155 group_id: 1,
4156 is_primary: true,
4157 ..Default::default()
4158 }
4159 }
4160 ]
4161 );
4162
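        // Fetching a group by id returns the primary diagnostic together with
        // the hints that belong to the same group.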
4163 assert_eq!(
4164 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4165 &[
4166 DiagnosticEntry {
4167 range: Point::new(1, 8)..Point::new(1, 9),
4168 diagnostic: Diagnostic {
4169 severity: DiagnosticSeverity::WARNING,
4170 message: "error 1".to_string(),
4171 group_id: 0,
4172 is_primary: true,
4173 ..Default::default()
4174 }
4175 },
4176 DiagnosticEntry {
4177 range: Point::new(1, 8)..Point::new(1, 9),
4178 diagnostic: Diagnostic {
4179 severity: DiagnosticSeverity::HINT,
4180 message: "error 1 hint 1".to_string(),
4181 group_id: 0,
4182 is_primary: false,
4183 ..Default::default()
4184 }
4185 },
4186 ]
4187 );
4188 assert_eq!(
4189 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4190 &[
4191 DiagnosticEntry {
4192 range: Point::new(1, 13)..Point::new(1, 15),
4193 diagnostic: Diagnostic {
4194 severity: DiagnosticSeverity::HINT,
4195 message: "error 2 hint 1".to_string(),
4196 group_id: 1,
4197 is_primary: false,
4198 ..Default::default()
4199 }
4200 },
4201 DiagnosticEntry {
4202 range: Point::new(1, 13)..Point::new(1, 15),
4203 diagnostic: Diagnostic {
4204 severity: DiagnosticSeverity::HINT,
4205 message: "error 2 hint 2".to_string(),
4206 group_id: 1,
4207 is_primary: false,
4208 ..Default::default()
4209 }
4210 },
4211 DiagnosticEntry {
4212 range: Point::new(2, 8)..Point::new(2, 17),
4213 diagnostic: Diagnostic {
4214 severity: DiagnosticSeverity::ERROR,
4215 message: "error 2".to_string(),
4216 group_id: 1,
4217 is_primary: true,
4218 ..Default::default()
4219 }
4220 }
4221 ]
4222 );
4223 }
4224
4225 #[gpui::test]
4226 async fn test_rename(mut cx: gpui::TestAppContext) {
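        // Set up a language backed by a fake language server; the test pulls
        // the server instance from `fake_servers` and uses `handle_request`
        // to answer LSP requests with canned responses.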
4227 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4228 let language = Arc::new(Language::new(
4229 LanguageConfig {
4230 name: "Rust".to_string(),
4231 path_suffixes: vec!["rs".to_string()],
4232 language_server: Some(language_server_config),
4233 ..Default::default()
4234 },
4235 Some(tree_sitter_rust::language()),
4236 ));
4237
4238 let fs = FakeFs::new(cx.background());
4239 fs.insert_tree(
4240 "/dir",
4241 json!({
4242 "one.rs": "const ONE: usize = 1;",
4243 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4244 }),
4245 )
4246 .await;
4247
4248 let project = Project::test(fs.clone(), &mut cx);
4249 project.update(&mut cx, |project, _| {
4250 Arc::get_mut(&mut project.languages).unwrap().add(language);
4251 });
4252
4253 let (tree, _) = project
4254 .update(&mut cx, |project, cx| {
4255 project.find_or_create_local_worktree("/dir", false, cx)
4256 })
4257 .await
4258 .unwrap();
4259 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
4260 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4261 .await;
4262
4263 let buffer = project
4264 .update(&mut cx, |project, cx| {
4265 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
4266 })
4267 .await
4268 .unwrap();
4269
4270 let mut fake_server = fake_servers.next().await.unwrap();
4271
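        // Renaming is a two-step flow: `prepare_rename` asks the language
        // server for the range of the symbol at the given offset, and
        // `perform_rename` applies the server's `WorkspaceEdit` to every
        // affected buffer.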
4272 let response = project.update(&mut cx, |project, cx| {
4273 project.prepare_rename(buffer.clone(), 7, cx)
4274 });
4275 fake_server
4276 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params| {
4277 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
4278 assert_eq!(params.position, lsp::Position::new(0, 7));
4279 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4280 lsp::Position::new(0, 6),
4281 lsp::Position::new(0, 9),
4282 )))
4283 })
4284 .next()
4285 .await
4286 .unwrap();
4287 let range = response.await.unwrap().unwrap();
4288 let range = buffer.read_with(&cx, |buffer, _| range.to_offset(buffer));
4289 assert_eq!(range, 6..9);
4290
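        // Perform the rename. The fake server's `WorkspaceEdit` touches both
        // one.rs and two.rs, so the resulting transaction should contain an
        // entry for each edited buffer.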
4291 let response = project.update(&mut cx, |project, cx| {
4292 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
4293 });
4294 fake_server
4295 .handle_request::<lsp::request::Rename, _>(|params| {
4296 assert_eq!(
4297 params.text_document_position.text_document.uri.as_str(),
4298 "file:///dir/one.rs"
4299 );
4300 assert_eq!(
4301 params.text_document_position.position,
4302 lsp::Position::new(0, 7)
4303 );
4304 assert_eq!(params.new_name, "THREE");
4305 Some(lsp::WorkspaceEdit {
4306 changes: Some(
4307 [
4308 (
4309 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
4310 vec![lsp::TextEdit::new(
4311 lsp::Range::new(
4312 lsp::Position::new(0, 6),
4313 lsp::Position::new(0, 9),
4314 ),
4315 "THREE".to_string(),
4316 )],
4317 ),
4318 (
4319 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
4320 vec![
4321 lsp::TextEdit::new(
4322 lsp::Range::new(
4323 lsp::Position::new(0, 24),
4324 lsp::Position::new(0, 27),
4325 ),
4326 "THREE".to_string(),
4327 ),
4328 lsp::TextEdit::new(
4329 lsp::Range::new(
4330 lsp::Position::new(0, 35),
4331 lsp::Position::new(0, 38),
4332 ),
4333 "THREE".to_string(),
4334 ),
4335 ],
4336 ),
4337 ]
4338 .into_iter()
4339 .collect(),
4340 ),
4341 ..Default::default()
4342 })
4343 })
4344 .next()
4345 .await
4346 .unwrap();
4347 let mut transaction = response.await.unwrap().0;
4348 assert_eq!(transaction.len(), 2);
4349 assert_eq!(
4350 transaction
4351 .remove_entry(&buffer)
4352 .unwrap()
4353 .0
4354 .read_with(&cx, |buffer, _| buffer.text()),
4355 "const THREE: usize = 1;"
4356 );
4357 assert_eq!(
4358 transaction
4359 .into_keys()
4360 .next()
4361 .unwrap()
4362 .read_with(&cx, |buffer, _| buffer.text()),
4363 "const TWO: usize = one::THREE + one::THREE;"
4364 );
4365 }
4366}