1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod worktree;
5
6use anyhow::{anyhow, Context, Result};
7use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
8use clock::ReplicaId;
9use collections::{hash_map, HashMap, HashSet};
10use futures::{future::Shared, Future, FutureExt};
11use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
12use gpui::{
13 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
14 UpgradeModelHandle, WeakModelHandle,
15};
16use language::{
17 range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
18 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
19 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
20};
21use lsp::{DiagnosticSeverity, LanguageServer};
22use lsp_command::*;
23use postage::{broadcast, prelude::Stream, sink::Sink, watch};
24use smol::block_on;
25use std::{
26 convert::TryInto,
27 ops::Range,
28 path::{Path, PathBuf},
29 sync::{atomic::AtomicBool, Arc},
30 time::Instant,
31};
32use util::{post_inc, ResultExt, TryFutureExt as _};
33
34pub use fs::*;
35pub use worktree::*;
36
37pub struct Project {
38 worktrees: Vec<WorktreeHandle>,
39 active_entry: Option<ProjectEntry>,
40 languages: Arc<LanguageRegistry>,
41 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
42 started_language_servers:
43 HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
44 client: Arc<client::Client>,
45 user_store: ModelHandle<UserStore>,
46 fs: Arc<dyn Fs>,
47 client_state: ProjectClientState,
48 collaborators: HashMap<PeerId, Collaborator>,
49 subscriptions: Vec<client::Subscription>,
50 language_servers_with_diagnostics_running: isize,
51 open_buffers: HashMap<u64, OpenBuffer>,
52 opened_buffer: broadcast::Sender<()>,
53 loading_buffers: HashMap<
54 ProjectPath,
55 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
56 >,
57 shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
58}
59
60enum OpenBuffer {
61 Loaded(WeakModelHandle<Buffer>),
62 Loading(Vec<Operation>),
63}
64
65enum WorktreeHandle {
66 Strong(ModelHandle<Worktree>),
67 Weak(WeakModelHandle<Worktree>),
68}
69
70enum ProjectClientState {
71 Local {
72 is_shared: bool,
73 remote_id_tx: watch::Sender<Option<u64>>,
74 remote_id_rx: watch::Receiver<Option<u64>>,
75 _maintain_remote_id_task: Task<Option<()>>,
76 },
77 Remote {
78 sharing_has_stopped: bool,
79 remote_id: u64,
80 replica_id: ReplicaId,
81 },
82}
83
84#[derive(Clone, Debug)]
85pub struct Collaborator {
86 pub user: Arc<User>,
87 pub peer_id: PeerId,
88 pub replica_id: ReplicaId,
89}
90
91#[derive(Clone, Debug, PartialEq)]
92pub enum Event {
93 ActiveEntryChanged(Option<ProjectEntry>),
94 WorktreeRemoved(WorktreeId),
95 DiskBasedDiagnosticsStarted,
96 DiskBasedDiagnosticsUpdated,
97 DiskBasedDiagnosticsFinished,
98 DiagnosticsUpdated(ProjectPath),
99}
100
101#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
102pub struct ProjectPath {
103 pub worktree_id: WorktreeId,
104 pub path: Arc<Path>,
105}
106
107#[derive(Clone, Debug, Default, PartialEq)]
108pub struct DiagnosticSummary {
109 pub error_count: usize,
110 pub warning_count: usize,
111 pub info_count: usize,
112 pub hint_count: usize,
113}
114
115#[derive(Debug)]
116pub struct Definition {
117 pub target_buffer: ModelHandle<Buffer>,
118 pub target_range: Range<language::Anchor>,
119}
120
121#[derive(Debug)]
122pub struct Symbol {
123 pub label: CodeLabel,
124 pub lsp_symbol: lsp::SymbolInformation,
125}
126
127#[derive(Default)]
128pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
129
130impl DiagnosticSummary {
131 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
132 let mut this = Self {
133 error_count: 0,
134 warning_count: 0,
135 info_count: 0,
136 hint_count: 0,
137 };
138
139 for entry in diagnostics {
140 if entry.diagnostic.is_primary {
141 match entry.diagnostic.severity {
142 DiagnosticSeverity::ERROR => this.error_count += 1,
143 DiagnosticSeverity::WARNING => this.warning_count += 1,
144 DiagnosticSeverity::INFORMATION => this.info_count += 1,
145 DiagnosticSeverity::HINT => this.hint_count += 1,
146 _ => {}
147 }
148 }
149 }
150
151 this
152 }
153
154 pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
155 proto::DiagnosticSummary {
156 path: path.to_string_lossy().to_string(),
157 error_count: self.error_count as u32,
158 warning_count: self.warning_count as u32,
159 info_count: self.info_count as u32,
160 hint_count: self.hint_count as u32,
161 }
162 }
163}
164
165#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
166pub struct ProjectEntry {
167 pub worktree_id: WorktreeId,
168 pub entry_id: usize,
169}
170
171impl Project {
172 pub fn init(client: &Arc<Client>) {
173 client.add_entity_message_handler(Self::handle_add_collaborator);
174 client.add_entity_message_handler(Self::handle_buffer_reloaded);
175 client.add_entity_message_handler(Self::handle_buffer_saved);
176 client.add_entity_message_handler(Self::handle_close_buffer);
177 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
178 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
179 client.add_entity_message_handler(Self::handle_remove_collaborator);
180 client.add_entity_message_handler(Self::handle_share_worktree);
181 client.add_entity_message_handler(Self::handle_unregister_worktree);
182 client.add_entity_message_handler(Self::handle_unshare_project);
183 client.add_entity_message_handler(Self::handle_update_buffer_file);
184 client.add_entity_message_handler(Self::handle_update_buffer);
185 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
186 client.add_entity_message_handler(Self::handle_update_worktree);
187 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
188 client.add_entity_request_handler(Self::handle_apply_code_action);
189 client.add_entity_request_handler(Self::handle_format_buffers);
190 client.add_entity_request_handler(Self::handle_get_code_actions);
191 client.add_entity_request_handler(Self::handle_get_completions);
192 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
193 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
194 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
195 client.add_entity_request_handler(Self::handle_get_project_symbols);
196 client.add_entity_request_handler(Self::handle_open_buffer);
197 client.add_entity_request_handler(Self::handle_save_buffer);
198 }
199
200 pub fn local(
201 client: Arc<Client>,
202 user_store: ModelHandle<UserStore>,
203 languages: Arc<LanguageRegistry>,
204 fs: Arc<dyn Fs>,
205 cx: &mut MutableAppContext,
206 ) -> ModelHandle<Self> {
207 cx.add_model(|cx: &mut ModelContext<Self>| {
208 let (remote_id_tx, remote_id_rx) = watch::channel();
209 let _maintain_remote_id_task = cx.spawn_weak({
210 let rpc = client.clone();
211 move |this, mut cx| {
212 async move {
213 let mut status = rpc.status();
214 while let Some(status) = status.recv().await {
215 if let Some(this) = this.upgrade(&cx) {
216 let remote_id = if let client::Status::Connected { .. } = status {
217 let response = rpc.request(proto::RegisterProject {}).await?;
218 Some(response.project_id)
219 } else {
220 None
221 };
222
223 if let Some(project_id) = remote_id {
224 let mut registrations = Vec::new();
225 this.update(&mut cx, |this, cx| {
226 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
227 registrations.push(worktree.update(
228 cx,
229 |worktree, cx| {
230 let worktree = worktree.as_local_mut().unwrap();
231 worktree.register(project_id, cx)
232 },
233 ));
234 }
235 });
236 for registration in registrations {
237 registration.await?;
238 }
239 }
240 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
241 }
242 }
243 Ok(())
244 }
245 .log_err()
246 }
247 });
248
249 Self {
250 worktrees: Default::default(),
251 collaborators: Default::default(),
252 open_buffers: Default::default(),
253 loading_buffers: Default::default(),
254 shared_buffers: Default::default(),
255 client_state: ProjectClientState::Local {
256 is_shared: false,
257 remote_id_tx,
258 remote_id_rx,
259 _maintain_remote_id_task,
260 },
261 opened_buffer: broadcast::channel(1).0,
262 subscriptions: Vec::new(),
263 active_entry: None,
264 languages,
265 client,
266 user_store,
267 fs,
268 language_servers_with_diagnostics_running: 0,
269 language_servers: Default::default(),
270 started_language_servers: Default::default(),
271 }
272 })
273 }
274
275 pub async fn remote(
276 remote_id: u64,
277 client: Arc<Client>,
278 user_store: ModelHandle<UserStore>,
279 languages: Arc<LanguageRegistry>,
280 fs: Arc<dyn Fs>,
281 cx: &mut AsyncAppContext,
282 ) -> Result<ModelHandle<Self>> {
283 client.authenticate_and_connect(&cx).await?;
284
285 let response = client
286 .request(proto::JoinProject {
287 project_id: remote_id,
288 })
289 .await?;
290
291 let replica_id = response.replica_id as ReplicaId;
292
293 let mut worktrees = Vec::new();
294 for worktree in response.worktrees {
295 let (worktree, load_task) = cx
296 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
297 worktrees.push(worktree);
298 load_task.detach();
299 }
300
301 let this = cx.add_model(|cx| {
302 let mut this = Self {
303 worktrees: Vec::new(),
304 open_buffers: Default::default(),
305 loading_buffers: Default::default(),
306 opened_buffer: broadcast::channel(1).0,
307 shared_buffers: Default::default(),
308 active_entry: None,
309 collaborators: Default::default(),
310 languages,
311 user_store: user_store.clone(),
312 fs,
313 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
314 client,
315 client_state: ProjectClientState::Remote {
316 sharing_has_stopped: false,
317 remote_id,
318 replica_id,
319 },
320 language_servers_with_diagnostics_running: 0,
321 language_servers: Default::default(),
322 started_language_servers: Default::default(),
323 };
324 for worktree in worktrees {
325 this.add_worktree(&worktree, cx);
326 }
327 this
328 });
329
330 let user_ids = response
331 .collaborators
332 .iter()
333 .map(|peer| peer.user_id)
334 .collect();
335 user_store
336 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
337 .await?;
338 let mut collaborators = HashMap::default();
339 for message in response.collaborators {
340 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
341 collaborators.insert(collaborator.peer_id, collaborator);
342 }
343
344 this.update(cx, |this, _| {
345 this.collaborators = collaborators;
346 });
347
348 Ok(this)
349 }
350
351 #[cfg(any(test, feature = "test-support"))]
352 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
353 let languages = Arc::new(LanguageRegistry::new());
354 let http_client = client::test::FakeHttpClient::with_404_response();
355 let client = client::Client::new(http_client.clone());
356 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
357 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
358 }
359
360 #[cfg(any(test, feature = "test-support"))]
361 pub fn shared_buffer(&self, peer_id: PeerId, remote_id: u64) -> Option<ModelHandle<Buffer>> {
362 self.shared_buffers
363 .get(&peer_id)
364 .and_then(|buffers| buffers.get(&remote_id))
365 .cloned()
366 }
367
368 #[cfg(any(test, feature = "test-support"))]
369 pub fn has_buffered_operations(&self) -> bool {
370 self.open_buffers
371 .values()
372 .any(|buffer| matches!(buffer, OpenBuffer::Loading(_)))
373 }
374
375 pub fn fs(&self) -> &Arc<dyn Fs> {
376 &self.fs
377 }
378
379 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
380 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
381 *remote_id_tx.borrow_mut() = remote_id;
382 }
383
384 self.subscriptions.clear();
385 if let Some(remote_id) = remote_id {
386 self.subscriptions
387 .push(self.client.add_model_for_remote_entity(remote_id, cx));
388 }
389 }
390
391 pub fn remote_id(&self) -> Option<u64> {
392 match &self.client_state {
393 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
394 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
395 }
396 }
397
398 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
399 let mut id = None;
400 let mut watch = None;
401 match &self.client_state {
402 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
403 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
404 }
405
406 async move {
407 if let Some(id) = id {
408 return id;
409 }
410 let mut watch = watch.unwrap();
411 loop {
412 let id = *watch.borrow();
413 if let Some(id) = id {
414 return id;
415 }
416 watch.recv().await;
417 }
418 }
419 }
420
421 pub fn replica_id(&self) -> ReplicaId {
422 match &self.client_state {
423 ProjectClientState::Local { .. } => 0,
424 ProjectClientState::Remote { replica_id, .. } => *replica_id,
425 }
426 }
427
428 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
429 &self.collaborators
430 }
431
432 pub fn worktrees<'a>(
433 &'a self,
434 cx: &'a AppContext,
435 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
436 self.worktrees
437 .iter()
438 .filter_map(move |worktree| worktree.upgrade(cx))
439 }
440
441 pub fn worktree_for_id(
442 &self,
443 id: WorktreeId,
444 cx: &AppContext,
445 ) -> Option<ModelHandle<Worktree>> {
446 self.worktrees(cx)
447 .find(|worktree| worktree.read(cx).id() == id)
448 }
449
450 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
451 let rpc = self.client.clone();
452 cx.spawn(|this, mut cx| async move {
453 let project_id = this.update(&mut cx, |this, _| {
454 if let ProjectClientState::Local {
455 is_shared,
456 remote_id_rx,
457 ..
458 } = &mut this.client_state
459 {
460 *is_shared = true;
461 remote_id_rx
462 .borrow()
463 .ok_or_else(|| anyhow!("no project id"))
464 } else {
465 Err(anyhow!("can't share a remote project"))
466 }
467 })?;
468
469 rpc.request(proto::ShareProject { project_id }).await?;
470 let mut tasks = Vec::new();
471 this.update(&mut cx, |this, cx| {
472 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
473 worktree.update(cx, |worktree, cx| {
474 let worktree = worktree.as_local_mut().unwrap();
475 tasks.push(worktree.share(project_id, cx));
476 });
477 }
478 });
479 for task in tasks {
480 task.await?;
481 }
482 this.update(&mut cx, |_, cx| cx.notify());
483 Ok(())
484 })
485 }
486
487 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
488 let rpc = self.client.clone();
489 cx.spawn(|this, mut cx| async move {
490 let project_id = this.update(&mut cx, |this, _| {
491 if let ProjectClientState::Local {
492 is_shared,
493 remote_id_rx,
494 ..
495 } = &mut this.client_state
496 {
497 *is_shared = false;
498 remote_id_rx
499 .borrow()
500 .ok_or_else(|| anyhow!("no project id"))
501 } else {
502 Err(anyhow!("can't share a remote project"))
503 }
504 })?;
505
506 rpc.send(proto::UnshareProject { project_id })?;
507 this.update(&mut cx, |this, cx| {
508 this.collaborators.clear();
509 this.shared_buffers.clear();
510 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
511 worktree.update(cx, |worktree, _| {
512 worktree.as_local_mut().unwrap().unshare();
513 });
514 }
515 cx.notify()
516 });
517 Ok(())
518 })
519 }
520
521 pub fn is_read_only(&self) -> bool {
522 match &self.client_state {
523 ProjectClientState::Local { .. } => false,
524 ProjectClientState::Remote {
525 sharing_has_stopped,
526 ..
527 } => *sharing_has_stopped,
528 }
529 }
530
531 pub fn is_local(&self) -> bool {
532 match &self.client_state {
533 ProjectClientState::Local { .. } => true,
534 ProjectClientState::Remote { .. } => false,
535 }
536 }
537
538 pub fn is_remote(&self) -> bool {
539 !self.is_local()
540 }
541
542 pub fn open_buffer(
543 &mut self,
544 path: impl Into<ProjectPath>,
545 cx: &mut ModelContext<Self>,
546 ) -> Task<Result<ModelHandle<Buffer>>> {
547 let project_path = path.into();
548 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
549 worktree
550 } else {
551 return Task::ready(Err(anyhow!("no such worktree")));
552 };
553
554 // If there is already a buffer for the given path, then return it.
555 let existing_buffer = self.get_open_buffer(&project_path, cx);
556 if let Some(existing_buffer) = existing_buffer {
557 return Task::ready(Ok(existing_buffer));
558 }
559
560 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
561 // If the given path is already being loaded, then wait for that existing
562 // task to complete and return the same buffer.
563 hash_map::Entry::Occupied(e) => e.get().clone(),
564
565 // Otherwise, record the fact that this path is now being loaded.
566 hash_map::Entry::Vacant(entry) => {
567 let (mut tx, rx) = postage::watch::channel();
568 entry.insert(rx.clone());
569
570 let load_buffer = if worktree.read(cx).is_local() {
571 self.open_local_buffer(&project_path.path, &worktree, cx)
572 } else {
573 self.open_remote_buffer(&project_path.path, &worktree, cx)
574 };
575
576 cx.spawn(move |this, mut cx| async move {
577 let load_result = load_buffer.await;
578 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
579 // Record the fact that the buffer is no longer loading.
580 this.loading_buffers.remove(&project_path);
581 if this.loading_buffers.is_empty() {
582 this.open_buffers
583 .retain(|_, buffer| matches!(buffer, OpenBuffer::Loaded(_)))
584 }
585
586 let buffer = load_result.map_err(Arc::new)?;
587 Ok(buffer)
588 }));
589 })
590 .detach();
591 rx
592 }
593 };
594
595 cx.foreground().spawn(async move {
596 loop {
597 if let Some(result) = loading_watch.borrow().as_ref() {
598 match result {
599 Ok(buffer) => return Ok(buffer.clone()),
600 Err(error) => return Err(anyhow!("{}", error)),
601 }
602 }
603 loading_watch.recv().await;
604 }
605 })
606 }
607
608 fn open_local_buffer(
609 &mut self,
610 path: &Arc<Path>,
611 worktree: &ModelHandle<Worktree>,
612 cx: &mut ModelContext<Self>,
613 ) -> Task<Result<ModelHandle<Buffer>>> {
614 let load_buffer = worktree.update(cx, |worktree, cx| {
615 let worktree = worktree.as_local_mut().unwrap();
616 worktree.load_buffer(path, cx)
617 });
618 let worktree = worktree.downgrade();
619 cx.spawn(|this, mut cx| async move {
620 let buffer = load_buffer.await?;
621 let worktree = worktree
622 .upgrade(&cx)
623 .ok_or_else(|| anyhow!("worktree was removed"))?;
624 this.update(&mut cx, |this, cx| {
625 this.register_buffer(&buffer, Some(&worktree), cx)
626 })?;
627 Ok(buffer)
628 })
629 }
630
631 fn open_remote_buffer(
632 &mut self,
633 path: &Arc<Path>,
634 worktree: &ModelHandle<Worktree>,
635 cx: &mut ModelContext<Self>,
636 ) -> Task<Result<ModelHandle<Buffer>>> {
637 let rpc = self.client.clone();
638 let project_id = self.remote_id().unwrap();
639 let remote_worktree_id = worktree.read(cx).id();
640 let path = path.clone();
641 let path_string = path.to_string_lossy().to_string();
642 cx.spawn(|this, mut cx| async move {
643 let response = rpc
644 .request(proto::OpenBuffer {
645 project_id,
646 worktree_id: remote_worktree_id.to_proto(),
647 path: path_string,
648 })
649 .await?;
650 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
651 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
652 .await
653 })
654 }
655
656 fn open_local_buffer_from_lsp_path(
657 &mut self,
658 abs_path: lsp::Url,
659 lang_name: String,
660 lang_server: Arc<LanguageServer>,
661 cx: &mut ModelContext<Self>,
662 ) -> Task<Result<ModelHandle<Buffer>>> {
663 cx.spawn(|this, mut cx| async move {
664 let abs_path = abs_path
665 .to_file_path()
666 .map_err(|_| anyhow!("can't convert URI to path"))?;
667 let (worktree, relative_path) = if let Some(result) =
668 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
669 {
670 result
671 } else {
672 let worktree = this
673 .update(&mut cx, |this, cx| {
674 this.create_local_worktree(&abs_path, true, cx)
675 })
676 .await?;
677 this.update(&mut cx, |this, cx| {
678 this.language_servers
679 .insert((worktree.read(cx).id(), lang_name), lang_server);
680 });
681 (worktree, PathBuf::new())
682 };
683
684 let project_path = ProjectPath {
685 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
686 path: relative_path.into(),
687 };
688 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
689 .await
690 })
691 }
692
693 pub fn save_buffer_as(
694 &self,
695 buffer: ModelHandle<Buffer>,
696 abs_path: PathBuf,
697 cx: &mut ModelContext<Project>,
698 ) -> Task<Result<()>> {
699 let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
700 cx.spawn(|this, mut cx| async move {
701 let (worktree, path) = worktree_task.await?;
702 worktree
703 .update(&mut cx, |worktree, cx| {
704 worktree
705 .as_local_mut()
706 .unwrap()
707 .save_buffer_as(buffer.clone(), path, cx)
708 })
709 .await?;
710 this.update(&mut cx, |this, cx| {
711 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
712 });
713 Ok(())
714 })
715 }
716
717 #[cfg(any(test, feature = "test-support"))]
718 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
719 let path = path.into();
720 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
721 self.open_buffers.iter().any(|(_, buffer)| {
722 if let Some(buffer) = buffer.upgrade(cx) {
723 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
724 if file.worktree == worktree && file.path() == &path.path {
725 return true;
726 }
727 }
728 }
729 false
730 })
731 } else {
732 false
733 }
734 }
735
736 fn get_open_buffer(
737 &mut self,
738 path: &ProjectPath,
739 cx: &mut ModelContext<Self>,
740 ) -> Option<ModelHandle<Buffer>> {
741 let mut result = None;
742 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
743 self.open_buffers.retain(|_, buffer| {
744 if let Some(buffer) = buffer.upgrade(cx) {
745 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
746 if file.worktree == worktree && file.path() == &path.path {
747 result = Some(buffer);
748 }
749 }
750 true
751 } else {
752 false
753 }
754 });
755 result
756 }
757
758 fn register_buffer(
759 &mut self,
760 buffer: &ModelHandle<Buffer>,
761 worktree: Option<&ModelHandle<Worktree>>,
762 cx: &mut ModelContext<Self>,
763 ) -> Result<()> {
764 match self.open_buffers.insert(
765 buffer.read(cx).remote_id(),
766 OpenBuffer::Loaded(buffer.downgrade()),
767 ) {
768 None => {}
769 Some(OpenBuffer::Loading(operations)) => {
770 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
771 }
772 Some(OpenBuffer::Loaded(_)) => Err(anyhow!("registered the same buffer twice"))?,
773 }
774 self.assign_language_to_buffer(&buffer, worktree, cx);
775 Ok(())
776 }
777
778 fn assign_language_to_buffer(
779 &mut self,
780 buffer: &ModelHandle<Buffer>,
781 worktree: Option<&ModelHandle<Worktree>>,
782 cx: &mut ModelContext<Self>,
783 ) -> Option<()> {
784 let (path, full_path) = {
785 let file = buffer.read(cx).file()?;
786 (file.path().clone(), file.full_path(cx))
787 };
788
789 // If the buffer has a language, set it and start/assign the language server
790 if let Some(language) = self.languages.select_language(&full_path).cloned() {
791 buffer.update(cx, |buffer, cx| {
792 buffer.set_language(Some(language.clone()), cx);
793 });
794
795 // For local worktrees, start a language server if needed.
796 // Also assign the language server and any previously stored diagnostics to the buffer.
797 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
798 let worktree_id = local_worktree.id();
799 let worktree_abs_path = local_worktree.abs_path().clone();
800 let buffer = buffer.downgrade();
801 let language_server =
802 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
803
804 cx.spawn_weak(|_, mut cx| async move {
805 if let Some(language_server) = language_server.await {
806 if let Some(buffer) = buffer.upgrade(&cx) {
807 buffer.update(&mut cx, |buffer, cx| {
808 buffer.set_language_server(Some(language_server), cx);
809 });
810 }
811 }
812 })
813 .detach();
814 }
815 }
816
817 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
818 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
819 buffer.update(cx, |buffer, cx| {
820 buffer.update_diagnostics(diagnostics, None, cx).log_err();
821 });
822 }
823 }
824
825 None
826 }
827
828 fn start_language_server(
829 &mut self,
830 worktree_id: WorktreeId,
831 worktree_path: Arc<Path>,
832 language: Arc<Language>,
833 cx: &mut ModelContext<Self>,
834 ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
835 enum LspEvent {
836 DiagnosticsStart,
837 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
838 DiagnosticsFinish,
839 }
840
841 let key = (worktree_id, language.name().to_string());
842 self.started_language_servers
843 .entry(key.clone())
844 .or_insert_with(|| {
845 let language_server = self.languages.start_language_server(
846 &language,
847 worktree_path,
848 self.client.http_client(),
849 cx,
850 );
851 let rpc = self.client.clone();
852 cx.spawn_weak(|this, mut cx| async move {
853 let language_server = language_server?.await.log_err()?;
854 if let Some(this) = this.upgrade(&cx) {
855 this.update(&mut cx, |this, _| {
856 this.language_servers.insert(key, language_server.clone());
857 });
858 }
859
860 let disk_based_sources = language
861 .disk_based_diagnostic_sources()
862 .cloned()
863 .unwrap_or_default();
864 let disk_based_diagnostics_progress_token =
865 language.disk_based_diagnostics_progress_token().cloned();
866 let has_disk_based_diagnostic_progress_token =
867 disk_based_diagnostics_progress_token.is_some();
868 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
869
870 // Listen for `PublishDiagnostics` notifications.
871 language_server
872 .on_notification::<lsp::notification::PublishDiagnostics, _>({
873 let diagnostics_tx = diagnostics_tx.clone();
874 move |params| {
875 if !has_disk_based_diagnostic_progress_token {
876 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
877 }
878 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
879 .ok();
880 if !has_disk_based_diagnostic_progress_token {
881 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
882 }
883 }
884 })
885 .detach();
886
887 // Listen for `Progress` notifications. Send an event when the language server
888 // transitions between running jobs and not running any jobs.
889 let mut running_jobs_for_this_server: i32 = 0;
890 language_server
891 .on_notification::<lsp::notification::Progress, _>(move |params| {
892 let token = match params.token {
893 lsp::NumberOrString::Number(_) => None,
894 lsp::NumberOrString::String(token) => Some(token),
895 };
896
897 if token == disk_based_diagnostics_progress_token {
898 match params.value {
899 lsp::ProgressParamsValue::WorkDone(progress) => {
900 match progress {
901 lsp::WorkDoneProgress::Begin(_) => {
902 running_jobs_for_this_server += 1;
903 if running_jobs_for_this_server == 1 {
904 block_on(
905 diagnostics_tx
906 .send(LspEvent::DiagnosticsStart),
907 )
908 .ok();
909 }
910 }
911 lsp::WorkDoneProgress::End(_) => {
912 running_jobs_for_this_server -= 1;
913 if running_jobs_for_this_server == 0 {
914 block_on(
915 diagnostics_tx
916 .send(LspEvent::DiagnosticsFinish),
917 )
918 .ok();
919 }
920 }
921 _ => {}
922 }
923 }
924 }
925 }
926 })
927 .detach();
928
929 // Process all the LSP events.
930 cx.spawn(|mut cx| async move {
931 while let Ok(message) = diagnostics_rx.recv().await {
932 let this = this.upgrade(&cx)?;
933 match message {
934 LspEvent::DiagnosticsStart => {
935 this.update(&mut cx, |this, cx| {
936 this.disk_based_diagnostics_started(cx);
937 if let Some(project_id) = this.remote_id() {
938 rpc.send(proto::DiskBasedDiagnosticsUpdating {
939 project_id,
940 })
941 .log_err();
942 }
943 });
944 }
945 LspEvent::DiagnosticsUpdate(mut params) => {
946 language.process_diagnostics(&mut params);
947 this.update(&mut cx, |this, cx| {
948 this.update_diagnostics(params, &disk_based_sources, cx)
949 .log_err();
950 });
951 }
952 LspEvent::DiagnosticsFinish => {
953 this.update(&mut cx, |this, cx| {
954 this.disk_based_diagnostics_finished(cx);
955 if let Some(project_id) = this.remote_id() {
956 rpc.send(proto::DiskBasedDiagnosticsUpdated {
957 project_id,
958 })
959 .log_err();
960 }
961 });
962 }
963 }
964 }
965 Some(())
966 })
967 .detach();
968
969 Some(language_server)
970 })
971 .shared()
972 })
973 .clone()
974 }
975
976 pub fn update_diagnostics(
977 &mut self,
978 params: lsp::PublishDiagnosticsParams,
979 disk_based_sources: &HashSet<String>,
980 cx: &mut ModelContext<Self>,
981 ) -> Result<()> {
982 let abs_path = params
983 .uri
984 .to_file_path()
985 .map_err(|_| anyhow!("URI is not a file"))?;
986 let mut next_group_id = 0;
987 let mut diagnostics = Vec::default();
988 let mut primary_diagnostic_group_ids = HashMap::default();
989 let mut sources_by_group_id = HashMap::default();
990 let mut supporting_diagnostic_severities = HashMap::default();
991 for diagnostic in ¶ms.diagnostics {
992 let source = diagnostic.source.as_ref();
993 let code = diagnostic.code.as_ref().map(|code| match code {
994 lsp::NumberOrString::Number(code) => code.to_string(),
995 lsp::NumberOrString::String(code) => code.clone(),
996 });
997 let range = range_from_lsp(diagnostic.range);
998 let is_supporting = diagnostic
999 .related_information
1000 .as_ref()
1001 .map_or(false, |infos| {
1002 infos.iter().any(|info| {
1003 primary_diagnostic_group_ids.contains_key(&(
1004 source,
1005 code.clone(),
1006 range_from_lsp(info.location.range),
1007 ))
1008 })
1009 });
1010
1011 if is_supporting {
1012 if let Some(severity) = diagnostic.severity {
1013 supporting_diagnostic_severities
1014 .insert((source, code.clone(), range), severity);
1015 }
1016 } else {
1017 let group_id = post_inc(&mut next_group_id);
1018 let is_disk_based =
1019 source.map_or(false, |source| disk_based_sources.contains(source));
1020
1021 sources_by_group_id.insert(group_id, source);
1022 primary_diagnostic_group_ids
1023 .insert((source, code.clone(), range.clone()), group_id);
1024
1025 diagnostics.push(DiagnosticEntry {
1026 range,
1027 diagnostic: Diagnostic {
1028 code: code.clone(),
1029 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1030 message: diagnostic.message.clone(),
1031 group_id,
1032 is_primary: true,
1033 is_valid: true,
1034 is_disk_based,
1035 },
1036 });
1037 if let Some(infos) = &diagnostic.related_information {
1038 for info in infos {
1039 if info.location.uri == params.uri && !info.message.is_empty() {
1040 let range = range_from_lsp(info.location.range);
1041 diagnostics.push(DiagnosticEntry {
1042 range,
1043 diagnostic: Diagnostic {
1044 code: code.clone(),
1045 severity: DiagnosticSeverity::INFORMATION,
1046 message: info.message.clone(),
1047 group_id,
1048 is_primary: false,
1049 is_valid: true,
1050 is_disk_based,
1051 },
1052 });
1053 }
1054 }
1055 }
1056 }
1057 }
1058
1059 for entry in &mut diagnostics {
1060 let diagnostic = &mut entry.diagnostic;
1061 if !diagnostic.is_primary {
1062 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1063 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1064 source,
1065 diagnostic.code.clone(),
1066 entry.range.clone(),
1067 )) {
1068 diagnostic.severity = severity;
1069 }
1070 }
1071 }
1072
1073 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1074 Ok(())
1075 }
1076
1077 pub fn update_diagnostic_entries(
1078 &mut self,
1079 abs_path: PathBuf,
1080 version: Option<i32>,
1081 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1082 cx: &mut ModelContext<Project>,
1083 ) -> Result<(), anyhow::Error> {
1084 let (worktree, relative_path) = self
1085 .find_local_worktree(&abs_path, cx)
1086 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1087 let project_path = ProjectPath {
1088 worktree_id: worktree.read(cx).id(),
1089 path: relative_path.into(),
1090 };
1091
1092 for buffer in self.open_buffers.values() {
1093 if let Some(buffer) = buffer.upgrade(cx) {
1094 if buffer
1095 .read(cx)
1096 .file()
1097 .map_or(false, |file| *file.path() == project_path.path)
1098 {
1099 buffer.update(cx, |buffer, cx| {
1100 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1101 })?;
1102 break;
1103 }
1104 }
1105 }
1106 worktree.update(cx, |worktree, cx| {
1107 worktree
1108 .as_local_mut()
1109 .ok_or_else(|| anyhow!("not a local worktree"))?
1110 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1111 })?;
1112 cx.emit(Event::DiagnosticsUpdated(project_path));
1113 Ok(())
1114 }
1115
1116 pub fn format(
1117 &self,
1118 buffers: HashSet<ModelHandle<Buffer>>,
1119 push_to_history: bool,
1120 cx: &mut ModelContext<Project>,
1121 ) -> Task<Result<ProjectTransaction>> {
1122 let mut local_buffers = Vec::new();
1123 let mut remote_buffers = None;
1124 for buffer_handle in buffers {
1125 let buffer = buffer_handle.read(cx);
1126 let worktree;
1127 if let Some(file) = File::from_dyn(buffer.file()) {
1128 worktree = file.worktree.clone();
1129 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1130 let lang_server;
1131 if let Some(lang) = buffer.language() {
1132 if let Some(server) = self
1133 .language_servers
1134 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1135 {
1136 lang_server = server.clone();
1137 } else {
1138 return Task::ready(Ok(Default::default()));
1139 };
1140 } else {
1141 return Task::ready(Ok(Default::default()));
1142 }
1143
1144 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1145 } else {
1146 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1147 }
1148 } else {
1149 return Task::ready(Ok(Default::default()));
1150 }
1151 }
1152
1153 let remote_buffers = self.remote_id().zip(remote_buffers);
1154 let client = self.client.clone();
1155
1156 cx.spawn(|this, mut cx| async move {
1157 let mut project_transaction = ProjectTransaction::default();
1158
1159 if let Some((project_id, remote_buffers)) = remote_buffers {
1160 let response = client
1161 .request(proto::FormatBuffers {
1162 project_id,
1163 buffer_ids: remote_buffers
1164 .iter()
1165 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1166 .collect(),
1167 })
1168 .await?
1169 .transaction
1170 .ok_or_else(|| anyhow!("missing transaction"))?;
1171 project_transaction = this
1172 .update(&mut cx, |this, cx| {
1173 this.deserialize_project_transaction(response, push_to_history, cx)
1174 })
1175 .await?;
1176 }
1177
1178 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1179 let lsp_edits = lang_server
1180 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1181 text_document: lsp::TextDocumentIdentifier::new(
1182 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1183 ),
1184 options: Default::default(),
1185 work_done_progress_params: Default::default(),
1186 })
1187 .await?;
1188
1189 if let Some(lsp_edits) = lsp_edits {
1190 let edits = buffer
1191 .update(&mut cx, |buffer, cx| {
1192 buffer.edits_from_lsp(lsp_edits, None, cx)
1193 })
1194 .await?;
1195 buffer.update(&mut cx, |buffer, cx| {
1196 buffer.finalize_last_transaction();
1197 buffer.start_transaction();
1198 for (range, text) in edits {
1199 buffer.edit([range], text, cx);
1200 }
1201 if buffer.end_transaction(cx).is_some() {
1202 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1203 if !push_to_history {
1204 buffer.forget_transaction(transaction.id);
1205 }
1206 project_transaction.0.insert(cx.handle(), transaction);
1207 }
1208 });
1209 }
1210 }
1211
1212 Ok(project_transaction)
1213 })
1214 }
1215
1216 pub fn definition<T: ToPointUtf16>(
1217 &self,
1218 buffer: &ModelHandle<Buffer>,
1219 position: T,
1220 cx: &mut ModelContext<Self>,
1221 ) -> Task<Result<Vec<Definition>>> {
1222 let position = position.to_point_utf16(buffer.read(cx));
1223 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1224 }
1225
1226 pub fn symbols(
1227 &self,
1228 query: &str,
1229 cx: &mut ModelContext<Self>,
1230 ) -> Task<Result<HashMap<String, Vec<Symbol>>>> {
1231 if self.is_local() {
1232 let mut language_servers = HashMap::default();
1233 for ((_, language_name), language_server) in self.language_servers.iter() {
1234 let language = self.languages.get_language(language_name).unwrap();
1235 language_servers
1236 .entry(Arc::as_ptr(language_server))
1237 .or_insert((language_server.clone(), language.clone()));
1238 }
1239
1240 let mut requests = Vec::new();
1241 for (language_server, _) in language_servers.values() {
1242 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
1243 lsp::WorkspaceSymbolParams {
1244 query: query.to_string(),
1245 ..Default::default()
1246 },
1247 ));
1248 }
1249
1250 cx.foreground().spawn(async move {
1251 let responses = futures::future::try_join_all(requests).await?;
1252 let mut symbols = HashMap::default();
1253 for ((_, language), lsp_symbols) in language_servers.into_values().zip(responses) {
1254 let language_symbols = symbols
1255 .entry(language.name().to_string())
1256 .or_insert(Vec::new());
1257 for lsp_symbol in lsp_symbols.into_iter().flatten() {
1258 let label = language
1259 .label_for_symbol(&lsp_symbol)
1260 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
1261 language_symbols.push(Symbol { label, lsp_symbol });
1262 }
1263 }
1264 Ok(symbols)
1265 })
1266 } else if let Some(project_id) = self.remote_id() {
1267 let request = self.client.request(proto::GetProjectSymbols {
1268 project_id,
1269 query: query.to_string(),
1270 });
1271 cx.spawn_weak(|this, cx| async move {
1272 let response = request.await?;
1273 let mut symbols = HashMap::default();
1274 if let Some(this) = this.upgrade(&cx) {
1275 this.read_with(&cx, |this, _| {
1276 let mut serialized_symbols = response.symbols.into_iter();
1277 for (language_name, symbol_count) in response
1278 .languages
1279 .into_iter()
1280 .zip(response.symbol_counts_per_language)
1281 {
1282 let language = this.languages.get_language(&language_name);
1283 let language_symbols =
1284 symbols.entry(language_name).or_insert(Vec::new());
1285 language_symbols.extend(
1286 serialized_symbols
1287 .by_ref()
1288 .take(symbol_count as usize)
1289 .filter_map(|serialized_symbol| {
1290 let lsp_symbol =
1291 serde_json::from_slice(&serialized_symbol.lsp_symbol)
1292 .log_err()?;
1293 Some(Symbol {
1294 label: language
1295 .and_then(|language| {
1296 language.label_for_symbol(&lsp_symbol)
1297 })
1298 .unwrap_or(CodeLabel::plain(
1299 lsp_symbol.name.clone(),
1300 None,
1301 )),
1302 lsp_symbol,
1303 })
1304 }),
1305 );
1306 }
1307 })
1308 }
1309 Ok(symbols)
1310 })
1311 } else {
1312 Task::ready(Ok(Default::default()))
1313 }
1314 }
1315
1316 pub fn completions<T: ToPointUtf16>(
1317 &self,
1318 source_buffer_handle: &ModelHandle<Buffer>,
1319 position: T,
1320 cx: &mut ModelContext<Self>,
1321 ) -> Task<Result<Vec<Completion>>> {
1322 let source_buffer_handle = source_buffer_handle.clone();
1323 let source_buffer = source_buffer_handle.read(cx);
1324 let buffer_id = source_buffer.remote_id();
1325 let language = source_buffer.language().cloned();
1326 let worktree;
1327 let buffer_abs_path;
1328 if let Some(file) = File::from_dyn(source_buffer.file()) {
1329 worktree = file.worktree.clone();
1330 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1331 } else {
1332 return Task::ready(Ok(Default::default()));
1333 };
1334
1335 let position = position.to_point_utf16(source_buffer);
1336 let anchor = source_buffer.anchor_after(position);
1337
1338 if worktree.read(cx).as_local().is_some() {
1339 let buffer_abs_path = buffer_abs_path.unwrap();
1340 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1341 server
1342 } else {
1343 return Task::ready(Ok(Default::default()));
1344 };
1345
1346 cx.spawn(|_, cx| async move {
1347 let completions = lang_server
1348 .request::<lsp::request::Completion>(lsp::CompletionParams {
1349 text_document_position: lsp::TextDocumentPositionParams::new(
1350 lsp::TextDocumentIdentifier::new(
1351 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1352 ),
1353 position.to_lsp_position(),
1354 ),
1355 context: Default::default(),
1356 work_done_progress_params: Default::default(),
1357 partial_result_params: Default::default(),
1358 })
1359 .await
1360 .context("lsp completion request failed")?;
1361
1362 let completions = if let Some(completions) = completions {
1363 match completions {
1364 lsp::CompletionResponse::Array(completions) => completions,
1365 lsp::CompletionResponse::List(list) => list.items,
1366 }
1367 } else {
1368 Default::default()
1369 };
1370
1371 source_buffer_handle.read_with(&cx, |this, _| {
1372 Ok(completions
1373 .into_iter()
1374 .filter_map(|lsp_completion| {
1375 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1376 lsp::CompletionTextEdit::Edit(edit) => {
1377 (range_from_lsp(edit.range), edit.new_text.clone())
1378 }
1379 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1380 log::info!("unsupported insert/replace completion");
1381 return None;
1382 }
1383 };
1384
1385 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1386 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1387 if clipped_start == old_range.start && clipped_end == old_range.end {
1388 Some(Completion {
1389 old_range: this.anchor_before(old_range.start)
1390 ..this.anchor_after(old_range.end),
1391 new_text,
1392 label: language
1393 .as_ref()
1394 .and_then(|l| l.label_for_completion(&lsp_completion))
1395 .unwrap_or_else(|| {
1396 CodeLabel::plain(
1397 lsp_completion.label.clone(),
1398 lsp_completion.filter_text.as_deref(),
1399 )
1400 }),
1401 lsp_completion,
1402 })
1403 } else {
1404 None
1405 }
1406 })
1407 .collect())
1408 })
1409 })
1410 } else if let Some(project_id) = self.remote_id() {
1411 let rpc = self.client.clone();
1412 let message = proto::GetCompletions {
1413 project_id,
1414 buffer_id,
1415 position: Some(language::proto::serialize_anchor(&anchor)),
1416 version: (&source_buffer.version()).into(),
1417 };
1418 cx.spawn_weak(|_, mut cx| async move {
1419 let response = rpc.request(message).await?;
1420
1421 source_buffer_handle
1422 .update(&mut cx, |buffer, _| {
1423 buffer.wait_for_version(response.version.into())
1424 })
1425 .await;
1426
1427 response
1428 .completions
1429 .into_iter()
1430 .map(|completion| {
1431 language::proto::deserialize_completion(completion, language.as_ref())
1432 })
1433 .collect()
1434 })
1435 } else {
1436 Task::ready(Ok(Default::default()))
1437 }
1438 }
1439
1440 pub fn apply_additional_edits_for_completion(
1441 &self,
1442 buffer_handle: ModelHandle<Buffer>,
1443 completion: Completion,
1444 push_to_history: bool,
1445 cx: &mut ModelContext<Self>,
1446 ) -> Task<Result<Option<Transaction>>> {
1447 let buffer = buffer_handle.read(cx);
1448 let buffer_id = buffer.remote_id();
1449
1450 if self.is_local() {
1451 let lang_server = if let Some(language_server) = buffer.language_server() {
1452 language_server.clone()
1453 } else {
1454 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1455 };
1456
1457 cx.spawn(|_, mut cx| async move {
1458 let resolved_completion = lang_server
1459 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1460 .await?;
1461 if let Some(edits) = resolved_completion.additional_text_edits {
1462 let edits = buffer_handle
1463 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1464 .await?;
1465 buffer_handle.update(&mut cx, |buffer, cx| {
1466 buffer.finalize_last_transaction();
1467 buffer.start_transaction();
1468 for (range, text) in edits {
1469 buffer.edit([range], text, cx);
1470 }
1471 let transaction = if buffer.end_transaction(cx).is_some() {
1472 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1473 if !push_to_history {
1474 buffer.forget_transaction(transaction.id);
1475 }
1476 Some(transaction)
1477 } else {
1478 None
1479 };
1480 Ok(transaction)
1481 })
1482 } else {
1483 Ok(None)
1484 }
1485 })
1486 } else if let Some(project_id) = self.remote_id() {
1487 let client = self.client.clone();
1488 cx.spawn(|_, mut cx| async move {
1489 let response = client
1490 .request(proto::ApplyCompletionAdditionalEdits {
1491 project_id,
1492 buffer_id,
1493 completion: Some(language::proto::serialize_completion(&completion)),
1494 })
1495 .await?;
1496
1497 if let Some(transaction) = response.transaction {
1498 let transaction = language::proto::deserialize_transaction(transaction)?;
1499 buffer_handle
1500 .update(&mut cx, |buffer, _| {
1501 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1502 })
1503 .await;
1504 if push_to_history {
1505 buffer_handle.update(&mut cx, |buffer, _| {
1506 buffer.push_transaction(transaction.clone(), Instant::now());
1507 });
1508 }
1509 Ok(Some(transaction))
1510 } else {
1511 Ok(None)
1512 }
1513 })
1514 } else {
1515 Task::ready(Err(anyhow!("project does not have a remote id")))
1516 }
1517 }
1518
1519 pub fn code_actions<T: ToOffset>(
1520 &self,
1521 buffer_handle: &ModelHandle<Buffer>,
1522 range: Range<T>,
1523 cx: &mut ModelContext<Self>,
1524 ) -> Task<Result<Vec<CodeAction>>> {
1525 let buffer_handle = buffer_handle.clone();
1526 let buffer = buffer_handle.read(cx);
1527 let buffer_id = buffer.remote_id();
1528 let worktree;
1529 let buffer_abs_path;
1530 if let Some(file) = File::from_dyn(buffer.file()) {
1531 worktree = file.worktree.clone();
1532 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1533 } else {
1534 return Task::ready(Ok(Default::default()));
1535 };
1536 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1537
1538 if worktree.read(cx).as_local().is_some() {
1539 let buffer_abs_path = buffer_abs_path.unwrap();
1540 let lang_name;
1541 let lang_server;
1542 if let Some(lang) = buffer.language() {
1543 lang_name = lang.name().to_string();
1544 if let Some(server) = self
1545 .language_servers
1546 .get(&(worktree.read(cx).id(), lang_name.clone()))
1547 {
1548 lang_server = server.clone();
1549 } else {
1550 return Task::ready(Ok(Default::default()));
1551 };
1552 } else {
1553 return Task::ready(Ok(Default::default()));
1554 }
1555
1556 let lsp_range = lsp::Range::new(
1557 range.start.to_point_utf16(buffer).to_lsp_position(),
1558 range.end.to_point_utf16(buffer).to_lsp_position(),
1559 );
1560 cx.foreground().spawn(async move {
1561 Ok(lang_server
1562 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1563 text_document: lsp::TextDocumentIdentifier::new(
1564 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1565 ),
1566 range: lsp_range,
1567 work_done_progress_params: Default::default(),
1568 partial_result_params: Default::default(),
1569 context: lsp::CodeActionContext {
1570 diagnostics: Default::default(),
1571 only: Some(vec![
1572 lsp::CodeActionKind::QUICKFIX,
1573 lsp::CodeActionKind::REFACTOR,
1574 lsp::CodeActionKind::REFACTOR_EXTRACT,
1575 ]),
1576 },
1577 })
1578 .await?
1579 .unwrap_or_default()
1580 .into_iter()
1581 .filter_map(|entry| {
1582 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1583 Some(CodeAction {
1584 range: range.clone(),
1585 lsp_action,
1586 })
1587 } else {
1588 None
1589 }
1590 })
1591 .collect())
1592 })
1593 } else if let Some(project_id) = self.remote_id() {
1594 let rpc = self.client.clone();
1595 cx.spawn_weak(|_, mut cx| async move {
1596 let response = rpc
1597 .request(proto::GetCodeActions {
1598 project_id,
1599 buffer_id,
1600 start: Some(language::proto::serialize_anchor(&range.start)),
1601 end: Some(language::proto::serialize_anchor(&range.end)),
1602 })
1603 .await?;
1604
1605 buffer_handle
1606 .update(&mut cx, |buffer, _| {
1607 buffer.wait_for_version(response.version.into())
1608 })
1609 .await;
1610
1611 response
1612 .actions
1613 .into_iter()
1614 .map(language::proto::deserialize_code_action)
1615 .collect()
1616 })
1617 } else {
1618 Task::ready(Ok(Default::default()))
1619 }
1620 }
1621
1622 pub fn apply_code_action(
1623 &self,
1624 buffer_handle: ModelHandle<Buffer>,
1625 mut action: CodeAction,
1626 push_to_history: bool,
1627 cx: &mut ModelContext<Self>,
1628 ) -> Task<Result<ProjectTransaction>> {
1629 if self.is_local() {
1630 let buffer = buffer_handle.read(cx);
1631 let lang_name = if let Some(lang) = buffer.language() {
1632 lang.name().to_string()
1633 } else {
1634 return Task::ready(Ok(Default::default()));
1635 };
1636 let lang_server = if let Some(language_server) = buffer.language_server() {
1637 language_server.clone()
1638 } else {
1639 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1640 };
1641 let range = action.range.to_point_utf16(buffer);
1642
1643 cx.spawn(|this, mut cx| async move {
1644 if let Some(lsp_range) = action
1645 .lsp_action
1646 .data
1647 .as_mut()
1648 .and_then(|d| d.get_mut("codeActionParams"))
1649 .and_then(|d| d.get_mut("range"))
1650 {
1651 *lsp_range = serde_json::to_value(&lsp::Range::new(
1652 range.start.to_lsp_position(),
1653 range.end.to_lsp_position(),
1654 ))
1655 .unwrap();
1656 action.lsp_action = lang_server
1657 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1658 .await?;
1659 } else {
1660 let actions = this
1661 .update(&mut cx, |this, cx| {
1662 this.code_actions(&buffer_handle, action.range, cx)
1663 })
1664 .await?;
1665 action.lsp_action = actions
1666 .into_iter()
1667 .find(|a| a.lsp_action.title == action.lsp_action.title)
1668 .ok_or_else(|| anyhow!("code action is outdated"))?
1669 .lsp_action;
1670 }
1671
1672 if let Some(edit) = action.lsp_action.edit {
1673 Self::deserialize_workspace_edit(
1674 this,
1675 edit,
1676 push_to_history,
1677 lang_name,
1678 lang_server,
1679 &mut cx,
1680 )
1681 .await
1682 } else {
1683 Ok(ProjectTransaction::default())
1684 }
1685 })
1686 } else if let Some(project_id) = self.remote_id() {
1687 let client = self.client.clone();
1688 let request = proto::ApplyCodeAction {
1689 project_id,
1690 buffer_id: buffer_handle.read(cx).remote_id(),
1691 action: Some(language::proto::serialize_code_action(&action)),
1692 };
1693 cx.spawn(|this, mut cx| async move {
1694 let response = client
1695 .request(request)
1696 .await?
1697 .transaction
1698 .ok_or_else(|| anyhow!("missing transaction"))?;
1699 this.update(&mut cx, |this, cx| {
1700 this.deserialize_project_transaction(response, push_to_history, cx)
1701 })
1702 .await
1703 })
1704 } else {
1705 Task::ready(Err(anyhow!("project does not have a remote id")))
1706 }
1707 }
1708
1709 async fn deserialize_workspace_edit(
1710 this: ModelHandle<Self>,
1711 edit: lsp::WorkspaceEdit,
1712 push_to_history: bool,
1713 language_name: String,
1714 language_server: Arc<LanguageServer>,
1715 cx: &mut AsyncAppContext,
1716 ) -> Result<ProjectTransaction> {
1717 let fs = this.read_with(cx, |this, _| this.fs.clone());
1718 let mut operations = Vec::new();
1719 if let Some(document_changes) = edit.document_changes {
1720 match document_changes {
1721 lsp::DocumentChanges::Edits(edits) => {
1722 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
1723 }
1724 lsp::DocumentChanges::Operations(ops) => operations = ops,
1725 }
1726 } else if let Some(changes) = edit.changes {
1727 operations.extend(changes.into_iter().map(|(uri, edits)| {
1728 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1729 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1730 uri,
1731 version: None,
1732 },
1733 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1734 })
1735 }));
1736 }
1737
1738 let mut project_transaction = ProjectTransaction::default();
1739 for operation in operations {
1740 match operation {
1741 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1742 let abs_path = op
1743 .uri
1744 .to_file_path()
1745 .map_err(|_| anyhow!("can't convert URI to path"))?;
1746
1747 if let Some(parent_path) = abs_path.parent() {
1748 fs.create_dir(parent_path).await?;
1749 }
1750 if abs_path.ends_with("/") {
1751 fs.create_dir(&abs_path).await?;
1752 } else {
1753 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
1754 .await?;
1755 }
1756 }
1757 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1758 let source_abs_path = op
1759 .old_uri
1760 .to_file_path()
1761 .map_err(|_| anyhow!("can't convert URI to path"))?;
1762 let target_abs_path = op
1763 .new_uri
1764 .to_file_path()
1765 .map_err(|_| anyhow!("can't convert URI to path"))?;
1766 fs.rename(
1767 &source_abs_path,
1768 &target_abs_path,
1769 op.options.map(Into::into).unwrap_or_default(),
1770 )
1771 .await?;
1772 }
1773 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1774 let abs_path = op
1775 .uri
1776 .to_file_path()
1777 .map_err(|_| anyhow!("can't convert URI to path"))?;
1778 let options = op.options.map(Into::into).unwrap_or_default();
1779 if abs_path.ends_with("/") {
1780 fs.remove_dir(&abs_path, options).await?;
1781 } else {
1782 fs.remove_file(&abs_path, options).await?;
1783 }
1784 }
1785 lsp::DocumentChangeOperation::Edit(op) => {
1786 let buffer_to_edit = this
1787 .update(cx, |this, cx| {
1788 this.open_local_buffer_from_lsp_path(
1789 op.text_document.uri,
1790 language_name.clone(),
1791 language_server.clone(),
1792 cx,
1793 )
1794 })
1795 .await?;
1796
1797 let edits = buffer_to_edit
1798 .update(cx, |buffer, cx| {
1799 let edits = op.edits.into_iter().map(|edit| match edit {
1800 lsp::OneOf::Left(edit) => edit,
1801 lsp::OneOf::Right(edit) => edit.text_edit,
1802 });
1803 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1804 })
1805 .await?;
1806
1807 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
1808 buffer.finalize_last_transaction();
1809 buffer.start_transaction();
1810 for (range, text) in edits {
1811 buffer.edit([range], text, cx);
1812 }
1813 let transaction = if buffer.end_transaction(cx).is_some() {
1814 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1815 if !push_to_history {
1816 buffer.forget_transaction(transaction.id);
1817 }
1818 Some(transaction)
1819 } else {
1820 None
1821 };
1822
1823 transaction
1824 });
1825 if let Some(transaction) = transaction {
1826 project_transaction.0.insert(buffer_to_edit, transaction);
1827 }
1828 }
1829 }
1830 }
1831
1832 Ok(project_transaction)
1833 }
1834
1835 pub fn prepare_rename<T: ToPointUtf16>(
1836 &self,
1837 buffer: ModelHandle<Buffer>,
1838 position: T,
1839 cx: &mut ModelContext<Self>,
1840 ) -> Task<Result<Option<Range<Anchor>>>> {
1841 let position = position.to_point_utf16(buffer.read(cx));
1842 self.request_lsp(buffer, PrepareRename { position }, cx)
1843 }
1844
1845 pub fn perform_rename<T: ToPointUtf16>(
1846 &self,
1847 buffer: ModelHandle<Buffer>,
1848 position: T,
1849 new_name: String,
1850 push_to_history: bool,
1851 cx: &mut ModelContext<Self>,
1852 ) -> Task<Result<ProjectTransaction>> {
1853 let position = position.to_point_utf16(buffer.read(cx));
1854 self.request_lsp(
1855 buffer,
1856 PerformRename {
1857 position,
1858 new_name,
1859 push_to_history,
1860 },
1861 cx,
1862 )
1863 }
1864
1865 fn request_lsp<R: LspCommand>(
1866 &self,
1867 buffer_handle: ModelHandle<Buffer>,
1868 request: R,
1869 cx: &mut ModelContext<Self>,
1870 ) -> Task<Result<R::Response>>
1871 where
1872 <R::LspRequest as lsp::request::Request>::Result: Send,
1873 {
1874 let buffer = buffer_handle.read(cx);
1875 if self.is_local() {
1876 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
1877 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
1878 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
1879 return cx.spawn(|this, cx| async move {
1880 let response = language_server
1881 .request::<R::LspRequest>(lsp_params)
1882 .await
1883 .context("lsp request failed")?;
1884 request
1885 .response_from_lsp(response, this, buffer_handle, cx)
1886 .await
1887 });
1888 }
1889 } else if let Some(project_id) = self.remote_id() {
1890 let rpc = self.client.clone();
1891 let message = request.to_proto(project_id, buffer);
1892 return cx.spawn(|this, cx| async move {
1893 let response = rpc.request(message).await?;
1894 request
1895 .response_from_proto(response, this, buffer_handle, cx)
1896 .await
1897 });
1898 }
1899 Task::ready(Ok(Default::default()))
1900 }
1901
1902 pub fn find_or_create_local_worktree(
1903 &self,
1904 abs_path: impl AsRef<Path>,
1905 weak: bool,
1906 cx: &mut ModelContext<Self>,
1907 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1908 let abs_path = abs_path.as_ref();
1909 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1910 Task::ready(Ok((tree.clone(), relative_path.into())))
1911 } else {
1912 let worktree = self.create_local_worktree(abs_path, weak, cx);
1913 cx.foreground()
1914 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1915 }
1916 }
1917
1918 fn find_local_worktree(
1919 &self,
1920 abs_path: &Path,
1921 cx: &AppContext,
1922 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1923 for tree in self.worktrees(cx) {
1924 if let Some(relative_path) = tree
1925 .read(cx)
1926 .as_local()
1927 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1928 {
1929 return Some((tree.clone(), relative_path.into()));
1930 }
1931 }
1932 None
1933 }
1934
1935 pub fn is_shared(&self) -> bool {
1936 match &self.client_state {
1937 ProjectClientState::Local { is_shared, .. } => *is_shared,
1938 ProjectClientState::Remote { .. } => false,
1939 }
1940 }
1941
1942 fn create_local_worktree(
1943 &self,
1944 abs_path: impl AsRef<Path>,
1945 weak: bool,
1946 cx: &mut ModelContext<Self>,
1947 ) -> Task<Result<ModelHandle<Worktree>>> {
1948 let fs = self.fs.clone();
1949 let client = self.client.clone();
1950 let path = Arc::from(abs_path.as_ref());
1951 cx.spawn(|project, mut cx| async move {
1952 let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;
1953
1954 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
1955 project.add_worktree(&worktree, cx);
1956 (project.remote_id(), project.is_shared())
1957 });
1958
1959 if let Some(project_id) = remote_project_id {
1960 worktree
1961 .update(&mut cx, |worktree, cx| {
1962 worktree.as_local_mut().unwrap().register(project_id, cx)
1963 })
1964 .await?;
1965 if is_shared {
1966 worktree
1967 .update(&mut cx, |worktree, cx| {
1968 worktree.as_local_mut().unwrap().share(project_id, cx)
1969 })
1970 .await?;
1971 }
1972 }
1973
1974 Ok(worktree)
1975 })
1976 }
1977
1978 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1979 self.worktrees.retain(|worktree| {
1980 worktree
1981 .upgrade(cx)
1982 .map_or(false, |w| w.read(cx).id() != id)
1983 });
1984 cx.notify();
1985 }
1986
1987 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
1988 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
1989 if worktree.read(cx).is_local() {
1990 cx.subscribe(&worktree, |this, worktree, _, cx| {
1991 this.update_local_worktree_buffers(worktree, cx);
1992 })
1993 .detach();
1994 }
1995
1996 let push_weak_handle = {
1997 let worktree = worktree.read(cx);
1998 worktree.is_local() && worktree.is_weak()
1999 };
2000 if push_weak_handle {
2001 cx.observe_release(&worktree, |this, cx| {
2002 this.worktrees
2003 .retain(|worktree| worktree.upgrade(cx).is_some());
2004 cx.notify();
2005 })
2006 .detach();
2007 self.worktrees
2008 .push(WorktreeHandle::Weak(worktree.downgrade()));
2009 } else {
2010 self.worktrees
2011 .push(WorktreeHandle::Strong(worktree.clone()));
2012 }
2013 cx.notify();
2014 }
2015
2016 fn update_local_worktree_buffers(
2017 &mut self,
2018 worktree_handle: ModelHandle<Worktree>,
2019 cx: &mut ModelContext<Self>,
2020 ) {
2021 let snapshot = worktree_handle.read(cx).snapshot();
2022 let mut buffers_to_delete = Vec::new();
2023 for (buffer_id, buffer) in &self.open_buffers {
2024 if let Some(buffer) = buffer.upgrade(cx) {
2025 buffer.update(cx, |buffer, cx| {
2026 if let Some(old_file) = File::from_dyn(buffer.file()) {
2027 if old_file.worktree != worktree_handle {
2028 return;
2029 }
2030
2031 let new_file = if let Some(entry) = old_file
2032 .entry_id
2033 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
2034 {
2035 File {
2036 is_local: true,
2037 entry_id: Some(entry.id),
2038 mtime: entry.mtime,
2039 path: entry.path.clone(),
2040 worktree: worktree_handle.clone(),
2041 }
2042 } else if let Some(entry) =
2043 snapshot.entry_for_path(old_file.path().as_ref())
2044 {
2045 File {
2046 is_local: true,
2047 entry_id: Some(entry.id),
2048 mtime: entry.mtime,
2049 path: entry.path.clone(),
2050 worktree: worktree_handle.clone(),
2051 }
2052 } else {
2053 File {
2054 is_local: true,
2055 entry_id: None,
2056 path: old_file.path().clone(),
2057 mtime: old_file.mtime(),
2058 worktree: worktree_handle.clone(),
2059 }
2060 };
2061
2062 if let Some(project_id) = self.remote_id() {
2063 self.client
2064 .send(proto::UpdateBufferFile {
2065 project_id,
2066 buffer_id: *buffer_id as u64,
2067 file: Some(new_file.to_proto()),
2068 })
2069 .log_err();
2070 }
2071 buffer.file_updated(Box::new(new_file), cx).detach();
2072 }
2073 });
2074 } else {
2075 buffers_to_delete.push(*buffer_id);
2076 }
2077 }
2078
2079 for buffer_id in buffers_to_delete {
2080 self.open_buffers.remove(&buffer_id);
2081 }
2082 }
2083
2084 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2085 let new_active_entry = entry.and_then(|project_path| {
2086 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2087 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2088 Some(ProjectEntry {
2089 worktree_id: project_path.worktree_id,
2090 entry_id: entry.id,
2091 })
2092 });
2093 if new_active_entry != self.active_entry {
2094 self.active_entry = new_active_entry;
2095 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2096 }
2097 }
2098
2099 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2100 self.language_servers_with_diagnostics_running > 0
2101 }
2102
2103 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2104 let mut summary = DiagnosticSummary::default();
2105 for (_, path_summary) in self.diagnostic_summaries(cx) {
2106 summary.error_count += path_summary.error_count;
2107 summary.warning_count += path_summary.warning_count;
2108 summary.info_count += path_summary.info_count;
2109 summary.hint_count += path_summary.hint_count;
2110 }
2111 summary
2112 }
2113
2114 pub fn diagnostic_summaries<'a>(
2115 &'a self,
2116 cx: &'a AppContext,
2117 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2118 self.worktrees(cx).flat_map(move |worktree| {
2119 let worktree = worktree.read(cx);
2120 let worktree_id = worktree.id();
2121 worktree
2122 .diagnostic_summaries()
2123 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2124 })
2125 }
2126
2127 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2128 self.language_servers_with_diagnostics_running += 1;
2129 if self.language_servers_with_diagnostics_running == 1 {
2130 cx.emit(Event::DiskBasedDiagnosticsStarted);
2131 }
2132 }
2133
2134 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2135 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2136 self.language_servers_with_diagnostics_running -= 1;
2137 if self.language_servers_with_diagnostics_running == 0 {
2138 cx.emit(Event::DiskBasedDiagnosticsFinished);
2139 }
2140 }
2141
2142 pub fn active_entry(&self) -> Option<ProjectEntry> {
2143 self.active_entry
2144 }
2145
2146 // RPC message handlers
2147
2148 async fn handle_unshare_project(
2149 this: ModelHandle<Self>,
2150 _: TypedEnvelope<proto::UnshareProject>,
2151 _: Arc<Client>,
2152 mut cx: AsyncAppContext,
2153 ) -> Result<()> {
2154 this.update(&mut cx, |this, cx| {
2155 if let ProjectClientState::Remote {
2156 sharing_has_stopped,
2157 ..
2158 } = &mut this.client_state
2159 {
2160 *sharing_has_stopped = true;
2161 this.collaborators.clear();
2162 cx.notify();
2163 } else {
2164 unreachable!()
2165 }
2166 });
2167
2168 Ok(())
2169 }
2170
2171 async fn handle_add_collaborator(
2172 this: ModelHandle<Self>,
2173 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2174 _: Arc<Client>,
2175 mut cx: AsyncAppContext,
2176 ) -> Result<()> {
2177 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2178 let collaborator = envelope
2179 .payload
2180 .collaborator
2181 .take()
2182 .ok_or_else(|| anyhow!("empty collaborator"))?;
2183
2184 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2185 this.update(&mut cx, |this, cx| {
2186 this.collaborators
2187 .insert(collaborator.peer_id, collaborator);
2188 cx.notify();
2189 });
2190
2191 Ok(())
2192 }
2193
2194 async fn handle_remove_collaborator(
2195 this: ModelHandle<Self>,
2196 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2197 _: Arc<Client>,
2198 mut cx: AsyncAppContext,
2199 ) -> Result<()> {
2200 this.update(&mut cx, |this, cx| {
2201 let peer_id = PeerId(envelope.payload.peer_id);
2202 let replica_id = this
2203 .collaborators
2204 .remove(&peer_id)
2205 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2206 .replica_id;
2207 this.shared_buffers.remove(&peer_id);
2208 for (_, buffer) in &this.open_buffers {
2209 if let Some(buffer) = buffer.upgrade(cx) {
2210 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2211 }
2212 }
2213 cx.notify();
2214 Ok(())
2215 })
2216 }
2217
2218 async fn handle_share_worktree(
2219 this: ModelHandle<Self>,
2220 envelope: TypedEnvelope<proto::ShareWorktree>,
2221 client: Arc<Client>,
2222 mut cx: AsyncAppContext,
2223 ) -> Result<()> {
2224 this.update(&mut cx, |this, cx| {
2225 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2226 let replica_id = this.replica_id();
2227 let worktree = envelope
2228 .payload
2229 .worktree
2230 .ok_or_else(|| anyhow!("invalid worktree"))?;
2231 let (worktree, load_task) =
2232 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2233 this.add_worktree(&worktree, cx);
2234 load_task.detach();
2235 Ok(())
2236 })
2237 }
2238
2239 async fn handle_unregister_worktree(
2240 this: ModelHandle<Self>,
2241 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2242 _: Arc<Client>,
2243 mut cx: AsyncAppContext,
2244 ) -> Result<()> {
2245 this.update(&mut cx, |this, cx| {
2246 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2247 this.remove_worktree(worktree_id, cx);
2248 Ok(())
2249 })
2250 }
2251
2252 async fn handle_update_worktree(
2253 this: ModelHandle<Self>,
2254 envelope: TypedEnvelope<proto::UpdateWorktree>,
2255 _: Arc<Client>,
2256 mut cx: AsyncAppContext,
2257 ) -> Result<()> {
2258 this.update(&mut cx, |this, cx| {
2259 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2260 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2261 worktree.update(cx, |worktree, _| {
2262 let worktree = worktree.as_remote_mut().unwrap();
2263 worktree.update_from_remote(envelope)
2264 })?;
2265 }
2266 Ok(())
2267 })
2268 }
2269
2270 async fn handle_update_diagnostic_summary(
2271 this: ModelHandle<Self>,
2272 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2273 _: Arc<Client>,
2274 mut cx: AsyncAppContext,
2275 ) -> Result<()> {
2276 this.update(&mut cx, |this, cx| {
2277 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2278 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2279 if let Some(summary) = envelope.payload.summary {
2280 let project_path = ProjectPath {
2281 worktree_id,
2282 path: Path::new(&summary.path).into(),
2283 };
2284 worktree.update(cx, |worktree, _| {
2285 worktree
2286 .as_remote_mut()
2287 .unwrap()
2288 .update_diagnostic_summary(project_path.path.clone(), &summary);
2289 });
2290 cx.emit(Event::DiagnosticsUpdated(project_path));
2291 }
2292 }
2293 Ok(())
2294 })
2295 }
2296
2297 async fn handle_disk_based_diagnostics_updating(
2298 this: ModelHandle<Self>,
2299 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2300 _: Arc<Client>,
2301 mut cx: AsyncAppContext,
2302 ) -> Result<()> {
2303 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2304 Ok(())
2305 }
2306
2307 async fn handle_disk_based_diagnostics_updated(
2308 this: ModelHandle<Self>,
2309 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2310 _: Arc<Client>,
2311 mut cx: AsyncAppContext,
2312 ) -> Result<()> {
2313 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2314 Ok(())
2315 }
2316
2317 async fn handle_update_buffer(
2318 this: ModelHandle<Self>,
2319 envelope: TypedEnvelope<proto::UpdateBuffer>,
2320 _: Arc<Client>,
2321 mut cx: AsyncAppContext,
2322 ) -> Result<()> {
2323 this.update(&mut cx, |this, cx| {
2324 let payload = envelope.payload.clone();
2325 let buffer_id = payload.buffer_id;
2326 let ops = payload
2327 .operations
2328 .into_iter()
2329 .map(|op| language::proto::deserialize_operation(op))
2330 .collect::<Result<Vec<_>, _>>()?;
2331 let is_remote = this.is_remote();
2332 match this.open_buffers.entry(buffer_id) {
2333 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2334 OpenBuffer::Loaded(buffer) => {
2335 if let Some(buffer) = buffer.upgrade(cx) {
2336 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2337 }
2338 }
2339 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2340 },
2341 hash_map::Entry::Vacant(e) => {
2342 if is_remote && this.loading_buffers.len() > 0 {
2343 e.insert(OpenBuffer::Loading(ops));
2344 }
2345 }
2346 }
2347 Ok(())
2348 })
2349 }
2350
2351 async fn handle_update_buffer_file(
2352 this: ModelHandle<Self>,
2353 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2354 _: Arc<Client>,
2355 mut cx: AsyncAppContext,
2356 ) -> Result<()> {
2357 this.update(&mut cx, |this, cx| {
2358 let payload = envelope.payload.clone();
2359 let buffer_id = payload.buffer_id;
2360 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2361 let worktree = this
2362 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2363 .ok_or_else(|| anyhow!("no such worktree"))?;
2364 let file = File::from_proto(file, worktree.clone(), cx)?;
2365 let buffer = this
2366 .open_buffers
2367 .get_mut(&buffer_id)
2368 .and_then(|b| b.upgrade(cx))
2369 .ok_or_else(|| anyhow!("no such buffer"))?;
2370 buffer.update(cx, |buffer, cx| {
2371 buffer.file_updated(Box::new(file), cx).detach();
2372 });
2373 Ok(())
2374 })
2375 }
2376
2377 async fn handle_save_buffer(
2378 this: ModelHandle<Self>,
2379 envelope: TypedEnvelope<proto::SaveBuffer>,
2380 _: Arc<Client>,
2381 mut cx: AsyncAppContext,
2382 ) -> Result<proto::BufferSaved> {
2383 let buffer_id = envelope.payload.buffer_id;
2384 let sender_id = envelope.original_sender_id()?;
2385 let requested_version = envelope.payload.version.try_into()?;
2386
2387 let (project_id, buffer) = this.update(&mut cx, |this, _| {
2388 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2389 let buffer = this
2390 .shared_buffers
2391 .get(&sender_id)
2392 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2393 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2394 Ok::<_, anyhow::Error>((project_id, buffer))
2395 })?;
2396
2397 if !buffer
2398 .read_with(&cx, |buffer, _| buffer.version())
2399 .observed_all(&requested_version)
2400 {
2401 Err(anyhow!("save request depends on unreceived edits"))?;
2402 }
2403
2404 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2405 Ok(proto::BufferSaved {
2406 project_id,
2407 buffer_id,
2408 version: (&saved_version).into(),
2409 mtime: Some(mtime.into()),
2410 })
2411 }
2412
2413 async fn handle_format_buffers(
2414 this: ModelHandle<Self>,
2415 envelope: TypedEnvelope<proto::FormatBuffers>,
2416 _: Arc<Client>,
2417 mut cx: AsyncAppContext,
2418 ) -> Result<proto::FormatBuffersResponse> {
2419 let sender_id = envelope.original_sender_id()?;
2420 let format = this.update(&mut cx, |this, cx| {
2421 let shared_buffers = this
2422 .shared_buffers
2423 .get(&sender_id)
2424 .ok_or_else(|| anyhow!("peer has no buffers"))?;
2425 let mut buffers = HashSet::default();
2426 for buffer_id in &envelope.payload.buffer_ids {
2427 buffers.insert(
2428 shared_buffers
2429 .get(buffer_id)
2430 .cloned()
2431 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2432 );
2433 }
2434 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2435 })?;
2436
2437 let project_transaction = format.await?;
2438 let project_transaction = this.update(&mut cx, |this, cx| {
2439 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2440 });
2441 Ok(proto::FormatBuffersResponse {
2442 transaction: Some(project_transaction),
2443 })
2444 }
2445
2446 async fn handle_get_completions(
2447 this: ModelHandle<Self>,
2448 envelope: TypedEnvelope<proto::GetCompletions>,
2449 _: Arc<Client>,
2450 mut cx: AsyncAppContext,
2451 ) -> Result<proto::GetCompletionsResponse> {
2452 let sender_id = envelope.original_sender_id()?;
2453 let position = envelope
2454 .payload
2455 .position
2456 .and_then(language::proto::deserialize_anchor)
2457 .ok_or_else(|| anyhow!("invalid position"))?;
2458 let version = clock::Global::from(envelope.payload.version);
2459 let buffer = this.read_with(&cx, |this, _| {
2460 this.shared_buffers
2461 .get(&sender_id)
2462 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2463 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2464 })?;
2465 if !buffer
2466 .read_with(&cx, |buffer, _| buffer.version())
2467 .observed_all(&version)
2468 {
2469 Err(anyhow!("completion request depends on unreceived edits"))?;
2470 }
2471 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2472 let completions = this
2473 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2474 .await?;
2475
2476 Ok(proto::GetCompletionsResponse {
2477 completions: completions
2478 .iter()
2479 .map(language::proto::serialize_completion)
2480 .collect(),
2481 version: (&version).into(),
2482 })
2483 }
2484
2485 async fn handle_apply_additional_edits_for_completion(
2486 this: ModelHandle<Self>,
2487 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2488 _: Arc<Client>,
2489 mut cx: AsyncAppContext,
2490 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2491 let sender_id = envelope.original_sender_id()?;
2492 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2493 let buffer = this
2494 .shared_buffers
2495 .get(&sender_id)
2496 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2497 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2498 let language = buffer.read(cx).language();
2499 let completion = language::proto::deserialize_completion(
2500 envelope
2501 .payload
2502 .completion
2503 .ok_or_else(|| anyhow!("invalid completion"))?,
2504 language,
2505 )?;
2506 Ok::<_, anyhow::Error>(
2507 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2508 )
2509 })?;
2510
2511 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2512 transaction: apply_additional_edits
2513 .await?
2514 .as_ref()
2515 .map(language::proto::serialize_transaction),
2516 })
2517 }
2518
2519 async fn handle_get_code_actions(
2520 this: ModelHandle<Self>,
2521 envelope: TypedEnvelope<proto::GetCodeActions>,
2522 _: Arc<Client>,
2523 mut cx: AsyncAppContext,
2524 ) -> Result<proto::GetCodeActionsResponse> {
2525 let sender_id = envelope.original_sender_id()?;
2526 let start = envelope
2527 .payload
2528 .start
2529 .and_then(language::proto::deserialize_anchor)
2530 .ok_or_else(|| anyhow!("invalid start"))?;
2531 let end = envelope
2532 .payload
2533 .end
2534 .and_then(language::proto::deserialize_anchor)
2535 .ok_or_else(|| anyhow!("invalid end"))?;
2536 let buffer = this.update(&mut cx, |this, _| {
2537 this.shared_buffers
2538 .get(&sender_id)
2539 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2540 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2541 })?;
2542 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2543 if !version.observed(start.timestamp) || !version.observed(end.timestamp) {
2544 Err(anyhow!("code action request references unreceived edits"))?;
2545 }
2546 let code_actions = this.update(&mut cx, |this, cx| {
2547 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2548 })?;
2549
2550 Ok(proto::GetCodeActionsResponse {
2551 actions: code_actions
2552 .await?
2553 .iter()
2554 .map(language::proto::serialize_code_action)
2555 .collect(),
2556 version: (&version).into(),
2557 })
2558 }
2559
2560 async fn handle_apply_code_action(
2561 this: ModelHandle<Self>,
2562 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2563 _: Arc<Client>,
2564 mut cx: AsyncAppContext,
2565 ) -> Result<proto::ApplyCodeActionResponse> {
2566 let sender_id = envelope.original_sender_id()?;
2567 let action = language::proto::deserialize_code_action(
2568 envelope
2569 .payload
2570 .action
2571 .ok_or_else(|| anyhow!("invalid action"))?,
2572 )?;
2573 let apply_code_action = this.update(&mut cx, |this, cx| {
2574 let buffer = this
2575 .shared_buffers
2576 .get(&sender_id)
2577 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2578 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2579 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2580 })?;
2581
2582 let project_transaction = apply_code_action.await?;
2583 let project_transaction = this.update(&mut cx, |this, cx| {
2584 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2585 });
2586 Ok(proto::ApplyCodeActionResponse {
2587 transaction: Some(project_transaction),
2588 })
2589 }
2590
2591 async fn handle_lsp_command<T: LspCommand>(
2592 this: ModelHandle<Self>,
2593 envelope: TypedEnvelope<T::ProtoRequest>,
2594 _: Arc<Client>,
2595 mut cx: AsyncAppContext,
2596 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
2597 where
2598 <T::LspRequest as lsp::request::Request>::Result: Send,
2599 {
2600 let sender_id = envelope.original_sender_id()?;
2601 let (request, buffer_version) = this.update(&mut cx, |this, cx| {
2602 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
2603 let buffer_handle = this
2604 .shared_buffers
2605 .get(&sender_id)
2606 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2607 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2608 let buffer = buffer_handle.read(cx);
2609 let buffer_version = buffer.version();
2610 let request = T::from_proto(envelope.payload, this, buffer)?;
2611 Ok::<_, anyhow::Error>((this.request_lsp(buffer_handle, request, cx), buffer_version))
2612 })?;
2613 let response = request.await?;
2614 this.update(&mut cx, |this, cx| {
2615 Ok(T::response_to_proto(
2616 response,
2617 this,
2618 sender_id,
2619 &buffer_version,
2620 cx,
2621 ))
2622 })
2623 }
2624
2625 async fn handle_get_project_symbols(
2626 this: ModelHandle<Self>,
2627 envelope: TypedEnvelope<proto::GetProjectSymbols>,
2628 _: Arc<Client>,
2629 mut cx: AsyncAppContext,
2630 ) -> Result<proto::GetProjectSymbolsResponse> {
2631 let symbols = this
2632 .update(&mut cx, |this, cx| {
2633 this.symbols(&envelope.payload.query, cx)
2634 })
2635 .await?;
2636
2637 let mut languages = Vec::new();
2638 let mut symbol_counts_per_language = Vec::new();
2639 let mut serialized_symbols = Vec::new();
2640 for (language_name, language_symbols) in symbols {
2641 languages.push(language_name);
2642 symbol_counts_per_language.push(language_symbols.len() as u64);
2643 serialized_symbols.extend(language_symbols.into_iter().map(|symbol| proto::Symbol {
2644 lsp_symbol: serde_json::to_vec(&symbol.lsp_symbol).unwrap(),
2645 }));
2646 }
2647
2648 Ok(proto::GetProjectSymbolsResponse {
2649 languages,
2650 symbol_counts_per_language,
2651 symbols: serialized_symbols,
2652 })
2653 }
2654
2655 async fn handle_open_buffer(
2656 this: ModelHandle<Self>,
2657 envelope: TypedEnvelope<proto::OpenBuffer>,
2658 _: Arc<Client>,
2659 mut cx: AsyncAppContext,
2660 ) -> Result<proto::OpenBufferResponse> {
2661 let peer_id = envelope.original_sender_id()?;
2662 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2663 let open_buffer = this.update(&mut cx, |this, cx| {
2664 this.open_buffer(
2665 ProjectPath {
2666 worktree_id,
2667 path: PathBuf::from(envelope.payload.path).into(),
2668 },
2669 cx,
2670 )
2671 });
2672
2673 let buffer = open_buffer.await?;
2674 this.update(&mut cx, |this, cx| {
2675 Ok(proto::OpenBufferResponse {
2676 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
2677 })
2678 })
2679 }
2680
2681 fn serialize_project_transaction_for_peer(
2682 &mut self,
2683 project_transaction: ProjectTransaction,
2684 peer_id: PeerId,
2685 cx: &AppContext,
2686 ) -> proto::ProjectTransaction {
2687 let mut serialized_transaction = proto::ProjectTransaction {
2688 buffers: Default::default(),
2689 transactions: Default::default(),
2690 };
2691 for (buffer, transaction) in project_transaction.0 {
2692 serialized_transaction
2693 .buffers
2694 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
2695 serialized_transaction
2696 .transactions
2697 .push(language::proto::serialize_transaction(&transaction));
2698 }
2699 serialized_transaction
2700 }
2701
2702 fn deserialize_project_transaction(
2703 &mut self,
2704 message: proto::ProjectTransaction,
2705 push_to_history: bool,
2706 cx: &mut ModelContext<Self>,
2707 ) -> Task<Result<ProjectTransaction>> {
2708 cx.spawn(|this, mut cx| async move {
2709 let mut project_transaction = ProjectTransaction::default();
2710 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
2711 let buffer = this
2712 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2713 .await?;
2714 let transaction = language::proto::deserialize_transaction(transaction)?;
2715 project_transaction.0.insert(buffer, transaction);
2716 }
2717 for (buffer, transaction) in &project_transaction.0 {
2718 buffer
2719 .update(&mut cx, |buffer, _| {
2720 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2721 })
2722 .await;
2723
2724 if push_to_history {
2725 buffer.update(&mut cx, |buffer, _| {
2726 buffer.push_transaction(transaction.clone(), Instant::now());
2727 });
2728 }
2729 }
2730
2731 Ok(project_transaction)
2732 })
2733 }
2734
2735 fn serialize_buffer_for_peer(
2736 &mut self,
2737 buffer: &ModelHandle<Buffer>,
2738 peer_id: PeerId,
2739 cx: &AppContext,
2740 ) -> proto::Buffer {
2741 let buffer_id = buffer.read(cx).remote_id();
2742 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2743 match shared_buffers.entry(buffer_id) {
2744 hash_map::Entry::Occupied(_) => proto::Buffer {
2745 variant: Some(proto::buffer::Variant::Id(buffer_id)),
2746 },
2747 hash_map::Entry::Vacant(entry) => {
2748 entry.insert(buffer.clone());
2749 proto::Buffer {
2750 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
2751 }
2752 }
2753 }
2754 }
2755
2756 fn deserialize_buffer(
2757 &mut self,
2758 buffer: proto::Buffer,
2759 cx: &mut ModelContext<Self>,
2760 ) -> Task<Result<ModelHandle<Buffer>>> {
2761 let replica_id = self.replica_id();
2762
2763 let mut opened_buffer_tx = self.opened_buffer.clone();
2764 let mut opened_buffer_rx = self.opened_buffer.subscribe();
2765 cx.spawn(|this, mut cx| async move {
2766 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
2767 proto::buffer::Variant::Id(id) => {
2768 let buffer = loop {
2769 let buffer = this.read_with(&cx, |this, cx| {
2770 this.open_buffers
2771 .get(&id)
2772 .and_then(|buffer| buffer.upgrade(cx))
2773 });
2774 if let Some(buffer) = buffer {
2775 break buffer;
2776 }
2777 opened_buffer_rx
2778 .recv()
2779 .await
2780 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
2781 };
2782 Ok(buffer)
2783 }
2784 proto::buffer::Variant::State(mut buffer) => {
2785 let mut buffer_worktree = None;
2786 let mut buffer_file = None;
2787 if let Some(file) = buffer.file.take() {
2788 this.read_with(&cx, |this, cx| {
2789 let worktree_id = WorktreeId::from_proto(file.worktree_id);
2790 let worktree =
2791 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
2792 anyhow!("no worktree found for id {}", file.worktree_id)
2793 })?;
2794 buffer_file =
2795 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
2796 as Box<dyn language::File>);
2797 buffer_worktree = Some(worktree);
2798 Ok::<_, anyhow::Error>(())
2799 })?;
2800 }
2801
2802 let buffer = cx.add_model(|cx| {
2803 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
2804 });
2805 this.update(&mut cx, |this, cx| {
2806 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
2807 })?;
2808
2809 let _ = opened_buffer_tx.send(()).await;
2810 Ok(buffer)
2811 }
2812 }
2813 })
2814 }
2815
2816 async fn handle_close_buffer(
2817 this: ModelHandle<Self>,
2818 envelope: TypedEnvelope<proto::CloseBuffer>,
2819 _: Arc<Client>,
2820 mut cx: AsyncAppContext,
2821 ) -> Result<()> {
2822 this.update(&mut cx, |this, cx| {
2823 if let Some(shared_buffers) =
2824 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
2825 {
2826 shared_buffers.remove(&envelope.payload.buffer_id);
2827 cx.notify();
2828 }
2829 Ok(())
2830 })
2831 }
2832
2833 async fn handle_buffer_saved(
2834 this: ModelHandle<Self>,
2835 envelope: TypedEnvelope<proto::BufferSaved>,
2836 _: Arc<Client>,
2837 mut cx: AsyncAppContext,
2838 ) -> Result<()> {
2839 let version = envelope.payload.version.try_into()?;
2840 let mtime = envelope
2841 .payload
2842 .mtime
2843 .ok_or_else(|| anyhow!("missing mtime"))?
2844 .into();
2845
2846 this.update(&mut cx, |this, cx| {
2847 let buffer = this
2848 .open_buffers
2849 .get(&envelope.payload.buffer_id)
2850 .and_then(|buffer| buffer.upgrade(cx));
2851 if let Some(buffer) = buffer {
2852 buffer.update(cx, |buffer, cx| {
2853 buffer.did_save(version, mtime, None, cx);
2854 });
2855 }
2856 Ok(())
2857 })
2858 }
2859
2860 async fn handle_buffer_reloaded(
2861 this: ModelHandle<Self>,
2862 envelope: TypedEnvelope<proto::BufferReloaded>,
2863 _: Arc<Client>,
2864 mut cx: AsyncAppContext,
2865 ) -> Result<()> {
2866 let payload = envelope.payload.clone();
2867 let version = payload.version.try_into()?;
2868 let mtime = payload
2869 .mtime
2870 .ok_or_else(|| anyhow!("missing mtime"))?
2871 .into();
2872 this.update(&mut cx, |this, cx| {
2873 let buffer = this
2874 .open_buffers
2875 .get(&payload.buffer_id)
2876 .and_then(|buffer| buffer.upgrade(cx));
2877 if let Some(buffer) = buffer {
2878 buffer.update(cx, |buffer, cx| {
2879 buffer.did_reload(version, mtime, cx);
2880 });
2881 }
2882 Ok(())
2883 })
2884 }
2885
2886 pub fn match_paths<'a>(
2887 &self,
2888 query: &'a str,
2889 include_ignored: bool,
2890 smart_case: bool,
2891 max_results: usize,
2892 cancel_flag: &'a AtomicBool,
2893 cx: &AppContext,
2894 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
2895 let worktrees = self
2896 .worktrees(cx)
2897 .filter(|worktree| !worktree.read(cx).is_weak())
2898 .collect::<Vec<_>>();
2899 let include_root_name = worktrees.len() > 1;
2900 let candidate_sets = worktrees
2901 .into_iter()
2902 .map(|worktree| CandidateSet {
2903 snapshot: worktree.read(cx).snapshot(),
2904 include_ignored,
2905 include_root_name,
2906 })
2907 .collect::<Vec<_>>();
2908
2909 let background = cx.background().clone();
2910 async move {
2911 fuzzy::match_paths(
2912 candidate_sets.as_slice(),
2913 query,
2914 smart_case,
2915 max_results,
2916 cancel_flag,
2917 background,
2918 )
2919 .await
2920 }
2921 }
2922}
2923
2924impl WorktreeHandle {
2925 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2926 match self {
2927 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2928 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2929 }
2930 }
2931}
2932
2933impl OpenBuffer {
2934 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
2935 match self {
2936 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
2937 OpenBuffer::Loading(_) => None,
2938 }
2939 }
2940}
2941
2942struct CandidateSet {
2943 snapshot: Snapshot,
2944 include_ignored: bool,
2945 include_root_name: bool,
2946}
2947
2948impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
2949 type Candidates = CandidateSetIter<'a>;
2950
2951 fn id(&self) -> usize {
2952 self.snapshot.id().to_usize()
2953 }
2954
2955 fn len(&self) -> usize {
2956 if self.include_ignored {
2957 self.snapshot.file_count()
2958 } else {
2959 self.snapshot.visible_file_count()
2960 }
2961 }
2962
2963 fn prefix(&self) -> Arc<str> {
2964 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
2965 self.snapshot.root_name().into()
2966 } else if self.include_root_name {
2967 format!("{}/", self.snapshot.root_name()).into()
2968 } else {
2969 "".into()
2970 }
2971 }
2972
2973 fn candidates(&'a self, start: usize) -> Self::Candidates {
2974 CandidateSetIter {
2975 traversal: self.snapshot.files(self.include_ignored, start),
2976 }
2977 }
2978}
2979
2980struct CandidateSetIter<'a> {
2981 traversal: Traversal<'a>,
2982}
2983
2984impl<'a> Iterator for CandidateSetIter<'a> {
2985 type Item = PathMatchCandidate<'a>;
2986
2987 fn next(&mut self) -> Option<Self::Item> {
2988 self.traversal.next().map(|entry| {
2989 if let EntryKind::File(char_bag) = entry.kind {
2990 PathMatchCandidate {
2991 path: &entry.path,
2992 char_bag,
2993 }
2994 } else {
2995 unreachable!()
2996 }
2997 })
2998 }
2999}
3000
3001impl Entity for Project {
3002 type Event = Event;
3003
3004 fn release(&mut self, _: &mut gpui::MutableAppContext) {
3005 match &self.client_state {
3006 ProjectClientState::Local { remote_id_rx, .. } => {
3007 if let Some(project_id) = *remote_id_rx.borrow() {
3008 self.client
3009 .send(proto::UnregisterProject { project_id })
3010 .log_err();
3011 }
3012 }
3013 ProjectClientState::Remote { remote_id, .. } => {
3014 self.client
3015 .send(proto::LeaveProject {
3016 project_id: *remote_id,
3017 })
3018 .log_err();
3019 }
3020 }
3021 }
3022
3023 fn app_will_quit(
3024 &mut self,
3025 _: &mut MutableAppContext,
3026 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
3027 let shutdown_futures = self
3028 .language_servers
3029 .drain()
3030 .filter_map(|(_, server)| server.shutdown())
3031 .collect::<Vec<_>>();
3032 Some(
3033 async move {
3034 futures::future::join_all(shutdown_futures).await;
3035 }
3036 .boxed(),
3037 )
3038 }
3039}
3040
3041impl Collaborator {
3042 fn from_proto(
3043 message: proto::Collaborator,
3044 user_store: &ModelHandle<UserStore>,
3045 cx: &mut AsyncAppContext,
3046 ) -> impl Future<Output = Result<Self>> {
3047 let user = user_store.update(cx, |user_store, cx| {
3048 user_store.fetch_user(message.user_id, cx)
3049 });
3050
3051 async move {
3052 Ok(Self {
3053 peer_id: PeerId(message.peer_id),
3054 user: user.await?,
3055 replica_id: message.replica_id as ReplicaId,
3056 })
3057 }
3058 }
3059}
3060
3061impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
3062 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
3063 Self {
3064 worktree_id,
3065 path: path.as_ref().into(),
3066 }
3067 }
3068}
3069
3070impl From<lsp::CreateFileOptions> for fs::CreateOptions {
3071 fn from(options: lsp::CreateFileOptions) -> Self {
3072 Self {
3073 overwrite: options.overwrite.unwrap_or(false),
3074 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3075 }
3076 }
3077}
3078
3079impl From<lsp::RenameFileOptions> for fs::RenameOptions {
3080 fn from(options: lsp::RenameFileOptions) -> Self {
3081 Self {
3082 overwrite: options.overwrite.unwrap_or(false),
3083 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3084 }
3085 }
3086}
3087
3088impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
3089 fn from(options: lsp::DeleteFileOptions) -> Self {
3090 Self {
3091 recursive: options.recursive.unwrap_or(false),
3092 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
3093 }
3094 }
3095}
3096
3097#[cfg(test)]
3098mod tests {
3099 use super::{Event, *};
3100 use fs::RealFs;
3101 use futures::StreamExt;
3102 use gpui::test::subscribe;
3103 use language::{
3104 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
3105 };
3106 use lsp::Url;
3107 use serde_json::json;
3108 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
3109 use unindent::Unindent as _;
3110 use util::test::temp_tree;
3111 use worktree::WorktreeHandle as _;
3112
3113 #[gpui::test]
3114 async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
3115 let dir = temp_tree(json!({
3116 "root": {
3117 "apple": "",
3118 "banana": {
3119 "carrot": {
3120 "date": "",
3121 "endive": "",
3122 }
3123 },
3124 "fennel": {
3125 "grape": "",
3126 }
3127 }
3128 }));
3129
3130 let root_link_path = dir.path().join("root_link");
3131 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3132 unix::fs::symlink(
3133 &dir.path().join("root/fennel"),
3134 &dir.path().join("root/finnochio"),
3135 )
3136 .unwrap();
3137
3138 let project = Project::test(Arc::new(RealFs), &mut cx);
3139
3140 let (tree, _) = project
3141 .update(&mut cx, |project, cx| {
3142 project.find_or_create_local_worktree(&root_link_path, false, cx)
3143 })
3144 .await
3145 .unwrap();
3146
3147 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3148 .await;
3149 cx.read(|cx| {
3150 let tree = tree.read(cx);
3151 assert_eq!(tree.file_count(), 5);
3152 assert_eq!(
3153 tree.inode_for_path("fennel/grape"),
3154 tree.inode_for_path("finnochio/grape")
3155 );
3156 });
3157
3158 let cancel_flag = Default::default();
3159 let results = project
3160 .read_with(&cx, |project, cx| {
3161 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3162 })
3163 .await;
3164 assert_eq!(
3165 results
3166 .into_iter()
3167 .map(|result| result.path)
3168 .collect::<Vec<Arc<Path>>>(),
3169 vec![
3170 PathBuf::from("banana/carrot/date").into(),
3171 PathBuf::from("banana/carrot/endive").into(),
3172 ]
3173 );
3174 }
3175
3176 #[gpui::test]
3177 async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
3178 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3179 let progress_token = language_server_config
3180 .disk_based_diagnostics_progress_token
3181 .clone()
3182 .unwrap();
3183
3184 let language = Arc::new(Language::new(
3185 LanguageConfig {
3186 name: "Rust".to_string(),
3187 path_suffixes: vec!["rs".to_string()],
3188 language_server: Some(language_server_config),
3189 ..Default::default()
3190 },
3191 Some(tree_sitter_rust::language()),
3192 ));
3193
3194 let fs = FakeFs::new(cx.background());
3195 fs.insert_tree(
3196 "/dir",
3197 json!({
3198 "a.rs": "fn a() { A }",
3199 "b.rs": "const y: i32 = 1",
3200 }),
3201 )
3202 .await;
3203
3204 let project = Project::test(fs, &mut cx);
3205 project.update(&mut cx, |project, _| {
3206 Arc::get_mut(&mut project.languages).unwrap().add(language);
3207 });
3208
3209 let (tree, _) = project
3210 .update(&mut cx, |project, cx| {
3211 project.find_or_create_local_worktree("/dir", false, cx)
3212 })
3213 .await
3214 .unwrap();
3215 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3216
3217 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3218 .await;
3219
3220 // Cause worktree to start the fake language server
3221 let _buffer = project
3222 .update(&mut cx, |project, cx| {
3223 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3224 })
3225 .await
3226 .unwrap();
3227
3228 let mut events = subscribe(&project, &mut cx);
3229
3230 let mut fake_server = fake_servers.next().await.unwrap();
3231 fake_server.start_progress(&progress_token).await;
3232 assert_eq!(
3233 events.next().await.unwrap(),
3234 Event::DiskBasedDiagnosticsStarted
3235 );
3236
3237 fake_server.start_progress(&progress_token).await;
3238 fake_server.end_progress(&progress_token).await;
3239 fake_server.start_progress(&progress_token).await;
3240
3241 fake_server
3242 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3243 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3244 version: None,
3245 diagnostics: vec![lsp::Diagnostic {
3246 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3247 severity: Some(lsp::DiagnosticSeverity::ERROR),
3248 message: "undefined variable 'A'".to_string(),
3249 ..Default::default()
3250 }],
3251 })
3252 .await;
3253 assert_eq!(
3254 events.next().await.unwrap(),
3255 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3256 );
3257
3258 fake_server.end_progress(&progress_token).await;
3259 fake_server.end_progress(&progress_token).await;
3260 assert_eq!(
3261 events.next().await.unwrap(),
3262 Event::DiskBasedDiagnosticsUpdated
3263 );
3264 assert_eq!(
3265 events.next().await.unwrap(),
3266 Event::DiskBasedDiagnosticsFinished
3267 );
3268
3269 let buffer = project
3270 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3271 .await
3272 .unwrap();
3273
3274 buffer.read_with(&cx, |buffer, _| {
3275 let snapshot = buffer.snapshot();
3276 let diagnostics = snapshot
3277 .diagnostics_in_range::<_, Point>(0..buffer.len())
3278 .collect::<Vec<_>>();
3279 assert_eq!(
3280 diagnostics,
3281 &[DiagnosticEntry {
3282 range: Point::new(0, 9)..Point::new(0, 10),
3283 diagnostic: Diagnostic {
3284 severity: lsp::DiagnosticSeverity::ERROR,
3285 message: "undefined variable 'A'".to_string(),
3286 group_id: 0,
3287 is_primary: true,
3288 ..Default::default()
3289 }
3290 }]
3291 )
3292 });
3293 }
3294
3295 #[gpui::test]
3296 async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
3297 let dir = temp_tree(json!({
3298 "root": {
3299 "dir1": {},
3300 "dir2": {
3301 "dir3": {}
3302 }
3303 }
3304 }));
3305
3306 let project = Project::test(Arc::new(RealFs), &mut cx);
3307 let (tree, _) = project
3308 .update(&mut cx, |project, cx| {
3309 project.find_or_create_local_worktree(&dir.path(), false, cx)
3310 })
3311 .await
3312 .unwrap();
3313
3314 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3315 .await;
3316
3317 let cancel_flag = Default::default();
3318 let results = project
3319 .read_with(&cx, |project, cx| {
3320 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3321 })
3322 .await;
3323
3324 assert!(results.is_empty());
3325 }
3326
3327 #[gpui::test]
3328 async fn test_definition(mut cx: gpui::TestAppContext) {
3329 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3330 let language = Arc::new(Language::new(
3331 LanguageConfig {
3332 name: "Rust".to_string(),
3333 path_suffixes: vec!["rs".to_string()],
3334 language_server: Some(language_server_config),
3335 ..Default::default()
3336 },
3337 Some(tree_sitter_rust::language()),
3338 ));
3339
3340 let fs = FakeFs::new(cx.background());
3341 fs.insert_tree(
3342 "/dir",
3343 json!({
3344 "a.rs": "const fn a() { A }",
3345 "b.rs": "const y: i32 = crate::a()",
3346 }),
3347 )
3348 .await;
3349
3350 let project = Project::test(fs, &mut cx);
3351 project.update(&mut cx, |project, _| {
3352 Arc::get_mut(&mut project.languages).unwrap().add(language);
3353 });
3354
3355 let (tree, _) = project
3356 .update(&mut cx, |project, cx| {
3357 project.find_or_create_local_worktree("/dir/b.rs", false, cx)
3358 })
3359 .await
3360 .unwrap();
3361 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3362 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3363 .await;
3364
3365 let buffer = project
3366 .update(&mut cx, |project, cx| {
3367 project.open_buffer(
3368 ProjectPath {
3369 worktree_id,
3370 path: Path::new("").into(),
3371 },
3372 cx,
3373 )
3374 })
3375 .await
3376 .unwrap();
3377
3378 let mut fake_server = fake_servers.next().await.unwrap();
3379 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params| {
3380 let params = params.text_document_position_params;
3381 assert_eq!(
3382 params.text_document.uri.to_file_path().unwrap(),
3383 Path::new("/dir/b.rs"),
3384 );
3385 assert_eq!(params.position, lsp::Position::new(0, 22));
3386
3387 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3388 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
3389 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3390 )))
3391 });
3392
3393 let mut definitions = project
3394 .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
3395 .await
3396 .unwrap();
3397
3398 assert_eq!(definitions.len(), 1);
3399 let definition = definitions.pop().unwrap();
3400 cx.update(|cx| {
3401 let target_buffer = definition.target_buffer.read(cx);
3402 assert_eq!(
3403 target_buffer
3404 .file()
3405 .unwrap()
3406 .as_local()
3407 .unwrap()
3408 .abs_path(cx),
3409 Path::new("/dir/a.rs"),
3410 );
3411 assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
3412 assert_eq!(
3413 list_worktrees(&project, cx),
3414 [("/dir/b.rs".as_ref(), false), ("/dir/a.rs".as_ref(), true)]
3415 );
3416
3417 drop(definition);
3418 });
3419 cx.read(|cx| {
3420 assert_eq!(
3421 list_worktrees(&project, cx),
3422 [("/dir/b.rs".as_ref(), false)]
3423 );
3424 });
3425
3426 fn list_worktrees<'a>(
3427 project: &'a ModelHandle<Project>,
3428 cx: &'a AppContext,
3429 ) -> Vec<(&'a Path, bool)> {
3430 project
3431 .read(cx)
3432 .worktrees(cx)
3433 .map(|worktree| {
3434 let worktree = worktree.read(cx);
3435 (
3436 worktree.as_local().unwrap().abs_path().as_ref(),
3437 worktree.is_weak(),
3438 )
3439 })
3440 .collect::<Vec<_>>()
3441 }
3442 }
3443
3444 #[gpui::test]
3445 async fn test_save_file(mut cx: gpui::TestAppContext) {
3446 let fs = FakeFs::new(cx.background());
3447 fs.insert_tree(
3448 "/dir",
3449 json!({
3450 "file1": "the old contents",
3451 }),
3452 )
3453 .await;
3454
3455 let project = Project::test(fs.clone(), &mut cx);
3456 let worktree_id = project
3457 .update(&mut cx, |p, cx| {
3458 p.find_or_create_local_worktree("/dir", false, cx)
3459 })
3460 .await
3461 .unwrap()
3462 .0
3463 .read_with(&cx, |tree, _| tree.id());
3464
3465 let buffer = project
3466 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3467 .await
3468 .unwrap();
3469 buffer
3470 .update(&mut cx, |buffer, cx| {
3471 assert_eq!(buffer.text(), "the old contents");
3472 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3473 buffer.save(cx)
3474 })
3475 .await
3476 .unwrap();
3477
3478 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3479 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3480 }
3481
3482 #[gpui::test]
3483 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
3484 let fs = FakeFs::new(cx.background());
3485 fs.insert_tree(
3486 "/dir",
3487 json!({
3488 "file1": "the old contents",
3489 }),
3490 )
3491 .await;
3492
3493 let project = Project::test(fs.clone(), &mut cx);
3494 let worktree_id = project
3495 .update(&mut cx, |p, cx| {
3496 p.find_or_create_local_worktree("/dir/file1", false, cx)
3497 })
3498 .await
3499 .unwrap()
3500 .0
3501 .read_with(&cx, |tree, _| tree.id());
3502
3503 let buffer = project
3504 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3505 .await
3506 .unwrap();
3507 buffer
3508 .update(&mut cx, |buffer, cx| {
3509 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3510 buffer.save(cx)
3511 })
3512 .await
3513 .unwrap();
3514
3515 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3516 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3517 }
3518
3519 #[gpui::test(retries = 5)]
3520 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3521 let dir = temp_tree(json!({
3522 "a": {
3523 "file1": "",
3524 "file2": "",
3525 "file3": "",
3526 },
3527 "b": {
3528 "c": {
3529 "file4": "",
3530 "file5": "",
3531 }
3532 }
3533 }));
3534
3535 let project = Project::test(Arc::new(RealFs), &mut cx);
3536 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3537
3538 let (tree, _) = project
3539 .update(&mut cx, |p, cx| {
3540 p.find_or_create_local_worktree(dir.path(), false, cx)
3541 })
3542 .await
3543 .unwrap();
3544 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3545
3546 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3547 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3548 async move { buffer.await.unwrap() }
3549 };
3550 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3551 tree.read_with(cx, |tree, _| {
3552 tree.entry_for_path(path)
3553 .expect(&format!("no entry for path {}", path))
3554 .id
3555 })
3556 };
3557
3558 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3559 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3560 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3561 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3562
3563 let file2_id = id_for_path("a/file2", &cx);
3564 let file3_id = id_for_path("a/file3", &cx);
3565 let file4_id = id_for_path("b/c/file4", &cx);
3566
3567 // Wait for the initial scan.
3568 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3569 .await;
3570
3571 // Create a remote copy of this worktree.
3572 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
3573 let (remote, load_task) = cx.update(|cx| {
3574 Worktree::remote(
3575 1,
3576 1,
3577 initial_snapshot.to_proto(&Default::default(), Default::default()),
3578 rpc.clone(),
3579 cx,
3580 )
3581 });
3582 load_task.await;
3583
3584 cx.read(|cx| {
3585 assert!(!buffer2.read(cx).is_dirty());
3586 assert!(!buffer3.read(cx).is_dirty());
3587 assert!(!buffer4.read(cx).is_dirty());
3588 assert!(!buffer5.read(cx).is_dirty());
3589 });
3590
3591 // Rename and delete files and directories.
3592 tree.flush_fs_events(&cx).await;
3593 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3594 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3595 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3596 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3597 tree.flush_fs_events(&cx).await;
3598
3599 let expected_paths = vec![
3600 "a",
3601 "a/file1",
3602 "a/file2.new",
3603 "b",
3604 "d",
3605 "d/file3",
3606 "d/file4",
3607 ];
3608
3609 cx.read(|app| {
3610 assert_eq!(
3611 tree.read(app)
3612 .paths()
3613 .map(|p| p.to_str().unwrap())
3614 .collect::<Vec<_>>(),
3615 expected_paths
3616 );
3617
3618 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3619 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3620 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3621
3622 assert_eq!(
3623 buffer2.read(app).file().unwrap().path().as_ref(),
3624 Path::new("a/file2.new")
3625 );
3626 assert_eq!(
3627 buffer3.read(app).file().unwrap().path().as_ref(),
3628 Path::new("d/file3")
3629 );
3630 assert_eq!(
3631 buffer4.read(app).file().unwrap().path().as_ref(),
3632 Path::new("d/file4")
3633 );
3634 assert_eq!(
3635 buffer5.read(app).file().unwrap().path().as_ref(),
3636 Path::new("b/c/file5")
3637 );
3638
3639 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3640 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3641 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3642 assert!(buffer5.read(app).file().unwrap().is_deleted());
3643 });
3644
3645 // Update the remote worktree. Check that it becomes consistent with the
3646 // local worktree.
3647 remote.update(&mut cx, |remote, cx| {
3648 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
3649 &initial_snapshot,
3650 1,
3651 1,
3652 0,
3653 true,
3654 );
3655 remote
3656 .as_remote_mut()
3657 .unwrap()
3658 .snapshot
3659 .apply_remote_update(update_message)
3660 .unwrap();
3661
3662 assert_eq!(
3663 remote
3664 .paths()
3665 .map(|p| p.to_str().unwrap())
3666 .collect::<Vec<_>>(),
3667 expected_paths
3668 );
3669 });
3670 }
3671
3672 #[gpui::test]
3673 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3674 let fs = FakeFs::new(cx.background());
3675 fs.insert_tree(
3676 "/the-dir",
3677 json!({
3678 "a.txt": "a-contents",
3679 "b.txt": "b-contents",
3680 }),
3681 )
3682 .await;
3683
3684 let project = Project::test(fs.clone(), &mut cx);
3685 let worktree_id = project
3686 .update(&mut cx, |p, cx| {
3687 p.find_or_create_local_worktree("/the-dir", false, cx)
3688 })
3689 .await
3690 .unwrap()
3691 .0
3692 .read_with(&cx, |tree, _| tree.id());
3693
3694 // Spawn multiple tasks to open paths, repeating some paths.
3695 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3696 (
3697 p.open_buffer((worktree_id, "a.txt"), cx),
3698 p.open_buffer((worktree_id, "b.txt"), cx),
3699 p.open_buffer((worktree_id, "a.txt"), cx),
3700 )
3701 });
3702
3703 let buffer_a_1 = buffer_a_1.await.unwrap();
3704 let buffer_a_2 = buffer_a_2.await.unwrap();
3705 let buffer_b = buffer_b.await.unwrap();
3706 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3707 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3708
3709 // There is only one buffer per path.
3710 let buffer_a_id = buffer_a_1.id();
3711 assert_eq!(buffer_a_2.id(), buffer_a_id);
3712
3713 // Open the same path again while it is still open.
3714 drop(buffer_a_1);
3715 let buffer_a_3 = project
3716 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3717 .await
3718 .unwrap();
3719
3720 // There's still only one buffer per path.
3721 assert_eq!(buffer_a_3.id(), buffer_a_id);
3722 }
3723
3724 #[gpui::test]
3725 async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
3726 use std::fs;
3727
3728 let dir = temp_tree(json!({
3729 "file1": "abc",
3730 "file2": "def",
3731 "file3": "ghi",
3732 }));
3733
3734 let project = Project::test(Arc::new(RealFs), &mut cx);
3735 let (worktree, _) = project
3736 .update(&mut cx, |p, cx| {
3737 p.find_or_create_local_worktree(dir.path(), false, cx)
3738 })
3739 .await
3740 .unwrap();
3741 let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
3742
3743 worktree.flush_fs_events(&cx).await;
3744 worktree
3745 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3746 .await;
3747
3748 let buffer1 = project
3749 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3750 .await
3751 .unwrap();
3752 let events = Rc::new(RefCell::new(Vec::new()));
3753
3754 // initially, the buffer isn't dirty.
3755 buffer1.update(&mut cx, |buffer, cx| {
3756 cx.subscribe(&buffer1, {
3757 let events = events.clone();
3758 move |_, _, event, _| events.borrow_mut().push(event.clone())
3759 })
3760 .detach();
3761
3762 assert!(!buffer.is_dirty());
3763 assert!(events.borrow().is_empty());
3764
3765 buffer.edit(vec![1..2], "", cx);
3766 });
3767
3768 // after the first edit, the buffer is dirty, and emits a dirtied event.
3769 buffer1.update(&mut cx, |buffer, cx| {
3770 assert!(buffer.text() == "ac");
3771 assert!(buffer.is_dirty());
3772 assert_eq!(
3773 *events.borrow(),
3774 &[language::Event::Edited, language::Event::Dirtied]
3775 );
3776 events.borrow_mut().clear();
3777 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
3778 });
3779
3780 // after saving, the buffer is not dirty, and emits a saved event.
3781 buffer1.update(&mut cx, |buffer, cx| {
3782 assert!(!buffer.is_dirty());
3783 assert_eq!(*events.borrow(), &[language::Event::Saved]);
3784 events.borrow_mut().clear();
3785
3786 buffer.edit(vec![1..1], "B", cx);
3787 buffer.edit(vec![2..2], "D", cx);
3788 });
3789
3790 // after editing again, the buffer is dirty, and emits another dirty event.
3791 buffer1.update(&mut cx, |buffer, cx| {
3792 assert!(buffer.text() == "aBDc");
3793 assert!(buffer.is_dirty());
3794 assert_eq!(
3795 *events.borrow(),
3796 &[
3797 language::Event::Edited,
3798 language::Event::Dirtied,
3799 language::Event::Edited,
3800 ],
3801 );
3802 events.borrow_mut().clear();
3803
3804 // TODO - currently, after restoring the buffer to its
3805 // previously-saved state, the is still considered dirty.
3806 buffer.edit([1..3], "", cx);
3807 assert!(buffer.text() == "ac");
3808 assert!(buffer.is_dirty());
3809 });
3810
3811 assert_eq!(*events.borrow(), &[language::Event::Edited]);
3812
3813 // When a file is deleted, the buffer is considered dirty.
3814 let events = Rc::new(RefCell::new(Vec::new()));
3815 let buffer2 = project
3816 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
3817 .await
3818 .unwrap();
3819 buffer2.update(&mut cx, |_, cx| {
3820 cx.subscribe(&buffer2, {
3821 let events = events.clone();
3822 move |_, _, event, _| events.borrow_mut().push(event.clone())
3823 })
3824 .detach();
3825 });
3826
3827 fs::remove_file(dir.path().join("file2")).unwrap();
3828 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
3829 assert_eq!(
3830 *events.borrow(),
3831 &[language::Event::Dirtied, language::Event::FileHandleChanged]
3832 );
3833
3834 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3835 let events = Rc::new(RefCell::new(Vec::new()));
3836 let buffer3 = project
3837 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
3838 .await
3839 .unwrap();
3840 buffer3.update(&mut cx, |_, cx| {
3841 cx.subscribe(&buffer3, {
3842 let events = events.clone();
3843 move |_, _, event, _| events.borrow_mut().push(event.clone())
3844 })
3845 .detach();
3846 });
3847
3848 worktree.flush_fs_events(&cx).await;
3849 buffer3.update(&mut cx, |buffer, cx| {
3850 buffer.edit(Some(0..0), "x", cx);
3851 });
3852 events.borrow_mut().clear();
3853 fs::remove_file(dir.path().join("file3")).unwrap();
3854 buffer3
3855 .condition(&cx, |_, _| !events.borrow().is_empty())
3856 .await;
3857 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
3858 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3859 }
3860
3861 #[gpui::test]
3862 async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
3863 use std::fs;
3864
3865 let initial_contents = "aaa\nbbbbb\nc\n";
3866 let dir = temp_tree(json!({ "the-file": initial_contents }));
3867
3868 let project = Project::test(Arc::new(RealFs), &mut cx);
3869 let (worktree, _) = project
3870 .update(&mut cx, |p, cx| {
3871 p.find_or_create_local_worktree(dir.path(), false, cx)
3872 })
3873 .await
3874 .unwrap();
3875 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
3876
3877 worktree
3878 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3879 .await;
3880
3881 let abs_path = dir.path().join("the-file");
3882 let buffer = project
3883 .update(&mut cx, |p, cx| {
3884 p.open_buffer((worktree_id, "the-file"), cx)
3885 })
3886 .await
3887 .unwrap();
3888
3889 // TODO
3890 // Add a cursor on each row.
3891 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
3892 // assert!(!buffer.is_dirty());
3893 // buffer.add_selection_set(
3894 // &(0..3)
3895 // .map(|row| Selection {
3896 // id: row as usize,
3897 // start: Point::new(row, 1),
3898 // end: Point::new(row, 1),
3899 // reversed: false,
3900 // goal: SelectionGoal::None,
3901 // })
3902 // .collect::<Vec<_>>(),
3903 // cx,
3904 // )
3905 // });
3906
3907 // Change the file on disk, adding two new lines of text, and removing
3908 // one line.
3909 buffer.read_with(&cx, |buffer, _| {
3910 assert!(!buffer.is_dirty());
3911 assert!(!buffer.has_conflict());
3912 });
3913 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3914 fs::write(&abs_path, new_contents).unwrap();
3915
3916 // Because the buffer was not modified, it is reloaded from disk. Its
3917 // contents are edited according to the diff between the old and new
3918 // file contents.
3919 buffer
3920 .condition(&cx, |buffer, _| buffer.text() == new_contents)
3921 .await;
3922
3923 buffer.update(&mut cx, |buffer, _| {
3924 assert_eq!(buffer.text(), new_contents);
3925 assert!(!buffer.is_dirty());
3926 assert!(!buffer.has_conflict());
3927
3928 // TODO
3929 // let cursor_positions = buffer
3930 // .selection_set(selection_set_id)
3931 // .unwrap()
3932 // .selections::<Point>(&*buffer)
3933 // .map(|selection| {
3934 // assert_eq!(selection.start, selection.end);
3935 // selection.start
3936 // })
3937 // .collect::<Vec<_>>();
3938 // assert_eq!(
3939 // cursor_positions,
3940 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
3941 // );
3942 });
3943
3944 // Modify the buffer
3945 buffer.update(&mut cx, |buffer, cx| {
3946 buffer.edit(vec![0..0], " ", cx);
3947 assert!(buffer.is_dirty());
3948 assert!(!buffer.has_conflict());
3949 });
3950
3951 // Change the file on disk again, adding blank lines to the beginning.
3952 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
3953
3954 // Because the buffer is modified, it doesn't reload from disk, but is
3955 // marked as having a conflict.
3956 buffer
3957 .condition(&cx, |buffer, _| buffer.has_conflict())
3958 .await;
3959 }
3960
3961 #[gpui::test]
3962 async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
3963 let fs = FakeFs::new(cx.background());
3964 fs.insert_tree(
3965 "/the-dir",
3966 json!({
3967 "a.rs": "
3968 fn foo(mut v: Vec<usize>) {
3969 for x in &v {
3970 v.push(1);
3971 }
3972 }
3973 "
3974 .unindent(),
3975 }),
3976 )
3977 .await;
3978
3979 let project = Project::test(fs.clone(), &mut cx);
3980 let (worktree, _) = project
3981 .update(&mut cx, |p, cx| {
3982 p.find_or_create_local_worktree("/the-dir", false, cx)
3983 })
3984 .await
3985 .unwrap();
3986 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
3987
3988 let buffer = project
3989 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3990 .await
3991 .unwrap();
3992
3993 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3994 let message = lsp::PublishDiagnosticsParams {
3995 uri: buffer_uri.clone(),
3996 diagnostics: vec![
3997 lsp::Diagnostic {
3998 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3999 severity: Some(DiagnosticSeverity::WARNING),
4000 message: "error 1".to_string(),
4001 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4002 location: lsp::Location {
4003 uri: buffer_uri.clone(),
4004 range: lsp::Range::new(
4005 lsp::Position::new(1, 8),
4006 lsp::Position::new(1, 9),
4007 ),
4008 },
4009 message: "error 1 hint 1".to_string(),
4010 }]),
4011 ..Default::default()
4012 },
4013 lsp::Diagnostic {
4014 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4015 severity: Some(DiagnosticSeverity::HINT),
4016 message: "error 1 hint 1".to_string(),
4017 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4018 location: lsp::Location {
4019 uri: buffer_uri.clone(),
4020 range: lsp::Range::new(
4021 lsp::Position::new(1, 8),
4022 lsp::Position::new(1, 9),
4023 ),
4024 },
4025 message: "original diagnostic".to_string(),
4026 }]),
4027 ..Default::default()
4028 },
4029 lsp::Diagnostic {
4030 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4031 severity: Some(DiagnosticSeverity::ERROR),
4032 message: "error 2".to_string(),
4033 related_information: Some(vec![
4034 lsp::DiagnosticRelatedInformation {
4035 location: lsp::Location {
4036 uri: buffer_uri.clone(),
4037 range: lsp::Range::new(
4038 lsp::Position::new(1, 13),
4039 lsp::Position::new(1, 15),
4040 ),
4041 },
4042 message: "error 2 hint 1".to_string(),
4043 },
4044 lsp::DiagnosticRelatedInformation {
4045 location: lsp::Location {
4046 uri: buffer_uri.clone(),
4047 range: lsp::Range::new(
4048 lsp::Position::new(1, 13),
4049 lsp::Position::new(1, 15),
4050 ),
4051 },
4052 message: "error 2 hint 2".to_string(),
4053 },
4054 ]),
4055 ..Default::default()
4056 },
4057 lsp::Diagnostic {
4058 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4059 severity: Some(DiagnosticSeverity::HINT),
4060 message: "error 2 hint 1".to_string(),
4061 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4062 location: lsp::Location {
4063 uri: buffer_uri.clone(),
4064 range: lsp::Range::new(
4065 lsp::Position::new(2, 8),
4066 lsp::Position::new(2, 17),
4067 ),
4068 },
4069 message: "original diagnostic".to_string(),
4070 }]),
4071 ..Default::default()
4072 },
4073 lsp::Diagnostic {
4074 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4075 severity: Some(DiagnosticSeverity::HINT),
4076 message: "error 2 hint 2".to_string(),
4077 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4078 location: lsp::Location {
4079 uri: buffer_uri.clone(),
4080 range: lsp::Range::new(
4081 lsp::Position::new(2, 8),
4082 lsp::Position::new(2, 17),
4083 ),
4084 },
4085 message: "original diagnostic".to_string(),
4086 }]),
4087 ..Default::default()
4088 },
4089 ],
4090 version: None,
4091 };
4092
4093 project
4094 .update(&mut cx, |p, cx| {
4095 p.update_diagnostics(message, &Default::default(), cx)
4096 })
4097 .unwrap();
4098 let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
4099
4100 assert_eq!(
4101 buffer
4102 .diagnostics_in_range::<_, Point>(0..buffer.len())
4103 .collect::<Vec<_>>(),
4104 &[
4105 DiagnosticEntry {
4106 range: Point::new(1, 8)..Point::new(1, 9),
4107 diagnostic: Diagnostic {
4108 severity: DiagnosticSeverity::WARNING,
4109 message: "error 1".to_string(),
4110 group_id: 0,
4111 is_primary: true,
4112 ..Default::default()
4113 }
4114 },
4115 DiagnosticEntry {
4116 range: Point::new(1, 8)..Point::new(1, 9),
4117 diagnostic: Diagnostic {
4118 severity: DiagnosticSeverity::HINT,
4119 message: "error 1 hint 1".to_string(),
4120 group_id: 0,
4121 is_primary: false,
4122 ..Default::default()
4123 }
4124 },
4125 DiagnosticEntry {
4126 range: Point::new(1, 13)..Point::new(1, 15),
4127 diagnostic: Diagnostic {
4128 severity: DiagnosticSeverity::HINT,
4129 message: "error 2 hint 1".to_string(),
4130 group_id: 1,
4131 is_primary: false,
4132 ..Default::default()
4133 }
4134 },
4135 DiagnosticEntry {
4136 range: Point::new(1, 13)..Point::new(1, 15),
4137 diagnostic: Diagnostic {
4138 severity: DiagnosticSeverity::HINT,
4139 message: "error 2 hint 2".to_string(),
4140 group_id: 1,
4141 is_primary: false,
4142 ..Default::default()
4143 }
4144 },
4145 DiagnosticEntry {
4146 range: Point::new(2, 8)..Point::new(2, 17),
4147 diagnostic: Diagnostic {
4148 severity: DiagnosticSeverity::ERROR,
4149 message: "error 2".to_string(),
4150 group_id: 1,
4151 is_primary: true,
4152 ..Default::default()
4153 }
4154 }
4155 ]
4156 );
4157
4158 assert_eq!(
4159 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4160 &[
4161 DiagnosticEntry {
4162 range: Point::new(1, 8)..Point::new(1, 9),
4163 diagnostic: Diagnostic {
4164 severity: DiagnosticSeverity::WARNING,
4165 message: "error 1".to_string(),
4166 group_id: 0,
4167 is_primary: true,
4168 ..Default::default()
4169 }
4170 },
4171 DiagnosticEntry {
4172 range: Point::new(1, 8)..Point::new(1, 9),
4173 diagnostic: Diagnostic {
4174 severity: DiagnosticSeverity::HINT,
4175 message: "error 1 hint 1".to_string(),
4176 group_id: 0,
4177 is_primary: false,
4178 ..Default::default()
4179 }
4180 },
4181 ]
4182 );
4183 assert_eq!(
4184 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4185 &[
4186 DiagnosticEntry {
4187 range: Point::new(1, 13)..Point::new(1, 15),
4188 diagnostic: Diagnostic {
4189 severity: DiagnosticSeverity::HINT,
4190 message: "error 2 hint 1".to_string(),
4191 group_id: 1,
4192 is_primary: false,
4193 ..Default::default()
4194 }
4195 },
4196 DiagnosticEntry {
4197 range: Point::new(1, 13)..Point::new(1, 15),
4198 diagnostic: Diagnostic {
4199 severity: DiagnosticSeverity::HINT,
4200 message: "error 2 hint 2".to_string(),
4201 group_id: 1,
4202 is_primary: false,
4203 ..Default::default()
4204 }
4205 },
4206 DiagnosticEntry {
4207 range: Point::new(2, 8)..Point::new(2, 17),
4208 diagnostic: Diagnostic {
4209 severity: DiagnosticSeverity::ERROR,
4210 message: "error 2".to_string(),
4211 group_id: 1,
4212 is_primary: true,
4213 ..Default::default()
4214 }
4215 }
4216 ]
4217 );
4218 }
4219
4220 #[gpui::test]
4221 async fn test_rename(mut cx: gpui::TestAppContext) {
4222 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4223 let language = Arc::new(Language::new(
4224 LanguageConfig {
4225 name: "Rust".to_string(),
4226 path_suffixes: vec!["rs".to_string()],
4227 language_server: Some(language_server_config),
4228 ..Default::default()
4229 },
4230 Some(tree_sitter_rust::language()),
4231 ));
4232
4233 let fs = FakeFs::new(cx.background());
4234 fs.insert_tree(
4235 "/dir",
4236 json!({
4237 "one.rs": "const ONE: usize = 1;",
4238 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4239 }),
4240 )
4241 .await;
4242
4243 let project = Project::test(fs.clone(), &mut cx);
4244 project.update(&mut cx, |project, _| {
4245 Arc::get_mut(&mut project.languages).unwrap().add(language);
4246 });
4247
4248 let (tree, _) = project
4249 .update(&mut cx, |project, cx| {
4250 project.find_or_create_local_worktree("/dir", false, cx)
4251 })
4252 .await
4253 .unwrap();
4254 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
4255 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4256 .await;
4257
4258 let buffer = project
4259 .update(&mut cx, |project, cx| {
4260 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
4261 })
4262 .await
4263 .unwrap();
4264
4265 let mut fake_server = fake_servers.next().await.unwrap();
4266
4267 let response = project.update(&mut cx, |project, cx| {
4268 project.prepare_rename(buffer.clone(), 7, cx)
4269 });
4270 fake_server
4271 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params| {
4272 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
4273 assert_eq!(params.position, lsp::Position::new(0, 7));
4274 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4275 lsp::Position::new(0, 6),
4276 lsp::Position::new(0, 9),
4277 )))
4278 })
4279 .next()
4280 .await
4281 .unwrap();
4282 let range = response.await.unwrap().unwrap();
4283 let range = buffer.read_with(&cx, |buffer, _| range.to_offset(buffer));
4284 assert_eq!(range, 6..9);
4285
4286 let response = project.update(&mut cx, |project, cx| {
4287 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
4288 });
4289 fake_server
4290 .handle_request::<lsp::request::Rename, _>(|params| {
4291 assert_eq!(
4292 params.text_document_position.text_document.uri.as_str(),
4293 "file:///dir/one.rs"
4294 );
4295 assert_eq!(
4296 params.text_document_position.position,
4297 lsp::Position::new(0, 7)
4298 );
4299 assert_eq!(params.new_name, "THREE");
4300 Some(lsp::WorkspaceEdit {
4301 changes: Some(
4302 [
4303 (
4304 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
4305 vec![lsp::TextEdit::new(
4306 lsp::Range::new(
4307 lsp::Position::new(0, 6),
4308 lsp::Position::new(0, 9),
4309 ),
4310 "THREE".to_string(),
4311 )],
4312 ),
4313 (
4314 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
4315 vec![
4316 lsp::TextEdit::new(
4317 lsp::Range::new(
4318 lsp::Position::new(0, 24),
4319 lsp::Position::new(0, 27),
4320 ),
4321 "THREE".to_string(),
4322 ),
4323 lsp::TextEdit::new(
4324 lsp::Range::new(
4325 lsp::Position::new(0, 35),
4326 lsp::Position::new(0, 38),
4327 ),
4328 "THREE".to_string(),
4329 ),
4330 ],
4331 ),
4332 ]
4333 .into_iter()
4334 .collect(),
4335 ),
4336 ..Default::default()
4337 })
4338 })
4339 .next()
4340 .await
4341 .unwrap();
4342 let mut transaction = response.await.unwrap().0;
4343 assert_eq!(transaction.len(), 2);
4344 assert_eq!(
4345 transaction
4346 .remove_entry(&buffer)
4347 .unwrap()
4348 .0
4349 .read_with(&cx, |buffer, _| buffer.text()),
4350 "const THREE: usize = 1;"
4351 );
4352 assert_eq!(
4353 transaction
4354 .into_keys()
4355 .next()
4356 .unwrap()
4357 .read_with(&cx, |buffer, _| buffer.text()),
4358 "const TWO: usize = one::THREE + one::THREE;"
4359 );
4360 }
4361}