pub mod fs;
mod ignore;
mod lsp_command;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, HashMap, HashSet};
use futures::Future;
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
    UpgradeModelHandle, WeakModelHandle,
};
use language::{
    point_from_lsp,
    proto::{deserialize_anchor, serialize_anchor},
    range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, Completion, CompletionLabel,
    Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
    ToLspPosition, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{DiagnosticSeverity, LanguageServer};
use lsp_command::*;
use postage::{broadcast, prelude::Stream, sink::Sink, watch};
use smol::block_on;
use std::{
    convert::TryInto,
    ops::Range,
    path::{Path, PathBuf},
    sync::{atomic::AtomicBool, Arc},
    time::Instant,
};
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use fs::*;
pub use worktree::*;

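/// A collection of worktrees (local or remote), together with the state needed
/// to collaborate on them: open buffers, language servers, diagnostics, and the
/// RPC client used to register and share the project.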
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntry>,
    languages: Arc<LanguageRegistry>,
    language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
    client: Arc<client::Client>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    language_servers_with_diagnostics_running: isize,
    open_buffers: HashMap<u64, OpenBuffer>,
    opened_buffer: broadcast::Sender<()>,
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
}

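/// A buffer the project knows about: either a live (weak) handle to an open
/// buffer, or the operations received for a buffer that is still loading.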
enum OpenBuffer {
    Loaded(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}

enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

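/// Whether this project lives on the local host or belongs to a remote peer,
/// along with the connection state needed in each case.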
enum ProjectClientState {
    Local {
        is_shared: bool,
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
    },
}

#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}

#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntry>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
}

#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}

#[derive(Debug)]
pub struct Definition {
    pub target_buffer: ModelHandle<Buffer>,
    pub target_range: Range<language::Anchor>,
}

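/// The per-buffer transactions produced by an operation that can touch several
/// buffers at once, such as applying a workspace edit or formatting a set of
/// buffers.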
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
        let mut this = Self {
            error_count: 0,
            warning_count: 0,
            info_count: 0,
            hint_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    DiagnosticSeverity::INFORMATION => this.info_count += 1,
                    DiagnosticSeverity::HINT => this.hint_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
            info_count: self.info_count as u32,
            hint_count: self.hint_count as u32,
        }
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ProjectEntry {
    pub worktree_id: WorktreeId,
    pub entry_id: usize,
}

impl Project {
    pub fn init(client: &Arc<Client>) {
        client.add_entity_message_handler(Self::handle_add_collaborator);
        client.add_entity_message_handler(Self::handle_buffer_reloaded);
        client.add_entity_message_handler(Self::handle_buffer_saved);
        client.add_entity_message_handler(Self::handle_close_buffer);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
        client.add_entity_message_handler(Self::handle_remove_collaborator);
        client.add_entity_message_handler(Self::handle_share_worktree);
        client.add_entity_message_handler(Self::handle_unregister_worktree);
        client.add_entity_message_handler(Self::handle_unshare_project);
        client.add_entity_message_handler(Self::handle_update_buffer_file);
        client.add_entity_message_handler(Self::handle_update_buffer);
        client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
        client.add_entity_message_handler(Self::handle_update_worktree);
        client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_entity_request_handler(Self::handle_apply_code_action);
        client.add_entity_request_handler(Self::handle_format_buffers);
        client.add_entity_request_handler(Self::handle_get_code_actions);
        client.add_entity_request_handler(Self::handle_get_completions);
        client.add_entity_request_handler(Self::handle_get_definition);
        client.add_entity_request_handler(Self::handle_prepare_rename);
        client.add_entity_request_handler(Self::handle_perform_rename);
        client.add_entity_request_handler(Self::handle_open_buffer);
        client.add_entity_request_handler(Self::handle_save_buffer);
    }

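    /// Creates a project that operates on the local filesystem. A background
    /// task registers the project with the server whenever the client is
    /// connected, so it can later be shared with collaborators.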
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.recv().await {
                            if let Some(this) = this.upgrade(&cx) {
                                let remote_id = if let client::Status::Connected { .. } = status {
                                    let response = rpc.request(proto::RegisterProject {}).await?;
                                    Some(response.project_id)
                                } else {
                                    None
                                };

                                if let Some(project_id) = remote_id {
                                    let mut registrations = Vec::new();
                                    this.update(&mut cx, |this, cx| {
                                        for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                                            registrations.push(worktree.update(
                                                cx,
                                                |worktree, cx| {
                                                    let worktree = worktree.as_local_mut().unwrap();
                                                    worktree.register(project_id, cx)
                                                },
                                            ));
                                        }
                                    });
                                    for registration in registrations {
                                        registration.await?;
                                    }
                                }
                                this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                open_buffers: Default::default(),
                loading_buffers: Default::default(),
                shared_buffers: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: broadcast::channel(1).0,
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
            }
        })
    }

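    /// Joins a project hosted by another peer, identified by its `remote_id`
    /// on the server. Worktrees, collaborators, and their user records are
    /// fetched as part of joining.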
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(&cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let this = cx.add_model(|cx| {
            let mut this = Self {
                worktrees: Vec::new(),
                open_buffers: Default::default(),
                loading_buffers: Default::default(),
                opened_buffer: broadcast::channel(1).0,
                shared_buffers: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client,
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::new());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn shared_buffer(&self, peer_id: PeerId, remote_id: u64) -> Option<ModelHandle<Buffer>> {
        self.shared_buffers
            .get(&peer_id)
            .and_then(|buffers| buffers.get(&remote_id))
            .cloned()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn has_buffered_operations(&self) -> bool {
        self.open_buffers
            .values()
            .any(|buffer| matches!(buffer, OpenBuffer::Loading(_)))
    }

    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }

    fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            *remote_id_tx.borrow_mut() = remote_id;
        }

        self.subscriptions.clear();
        if let Some(remote_id) = remote_id {
            self.subscriptions
                .push(self.client.add_model_for_remote_entity(remote_id, cx));
        }
    }

    pub fn remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }

    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.recv().await;
            }
        }
    }

    pub fn replica_id(&self) -> ReplicaId {
        match &self.client_state {
            ProjectClientState::Local { .. } => 0,
            ProjectClientState::Remote { replica_id, .. } => *replica_id,
        }
    }

    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }

    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }

    pub fn worktree_for_id(
        &self,
        id: WorktreeId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).id() == id)
    }

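    /// Makes a local project visible to collaborators: registers the share
    /// with the server, then shares each worktree's contents.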
    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, _| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;
                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;
            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }

    pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, _| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = false;
                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't unshare a remote project"))
                }
            })?;

            rpc.send(proto::UnshareProject { project_id })?;
            this.update(&mut cx, |this, cx| {
                this.collaborators.clear();
                this.shared_buffers.clear();
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                    });
                }
                cx.notify()
            });
            Ok(())
        })
    }

    pub fn is_read_only(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => false,
            ProjectClientState::Remote {
                sharing_has_stopped,
                ..
            } => *sharing_has_stopped,
        }
    }

    pub fn is_local(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => true,
            ProjectClientState::Remote { .. } => false,
        }
    }

    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }

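    /// Opens the buffer for the given path, reusing an already-open buffer if
    /// one exists and de-duplicating concurrent loads of the same path.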
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        if this.loading_buffers.is_empty() {
                            this.open_buffers
                                .retain(|_, buffer| matches!(buffer, OpenBuffer::Loaded(_)))
                        }

                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.recv().await;
            }
        })
    }

    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        let worktree = worktree.downgrade();
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            let worktree = worktree
                .upgrade(&cx)
                .ok_or_else(|| anyhow!("worktree was removed"))?;
            this.update(&mut cx, |this, cx| {
                this.register_buffer(&buffer, Some(&worktree), cx)
            })?;
            Ok(buffer)
        })
    }

    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBuffer {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }

    fn open_local_buffer_from_lsp_path(
        &mut self,
        abs_path: lsp::Url,
        lang_name: String,
        lang_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, true, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers
                        .insert((worktree.read(cx).id(), lang_name), lang_server);
                });
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }

    pub fn save_buffer_as(
        &self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
            });
            Ok(())
        })
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
        let path = path.into();
        if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
            self.open_buffers.iter().any(|(_, buffer)| {
                if let Some(buffer) = buffer.upgrade(cx) {
                    if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                        if file.worktree == worktree && file.path() == &path.path {
                            return true;
                        }
                    }
                }
                false
            })
        } else {
            false
        }
    }

    fn get_open_buffer(
        &mut self,
        path: &ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Option<ModelHandle<Buffer>> {
        let mut result = None;
        let worktree = self.worktree_for_id(path.worktree_id, cx)?;
        self.open_buffers.retain(|_, buffer| {
            if let Some(buffer) = buffer.upgrade(cx) {
                if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                    if file.worktree == worktree && file.path() == &path.path {
                        result = Some(buffer);
                    }
                }
                true
            } else {
                false
            }
        });
        result
    }

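    /// Records a newly opened buffer in `open_buffers`, applies any operations
    /// that were received while it was still loading, and assigns a language
    /// (and language server) based on the buffer's path.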
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        match self.open_buffers.insert(
            buffer.read(cx).remote_id(),
            OpenBuffer::Loaded(buffer.downgrade()),
        ) {
            None => {}
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            Some(OpenBuffer::Loaded(_)) => Err(anyhow!("registered the same buffer twice"))?,
        }
        self.assign_language_to_buffer(&buffer, worktree, cx);
        Ok(())
    }

    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        let (path, full_path) = {
            let file = buffer.read(cx).file()?;
            (file.path().clone(), file.full_path(cx))
        };

        // If the buffer has a language, set it and start/assign the language server.
        if let Some(language) = self.languages.select_language(&full_path) {
            buffer.update(cx, |buffer, cx| {
                buffer.set_language(Some(language.clone()), cx);
            });

            // For local worktrees, start a language server if needed.
            // Also assign the language server and any previously stored diagnostics to the buffer.
            if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
                let worktree_id = local_worktree.id();
                let worktree_abs_path = local_worktree.abs_path().clone();

                let language_server = match self
                    .language_servers
                    .entry((worktree_id, language.name().to_string()))
                {
                    hash_map::Entry::Occupied(e) => Some(e.get().clone()),
                    hash_map::Entry::Vacant(e) => Self::start_language_server(
                        self.client.clone(),
                        language.clone(),
                        &worktree_abs_path,
                        cx,
                    )
                    .map(|server| e.insert(server).clone()),
                };

                buffer.update(cx, |buffer, cx| {
                    buffer.set_language_server(language_server, cx);
                });
            }
        }

        if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
            if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
                buffer.update(cx, |buffer, cx| {
                    buffer.update_diagnostics(diagnostics, None, cx).log_err();
                });
            }
        }

        None
    }

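    /// Starts a language server for the given language and worktree and wires
    /// up its diagnostics: `PublishDiagnostics` notifications and disk-based
    /// diagnostic progress are forwarded to the project and, when the project
    /// has a remote id, broadcast over RPC.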
    fn start_language_server(
        rpc: Arc<Client>,
        language: Arc<Language>,
        worktree_path: &Path,
        cx: &mut ModelContext<Self>,
    ) -> Option<Arc<LanguageServer>> {
        enum LspEvent {
            DiagnosticsStart,
            DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
            DiagnosticsFinish,
        }

        let language_server = language
            .start_server(worktree_path, cx)
            .log_err()
            .flatten()?;
        let disk_based_sources = language
            .disk_based_diagnostic_sources()
            .cloned()
            .unwrap_or_default();
        let disk_based_diagnostics_progress_token =
            language.disk_based_diagnostics_progress_token().cloned();
        let has_disk_based_diagnostic_progress_token =
            disk_based_diagnostics_progress_token.is_some();
        let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();

        // Listen for `PublishDiagnostics` notifications.
        language_server
            .on_notification::<lsp::notification::PublishDiagnostics, _>({
                let diagnostics_tx = diagnostics_tx.clone();
                move |params| {
                    if !has_disk_based_diagnostic_progress_token {
                        block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                    }
                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params))).ok();
                    if !has_disk_based_diagnostic_progress_token {
                        block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                    }
                }
            })
            .detach();

        // Listen for `Progress` notifications. Send an event when the language server
        // transitions between running jobs and not running any jobs.
        let mut running_jobs_for_this_server: i32 = 0;
        language_server
            .on_notification::<lsp::notification::Progress, _>(move |params| {
                let token = match params.token {
                    lsp::NumberOrString::Number(_) => None,
                    lsp::NumberOrString::String(token) => Some(token),
                };

                if token == disk_based_diagnostics_progress_token {
                    match params.value {
                        lsp::ProgressParamsValue::WorkDone(progress) => match progress {
                            lsp::WorkDoneProgress::Begin(_) => {
                                running_jobs_for_this_server += 1;
                                if running_jobs_for_this_server == 1 {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                                }
                            }
                            lsp::WorkDoneProgress::End(_) => {
                                running_jobs_for_this_server -= 1;
                                if running_jobs_for_this_server == 0 {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                                }
                            }
                            _ => {}
                        },
                    }
                }
            })
            .detach();

        // Process all the LSP events.
        cx.spawn_weak(|this, mut cx| async move {
            while let Ok(message) = diagnostics_rx.recv().await {
                let this = this.upgrade(&cx)?;
                match message {
                    LspEvent::DiagnosticsStart => {
                        this.update(&mut cx, |this, cx| {
                            this.disk_based_diagnostics_started(cx);
                            if let Some(project_id) = this.remote_id() {
                                rpc.send(proto::DiskBasedDiagnosticsUpdating { project_id })
                                    .log_err();
                            }
                        });
                    }
                    LspEvent::DiagnosticsUpdate(mut params) => {
                        language.process_diagnostics(&mut params);
                        this.update(&mut cx, |this, cx| {
                            this.update_diagnostics(params, &disk_based_sources, cx)
                                .log_err();
                        });
                    }
                    LspEvent::DiagnosticsFinish => {
                        this.update(&mut cx, |this, cx| {
                            this.disk_based_diagnostics_finished(cx);
                            if let Some(project_id) = this.remote_id() {
                                rpc.send(proto::DiskBasedDiagnosticsUpdated { project_id })
                                    .log_err();
                            }
                        });
                    }
                }
            }
            Some(())
        })
        .detach();

        Some(language_server)
    }

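    /// Converts a `textDocument/publishDiagnostics` notification into the
    /// project's diagnostic entries, grouping each primary diagnostic with its
    /// related information, and stores them on the corresponding worktree.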
    pub fn update_diagnostics(
        &mut self,
        params: lsp::PublishDiagnosticsParams,
        disk_based_sources: &HashSet<String>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let abs_path = params
            .uri
            .to_file_path()
            .map_err(|_| anyhow!("URI is not a file"))?;
        let mut next_group_id = 0;
        let mut diagnostics = Vec::default();
        let mut primary_diagnostic_group_ids = HashMap::default();
        let mut sources_by_group_id = HashMap::default();
        let mut supporting_diagnostic_severities = HashMap::default();
        for diagnostic in &params.diagnostics {
            let source = diagnostic.source.as_ref();
            let code = diagnostic.code.as_ref().map(|code| match code {
                lsp::NumberOrString::Number(code) => code.to_string(),
                lsp::NumberOrString::String(code) => code.clone(),
            });
            let range = range_from_lsp(diagnostic.range);
            let is_supporting = diagnostic
                .related_information
                .as_ref()
                .map_or(false, |infos| {
                    infos.iter().any(|info| {
                        primary_diagnostic_group_ids.contains_key(&(
                            source,
                            code.clone(),
                            range_from_lsp(info.location.range),
                        ))
                    })
                });

            if is_supporting {
                if let Some(severity) = diagnostic.severity {
                    supporting_diagnostic_severities
                        .insert((source, code.clone(), range), severity);
                }
            } else {
                let group_id = post_inc(&mut next_group_id);
                let is_disk_based =
                    source.map_or(false, |source| disk_based_sources.contains(source));

                sources_by_group_id.insert(group_id, source);
                primary_diagnostic_group_ids
                    .insert((source, code.clone(), range.clone()), group_id);

                diagnostics.push(DiagnosticEntry {
                    range,
                    diagnostic: Diagnostic {
                        code: code.clone(),
                        severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
                        message: diagnostic.message.clone(),
                        group_id,
                        is_primary: true,
                        is_valid: true,
                        is_disk_based,
                    },
                });
                if let Some(infos) = &diagnostic.related_information {
                    for info in infos {
                        if info.location.uri == params.uri && !info.message.is_empty() {
                            let range = range_from_lsp(info.location.range);
                            diagnostics.push(DiagnosticEntry {
                                range,
                                diagnostic: Diagnostic {
                                    code: code.clone(),
                                    severity: DiagnosticSeverity::INFORMATION,
                                    message: info.message.clone(),
                                    group_id,
                                    is_primary: false,
                                    is_valid: true,
                                    is_disk_based,
                                },
                            });
                        }
                    }
                }
            }
        }

        for entry in &mut diagnostics {
            let diagnostic = &mut entry.diagnostic;
            if !diagnostic.is_primary {
                let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
                if let Some(&severity) = supporting_diagnostic_severities.get(&(
                    source,
                    diagnostic.code.clone(),
                    entry.range.clone(),
                )) {
                    diagnostic.severity = severity;
                }
            }
        }

        self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
        Ok(())
    }

    pub fn update_diagnostic_entries(
        &mut self,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };

        for buffer in self.open_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| *file.path() == project_path.path)
                {
                    buffer.update(cx, |buffer, cx| {
                        buffer.update_diagnostics(diagnostics.clone(), version, cx)
                    })?;
                    break;
                }
            }
        }
        worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(project_path.path.clone(), diagnostics, cx)
        })?;
        cx.emit(Event::DiagnosticsUpdated(project_path));
        Ok(())
    }

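    /// Formats the given buffers. Local buffers are formatted by sending a
    /// `textDocument/formatting` request to their language servers; buffers
    /// belonging to a remote project host are formatted over RPC.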
    pub fn format(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<ProjectTransaction>> {
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            let worktree;
            if let Some(file) = File::from_dyn(buffer.file()) {
                worktree = file.worktree.clone();
                if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
                    let lang_server;
                    if let Some(lang) = buffer.language() {
                        if let Some(server) = self
                            .language_servers
                            .get(&(worktree.read(cx).id(), lang.name().to_string()))
                        {
                            lang_server = server.clone();
                        } else {
                            return Task::ready(Ok(Default::default()));
                        };
                    } else {
                        return Task::ready(Ok(Default::default()));
                    }

                    local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
                } else {
                    remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                }
            } else {
                return Task::ready(Ok(Default::default()));
            }
        }

        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::FormatBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            for (buffer, buffer_abs_path, lang_server) in local_buffers {
                let lsp_edits = lang_server
                    .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
                        text_document: lsp::TextDocumentIdentifier::new(
                            lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                        ),
                        options: Default::default(),
                        work_done_progress_params: Default::default(),
                    })
                    .await?;

                if let Some(lsp_edits) = lsp_edits {
                    let edits = buffer
                        .update(&mut cx, |buffer, cx| {
                            buffer.edits_from_lsp(lsp_edits, None, cx)
                        })
                        .await?;
                    buffer.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            project_transaction.0.insert(cx.handle(), transaction);
                        }
                    });
                }
            }

            Ok(project_transaction)
        })
    }

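    /// Returns the definitions of the symbol at the given position, resolving
    /// the request either through the buffer's local language server or, for
    /// remote projects, through the host over RPC.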
    pub fn definition<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Definition>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_name;
            let lang_server;
            if let Some(lang) = source_buffer.language() {
                lang_name = lang.name().to_string();
                if let Some(server) = self
                    .language_servers
                    .get(&(worktree.read(cx).id(), lang_name.clone()))
                {
                    lang_server = server.clone();
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            } else {
                return Task::ready(Ok(Default::default()));
            }

            cx.spawn(|this, mut cx| async move {
                let response = lang_server
                    .request::<lsp::request::GotoDefinition>(lsp::GotoDefinitionParams {
                        text_document_position_params: lsp::TextDocumentPositionParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                            ),
                            position: lsp::Position::new(position.row, position.column),
                        },
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await?;

                let mut definitions = Vec::new();
                if let Some(response) = response {
                    let mut unresolved_locations = Vec::new();
                    match response {
                        lsp::GotoDefinitionResponse::Scalar(loc) => {
                            unresolved_locations.push((loc.uri, loc.range));
                        }
                        lsp::GotoDefinitionResponse::Array(locs) => {
                            unresolved_locations.extend(locs.into_iter().map(|l| (l.uri, l.range)));
                        }
                        lsp::GotoDefinitionResponse::Link(links) => {
                            unresolved_locations.extend(
                                links
                                    .into_iter()
                                    .map(|l| (l.target_uri, l.target_selection_range)),
                            );
                        }
                    }

                    for (target_uri, target_range) in unresolved_locations {
                        let target_buffer_handle = this
                            .update(&mut cx, |this, cx| {
                                this.open_local_buffer_from_lsp_path(
                                    target_uri,
                                    lang_name.clone(),
                                    lang_server.clone(),
                                    cx,
                                )
                            })
                            .await?;

                        cx.read(|cx| {
                            let target_buffer = target_buffer_handle.read(cx);
                            let target_start = target_buffer
                                .clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
                            let target_end = target_buffer
                                .clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
                            definitions.push(Definition {
                                target_buffer: target_buffer_handle,
                                target_range: target_buffer.anchor_after(target_start)
                                    ..target_buffer.anchor_before(target_end),
                            });
                        });
                    }
                }

                Ok(definitions)
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::GetDefinition {
                project_id,
                buffer_id: source_buffer.remote_id(),
                position: Some(serialize_anchor(&source_buffer.anchor_before(position))),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client.request(request).await?;
                let mut definitions = Vec::new();
                for definition in response.definitions {
                    let buffer = definition.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let target_start = definition
                        .target_start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let target_end = definition
                        .target_end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    definitions.push(Definition {
                        target_buffer,
                        target_range: target_start..target_end,
                    })
                }

                Ok(definitions)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }

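    /// Requests completions at the given position, either from the local
    /// language server or from the remote project host, and converts the
    /// results into anchored `Completion` values.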
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
                server
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            position.to_lsp_position(),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
                                lsp::CompletionTextEdit::Edit(edit) => {
                                    (range_from_lsp(edit.range), edit.new_text.clone())
                                }
                                lsp::CompletionTextEdit::InsertAndReplace(_) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                            let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
                            if clipped_start == old_range.start && clipped_end == old_range.end {
                                Some(Completion {
                                    old_range: this.anchor_before(old_range.start)
                                        ..this.anchor_after(old_range.end),
                                    new_text,
                                    label: language
                                        .as_ref()
                                        .and_then(|l| l.label_for_completion(&lsp_completion))
                                        .unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
                                    lsp_completion,
                                })
                            } else {
                                None
                            }
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: (&source_buffer.version()).into(),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;

                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(response.version.into())
                    })
                    .await;

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }

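    /// Applies any additional text edits that accompany the given completion,
    /// resolving the completion item through the language server first when
    /// the project is local; remote projects delegate to the host.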
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let lang_server = if let Some(language_server) = buffer.language_server() {
                language_server.clone()
            } else {
                return Task::ready(Err(anyhow!("buffer does not have a language server")));
            };

            cx.spawn(|_, mut cx| async move {
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = buffer_handle
                        .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }

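    /// Fetches the code actions (quick fixes and refactorings) that apply to
    /// the given range, from the local language server or the remote host.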
    pub fn code_actions<T: ToOffset>(
        &self,
        buffer_handle: &ModelHandle<Buffer>,
        range: Range<T>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<CodeAction>>> {
        let buffer_handle = buffer_handle.clone();
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Ok(Default::default()));
        };
        let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_name;
            let lang_server;
            if let Some(lang) = buffer.language() {
                lang_name = lang.name().to_string();
                if let Some(server) = self
                    .language_servers
                    .get(&(worktree.read(cx).id(), lang_name.clone()))
                {
                    lang_server = server.clone();
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            } else {
                return Task::ready(Ok(Default::default()));
            }

            let lsp_range = lsp::Range::new(
                range.start.to_point_utf16(buffer).to_lsp_position(),
                range.end.to_point_utf16(buffer).to_lsp_position(),
            );
            cx.foreground().spawn(async move {
                Ok(lang_server
                    .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
                        text_document: lsp::TextDocumentIdentifier::new(
                            lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                        ),
                        range: lsp_range,
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                        context: lsp::CodeActionContext {
                            diagnostics: Default::default(),
                            only: Some(vec![
                                lsp::CodeActionKind::QUICKFIX,
                                lsp::CodeActionKind::REFACTOR,
                                lsp::CodeActionKind::REFACTOR_EXTRACT,
                            ]),
                        },
                    })
                    .await?
                    .unwrap_or_default()
                    .into_iter()
                    .filter_map(|entry| {
                        if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
                            Some(CodeAction {
                                range: range.clone(),
                                lsp_action,
                            })
                        } else {
                            None
                        }
                    })
                    .collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc
                    .request(proto::GetCodeActions {
                        project_id,
                        buffer_id,
                        start: Some(language::proto::serialize_anchor(&range.start)),
                        end: Some(language::proto::serialize_anchor(&range.end)),
                    })
                    .await?;

                buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(response.version.into())
                    })
                    .await;

                response
                    .actions
                    .into_iter()
                    .map(language::proto::deserialize_code_action)
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }

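    /// Applies a previously fetched code action: resolves it with the language
    /// server if needed, then applies the resulting workspace edit, or defers
    /// the whole operation to the remote host.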
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            let lang_name = if let Some(lang) = buffer.language() {
                lang.name().to_string()
            } else {
                return Task::ready(Ok(Default::default()));
            };
            let lang_server = if let Some(language_server) = buffer.language_server() {
                language_server.clone()
            } else {
                return Task::ready(Err(anyhow!("buffer does not have a language server")));
            };
            let range = action.range.to_point_utf16(buffer);

            cx.spawn(|this, mut cx| async move {
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    *lsp_range = serde_json::to_value(&lsp::Range::new(
                        range.start.to_lsp_position(),
                        range.end.to_lsp_position(),
                    ))
                    .unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                if let Some(edit) = action.lsp_action.edit {
                    Self::deserialize_workspace_edit(
                        this,
                        edit,
                        push_to_history,
                        lang_name,
                        lang_server,
                        &mut cx,
                    )
                    .await
                } else {
                    Ok(ProjectTransaction::default())
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }

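    /// Applies an LSP `WorkspaceEdit` to the project: creates, renames, and
    /// deletes files as requested, applies text edits to the affected buffers,
    /// and collects the resulting transactions into a `ProjectTransaction`.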
1699 async fn deserialize_workspace_edit(
1700 this: ModelHandle<Self>,
1701 edit: lsp::WorkspaceEdit,
1702 push_to_history: bool,
1703 language_name: String,
1704 language_server: Arc<LanguageServer>,
1705 cx: &mut AsyncAppContext,
1706 ) -> Result<ProjectTransaction> {
1707 let fs = this.read_with(cx, |this, _| this.fs.clone());
1708 let mut operations = Vec::new();
1709 if let Some(document_changes) = edit.document_changes {
1710 match document_changes {
1711 lsp::DocumentChanges::Edits(edits) => {
1712 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
1713 }
1714 lsp::DocumentChanges::Operations(ops) => operations = ops,
1715 }
1716 } else if let Some(changes) = edit.changes {
1717 operations.extend(changes.into_iter().map(|(uri, edits)| {
1718 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1719 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1720 uri,
1721 version: None,
1722 },
1723 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1724 })
1725 }));
1726 }
1727
1728 let mut project_transaction = ProjectTransaction::default();
1729 for operation in operations {
1730 match operation {
1731 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1732 let abs_path = op
1733 .uri
1734 .to_file_path()
1735 .map_err(|_| anyhow!("can't convert URI to path"))?;
1736
1737 if let Some(parent_path) = abs_path.parent() {
1738 fs.create_dir(parent_path).await?;
1739 }
1740 if abs_path.ends_with("/") {
1741 fs.create_dir(&abs_path).await?;
1742 } else {
1743 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
1744 .await?;
1745 }
1746 }
1747 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1748 let source_abs_path = op
1749 .old_uri
1750 .to_file_path()
1751 .map_err(|_| anyhow!("can't convert URI to path"))?;
1752 let target_abs_path = op
1753 .new_uri
1754 .to_file_path()
1755 .map_err(|_| anyhow!("can't convert URI to path"))?;
1756 fs.rename(
1757 &source_abs_path,
1758 &target_abs_path,
1759 op.options.map(Into::into).unwrap_or_default(),
1760 )
1761 .await?;
1762 }
1763 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1764 let abs_path = op
1765 .uri
1766 .to_file_path()
1767 .map_err(|_| anyhow!("can't convert URI to path"))?;
1768 let options = op.options.map(Into::into).unwrap_or_default();
1769 if abs_path.ends_with("/") {
1770 fs.remove_dir(&abs_path, options).await?;
1771 } else {
1772 fs.remove_file(&abs_path, options).await?;
1773 }
1774 }
1775 lsp::DocumentChangeOperation::Edit(op) => {
1776 let buffer_to_edit = this
1777 .update(cx, |this, cx| {
1778 this.open_local_buffer_from_lsp_path(
1779 op.text_document.uri,
1780 language_name.clone(),
1781 language_server.clone(),
1782 cx,
1783 )
1784 })
1785 .await?;
1786
1787 let edits = buffer_to_edit
1788 .update(cx, |buffer, cx| {
1789 let edits = op.edits.into_iter().map(|edit| match edit {
1790 lsp::OneOf::Left(edit) => edit,
1791 lsp::OneOf::Right(edit) => edit.text_edit,
1792 });
1793 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1794 })
1795 .await?;
1796
1797 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
1798 buffer.finalize_last_transaction();
1799 buffer.start_transaction();
1800 for (range, text) in edits {
1801 buffer.edit([range], text, cx);
1802 }
1803 let transaction = if buffer.end_transaction(cx).is_some() {
1804 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1805 if !push_to_history {
1806 buffer.forget_transaction(transaction.id);
1807 }
1808 Some(transaction)
1809 } else {
1810 None
1811 };
1812
1813 transaction
1814 });
1815 if let Some(transaction) = transaction {
1816 project_transaction.0.insert(buffer_to_edit, transaction);
1817 }
1818 }
1819 }
1820 }
1821
1822 Ok(project_transaction)
1823 }
1824
1825 pub fn prepare_rename<T: ToPointUtf16>(
1826 &self,
1827 buffer: ModelHandle<Buffer>,
1828 position: T,
1829 cx: &mut ModelContext<Self>,
1830 ) -> Task<Result<Option<Range<Anchor>>>> {
1831 let position = position.to_point_utf16(buffer.read(cx));
1832 self.request_lsp(buffer.clone(), PrepareRename { buffer, position }, cx)
1833 }
1834
1835 pub fn perform_rename<T: ToPointUtf16>(
1836 &self,
1837 buffer: ModelHandle<Buffer>,
1838 position: T,
1839 new_name: String,
1840 push_to_history: bool,
1841 cx: &mut ModelContext<Self>,
1842 ) -> Task<Result<ProjectTransaction>> {
1843 let position = position.to_point_utf16(buffer.read(cx));
1844 self.request_lsp(
1845 buffer.clone(),
1846 PerformRename {
1847 buffer,
1848 position,
1849 new_name,
1850 push_to_history,
1851 },
1852 cx,
1853 )
1854 }
1855
1856 fn request_lsp<R: LspCommand>(
1857 &self,
1858 buffer_handle: ModelHandle<Buffer>,
1859 request: R,
1860 cx: &mut ModelContext<Self>,
1861 ) -> Task<Result<R::Response>>
1862 where
1863 <R::LspRequest as lsp::request::Request>::Result: Send,
1864 {
1865 let buffer = buffer_handle.read(cx);
1866 if self.is_local() {
1867 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
1868 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
1869 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
1870 return cx.spawn(|this, cx| async move {
1871 let response = language_server
1872 .request::<R::LspRequest>(lsp_params)
1873 .await
1874 .context("lsp request failed")?;
1875 request.response_from_lsp(response, this, cx).await
1876 });
1877 }
1878 } else if let Some(project_id) = self.remote_id() {
1879 let rpc = self.client.clone();
1880 let message = request.to_proto(project_id, cx);
1881 return cx.spawn(|this, cx| async move {
1882 let response = rpc.request(message).await?;
1883 request.response_from_proto(response, this, cx).await
1884 });
1885 }
1886 Task::ready(Ok(Default::default()))
1887 }
1888
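    // Returns the existing local worktree containing `abs_path` (along with the path relative to
    // the worktree root), or creates a new worktree rooted at `abs_path`.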
1889 pub fn find_or_create_local_worktree(
1890 &self,
1891 abs_path: impl AsRef<Path>,
1892 weak: bool,
1893 cx: &mut ModelContext<Self>,
1894 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1895 let abs_path = abs_path.as_ref();
1896 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
            Task::ready(Ok((tree, relative_path)))
1898 } else {
1899 let worktree = self.create_local_worktree(abs_path, weak, cx);
1900 cx.foreground()
1901 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1902 }
1903 }
1904
1905 fn find_local_worktree(
1906 &self,
1907 abs_path: &Path,
1908 cx: &AppContext,
1909 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1910 for tree in self.worktrees(cx) {
1911 if let Some(relative_path) = tree
1912 .read(cx)
1913 .as_local()
1914 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1915 {
1916 return Some((tree.clone(), relative_path.into()));
1917 }
1918 }
1919 None
1920 }
1921
1922 pub fn is_shared(&self) -> bool {
1923 match &self.client_state {
1924 ProjectClientState::Local { is_shared, .. } => *is_shared,
1925 ProjectClientState::Remote { .. } => false,
1926 }
1927 }
1928
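    // Creates a local worktree for `abs_path`; if the project already has a remote id, the new
    // worktree is registered with the server, and also shared when the project is shared.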
1929 fn create_local_worktree(
1930 &self,
1931 abs_path: impl AsRef<Path>,
1932 weak: bool,
1933 cx: &mut ModelContext<Self>,
1934 ) -> Task<Result<ModelHandle<Worktree>>> {
1935 let fs = self.fs.clone();
1936 let client = self.client.clone();
1937 let path = Arc::from(abs_path.as_ref());
1938 cx.spawn(|project, mut cx| async move {
1939 let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;
1940
1941 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
1942 project.add_worktree(&worktree, cx);
1943 (project.remote_id(), project.is_shared())
1944 });
1945
1946 if let Some(project_id) = remote_project_id {
1947 worktree
1948 .update(&mut cx, |worktree, cx| {
1949 worktree.as_local_mut().unwrap().register(project_id, cx)
1950 })
1951 .await?;
1952 if is_shared {
1953 worktree
1954 .update(&mut cx, |worktree, cx| {
1955 worktree.as_local_mut().unwrap().share(project_id, cx)
1956 })
1957 .await?;
1958 }
1959 }
1960
1961 Ok(worktree)
1962 })
1963 }
1964
1965 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1966 self.worktrees.retain(|worktree| {
1967 worktree
1968 .upgrade(cx)
1969 .map_or(false, |w| w.read(cx).id() != id)
1970 });
1971 cx.notify();
1972 }
1973
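    // Starts tracking a newly added worktree: observes it for changes, keeps open buffers' file
    // metadata in sync for local worktrees, and holds weak worktrees via weak handles so they are
    // pruned once released.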
1974 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
1975 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
1976 if worktree.read(cx).is_local() {
1977 cx.subscribe(&worktree, |this, worktree, _, cx| {
1978 this.update_local_worktree_buffers(worktree, cx);
1979 })
1980 .detach();
1981 }
1982
1983 let push_weak_handle = {
1984 let worktree = worktree.read(cx);
1985 worktree.is_local() && worktree.is_weak()
1986 };
1987 if push_weak_handle {
1988 cx.observe_release(&worktree, |this, cx| {
1989 this.worktrees
1990 .retain(|worktree| worktree.upgrade(cx).is_some());
1991 cx.notify();
1992 })
1993 .detach();
1994 self.worktrees
1995 .push(WorktreeHandle::Weak(worktree.downgrade()));
1996 } else {
1997 self.worktrees
1998 .push(WorktreeHandle::Strong(worktree.clone()));
1999 }
2000 cx.notify();
2001 }
2002
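    // Re-resolves each open buffer's file against the worktree's latest snapshot (following
    // renames and deletions), notifies remote peers of the new file metadata, and removes entries
    // for buffers that have already been dropped.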
2003 fn update_local_worktree_buffers(
2004 &mut self,
2005 worktree_handle: ModelHandle<Worktree>,
2006 cx: &mut ModelContext<Self>,
2007 ) {
2008 let snapshot = worktree_handle.read(cx).snapshot();
2009 let mut buffers_to_delete = Vec::new();
2010 for (buffer_id, buffer) in &self.open_buffers {
2011 if let Some(buffer) = buffer.upgrade(cx) {
2012 buffer.update(cx, |buffer, cx| {
2013 if let Some(old_file) = File::from_dyn(buffer.file()) {
2014 if old_file.worktree != worktree_handle {
2015 return;
2016 }
2017
2018 let new_file = if let Some(entry) = old_file
2019 .entry_id
2020 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
2021 {
2022 File {
2023 is_local: true,
2024 entry_id: Some(entry.id),
2025 mtime: entry.mtime,
2026 path: entry.path.clone(),
2027 worktree: worktree_handle.clone(),
2028 }
2029 } else if let Some(entry) =
2030 snapshot.entry_for_path(old_file.path().as_ref())
2031 {
2032 File {
2033 is_local: true,
2034 entry_id: Some(entry.id),
2035 mtime: entry.mtime,
2036 path: entry.path.clone(),
2037 worktree: worktree_handle.clone(),
2038 }
2039 } else {
2040 File {
2041 is_local: true,
2042 entry_id: None,
2043 path: old_file.path().clone(),
2044 mtime: old_file.mtime(),
2045 worktree: worktree_handle.clone(),
2046 }
2047 };
2048
2049 if let Some(project_id) = self.remote_id() {
2050 self.client
2051 .send(proto::UpdateBufferFile {
2052 project_id,
2053 buffer_id: *buffer_id as u64,
2054 file: Some(new_file.to_proto()),
2055 })
2056 .log_err();
2057 }
2058 buffer.file_updated(Box::new(new_file), cx).detach();
2059 }
2060 });
2061 } else {
2062 buffers_to_delete.push(*buffer_id);
2063 }
2064 }
2065
2066 for buffer_id in buffers_to_delete {
2067 self.open_buffers.remove(&buffer_id);
2068 }
2069 }
2070
2071 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2072 let new_active_entry = entry.and_then(|project_path| {
2073 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2074 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2075 Some(ProjectEntry {
2076 worktree_id: project_path.worktree_id,
2077 entry_id: entry.id,
2078 })
2079 });
2080 if new_active_entry != self.active_entry {
2081 self.active_entry = new_active_entry;
2082 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2083 }
2084 }
2085
2086 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2087 self.language_servers_with_diagnostics_running > 0
2088 }
2089
2090 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2091 let mut summary = DiagnosticSummary::default();
2092 for (_, path_summary) in self.diagnostic_summaries(cx) {
2093 summary.error_count += path_summary.error_count;
2094 summary.warning_count += path_summary.warning_count;
2095 summary.info_count += path_summary.info_count;
2096 summary.hint_count += path_summary.hint_count;
2097 }
2098 summary
2099 }
2100
2101 pub fn diagnostic_summaries<'a>(
2102 &'a self,
2103 cx: &'a AppContext,
2104 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2105 self.worktrees(cx).flat_map(move |worktree| {
2106 let worktree = worktree.read(cx);
2107 let worktree_id = worktree.id();
2108 worktree
2109 .diagnostic_summaries()
2110 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2111 })
2112 }
2113
2114 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2115 self.language_servers_with_diagnostics_running += 1;
2116 if self.language_servers_with_diagnostics_running == 1 {
2117 cx.emit(Event::DiskBasedDiagnosticsStarted);
2118 }
2119 }
2120
2121 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2122 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2123 self.language_servers_with_diagnostics_running -= 1;
2124 if self.language_servers_with_diagnostics_running == 0 {
2125 cx.emit(Event::DiskBasedDiagnosticsFinished);
2126 }
2127 }
2128
2129 pub fn active_entry(&self) -> Option<ProjectEntry> {
2130 self.active_entry
2131 }
2132
2133 // RPC message handlers
2134
2135 async fn handle_unshare_project(
2136 this: ModelHandle<Self>,
2137 _: TypedEnvelope<proto::UnshareProject>,
2138 _: Arc<Client>,
2139 mut cx: AsyncAppContext,
2140 ) -> Result<()> {
2141 this.update(&mut cx, |this, cx| {
2142 if let ProjectClientState::Remote {
2143 sharing_has_stopped,
2144 ..
2145 } = &mut this.client_state
2146 {
2147 *sharing_has_stopped = true;
2148 this.collaborators.clear();
2149 cx.notify();
2150 } else {
2151 unreachable!()
2152 }
2153 });
2154
2155 Ok(())
2156 }
2157
2158 async fn handle_add_collaborator(
2159 this: ModelHandle<Self>,
2160 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2161 _: Arc<Client>,
2162 mut cx: AsyncAppContext,
2163 ) -> Result<()> {
2164 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2165 let collaborator = envelope
2166 .payload
2167 .collaborator
2168 .take()
2169 .ok_or_else(|| anyhow!("empty collaborator"))?;
2170
2171 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2172 this.update(&mut cx, |this, cx| {
2173 this.collaborators
2174 .insert(collaborator.peer_id, collaborator);
2175 cx.notify();
2176 });
2177
2178 Ok(())
2179 }
2180
2181 async fn handle_remove_collaborator(
2182 this: ModelHandle<Self>,
2183 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2184 _: Arc<Client>,
2185 mut cx: AsyncAppContext,
2186 ) -> Result<()> {
2187 this.update(&mut cx, |this, cx| {
2188 let peer_id = PeerId(envelope.payload.peer_id);
2189 let replica_id = this
2190 .collaborators
2191 .remove(&peer_id)
2192 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2193 .replica_id;
2194 this.shared_buffers.remove(&peer_id);
2195 for (_, buffer) in &this.open_buffers {
2196 if let Some(buffer) = buffer.upgrade(cx) {
2197 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2198 }
2199 }
2200 cx.notify();
2201 Ok(())
2202 })
2203 }
2204
2205 async fn handle_share_worktree(
2206 this: ModelHandle<Self>,
2207 envelope: TypedEnvelope<proto::ShareWorktree>,
2208 client: Arc<Client>,
2209 mut cx: AsyncAppContext,
2210 ) -> Result<()> {
2211 this.update(&mut cx, |this, cx| {
2212 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2213 let replica_id = this.replica_id();
2214 let worktree = envelope
2215 .payload
2216 .worktree
2217 .ok_or_else(|| anyhow!("invalid worktree"))?;
2218 let (worktree, load_task) =
2219 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2220 this.add_worktree(&worktree, cx);
2221 load_task.detach();
2222 Ok(())
2223 })
2224 }
2225
2226 async fn handle_unregister_worktree(
2227 this: ModelHandle<Self>,
2228 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2229 _: Arc<Client>,
2230 mut cx: AsyncAppContext,
2231 ) -> Result<()> {
2232 this.update(&mut cx, |this, cx| {
2233 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2234 this.remove_worktree(worktree_id, cx);
2235 Ok(())
2236 })
2237 }
2238
2239 async fn handle_update_worktree(
2240 this: ModelHandle<Self>,
2241 envelope: TypedEnvelope<proto::UpdateWorktree>,
2242 _: Arc<Client>,
2243 mut cx: AsyncAppContext,
2244 ) -> Result<()> {
2245 this.update(&mut cx, |this, cx| {
2246 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2247 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2248 worktree.update(cx, |worktree, _| {
2249 let worktree = worktree.as_remote_mut().unwrap();
2250 worktree.update_from_remote(envelope)
2251 })?;
2252 }
2253 Ok(())
2254 })
2255 }
2256
2257 async fn handle_update_diagnostic_summary(
2258 this: ModelHandle<Self>,
2259 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2260 _: Arc<Client>,
2261 mut cx: AsyncAppContext,
2262 ) -> Result<()> {
2263 this.update(&mut cx, |this, cx| {
2264 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2265 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2266 if let Some(summary) = envelope.payload.summary {
2267 let project_path = ProjectPath {
2268 worktree_id,
2269 path: Path::new(&summary.path).into(),
2270 };
2271 worktree.update(cx, |worktree, _| {
2272 worktree
2273 .as_remote_mut()
2274 .unwrap()
2275 .update_diagnostic_summary(project_path.path.clone(), &summary);
2276 });
2277 cx.emit(Event::DiagnosticsUpdated(project_path));
2278 }
2279 }
2280 Ok(())
2281 })
2282 }
2283
2284 async fn handle_disk_based_diagnostics_updating(
2285 this: ModelHandle<Self>,
2286 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2287 _: Arc<Client>,
2288 mut cx: AsyncAppContext,
2289 ) -> Result<()> {
2290 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2291 Ok(())
2292 }
2293
2294 async fn handle_disk_based_diagnostics_updated(
2295 this: ModelHandle<Self>,
2296 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2297 _: Arc<Client>,
2298 mut cx: AsyncAppContext,
2299 ) -> Result<()> {
2300 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2301 Ok(())
2302 }
2303
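    // Applies buffer operations received from a peer. Operations addressed to a buffer that is
    // still being opened are stashed in `OpenBuffer::Loading` and applied once it finishes loading.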
2304 async fn handle_update_buffer(
2305 this: ModelHandle<Self>,
2306 envelope: TypedEnvelope<proto::UpdateBuffer>,
2307 _: Arc<Client>,
2308 mut cx: AsyncAppContext,
2309 ) -> Result<()> {
2310 this.update(&mut cx, |this, cx| {
2311 let payload = envelope.payload.clone();
2312 let buffer_id = payload.buffer_id;
2313 let ops = payload
2314 .operations
2315 .into_iter()
2316 .map(|op| language::proto::deserialize_operation(op))
2317 .collect::<Result<Vec<_>, _>>()?;
2318 let is_remote = this.is_remote();
2319 match this.open_buffers.entry(buffer_id) {
2320 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2321 OpenBuffer::Loaded(buffer) => {
2322 if let Some(buffer) = buffer.upgrade(cx) {
2323 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2324 }
2325 }
2326 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2327 },
2328 hash_map::Entry::Vacant(e) => {
                    if is_remote && !this.loading_buffers.is_empty() {
2330 e.insert(OpenBuffer::Loading(ops));
2331 }
2332 }
2333 }
2334 Ok(())
2335 })
2336 }
2337
2338 async fn handle_update_buffer_file(
2339 this: ModelHandle<Self>,
2340 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2341 _: Arc<Client>,
2342 mut cx: AsyncAppContext,
2343 ) -> Result<()> {
2344 this.update(&mut cx, |this, cx| {
2345 let payload = envelope.payload.clone();
2346 let buffer_id = payload.buffer_id;
2347 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2348 let worktree = this
2349 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2350 .ok_or_else(|| anyhow!("no such worktree"))?;
2351 let file = File::from_proto(file, worktree.clone(), cx)?;
2352 let buffer = this
2353 .open_buffers
2354 .get_mut(&buffer_id)
2355 .and_then(|b| b.upgrade(cx))
2356 .ok_or_else(|| anyhow!("no such buffer"))?;
2357 buffer.update(cx, |buffer, cx| {
2358 buffer.file_updated(Box::new(file), cx).detach();
2359 });
2360 Ok(())
2361 })
2362 }
2363
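    // Saves a buffer on behalf of a guest, first verifying that all of the edits the guest has
    // observed have already been received.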
2364 async fn handle_save_buffer(
2365 this: ModelHandle<Self>,
2366 envelope: TypedEnvelope<proto::SaveBuffer>,
2367 _: Arc<Client>,
2368 mut cx: AsyncAppContext,
2369 ) -> Result<proto::BufferSaved> {
2370 let buffer_id = envelope.payload.buffer_id;
2371 let sender_id = envelope.original_sender_id()?;
2372 let requested_version = envelope.payload.version.try_into()?;
2373
2374 let (project_id, buffer) = this.update(&mut cx, |this, _| {
2375 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2376 let buffer = this
2377 .shared_buffers
2378 .get(&sender_id)
2379 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2380 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2381 Ok::<_, anyhow::Error>((project_id, buffer))
2382 })?;
2383
2384 if !buffer
2385 .read_with(&cx, |buffer, _| buffer.version())
2386 .observed_all(&requested_version)
2387 {
2388 Err(anyhow!("save request depends on unreceived edits"))?;
2389 }
2390
2391 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2392 Ok(proto::BufferSaved {
2393 project_id,
2394 buffer_id,
2395 version: (&saved_version).into(),
2396 mtime: Some(mtime.into()),
2397 })
2398 }
2399
2400 async fn handle_format_buffers(
2401 this: ModelHandle<Self>,
2402 envelope: TypedEnvelope<proto::FormatBuffers>,
2403 _: Arc<Client>,
2404 mut cx: AsyncAppContext,
2405 ) -> Result<proto::FormatBuffersResponse> {
2406 let sender_id = envelope.original_sender_id()?;
2407 let format = this.update(&mut cx, |this, cx| {
2408 let shared_buffers = this
2409 .shared_buffers
2410 .get(&sender_id)
2411 .ok_or_else(|| anyhow!("peer has no buffers"))?;
2412 let mut buffers = HashSet::default();
2413 for buffer_id in &envelope.payload.buffer_ids {
2414 buffers.insert(
2415 shared_buffers
2416 .get(buffer_id)
2417 .cloned()
2418 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2419 );
2420 }
2421 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2422 })?;
2423
2424 let project_transaction = format.await?;
2425 let project_transaction = this.update(&mut cx, |this, cx| {
2426 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2427 });
2428 Ok(proto::FormatBuffersResponse {
2429 transaction: Some(project_transaction),
2430 })
2431 }
2432
2433 async fn handle_get_completions(
2434 this: ModelHandle<Self>,
2435 envelope: TypedEnvelope<proto::GetCompletions>,
2436 _: Arc<Client>,
2437 mut cx: AsyncAppContext,
2438 ) -> Result<proto::GetCompletionsResponse> {
2439 let sender_id = envelope.original_sender_id()?;
2440 let position = envelope
2441 .payload
2442 .position
2443 .and_then(language::proto::deserialize_anchor)
2444 .ok_or_else(|| anyhow!("invalid position"))?;
2445 let version = clock::Global::from(envelope.payload.version);
2446 let buffer = this.read_with(&cx, |this, _| {
2447 this.shared_buffers
2448 .get(&sender_id)
2449 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2450 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2451 })?;
2452 if !buffer
2453 .read_with(&cx, |buffer, _| buffer.version())
2454 .observed_all(&version)
2455 {
2456 Err(anyhow!("completion request depends on unreceived edits"))?;
2457 }
2458 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2459 let completions = this
2460 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2461 .await?;
2462
2463 Ok(proto::GetCompletionsResponse {
2464 completions: completions
2465 .iter()
2466 .map(language::proto::serialize_completion)
2467 .collect(),
2468 version: (&version).into(),
2469 })
2470 }
2471
2472 async fn handle_apply_additional_edits_for_completion(
2473 this: ModelHandle<Self>,
2474 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2475 _: Arc<Client>,
2476 mut cx: AsyncAppContext,
2477 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2478 let sender_id = envelope.original_sender_id()?;
2479 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2480 let buffer = this
2481 .shared_buffers
2482 .get(&sender_id)
2483 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2484 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2485 let language = buffer.read(cx).language();
2486 let completion = language::proto::deserialize_completion(
2487 envelope
2488 .payload
2489 .completion
2490 .ok_or_else(|| anyhow!("invalid completion"))?,
2491 language,
2492 )?;
2493 Ok::<_, anyhow::Error>(
2494 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2495 )
2496 })?;
2497
2498 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2499 transaction: apply_additional_edits
2500 .await?
2501 .as_ref()
2502 .map(language::proto::serialize_transaction),
2503 })
2504 }
2505
2506 async fn handle_get_code_actions(
2507 this: ModelHandle<Self>,
2508 envelope: TypedEnvelope<proto::GetCodeActions>,
2509 _: Arc<Client>,
2510 mut cx: AsyncAppContext,
2511 ) -> Result<proto::GetCodeActionsResponse> {
2512 let sender_id = envelope.original_sender_id()?;
2513 let start = envelope
2514 .payload
2515 .start
2516 .and_then(language::proto::deserialize_anchor)
2517 .ok_or_else(|| anyhow!("invalid start"))?;
2518 let end = envelope
2519 .payload
2520 .end
2521 .and_then(language::proto::deserialize_anchor)
2522 .ok_or_else(|| anyhow!("invalid end"))?;
2523 let buffer = this.update(&mut cx, |this, _| {
2524 this.shared_buffers
2525 .get(&sender_id)
2526 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2527 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2528 })?;
2529 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2530 if !version.observed(start.timestamp) || !version.observed(end.timestamp) {
2531 Err(anyhow!("code action request references unreceived edits"))?;
2532 }
2533 let code_actions = this.update(&mut cx, |this, cx| {
2534 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2535 })?;
2536
2537 Ok(proto::GetCodeActionsResponse {
2538 actions: code_actions
2539 .await?
2540 .iter()
2541 .map(language::proto::serialize_code_action)
2542 .collect(),
2543 version: (&version).into(),
2544 })
2545 }
2546
2547 async fn handle_apply_code_action(
2548 this: ModelHandle<Self>,
2549 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2550 _: Arc<Client>,
2551 mut cx: AsyncAppContext,
2552 ) -> Result<proto::ApplyCodeActionResponse> {
2553 let sender_id = envelope.original_sender_id()?;
2554 let action = language::proto::deserialize_code_action(
2555 envelope
2556 .payload
2557 .action
2558 .ok_or_else(|| anyhow!("invalid action"))?,
2559 )?;
2560 let apply_code_action = this.update(&mut cx, |this, cx| {
2561 let buffer = this
2562 .shared_buffers
2563 .get(&sender_id)
2564 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2565 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2566 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2567 })?;
2568
2569 let project_transaction = apply_code_action.await?;
2570 let project_transaction = this.update(&mut cx, |this, cx| {
2571 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2572 });
2573 Ok(proto::ApplyCodeActionResponse {
2574 transaction: Some(project_transaction),
2575 })
2576 }
2577
2578 async fn handle_get_definition(
2579 this: ModelHandle<Self>,
2580 envelope: TypedEnvelope<proto::GetDefinition>,
2581 _: Arc<Client>,
2582 mut cx: AsyncAppContext,
2583 ) -> Result<proto::GetDefinitionResponse> {
2584 let sender_id = envelope.original_sender_id()?;
2585 let position = envelope
2586 .payload
2587 .position
2588 .and_then(deserialize_anchor)
2589 .ok_or_else(|| anyhow!("invalid position"))?;
2590 let definitions = this.update(&mut cx, |this, cx| {
2591 let source_buffer = this
2592 .shared_buffers
2593 .get(&sender_id)
2594 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2595 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2596 if source_buffer.read(cx).can_resolve(&position) {
2597 Ok(this.definition(&source_buffer, position, cx))
2598 } else {
2599 Err(anyhow!("cannot resolve position"))
2600 }
2601 })?;
2602
2603 let definitions = definitions.await?;
2604
2605 this.update(&mut cx, |this, cx| {
2606 let mut response = proto::GetDefinitionResponse {
2607 definitions: Default::default(),
2608 };
2609 for definition in definitions {
2610 let buffer =
2611 this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx);
2612 response.definitions.push(proto::Definition {
2613 target_start: Some(serialize_anchor(&definition.target_range.start)),
2614 target_end: Some(serialize_anchor(&definition.target_range.end)),
2615 buffer: Some(buffer),
2616 });
2617 }
2618 Ok(response)
2619 })
2620 }
2621
2622 async fn handle_prepare_rename(
2623 this: ModelHandle<Self>,
2624 envelope: TypedEnvelope<proto::PrepareRename>,
2625 _: Arc<Client>,
2626 mut cx: AsyncAppContext,
2627 ) -> Result<proto::PrepareRenameResponse> {
2628 let sender_id = envelope.original_sender_id()?;
2629 let position = envelope
2630 .payload
2631 .position
2632 .and_then(deserialize_anchor)
2633 .ok_or_else(|| anyhow!("invalid position"))?;
2634 let (prepare_rename, version) = this.update(&mut cx, |this, cx| {
2635 let buffer_handle = this
2636 .shared_buffers
2637 .get(&sender_id)
2638 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2639 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2640 let buffer = buffer_handle.read(cx);
2641 let version = buffer.version();
2642 if buffer.can_resolve(&position) {
2643 Ok((this.prepare_rename(buffer_handle, position, cx), version))
2644 } else {
2645 Err(anyhow!("cannot resolve position"))
2646 }
2647 })?;
2648
2649 let range = prepare_rename.await?;
2650 Ok(proto::PrepareRenameResponse {
2651 can_rename: range.is_some(),
2652 start: range
2653 .as_ref()
2654 .map(|range| language::proto::serialize_anchor(&range.start)),
2655 end: range
2656 .as_ref()
2657 .map(|range| language::proto::serialize_anchor(&range.end)),
2658 version: (&version).into(),
2659 })
2660 }
2661
2662 async fn handle_perform_rename(
2663 this: ModelHandle<Self>,
2664 envelope: TypedEnvelope<proto::PerformRename>,
2665 _: Arc<Client>,
2666 mut cx: AsyncAppContext,
2667 ) -> Result<proto::PerformRenameResponse> {
2668 let sender_id = envelope.original_sender_id()?;
2669 let position = envelope
2670 .payload
2671 .position
2672 .and_then(deserialize_anchor)
2673 .ok_or_else(|| anyhow!("invalid position"))?;
2674 let perform_rename = this.update(&mut cx, |this, cx| {
2675 let buffer_handle = this
2676 .shared_buffers
2677 .get(&sender_id)
2678 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2679 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2680 let buffer = buffer_handle.read(cx);
2681 if buffer.can_resolve(&position) {
2682 Ok(this.perform_rename(
2683 buffer_handle,
2684 position,
2685 envelope.payload.new_name,
2686 false,
2687 cx,
2688 ))
2689 } else {
2690 Err(anyhow!("cannot resolve position"))
2691 }
2692 })?;
2693
2694 let transaction = perform_rename.await?;
2695 let transaction = this.update(&mut cx, |this, cx| {
2696 this.serialize_project_transaction_for_peer(transaction, sender_id, cx)
2697 });
2698 Ok(proto::PerformRenameResponse {
2699 transaction: Some(transaction),
2700 })
2701 }
2702
2703 async fn handle_open_buffer(
2704 this: ModelHandle<Self>,
2705 envelope: TypedEnvelope<proto::OpenBuffer>,
2706 _: Arc<Client>,
2707 mut cx: AsyncAppContext,
2708 ) -> anyhow::Result<proto::OpenBufferResponse> {
2709 let peer_id = envelope.original_sender_id()?;
2710 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2711 let open_buffer = this.update(&mut cx, |this, cx| {
2712 this.open_buffer(
2713 ProjectPath {
2714 worktree_id,
2715 path: PathBuf::from(envelope.payload.path).into(),
2716 },
2717 cx,
2718 )
2719 });
2720
2721 let buffer = open_buffer.await?;
2722 this.update(&mut cx, |this, cx| {
2723 Ok(proto::OpenBufferResponse {
2724 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
2725 })
2726 })
2727 }
2728
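    // Converts a `ProjectTransaction` into its wire representation, making sure each affected
    // buffer has been sent to the peer.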
2729 fn serialize_project_transaction_for_peer(
2730 &mut self,
2731 project_transaction: ProjectTransaction,
2732 peer_id: PeerId,
2733 cx: &AppContext,
2734 ) -> proto::ProjectTransaction {
2735 let mut serialized_transaction = proto::ProjectTransaction {
2736 buffers: Default::default(),
2737 transactions: Default::default(),
2738 };
2739 for (buffer, transaction) in project_transaction.0 {
2740 serialized_transaction
2741 .buffers
2742 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
2743 serialized_transaction
2744 .transactions
2745 .push(language::proto::serialize_transaction(&transaction));
2746 }
2747 serialized_transaction
2748 }
2749
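    // Reconstructs a `ProjectTransaction` received from the host: opens each buffer, waits for the
    // referenced edits to arrive, and optionally pushes the transactions onto the buffers' histories.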
2750 fn deserialize_project_transaction(
2751 &mut self,
2752 message: proto::ProjectTransaction,
2753 push_to_history: bool,
2754 cx: &mut ModelContext<Self>,
2755 ) -> Task<Result<ProjectTransaction>> {
2756 cx.spawn(|this, mut cx| async move {
2757 let mut project_transaction = ProjectTransaction::default();
2758 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
2759 let buffer = this
2760 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2761 .await?;
2762 let transaction = language::proto::deserialize_transaction(transaction)?;
2763 project_transaction.0.insert(buffer, transaction);
2764 }
2765 for (buffer, transaction) in &project_transaction.0 {
2766 buffer
2767 .update(&mut cx, |buffer, _| {
2768 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2769 })
2770 .await;
2771
2772 if push_to_history {
2773 buffer.update(&mut cx, |buffer, _| {
2774 buffer.push_transaction(transaction.clone(), Instant::now());
2775 });
2776 }
2777 }
2778
2779 Ok(project_transaction)
2780 })
2781 }
2782
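    // Sends a buffer's full state the first time it is shared with a given peer; subsequent
    // references only carry the buffer id.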
2783 fn serialize_buffer_for_peer(
2784 &mut self,
2785 buffer: &ModelHandle<Buffer>,
2786 peer_id: PeerId,
2787 cx: &AppContext,
2788 ) -> proto::Buffer {
2789 let buffer_id = buffer.read(cx).remote_id();
2790 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2791 match shared_buffers.entry(buffer_id) {
2792 hash_map::Entry::Occupied(_) => proto::Buffer {
2793 variant: Some(proto::buffer::Variant::Id(buffer_id)),
2794 },
2795 hash_map::Entry::Vacant(entry) => {
2796 entry.insert(buffer.clone());
2797 proto::Buffer {
2798 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
2799 }
2800 }
2801 }
2802 }
2803
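    // Resolves a `proto::Buffer`: either waits for an already-known buffer id to finish opening
    // locally, or constructs a new buffer (and its file) from the serialized state and registers it.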
2804 fn deserialize_buffer(
2805 &mut self,
2806 buffer: proto::Buffer,
2807 cx: &mut ModelContext<Self>,
2808 ) -> Task<Result<ModelHandle<Buffer>>> {
2809 let replica_id = self.replica_id();
2810
2811 let mut opened_buffer_tx = self.opened_buffer.clone();
2812 let mut opened_buffer_rx = self.opened_buffer.subscribe();
2813 cx.spawn(|this, mut cx| async move {
2814 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
2815 proto::buffer::Variant::Id(id) => {
2816 let buffer = loop {
2817 let buffer = this.read_with(&cx, |this, cx| {
2818 this.open_buffers
2819 .get(&id)
2820 .and_then(|buffer| buffer.upgrade(cx))
2821 });
2822 if let Some(buffer) = buffer {
2823 break buffer;
2824 }
2825 opened_buffer_rx
2826 .recv()
2827 .await
2828 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
2829 };
2830 Ok(buffer)
2831 }
2832 proto::buffer::Variant::State(mut buffer) => {
2833 let mut buffer_worktree = None;
2834 let mut buffer_file = None;
2835 if let Some(file) = buffer.file.take() {
2836 this.read_with(&cx, |this, cx| {
2837 let worktree_id = WorktreeId::from_proto(file.worktree_id);
2838 let worktree =
2839 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
2840 anyhow!("no worktree found for id {}", file.worktree_id)
2841 })?;
2842 buffer_file =
2843 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
2844 as Box<dyn language::File>);
2845 buffer_worktree = Some(worktree);
2846 Ok::<_, anyhow::Error>(())
2847 })?;
2848 }
2849
2850 let buffer = cx.add_model(|cx| {
2851 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
2852 });
2853 this.update(&mut cx, |this, cx| {
2854 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
2855 })?;
2856
2857 let _ = opened_buffer_tx.send(()).await;
2858 Ok(buffer)
2859 }
2860 }
2861 })
2862 }
2863
2864 async fn handle_close_buffer(
2865 this: ModelHandle<Self>,
2866 envelope: TypedEnvelope<proto::CloseBuffer>,
2867 _: Arc<Client>,
2868 mut cx: AsyncAppContext,
2869 ) -> anyhow::Result<()> {
2870 this.update(&mut cx, |this, cx| {
2871 if let Some(shared_buffers) =
2872 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
2873 {
2874 shared_buffers.remove(&envelope.payload.buffer_id);
2875 cx.notify();
2876 }
2877 Ok(())
2878 })
2879 }
2880
2881 async fn handle_buffer_saved(
2882 this: ModelHandle<Self>,
2883 envelope: TypedEnvelope<proto::BufferSaved>,
2884 _: Arc<Client>,
2885 mut cx: AsyncAppContext,
2886 ) -> Result<()> {
2887 let version = envelope.payload.version.try_into()?;
2888 let mtime = envelope
2889 .payload
2890 .mtime
2891 .ok_or_else(|| anyhow!("missing mtime"))?
2892 .into();
2893
2894 this.update(&mut cx, |this, cx| {
2895 let buffer = this
2896 .open_buffers
2897 .get(&envelope.payload.buffer_id)
2898 .and_then(|buffer| buffer.upgrade(cx));
2899 if let Some(buffer) = buffer {
2900 buffer.update(cx, |buffer, cx| {
2901 buffer.did_save(version, mtime, None, cx);
2902 });
2903 }
2904 Ok(())
2905 })
2906 }
2907
2908 async fn handle_buffer_reloaded(
2909 this: ModelHandle<Self>,
2910 envelope: TypedEnvelope<proto::BufferReloaded>,
2911 _: Arc<Client>,
2912 mut cx: AsyncAppContext,
2913 ) -> Result<()> {
2914 let payload = envelope.payload.clone();
2915 let version = payload.version.try_into()?;
2916 let mtime = payload
2917 .mtime
2918 .ok_or_else(|| anyhow!("missing mtime"))?
2919 .into();
2920 this.update(&mut cx, |this, cx| {
2921 let buffer = this
2922 .open_buffers
2923 .get(&payload.buffer_id)
2924 .and_then(|buffer| buffer.upgrade(cx));
2925 if let Some(buffer) = buffer {
2926 buffer.update(cx, |buffer, cx| {
2927 buffer.did_reload(version, mtime, cx);
2928 });
2929 }
2930 Ok(())
2931 })
2932 }
2933
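    // Fuzzy-matches `query` against the paths of all non-weak worktrees on the background executor.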
2934 pub fn match_paths<'a>(
2935 &self,
2936 query: &'a str,
2937 include_ignored: bool,
2938 smart_case: bool,
2939 max_results: usize,
2940 cancel_flag: &'a AtomicBool,
2941 cx: &AppContext,
2942 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
2943 let worktrees = self
2944 .worktrees(cx)
2945 .filter(|worktree| !worktree.read(cx).is_weak())
2946 .collect::<Vec<_>>();
2947 let include_root_name = worktrees.len() > 1;
2948 let candidate_sets = worktrees
2949 .into_iter()
2950 .map(|worktree| CandidateSet {
2951 snapshot: worktree.read(cx).snapshot(),
2952 include_ignored,
2953 include_root_name,
2954 })
2955 .collect::<Vec<_>>();
2956
2957 let background = cx.background().clone();
2958 async move {
2959 fuzzy::match_paths(
2960 candidate_sets.as_slice(),
2961 query,
2962 smart_case,
2963 max_results,
2964 cancel_flag,
2965 background,
2966 )
2967 .await
2968 }
2969 }
2970}
2971
2972impl WorktreeHandle {
2973 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2974 match self {
2975 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2976 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2977 }
2978 }
2979}
2980
2981impl OpenBuffer {
2982 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
2983 match self {
2984 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
2985 OpenBuffer::Loading(_) => None,
2986 }
2987 }
2988}
2989
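// Adapts a worktree snapshot into a candidate set for the fuzzy path matcher.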
2990struct CandidateSet {
2991 snapshot: Snapshot,
2992 include_ignored: bool,
2993 include_root_name: bool,
2994}
2995
2996impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
2997 type Candidates = CandidateSetIter<'a>;
2998
2999 fn id(&self) -> usize {
3000 self.snapshot.id().to_usize()
3001 }
3002
3003 fn len(&self) -> usize {
3004 if self.include_ignored {
3005 self.snapshot.file_count()
3006 } else {
3007 self.snapshot.visible_file_count()
3008 }
3009 }
3010
3011 fn prefix(&self) -> Arc<str> {
3012 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
3013 self.snapshot.root_name().into()
3014 } else if self.include_root_name {
3015 format!("{}/", self.snapshot.root_name()).into()
3016 } else {
3017 "".into()
3018 }
3019 }
3020
3021 fn candidates(&'a self, start: usize) -> Self::Candidates {
3022 CandidateSetIter {
3023 traversal: self.snapshot.files(self.include_ignored, start),
3024 }
3025 }
3026}
3027
3028struct CandidateSetIter<'a> {
3029 traversal: Traversal<'a>,
3030}
3031
3032impl<'a> Iterator for CandidateSetIter<'a> {
3033 type Item = PathMatchCandidate<'a>;
3034
3035 fn next(&mut self) -> Option<Self::Item> {
3036 self.traversal.next().map(|entry| {
3037 if let EntryKind::File(char_bag) = entry.kind {
3038 PathMatchCandidate {
3039 path: &entry.path,
3040 char_bag,
3041 }
3042 } else {
3043 unreachable!()
3044 }
3045 })
3046 }
3047}
3048
3049impl Entity for Project {
3050 type Event = Event;
3051
3052 fn release(&mut self, _: &mut gpui::MutableAppContext) {
3053 match &self.client_state {
3054 ProjectClientState::Local { remote_id_rx, .. } => {
3055 if let Some(project_id) = *remote_id_rx.borrow() {
3056 self.client
3057 .send(proto::UnregisterProject { project_id })
3058 .log_err();
3059 }
3060 }
3061 ProjectClientState::Remote { remote_id, .. } => {
3062 self.client
3063 .send(proto::LeaveProject {
3064 project_id: *remote_id,
3065 })
3066 .log_err();
3067 }
3068 }
3069 }
3070
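    // Shut down all running language servers before the application exits.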
3071 fn app_will_quit(
3072 &mut self,
3073 _: &mut MutableAppContext,
3074 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
3075 use futures::FutureExt;
3076
3077 let shutdown_futures = self
3078 .language_servers
3079 .drain()
3080 .filter_map(|(_, server)| server.shutdown())
3081 .collect::<Vec<_>>();
3082 Some(
3083 async move {
3084 futures::future::join_all(shutdown_futures).await;
3085 }
3086 .boxed(),
3087 )
3088 }
3089}
3090
3091impl Collaborator {
3092 fn from_proto(
3093 message: proto::Collaborator,
3094 user_store: &ModelHandle<UserStore>,
3095 cx: &mut AsyncAppContext,
3096 ) -> impl Future<Output = Result<Self>> {
3097 let user = user_store.update(cx, |user_store, cx| {
3098 user_store.fetch_user(message.user_id, cx)
3099 });
3100
3101 async move {
3102 Ok(Self {
3103 peer_id: PeerId(message.peer_id),
3104 user: user.await?,
3105 replica_id: message.replica_id as ReplicaId,
3106 })
3107 }
3108 }
3109}
3110
3111impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
3112 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
3113 Self {
3114 worktree_id,
3115 path: path.as_ref().into(),
3116 }
3117 }
3118}
3119
3120impl From<lsp::CreateFileOptions> for fs::CreateOptions {
3121 fn from(options: lsp::CreateFileOptions) -> Self {
3122 Self {
3123 overwrite: options.overwrite.unwrap_or(false),
3124 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3125 }
3126 }
3127}
3128
3129impl From<lsp::RenameFileOptions> for fs::RenameOptions {
3130 fn from(options: lsp::RenameFileOptions) -> Self {
3131 Self {
3132 overwrite: options.overwrite.unwrap_or(false),
3133 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3134 }
3135 }
3136}
3137
3138impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
3139 fn from(options: lsp::DeleteFileOptions) -> Self {
3140 Self {
3141 recursive: options.recursive.unwrap_or(false),
3142 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
3143 }
3144 }
3145}
3146
3147#[cfg(test)]
3148mod tests {
3149 use super::{Event, *};
3150 use fs::RealFs;
3151 use futures::StreamExt;
3152 use gpui::test::subscribe;
3153 use language::{
3154 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
3155 };
3156 use lsp::Url;
3157 use serde_json::json;
3158 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
3159 use unindent::Unindent as _;
3160 use util::test::temp_tree;
3161 use worktree::WorktreeHandle as _;
3162
3163 #[gpui::test]
3164 async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
3165 let dir = temp_tree(json!({
3166 "root": {
3167 "apple": "",
3168 "banana": {
3169 "carrot": {
3170 "date": "",
3171 "endive": "",
3172 }
3173 },
3174 "fennel": {
3175 "grape": "",
3176 }
3177 }
3178 }));
3179
3180 let root_link_path = dir.path().join("root_link");
3181 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3182 unix::fs::symlink(
3183 &dir.path().join("root/fennel"),
3184 &dir.path().join("root/finnochio"),
3185 )
3186 .unwrap();
3187
3188 let project = Project::test(Arc::new(RealFs), &mut cx);
3189
3190 let (tree, _) = project
3191 .update(&mut cx, |project, cx| {
3192 project.find_or_create_local_worktree(&root_link_path, false, cx)
3193 })
3194 .await
3195 .unwrap();
3196
3197 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3198 .await;
3199 cx.read(|cx| {
3200 let tree = tree.read(cx);
3201 assert_eq!(tree.file_count(), 5);
3202 assert_eq!(
3203 tree.inode_for_path("fennel/grape"),
3204 tree.inode_for_path("finnochio/grape")
3205 );
3206 });
3207
3208 let cancel_flag = Default::default();
3209 let results = project
3210 .read_with(&cx, |project, cx| {
3211 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3212 })
3213 .await;
3214 assert_eq!(
3215 results
3216 .into_iter()
3217 .map(|result| result.path)
3218 .collect::<Vec<Arc<Path>>>(),
3219 vec![
3220 PathBuf::from("banana/carrot/date").into(),
3221 PathBuf::from("banana/carrot/endive").into(),
3222 ]
3223 );
3224 }
3225
3226 #[gpui::test]
3227 async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
3228 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3229 let progress_token = language_server_config
3230 .disk_based_diagnostics_progress_token
3231 .clone()
3232 .unwrap();
3233
3234 let language = Arc::new(Language::new(
3235 LanguageConfig {
3236 name: "Rust".to_string(),
3237 path_suffixes: vec!["rs".to_string()],
3238 language_server: Some(language_server_config),
3239 ..Default::default()
3240 },
3241 Some(tree_sitter_rust::language()),
3242 ));
3243
3244 let fs = FakeFs::new(cx.background());
3245 fs.insert_tree(
3246 "/dir",
3247 json!({
3248 "a.rs": "fn a() { A }",
3249 "b.rs": "const y: i32 = 1",
3250 }),
3251 )
3252 .await;
3253
3254 let project = Project::test(fs, &mut cx);
3255 project.update(&mut cx, |project, _| {
3256 Arc::get_mut(&mut project.languages).unwrap().add(language);
3257 });
3258
3259 let (tree, _) = project
3260 .update(&mut cx, |project, cx| {
3261 project.find_or_create_local_worktree("/dir", false, cx)
3262 })
3263 .await
3264 .unwrap();
3265 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3266
3267 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3268 .await;
3269
3270 // Cause worktree to start the fake language server
3271 let _buffer = project
3272 .update(&mut cx, |project, cx| {
3273 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3274 })
3275 .await
3276 .unwrap();
3277
3278 let mut events = subscribe(&project, &mut cx);
3279
3280 let mut fake_server = fake_servers.next().await.unwrap();
3281 fake_server.start_progress(&progress_token).await;
3282 assert_eq!(
3283 events.next().await.unwrap(),
3284 Event::DiskBasedDiagnosticsStarted
3285 );
3286
3287 fake_server.start_progress(&progress_token).await;
3288 fake_server.end_progress(&progress_token).await;
3289 fake_server.start_progress(&progress_token).await;
3290
3291 fake_server
3292 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3293 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3294 version: None,
3295 diagnostics: vec![lsp::Diagnostic {
3296 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3297 severity: Some(lsp::DiagnosticSeverity::ERROR),
3298 message: "undefined variable 'A'".to_string(),
3299 ..Default::default()
3300 }],
3301 })
3302 .await;
3303 assert_eq!(
3304 events.next().await.unwrap(),
3305 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3306 );
3307
3308 fake_server.end_progress(&progress_token).await;
3309 fake_server.end_progress(&progress_token).await;
3310 assert_eq!(
3311 events.next().await.unwrap(),
3312 Event::DiskBasedDiagnosticsUpdated
3313 );
3314 assert_eq!(
3315 events.next().await.unwrap(),
3316 Event::DiskBasedDiagnosticsFinished
3317 );
3318
3319 let buffer = project
3320 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3321 .await
3322 .unwrap();
3323
3324 buffer.read_with(&cx, |buffer, _| {
3325 let snapshot = buffer.snapshot();
3326 let diagnostics = snapshot
3327 .diagnostics_in_range::<_, Point>(0..buffer.len())
3328 .collect::<Vec<_>>();
3329 assert_eq!(
3330 diagnostics,
3331 &[DiagnosticEntry {
3332 range: Point::new(0, 9)..Point::new(0, 10),
3333 diagnostic: Diagnostic {
3334 severity: lsp::DiagnosticSeverity::ERROR,
3335 message: "undefined variable 'A'".to_string(),
3336 group_id: 0,
3337 is_primary: true,
3338 ..Default::default()
3339 }
3340 }]
3341 )
3342 });
3343 }
3344
3345 #[gpui::test]
3346 async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
3347 let dir = temp_tree(json!({
3348 "root": {
3349 "dir1": {},
3350 "dir2": {
3351 "dir3": {}
3352 }
3353 }
3354 }));
3355
3356 let project = Project::test(Arc::new(RealFs), &mut cx);
3357 let (tree, _) = project
3358 .update(&mut cx, |project, cx| {
3359 project.find_or_create_local_worktree(&dir.path(), false, cx)
3360 })
3361 .await
3362 .unwrap();
3363
3364 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3365 .await;
3366
3367 let cancel_flag = Default::default();
3368 let results = project
3369 .read_with(&cx, |project, cx| {
3370 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3371 })
3372 .await;
3373
3374 assert!(results.is_empty());
3375 }
3376
3377 #[gpui::test]
3378 async fn test_definition(mut cx: gpui::TestAppContext) {
3379 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3380 let language = Arc::new(Language::new(
3381 LanguageConfig {
3382 name: "Rust".to_string(),
3383 path_suffixes: vec!["rs".to_string()],
3384 language_server: Some(language_server_config),
3385 ..Default::default()
3386 },
3387 Some(tree_sitter_rust::language()),
3388 ));
3389
3390 let fs = FakeFs::new(cx.background());
3391 fs.insert_tree(
3392 "/dir",
3393 json!({
3394 "a.rs": "const fn a() { A }",
3395 "b.rs": "const y: i32 = crate::a()",
3396 }),
3397 )
3398 .await;
3399
3400 let project = Project::test(fs, &mut cx);
3401 project.update(&mut cx, |project, _| {
3402 Arc::get_mut(&mut project.languages).unwrap().add(language);
3403 });
3404
3405 let (tree, _) = project
3406 .update(&mut cx, |project, cx| {
3407 project.find_or_create_local_worktree("/dir/b.rs", false, cx)
3408 })
3409 .await
3410 .unwrap();
3411 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3412 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3413 .await;
3414
3415 let buffer = project
3416 .update(&mut cx, |project, cx| {
3417 project.open_buffer(
3418 ProjectPath {
3419 worktree_id,
3420 path: Path::new("").into(),
3421 },
3422 cx,
3423 )
3424 })
3425 .await
3426 .unwrap();
3427
3428 let mut fake_server = fake_servers.next().await.unwrap();
3429 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params| {
3430 let params = params.text_document_position_params;
3431 assert_eq!(
3432 params.text_document.uri.to_file_path().unwrap(),
3433 Path::new("/dir/b.rs"),
3434 );
3435 assert_eq!(params.position, lsp::Position::new(0, 22));
3436
3437 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3438 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
3439 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3440 )))
3441 });
3442
3443 let mut definitions = project
3444 .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
3445 .await
3446 .unwrap();
3447
3448 assert_eq!(definitions.len(), 1);
3449 let definition = definitions.pop().unwrap();
3450 cx.update(|cx| {
3451 let target_buffer = definition.target_buffer.read(cx);
3452 assert_eq!(
3453 target_buffer
3454 .file()
3455 .unwrap()
3456 .as_local()
3457 .unwrap()
3458 .abs_path(cx),
3459 Path::new("/dir/a.rs"),
3460 );
3461 assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
3462 assert_eq!(
3463 list_worktrees(&project, cx),
3464 [("/dir/b.rs".as_ref(), false), ("/dir/a.rs".as_ref(), true)]
3465 );
3466
3467 drop(definition);
3468 });
3469 cx.read(|cx| {
3470 assert_eq!(
3471 list_worktrees(&project, cx),
3472 [("/dir/b.rs".as_ref(), false)]
3473 );
3474 });
3475
3476 fn list_worktrees<'a>(
3477 project: &'a ModelHandle<Project>,
3478 cx: &'a AppContext,
3479 ) -> Vec<(&'a Path, bool)> {
3480 project
3481 .read(cx)
3482 .worktrees(cx)
3483 .map(|worktree| {
3484 let worktree = worktree.read(cx);
3485 (
3486 worktree.as_local().unwrap().abs_path().as_ref(),
3487 worktree.is_weak(),
3488 )
3489 })
3490 .collect::<Vec<_>>()
3491 }
3492 }
3493
3494 #[gpui::test]
3495 async fn test_save_file(mut cx: gpui::TestAppContext) {
3496 let fs = FakeFs::new(cx.background());
3497 fs.insert_tree(
3498 "/dir",
3499 json!({
3500 "file1": "the old contents",
3501 }),
3502 )
3503 .await;
3504
3505 let project = Project::test(fs.clone(), &mut cx);
3506 let worktree_id = project
3507 .update(&mut cx, |p, cx| {
3508 p.find_or_create_local_worktree("/dir", false, cx)
3509 })
3510 .await
3511 .unwrap()
3512 .0
3513 .read_with(&cx, |tree, _| tree.id());
3514
3515 let buffer = project
3516 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3517 .await
3518 .unwrap();
3519 buffer
3520 .update(&mut cx, |buffer, cx| {
3521 assert_eq!(buffer.text(), "the old contents");
3522 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3523 buffer.save(cx)
3524 })
3525 .await
3526 .unwrap();
3527
3528 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3529 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3530 }
3531
3532 #[gpui::test]
3533 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
3534 let fs = FakeFs::new(cx.background());
3535 fs.insert_tree(
3536 "/dir",
3537 json!({
3538 "file1": "the old contents",
3539 }),
3540 )
3541 .await;
3542
3543 let project = Project::test(fs.clone(), &mut cx);
3544 let worktree_id = project
3545 .update(&mut cx, |p, cx| {
3546 p.find_or_create_local_worktree("/dir/file1", false, cx)
3547 })
3548 .await
3549 .unwrap()
3550 .0
3551 .read_with(&cx, |tree, _| tree.id());
3552
3553 let buffer = project
3554 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3555 .await
3556 .unwrap();
3557 buffer
3558 .update(&mut cx, |buffer, cx| {
3559 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3560 buffer.save(cx)
3561 })
3562 .await
3563 .unwrap();
3564
3565 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3566 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3567 }
3568
3569 #[gpui::test(retries = 5)]
3570 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3571 let dir = temp_tree(json!({
3572 "a": {
3573 "file1": "",
3574 "file2": "",
3575 "file3": "",
3576 },
3577 "b": {
3578 "c": {
3579 "file4": "",
3580 "file5": "",
3581 }
3582 }
3583 }));
3584
3585 let project = Project::test(Arc::new(RealFs), &mut cx);
3586 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3587
3588 let (tree, _) = project
3589 .update(&mut cx, |p, cx| {
3590 p.find_or_create_local_worktree(dir.path(), false, cx)
3591 })
3592 .await
3593 .unwrap();
3594 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3595
3596 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3597 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3598 async move { buffer.await.unwrap() }
3599 };
3600 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3601 tree.read_with(cx, |tree, _| {
3602 tree.entry_for_path(path)
                    .unwrap_or_else(|| panic!("no entry for path {}", path))
3604 .id
3605 })
3606 };
3607
3608 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3609 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3610 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3611 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3612
3613 let file2_id = id_for_path("a/file2", &cx);
3614 let file3_id = id_for_path("a/file3", &cx);
3615 let file4_id = id_for_path("b/c/file4", &cx);
3616
3617 // Wait for the initial scan.
3618 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3619 .await;
3620
3621 // Create a remote copy of this worktree.
3622 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
3623 let (remote, load_task) = cx.update(|cx| {
3624 Worktree::remote(
3625 1,
3626 1,
3627 initial_snapshot.to_proto(&Default::default(), Default::default()),
3628 rpc.clone(),
3629 cx,
3630 )
3631 });
3632 load_task.await;
3633
3634 cx.read(|cx| {
3635 assert!(!buffer2.read(cx).is_dirty());
3636 assert!(!buffer3.read(cx).is_dirty());
3637 assert!(!buffer4.read(cx).is_dirty());
3638 assert!(!buffer5.read(cx).is_dirty());
3639 });
3640
3641 // Rename and delete files and directories.
3642 tree.flush_fs_events(&cx).await;
3643 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3644 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3645 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3646 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3647 tree.flush_fs_events(&cx).await;
3648
3649 let expected_paths = vec![
3650 "a",
3651 "a/file1",
3652 "a/file2.new",
3653 "b",
3654 "d",
3655 "d/file3",
3656 "d/file4",
3657 ];
3658
3659 cx.read(|app| {
3660 assert_eq!(
3661 tree.read(app)
3662 .paths()
3663 .map(|p| p.to_str().unwrap())
3664 .collect::<Vec<_>>(),
3665 expected_paths
3666 );
3667
3668 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3669 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3670 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3671
3672 assert_eq!(
3673 buffer2.read(app).file().unwrap().path().as_ref(),
3674 Path::new("a/file2.new")
3675 );
3676 assert_eq!(
3677 buffer3.read(app).file().unwrap().path().as_ref(),
3678 Path::new("d/file3")
3679 );
3680 assert_eq!(
3681 buffer4.read(app).file().unwrap().path().as_ref(),
3682 Path::new("d/file4")
3683 );
3684 assert_eq!(
3685 buffer5.read(app).file().unwrap().path().as_ref(),
3686 Path::new("b/c/file5")
3687 );
3688
3689 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3690 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3691 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3692 assert!(buffer5.read(app).file().unwrap().is_deleted());
3693 });
3694
3695 // Update the remote worktree. Check that it becomes consistent with the
3696 // local worktree.
3697 remote.update(&mut cx, |remote, cx| {
3698 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
3699 &initial_snapshot,
3700 1,
3701 1,
3702 0,
3703 true,
3704 );
3705 remote
3706 .as_remote_mut()
3707 .unwrap()
3708 .snapshot
3709 .apply_remote_update(update_message)
3710 .unwrap();
3711
3712 assert_eq!(
3713 remote
3714 .paths()
3715 .map(|p| p.to_str().unwrap())
3716 .collect::<Vec<_>>(),
3717 expected_paths
3718 );
3719 });
3720 }
3721
3722 #[gpui::test]
3723 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3724 let fs = FakeFs::new(cx.background());
3725 fs.insert_tree(
3726 "/the-dir",
3727 json!({
3728 "a.txt": "a-contents",
3729 "b.txt": "b-contents",
3730 }),
3731 )
3732 .await;
3733
3734 let project = Project::test(fs.clone(), &mut cx);
3735 let worktree_id = project
3736 .update(&mut cx, |p, cx| {
3737 p.find_or_create_local_worktree("/the-dir", false, cx)
3738 })
3739 .await
3740 .unwrap()
3741 .0
3742 .read_with(&cx, |tree, _| tree.id());
3743
3744 // Spawn multiple tasks to open paths, repeating some paths.
3745 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3746 (
3747 p.open_buffer((worktree_id, "a.txt"), cx),
3748 p.open_buffer((worktree_id, "b.txt"), cx),
3749 p.open_buffer((worktree_id, "a.txt"), cx),
3750 )
3751 });
3752
3753 let buffer_a_1 = buffer_a_1.await.unwrap();
3754 let buffer_a_2 = buffer_a_2.await.unwrap();
3755 let buffer_b = buffer_b.await.unwrap();
3756 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3757 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3758
3759 // There is only one buffer per path.
3760 let buffer_a_id = buffer_a_1.id();
3761 assert_eq!(buffer_a_2.id(), buffer_a_id);
3762
3763 // Open the same path again while it is still open.
3764 drop(buffer_a_1);
3765 let buffer_a_3 = project
3766 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3767 .await
3768 .unwrap();
3769
3770 // There's still only one buffer per path.
3771 assert_eq!(buffer_a_3.id(), buffer_a_id);
3772 }
3773
3774 #[gpui::test]
3775 async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
3776 use std::fs;
3777
3778 let dir = temp_tree(json!({
3779 "file1": "abc",
3780 "file2": "def",
3781 "file3": "ghi",
3782 }));
3783
3784 let project = Project::test(Arc::new(RealFs), &mut cx);
3785 let (worktree, _) = project
3786 .update(&mut cx, |p, cx| {
3787 p.find_or_create_local_worktree(dir.path(), false, cx)
3788 })
3789 .await
3790 .unwrap();
3791 let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
3792
3793 worktree.flush_fs_events(&cx).await;
3794 worktree
3795 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3796 .await;
3797
        let buffer1 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
            .await
            .unwrap();
        let events = Rc::new(RefCell::new(Vec::new()));

        // initially, the buffer isn't dirty.
        buffer1.update(&mut cx, |buffer, cx| {
            cx.subscribe(&buffer1, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();

            assert!(!buffer.is_dirty());
            assert!(events.borrow().is_empty());

            buffer.edit(vec![1..2], "", cx);
        });

        // after the first edit, the buffer is dirty, and emits a dirtied event.
        buffer1.update(&mut cx, |buffer, cx| {
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[language::Event::Edited, language::Event::Dirtied]
            );
            events.borrow_mut().clear();
            buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
        });

        // after saving, the buffer is not dirty, and emits a saved event.
        buffer1.update(&mut cx, |buffer, cx| {
            assert!(!buffer.is_dirty());
            assert_eq!(*events.borrow(), &[language::Event::Saved]);
            events.borrow_mut().clear();

            buffer.edit(vec![1..1], "B", cx);
            buffer.edit(vec![2..2], "D", cx);
        });

        // after editing again, the buffer is dirty, and emits another dirtied event.
        buffer1.update(&mut cx, |buffer, cx| {
            assert!(buffer.text() == "aBDc");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[
                    language::Event::Edited,
                    language::Event::Dirtied,
                    language::Event::Edited,
                ],
            );
            events.borrow_mut().clear();

            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
            buffer.edit([1..3], "", cx);
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
        });

        assert_eq!(*events.borrow(), &[language::Event::Edited]);

        // When a file is deleted, the buffer is considered dirty.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer2 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
            .await
            .unwrap();
        buffer2.update(&mut cx, |_, cx| {
            cx.subscribe(&buffer2, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        fs::remove_file(dir.path().join("file2")).unwrap();
        buffer2.condition(&cx, |b, _| b.is_dirty()).await;
        assert_eq!(
            *events.borrow(),
            &[language::Event::Dirtied, language::Event::FileHandleChanged]
        );

        // When a file that is already dirty is deleted, no Dirtied event is emitted.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer3 = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
            .await
            .unwrap();
        buffer3.update(&mut cx, |_, cx| {
            cx.subscribe(&buffer3, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        worktree.flush_fs_events(&cx).await;
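        // Make the buffer dirty with an edit before deleting the file on disk.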
        buffer3.update(&mut cx, |buffer, cx| {
            buffer.edit(Some(0..0), "x", cx);
        });
        events.borrow_mut().clear();
        fs::remove_file(dir.path().join("file3")).unwrap();
        buffer3
            .condition(&cx, |_, _| !events.borrow().is_empty())
            .await;
        assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
        cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
    }

    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
        use std::fs;

        let initial_contents = "aaa\nbbbbb\nc\n";
        let dir = temp_tree(json!({ "the-file": initial_contents }));

        let project = Project::test(Arc::new(RealFs), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        worktree
            .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let abs_path = dir.path().join("the-file");
        let buffer = project
            .update(&mut cx, |p, cx| {
                p.open_buffer((worktree_id, "the-file"), cx)
            })
            .await
            .unwrap();

        // TODO
        // Add a cursor on each row.
        // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
        //     assert!(!buffer.is_dirty());
        //     buffer.add_selection_set(
        //         &(0..3)
        //             .map(|row| Selection {
        //                 id: row as usize,
        //                 start: Point::new(row, 1),
        //                 end: Point::new(row, 1),
        //                 reversed: false,
        //                 goal: SelectionGoal::None,
        //             })
        //             .collect::<Vec<_>>(),
        //         cx,
        //     )
        // });

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(&cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(&mut cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer
        buffer.update(&mut cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }

    #[gpui::test]
    async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
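        // Publish diagnostics in which each primary ("error 1", "error 2") lists its
        // hints via related_information, and each hint points back at its primary, so
        // that the project can group them together.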
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(&mut cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());

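        // All diagnostics are returned in position order, with each primary/hint
        // cluster sharing a group id and `is_primary` set only on the primaries.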
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }

    #[gpui::test]
    async fn test_rename(mut cx: gpui::TestAppContext) {
        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), &mut cx);
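        // Register the Rust language so that opening a .rs buffer starts the fake language server.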
        project.update(&mut cx, |project, _| {
            Arc::get_mut(&mut project.languages).unwrap().add(language);
        });

        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        let buffer = project
            .update(&mut cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("one.rs")), cx)
            })
            .await
            .unwrap();

        let mut fake_server = fake_servers.next().await.unwrap();

        let response = project.update(&mut cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
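        // Respond to the prepareRename request with the range of the symbol `ONE`.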
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _>(|params| {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                )))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(&cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

        let response = project.update(&mut cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
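        // Respond to the rename request with a workspace edit that renames `ONE` to `THREE` in both files.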
        fake_server
            .handle_request::<lsp::request::Rename, _>(|params| {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                })
            })
            .next()
            .await
            .unwrap();
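        // The rename yields a ProjectTransaction containing both edited buffers.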
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(&cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(&cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }
}