1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod worktree;
5
6use anyhow::{anyhow, Context, Result};
7use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
8use clock::ReplicaId;
9use collections::{hash_map, HashMap, HashSet};
10use futures::Future;
11use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
12use gpui::{
13 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
14 UpgradeModelHandle, WeakModelHandle,
15};
16use language::{
17 range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, Completion, CompletionLabel,
18 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
19 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
20};
21use lsp::{DiagnosticSeverity, LanguageServer};
22use lsp_command::*;
23use postage::{broadcast, prelude::Stream, sink::Sink, watch};
24use smol::block_on;
25use std::{
26 convert::TryInto,
27 ops::Range,
28 path::{Path, PathBuf},
29 sync::{atomic::AtomicBool, Arc},
30 time::Instant,
31};
32use util::{post_inc, ResultExt, TryFutureExt as _};
33
34pub use fs::*;
35pub use worktree::*;
36
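/// Root state for a single open project: its worktrees, open buffers, running
/// language servers, and (when collaborating) the collaborators and buffers
/// shared with them. A `Project` is either local (backed by the filesystem)
/// or a remote replica of another peer's project; see `ProjectClientState`.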
37pub struct Project {
38 worktrees: Vec<WorktreeHandle>,
39 active_entry: Option<ProjectEntry>,
40 languages: Arc<LanguageRegistry>,
41 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
42 loading_language_servers:
43 HashMap<(WorktreeId, String), watch::Receiver<Option<Arc<LanguageServer>>>>,
44 client: Arc<client::Client>,
45 user_store: ModelHandle<UserStore>,
46 fs: Arc<dyn Fs>,
47 client_state: ProjectClientState,
48 collaborators: HashMap<PeerId, Collaborator>,
49 subscriptions: Vec<client::Subscription>,
50 language_servers_with_diagnostics_running: isize,
51 open_buffers: HashMap<u64, OpenBuffer>,
52 opened_buffer: broadcast::Sender<()>,
53 loading_buffers: HashMap<
54 ProjectPath,
55 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
56 >,
57 shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
58}
59
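/// A buffer tracked by the project. While a buffer is still being opened, any
/// collaboration operations that arrive for it are queued in `Loading` and
/// applied once loading completes (see `register_buffer`).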
60enum OpenBuffer {
61 Loaded(WeakModelHandle<Buffer>),
62 Loading(Vec<Operation>),
63}
64
65enum WorktreeHandle {
66 Strong(ModelHandle<Worktree>),
67 Weak(WeakModelHandle<Worktree>),
68}
69
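/// Whether this project is the local authority or a remote replica. Local
/// projects track their optional server-assigned id and whether they are
/// currently shared; remote projects record the host's project id, this
/// peer's replica id, and whether the host has stopped sharing.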
70enum ProjectClientState {
71 Local {
72 is_shared: bool,
73 remote_id_tx: watch::Sender<Option<u64>>,
74 remote_id_rx: watch::Receiver<Option<u64>>,
75 _maintain_remote_id_task: Task<Option<()>>,
76 },
77 Remote {
78 sharing_has_stopped: bool,
79 remote_id: u64,
80 replica_id: ReplicaId,
81 },
82}
83
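/// Another peer participating in this project, resolved to a `User`.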
84#[derive(Clone, Debug)]
85pub struct Collaborator {
86 pub user: Arc<User>,
87 pub peer_id: PeerId,
88 pub replica_id: ReplicaId,
89}
90
91#[derive(Clone, Debug, PartialEq)]
92pub enum Event {
93 ActiveEntryChanged(Option<ProjectEntry>),
94 WorktreeRemoved(WorktreeId),
95 DiskBasedDiagnosticsStarted,
96 DiskBasedDiagnosticsUpdated,
97 DiskBasedDiagnosticsFinished,
98 DiagnosticsUpdated(ProjectPath),
99}
100
101#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
102pub struct ProjectPath {
103 pub worktree_id: WorktreeId,
104 pub path: Arc<Path>,
105}
106
107#[derive(Clone, Debug, Default, PartialEq)]
108pub struct DiagnosticSummary {
109 pub error_count: usize,
110 pub warning_count: usize,
111 pub info_count: usize,
112 pub hint_count: usize,
113}
114
115#[derive(Debug)]
116pub struct Definition {
117 pub target_buffer: ModelHandle<Buffer>,
118 pub target_range: Range<language::Anchor>,
119}
120
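/// The per-buffer transactions produced by one project-wide operation
/// (for example a format or a workspace edit), keyed by buffer handle.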
121#[derive(Default)]
122pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
123
124impl DiagnosticSummary {
125 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
126 let mut this = Self {
127 error_count: 0,
128 warning_count: 0,
129 info_count: 0,
130 hint_count: 0,
131 };
132
133 for entry in diagnostics {
134 if entry.diagnostic.is_primary {
135 match entry.diagnostic.severity {
136 DiagnosticSeverity::ERROR => this.error_count += 1,
137 DiagnosticSeverity::WARNING => this.warning_count += 1,
138 DiagnosticSeverity::INFORMATION => this.info_count += 1,
139 DiagnosticSeverity::HINT => this.hint_count += 1,
140 _ => {}
141 }
142 }
143 }
144
145 this
146 }
147
148 pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
149 proto::DiagnosticSummary {
150 path: path.to_string_lossy().to_string(),
151 error_count: self.error_count as u32,
152 warning_count: self.warning_count as u32,
153 info_count: self.info_count as u32,
154 hint_count: self.hint_count as u32,
155 }
156 }
157}
158
159#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
160pub struct ProjectEntry {
161 pub worktree_id: WorktreeId,
162 pub entry_id: usize,
163}
164
165impl Project {
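    /// Register the project's RPC message and request handlers on the client.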
166 pub fn init(client: &Arc<Client>) {
167 client.add_entity_message_handler(Self::handle_add_collaborator);
168 client.add_entity_message_handler(Self::handle_buffer_reloaded);
169 client.add_entity_message_handler(Self::handle_buffer_saved);
170 client.add_entity_message_handler(Self::handle_close_buffer);
171 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
172 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
173 client.add_entity_message_handler(Self::handle_remove_collaborator);
174 client.add_entity_message_handler(Self::handle_share_worktree);
175 client.add_entity_message_handler(Self::handle_unregister_worktree);
176 client.add_entity_message_handler(Self::handle_unshare_project);
177 client.add_entity_message_handler(Self::handle_update_buffer_file);
178 client.add_entity_message_handler(Self::handle_update_buffer);
179 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
180 client.add_entity_message_handler(Self::handle_update_worktree);
181 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
182 client.add_entity_request_handler(Self::handle_apply_code_action);
183 client.add_entity_request_handler(Self::handle_format_buffers);
184 client.add_entity_request_handler(Self::handle_get_code_actions);
185 client.add_entity_request_handler(Self::handle_get_completions);
186 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
187 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
188 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
189 client.add_entity_request_handler(Self::handle_open_buffer);
190 client.add_entity_request_handler(Self::handle_save_buffer);
191 }
192
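    /// Create a local project. A background task watches the client's
    /// connection status, registers the project and its worktrees with the
    /// server whenever a connection is established, and clears the remote id
    /// when the connection is lost.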
193 pub fn local(
194 client: Arc<Client>,
195 user_store: ModelHandle<UserStore>,
196 languages: Arc<LanguageRegistry>,
197 fs: Arc<dyn Fs>,
198 cx: &mut MutableAppContext,
199 ) -> ModelHandle<Self> {
200 cx.add_model(|cx: &mut ModelContext<Self>| {
201 let (remote_id_tx, remote_id_rx) = watch::channel();
202 let _maintain_remote_id_task = cx.spawn_weak({
203 let rpc = client.clone();
204 move |this, mut cx| {
205 async move {
206 let mut status = rpc.status();
207 while let Some(status) = status.recv().await {
208 if let Some(this) = this.upgrade(&cx) {
209 let remote_id = if let client::Status::Connected { .. } = status {
210 let response = rpc.request(proto::RegisterProject {}).await?;
211 Some(response.project_id)
212 } else {
213 None
214 };
215
216 if let Some(project_id) = remote_id {
217 let mut registrations = Vec::new();
218 this.update(&mut cx, |this, cx| {
219 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
220 registrations.push(worktree.update(
221 cx,
222 |worktree, cx| {
223 let worktree = worktree.as_local_mut().unwrap();
224 worktree.register(project_id, cx)
225 },
226 ));
227 }
228 });
229 for registration in registrations {
230 registration.await?;
231 }
232 }
233 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
234 }
235 }
236 Ok(())
237 }
238 .log_err()
239 }
240 });
241
242 Self {
243 worktrees: Default::default(),
244 collaborators: Default::default(),
245 open_buffers: Default::default(),
246 loading_buffers: Default::default(),
247 shared_buffers: Default::default(),
248 client_state: ProjectClientState::Local {
249 is_shared: false,
250 remote_id_tx,
251 remote_id_rx,
252 _maintain_remote_id_task,
253 },
254 opened_buffer: broadcast::channel(1).0,
255 subscriptions: Vec::new(),
256 active_entry: None,
257 languages,
258 client,
259 user_store,
260 fs,
261 language_servers_with_diagnostics_running: 0,
262 language_servers: Default::default(),
263 loading_language_servers: Default::default(),
264 }
265 })
266 }
267
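    /// Join a project hosted by another peer: connect, fetch the project's
    /// worktrees and collaborators from the server, and build a remote
    /// replica using the assigned replica id.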
268 pub async fn remote(
269 remote_id: u64,
270 client: Arc<Client>,
271 user_store: ModelHandle<UserStore>,
272 languages: Arc<LanguageRegistry>,
273 fs: Arc<dyn Fs>,
274 cx: &mut AsyncAppContext,
275 ) -> Result<ModelHandle<Self>> {
276 client.authenticate_and_connect(&cx).await?;
277
278 let response = client
279 .request(proto::JoinProject {
280 project_id: remote_id,
281 })
282 .await?;
283
284 let replica_id = response.replica_id as ReplicaId;
285
286 let mut worktrees = Vec::new();
287 for worktree in response.worktrees {
288 let (worktree, load_task) = cx
289 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
290 worktrees.push(worktree);
291 load_task.detach();
292 }
293
294 let this = cx.add_model(|cx| {
295 let mut this = Self {
296 worktrees: Vec::new(),
297 open_buffers: Default::default(),
298 loading_buffers: Default::default(),
299 opened_buffer: broadcast::channel(1).0,
300 shared_buffers: Default::default(),
301 active_entry: None,
302 collaborators: Default::default(),
303 languages,
304 user_store: user_store.clone(),
305 fs,
306 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
307 client,
308 client_state: ProjectClientState::Remote {
309 sharing_has_stopped: false,
310 remote_id,
311 replica_id,
312 },
313 language_servers_with_diagnostics_running: 0,
314 language_servers: Default::default(),
315 loading_language_servers: Default::default(),
316 };
317 for worktree in worktrees {
318 this.add_worktree(&worktree, cx);
319 }
320 this
321 });
322
323 let user_ids = response
324 .collaborators
325 .iter()
326 .map(|peer| peer.user_id)
327 .collect();
328 user_store
329 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
330 .await?;
331 let mut collaborators = HashMap::default();
332 for message in response.collaborators {
333 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
334 collaborators.insert(collaborator.peer_id, collaborator);
335 }
336
337 this.update(cx, |this, _| {
338 this.collaborators = collaborators;
339 });
340
341 Ok(this)
342 }
343
344 #[cfg(any(test, feature = "test-support"))]
345 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
346 let languages = Arc::new(LanguageRegistry::new());
347 let http_client = client::test::FakeHttpClient::with_404_response();
348 let client = client::Client::new(http_client.clone());
349 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
350 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
351 }
352
353 #[cfg(any(test, feature = "test-support"))]
354 pub fn shared_buffer(&self, peer_id: PeerId, remote_id: u64) -> Option<ModelHandle<Buffer>> {
355 self.shared_buffers
356 .get(&peer_id)
357 .and_then(|buffers| buffers.get(&remote_id))
358 .cloned()
359 }
360
361 #[cfg(any(test, feature = "test-support"))]
362 pub fn has_buffered_operations(&self) -> bool {
363 self.open_buffers
364 .values()
365 .any(|buffer| matches!(buffer, OpenBuffer::Loading(_)))
366 }
367
368 pub fn fs(&self) -> &Arc<dyn Fs> {
369 &self.fs
370 }
371
372 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
373 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
374 *remote_id_tx.borrow_mut() = remote_id;
375 }
376
377 self.subscriptions.clear();
378 if let Some(remote_id) = remote_id {
379 self.subscriptions
380 .push(self.client.add_model_for_remote_entity(remote_id, cx));
381 }
382 }
383
384 pub fn remote_id(&self) -> Option<u64> {
385 match &self.client_state {
386 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
387 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
388 }
389 }
390
391 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
392 let mut id = None;
393 let mut watch = None;
394 match &self.client_state {
395 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
396 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
397 }
398
399 async move {
400 if let Some(id) = id {
401 return id;
402 }
403 let mut watch = watch.unwrap();
404 loop {
405 let id = *watch.borrow();
406 if let Some(id) = id {
407 return id;
408 }
409 watch.recv().await;
410 }
411 }
412 }
413
414 pub fn replica_id(&self) -> ReplicaId {
415 match &self.client_state {
416 ProjectClientState::Local { .. } => 0,
417 ProjectClientState::Remote { replica_id, .. } => *replica_id,
418 }
419 }
420
421 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
422 &self.collaborators
423 }
424
425 pub fn worktrees<'a>(
426 &'a self,
427 cx: &'a AppContext,
428 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
429 self.worktrees
430 .iter()
431 .filter_map(move |worktree| worktree.upgrade(cx))
432 }
433
434 pub fn worktree_for_id(
435 &self,
436 id: WorktreeId,
437 cx: &AppContext,
438 ) -> Option<ModelHandle<Worktree>> {
439 self.worktrees(cx)
440 .find(|worktree| worktree.read(cx).id() == id)
441 }
442
443 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
444 let rpc = self.client.clone();
445 cx.spawn(|this, mut cx| async move {
446 let project_id = this.update(&mut cx, |this, _| {
447 if let ProjectClientState::Local {
448 is_shared,
449 remote_id_rx,
450 ..
451 } = &mut this.client_state
452 {
453 *is_shared = true;
454 remote_id_rx
455 .borrow()
456 .ok_or_else(|| anyhow!("no project id"))
457 } else {
458 Err(anyhow!("can't share a remote project"))
459 }
460 })?;
461
462 rpc.request(proto::ShareProject { project_id }).await?;
463 let mut tasks = Vec::new();
464 this.update(&mut cx, |this, cx| {
465 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
466 worktree.update(cx, |worktree, cx| {
467 let worktree = worktree.as_local_mut().unwrap();
468 tasks.push(worktree.share(project_id, cx));
469 });
470 }
471 });
472 for task in tasks {
473 task.await?;
474 }
475 this.update(&mut cx, |_, cx| cx.notify());
476 Ok(())
477 })
478 }
479
480 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
481 let rpc = self.client.clone();
482 cx.spawn(|this, mut cx| async move {
483 let project_id = this.update(&mut cx, |this, _| {
484 if let ProjectClientState::Local {
485 is_shared,
486 remote_id_rx,
487 ..
488 } = &mut this.client_state
489 {
490 *is_shared = false;
491 remote_id_rx
492 .borrow()
493 .ok_or_else(|| anyhow!("no project id"))
494 } else {
495 Err(anyhow!("can't share a remote project"))
496 }
497 })?;
498
499 rpc.send(proto::UnshareProject { project_id })?;
500 this.update(&mut cx, |this, cx| {
501 this.collaborators.clear();
502 this.shared_buffers.clear();
503 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
504 worktree.update(cx, |worktree, _| {
505 worktree.as_local_mut().unwrap().unshare();
506 });
507 }
508 cx.notify()
509 });
510 Ok(())
511 })
512 }
513
514 pub fn is_read_only(&self) -> bool {
515 match &self.client_state {
516 ProjectClientState::Local { .. } => false,
517 ProjectClientState::Remote {
518 sharing_has_stopped,
519 ..
520 } => *sharing_has_stopped,
521 }
522 }
523
524 pub fn is_local(&self) -> bool {
525 match &self.client_state {
526 ProjectClientState::Local { .. } => true,
527 ProjectClientState::Remote { .. } => false,
528 }
529 }
530
531 pub fn is_remote(&self) -> bool {
532 !self.is_local()
533 }
534
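    /// Open the buffer for `path`, reusing an already-open buffer when one
    /// exists and coalescing concurrent opens of the same path through
    /// `loading_buffers` so the underlying load only runs once.
    ///
    /// A rough usage sketch (not from the original source; assumes a
    /// `ModelHandle<Project>` named `project`, a `worktree_id`, and an async
    /// gpui context):
    ///
    /// ```ignore
    /// let path = ProjectPath {
    ///     worktree_id,
    ///     path: Path::new("src/main.rs").into(),
    /// };
    /// let buffer = project
    ///     .update(cx, |project, cx| project.open_buffer(path, cx))
    ///     .await?;
    /// ```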
535 pub fn open_buffer(
536 &mut self,
537 path: impl Into<ProjectPath>,
538 cx: &mut ModelContext<Self>,
539 ) -> Task<Result<ModelHandle<Buffer>>> {
540 let project_path = path.into();
541 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
542 worktree
543 } else {
544 return Task::ready(Err(anyhow!("no such worktree")));
545 };
546
547 // If there is already a buffer for the given path, then return it.
548 let existing_buffer = self.get_open_buffer(&project_path, cx);
549 if let Some(existing_buffer) = existing_buffer {
550 return Task::ready(Ok(existing_buffer));
551 }
552
553 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
554 // If the given path is already being loaded, then wait for that existing
555 // task to complete and return the same buffer.
556 hash_map::Entry::Occupied(e) => e.get().clone(),
557
558 // Otherwise, record the fact that this path is now being loaded.
559 hash_map::Entry::Vacant(entry) => {
560 let (mut tx, rx) = postage::watch::channel();
561 entry.insert(rx.clone());
562
563 let load_buffer = if worktree.read(cx).is_local() {
564 self.open_local_buffer(&project_path.path, &worktree, cx)
565 } else {
566 self.open_remote_buffer(&project_path.path, &worktree, cx)
567 };
568
569 cx.spawn(move |this, mut cx| async move {
570 let load_result = load_buffer.await;
571 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
572 // Record the fact that the buffer is no longer loading.
573 this.loading_buffers.remove(&project_path);
574 if this.loading_buffers.is_empty() {
575 this.open_buffers
576 .retain(|_, buffer| matches!(buffer, OpenBuffer::Loaded(_)))
577 }
578
579 let buffer = load_result.map_err(Arc::new)?;
580 Ok(buffer)
581 }));
582 })
583 .detach();
584 rx
585 }
586 };
587
588 cx.foreground().spawn(async move {
589 loop {
590 if let Some(result) = loading_watch.borrow().as_ref() {
591 match result {
592 Ok(buffer) => return Ok(buffer.clone()),
593 Err(error) => return Err(anyhow!("{}", error)),
594 }
595 }
596 loading_watch.recv().await;
597 }
598 })
599 }
600
601 fn open_local_buffer(
602 &mut self,
603 path: &Arc<Path>,
604 worktree: &ModelHandle<Worktree>,
605 cx: &mut ModelContext<Self>,
606 ) -> Task<Result<ModelHandle<Buffer>>> {
607 let load_buffer = worktree.update(cx, |worktree, cx| {
608 let worktree = worktree.as_local_mut().unwrap();
609 worktree.load_buffer(path, cx)
610 });
611 let worktree = worktree.downgrade();
612 cx.spawn(|this, mut cx| async move {
613 let buffer = load_buffer.await?;
614 let worktree = worktree
615 .upgrade(&cx)
616 .ok_or_else(|| anyhow!("worktree was removed"))?;
617 this.update(&mut cx, |this, cx| {
618 this.register_buffer(&buffer, Some(&worktree), cx)
619 })?;
620 Ok(buffer)
621 })
622 }
623
624 fn open_remote_buffer(
625 &mut self,
626 path: &Arc<Path>,
627 worktree: &ModelHandle<Worktree>,
628 cx: &mut ModelContext<Self>,
629 ) -> Task<Result<ModelHandle<Buffer>>> {
630 let rpc = self.client.clone();
631 let project_id = self.remote_id().unwrap();
632 let remote_worktree_id = worktree.read(cx).id();
633 let path = path.clone();
634 let path_string = path.to_string_lossy().to_string();
635 cx.spawn(|this, mut cx| async move {
636 let response = rpc
637 .request(proto::OpenBuffer {
638 project_id,
639 worktree_id: remote_worktree_id.to_proto(),
640 path: path_string,
641 })
642 .await?;
643 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
644 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
645 .await
646 })
647 }
648
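    /// Open a buffer for a file URI reported by a language server. If the path
    /// lies outside every existing worktree, a weak worktree is created for it
    /// and the given language server is reused for that worktree.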
649 fn open_local_buffer_from_lsp_path(
650 &mut self,
651 abs_path: lsp::Url,
652 lang_name: String,
653 lang_server: Arc<LanguageServer>,
654 cx: &mut ModelContext<Self>,
655 ) -> Task<Result<ModelHandle<Buffer>>> {
656 cx.spawn(|this, mut cx| async move {
657 let abs_path = abs_path
658 .to_file_path()
659 .map_err(|_| anyhow!("can't convert URI to path"))?;
660 let (worktree, relative_path) = if let Some(result) =
661 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
662 {
663 result
664 } else {
665 let worktree = this
666 .update(&mut cx, |this, cx| {
667 this.create_local_worktree(&abs_path, true, cx)
668 })
669 .await?;
670 this.update(&mut cx, |this, cx| {
671 this.language_servers
672 .insert((worktree.read(cx).id(), lang_name), lang_server);
673 });
674 (worktree, PathBuf::new())
675 };
676
677 let project_path = ProjectPath {
678 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
679 path: relative_path.into(),
680 };
681 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
682 .await
683 })
684 }
685
686 pub fn save_buffer_as(
687 &self,
688 buffer: ModelHandle<Buffer>,
689 abs_path: PathBuf,
690 cx: &mut ModelContext<Project>,
691 ) -> Task<Result<()>> {
692 let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
693 cx.spawn(|this, mut cx| async move {
694 let (worktree, path) = worktree_task.await?;
695 worktree
696 .update(&mut cx, |worktree, cx| {
697 worktree
698 .as_local_mut()
699 .unwrap()
700 .save_buffer_as(buffer.clone(), path, cx)
701 })
702 .await?;
703 this.update(&mut cx, |this, cx| {
704 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
705 });
706 Ok(())
707 })
708 }
709
710 #[cfg(any(test, feature = "test-support"))]
711 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
712 let path = path.into();
713 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
714 self.open_buffers.iter().any(|(_, buffer)| {
715 if let Some(buffer) = buffer.upgrade(cx) {
716 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
717 if file.worktree == worktree && file.path() == &path.path {
718 return true;
719 }
720 }
721 }
722 false
723 })
724 } else {
725 false
726 }
727 }
728
729 fn get_open_buffer(
730 &mut self,
731 path: &ProjectPath,
732 cx: &mut ModelContext<Self>,
733 ) -> Option<ModelHandle<Buffer>> {
734 let mut result = None;
735 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
736 self.open_buffers.retain(|_, buffer| {
737 if let Some(buffer) = buffer.upgrade(cx) {
738 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
739 if file.worktree == worktree && file.path() == &path.path {
740 result = Some(buffer);
741 }
742 }
743 true
744 } else {
745 false
746 }
747 });
748 result
749 }
750
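    /// Record a newly opened buffer in `open_buffers`. If operations arrived
    /// for this buffer while it was still loading, apply them now; registering
    /// the same buffer twice is an error.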
751 fn register_buffer(
752 &mut self,
753 buffer: &ModelHandle<Buffer>,
754 worktree: Option<&ModelHandle<Worktree>>,
755 cx: &mut ModelContext<Self>,
756 ) -> Result<()> {
757 match self.open_buffers.insert(
758 buffer.read(cx).remote_id(),
759 OpenBuffer::Loaded(buffer.downgrade()),
760 ) {
761 None => {}
762 Some(OpenBuffer::Loading(operations)) => {
763 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
764 }
765 Some(OpenBuffer::Loaded(_)) => Err(anyhow!("registered the same buffer twice"))?,
766 }
767 self.assign_language_to_buffer(&buffer, worktree, cx);
768 Ok(())
769 }
770
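    /// Detect the buffer's language from its path and, for local worktrees,
    /// start (or reuse) the matching language server and seed the buffer with
    /// any diagnostics already stored on the worktree.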
771 fn assign_language_to_buffer(
772 &mut self,
773 buffer: &ModelHandle<Buffer>,
774 worktree: Option<&ModelHandle<Worktree>>,
775 cx: &mut ModelContext<Self>,
776 ) -> Option<()> {
777 let (path, full_path) = {
778 let file = buffer.read(cx).file()?;
779 (file.path().clone(), file.full_path(cx))
780 };
781
782 // If the buffer has a language, set it and start/assign the language server
783 if let Some(language) = self.languages.select_language(&full_path).cloned() {
784 buffer.update(cx, |buffer, cx| {
785 buffer.set_language(Some(language.clone()), cx);
786 });
787
788 // For local worktrees, start a language server if needed.
789 // Also assign the language server and any previously stored diagnostics to the buffer.
790 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
791 let worktree_id = local_worktree.id();
792 let worktree_abs_path = local_worktree.abs_path().clone();
793 let buffer = buffer.downgrade();
794 let language_server =
795 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
796
797 cx.spawn_weak(|_, mut cx| async move {
798 if let Some(language_server) = language_server.await {
799 if let Some(buffer) = buffer.upgrade(&cx) {
800 buffer.update(&mut cx, |buffer, cx| {
801 buffer.set_language_server(Some(language_server), cx);
802 });
803 }
804 }
805 })
806 .detach();
807 }
808 }
809
810 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
811 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
812 buffer.update(cx, |buffer, cx| {
813 buffer.update_diagnostics(diagnostics, None, cx).log_err();
814 });
815 }
816 }
817
818 None
819 }
820
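    /// Start (or reuse) the language server for this worktree/language pair.
    /// Concurrent callers share a single startup via `loading_language_servers`,
    /// and the returned task resolves to `None` if the server fails to start.
    /// The spawned listener forwards `PublishDiagnostics` and progress
    /// notifications as diagnostics start/update/finish events.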
821 fn start_language_server(
822 &mut self,
823 worktree_id: WorktreeId,
824 worktree_path: Arc<Path>,
825 language: Arc<Language>,
826 cx: &mut ModelContext<Self>,
827 ) -> Task<Option<Arc<LanguageServer>>> {
828 enum LspEvent {
829 DiagnosticsStart,
830 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
831 DiagnosticsFinish,
832 }
833
834 let key = (worktree_id, language.name().to_string());
835 if let Some(language_server) = self.language_servers.get(&key) {
836 return Task::ready(Some(language_server.clone()));
837 } else if let Some(mut language_server) = self.loading_language_servers.get(&key).cloned() {
838 return cx
839 .foreground()
840 .spawn(async move { language_server.recv().await.flatten() });
841 }
842
843 let (mut language_server_tx, language_server_rx) = watch::channel();
844 self.loading_language_servers
845 .insert(key.clone(), language_server_rx);
846 let language_server = language.start_server(worktree_path, cx);
847 let rpc = self.client.clone();
848 cx.spawn_weak(|this, mut cx| async move {
849 let language_server = language_server.await.log_err().flatten();
850 if let Some(this) = this.upgrade(&cx) {
851 this.update(&mut cx, |this, _| {
852 this.loading_language_servers.remove(&key);
853 if let Some(language_server) = language_server.clone() {
854 this.language_servers.insert(key, language_server);
855 }
856 });
857 }
858
859 let language_server = language_server?;
860 *language_server_tx.borrow_mut() = Some(language_server.clone());
861
862 let disk_based_sources = language
863 .disk_based_diagnostic_sources()
864 .cloned()
865 .unwrap_or_default();
866 let disk_based_diagnostics_progress_token =
867 language.disk_based_diagnostics_progress_token().cloned();
868 let has_disk_based_diagnostic_progress_token =
869 disk_based_diagnostics_progress_token.is_some();
870 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
871
872 // Listen for `PublishDiagnostics` notifications.
873 language_server
874 .on_notification::<lsp::notification::PublishDiagnostics, _>({
875 let diagnostics_tx = diagnostics_tx.clone();
876 move |params| {
877 if !has_disk_based_diagnostic_progress_token {
878 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
879 }
880 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params))).ok();
881 if !has_disk_based_diagnostic_progress_token {
882 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
883 }
884 }
885 })
886 .detach();
887
888 // Listen for `Progress` notifications. Send an event when the language server
889 // transitions between running jobs and not running any jobs.
890 let mut running_jobs_for_this_server: i32 = 0;
891 language_server
892 .on_notification::<lsp::notification::Progress, _>(move |params| {
893 let token = match params.token {
894 lsp::NumberOrString::Number(_) => None,
895 lsp::NumberOrString::String(token) => Some(token),
896 };
897
898 if token == disk_based_diagnostics_progress_token {
899 match params.value {
900 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
901 lsp::WorkDoneProgress::Begin(_) => {
902 running_jobs_for_this_server += 1;
903 if running_jobs_for_this_server == 1 {
904 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart))
905 .ok();
906 }
907 }
908 lsp::WorkDoneProgress::End(_) => {
909 running_jobs_for_this_server -= 1;
910 if running_jobs_for_this_server == 0 {
911 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish))
912 .ok();
913 }
914 }
915 _ => {}
916 },
917 }
918 }
919 })
920 .detach();
921
922 // Process all the LSP events.
923 cx.spawn(|mut cx| async move {
924 while let Ok(message) = diagnostics_rx.recv().await {
925 let this = this.upgrade(&cx)?;
926 match message {
927 LspEvent::DiagnosticsStart => {
928 this.update(&mut cx, |this, cx| {
929 this.disk_based_diagnostics_started(cx);
930 if let Some(project_id) = this.remote_id() {
931 rpc.send(proto::DiskBasedDiagnosticsUpdating { project_id })
932 .log_err();
933 }
934 });
935 }
936 LspEvent::DiagnosticsUpdate(mut params) => {
937 language.process_diagnostics(&mut params);
938 this.update(&mut cx, |this, cx| {
939 this.update_diagnostics(params, &disk_based_sources, cx)
940 .log_err();
941 });
942 }
943 LspEvent::DiagnosticsFinish => {
944 this.update(&mut cx, |this, cx| {
945 this.disk_based_diagnostics_finished(cx);
946 if let Some(project_id) = this.remote_id() {
947 rpc.send(proto::DiskBasedDiagnosticsUpdated { project_id })
948 .log_err();
949 }
950 });
951 }
952 }
953 }
954 Some(())
955 })
956 .detach();
957
958 Some(language_server)
959 })
960 }
961
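    /// Translate an LSP `PublishDiagnostics` notification into diagnostic
    /// entries: each primary diagnostic gets a fresh group id, its related
    /// information becomes supporting entries in the same group, and
    /// severities reported for supporting locations are propagated back onto
    /// those entries before the results are stored on the worktree.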
962 pub fn update_diagnostics(
963 &mut self,
964 params: lsp::PublishDiagnosticsParams,
965 disk_based_sources: &HashSet<String>,
966 cx: &mut ModelContext<Self>,
967 ) -> Result<()> {
968 let abs_path = params
969 .uri
970 .to_file_path()
971 .map_err(|_| anyhow!("URI is not a file"))?;
972 let mut next_group_id = 0;
973 let mut diagnostics = Vec::default();
974 let mut primary_diagnostic_group_ids = HashMap::default();
975 let mut sources_by_group_id = HashMap::default();
976 let mut supporting_diagnostic_severities = HashMap::default();
        for diagnostic in &params.diagnostics {
978 let source = diagnostic.source.as_ref();
979 let code = diagnostic.code.as_ref().map(|code| match code {
980 lsp::NumberOrString::Number(code) => code.to_string(),
981 lsp::NumberOrString::String(code) => code.clone(),
982 });
983 let range = range_from_lsp(diagnostic.range);
984 let is_supporting = diagnostic
985 .related_information
986 .as_ref()
987 .map_or(false, |infos| {
988 infos.iter().any(|info| {
989 primary_diagnostic_group_ids.contains_key(&(
990 source,
991 code.clone(),
992 range_from_lsp(info.location.range),
993 ))
994 })
995 });
996
997 if is_supporting {
998 if let Some(severity) = diagnostic.severity {
999 supporting_diagnostic_severities
1000 .insert((source, code.clone(), range), severity);
1001 }
1002 } else {
1003 let group_id = post_inc(&mut next_group_id);
1004 let is_disk_based =
1005 source.map_or(false, |source| disk_based_sources.contains(source));
1006
1007 sources_by_group_id.insert(group_id, source);
1008 primary_diagnostic_group_ids
1009 .insert((source, code.clone(), range.clone()), group_id);
1010
1011 diagnostics.push(DiagnosticEntry {
1012 range,
1013 diagnostic: Diagnostic {
1014 code: code.clone(),
1015 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1016 message: diagnostic.message.clone(),
1017 group_id,
1018 is_primary: true,
1019 is_valid: true,
1020 is_disk_based,
1021 },
1022 });
1023 if let Some(infos) = &diagnostic.related_information {
1024 for info in infos {
1025 if info.location.uri == params.uri && !info.message.is_empty() {
1026 let range = range_from_lsp(info.location.range);
1027 diagnostics.push(DiagnosticEntry {
1028 range,
1029 diagnostic: Diagnostic {
1030 code: code.clone(),
1031 severity: DiagnosticSeverity::INFORMATION,
1032 message: info.message.clone(),
1033 group_id,
1034 is_primary: false,
1035 is_valid: true,
1036 is_disk_based,
1037 },
1038 });
1039 }
1040 }
1041 }
1042 }
1043 }
1044
1045 for entry in &mut diagnostics {
1046 let diagnostic = &mut entry.diagnostic;
1047 if !diagnostic.is_primary {
1048 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1049 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1050 source,
1051 diagnostic.code.clone(),
1052 entry.range.clone(),
1053 )) {
1054 diagnostic.severity = severity;
1055 }
1056 }
1057 }
1058
1059 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1060 Ok(())
1061 }
1062
1063 pub fn update_diagnostic_entries(
1064 &mut self,
1065 abs_path: PathBuf,
1066 version: Option<i32>,
1067 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1068 cx: &mut ModelContext<Project>,
1069 ) -> Result<(), anyhow::Error> {
1070 let (worktree, relative_path) = self
1071 .find_local_worktree(&abs_path, cx)
1072 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1073 let project_path = ProjectPath {
1074 worktree_id: worktree.read(cx).id(),
1075 path: relative_path.into(),
1076 };
1077
1078 for buffer in self.open_buffers.values() {
1079 if let Some(buffer) = buffer.upgrade(cx) {
1080 if buffer
1081 .read(cx)
1082 .file()
1083 .map_or(false, |file| *file.path() == project_path.path)
1084 {
1085 buffer.update(cx, |buffer, cx| {
1086 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1087 })?;
1088 break;
1089 }
1090 }
1091 }
1092 worktree.update(cx, |worktree, cx| {
1093 worktree
1094 .as_local_mut()
1095 .ok_or_else(|| anyhow!("not a local worktree"))?
1096 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1097 })?;
1098 cx.emit(Event::DiagnosticsUpdated(project_path));
1099 Ok(())
1100 }
1101
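    /// Format the given buffers. Buffers on remote worktrees are formatted by
    /// the host over RPC; local buffers are formatted by their language
    /// server, and the resulting edits are grouped into a single
    /// `ProjectTransaction`, optionally pushed onto each buffer's history.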
1102 pub fn format(
1103 &self,
1104 buffers: HashSet<ModelHandle<Buffer>>,
1105 push_to_history: bool,
1106 cx: &mut ModelContext<Project>,
1107 ) -> Task<Result<ProjectTransaction>> {
1108 let mut local_buffers = Vec::new();
1109 let mut remote_buffers = None;
1110 for buffer_handle in buffers {
1111 let buffer = buffer_handle.read(cx);
1112 let worktree;
1113 if let Some(file) = File::from_dyn(buffer.file()) {
1114 worktree = file.worktree.clone();
1115 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1116 let lang_server;
1117 if let Some(lang) = buffer.language() {
1118 if let Some(server) = self
1119 .language_servers
1120 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1121 {
1122 lang_server = server.clone();
1123 } else {
1124 return Task::ready(Ok(Default::default()));
1125 };
1126 } else {
1127 return Task::ready(Ok(Default::default()));
1128 }
1129
1130 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1131 } else {
1132 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1133 }
1134 } else {
1135 return Task::ready(Ok(Default::default()));
1136 }
1137 }
1138
1139 let remote_buffers = self.remote_id().zip(remote_buffers);
1140 let client = self.client.clone();
1141
1142 cx.spawn(|this, mut cx| async move {
1143 let mut project_transaction = ProjectTransaction::default();
1144
1145 if let Some((project_id, remote_buffers)) = remote_buffers {
1146 let response = client
1147 .request(proto::FormatBuffers {
1148 project_id,
1149 buffer_ids: remote_buffers
1150 .iter()
1151 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1152 .collect(),
1153 })
1154 .await?
1155 .transaction
1156 .ok_or_else(|| anyhow!("missing transaction"))?;
1157 project_transaction = this
1158 .update(&mut cx, |this, cx| {
1159 this.deserialize_project_transaction(response, push_to_history, cx)
1160 })
1161 .await?;
1162 }
1163
1164 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1165 let lsp_edits = lang_server
1166 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1167 text_document: lsp::TextDocumentIdentifier::new(
1168 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1169 ),
1170 options: Default::default(),
1171 work_done_progress_params: Default::default(),
1172 })
1173 .await?;
1174
1175 if let Some(lsp_edits) = lsp_edits {
1176 let edits = buffer
1177 .update(&mut cx, |buffer, cx| {
1178 buffer.edits_from_lsp(lsp_edits, None, cx)
1179 })
1180 .await?;
1181 buffer.update(&mut cx, |buffer, cx| {
1182 buffer.finalize_last_transaction();
1183 buffer.start_transaction();
1184 for (range, text) in edits {
1185 buffer.edit([range], text, cx);
1186 }
1187 if buffer.end_transaction(cx).is_some() {
1188 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1189 if !push_to_history {
1190 buffer.forget_transaction(transaction.id);
1191 }
1192 project_transaction.0.insert(cx.handle(), transaction);
1193 }
1194 });
1195 }
1196 }
1197
1198 Ok(project_transaction)
1199 })
1200 }
1201
1202 pub fn definition<T: ToPointUtf16>(
1203 &self,
1204 buffer: &ModelHandle<Buffer>,
1205 position: T,
1206 cx: &mut ModelContext<Self>,
1207 ) -> Task<Result<Vec<Definition>>> {
1208 let position = position.to_point_utf16(buffer.read(cx));
1209 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1210 }
1211
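    /// Request completions at `position`, either from the local language
    /// server or from the host over RPC, converting LSP text edits into
    /// anchored `Completion`s (insert/replace completions are skipped).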
1212 pub fn completions<T: ToPointUtf16>(
1213 &self,
1214 source_buffer_handle: &ModelHandle<Buffer>,
1215 position: T,
1216 cx: &mut ModelContext<Self>,
1217 ) -> Task<Result<Vec<Completion>>> {
1218 let source_buffer_handle = source_buffer_handle.clone();
1219 let source_buffer = source_buffer_handle.read(cx);
1220 let buffer_id = source_buffer.remote_id();
1221 let language = source_buffer.language().cloned();
1222 let worktree;
1223 let buffer_abs_path;
1224 if let Some(file) = File::from_dyn(source_buffer.file()) {
1225 worktree = file.worktree.clone();
1226 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1227 } else {
1228 return Task::ready(Ok(Default::default()));
1229 };
1230
1231 let position = position.to_point_utf16(source_buffer);
1232 let anchor = source_buffer.anchor_after(position);
1233
1234 if worktree.read(cx).as_local().is_some() {
1235 let buffer_abs_path = buffer_abs_path.unwrap();
1236 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1237 server
1238 } else {
1239 return Task::ready(Ok(Default::default()));
1240 };
1241
1242 cx.spawn(|_, cx| async move {
1243 let completions = lang_server
1244 .request::<lsp::request::Completion>(lsp::CompletionParams {
1245 text_document_position: lsp::TextDocumentPositionParams::new(
1246 lsp::TextDocumentIdentifier::new(
1247 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1248 ),
1249 position.to_lsp_position(),
1250 ),
1251 context: Default::default(),
1252 work_done_progress_params: Default::default(),
1253 partial_result_params: Default::default(),
1254 })
1255 .await
1256 .context("lsp completion request failed")?;
1257
1258 let completions = if let Some(completions) = completions {
1259 match completions {
1260 lsp::CompletionResponse::Array(completions) => completions,
1261 lsp::CompletionResponse::List(list) => list.items,
1262 }
1263 } else {
1264 Default::default()
1265 };
1266
1267 source_buffer_handle.read_with(&cx, |this, _| {
1268 Ok(completions
1269 .into_iter()
1270 .filter_map(|lsp_completion| {
1271 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1272 lsp::CompletionTextEdit::Edit(edit) => {
1273 (range_from_lsp(edit.range), edit.new_text.clone())
1274 }
1275 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1276 log::info!("unsupported insert/replace completion");
1277 return None;
1278 }
1279 };
1280
1281 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1282 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1283 if clipped_start == old_range.start && clipped_end == old_range.end {
1284 Some(Completion {
1285 old_range: this.anchor_before(old_range.start)
1286 ..this.anchor_after(old_range.end),
1287 new_text,
1288 label: language
1289 .as_ref()
1290 .and_then(|l| l.label_for_completion(&lsp_completion))
1291 .unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
1292 lsp_completion,
1293 })
1294 } else {
1295 None
1296 }
1297 })
1298 .collect())
1299 })
1300 })
1301 } else if let Some(project_id) = self.remote_id() {
1302 let rpc = self.client.clone();
1303 let message = proto::GetCompletions {
1304 project_id,
1305 buffer_id,
1306 position: Some(language::proto::serialize_anchor(&anchor)),
1307 version: (&source_buffer.version()).into(),
1308 };
1309 cx.spawn_weak(|_, mut cx| async move {
1310 let response = rpc.request(message).await?;
1311
1312 source_buffer_handle
1313 .update(&mut cx, |buffer, _| {
1314 buffer.wait_for_version(response.version.into())
1315 })
1316 .await;
1317
1318 response
1319 .completions
1320 .into_iter()
1321 .map(|completion| {
1322 language::proto::deserialize_completion(completion, language.as_ref())
1323 })
1324 .collect()
1325 })
1326 } else {
1327 Task::ready(Ok(Default::default()))
1328 }
1329 }
1330
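    /// Resolve a completion with the language server and apply any
    /// `additional_text_edits` it returns, yielding the resulting transaction.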
1331 pub fn apply_additional_edits_for_completion(
1332 &self,
1333 buffer_handle: ModelHandle<Buffer>,
1334 completion: Completion,
1335 push_to_history: bool,
1336 cx: &mut ModelContext<Self>,
1337 ) -> Task<Result<Option<Transaction>>> {
1338 let buffer = buffer_handle.read(cx);
1339 let buffer_id = buffer.remote_id();
1340
1341 if self.is_local() {
1342 let lang_server = if let Some(language_server) = buffer.language_server() {
1343 language_server.clone()
1344 } else {
1345 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1346 };
1347
1348 cx.spawn(|_, mut cx| async move {
1349 let resolved_completion = lang_server
1350 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1351 .await?;
1352 if let Some(edits) = resolved_completion.additional_text_edits {
1353 let edits = buffer_handle
1354 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1355 .await?;
1356 buffer_handle.update(&mut cx, |buffer, cx| {
1357 buffer.finalize_last_transaction();
1358 buffer.start_transaction();
1359 for (range, text) in edits {
1360 buffer.edit([range], text, cx);
1361 }
1362 let transaction = if buffer.end_transaction(cx).is_some() {
1363 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1364 if !push_to_history {
1365 buffer.forget_transaction(transaction.id);
1366 }
1367 Some(transaction)
1368 } else {
1369 None
1370 };
1371 Ok(transaction)
1372 })
1373 } else {
1374 Ok(None)
1375 }
1376 })
1377 } else if let Some(project_id) = self.remote_id() {
1378 let client = self.client.clone();
1379 cx.spawn(|_, mut cx| async move {
1380 let response = client
1381 .request(proto::ApplyCompletionAdditionalEdits {
1382 project_id,
1383 buffer_id,
1384 completion: Some(language::proto::serialize_completion(&completion)),
1385 })
1386 .await?;
1387
1388 if let Some(transaction) = response.transaction {
1389 let transaction = language::proto::deserialize_transaction(transaction)?;
1390 buffer_handle
1391 .update(&mut cx, |buffer, _| {
1392 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1393 })
1394 .await;
1395 if push_to_history {
1396 buffer_handle.update(&mut cx, |buffer, _| {
1397 buffer.push_transaction(transaction.clone(), Instant::now());
1398 });
1399 }
1400 Ok(Some(transaction))
1401 } else {
1402 Ok(None)
1403 }
1404 })
1405 } else {
1406 Task::ready(Err(anyhow!("project does not have a remote id")))
1407 }
1408 }
1409
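    /// Fetch quickfix and refactor code actions for `range` from the language
    /// server, or from the host over RPC for remote buffers.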
1410 pub fn code_actions<T: ToOffset>(
1411 &self,
1412 buffer_handle: &ModelHandle<Buffer>,
1413 range: Range<T>,
1414 cx: &mut ModelContext<Self>,
1415 ) -> Task<Result<Vec<CodeAction>>> {
1416 let buffer_handle = buffer_handle.clone();
1417 let buffer = buffer_handle.read(cx);
1418 let buffer_id = buffer.remote_id();
1419 let worktree;
1420 let buffer_abs_path;
1421 if let Some(file) = File::from_dyn(buffer.file()) {
1422 worktree = file.worktree.clone();
1423 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1424 } else {
1425 return Task::ready(Ok(Default::default()));
1426 };
1427 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1428
1429 if worktree.read(cx).as_local().is_some() {
1430 let buffer_abs_path = buffer_abs_path.unwrap();
1431 let lang_name;
1432 let lang_server;
1433 if let Some(lang) = buffer.language() {
1434 lang_name = lang.name().to_string();
1435 if let Some(server) = self
1436 .language_servers
1437 .get(&(worktree.read(cx).id(), lang_name.clone()))
1438 {
1439 lang_server = server.clone();
1440 } else {
1441 return Task::ready(Ok(Default::default()));
1442 };
1443 } else {
1444 return Task::ready(Ok(Default::default()));
1445 }
1446
1447 let lsp_range = lsp::Range::new(
1448 range.start.to_point_utf16(buffer).to_lsp_position(),
1449 range.end.to_point_utf16(buffer).to_lsp_position(),
1450 );
1451 cx.foreground().spawn(async move {
1452 Ok(lang_server
1453 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1454 text_document: lsp::TextDocumentIdentifier::new(
1455 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1456 ),
1457 range: lsp_range,
1458 work_done_progress_params: Default::default(),
1459 partial_result_params: Default::default(),
1460 context: lsp::CodeActionContext {
1461 diagnostics: Default::default(),
1462 only: Some(vec![
1463 lsp::CodeActionKind::QUICKFIX,
1464 lsp::CodeActionKind::REFACTOR,
1465 lsp::CodeActionKind::REFACTOR_EXTRACT,
1466 ]),
1467 },
1468 })
1469 .await?
1470 .unwrap_or_default()
1471 .into_iter()
1472 .filter_map(|entry| {
1473 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1474 Some(CodeAction {
1475 range: range.clone(),
1476 lsp_action,
1477 })
1478 } else {
1479 None
1480 }
1481 })
1482 .collect())
1483 })
1484 } else if let Some(project_id) = self.remote_id() {
1485 let rpc = self.client.clone();
1486 cx.spawn_weak(|_, mut cx| async move {
1487 let response = rpc
1488 .request(proto::GetCodeActions {
1489 project_id,
1490 buffer_id,
1491 start: Some(language::proto::serialize_anchor(&range.start)),
1492 end: Some(language::proto::serialize_anchor(&range.end)),
1493 })
1494 .await?;
1495
1496 buffer_handle
1497 .update(&mut cx, |buffer, _| {
1498 buffer.wait_for_version(response.version.into())
1499 })
1500 .await;
1501
1502 response
1503 .actions
1504 .into_iter()
1505 .map(language::proto::deserialize_code_action)
1506 .collect()
1507 })
1508 } else {
1509 Task::ready(Ok(Default::default()))
1510 }
1511 }
1512
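    /// Apply a code action: refresh its range and resolve it with the language
    /// server (or re-fetch it by title if it carries no resolve data), then
    /// apply the resulting workspace edit.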
1513 pub fn apply_code_action(
1514 &self,
1515 buffer_handle: ModelHandle<Buffer>,
1516 mut action: CodeAction,
1517 push_to_history: bool,
1518 cx: &mut ModelContext<Self>,
1519 ) -> Task<Result<ProjectTransaction>> {
1520 if self.is_local() {
1521 let buffer = buffer_handle.read(cx);
1522 let lang_name = if let Some(lang) = buffer.language() {
1523 lang.name().to_string()
1524 } else {
1525 return Task::ready(Ok(Default::default()));
1526 };
1527 let lang_server = if let Some(language_server) = buffer.language_server() {
1528 language_server.clone()
1529 } else {
1530 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1531 };
1532 let range = action.range.to_point_utf16(buffer);
1533
1534 cx.spawn(|this, mut cx| async move {
1535 if let Some(lsp_range) = action
1536 .lsp_action
1537 .data
1538 .as_mut()
1539 .and_then(|d| d.get_mut("codeActionParams"))
1540 .and_then(|d| d.get_mut("range"))
1541 {
1542 *lsp_range = serde_json::to_value(&lsp::Range::new(
1543 range.start.to_lsp_position(),
1544 range.end.to_lsp_position(),
1545 ))
1546 .unwrap();
1547 action.lsp_action = lang_server
1548 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1549 .await?;
1550 } else {
1551 let actions = this
1552 .update(&mut cx, |this, cx| {
1553 this.code_actions(&buffer_handle, action.range, cx)
1554 })
1555 .await?;
1556 action.lsp_action = actions
1557 .into_iter()
1558 .find(|a| a.lsp_action.title == action.lsp_action.title)
1559 .ok_or_else(|| anyhow!("code action is outdated"))?
1560 .lsp_action;
1561 }
1562
1563 if let Some(edit) = action.lsp_action.edit {
1564 Self::deserialize_workspace_edit(
1565 this,
1566 edit,
1567 push_to_history,
1568 lang_name,
1569 lang_server,
1570 &mut cx,
1571 )
1572 .await
1573 } else {
1574 Ok(ProjectTransaction::default())
1575 }
1576 })
1577 } else if let Some(project_id) = self.remote_id() {
1578 let client = self.client.clone();
1579 let request = proto::ApplyCodeAction {
1580 project_id,
1581 buffer_id: buffer_handle.read(cx).remote_id(),
1582 action: Some(language::proto::serialize_code_action(&action)),
1583 };
1584 cx.spawn(|this, mut cx| async move {
1585 let response = client
1586 .request(request)
1587 .await?
1588 .transaction
1589 .ok_or_else(|| anyhow!("missing transaction"))?;
1590 this.update(&mut cx, |this, cx| {
1591 this.deserialize_project_transaction(response, push_to_history, cx)
1592 })
1593 .await
1594 })
1595 } else {
1596 Task::ready(Err(anyhow!("project does not have a remote id")))
1597 }
1598 }
1599
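    /// Apply an LSP `WorkspaceEdit`: perform file create/rename/delete
    /// operations through the project's `Fs`, open each edited document, apply
    /// its edits, and collect the per-buffer transactions into a
    /// `ProjectTransaction`.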
1600 async fn deserialize_workspace_edit(
1601 this: ModelHandle<Self>,
1602 edit: lsp::WorkspaceEdit,
1603 push_to_history: bool,
1604 language_name: String,
1605 language_server: Arc<LanguageServer>,
1606 cx: &mut AsyncAppContext,
1607 ) -> Result<ProjectTransaction> {
1608 let fs = this.read_with(cx, |this, _| this.fs.clone());
1609 let mut operations = Vec::new();
1610 if let Some(document_changes) = edit.document_changes {
1611 match document_changes {
1612 lsp::DocumentChanges::Edits(edits) => {
1613 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
1614 }
1615 lsp::DocumentChanges::Operations(ops) => operations = ops,
1616 }
1617 } else if let Some(changes) = edit.changes {
1618 operations.extend(changes.into_iter().map(|(uri, edits)| {
1619 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1620 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1621 uri,
1622 version: None,
1623 },
1624 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1625 })
1626 }));
1627 }
1628
1629 let mut project_transaction = ProjectTransaction::default();
1630 for operation in operations {
1631 match operation {
1632 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1633 let abs_path = op
1634 .uri
1635 .to_file_path()
1636 .map_err(|_| anyhow!("can't convert URI to path"))?;
1637
1638 if let Some(parent_path) = abs_path.parent() {
1639 fs.create_dir(parent_path).await?;
1640 }
1641 if abs_path.ends_with("/") {
1642 fs.create_dir(&abs_path).await?;
1643 } else {
1644 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
1645 .await?;
1646 }
1647 }
1648 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1649 let source_abs_path = op
1650 .old_uri
1651 .to_file_path()
1652 .map_err(|_| anyhow!("can't convert URI to path"))?;
1653 let target_abs_path = op
1654 .new_uri
1655 .to_file_path()
1656 .map_err(|_| anyhow!("can't convert URI to path"))?;
1657 fs.rename(
1658 &source_abs_path,
1659 &target_abs_path,
1660 op.options.map(Into::into).unwrap_or_default(),
1661 )
1662 .await?;
1663 }
1664 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1665 let abs_path = op
1666 .uri
1667 .to_file_path()
1668 .map_err(|_| anyhow!("can't convert URI to path"))?;
1669 let options = op.options.map(Into::into).unwrap_or_default();
1670 if abs_path.ends_with("/") {
1671 fs.remove_dir(&abs_path, options).await?;
1672 } else {
1673 fs.remove_file(&abs_path, options).await?;
1674 }
1675 }
1676 lsp::DocumentChangeOperation::Edit(op) => {
1677 let buffer_to_edit = this
1678 .update(cx, |this, cx| {
1679 this.open_local_buffer_from_lsp_path(
1680 op.text_document.uri,
1681 language_name.clone(),
1682 language_server.clone(),
1683 cx,
1684 )
1685 })
1686 .await?;
1687
1688 let edits = buffer_to_edit
1689 .update(cx, |buffer, cx| {
1690 let edits = op.edits.into_iter().map(|edit| match edit {
1691 lsp::OneOf::Left(edit) => edit,
1692 lsp::OneOf::Right(edit) => edit.text_edit,
1693 });
1694 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1695 })
1696 .await?;
1697
1698 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
1699 buffer.finalize_last_transaction();
1700 buffer.start_transaction();
1701 for (range, text) in edits {
1702 buffer.edit([range], text, cx);
1703 }
1704 let transaction = if buffer.end_transaction(cx).is_some() {
1705 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1706 if !push_to_history {
1707 buffer.forget_transaction(transaction.id);
1708 }
1709 Some(transaction)
1710 } else {
1711 None
1712 };
1713
1714 transaction
1715 });
1716 if let Some(transaction) = transaction {
1717 project_transaction.0.insert(buffer_to_edit, transaction);
1718 }
1719 }
1720 }
1721 }
1722
1723 Ok(project_transaction)
1724 }
1725
1726 pub fn prepare_rename<T: ToPointUtf16>(
1727 &self,
1728 buffer: ModelHandle<Buffer>,
1729 position: T,
1730 cx: &mut ModelContext<Self>,
1731 ) -> Task<Result<Option<Range<Anchor>>>> {
1732 let position = position.to_point_utf16(buffer.read(cx));
1733 self.request_lsp(buffer, PrepareRename { position }, cx)
1734 }
1735
1736 pub fn perform_rename<T: ToPointUtf16>(
1737 &self,
1738 buffer: ModelHandle<Buffer>,
1739 position: T,
1740 new_name: String,
1741 push_to_history: bool,
1742 cx: &mut ModelContext<Self>,
1743 ) -> Task<Result<ProjectTransaction>> {
1744 let position = position.to_point_utf16(buffer.read(cx));
1745 self.request_lsp(
1746 buffer,
1747 PerformRename {
1748 position,
1749 new_name,
1750 push_to_history,
1751 },
1752 cx,
1753 )
1754 }
1755
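    /// Dispatch a typed LSP request (see `lsp_command`): locally it goes to
    /// the buffer's language server, while on a remote project it is proxied
    /// to the host over RPC.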
1756 fn request_lsp<R: LspCommand>(
1757 &self,
1758 buffer_handle: ModelHandle<Buffer>,
1759 request: R,
1760 cx: &mut ModelContext<Self>,
1761 ) -> Task<Result<R::Response>>
1762 where
1763 <R::LspRequest as lsp::request::Request>::Result: Send,
1764 {
1765 let buffer = buffer_handle.read(cx);
1766 if self.is_local() {
1767 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
1768 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
1769 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
1770 return cx.spawn(|this, cx| async move {
1771 let response = language_server
1772 .request::<R::LspRequest>(lsp_params)
1773 .await
1774 .context("lsp request failed")?;
1775 request
1776 .response_from_lsp(response, this, buffer_handle, cx)
1777 .await
1778 });
1779 }
1780 } else if let Some(project_id) = self.remote_id() {
1781 let rpc = self.client.clone();
1782 let message = request.to_proto(project_id, buffer);
1783 return cx.spawn(|this, cx| async move {
1784 let response = rpc.request(message).await?;
1785 request
1786 .response_from_proto(response, this, buffer_handle, cx)
1787 .await
1788 });
1789 }
1790 Task::ready(Ok(Default::default()))
1791 }
1792
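    /// Return the worktree containing `abs_path` and the path relative to it,
    /// creating a new local worktree when none contains it.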
1793 pub fn find_or_create_local_worktree(
1794 &self,
1795 abs_path: impl AsRef<Path>,
1796 weak: bool,
1797 cx: &mut ModelContext<Self>,
1798 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1799 let abs_path = abs_path.as_ref();
1800 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1801 Task::ready(Ok((tree.clone(), relative_path.into())))
1802 } else {
1803 let worktree = self.create_local_worktree(abs_path, weak, cx);
1804 cx.foreground()
1805 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1806 }
1807 }
1808
1809 fn find_local_worktree(
1810 &self,
1811 abs_path: &Path,
1812 cx: &AppContext,
1813 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1814 for tree in self.worktrees(cx) {
1815 if let Some(relative_path) = tree
1816 .read(cx)
1817 .as_local()
1818 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1819 {
1820 return Some((tree.clone(), relative_path.into()));
1821 }
1822 }
1823 None
1824 }
1825
1826 pub fn is_shared(&self) -> bool {
1827 match &self.client_state {
1828 ProjectClientState::Local { is_shared, .. } => *is_shared,
1829 ProjectClientState::Remote { .. } => false,
1830 }
1831 }
1832
1833 fn create_local_worktree(
1834 &self,
1835 abs_path: impl AsRef<Path>,
1836 weak: bool,
1837 cx: &mut ModelContext<Self>,
1838 ) -> Task<Result<ModelHandle<Worktree>>> {
1839 let fs = self.fs.clone();
1840 let client = self.client.clone();
1841 let path = Arc::from(abs_path.as_ref());
1842 cx.spawn(|project, mut cx| async move {
1843 let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;
1844
1845 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
1846 project.add_worktree(&worktree, cx);
1847 (project.remote_id(), project.is_shared())
1848 });
1849
1850 if let Some(project_id) = remote_project_id {
1851 worktree
1852 .update(&mut cx, |worktree, cx| {
1853 worktree.as_local_mut().unwrap().register(project_id, cx)
1854 })
1855 .await?;
1856 if is_shared {
1857 worktree
1858 .update(&mut cx, |worktree, cx| {
1859 worktree.as_local_mut().unwrap().share(project_id, cx)
1860 })
1861 .await?;
1862 }
1863 }
1864
1865 Ok(worktree)
1866 })
1867 }
1868
1869 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1870 self.worktrees.retain(|worktree| {
1871 worktree
1872 .upgrade(cx)
1873 .map_or(false, |w| w.read(cx).id() != id)
1874 });
1875 cx.notify();
1876 }
1877
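    /// Track a new worktree. Weak local worktrees are held by weak handles and
    /// pruned once released; all others are kept alive by strong handles.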
1878 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
1879 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
1880 if worktree.read(cx).is_local() {
1881 cx.subscribe(&worktree, |this, worktree, _, cx| {
1882 this.update_local_worktree_buffers(worktree, cx);
1883 })
1884 .detach();
1885 }
1886
1887 let push_weak_handle = {
1888 let worktree = worktree.read(cx);
1889 worktree.is_local() && worktree.is_weak()
1890 };
1891 if push_weak_handle {
1892 cx.observe_release(&worktree, |this, cx| {
1893 this.worktrees
1894 .retain(|worktree| worktree.upgrade(cx).is_some());
1895 cx.notify();
1896 })
1897 .detach();
1898 self.worktrees
1899 .push(WorktreeHandle::Weak(worktree.downgrade()));
1900 } else {
1901 self.worktrees
1902 .push(WorktreeHandle::Strong(worktree.clone()));
1903 }
1904 cx.notify();
1905 }
1906
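    // When a local worktree changes, reassign each open buffer's file based on the
    // new snapshot, notify remote collaborators of the new file metadata, and drop
    // entries for buffers whose handles have been released.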
1907 fn update_local_worktree_buffers(
1908 &mut self,
1909 worktree_handle: ModelHandle<Worktree>,
1910 cx: &mut ModelContext<Self>,
1911 ) {
1912 let snapshot = worktree_handle.read(cx).snapshot();
1913 let mut buffers_to_delete = Vec::new();
1914 for (buffer_id, buffer) in &self.open_buffers {
1915 if let Some(buffer) = buffer.upgrade(cx) {
1916 buffer.update(cx, |buffer, cx| {
1917 if let Some(old_file) = File::from_dyn(buffer.file()) {
1918 if old_file.worktree != worktree_handle {
1919 return;
1920 }
1921
1922 let new_file = if let Some(entry) = old_file
1923 .entry_id
1924 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
1925 {
1926 File {
1927 is_local: true,
1928 entry_id: Some(entry.id),
1929 mtime: entry.mtime,
1930 path: entry.path.clone(),
1931 worktree: worktree_handle.clone(),
1932 }
1933 } else if let Some(entry) =
1934 snapshot.entry_for_path(old_file.path().as_ref())
1935 {
1936 File {
1937 is_local: true,
1938 entry_id: Some(entry.id),
1939 mtime: entry.mtime,
1940 path: entry.path.clone(),
1941 worktree: worktree_handle.clone(),
1942 }
1943 } else {
1944 File {
1945 is_local: true,
1946 entry_id: None,
1947 path: old_file.path().clone(),
1948 mtime: old_file.mtime(),
1949 worktree: worktree_handle.clone(),
1950 }
1951 };
1952
1953 if let Some(project_id) = self.remote_id() {
1954 self.client
1955 .send(proto::UpdateBufferFile {
1956 project_id,
1957 buffer_id: *buffer_id as u64,
1958 file: Some(new_file.to_proto()),
1959 })
1960 .log_err();
1961 }
1962 buffer.file_updated(Box::new(new_file), cx).detach();
1963 }
1964 });
1965 } else {
1966 buffers_to_delete.push(*buffer_id);
1967 }
1968 }
1969
1970 for buffer_id in buffers_to_delete {
1971 self.open_buffers.remove(&buffer_id);
1972 }
1973 }
1974
1975 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
1976 let new_active_entry = entry.and_then(|project_path| {
1977 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1978 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
1979 Some(ProjectEntry {
1980 worktree_id: project_path.worktree_id,
1981 entry_id: entry.id,
1982 })
1983 });
1984 if new_active_entry != self.active_entry {
1985 self.active_entry = new_active_entry;
1986 cx.emit(Event::ActiveEntryChanged(new_active_entry));
1987 }
1988 }
1989
1990 pub fn is_running_disk_based_diagnostics(&self) -> bool {
1991 self.language_servers_with_diagnostics_running > 0
1992 }
1993
1994 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
1995 let mut summary = DiagnosticSummary::default();
1996 for (_, path_summary) in self.diagnostic_summaries(cx) {
1997 summary.error_count += path_summary.error_count;
1998 summary.warning_count += path_summary.warning_count;
1999 summary.info_count += path_summary.info_count;
2000 summary.hint_count += path_summary.hint_count;
2001 }
2002 summary
2003 }
2004
2005 pub fn diagnostic_summaries<'a>(
2006 &'a self,
2007 cx: &'a AppContext,
2008 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2009 self.worktrees(cx).flat_map(move |worktree| {
2010 let worktree = worktree.read(cx);
2011 let worktree_id = worktree.id();
2012 worktree
2013 .diagnostic_summaries()
2014 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2015 })
2016 }
2017
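    // Track how many language servers are currently producing disk-based diagnostics,
    // so the started event fires only for the first server and the finished event only
    // after the last one completes.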
2018 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2019 self.language_servers_with_diagnostics_running += 1;
2020 if self.language_servers_with_diagnostics_running == 1 {
2021 cx.emit(Event::DiskBasedDiagnosticsStarted);
2022 }
2023 }
2024
2025 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2026 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2027 self.language_servers_with_diagnostics_running -= 1;
2028 if self.language_servers_with_diagnostics_running == 0 {
2029 cx.emit(Event::DiskBasedDiagnosticsFinished);
2030 }
2031 }
2032
2033 pub fn active_entry(&self) -> Option<ProjectEntry> {
2034 self.active_entry
2035 }
2036
2037 // RPC message handlers
2038
2039 async fn handle_unshare_project(
2040 this: ModelHandle<Self>,
2041 _: TypedEnvelope<proto::UnshareProject>,
2042 _: Arc<Client>,
2043 mut cx: AsyncAppContext,
2044 ) -> Result<()> {
2045 this.update(&mut cx, |this, cx| {
2046 if let ProjectClientState::Remote {
2047 sharing_has_stopped,
2048 ..
2049 } = &mut this.client_state
2050 {
2051 *sharing_has_stopped = true;
2052 this.collaborators.clear();
2053 cx.notify();
2054 } else {
2055 unreachable!()
2056 }
2057 });
2058
2059 Ok(())
2060 }
2061
2062 async fn handle_add_collaborator(
2063 this: ModelHandle<Self>,
2064 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2065 _: Arc<Client>,
2066 mut cx: AsyncAppContext,
2067 ) -> Result<()> {
2068 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2069 let collaborator = envelope
2070 .payload
2071 .collaborator
2072 .take()
2073 .ok_or_else(|| anyhow!("empty collaborator"))?;
2074
2075 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2076 this.update(&mut cx, |this, cx| {
2077 this.collaborators
2078 .insert(collaborator.peer_id, collaborator);
2079 cx.notify();
2080 });
2081
2082 Ok(())
2083 }
2084
2085 async fn handle_remove_collaborator(
2086 this: ModelHandle<Self>,
2087 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2088 _: Arc<Client>,
2089 mut cx: AsyncAppContext,
2090 ) -> Result<()> {
2091 this.update(&mut cx, |this, cx| {
2092 let peer_id = PeerId(envelope.payload.peer_id);
2093 let replica_id = this
2094 .collaborators
2095 .remove(&peer_id)
2096 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2097 .replica_id;
2098 this.shared_buffers.remove(&peer_id);
2099 for (_, buffer) in &this.open_buffers {
2100 if let Some(buffer) = buffer.upgrade(cx) {
2101 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2102 }
2103 }
2104 cx.notify();
2105 Ok(())
2106 })
2107 }
2108
2109 async fn handle_share_worktree(
2110 this: ModelHandle<Self>,
2111 envelope: TypedEnvelope<proto::ShareWorktree>,
2112 client: Arc<Client>,
2113 mut cx: AsyncAppContext,
2114 ) -> Result<()> {
2115 this.update(&mut cx, |this, cx| {
2116 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2117 let replica_id = this.replica_id();
2118 let worktree = envelope
2119 .payload
2120 .worktree
2121 .ok_or_else(|| anyhow!("invalid worktree"))?;
2122 let (worktree, load_task) =
2123 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2124 this.add_worktree(&worktree, cx);
2125 load_task.detach();
2126 Ok(())
2127 })
2128 }
2129
2130 async fn handle_unregister_worktree(
2131 this: ModelHandle<Self>,
2132 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2133 _: Arc<Client>,
2134 mut cx: AsyncAppContext,
2135 ) -> Result<()> {
2136 this.update(&mut cx, |this, cx| {
2137 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2138 this.remove_worktree(worktree_id, cx);
2139 Ok(())
2140 })
2141 }
2142
2143 async fn handle_update_worktree(
2144 this: ModelHandle<Self>,
2145 envelope: TypedEnvelope<proto::UpdateWorktree>,
2146 _: Arc<Client>,
2147 mut cx: AsyncAppContext,
2148 ) -> Result<()> {
2149 this.update(&mut cx, |this, cx| {
2150 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2151 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2152 worktree.update(cx, |worktree, _| {
2153 let worktree = worktree.as_remote_mut().unwrap();
2154 worktree.update_from_remote(envelope)
2155 })?;
2156 }
2157 Ok(())
2158 })
2159 }
2160
2161 async fn handle_update_diagnostic_summary(
2162 this: ModelHandle<Self>,
2163 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2164 _: Arc<Client>,
2165 mut cx: AsyncAppContext,
2166 ) -> Result<()> {
2167 this.update(&mut cx, |this, cx| {
2168 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2169 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2170 if let Some(summary) = envelope.payload.summary {
2171 let project_path = ProjectPath {
2172 worktree_id,
2173 path: Path::new(&summary.path).into(),
2174 };
2175 worktree.update(cx, |worktree, _| {
2176 worktree
2177 .as_remote_mut()
2178 .unwrap()
2179 .update_diagnostic_summary(project_path.path.clone(), &summary);
2180 });
2181 cx.emit(Event::DiagnosticsUpdated(project_path));
2182 }
2183 }
2184 Ok(())
2185 })
2186 }
2187
2188 async fn handle_disk_based_diagnostics_updating(
2189 this: ModelHandle<Self>,
2190 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2191 _: Arc<Client>,
2192 mut cx: AsyncAppContext,
2193 ) -> Result<()> {
2194 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2195 Ok(())
2196 }
2197
2198 async fn handle_disk_based_diagnostics_updated(
2199 this: ModelHandle<Self>,
2200 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2201 _: Arc<Client>,
2202 mut cx: AsyncAppContext,
2203 ) -> Result<()> {
2204 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2205 Ok(())
2206 }
2207
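    // Apply incoming operations to the corresponding open buffer, or queue them if the
    // buffer is still being loaded on this remote project.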
2208 async fn handle_update_buffer(
2209 this: ModelHandle<Self>,
2210 envelope: TypedEnvelope<proto::UpdateBuffer>,
2211 _: Arc<Client>,
2212 mut cx: AsyncAppContext,
2213 ) -> Result<()> {
2214 this.update(&mut cx, |this, cx| {
2215 let payload = envelope.payload.clone();
2216 let buffer_id = payload.buffer_id;
2217 let ops = payload
2218 .operations
2219 .into_iter()
2220 .map(|op| language::proto::deserialize_operation(op))
2221 .collect::<Result<Vec<_>, _>>()?;
2222 let is_remote = this.is_remote();
2223 match this.open_buffers.entry(buffer_id) {
2224 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2225 OpenBuffer::Loaded(buffer) => {
2226 if let Some(buffer) = buffer.upgrade(cx) {
2227 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2228 }
2229 }
2230 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2231 },
2232 hash_map::Entry::Vacant(e) => {
2233                    if is_remote && !this.loading_buffers.is_empty() {
2234 e.insert(OpenBuffer::Loading(ops));
2235 }
2236 }
2237 }
2238 Ok(())
2239 })
2240 }
2241
2242 async fn handle_update_buffer_file(
2243 this: ModelHandle<Self>,
2244 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2245 _: Arc<Client>,
2246 mut cx: AsyncAppContext,
2247 ) -> Result<()> {
2248 this.update(&mut cx, |this, cx| {
2249 let payload = envelope.payload.clone();
2250 let buffer_id = payload.buffer_id;
2251 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2252 let worktree = this
2253 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2254 .ok_or_else(|| anyhow!("no such worktree"))?;
2255 let file = File::from_proto(file, worktree.clone(), cx)?;
2256 let buffer = this
2257 .open_buffers
2258 .get_mut(&buffer_id)
2259 .and_then(|b| b.upgrade(cx))
2260 .ok_or_else(|| anyhow!("no such buffer"))?;
2261 buffer.update(cx, |buffer, cx| {
2262 buffer.file_updated(Box::new(file), cx).detach();
2263 });
2264 Ok(())
2265 })
2266 }
2267
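    // Save a buffer on behalf of a peer, after verifying that all edits in the
    // requested version have been received locally.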
2268 async fn handle_save_buffer(
2269 this: ModelHandle<Self>,
2270 envelope: TypedEnvelope<proto::SaveBuffer>,
2271 _: Arc<Client>,
2272 mut cx: AsyncAppContext,
2273 ) -> Result<proto::BufferSaved> {
2274 let buffer_id = envelope.payload.buffer_id;
2275 let sender_id = envelope.original_sender_id()?;
2276 let requested_version = envelope.payload.version.try_into()?;
2277
2278 let (project_id, buffer) = this.update(&mut cx, |this, _| {
2279 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2280 let buffer = this
2281 .shared_buffers
2282 .get(&sender_id)
2283 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2284 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2285 Ok::<_, anyhow::Error>((project_id, buffer))
2286 })?;
2287
2288 if !buffer
2289 .read_with(&cx, |buffer, _| buffer.version())
2290 .observed_all(&requested_version)
2291 {
2292 Err(anyhow!("save request depends on unreceived edits"))?;
2293 }
2294
2295 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2296 Ok(proto::BufferSaved {
2297 project_id,
2298 buffer_id,
2299 version: (&saved_version).into(),
2300 mtime: Some(mtime.into()),
2301 })
2302 }
2303
2304 async fn handle_format_buffers(
2305 this: ModelHandle<Self>,
2306 envelope: TypedEnvelope<proto::FormatBuffers>,
2307 _: Arc<Client>,
2308 mut cx: AsyncAppContext,
2309 ) -> Result<proto::FormatBuffersResponse> {
2310 let sender_id = envelope.original_sender_id()?;
2311 let format = this.update(&mut cx, |this, cx| {
2312 let shared_buffers = this
2313 .shared_buffers
2314 .get(&sender_id)
2315 .ok_or_else(|| anyhow!("peer has no buffers"))?;
2316 let mut buffers = HashSet::default();
2317 for buffer_id in &envelope.payload.buffer_ids {
2318 buffers.insert(
2319 shared_buffers
2320 .get(buffer_id)
2321 .cloned()
2322 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2323 );
2324 }
2325 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2326 })?;
2327
2328 let project_transaction = format.await?;
2329 let project_transaction = this.update(&mut cx, |this, cx| {
2330 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2331 });
2332 Ok(proto::FormatBuffersResponse {
2333 transaction: Some(project_transaction),
2334 })
2335 }
2336
2337 async fn handle_get_completions(
2338 this: ModelHandle<Self>,
2339 envelope: TypedEnvelope<proto::GetCompletions>,
2340 _: Arc<Client>,
2341 mut cx: AsyncAppContext,
2342 ) -> Result<proto::GetCompletionsResponse> {
2343 let sender_id = envelope.original_sender_id()?;
2344 let position = envelope
2345 .payload
2346 .position
2347 .and_then(language::proto::deserialize_anchor)
2348 .ok_or_else(|| anyhow!("invalid position"))?;
2349 let version = clock::Global::from(envelope.payload.version);
2350 let buffer = this.read_with(&cx, |this, _| {
2351 this.shared_buffers
2352 .get(&sender_id)
2353 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2354 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2355 })?;
2356 if !buffer
2357 .read_with(&cx, |buffer, _| buffer.version())
2358 .observed_all(&version)
2359 {
2360 Err(anyhow!("completion request depends on unreceived edits"))?;
2361 }
2362 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2363 let completions = this
2364 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2365 .await?;
2366
2367 Ok(proto::GetCompletionsResponse {
2368 completions: completions
2369 .iter()
2370 .map(language::proto::serialize_completion)
2371 .collect(),
2372 version: (&version).into(),
2373 })
2374 }
2375
2376 async fn handle_apply_additional_edits_for_completion(
2377 this: ModelHandle<Self>,
2378 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2379 _: Arc<Client>,
2380 mut cx: AsyncAppContext,
2381 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2382 let sender_id = envelope.original_sender_id()?;
2383 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2384 let buffer = this
2385 .shared_buffers
2386 .get(&sender_id)
2387 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2388 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2389 let language = buffer.read(cx).language();
2390 let completion = language::proto::deserialize_completion(
2391 envelope
2392 .payload
2393 .completion
2394 .ok_or_else(|| anyhow!("invalid completion"))?,
2395 language,
2396 )?;
2397 Ok::<_, anyhow::Error>(
2398 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2399 )
2400 })?;
2401
2402 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2403 transaction: apply_additional_edits
2404 .await?
2405 .as_ref()
2406 .map(language::proto::serialize_transaction),
2407 })
2408 }
2409
2410 async fn handle_get_code_actions(
2411 this: ModelHandle<Self>,
2412 envelope: TypedEnvelope<proto::GetCodeActions>,
2413 _: Arc<Client>,
2414 mut cx: AsyncAppContext,
2415 ) -> Result<proto::GetCodeActionsResponse> {
2416 let sender_id = envelope.original_sender_id()?;
2417 let start = envelope
2418 .payload
2419 .start
2420 .and_then(language::proto::deserialize_anchor)
2421 .ok_or_else(|| anyhow!("invalid start"))?;
2422 let end = envelope
2423 .payload
2424 .end
2425 .and_then(language::proto::deserialize_anchor)
2426 .ok_or_else(|| anyhow!("invalid end"))?;
2427 let buffer = this.update(&mut cx, |this, _| {
2428 this.shared_buffers
2429 .get(&sender_id)
2430 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2431 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2432 })?;
2433 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2434 if !version.observed(start.timestamp) || !version.observed(end.timestamp) {
2435 Err(anyhow!("code action request references unreceived edits"))?;
2436 }
2437 let code_actions = this.update(&mut cx, |this, cx| {
2438 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2439 })?;
2440
2441 Ok(proto::GetCodeActionsResponse {
2442 actions: code_actions
2443 .await?
2444 .iter()
2445 .map(language::proto::serialize_code_action)
2446 .collect(),
2447 version: (&version).into(),
2448 })
2449 }
2450
2451 async fn handle_apply_code_action(
2452 this: ModelHandle<Self>,
2453 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2454 _: Arc<Client>,
2455 mut cx: AsyncAppContext,
2456 ) -> Result<proto::ApplyCodeActionResponse> {
2457 let sender_id = envelope.original_sender_id()?;
2458 let action = language::proto::deserialize_code_action(
2459 envelope
2460 .payload
2461 .action
2462 .ok_or_else(|| anyhow!("invalid action"))?,
2463 )?;
2464 let apply_code_action = this.update(&mut cx, |this, cx| {
2465 let buffer = this
2466 .shared_buffers
2467 .get(&sender_id)
2468 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2469 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2470 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2471 })?;
2472
2473 let project_transaction = apply_code_action.await?;
2474 let project_transaction = this.update(&mut cx, |this, cx| {
2475 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2476 });
2477 Ok(proto::ApplyCodeActionResponse {
2478 transaction: Some(project_transaction),
2479 })
2480 }
2481
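    // Generic handler for LSP-backed requests: deserialize the request for the peer's
    // shared buffer, run it through `request_lsp`, and serialize the response back
    // into the corresponding protobuf message.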
2482 async fn handle_lsp_command<T: LspCommand>(
2483 this: ModelHandle<Self>,
2484 envelope: TypedEnvelope<T::ProtoRequest>,
2485 _: Arc<Client>,
2486 mut cx: AsyncAppContext,
2487 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
2488 where
2489 <T::LspRequest as lsp::request::Request>::Result: Send,
2490 {
2491 let sender_id = envelope.original_sender_id()?;
2492 let (request, buffer_version) = this.update(&mut cx, |this, cx| {
2493 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
2494 let buffer_handle = this
2495 .shared_buffers
2496 .get(&sender_id)
2497 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2498 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2499 let buffer = buffer_handle.read(cx);
2500 let buffer_version = buffer.version();
2501 let request = T::from_proto(envelope.payload, this, buffer)?;
2502 Ok::<_, anyhow::Error>((this.request_lsp(buffer_handle, request, cx), buffer_version))
2503 })?;
2504 let response = request.await?;
2505 this.update(&mut cx, |this, cx| {
2506 Ok(T::response_to_proto(
2507 response,
2508 this,
2509 sender_id,
2510 &buffer_version,
2511 cx,
2512 ))
2513 })
2514 }
2515
2516 async fn handle_open_buffer(
2517 this: ModelHandle<Self>,
2518 envelope: TypedEnvelope<proto::OpenBuffer>,
2519 _: Arc<Client>,
2520 mut cx: AsyncAppContext,
2521 ) -> anyhow::Result<proto::OpenBufferResponse> {
2522 let peer_id = envelope.original_sender_id()?;
2523 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2524 let open_buffer = this.update(&mut cx, |this, cx| {
2525 this.open_buffer(
2526 ProjectPath {
2527 worktree_id,
2528 path: PathBuf::from(envelope.payload.path).into(),
2529 },
2530 cx,
2531 )
2532 });
2533
2534 let buffer = open_buffer.await?;
2535 this.update(&mut cx, |this, cx| {
2536 Ok(proto::OpenBufferResponse {
2537 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
2538 })
2539 })
2540 }
2541
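    // Convert a project transaction into its protobuf form, registering each affected
    // buffer with the peer so it can be referenced by id later.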
2542 fn serialize_project_transaction_for_peer(
2543 &mut self,
2544 project_transaction: ProjectTransaction,
2545 peer_id: PeerId,
2546 cx: &AppContext,
2547 ) -> proto::ProjectTransaction {
2548 let mut serialized_transaction = proto::ProjectTransaction {
2549 buffers: Default::default(),
2550 transactions: Default::default(),
2551 };
2552 for (buffer, transaction) in project_transaction.0 {
2553 serialized_transaction
2554 .buffers
2555 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
2556 serialized_transaction
2557 .transactions
2558 .push(language::proto::serialize_transaction(&transaction));
2559 }
2560 serialized_transaction
2561 }
2562
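    // Reconstruct a project transaction received from a peer, waiting for the edits it
    // references to arrive and optionally pushing each transaction onto its buffer's
    // undo history.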
2563 fn deserialize_project_transaction(
2564 &mut self,
2565 message: proto::ProjectTransaction,
2566 push_to_history: bool,
2567 cx: &mut ModelContext<Self>,
2568 ) -> Task<Result<ProjectTransaction>> {
2569 cx.spawn(|this, mut cx| async move {
2570 let mut project_transaction = ProjectTransaction::default();
2571 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
2572 let buffer = this
2573 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2574 .await?;
2575 let transaction = language::proto::deserialize_transaction(transaction)?;
2576 project_transaction.0.insert(buffer, transaction);
2577 }
2578 for (buffer, transaction) in &project_transaction.0 {
2579 buffer
2580 .update(&mut cx, |buffer, _| {
2581 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2582 })
2583 .await;
2584
2585 if push_to_history {
2586 buffer.update(&mut cx, |buffer, _| {
2587 buffer.push_transaction(transaction.clone(), Instant::now());
2588 });
2589 }
2590 }
2591
2592 Ok(project_transaction)
2593 })
2594 }
2595
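    // Send the full buffer state the first time a buffer is shared with a peer;
    // afterwards, refer to it by its remote id only.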
2596 fn serialize_buffer_for_peer(
2597 &mut self,
2598 buffer: &ModelHandle<Buffer>,
2599 peer_id: PeerId,
2600 cx: &AppContext,
2601 ) -> proto::Buffer {
2602 let buffer_id = buffer.read(cx).remote_id();
2603 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2604 match shared_buffers.entry(buffer_id) {
2605 hash_map::Entry::Occupied(_) => proto::Buffer {
2606 variant: Some(proto::buffer::Variant::Id(buffer_id)),
2607 },
2608 hash_map::Entry::Vacant(entry) => {
2609 entry.insert(buffer.clone());
2610 proto::Buffer {
2611 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
2612 }
2613 }
2614 }
2615 }
2616
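    // Resolve a buffer received from a peer: an id variant waits until the referenced
    // buffer has been opened locally, while a state variant constructs a new buffer,
    // resolves its file against the corresponding worktree, and registers it with the
    // project.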
2617 fn deserialize_buffer(
2618 &mut self,
2619 buffer: proto::Buffer,
2620 cx: &mut ModelContext<Self>,
2621 ) -> Task<Result<ModelHandle<Buffer>>> {
2622 let replica_id = self.replica_id();
2623
2624 let mut opened_buffer_tx = self.opened_buffer.clone();
2625 let mut opened_buffer_rx = self.opened_buffer.subscribe();
2626 cx.spawn(|this, mut cx| async move {
2627 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
2628 proto::buffer::Variant::Id(id) => {
2629 let buffer = loop {
2630 let buffer = this.read_with(&cx, |this, cx| {
2631 this.open_buffers
2632 .get(&id)
2633 .and_then(|buffer| buffer.upgrade(cx))
2634 });
2635 if let Some(buffer) = buffer {
2636 break buffer;
2637 }
2638 opened_buffer_rx
2639 .recv()
2640 .await
2641 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
2642 };
2643 Ok(buffer)
2644 }
2645 proto::buffer::Variant::State(mut buffer) => {
2646 let mut buffer_worktree = None;
2647 let mut buffer_file = None;
2648 if let Some(file) = buffer.file.take() {
2649 this.read_with(&cx, |this, cx| {
2650 let worktree_id = WorktreeId::from_proto(file.worktree_id);
2651 let worktree =
2652 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
2653 anyhow!("no worktree found for id {}", file.worktree_id)
2654 })?;
2655 buffer_file =
2656 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
2657 as Box<dyn language::File>);
2658 buffer_worktree = Some(worktree);
2659 Ok::<_, anyhow::Error>(())
2660 })?;
2661 }
2662
2663 let buffer = cx.add_model(|cx| {
2664 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
2665 });
2666 this.update(&mut cx, |this, cx| {
2667 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
2668 })?;
2669
2670 let _ = opened_buffer_tx.send(()).await;
2671 Ok(buffer)
2672 }
2673 }
2674 })
2675 }
2676
2677 async fn handle_close_buffer(
2678 this: ModelHandle<Self>,
2679 envelope: TypedEnvelope<proto::CloseBuffer>,
2680 _: Arc<Client>,
2681 mut cx: AsyncAppContext,
2682 ) -> anyhow::Result<()> {
2683 this.update(&mut cx, |this, cx| {
2684 if let Some(shared_buffers) =
2685 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
2686 {
2687 shared_buffers.remove(&envelope.payload.buffer_id);
2688 cx.notify();
2689 }
2690 Ok(())
2691 })
2692 }
2693
2694 async fn handle_buffer_saved(
2695 this: ModelHandle<Self>,
2696 envelope: TypedEnvelope<proto::BufferSaved>,
2697 _: Arc<Client>,
2698 mut cx: AsyncAppContext,
2699 ) -> Result<()> {
2700 let version = envelope.payload.version.try_into()?;
2701 let mtime = envelope
2702 .payload
2703 .mtime
2704 .ok_or_else(|| anyhow!("missing mtime"))?
2705 .into();
2706
2707 this.update(&mut cx, |this, cx| {
2708 let buffer = this
2709 .open_buffers
2710 .get(&envelope.payload.buffer_id)
2711 .and_then(|buffer| buffer.upgrade(cx));
2712 if let Some(buffer) = buffer {
2713 buffer.update(cx, |buffer, cx| {
2714 buffer.did_save(version, mtime, None, cx);
2715 });
2716 }
2717 Ok(())
2718 })
2719 }
2720
2721 async fn handle_buffer_reloaded(
2722 this: ModelHandle<Self>,
2723 envelope: TypedEnvelope<proto::BufferReloaded>,
2724 _: Arc<Client>,
2725 mut cx: AsyncAppContext,
2726 ) -> Result<()> {
2727 let payload = envelope.payload.clone();
2728 let version = payload.version.try_into()?;
2729 let mtime = payload
2730 .mtime
2731 .ok_or_else(|| anyhow!("missing mtime"))?
2732 .into();
2733 this.update(&mut cx, |this, cx| {
2734 let buffer = this
2735 .open_buffers
2736 .get(&payload.buffer_id)
2737 .and_then(|buffer| buffer.upgrade(cx));
2738 if let Some(buffer) = buffer {
2739 buffer.update(cx, |buffer, cx| {
2740 buffer.did_reload(version, mtime, cx);
2741 });
2742 }
2743 Ok(())
2744 })
2745 }
2746
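    // Fuzzy-match `query` against the paths of all non-weak worktrees, running the
    // search on the background executor.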
2747 pub fn match_paths<'a>(
2748 &self,
2749 query: &'a str,
2750 include_ignored: bool,
2751 smart_case: bool,
2752 max_results: usize,
2753 cancel_flag: &'a AtomicBool,
2754 cx: &AppContext,
2755 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
2756 let worktrees = self
2757 .worktrees(cx)
2758 .filter(|worktree| !worktree.read(cx).is_weak())
2759 .collect::<Vec<_>>();
2760 let include_root_name = worktrees.len() > 1;
2761 let candidate_sets = worktrees
2762 .into_iter()
2763 .map(|worktree| CandidateSet {
2764 snapshot: worktree.read(cx).snapshot(),
2765 include_ignored,
2766 include_root_name,
2767 })
2768 .collect::<Vec<_>>();
2769
2770 let background = cx.background().clone();
2771 async move {
2772 fuzzy::match_paths(
2773 candidate_sets.as_slice(),
2774 query,
2775 smart_case,
2776 max_results,
2777 cancel_flag,
2778 background,
2779 )
2780 .await
2781 }
2782 }
2783}
2784
2785impl WorktreeHandle {
2786 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2787 match self {
2788 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2789 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2790 }
2791 }
2792}
2793
2794impl OpenBuffer {
2795 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
2796 match self {
2797 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
2798 OpenBuffer::Loading(_) => None,
2799 }
2800 }
2801}
2802
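// Adapts a worktree snapshot to the fuzzy matcher's `PathMatchCandidateSet` interface.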
2803struct CandidateSet {
2804 snapshot: Snapshot,
2805 include_ignored: bool,
2806 include_root_name: bool,
2807}
2808
2809impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
2810 type Candidates = CandidateSetIter<'a>;
2811
2812 fn id(&self) -> usize {
2813 self.snapshot.id().to_usize()
2814 }
2815
2816 fn len(&self) -> usize {
2817 if self.include_ignored {
2818 self.snapshot.file_count()
2819 } else {
2820 self.snapshot.visible_file_count()
2821 }
2822 }
2823
2824 fn prefix(&self) -> Arc<str> {
2825 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
2826 self.snapshot.root_name().into()
2827 } else if self.include_root_name {
2828 format!("{}/", self.snapshot.root_name()).into()
2829 } else {
2830 "".into()
2831 }
2832 }
2833
2834 fn candidates(&'a self, start: usize) -> Self::Candidates {
2835 CandidateSetIter {
2836 traversal: self.snapshot.files(self.include_ignored, start),
2837 }
2838 }
2839}
2840
2841struct CandidateSetIter<'a> {
2842 traversal: Traversal<'a>,
2843}
2844
2845impl<'a> Iterator for CandidateSetIter<'a> {
2846 type Item = PathMatchCandidate<'a>;
2847
2848 fn next(&mut self) -> Option<Self::Item> {
2849 self.traversal.next().map(|entry| {
2850 if let EntryKind::File(char_bag) = entry.kind {
2851 PathMatchCandidate {
2852 path: &entry.path,
2853 char_bag,
2854 }
2855 } else {
2856 unreachable!()
2857 }
2858 })
2859 }
2860}
2861
2862impl Entity for Project {
2863 type Event = Event;
2864
2865 fn release(&mut self, _: &mut gpui::MutableAppContext) {
2866 match &self.client_state {
2867 ProjectClientState::Local { remote_id_rx, .. } => {
2868 if let Some(project_id) = *remote_id_rx.borrow() {
2869 self.client
2870 .send(proto::UnregisterProject { project_id })
2871 .log_err();
2872 }
2873 }
2874 ProjectClientState::Remote { remote_id, .. } => {
2875 self.client
2876 .send(proto::LeaveProject {
2877 project_id: *remote_id,
2878 })
2879 .log_err();
2880 }
2881 }
2882 }
2883
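    // Shut down all running language servers before the application quits.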
2884 fn app_will_quit(
2885 &mut self,
2886 _: &mut MutableAppContext,
2887 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
2888 use futures::FutureExt;
2889
2890 let shutdown_futures = self
2891 .language_servers
2892 .drain()
2893 .filter_map(|(_, server)| server.shutdown())
2894 .collect::<Vec<_>>();
2895 Some(
2896 async move {
2897 futures::future::join_all(shutdown_futures).await;
2898 }
2899 .boxed(),
2900 )
2901 }
2902}
2903
2904impl Collaborator {
2905 fn from_proto(
2906 message: proto::Collaborator,
2907 user_store: &ModelHandle<UserStore>,
2908 cx: &mut AsyncAppContext,
2909 ) -> impl Future<Output = Result<Self>> {
2910 let user = user_store.update(cx, |user_store, cx| {
2911 user_store.fetch_user(message.user_id, cx)
2912 });
2913
2914 async move {
2915 Ok(Self {
2916 peer_id: PeerId(message.peer_id),
2917 user: user.await?,
2918 replica_id: message.replica_id as ReplicaId,
2919 })
2920 }
2921 }
2922}
2923
2924impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
2925 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
2926 Self {
2927 worktree_id,
2928 path: path.as_ref().into(),
2929 }
2930 }
2931}
2932
2933impl From<lsp::CreateFileOptions> for fs::CreateOptions {
2934 fn from(options: lsp::CreateFileOptions) -> Self {
2935 Self {
2936 overwrite: options.overwrite.unwrap_or(false),
2937 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2938 }
2939 }
2940}
2941
2942impl From<lsp::RenameFileOptions> for fs::RenameOptions {
2943 fn from(options: lsp::RenameFileOptions) -> Self {
2944 Self {
2945 overwrite: options.overwrite.unwrap_or(false),
2946 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2947 }
2948 }
2949}
2950
2951impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
2952 fn from(options: lsp::DeleteFileOptions) -> Self {
2953 Self {
2954 recursive: options.recursive.unwrap_or(false),
2955 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
2956 }
2957 }
2958}
2959
2960#[cfg(test)]
2961mod tests {
2962 use super::{Event, *};
2963 use fs::RealFs;
2964 use futures::StreamExt;
2965 use gpui::test::subscribe;
2966 use language::{
2967 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
2968 };
2969 use lsp::Url;
2970 use serde_json::json;
2971 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
2972 use unindent::Unindent as _;
2973 use util::test::temp_tree;
2974 use worktree::WorktreeHandle as _;
2975
2976 #[gpui::test]
2977 async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
2978 let dir = temp_tree(json!({
2979 "root": {
2980 "apple": "",
2981 "banana": {
2982 "carrot": {
2983 "date": "",
2984 "endive": "",
2985 }
2986 },
2987 "fennel": {
2988 "grape": "",
2989 }
2990 }
2991 }));
2992
2993 let root_link_path = dir.path().join("root_link");
2994 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
2995 unix::fs::symlink(
2996 &dir.path().join("root/fennel"),
2997 &dir.path().join("root/finnochio"),
2998 )
2999 .unwrap();
3000
3001 let project = Project::test(Arc::new(RealFs), &mut cx);
3002
3003 let (tree, _) = project
3004 .update(&mut cx, |project, cx| {
3005 project.find_or_create_local_worktree(&root_link_path, false, cx)
3006 })
3007 .await
3008 .unwrap();
3009
3010 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3011 .await;
3012 cx.read(|cx| {
3013 let tree = tree.read(cx);
3014 assert_eq!(tree.file_count(), 5);
3015 assert_eq!(
3016 tree.inode_for_path("fennel/grape"),
3017 tree.inode_for_path("finnochio/grape")
3018 );
3019 });
3020
3021 let cancel_flag = Default::default();
3022 let results = project
3023 .read_with(&cx, |project, cx| {
3024 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3025 })
3026 .await;
3027 assert_eq!(
3028 results
3029 .into_iter()
3030 .map(|result| result.path)
3031 .collect::<Vec<Arc<Path>>>(),
3032 vec![
3033 PathBuf::from("banana/carrot/date").into(),
3034 PathBuf::from("banana/carrot/endive").into(),
3035 ]
3036 );
3037 }
3038
3039 #[gpui::test]
3040 async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
3041 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3042 let progress_token = language_server_config
3043 .disk_based_diagnostics_progress_token
3044 .clone()
3045 .unwrap();
3046
3047 let language = Arc::new(Language::new(
3048 LanguageConfig {
3049 name: "Rust".to_string(),
3050 path_suffixes: vec!["rs".to_string()],
3051 language_server: Some(language_server_config),
3052 ..Default::default()
3053 },
3054 Some(tree_sitter_rust::language()),
3055 ));
3056
3057 let fs = FakeFs::new(cx.background());
3058 fs.insert_tree(
3059 "/dir",
3060 json!({
3061 "a.rs": "fn a() { A }",
3062 "b.rs": "const y: i32 = 1",
3063 }),
3064 )
3065 .await;
3066
3067 let project = Project::test(fs, &mut cx);
3068 project.update(&mut cx, |project, _| {
3069 Arc::get_mut(&mut project.languages).unwrap().add(language);
3070 });
3071
3072 let (tree, _) = project
3073 .update(&mut cx, |project, cx| {
3074 project.find_or_create_local_worktree("/dir", false, cx)
3075 })
3076 .await
3077 .unwrap();
3078 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3079
3080 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3081 .await;
3082
3083 // Cause worktree to start the fake language server
3084 let _buffer = project
3085 .update(&mut cx, |project, cx| {
3086 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3087 })
3088 .await
3089 .unwrap();
3090
3091 let mut events = subscribe(&project, &mut cx);
3092
3093 let mut fake_server = fake_servers.next().await.unwrap();
3094 fake_server.start_progress(&progress_token).await;
3095 assert_eq!(
3096 events.next().await.unwrap(),
3097 Event::DiskBasedDiagnosticsStarted
3098 );
3099
3100 fake_server.start_progress(&progress_token).await;
3101 fake_server.end_progress(&progress_token).await;
3102 fake_server.start_progress(&progress_token).await;
3103
3104 fake_server
3105 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3106 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3107 version: None,
3108 diagnostics: vec![lsp::Diagnostic {
3109 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3110 severity: Some(lsp::DiagnosticSeverity::ERROR),
3111 message: "undefined variable 'A'".to_string(),
3112 ..Default::default()
3113 }],
3114 })
3115 .await;
3116 assert_eq!(
3117 events.next().await.unwrap(),
3118 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3119 );
3120
3121 fake_server.end_progress(&progress_token).await;
3122 fake_server.end_progress(&progress_token).await;
3123 assert_eq!(
3124 events.next().await.unwrap(),
3125 Event::DiskBasedDiagnosticsUpdated
3126 );
3127 assert_eq!(
3128 events.next().await.unwrap(),
3129 Event::DiskBasedDiagnosticsFinished
3130 );
3131
3132 let buffer = project
3133 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3134 .await
3135 .unwrap();
3136
3137 buffer.read_with(&cx, |buffer, _| {
3138 let snapshot = buffer.snapshot();
3139 let diagnostics = snapshot
3140 .diagnostics_in_range::<_, Point>(0..buffer.len())
3141 .collect::<Vec<_>>();
3142 assert_eq!(
3143 diagnostics,
3144 &[DiagnosticEntry {
3145 range: Point::new(0, 9)..Point::new(0, 10),
3146 diagnostic: Diagnostic {
3147 severity: lsp::DiagnosticSeverity::ERROR,
3148 message: "undefined variable 'A'".to_string(),
3149 group_id: 0,
3150 is_primary: true,
3151 ..Default::default()
3152 }
3153 }]
3154 )
3155 });
3156 }
3157
3158 #[gpui::test]
3159 async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
3160 let dir = temp_tree(json!({
3161 "root": {
3162 "dir1": {},
3163 "dir2": {
3164 "dir3": {}
3165 }
3166 }
3167 }));
3168
3169 let project = Project::test(Arc::new(RealFs), &mut cx);
3170 let (tree, _) = project
3171 .update(&mut cx, |project, cx| {
3172 project.find_or_create_local_worktree(&dir.path(), false, cx)
3173 })
3174 .await
3175 .unwrap();
3176
3177 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3178 .await;
3179
3180 let cancel_flag = Default::default();
3181 let results = project
3182 .read_with(&cx, |project, cx| {
3183 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3184 })
3185 .await;
3186
3187 assert!(results.is_empty());
3188 }
3189
3190 #[gpui::test]
3191 async fn test_definition(mut cx: gpui::TestAppContext) {
3192 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3193 let language = Arc::new(Language::new(
3194 LanguageConfig {
3195 name: "Rust".to_string(),
3196 path_suffixes: vec!["rs".to_string()],
3197 language_server: Some(language_server_config),
3198 ..Default::default()
3199 },
3200 Some(tree_sitter_rust::language()),
3201 ));
3202
3203 let fs = FakeFs::new(cx.background());
3204 fs.insert_tree(
3205 "/dir",
3206 json!({
3207 "a.rs": "const fn a() { A }",
3208 "b.rs": "const y: i32 = crate::a()",
3209 }),
3210 )
3211 .await;
3212
3213 let project = Project::test(fs, &mut cx);
3214 project.update(&mut cx, |project, _| {
3215 Arc::get_mut(&mut project.languages).unwrap().add(language);
3216 });
3217
3218 let (tree, _) = project
3219 .update(&mut cx, |project, cx| {
3220 project.find_or_create_local_worktree("/dir/b.rs", false, cx)
3221 })
3222 .await
3223 .unwrap();
3224 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3225 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3226 .await;
3227
3228 let buffer = project
3229 .update(&mut cx, |project, cx| {
3230 project.open_buffer(
3231 ProjectPath {
3232 worktree_id,
3233 path: Path::new("").into(),
3234 },
3235 cx,
3236 )
3237 })
3238 .await
3239 .unwrap();
3240
3241 let mut fake_server = fake_servers.next().await.unwrap();
3242 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params| {
3243 let params = params.text_document_position_params;
3244 assert_eq!(
3245 params.text_document.uri.to_file_path().unwrap(),
3246 Path::new("/dir/b.rs"),
3247 );
3248 assert_eq!(params.position, lsp::Position::new(0, 22));
3249
3250 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3251 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
3252 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3253 )))
3254 });
3255
3256 let mut definitions = project
3257 .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
3258 .await
3259 .unwrap();
3260
3261 assert_eq!(definitions.len(), 1);
3262 let definition = definitions.pop().unwrap();
3263 cx.update(|cx| {
3264 let target_buffer = definition.target_buffer.read(cx);
3265 assert_eq!(
3266 target_buffer
3267 .file()
3268 .unwrap()
3269 .as_local()
3270 .unwrap()
3271 .abs_path(cx),
3272 Path::new("/dir/a.rs"),
3273 );
3274 assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
3275 assert_eq!(
3276 list_worktrees(&project, cx),
3277 [("/dir/b.rs".as_ref(), false), ("/dir/a.rs".as_ref(), true)]
3278 );
3279
3280 drop(definition);
3281 });
3282 cx.read(|cx| {
3283 assert_eq!(
3284 list_worktrees(&project, cx),
3285 [("/dir/b.rs".as_ref(), false)]
3286 );
3287 });
3288
3289 fn list_worktrees<'a>(
3290 project: &'a ModelHandle<Project>,
3291 cx: &'a AppContext,
3292 ) -> Vec<(&'a Path, bool)> {
3293 project
3294 .read(cx)
3295 .worktrees(cx)
3296 .map(|worktree| {
3297 let worktree = worktree.read(cx);
3298 (
3299 worktree.as_local().unwrap().abs_path().as_ref(),
3300 worktree.is_weak(),
3301 )
3302 })
3303 .collect::<Vec<_>>()
3304 }
3305 }
3306
3307 #[gpui::test]
3308 async fn test_save_file(mut cx: gpui::TestAppContext) {
3309 let fs = FakeFs::new(cx.background());
3310 fs.insert_tree(
3311 "/dir",
3312 json!({
3313 "file1": "the old contents",
3314 }),
3315 )
3316 .await;
3317
3318 let project = Project::test(fs.clone(), &mut cx);
3319 let worktree_id = project
3320 .update(&mut cx, |p, cx| {
3321 p.find_or_create_local_worktree("/dir", false, cx)
3322 })
3323 .await
3324 .unwrap()
3325 .0
3326 .read_with(&cx, |tree, _| tree.id());
3327
3328 let buffer = project
3329 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3330 .await
3331 .unwrap();
3332 buffer
3333 .update(&mut cx, |buffer, cx| {
3334 assert_eq!(buffer.text(), "the old contents");
3335 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3336 buffer.save(cx)
3337 })
3338 .await
3339 .unwrap();
3340
3341 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3342 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3343 }
3344
3345 #[gpui::test]
3346 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
3347 let fs = FakeFs::new(cx.background());
3348 fs.insert_tree(
3349 "/dir",
3350 json!({
3351 "file1": "the old contents",
3352 }),
3353 )
3354 .await;
3355
3356 let project = Project::test(fs.clone(), &mut cx);
3357 let worktree_id = project
3358 .update(&mut cx, |p, cx| {
3359 p.find_or_create_local_worktree("/dir/file1", false, cx)
3360 })
3361 .await
3362 .unwrap()
3363 .0
3364 .read_with(&cx, |tree, _| tree.id());
3365
3366 let buffer = project
3367 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3368 .await
3369 .unwrap();
3370 buffer
3371 .update(&mut cx, |buffer, cx| {
3372 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3373 buffer.save(cx)
3374 })
3375 .await
3376 .unwrap();
3377
3378 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3379 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3380 }
3381
3382 #[gpui::test(retries = 5)]
3383 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3384 let dir = temp_tree(json!({
3385 "a": {
3386 "file1": "",
3387 "file2": "",
3388 "file3": "",
3389 },
3390 "b": {
3391 "c": {
3392 "file4": "",
3393 "file5": "",
3394 }
3395 }
3396 }));
3397
3398 let project = Project::test(Arc::new(RealFs), &mut cx);
3399 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3400
3401 let (tree, _) = project
3402 .update(&mut cx, |p, cx| {
3403 p.find_or_create_local_worktree(dir.path(), false, cx)
3404 })
3405 .await
3406 .unwrap();
3407 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3408
3409 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3410 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3411 async move { buffer.await.unwrap() }
3412 };
3413 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3414 tree.read_with(cx, |tree, _| {
3415 tree.entry_for_path(path)
3416                    .unwrap_or_else(|| panic!("no entry for path {}", path))
3417 .id
3418 })
3419 };
3420
3421 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3422 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3423 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3424 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3425
3426 let file2_id = id_for_path("a/file2", &cx);
3427 let file3_id = id_for_path("a/file3", &cx);
3428 let file4_id = id_for_path("b/c/file4", &cx);
3429
3430 // Wait for the initial scan.
3431 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3432 .await;
3433
3434 // Create a remote copy of this worktree.
3435 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
3436 let (remote, load_task) = cx.update(|cx| {
3437 Worktree::remote(
3438 1,
3439 1,
3440 initial_snapshot.to_proto(&Default::default(), Default::default()),
3441 rpc.clone(),
3442 cx,
3443 )
3444 });
3445 load_task.await;
3446
3447 cx.read(|cx| {
3448 assert!(!buffer2.read(cx).is_dirty());
3449 assert!(!buffer3.read(cx).is_dirty());
3450 assert!(!buffer4.read(cx).is_dirty());
3451 assert!(!buffer5.read(cx).is_dirty());
3452 });
3453
3454 // Rename and delete files and directories.
3455 tree.flush_fs_events(&cx).await;
3456 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3457 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3458 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3459 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3460 tree.flush_fs_events(&cx).await;
3461
3462 let expected_paths = vec![
3463 "a",
3464 "a/file1",
3465 "a/file2.new",
3466 "b",
3467 "d",
3468 "d/file3",
3469 "d/file4",
3470 ];
3471
3472 cx.read(|app| {
3473 assert_eq!(
3474 tree.read(app)
3475 .paths()
3476 .map(|p| p.to_str().unwrap())
3477 .collect::<Vec<_>>(),
3478 expected_paths
3479 );
3480
3481 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3482 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3483 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3484
3485 assert_eq!(
3486 buffer2.read(app).file().unwrap().path().as_ref(),
3487 Path::new("a/file2.new")
3488 );
3489 assert_eq!(
3490 buffer3.read(app).file().unwrap().path().as_ref(),
3491 Path::new("d/file3")
3492 );
3493 assert_eq!(
3494 buffer4.read(app).file().unwrap().path().as_ref(),
3495 Path::new("d/file4")
3496 );
3497 assert_eq!(
3498 buffer5.read(app).file().unwrap().path().as_ref(),
3499 Path::new("b/c/file5")
3500 );
3501
3502 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3503 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3504 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3505 assert!(buffer5.read(app).file().unwrap().is_deleted());
3506 });
3507
3508 // Update the remote worktree. Check that it becomes consistent with the
3509 // local worktree.
3510 remote.update(&mut cx, |remote, cx| {
3511 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
3512 &initial_snapshot,
3513 1,
3514 1,
3515 0,
3516 true,
3517 );
3518 remote
3519 .as_remote_mut()
3520 .unwrap()
3521 .snapshot
3522 .apply_remote_update(update_message)
3523 .unwrap();
3524
3525 assert_eq!(
3526 remote
3527 .paths()
3528 .map(|p| p.to_str().unwrap())
3529 .collect::<Vec<_>>(),
3530 expected_paths
3531 );
3532 });
3533 }
3534
3535 #[gpui::test]
3536 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3537 let fs = FakeFs::new(cx.background());
3538 fs.insert_tree(
3539 "/the-dir",
3540 json!({
3541 "a.txt": "a-contents",
3542 "b.txt": "b-contents",
3543 }),
3544 )
3545 .await;
3546
3547 let project = Project::test(fs.clone(), &mut cx);
3548 let worktree_id = project
3549 .update(&mut cx, |p, cx| {
3550 p.find_or_create_local_worktree("/the-dir", false, cx)
3551 })
3552 .await
3553 .unwrap()
3554 .0
3555 .read_with(&cx, |tree, _| tree.id());
3556
3557 // Spawn multiple tasks to open paths, repeating some paths.
3558 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3559 (
3560 p.open_buffer((worktree_id, "a.txt"), cx),
3561 p.open_buffer((worktree_id, "b.txt"), cx),
3562 p.open_buffer((worktree_id, "a.txt"), cx),
3563 )
3564 });
3565
3566 let buffer_a_1 = buffer_a_1.await.unwrap();
3567 let buffer_a_2 = buffer_a_2.await.unwrap();
3568 let buffer_b = buffer_b.await.unwrap();
3569 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3570 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3571
3572 // There is only one buffer per path.
3573 let buffer_a_id = buffer_a_1.id();
3574 assert_eq!(buffer_a_2.id(), buffer_a_id);
3575
3576 // Open the same path again while it is still open.
3577 drop(buffer_a_1);
3578 let buffer_a_3 = project
3579 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3580 .await
3581 .unwrap();
3582
3583 // There's still only one buffer per path.
3584 assert_eq!(buffer_a_3.id(), buffer_a_id);
3585 }
3586
3587 #[gpui::test]
3588 async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
3589 use std::fs;
3590
3591 let dir = temp_tree(json!({
3592 "file1": "abc",
3593 "file2": "def",
3594 "file3": "ghi",
3595 }));
3596
3597 let project = Project::test(Arc::new(RealFs), &mut cx);
3598 let (worktree, _) = project
3599 .update(&mut cx, |p, cx| {
3600 p.find_or_create_local_worktree(dir.path(), false, cx)
3601 })
3602 .await
3603 .unwrap();
3604 let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
3605
3606 worktree.flush_fs_events(&cx).await;
3607 worktree
3608 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3609 .await;
3610
3611 let buffer1 = project
3612 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3613 .await
3614 .unwrap();
3615 let events = Rc::new(RefCell::new(Vec::new()));
3616
3617 // initially, the buffer isn't dirty.
3618 buffer1.update(&mut cx, |buffer, cx| {
3619 cx.subscribe(&buffer1, {
3620 let events = events.clone();
3621 move |_, _, event, _| events.borrow_mut().push(event.clone())
3622 })
3623 .detach();
3624
3625 assert!(!buffer.is_dirty());
3626 assert!(events.borrow().is_empty());
3627
3628 buffer.edit(vec![1..2], "", cx);
3629 });
3630
3631 // after the first edit, the buffer is dirty, and emits a dirtied event.
3632 buffer1.update(&mut cx, |buffer, cx| {
3633            assert_eq!(buffer.text(), "ac");
3634 assert!(buffer.is_dirty());
3635 assert_eq!(
3636 *events.borrow(),
3637 &[language::Event::Edited, language::Event::Dirtied]
3638 );
3639 events.borrow_mut().clear();
3640 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
3641 });
3642
3643 // after saving, the buffer is not dirty, and emits a saved event.
3644 buffer1.update(&mut cx, |buffer, cx| {
3645 assert!(!buffer.is_dirty());
3646 assert_eq!(*events.borrow(), &[language::Event::Saved]);
3647 events.borrow_mut().clear();
3648
3649 buffer.edit(vec![1..1], "B", cx);
3650 buffer.edit(vec![2..2], "D", cx);
3651 });
3652
3653 // after editing again, the buffer is dirty, and emits another dirty event.
3654 buffer1.update(&mut cx, |buffer, cx| {
3655            assert_eq!(buffer.text(), "aBDc");
3656 assert!(buffer.is_dirty());
3657 assert_eq!(
3658 *events.borrow(),
3659 &[
3660 language::Event::Edited,
3661 language::Event::Dirtied,
3662 language::Event::Edited,
3663 ],
3664 );
3665 events.borrow_mut().clear();
3666
3667 // TODO - currently, after restoring the buffer to its
3668            // previously-saved state, the buffer is still considered dirty.
3669 buffer.edit([1..3], "", cx);
3670            assert_eq!(buffer.text(), "ac");
3671 assert!(buffer.is_dirty());
3672 });
3673
3674 assert_eq!(*events.borrow(), &[language::Event::Edited]);
3675
3676 // When a file is deleted, the buffer is considered dirty.
3677 let events = Rc::new(RefCell::new(Vec::new()));
3678 let buffer2 = project
3679 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
3680 .await
3681 .unwrap();
3682 buffer2.update(&mut cx, |_, cx| {
3683 cx.subscribe(&buffer2, {
3684 let events = events.clone();
3685 move |_, _, event, _| events.borrow_mut().push(event.clone())
3686 })
3687 .detach();
3688 });
3689
3690 fs::remove_file(dir.path().join("file2")).unwrap();
3691 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
3692 assert_eq!(
3693 *events.borrow(),
3694 &[language::Event::Dirtied, language::Event::FileHandleChanged]
3695 );
3696
3697 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3698 let events = Rc::new(RefCell::new(Vec::new()));
3699 let buffer3 = project
3700 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
3701 .await
3702 .unwrap();
3703 buffer3.update(&mut cx, |_, cx| {
3704 cx.subscribe(&buffer3, {
3705 let events = events.clone();
3706 move |_, _, event, _| events.borrow_mut().push(event.clone())
3707 })
3708 .detach();
3709 });
3710
3711 worktree.flush_fs_events(&cx).await;
3712 buffer3.update(&mut cx, |buffer, cx| {
3713 buffer.edit(Some(0..0), "x", cx);
3714 });
3715 events.borrow_mut().clear();
3716 fs::remove_file(dir.path().join("file3")).unwrap();
3717 buffer3
3718 .condition(&cx, |_, _| !events.borrow().is_empty())
3719 .await;
3720 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
3721 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3722 }

    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
        use std::fs;

        let initial_contents = "aaa\nbbbbb\nc\n";
        let dir = temp_tree(json!({ "the-file": initial_contents }));

        let project = Project::test(Arc::new(RealFs), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        worktree
            .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let abs_path = dir.path().join("the-file");
        let buffer = project
            .update(&mut cx, |p, cx| {
                p.open_buffer((worktree_id, "the-file"), cx)
            })
            .await
            .unwrap();

        // TODO
        // Add a cursor on each row.
        // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
        //     assert!(!buffer.is_dirty());
        //     buffer.add_selection_set(
        //         &(0..3)
        //             .map(|row| Selection {
        //                 id: row as usize,
        //                 start: Point::new(row, 1),
        //                 end: Point::new(row, 1),
        //                 reversed: false,
        //                 goal: SelectionGoal::None,
        //             })
        //             .collect::<Vec<_>>(),
        //         cx,
        //     )
        // });

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(&cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(&mut cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer
        buffer.update(&mut cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }

    #[gpui::test]
    async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

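        // Construct an LSP publishDiagnostics payload by hand: two primary
        // diagnostics and three hint-severity diagnostics that cross-reference one
        // another via `related_information`.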
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

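        // Apply the diagnostics directly to the project, just as they would be
        // applied when a running language server publishes them.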
        project
            .update(&mut cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());

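        // Hint diagnostics that are tied to a primary diagnostic through
        // `related_information` end up in the same group: all entries in a group
        // share a `group_id`, and `is_primary` marks the group's main diagnostic.
        // Entries are yielded in buffer order. A consumer could inspect one group
        // roughly like this (illustrative sketch only, using the same calls that are
        // asserted on below):
        //
        //     let group: Vec<_> = buffer.diagnostic_group::<Point>(1).collect();
        //     let primary = group.iter().find(|entry| entry.diagnostic.is_primary);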
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }

    #[gpui::test]
    async fn test_rename(mut cx: gpui::TestAppContext) {
        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), &mut cx);
        project.update(&mut cx, |project, _| {
            Arc::get_mut(&mut project.languages).unwrap().add(language);
        });

        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        let buffer = project
            .update(&mut cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("one.rs")), cx)
            })
            .await
            .unwrap();

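        // Opening a Rust buffer causes the project to start the fake language server
        // configured above; take its handle so the test can answer its requests.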
        let mut fake_server = fake_servers.next().await.unwrap();

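        // Kick off the rename preparation first, then let the fake server answer the
        // resulting LSP request; only after that is the project's response awaited.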
        let response = project.update(&mut cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _>(|params| {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                )))
            })
            .next()
            .await
            .unwrap();
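        // The prepared rename comes back as a range in the buffer; resolve it to
        // offsets to check that it covers `ONE` (offsets 6..9).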
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(&cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

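        // Perform the rename. The fake server responds with a WorkspaceEdit touching
        // both files, which the project applies and returns as a ProjectTransaction
        // keyed by the edited buffers.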
        let response = project.update(&mut cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _>(|params| {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                })
            })
            .next()
            .await
            .unwrap();
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(&cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(&cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }
}