1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod worktree;
5
6use anyhow::{anyhow, Context, Result};
7use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
8use clock::ReplicaId;
9use collections::{hash_map, HashMap, HashSet};
10use futures::{future::Shared, Future, FutureExt};
11use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
12use gpui::{
13 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
14 UpgradeModelHandle, WeakModelHandle,
15};
16use language::{
17 range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, Completion, CompletionLabel,
18 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
19 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
20};
21use lsp::{DiagnosticSeverity, LanguageServer};
22use lsp_command::*;
23use postage::{broadcast, prelude::Stream, sink::Sink, watch};
24use smol::block_on;
25use std::{
26 convert::TryInto,
27 ops::Range,
28 path::{Path, PathBuf},
29 sync::{atomic::AtomicBool, Arc},
30 time::Instant,
31};
32use util::{post_inc, ResultExt, TryFutureExt as _};
33
34pub use fs::*;
35pub use worktree::*;
36
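/// The central model for a workspace: tracks the project's worktrees, its open
/// and loading buffers, the language servers that have been started for it,
/// and, when the project is shared or joined remotely, its collaborators and
/// RPC subscriptions.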
37pub struct Project {
38 worktrees: Vec<WorktreeHandle>,
39 active_entry: Option<ProjectEntry>,
40 languages: Arc<LanguageRegistry>,
41 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
42 started_language_servers:
43 HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
44 client: Arc<client::Client>,
45 user_store: ModelHandle<UserStore>,
46 fs: Arc<dyn Fs>,
47 client_state: ProjectClientState,
48 collaborators: HashMap<PeerId, Collaborator>,
49 subscriptions: Vec<client::Subscription>,
50 language_servers_with_diagnostics_running: isize,
51 open_buffers: HashMap<u64, OpenBuffer>,
52 opened_buffer: broadcast::Sender<()>,
53 loading_buffers: HashMap<
54 ProjectPath,
55 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
56 >,
57 shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
58}
59
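/// An entry in `open_buffers`, keyed by the buffer's remote id. While a buffer
/// is still loading, operations received for it are queued in `Loading` and
/// applied once the buffer is registered (see `register_buffer`).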
60enum OpenBuffer {
61 Loaded(WeakModelHandle<Buffer>),
62 Loading(Vec<Operation>),
63}
64
65enum WorktreeHandle {
66 Strong(ModelHandle<Worktree>),
67 Weak(WeakModelHandle<Worktree>),
68}
69
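/// Distinguishes projects hosted locally (which may or may not be shared) from
/// projects joined from a remote host.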
70enum ProjectClientState {
71 Local {
72 is_shared: bool,
73 remote_id_tx: watch::Sender<Option<u64>>,
74 remote_id_rx: watch::Receiver<Option<u64>>,
75 _maintain_remote_id_task: Task<Option<()>>,
76 },
77 Remote {
78 sharing_has_stopped: bool,
79 remote_id: u64,
80 replica_id: ReplicaId,
81 },
82}
83
84#[derive(Clone, Debug)]
85pub struct Collaborator {
86 pub user: Arc<User>,
87 pub peer_id: PeerId,
88 pub replica_id: ReplicaId,
89}
90
91#[derive(Clone, Debug, PartialEq)]
92pub enum Event {
93 ActiveEntryChanged(Option<ProjectEntry>),
94 WorktreeRemoved(WorktreeId),
95 DiskBasedDiagnosticsStarted,
96 DiskBasedDiagnosticsUpdated,
97 DiskBasedDiagnosticsFinished,
98 DiagnosticsUpdated(ProjectPath),
99}
100
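/// A path to an entry, addressed relative to one of the project's worktrees.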
101#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
102pub struct ProjectPath {
103 pub worktree_id: WorktreeId,
104 pub path: Arc<Path>,
105}
106
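/// Counts of primary diagnostics for a path, broken down by severity.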
107#[derive(Clone, Debug, Default, PartialEq)]
108pub struct DiagnosticSummary {
109 pub error_count: usize,
110 pub warning_count: usize,
111 pub info_count: usize,
112 pub hint_count: usize,
113}
114
115#[derive(Debug)]
116pub struct Definition {
117 pub target_buffer: ModelHandle<Buffer>,
118 pub target_range: Range<language::Anchor>,
119}
120
121#[derive(Default)]
122pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
123
124impl DiagnosticSummary {
125 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
126 let mut this = Self {
127 error_count: 0,
128 warning_count: 0,
129 info_count: 0,
130 hint_count: 0,
131 };
132
133 for entry in diagnostics {
134 if entry.diagnostic.is_primary {
135 match entry.diagnostic.severity {
136 DiagnosticSeverity::ERROR => this.error_count += 1,
137 DiagnosticSeverity::WARNING => this.warning_count += 1,
138 DiagnosticSeverity::INFORMATION => this.info_count += 1,
139 DiagnosticSeverity::HINT => this.hint_count += 1,
140 _ => {}
141 }
142 }
143 }
144
145 this
146 }
147
148 pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
149 proto::DiagnosticSummary {
150 path: path.to_string_lossy().to_string(),
151 error_count: self.error_count as u32,
152 warning_count: self.warning_count as u32,
153 info_count: self.info_count as u32,
154 hint_count: self.hint_count as u32,
155 }
156 }
157}
158
159#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
160pub struct ProjectEntry {
161 pub worktree_id: WorktreeId,
162 pub entry_id: usize,
163}
164
165impl Project {
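/// Registers the message and request handlers that route incoming RPC traffic
/// (buffer updates, diagnostics, LSP commands, and so on) to `Project` models.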
166 pub fn init(client: &Arc<Client>) {
167 client.add_entity_message_handler(Self::handle_add_collaborator);
168 client.add_entity_message_handler(Self::handle_buffer_reloaded);
169 client.add_entity_message_handler(Self::handle_buffer_saved);
170 client.add_entity_message_handler(Self::handle_close_buffer);
171 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
172 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
173 client.add_entity_message_handler(Self::handle_remove_collaborator);
174 client.add_entity_message_handler(Self::handle_share_worktree);
175 client.add_entity_message_handler(Self::handle_unregister_worktree);
176 client.add_entity_message_handler(Self::handle_unshare_project);
177 client.add_entity_message_handler(Self::handle_update_buffer_file);
178 client.add_entity_message_handler(Self::handle_update_buffer);
179 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
180 client.add_entity_message_handler(Self::handle_update_worktree);
181 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
182 client.add_entity_request_handler(Self::handle_apply_code_action);
183 client.add_entity_request_handler(Self::handle_format_buffers);
184 client.add_entity_request_handler(Self::handle_get_code_actions);
185 client.add_entity_request_handler(Self::handle_get_completions);
186 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
187 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
188 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
189 client.add_entity_request_handler(Self::handle_open_buffer);
190 client.add_entity_request_handler(Self::handle_save_buffer);
191 }
192
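/// Creates a project backed by the local filesystem. A background task watches
/// the client's connection status: whenever the client connects, it registers
/// the project and its worktrees with the server and records the assigned
/// remote id; when the connection is lost, the remote id is cleared.
///
/// A minimal usage sketch (illustrative only; `client`, `user_store`,
/// `languages`, `fs`, and `cx` are assumed to already exist with the types in
/// this signature):
///
/// ```ignore
/// let project = Project::local(client, user_store, languages, fs, cx);
/// let _share_task = project.update(cx, |project, cx| project.share(cx));
/// ```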
193 pub fn local(
194 client: Arc<Client>,
195 user_store: ModelHandle<UserStore>,
196 languages: Arc<LanguageRegistry>,
197 fs: Arc<dyn Fs>,
198 cx: &mut MutableAppContext,
199 ) -> ModelHandle<Self> {
200 cx.add_model(|cx: &mut ModelContext<Self>| {
201 let (remote_id_tx, remote_id_rx) = watch::channel();
202 let _maintain_remote_id_task = cx.spawn_weak({
203 let rpc = client.clone();
204 move |this, mut cx| {
205 async move {
206 let mut status = rpc.status();
207 while let Some(status) = status.recv().await {
208 if let Some(this) = this.upgrade(&cx) {
209 let remote_id = if let client::Status::Connected { .. } = status {
210 let response = rpc.request(proto::RegisterProject {}).await?;
211 Some(response.project_id)
212 } else {
213 None
214 };
215
216 if let Some(project_id) = remote_id {
217 let mut registrations = Vec::new();
218 this.update(&mut cx, |this, cx| {
219 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
220 registrations.push(worktree.update(
221 cx,
222 |worktree, cx| {
223 let worktree = worktree.as_local_mut().unwrap();
224 worktree.register(project_id, cx)
225 },
226 ));
227 }
228 });
229 for registration in registrations {
230 registration.await?;
231 }
232 }
233 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
234 }
235 }
236 Ok(())
237 }
238 .log_err()
239 }
240 });
241
242 Self {
243 worktrees: Default::default(),
244 collaborators: Default::default(),
245 open_buffers: Default::default(),
246 loading_buffers: Default::default(),
247 shared_buffers: Default::default(),
248 client_state: ProjectClientState::Local {
249 is_shared: false,
250 remote_id_tx,
251 remote_id_rx,
252 _maintain_remote_id_task,
253 },
254 opened_buffer: broadcast::channel(1).0,
255 subscriptions: Vec::new(),
256 active_entry: None,
257 languages,
258 client,
259 user_store,
260 fs,
261 language_servers_with_diagnostics_running: 0,
262 language_servers: Default::default(),
263 started_language_servers: Default::default(),
264 }
265 })
266 }
267
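/// Joins the remote project with the given id: authenticates and connects the
/// client, requests `JoinProject`, builds remote worktrees from the response,
/// and loads the collaborators' user records before returning the model.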
268 pub async fn remote(
269 remote_id: u64,
270 client: Arc<Client>,
271 user_store: ModelHandle<UserStore>,
272 languages: Arc<LanguageRegistry>,
273 fs: Arc<dyn Fs>,
274 cx: &mut AsyncAppContext,
275 ) -> Result<ModelHandle<Self>> {
276 client.authenticate_and_connect(&cx).await?;
277
278 let response = client
279 .request(proto::JoinProject {
280 project_id: remote_id,
281 })
282 .await?;
283
284 let replica_id = response.replica_id as ReplicaId;
285
286 let mut worktrees = Vec::new();
287 for worktree in response.worktrees {
288 let (worktree, load_task) = cx
289 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
290 worktrees.push(worktree);
291 load_task.detach();
292 }
293
294 let this = cx.add_model(|cx| {
295 let mut this = Self {
296 worktrees: Vec::new(),
297 open_buffers: Default::default(),
298 loading_buffers: Default::default(),
299 opened_buffer: broadcast::channel(1).0,
300 shared_buffers: Default::default(),
301 active_entry: None,
302 collaborators: Default::default(),
303 languages,
304 user_store: user_store.clone(),
305 fs,
306 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
307 client,
308 client_state: ProjectClientState::Remote {
309 sharing_has_stopped: false,
310 remote_id,
311 replica_id,
312 },
313 language_servers_with_diagnostics_running: 0,
314 language_servers: Default::default(),
315 started_language_servers: Default::default(),
316 };
317 for worktree in worktrees {
318 this.add_worktree(&worktree, cx);
319 }
320 this
321 });
322
323 let user_ids = response
324 .collaborators
325 .iter()
326 .map(|peer| peer.user_id)
327 .collect();
328 user_store
329 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
330 .await?;
331 let mut collaborators = HashMap::default();
332 for message in response.collaborators {
333 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
334 collaborators.insert(collaborator.peer_id, collaborator);
335 }
336
337 this.update(cx, |this, _| {
338 this.collaborators = collaborators;
339 });
340
341 Ok(this)
342 }
343
344 #[cfg(any(test, feature = "test-support"))]
345 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
346 let languages = Arc::new(LanguageRegistry::new());
347 let http_client = client::test::FakeHttpClient::with_404_response();
348 let client = client::Client::new(http_client.clone());
349 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
350 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
351 }
352
353 #[cfg(any(test, feature = "test-support"))]
354 pub fn shared_buffer(&self, peer_id: PeerId, remote_id: u64) -> Option<ModelHandle<Buffer>> {
355 self.shared_buffers
356 .get(&peer_id)
357 .and_then(|buffers| buffers.get(&remote_id))
358 .cloned()
359 }
360
361 #[cfg(any(test, feature = "test-support"))]
362 pub fn has_buffered_operations(&self) -> bool {
363 self.open_buffers
364 .values()
365 .any(|buffer| matches!(buffer, OpenBuffer::Loading(_)))
366 }
367
368 pub fn fs(&self) -> &Arc<dyn Fs> {
369 &self.fs
370 }
371
372 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
373 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
374 *remote_id_tx.borrow_mut() = remote_id;
375 }
376
377 self.subscriptions.clear();
378 if let Some(remote_id) = remote_id {
379 self.subscriptions
380 .push(self.client.add_model_for_remote_entity(remote_id, cx));
381 }
382 }
383
384 pub fn remote_id(&self) -> Option<u64> {
385 match &self.client_state {
386 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
387 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
388 }
389 }
390
391 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
392 let mut id = None;
393 let mut watch = None;
394 match &self.client_state {
395 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
396 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
397 }
398
399 async move {
400 if let Some(id) = id {
401 return id;
402 }
403 let mut watch = watch.unwrap();
404 loop {
405 let id = *watch.borrow();
406 if let Some(id) = id {
407 return id;
408 }
409 watch.recv().await;
410 }
411 }
412 }
413
414 pub fn replica_id(&self) -> ReplicaId {
415 match &self.client_state {
416 ProjectClientState::Local { .. } => 0,
417 ProjectClientState::Remote { replica_id, .. } => *replica_id,
418 }
419 }
420
421 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
422 &self.collaborators
423 }
424
425 pub fn worktrees<'a>(
426 &'a self,
427 cx: &'a AppContext,
428 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
429 self.worktrees
430 .iter()
431 .filter_map(move |worktree| worktree.upgrade(cx))
432 }
433
434 pub fn worktree_for_id(
435 &self,
436 id: WorktreeId,
437 cx: &AppContext,
438 ) -> Option<ModelHandle<Worktree>> {
439 self.worktrees(cx)
440 .find(|worktree| worktree.read(cx).id() == id)
441 }
442
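/// Shares this local project with collaborators: requests `ShareProject` and
/// then shares each local worktree. Fails if the project has not been assigned
/// a remote id yet or if it was joined remotely.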
443 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
444 let rpc = self.client.clone();
445 cx.spawn(|this, mut cx| async move {
446 let project_id = this.update(&mut cx, |this, _| {
447 if let ProjectClientState::Local {
448 is_shared,
449 remote_id_rx,
450 ..
451 } = &mut this.client_state
452 {
453 *is_shared = true;
454 remote_id_rx
455 .borrow()
456 .ok_or_else(|| anyhow!("no project id"))
457 } else {
458 Err(anyhow!("can't share a remote project"))
459 }
460 })?;
461
462 rpc.request(proto::ShareProject { project_id }).await?;
463 let mut tasks = Vec::new();
464 this.update(&mut cx, |this, cx| {
465 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
466 worktree.update(cx, |worktree, cx| {
467 let worktree = worktree.as_local_mut().unwrap();
468 tasks.push(worktree.share(project_id, cx));
469 });
470 }
471 });
472 for task in tasks {
473 task.await?;
474 }
475 this.update(&mut cx, |_, cx| cx.notify());
476 Ok(())
477 })
478 }
479
480 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
481 let rpc = self.client.clone();
482 cx.spawn(|this, mut cx| async move {
483 let project_id = this.update(&mut cx, |this, _| {
484 if let ProjectClientState::Local {
485 is_shared,
486 remote_id_rx,
487 ..
488 } = &mut this.client_state
489 {
490 *is_shared = false;
491 remote_id_rx
492 .borrow()
493 .ok_or_else(|| anyhow!("no project id"))
494 } else {
495 Err(anyhow!("can't share a remote project"))
496 }
497 })?;
498
499 rpc.send(proto::UnshareProject { project_id })?;
500 this.update(&mut cx, |this, cx| {
501 this.collaborators.clear();
502 this.shared_buffers.clear();
503 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
504 worktree.update(cx, |worktree, _| {
505 worktree.as_local_mut().unwrap().unshare();
506 });
507 }
508 cx.notify()
509 });
510 Ok(())
511 })
512 }
513
514 pub fn is_read_only(&self) -> bool {
515 match &self.client_state {
516 ProjectClientState::Local { .. } => false,
517 ProjectClientState::Remote {
518 sharing_has_stopped,
519 ..
520 } => *sharing_has_stopped,
521 }
522 }
523
524 pub fn is_local(&self) -> bool {
525 match &self.client_state {
526 ProjectClientState::Local { .. } => true,
527 ProjectClientState::Remote { .. } => false,
528 }
529 }
530
531 pub fn is_remote(&self) -> bool {
532 !self.is_local()
533 }
534
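/// Opens the buffer for the given project path. If the buffer is already open
/// it is returned directly; if a load for the same path is already in flight,
/// the caller waits on that load; otherwise a new load is started, either from
/// the local worktree or over RPC for remote worktrees.
///
/// A minimal usage sketch (illustrative only; assumes an existing
/// `project: ModelHandle<Project>`, a known `worktree_id`, and a mutable app
/// context `cx`):
///
/// ```ignore
/// let open_task = project.update(cx, |project, cx| {
///     project.open_buffer(
///         ProjectPath {
///             worktree_id,
///             path: Path::new("src/main.rs").into(),
///         },
///         cx,
///     )
/// });
/// ```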
535 pub fn open_buffer(
536 &mut self,
537 path: impl Into<ProjectPath>,
538 cx: &mut ModelContext<Self>,
539 ) -> Task<Result<ModelHandle<Buffer>>> {
540 let project_path = path.into();
541 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
542 worktree
543 } else {
544 return Task::ready(Err(anyhow!("no such worktree")));
545 };
546
547 // If there is already a buffer for the given path, then return it.
548 let existing_buffer = self.get_open_buffer(&project_path, cx);
549 if let Some(existing_buffer) = existing_buffer {
550 return Task::ready(Ok(existing_buffer));
551 }
552
553 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
554 // If the given path is already being loaded, then wait for that existing
555 // task to complete and return the same buffer.
556 hash_map::Entry::Occupied(e) => e.get().clone(),
557
558 // Otherwise, record the fact that this path is now being loaded.
559 hash_map::Entry::Vacant(entry) => {
560 let (mut tx, rx) = postage::watch::channel();
561 entry.insert(rx.clone());
562
563 let load_buffer = if worktree.read(cx).is_local() {
564 self.open_local_buffer(&project_path.path, &worktree, cx)
565 } else {
566 self.open_remote_buffer(&project_path.path, &worktree, cx)
567 };
568
569 cx.spawn(move |this, mut cx| async move {
570 let load_result = load_buffer.await;
571 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
572 // Record the fact that the buffer is no longer loading.
573 this.loading_buffers.remove(&project_path);
574 if this.loading_buffers.is_empty() {
575 this.open_buffers
576 .retain(|_, buffer| matches!(buffer, OpenBuffer::Loaded(_)))
577 }
578
579 let buffer = load_result.map_err(Arc::new)?;
580 Ok(buffer)
581 }));
582 })
583 .detach();
584 rx
585 }
586 };
587
588 cx.foreground().spawn(async move {
589 loop {
590 if let Some(result) = loading_watch.borrow().as_ref() {
591 match result {
592 Ok(buffer) => return Ok(buffer.clone()),
593 Err(error) => return Err(anyhow!("{}", error)),
594 }
595 }
596 loading_watch.recv().await;
597 }
598 })
599 }
600
601 fn open_local_buffer(
602 &mut self,
603 path: &Arc<Path>,
604 worktree: &ModelHandle<Worktree>,
605 cx: &mut ModelContext<Self>,
606 ) -> Task<Result<ModelHandle<Buffer>>> {
607 let load_buffer = worktree.update(cx, |worktree, cx| {
608 let worktree = worktree.as_local_mut().unwrap();
609 worktree.load_buffer(path, cx)
610 });
611 let worktree = worktree.downgrade();
612 cx.spawn(|this, mut cx| async move {
613 let buffer = load_buffer.await?;
614 let worktree = worktree
615 .upgrade(&cx)
616 .ok_or_else(|| anyhow!("worktree was removed"))?;
617 this.update(&mut cx, |this, cx| {
618 this.register_buffer(&buffer, Some(&worktree), cx)
619 })?;
620 Ok(buffer)
621 })
622 }
623
624 fn open_remote_buffer(
625 &mut self,
626 path: &Arc<Path>,
627 worktree: &ModelHandle<Worktree>,
628 cx: &mut ModelContext<Self>,
629 ) -> Task<Result<ModelHandle<Buffer>>> {
630 let rpc = self.client.clone();
631 let project_id = self.remote_id().unwrap();
632 let remote_worktree_id = worktree.read(cx).id();
633 let path = path.clone();
634 let path_string = path.to_string_lossy().to_string();
635 cx.spawn(|this, mut cx| async move {
636 let response = rpc
637 .request(proto::OpenBuffer {
638 project_id,
639 worktree_id: remote_worktree_id.to_proto(),
640 path: path_string,
641 })
642 .await?;
643 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
644 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
645 .await
646 })
647 }
648
649 fn open_local_buffer_from_lsp_path(
650 &mut self,
651 abs_path: lsp::Url,
652 lang_name: String,
653 lang_server: Arc<LanguageServer>,
654 cx: &mut ModelContext<Self>,
655 ) -> Task<Result<ModelHandle<Buffer>>> {
656 cx.spawn(|this, mut cx| async move {
657 let abs_path = abs_path
658 .to_file_path()
659 .map_err(|_| anyhow!("can't convert URI to path"))?;
660 let (worktree, relative_path) = if let Some(result) =
661 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
662 {
663 result
664 } else {
665 let worktree = this
666 .update(&mut cx, |this, cx| {
667 this.create_local_worktree(&abs_path, true, cx)
668 })
669 .await?;
670 this.update(&mut cx, |this, cx| {
671 this.language_servers
672 .insert((worktree.read(cx).id(), lang_name), lang_server);
673 });
674 (worktree, PathBuf::new())
675 };
676
677 let project_path = ProjectPath {
678 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
679 path: relative_path.into(),
680 };
681 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
682 .await
683 })
684 }
685
686 pub fn save_buffer_as(
687 &self,
688 buffer: ModelHandle<Buffer>,
689 abs_path: PathBuf,
690 cx: &mut ModelContext<Project>,
691 ) -> Task<Result<()>> {
692 let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
693 cx.spawn(|this, mut cx| async move {
694 let (worktree, path) = worktree_task.await?;
695 worktree
696 .update(&mut cx, |worktree, cx| {
697 worktree
698 .as_local_mut()
699 .unwrap()
700 .save_buffer_as(buffer.clone(), path, cx)
701 })
702 .await?;
703 this.update(&mut cx, |this, cx| {
704 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
705 });
706 Ok(())
707 })
708 }
709
710 #[cfg(any(test, feature = "test-support"))]
711 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
712 let path = path.into();
713 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
714 self.open_buffers.iter().any(|(_, buffer)| {
715 if let Some(buffer) = buffer.upgrade(cx) {
716 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
717 if file.worktree == worktree && file.path() == &path.path {
718 return true;
719 }
720 }
721 }
722 false
723 })
724 } else {
725 false
726 }
727 }
728
729 fn get_open_buffer(
730 &mut self,
731 path: &ProjectPath,
732 cx: &mut ModelContext<Self>,
733 ) -> Option<ModelHandle<Buffer>> {
734 let mut result = None;
735 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
736 self.open_buffers.retain(|_, buffer| {
737 if let Some(buffer) = buffer.upgrade(cx) {
738 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
739 if file.worktree == worktree && file.path() == &path.path {
740 result = Some(buffer);
741 }
742 }
743 true
744 } else {
745 false
746 }
747 });
748 result
749 }
750
751 fn register_buffer(
752 &mut self,
753 buffer: &ModelHandle<Buffer>,
754 worktree: Option<&ModelHandle<Worktree>>,
755 cx: &mut ModelContext<Self>,
756 ) -> Result<()> {
757 match self.open_buffers.insert(
758 buffer.read(cx).remote_id(),
759 OpenBuffer::Loaded(buffer.downgrade()),
760 ) {
761 None => {}
762 Some(OpenBuffer::Loading(operations)) => {
763 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
764 }
765 Some(OpenBuffer::Loaded(_)) => Err(anyhow!("registered the same buffer twice"))?,
766 }
767 self.assign_language_to_buffer(&buffer, worktree, cx);
768 Ok(())
769 }
770
771 fn assign_language_to_buffer(
772 &mut self,
773 buffer: &ModelHandle<Buffer>,
774 worktree: Option<&ModelHandle<Worktree>>,
775 cx: &mut ModelContext<Self>,
776 ) -> Option<()> {
777 let (path, full_path) = {
778 let file = buffer.read(cx).file()?;
779 (file.path().clone(), file.full_path(cx))
780 };
781
782 // If the buffer has a language, set it and start/assign the language server
783 if let Some(language) = self.languages.select_language(&full_path).cloned() {
784 buffer.update(cx, |buffer, cx| {
785 buffer.set_language(Some(language.clone()), cx);
786 });
787
788 // For local worktrees, start a language server if needed.
789 // Also assign the language server and any previously stored diagnostics to the buffer.
790 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
791 let worktree_id = local_worktree.id();
792 let worktree_abs_path = local_worktree.abs_path().clone();
793 let buffer = buffer.downgrade();
794 let language_server =
795 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
796
797 cx.spawn_weak(|_, mut cx| async move {
798 if let Some(language_server) = language_server.await {
799 if let Some(buffer) = buffer.upgrade(&cx) {
800 buffer.update(&mut cx, |buffer, cx| {
801 buffer.set_language_server(Some(language_server), cx);
802 });
803 }
804 }
805 })
806 .detach();
807 }
808 }
809
810 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
811 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
812 buffer.update(cx, |buffer, cx| {
813 buffer.update_diagnostics(diagnostics, None, cx).log_err();
814 });
815 }
816 }
817
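// The `Option` return type exists only so `?` can be used for the early
// returns above; callers ignore the final value.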
818 None
819 }
820
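/// Starts (or reuses) the language server for the given worktree and language,
/// memoized per `(worktree id, language name)` in `started_language_servers`.
/// The server's `PublishDiagnostics` and `Progress` notifications are funneled
/// into a channel and processed to update diagnostics and to report when
/// disk-based diagnostics start and finish.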
821 fn start_language_server(
822 &mut self,
823 worktree_id: WorktreeId,
824 worktree_path: Arc<Path>,
825 language: Arc<Language>,
826 cx: &mut ModelContext<Self>,
827 ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
828 enum LspEvent {
829 DiagnosticsStart,
830 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
831 DiagnosticsFinish,
832 }
833
834 let key = (worktree_id, language.name().to_string());
835 self.started_language_servers
836 .entry(key.clone())
837 .or_insert_with(|| {
838 let language_server = language.start_server(worktree_path, cx);
839 let rpc = self.client.clone();
840 cx.spawn_weak(|this, mut cx| async move {
841 let language_server = language_server.await.log_err().flatten();
842 if let Some(this) = this.upgrade(&cx) {
843 this.update(&mut cx, |this, _| {
844 if let Some(language_server) = language_server.clone() {
845 this.language_servers.insert(key, language_server);
846 }
847 });
848 }
849
850 let language_server = language_server?;
851
852 let disk_based_sources = language
853 .disk_based_diagnostic_sources()
854 .cloned()
855 .unwrap_or_default();
856 let disk_based_diagnostics_progress_token =
857 language.disk_based_diagnostics_progress_token().cloned();
858 let has_disk_based_diagnostic_progress_token =
859 disk_based_diagnostics_progress_token.is_some();
860 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
861
862 // Listen for `PublishDiagnostics` notifications.
863 language_server
864 .on_notification::<lsp::notification::PublishDiagnostics, _>({
865 let diagnostics_tx = diagnostics_tx.clone();
866 move |params| {
867 if !has_disk_based_diagnostic_progress_token {
868 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
869 }
870 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
871 .ok();
872 if !has_disk_based_diagnostic_progress_token {
873 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
874 }
875 }
876 })
877 .detach();
878
879 // Listen for `Progress` notifications. Send an event when the language server
880 // transitions between running jobs and not running any jobs.
881 let mut running_jobs_for_this_server: i32 = 0;
882 language_server
883 .on_notification::<lsp::notification::Progress, _>(move |params| {
884 let token = match params.token {
885 lsp::NumberOrString::Number(_) => None,
886 lsp::NumberOrString::String(token) => Some(token),
887 };
888
889 if token == disk_based_diagnostics_progress_token {
890 match params.value {
891 lsp::ProgressParamsValue::WorkDone(progress) => {
892 match progress {
893 lsp::WorkDoneProgress::Begin(_) => {
894 running_jobs_for_this_server += 1;
895 if running_jobs_for_this_server == 1 {
896 block_on(
897 diagnostics_tx
898 .send(LspEvent::DiagnosticsStart),
899 )
900 .ok();
901 }
902 }
903 lsp::WorkDoneProgress::End(_) => {
904 running_jobs_for_this_server -= 1;
905 if running_jobs_for_this_server == 0 {
906 block_on(
907 diagnostics_tx
908 .send(LspEvent::DiagnosticsFinish),
909 )
910 .ok();
911 }
912 }
913 _ => {}
914 }
915 }
916 }
917 }
918 })
919 .detach();
920
921 // Process all the LSP events.
922 cx.spawn(|mut cx| async move {
923 while let Ok(message) = diagnostics_rx.recv().await {
924 let this = this.upgrade(&cx)?;
925 match message {
926 LspEvent::DiagnosticsStart => {
927 this.update(&mut cx, |this, cx| {
928 this.disk_based_diagnostics_started(cx);
929 if let Some(project_id) = this.remote_id() {
930 rpc.send(proto::DiskBasedDiagnosticsUpdating {
931 project_id,
932 })
933 .log_err();
934 }
935 });
936 }
937 LspEvent::DiagnosticsUpdate(mut params) => {
938 language.process_diagnostics(&mut params);
939 this.update(&mut cx, |this, cx| {
940 this.update_diagnostics(params, &disk_based_sources, cx)
941 .log_err();
942 });
943 }
944 LspEvent::DiagnosticsFinish => {
945 this.update(&mut cx, |this, cx| {
946 this.disk_based_diagnostics_finished(cx);
947 if let Some(project_id) = this.remote_id() {
948 rpc.send(proto::DiskBasedDiagnosticsUpdated {
949 project_id,
950 })
951 .log_err();
952 }
953 });
954 }
955 }
956 }
957 Some(())
958 })
959 .detach();
960
961 Some(language_server)
962 })
963 .shared()
964 })
965 .clone()
966 }
967
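/// Converts an LSP `PublishDiagnostics` notification into this project's
/// diagnostic entries. Each diagnostic that is not itself supporting
/// information for an earlier one starts a new group; its related information
/// in the same file is added as supporting entries of that group, and
/// severities the server reports separately for supporting diagnostics are
/// folded back into the matching entries before the result is stored via
/// `update_diagnostic_entries`.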
968 pub fn update_diagnostics(
969 &mut self,
970 params: lsp::PublishDiagnosticsParams,
971 disk_based_sources: &HashSet<String>,
972 cx: &mut ModelContext<Self>,
973 ) -> Result<()> {
974 let abs_path = params
975 .uri
976 .to_file_path()
977 .map_err(|_| anyhow!("URI is not a file"))?;
978 let mut next_group_id = 0;
979 let mut diagnostics = Vec::default();
980 let mut primary_diagnostic_group_ids = HashMap::default();
981 let mut sources_by_group_id = HashMap::default();
982 let mut supporting_diagnostic_severities = HashMap::default();
983 for diagnostic in &params.diagnostics {
984 let source = diagnostic.source.as_ref();
985 let code = diagnostic.code.as_ref().map(|code| match code {
986 lsp::NumberOrString::Number(code) => code.to_string(),
987 lsp::NumberOrString::String(code) => code.clone(),
988 });
989 let range = range_from_lsp(diagnostic.range);
990 let is_supporting = diagnostic
991 .related_information
992 .as_ref()
993 .map_or(false, |infos| {
994 infos.iter().any(|info| {
995 primary_diagnostic_group_ids.contains_key(&(
996 source,
997 code.clone(),
998 range_from_lsp(info.location.range),
999 ))
1000 })
1001 });
1002
1003 if is_supporting {
1004 if let Some(severity) = diagnostic.severity {
1005 supporting_diagnostic_severities
1006 .insert((source, code.clone(), range), severity);
1007 }
1008 } else {
1009 let group_id = post_inc(&mut next_group_id);
1010 let is_disk_based =
1011 source.map_or(false, |source| disk_based_sources.contains(source));
1012
1013 sources_by_group_id.insert(group_id, source);
1014 primary_diagnostic_group_ids
1015 .insert((source, code.clone(), range.clone()), group_id);
1016
1017 diagnostics.push(DiagnosticEntry {
1018 range,
1019 diagnostic: Diagnostic {
1020 code: code.clone(),
1021 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1022 message: diagnostic.message.clone(),
1023 group_id,
1024 is_primary: true,
1025 is_valid: true,
1026 is_disk_based,
1027 },
1028 });
1029 if let Some(infos) = &diagnostic.related_information {
1030 for info in infos {
1031 if info.location.uri == params.uri && !info.message.is_empty() {
1032 let range = range_from_lsp(info.location.range);
1033 diagnostics.push(DiagnosticEntry {
1034 range,
1035 diagnostic: Diagnostic {
1036 code: code.clone(),
1037 severity: DiagnosticSeverity::INFORMATION,
1038 message: info.message.clone(),
1039 group_id,
1040 is_primary: false,
1041 is_valid: true,
1042 is_disk_based,
1043 },
1044 });
1045 }
1046 }
1047 }
1048 }
1049 }
1050
1051 for entry in &mut diagnostics {
1052 let diagnostic = &mut entry.diagnostic;
1053 if !diagnostic.is_primary {
1054 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1055 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1056 source,
1057 diagnostic.code.clone(),
1058 entry.range.clone(),
1059 )) {
1060 diagnostic.severity = severity;
1061 }
1062 }
1063 }
1064
1065 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1066 Ok(())
1067 }
1068
1069 pub fn update_diagnostic_entries(
1070 &mut self,
1071 abs_path: PathBuf,
1072 version: Option<i32>,
1073 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1074 cx: &mut ModelContext<Project>,
1075 ) -> Result<(), anyhow::Error> {
1076 let (worktree, relative_path) = self
1077 .find_local_worktree(&abs_path, cx)
1078 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1079 let project_path = ProjectPath {
1080 worktree_id: worktree.read(cx).id(),
1081 path: relative_path.into(),
1082 };
1083
1084 for buffer in self.open_buffers.values() {
1085 if let Some(buffer) = buffer.upgrade(cx) {
1086 if buffer
1087 .read(cx)
1088 .file()
1089 .map_or(false, |file| *file.path() == project_path.path)
1090 {
1091 buffer.update(cx, |buffer, cx| {
1092 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1093 })?;
1094 break;
1095 }
1096 }
1097 }
1098 worktree.update(cx, |worktree, cx| {
1099 worktree
1100 .as_local_mut()
1101 .ok_or_else(|| anyhow!("not a local worktree"))?
1102 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1103 })?;
1104 cx.emit(Event::DiagnosticsUpdated(project_path));
1105 Ok(())
1106 }
1107
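/// Formats the given buffers. Remote buffers are formatted with a single
/// `FormatBuffers` request to the host; local buffers are formatted by asking
/// each buffer's language server for document formatting edits and applying
/// them in one transaction per buffer. Returns the combined
/// `ProjectTransaction`.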
1108 pub fn format(
1109 &self,
1110 buffers: HashSet<ModelHandle<Buffer>>,
1111 push_to_history: bool,
1112 cx: &mut ModelContext<Project>,
1113 ) -> Task<Result<ProjectTransaction>> {
1114 let mut local_buffers = Vec::new();
1115 let mut remote_buffers = None;
1116 for buffer_handle in buffers {
1117 let buffer = buffer_handle.read(cx);
1118 let worktree;
1119 if let Some(file) = File::from_dyn(buffer.file()) {
1120 worktree = file.worktree.clone();
1121 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1122 let lang_server;
1123 if let Some(lang) = buffer.language() {
1124 if let Some(server) = self
1125 .language_servers
1126 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1127 {
1128 lang_server = server.clone();
1129 } else {
1130 return Task::ready(Ok(Default::default()));
1131 };
1132 } else {
1133 return Task::ready(Ok(Default::default()));
1134 }
1135
1136 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1137 } else {
1138 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1139 }
1140 } else {
1141 return Task::ready(Ok(Default::default()));
1142 }
1143 }
1144
1145 let remote_buffers = self.remote_id().zip(remote_buffers);
1146 let client = self.client.clone();
1147
1148 cx.spawn(|this, mut cx| async move {
1149 let mut project_transaction = ProjectTransaction::default();
1150
1151 if let Some((project_id, remote_buffers)) = remote_buffers {
1152 let response = client
1153 .request(proto::FormatBuffers {
1154 project_id,
1155 buffer_ids: remote_buffers
1156 .iter()
1157 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1158 .collect(),
1159 })
1160 .await?
1161 .transaction
1162 .ok_or_else(|| anyhow!("missing transaction"))?;
1163 project_transaction = this
1164 .update(&mut cx, |this, cx| {
1165 this.deserialize_project_transaction(response, push_to_history, cx)
1166 })
1167 .await?;
1168 }
1169
1170 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1171 let lsp_edits = lang_server
1172 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1173 text_document: lsp::TextDocumentIdentifier::new(
1174 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1175 ),
1176 options: Default::default(),
1177 work_done_progress_params: Default::default(),
1178 })
1179 .await?;
1180
1181 if let Some(lsp_edits) = lsp_edits {
1182 let edits = buffer
1183 .update(&mut cx, |buffer, cx| {
1184 buffer.edits_from_lsp(lsp_edits, None, cx)
1185 })
1186 .await?;
1187 buffer.update(&mut cx, |buffer, cx| {
1188 buffer.finalize_last_transaction();
1189 buffer.start_transaction();
1190 for (range, text) in edits {
1191 buffer.edit([range], text, cx);
1192 }
1193 if buffer.end_transaction(cx).is_some() {
1194 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1195 if !push_to_history {
1196 buffer.forget_transaction(transaction.id);
1197 }
1198 project_transaction.0.insert(cx.handle(), transaction);
1199 }
1200 });
1201 }
1202 }
1203
1204 Ok(project_transaction)
1205 })
1206 }
1207
1208 pub fn definition<T: ToPointUtf16>(
1209 &self,
1210 buffer: &ModelHandle<Buffer>,
1211 position: T,
1212 cx: &mut ModelContext<Self>,
1213 ) -> Task<Result<Vec<Definition>>> {
1214 let position = position.to_point_utf16(buffer.read(cx));
1215 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1216 }
1217
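/// Requests completions at the given position. For local buffers this sends an
/// LSP `textDocument/completion` request and converts the results into
/// `Completion`s anchored in the buffer; for remote buffers it proxies the
/// request to the host via `GetCompletions`.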
1218 pub fn completions<T: ToPointUtf16>(
1219 &self,
1220 source_buffer_handle: &ModelHandle<Buffer>,
1221 position: T,
1222 cx: &mut ModelContext<Self>,
1223 ) -> Task<Result<Vec<Completion>>> {
1224 let source_buffer_handle = source_buffer_handle.clone();
1225 let source_buffer = source_buffer_handle.read(cx);
1226 let buffer_id = source_buffer.remote_id();
1227 let language = source_buffer.language().cloned();
1228 let worktree;
1229 let buffer_abs_path;
1230 if let Some(file) = File::from_dyn(source_buffer.file()) {
1231 worktree = file.worktree.clone();
1232 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1233 } else {
1234 return Task::ready(Ok(Default::default()));
1235 };
1236
1237 let position = position.to_point_utf16(source_buffer);
1238 let anchor = source_buffer.anchor_after(position);
1239
1240 if worktree.read(cx).as_local().is_some() {
1241 let buffer_abs_path = buffer_abs_path.unwrap();
1242 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1243 server
1244 } else {
1245 return Task::ready(Ok(Default::default()));
1246 };
1247
1248 cx.spawn(|_, cx| async move {
1249 let completions = lang_server
1250 .request::<lsp::request::Completion>(lsp::CompletionParams {
1251 text_document_position: lsp::TextDocumentPositionParams::new(
1252 lsp::TextDocumentIdentifier::new(
1253 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1254 ),
1255 position.to_lsp_position(),
1256 ),
1257 context: Default::default(),
1258 work_done_progress_params: Default::default(),
1259 partial_result_params: Default::default(),
1260 })
1261 .await
1262 .context("lsp completion request failed")?;
1263
1264 let completions = if let Some(completions) = completions {
1265 match completions {
1266 lsp::CompletionResponse::Array(completions) => completions,
1267 lsp::CompletionResponse::List(list) => list.items,
1268 }
1269 } else {
1270 Default::default()
1271 };
1272
1273 source_buffer_handle.read_with(&cx, |this, _| {
1274 Ok(completions
1275 .into_iter()
1276 .filter_map(|lsp_completion| {
1277 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1278 lsp::CompletionTextEdit::Edit(edit) => {
1279 (range_from_lsp(edit.range), edit.new_text.clone())
1280 }
1281 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1282 log::info!("unsupported insert/replace completion");
1283 return None;
1284 }
1285 };
1286
1287 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1288 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1289 if clipped_start == old_range.start && clipped_end == old_range.end {
1290 Some(Completion {
1291 old_range: this.anchor_before(old_range.start)
1292 ..this.anchor_after(old_range.end),
1293 new_text,
1294 label: language
1295 .as_ref()
1296 .and_then(|l| l.label_for_completion(&lsp_completion))
1297 .unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
1298 lsp_completion,
1299 })
1300 } else {
1301 None
1302 }
1303 })
1304 .collect())
1305 })
1306 })
1307 } else if let Some(project_id) = self.remote_id() {
1308 let rpc = self.client.clone();
1309 let message = proto::GetCompletions {
1310 project_id,
1311 buffer_id,
1312 position: Some(language::proto::serialize_anchor(&anchor)),
1313 version: (&source_buffer.version()).into(),
1314 };
1315 cx.spawn_weak(|_, mut cx| async move {
1316 let response = rpc.request(message).await?;
1317
1318 source_buffer_handle
1319 .update(&mut cx, |buffer, _| {
1320 buffer.wait_for_version(response.version.into())
1321 })
1322 .await;
1323
1324 response
1325 .completions
1326 .into_iter()
1327 .map(|completion| {
1328 language::proto::deserialize_completion(completion, language.as_ref())
1329 })
1330 .collect()
1331 })
1332 } else {
1333 Task::ready(Ok(Default::default()))
1334 }
1335 }
1336
1337 pub fn apply_additional_edits_for_completion(
1338 &self,
1339 buffer_handle: ModelHandle<Buffer>,
1340 completion: Completion,
1341 push_to_history: bool,
1342 cx: &mut ModelContext<Self>,
1343 ) -> Task<Result<Option<Transaction>>> {
1344 let buffer = buffer_handle.read(cx);
1345 let buffer_id = buffer.remote_id();
1346
1347 if self.is_local() {
1348 let lang_server = if let Some(language_server) = buffer.language_server() {
1349 language_server.clone()
1350 } else {
1351 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1352 };
1353
1354 cx.spawn(|_, mut cx| async move {
1355 let resolved_completion = lang_server
1356 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1357 .await?;
1358 if let Some(edits) = resolved_completion.additional_text_edits {
1359 let edits = buffer_handle
1360 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1361 .await?;
1362 buffer_handle.update(&mut cx, |buffer, cx| {
1363 buffer.finalize_last_transaction();
1364 buffer.start_transaction();
1365 for (range, text) in edits {
1366 buffer.edit([range], text, cx);
1367 }
1368 let transaction = if buffer.end_transaction(cx).is_some() {
1369 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1370 if !push_to_history {
1371 buffer.forget_transaction(transaction.id);
1372 }
1373 Some(transaction)
1374 } else {
1375 None
1376 };
1377 Ok(transaction)
1378 })
1379 } else {
1380 Ok(None)
1381 }
1382 })
1383 } else if let Some(project_id) = self.remote_id() {
1384 let client = self.client.clone();
1385 cx.spawn(|_, mut cx| async move {
1386 let response = client
1387 .request(proto::ApplyCompletionAdditionalEdits {
1388 project_id,
1389 buffer_id,
1390 completion: Some(language::proto::serialize_completion(&completion)),
1391 })
1392 .await?;
1393
1394 if let Some(transaction) = response.transaction {
1395 let transaction = language::proto::deserialize_transaction(transaction)?;
1396 buffer_handle
1397 .update(&mut cx, |buffer, _| {
1398 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1399 })
1400 .await;
1401 if push_to_history {
1402 buffer_handle.update(&mut cx, |buffer, _| {
1403 buffer.push_transaction(transaction.clone(), Instant::now());
1404 });
1405 }
1406 Ok(Some(transaction))
1407 } else {
1408 Ok(None)
1409 }
1410 })
1411 } else {
1412 Task::ready(Err(anyhow!("project does not have a remote id")))
1413 }
1414 }
1415
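/// Requests the quickfix, refactor, and refactor-extract code actions
/// available in the given range, either from the local language server or, for
/// remote buffers, from the host via `GetCodeActions`.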
1416 pub fn code_actions<T: ToOffset>(
1417 &self,
1418 buffer_handle: &ModelHandle<Buffer>,
1419 range: Range<T>,
1420 cx: &mut ModelContext<Self>,
1421 ) -> Task<Result<Vec<CodeAction>>> {
1422 let buffer_handle = buffer_handle.clone();
1423 let buffer = buffer_handle.read(cx);
1424 let buffer_id = buffer.remote_id();
1425 let worktree;
1426 let buffer_abs_path;
1427 if let Some(file) = File::from_dyn(buffer.file()) {
1428 worktree = file.worktree.clone();
1429 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1430 } else {
1431 return Task::ready(Ok(Default::default()));
1432 };
1433 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1434
1435 if worktree.read(cx).as_local().is_some() {
1436 let buffer_abs_path = buffer_abs_path.unwrap();
1437 let lang_name;
1438 let lang_server;
1439 if let Some(lang) = buffer.language() {
1440 lang_name = lang.name().to_string();
1441 if let Some(server) = self
1442 .language_servers
1443 .get(&(worktree.read(cx).id(), lang_name.clone()))
1444 {
1445 lang_server = server.clone();
1446 } else {
1447 return Task::ready(Ok(Default::default()));
1448 };
1449 } else {
1450 return Task::ready(Ok(Default::default()));
1451 }
1452
1453 let lsp_range = lsp::Range::new(
1454 range.start.to_point_utf16(buffer).to_lsp_position(),
1455 range.end.to_point_utf16(buffer).to_lsp_position(),
1456 );
1457 cx.foreground().spawn(async move {
1458 Ok(lang_server
1459 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1460 text_document: lsp::TextDocumentIdentifier::new(
1461 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1462 ),
1463 range: lsp_range,
1464 work_done_progress_params: Default::default(),
1465 partial_result_params: Default::default(),
1466 context: lsp::CodeActionContext {
1467 diagnostics: Default::default(),
1468 only: Some(vec![
1469 lsp::CodeActionKind::QUICKFIX,
1470 lsp::CodeActionKind::REFACTOR,
1471 lsp::CodeActionKind::REFACTOR_EXTRACT,
1472 ]),
1473 },
1474 })
1475 .await?
1476 .unwrap_or_default()
1477 .into_iter()
1478 .filter_map(|entry| {
1479 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1480 Some(CodeAction {
1481 range: range.clone(),
1482 lsp_action,
1483 })
1484 } else {
1485 None
1486 }
1487 })
1488 .collect())
1489 })
1490 } else if let Some(project_id) = self.remote_id() {
1491 let rpc = self.client.clone();
1492 cx.spawn_weak(|_, mut cx| async move {
1493 let response = rpc
1494 .request(proto::GetCodeActions {
1495 project_id,
1496 buffer_id,
1497 start: Some(language::proto::serialize_anchor(&range.start)),
1498 end: Some(language::proto::serialize_anchor(&range.end)),
1499 })
1500 .await?;
1501
1502 buffer_handle
1503 .update(&mut cx, |buffer, _| {
1504 buffer.wait_for_version(response.version.into())
1505 })
1506 .await;
1507
1508 response
1509 .actions
1510 .into_iter()
1511 .map(language::proto::deserialize_code_action)
1512 .collect()
1513 })
1514 } else {
1515 Task::ready(Ok(Default::default()))
1516 }
1517 }
1518
1519 pub fn apply_code_action(
1520 &self,
1521 buffer_handle: ModelHandle<Buffer>,
1522 mut action: CodeAction,
1523 push_to_history: bool,
1524 cx: &mut ModelContext<Self>,
1525 ) -> Task<Result<ProjectTransaction>> {
1526 if self.is_local() {
1527 let buffer = buffer_handle.read(cx);
1528 let lang_name = if let Some(lang) = buffer.language() {
1529 lang.name().to_string()
1530 } else {
1531 return Task::ready(Ok(Default::default()));
1532 };
1533 let lang_server = if let Some(language_server) = buffer.language_server() {
1534 language_server.clone()
1535 } else {
1536 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1537 };
1538 let range = action.range.to_point_utf16(buffer);
1539
1540 cx.spawn(|this, mut cx| async move {
1541 if let Some(lsp_range) = action
1542 .lsp_action
1543 .data
1544 .as_mut()
1545 .and_then(|d| d.get_mut("codeActionParams"))
1546 .and_then(|d| d.get_mut("range"))
1547 {
1548 *lsp_range = serde_json::to_value(&lsp::Range::new(
1549 range.start.to_lsp_position(),
1550 range.end.to_lsp_position(),
1551 ))
1552 .unwrap();
1553 action.lsp_action = lang_server
1554 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1555 .await?;
1556 } else {
1557 let actions = this
1558 .update(&mut cx, |this, cx| {
1559 this.code_actions(&buffer_handle, action.range, cx)
1560 })
1561 .await?;
1562 action.lsp_action = actions
1563 .into_iter()
1564 .find(|a| a.lsp_action.title == action.lsp_action.title)
1565 .ok_or_else(|| anyhow!("code action is outdated"))?
1566 .lsp_action;
1567 }
1568
1569 if let Some(edit) = action.lsp_action.edit {
1570 Self::deserialize_workspace_edit(
1571 this,
1572 edit,
1573 push_to_history,
1574 lang_name,
1575 lang_server,
1576 &mut cx,
1577 )
1578 .await
1579 } else {
1580 Ok(ProjectTransaction::default())
1581 }
1582 })
1583 } else if let Some(project_id) = self.remote_id() {
1584 let client = self.client.clone();
1585 let request = proto::ApplyCodeAction {
1586 project_id,
1587 buffer_id: buffer_handle.read(cx).remote_id(),
1588 action: Some(language::proto::serialize_code_action(&action)),
1589 };
1590 cx.spawn(|this, mut cx| async move {
1591 let response = client
1592 .request(request)
1593 .await?
1594 .transaction
1595 .ok_or_else(|| anyhow!("missing transaction"))?;
1596 this.update(&mut cx, |this, cx| {
1597 this.deserialize_project_transaction(response, push_to_history, cx)
1598 })
1599 .await
1600 })
1601 } else {
1602 Task::ready(Err(anyhow!("project does not have a remote id")))
1603 }
1604 }
1605
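/// Applies an LSP `WorkspaceEdit` on behalf of a language server: resource
/// operations (create, rename, delete) are performed through the project's
/// `Fs`, and text edits are applied to the affected buffers, producing one
/// `ProjectTransaction` covering every buffer that changed.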
1606 async fn deserialize_workspace_edit(
1607 this: ModelHandle<Self>,
1608 edit: lsp::WorkspaceEdit,
1609 push_to_history: bool,
1610 language_name: String,
1611 language_server: Arc<LanguageServer>,
1612 cx: &mut AsyncAppContext,
1613 ) -> Result<ProjectTransaction> {
1614 let fs = this.read_with(cx, |this, _| this.fs.clone());
1615 let mut operations = Vec::new();
1616 if let Some(document_changes) = edit.document_changes {
1617 match document_changes {
1618 lsp::DocumentChanges::Edits(edits) => {
1619 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
1620 }
1621 lsp::DocumentChanges::Operations(ops) => operations = ops,
1622 }
1623 } else if let Some(changes) = edit.changes {
1624 operations.extend(changes.into_iter().map(|(uri, edits)| {
1625 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1626 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1627 uri,
1628 version: None,
1629 },
1630 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1631 })
1632 }));
1633 }
1634
1635 let mut project_transaction = ProjectTransaction::default();
1636 for operation in operations {
1637 match operation {
1638 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1639 let abs_path = op
1640 .uri
1641 .to_file_path()
1642 .map_err(|_| anyhow!("can't convert URI to path"))?;
1643
1644 if let Some(parent_path) = abs_path.parent() {
1645 fs.create_dir(parent_path).await?;
1646 }
1647 if abs_path.ends_with("/") {
1648 fs.create_dir(&abs_path).await?;
1649 } else {
1650 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
1651 .await?;
1652 }
1653 }
1654 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1655 let source_abs_path = op
1656 .old_uri
1657 .to_file_path()
1658 .map_err(|_| anyhow!("can't convert URI to path"))?;
1659 let target_abs_path = op
1660 .new_uri
1661 .to_file_path()
1662 .map_err(|_| anyhow!("can't convert URI to path"))?;
1663 fs.rename(
1664 &source_abs_path,
1665 &target_abs_path,
1666 op.options.map(Into::into).unwrap_or_default(),
1667 )
1668 .await?;
1669 }
1670 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1671 let abs_path = op
1672 .uri
1673 .to_file_path()
1674 .map_err(|_| anyhow!("can't convert URI to path"))?;
1675 let options = op.options.map(Into::into).unwrap_or_default();
1676 if abs_path.ends_with("/") {
1677 fs.remove_dir(&abs_path, options).await?;
1678 } else {
1679 fs.remove_file(&abs_path, options).await?;
1680 }
1681 }
1682 lsp::DocumentChangeOperation::Edit(op) => {
1683 let buffer_to_edit = this
1684 .update(cx, |this, cx| {
1685 this.open_local_buffer_from_lsp_path(
1686 op.text_document.uri,
1687 language_name.clone(),
1688 language_server.clone(),
1689 cx,
1690 )
1691 })
1692 .await?;
1693
1694 let edits = buffer_to_edit
1695 .update(cx, |buffer, cx| {
1696 let edits = op.edits.into_iter().map(|edit| match edit {
1697 lsp::OneOf::Left(edit) => edit,
1698 lsp::OneOf::Right(edit) => edit.text_edit,
1699 });
1700 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1701 })
1702 .await?;
1703
1704 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
1705 buffer.finalize_last_transaction();
1706 buffer.start_transaction();
1707 for (range, text) in edits {
1708 buffer.edit([range], text, cx);
1709 }
1710 let transaction = if buffer.end_transaction(cx).is_some() {
1711 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1712 if !push_to_history {
1713 buffer.forget_transaction(transaction.id);
1714 }
1715 Some(transaction)
1716 } else {
1717 None
1718 };
1719
1720 transaction
1721 });
1722 if let Some(transaction) = transaction {
1723 project_transaction.0.insert(buffer_to_edit, transaction);
1724 }
1725 }
1726 }
1727 }
1728
1729 Ok(project_transaction)
1730 }
1731
1732 pub fn prepare_rename<T: ToPointUtf16>(
1733 &self,
1734 buffer: ModelHandle<Buffer>,
1735 position: T,
1736 cx: &mut ModelContext<Self>,
1737 ) -> Task<Result<Option<Range<Anchor>>>> {
1738 let position = position.to_point_utf16(buffer.read(cx));
1739 self.request_lsp(buffer, PrepareRename { position }, cx)
1740 }
1741
1742 pub fn perform_rename<T: ToPointUtf16>(
1743 &self,
1744 buffer: ModelHandle<Buffer>,
1745 position: T,
1746 new_name: String,
1747 push_to_history: bool,
1748 cx: &mut ModelContext<Self>,
1749 ) -> Task<Result<ProjectTransaction>> {
1750 let position = position.to_point_utf16(buffer.read(cx));
1751 self.request_lsp(
1752 buffer,
1753 PerformRename {
1754 position,
1755 new_name,
1756 push_to_history,
1757 },
1758 cx,
1759 )
1760 }
1761
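/// Dispatches an `LspCommand`: on a local project the request is sent to the
/// buffer's language server and the response converted back through the
/// command; on a remote project the command is serialized and proxied to the
/// host. Returns a default response if neither path applies.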
1762 fn request_lsp<R: LspCommand>(
1763 &self,
1764 buffer_handle: ModelHandle<Buffer>,
1765 request: R,
1766 cx: &mut ModelContext<Self>,
1767 ) -> Task<Result<R::Response>>
1768 where
1769 <R::LspRequest as lsp::request::Request>::Result: Send,
1770 {
1771 let buffer = buffer_handle.read(cx);
1772 if self.is_local() {
1773 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
1774 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
1775 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
1776 return cx.spawn(|this, cx| async move {
1777 let response = language_server
1778 .request::<R::LspRequest>(lsp_params)
1779 .await
1780 .context("lsp request failed")?;
1781 request
1782 .response_from_lsp(response, this, buffer_handle, cx)
1783 .await
1784 });
1785 }
1786 } else if let Some(project_id) = self.remote_id() {
1787 let rpc = self.client.clone();
1788 let message = request.to_proto(project_id, buffer);
1789 return cx.spawn(|this, cx| async move {
1790 let response = rpc.request(message).await?;
1791 request
1792 .response_from_proto(response, this, buffer_handle, cx)
1793 .await
1794 });
1795 }
1796 Task::ready(Ok(Default::default()))
1797 }
1798
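/// Returns the local worktree containing `abs_path` along with the path
/// relative to that worktree, creating a new worktree rooted at `abs_path`
/// when none contains it (in which case the returned relative path is empty).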
1799 pub fn find_or_create_local_worktree(
1800 &self,
1801 abs_path: impl AsRef<Path>,
1802 weak: bool,
1803 cx: &mut ModelContext<Self>,
1804 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1805 let abs_path = abs_path.as_ref();
1806 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1807 Task::ready(Ok((tree.clone(), relative_path.into())))
1808 } else {
1809 let worktree = self.create_local_worktree(abs_path, weak, cx);
1810 cx.foreground()
1811 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1812 }
1813 }
1814
1815 fn find_local_worktree(
1816 &self,
1817 abs_path: &Path,
1818 cx: &AppContext,
1819 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1820 for tree in self.worktrees(cx) {
1821 if let Some(relative_path) = tree
1822 .read(cx)
1823 .as_local()
1824 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1825 {
1826 return Some((tree.clone(), relative_path.into()));
1827 }
1828 }
1829 None
1830 }
1831
1832 pub fn is_shared(&self) -> bool {
1833 match &self.client_state {
1834 ProjectClientState::Local { is_shared, .. } => *is_shared,
1835 ProjectClientState::Remote { .. } => false,
1836 }
1837 }
1838
1839 fn create_local_worktree(
1840 &self,
1841 abs_path: impl AsRef<Path>,
1842 weak: bool,
1843 cx: &mut ModelContext<Self>,
1844 ) -> Task<Result<ModelHandle<Worktree>>> {
1845 let fs = self.fs.clone();
1846 let client = self.client.clone();
1847 let path = Arc::from(abs_path.as_ref());
1848 cx.spawn(|project, mut cx| async move {
1849 let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;
1850
1851 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
1852 project.add_worktree(&worktree, cx);
1853 (project.remote_id(), project.is_shared())
1854 });
1855
1856 if let Some(project_id) = remote_project_id {
1857 worktree
1858 .update(&mut cx, |worktree, cx| {
1859 worktree.as_local_mut().unwrap().register(project_id, cx)
1860 })
1861 .await?;
1862 if is_shared {
1863 worktree
1864 .update(&mut cx, |worktree, cx| {
1865 worktree.as_local_mut().unwrap().share(project_id, cx)
1866 })
1867 .await?;
1868 }
1869 }
1870
1871 Ok(worktree)
1872 })
1873 }
1874
1875 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1876 self.worktrees.retain(|worktree| {
1877 worktree
1878 .upgrade(cx)
1879 .map_or(false, |w| w.read(cx).id() != id)
1880 });
1881 cx.notify();
1882 }
1883
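/// Adds a worktree to the project, observing its changes and, for local
/// worktrees, subscribing to its events. Worktrees created as weak are held
/// through weak handles (and pruned when released); all others are held
/// strongly.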
1884 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
1885 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
1886 if worktree.read(cx).is_local() {
1887 cx.subscribe(&worktree, |this, worktree, _, cx| {
1888 this.update_local_worktree_buffers(worktree, cx);
1889 })
1890 .detach();
1891 }
1892
1893 let push_weak_handle = {
1894 let worktree = worktree.read(cx);
1895 worktree.is_local() && worktree.is_weak()
1896 };
1897 if push_weak_handle {
1898 cx.observe_release(&worktree, |this, cx| {
1899 this.worktrees
1900 .retain(|worktree| worktree.upgrade(cx).is_some());
1901 cx.notify();
1902 })
1903 .detach();
1904 self.worktrees
1905 .push(WorktreeHandle::Weak(worktree.downgrade()));
1906 } else {
1907 self.worktrees
1908 .push(WorktreeHandle::Strong(worktree.clone()));
1909 }
1910 cx.notify();
1911 }
1912
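    /// Reconciles every open buffer belonging to the given local worktree with its
    /// latest snapshot: each buffer's file is re-resolved first by entry id, then by
    /// path, and falls back to an entry-less file if it no longer exists on disk. The
    /// updated file metadata is also sent to the server when the project has a remote id.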
1913 fn update_local_worktree_buffers(
1914 &mut self,
1915 worktree_handle: ModelHandle<Worktree>,
1916 cx: &mut ModelContext<Self>,
1917 ) {
1918 let snapshot = worktree_handle.read(cx).snapshot();
1919 let mut buffers_to_delete = Vec::new();
1920 for (buffer_id, buffer) in &self.open_buffers {
1921 if let Some(buffer) = buffer.upgrade(cx) {
1922 buffer.update(cx, |buffer, cx| {
1923 if let Some(old_file) = File::from_dyn(buffer.file()) {
1924 if old_file.worktree != worktree_handle {
1925 return;
1926 }
1927
1928 let new_file = if let Some(entry) = old_file
1929 .entry_id
1930 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
1931 {
1932 File {
1933 is_local: true,
1934 entry_id: Some(entry.id),
1935 mtime: entry.mtime,
1936 path: entry.path.clone(),
1937 worktree: worktree_handle.clone(),
1938 }
1939 } else if let Some(entry) =
1940 snapshot.entry_for_path(old_file.path().as_ref())
1941 {
1942 File {
1943 is_local: true,
1944 entry_id: Some(entry.id),
1945 mtime: entry.mtime,
1946 path: entry.path.clone(),
1947 worktree: worktree_handle.clone(),
1948 }
1949 } else {
1950 File {
1951 is_local: true,
1952 entry_id: None,
1953 path: old_file.path().clone(),
1954 mtime: old_file.mtime(),
1955 worktree: worktree_handle.clone(),
1956 }
1957 };
1958
1959 if let Some(project_id) = self.remote_id() {
1960 self.client
1961 .send(proto::UpdateBufferFile {
1962 project_id,
1963 buffer_id: *buffer_id as u64,
1964 file: Some(new_file.to_proto()),
1965 })
1966 .log_err();
1967 }
1968 buffer.file_updated(Box::new(new_file), cx).detach();
1969 }
1970 });
1971 } else {
1972 buffers_to_delete.push(*buffer_id);
1973 }
1974 }
1975
1976 for buffer_id in buffers_to_delete {
1977 self.open_buffers.remove(&buffer_id);
1978 }
1979 }
1980
1981 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
1982 let new_active_entry = entry.and_then(|project_path| {
1983 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1984 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
1985 Some(ProjectEntry {
1986 worktree_id: project_path.worktree_id,
1987 entry_id: entry.id,
1988 })
1989 });
1990 if new_active_entry != self.active_entry {
1991 self.active_entry = new_active_entry;
1992 cx.emit(Event::ActiveEntryChanged(new_active_entry));
1993 }
1994 }
1995
1996 pub fn is_running_disk_based_diagnostics(&self) -> bool {
1997 self.language_servers_with_diagnostics_running > 0
1998 }
1999
2000 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2001 let mut summary = DiagnosticSummary::default();
2002 for (_, path_summary) in self.diagnostic_summaries(cx) {
2003 summary.error_count += path_summary.error_count;
2004 summary.warning_count += path_summary.warning_count;
2005 summary.info_count += path_summary.info_count;
2006 summary.hint_count += path_summary.hint_count;
2007 }
2008 summary
2009 }
2010
2011 pub fn diagnostic_summaries<'a>(
2012 &'a self,
2013 cx: &'a AppContext,
2014 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2015 self.worktrees(cx).flat_map(move |worktree| {
2016 let worktree = worktree.read(cx);
2017 let worktree_id = worktree.id();
2018 worktree
2019 .diagnostic_summaries()
2020 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2021 })
2022 }
2023
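    // The number of language servers currently running disk-based diagnostics is tracked
    // so that `DiskBasedDiagnosticsStarted` is only emitted when the first server starts
    // and `DiskBasedDiagnosticsFinished` only when the last one finishes.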
2024 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2025 self.language_servers_with_diagnostics_running += 1;
2026 if self.language_servers_with_diagnostics_running == 1 {
2027 cx.emit(Event::DiskBasedDiagnosticsStarted);
2028 }
2029 }
2030
2031 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2032 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2033 self.language_servers_with_diagnostics_running -= 1;
2034 if self.language_servers_with_diagnostics_running == 0 {
2035 cx.emit(Event::DiskBasedDiagnosticsFinished);
2036 }
2037 }
2038
2039 pub fn active_entry(&self) -> Option<ProjectEntry> {
2040 self.active_entry
2041 }
2042
2043 // RPC message handlers
2044
2045 async fn handle_unshare_project(
2046 this: ModelHandle<Self>,
2047 _: TypedEnvelope<proto::UnshareProject>,
2048 _: Arc<Client>,
2049 mut cx: AsyncAppContext,
2050 ) -> Result<()> {
2051 this.update(&mut cx, |this, cx| {
2052 if let ProjectClientState::Remote {
2053 sharing_has_stopped,
2054 ..
2055 } = &mut this.client_state
2056 {
2057 *sharing_has_stopped = true;
2058 this.collaborators.clear();
2059 cx.notify();
2060 } else {
2061 unreachable!()
2062 }
2063 });
2064
2065 Ok(())
2066 }
2067
2068 async fn handle_add_collaborator(
2069 this: ModelHandle<Self>,
2070 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2071 _: Arc<Client>,
2072 mut cx: AsyncAppContext,
2073 ) -> Result<()> {
2074 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2075 let collaborator = envelope
2076 .payload
2077 .collaborator
2078 .take()
2079 .ok_or_else(|| anyhow!("empty collaborator"))?;
2080
2081 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2082 this.update(&mut cx, |this, cx| {
2083 this.collaborators
2084 .insert(collaborator.peer_id, collaborator);
2085 cx.notify();
2086 });
2087
2088 Ok(())
2089 }
2090
2091 async fn handle_remove_collaborator(
2092 this: ModelHandle<Self>,
2093 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2094 _: Arc<Client>,
2095 mut cx: AsyncAppContext,
2096 ) -> Result<()> {
2097 this.update(&mut cx, |this, cx| {
2098 let peer_id = PeerId(envelope.payload.peer_id);
2099 let replica_id = this
2100 .collaborators
2101 .remove(&peer_id)
2102 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2103 .replica_id;
2104 this.shared_buffers.remove(&peer_id);
            for buffer in this.open_buffers.values() {
2106 if let Some(buffer) = buffer.upgrade(cx) {
2107 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2108 }
2109 }
2110 cx.notify();
2111 Ok(())
2112 })
2113 }
2114
2115 async fn handle_share_worktree(
2116 this: ModelHandle<Self>,
2117 envelope: TypedEnvelope<proto::ShareWorktree>,
2118 client: Arc<Client>,
2119 mut cx: AsyncAppContext,
2120 ) -> Result<()> {
2121 this.update(&mut cx, |this, cx| {
2122 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2123 let replica_id = this.replica_id();
2124 let worktree = envelope
2125 .payload
2126 .worktree
2127 .ok_or_else(|| anyhow!("invalid worktree"))?;
2128 let (worktree, load_task) =
2129 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2130 this.add_worktree(&worktree, cx);
2131 load_task.detach();
2132 Ok(())
2133 })
2134 }
2135
2136 async fn handle_unregister_worktree(
2137 this: ModelHandle<Self>,
2138 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2139 _: Arc<Client>,
2140 mut cx: AsyncAppContext,
2141 ) -> Result<()> {
2142 this.update(&mut cx, |this, cx| {
2143 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2144 this.remove_worktree(worktree_id, cx);
2145 Ok(())
2146 })
2147 }
2148
2149 async fn handle_update_worktree(
2150 this: ModelHandle<Self>,
2151 envelope: TypedEnvelope<proto::UpdateWorktree>,
2152 _: Arc<Client>,
2153 mut cx: AsyncAppContext,
2154 ) -> Result<()> {
2155 this.update(&mut cx, |this, cx| {
2156 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2157 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2158 worktree.update(cx, |worktree, _| {
2159 let worktree = worktree.as_remote_mut().unwrap();
2160 worktree.update_from_remote(envelope)
2161 })?;
2162 }
2163 Ok(())
2164 })
2165 }
2166
2167 async fn handle_update_diagnostic_summary(
2168 this: ModelHandle<Self>,
2169 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2170 _: Arc<Client>,
2171 mut cx: AsyncAppContext,
2172 ) -> Result<()> {
2173 this.update(&mut cx, |this, cx| {
2174 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2175 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2176 if let Some(summary) = envelope.payload.summary {
2177 let project_path = ProjectPath {
2178 worktree_id,
2179 path: Path::new(&summary.path).into(),
2180 };
2181 worktree.update(cx, |worktree, _| {
2182 worktree
2183 .as_remote_mut()
2184 .unwrap()
2185 .update_diagnostic_summary(project_path.path.clone(), &summary);
2186 });
2187 cx.emit(Event::DiagnosticsUpdated(project_path));
2188 }
2189 }
2190 Ok(())
2191 })
2192 }
2193
2194 async fn handle_disk_based_diagnostics_updating(
2195 this: ModelHandle<Self>,
2196 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2197 _: Arc<Client>,
2198 mut cx: AsyncAppContext,
2199 ) -> Result<()> {
2200 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2201 Ok(())
2202 }
2203
2204 async fn handle_disk_based_diagnostics_updated(
2205 this: ModelHandle<Self>,
2206 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2207 _: Arc<Client>,
2208 mut cx: AsyncAppContext,
2209 ) -> Result<()> {
2210 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2211 Ok(())
2212 }
2213
2214 async fn handle_update_buffer(
2215 this: ModelHandle<Self>,
2216 envelope: TypedEnvelope<proto::UpdateBuffer>,
2217 _: Arc<Client>,
2218 mut cx: AsyncAppContext,
2219 ) -> Result<()> {
2220 this.update(&mut cx, |this, cx| {
2221 let payload = envelope.payload.clone();
2222 let buffer_id = payload.buffer_id;
2223 let ops = payload
2224 .operations
2225 .into_iter()
2226 .map(|op| language::proto::deserialize_operation(op))
2227 .collect::<Result<Vec<_>, _>>()?;
2228 let is_remote = this.is_remote();
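            // On a remote project, operations that arrive while a buffer is still being
            // loaded are queued as `OpenBuffer::Loading` and applied once loading
            // completes; operations for buffers that are neither open nor loading are
            // discarded.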
2229 match this.open_buffers.entry(buffer_id) {
2230 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2231 OpenBuffer::Loaded(buffer) => {
2232 if let Some(buffer) = buffer.upgrade(cx) {
2233 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2234 }
2235 }
2236 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2237 },
2238 hash_map::Entry::Vacant(e) => {
                    if is_remote && !this.loading_buffers.is_empty() {
2240 e.insert(OpenBuffer::Loading(ops));
2241 }
2242 }
2243 }
2244 Ok(())
2245 })
2246 }
2247
2248 async fn handle_update_buffer_file(
2249 this: ModelHandle<Self>,
2250 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2251 _: Arc<Client>,
2252 mut cx: AsyncAppContext,
2253 ) -> Result<()> {
2254 this.update(&mut cx, |this, cx| {
2255 let payload = envelope.payload.clone();
2256 let buffer_id = payload.buffer_id;
2257 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2258 let worktree = this
2259 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2260 .ok_or_else(|| anyhow!("no such worktree"))?;
2261 let file = File::from_proto(file, worktree.clone(), cx)?;
2262 let buffer = this
2263 .open_buffers
2264 .get_mut(&buffer_id)
2265 .and_then(|b| b.upgrade(cx))
2266 .ok_or_else(|| anyhow!("no such buffer"))?;
2267 buffer.update(cx, |buffer, cx| {
2268 buffer.file_updated(Box::new(file), cx).detach();
2269 });
2270 Ok(())
2271 })
2272 }
2273
2274 async fn handle_save_buffer(
2275 this: ModelHandle<Self>,
2276 envelope: TypedEnvelope<proto::SaveBuffer>,
2277 _: Arc<Client>,
2278 mut cx: AsyncAppContext,
2279 ) -> Result<proto::BufferSaved> {
2280 let buffer_id = envelope.payload.buffer_id;
2281 let sender_id = envelope.original_sender_id()?;
2282 let requested_version = envelope.payload.version.try_into()?;
2283
2284 let (project_id, buffer) = this.update(&mut cx, |this, _| {
2285 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2286 let buffer = this
2287 .shared_buffers
2288 .get(&sender_id)
2289 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2290 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2291 Ok::<_, anyhow::Error>((project_id, buffer))
2292 })?;
2293
2294 if !buffer
2295 .read_with(&cx, |buffer, _| buffer.version())
2296 .observed_all(&requested_version)
2297 {
            return Err(anyhow!("save request depends on unreceived edits"));
2299 }
2300
2301 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2302 Ok(proto::BufferSaved {
2303 project_id,
2304 buffer_id,
2305 version: (&saved_version).into(),
2306 mtime: Some(mtime.into()),
2307 })
2308 }
2309
2310 async fn handle_format_buffers(
2311 this: ModelHandle<Self>,
2312 envelope: TypedEnvelope<proto::FormatBuffers>,
2313 _: Arc<Client>,
2314 mut cx: AsyncAppContext,
2315 ) -> Result<proto::FormatBuffersResponse> {
2316 let sender_id = envelope.original_sender_id()?;
2317 let format = this.update(&mut cx, |this, cx| {
2318 let shared_buffers = this
2319 .shared_buffers
2320 .get(&sender_id)
2321 .ok_or_else(|| anyhow!("peer has no buffers"))?;
2322 let mut buffers = HashSet::default();
2323 for buffer_id in &envelope.payload.buffer_ids {
2324 buffers.insert(
2325 shared_buffers
2326 .get(buffer_id)
2327 .cloned()
2328 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2329 );
2330 }
2331 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2332 })?;
2333
2334 let project_transaction = format.await?;
2335 let project_transaction = this.update(&mut cx, |this, cx| {
2336 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2337 });
2338 Ok(proto::FormatBuffersResponse {
2339 transaction: Some(project_transaction),
2340 })
2341 }
2342
2343 async fn handle_get_completions(
2344 this: ModelHandle<Self>,
2345 envelope: TypedEnvelope<proto::GetCompletions>,
2346 _: Arc<Client>,
2347 mut cx: AsyncAppContext,
2348 ) -> Result<proto::GetCompletionsResponse> {
2349 let sender_id = envelope.original_sender_id()?;
2350 let position = envelope
2351 .payload
2352 .position
2353 .and_then(language::proto::deserialize_anchor)
2354 .ok_or_else(|| anyhow!("invalid position"))?;
2355 let version = clock::Global::from(envelope.payload.version);
2356 let buffer = this.read_with(&cx, |this, _| {
2357 this.shared_buffers
2358 .get(&sender_id)
2359 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2360 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2361 })?;
2362 if !buffer
2363 .read_with(&cx, |buffer, _| buffer.version())
2364 .observed_all(&version)
2365 {
            return Err(anyhow!("completion request depends on unreceived edits"));
2367 }
2368 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2369 let completions = this
2370 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2371 .await?;
2372
2373 Ok(proto::GetCompletionsResponse {
2374 completions: completions
2375 .iter()
2376 .map(language::proto::serialize_completion)
2377 .collect(),
2378 version: (&version).into(),
2379 })
2380 }
2381
2382 async fn handle_apply_additional_edits_for_completion(
2383 this: ModelHandle<Self>,
2384 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2385 _: Arc<Client>,
2386 mut cx: AsyncAppContext,
2387 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2388 let sender_id = envelope.original_sender_id()?;
2389 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2390 let buffer = this
2391 .shared_buffers
2392 .get(&sender_id)
2393 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2394 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2395 let language = buffer.read(cx).language();
2396 let completion = language::proto::deserialize_completion(
2397 envelope
2398 .payload
2399 .completion
2400 .ok_or_else(|| anyhow!("invalid completion"))?,
2401 language,
2402 )?;
2403 Ok::<_, anyhow::Error>(
2404 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2405 )
2406 })?;
2407
2408 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2409 transaction: apply_additional_edits
2410 .await?
2411 .as_ref()
2412 .map(language::proto::serialize_transaction),
2413 })
2414 }
2415
2416 async fn handle_get_code_actions(
2417 this: ModelHandle<Self>,
2418 envelope: TypedEnvelope<proto::GetCodeActions>,
2419 _: Arc<Client>,
2420 mut cx: AsyncAppContext,
2421 ) -> Result<proto::GetCodeActionsResponse> {
2422 let sender_id = envelope.original_sender_id()?;
2423 let start = envelope
2424 .payload
2425 .start
2426 .and_then(language::proto::deserialize_anchor)
2427 .ok_or_else(|| anyhow!("invalid start"))?;
2428 let end = envelope
2429 .payload
2430 .end
2431 .and_then(language::proto::deserialize_anchor)
2432 .ok_or_else(|| anyhow!("invalid end"))?;
2433 let buffer = this.update(&mut cx, |this, _| {
2434 this.shared_buffers
2435 .get(&sender_id)
2436 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2437 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2438 })?;
2439 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2440 if !version.observed(start.timestamp) || !version.observed(end.timestamp) {
            return Err(anyhow!("code action request references unreceived edits"));
2442 }
2443 let code_actions = this.update(&mut cx, |this, cx| {
2444 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2445 })?;
2446
2447 Ok(proto::GetCodeActionsResponse {
2448 actions: code_actions
2449 .await?
2450 .iter()
2451 .map(language::proto::serialize_code_action)
2452 .collect(),
2453 version: (&version).into(),
2454 })
2455 }
2456
2457 async fn handle_apply_code_action(
2458 this: ModelHandle<Self>,
2459 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2460 _: Arc<Client>,
2461 mut cx: AsyncAppContext,
2462 ) -> Result<proto::ApplyCodeActionResponse> {
2463 let sender_id = envelope.original_sender_id()?;
2464 let action = language::proto::deserialize_code_action(
2465 envelope
2466 .payload
2467 .action
2468 .ok_or_else(|| anyhow!("invalid action"))?,
2469 )?;
2470 let apply_code_action = this.update(&mut cx, |this, cx| {
2471 let buffer = this
2472 .shared_buffers
2473 .get(&sender_id)
2474 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2475 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2476 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2477 })?;
2478
2479 let project_transaction = apply_code_action.await?;
2480 let project_transaction = this.update(&mut cx, |this, cx| {
2481 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2482 });
2483 Ok(proto::ApplyCodeActionResponse {
2484 transaction: Some(project_transaction),
2485 })
2486 }
2487
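    /// Generic handler for LSP-backed requests forwarded from guests. The request is
    /// deserialized against the shared buffer, dispatched through `request_lsp`, and the
    /// response is serialized back to protobuf along with the buffer version that was
    /// current when the request was deserialized.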
2488 async fn handle_lsp_command<T: LspCommand>(
2489 this: ModelHandle<Self>,
2490 envelope: TypedEnvelope<T::ProtoRequest>,
2491 _: Arc<Client>,
2492 mut cx: AsyncAppContext,
2493 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
2494 where
2495 <T::LspRequest as lsp::request::Request>::Result: Send,
2496 {
2497 let sender_id = envelope.original_sender_id()?;
2498 let (request, buffer_version) = this.update(&mut cx, |this, cx| {
2499 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
2500 let buffer_handle = this
2501 .shared_buffers
2502 .get(&sender_id)
2503 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2504 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2505 let buffer = buffer_handle.read(cx);
2506 let buffer_version = buffer.version();
2507 let request = T::from_proto(envelope.payload, this, buffer)?;
2508 Ok::<_, anyhow::Error>((this.request_lsp(buffer_handle, request, cx), buffer_version))
2509 })?;
2510 let response = request.await?;
2511 this.update(&mut cx, |this, cx| {
2512 Ok(T::response_to_proto(
2513 response,
2514 this,
2515 sender_id,
2516 &buffer_version,
2517 cx,
2518 ))
2519 })
2520 }
2521
2522 async fn handle_open_buffer(
2523 this: ModelHandle<Self>,
2524 envelope: TypedEnvelope<proto::OpenBuffer>,
2525 _: Arc<Client>,
2526 mut cx: AsyncAppContext,
2527 ) -> anyhow::Result<proto::OpenBufferResponse> {
2528 let peer_id = envelope.original_sender_id()?;
2529 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2530 let open_buffer = this.update(&mut cx, |this, cx| {
2531 this.open_buffer(
2532 ProjectPath {
2533 worktree_id,
2534 path: PathBuf::from(envelope.payload.path).into(),
2535 },
2536 cx,
2537 )
2538 });
2539
2540 let buffer = open_buffer.await?;
2541 this.update(&mut cx, |this, cx| {
2542 Ok(proto::OpenBufferResponse {
2543 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
2544 })
2545 })
2546 }
2547
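    /// Converts a `ProjectTransaction` into its protobuf form, serializing each affected
    /// buffer for the given peer so the peer can reconstruct any buffers it hasn't seen.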
2548 fn serialize_project_transaction_for_peer(
2549 &mut self,
2550 project_transaction: ProjectTransaction,
2551 peer_id: PeerId,
2552 cx: &AppContext,
2553 ) -> proto::ProjectTransaction {
2554 let mut serialized_transaction = proto::ProjectTransaction {
2555 buffers: Default::default(),
2556 transactions: Default::default(),
2557 };
2558 for (buffer, transaction) in project_transaction.0 {
2559 serialized_transaction
2560 .buffers
2561 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
2562 serialized_transaction
2563 .transactions
2564 .push(language::proto::serialize_transaction(&transaction));
2565 }
2566 serialized_transaction
2567 }
2568
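    /// Rebuilds a `ProjectTransaction` from its protobuf form, resolving each referenced
    /// buffer, waiting for the transaction's edits to arrive, and optionally pushing each
    /// transaction onto its buffer's undo history.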
2569 fn deserialize_project_transaction(
2570 &mut self,
2571 message: proto::ProjectTransaction,
2572 push_to_history: bool,
2573 cx: &mut ModelContext<Self>,
2574 ) -> Task<Result<ProjectTransaction>> {
2575 cx.spawn(|this, mut cx| async move {
2576 let mut project_transaction = ProjectTransaction::default();
2577 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
2578 let buffer = this
2579 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2580 .await?;
2581 let transaction = language::proto::deserialize_transaction(transaction)?;
2582 project_transaction.0.insert(buffer, transaction);
2583 }
2584 for (buffer, transaction) in &project_transaction.0 {
2585 buffer
2586 .update(&mut cx, |buffer, _| {
2587 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2588 })
2589 .await;
2590
2591 if push_to_history {
2592 buffer.update(&mut cx, |buffer, _| {
2593 buffer.push_transaction(transaction.clone(), Instant::now());
2594 });
2595 }
2596 }
2597
2598 Ok(project_transaction)
2599 })
2600 }
2601
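    /// Serializes a buffer for the given peer. The first time a buffer is sent to a
    /// peer, its full state is included and the buffer is recorded in `shared_buffers`;
    /// subsequent messages reference the buffer by id only.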
2602 fn serialize_buffer_for_peer(
2603 &mut self,
2604 buffer: &ModelHandle<Buffer>,
2605 peer_id: PeerId,
2606 cx: &AppContext,
2607 ) -> proto::Buffer {
2608 let buffer_id = buffer.read(cx).remote_id();
2609 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2610 match shared_buffers.entry(buffer_id) {
2611 hash_map::Entry::Occupied(_) => proto::Buffer {
2612 variant: Some(proto::buffer::Variant::Id(buffer_id)),
2613 },
2614 hash_map::Entry::Vacant(entry) => {
2615 entry.insert(buffer.clone());
2616 proto::Buffer {
2617 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
2618 }
2619 }
2620 }
2621 }
2622
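    /// Resolves a protobuf buffer into a buffer model. A `Variant::Id` refers to a
    /// buffer that is (or will be) opened via another message, so the returned task waits
    /// for it to appear in `open_buffers`; a `Variant::State` carries the full state
    /// needed to construct and register the buffer locally.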
2623 fn deserialize_buffer(
2624 &mut self,
2625 buffer: proto::Buffer,
2626 cx: &mut ModelContext<Self>,
2627 ) -> Task<Result<ModelHandle<Buffer>>> {
2628 let replica_id = self.replica_id();
2629
2630 let mut opened_buffer_tx = self.opened_buffer.clone();
2631 let mut opened_buffer_rx = self.opened_buffer.subscribe();
2632 cx.spawn(|this, mut cx| async move {
2633 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
2634 proto::buffer::Variant::Id(id) => {
2635 let buffer = loop {
2636 let buffer = this.read_with(&cx, |this, cx| {
2637 this.open_buffers
2638 .get(&id)
2639 .and_then(|buffer| buffer.upgrade(cx))
2640 });
2641 if let Some(buffer) = buffer {
2642 break buffer;
2643 }
2644 opened_buffer_rx
2645 .recv()
2646 .await
2647 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
2648 };
2649 Ok(buffer)
2650 }
2651 proto::buffer::Variant::State(mut buffer) => {
2652 let mut buffer_worktree = None;
2653 let mut buffer_file = None;
2654 if let Some(file) = buffer.file.take() {
2655 this.read_with(&cx, |this, cx| {
2656 let worktree_id = WorktreeId::from_proto(file.worktree_id);
2657 let worktree =
2658 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
2659 anyhow!("no worktree found for id {}", file.worktree_id)
2660 })?;
2661 buffer_file =
2662 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
2663 as Box<dyn language::File>);
2664 buffer_worktree = Some(worktree);
2665 Ok::<_, anyhow::Error>(())
2666 })?;
2667 }
2668
2669 let buffer = cx.add_model(|cx| {
2670 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
2671 });
2672 this.update(&mut cx, |this, cx| {
2673 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
2674 })?;
2675
2676 let _ = opened_buffer_tx.send(()).await;
2677 Ok(buffer)
2678 }
2679 }
2680 })
2681 }
2682
2683 async fn handle_close_buffer(
2684 this: ModelHandle<Self>,
2685 envelope: TypedEnvelope<proto::CloseBuffer>,
2686 _: Arc<Client>,
2687 mut cx: AsyncAppContext,
2688 ) -> anyhow::Result<()> {
2689 this.update(&mut cx, |this, cx| {
2690 if let Some(shared_buffers) =
2691 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
2692 {
2693 shared_buffers.remove(&envelope.payload.buffer_id);
2694 cx.notify();
2695 }
2696 Ok(())
2697 })
2698 }
2699
2700 async fn handle_buffer_saved(
2701 this: ModelHandle<Self>,
2702 envelope: TypedEnvelope<proto::BufferSaved>,
2703 _: Arc<Client>,
2704 mut cx: AsyncAppContext,
2705 ) -> Result<()> {
2706 let version = envelope.payload.version.try_into()?;
2707 let mtime = envelope
2708 .payload
2709 .mtime
2710 .ok_or_else(|| anyhow!("missing mtime"))?
2711 .into();
2712
2713 this.update(&mut cx, |this, cx| {
2714 let buffer = this
2715 .open_buffers
2716 .get(&envelope.payload.buffer_id)
2717 .and_then(|buffer| buffer.upgrade(cx));
2718 if let Some(buffer) = buffer {
2719 buffer.update(cx, |buffer, cx| {
2720 buffer.did_save(version, mtime, None, cx);
2721 });
2722 }
2723 Ok(())
2724 })
2725 }
2726
2727 async fn handle_buffer_reloaded(
2728 this: ModelHandle<Self>,
2729 envelope: TypedEnvelope<proto::BufferReloaded>,
2730 _: Arc<Client>,
2731 mut cx: AsyncAppContext,
2732 ) -> Result<()> {
2733 let payload = envelope.payload.clone();
2734 let version = payload.version.try_into()?;
2735 let mtime = payload
2736 .mtime
2737 .ok_or_else(|| anyhow!("missing mtime"))?
2738 .into();
2739 this.update(&mut cx, |this, cx| {
2740 let buffer = this
2741 .open_buffers
2742 .get(&payload.buffer_id)
2743 .and_then(|buffer| buffer.upgrade(cx));
2744 if let Some(buffer) = buffer {
2745 buffer.update(cx, |buffer, cx| {
2746 buffer.did_reload(version, mtime, cx);
2747 });
2748 }
2749 Ok(())
2750 })
2751 }
2752
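    /// Fuzzy-matches `query` against the paths of all non-weak worktrees in this
    /// project, returning a future that performs the matching on the background executor.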
2753 pub fn match_paths<'a>(
2754 &self,
2755 query: &'a str,
2756 include_ignored: bool,
2757 smart_case: bool,
2758 max_results: usize,
2759 cancel_flag: &'a AtomicBool,
2760 cx: &AppContext,
2761 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
2762 let worktrees = self
2763 .worktrees(cx)
2764 .filter(|worktree| !worktree.read(cx).is_weak())
2765 .collect::<Vec<_>>();
2766 let include_root_name = worktrees.len() > 1;
2767 let candidate_sets = worktrees
2768 .into_iter()
2769 .map(|worktree| CandidateSet {
2770 snapshot: worktree.read(cx).snapshot(),
2771 include_ignored,
2772 include_root_name,
2773 })
2774 .collect::<Vec<_>>();
2775
2776 let background = cx.background().clone();
2777 async move {
2778 fuzzy::match_paths(
2779 candidate_sets.as_slice(),
2780 query,
2781 smart_case,
2782 max_results,
2783 cancel_flag,
2784 background,
2785 )
2786 .await
2787 }
2788 }
2789}
2790
2791impl WorktreeHandle {
2792 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2793 match self {
2794 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2795 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2796 }
2797 }
2798}
2799
2800impl OpenBuffer {
2801 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
2802 match self {
2803 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
2804 OpenBuffer::Loading(_) => None,
2805 }
2806 }
2807}
2808
2809struct CandidateSet {
2810 snapshot: Snapshot,
2811 include_ignored: bool,
2812 include_root_name: bool,
2813}
2814
2815impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
2816 type Candidates = CandidateSetIter<'a>;
2817
2818 fn id(&self) -> usize {
2819 self.snapshot.id().to_usize()
2820 }
2821
2822 fn len(&self) -> usize {
2823 if self.include_ignored {
2824 self.snapshot.file_count()
2825 } else {
2826 self.snapshot.visible_file_count()
2827 }
2828 }
2829
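    // When the worktree root is itself a file, the root name is the candidate's entire
    // visible path; otherwise the root name is only prepended when multiple worktrees
    // are being searched, so that results from different worktrees can be told apart.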
2830 fn prefix(&self) -> Arc<str> {
2831 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
2832 self.snapshot.root_name().into()
2833 } else if self.include_root_name {
2834 format!("{}/", self.snapshot.root_name()).into()
2835 } else {
2836 "".into()
2837 }
2838 }
2839
2840 fn candidates(&'a self, start: usize) -> Self::Candidates {
2841 CandidateSetIter {
2842 traversal: self.snapshot.files(self.include_ignored, start),
2843 }
2844 }
2845}
2846
2847struct CandidateSetIter<'a> {
2848 traversal: Traversal<'a>,
2849}
2850
2851impl<'a> Iterator for CandidateSetIter<'a> {
2852 type Item = PathMatchCandidate<'a>;
2853
2854 fn next(&mut self) -> Option<Self::Item> {
2855 self.traversal.next().map(|entry| {
2856 if let EntryKind::File(char_bag) = entry.kind {
2857 PathMatchCandidate {
2858 path: &entry.path,
2859 char_bag,
2860 }
2861 } else {
2862 unreachable!()
2863 }
2864 })
2865 }
2866}
2867
2868impl Entity for Project {
2869 type Event = Event;
2870
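    // When the project model is released, the server is notified so that the project is
    // unregistered (for local projects that were assigned a remote id) or left (for
    // remote projects).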
2871 fn release(&mut self, _: &mut gpui::MutableAppContext) {
2872 match &self.client_state {
2873 ProjectClientState::Local { remote_id_rx, .. } => {
2874 if let Some(project_id) = *remote_id_rx.borrow() {
2875 self.client
2876 .send(proto::UnregisterProject { project_id })
2877 .log_err();
2878 }
2879 }
2880 ProjectClientState::Remote { remote_id, .. } => {
2881 self.client
2882 .send(proto::LeaveProject {
2883 project_id: *remote_id,
2884 })
2885 .log_err();
2886 }
2887 }
2888 }
2889
2890 fn app_will_quit(
2891 &mut self,
2892 _: &mut MutableAppContext,
2893 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
2894 let shutdown_futures = self
2895 .language_servers
2896 .drain()
2897 .filter_map(|(_, server)| server.shutdown())
2898 .collect::<Vec<_>>();
2899 Some(
2900 async move {
2901 futures::future::join_all(shutdown_futures).await;
2902 }
2903 .boxed(),
2904 )
2905 }
2906}
2907
2908impl Collaborator {
2909 fn from_proto(
2910 message: proto::Collaborator,
2911 user_store: &ModelHandle<UserStore>,
2912 cx: &mut AsyncAppContext,
2913 ) -> impl Future<Output = Result<Self>> {
2914 let user = user_store.update(cx, |user_store, cx| {
2915 user_store.fetch_user(message.user_id, cx)
2916 });
2917
2918 async move {
2919 Ok(Self {
2920 peer_id: PeerId(message.peer_id),
2921 user: user.await?,
2922 replica_id: message.replica_id as ReplicaId,
2923 })
2924 }
2925 }
2926}
2927
2928impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
2929 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
2930 Self {
2931 worktree_id,
2932 path: path.as_ref().into(),
2933 }
2934 }
2935}
2936
2937impl From<lsp::CreateFileOptions> for fs::CreateOptions {
2938 fn from(options: lsp::CreateFileOptions) -> Self {
2939 Self {
2940 overwrite: options.overwrite.unwrap_or(false),
2941 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2942 }
2943 }
2944}
2945
2946impl From<lsp::RenameFileOptions> for fs::RenameOptions {
2947 fn from(options: lsp::RenameFileOptions) -> Self {
2948 Self {
2949 overwrite: options.overwrite.unwrap_or(false),
2950 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2951 }
2952 }
2953}
2954
2955impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
2956 fn from(options: lsp::DeleteFileOptions) -> Self {
2957 Self {
2958 recursive: options.recursive.unwrap_or(false),
2959 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
2960 }
2961 }
2962}
2963
2964#[cfg(test)]
2965mod tests {
2966 use super::{Event, *};
2967 use fs::RealFs;
2968 use futures::StreamExt;
2969 use gpui::test::subscribe;
2970 use language::{
2971 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
2972 };
2973 use lsp::Url;
2974 use serde_json::json;
2975 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
2976 use unindent::Unindent as _;
2977 use util::test::temp_tree;
2978 use worktree::WorktreeHandle as _;
2979
2980 #[gpui::test]
2981 async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
2982 let dir = temp_tree(json!({
2983 "root": {
2984 "apple": "",
2985 "banana": {
2986 "carrot": {
2987 "date": "",
2988 "endive": "",
2989 }
2990 },
2991 "fennel": {
2992 "grape": "",
2993 }
2994 }
2995 }));
2996
2997 let root_link_path = dir.path().join("root_link");
2998 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
2999 unix::fs::symlink(
3000 &dir.path().join("root/fennel"),
3001 &dir.path().join("root/finnochio"),
3002 )
3003 .unwrap();
3004
3005 let project = Project::test(Arc::new(RealFs), &mut cx);
3006
3007 let (tree, _) = project
3008 .update(&mut cx, |project, cx| {
3009 project.find_or_create_local_worktree(&root_link_path, false, cx)
3010 })
3011 .await
3012 .unwrap();
3013
3014 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3015 .await;
3016 cx.read(|cx| {
3017 let tree = tree.read(cx);
3018 assert_eq!(tree.file_count(), 5);
3019 assert_eq!(
3020 tree.inode_for_path("fennel/grape"),
3021 tree.inode_for_path("finnochio/grape")
3022 );
3023 });
3024
3025 let cancel_flag = Default::default();
3026 let results = project
3027 .read_with(&cx, |project, cx| {
3028 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3029 })
3030 .await;
3031 assert_eq!(
3032 results
3033 .into_iter()
3034 .map(|result| result.path)
3035 .collect::<Vec<Arc<Path>>>(),
3036 vec![
3037 PathBuf::from("banana/carrot/date").into(),
3038 PathBuf::from("banana/carrot/endive").into(),
3039 ]
3040 );
3041 }
3042
3043 #[gpui::test]
3044 async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
3045 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3046 let progress_token = language_server_config
3047 .disk_based_diagnostics_progress_token
3048 .clone()
3049 .unwrap();
3050
3051 let language = Arc::new(Language::new(
3052 LanguageConfig {
3053 name: "Rust".to_string(),
3054 path_suffixes: vec!["rs".to_string()],
3055 language_server: Some(language_server_config),
3056 ..Default::default()
3057 },
3058 Some(tree_sitter_rust::language()),
3059 ));
3060
3061 let fs = FakeFs::new(cx.background());
3062 fs.insert_tree(
3063 "/dir",
3064 json!({
3065 "a.rs": "fn a() { A }",
3066 "b.rs": "const y: i32 = 1",
3067 }),
3068 )
3069 .await;
3070
3071 let project = Project::test(fs, &mut cx);
3072 project.update(&mut cx, |project, cx| {
3073 Arc::get_mut(&mut project.languages)
3074 .unwrap()
3075 .add(language, cx.background());
3076 });
3077
3078 let (tree, _) = project
3079 .update(&mut cx, |project, cx| {
3080 project.find_or_create_local_worktree("/dir", false, cx)
3081 })
3082 .await
3083 .unwrap();
3084 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3085
3086 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3087 .await;
3088
        // Cause the worktree to start the fake language server
3090 let _buffer = project
3091 .update(&mut cx, |project, cx| {
3092 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3093 })
3094 .await
3095 .unwrap();
3096
3097 let mut events = subscribe(&project, &mut cx);
3098
3099 let mut fake_server = fake_servers.next().await.unwrap();
3100 fake_server.start_progress(&progress_token).await;
3101 assert_eq!(
3102 events.next().await.unwrap(),
3103 Event::DiskBasedDiagnosticsStarted
3104 );
3105
3106 fake_server.start_progress(&progress_token).await;
3107 fake_server.end_progress(&progress_token).await;
3108 fake_server.start_progress(&progress_token).await;
3109
3110 fake_server
3111 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3112 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3113 version: None,
3114 diagnostics: vec![lsp::Diagnostic {
3115 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3116 severity: Some(lsp::DiagnosticSeverity::ERROR),
3117 message: "undefined variable 'A'".to_string(),
3118 ..Default::default()
3119 }],
3120 })
3121 .await;
3122 assert_eq!(
3123 events.next().await.unwrap(),
3124 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3125 );
3126
3127 fake_server.end_progress(&progress_token).await;
3128 fake_server.end_progress(&progress_token).await;
3129 assert_eq!(
3130 events.next().await.unwrap(),
3131 Event::DiskBasedDiagnosticsUpdated
3132 );
3133 assert_eq!(
3134 events.next().await.unwrap(),
3135 Event::DiskBasedDiagnosticsFinished
3136 );
3137
3138 let buffer = project
3139 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3140 .await
3141 .unwrap();
3142
3143 buffer.read_with(&cx, |buffer, _| {
3144 let snapshot = buffer.snapshot();
3145 let diagnostics = snapshot
3146 .diagnostics_in_range::<_, Point>(0..buffer.len())
3147 .collect::<Vec<_>>();
3148 assert_eq!(
3149 diagnostics,
3150 &[DiagnosticEntry {
3151 range: Point::new(0, 9)..Point::new(0, 10),
3152 diagnostic: Diagnostic {
3153 severity: lsp::DiagnosticSeverity::ERROR,
3154 message: "undefined variable 'A'".to_string(),
3155 group_id: 0,
3156 is_primary: true,
3157 ..Default::default()
3158 }
3159 }]
3160 )
3161 });
3162 }
3163
3164 #[gpui::test]
3165 async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
3166 let dir = temp_tree(json!({
3167 "root": {
3168 "dir1": {},
3169 "dir2": {
3170 "dir3": {}
3171 }
3172 }
3173 }));
3174
3175 let project = Project::test(Arc::new(RealFs), &mut cx);
3176 let (tree, _) = project
3177 .update(&mut cx, |project, cx| {
3178 project.find_or_create_local_worktree(&dir.path(), false, cx)
3179 })
3180 .await
3181 .unwrap();
3182
3183 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3184 .await;
3185
3186 let cancel_flag = Default::default();
3187 let results = project
3188 .read_with(&cx, |project, cx| {
3189 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3190 })
3191 .await;
3192
3193 assert!(results.is_empty());
3194 }
3195
3196 #[gpui::test]
3197 async fn test_definition(mut cx: gpui::TestAppContext) {
3198 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3199 let language = Arc::new(Language::new(
3200 LanguageConfig {
3201 name: "Rust".to_string(),
3202 path_suffixes: vec!["rs".to_string()],
3203 language_server: Some(language_server_config),
3204 ..Default::default()
3205 },
3206 Some(tree_sitter_rust::language()),
3207 ));
3208
3209 let fs = FakeFs::new(cx.background());
3210 fs.insert_tree(
3211 "/dir",
3212 json!({
3213 "a.rs": "const fn a() { A }",
3214 "b.rs": "const y: i32 = crate::a()",
3215 }),
3216 )
3217 .await;
3218
3219 let project = Project::test(fs, &mut cx);
3220 project.update(&mut cx, |project, cx| {
3221 Arc::get_mut(&mut project.languages)
3222 .unwrap()
3223 .add(language, cx.background());
3224 });
3225
3226 let (tree, _) = project
3227 .update(&mut cx, |project, cx| {
3228 project.find_or_create_local_worktree("/dir/b.rs", false, cx)
3229 })
3230 .await
3231 .unwrap();
3232 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3233 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3234 .await;
3235
3236 let buffer = project
3237 .update(&mut cx, |project, cx| {
3238 project.open_buffer(
3239 ProjectPath {
3240 worktree_id,
3241 path: Path::new("").into(),
3242 },
3243 cx,
3244 )
3245 })
3246 .await
3247 .unwrap();
3248
3249 let mut fake_server = fake_servers.next().await.unwrap();
3250 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params| {
3251 let params = params.text_document_position_params;
3252 assert_eq!(
3253 params.text_document.uri.to_file_path().unwrap(),
3254 Path::new("/dir/b.rs"),
3255 );
3256 assert_eq!(params.position, lsp::Position::new(0, 22));
3257
3258 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3259 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
3260 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3261 )))
3262 });
3263
3264 let mut definitions = project
3265 .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
3266 .await
3267 .unwrap();
3268
3269 assert_eq!(definitions.len(), 1);
3270 let definition = definitions.pop().unwrap();
3271 cx.update(|cx| {
3272 let target_buffer = definition.target_buffer.read(cx);
3273 assert_eq!(
3274 target_buffer
3275 .file()
3276 .unwrap()
3277 .as_local()
3278 .unwrap()
3279 .abs_path(cx),
3280 Path::new("/dir/a.rs"),
3281 );
3282 assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
3283 assert_eq!(
3284 list_worktrees(&project, cx),
3285 [("/dir/b.rs".as_ref(), false), ("/dir/a.rs".as_ref(), true)]
3286 );
3287
3288 drop(definition);
3289 });
3290 cx.read(|cx| {
3291 assert_eq!(
3292 list_worktrees(&project, cx),
3293 [("/dir/b.rs".as_ref(), false)]
3294 );
3295 });
3296
3297 fn list_worktrees<'a>(
3298 project: &'a ModelHandle<Project>,
3299 cx: &'a AppContext,
3300 ) -> Vec<(&'a Path, bool)> {
3301 project
3302 .read(cx)
3303 .worktrees(cx)
3304 .map(|worktree| {
3305 let worktree = worktree.read(cx);
3306 (
3307 worktree.as_local().unwrap().abs_path().as_ref(),
3308 worktree.is_weak(),
3309 )
3310 })
3311 .collect::<Vec<_>>()
3312 }
3313 }
3314
3315 #[gpui::test]
3316 async fn test_save_file(mut cx: gpui::TestAppContext) {
3317 let fs = FakeFs::new(cx.background());
3318 fs.insert_tree(
3319 "/dir",
3320 json!({
3321 "file1": "the old contents",
3322 }),
3323 )
3324 .await;
3325
3326 let project = Project::test(fs.clone(), &mut cx);
3327 let worktree_id = project
3328 .update(&mut cx, |p, cx| {
3329 p.find_or_create_local_worktree("/dir", false, cx)
3330 })
3331 .await
3332 .unwrap()
3333 .0
3334 .read_with(&cx, |tree, _| tree.id());
3335
3336 let buffer = project
3337 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3338 .await
3339 .unwrap();
3340 buffer
3341 .update(&mut cx, |buffer, cx| {
3342 assert_eq!(buffer.text(), "the old contents");
3343 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3344 buffer.save(cx)
3345 })
3346 .await
3347 .unwrap();
3348
3349 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3350 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3351 }
3352
3353 #[gpui::test]
3354 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
3355 let fs = FakeFs::new(cx.background());
3356 fs.insert_tree(
3357 "/dir",
3358 json!({
3359 "file1": "the old contents",
3360 }),
3361 )
3362 .await;
3363
3364 let project = Project::test(fs.clone(), &mut cx);
3365 let worktree_id = project
3366 .update(&mut cx, |p, cx| {
3367 p.find_or_create_local_worktree("/dir/file1", false, cx)
3368 })
3369 .await
3370 .unwrap()
3371 .0
3372 .read_with(&cx, |tree, _| tree.id());
3373
3374 let buffer = project
3375 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3376 .await
3377 .unwrap();
3378 buffer
3379 .update(&mut cx, |buffer, cx| {
3380 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3381 buffer.save(cx)
3382 })
3383 .await
3384 .unwrap();
3385
3386 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3387 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3388 }
3389
3390 #[gpui::test(retries = 5)]
3391 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3392 let dir = temp_tree(json!({
3393 "a": {
3394 "file1": "",
3395 "file2": "",
3396 "file3": "",
3397 },
3398 "b": {
3399 "c": {
3400 "file4": "",
3401 "file5": "",
3402 }
3403 }
3404 }));
3405
3406 let project = Project::test(Arc::new(RealFs), &mut cx);
3407 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3408
3409 let (tree, _) = project
3410 .update(&mut cx, |p, cx| {
3411 p.find_or_create_local_worktree(dir.path(), false, cx)
3412 })
3413 .await
3414 .unwrap();
3415 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3416
3417 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3418 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3419 async move { buffer.await.unwrap() }
3420 };
3421 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3422 tree.read_with(cx, |tree, _| {
3423 tree.entry_for_path(path)
3424 .expect(&format!("no entry for path {}", path))
3425 .id
3426 })
3427 };
3428
3429 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3430 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3431 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3432 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3433
3434 let file2_id = id_for_path("a/file2", &cx);
3435 let file3_id = id_for_path("a/file3", &cx);
3436 let file4_id = id_for_path("b/c/file4", &cx);
3437
3438 // Wait for the initial scan.
3439 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3440 .await;
3441
3442 // Create a remote copy of this worktree.
3443 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
3444 let (remote, load_task) = cx.update(|cx| {
3445 Worktree::remote(
3446 1,
3447 1,
3448 initial_snapshot.to_proto(&Default::default(), Default::default()),
3449 rpc.clone(),
3450 cx,
3451 )
3452 });
3453 load_task.await;
3454
3455 cx.read(|cx| {
3456 assert!(!buffer2.read(cx).is_dirty());
3457 assert!(!buffer3.read(cx).is_dirty());
3458 assert!(!buffer4.read(cx).is_dirty());
3459 assert!(!buffer5.read(cx).is_dirty());
3460 });
3461
3462 // Rename and delete files and directories.
3463 tree.flush_fs_events(&cx).await;
3464 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3465 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3466 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3467 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3468 tree.flush_fs_events(&cx).await;
3469
3470 let expected_paths = vec![
3471 "a",
3472 "a/file1",
3473 "a/file2.new",
3474 "b",
3475 "d",
3476 "d/file3",
3477 "d/file4",
3478 ];
3479
3480 cx.read(|app| {
3481 assert_eq!(
3482 tree.read(app)
3483 .paths()
3484 .map(|p| p.to_str().unwrap())
3485 .collect::<Vec<_>>(),
3486 expected_paths
3487 );
3488
3489 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3490 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3491 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3492
3493 assert_eq!(
3494 buffer2.read(app).file().unwrap().path().as_ref(),
3495 Path::new("a/file2.new")
3496 );
3497 assert_eq!(
3498 buffer3.read(app).file().unwrap().path().as_ref(),
3499 Path::new("d/file3")
3500 );
3501 assert_eq!(
3502 buffer4.read(app).file().unwrap().path().as_ref(),
3503 Path::new("d/file4")
3504 );
3505 assert_eq!(
3506 buffer5.read(app).file().unwrap().path().as_ref(),
3507 Path::new("b/c/file5")
3508 );
3509
3510 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3511 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3512 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3513 assert!(buffer5.read(app).file().unwrap().is_deleted());
3514 });
3515
3516 // Update the remote worktree. Check that it becomes consistent with the
3517 // local worktree.
3518 remote.update(&mut cx, |remote, cx| {
3519 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
3520 &initial_snapshot,
3521 1,
3522 1,
3523 0,
3524 true,
3525 );
3526 remote
3527 .as_remote_mut()
3528 .unwrap()
3529 .snapshot
3530 .apply_remote_update(update_message)
3531 .unwrap();
3532
3533 assert_eq!(
3534 remote
3535 .paths()
3536 .map(|p| p.to_str().unwrap())
3537 .collect::<Vec<_>>(),
3538 expected_paths
3539 );
3540 });
3541 }
3542
3543 #[gpui::test]
3544 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3545 let fs = FakeFs::new(cx.background());
3546 fs.insert_tree(
3547 "/the-dir",
3548 json!({
3549 "a.txt": "a-contents",
3550 "b.txt": "b-contents",
3551 }),
3552 )
3553 .await;
3554
3555 let project = Project::test(fs.clone(), &mut cx);
3556 let worktree_id = project
3557 .update(&mut cx, |p, cx| {
3558 p.find_or_create_local_worktree("/the-dir", false, cx)
3559 })
3560 .await
3561 .unwrap()
3562 .0
3563 .read_with(&cx, |tree, _| tree.id());
3564
3565 // Spawn multiple tasks to open paths, repeating some paths.
3566 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3567 (
3568 p.open_buffer((worktree_id, "a.txt"), cx),
3569 p.open_buffer((worktree_id, "b.txt"), cx),
3570 p.open_buffer((worktree_id, "a.txt"), cx),
3571 )
3572 });
3573
3574 let buffer_a_1 = buffer_a_1.await.unwrap();
3575 let buffer_a_2 = buffer_a_2.await.unwrap();
3576 let buffer_b = buffer_b.await.unwrap();
3577 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3578 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3579
3580 // There is only one buffer per path.
3581 let buffer_a_id = buffer_a_1.id();
3582 assert_eq!(buffer_a_2.id(), buffer_a_id);
3583
3584 // Open the same path again while it is still open.
3585 drop(buffer_a_1);
3586 let buffer_a_3 = project
3587 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3588 .await
3589 .unwrap();
3590
3591 // There's still only one buffer per path.
3592 assert_eq!(buffer_a_3.id(), buffer_a_id);
3593 }
3594
3595 #[gpui::test]
3596 async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
3597 use std::fs;
3598
3599 let dir = temp_tree(json!({
3600 "file1": "abc",
3601 "file2": "def",
3602 "file3": "ghi",
3603 }));
3604
3605 let project = Project::test(Arc::new(RealFs), &mut cx);
3606 let (worktree, _) = project
3607 .update(&mut cx, |p, cx| {
3608 p.find_or_create_local_worktree(dir.path(), false, cx)
3609 })
3610 .await
3611 .unwrap();
3612 let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
3613
3614 worktree.flush_fs_events(&cx).await;
3615 worktree
3616 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3617 .await;
3618
3619 let buffer1 = project
3620 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3621 .await
3622 .unwrap();
3623 let events = Rc::new(RefCell::new(Vec::new()));
3624
3625 // initially, the buffer isn't dirty.
3626 buffer1.update(&mut cx, |buffer, cx| {
3627 cx.subscribe(&buffer1, {
3628 let events = events.clone();
3629 move |_, _, event, _| events.borrow_mut().push(event.clone())
3630 })
3631 .detach();
3632
3633 assert!(!buffer.is_dirty());
3634 assert!(events.borrow().is_empty());
3635
3636 buffer.edit(vec![1..2], "", cx);
3637 });
3638
3639 // after the first edit, the buffer is dirty, and emits a dirtied event.
3640 buffer1.update(&mut cx, |buffer, cx| {
3641 assert!(buffer.text() == "ac");
3642 assert!(buffer.is_dirty());
3643 assert_eq!(
3644 *events.borrow(),
3645 &[language::Event::Edited, language::Event::Dirtied]
3646 );
3647 events.borrow_mut().clear();
3648 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
3649 });
3650
3651 // after saving, the buffer is not dirty, and emits a saved event.
3652 buffer1.update(&mut cx, |buffer, cx| {
3653 assert!(!buffer.is_dirty());
3654 assert_eq!(*events.borrow(), &[language::Event::Saved]);
3655 events.borrow_mut().clear();
3656
3657 buffer.edit(vec![1..1], "B", cx);
3658 buffer.edit(vec![2..2], "D", cx);
3659 });
3660
        // after editing again, the buffer is dirty, and emits another dirtied event.
3662 buffer1.update(&mut cx, |buffer, cx| {
3663 assert!(buffer.text() == "aBDc");
3664 assert!(buffer.is_dirty());
3665 assert_eq!(
3666 *events.borrow(),
3667 &[
3668 language::Event::Edited,
3669 language::Event::Dirtied,
3670 language::Event::Edited,
3671 ],
3672 );
3673 events.borrow_mut().clear();
3674
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
3677 buffer.edit([1..3], "", cx);
3678 assert!(buffer.text() == "ac");
3679 assert!(buffer.is_dirty());
3680 });
3681
3682 assert_eq!(*events.borrow(), &[language::Event::Edited]);
3683
3684 // When a file is deleted, the buffer is considered dirty.
3685 let events = Rc::new(RefCell::new(Vec::new()));
3686 let buffer2 = project
3687 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
3688 .await
3689 .unwrap();
3690 buffer2.update(&mut cx, |_, cx| {
3691 cx.subscribe(&buffer2, {
3692 let events = events.clone();
3693 move |_, _, event, _| events.borrow_mut().push(event.clone())
3694 })
3695 .detach();
3696 });
3697
3698 fs::remove_file(dir.path().join("file2")).unwrap();
3699 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
3700 assert_eq!(
3701 *events.borrow(),
3702 &[language::Event::Dirtied, language::Event::FileHandleChanged]
3703 );
3704
3705 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3706 let events = Rc::new(RefCell::new(Vec::new()));
3707 let buffer3 = project
3708 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
3709 .await
3710 .unwrap();
3711 buffer3.update(&mut cx, |_, cx| {
3712 cx.subscribe(&buffer3, {
3713 let events = events.clone();
3714 move |_, _, event, _| events.borrow_mut().push(event.clone())
3715 })
3716 .detach();
3717 });
3718
3719 worktree.flush_fs_events(&cx).await;
3720 buffer3.update(&mut cx, |buffer, cx| {
3721 buffer.edit(Some(0..0), "x", cx);
3722 });
3723 events.borrow_mut().clear();
3724 fs::remove_file(dir.path().join("file3")).unwrap();
3725 buffer3
3726 .condition(&cx, |_, _| !events.borrow().is_empty())
3727 .await;
3728 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
3729 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3730 }
3731
3732 #[gpui::test]
3733 async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
3734 use std::fs;
3735
3736 let initial_contents = "aaa\nbbbbb\nc\n";
3737 let dir = temp_tree(json!({ "the-file": initial_contents }));
3738
3739 let project = Project::test(Arc::new(RealFs), &mut cx);
3740 let (worktree, _) = project
3741 .update(&mut cx, |p, cx| {
3742 p.find_or_create_local_worktree(dir.path(), false, cx)
3743 })
3744 .await
3745 .unwrap();
3746 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
3747
3748 worktree
3749 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3750 .await;
3751
3752 let abs_path = dir.path().join("the-file");
3753 let buffer = project
3754 .update(&mut cx, |p, cx| {
3755 p.open_buffer((worktree_id, "the-file"), cx)
3756 })
3757 .await
3758 .unwrap();
3759
3760 // TODO
3761 // Add a cursor on each row.
3762 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
3763 // assert!(!buffer.is_dirty());
3764 // buffer.add_selection_set(
3765 // &(0..3)
3766 // .map(|row| Selection {
3767 // id: row as usize,
3768 // start: Point::new(row, 1),
3769 // end: Point::new(row, 1),
3770 // reversed: false,
3771 // goal: SelectionGoal::None,
3772 // })
3773 // .collect::<Vec<_>>(),
3774 // cx,
3775 // )
3776 // });
3777
3778 // Change the file on disk, adding two new lines of text, and removing
3779 // one line.
3780 buffer.read_with(&cx, |buffer, _| {
3781 assert!(!buffer.is_dirty());
3782 assert!(!buffer.has_conflict());
3783 });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(&mut cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer
        buffer.update(&mut cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }

    #[gpui::test]
    async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

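        // Simulate a language server publishing two diagnostic groups for a.rs:
        // a warning ("error 1") with one related hint, and an error ("error 2")
        // with two related hints. Each hint also appears as its own HINT-severity
        // diagnostic pointing back at its primary via related_information.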
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

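        // Feed the LSP diagnostics into the project. Supplemental diagnostics end
        // up in the same group as the primary diagnostic they relate to, sharing
        // its group_id.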
        project
            .update(&mut cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());

        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

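        // Fetching a group by id yields the primary diagnostic together with its
        // supplemental hints, in position order.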
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }

    #[gpui::test]
    async fn test_rename(mut cx: gpui::TestAppContext) {
        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

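        // Create the project and register the Rust language (backed by a fake
        // language server) so that opening one.rs starts a server the test can drive.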
        let project = Project::test(fs.clone(), &mut cx);
        project.update(&mut cx, |project, cx| {
            Arc::get_mut(&mut project.languages)
                .unwrap()
                .add(language, cx.background());
        });

        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        let buffer = project
            .update(&mut cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("one.rs")), cx)
            })
            .await
            .unwrap();

        let mut fake_server = fake_servers.next().await.unwrap();

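        // A prepare-rename request should resolve to the range of the symbol under
        // the cursor, as reported by the fake language server.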
        let response = project.update(&mut cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _>(|params| {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                )))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(&cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

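        // Perform the rename. The fake server responds with a WorkspaceEdit that
        // touches both one.rs and two.rs, so the resulting ProjectTransaction
        // should contain edits for two buffers.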
        let response = project.update(&mut cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _>(|params| {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                })
            })
            .next()
            .await
            .unwrap();
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(&cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(&cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }
}