1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod worktree;
5
6use anyhow::{anyhow, Context, Result};
7use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
8use clock::ReplicaId;
9use collections::{hash_map, HashMap, HashSet};
10use futures::{future::Shared, Future, FutureExt};
11use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
12use gpui::{
13 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
14 UpgradeModelHandle, WeakModelHandle,
15};
16use language::{
17 range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, Completion, CompletionLabel,
18 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
19 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
20};
21use lsp::{DiagnosticSeverity, LanguageServer};
22use lsp_command::*;
23use postage::{broadcast, prelude::Stream, sink::Sink, watch};
24use smol::block_on;
25use std::{
26 convert::TryInto,
27 ops::Range,
28 path::{Path, PathBuf},
29 sync::{atomic::AtomicBool, Arc},
30 time::Instant,
31};
32use util::{post_inc, ResultExt, TryFutureExt as _};
33
34pub use fs::*;
35pub use worktree::*;
36
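/// The root model for a collection of worktrees, open buffers, language servers,
/// and collaborators. A project is either hosted locally or joined remotely over
/// RPC; see `ProjectClientState` for the distinction.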
37pub struct Project {
38 worktrees: Vec<WorktreeHandle>,
39 active_entry: Option<ProjectEntry>,
40 languages: Arc<LanguageRegistry>,
41 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
42 started_language_servers:
43 HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
44 client: Arc<client::Client>,
45 user_store: ModelHandle<UserStore>,
46 fs: Arc<dyn Fs>,
47 client_state: ProjectClientState,
48 collaborators: HashMap<PeerId, Collaborator>,
49 subscriptions: Vec<client::Subscription>,
50 language_servers_with_diagnostics_running: isize,
51 open_buffers: HashMap<u64, OpenBuffer>,
52 opened_buffer: broadcast::Sender<()>,
53 loading_buffers: HashMap<
54 ProjectPath,
55 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
56 >,
57 shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
58}
59
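/// An entry in `Project::open_buffers`. While a buffer is still being loaded,
/// operations that arrive for it are queued in `Loading` and applied when the
/// buffer is registered; afterwards only a weak handle is kept.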
60enum OpenBuffer {
61 Loaded(WeakModelHandle<Buffer>),
62 Loading(Vec<Operation>),
63}
64
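/// Worktrees are normally held strongly; "weak" worktrees (such as those created
/// implicitly to open a file referenced by a language server) are held weakly so
/// they can be dropped once nothing else references them.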
65enum WorktreeHandle {
66 Strong(ModelHandle<Worktree>),
67 Weak(WeakModelHandle<Worktree>),
68}
69
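/// Distinguishes a project hosted on this machine from one joined over RPC. A
/// local project registers itself with the server whenever the client connects
/// and publishes its assigned remote id through a watch channel; a remote
/// project carries the id and replica id it was given when joining.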
70enum ProjectClientState {
71 Local {
72 is_shared: bool,
73 remote_id_tx: watch::Sender<Option<u64>>,
74 remote_id_rx: watch::Receiver<Option<u64>>,
75 _maintain_remote_id_task: Task<Option<()>>,
76 },
77 Remote {
78 sharing_has_stopped: bool,
79 remote_id: u64,
80 replica_id: ReplicaId,
81 },
82}
83
84#[derive(Clone, Debug)]
85pub struct Collaborator {
86 pub user: Arc<User>,
87 pub peer_id: PeerId,
88 pub replica_id: ReplicaId,
89}
90
91#[derive(Clone, Debug, PartialEq)]
92pub enum Event {
93 ActiveEntryChanged(Option<ProjectEntry>),
94 WorktreeRemoved(WorktreeId),
95 DiskBasedDiagnosticsStarted,
96 DiskBasedDiagnosticsUpdated,
97 DiskBasedDiagnosticsFinished,
98 DiagnosticsUpdated(ProjectPath),
99}
100
101#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
102pub struct ProjectPath {
103 pub worktree_id: WorktreeId,
104 pub path: Arc<Path>,
105}
106
107#[derive(Clone, Debug, Default, PartialEq)]
108pub struct DiagnosticSummary {
109 pub error_count: usize,
110 pub warning_count: usize,
111 pub info_count: usize,
112 pub hint_count: usize,
113}
114
115#[derive(Debug)]
116pub struct Definition {
117 pub target_buffer: ModelHandle<Buffer>,
118 pub target_range: Range<language::Anchor>,
119}
120
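/// The buffer transactions produced by a project-wide operation such as
/// formatting, applying a code action, or performing a rename, keyed by the
/// buffer each transaction applies to.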
121#[derive(Default)]
122pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
123
124impl DiagnosticSummary {
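    /// Tallies primary diagnostics by severity. Supporting (non-primary) entries
    /// are skipped so that a diagnostic group is only counted once.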
125 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
126 let mut this = Self {
127 error_count: 0,
128 warning_count: 0,
129 info_count: 0,
130 hint_count: 0,
131 };
132
133 for entry in diagnostics {
134 if entry.diagnostic.is_primary {
135 match entry.diagnostic.severity {
136 DiagnosticSeverity::ERROR => this.error_count += 1,
137 DiagnosticSeverity::WARNING => this.warning_count += 1,
138 DiagnosticSeverity::INFORMATION => this.info_count += 1,
139 DiagnosticSeverity::HINT => this.hint_count += 1,
140 _ => {}
141 }
142 }
143 }
144
145 this
146 }
147
148 pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
149 proto::DiagnosticSummary {
150 path: path.to_string_lossy().to_string(),
151 error_count: self.error_count as u32,
152 warning_count: self.warning_count as u32,
153 info_count: self.info_count as u32,
154 hint_count: self.hint_count as u32,
155 }
156 }
157}
158
159#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
160pub struct ProjectEntry {
161 pub worktree_id: WorktreeId,
162 pub entry_id: usize,
163}
164
165impl Project {
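    /// Registers this model's RPC message and request handlers with the client,
    /// typically once during startup:
    ///
    /// ```ignore
    /// Project::init(&client);
    /// ```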
166 pub fn init(client: &Arc<Client>) {
167 client.add_entity_message_handler(Self::handle_add_collaborator);
168 client.add_entity_message_handler(Self::handle_buffer_reloaded);
169 client.add_entity_message_handler(Self::handle_buffer_saved);
170 client.add_entity_message_handler(Self::handle_close_buffer);
171 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
172 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
173 client.add_entity_message_handler(Self::handle_remove_collaborator);
174 client.add_entity_message_handler(Self::handle_share_worktree);
175 client.add_entity_message_handler(Self::handle_unregister_worktree);
176 client.add_entity_message_handler(Self::handle_unshare_project);
177 client.add_entity_message_handler(Self::handle_update_buffer_file);
178 client.add_entity_message_handler(Self::handle_update_buffer);
179 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
180 client.add_entity_message_handler(Self::handle_update_worktree);
181 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
182 client.add_entity_request_handler(Self::handle_apply_code_action);
183 client.add_entity_request_handler(Self::handle_format_buffers);
184 client.add_entity_request_handler(Self::handle_get_code_actions);
185 client.add_entity_request_handler(Self::handle_get_completions);
186 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
187 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
188 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
189 client.add_entity_request_handler(Self::handle_open_buffer);
190 client.add_entity_request_handler(Self::handle_save_buffer);
191 }
192
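    /// Creates a project hosted on this machine. A background task watches the
    /// client's connection status, registers the project with the server whenever
    /// a connection is established, and publishes the assigned remote id.
    ///
    /// A minimal sketch of setting one up and adding a worktree, assuming a
    /// connected `client`, `user_store`, `languages`, and `fs`, plus a test app
    /// context; the path is hypothetical:
    ///
    /// ```ignore
    /// let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
    /// let (worktree, _relative_path) = project
    ///     .update(cx, |project, cx| {
    ///         project.find_or_create_local_worktree("/path/to/repo", false, cx)
    ///     })
    ///     .await?;
    /// ```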
193 pub fn local(
194 client: Arc<Client>,
195 user_store: ModelHandle<UserStore>,
196 languages: Arc<LanguageRegistry>,
197 fs: Arc<dyn Fs>,
198 cx: &mut MutableAppContext,
199 ) -> ModelHandle<Self> {
200 cx.add_model(|cx: &mut ModelContext<Self>| {
201 let (remote_id_tx, remote_id_rx) = watch::channel();
202 let _maintain_remote_id_task = cx.spawn_weak({
203 let rpc = client.clone();
204 move |this, mut cx| {
205 async move {
206 let mut status = rpc.status();
207 while let Some(status) = status.recv().await {
208 if let Some(this) = this.upgrade(&cx) {
209 let remote_id = if let client::Status::Connected { .. } = status {
210 let response = rpc.request(proto::RegisterProject {}).await?;
211 Some(response.project_id)
212 } else {
213 None
214 };
215
216 if let Some(project_id) = remote_id {
217 let mut registrations = Vec::new();
218 this.update(&mut cx, |this, cx| {
219 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
220 registrations.push(worktree.update(
221 cx,
222 |worktree, cx| {
223 let worktree = worktree.as_local_mut().unwrap();
224 worktree.register(project_id, cx)
225 },
226 ));
227 }
228 });
229 for registration in registrations {
230 registration.await?;
231 }
232 }
233 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
234 }
235 }
236 Ok(())
237 }
238 .log_err()
239 }
240 });
241
242 Self {
243 worktrees: Default::default(),
244 collaborators: Default::default(),
245 open_buffers: Default::default(),
246 loading_buffers: Default::default(),
247 shared_buffers: Default::default(),
248 client_state: ProjectClientState::Local {
249 is_shared: false,
250 remote_id_tx,
251 remote_id_rx,
252 _maintain_remote_id_task,
253 },
254 opened_buffer: broadcast::channel(1).0,
255 subscriptions: Vec::new(),
256 active_entry: None,
257 languages,
258 client,
259 user_store,
260 fs,
261 language_servers_with_diagnostics_running: 0,
262 language_servers: Default::default(),
263 started_language_servers: Default::default(),
264 }
265 })
266 }
267
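    /// Joins a project that a collaborator is sharing, identified by `remote_id`.
    /// Connects to the server, loads the project's worktrees and collaborators,
    /// and constructs a replica using the replica id assigned by the server.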
268 pub async fn remote(
269 remote_id: u64,
270 client: Arc<Client>,
271 user_store: ModelHandle<UserStore>,
272 languages: Arc<LanguageRegistry>,
273 fs: Arc<dyn Fs>,
274 cx: &mut AsyncAppContext,
275 ) -> Result<ModelHandle<Self>> {
276 client.authenticate_and_connect(&cx).await?;
277
278 let response = client
279 .request(proto::JoinProject {
280 project_id: remote_id,
281 })
282 .await?;
283
284 let replica_id = response.replica_id as ReplicaId;
285
286 let mut worktrees = Vec::new();
287 for worktree in response.worktrees {
288 let (worktree, load_task) = cx
289 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
290 worktrees.push(worktree);
291 load_task.detach();
292 }
293
294 let this = cx.add_model(|cx| {
295 let mut this = Self {
296 worktrees: Vec::new(),
297 open_buffers: Default::default(),
298 loading_buffers: Default::default(),
299 opened_buffer: broadcast::channel(1).0,
300 shared_buffers: Default::default(),
301 active_entry: None,
302 collaborators: Default::default(),
303 languages,
304 user_store: user_store.clone(),
305 fs,
306 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
307 client,
308 client_state: ProjectClientState::Remote {
309 sharing_has_stopped: false,
310 remote_id,
311 replica_id,
312 },
313 language_servers_with_diagnostics_running: 0,
314 language_servers: Default::default(),
315 started_language_servers: Default::default(),
316 };
317 for worktree in worktrees {
318 this.add_worktree(&worktree, cx);
319 }
320 this
321 });
322
323 let user_ids = response
324 .collaborators
325 .iter()
326 .map(|peer| peer.user_id)
327 .collect();
328 user_store
329 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
330 .await?;
331 let mut collaborators = HashMap::default();
332 for message in response.collaborators {
333 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
334 collaborators.insert(collaborator.peer_id, collaborator);
335 }
336
337 this.update(cx, |this, _| {
338 this.collaborators = collaborators;
339 });
340
341 Ok(this)
342 }
343
344 #[cfg(any(test, feature = "test-support"))]
345 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
346 let languages = Arc::new(LanguageRegistry::new());
347 let http_client = client::test::FakeHttpClient::with_404_response();
348 let client = client::Client::new(http_client.clone());
349 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
350 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
351 }
352
353 #[cfg(any(test, feature = "test-support"))]
354 pub fn shared_buffer(&self, peer_id: PeerId, remote_id: u64) -> Option<ModelHandle<Buffer>> {
355 self.shared_buffers
356 .get(&peer_id)
357 .and_then(|buffers| buffers.get(&remote_id))
358 .cloned()
359 }
360
361 #[cfg(any(test, feature = "test-support"))]
362 pub fn has_buffered_operations(&self) -> bool {
363 self.open_buffers
364 .values()
365 .any(|buffer| matches!(buffer, OpenBuffer::Loading(_)))
366 }
367
368 pub fn fs(&self) -> &Arc<dyn Fs> {
369 &self.fs
370 }
371
372 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
373 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
374 *remote_id_tx.borrow_mut() = remote_id;
375 }
376
377 self.subscriptions.clear();
378 if let Some(remote_id) = remote_id {
379 self.subscriptions
380 .push(self.client.add_model_for_remote_entity(remote_id, cx));
381 }
382 }
383
384 pub fn remote_id(&self) -> Option<u64> {
385 match &self.client_state {
386 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
387 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
388 }
389 }
390
391 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
392 let mut id = None;
393 let mut watch = None;
394 match &self.client_state {
395 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
396 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
397 }
398
399 async move {
400 if let Some(id) = id {
401 return id;
402 }
403 let mut watch = watch.unwrap();
404 loop {
405 let id = *watch.borrow();
406 if let Some(id) = id {
407 return id;
408 }
409 watch.recv().await;
410 }
411 }
412 }
413
414 pub fn replica_id(&self) -> ReplicaId {
415 match &self.client_state {
416 ProjectClientState::Local { .. } => 0,
417 ProjectClientState::Remote { replica_id, .. } => *replica_id,
418 }
419 }
420
421 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
422 &self.collaborators
423 }
424
425 pub fn worktrees<'a>(
426 &'a self,
427 cx: &'a AppContext,
428 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
429 self.worktrees
430 .iter()
431 .filter_map(move |worktree| worktree.upgrade(cx))
432 }
433
434 pub fn worktree_for_id(
435 &self,
436 id: WorktreeId,
437 cx: &AppContext,
438 ) -> Option<ModelHandle<Worktree>> {
439 self.worktrees(cx)
440 .find(|worktree| worktree.read(cx).id() == id)
441 }
442
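    /// Shares this local project with collaborators: marks it as shared, sends a
    /// `ShareProject` request, and then shares each worktree. Fails if the
    /// project has not been assigned a remote id yet or if it is itself remote.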
443 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
444 let rpc = self.client.clone();
445 cx.spawn(|this, mut cx| async move {
446 let project_id = this.update(&mut cx, |this, _| {
447 if let ProjectClientState::Local {
448 is_shared,
449 remote_id_rx,
450 ..
451 } = &mut this.client_state
452 {
453 *is_shared = true;
454 remote_id_rx
455 .borrow()
456 .ok_or_else(|| anyhow!("no project id"))
457 } else {
458 Err(anyhow!("can't share a remote project"))
459 }
460 })?;
461
462 rpc.request(proto::ShareProject { project_id }).await?;
463 let mut tasks = Vec::new();
464 this.update(&mut cx, |this, cx| {
465 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
466 worktree.update(cx, |worktree, cx| {
467 let worktree = worktree.as_local_mut().unwrap();
468 tasks.push(worktree.share(project_id, cx));
469 });
470 }
471 });
472 for task in tasks {
473 task.await?;
474 }
475 this.update(&mut cx, |_, cx| cx.notify());
476 Ok(())
477 })
478 }
479
480 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
481 let rpc = self.client.clone();
482 cx.spawn(|this, mut cx| async move {
483 let project_id = this.update(&mut cx, |this, _| {
484 if let ProjectClientState::Local {
485 is_shared,
486 remote_id_rx,
487 ..
488 } = &mut this.client_state
489 {
490 *is_shared = false;
491 remote_id_rx
492 .borrow()
493 .ok_or_else(|| anyhow!("no project id"))
494 } else {
495 Err(anyhow!("can't share a remote project"))
496 }
497 })?;
498
499 rpc.send(proto::UnshareProject { project_id })?;
500 this.update(&mut cx, |this, cx| {
501 this.collaborators.clear();
502 this.shared_buffers.clear();
503 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
504 worktree.update(cx, |worktree, _| {
505 worktree.as_local_mut().unwrap().unshare();
506 });
507 }
508 cx.notify()
509 });
510 Ok(())
511 })
512 }
513
514 pub fn is_read_only(&self) -> bool {
515 match &self.client_state {
516 ProjectClientState::Local { .. } => false,
517 ProjectClientState::Remote {
518 sharing_has_stopped,
519 ..
520 } => *sharing_has_stopped,
521 }
522 }
523
524 pub fn is_local(&self) -> bool {
525 match &self.client_state {
526 ProjectClientState::Local { .. } => true,
527 ProjectClientState::Remote { .. } => false,
528 }
529 }
530
531 pub fn is_remote(&self) -> bool {
532 !self.is_local()
533 }
534
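    /// Opens the buffer for a path within one of the project's worktrees, loading
    /// it from disk (local) or requesting it over RPC (remote). Repeated calls for
    /// the same path while a load is in flight share one task and resolve to the
    /// same buffer.
    ///
    /// A minimal sketch, assuming a `ModelHandle<Project>` and a test app context,
    /// with a hypothetical path:
    ///
    /// ```ignore
    /// let buffer = project
    ///     .update(cx, |project, cx| {
    ///         project.open_buffer(
    ///             ProjectPath {
    ///                 worktree_id,
    ///                 path: Path::new("src/lib.rs").into(),
    ///             },
    ///             cx,
    ///         )
    ///     })
    ///     .await?;
    /// ```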
535 pub fn open_buffer(
536 &mut self,
537 path: impl Into<ProjectPath>,
538 cx: &mut ModelContext<Self>,
539 ) -> Task<Result<ModelHandle<Buffer>>> {
540 let project_path = path.into();
541 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
542 worktree
543 } else {
544 return Task::ready(Err(anyhow!("no such worktree")));
545 };
546
547 // If there is already a buffer for the given path, then return it.
548 let existing_buffer = self.get_open_buffer(&project_path, cx);
549 if let Some(existing_buffer) = existing_buffer {
550 return Task::ready(Ok(existing_buffer));
551 }
552
553 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
554 // If the given path is already being loaded, then wait for that existing
555 // task to complete and return the same buffer.
556 hash_map::Entry::Occupied(e) => e.get().clone(),
557
558 // Otherwise, record the fact that this path is now being loaded.
559 hash_map::Entry::Vacant(entry) => {
560 let (mut tx, rx) = postage::watch::channel();
561 entry.insert(rx.clone());
562
563 let load_buffer = if worktree.read(cx).is_local() {
564 self.open_local_buffer(&project_path.path, &worktree, cx)
565 } else {
566 self.open_remote_buffer(&project_path.path, &worktree, cx)
567 };
568
569 cx.spawn(move |this, mut cx| async move {
570 let load_result = load_buffer.await;
571 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
572 // Record the fact that the buffer is no longer loading.
573 this.loading_buffers.remove(&project_path);
574 if this.loading_buffers.is_empty() {
575 this.open_buffers
576 .retain(|_, buffer| matches!(buffer, OpenBuffer::Loaded(_)))
577 }
578
579 let buffer = load_result.map_err(Arc::new)?;
580 Ok(buffer)
581 }));
582 })
583 .detach();
584 rx
585 }
586 };
587
588 cx.foreground().spawn(async move {
589 loop {
590 if let Some(result) = loading_watch.borrow().as_ref() {
591 match result {
592 Ok(buffer) => return Ok(buffer.clone()),
593 Err(error) => return Err(anyhow!("{}", error)),
594 }
595 }
596 loading_watch.recv().await;
597 }
598 })
599 }
600
601 fn open_local_buffer(
602 &mut self,
603 path: &Arc<Path>,
604 worktree: &ModelHandle<Worktree>,
605 cx: &mut ModelContext<Self>,
606 ) -> Task<Result<ModelHandle<Buffer>>> {
607 let load_buffer = worktree.update(cx, |worktree, cx| {
608 let worktree = worktree.as_local_mut().unwrap();
609 worktree.load_buffer(path, cx)
610 });
611 let worktree = worktree.downgrade();
612 cx.spawn(|this, mut cx| async move {
613 let buffer = load_buffer.await?;
614 let worktree = worktree
615 .upgrade(&cx)
616 .ok_or_else(|| anyhow!("worktree was removed"))?;
617 this.update(&mut cx, |this, cx| {
618 this.register_buffer(&buffer, Some(&worktree), cx)
619 })?;
620 Ok(buffer)
621 })
622 }
623
624 fn open_remote_buffer(
625 &mut self,
626 path: &Arc<Path>,
627 worktree: &ModelHandle<Worktree>,
628 cx: &mut ModelContext<Self>,
629 ) -> Task<Result<ModelHandle<Buffer>>> {
630 let rpc = self.client.clone();
631 let project_id = self.remote_id().unwrap();
632 let remote_worktree_id = worktree.read(cx).id();
633 let path = path.clone();
634 let path_string = path.to_string_lossy().to_string();
635 cx.spawn(|this, mut cx| async move {
636 let response = rpc
637 .request(proto::OpenBuffer {
638 project_id,
639 worktree_id: remote_worktree_id.to_proto(),
640 path: path_string,
641 })
642 .await?;
643 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
644 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
645 .await
646 })
647 }
648
649 fn open_local_buffer_from_lsp_path(
650 &mut self,
651 abs_path: lsp::Url,
652 lang_name: String,
653 lang_server: Arc<LanguageServer>,
654 cx: &mut ModelContext<Self>,
655 ) -> Task<Result<ModelHandle<Buffer>>> {
656 cx.spawn(|this, mut cx| async move {
657 let abs_path = abs_path
658 .to_file_path()
659 .map_err(|_| anyhow!("can't convert URI to path"))?;
660 let (worktree, relative_path) = if let Some(result) =
661 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
662 {
663 result
664 } else {
665 let worktree = this
666 .update(&mut cx, |this, cx| {
667 this.create_local_worktree(&abs_path, true, cx)
668 })
669 .await?;
670 this.update(&mut cx, |this, cx| {
671 this.language_servers
672 .insert((worktree.read(cx).id(), lang_name), lang_server);
673 });
674 (worktree, PathBuf::new())
675 };
676
677 let project_path = ProjectPath {
678 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
679 path: relative_path.into(),
680 };
681 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
682 .await
683 })
684 }
685
686 pub fn save_buffer_as(
687 &self,
688 buffer: ModelHandle<Buffer>,
689 abs_path: PathBuf,
690 cx: &mut ModelContext<Project>,
691 ) -> Task<Result<()>> {
692 let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
693 cx.spawn(|this, mut cx| async move {
694 let (worktree, path) = worktree_task.await?;
695 worktree
696 .update(&mut cx, |worktree, cx| {
697 worktree
698 .as_local_mut()
699 .unwrap()
700 .save_buffer_as(buffer.clone(), path, cx)
701 })
702 .await?;
703 this.update(&mut cx, |this, cx| {
704 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
705 });
706 Ok(())
707 })
708 }
709
710 #[cfg(any(test, feature = "test-support"))]
711 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
712 let path = path.into();
713 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
714 self.open_buffers.iter().any(|(_, buffer)| {
715 if let Some(buffer) = buffer.upgrade(cx) {
716 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
717 if file.worktree == worktree && file.path() == &path.path {
718 return true;
719 }
720 }
721 }
722 false
723 })
724 } else {
725 false
726 }
727 }
728
729 fn get_open_buffer(
730 &mut self,
731 path: &ProjectPath,
732 cx: &mut ModelContext<Self>,
733 ) -> Option<ModelHandle<Buffer>> {
734 let mut result = None;
735 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
736 self.open_buffers.retain(|_, buffer| {
737 if let Some(buffer) = buffer.upgrade(cx) {
738 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
739 if file.worktree == worktree && file.path() == &path.path {
740 result = Some(buffer);
741 }
742 }
743 true
744 } else {
745 false
746 }
747 });
748 result
749 }
750
751 fn register_buffer(
752 &mut self,
753 buffer: &ModelHandle<Buffer>,
754 worktree: Option<&ModelHandle<Worktree>>,
755 cx: &mut ModelContext<Self>,
756 ) -> Result<()> {
757 match self.open_buffers.insert(
758 buffer.read(cx).remote_id(),
759 OpenBuffer::Loaded(buffer.downgrade()),
760 ) {
761 None => {}
762 Some(OpenBuffer::Loading(operations)) => {
763 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
764 }
765 Some(OpenBuffer::Loaded(_)) => Err(anyhow!("registered the same buffer twice"))?,
766 }
767 self.assign_language_to_buffer(&buffer, worktree, cx);
768 Ok(())
769 }
770
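    /// Detects the buffer's language from its path and, for buffers in local
    /// worktrees, starts (or reuses) the matching language server and seeds the
    /// buffer with any diagnostics already stored for that path.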
771 fn assign_language_to_buffer(
772 &mut self,
773 buffer: &ModelHandle<Buffer>,
774 worktree: Option<&ModelHandle<Worktree>>,
775 cx: &mut ModelContext<Self>,
776 ) -> Option<()> {
777 let (path, full_path) = {
778 let file = buffer.read(cx).file()?;
779 (file.path().clone(), file.full_path(cx))
780 };
781
782 // If the buffer has a language, set it and start/assign the language server
783 if let Some(language) = self.languages.select_language(&full_path).cloned() {
784 buffer.update(cx, |buffer, cx| {
785 buffer.set_language(Some(language.clone()), cx);
786 });
787
788 // For local worktrees, start a language server if needed.
789 // Also assign the language server and any previously stored diagnostics to the buffer.
790 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
791 let worktree_id = local_worktree.id();
792 let worktree_abs_path = local_worktree.abs_path().clone();
793 let buffer = buffer.downgrade();
794 let language_server =
795 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
796
797 cx.spawn_weak(|_, mut cx| async move {
798 if let Some(language_server) = language_server.await {
799 if let Some(buffer) = buffer.upgrade(&cx) {
800 buffer.update(&mut cx, |buffer, cx| {
801 buffer.set_language_server(Some(language_server), cx);
802 });
803 }
804 }
805 })
806 .detach();
807 }
808 }
809
810 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
811 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
812 buffer.update(cx, |buffer, cx| {
813 buffer.update_diagnostics(diagnostics, None, cx).log_err();
814 });
815 }
816 }
817
818 None
819 }
820
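    /// Returns the language server for the given worktree and language, starting
    /// one if it hasn't been started yet. The returned task is shared, so
    /// concurrent callers wait on the same startup. Once the server is running,
    /// its `PublishDiagnostics` and `Progress` notifications are forwarded into
    /// the project's diagnostics state.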
821 fn start_language_server(
822 &mut self,
823 worktree_id: WorktreeId,
824 worktree_path: Arc<Path>,
825 language: Arc<Language>,
826 cx: &mut ModelContext<Self>,
827 ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
828 enum LspEvent {
829 DiagnosticsStart,
830 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
831 DiagnosticsFinish,
832 }
833
834 let key = (worktree_id, language.name().to_string());
835 self.started_language_servers
836 .entry(key.clone())
837 .or_insert_with(|| {
838 let language_server = self.languages.start_language_server(
839 &language,
840 worktree_path,
841 self.client.http_client(),
842 cx,
843 );
844 let rpc = self.client.clone();
845 cx.spawn_weak(|this, mut cx| async move {
846 let language_server = language_server?.await.log_err()?;
847 if let Some(this) = this.upgrade(&cx) {
848 this.update(&mut cx, |this, _| {
849 this.language_servers.insert(key, language_server.clone());
850 });
851 }
852
853 let disk_based_sources = language
854 .disk_based_diagnostic_sources()
855 .cloned()
856 .unwrap_or_default();
857 let disk_based_diagnostics_progress_token =
858 language.disk_based_diagnostics_progress_token().cloned();
859 let has_disk_based_diagnostic_progress_token =
860 disk_based_diagnostics_progress_token.is_some();
861 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
862
863 // Listen for `PublishDiagnostics` notifications.
864 language_server
865 .on_notification::<lsp::notification::PublishDiagnostics, _>({
866 let diagnostics_tx = diagnostics_tx.clone();
867 move |params| {
868 if !has_disk_based_diagnostic_progress_token {
869 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
870 }
871 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
872 .ok();
873 if !has_disk_based_diagnostic_progress_token {
874 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
875 }
876 }
877 })
878 .detach();
879
880 // Listen for `Progress` notifications. Send an event when the language server
881 // transitions between running jobs and not running any jobs.
882 let mut running_jobs_for_this_server: i32 = 0;
883 language_server
884 .on_notification::<lsp::notification::Progress, _>(move |params| {
885 let token = match params.token {
886 lsp::NumberOrString::Number(_) => None,
887 lsp::NumberOrString::String(token) => Some(token),
888 };
889
890 if token == disk_based_diagnostics_progress_token {
891 match params.value {
892 lsp::ProgressParamsValue::WorkDone(progress) => {
893 match progress {
894 lsp::WorkDoneProgress::Begin(_) => {
895 running_jobs_for_this_server += 1;
896 if running_jobs_for_this_server == 1 {
897 block_on(
898 diagnostics_tx
899 .send(LspEvent::DiagnosticsStart),
900 )
901 .ok();
902 }
903 }
904 lsp::WorkDoneProgress::End(_) => {
905 running_jobs_for_this_server -= 1;
906 if running_jobs_for_this_server == 0 {
907 block_on(
908 diagnostics_tx
909 .send(LspEvent::DiagnosticsFinish),
910 )
911 .ok();
912 }
913 }
914 _ => {}
915 }
916 }
917 }
918 }
919 })
920 .detach();
921
922 // Process all the LSP events.
923 cx.spawn(|mut cx| async move {
924 while let Ok(message) = diagnostics_rx.recv().await {
925 let this = this.upgrade(&cx)?;
926 match message {
927 LspEvent::DiagnosticsStart => {
928 this.update(&mut cx, |this, cx| {
929 this.disk_based_diagnostics_started(cx);
930 if let Some(project_id) = this.remote_id() {
931 rpc.send(proto::DiskBasedDiagnosticsUpdating {
932 project_id,
933 })
934 .log_err();
935 }
936 });
937 }
938 LspEvent::DiagnosticsUpdate(mut params) => {
939 language.process_diagnostics(&mut params);
940 this.update(&mut cx, |this, cx| {
941 this.update_diagnostics(params, &disk_based_sources, cx)
942 .log_err();
943 });
944 }
945 LspEvent::DiagnosticsFinish => {
946 this.update(&mut cx, |this, cx| {
947 this.disk_based_diagnostics_finished(cx);
948 if let Some(project_id) = this.remote_id() {
949 rpc.send(proto::DiskBasedDiagnosticsUpdated {
950 project_id,
951 })
952 .log_err();
953 }
954 });
955 }
956 }
957 }
958 Some(())
959 })
960 .detach();
961
962 Some(language_server)
963 })
964 .shared()
965 })
966 .clone()
967 }
968
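    /// Converts an LSP `PublishDiagnostics` notification into the project's
    /// diagnostic entries. Each diagnostic that isn't merely related information
    /// for an earlier one starts a new group, its related locations in the same
    /// file become non-primary entries of that group, and a second pass copies
    /// over any severities the server reported for those supporting entries.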
969 pub fn update_diagnostics(
970 &mut self,
971 params: lsp::PublishDiagnosticsParams,
972 disk_based_sources: &HashSet<String>,
973 cx: &mut ModelContext<Self>,
974 ) -> Result<()> {
975 let abs_path = params
976 .uri
977 .to_file_path()
978 .map_err(|_| anyhow!("URI is not a file"))?;
979 let mut next_group_id = 0;
980 let mut diagnostics = Vec::default();
981 let mut primary_diagnostic_group_ids = HashMap::default();
982 let mut sources_by_group_id = HashMap::default();
983 let mut supporting_diagnostic_severities = HashMap::default();
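        // A diagnostic is "supporting" if its related information points back at a
        // diagnostic that has already started a group; otherwise it becomes the
        // primary entry of a new group, and its related locations in this file are
        // added to that group as non-primary entries.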
        for diagnostic in &params.diagnostics {
985 let source = diagnostic.source.as_ref();
986 let code = diagnostic.code.as_ref().map(|code| match code {
987 lsp::NumberOrString::Number(code) => code.to_string(),
988 lsp::NumberOrString::String(code) => code.clone(),
989 });
990 let range = range_from_lsp(diagnostic.range);
991 let is_supporting = diagnostic
992 .related_information
993 .as_ref()
994 .map_or(false, |infos| {
995 infos.iter().any(|info| {
996 primary_diagnostic_group_ids.contains_key(&(
997 source,
998 code.clone(),
999 range_from_lsp(info.location.range),
1000 ))
1001 })
1002 });
1003
1004 if is_supporting {
1005 if let Some(severity) = diagnostic.severity {
1006 supporting_diagnostic_severities
1007 .insert((source, code.clone(), range), severity);
1008 }
1009 } else {
1010 let group_id = post_inc(&mut next_group_id);
1011 let is_disk_based =
1012 source.map_or(false, |source| disk_based_sources.contains(source));
1013
1014 sources_by_group_id.insert(group_id, source);
1015 primary_diagnostic_group_ids
1016 .insert((source, code.clone(), range.clone()), group_id);
1017
1018 diagnostics.push(DiagnosticEntry {
1019 range,
1020 diagnostic: Diagnostic {
1021 code: code.clone(),
1022 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1023 message: diagnostic.message.clone(),
1024 group_id,
1025 is_primary: true,
1026 is_valid: true,
1027 is_disk_based,
1028 },
1029 });
1030 if let Some(infos) = &diagnostic.related_information {
1031 for info in infos {
1032 if info.location.uri == params.uri && !info.message.is_empty() {
1033 let range = range_from_lsp(info.location.range);
1034 diagnostics.push(DiagnosticEntry {
1035 range,
1036 diagnostic: Diagnostic {
1037 code: code.clone(),
1038 severity: DiagnosticSeverity::INFORMATION,
1039 message: info.message.clone(),
1040 group_id,
1041 is_primary: false,
1042 is_valid: true,
1043 is_disk_based,
1044 },
1045 });
1046 }
1047 }
1048 }
1049 }
1050 }
1051
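        // If the server reported an explicit severity for a supporting diagnostic,
        // prefer it over the INFORMATION default assigned above.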
1052 for entry in &mut diagnostics {
1053 let diagnostic = &mut entry.diagnostic;
1054 if !diagnostic.is_primary {
1055 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1056 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1057 source,
1058 diagnostic.code.clone(),
1059 entry.range.clone(),
1060 )) {
1061 diagnostic.severity = severity;
1062 }
1063 }
1064 }
1065
1066 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1067 Ok(())
1068 }
1069
1070 pub fn update_diagnostic_entries(
1071 &mut self,
1072 abs_path: PathBuf,
1073 version: Option<i32>,
1074 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1075 cx: &mut ModelContext<Project>,
1076 ) -> Result<(), anyhow::Error> {
1077 let (worktree, relative_path) = self
1078 .find_local_worktree(&abs_path, cx)
1079 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1080 let project_path = ProjectPath {
1081 worktree_id: worktree.read(cx).id(),
1082 path: relative_path.into(),
1083 };
1084
1085 for buffer in self.open_buffers.values() {
1086 if let Some(buffer) = buffer.upgrade(cx) {
1087 if buffer
1088 .read(cx)
1089 .file()
1090 .map_or(false, |file| *file.path() == project_path.path)
1091 {
1092 buffer.update(cx, |buffer, cx| {
1093 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1094 })?;
1095 break;
1096 }
1097 }
1098 }
1099 worktree.update(cx, |worktree, cx| {
1100 worktree
1101 .as_local_mut()
1102 .ok_or_else(|| anyhow!("not a local worktree"))?
1103 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1104 })?;
1105 cx.emit(Event::DiagnosticsUpdated(project_path));
1106 Ok(())
1107 }
1108
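    /// Formats the given buffers, applying LSP formatting edits to buffers backed
    /// by local files and forwarding the rest to the remote host in a single
    /// `FormatBuffers` request. Returns the resulting transactions keyed by
    /// buffer.
    ///
    /// A minimal sketch, assuming a handle to an open `buffer` and a test app
    /// context:
    ///
    /// ```ignore
    /// let mut buffers = HashSet::default();
    /// buffers.insert(buffer.clone());
    /// let transaction = project
    ///     .update(cx, |project, cx| project.format(buffers, true, cx))
    ///     .await?;
    /// ```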
1109 pub fn format(
1110 &self,
1111 buffers: HashSet<ModelHandle<Buffer>>,
1112 push_to_history: bool,
1113 cx: &mut ModelContext<Project>,
1114 ) -> Task<Result<ProjectTransaction>> {
1115 let mut local_buffers = Vec::new();
1116 let mut remote_buffers = None;
1117 for buffer_handle in buffers {
1118 let buffer = buffer_handle.read(cx);
1119 let worktree;
1120 if let Some(file) = File::from_dyn(buffer.file()) {
1121 worktree = file.worktree.clone();
1122 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1123 let lang_server;
1124 if let Some(lang) = buffer.language() {
1125 if let Some(server) = self
1126 .language_servers
1127 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1128 {
1129 lang_server = server.clone();
1130 } else {
1131 return Task::ready(Ok(Default::default()));
1132 };
1133 } else {
1134 return Task::ready(Ok(Default::default()));
1135 }
1136
1137 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1138 } else {
1139 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1140 }
1141 } else {
1142 return Task::ready(Ok(Default::default()));
1143 }
1144 }
1145
1146 let remote_buffers = self.remote_id().zip(remote_buffers);
1147 let client = self.client.clone();
1148
1149 cx.spawn(|this, mut cx| async move {
1150 let mut project_transaction = ProjectTransaction::default();
1151
1152 if let Some((project_id, remote_buffers)) = remote_buffers {
1153 let response = client
1154 .request(proto::FormatBuffers {
1155 project_id,
1156 buffer_ids: remote_buffers
1157 .iter()
1158 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1159 .collect(),
1160 })
1161 .await?
1162 .transaction
1163 .ok_or_else(|| anyhow!("missing transaction"))?;
1164 project_transaction = this
1165 .update(&mut cx, |this, cx| {
1166 this.deserialize_project_transaction(response, push_to_history, cx)
1167 })
1168 .await?;
1169 }
1170
1171 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1172 let lsp_edits = lang_server
1173 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1174 text_document: lsp::TextDocumentIdentifier::new(
1175 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1176 ),
1177 options: Default::default(),
1178 work_done_progress_params: Default::default(),
1179 })
1180 .await?;
1181
1182 if let Some(lsp_edits) = lsp_edits {
1183 let edits = buffer
1184 .update(&mut cx, |buffer, cx| {
1185 buffer.edits_from_lsp(lsp_edits, None, cx)
1186 })
1187 .await?;
1188 buffer.update(&mut cx, |buffer, cx| {
1189 buffer.finalize_last_transaction();
1190 buffer.start_transaction();
1191 for (range, text) in edits {
1192 buffer.edit([range], text, cx);
1193 }
1194 if buffer.end_transaction(cx).is_some() {
1195 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1196 if !push_to_history {
1197 buffer.forget_transaction(transaction.id);
1198 }
1199 project_transaction.0.insert(cx.handle(), transaction);
1200 }
1201 });
1202 }
1203 }
1204
1205 Ok(project_transaction)
1206 })
1207 }
1208
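    /// Resolves the definition of the symbol at `position` via the
    /// `GetDefinition` LSP command, yielding the target buffer and range for each
    /// result.
    ///
    /// A minimal sketch, for some `position` that implements `ToPointUtf16`:
    ///
    /// ```ignore
    /// let definitions = project
    ///     .update(cx, |project, cx| project.definition(&buffer, position, cx))
    ///     .await?;
    /// ```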
1209 pub fn definition<T: ToPointUtf16>(
1210 &self,
1211 buffer: &ModelHandle<Buffer>,
1212 position: T,
1213 cx: &mut ModelContext<Self>,
1214 ) -> Task<Result<Vec<Definition>>> {
1215 let position = position.to_point_utf16(buffer.read(cx));
1216 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1217 }
1218
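    /// Requests completions at `position`, either directly from the buffer's
    /// language server (local) or via a `GetCompletions` request to the host
    /// (remote). Completions whose edit range can't be mapped onto the buffer are
    /// skipped.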
1219 pub fn completions<T: ToPointUtf16>(
1220 &self,
1221 source_buffer_handle: &ModelHandle<Buffer>,
1222 position: T,
1223 cx: &mut ModelContext<Self>,
1224 ) -> Task<Result<Vec<Completion>>> {
1225 let source_buffer_handle = source_buffer_handle.clone();
1226 let source_buffer = source_buffer_handle.read(cx);
1227 let buffer_id = source_buffer.remote_id();
1228 let language = source_buffer.language().cloned();
1229 let worktree;
1230 let buffer_abs_path;
1231 if let Some(file) = File::from_dyn(source_buffer.file()) {
1232 worktree = file.worktree.clone();
1233 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1234 } else {
1235 return Task::ready(Ok(Default::default()));
1236 };
1237
1238 let position = position.to_point_utf16(source_buffer);
1239 let anchor = source_buffer.anchor_after(position);
1240
1241 if worktree.read(cx).as_local().is_some() {
1242 let buffer_abs_path = buffer_abs_path.unwrap();
1243 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1244 server
1245 } else {
1246 return Task::ready(Ok(Default::default()));
1247 };
1248
1249 cx.spawn(|_, cx| async move {
1250 let completions = lang_server
1251 .request::<lsp::request::Completion>(lsp::CompletionParams {
1252 text_document_position: lsp::TextDocumentPositionParams::new(
1253 lsp::TextDocumentIdentifier::new(
1254 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1255 ),
1256 position.to_lsp_position(),
1257 ),
1258 context: Default::default(),
1259 work_done_progress_params: Default::default(),
1260 partial_result_params: Default::default(),
1261 })
1262 .await
1263 .context("lsp completion request failed")?;
1264
1265 let completions = if let Some(completions) = completions {
1266 match completions {
1267 lsp::CompletionResponse::Array(completions) => completions,
1268 lsp::CompletionResponse::List(list) => list.items,
1269 }
1270 } else {
1271 Default::default()
1272 };
1273
1274 source_buffer_handle.read_with(&cx, |this, _| {
1275 Ok(completions
1276 .into_iter()
1277 .filter_map(|lsp_completion| {
1278 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1279 lsp::CompletionTextEdit::Edit(edit) => {
1280 (range_from_lsp(edit.range), edit.new_text.clone())
1281 }
1282 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1283 log::info!("unsupported insert/replace completion");
1284 return None;
1285 }
1286 };
1287
1288 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1289 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1290 if clipped_start == old_range.start && clipped_end == old_range.end {
1291 Some(Completion {
1292 old_range: this.anchor_before(old_range.start)
1293 ..this.anchor_after(old_range.end),
1294 new_text,
1295 label: language
1296 .as_ref()
1297 .and_then(|l| l.label_for_completion(&lsp_completion))
1298 .unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
1299 lsp_completion,
1300 })
1301 } else {
1302 None
1303 }
1304 })
1305 .collect())
1306 })
1307 })
1308 } else if let Some(project_id) = self.remote_id() {
1309 let rpc = self.client.clone();
1310 let message = proto::GetCompletions {
1311 project_id,
1312 buffer_id,
1313 position: Some(language::proto::serialize_anchor(&anchor)),
1314 version: (&source_buffer.version()).into(),
1315 };
1316 cx.spawn_weak(|_, mut cx| async move {
1317 let response = rpc.request(message).await?;
1318
1319 source_buffer_handle
1320 .update(&mut cx, |buffer, _| {
1321 buffer.wait_for_version(response.version.into())
1322 })
1323 .await;
1324
1325 response
1326 .completions
1327 .into_iter()
1328 .map(|completion| {
1329 language::proto::deserialize_completion(completion, language.as_ref())
1330 })
1331 .collect()
1332 })
1333 } else {
1334 Task::ready(Ok(Default::default()))
1335 }
1336 }
1337
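    /// Resolves the given completion with the language server and applies any
    /// `additional_text_edits` it returns (such as auto-inserted imports),
    /// returning the resulting transaction if the buffer changed.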
1338 pub fn apply_additional_edits_for_completion(
1339 &self,
1340 buffer_handle: ModelHandle<Buffer>,
1341 completion: Completion,
1342 push_to_history: bool,
1343 cx: &mut ModelContext<Self>,
1344 ) -> Task<Result<Option<Transaction>>> {
1345 let buffer = buffer_handle.read(cx);
1346 let buffer_id = buffer.remote_id();
1347
1348 if self.is_local() {
1349 let lang_server = if let Some(language_server) = buffer.language_server() {
1350 language_server.clone()
1351 } else {
1352 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1353 };
1354
1355 cx.spawn(|_, mut cx| async move {
1356 let resolved_completion = lang_server
1357 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1358 .await?;
1359 if let Some(edits) = resolved_completion.additional_text_edits {
1360 let edits = buffer_handle
1361 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1362 .await?;
1363 buffer_handle.update(&mut cx, |buffer, cx| {
1364 buffer.finalize_last_transaction();
1365 buffer.start_transaction();
1366 for (range, text) in edits {
1367 buffer.edit([range], text, cx);
1368 }
1369 let transaction = if buffer.end_transaction(cx).is_some() {
1370 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1371 if !push_to_history {
1372 buffer.forget_transaction(transaction.id);
1373 }
1374 Some(transaction)
1375 } else {
1376 None
1377 };
1378 Ok(transaction)
1379 })
1380 } else {
1381 Ok(None)
1382 }
1383 })
1384 } else if let Some(project_id) = self.remote_id() {
1385 let client = self.client.clone();
1386 cx.spawn(|_, mut cx| async move {
1387 let response = client
1388 .request(proto::ApplyCompletionAdditionalEdits {
1389 project_id,
1390 buffer_id,
1391 completion: Some(language::proto::serialize_completion(&completion)),
1392 })
1393 .await?;
1394
1395 if let Some(transaction) = response.transaction {
1396 let transaction = language::proto::deserialize_transaction(transaction)?;
1397 buffer_handle
1398 .update(&mut cx, |buffer, _| {
1399 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1400 })
1401 .await;
1402 if push_to_history {
1403 buffer_handle.update(&mut cx, |buffer, _| {
1404 buffer.push_transaction(transaction.clone(), Instant::now());
1405 });
1406 }
1407 Ok(Some(transaction))
1408 } else {
1409 Ok(None)
1410 }
1411 })
1412 } else {
1413 Task::ready(Err(anyhow!("project does not have a remote id")))
1414 }
1415 }
1416
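    /// Fetches the quickfix and refactor code actions available for `range`, from
    /// the buffer's language server for local worktrees or over RPC for remote
    /// ones.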
1417 pub fn code_actions<T: ToOffset>(
1418 &self,
1419 buffer_handle: &ModelHandle<Buffer>,
1420 range: Range<T>,
1421 cx: &mut ModelContext<Self>,
1422 ) -> Task<Result<Vec<CodeAction>>> {
1423 let buffer_handle = buffer_handle.clone();
1424 let buffer = buffer_handle.read(cx);
1425 let buffer_id = buffer.remote_id();
1426 let worktree;
1427 let buffer_abs_path;
1428 if let Some(file) = File::from_dyn(buffer.file()) {
1429 worktree = file.worktree.clone();
1430 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1431 } else {
1432 return Task::ready(Ok(Default::default()));
1433 };
1434 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1435
1436 if worktree.read(cx).as_local().is_some() {
1437 let buffer_abs_path = buffer_abs_path.unwrap();
1438 let lang_name;
1439 let lang_server;
1440 if let Some(lang) = buffer.language() {
1441 lang_name = lang.name().to_string();
1442 if let Some(server) = self
1443 .language_servers
1444 .get(&(worktree.read(cx).id(), lang_name.clone()))
1445 {
1446 lang_server = server.clone();
1447 } else {
1448 return Task::ready(Ok(Default::default()));
1449 };
1450 } else {
1451 return Task::ready(Ok(Default::default()));
1452 }
1453
1454 let lsp_range = lsp::Range::new(
1455 range.start.to_point_utf16(buffer).to_lsp_position(),
1456 range.end.to_point_utf16(buffer).to_lsp_position(),
1457 );
1458 cx.foreground().spawn(async move {
1459 Ok(lang_server
1460 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1461 text_document: lsp::TextDocumentIdentifier::new(
1462 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1463 ),
1464 range: lsp_range,
1465 work_done_progress_params: Default::default(),
1466 partial_result_params: Default::default(),
1467 context: lsp::CodeActionContext {
1468 diagnostics: Default::default(),
1469 only: Some(vec![
1470 lsp::CodeActionKind::QUICKFIX,
1471 lsp::CodeActionKind::REFACTOR,
1472 lsp::CodeActionKind::REFACTOR_EXTRACT,
1473 ]),
1474 },
1475 })
1476 .await?
1477 .unwrap_or_default()
1478 .into_iter()
1479 .filter_map(|entry| {
1480 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1481 Some(CodeAction {
1482 range: range.clone(),
1483 lsp_action,
1484 })
1485 } else {
1486 None
1487 }
1488 })
1489 .collect())
1490 })
1491 } else if let Some(project_id) = self.remote_id() {
1492 let rpc = self.client.clone();
1493 cx.spawn_weak(|_, mut cx| async move {
1494 let response = rpc
1495 .request(proto::GetCodeActions {
1496 project_id,
1497 buffer_id,
1498 start: Some(language::proto::serialize_anchor(&range.start)),
1499 end: Some(language::proto::serialize_anchor(&range.end)),
1500 })
1501 .await?;
1502
1503 buffer_handle
1504 .update(&mut cx, |buffer, _| {
1505 buffer.wait_for_version(response.version.into())
1506 })
1507 .await;
1508
1509 response
1510 .actions
1511 .into_iter()
1512 .map(language::proto::deserialize_code_action)
1513 .collect()
1514 })
1515 } else {
1516 Task::ready(Ok(Default::default()))
1517 }
1518 }
1519
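    /// Applies a previously fetched code action. The action is re-resolved (or
    /// re-requested when the server returned no resolve data) so its edit
    /// reflects the buffer's current state before the workspace edit is applied.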
1520 pub fn apply_code_action(
1521 &self,
1522 buffer_handle: ModelHandle<Buffer>,
1523 mut action: CodeAction,
1524 push_to_history: bool,
1525 cx: &mut ModelContext<Self>,
1526 ) -> Task<Result<ProjectTransaction>> {
1527 if self.is_local() {
1528 let buffer = buffer_handle.read(cx);
1529 let lang_name = if let Some(lang) = buffer.language() {
1530 lang.name().to_string()
1531 } else {
1532 return Task::ready(Ok(Default::default()));
1533 };
1534 let lang_server = if let Some(language_server) = buffer.language_server() {
1535 language_server.clone()
1536 } else {
1537 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1538 };
1539 let range = action.range.to_point_utf16(buffer);
1540
1541 cx.spawn(|this, mut cx| async move {
1542 if let Some(lsp_range) = action
1543 .lsp_action
1544 .data
1545 .as_mut()
1546 .and_then(|d| d.get_mut("codeActionParams"))
1547 .and_then(|d| d.get_mut("range"))
1548 {
1549 *lsp_range = serde_json::to_value(&lsp::Range::new(
1550 range.start.to_lsp_position(),
1551 range.end.to_lsp_position(),
1552 ))
1553 .unwrap();
1554 action.lsp_action = lang_server
1555 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1556 .await?;
1557 } else {
1558 let actions = this
1559 .update(&mut cx, |this, cx| {
1560 this.code_actions(&buffer_handle, action.range, cx)
1561 })
1562 .await?;
1563 action.lsp_action = actions
1564 .into_iter()
1565 .find(|a| a.lsp_action.title == action.lsp_action.title)
1566 .ok_or_else(|| anyhow!("code action is outdated"))?
1567 .lsp_action;
1568 }
1569
1570 if let Some(edit) = action.lsp_action.edit {
1571 Self::deserialize_workspace_edit(
1572 this,
1573 edit,
1574 push_to_history,
1575 lang_name,
1576 lang_server,
1577 &mut cx,
1578 )
1579 .await
1580 } else {
1581 Ok(ProjectTransaction::default())
1582 }
1583 })
1584 } else if let Some(project_id) = self.remote_id() {
1585 let client = self.client.clone();
1586 let request = proto::ApplyCodeAction {
1587 project_id,
1588 buffer_id: buffer_handle.read(cx).remote_id(),
1589 action: Some(language::proto::serialize_code_action(&action)),
1590 };
1591 cx.spawn(|this, mut cx| async move {
1592 let response = client
1593 .request(request)
1594 .await?
1595 .transaction
1596 .ok_or_else(|| anyhow!("missing transaction"))?;
1597 this.update(&mut cx, |this, cx| {
1598 this.deserialize_project_transaction(response, push_to_history, cx)
1599 })
1600 .await
1601 })
1602 } else {
1603 Task::ready(Err(anyhow!("project does not have a remote id")))
1604 }
1605 }
1606
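    /// Applies an LSP workspace edit on the host: file create, rename, and delete
    /// operations go through the project's `Fs`, and text edits are applied to
    /// buffers opened (or created) for each affected file, accumulating the
    /// results into a `ProjectTransaction`.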
1607 async fn deserialize_workspace_edit(
1608 this: ModelHandle<Self>,
1609 edit: lsp::WorkspaceEdit,
1610 push_to_history: bool,
1611 language_name: String,
1612 language_server: Arc<LanguageServer>,
1613 cx: &mut AsyncAppContext,
1614 ) -> Result<ProjectTransaction> {
1615 let fs = this.read_with(cx, |this, _| this.fs.clone());
1616 let mut operations = Vec::new();
1617 if let Some(document_changes) = edit.document_changes {
1618 match document_changes {
1619 lsp::DocumentChanges::Edits(edits) => {
1620 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
1621 }
1622 lsp::DocumentChanges::Operations(ops) => operations = ops,
1623 }
1624 } else if let Some(changes) = edit.changes {
1625 operations.extend(changes.into_iter().map(|(uri, edits)| {
1626 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1627 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1628 uri,
1629 version: None,
1630 },
1631 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1632 })
1633 }));
1634 }
1635
1636 let mut project_transaction = ProjectTransaction::default();
1637 for operation in operations {
1638 match operation {
1639 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1640 let abs_path = op
1641 .uri
1642 .to_file_path()
1643 .map_err(|_| anyhow!("can't convert URI to path"))?;
1644
1645 if let Some(parent_path) = abs_path.parent() {
1646 fs.create_dir(parent_path).await?;
1647 }
1648 if abs_path.ends_with("/") {
1649 fs.create_dir(&abs_path).await?;
1650 } else {
1651 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
1652 .await?;
1653 }
1654 }
1655 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1656 let source_abs_path = op
1657 .old_uri
1658 .to_file_path()
1659 .map_err(|_| anyhow!("can't convert URI to path"))?;
1660 let target_abs_path = op
1661 .new_uri
1662 .to_file_path()
1663 .map_err(|_| anyhow!("can't convert URI to path"))?;
1664 fs.rename(
1665 &source_abs_path,
1666 &target_abs_path,
1667 op.options.map(Into::into).unwrap_or_default(),
1668 )
1669 .await?;
1670 }
1671 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1672 let abs_path = op
1673 .uri
1674 .to_file_path()
1675 .map_err(|_| anyhow!("can't convert URI to path"))?;
1676 let options = op.options.map(Into::into).unwrap_or_default();
1677 if abs_path.ends_with("/") {
1678 fs.remove_dir(&abs_path, options).await?;
1679 } else {
1680 fs.remove_file(&abs_path, options).await?;
1681 }
1682 }
1683 lsp::DocumentChangeOperation::Edit(op) => {
1684 let buffer_to_edit = this
1685 .update(cx, |this, cx| {
1686 this.open_local_buffer_from_lsp_path(
1687 op.text_document.uri,
1688 language_name.clone(),
1689 language_server.clone(),
1690 cx,
1691 )
1692 })
1693 .await?;
1694
1695 let edits = buffer_to_edit
1696 .update(cx, |buffer, cx| {
1697 let edits = op.edits.into_iter().map(|edit| match edit {
1698 lsp::OneOf::Left(edit) => edit,
1699 lsp::OneOf::Right(edit) => edit.text_edit,
1700 });
1701 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1702 })
1703 .await?;
1704
1705 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
1706 buffer.finalize_last_transaction();
1707 buffer.start_transaction();
1708 for (range, text) in edits {
1709 buffer.edit([range], text, cx);
1710 }
1711 let transaction = if buffer.end_transaction(cx).is_some() {
1712 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1713 if !push_to_history {
1714 buffer.forget_transaction(transaction.id);
1715 }
1716 Some(transaction)
1717 } else {
1718 None
1719 };
1720
1721 transaction
1722 });
1723 if let Some(transaction) = transaction {
1724 project_transaction.0.insert(buffer_to_edit, transaction);
1725 }
1726 }
1727 }
1728 }
1729
1730 Ok(project_transaction)
1731 }
1732
1733 pub fn prepare_rename<T: ToPointUtf16>(
1734 &self,
1735 buffer: ModelHandle<Buffer>,
1736 position: T,
1737 cx: &mut ModelContext<Self>,
1738 ) -> Task<Result<Option<Range<Anchor>>>> {
1739 let position = position.to_point_utf16(buffer.read(cx));
1740 self.request_lsp(buffer, PrepareRename { position }, cx)
1741 }
1742
1743 pub fn perform_rename<T: ToPointUtf16>(
1744 &self,
1745 buffer: ModelHandle<Buffer>,
1746 position: T,
1747 new_name: String,
1748 push_to_history: bool,
1749 cx: &mut ModelContext<Self>,
1750 ) -> Task<Result<ProjectTransaction>> {
1751 let position = position.to_point_utf16(buffer.read(cx));
1752 self.request_lsp(
1753 buffer,
1754 PerformRename {
1755 position,
1756 new_name,
1757 push_to_history,
1758 },
1759 cx,
1760 )
1761 }
1762
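    /// Dispatches a typed LSP command for `buffer_handle`: sent straight to the
    /// buffer's language server when the project is local, or serialized and
    /// forwarded to the host over RPC otherwise. Falls back to a default response
    /// when neither is possible.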
1763 fn request_lsp<R: LspCommand>(
1764 &self,
1765 buffer_handle: ModelHandle<Buffer>,
1766 request: R,
1767 cx: &mut ModelContext<Self>,
1768 ) -> Task<Result<R::Response>>
1769 where
1770 <R::LspRequest as lsp::request::Request>::Result: Send,
1771 {
1772 let buffer = buffer_handle.read(cx);
1773 if self.is_local() {
1774 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
1775 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
1776 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
1777 return cx.spawn(|this, cx| async move {
1778 let response = language_server
1779 .request::<R::LspRequest>(lsp_params)
1780 .await
1781 .context("lsp request failed")?;
1782 request
1783 .response_from_lsp(response, this, buffer_handle, cx)
1784 .await
1785 });
1786 }
1787 } else if let Some(project_id) = self.remote_id() {
1788 let rpc = self.client.clone();
1789 let message = request.to_proto(project_id, buffer);
1790 return cx.spawn(|this, cx| async move {
1791 let response = rpc.request(message).await?;
1792 request
1793 .response_from_proto(response, this, buffer_handle, cx)
1794 .await
1795 });
1796 }
1797 Task::ready(Ok(Default::default()))
1798 }
1799
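    /// Returns the worktree containing `abs_path` along with the path relative to
    /// that worktree's root, creating a new worktree rooted at `abs_path` if no
    /// existing worktree contains it.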
1800 pub fn find_or_create_local_worktree(
1801 &self,
1802 abs_path: impl AsRef<Path>,
1803 weak: bool,
1804 cx: &mut ModelContext<Self>,
1805 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1806 let abs_path = abs_path.as_ref();
1807 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1808 Task::ready(Ok((tree.clone(), relative_path.into())))
1809 } else {
1810 let worktree = self.create_local_worktree(abs_path, weak, cx);
1811 cx.foreground()
1812 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1813 }
1814 }
1815
1816 fn find_local_worktree(
1817 &self,
1818 abs_path: &Path,
1819 cx: &AppContext,
1820 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1821 for tree in self.worktrees(cx) {
1822 if let Some(relative_path) = tree
1823 .read(cx)
1824 .as_local()
1825 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1826 {
1827 return Some((tree.clone(), relative_path.into()));
1828 }
1829 }
1830 None
1831 }
1832
1833 pub fn is_shared(&self) -> bool {
1834 match &self.client_state {
1835 ProjectClientState::Local { is_shared, .. } => *is_shared,
1836 ProjectClientState::Remote { .. } => false,
1837 }
1838 }
1839
1840 fn create_local_worktree(
1841 &self,
1842 abs_path: impl AsRef<Path>,
1843 weak: bool,
1844 cx: &mut ModelContext<Self>,
1845 ) -> Task<Result<ModelHandle<Worktree>>> {
1846 let fs = self.fs.clone();
1847 let client = self.client.clone();
1848 let path = Arc::from(abs_path.as_ref());
1849 cx.spawn(|project, mut cx| async move {
1850 let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;
1851
1852 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
1853 project.add_worktree(&worktree, cx);
1854 (project.remote_id(), project.is_shared())
1855 });
1856
1857 if let Some(project_id) = remote_project_id {
1858 worktree
1859 .update(&mut cx, |worktree, cx| {
1860 worktree.as_local_mut().unwrap().register(project_id, cx)
1861 })
1862 .await?;
1863 if is_shared {
1864 worktree
1865 .update(&mut cx, |worktree, cx| {
1866 worktree.as_local_mut().unwrap().share(project_id, cx)
1867 })
1868 .await?;
1869 }
1870 }
1871
1872 Ok(worktree)
1873 })
1874 }
1875
1876 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1877 self.worktrees.retain(|worktree| {
1878 worktree
1879 .upgrade(cx)
1880 .map_or(false, |w| w.read(cx).id() != id)
1881 });
1882 cx.notify();
1883 }
1884
1885 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
1886 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
1887 if worktree.read(cx).is_local() {
1888 cx.subscribe(&worktree, |this, worktree, _, cx| {
1889 this.update_local_worktree_buffers(worktree, cx);
1890 })
1891 .detach();
1892 }
1893
1894 let push_weak_handle = {
1895 let worktree = worktree.read(cx);
1896 worktree.is_local() && worktree.is_weak()
1897 };
1898 if push_weak_handle {
1899 cx.observe_release(&worktree, |this, cx| {
1900 this.worktrees
1901 .retain(|worktree| worktree.upgrade(cx).is_some());
1902 cx.notify();
1903 })
1904 .detach();
1905 self.worktrees
1906 .push(WorktreeHandle::Weak(worktree.downgrade()));
1907 } else {
1908 self.worktrees
1909 .push(WorktreeHandle::Strong(worktree.clone()));
1910 }
1911 cx.notify();
1912 }
1913
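    // After a local worktree's snapshot changes, re-resolve the file backing each open
    // buffer in that worktree: match by entry id first, then by path, and otherwise keep
    // the old path with no entry id, since the entry is gone from the snapshot. If the
    // project is registered with the server, the new file metadata is broadcast to
    // collaborators via `UpdateBufferFile`.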
1914 fn update_local_worktree_buffers(
1915 &mut self,
1916 worktree_handle: ModelHandle<Worktree>,
1917 cx: &mut ModelContext<Self>,
1918 ) {
1919 let snapshot = worktree_handle.read(cx).snapshot();
1920 let mut buffers_to_delete = Vec::new();
1921 for (buffer_id, buffer) in &self.open_buffers {
1922 if let Some(buffer) = buffer.upgrade(cx) {
1923 buffer.update(cx, |buffer, cx| {
1924 if let Some(old_file) = File::from_dyn(buffer.file()) {
1925 if old_file.worktree != worktree_handle {
1926 return;
1927 }
1928
1929 let new_file = if let Some(entry) = old_file
1930 .entry_id
1931 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
1932 {
1933 File {
1934 is_local: true,
1935 entry_id: Some(entry.id),
1936 mtime: entry.mtime,
1937 path: entry.path.clone(),
1938 worktree: worktree_handle.clone(),
1939 }
1940 } else if let Some(entry) =
1941 snapshot.entry_for_path(old_file.path().as_ref())
1942 {
1943 File {
1944 is_local: true,
1945 entry_id: Some(entry.id),
1946 mtime: entry.mtime,
1947 path: entry.path.clone(),
1948 worktree: worktree_handle.clone(),
1949 }
1950 } else {
1951 File {
1952 is_local: true,
1953 entry_id: None,
1954 path: old_file.path().clone(),
1955 mtime: old_file.mtime(),
1956 worktree: worktree_handle.clone(),
1957 }
1958 };
1959
1960 if let Some(project_id) = self.remote_id() {
1961 self.client
1962 .send(proto::UpdateBufferFile {
1963 project_id,
1964 buffer_id: *buffer_id as u64,
1965 file: Some(new_file.to_proto()),
1966 })
1967 .log_err();
1968 }
1969 buffer.file_updated(Box::new(new_file), cx).detach();
1970 }
1971 });
1972 } else {
1973 buffers_to_delete.push(*buffer_id);
1974 }
1975 }
1976
1977 for buffer_id in buffers_to_delete {
1978 self.open_buffers.remove(&buffer_id);
1979 }
1980 }
1981
1982 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
1983 let new_active_entry = entry.and_then(|project_path| {
1984 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1985 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
1986 Some(ProjectEntry {
1987 worktree_id: project_path.worktree_id,
1988 entry_id: entry.id,
1989 })
1990 });
1991 if new_active_entry != self.active_entry {
1992 self.active_entry = new_active_entry;
1993 cx.emit(Event::ActiveEntryChanged(new_active_entry));
1994 }
1995 }
1996
1997 pub fn is_running_disk_based_diagnostics(&self) -> bool {
1998 self.language_servers_with_diagnostics_running > 0
1999 }
2000
2001 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2002 let mut summary = DiagnosticSummary::default();
2003 for (_, path_summary) in self.diagnostic_summaries(cx) {
2004 summary.error_count += path_summary.error_count;
2005 summary.warning_count += path_summary.warning_count;
2006 summary.info_count += path_summary.info_count;
2007 summary.hint_count += path_summary.hint_count;
2008 }
2009 summary
2010 }
2011
2012 pub fn diagnostic_summaries<'a>(
2013 &'a self,
2014 cx: &'a AppContext,
2015 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2016 self.worktrees(cx).flat_map(move |worktree| {
2017 let worktree = worktree.read(cx);
2018 let worktree_id = worktree.id();
2019 worktree
2020 .diagnostic_summaries()
2021 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2022 })
2023 }
2024
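    // `language_servers_with_diagnostics_running` counts the language servers currently
    // producing disk-based diagnostics: `DiskBasedDiagnosticsStarted` is emitted only
    // when the count rises from zero and `DiskBasedDiagnosticsFinished` only when it
    // returns to zero, while `DiskBasedDiagnosticsUpdated` is emitted on every finish.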
2025 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2026 self.language_servers_with_diagnostics_running += 1;
2027 if self.language_servers_with_diagnostics_running == 1 {
2028 cx.emit(Event::DiskBasedDiagnosticsStarted);
2029 }
2030 }
2031
2032 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2033 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2034 self.language_servers_with_diagnostics_running -= 1;
2035 if self.language_servers_with_diagnostics_running == 0 {
2036 cx.emit(Event::DiskBasedDiagnosticsFinished);
2037 }
2038 }
2039
2040 pub fn active_entry(&self) -> Option<ProjectEntry> {
2041 self.active_entry
2042 }
2043
2044 // RPC message handlers
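    //
    // Each handler below runs when the corresponding RPC message arrives for this
    // project. Host-side handlers look up buffers in `shared_buffers`, keyed by the
    // requesting peer, while guest-side handlers apply updates to the replicated state.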
2045
2046 async fn handle_unshare_project(
2047 this: ModelHandle<Self>,
2048 _: TypedEnvelope<proto::UnshareProject>,
2049 _: Arc<Client>,
2050 mut cx: AsyncAppContext,
2051 ) -> Result<()> {
2052 this.update(&mut cx, |this, cx| {
2053 if let ProjectClientState::Remote {
2054 sharing_has_stopped,
2055 ..
2056 } = &mut this.client_state
2057 {
2058 *sharing_has_stopped = true;
2059 this.collaborators.clear();
2060 cx.notify();
2061 } else {
2062 unreachable!()
2063 }
2064 });
2065
2066 Ok(())
2067 }
2068
2069 async fn handle_add_collaborator(
2070 this: ModelHandle<Self>,
2071 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2072 _: Arc<Client>,
2073 mut cx: AsyncAppContext,
2074 ) -> Result<()> {
2075 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2076 let collaborator = envelope
2077 .payload
2078 .collaborator
2079 .take()
2080 .ok_or_else(|| anyhow!("empty collaborator"))?;
2081
2082 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2083 this.update(&mut cx, |this, cx| {
2084 this.collaborators
2085 .insert(collaborator.peer_id, collaborator);
2086 cx.notify();
2087 });
2088
2089 Ok(())
2090 }
2091
2092 async fn handle_remove_collaborator(
2093 this: ModelHandle<Self>,
2094 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2095 _: Arc<Client>,
2096 mut cx: AsyncAppContext,
2097 ) -> Result<()> {
2098 this.update(&mut cx, |this, cx| {
2099 let peer_id = PeerId(envelope.payload.peer_id);
2100 let replica_id = this
2101 .collaborators
2102 .remove(&peer_id)
2103 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2104 .replica_id;
2105 this.shared_buffers.remove(&peer_id);
2106 for (_, buffer) in &this.open_buffers {
2107 if let Some(buffer) = buffer.upgrade(cx) {
2108 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2109 }
2110 }
2111 cx.notify();
2112 Ok(())
2113 })
2114 }
2115
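    // The host shared a worktree with us: create a remote replica of it and let it load
    // its contents in the background.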
2116 async fn handle_share_worktree(
2117 this: ModelHandle<Self>,
2118 envelope: TypedEnvelope<proto::ShareWorktree>,
2119 client: Arc<Client>,
2120 mut cx: AsyncAppContext,
2121 ) -> Result<()> {
2122 this.update(&mut cx, |this, cx| {
2123 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2124 let replica_id = this.replica_id();
2125 let worktree = envelope
2126 .payload
2127 .worktree
2128 .ok_or_else(|| anyhow!("invalid worktree"))?;
2129 let (worktree, load_task) =
2130 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2131 this.add_worktree(&worktree, cx);
2132 load_task.detach();
2133 Ok(())
2134 })
2135 }
2136
2137 async fn handle_unregister_worktree(
2138 this: ModelHandle<Self>,
2139 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2140 _: Arc<Client>,
2141 mut cx: AsyncAppContext,
2142 ) -> Result<()> {
2143 this.update(&mut cx, |this, cx| {
2144 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2145 this.remove_worktree(worktree_id, cx);
2146 Ok(())
2147 })
2148 }
2149
2150 async fn handle_update_worktree(
2151 this: ModelHandle<Self>,
2152 envelope: TypedEnvelope<proto::UpdateWorktree>,
2153 _: Arc<Client>,
2154 mut cx: AsyncAppContext,
2155 ) -> Result<()> {
2156 this.update(&mut cx, |this, cx| {
2157 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2158 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2159 worktree.update(cx, |worktree, _| {
2160 let worktree = worktree.as_remote_mut().unwrap();
2161 worktree.update_from_remote(envelope)
2162 })?;
2163 }
2164 Ok(())
2165 })
2166 }
2167
2168 async fn handle_update_diagnostic_summary(
2169 this: ModelHandle<Self>,
2170 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2171 _: Arc<Client>,
2172 mut cx: AsyncAppContext,
2173 ) -> Result<()> {
2174 this.update(&mut cx, |this, cx| {
2175 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2176 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2177 if let Some(summary) = envelope.payload.summary {
2178 let project_path = ProjectPath {
2179 worktree_id,
2180 path: Path::new(&summary.path).into(),
2181 };
2182 worktree.update(cx, |worktree, _| {
2183 worktree
2184 .as_remote_mut()
2185 .unwrap()
2186 .update_diagnostic_summary(project_path.path.clone(), &summary);
2187 });
2188 cx.emit(Event::DiagnosticsUpdated(project_path));
2189 }
2190 }
2191 Ok(())
2192 })
2193 }
2194
2195 async fn handle_disk_based_diagnostics_updating(
2196 this: ModelHandle<Self>,
2197 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2198 _: Arc<Client>,
2199 mut cx: AsyncAppContext,
2200 ) -> Result<()> {
2201 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2202 Ok(())
2203 }
2204
2205 async fn handle_disk_based_diagnostics_updated(
2206 this: ModelHandle<Self>,
2207 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2208 _: Arc<Client>,
2209 mut cx: AsyncAppContext,
2210 ) -> Result<()> {
2211 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2212 Ok(())
2213 }
2214
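    // Buffer operations can arrive before the corresponding buffer has finished opening
    // on this side. Apply them if the buffer is loaded, append them to a buffer that is
    // still loading, and otherwise, on guests that currently have buffers loading, stash
    // them under a new `Loading` entry until the buffer arrives.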
2215 async fn handle_update_buffer(
2216 this: ModelHandle<Self>,
2217 envelope: TypedEnvelope<proto::UpdateBuffer>,
2218 _: Arc<Client>,
2219 mut cx: AsyncAppContext,
2220 ) -> Result<()> {
2221 this.update(&mut cx, |this, cx| {
2222 let payload = envelope.payload.clone();
2223 let buffer_id = payload.buffer_id;
2224 let ops = payload
2225 .operations
2226 .into_iter()
2227                 .map(language::proto::deserialize_operation)
2228 .collect::<Result<Vec<_>, _>>()?;
2229 let is_remote = this.is_remote();
2230 match this.open_buffers.entry(buffer_id) {
2231 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2232 OpenBuffer::Loaded(buffer) => {
2233 if let Some(buffer) = buffer.upgrade(cx) {
2234 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2235 }
2236 }
2237 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2238 },
2239 hash_map::Entry::Vacant(e) => {
2240                     if is_remote && !this.loading_buffers.is_empty() {
2241                         e.insert(OpenBuffer::Loading(ops));
2242                     }
2243 }
2244 }
2245 Ok(())
2246 })
2247 }
2248
2249 async fn handle_update_buffer_file(
2250 this: ModelHandle<Self>,
2251 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2252 _: Arc<Client>,
2253 mut cx: AsyncAppContext,
2254 ) -> Result<()> {
2255 this.update(&mut cx, |this, cx| {
2256 let payload = envelope.payload.clone();
2257 let buffer_id = payload.buffer_id;
2258 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2259 let worktree = this
2260 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2261 .ok_or_else(|| anyhow!("no such worktree"))?;
2262 let file = File::from_proto(file, worktree.clone(), cx)?;
2263 let buffer = this
2264 .open_buffers
2265 .get_mut(&buffer_id)
2266 .and_then(|b| b.upgrade(cx))
2267 .ok_or_else(|| anyhow!("no such buffer"))?;
2268 buffer.update(cx, |buffer, cx| {
2269 buffer.file_updated(Box::new(file), cx).detach();
2270 });
2271 Ok(())
2272 })
2273 }
2274
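    // A guest asked us to save one of our buffers. Refuse if the requested version
    // contains edits we have not received yet; otherwise save the buffer and report the
    // saved version and mtime back to the guest.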
2275 async fn handle_save_buffer(
2276 this: ModelHandle<Self>,
2277 envelope: TypedEnvelope<proto::SaveBuffer>,
2278 _: Arc<Client>,
2279 mut cx: AsyncAppContext,
2280 ) -> Result<proto::BufferSaved> {
2281 let buffer_id = envelope.payload.buffer_id;
2282 let sender_id = envelope.original_sender_id()?;
2283 let requested_version = envelope.payload.version.try_into()?;
2284
2285 let (project_id, buffer) = this.update(&mut cx, |this, _| {
2286 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2287 let buffer = this
2288 .shared_buffers
2289 .get(&sender_id)
2290 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2291 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2292 Ok::<_, anyhow::Error>((project_id, buffer))
2293 })?;
2294
2295 if !buffer
2296 .read_with(&cx, |buffer, _| buffer.version())
2297 .observed_all(&requested_version)
2298 {
2299 Err(anyhow!("save request depends on unreceived edits"))?;
2300 }
2301
2302 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2303 Ok(proto::BufferSaved {
2304 project_id,
2305 buffer_id,
2306 version: (&saved_version).into(),
2307 mtime: Some(mtime.into()),
2308 })
2309 }
2310
2311 async fn handle_format_buffers(
2312 this: ModelHandle<Self>,
2313 envelope: TypedEnvelope<proto::FormatBuffers>,
2314 _: Arc<Client>,
2315 mut cx: AsyncAppContext,
2316 ) -> Result<proto::FormatBuffersResponse> {
2317 let sender_id = envelope.original_sender_id()?;
2318 let format = this.update(&mut cx, |this, cx| {
2319 let shared_buffers = this
2320 .shared_buffers
2321 .get(&sender_id)
2322 .ok_or_else(|| anyhow!("peer has no buffers"))?;
2323 let mut buffers = HashSet::default();
2324 for buffer_id in &envelope.payload.buffer_ids {
2325 buffers.insert(
2326 shared_buffers
2327 .get(buffer_id)
2328 .cloned()
2329 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2330 );
2331 }
2332 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2333 })?;
2334
2335 let project_transaction = format.await?;
2336 let project_transaction = this.update(&mut cx, |this, cx| {
2337 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2338 });
2339 Ok(proto::FormatBuffersResponse {
2340 transaction: Some(project_transaction),
2341 })
2342 }
2343
2344 async fn handle_get_completions(
2345 this: ModelHandle<Self>,
2346 envelope: TypedEnvelope<proto::GetCompletions>,
2347 _: Arc<Client>,
2348 mut cx: AsyncAppContext,
2349 ) -> Result<proto::GetCompletionsResponse> {
2350 let sender_id = envelope.original_sender_id()?;
2351 let position = envelope
2352 .payload
2353 .position
2354 .and_then(language::proto::deserialize_anchor)
2355 .ok_or_else(|| anyhow!("invalid position"))?;
2356 let version = clock::Global::from(envelope.payload.version);
2357 let buffer = this.read_with(&cx, |this, _| {
2358 this.shared_buffers
2359 .get(&sender_id)
2360 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2361 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2362 })?;
2363 if !buffer
2364 .read_with(&cx, |buffer, _| buffer.version())
2365 .observed_all(&version)
2366 {
2367 Err(anyhow!("completion request depends on unreceived edits"))?;
2368 }
2369 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2370 let completions = this
2371 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2372 .await?;
2373
2374 Ok(proto::GetCompletionsResponse {
2375 completions: completions
2376 .iter()
2377 .map(language::proto::serialize_completion)
2378 .collect(),
2379 version: (&version).into(),
2380 })
2381 }
2382
2383 async fn handle_apply_additional_edits_for_completion(
2384 this: ModelHandle<Self>,
2385 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2386 _: Arc<Client>,
2387 mut cx: AsyncAppContext,
2388 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2389 let sender_id = envelope.original_sender_id()?;
2390 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2391 let buffer = this
2392 .shared_buffers
2393 .get(&sender_id)
2394 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2395 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2396 let language = buffer.read(cx).language();
2397 let completion = language::proto::deserialize_completion(
2398 envelope
2399 .payload
2400 .completion
2401 .ok_or_else(|| anyhow!("invalid completion"))?,
2402 language,
2403 )?;
2404 Ok::<_, anyhow::Error>(
2405 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2406 )
2407 })?;
2408
2409 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2410 transaction: apply_additional_edits
2411 .await?
2412 .as_ref()
2413 .map(language::proto::serialize_transaction),
2414 })
2415 }
2416
2417 async fn handle_get_code_actions(
2418 this: ModelHandle<Self>,
2419 envelope: TypedEnvelope<proto::GetCodeActions>,
2420 _: Arc<Client>,
2421 mut cx: AsyncAppContext,
2422 ) -> Result<proto::GetCodeActionsResponse> {
2423 let sender_id = envelope.original_sender_id()?;
2424 let start = envelope
2425 .payload
2426 .start
2427 .and_then(language::proto::deserialize_anchor)
2428 .ok_or_else(|| anyhow!("invalid start"))?;
2429 let end = envelope
2430 .payload
2431 .end
2432 .and_then(language::proto::deserialize_anchor)
2433 .ok_or_else(|| anyhow!("invalid end"))?;
2434 let buffer = this.update(&mut cx, |this, _| {
2435 this.shared_buffers
2436 .get(&sender_id)
2437 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2438 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2439 })?;
2440 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2441 if !version.observed(start.timestamp) || !version.observed(end.timestamp) {
2442 Err(anyhow!("code action request references unreceived edits"))?;
2443 }
2444 let code_actions = this.update(&mut cx, |this, cx| {
2445 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2446 })?;
2447
2448 Ok(proto::GetCodeActionsResponse {
2449 actions: code_actions
2450 .await?
2451 .iter()
2452 .map(language::proto::serialize_code_action)
2453 .collect(),
2454 version: (&version).into(),
2455 })
2456 }
2457
2458 async fn handle_apply_code_action(
2459 this: ModelHandle<Self>,
2460 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2461 _: Arc<Client>,
2462 mut cx: AsyncAppContext,
2463 ) -> Result<proto::ApplyCodeActionResponse> {
2464 let sender_id = envelope.original_sender_id()?;
2465 let action = language::proto::deserialize_code_action(
2466 envelope
2467 .payload
2468 .action
2469 .ok_or_else(|| anyhow!("invalid action"))?,
2470 )?;
2471 let apply_code_action = this.update(&mut cx, |this, cx| {
2472 let buffer = this
2473 .shared_buffers
2474 .get(&sender_id)
2475 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2476 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2477 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2478 })?;
2479
2480 let project_transaction = apply_code_action.await?;
2481 let project_transaction = this.update(&mut cx, |this, cx| {
2482 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2483 });
2484 Ok(proto::ApplyCodeActionResponse {
2485 transaction: Some(project_transaction),
2486 })
2487 }
2488
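    // Generic handler for LSP-backed requests forwarded by guests (see `lsp_command`).
    // The request is deserialized against the shared buffer, dispatched through
    // `request_lsp`, and the response is serialized back using the buffer version
    // captured when the request was made.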
2489 async fn handle_lsp_command<T: LspCommand>(
2490 this: ModelHandle<Self>,
2491 envelope: TypedEnvelope<T::ProtoRequest>,
2492 _: Arc<Client>,
2493 mut cx: AsyncAppContext,
2494 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
2495 where
2496 <T::LspRequest as lsp::request::Request>::Result: Send,
2497 {
2498 let sender_id = envelope.original_sender_id()?;
2499 let (request, buffer_version) = this.update(&mut cx, |this, cx| {
2500 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
2501 let buffer_handle = this
2502 .shared_buffers
2503 .get(&sender_id)
2504 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2505 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2506 let buffer = buffer_handle.read(cx);
2507 let buffer_version = buffer.version();
2508 let request = T::from_proto(envelope.payload, this, buffer)?;
2509 Ok::<_, anyhow::Error>((this.request_lsp(buffer_handle, request, cx), buffer_version))
2510 })?;
2511 let response = request.await?;
2512 this.update(&mut cx, |this, cx| {
2513 Ok(T::response_to_proto(
2514 response,
2515 this,
2516 sender_id,
2517 &buffer_version,
2518 cx,
2519 ))
2520 })
2521 }
2522
2523 async fn handle_open_buffer(
2524 this: ModelHandle<Self>,
2525 envelope: TypedEnvelope<proto::OpenBuffer>,
2526 _: Arc<Client>,
2527 mut cx: AsyncAppContext,
2528 ) -> anyhow::Result<proto::OpenBufferResponse> {
2529 let peer_id = envelope.original_sender_id()?;
2530 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2531 let open_buffer = this.update(&mut cx, |this, cx| {
2532 this.open_buffer(
2533 ProjectPath {
2534 worktree_id,
2535 path: PathBuf::from(envelope.payload.path).into(),
2536 },
2537 cx,
2538 )
2539 });
2540
2541 let buffer = open_buffer.await?;
2542 this.update(&mut cx, |this, cx| {
2543 Ok(proto::OpenBufferResponse {
2544 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
2545 })
2546 })
2547 }
2548
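    // Convert a `ProjectTransaction` into its wire representation for a specific peer,
    // pairing each affected buffer (serialized for that peer) with its transaction.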
2549 fn serialize_project_transaction_for_peer(
2550 &mut self,
2551 project_transaction: ProjectTransaction,
2552 peer_id: PeerId,
2553 cx: &AppContext,
2554 ) -> proto::ProjectTransaction {
2555 let mut serialized_transaction = proto::ProjectTransaction {
2556 buffers: Default::default(),
2557 transactions: Default::default(),
2558 };
2559 for (buffer, transaction) in project_transaction.0 {
2560 serialized_transaction
2561 .buffers
2562 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
2563 serialized_transaction
2564 .transactions
2565 .push(language::proto::serialize_transaction(&transaction));
2566 }
2567 serialized_transaction
2568 }
2569
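    // Reconstruct a `ProjectTransaction` received from a collaborator: resolve each
    // buffer, wait until the edits referenced by its transaction have been applied
    // locally, and optionally push the transaction onto the buffer's undo history.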
2570 fn deserialize_project_transaction(
2571 &mut self,
2572 message: proto::ProjectTransaction,
2573 push_to_history: bool,
2574 cx: &mut ModelContext<Self>,
2575 ) -> Task<Result<ProjectTransaction>> {
2576 cx.spawn(|this, mut cx| async move {
2577 let mut project_transaction = ProjectTransaction::default();
2578 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
2579 let buffer = this
2580 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2581 .await?;
2582 let transaction = language::proto::deserialize_transaction(transaction)?;
2583 project_transaction.0.insert(buffer, transaction);
2584 }
2585 for (buffer, transaction) in &project_transaction.0 {
2586 buffer
2587 .update(&mut cx, |buffer, _| {
2588 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2589 })
2590 .await;
2591
2592 if push_to_history {
2593 buffer.update(&mut cx, |buffer, _| {
2594 buffer.push_transaction(transaction.clone(), Instant::now());
2595 });
2596 }
2597 }
2598
2599 Ok(project_transaction)
2600 })
2601 }
2602
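    // Record that `buffer` is shared with `peer_id`. The first time a buffer is sent to
    // a given peer its full state is serialized; subsequent references send only its id.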
2603 fn serialize_buffer_for_peer(
2604 &mut self,
2605 buffer: &ModelHandle<Buffer>,
2606 peer_id: PeerId,
2607 cx: &AppContext,
2608 ) -> proto::Buffer {
2609 let buffer_id = buffer.read(cx).remote_id();
2610 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2611 match shared_buffers.entry(buffer_id) {
2612 hash_map::Entry::Occupied(_) => proto::Buffer {
2613 variant: Some(proto::buffer::Variant::Id(buffer_id)),
2614 },
2615 hash_map::Entry::Vacant(entry) => {
2616 entry.insert(buffer.clone());
2617 proto::Buffer {
2618 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
2619 }
2620 }
2621 }
2622 }
2623
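    // Resolve a buffer received over the wire. A bare id refers to a buffer this project
    // already knows about, so wait (via the `opened_buffer` channel) until it shows up
    // in `open_buffers`. A full state message constructs the buffer locally, wires up
    // its file and worktree, and registers it with the project.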
2624 fn deserialize_buffer(
2625 &mut self,
2626 buffer: proto::Buffer,
2627 cx: &mut ModelContext<Self>,
2628 ) -> Task<Result<ModelHandle<Buffer>>> {
2629 let replica_id = self.replica_id();
2630
2631 let mut opened_buffer_tx = self.opened_buffer.clone();
2632 let mut opened_buffer_rx = self.opened_buffer.subscribe();
2633 cx.spawn(|this, mut cx| async move {
2634 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
2635 proto::buffer::Variant::Id(id) => {
2636 let buffer = loop {
2637 let buffer = this.read_with(&cx, |this, cx| {
2638 this.open_buffers
2639 .get(&id)
2640 .and_then(|buffer| buffer.upgrade(cx))
2641 });
2642 if let Some(buffer) = buffer {
2643 break buffer;
2644 }
2645 opened_buffer_rx
2646 .recv()
2647 .await
2648 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
2649 };
2650 Ok(buffer)
2651 }
2652 proto::buffer::Variant::State(mut buffer) => {
2653 let mut buffer_worktree = None;
2654 let mut buffer_file = None;
2655 if let Some(file) = buffer.file.take() {
2656 this.read_with(&cx, |this, cx| {
2657 let worktree_id = WorktreeId::from_proto(file.worktree_id);
2658 let worktree =
2659 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
2660 anyhow!("no worktree found for id {}", file.worktree_id)
2661 })?;
2662 buffer_file =
2663 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
2664 as Box<dyn language::File>);
2665 buffer_worktree = Some(worktree);
2666 Ok::<_, anyhow::Error>(())
2667 })?;
2668 }
2669
2670 let buffer = cx.add_model(|cx| {
2671 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
2672 });
2673 this.update(&mut cx, |this, cx| {
2674 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
2675 })?;
2676
2677 let _ = opened_buffer_tx.send(()).await;
2678 Ok(buffer)
2679 }
2680 }
2681 })
2682 }
2683
2684 async fn handle_close_buffer(
2685 this: ModelHandle<Self>,
2686 envelope: TypedEnvelope<proto::CloseBuffer>,
2687 _: Arc<Client>,
2688 mut cx: AsyncAppContext,
2689 ) -> anyhow::Result<()> {
2690 this.update(&mut cx, |this, cx| {
2691 if let Some(shared_buffers) =
2692 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
2693 {
2694 shared_buffers.remove(&envelope.payload.buffer_id);
2695 cx.notify();
2696 }
2697 Ok(())
2698 })
2699 }
2700
2701 async fn handle_buffer_saved(
2702 this: ModelHandle<Self>,
2703 envelope: TypedEnvelope<proto::BufferSaved>,
2704 _: Arc<Client>,
2705 mut cx: AsyncAppContext,
2706 ) -> Result<()> {
2707 let version = envelope.payload.version.try_into()?;
2708 let mtime = envelope
2709 .payload
2710 .mtime
2711 .ok_or_else(|| anyhow!("missing mtime"))?
2712 .into();
2713
2714 this.update(&mut cx, |this, cx| {
2715 let buffer = this
2716 .open_buffers
2717 .get(&envelope.payload.buffer_id)
2718 .and_then(|buffer| buffer.upgrade(cx));
2719 if let Some(buffer) = buffer {
2720 buffer.update(cx, |buffer, cx| {
2721 buffer.did_save(version, mtime, None, cx);
2722 });
2723 }
2724 Ok(())
2725 })
2726 }
2727
2728 async fn handle_buffer_reloaded(
2729 this: ModelHandle<Self>,
2730 envelope: TypedEnvelope<proto::BufferReloaded>,
2731 _: Arc<Client>,
2732 mut cx: AsyncAppContext,
2733 ) -> Result<()> {
2734 let payload = envelope.payload.clone();
2735 let version = payload.version.try_into()?;
2736 let mtime = payload
2737 .mtime
2738 .ok_or_else(|| anyhow!("missing mtime"))?
2739 .into();
2740 this.update(&mut cx, |this, cx| {
2741 let buffer = this
2742 .open_buffers
2743 .get(&payload.buffer_id)
2744 .and_then(|buffer| buffer.upgrade(cx));
2745 if let Some(buffer) = buffer {
2746 buffer.update(cx, |buffer, cx| {
2747 buffer.did_reload(version, mtime, cx);
2748 });
2749 }
2750 Ok(())
2751 })
2752 }
2753
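    // Fuzzy-match `query` against the paths of all non-weak worktrees, running the
    // matching itself on the background executor. Paths are prefixed with their
    // worktree's root name when more than one worktree is searched, or when a worktree
    // consists of a single file.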
2754 pub fn match_paths<'a>(
2755 &self,
2756 query: &'a str,
2757 include_ignored: bool,
2758 smart_case: bool,
2759 max_results: usize,
2760 cancel_flag: &'a AtomicBool,
2761 cx: &AppContext,
2762 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
2763 let worktrees = self
2764 .worktrees(cx)
2765 .filter(|worktree| !worktree.read(cx).is_weak())
2766 .collect::<Vec<_>>();
2767 let include_root_name = worktrees.len() > 1;
2768 let candidate_sets = worktrees
2769 .into_iter()
2770 .map(|worktree| CandidateSet {
2771 snapshot: worktree.read(cx).snapshot(),
2772 include_ignored,
2773 include_root_name,
2774 })
2775 .collect::<Vec<_>>();
2776
2777 let background = cx.background().clone();
2778 async move {
2779 fuzzy::match_paths(
2780 candidate_sets.as_slice(),
2781 query,
2782 smart_case,
2783 max_results,
2784 cancel_flag,
2785 background,
2786 )
2787 .await
2788 }
2789 }
2790}
2791
2792impl WorktreeHandle {
2793 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2794 match self {
2795 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2796 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2797 }
2798 }
2799}
2800
2801impl OpenBuffer {
2802 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
2803 match self {
2804 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
2805 OpenBuffer::Loading(_) => None,
2806 }
2807 }
2808}
2809
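// Adapts a worktree snapshot to the fuzzy matcher's `PathMatchCandidateSet` interface,
// optionally including ignored files and the worktree's root name as a prefix.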
2810struct CandidateSet {
2811 snapshot: Snapshot,
2812 include_ignored: bool,
2813 include_root_name: bool,
2814}
2815
2816impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
2817 type Candidates = CandidateSetIter<'a>;
2818
2819 fn id(&self) -> usize {
2820 self.snapshot.id().to_usize()
2821 }
2822
2823 fn len(&self) -> usize {
2824 if self.include_ignored {
2825 self.snapshot.file_count()
2826 } else {
2827 self.snapshot.visible_file_count()
2828 }
2829 }
2830
2831 fn prefix(&self) -> Arc<str> {
2832 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
2833 self.snapshot.root_name().into()
2834 } else if self.include_root_name {
2835 format!("{}/", self.snapshot.root_name()).into()
2836 } else {
2837 "".into()
2838 }
2839 }
2840
2841 fn candidates(&'a self, start: usize) -> Self::Candidates {
2842 CandidateSetIter {
2843 traversal: self.snapshot.files(self.include_ignored, start),
2844 }
2845 }
2846}
2847
2848struct CandidateSetIter<'a> {
2849 traversal: Traversal<'a>,
2850}
2851
2852impl<'a> Iterator for CandidateSetIter<'a> {
2853 type Item = PathMatchCandidate<'a>;
2854
2855 fn next(&mut self) -> Option<Self::Item> {
2856 self.traversal.next().map(|entry| {
2857 if let EntryKind::File(char_bag) = entry.kind {
2858 PathMatchCandidate {
2859 path: &entry.path,
2860 char_bag,
2861 }
2862 } else {
2863 unreachable!()
2864 }
2865 })
2866 }
2867}
2868
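// When the project is released, tell the server to unregister it (if we were hosting a
// registered project) or that we are leaving it (if we were a guest). When the app
// quits, shut down any language servers that are still running.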
2869impl Entity for Project {
2870 type Event = Event;
2871
2872 fn release(&mut self, _: &mut gpui::MutableAppContext) {
2873 match &self.client_state {
2874 ProjectClientState::Local { remote_id_rx, .. } => {
2875 if let Some(project_id) = *remote_id_rx.borrow() {
2876 self.client
2877 .send(proto::UnregisterProject { project_id })
2878 .log_err();
2879 }
2880 }
2881 ProjectClientState::Remote { remote_id, .. } => {
2882 self.client
2883 .send(proto::LeaveProject {
2884 project_id: *remote_id,
2885 })
2886 .log_err();
2887 }
2888 }
2889 }
2890
2891 fn app_will_quit(
2892 &mut self,
2893 _: &mut MutableAppContext,
2894 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
2895 let shutdown_futures = self
2896 .language_servers
2897 .drain()
2898 .filter_map(|(_, server)| server.shutdown())
2899 .collect::<Vec<_>>();
2900 Some(
2901 async move {
2902 futures::future::join_all(shutdown_futures).await;
2903 }
2904 .boxed(),
2905 )
2906 }
2907}
2908
2909impl Collaborator {
2910 fn from_proto(
2911 message: proto::Collaborator,
2912 user_store: &ModelHandle<UserStore>,
2913 cx: &mut AsyncAppContext,
2914 ) -> impl Future<Output = Result<Self>> {
2915 let user = user_store.update(cx, |user_store, cx| {
2916 user_store.fetch_user(message.user_id, cx)
2917 });
2918
2919 async move {
2920 Ok(Self {
2921 peer_id: PeerId(message.peer_id),
2922 user: user.await?,
2923 replica_id: message.replica_id as ReplicaId,
2924 })
2925 }
2926 }
2927}
2928
2929impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
2930 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
2931 Self {
2932 worktree_id,
2933 path: path.as_ref().into(),
2934 }
2935 }
2936}
2937
2938impl From<lsp::CreateFileOptions> for fs::CreateOptions {
2939 fn from(options: lsp::CreateFileOptions) -> Self {
2940 Self {
2941 overwrite: options.overwrite.unwrap_or(false),
2942 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2943 }
2944 }
2945}
2946
2947impl From<lsp::RenameFileOptions> for fs::RenameOptions {
2948 fn from(options: lsp::RenameFileOptions) -> Self {
2949 Self {
2950 overwrite: options.overwrite.unwrap_or(false),
2951 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2952 }
2953 }
2954}
2955
2956impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
2957 fn from(options: lsp::DeleteFileOptions) -> Self {
2958 Self {
2959 recursive: options.recursive.unwrap_or(false),
2960 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
2961 }
2962 }
2963}
2964
2965#[cfg(test)]
2966mod tests {
2967 use super::{Event, *};
2968 use fs::RealFs;
2969 use futures::StreamExt;
2970 use gpui::test::subscribe;
2971 use language::{
2972 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
2973 };
2974 use lsp::Url;
2975 use serde_json::json;
2976 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
2977 use unindent::Unindent as _;
2978 use util::test::temp_tree;
2979 use worktree::WorktreeHandle as _;
2980
2981 #[gpui::test]
2982 async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
2983 let dir = temp_tree(json!({
2984 "root": {
2985 "apple": "",
2986 "banana": {
2987 "carrot": {
2988 "date": "",
2989 "endive": "",
2990 }
2991 },
2992 "fennel": {
2993 "grape": "",
2994 }
2995 }
2996 }));
2997
2998 let root_link_path = dir.path().join("root_link");
2999 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3000 unix::fs::symlink(
3001 &dir.path().join("root/fennel"),
3002 &dir.path().join("root/finnochio"),
3003 )
3004 .unwrap();
3005
3006 let project = Project::test(Arc::new(RealFs), &mut cx);
3007
3008 let (tree, _) = project
3009 .update(&mut cx, |project, cx| {
3010 project.find_or_create_local_worktree(&root_link_path, false, cx)
3011 })
3012 .await
3013 .unwrap();
3014
3015 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3016 .await;
3017 cx.read(|cx| {
3018 let tree = tree.read(cx);
3019 assert_eq!(tree.file_count(), 5);
3020 assert_eq!(
3021 tree.inode_for_path("fennel/grape"),
3022 tree.inode_for_path("finnochio/grape")
3023 );
3024 });
3025
3026 let cancel_flag = Default::default();
3027 let results = project
3028 .read_with(&cx, |project, cx| {
3029 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3030 })
3031 .await;
3032 assert_eq!(
3033 results
3034 .into_iter()
3035 .map(|result| result.path)
3036 .collect::<Vec<Arc<Path>>>(),
3037 vec![
3038 PathBuf::from("banana/carrot/date").into(),
3039 PathBuf::from("banana/carrot/endive").into(),
3040 ]
3041 );
3042 }
3043
3044 #[gpui::test]
3045 async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
3046 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3047 let progress_token = language_server_config
3048 .disk_based_diagnostics_progress_token
3049 .clone()
3050 .unwrap();
3051
3052 let language = Arc::new(Language::new(
3053 LanguageConfig {
3054 name: "Rust".to_string(),
3055 path_suffixes: vec!["rs".to_string()],
3056 language_server: Some(language_server_config),
3057 ..Default::default()
3058 },
3059 Some(tree_sitter_rust::language()),
3060 ));
3061
3062 let fs = FakeFs::new(cx.background());
3063 fs.insert_tree(
3064 "/dir",
3065 json!({
3066 "a.rs": "fn a() { A }",
3067 "b.rs": "const y: i32 = 1",
3068 }),
3069 )
3070 .await;
3071
3072 let project = Project::test(fs, &mut cx);
3073 project.update(&mut cx, |project, _| {
3074 Arc::get_mut(&mut project.languages).unwrap().add(language);
3075 });
3076
3077 let (tree, _) = project
3078 .update(&mut cx, |project, cx| {
3079 project.find_or_create_local_worktree("/dir", false, cx)
3080 })
3081 .await
3082 .unwrap();
3083 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3084
3085 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3086 .await;
3087
3088 // Cause worktree to start the fake language server
3089 let _buffer = project
3090 .update(&mut cx, |project, cx| {
3091 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3092 })
3093 .await
3094 .unwrap();
3095
3096 let mut events = subscribe(&project, &mut cx);
3097
3098 let mut fake_server = fake_servers.next().await.unwrap();
3099 fake_server.start_progress(&progress_token).await;
3100 assert_eq!(
3101 events.next().await.unwrap(),
3102 Event::DiskBasedDiagnosticsStarted
3103 );
3104
3105 fake_server.start_progress(&progress_token).await;
3106 fake_server.end_progress(&progress_token).await;
3107 fake_server.start_progress(&progress_token).await;
3108
3109 fake_server
3110 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3111 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3112 version: None,
3113 diagnostics: vec![lsp::Diagnostic {
3114 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3115 severity: Some(lsp::DiagnosticSeverity::ERROR),
3116 message: "undefined variable 'A'".to_string(),
3117 ..Default::default()
3118 }],
3119 })
3120 .await;
3121 assert_eq!(
3122 events.next().await.unwrap(),
3123 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3124 );
3125
3126 fake_server.end_progress(&progress_token).await;
3127 fake_server.end_progress(&progress_token).await;
3128 assert_eq!(
3129 events.next().await.unwrap(),
3130 Event::DiskBasedDiagnosticsUpdated
3131 );
3132 assert_eq!(
3133 events.next().await.unwrap(),
3134 Event::DiskBasedDiagnosticsFinished
3135 );
3136
3137 let buffer = project
3138 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3139 .await
3140 .unwrap();
3141
3142 buffer.read_with(&cx, |buffer, _| {
3143 let snapshot = buffer.snapshot();
3144 let diagnostics = snapshot
3145 .diagnostics_in_range::<_, Point>(0..buffer.len())
3146 .collect::<Vec<_>>();
3147 assert_eq!(
3148 diagnostics,
3149 &[DiagnosticEntry {
3150 range: Point::new(0, 9)..Point::new(0, 10),
3151 diagnostic: Diagnostic {
3152 severity: lsp::DiagnosticSeverity::ERROR,
3153 message: "undefined variable 'A'".to_string(),
3154 group_id: 0,
3155 is_primary: true,
3156 ..Default::default()
3157 }
3158 }]
3159 )
3160 });
3161 }
3162
3163 #[gpui::test]
3164 async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
3165 let dir = temp_tree(json!({
3166 "root": {
3167 "dir1": {},
3168 "dir2": {
3169 "dir3": {}
3170 }
3171 }
3172 }));
3173
3174 let project = Project::test(Arc::new(RealFs), &mut cx);
3175 let (tree, _) = project
3176 .update(&mut cx, |project, cx| {
3177 project.find_or_create_local_worktree(&dir.path(), false, cx)
3178 })
3179 .await
3180 .unwrap();
3181
3182 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3183 .await;
3184
3185 let cancel_flag = Default::default();
3186 let results = project
3187 .read_with(&cx, |project, cx| {
3188 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3189 })
3190 .await;
3191
3192 assert!(results.is_empty());
3193 }
3194
3195 #[gpui::test]
3196 async fn test_definition(mut cx: gpui::TestAppContext) {
3197 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3198 let language = Arc::new(Language::new(
3199 LanguageConfig {
3200 name: "Rust".to_string(),
3201 path_suffixes: vec!["rs".to_string()],
3202 language_server: Some(language_server_config),
3203 ..Default::default()
3204 },
3205 Some(tree_sitter_rust::language()),
3206 ));
3207
3208 let fs = FakeFs::new(cx.background());
3209 fs.insert_tree(
3210 "/dir",
3211 json!({
3212 "a.rs": "const fn a() { A }",
3213 "b.rs": "const y: i32 = crate::a()",
3214 }),
3215 )
3216 .await;
3217
3218 let project = Project::test(fs, &mut cx);
3219 project.update(&mut cx, |project, _| {
3220 Arc::get_mut(&mut project.languages).unwrap().add(language);
3221 });
3222
3223 let (tree, _) = project
3224 .update(&mut cx, |project, cx| {
3225 project.find_or_create_local_worktree("/dir/b.rs", false, cx)
3226 })
3227 .await
3228 .unwrap();
3229 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3230 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3231 .await;
3232
3233 let buffer = project
3234 .update(&mut cx, |project, cx| {
3235 project.open_buffer(
3236 ProjectPath {
3237 worktree_id,
3238 path: Path::new("").into(),
3239 },
3240 cx,
3241 )
3242 })
3243 .await
3244 .unwrap();
3245
3246 let mut fake_server = fake_servers.next().await.unwrap();
3247 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params| {
3248 let params = params.text_document_position_params;
3249 assert_eq!(
3250 params.text_document.uri.to_file_path().unwrap(),
3251 Path::new("/dir/b.rs"),
3252 );
3253 assert_eq!(params.position, lsp::Position::new(0, 22));
3254
3255 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3256 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
3257 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3258 )))
3259 });
3260
3261 let mut definitions = project
3262 .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
3263 .await
3264 .unwrap();
3265
3266 assert_eq!(definitions.len(), 1);
3267 let definition = definitions.pop().unwrap();
3268 cx.update(|cx| {
3269 let target_buffer = definition.target_buffer.read(cx);
3270 assert_eq!(
3271 target_buffer
3272 .file()
3273 .unwrap()
3274 .as_local()
3275 .unwrap()
3276 .abs_path(cx),
3277 Path::new("/dir/a.rs"),
3278 );
3279 assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
3280 assert_eq!(
3281 list_worktrees(&project, cx),
3282 [("/dir/b.rs".as_ref(), false), ("/dir/a.rs".as_ref(), true)]
3283 );
3284
3285 drop(definition);
3286 });
3287 cx.read(|cx| {
3288 assert_eq!(
3289 list_worktrees(&project, cx),
3290 [("/dir/b.rs".as_ref(), false)]
3291 );
3292 });
3293
3294 fn list_worktrees<'a>(
3295 project: &'a ModelHandle<Project>,
3296 cx: &'a AppContext,
3297 ) -> Vec<(&'a Path, bool)> {
3298 project
3299 .read(cx)
3300 .worktrees(cx)
3301 .map(|worktree| {
3302 let worktree = worktree.read(cx);
3303 (
3304 worktree.as_local().unwrap().abs_path().as_ref(),
3305 worktree.is_weak(),
3306 )
3307 })
3308 .collect::<Vec<_>>()
3309 }
3310 }
3311
3312 #[gpui::test]
3313 async fn test_save_file(mut cx: gpui::TestAppContext) {
3314 let fs = FakeFs::new(cx.background());
3315 fs.insert_tree(
3316 "/dir",
3317 json!({
3318 "file1": "the old contents",
3319 }),
3320 )
3321 .await;
3322
3323 let project = Project::test(fs.clone(), &mut cx);
3324 let worktree_id = project
3325 .update(&mut cx, |p, cx| {
3326 p.find_or_create_local_worktree("/dir", false, cx)
3327 })
3328 .await
3329 .unwrap()
3330 .0
3331 .read_with(&cx, |tree, _| tree.id());
3332
3333 let buffer = project
3334 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3335 .await
3336 .unwrap();
3337 buffer
3338 .update(&mut cx, |buffer, cx| {
3339 assert_eq!(buffer.text(), "the old contents");
3340 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3341 buffer.save(cx)
3342 })
3343 .await
3344 .unwrap();
3345
3346 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3347 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3348 }
3349
3350 #[gpui::test]
3351 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
3352 let fs = FakeFs::new(cx.background());
3353 fs.insert_tree(
3354 "/dir",
3355 json!({
3356 "file1": "the old contents",
3357 }),
3358 )
3359 .await;
3360
3361 let project = Project::test(fs.clone(), &mut cx);
3362 let worktree_id = project
3363 .update(&mut cx, |p, cx| {
3364 p.find_or_create_local_worktree("/dir/file1", false, cx)
3365 })
3366 .await
3367 .unwrap()
3368 .0
3369 .read_with(&cx, |tree, _| tree.id());
3370
3371 let buffer = project
3372 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3373 .await
3374 .unwrap();
3375 buffer
3376 .update(&mut cx, |buffer, cx| {
3377 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3378 buffer.save(cx)
3379 })
3380 .await
3381 .unwrap();
3382
3383 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3384 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3385 }
3386
3387 #[gpui::test(retries = 5)]
3388 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3389 let dir = temp_tree(json!({
3390 "a": {
3391 "file1": "",
3392 "file2": "",
3393 "file3": "",
3394 },
3395 "b": {
3396 "c": {
3397 "file4": "",
3398 "file5": "",
3399 }
3400 }
3401 }));
3402
3403 let project = Project::test(Arc::new(RealFs), &mut cx);
3404 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3405
3406 let (tree, _) = project
3407 .update(&mut cx, |p, cx| {
3408 p.find_or_create_local_worktree(dir.path(), false, cx)
3409 })
3410 .await
3411 .unwrap();
3412 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3413
3414 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3415 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3416 async move { buffer.await.unwrap() }
3417 };
3418 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3419 tree.read_with(cx, |tree, _| {
3420 tree.entry_for_path(path)
3421 .expect(&format!("no entry for path {}", path))
3422 .id
3423 })
3424 };
3425
3426 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3427 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3428 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3429 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3430
3431 let file2_id = id_for_path("a/file2", &cx);
3432 let file3_id = id_for_path("a/file3", &cx);
3433 let file4_id = id_for_path("b/c/file4", &cx);
3434
3435 // Wait for the initial scan.
3436 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3437 .await;
3438
3439 // Create a remote copy of this worktree.
3440 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
3441 let (remote, load_task) = cx.update(|cx| {
3442 Worktree::remote(
3443 1,
3444 1,
3445 initial_snapshot.to_proto(&Default::default(), Default::default()),
3446 rpc.clone(),
3447 cx,
3448 )
3449 });
3450 load_task.await;
3451
3452 cx.read(|cx| {
3453 assert!(!buffer2.read(cx).is_dirty());
3454 assert!(!buffer3.read(cx).is_dirty());
3455 assert!(!buffer4.read(cx).is_dirty());
3456 assert!(!buffer5.read(cx).is_dirty());
3457 });
3458
3459 // Rename and delete files and directories.
3460 tree.flush_fs_events(&cx).await;
3461 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3462 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3463 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3464 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3465 tree.flush_fs_events(&cx).await;
3466
3467 let expected_paths = vec![
3468 "a",
3469 "a/file1",
3470 "a/file2.new",
3471 "b",
3472 "d",
3473 "d/file3",
3474 "d/file4",
3475 ];
3476
3477 cx.read(|app| {
3478 assert_eq!(
3479 tree.read(app)
3480 .paths()
3481 .map(|p| p.to_str().unwrap())
3482 .collect::<Vec<_>>(),
3483 expected_paths
3484 );
3485
3486 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3487 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3488 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3489
3490 assert_eq!(
3491 buffer2.read(app).file().unwrap().path().as_ref(),
3492 Path::new("a/file2.new")
3493 );
3494 assert_eq!(
3495 buffer3.read(app).file().unwrap().path().as_ref(),
3496 Path::new("d/file3")
3497 );
3498 assert_eq!(
3499 buffer4.read(app).file().unwrap().path().as_ref(),
3500 Path::new("d/file4")
3501 );
3502 assert_eq!(
3503 buffer5.read(app).file().unwrap().path().as_ref(),
3504 Path::new("b/c/file5")
3505 );
3506
3507 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3508 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3509 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3510 assert!(buffer5.read(app).file().unwrap().is_deleted());
3511 });
3512
3513 // Update the remote worktree. Check that it becomes consistent with the
3514 // local worktree.
3515 remote.update(&mut cx, |remote, cx| {
3516 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
3517 &initial_snapshot,
3518 1,
3519 1,
3520 0,
3521 true,
3522 );
3523 remote
3524 .as_remote_mut()
3525 .unwrap()
3526 .snapshot
3527 .apply_remote_update(update_message)
3528 .unwrap();
3529
3530 assert_eq!(
3531 remote
3532 .paths()
3533 .map(|p| p.to_str().unwrap())
3534 .collect::<Vec<_>>(),
3535 expected_paths
3536 );
3537 });
3538 }
3539
3540 #[gpui::test]
3541 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3542 let fs = FakeFs::new(cx.background());
3543 fs.insert_tree(
3544 "/the-dir",
3545 json!({
3546 "a.txt": "a-contents",
3547 "b.txt": "b-contents",
3548 }),
3549 )
3550 .await;
3551
3552 let project = Project::test(fs.clone(), &mut cx);
3553 let worktree_id = project
3554 .update(&mut cx, |p, cx| {
3555 p.find_or_create_local_worktree("/the-dir", false, cx)
3556 })
3557 .await
3558 .unwrap()
3559 .0
3560 .read_with(&cx, |tree, _| tree.id());
3561
3562 // Spawn multiple tasks to open paths, repeating some paths.
3563 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3564 (
3565 p.open_buffer((worktree_id, "a.txt"), cx),
3566 p.open_buffer((worktree_id, "b.txt"), cx),
3567 p.open_buffer((worktree_id, "a.txt"), cx),
3568 )
3569 });
3570
3571 let buffer_a_1 = buffer_a_1.await.unwrap();
3572 let buffer_a_2 = buffer_a_2.await.unwrap();
3573 let buffer_b = buffer_b.await.unwrap();
3574 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3575 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3576
3577 // There is only one buffer per path.
3578 let buffer_a_id = buffer_a_1.id();
3579 assert_eq!(buffer_a_2.id(), buffer_a_id);
3580
3581         // Open the same path again while another handle to it is still open.
3582 drop(buffer_a_1);
3583 let buffer_a_3 = project
3584 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3585 .await
3586 .unwrap();
3587
3588 // There's still only one buffer per path.
3589 assert_eq!(buffer_a_3.id(), buffer_a_id);
3590 }
3591
3592 #[gpui::test]
3593 async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
3594 use std::fs;
3595
3596 let dir = temp_tree(json!({
3597 "file1": "abc",
3598 "file2": "def",
3599 "file3": "ghi",
3600 }));
3601
3602 let project = Project::test(Arc::new(RealFs), &mut cx);
3603 let (worktree, _) = project
3604 .update(&mut cx, |p, cx| {
3605 p.find_or_create_local_worktree(dir.path(), false, cx)
3606 })
3607 .await
3608 .unwrap();
3609 let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
3610
3611 worktree.flush_fs_events(&cx).await;
3612 worktree
3613 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3614 .await;
3615
3616 let buffer1 = project
3617 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3618 .await
3619 .unwrap();
3620 let events = Rc::new(RefCell::new(Vec::new()));
3621
3622 // initially, the buffer isn't dirty.
3623 buffer1.update(&mut cx, |buffer, cx| {
3624 cx.subscribe(&buffer1, {
3625 let events = events.clone();
3626 move |_, _, event, _| events.borrow_mut().push(event.clone())
3627 })
3628 .detach();
3629
3630 assert!(!buffer.is_dirty());
3631 assert!(events.borrow().is_empty());
3632
3633 buffer.edit(vec![1..2], "", cx);
3634 });
3635
3636 // after the first edit, the buffer is dirty, and emits a dirtied event.
3637 buffer1.update(&mut cx, |buffer, cx| {
3638             assert_eq!(buffer.text(), "ac");
3639 assert!(buffer.is_dirty());
3640 assert_eq!(
3641 *events.borrow(),
3642 &[language::Event::Edited, language::Event::Dirtied]
3643 );
3644 events.borrow_mut().clear();
3645 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
3646 });
3647
3648 // after saving, the buffer is not dirty, and emits a saved event.
3649 buffer1.update(&mut cx, |buffer, cx| {
3650 assert!(!buffer.is_dirty());
3651 assert_eq!(*events.borrow(), &[language::Event::Saved]);
3652 events.borrow_mut().clear();
3653
3654 buffer.edit(vec![1..1], "B", cx);
3655 buffer.edit(vec![2..2], "D", cx);
3656 });
3657
3658 // after editing again, the buffer is dirty, and emits another dirty event.
3659 buffer1.update(&mut cx, |buffer, cx| {
3660             assert_eq!(buffer.text(), "aBDc");
3661 assert!(buffer.is_dirty());
3662 assert_eq!(
3663 *events.borrow(),
3664 &[
3665 language::Event::Edited,
3666 language::Event::Dirtied,
3667 language::Event::Edited,
3668 ],
3669 );
3670 events.borrow_mut().clear();
3671
3672 // TODO - currently, after restoring the buffer to its
3673             // previously-saved state, the buffer is still considered dirty.
3674 buffer.edit([1..3], "", cx);
3675             assert_eq!(buffer.text(), "ac");
3676 assert!(buffer.is_dirty());
3677 });
3678
3679 assert_eq!(*events.borrow(), &[language::Event::Edited]);
3680
3681 // When a file is deleted, the buffer is considered dirty.
3682 let events = Rc::new(RefCell::new(Vec::new()));
3683 let buffer2 = project
3684 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
3685 .await
3686 .unwrap();
3687 buffer2.update(&mut cx, |_, cx| {
3688 cx.subscribe(&buffer2, {
3689 let events = events.clone();
3690 move |_, _, event, _| events.borrow_mut().push(event.clone())
3691 })
3692 .detach();
3693 });
3694
3695 fs::remove_file(dir.path().join("file2")).unwrap();
3696 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
3697 assert_eq!(
3698 *events.borrow(),
3699 &[language::Event::Dirtied, language::Event::FileHandleChanged]
3700 );
3701
3702 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3703 let events = Rc::new(RefCell::new(Vec::new()));
3704 let buffer3 = project
3705 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
3706 .await
3707 .unwrap();
3708 buffer3.update(&mut cx, |_, cx| {
3709 cx.subscribe(&buffer3, {
3710 let events = events.clone();
3711 move |_, _, event, _| events.borrow_mut().push(event.clone())
3712 })
3713 .detach();
3714 });
3715
3716 worktree.flush_fs_events(&cx).await;
3717 buffer3.update(&mut cx, |buffer, cx| {
3718 buffer.edit(Some(0..0), "x", cx);
3719 });
3720 events.borrow_mut().clear();
3721 fs::remove_file(dir.path().join("file3")).unwrap();
3722 buffer3
3723 .condition(&cx, |_, _| !events.borrow().is_empty())
3724 .await;
3725 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
3726 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3727 }
3728
3729 #[gpui::test]
3730 async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
3731 use std::fs;
3732
3733 let initial_contents = "aaa\nbbbbb\nc\n";
3734 let dir = temp_tree(json!({ "the-file": initial_contents }));
3735
3736 let project = Project::test(Arc::new(RealFs), &mut cx);
3737 let (worktree, _) = project
3738 .update(&mut cx, |p, cx| {
3739 p.find_or_create_local_worktree(dir.path(), false, cx)
3740 })
3741 .await
3742 .unwrap();
3743 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
3744
3745 worktree
3746 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3747 .await;
3748
3749 let abs_path = dir.path().join("the-file");
3750 let buffer = project
3751 .update(&mut cx, |p, cx| {
3752 p.open_buffer((worktree_id, "the-file"), cx)
3753 })
3754 .await
3755 .unwrap();
3756
3757 // TODO
3758 // Add a cursor on each row.
3759 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
3760 // assert!(!buffer.is_dirty());
3761 // buffer.add_selection_set(
3762 // &(0..3)
3763 // .map(|row| Selection {
3764 // id: row as usize,
3765 // start: Point::new(row, 1),
3766 // end: Point::new(row, 1),
3767 // reversed: false,
3768 // goal: SelectionGoal::None,
3769 // })
3770 // .collect::<Vec<_>>(),
3771 // cx,
3772 // )
3773 // });
3774
3775 // Change the file on disk, adding two new lines of text, and removing
3776 // one line.
3777 buffer.read_with(&cx, |buffer, _| {
3778 assert!(!buffer.is_dirty());
3779 assert!(!buffer.has_conflict());
3780 });
3781 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3782 fs::write(&abs_path, new_contents).unwrap();
3783
3784 // Because the buffer was not modified, it is reloaded from disk. Its
3785 // contents are edited according to the diff between the old and new
3786 // file contents.
3787 buffer
3788 .condition(&cx, |buffer, _| buffer.text() == new_contents)
3789 .await;
3790
3791 buffer.update(&mut cx, |buffer, _| {
3792 assert_eq!(buffer.text(), new_contents);
3793 assert!(!buffer.is_dirty());
3794 assert!(!buffer.has_conflict());
3795
3796 // TODO
3797 // let cursor_positions = buffer
3798 // .selection_set(selection_set_id)
3799 // .unwrap()
3800 // .selections::<Point>(&*buffer)
3801 // .map(|selection| {
3802 // assert_eq!(selection.start, selection.end);
3803 // selection.start
3804 // })
3805 // .collect::<Vec<_>>();
3806 // assert_eq!(
3807 // cursor_positions,
3808 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
3809 // );
3810 });
3811
3812 // Modify the buffer
3813 buffer.update(&mut cx, |buffer, cx| {
3814 buffer.edit(vec![0..0], " ", cx);
3815 assert!(buffer.is_dirty());
3816 assert!(!buffer.has_conflict());
3817 });
3818
3819 // Change the file on disk again, adding blank lines to the beginning.
3820 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
3821
3822 // Because the buffer is modified, it doesn't reload from disk, but is
3823 // marked as having a conflict.
3824 buffer
3825 .condition(&cx, |buffer, _| buffer.has_conflict())
3826 .await;
3827 }
3828
3829 #[gpui::test]
3830 async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
3831 let fs = FakeFs::new(cx.background());
3832 fs.insert_tree(
3833 "/the-dir",
3834 json!({
3835 "a.rs": "
3836 fn foo(mut v: Vec<usize>) {
3837 for x in &v {
3838 v.push(1);
3839 }
3840 }
3841 "
3842 .unindent(),
3843 }),
3844 )
3845 .await;
3846
3847 let project = Project::test(fs.clone(), &mut cx);
3848 let (worktree, _) = project
3849 .update(&mut cx, |p, cx| {
3850 p.find_or_create_local_worktree("/the-dir", false, cx)
3851 })
3852 .await
3853 .unwrap();
3854 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
3855
3856 let buffer = project
3857 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3858 .await
3859 .unwrap();
3860
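        // Simulate a `textDocument/publishDiagnostics` message in which the hints
        // are reported as separate diagnostics that refer back to their primary
        // diagnostics via `related_information`, similar to how rust-analyzer
        // reports grouped diagnostics.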
3861 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3862 let message = lsp::PublishDiagnosticsParams {
3863 uri: buffer_uri.clone(),
3864 diagnostics: vec![
3865 lsp::Diagnostic {
3866 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3867 severity: Some(DiagnosticSeverity::WARNING),
3868 message: "error 1".to_string(),
3869 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3870 location: lsp::Location {
3871 uri: buffer_uri.clone(),
3872 range: lsp::Range::new(
3873 lsp::Position::new(1, 8),
3874 lsp::Position::new(1, 9),
3875 ),
3876 },
3877 message: "error 1 hint 1".to_string(),
3878 }]),
3879 ..Default::default()
3880 },
3881 lsp::Diagnostic {
3882 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3883 severity: Some(DiagnosticSeverity::HINT),
3884 message: "error 1 hint 1".to_string(),
3885 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3886 location: lsp::Location {
3887 uri: buffer_uri.clone(),
3888 range: lsp::Range::new(
3889 lsp::Position::new(1, 8),
3890 lsp::Position::new(1, 9),
3891 ),
3892 },
3893 message: "original diagnostic".to_string(),
3894 }]),
3895 ..Default::default()
3896 },
3897 lsp::Diagnostic {
3898 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3899 severity: Some(DiagnosticSeverity::ERROR),
3900 message: "error 2".to_string(),
3901 related_information: Some(vec![
3902 lsp::DiagnosticRelatedInformation {
3903 location: lsp::Location {
3904 uri: buffer_uri.clone(),
3905 range: lsp::Range::new(
3906 lsp::Position::new(1, 13),
3907 lsp::Position::new(1, 15),
3908 ),
3909 },
3910 message: "error 2 hint 1".to_string(),
3911 },
3912 lsp::DiagnosticRelatedInformation {
3913 location: lsp::Location {
3914 uri: buffer_uri.clone(),
3915 range: lsp::Range::new(
3916 lsp::Position::new(1, 13),
3917 lsp::Position::new(1, 15),
3918 ),
3919 },
3920 message: "error 2 hint 2".to_string(),
3921 },
3922 ]),
3923 ..Default::default()
3924 },
3925 lsp::Diagnostic {
3926 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3927 severity: Some(DiagnosticSeverity::HINT),
3928 message: "error 2 hint 1".to_string(),
3929 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3930 location: lsp::Location {
3931 uri: buffer_uri.clone(),
3932 range: lsp::Range::new(
3933 lsp::Position::new(2, 8),
3934 lsp::Position::new(2, 17),
3935 ),
3936 },
3937 message: "original diagnostic".to_string(),
3938 }]),
3939 ..Default::default()
3940 },
3941 lsp::Diagnostic {
3942 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3943 severity: Some(DiagnosticSeverity::HINT),
3944 message: "error 2 hint 2".to_string(),
3945 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3946 location: lsp::Location {
3947 uri: buffer_uri.clone(),
3948 range: lsp::Range::new(
3949 lsp::Position::new(2, 8),
3950 lsp::Position::new(2, 17),
3951 ),
3952 },
3953 message: "original diagnostic".to_string(),
3954 }]),
3955 ..Default::default()
3956 },
3957 ],
3958 version: None,
3959 };
3960
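        // Process the message. The project groups each primary diagnostic with its
        // related hints, assigning them a shared group id and marking the primary.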
3961 project
3962 .update(&mut cx, |p, cx| {
3963 p.update_diagnostics(message, &Default::default(), cx)
3964 })
3965 .unwrap();
3966 let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3967
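        // All diagnostics in the buffer, ordered by position. The hints for
        // "error 2" appear before the error itself because their ranges start earlier.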
3968 assert_eq!(
3969 buffer
3970 .diagnostics_in_range::<_, Point>(0..buffer.len())
3971 .collect::<Vec<_>>(),
3972 &[
3973 DiagnosticEntry {
3974 range: Point::new(1, 8)..Point::new(1, 9),
3975 diagnostic: Diagnostic {
3976 severity: DiagnosticSeverity::WARNING,
3977 message: "error 1".to_string(),
3978 group_id: 0,
3979 is_primary: true,
3980 ..Default::default()
3981 }
3982 },
3983 DiagnosticEntry {
3984 range: Point::new(1, 8)..Point::new(1, 9),
3985 diagnostic: Diagnostic {
3986 severity: DiagnosticSeverity::HINT,
3987 message: "error 1 hint 1".to_string(),
3988 group_id: 0,
3989 is_primary: false,
3990 ..Default::default()
3991 }
3992 },
3993 DiagnosticEntry {
3994 range: Point::new(1, 13)..Point::new(1, 15),
3995 diagnostic: Diagnostic {
3996 severity: DiagnosticSeverity::HINT,
3997 message: "error 2 hint 1".to_string(),
3998 group_id: 1,
3999 is_primary: false,
4000 ..Default::default()
4001 }
4002 },
4003 DiagnosticEntry {
4004 range: Point::new(1, 13)..Point::new(1, 15),
4005 diagnostic: Diagnostic {
4006 severity: DiagnosticSeverity::HINT,
4007 message: "error 2 hint 2".to_string(),
4008 group_id: 1,
4009 is_primary: false,
4010 ..Default::default()
4011 }
4012 },
4013 DiagnosticEntry {
4014 range: Point::new(2, 8)..Point::new(2, 17),
4015 diagnostic: Diagnostic {
4016 severity: DiagnosticSeverity::ERROR,
4017 message: "error 2".to_string(),
4018 group_id: 1,
4019 is_primary: true,
4020 ..Default::default()
4021 }
4022 }
4023 ]
4024 );
4025
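        // Diagnostics can also be retrieved group by group: group 0 is the warning
        // plus its hint, and group 1 is the two hints plus the error they belong to.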
4026 assert_eq!(
4027 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4028 &[
4029 DiagnosticEntry {
4030 range: Point::new(1, 8)..Point::new(1, 9),
4031 diagnostic: Diagnostic {
4032 severity: DiagnosticSeverity::WARNING,
4033 message: "error 1".to_string(),
4034 group_id: 0,
4035 is_primary: true,
4036 ..Default::default()
4037 }
4038 },
4039 DiagnosticEntry {
4040 range: Point::new(1, 8)..Point::new(1, 9),
4041 diagnostic: Diagnostic {
4042 severity: DiagnosticSeverity::HINT,
4043 message: "error 1 hint 1".to_string(),
4044 group_id: 0,
4045 is_primary: false,
4046 ..Default::default()
4047 }
4048 },
4049 ]
4050 );
4051 assert_eq!(
4052 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4053 &[
4054 DiagnosticEntry {
4055 range: Point::new(1, 13)..Point::new(1, 15),
4056 diagnostic: Diagnostic {
4057 severity: DiagnosticSeverity::HINT,
4058 message: "error 2 hint 1".to_string(),
4059 group_id: 1,
4060 is_primary: false,
4061 ..Default::default()
4062 }
4063 },
4064 DiagnosticEntry {
4065 range: Point::new(1, 13)..Point::new(1, 15),
4066 diagnostic: Diagnostic {
4067 severity: DiagnosticSeverity::HINT,
4068 message: "error 2 hint 2".to_string(),
4069 group_id: 1,
4070 is_primary: false,
4071 ..Default::default()
4072 }
4073 },
4074 DiagnosticEntry {
4075 range: Point::new(2, 8)..Point::new(2, 17),
4076 diagnostic: Diagnostic {
4077 severity: DiagnosticSeverity::ERROR,
4078 message: "error 2".to_string(),
4079 group_id: 1,
4080 is_primary: true,
4081 ..Default::default()
4082 }
4083 }
4084 ]
4085 );
4086 }
4087
4088 #[gpui::test]
4089 async fn test_rename(mut cx: gpui::TestAppContext) {
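        // Set up a Rust language backed by a fake language server so that the
        // rename requests issued by the project can be intercepted and answered
        // by the test.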
4090 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4091 let language = Arc::new(Language::new(
4092 LanguageConfig {
4093 name: "Rust".to_string(),
4094 path_suffixes: vec!["rs".to_string()],
4095 language_server: Some(language_server_config),
4096 ..Default::default()
4097 },
4098 Some(tree_sitter_rust::language()),
4099 ));
4100
4101 let fs = FakeFs::new(cx.background());
4102 fs.insert_tree(
4103 "/dir",
4104 json!({
4105 "one.rs": "const ONE: usize = 1;",
4106 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4107 }),
4108 )
4109 .await;
4110
4111 let project = Project::test(fs.clone(), &mut cx);
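        // Register the language so buffers in this project are associated with it
        // and its (fake) language server is started when they are opened.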
4112 project.update(&mut cx, |project, _| {
4113 Arc::get_mut(&mut project.languages).unwrap().add(language);
4114 });
4115
4116 let (tree, _) = project
4117 .update(&mut cx, |project, cx| {
4118 project.find_or_create_local_worktree("/dir", false, cx)
4119 })
4120 .await
4121 .unwrap();
4122 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
4123 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4124 .await;
4125
4126 let buffer = project
4127 .update(&mut cx, |project, cx| {
4128 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
4129 })
4130 .await
4131 .unwrap();
4132
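        // Opening the buffer above starts the fake language server; wait for it.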
4133 let mut fake_server = fake_servers.next().await.unwrap();
4134
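        // Ask the project to prepare a rename at offset 7 (inside `ONE`). The fake
        // server responds with the full range of the symbol, which the project
        // converts back into buffer offsets.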
4135 let response = project.update(&mut cx, |project, cx| {
4136 project.prepare_rename(buffer.clone(), 7, cx)
4137 });
4138 fake_server
4139 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params| {
4140 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
4141 assert_eq!(params.position, lsp::Position::new(0, 7));
4142 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4143 lsp::Position::new(0, 6),
4144 lsp::Position::new(0, 9),
4145 )))
4146 })
4147 .next()
4148 .await
4149 .unwrap();
4150 let range = response.await.unwrap().unwrap();
4151 let range = buffer.read_with(&cx, |buffer, _| range.to_offset(buffer));
4152 assert_eq!(range, 6..9);
4153
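        // Perform the rename. The fake server returns a workspace edit that touches
        // both `one.rs` and `two.rs`; the project applies it and returns a
        // transaction keyed by the affected buffers.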
4154 let response = project.update(&mut cx, |project, cx| {
4155 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
4156 });
4157 fake_server
4158 .handle_request::<lsp::request::Rename, _>(|params| {
4159 assert_eq!(
4160 params.text_document_position.text_document.uri.as_str(),
4161 "file:///dir/one.rs"
4162 );
4163 assert_eq!(
4164 params.text_document_position.position,
4165 lsp::Position::new(0, 7)
4166 );
4167 assert_eq!(params.new_name, "THREE");
4168 Some(lsp::WorkspaceEdit {
4169 changes: Some(
4170 [
4171 (
4172 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
4173 vec![lsp::TextEdit::new(
4174 lsp::Range::new(
4175 lsp::Position::new(0, 6),
4176 lsp::Position::new(0, 9),
4177 ),
4178 "THREE".to_string(),
4179 )],
4180 ),
4181 (
4182 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
4183 vec![
4184 lsp::TextEdit::new(
4185 lsp::Range::new(
4186 lsp::Position::new(0, 24),
4187 lsp::Position::new(0, 27),
4188 ),
4189 "THREE".to_string(),
4190 ),
4191 lsp::TextEdit::new(
4192 lsp::Range::new(
4193 lsp::Position::new(0, 35),
4194 lsp::Position::new(0, 38),
4195 ),
4196 "THREE".to_string(),
4197 ),
4198 ],
4199 ),
4200 ]
4201 .into_iter()
4202 .collect(),
4203 ),
4204 ..Default::default()
4205 })
4206 })
4207 .next()
4208 .await
4209 .unwrap();
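        // The transaction should contain both edited buffers: the declaration in
        // `one.rs` is renamed, and both references in `two.rs` are updated.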
4210 let mut transaction = response.await.unwrap().0;
4211 assert_eq!(transaction.len(), 2);
4212 assert_eq!(
4213 transaction
4214 .remove_entry(&buffer)
4215 .unwrap()
4216 .0
4217 .read_with(&cx, |buffer, _| buffer.text()),
4218 "const THREE: usize = 1;"
4219 );
4220 assert_eq!(
4221 transaction
4222 .into_keys()
4223 .next()
4224 .unwrap()
4225 .read_with(&cx, |buffer, _| buffer.text()),
4226 "const TWO: usize = one::THREE + one::THREE;"
4227 );
4228 }
4229}