pub mod fs;
mod ignore;
mod lsp_command;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, HashMap, HashSet};
use futures::Future;
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
    UpgradeModelHandle, WeakModelHandle,
};
use language::{
    range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, Completion, CompletionLabel,
    Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
    ToLspPosition, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{DiagnosticSeverity, LanguageServer};
use lsp_command::*;
use postage::{broadcast, prelude::Stream, sink::Sink, watch};
use smol::block_on;
use std::{
    convert::TryInto,
    ops::Range,
    path::{Path, PathBuf},
    sync::{atomic::AtomicBool, Arc},
    time::Instant,
};
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use fs::*;
pub use worktree::*;

pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntry>,
    languages: Arc<LanguageRegistry>,
    language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
    client: Arc<client::Client>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    language_servers_with_diagnostics_running: isize,
    open_buffers: HashMap<u64, OpenBuffer>,
    opened_buffer: broadcast::Sender<()>,
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
}

enum OpenBuffer {
    Loaded(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}

enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

enum ProjectClientState {
    Local {
        is_shared: bool,
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
    },
}

#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}

#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntry>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
}

#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}

#[derive(Debug)]
pub struct Definition {
    pub target_buffer: ModelHandle<Buffer>,
    pub target_range: Range<language::Anchor>,
}

#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
        let mut this = Self {
            error_count: 0,
            warning_count: 0,
            info_count: 0,
            hint_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    DiagnosticSeverity::INFORMATION => this.info_count += 1,
                    DiagnosticSeverity::HINT => this.hint_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
            info_count: self.info_count as u32,
            hint_count: self.hint_count as u32,
        }
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ProjectEntry {
    pub worktree_id: WorktreeId,
    pub entry_id: usize,
}

impl Project {
    pub fn init(client: &Arc<Client>) {
        client.add_entity_message_handler(Self::handle_add_collaborator);
        client.add_entity_message_handler(Self::handle_buffer_reloaded);
        client.add_entity_message_handler(Self::handle_buffer_saved);
        client.add_entity_message_handler(Self::handle_close_buffer);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
        client.add_entity_message_handler(Self::handle_remove_collaborator);
        client.add_entity_message_handler(Self::handle_share_worktree);
        client.add_entity_message_handler(Self::handle_unregister_worktree);
        client.add_entity_message_handler(Self::handle_unshare_project);
        client.add_entity_message_handler(Self::handle_update_buffer_file);
        client.add_entity_message_handler(Self::handle_update_buffer);
        client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
        client.add_entity_message_handler(Self::handle_update_worktree);
        client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_entity_request_handler(Self::handle_apply_code_action);
        client.add_entity_request_handler(Self::handle_format_buffers);
        client.add_entity_request_handler(Self::handle_get_code_actions);
        client.add_entity_request_handler(Self::handle_get_completions);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_entity_request_handler(Self::handle_open_buffer);
        client.add_entity_request_handler(Self::handle_save_buffer);
    }

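    /// Creates a new local project, spawning a background task that registers the
    /// project and its worktrees with the server whenever the client's connection
    /// status becomes `Connected`.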
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.recv().await {
                            if let Some(this) = this.upgrade(&cx) {
                                let remote_id = if let client::Status::Connected { .. } = status {
                                    let response = rpc.request(proto::RegisterProject {}).await?;
                                    Some(response.project_id)
                                } else {
                                    None
                                };

                                if let Some(project_id) = remote_id {
                                    let mut registrations = Vec::new();
                                    this.update(&mut cx, |this, cx| {
                                        for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                                            registrations.push(worktree.update(
                                                cx,
                                                |worktree, cx| {
                                                    let worktree = worktree.as_local_mut().unwrap();
                                                    worktree.register(project_id, cx)
                                                },
                                            ));
                                        }
                                    });
                                    for registration in registrations {
                                        registration.await?;
                                    }
                                }
                                this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                open_buffers: Default::default(),
                loading_buffers: Default::default(),
                shared_buffers: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: broadcast::channel(1).0,
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
            }
        })
    }
264
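    /// Joins an existing project hosted by another peer, building remote worktrees
    /// and collaborator state from the `JoinProject` response.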
265 pub async fn remote(
266 remote_id: u64,
267 client: Arc<Client>,
268 user_store: ModelHandle<UserStore>,
269 languages: Arc<LanguageRegistry>,
270 fs: Arc<dyn Fs>,
271 cx: &mut AsyncAppContext,
272 ) -> Result<ModelHandle<Self>> {
273 client.authenticate_and_connect(&cx).await?;
274
275 let response = client
276 .request(proto::JoinProject {
277 project_id: remote_id,
278 })
279 .await?;
280
281 let replica_id = response.replica_id as ReplicaId;
282
283 let mut worktrees = Vec::new();
284 for worktree in response.worktrees {
285 let (worktree, load_task) = cx
286 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
287 worktrees.push(worktree);
288 load_task.detach();
289 }
290
291 let this = cx.add_model(|cx| {
292 let mut this = Self {
293 worktrees: Vec::new(),
294 open_buffers: Default::default(),
295 loading_buffers: Default::default(),
296 opened_buffer: broadcast::channel(1).0,
297 shared_buffers: Default::default(),
298 active_entry: None,
299 collaborators: Default::default(),
300 languages,
301 user_store: user_store.clone(),
302 fs,
303 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
304 client,
305 client_state: ProjectClientState::Remote {
306 sharing_has_stopped: false,
307 remote_id,
308 replica_id,
309 },
310 language_servers_with_diagnostics_running: 0,
311 language_servers: Default::default(),
312 };
313 for worktree in worktrees {
314 this.add_worktree(&worktree, cx);
315 }
316 this
317 });
318
319 let user_ids = response
320 .collaborators
321 .iter()
322 .map(|peer| peer.user_id)
323 .collect();
324 user_store
325 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
326 .await?;
327 let mut collaborators = HashMap::default();
328 for message in response.collaborators {
329 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
330 collaborators.insert(collaborator.peer_id, collaborator);
331 }
332
333 this.update(cx, |this, _| {
334 this.collaborators = collaborators;
335 });
336
337 Ok(this)
338 }
339
340 #[cfg(any(test, feature = "test-support"))]
341 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
342 let languages = Arc::new(LanguageRegistry::new());
343 let http_client = client::test::FakeHttpClient::with_404_response();
344 let client = client::Client::new(http_client.clone());
345 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
346 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
347 }
348
349 #[cfg(any(test, feature = "test-support"))]
350 pub fn shared_buffer(&self, peer_id: PeerId, remote_id: u64) -> Option<ModelHandle<Buffer>> {
351 self.shared_buffers
352 .get(&peer_id)
353 .and_then(|buffers| buffers.get(&remote_id))
354 .cloned()
355 }
356
357 #[cfg(any(test, feature = "test-support"))]
358 pub fn has_buffered_operations(&self) -> bool {
359 self.open_buffers
360 .values()
361 .any(|buffer| matches!(buffer, OpenBuffer::Loading(_)))
362 }
363
364 pub fn fs(&self) -> &Arc<dyn Fs> {
365 &self.fs
366 }
367
368 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
369 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
370 *remote_id_tx.borrow_mut() = remote_id;
371 }
372
373 self.subscriptions.clear();
374 if let Some(remote_id) = remote_id {
375 self.subscriptions
376 .push(self.client.add_model_for_remote_entity(remote_id, cx));
377 }
378 }
379
380 pub fn remote_id(&self) -> Option<u64> {
381 match &self.client_state {
382 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
383 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
384 }
385 }
386
387 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
388 let mut id = None;
389 let mut watch = None;
390 match &self.client_state {
391 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
392 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
393 }
394
395 async move {
396 if let Some(id) = id {
397 return id;
398 }
399 let mut watch = watch.unwrap();
400 loop {
401 let id = *watch.borrow();
402 if let Some(id) = id {
403 return id;
404 }
405 watch.recv().await;
406 }
407 }
408 }
409
410 pub fn replica_id(&self) -> ReplicaId {
411 match &self.client_state {
412 ProjectClientState::Local { .. } => 0,
413 ProjectClientState::Remote { replica_id, .. } => *replica_id,
414 }
415 }
416
417 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
418 &self.collaborators
419 }
420
421 pub fn worktrees<'a>(
422 &'a self,
423 cx: &'a AppContext,
424 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
425 self.worktrees
426 .iter()
427 .filter_map(move |worktree| worktree.upgrade(cx))
428 }
429
430 pub fn worktree_for_id(
431 &self,
432 id: WorktreeId,
433 cx: &AppContext,
434 ) -> Option<ModelHandle<Worktree>> {
435 self.worktrees(cx)
436 .find(|worktree| worktree.read(cx).id() == id)
437 }
438
439 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
440 let rpc = self.client.clone();
441 cx.spawn(|this, mut cx| async move {
442 let project_id = this.update(&mut cx, |this, _| {
443 if let ProjectClientState::Local {
444 is_shared,
445 remote_id_rx,
446 ..
447 } = &mut this.client_state
448 {
449 *is_shared = true;
450 remote_id_rx
451 .borrow()
452 .ok_or_else(|| anyhow!("no project id"))
453 } else {
454 Err(anyhow!("can't share a remote project"))
455 }
456 })?;
457
458 rpc.request(proto::ShareProject { project_id }).await?;
459 let mut tasks = Vec::new();
460 this.update(&mut cx, |this, cx| {
461 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
462 worktree.update(cx, |worktree, cx| {
463 let worktree = worktree.as_local_mut().unwrap();
464 tasks.push(worktree.share(project_id, cx));
465 });
466 }
467 });
468 for task in tasks {
469 task.await?;
470 }
471 this.update(&mut cx, |_, cx| cx.notify());
472 Ok(())
473 })
474 }
475
476 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
477 let rpc = self.client.clone();
478 cx.spawn(|this, mut cx| async move {
479 let project_id = this.update(&mut cx, |this, _| {
480 if let ProjectClientState::Local {
481 is_shared,
482 remote_id_rx,
483 ..
484 } = &mut this.client_state
485 {
486 *is_shared = false;
487 remote_id_rx
488 .borrow()
489 .ok_or_else(|| anyhow!("no project id"))
490 } else {
491 Err(anyhow!("can't share a remote project"))
492 }
493 })?;
494
495 rpc.send(proto::UnshareProject { project_id })?;
496 this.update(&mut cx, |this, cx| {
497 this.collaborators.clear();
498 this.shared_buffers.clear();
499 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
500 worktree.update(cx, |worktree, _| {
501 worktree.as_local_mut().unwrap().unshare();
502 });
503 }
504 cx.notify()
505 });
506 Ok(())
507 })
508 }
509
510 pub fn is_read_only(&self) -> bool {
511 match &self.client_state {
512 ProjectClientState::Local { .. } => false,
513 ProjectClientState::Remote {
514 sharing_has_stopped,
515 ..
516 } => *sharing_has_stopped,
517 }
518 }
519
520 pub fn is_local(&self) -> bool {
521 match &self.client_state {
522 ProjectClientState::Local { .. } => true,
523 ProjectClientState::Remote { .. } => false,
524 }
525 }
526
527 pub fn is_remote(&self) -> bool {
528 !self.is_local()
529 }

    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        if this.loading_buffers.is_empty() {
                            this.open_buffers
                                .retain(|_, buffer| matches!(buffer, OpenBuffer::Loaded(_)))
                        }

                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.recv().await;
            }
        })
    }

597 fn open_local_buffer(
598 &mut self,
599 path: &Arc<Path>,
600 worktree: &ModelHandle<Worktree>,
601 cx: &mut ModelContext<Self>,
602 ) -> Task<Result<ModelHandle<Buffer>>> {
603 let load_buffer = worktree.update(cx, |worktree, cx| {
604 let worktree = worktree.as_local_mut().unwrap();
605 worktree.load_buffer(path, cx)
606 });
607 let worktree = worktree.downgrade();
608 cx.spawn(|this, mut cx| async move {
609 let buffer = load_buffer.await?;
610 let worktree = worktree
611 .upgrade(&cx)
612 .ok_or_else(|| anyhow!("worktree was removed"))?;
613 this.update(&mut cx, |this, cx| {
614 this.register_buffer(&buffer, Some(&worktree), cx)
615 })?;
616 Ok(buffer)
617 })
618 }
619
620 fn open_remote_buffer(
621 &mut self,
622 path: &Arc<Path>,
623 worktree: &ModelHandle<Worktree>,
624 cx: &mut ModelContext<Self>,
625 ) -> Task<Result<ModelHandle<Buffer>>> {
626 let rpc = self.client.clone();
627 let project_id = self.remote_id().unwrap();
628 let remote_worktree_id = worktree.read(cx).id();
629 let path = path.clone();
630 let path_string = path.to_string_lossy().to_string();
631 cx.spawn(|this, mut cx| async move {
632 let response = rpc
633 .request(proto::OpenBuffer {
634 project_id,
635 worktree_id: remote_worktree_id.to_proto(),
636 path: path_string,
637 })
638 .await?;
639 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
640 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
641 .await
642 })
643 }
644
645 fn open_local_buffer_from_lsp_path(
646 &mut self,
647 abs_path: lsp::Url,
648 lang_name: String,
649 lang_server: Arc<LanguageServer>,
650 cx: &mut ModelContext<Self>,
651 ) -> Task<Result<ModelHandle<Buffer>>> {
652 cx.spawn(|this, mut cx| async move {
653 let abs_path = abs_path
654 .to_file_path()
655 .map_err(|_| anyhow!("can't convert URI to path"))?;
656 let (worktree, relative_path) = if let Some(result) =
657 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
658 {
659 result
660 } else {
661 let worktree = this
662 .update(&mut cx, |this, cx| {
663 this.create_local_worktree(&abs_path, true, cx)
664 })
665 .await?;
666 this.update(&mut cx, |this, cx| {
667 this.language_servers
668 .insert((worktree.read(cx).id(), lang_name), lang_server);
669 });
670 (worktree, PathBuf::new())
671 };
672
673 let project_path = ProjectPath {
674 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
675 path: relative_path.into(),
676 };
677 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
678 .await
679 })
680 }
681
682 pub fn save_buffer_as(
683 &self,
684 buffer: ModelHandle<Buffer>,
685 abs_path: PathBuf,
686 cx: &mut ModelContext<Project>,
687 ) -> Task<Result<()>> {
688 let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
689 cx.spawn(|this, mut cx| async move {
690 let (worktree, path) = worktree_task.await?;
691 worktree
692 .update(&mut cx, |worktree, cx| {
693 worktree
694 .as_local_mut()
695 .unwrap()
696 .save_buffer_as(buffer.clone(), path, cx)
697 })
698 .await?;
699 this.update(&mut cx, |this, cx| {
700 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
701 });
702 Ok(())
703 })
704 }
705
706 #[cfg(any(test, feature = "test-support"))]
707 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
708 let path = path.into();
709 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
710 self.open_buffers.iter().any(|(_, buffer)| {
711 if let Some(buffer) = buffer.upgrade(cx) {
712 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
713 if file.worktree == worktree && file.path() == &path.path {
714 return true;
715 }
716 }
717 }
718 false
719 })
720 } else {
721 false
722 }
723 }
724
725 fn get_open_buffer(
726 &mut self,
727 path: &ProjectPath,
728 cx: &mut ModelContext<Self>,
729 ) -> Option<ModelHandle<Buffer>> {
730 let mut result = None;
731 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
732 self.open_buffers.retain(|_, buffer| {
733 if let Some(buffer) = buffer.upgrade(cx) {
734 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
735 if file.worktree == worktree && file.path() == &path.path {
736 result = Some(buffer);
737 }
738 }
739 true
740 } else {
741 false
742 }
743 });
744 result
745 }
746
747 fn register_buffer(
748 &mut self,
749 buffer: &ModelHandle<Buffer>,
750 worktree: Option<&ModelHandle<Worktree>>,
751 cx: &mut ModelContext<Self>,
752 ) -> Result<()> {
753 match self.open_buffers.insert(
754 buffer.read(cx).remote_id(),
755 OpenBuffer::Loaded(buffer.downgrade()),
756 ) {
757 None => {}
758 Some(OpenBuffer::Loading(operations)) => {
759 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
760 }
761 Some(OpenBuffer::Loaded(_)) => Err(anyhow!("registered the same buffer twice"))?,
762 }
763 self.assign_language_to_buffer(&buffer, worktree, cx);
764 Ok(())
765 }
766
767 fn assign_language_to_buffer(
768 &mut self,
769 buffer: &ModelHandle<Buffer>,
770 worktree: Option<&ModelHandle<Worktree>>,
771 cx: &mut ModelContext<Self>,
772 ) -> Option<()> {
773 let (path, full_path) = {
774 let file = buffer.read(cx).file()?;
775 (file.path().clone(), file.full_path(cx))
776 };
777
778 // If the buffer has a language, set it and start/assign the language server
779 if let Some(language) = self.languages.select_language(&full_path) {
780 buffer.update(cx, |buffer, cx| {
781 buffer.set_language(Some(language.clone()), cx);
782 });
783
784 // For local worktrees, start a language server if needed.
785 // Also assign the language server and any previously stored diagnostics to the buffer.
786 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
787 let worktree_id = local_worktree.id();
788 let worktree_abs_path = local_worktree.abs_path().clone();
789
790 let language_server = match self
791 .language_servers
792 .entry((worktree_id, language.name().to_string()))
793 {
794 hash_map::Entry::Occupied(e) => Some(e.get().clone()),
795 hash_map::Entry::Vacant(e) => Self::start_language_server(
796 self.client.clone(),
797 language.clone(),
798 &worktree_abs_path,
799 cx,
800 )
801 .map(|server| e.insert(server).clone()),
802 };
803
804 buffer.update(cx, |buffer, cx| {
805 buffer.set_language_server(language_server, cx);
806 });
807 }
808 }
809
810 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
811 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
812 buffer.update(cx, |buffer, cx| {
813 buffer.update_diagnostics(diagnostics, None, cx).log_err();
814 });
815 }
816 }
817
818 None
819 }
820
821 fn start_language_server(
822 rpc: Arc<Client>,
823 language: Arc<Language>,
824 worktree_path: &Path,
825 cx: &mut ModelContext<Self>,
826 ) -> Option<Arc<LanguageServer>> {
827 enum LspEvent {
828 DiagnosticsStart,
829 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
830 DiagnosticsFinish,
831 }
832
833 let language_server = language
834 .start_server(worktree_path, cx)
835 .log_err()
836 .flatten()?;
837 let disk_based_sources = language
838 .disk_based_diagnostic_sources()
839 .cloned()
840 .unwrap_or_default();
841 let disk_based_diagnostics_progress_token =
842 language.disk_based_diagnostics_progress_token().cloned();
843 let has_disk_based_diagnostic_progress_token =
844 disk_based_diagnostics_progress_token.is_some();
845 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
846
847 // Listen for `PublishDiagnostics` notifications.
848 language_server
849 .on_notification::<lsp::notification::PublishDiagnostics, _>({
850 let diagnostics_tx = diagnostics_tx.clone();
851 move |params| {
852 if !has_disk_based_diagnostic_progress_token {
853 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
854 }
855 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params))).ok();
856 if !has_disk_based_diagnostic_progress_token {
857 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
858 }
859 }
860 })
861 .detach();
862
863 // Listen for `Progress` notifications. Send an event when the language server
864 // transitions between running jobs and not running any jobs.
865 let mut running_jobs_for_this_server: i32 = 0;
866 language_server
867 .on_notification::<lsp::notification::Progress, _>(move |params| {
868 let token = match params.token {
869 lsp::NumberOrString::Number(_) => None,
870 lsp::NumberOrString::String(token) => Some(token),
871 };
872
873 if token == disk_based_diagnostics_progress_token {
874 match params.value {
875 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
876 lsp::WorkDoneProgress::Begin(_) => {
877 running_jobs_for_this_server += 1;
878 if running_jobs_for_this_server == 1 {
879 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
880 }
881 }
882 lsp::WorkDoneProgress::End(_) => {
883 running_jobs_for_this_server -= 1;
884 if running_jobs_for_this_server == 0 {
885 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
886 }
887 }
888 _ => {}
889 },
890 }
891 }
892 })
893 .detach();
894
895 // Process all the LSP events.
896 cx.spawn_weak(|this, mut cx| async move {
897 while let Ok(message) = diagnostics_rx.recv().await {
898 let this = this.upgrade(&cx)?;
899 match message {
900 LspEvent::DiagnosticsStart => {
901 this.update(&mut cx, |this, cx| {
902 this.disk_based_diagnostics_started(cx);
903 if let Some(project_id) = this.remote_id() {
904 rpc.send(proto::DiskBasedDiagnosticsUpdating { project_id })
905 .log_err();
906 }
907 });
908 }
909 LspEvent::DiagnosticsUpdate(mut params) => {
910 language.process_diagnostics(&mut params);
911 this.update(&mut cx, |this, cx| {
912 this.update_diagnostics(params, &disk_based_sources, cx)
913 .log_err();
914 });
915 }
916 LspEvent::DiagnosticsFinish => {
917 this.update(&mut cx, |this, cx| {
918 this.disk_based_diagnostics_finished(cx);
919 if let Some(project_id) = this.remote_id() {
920 rpc.send(proto::DiskBasedDiagnosticsUpdated { project_id })
921 .log_err();
922 }
923 });
924 }
925 }
926 }
927 Some(())
928 })
929 .detach();
930
931 Some(language_server)
932 }
933
    pub fn update_diagnostics(
        &mut self,
        params: lsp::PublishDiagnosticsParams,
        disk_based_sources: &HashSet<String>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let abs_path = params
            .uri
            .to_file_path()
            .map_err(|_| anyhow!("URI is not a file"))?;
        let mut next_group_id = 0;
        let mut diagnostics = Vec::default();
        let mut primary_diagnostic_group_ids = HashMap::default();
        let mut sources_by_group_id = HashMap::default();
        let mut supporting_diagnostic_severities = HashMap::default();
        for diagnostic in &params.diagnostics {
            let source = diagnostic.source.as_ref();
            let code = diagnostic.code.as_ref().map(|code| match code {
                lsp::NumberOrString::Number(code) => code.to_string(),
                lsp::NumberOrString::String(code) => code.clone(),
            });
            let range = range_from_lsp(diagnostic.range);
            let is_supporting = diagnostic
                .related_information
                .as_ref()
                .map_or(false, |infos| {
                    infos.iter().any(|info| {
                        primary_diagnostic_group_ids.contains_key(&(
                            source,
                            code.clone(),
                            range_from_lsp(info.location.range),
                        ))
                    })
                });

            if is_supporting {
                if let Some(severity) = diagnostic.severity {
                    supporting_diagnostic_severities
                        .insert((source, code.clone(), range), severity);
                }
            } else {
                let group_id = post_inc(&mut next_group_id);
                let is_disk_based =
                    source.map_or(false, |source| disk_based_sources.contains(source));

                sources_by_group_id.insert(group_id, source);
                primary_diagnostic_group_ids
                    .insert((source, code.clone(), range.clone()), group_id);

                diagnostics.push(DiagnosticEntry {
                    range,
                    diagnostic: Diagnostic {
                        code: code.clone(),
                        severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
                        message: diagnostic.message.clone(),
                        group_id,
                        is_primary: true,
                        is_valid: true,
                        is_disk_based,
                    },
                });
                if let Some(infos) = &diagnostic.related_information {
                    for info in infos {
                        if info.location.uri == params.uri && !info.message.is_empty() {
                            let range = range_from_lsp(info.location.range);
                            diagnostics.push(DiagnosticEntry {
                                range,
                                diagnostic: Diagnostic {
                                    code: code.clone(),
                                    severity: DiagnosticSeverity::INFORMATION,
                                    message: info.message.clone(),
                                    group_id,
                                    is_primary: false,
                                    is_valid: true,
                                    is_disk_based,
                                },
                            });
                        }
                    }
                }
            }
        }

        for entry in &mut diagnostics {
            let diagnostic = &mut entry.diagnostic;
            if !diagnostic.is_primary {
                let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
                if let Some(&severity) = supporting_diagnostic_severities.get(&(
                    source,
                    diagnostic.code.clone(),
                    entry.range.clone(),
                )) {
                    diagnostic.severity = severity;
                }
            }
        }

        self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
        Ok(())
    }
1034
1035 pub fn update_diagnostic_entries(
1036 &mut self,
1037 abs_path: PathBuf,
1038 version: Option<i32>,
1039 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1040 cx: &mut ModelContext<Project>,
1041 ) -> Result<(), anyhow::Error> {
1042 let (worktree, relative_path) = self
1043 .find_local_worktree(&abs_path, cx)
1044 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1045 let project_path = ProjectPath {
1046 worktree_id: worktree.read(cx).id(),
1047 path: relative_path.into(),
1048 };
1049
1050 for buffer in self.open_buffers.values() {
1051 if let Some(buffer) = buffer.upgrade(cx) {
1052 if buffer
1053 .read(cx)
1054 .file()
1055 .map_or(false, |file| *file.path() == project_path.path)
1056 {
1057 buffer.update(cx, |buffer, cx| {
1058 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1059 })?;
1060 break;
1061 }
1062 }
1063 }
1064 worktree.update(cx, |worktree, cx| {
1065 worktree
1066 .as_local_mut()
1067 .ok_or_else(|| anyhow!("not a local worktree"))?
1068 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1069 })?;
1070 cx.emit(Event::DiagnosticsUpdated(project_path));
1071 Ok(())
1072 }
1073
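    /// Formats the given buffers, requesting formatting from the remote host for
    /// remote buffers and from each buffer's language server for local ones, and
    /// collects the resulting edits into a single `ProjectTransaction`.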
1074 pub fn format(
1075 &self,
1076 buffers: HashSet<ModelHandle<Buffer>>,
1077 push_to_history: bool,
1078 cx: &mut ModelContext<Project>,
1079 ) -> Task<Result<ProjectTransaction>> {
1080 let mut local_buffers = Vec::new();
1081 let mut remote_buffers = None;
1082 for buffer_handle in buffers {
1083 let buffer = buffer_handle.read(cx);
1084 let worktree;
1085 if let Some(file) = File::from_dyn(buffer.file()) {
1086 worktree = file.worktree.clone();
1087 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1088 let lang_server;
1089 if let Some(lang) = buffer.language() {
1090 if let Some(server) = self
1091 .language_servers
1092 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1093 {
1094 lang_server = server.clone();
1095 } else {
1096 return Task::ready(Ok(Default::default()));
1097 };
1098 } else {
1099 return Task::ready(Ok(Default::default()));
1100 }
1101
1102 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1103 } else {
1104 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1105 }
1106 } else {
1107 return Task::ready(Ok(Default::default()));
1108 }
1109 }
1110
1111 let remote_buffers = self.remote_id().zip(remote_buffers);
1112 let client = self.client.clone();
1113
1114 cx.spawn(|this, mut cx| async move {
1115 let mut project_transaction = ProjectTransaction::default();
1116
1117 if let Some((project_id, remote_buffers)) = remote_buffers {
1118 let response = client
1119 .request(proto::FormatBuffers {
1120 project_id,
1121 buffer_ids: remote_buffers
1122 .iter()
1123 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1124 .collect(),
1125 })
1126 .await?
1127 .transaction
1128 .ok_or_else(|| anyhow!("missing transaction"))?;
1129 project_transaction = this
1130 .update(&mut cx, |this, cx| {
1131 this.deserialize_project_transaction(response, push_to_history, cx)
1132 })
1133 .await?;
1134 }
1135
1136 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1137 let lsp_edits = lang_server
1138 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1139 text_document: lsp::TextDocumentIdentifier::new(
1140 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1141 ),
1142 options: Default::default(),
1143 work_done_progress_params: Default::default(),
1144 })
1145 .await?;
1146
1147 if let Some(lsp_edits) = lsp_edits {
1148 let edits = buffer
1149 .update(&mut cx, |buffer, cx| {
1150 buffer.edits_from_lsp(lsp_edits, None, cx)
1151 })
1152 .await?;
1153 buffer.update(&mut cx, |buffer, cx| {
1154 buffer.finalize_last_transaction();
1155 buffer.start_transaction();
1156 for (range, text) in edits {
1157 buffer.edit([range], text, cx);
1158 }
1159 if buffer.end_transaction(cx).is_some() {
1160 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1161 if !push_to_history {
1162 buffer.forget_transaction(transaction.id);
1163 }
1164 project_transaction.0.insert(cx.handle(), transaction);
1165 }
1166 });
1167 }
1168 }
1169
1170 Ok(project_transaction)
1171 })
1172 }
1173
1174 pub fn definition<T: ToPointUtf16>(
1175 &self,
1176 buffer: &ModelHandle<Buffer>,
1177 position: T,
1178 cx: &mut ModelContext<Self>,
1179 ) -> Task<Result<Vec<Definition>>> {
1180 let position = position.to_point_utf16(buffer.read(cx));
1181 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1182 }
1183
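    /// Requests completions at the given position, either directly from the
    /// buffer's language server or over RPC when the project is remote.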
1184 pub fn completions<T: ToPointUtf16>(
1185 &self,
1186 source_buffer_handle: &ModelHandle<Buffer>,
1187 position: T,
1188 cx: &mut ModelContext<Self>,
1189 ) -> Task<Result<Vec<Completion>>> {
1190 let source_buffer_handle = source_buffer_handle.clone();
1191 let source_buffer = source_buffer_handle.read(cx);
1192 let buffer_id = source_buffer.remote_id();
1193 let language = source_buffer.language().cloned();
1194 let worktree;
1195 let buffer_abs_path;
1196 if let Some(file) = File::from_dyn(source_buffer.file()) {
1197 worktree = file.worktree.clone();
1198 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1199 } else {
1200 return Task::ready(Ok(Default::default()));
1201 };
1202
1203 let position = position.to_point_utf16(source_buffer);
1204 let anchor = source_buffer.anchor_after(position);
1205
1206 if worktree.read(cx).as_local().is_some() {
1207 let buffer_abs_path = buffer_abs_path.unwrap();
1208 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1209 server
1210 } else {
1211 return Task::ready(Ok(Default::default()));
1212 };
1213
1214 cx.spawn(|_, cx| async move {
1215 let completions = lang_server
1216 .request::<lsp::request::Completion>(lsp::CompletionParams {
1217 text_document_position: lsp::TextDocumentPositionParams::new(
1218 lsp::TextDocumentIdentifier::new(
1219 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1220 ),
1221 position.to_lsp_position(),
1222 ),
1223 context: Default::default(),
1224 work_done_progress_params: Default::default(),
1225 partial_result_params: Default::default(),
1226 })
1227 .await
1228 .context("lsp completion request failed")?;
1229
1230 let completions = if let Some(completions) = completions {
1231 match completions {
1232 lsp::CompletionResponse::Array(completions) => completions,
1233 lsp::CompletionResponse::List(list) => list.items,
1234 }
1235 } else {
1236 Default::default()
1237 };
1238
1239 source_buffer_handle.read_with(&cx, |this, _| {
1240 Ok(completions
1241 .into_iter()
1242 .filter_map(|lsp_completion| {
1243 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1244 lsp::CompletionTextEdit::Edit(edit) => {
1245 (range_from_lsp(edit.range), edit.new_text.clone())
1246 }
1247 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1248 log::info!("unsupported insert/replace completion");
1249 return None;
1250 }
1251 };
1252
1253 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1254 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1255 if clipped_start == old_range.start && clipped_end == old_range.end {
1256 Some(Completion {
1257 old_range: this.anchor_before(old_range.start)
1258 ..this.anchor_after(old_range.end),
1259 new_text,
1260 label: language
1261 .as_ref()
1262 .and_then(|l| l.label_for_completion(&lsp_completion))
1263 .unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
1264 lsp_completion,
1265 })
1266 } else {
1267 None
1268 }
1269 })
1270 .collect())
1271 })
1272 })
1273 } else if let Some(project_id) = self.remote_id() {
1274 let rpc = self.client.clone();
1275 let message = proto::GetCompletions {
1276 project_id,
1277 buffer_id,
1278 position: Some(language::proto::serialize_anchor(&anchor)),
1279 version: (&source_buffer.version()).into(),
1280 };
1281 cx.spawn_weak(|_, mut cx| async move {
1282 let response = rpc.request(message).await?;
1283
1284 source_buffer_handle
1285 .update(&mut cx, |buffer, _| {
1286 buffer.wait_for_version(response.version.into())
1287 })
1288 .await;
1289
1290 response
1291 .completions
1292 .into_iter()
1293 .map(|completion| {
1294 language::proto::deserialize_completion(completion, language.as_ref())
1295 })
1296 .collect()
1297 })
1298 } else {
1299 Task::ready(Ok(Default::default()))
1300 }
1301 }
1302
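    /// Resolves the given completion and applies any additional text edits it
    /// carries, returning the resulting transaction if edits were applied.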
1303 pub fn apply_additional_edits_for_completion(
1304 &self,
1305 buffer_handle: ModelHandle<Buffer>,
1306 completion: Completion,
1307 push_to_history: bool,
1308 cx: &mut ModelContext<Self>,
1309 ) -> Task<Result<Option<Transaction>>> {
1310 let buffer = buffer_handle.read(cx);
1311 let buffer_id = buffer.remote_id();
1312
1313 if self.is_local() {
1314 let lang_server = if let Some(language_server) = buffer.language_server() {
1315 language_server.clone()
1316 } else {
1317 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1318 };
1319
1320 cx.spawn(|_, mut cx| async move {
1321 let resolved_completion = lang_server
1322 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1323 .await?;
1324 if let Some(edits) = resolved_completion.additional_text_edits {
1325 let edits = buffer_handle
1326 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1327 .await?;
1328 buffer_handle.update(&mut cx, |buffer, cx| {
1329 buffer.finalize_last_transaction();
1330 buffer.start_transaction();
1331 for (range, text) in edits {
1332 buffer.edit([range], text, cx);
1333 }
1334 let transaction = if buffer.end_transaction(cx).is_some() {
1335 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1336 if !push_to_history {
1337 buffer.forget_transaction(transaction.id);
1338 }
1339 Some(transaction)
1340 } else {
1341 None
1342 };
1343 Ok(transaction)
1344 })
1345 } else {
1346 Ok(None)
1347 }
1348 })
1349 } else if let Some(project_id) = self.remote_id() {
1350 let client = self.client.clone();
1351 cx.spawn(|_, mut cx| async move {
1352 let response = client
1353 .request(proto::ApplyCompletionAdditionalEdits {
1354 project_id,
1355 buffer_id,
1356 completion: Some(language::proto::serialize_completion(&completion)),
1357 })
1358 .await?;
1359
1360 if let Some(transaction) = response.transaction {
1361 let transaction = language::proto::deserialize_transaction(transaction)?;
1362 buffer_handle
1363 .update(&mut cx, |buffer, _| {
1364 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1365 })
1366 .await;
1367 if push_to_history {
1368 buffer_handle.update(&mut cx, |buffer, _| {
1369 buffer.push_transaction(transaction.clone(), Instant::now());
1370 });
1371 }
1372 Ok(Some(transaction))
1373 } else {
1374 Ok(None)
1375 }
1376 })
1377 } else {
1378 Task::ready(Err(anyhow!("project does not have a remote id")))
1379 }
1380 }
1381
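    /// Fetches the code actions available for the given range, from the buffer's
    /// language server locally or via the host when the project is remote.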
1382 pub fn code_actions<T: ToOffset>(
1383 &self,
1384 buffer_handle: &ModelHandle<Buffer>,
1385 range: Range<T>,
1386 cx: &mut ModelContext<Self>,
1387 ) -> Task<Result<Vec<CodeAction>>> {
1388 let buffer_handle = buffer_handle.clone();
1389 let buffer = buffer_handle.read(cx);
1390 let buffer_id = buffer.remote_id();
1391 let worktree;
1392 let buffer_abs_path;
1393 if let Some(file) = File::from_dyn(buffer.file()) {
1394 worktree = file.worktree.clone();
1395 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1396 } else {
1397 return Task::ready(Ok(Default::default()));
1398 };
1399 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1400
1401 if worktree.read(cx).as_local().is_some() {
1402 let buffer_abs_path = buffer_abs_path.unwrap();
1403 let lang_name;
1404 let lang_server;
1405 if let Some(lang) = buffer.language() {
1406 lang_name = lang.name().to_string();
1407 if let Some(server) = self
1408 .language_servers
1409 .get(&(worktree.read(cx).id(), lang_name.clone()))
1410 {
1411 lang_server = server.clone();
1412 } else {
1413 return Task::ready(Ok(Default::default()));
1414 };
1415 } else {
1416 return Task::ready(Ok(Default::default()));
1417 }
1418
1419 let lsp_range = lsp::Range::new(
1420 range.start.to_point_utf16(buffer).to_lsp_position(),
1421 range.end.to_point_utf16(buffer).to_lsp_position(),
1422 );
1423 cx.foreground().spawn(async move {
1424 Ok(lang_server
1425 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1426 text_document: lsp::TextDocumentIdentifier::new(
1427 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1428 ),
1429 range: lsp_range,
1430 work_done_progress_params: Default::default(),
1431 partial_result_params: Default::default(),
1432 context: lsp::CodeActionContext {
1433 diagnostics: Default::default(),
1434 only: Some(vec![
1435 lsp::CodeActionKind::QUICKFIX,
1436 lsp::CodeActionKind::REFACTOR,
1437 lsp::CodeActionKind::REFACTOR_EXTRACT,
1438 ]),
1439 },
1440 })
1441 .await?
1442 .unwrap_or_default()
1443 .into_iter()
1444 .filter_map(|entry| {
1445 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1446 Some(CodeAction {
1447 range: range.clone(),
1448 lsp_action,
1449 })
1450 } else {
1451 None
1452 }
1453 })
1454 .collect())
1455 })
1456 } else if let Some(project_id) = self.remote_id() {
1457 let rpc = self.client.clone();
1458 cx.spawn_weak(|_, mut cx| async move {
1459 let response = rpc
1460 .request(proto::GetCodeActions {
1461 project_id,
1462 buffer_id,
1463 start: Some(language::proto::serialize_anchor(&range.start)),
1464 end: Some(language::proto::serialize_anchor(&range.end)),
1465 })
1466 .await?;
1467
1468 buffer_handle
1469 .update(&mut cx, |buffer, _| {
1470 buffer.wait_for_version(response.version.into())
1471 })
1472 .await;
1473
1474 response
1475 .actions
1476 .into_iter()
1477 .map(language::proto::deserialize_code_action)
1478 .collect()
1479 })
1480 } else {
1481 Task::ready(Ok(Default::default()))
1482 }
1483 }
1484
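    /// Applies a code action, resolving it with the language server first if its
    /// workspace edit has not been computed yet.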
1485 pub fn apply_code_action(
1486 &self,
1487 buffer_handle: ModelHandle<Buffer>,
1488 mut action: CodeAction,
1489 push_to_history: bool,
1490 cx: &mut ModelContext<Self>,
1491 ) -> Task<Result<ProjectTransaction>> {
1492 if self.is_local() {
1493 let buffer = buffer_handle.read(cx);
1494 let lang_name = if let Some(lang) = buffer.language() {
1495 lang.name().to_string()
1496 } else {
1497 return Task::ready(Ok(Default::default()));
1498 };
1499 let lang_server = if let Some(language_server) = buffer.language_server() {
1500 language_server.clone()
1501 } else {
1502 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1503 };
1504 let range = action.range.to_point_utf16(buffer);
1505
1506 cx.spawn(|this, mut cx| async move {
1507 if let Some(lsp_range) = action
1508 .lsp_action
1509 .data
1510 .as_mut()
1511 .and_then(|d| d.get_mut("codeActionParams"))
1512 .and_then(|d| d.get_mut("range"))
1513 {
1514 *lsp_range = serde_json::to_value(&lsp::Range::new(
1515 range.start.to_lsp_position(),
1516 range.end.to_lsp_position(),
1517 ))
1518 .unwrap();
1519 action.lsp_action = lang_server
1520 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1521 .await?;
1522 } else {
1523 let actions = this
1524 .update(&mut cx, |this, cx| {
1525 this.code_actions(&buffer_handle, action.range, cx)
1526 })
1527 .await?;
1528 action.lsp_action = actions
1529 .into_iter()
1530 .find(|a| a.lsp_action.title == action.lsp_action.title)
1531 .ok_or_else(|| anyhow!("code action is outdated"))?
1532 .lsp_action;
1533 }
1534
1535 if let Some(edit) = action.lsp_action.edit {
1536 Self::deserialize_workspace_edit(
1537 this,
1538 edit,
1539 push_to_history,
1540 lang_name,
1541 lang_server,
1542 &mut cx,
1543 )
1544 .await
1545 } else {
1546 Ok(ProjectTransaction::default())
1547 }
1548 })
1549 } else if let Some(project_id) = self.remote_id() {
1550 let client = self.client.clone();
1551 let request = proto::ApplyCodeAction {
1552 project_id,
1553 buffer_id: buffer_handle.read(cx).remote_id(),
1554 action: Some(language::proto::serialize_code_action(&action)),
1555 };
1556 cx.spawn(|this, mut cx| async move {
1557 let response = client
1558 .request(request)
1559 .await?
1560 .transaction
1561 .ok_or_else(|| anyhow!("missing transaction"))?;
1562 this.update(&mut cx, |this, cx| {
1563 this.deserialize_project_transaction(response, push_to_history, cx)
1564 })
1565 .await
1566 })
1567 } else {
1568 Task::ready(Err(anyhow!("project does not have a remote id")))
1569 }
1570 }
1571
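    /// Applies an LSP workspace edit to the project, performing any file
    /// create/rename/delete operations and collecting per-buffer transactions.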
1572 async fn deserialize_workspace_edit(
1573 this: ModelHandle<Self>,
1574 edit: lsp::WorkspaceEdit,
1575 push_to_history: bool,
1576 language_name: String,
1577 language_server: Arc<LanguageServer>,
1578 cx: &mut AsyncAppContext,
1579 ) -> Result<ProjectTransaction> {
1580 let fs = this.read_with(cx, |this, _| this.fs.clone());
1581 let mut operations = Vec::new();
1582 if let Some(document_changes) = edit.document_changes {
1583 match document_changes {
1584 lsp::DocumentChanges::Edits(edits) => {
1585 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
1586 }
1587 lsp::DocumentChanges::Operations(ops) => operations = ops,
1588 }
1589 } else if let Some(changes) = edit.changes {
1590 operations.extend(changes.into_iter().map(|(uri, edits)| {
1591 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1592 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1593 uri,
1594 version: None,
1595 },
1596 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1597 })
1598 }));
1599 }
1600
1601 let mut project_transaction = ProjectTransaction::default();
1602 for operation in operations {
1603 match operation {
1604 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1605 let abs_path = op
1606 .uri
1607 .to_file_path()
1608 .map_err(|_| anyhow!("can't convert URI to path"))?;
1609
1610 if let Some(parent_path) = abs_path.parent() {
1611 fs.create_dir(parent_path).await?;
1612 }
1613 if abs_path.ends_with("/") {
1614 fs.create_dir(&abs_path).await?;
1615 } else {
1616 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
1617 .await?;
1618 }
1619 }
1620 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1621 let source_abs_path = op
1622 .old_uri
1623 .to_file_path()
1624 .map_err(|_| anyhow!("can't convert URI to path"))?;
1625 let target_abs_path = op
1626 .new_uri
1627 .to_file_path()
1628 .map_err(|_| anyhow!("can't convert URI to path"))?;
1629 fs.rename(
1630 &source_abs_path,
1631 &target_abs_path,
1632 op.options.map(Into::into).unwrap_or_default(),
1633 )
1634 .await?;
1635 }
1636 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1637 let abs_path = op
1638 .uri
1639 .to_file_path()
1640 .map_err(|_| anyhow!("can't convert URI to path"))?;
1641 let options = op.options.map(Into::into).unwrap_or_default();
1642 if abs_path.ends_with("/") {
1643 fs.remove_dir(&abs_path, options).await?;
1644 } else {
1645 fs.remove_file(&abs_path, options).await?;
1646 }
1647 }
1648 lsp::DocumentChangeOperation::Edit(op) => {
1649 let buffer_to_edit = this
1650 .update(cx, |this, cx| {
1651 this.open_local_buffer_from_lsp_path(
1652 op.text_document.uri,
1653 language_name.clone(),
1654 language_server.clone(),
1655 cx,
1656 )
1657 })
1658 .await?;
1659
1660 let edits = buffer_to_edit
1661 .update(cx, |buffer, cx| {
1662 let edits = op.edits.into_iter().map(|edit| match edit {
1663 lsp::OneOf::Left(edit) => edit,
1664 lsp::OneOf::Right(edit) => edit.text_edit,
1665 });
1666 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1667 })
1668 .await?;
1669
1670 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
1671 buffer.finalize_last_transaction();
1672 buffer.start_transaction();
1673 for (range, text) in edits {
1674 buffer.edit([range], text, cx);
1675 }
1676 let transaction = if buffer.end_transaction(cx).is_some() {
1677 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1678 if !push_to_history {
1679 buffer.forget_transaction(transaction.id);
1680 }
1681 Some(transaction)
1682 } else {
1683 None
1684 };
1685
1686 transaction
1687 });
1688 if let Some(transaction) = transaction {
1689 project_transaction.0.insert(buffer_to_edit, transaction);
1690 }
1691 }
1692 }
1693 }
1694
1695 Ok(project_transaction)
1696 }
1697
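    /// Asks the language server whether the symbol at the given position can be
    /// renamed, returning the range that would be affected.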
1698 pub fn prepare_rename<T: ToPointUtf16>(
1699 &self,
1700 buffer: ModelHandle<Buffer>,
1701 position: T,
1702 cx: &mut ModelContext<Self>,
1703 ) -> Task<Result<Option<Range<Anchor>>>> {
1704 let position = position.to_point_utf16(buffer.read(cx));
1705 self.request_lsp(buffer, PrepareRename { position }, cx)
1706 }
1707
1708 pub fn perform_rename<T: ToPointUtf16>(
1709 &self,
1710 buffer: ModelHandle<Buffer>,
1711 position: T,
1712 new_name: String,
1713 push_to_history: bool,
1714 cx: &mut ModelContext<Self>,
1715 ) -> Task<Result<ProjectTransaction>> {
1716 let position = position.to_point_utf16(buffer.read(cx));
1717 self.request_lsp(
1718 buffer,
1719 PerformRename {
1720 position,
1721 new_name,
1722 push_to_history,
1723 },
1724 cx,
1725 )
1726 }
1727
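    /// Routes an LSP request to the buffer's language server when the project is
    /// local, or to the host over RPC when it is remote.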
1728 fn request_lsp<R: LspCommand>(
1729 &self,
1730 buffer_handle: ModelHandle<Buffer>,
1731 request: R,
1732 cx: &mut ModelContext<Self>,
1733 ) -> Task<Result<R::Response>>
1734 where
1735 <R::LspRequest as lsp::request::Request>::Result: Send,
1736 {
1737 let buffer = buffer_handle.read(cx);
1738 if self.is_local() {
1739 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
1740 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
1741 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
1742 return cx.spawn(|this, cx| async move {
1743 let response = language_server
1744 .request::<R::LspRequest>(lsp_params)
1745 .await
1746 .context("lsp request failed")?;
1747 request
1748 .response_from_lsp(response, this, buffer_handle, cx)
1749 .await
1750 });
1751 }
1752 } else if let Some(project_id) = self.remote_id() {
1753 let rpc = self.client.clone();
1754 let message = request.to_proto(project_id, buffer);
1755 return cx.spawn(|this, cx| async move {
1756 let response = rpc.request(message).await?;
1757 request
1758 .response_from_proto(response, this, buffer_handle, cx)
1759 .await
1760 });
1761 }
1762 Task::ready(Ok(Default::default()))
1763 }
1764
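    /// Returns the local worktree containing the given path along with the path
    /// relative to that worktree, creating a new worktree if none contains it.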
1765 pub fn find_or_create_local_worktree(
1766 &self,
1767 abs_path: impl AsRef<Path>,
1768 weak: bool,
1769 cx: &mut ModelContext<Self>,
1770 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1771 let abs_path = abs_path.as_ref();
1772 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1773 Task::ready(Ok((tree.clone(), relative_path.into())))
1774 } else {
1775 let worktree = self.create_local_worktree(abs_path, weak, cx);
1776 cx.foreground()
1777 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1778 }
1779 }
1780
1781 fn find_local_worktree(
1782 &self,
1783 abs_path: &Path,
1784 cx: &AppContext,
1785 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1786 for tree in self.worktrees(cx) {
1787 if let Some(relative_path) = tree
1788 .read(cx)
1789 .as_local()
1790 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1791 {
1792 return Some((tree.clone(), relative_path.into()));
1793 }
1794 }
1795 None
1796 }
1797
1798 pub fn is_shared(&self) -> bool {
1799 match &self.client_state {
1800 ProjectClientState::Local { is_shared, .. } => *is_shared,
1801 ProjectClientState::Remote { .. } => false,
1802 }
1803 }
1804
1805 fn create_local_worktree(
1806 &self,
1807 abs_path: impl AsRef<Path>,
1808 weak: bool,
1809 cx: &mut ModelContext<Self>,
1810 ) -> Task<Result<ModelHandle<Worktree>>> {
1811 let fs = self.fs.clone();
1812 let client = self.client.clone();
1813 let path = Arc::from(abs_path.as_ref());
1814 cx.spawn(|project, mut cx| async move {
1815 let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;
1816
1817 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
1818 project.add_worktree(&worktree, cx);
1819 (project.remote_id(), project.is_shared())
1820 });
1821
1822 if let Some(project_id) = remote_project_id {
1823 worktree
1824 .update(&mut cx, |worktree, cx| {
1825 worktree.as_local_mut().unwrap().register(project_id, cx)
1826 })
1827 .await?;
1828 if is_shared {
1829 worktree
1830 .update(&mut cx, |worktree, cx| {
1831 worktree.as_local_mut().unwrap().share(project_id, cx)
1832 })
1833 .await?;
1834 }
1835 }
1836
1837 Ok(worktree)
1838 })
1839 }
1840
1841 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1842 self.worktrees.retain(|worktree| {
1843 worktree
1844 .upgrade(cx)
1845 .map_or(false, |w| w.read(cx).id() != id)
1846 });
1847 cx.notify();
1848 }
1849
1850 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
1851 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
1852 if worktree.read(cx).is_local() {
1853 cx.subscribe(&worktree, |this, worktree, _, cx| {
1854 this.update_local_worktree_buffers(worktree, cx);
1855 })
1856 .detach();
1857 }
1858
1859 let push_weak_handle = {
1860 let worktree = worktree.read(cx);
1861 worktree.is_local() && worktree.is_weak()
1862 };
1863 if push_weak_handle {
1864 cx.observe_release(&worktree, |this, cx| {
1865 this.worktrees
1866 .retain(|worktree| worktree.upgrade(cx).is_some());
1867 cx.notify();
1868 })
1869 .detach();
1870 self.worktrees
1871 .push(WorktreeHandle::Weak(worktree.downgrade()));
1872 } else {
1873 self.worktrees
1874 .push(WorktreeHandle::Strong(worktree.clone()));
1875 }
1876 cx.notify();
1877 }
1878
1879 fn update_local_worktree_buffers(
1880 &mut self,
1881 worktree_handle: ModelHandle<Worktree>,
1882 cx: &mut ModelContext<Self>,
1883 ) {
1884 let snapshot = worktree_handle.read(cx).snapshot();
1885 let mut buffers_to_delete = Vec::new();
1886 for (buffer_id, buffer) in &self.open_buffers {
1887 if let Some(buffer) = buffer.upgrade(cx) {
1888 buffer.update(cx, |buffer, cx| {
1889 if let Some(old_file) = File::from_dyn(buffer.file()) {
1890 if old_file.worktree != worktree_handle {
1891 return;
1892 }
1893
1894 let new_file = if let Some(entry) = old_file
1895 .entry_id
1896 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
1897 {
1898 File {
1899 is_local: true,
1900 entry_id: Some(entry.id),
1901 mtime: entry.mtime,
1902 path: entry.path.clone(),
1903 worktree: worktree_handle.clone(),
1904 }
1905 } else if let Some(entry) =
1906 snapshot.entry_for_path(old_file.path().as_ref())
1907 {
1908 File {
1909 is_local: true,
1910 entry_id: Some(entry.id),
1911 mtime: entry.mtime,
1912 path: entry.path.clone(),
1913 worktree: worktree_handle.clone(),
1914 }
1915 } else {
1916 File {
1917 is_local: true,
1918 entry_id: None,
1919 path: old_file.path().clone(),
1920 mtime: old_file.mtime(),
1921 worktree: worktree_handle.clone(),
1922 }
1923 };
1924
1925 if let Some(project_id) = self.remote_id() {
1926 self.client
1927 .send(proto::UpdateBufferFile {
1928 project_id,
1929 buffer_id: *buffer_id as u64,
1930 file: Some(new_file.to_proto()),
1931 })
1932 .log_err();
1933 }
1934 buffer.file_updated(Box::new(new_file), cx).detach();
1935 }
1936 });
1937 } else {
1938 buffers_to_delete.push(*buffer_id);
1939 }
1940 }
1941
1942 for buffer_id in buffers_to_delete {
1943 self.open_buffers.remove(&buffer_id);
1944 }
1945 }
1946
1947 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
1948 let new_active_entry = entry.and_then(|project_path| {
1949 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1950 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
1951 Some(ProjectEntry {
1952 worktree_id: project_path.worktree_id,
1953 entry_id: entry.id,
1954 })
1955 });
1956 if new_active_entry != self.active_entry {
1957 self.active_entry = new_active_entry;
1958 cx.emit(Event::ActiveEntryChanged(new_active_entry));
1959 }
1960 }
1961
1962 pub fn is_running_disk_based_diagnostics(&self) -> bool {
1963 self.language_servers_with_diagnostics_running > 0
1964 }
1965
1966 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
1967 let mut summary = DiagnosticSummary::default();
1968 for (_, path_summary) in self.diagnostic_summaries(cx) {
1969 summary.error_count += path_summary.error_count;
1970 summary.warning_count += path_summary.warning_count;
1971 summary.info_count += path_summary.info_count;
1972 summary.hint_count += path_summary.hint_count;
1973 }
1974 summary
1975 }
1976
1977 pub fn diagnostic_summaries<'a>(
1978 &'a self,
1979 cx: &'a AppContext,
1980 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
1981 self.worktrees(cx).flat_map(move |worktree| {
1982 let worktree = worktree.read(cx);
1983 let worktree_id = worktree.id();
1984 worktree
1985 .diagnostic_summaries()
1986 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
1987 })
1988 }
1989
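    // Tracks how many language servers are currently running disk-based diagnostics: an
    // event is emitted when the first run starts and another when the last outstanding
    // run finishes.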
1990 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
1991 self.language_servers_with_diagnostics_running += 1;
1992 if self.language_servers_with_diagnostics_running == 1 {
1993 cx.emit(Event::DiskBasedDiagnosticsStarted);
1994 }
1995 }
1996
1997 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
1998 cx.emit(Event::DiskBasedDiagnosticsUpdated);
1999 self.language_servers_with_diagnostics_running -= 1;
2000 if self.language_servers_with_diagnostics_running == 0 {
2001 cx.emit(Event::DiskBasedDiagnosticsFinished);
2002 }
2003 }
2004
2005 pub fn active_entry(&self) -> Option<ProjectEntry> {
2006 self.active_entry
2007 }
2008
2009 // RPC message handlers
2010
2011 async fn handle_unshare_project(
2012 this: ModelHandle<Self>,
2013 _: TypedEnvelope<proto::UnshareProject>,
2014 _: Arc<Client>,
2015 mut cx: AsyncAppContext,
2016 ) -> Result<()> {
2017 this.update(&mut cx, |this, cx| {
2018 if let ProjectClientState::Remote {
2019 sharing_has_stopped,
2020 ..
2021 } = &mut this.client_state
2022 {
2023 *sharing_has_stopped = true;
2024 this.collaborators.clear();
2025 cx.notify();
2026 } else {
2027 unreachable!()
2028 }
2029 });
2030
2031 Ok(())
2032 }
2033
2034 async fn handle_add_collaborator(
2035 this: ModelHandle<Self>,
2036 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2037 _: Arc<Client>,
2038 mut cx: AsyncAppContext,
2039 ) -> Result<()> {
2040 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2041 let collaborator = envelope
2042 .payload
2043 .collaborator
2044 .take()
2045 .ok_or_else(|| anyhow!("empty collaborator"))?;
2046
2047 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2048 this.update(&mut cx, |this, cx| {
2049 this.collaborators
2050 .insert(collaborator.peer_id, collaborator);
2051 cx.notify();
2052 });
2053
2054 Ok(())
2055 }
2056
2057 async fn handle_remove_collaborator(
2058 this: ModelHandle<Self>,
2059 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2060 _: Arc<Client>,
2061 mut cx: AsyncAppContext,
2062 ) -> Result<()> {
2063 this.update(&mut cx, |this, cx| {
2064 let peer_id = PeerId(envelope.payload.peer_id);
2065 let replica_id = this
2066 .collaborators
2067 .remove(&peer_id)
2068 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2069 .replica_id;
2070 this.shared_buffers.remove(&peer_id);
2071 for (_, buffer) in &this.open_buffers {
2072 if let Some(buffer) = buffer.upgrade(cx) {
2073 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2074 }
2075 }
2076 cx.notify();
2077 Ok(())
2078 })
2079 }
2080
2081 async fn handle_share_worktree(
2082 this: ModelHandle<Self>,
2083 envelope: TypedEnvelope<proto::ShareWorktree>,
2084 client: Arc<Client>,
2085 mut cx: AsyncAppContext,
2086 ) -> Result<()> {
2087 this.update(&mut cx, |this, cx| {
2088 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2089 let replica_id = this.replica_id();
2090 let worktree = envelope
2091 .payload
2092 .worktree
2093 .ok_or_else(|| anyhow!("invalid worktree"))?;
2094 let (worktree, load_task) =
2095 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2096 this.add_worktree(&worktree, cx);
2097 load_task.detach();
2098 Ok(())
2099 })
2100 }
2101
2102 async fn handle_unregister_worktree(
2103 this: ModelHandle<Self>,
2104 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2105 _: Arc<Client>,
2106 mut cx: AsyncAppContext,
2107 ) -> Result<()> {
2108 this.update(&mut cx, |this, cx| {
2109 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2110 this.remove_worktree(worktree_id, cx);
2111 Ok(())
2112 })
2113 }
2114
2115 async fn handle_update_worktree(
2116 this: ModelHandle<Self>,
2117 envelope: TypedEnvelope<proto::UpdateWorktree>,
2118 _: Arc<Client>,
2119 mut cx: AsyncAppContext,
2120 ) -> Result<()> {
2121 this.update(&mut cx, |this, cx| {
2122 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2123 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2124 worktree.update(cx, |worktree, _| {
2125 let worktree = worktree.as_remote_mut().unwrap();
2126 worktree.update_from_remote(envelope)
2127 })?;
2128 }
2129 Ok(())
2130 })
2131 }
2132
2133 async fn handle_update_diagnostic_summary(
2134 this: ModelHandle<Self>,
2135 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2136 _: Arc<Client>,
2137 mut cx: AsyncAppContext,
2138 ) -> Result<()> {
2139 this.update(&mut cx, |this, cx| {
2140 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2141 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2142 if let Some(summary) = envelope.payload.summary {
2143 let project_path = ProjectPath {
2144 worktree_id,
2145 path: Path::new(&summary.path).into(),
2146 };
2147 worktree.update(cx, |worktree, _| {
2148 worktree
2149 .as_remote_mut()
2150 .unwrap()
2151 .update_diagnostic_summary(project_path.path.clone(), &summary);
2152 });
2153 cx.emit(Event::DiagnosticsUpdated(project_path));
2154 }
2155 }
2156 Ok(())
2157 })
2158 }
2159
2160 async fn handle_disk_based_diagnostics_updating(
2161 this: ModelHandle<Self>,
2162 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2163 _: Arc<Client>,
2164 mut cx: AsyncAppContext,
2165 ) -> Result<()> {
2166 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2167 Ok(())
2168 }
2169
2170 async fn handle_disk_based_diagnostics_updated(
2171 this: ModelHandle<Self>,
2172 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2173 _: Arc<Client>,
2174 mut cx: AsyncAppContext,
2175 ) -> Result<()> {
2176 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2177 Ok(())
2178 }
2179
2180 async fn handle_update_buffer(
2181 this: ModelHandle<Self>,
2182 envelope: TypedEnvelope<proto::UpdateBuffer>,
2183 _: Arc<Client>,
2184 mut cx: AsyncAppContext,
2185 ) -> Result<()> {
2186 this.update(&mut cx, |this, cx| {
2187 let payload = envelope.payload.clone();
2188 let buffer_id = payload.buffer_id;
2189 let ops = payload
2190 .operations
2191 .into_iter()
2192 .map(|op| language::proto::deserialize_operation(op))
2193 .collect::<Result<Vec<_>, _>>()?;
2194 let is_remote = this.is_remote();
2195 match this.open_buffers.entry(buffer_id) {
2196 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2197 OpenBuffer::Loaded(buffer) => {
2198 if let Some(buffer) = buffer.upgrade(cx) {
2199 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2200 }
2201 }
2202 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2203 },
2204 hash_map::Entry::Vacant(e) => {
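                    // The buffer isn't open yet. On remote projects with buffers still
                    // loading, queue the operations so they can be applied once the buffer
                    // finishes opening; otherwise they are dropped.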
                    if is_remote && !this.loading_buffers.is_empty() {
2206 e.insert(OpenBuffer::Loading(ops));
2207 }
2208 }
2209 }
2210 Ok(())
2211 })
2212 }
2213
2214 async fn handle_update_buffer_file(
2215 this: ModelHandle<Self>,
2216 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2217 _: Arc<Client>,
2218 mut cx: AsyncAppContext,
2219 ) -> Result<()> {
2220 this.update(&mut cx, |this, cx| {
2221 let payload = envelope.payload.clone();
2222 let buffer_id = payload.buffer_id;
2223 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2224 let worktree = this
2225 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2226 .ok_or_else(|| anyhow!("no such worktree"))?;
2227 let file = File::from_proto(file, worktree.clone(), cx)?;
2228 let buffer = this
2229 .open_buffers
2230 .get_mut(&buffer_id)
2231 .and_then(|b| b.upgrade(cx))
2232 .ok_or_else(|| anyhow!("no such buffer"))?;
2233 buffer.update(cx, |buffer, cx| {
2234 buffer.file_updated(Box::new(file), cx).detach();
2235 });
2236 Ok(())
2237 })
2238 }
2239
2240 async fn handle_save_buffer(
2241 this: ModelHandle<Self>,
2242 envelope: TypedEnvelope<proto::SaveBuffer>,
2243 _: Arc<Client>,
2244 mut cx: AsyncAppContext,
2245 ) -> Result<proto::BufferSaved> {
2246 let buffer_id = envelope.payload.buffer_id;
2247 let sender_id = envelope.original_sender_id()?;
2248 let requested_version = envelope.payload.version.try_into()?;
2249
2250 let (project_id, buffer) = this.update(&mut cx, |this, _| {
2251 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2252 let buffer = this
2253 .shared_buffers
2254 .get(&sender_id)
2255 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2256 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2257 Ok::<_, anyhow::Error>((project_id, buffer))
2258 })?;
2259
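        // Refuse to save until every edit referenced by the requested version has been received.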
2260 if !buffer
2261 .read_with(&cx, |buffer, _| buffer.version())
2262 .observed_all(&requested_version)
2263 {
2264 Err(anyhow!("save request depends on unreceived edits"))?;
2265 }
2266
2267 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2268 Ok(proto::BufferSaved {
2269 project_id,
2270 buffer_id,
2271 version: (&saved_version).into(),
2272 mtime: Some(mtime.into()),
2273 })
2274 }
2275
2276 async fn handle_format_buffers(
2277 this: ModelHandle<Self>,
2278 envelope: TypedEnvelope<proto::FormatBuffers>,
2279 _: Arc<Client>,
2280 mut cx: AsyncAppContext,
2281 ) -> Result<proto::FormatBuffersResponse> {
2282 let sender_id = envelope.original_sender_id()?;
2283 let format = this.update(&mut cx, |this, cx| {
2284 let shared_buffers = this
2285 .shared_buffers
2286 .get(&sender_id)
2287 .ok_or_else(|| anyhow!("peer has no buffers"))?;
2288 let mut buffers = HashSet::default();
2289 for buffer_id in &envelope.payload.buffer_ids {
2290 buffers.insert(
2291 shared_buffers
2292 .get(buffer_id)
2293 .cloned()
2294 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2295 );
2296 }
2297 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2298 })?;
2299
2300 let project_transaction = format.await?;
2301 let project_transaction = this.update(&mut cx, |this, cx| {
2302 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2303 });
2304 Ok(proto::FormatBuffersResponse {
2305 transaction: Some(project_transaction),
2306 })
2307 }
2308
2309 async fn handle_get_completions(
2310 this: ModelHandle<Self>,
2311 envelope: TypedEnvelope<proto::GetCompletions>,
2312 _: Arc<Client>,
2313 mut cx: AsyncAppContext,
2314 ) -> Result<proto::GetCompletionsResponse> {
2315 let sender_id = envelope.original_sender_id()?;
2316 let position = envelope
2317 .payload
2318 .position
2319 .and_then(language::proto::deserialize_anchor)
2320 .ok_or_else(|| anyhow!("invalid position"))?;
2321 let version = clock::Global::from(envelope.payload.version);
2322 let buffer = this.read_with(&cx, |this, _| {
2323 this.shared_buffers
2324 .get(&sender_id)
2325 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2326 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2327 })?;
2328 if !buffer
2329 .read_with(&cx, |buffer, _| buffer.version())
2330 .observed_all(&version)
2331 {
2332 Err(anyhow!("completion request depends on unreceived edits"))?;
2333 }
2334 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2335 let completions = this
2336 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2337 .await?;
2338
2339 Ok(proto::GetCompletionsResponse {
2340 completions: completions
2341 .iter()
2342 .map(language::proto::serialize_completion)
2343 .collect(),
2344 version: (&version).into(),
2345 })
2346 }
2347
2348 async fn handle_apply_additional_edits_for_completion(
2349 this: ModelHandle<Self>,
2350 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2351 _: Arc<Client>,
2352 mut cx: AsyncAppContext,
2353 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2354 let sender_id = envelope.original_sender_id()?;
2355 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2356 let buffer = this
2357 .shared_buffers
2358 .get(&sender_id)
2359 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2360 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2361 let language = buffer.read(cx).language();
2362 let completion = language::proto::deserialize_completion(
2363 envelope
2364 .payload
2365 .completion
2366 .ok_or_else(|| anyhow!("invalid completion"))?,
2367 language,
2368 )?;
2369 Ok::<_, anyhow::Error>(
2370 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2371 )
2372 })?;
2373
2374 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2375 transaction: apply_additional_edits
2376 .await?
2377 .as_ref()
2378 .map(language::proto::serialize_transaction),
2379 })
2380 }
2381
2382 async fn handle_get_code_actions(
2383 this: ModelHandle<Self>,
2384 envelope: TypedEnvelope<proto::GetCodeActions>,
2385 _: Arc<Client>,
2386 mut cx: AsyncAppContext,
2387 ) -> Result<proto::GetCodeActionsResponse> {
2388 let sender_id = envelope.original_sender_id()?;
2389 let start = envelope
2390 .payload
2391 .start
2392 .and_then(language::proto::deserialize_anchor)
2393 .ok_or_else(|| anyhow!("invalid start"))?;
2394 let end = envelope
2395 .payload
2396 .end
2397 .and_then(language::proto::deserialize_anchor)
2398 .ok_or_else(|| anyhow!("invalid end"))?;
2399 let buffer = this.update(&mut cx, |this, _| {
2400 this.shared_buffers
2401 .get(&sender_id)
2402 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2403 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2404 })?;
2405 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2406 if !version.observed(start.timestamp) || !version.observed(end.timestamp) {
2407 Err(anyhow!("code action request references unreceived edits"))?;
2408 }
2409 let code_actions = this.update(&mut cx, |this, cx| {
2410 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2411 })?;
2412
2413 Ok(proto::GetCodeActionsResponse {
2414 actions: code_actions
2415 .await?
2416 .iter()
2417 .map(language::proto::serialize_code_action)
2418 .collect(),
2419 version: (&version).into(),
2420 })
2421 }
2422
2423 async fn handle_apply_code_action(
2424 this: ModelHandle<Self>,
2425 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2426 _: Arc<Client>,
2427 mut cx: AsyncAppContext,
2428 ) -> Result<proto::ApplyCodeActionResponse> {
2429 let sender_id = envelope.original_sender_id()?;
2430 let action = language::proto::deserialize_code_action(
2431 envelope
2432 .payload
2433 .action
2434 .ok_or_else(|| anyhow!("invalid action"))?,
2435 )?;
2436 let apply_code_action = this.update(&mut cx, |this, cx| {
2437 let buffer = this
2438 .shared_buffers
2439 .get(&sender_id)
2440 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2441 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2442 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2443 })?;
2444
2445 let project_transaction = apply_code_action.await?;
2446 let project_transaction = this.update(&mut cx, |this, cx| {
2447 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2448 });
2449 Ok(proto::ApplyCodeActionResponse {
2450 transaction: Some(project_transaction),
2451 })
2452 }
2453
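    // Generic handler for proto requests that correspond to an LSP request: look up the
    // shared buffer, convert the message into a request, hand it to `request_lsp`, and
    // convert the response back into a proto message.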
2454 async fn handle_lsp_command<T: LspCommand>(
2455 this: ModelHandle<Self>,
2456 envelope: TypedEnvelope<T::ProtoRequest>,
2457 _: Arc<Client>,
2458 mut cx: AsyncAppContext,
2459 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
2460 where
2461 <T::LspRequest as lsp::request::Request>::Result: Send,
2462 {
2463 let sender_id = envelope.original_sender_id()?;
2464 let (request, buffer_version) = this.update(&mut cx, |this, cx| {
2465 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
2466 let buffer_handle = this
2467 .shared_buffers
2468 .get(&sender_id)
2469 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2470 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2471 let buffer = buffer_handle.read(cx);
2472 let buffer_version = buffer.version();
2473 let request = T::from_proto(envelope.payload, this, buffer)?;
2474 Ok::<_, anyhow::Error>((this.request_lsp(buffer_handle, request, cx), buffer_version))
2475 })?;
2476 let response = request.await?;
2477 this.update(&mut cx, |this, cx| {
2478 Ok(T::response_to_proto(
2479 response,
2480 this,
2481 sender_id,
2482 &buffer_version,
2483 cx,
2484 ))
2485 })
2486 }
2487
2488 async fn handle_open_buffer(
2489 this: ModelHandle<Self>,
2490 envelope: TypedEnvelope<proto::OpenBuffer>,
2491 _: Arc<Client>,
2492 mut cx: AsyncAppContext,
2493 ) -> anyhow::Result<proto::OpenBufferResponse> {
2494 let peer_id = envelope.original_sender_id()?;
2495 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2496 let open_buffer = this.update(&mut cx, |this, cx| {
2497 this.open_buffer(
2498 ProjectPath {
2499 worktree_id,
2500 path: PathBuf::from(envelope.payload.path).into(),
2501 },
2502 cx,
2503 )
2504 });
2505
2506 let buffer = open_buffer.await?;
2507 this.update(&mut cx, |this, cx| {
2508 Ok(proto::OpenBufferResponse {
2509 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
2510 })
2511 })
2512 }
2513
2514 fn serialize_project_transaction_for_peer(
2515 &mut self,
2516 project_transaction: ProjectTransaction,
2517 peer_id: PeerId,
2518 cx: &AppContext,
2519 ) -> proto::ProjectTransaction {
2520 let mut serialized_transaction = proto::ProjectTransaction {
2521 buffers: Default::default(),
2522 transactions: Default::default(),
2523 };
2524 for (buffer, transaction) in project_transaction.0 {
2525 serialized_transaction
2526 .buffers
2527 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
2528 serialized_transaction
2529 .transactions
2530 .push(language::proto::serialize_transaction(&transaction));
2531 }
2532 serialized_transaction
2533 }
2534
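    // Reconstructs a project transaction received from a peer: resolve each buffer, wait
    // until the edits referenced by its transaction have arrived, and optionally push the
    // transactions onto the buffers' undo histories.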
2535 fn deserialize_project_transaction(
2536 &mut self,
2537 message: proto::ProjectTransaction,
2538 push_to_history: bool,
2539 cx: &mut ModelContext<Self>,
2540 ) -> Task<Result<ProjectTransaction>> {
2541 cx.spawn(|this, mut cx| async move {
2542 let mut project_transaction = ProjectTransaction::default();
2543 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
2544 let buffer = this
2545 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2546 .await?;
2547 let transaction = language::proto::deserialize_transaction(transaction)?;
2548 project_transaction.0.insert(buffer, transaction);
2549 }
2550 for (buffer, transaction) in &project_transaction.0 {
2551 buffer
2552 .update(&mut cx, |buffer, _| {
2553 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2554 })
2555 .await;
2556
2557 if push_to_history {
2558 buffer.update(&mut cx, |buffer, _| {
2559 buffer.push_transaction(transaction.clone(), Instant::now());
2560 });
2561 }
2562 }
2563
2564 Ok(project_transaction)
2565 })
2566 }
2567
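    // The first time a buffer is sent to a given peer, serialize its full state and retain
    // the handle in `shared_buffers`; afterwards the buffer can be referred to by id alone.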
2568 fn serialize_buffer_for_peer(
2569 &mut self,
2570 buffer: &ModelHandle<Buffer>,
2571 peer_id: PeerId,
2572 cx: &AppContext,
2573 ) -> proto::Buffer {
2574 let buffer_id = buffer.read(cx).remote_id();
2575 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2576 match shared_buffers.entry(buffer_id) {
2577 hash_map::Entry::Occupied(_) => proto::Buffer {
2578 variant: Some(proto::buffer::Variant::Id(buffer_id)),
2579 },
2580 hash_map::Entry::Vacant(entry) => {
2581 entry.insert(buffer.clone());
2582 proto::Buffer {
2583 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
2584 }
2585 }
2586 }
2587 }
2588
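    // An incoming `proto::Buffer` either refers to an already-known buffer by id, in which
    // case we wait for it to appear in `open_buffers`, or it carries the buffer's full
    // state, from which a new buffer model is constructed and registered.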
2589 fn deserialize_buffer(
2590 &mut self,
2591 buffer: proto::Buffer,
2592 cx: &mut ModelContext<Self>,
2593 ) -> Task<Result<ModelHandle<Buffer>>> {
2594 let replica_id = self.replica_id();
2595
2596 let mut opened_buffer_tx = self.opened_buffer.clone();
2597 let mut opened_buffer_rx = self.opened_buffer.subscribe();
2598 cx.spawn(|this, mut cx| async move {
2599 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
2600 proto::buffer::Variant::Id(id) => {
2601 let buffer = loop {
2602 let buffer = this.read_with(&cx, |this, cx| {
2603 this.open_buffers
2604 .get(&id)
2605 .and_then(|buffer| buffer.upgrade(cx))
2606 });
2607 if let Some(buffer) = buffer {
2608 break buffer;
2609 }
2610 opened_buffer_rx
2611 .recv()
2612 .await
2613 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
2614 };
2615 Ok(buffer)
2616 }
2617 proto::buffer::Variant::State(mut buffer) => {
2618 let mut buffer_worktree = None;
2619 let mut buffer_file = None;
2620 if let Some(file) = buffer.file.take() {
2621 this.read_with(&cx, |this, cx| {
2622 let worktree_id = WorktreeId::from_proto(file.worktree_id);
2623 let worktree =
2624 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
2625 anyhow!("no worktree found for id {}", file.worktree_id)
2626 })?;
2627 buffer_file =
2628 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
2629 as Box<dyn language::File>);
2630 buffer_worktree = Some(worktree);
2631 Ok::<_, anyhow::Error>(())
2632 })?;
2633 }
2634
2635 let buffer = cx.add_model(|cx| {
2636 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
2637 });
2638 this.update(&mut cx, |this, cx| {
2639 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
2640 })?;
2641
2642 let _ = opened_buffer_tx.send(()).await;
2643 Ok(buffer)
2644 }
2645 }
2646 })
2647 }
2648
2649 async fn handle_close_buffer(
2650 this: ModelHandle<Self>,
2651 envelope: TypedEnvelope<proto::CloseBuffer>,
2652 _: Arc<Client>,
2653 mut cx: AsyncAppContext,
2654 ) -> anyhow::Result<()> {
2655 this.update(&mut cx, |this, cx| {
2656 if let Some(shared_buffers) =
2657 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
2658 {
2659 shared_buffers.remove(&envelope.payload.buffer_id);
2660 cx.notify();
2661 }
2662 Ok(())
2663 })
2664 }
2665
2666 async fn handle_buffer_saved(
2667 this: ModelHandle<Self>,
2668 envelope: TypedEnvelope<proto::BufferSaved>,
2669 _: Arc<Client>,
2670 mut cx: AsyncAppContext,
2671 ) -> Result<()> {
2672 let version = envelope.payload.version.try_into()?;
2673 let mtime = envelope
2674 .payload
2675 .mtime
2676 .ok_or_else(|| anyhow!("missing mtime"))?
2677 .into();
2678
2679 this.update(&mut cx, |this, cx| {
2680 let buffer = this
2681 .open_buffers
2682 .get(&envelope.payload.buffer_id)
2683 .and_then(|buffer| buffer.upgrade(cx));
2684 if let Some(buffer) = buffer {
2685 buffer.update(cx, |buffer, cx| {
2686 buffer.did_save(version, mtime, None, cx);
2687 });
2688 }
2689 Ok(())
2690 })
2691 }
2692
2693 async fn handle_buffer_reloaded(
2694 this: ModelHandle<Self>,
2695 envelope: TypedEnvelope<proto::BufferReloaded>,
2696 _: Arc<Client>,
2697 mut cx: AsyncAppContext,
2698 ) -> Result<()> {
2699 let payload = envelope.payload.clone();
2700 let version = payload.version.try_into()?;
2701 let mtime = payload
2702 .mtime
2703 .ok_or_else(|| anyhow!("missing mtime"))?
2704 .into();
2705 this.update(&mut cx, |this, cx| {
2706 let buffer = this
2707 .open_buffers
2708 .get(&payload.buffer_id)
2709 .and_then(|buffer| buffer.upgrade(cx));
2710 if let Some(buffer) = buffer {
2711 buffer.update(cx, |buffer, cx| {
2712 buffer.did_reload(version, mtime, cx);
2713 });
2714 }
2715 Ok(())
2716 })
2717 }
2718
2719 pub fn match_paths<'a>(
2720 &self,
2721 query: &'a str,
2722 include_ignored: bool,
2723 smart_case: bool,
2724 max_results: usize,
2725 cancel_flag: &'a AtomicBool,
2726 cx: &AppContext,
2727 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
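        // Weak worktrees are excluded from path matching. Worktree root names are included
        // in candidate paths only when the project contains more than one worktree.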
2728 let worktrees = self
2729 .worktrees(cx)
2730 .filter(|worktree| !worktree.read(cx).is_weak())
2731 .collect::<Vec<_>>();
2732 let include_root_name = worktrees.len() > 1;
2733 let candidate_sets = worktrees
2734 .into_iter()
2735 .map(|worktree| CandidateSet {
2736 snapshot: worktree.read(cx).snapshot(),
2737 include_ignored,
2738 include_root_name,
2739 })
2740 .collect::<Vec<_>>();
2741
2742 let background = cx.background().clone();
2743 async move {
2744 fuzzy::match_paths(
2745 candidate_sets.as_slice(),
2746 query,
2747 smart_case,
2748 max_results,
2749 cancel_flag,
2750 background,
2751 )
2752 .await
2753 }
2754 }
2755}
2756
2757impl WorktreeHandle {
2758 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2759 match self {
2760 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2761 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2762 }
2763 }
2764}
2765
2766impl OpenBuffer {
2767 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
2768 match self {
2769 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
2770 OpenBuffer::Loading(_) => None,
2771 }
2772 }
2773}
2774
2775struct CandidateSet {
2776 snapshot: Snapshot,
2777 include_ignored: bool,
2778 include_root_name: bool,
2779}
2780
2781impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
2782 type Candidates = CandidateSetIter<'a>;
2783
2784 fn id(&self) -> usize {
2785 self.snapshot.id().to_usize()
2786 }
2787
2788 fn len(&self) -> usize {
2789 if self.include_ignored {
2790 self.snapshot.file_count()
2791 } else {
2792 self.snapshot.visible_file_count()
2793 }
2794 }
2795
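    // If the worktree's root is a single file, the prefix is that file's name; otherwise
    // the root directory name is prepended only when matching across multiple worktrees.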
2796 fn prefix(&self) -> Arc<str> {
2797 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
2798 self.snapshot.root_name().into()
2799 } else if self.include_root_name {
2800 format!("{}/", self.snapshot.root_name()).into()
2801 } else {
2802 "".into()
2803 }
2804 }
2805
2806 fn candidates(&'a self, start: usize) -> Self::Candidates {
2807 CandidateSetIter {
2808 traversal: self.snapshot.files(self.include_ignored, start),
2809 }
2810 }
2811}
2812
2813struct CandidateSetIter<'a> {
2814 traversal: Traversal<'a>,
2815}
2816
2817impl<'a> Iterator for CandidateSetIter<'a> {
2818 type Item = PathMatchCandidate<'a>;
2819
2820 fn next(&mut self) -> Option<Self::Item> {
2821 self.traversal.next().map(|entry| {
2822 if let EntryKind::File(char_bag) = entry.kind {
2823 PathMatchCandidate {
2824 path: &entry.path,
2825 char_bag,
2826 }
2827 } else {
2828 unreachable!()
2829 }
2830 })
2831 }
2832}
2833
2834impl Entity for Project {
2835 type Event = Event;
2836
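    // When the project model is released, notify the server: unregister a local project
    // that was registered, or leave a remote project.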
2837 fn release(&mut self, _: &mut gpui::MutableAppContext) {
2838 match &self.client_state {
2839 ProjectClientState::Local { remote_id_rx, .. } => {
2840 if let Some(project_id) = *remote_id_rx.borrow() {
2841 self.client
2842 .send(proto::UnregisterProject { project_id })
2843 .log_err();
2844 }
2845 }
2846 ProjectClientState::Remote { remote_id, .. } => {
2847 self.client
2848 .send(proto::LeaveProject {
2849 project_id: *remote_id,
2850 })
2851 .log_err();
2852 }
2853 }
2854 }
2855
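    // Before the app quits, ask every language server to shut down and wait for all of
    // them to finish.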
2856 fn app_will_quit(
2857 &mut self,
2858 _: &mut MutableAppContext,
2859 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
2860 use futures::FutureExt;
2861
2862 let shutdown_futures = self
2863 .language_servers
2864 .drain()
2865 .filter_map(|(_, server)| server.shutdown())
2866 .collect::<Vec<_>>();
2867 Some(
2868 async move {
2869 futures::future::join_all(shutdown_futures).await;
2870 }
2871 .boxed(),
2872 )
2873 }
2874}
2875
2876impl Collaborator {
2877 fn from_proto(
2878 message: proto::Collaborator,
2879 user_store: &ModelHandle<UserStore>,
2880 cx: &mut AsyncAppContext,
2881 ) -> impl Future<Output = Result<Self>> {
2882 let user = user_store.update(cx, |user_store, cx| {
2883 user_store.fetch_user(message.user_id, cx)
2884 });
2885
2886 async move {
2887 Ok(Self {
2888 peer_id: PeerId(message.peer_id),
2889 user: user.await?,
2890 replica_id: message.replica_id as ReplicaId,
2891 })
2892 }
2893 }
2894}
2895
2896impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
2897 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
2898 Self {
2899 worktree_id,
2900 path: path.as_ref().into(),
2901 }
2902 }
2903}
2904
2905impl From<lsp::CreateFileOptions> for fs::CreateOptions {
2906 fn from(options: lsp::CreateFileOptions) -> Self {
2907 Self {
2908 overwrite: options.overwrite.unwrap_or(false),
2909 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2910 }
2911 }
2912}
2913
2914impl From<lsp::RenameFileOptions> for fs::RenameOptions {
2915 fn from(options: lsp::RenameFileOptions) -> Self {
2916 Self {
2917 overwrite: options.overwrite.unwrap_or(false),
2918 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2919 }
2920 }
2921}
2922
2923impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
2924 fn from(options: lsp::DeleteFileOptions) -> Self {
2925 Self {
2926 recursive: options.recursive.unwrap_or(false),
2927 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
2928 }
2929 }
2930}
2931
2932#[cfg(test)]
2933mod tests {
2934 use super::{Event, *};
2935 use fs::RealFs;
2936 use futures::StreamExt;
2937 use gpui::test::subscribe;
2938 use language::{
2939 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
2940 };
2941 use lsp::Url;
2942 use serde_json::json;
2943 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
2944 use unindent::Unindent as _;
2945 use util::test::temp_tree;
2946 use worktree::WorktreeHandle as _;
2947
2948 #[gpui::test]
2949 async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
2950 let dir = temp_tree(json!({
2951 "root": {
2952 "apple": "",
2953 "banana": {
2954 "carrot": {
2955 "date": "",
2956 "endive": "",
2957 }
2958 },
2959 "fennel": {
2960 "grape": "",
2961 }
2962 }
2963 }));
2964
2965 let root_link_path = dir.path().join("root_link");
2966 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
2967 unix::fs::symlink(
2968 &dir.path().join("root/fennel"),
2969 &dir.path().join("root/finnochio"),
2970 )
2971 .unwrap();
2972
2973 let project = Project::test(Arc::new(RealFs), &mut cx);
2974
2975 let (tree, _) = project
2976 .update(&mut cx, |project, cx| {
2977 project.find_or_create_local_worktree(&root_link_path, false, cx)
2978 })
2979 .await
2980 .unwrap();
2981
2982 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2983 .await;
2984 cx.read(|cx| {
2985 let tree = tree.read(cx);
2986 assert_eq!(tree.file_count(), 5);
2987 assert_eq!(
2988 tree.inode_for_path("fennel/grape"),
2989 tree.inode_for_path("finnochio/grape")
2990 );
2991 });
2992
2993 let cancel_flag = Default::default();
2994 let results = project
2995 .read_with(&cx, |project, cx| {
2996 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
2997 })
2998 .await;
2999 assert_eq!(
3000 results
3001 .into_iter()
3002 .map(|result| result.path)
3003 .collect::<Vec<Arc<Path>>>(),
3004 vec![
3005 PathBuf::from("banana/carrot/date").into(),
3006 PathBuf::from("banana/carrot/endive").into(),
3007 ]
3008 );
3009 }
3010
3011 #[gpui::test]
3012 async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
3013 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3014 let progress_token = language_server_config
3015 .disk_based_diagnostics_progress_token
3016 .clone()
3017 .unwrap();
3018
3019 let language = Arc::new(Language::new(
3020 LanguageConfig {
3021 name: "Rust".to_string(),
3022 path_suffixes: vec!["rs".to_string()],
3023 language_server: Some(language_server_config),
3024 ..Default::default()
3025 },
3026 Some(tree_sitter_rust::language()),
3027 ));
3028
3029 let fs = FakeFs::new(cx.background());
3030 fs.insert_tree(
3031 "/dir",
3032 json!({
3033 "a.rs": "fn a() { A }",
3034 "b.rs": "const y: i32 = 1",
3035 }),
3036 )
3037 .await;
3038
3039 let project = Project::test(fs, &mut cx);
3040 project.update(&mut cx, |project, _| {
3041 Arc::get_mut(&mut project.languages).unwrap().add(language);
3042 });
3043
3044 let (tree, _) = project
3045 .update(&mut cx, |project, cx| {
3046 project.find_or_create_local_worktree("/dir", false, cx)
3047 })
3048 .await
3049 .unwrap();
3050 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3051
3052 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3053 .await;
3054
3055 // Cause worktree to start the fake language server
3056 let _buffer = project
3057 .update(&mut cx, |project, cx| {
3058 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3059 })
3060 .await
3061 .unwrap();
3062
3063 let mut events = subscribe(&project, &mut cx);
3064
3065 let mut fake_server = fake_servers.next().await.unwrap();
3066 fake_server.start_progress(&progress_token).await;
3067 assert_eq!(
3068 events.next().await.unwrap(),
3069 Event::DiskBasedDiagnosticsStarted
3070 );
3071
3072 fake_server.start_progress(&progress_token).await;
3073 fake_server.end_progress(&progress_token).await;
3074 fake_server.start_progress(&progress_token).await;
3075
3076 fake_server
3077 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3078 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3079 version: None,
3080 diagnostics: vec![lsp::Diagnostic {
3081 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3082 severity: Some(lsp::DiagnosticSeverity::ERROR),
3083 message: "undefined variable 'A'".to_string(),
3084 ..Default::default()
3085 }],
3086 })
3087 .await;
3088 assert_eq!(
3089 events.next().await.unwrap(),
3090 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3091 );
3092
3093 fake_server.end_progress(&progress_token).await;
3094 fake_server.end_progress(&progress_token).await;
3095 assert_eq!(
3096 events.next().await.unwrap(),
3097 Event::DiskBasedDiagnosticsUpdated
3098 );
3099 assert_eq!(
3100 events.next().await.unwrap(),
3101 Event::DiskBasedDiagnosticsFinished
3102 );
3103
3104 let buffer = project
3105 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3106 .await
3107 .unwrap();
3108
3109 buffer.read_with(&cx, |buffer, _| {
3110 let snapshot = buffer.snapshot();
3111 let diagnostics = snapshot
3112 .diagnostics_in_range::<_, Point>(0..buffer.len())
3113 .collect::<Vec<_>>();
3114 assert_eq!(
3115 diagnostics,
3116 &[DiagnosticEntry {
3117 range: Point::new(0, 9)..Point::new(0, 10),
3118 diagnostic: Diagnostic {
3119 severity: lsp::DiagnosticSeverity::ERROR,
3120 message: "undefined variable 'A'".to_string(),
3121 group_id: 0,
3122 is_primary: true,
3123 ..Default::default()
3124 }
3125 }]
3126 )
3127 });
3128 }
3129
3130 #[gpui::test]
3131 async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
3132 let dir = temp_tree(json!({
3133 "root": {
3134 "dir1": {},
3135 "dir2": {
3136 "dir3": {}
3137 }
3138 }
3139 }));
3140
3141 let project = Project::test(Arc::new(RealFs), &mut cx);
3142 let (tree, _) = project
3143 .update(&mut cx, |project, cx| {
3144 project.find_or_create_local_worktree(&dir.path(), false, cx)
3145 })
3146 .await
3147 .unwrap();
3148
3149 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3150 .await;
3151
3152 let cancel_flag = Default::default();
3153 let results = project
3154 .read_with(&cx, |project, cx| {
3155 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3156 })
3157 .await;
3158
3159 assert!(results.is_empty());
3160 }
3161
3162 #[gpui::test]
3163 async fn test_definition(mut cx: gpui::TestAppContext) {
3164 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3165 let language = Arc::new(Language::new(
3166 LanguageConfig {
3167 name: "Rust".to_string(),
3168 path_suffixes: vec!["rs".to_string()],
3169 language_server: Some(language_server_config),
3170 ..Default::default()
3171 },
3172 Some(tree_sitter_rust::language()),
3173 ));
3174
3175 let fs = FakeFs::new(cx.background());
3176 fs.insert_tree(
3177 "/dir",
3178 json!({
3179 "a.rs": "const fn a() { A }",
3180 "b.rs": "const y: i32 = crate::a()",
3181 }),
3182 )
3183 .await;
3184
3185 let project = Project::test(fs, &mut cx);
3186 project.update(&mut cx, |project, _| {
3187 Arc::get_mut(&mut project.languages).unwrap().add(language);
3188 });
3189
3190 let (tree, _) = project
3191 .update(&mut cx, |project, cx| {
3192 project.find_or_create_local_worktree("/dir/b.rs", false, cx)
3193 })
3194 .await
3195 .unwrap();
3196 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3197 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3198 .await;
3199
3200 let buffer = project
3201 .update(&mut cx, |project, cx| {
3202 project.open_buffer(
3203 ProjectPath {
3204 worktree_id,
3205 path: Path::new("").into(),
3206 },
3207 cx,
3208 )
3209 })
3210 .await
3211 .unwrap();
3212
3213 let mut fake_server = fake_servers.next().await.unwrap();
3214 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params| {
3215 let params = params.text_document_position_params;
3216 assert_eq!(
3217 params.text_document.uri.to_file_path().unwrap(),
3218 Path::new("/dir/b.rs"),
3219 );
3220 assert_eq!(params.position, lsp::Position::new(0, 22));
3221
3222 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3223 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
3224 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3225 )))
3226 });
3227
3228 let mut definitions = project
3229 .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
3230 .await
3231 .unwrap();
3232
3233 assert_eq!(definitions.len(), 1);
3234 let definition = definitions.pop().unwrap();
3235 cx.update(|cx| {
3236 let target_buffer = definition.target_buffer.read(cx);
3237 assert_eq!(
3238 target_buffer
3239 .file()
3240 .unwrap()
3241 .as_local()
3242 .unwrap()
3243 .abs_path(cx),
3244 Path::new("/dir/a.rs"),
3245 );
3246 assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
3247 assert_eq!(
3248 list_worktrees(&project, cx),
3249 [("/dir/b.rs".as_ref(), false), ("/dir/a.rs".as_ref(), true)]
3250 );
3251
3252 drop(definition);
3253 });
3254 cx.read(|cx| {
3255 assert_eq!(
3256 list_worktrees(&project, cx),
3257 [("/dir/b.rs".as_ref(), false)]
3258 );
3259 });
3260
3261 fn list_worktrees<'a>(
3262 project: &'a ModelHandle<Project>,
3263 cx: &'a AppContext,
3264 ) -> Vec<(&'a Path, bool)> {
3265 project
3266 .read(cx)
3267 .worktrees(cx)
3268 .map(|worktree| {
3269 let worktree = worktree.read(cx);
3270 (
3271 worktree.as_local().unwrap().abs_path().as_ref(),
3272 worktree.is_weak(),
3273 )
3274 })
3275 .collect::<Vec<_>>()
3276 }
3277 }
3278
3279 #[gpui::test]
3280 async fn test_save_file(mut cx: gpui::TestAppContext) {
3281 let fs = FakeFs::new(cx.background());
3282 fs.insert_tree(
3283 "/dir",
3284 json!({
3285 "file1": "the old contents",
3286 }),
3287 )
3288 .await;
3289
3290 let project = Project::test(fs.clone(), &mut cx);
3291 let worktree_id = project
3292 .update(&mut cx, |p, cx| {
3293 p.find_or_create_local_worktree("/dir", false, cx)
3294 })
3295 .await
3296 .unwrap()
3297 .0
3298 .read_with(&cx, |tree, _| tree.id());
3299
3300 let buffer = project
3301 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3302 .await
3303 .unwrap();
3304 buffer
3305 .update(&mut cx, |buffer, cx| {
3306 assert_eq!(buffer.text(), "the old contents");
3307 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3308 buffer.save(cx)
3309 })
3310 .await
3311 .unwrap();
3312
3313 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3314 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3315 }
3316
3317 #[gpui::test]
3318 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
3319 let fs = FakeFs::new(cx.background());
3320 fs.insert_tree(
3321 "/dir",
3322 json!({
3323 "file1": "the old contents",
3324 }),
3325 )
3326 .await;
3327
3328 let project = Project::test(fs.clone(), &mut cx);
3329 let worktree_id = project
3330 .update(&mut cx, |p, cx| {
3331 p.find_or_create_local_worktree("/dir/file1", false, cx)
3332 })
3333 .await
3334 .unwrap()
3335 .0
3336 .read_with(&cx, |tree, _| tree.id());
3337
3338 let buffer = project
3339 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3340 .await
3341 .unwrap();
3342 buffer
3343 .update(&mut cx, |buffer, cx| {
3344 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3345 buffer.save(cx)
3346 })
3347 .await
3348 .unwrap();
3349
3350 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3351 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3352 }
3353
3354 #[gpui::test(retries = 5)]
3355 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3356 let dir = temp_tree(json!({
3357 "a": {
3358 "file1": "",
3359 "file2": "",
3360 "file3": "",
3361 },
3362 "b": {
3363 "c": {
3364 "file4": "",
3365 "file5": "",
3366 }
3367 }
3368 }));
3369
3370 let project = Project::test(Arc::new(RealFs), &mut cx);
3371 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3372
3373 let (tree, _) = project
3374 .update(&mut cx, |p, cx| {
3375 p.find_or_create_local_worktree(dir.path(), false, cx)
3376 })
3377 .await
3378 .unwrap();
3379 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3380
3381 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3382 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3383 async move { buffer.await.unwrap() }
3384 };
3385 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3386 tree.read_with(cx, |tree, _| {
3387 tree.entry_for_path(path)
3388 .expect(&format!("no entry for path {}", path))
3389 .id
3390 })
3391 };
3392
3393 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3394 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3395 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3396 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3397
3398 let file2_id = id_for_path("a/file2", &cx);
3399 let file3_id = id_for_path("a/file3", &cx);
3400 let file4_id = id_for_path("b/c/file4", &cx);
3401
3402 // Wait for the initial scan.
3403 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3404 .await;
3405
3406 // Create a remote copy of this worktree.
3407 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
3408 let (remote, load_task) = cx.update(|cx| {
3409 Worktree::remote(
3410 1,
3411 1,
3412 initial_snapshot.to_proto(&Default::default(), Default::default()),
3413 rpc.clone(),
3414 cx,
3415 )
3416 });
3417 load_task.await;
3418
3419 cx.read(|cx| {
3420 assert!(!buffer2.read(cx).is_dirty());
3421 assert!(!buffer3.read(cx).is_dirty());
3422 assert!(!buffer4.read(cx).is_dirty());
3423 assert!(!buffer5.read(cx).is_dirty());
3424 });
3425
3426 // Rename and delete files and directories.
3427 tree.flush_fs_events(&cx).await;
3428 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3429 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3430 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3431 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3432 tree.flush_fs_events(&cx).await;
3433
3434 let expected_paths = vec![
3435 "a",
3436 "a/file1",
3437 "a/file2.new",
3438 "b",
3439 "d",
3440 "d/file3",
3441 "d/file4",
3442 ];
3443
3444 cx.read(|app| {
3445 assert_eq!(
3446 tree.read(app)
3447 .paths()
3448 .map(|p| p.to_str().unwrap())
3449 .collect::<Vec<_>>(),
3450 expected_paths
3451 );
3452
3453 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3454 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3455 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3456
3457 assert_eq!(
3458 buffer2.read(app).file().unwrap().path().as_ref(),
3459 Path::new("a/file2.new")
3460 );
3461 assert_eq!(
3462 buffer3.read(app).file().unwrap().path().as_ref(),
3463 Path::new("d/file3")
3464 );
3465 assert_eq!(
3466 buffer4.read(app).file().unwrap().path().as_ref(),
3467 Path::new("d/file4")
3468 );
3469 assert_eq!(
3470 buffer5.read(app).file().unwrap().path().as_ref(),
3471 Path::new("b/c/file5")
3472 );
3473
3474 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3475 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3476 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3477 assert!(buffer5.read(app).file().unwrap().is_deleted());
3478 });
3479
3480 // Update the remote worktree. Check that it becomes consistent with the
3481 // local worktree.
3482 remote.update(&mut cx, |remote, cx| {
3483 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
3484 &initial_snapshot,
3485 1,
3486 1,
3487 0,
3488 true,
3489 );
3490 remote
3491 .as_remote_mut()
3492 .unwrap()
3493 .snapshot
3494 .apply_remote_update(update_message)
3495 .unwrap();
3496
3497 assert_eq!(
3498 remote
3499 .paths()
3500 .map(|p| p.to_str().unwrap())
3501 .collect::<Vec<_>>(),
3502 expected_paths
3503 );
3504 });
3505 }
3506
3507 #[gpui::test]
3508 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3509 let fs = FakeFs::new(cx.background());
3510 fs.insert_tree(
3511 "/the-dir",
3512 json!({
3513 "a.txt": "a-contents",
3514 "b.txt": "b-contents",
3515 }),
3516 )
3517 .await;
3518
3519 let project = Project::test(fs.clone(), &mut cx);
3520 let worktree_id = project
3521 .update(&mut cx, |p, cx| {
3522 p.find_or_create_local_worktree("/the-dir", false, cx)
3523 })
3524 .await
3525 .unwrap()
3526 .0
3527 .read_with(&cx, |tree, _| tree.id());
3528
3529 // Spawn multiple tasks to open paths, repeating some paths.
3530 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3531 (
3532 p.open_buffer((worktree_id, "a.txt"), cx),
3533 p.open_buffer((worktree_id, "b.txt"), cx),
3534 p.open_buffer((worktree_id, "a.txt"), cx),
3535 )
3536 });
3537
3538 let buffer_a_1 = buffer_a_1.await.unwrap();
3539 let buffer_a_2 = buffer_a_2.await.unwrap();
3540 let buffer_b = buffer_b.await.unwrap();
3541 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3542 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3543
3544 // There is only one buffer per path.
3545 let buffer_a_id = buffer_a_1.id();
3546 assert_eq!(buffer_a_2.id(), buffer_a_id);
3547
3548 // Open the same path again while it is still open.
3549 drop(buffer_a_1);
3550 let buffer_a_3 = project
3551 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3552 .await
3553 .unwrap();
3554
3555 // There's still only one buffer per path.
3556 assert_eq!(buffer_a_3.id(), buffer_a_id);
3557 }
3558
3559 #[gpui::test]
3560 async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
3561 use std::fs;
3562
3563 let dir = temp_tree(json!({
3564 "file1": "abc",
3565 "file2": "def",
3566 "file3": "ghi",
3567 }));
3568
3569 let project = Project::test(Arc::new(RealFs), &mut cx);
3570 let (worktree, _) = project
3571 .update(&mut cx, |p, cx| {
3572 p.find_or_create_local_worktree(dir.path(), false, cx)
3573 })
3574 .await
3575 .unwrap();
3576 let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
3577
3578 worktree.flush_fs_events(&cx).await;
3579 worktree
3580 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3581 .await;
3582
3583 let buffer1 = project
3584 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3585 .await
3586 .unwrap();
3587 let events = Rc::new(RefCell::new(Vec::new()));
3588
3589 // initially, the buffer isn't dirty.
3590 buffer1.update(&mut cx, |buffer, cx| {
3591 cx.subscribe(&buffer1, {
3592 let events = events.clone();
3593 move |_, _, event, _| events.borrow_mut().push(event.clone())
3594 })
3595 .detach();
3596
3597 assert!(!buffer.is_dirty());
3598 assert!(events.borrow().is_empty());
3599
3600 buffer.edit(vec![1..2], "", cx);
3601 });
3602
3603 // after the first edit, the buffer is dirty, and emits a dirtied event.
3604 buffer1.update(&mut cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
3606 assert!(buffer.is_dirty());
3607 assert_eq!(
3608 *events.borrow(),
3609 &[language::Event::Edited, language::Event::Dirtied]
3610 );
3611 events.borrow_mut().clear();
3612 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
3613 });
3614
3615 // after saving, the buffer is not dirty, and emits a saved event.
3616 buffer1.update(&mut cx, |buffer, cx| {
3617 assert!(!buffer.is_dirty());
3618 assert_eq!(*events.borrow(), &[language::Event::Saved]);
3619 events.borrow_mut().clear();
3620
3621 buffer.edit(vec![1..1], "B", cx);
3622 buffer.edit(vec![2..2], "D", cx);
3623 });
3624
3625 // after editing again, the buffer is dirty, and emits another dirty event.
3626 buffer1.update(&mut cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
3628 assert!(buffer.is_dirty());
3629 assert_eq!(
3630 *events.borrow(),
3631 &[
3632 language::Event::Edited,
3633 language::Event::Dirtied,
3634 language::Event::Edited,
3635 ],
3636 );
3637 events.borrow_mut().clear();
3638
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
3641 buffer.edit([1..3], "", cx);
            assert_eq!(buffer.text(), "ac");
3643 assert!(buffer.is_dirty());
3644 });
3645
3646 assert_eq!(*events.borrow(), &[language::Event::Edited]);
3647
3648 // When a file is deleted, the buffer is considered dirty.
3649 let events = Rc::new(RefCell::new(Vec::new()));
3650 let buffer2 = project
3651 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
3652 .await
3653 .unwrap();
3654 buffer2.update(&mut cx, |_, cx| {
3655 cx.subscribe(&buffer2, {
3656 let events = events.clone();
3657 move |_, _, event, _| events.borrow_mut().push(event.clone())
3658 })
3659 .detach();
3660 });
3661
3662 fs::remove_file(dir.path().join("file2")).unwrap();
3663 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
3664 assert_eq!(
3665 *events.borrow(),
3666 &[language::Event::Dirtied, language::Event::FileHandleChanged]
3667 );
3668
3669 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3670 let events = Rc::new(RefCell::new(Vec::new()));
3671 let buffer3 = project
3672 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
3673 .await
3674 .unwrap();
3675 buffer3.update(&mut cx, |_, cx| {
3676 cx.subscribe(&buffer3, {
3677 let events = events.clone();
3678 move |_, _, event, _| events.borrow_mut().push(event.clone())
3679 })
3680 .detach();
3681 });
3682
3683 worktree.flush_fs_events(&cx).await;
3684 buffer3.update(&mut cx, |buffer, cx| {
3685 buffer.edit(Some(0..0), "x", cx);
3686 });
3687 events.borrow_mut().clear();
3688 fs::remove_file(dir.path().join("file3")).unwrap();
3689 buffer3
3690 .condition(&cx, |_, _| !events.borrow().is_empty())
3691 .await;
3692 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
3693 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3694 }
3695
3696 #[gpui::test]
3697 async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
3698 use std::fs;
3699
3700 let initial_contents = "aaa\nbbbbb\nc\n";
3701 let dir = temp_tree(json!({ "the-file": initial_contents }));
3702
3703 let project = Project::test(Arc::new(RealFs), &mut cx);
3704 let (worktree, _) = project
3705 .update(&mut cx, |p, cx| {
3706 p.find_or_create_local_worktree(dir.path(), false, cx)
3707 })
3708 .await
3709 .unwrap();
3710 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
3711
3712 worktree
3713 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3714 .await;
3715
3716 let abs_path = dir.path().join("the-file");
3717 let buffer = project
3718 .update(&mut cx, |p, cx| {
3719 p.open_buffer((worktree_id, "the-file"), cx)
3720 })
3721 .await
3722 .unwrap();
3723
3724 // TODO
3725 // Add a cursor on each row.
3726 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
3727 // assert!(!buffer.is_dirty());
3728 // buffer.add_selection_set(
3729 // &(0..3)
3730 // .map(|row| Selection {
3731 // id: row as usize,
3732 // start: Point::new(row, 1),
3733 // end: Point::new(row, 1),
3734 // reversed: false,
3735 // goal: SelectionGoal::None,
3736 // })
3737 // .collect::<Vec<_>>(),
3738 // cx,
3739 // )
3740 // });
3741
3742 // Change the file on disk, adding two new lines of text, and removing
3743 // one line.
3744 buffer.read_with(&cx, |buffer, _| {
3745 assert!(!buffer.is_dirty());
3746 assert!(!buffer.has_conflict());
3747 });
3748 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3749 fs::write(&abs_path, new_contents).unwrap();
3750
3751 // Because the buffer was not modified, it is reloaded from disk. Its
3752 // contents are edited according to the diff between the old and new
3753 // file contents.
3754 buffer
3755 .condition(&cx, |buffer, _| buffer.text() == new_contents)
3756 .await;
3757
3758 buffer.update(&mut cx, |buffer, _| {
3759 assert_eq!(buffer.text(), new_contents);
3760 assert!(!buffer.is_dirty());
3761 assert!(!buffer.has_conflict());
3762
3763 // TODO
3764 // let cursor_positions = buffer
3765 // .selection_set(selection_set_id)
3766 // .unwrap()
3767 // .selections::<Point>(&*buffer)
3768 // .map(|selection| {
3769 // assert_eq!(selection.start, selection.end);
3770 // selection.start
3771 // })
3772 // .collect::<Vec<_>>();
3773 // assert_eq!(
3774 // cursor_positions,
3775 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
3776 // );
3777 });
3778
3779 // Modify the buffer
3780 buffer.update(&mut cx, |buffer, cx| {
3781 buffer.edit(vec![0..0], " ", cx);
3782 assert!(buffer.is_dirty());
3783 assert!(!buffer.has_conflict());
3784 });
3785
3786 // Change the file on disk again, adding blank lines to the beginning.
3787 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
3788
3789 // Because the buffer is modified, it doesn't reload from disk, but is
3790 // marked as having a conflict.
3791 buffer
3792 .condition(&cx, |buffer, _| buffer.has_conflict())
3793 .await;
3794 }
3795
    #[gpui::test]
    async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

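        // Publish a set of diagnostics in which the two primary entries
        // ("error 1" and "error 2") carry `related_information` pointing at
        // hint entries, and the hint entries point back at their
        // "original diagnostic".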
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(&mut cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());

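        // Querying the full buffer should yield the grouped entries: the
        // "error 1" diagnostics share group 0, the "error 2" diagnostics share
        // group 1, and only the primary entry in each group is marked
        // `is_primary`.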
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

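        // Each group can also be queried on its own; `diagnostic_group` returns
        // only the entries that share the given group id.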
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }

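    // Rename flows through two project methods: `prepare_rename` asks the
    // language server for the range of the symbol under the cursor, and
    // `perform_rename` applies the server's `WorkspaceEdit` to every affected
    // buffer, collecting the results in a `ProjectTransaction`.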
    #[gpui::test]
    async fn test_rename(mut cx: gpui::TestAppContext) {
        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), &mut cx);
        project.update(&mut cx, |project, _| {
            Arc::get_mut(&mut project.languages).unwrap().add(language);
        });

        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        let buffer = project
            .update(&mut cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("one.rs")), cx)
            })
            .await
            .unwrap();

        let mut fake_server = fake_servers.next().await.unwrap();

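        // Prepare the rename at offset 7 (inside `ONE`); the fake server
        // reports that the renameable range spans columns 6..9 of the first
        // line.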
        let response = project.update(&mut cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _>(|params| {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                )))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(&cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

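        // Perform the rename; the fake server responds with a `WorkspaceEdit`
        // that touches both `one.rs` and `two.rs`, so both buffers should end
        // up in the resulting transaction.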
        let response = project.update(&mut cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _>(|params| {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                })
            })
            .next()
            .await
            .unwrap();
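
        // The returned `ProjectTransaction` maps each affected buffer to the
        // transaction that was applied to it; both buffers should now contain
        // the new name.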
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(&cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(&cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }
}