1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod worktree;
5
6use anyhow::{anyhow, Context, Result};
7use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
8use clock::ReplicaId;
9use collections::{hash_map, HashMap, HashSet};
10use futures::{future::Shared, Future, FutureExt};
11use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
12use gpui::{
13 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
14 UpgradeModelHandle, WeakModelHandle,
15};
16use language::{
17 range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
18 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
19 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
20};
21use lsp::{DiagnosticSeverity, LanguageServer};
22use lsp_command::*;
23use postage::{broadcast, prelude::Stream, sink::Sink, watch};
24use smol::block_on;
25use std::{
26 convert::TryInto,
27 ops::Range,
28 path::{Path, PathBuf},
29 sync::{atomic::AtomicBool, Arc},
30 time::Instant,
31};
32use util::{post_inc, ResultExt, TryFutureExt as _};
33
34pub use fs::*;
35pub use worktree::*;
36
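// The central model for a collection of worktrees and everything layered on top of
// them: open buffers, per-(worktree, language) language servers, diagnostics counts,
// collaborator state, and the RPC client used to keep all of this in sync. A project
// is either local (hosting, possibly shared) or remote (joined from another peer);
// see `ProjectClientState` below.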
37pub struct Project {
38 worktrees: Vec<WorktreeHandle>,
39 active_entry: Option<ProjectEntry>,
40 languages: Arc<LanguageRegistry>,
41 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
42 started_language_servers:
43 HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
44 client: Arc<client::Client>,
45 user_store: ModelHandle<UserStore>,
46 fs: Arc<dyn Fs>,
47 client_state: ProjectClientState,
48 collaborators: HashMap<PeerId, Collaborator>,
49 subscriptions: Vec<client::Subscription>,
50 language_servers_with_diagnostics_running: isize,
51 open_buffers: HashMap<u64, OpenBuffer>,
52 opened_buffer: broadcast::Sender<()>,
53 loading_buffers: HashMap<
54 ProjectPath,
55 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
56 >,
57 shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
58}
59
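// An entry in `Project::open_buffers`. `Loaded` is a weak handle to a buffer that has
// finished opening; `Loading` accumulates operations that arrive for a buffer whose
// open is still in flight, to be applied in `register_buffer` once it exists.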
60enum OpenBuffer {
61 Loaded(WeakModelHandle<Buffer>),
62 Loading(Vec<Operation>),
63}
64
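// Worktrees are normally retained strongly. Worktrees created as "weak" (for example,
// one opened only to satisfy a language-server edit outside the project) are retained
// weakly so they can be dropped once nothing else keeps them alive.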
65enum WorktreeHandle {
66 Strong(ModelHandle<Worktree>),
67 Weak(WeakModelHandle<Worktree>),
68}
69
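// Whether this instance is the host of the project or a copy joined over RPC. A local
// project tracks whether it is currently shared and watches for the server-assigned
// remote id; a remote project records the id it joined, its replica id, and whether
// the host has since stopped sharing.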
70enum ProjectClientState {
71 Local {
72 is_shared: bool,
73 remote_id_tx: watch::Sender<Option<u64>>,
74 remote_id_rx: watch::Receiver<Option<u64>>,
75 _maintain_remote_id_task: Task<Option<()>>,
76 },
77 Remote {
78 sharing_has_stopped: bool,
79 remote_id: u64,
80 replica_id: ReplicaId,
81 },
82}
83
84#[derive(Clone, Debug)]
85pub struct Collaborator {
86 pub user: Arc<User>,
87 pub peer_id: PeerId,
88 pub replica_id: ReplicaId,
89}
90
91#[derive(Clone, Debug, PartialEq)]
92pub enum Event {
93 ActiveEntryChanged(Option<ProjectEntry>),
94 WorktreeRemoved(WorktreeId),
95 DiskBasedDiagnosticsStarted,
96 DiskBasedDiagnosticsUpdated,
97 DiskBasedDiagnosticsFinished,
98 DiagnosticsUpdated(ProjectPath),
99}
100
101#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
102pub struct ProjectPath {
103 pub worktree_id: WorktreeId,
104 pub path: Arc<Path>,
105}
106
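// Per-path counts of diagnostics by severity. Built by `new`, which counts only
// primary diagnostics and skips supporting members of each group.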
107#[derive(Clone, Debug, Default, PartialEq)]
108pub struct DiagnosticSummary {
109 pub error_count: usize,
110 pub warning_count: usize,
111 pub info_count: usize,
112 pub hint_count: usize,
113}
114
115#[derive(Debug)]
116pub struct Definition {
117 pub target_buffer: ModelHandle<Buffer>,
118 pub target_range: Range<language::Anchor>,
119}
120
121pub struct ProjectSymbol {
122 pub label: CodeLabel,
123 pub lsp_symbol: lsp::SymbolInformation,
124}
125
126#[derive(Default)]
127pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
128
129impl DiagnosticSummary {
130 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
131 let mut this = Self {
132 error_count: 0,
133 warning_count: 0,
134 info_count: 0,
135 hint_count: 0,
136 };
137
138 for entry in diagnostics {
139 if entry.diagnostic.is_primary {
140 match entry.diagnostic.severity {
141 DiagnosticSeverity::ERROR => this.error_count += 1,
142 DiagnosticSeverity::WARNING => this.warning_count += 1,
143 DiagnosticSeverity::INFORMATION => this.info_count += 1,
144 DiagnosticSeverity::HINT => this.hint_count += 1,
145 _ => {}
146 }
147 }
148 }
149
150 this
151 }
152
153 pub fn to_proto(&self, path: Arc<Path>) -> proto::DiagnosticSummary {
154 proto::DiagnosticSummary {
155 path: path.to_string_lossy().to_string(),
156 error_count: self.error_count as u32,
157 warning_count: self.warning_count as u32,
158 info_count: self.info_count as u32,
159 hint_count: self.hint_count as u32,
160 }
161 }
162}
163
164#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
165pub struct ProjectEntry {
166 pub worktree_id: WorktreeId,
167 pub entry_id: usize,
168}
169
170impl Project {
171 pub fn init(client: &Arc<Client>) {
172 client.add_entity_message_handler(Self::handle_add_collaborator);
173 client.add_entity_message_handler(Self::handle_buffer_reloaded);
174 client.add_entity_message_handler(Self::handle_buffer_saved);
175 client.add_entity_message_handler(Self::handle_close_buffer);
176 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
177 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
178 client.add_entity_message_handler(Self::handle_remove_collaborator);
179 client.add_entity_message_handler(Self::handle_share_worktree);
180 client.add_entity_message_handler(Self::handle_unregister_worktree);
181 client.add_entity_message_handler(Self::handle_unshare_project);
182 client.add_entity_message_handler(Self::handle_update_buffer_file);
183 client.add_entity_message_handler(Self::handle_update_buffer);
184 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
185 client.add_entity_message_handler(Self::handle_update_worktree);
186 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
187 client.add_entity_request_handler(Self::handle_apply_code_action);
188 client.add_entity_request_handler(Self::handle_format_buffers);
189 client.add_entity_request_handler(Self::handle_get_code_actions);
190 client.add_entity_request_handler(Self::handle_get_completions);
191 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
192 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
193 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
194 client.add_entity_request_handler(Self::handle_open_buffer);
195 client.add_entity_request_handler(Self::handle_save_buffer);
196 }
197
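    // Creates a local (hosting) project. A background task watches the client's
    // connection status: whenever it becomes connected, the project is registered with
    // the server, existing worktrees are re-registered under the new project id, and
    // the id is published through `set_remote_id`.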
198 pub fn local(
199 client: Arc<Client>,
200 user_store: ModelHandle<UserStore>,
201 languages: Arc<LanguageRegistry>,
202 fs: Arc<dyn Fs>,
203 cx: &mut MutableAppContext,
204 ) -> ModelHandle<Self> {
205 cx.add_model(|cx: &mut ModelContext<Self>| {
206 let (remote_id_tx, remote_id_rx) = watch::channel();
207 let _maintain_remote_id_task = cx.spawn_weak({
208 let rpc = client.clone();
209 move |this, mut cx| {
210 async move {
211 let mut status = rpc.status();
212 while let Some(status) = status.recv().await {
213 if let Some(this) = this.upgrade(&cx) {
214 let remote_id = if let client::Status::Connected { .. } = status {
215 let response = rpc.request(proto::RegisterProject {}).await?;
216 Some(response.project_id)
217 } else {
218 None
219 };
220
221 if let Some(project_id) = remote_id {
222 let mut registrations = Vec::new();
223 this.update(&mut cx, |this, cx| {
224 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
225 registrations.push(worktree.update(
226 cx,
227 |worktree, cx| {
228 let worktree = worktree.as_local_mut().unwrap();
229 worktree.register(project_id, cx)
230 },
231 ));
232 }
233 });
234 for registration in registrations {
235 registration.await?;
236 }
237 }
238 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
239 }
240 }
241 Ok(())
242 }
243 .log_err()
244 }
245 });
246
247 Self {
248 worktrees: Default::default(),
249 collaborators: Default::default(),
250 open_buffers: Default::default(),
251 loading_buffers: Default::default(),
252 shared_buffers: Default::default(),
253 client_state: ProjectClientState::Local {
254 is_shared: false,
255 remote_id_tx,
256 remote_id_rx,
257 _maintain_remote_id_task,
258 },
259 opened_buffer: broadcast::channel(1).0,
260 subscriptions: Vec::new(),
261 active_entry: None,
262 languages,
263 client,
264 user_store,
265 fs,
266 language_servers_with_diagnostics_running: 0,
267 language_servers: Default::default(),
268 started_language_servers: Default::default(),
269 }
270 })
271 }
272
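    // Joins a project hosted by another peer: authenticates and connects, sends
    // `JoinProject`, reconstructs each of the host's worktrees as a remote worktree,
    // and loads the collaborators' user records before recording them on the model.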
273 pub async fn remote(
274 remote_id: u64,
275 client: Arc<Client>,
276 user_store: ModelHandle<UserStore>,
277 languages: Arc<LanguageRegistry>,
278 fs: Arc<dyn Fs>,
279 cx: &mut AsyncAppContext,
280 ) -> Result<ModelHandle<Self>> {
281 client.authenticate_and_connect(&cx).await?;
282
283 let response = client
284 .request(proto::JoinProject {
285 project_id: remote_id,
286 })
287 .await?;
288
289 let replica_id = response.replica_id as ReplicaId;
290
291 let mut worktrees = Vec::new();
292 for worktree in response.worktrees {
293 let (worktree, load_task) = cx
294 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
295 worktrees.push(worktree);
296 load_task.detach();
297 }
298
299 let this = cx.add_model(|cx| {
300 let mut this = Self {
301 worktrees: Vec::new(),
302 open_buffers: Default::default(),
303 loading_buffers: Default::default(),
304 opened_buffer: broadcast::channel(1).0,
305 shared_buffers: Default::default(),
306 active_entry: None,
307 collaborators: Default::default(),
308 languages,
309 user_store: user_store.clone(),
310 fs,
311 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
312 client,
313 client_state: ProjectClientState::Remote {
314 sharing_has_stopped: false,
315 remote_id,
316 replica_id,
317 },
318 language_servers_with_diagnostics_running: 0,
319 language_servers: Default::default(),
320 started_language_servers: Default::default(),
321 };
322 for worktree in worktrees {
323 this.add_worktree(&worktree, cx);
324 }
325 this
326 });
327
328 let user_ids = response
329 .collaborators
330 .iter()
331 .map(|peer| peer.user_id)
332 .collect();
333 user_store
334 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
335 .await?;
336 let mut collaborators = HashMap::default();
337 for message in response.collaborators {
338 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
339 collaborators.insert(collaborator.peer_id, collaborator);
340 }
341
342 this.update(cx, |this, _| {
343 this.collaborators = collaborators;
344 });
345
346 Ok(this)
347 }
348
349 #[cfg(any(test, feature = "test-support"))]
350 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
351 let languages = Arc::new(LanguageRegistry::new());
352 let http_client = client::test::FakeHttpClient::with_404_response();
353 let client = client::Client::new(http_client.clone());
354 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
355 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
356 }
357
358 #[cfg(any(test, feature = "test-support"))]
359 pub fn shared_buffer(&self, peer_id: PeerId, remote_id: u64) -> Option<ModelHandle<Buffer>> {
360 self.shared_buffers
361 .get(&peer_id)
362 .and_then(|buffers| buffers.get(&remote_id))
363 .cloned()
364 }
365
366 #[cfg(any(test, feature = "test-support"))]
367 pub fn has_buffered_operations(&self) -> bool {
368 self.open_buffers
369 .values()
370 .any(|buffer| matches!(buffer, OpenBuffer::Loading(_)))
371 }
372
373 pub fn fs(&self) -> &Arc<dyn Fs> {
374 &self.fs
375 }
376
377 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
378 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
379 *remote_id_tx.borrow_mut() = remote_id;
380 }
381
382 self.subscriptions.clear();
383 if let Some(remote_id) = remote_id {
384 self.subscriptions
385 .push(self.client.add_model_for_remote_entity(remote_id, cx));
386 }
387 }
388
389 pub fn remote_id(&self) -> Option<u64> {
390 match &self.client_state {
391 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
392 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
393 }
394 }
395
396 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
397 let mut id = None;
398 let mut watch = None;
399 match &self.client_state {
400 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
401 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
402 }
403
404 async move {
405 if let Some(id) = id {
406 return id;
407 }
408 let mut watch = watch.unwrap();
409 loop {
410 let id = *watch.borrow();
411 if let Some(id) = id {
412 return id;
413 }
414 watch.recv().await;
415 }
416 }
417 }
418
419 pub fn replica_id(&self) -> ReplicaId {
420 match &self.client_state {
421 ProjectClientState::Local { .. } => 0,
422 ProjectClientState::Remote { replica_id, .. } => *replica_id,
423 }
424 }
425
426 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
427 &self.collaborators
428 }
429
430 pub fn worktrees<'a>(
431 &'a self,
432 cx: &'a AppContext,
433 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
434 self.worktrees
435 .iter()
436 .filter_map(move |worktree| worktree.upgrade(cx))
437 }
438
439 pub fn worktree_for_id(
440 &self,
441 id: WorktreeId,
442 cx: &AppContext,
443 ) -> Option<ModelHandle<Worktree>> {
444 self.worktrees(cx)
445 .find(|worktree| worktree.read(cx).id() == id)
446 }
447
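    // Makes a local project available to collaborators: marks it shared, sends
    // `ShareProject`, and then shares each local worktree under the project id.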
448 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
449 let rpc = self.client.clone();
450 cx.spawn(|this, mut cx| async move {
451 let project_id = this.update(&mut cx, |this, _| {
452 if let ProjectClientState::Local {
453 is_shared,
454 remote_id_rx,
455 ..
456 } = &mut this.client_state
457 {
458 *is_shared = true;
459 remote_id_rx
460 .borrow()
461 .ok_or_else(|| anyhow!("no project id"))
462 } else {
463 Err(anyhow!("can't share a remote project"))
464 }
465 })?;
466
467 rpc.request(proto::ShareProject { project_id }).await?;
468 let mut tasks = Vec::new();
469 this.update(&mut cx, |this, cx| {
470 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
471 worktree.update(cx, |worktree, cx| {
472 let worktree = worktree.as_local_mut().unwrap();
473 tasks.push(worktree.share(project_id, cx));
474 });
475 }
476 });
477 for task in tasks {
478 task.await?;
479 }
480 this.update(&mut cx, |_, cx| cx.notify());
481 Ok(())
482 })
483 }
484
485 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
486 let rpc = self.client.clone();
487 cx.spawn(|this, mut cx| async move {
488 let project_id = this.update(&mut cx, |this, _| {
489 if let ProjectClientState::Local {
490 is_shared,
491 remote_id_rx,
492 ..
493 } = &mut this.client_state
494 {
495 *is_shared = false;
496 remote_id_rx
497 .borrow()
498 .ok_or_else(|| anyhow!("no project id"))
499 } else {
500 Err(anyhow!("can't share a remote project"))
501 }
502 })?;
503
504 rpc.send(proto::UnshareProject { project_id })?;
505 this.update(&mut cx, |this, cx| {
506 this.collaborators.clear();
507 this.shared_buffers.clear();
508 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
509 worktree.update(cx, |worktree, _| {
510 worktree.as_local_mut().unwrap().unshare();
511 });
512 }
513 cx.notify()
514 });
515 Ok(())
516 })
517 }
518
519 pub fn is_read_only(&self) -> bool {
520 match &self.client_state {
521 ProjectClientState::Local { .. } => false,
522 ProjectClientState::Remote {
523 sharing_has_stopped,
524 ..
525 } => *sharing_has_stopped,
526 }
527 }
528
529 pub fn is_local(&self) -> bool {
530 match &self.client_state {
531 ProjectClientState::Local { .. } => true,
532 ProjectClientState::Remote { .. } => false,
533 }
534 }
535
536 pub fn is_remote(&self) -> bool {
537 !self.is_local()
538 }
539
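    // Opens the buffer at `path`. If it is already open, the existing handle is
    // returned; if another open for the same path is in flight, this call waits on it;
    // otherwise a new local or remote load is started and recorded in `loading_buffers`
    // so concurrent callers share one result. A rough usage sketch (the tuple-to-
    // `ProjectPath` conversion is assumed to exist elsewhere in the crate):
    //
    //     let open = project.update(cx, |project, cx| {
    //         project.open_buffer((worktree_id, Path::new("src/lib.rs")), cx)
    //     });
    //     let buffer = open.await?;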
540 pub fn open_buffer(
541 &mut self,
542 path: impl Into<ProjectPath>,
543 cx: &mut ModelContext<Self>,
544 ) -> Task<Result<ModelHandle<Buffer>>> {
545 let project_path = path.into();
546 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
547 worktree
548 } else {
549 return Task::ready(Err(anyhow!("no such worktree")));
550 };
551
552 // If there is already a buffer for the given path, then return it.
553 let existing_buffer = self.get_open_buffer(&project_path, cx);
554 if let Some(existing_buffer) = existing_buffer {
555 return Task::ready(Ok(existing_buffer));
556 }
557
558 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
559 // If the given path is already being loaded, then wait for that existing
560 // task to complete and return the same buffer.
561 hash_map::Entry::Occupied(e) => e.get().clone(),
562
563 // Otherwise, record the fact that this path is now being loaded.
564 hash_map::Entry::Vacant(entry) => {
565 let (mut tx, rx) = postage::watch::channel();
566 entry.insert(rx.clone());
567
568 let load_buffer = if worktree.read(cx).is_local() {
569 self.open_local_buffer(&project_path.path, &worktree, cx)
570 } else {
571 self.open_remote_buffer(&project_path.path, &worktree, cx)
572 };
573
574 cx.spawn(move |this, mut cx| async move {
575 let load_result = load_buffer.await;
576 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
577 // Record the fact that the buffer is no longer loading.
578 this.loading_buffers.remove(&project_path);
579 if this.loading_buffers.is_empty() {
580 this.open_buffers
581 .retain(|_, buffer| matches!(buffer, OpenBuffer::Loaded(_)))
582 }
583
584 let buffer = load_result.map_err(Arc::new)?;
585 Ok(buffer)
586 }));
587 })
588 .detach();
589 rx
590 }
591 };
592
593 cx.foreground().spawn(async move {
594 loop {
595 if let Some(result) = loading_watch.borrow().as_ref() {
596 match result {
597 Ok(buffer) => return Ok(buffer.clone()),
598 Err(error) => return Err(anyhow!("{}", error)),
599 }
600 }
601 loading_watch.recv().await;
602 }
603 })
604 }
605
606 fn open_local_buffer(
607 &mut self,
608 path: &Arc<Path>,
609 worktree: &ModelHandle<Worktree>,
610 cx: &mut ModelContext<Self>,
611 ) -> Task<Result<ModelHandle<Buffer>>> {
612 let load_buffer = worktree.update(cx, |worktree, cx| {
613 let worktree = worktree.as_local_mut().unwrap();
614 worktree.load_buffer(path, cx)
615 });
616 let worktree = worktree.downgrade();
617 cx.spawn(|this, mut cx| async move {
618 let buffer = load_buffer.await?;
619 let worktree = worktree
620 .upgrade(&cx)
621 .ok_or_else(|| anyhow!("worktree was removed"))?;
622 this.update(&mut cx, |this, cx| {
623 this.register_buffer(&buffer, Some(&worktree), cx)
624 })?;
625 Ok(buffer)
626 })
627 }
628
629 fn open_remote_buffer(
630 &mut self,
631 path: &Arc<Path>,
632 worktree: &ModelHandle<Worktree>,
633 cx: &mut ModelContext<Self>,
634 ) -> Task<Result<ModelHandle<Buffer>>> {
635 let rpc = self.client.clone();
636 let project_id = self.remote_id().unwrap();
637 let remote_worktree_id = worktree.read(cx).id();
638 let path = path.clone();
639 let path_string = path.to_string_lossy().to_string();
640 cx.spawn(|this, mut cx| async move {
641 let response = rpc
642 .request(proto::OpenBuffer {
643 project_id,
644 worktree_id: remote_worktree_id.to_proto(),
645 path: path_string,
646 })
647 .await?;
648 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
649 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
650 .await
651 })
652 }
653
654 fn open_local_buffer_from_lsp_path(
655 &mut self,
656 abs_path: lsp::Url,
657 lang_name: String,
658 lang_server: Arc<LanguageServer>,
659 cx: &mut ModelContext<Self>,
660 ) -> Task<Result<ModelHandle<Buffer>>> {
661 cx.spawn(|this, mut cx| async move {
662 let abs_path = abs_path
663 .to_file_path()
664 .map_err(|_| anyhow!("can't convert URI to path"))?;
665 let (worktree, relative_path) = if let Some(result) =
666 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
667 {
668 result
669 } else {
670 let worktree = this
671 .update(&mut cx, |this, cx| {
672 this.create_local_worktree(&abs_path, true, cx)
673 })
674 .await?;
675 this.update(&mut cx, |this, cx| {
676 this.language_servers
677 .insert((worktree.read(cx).id(), lang_name), lang_server);
678 });
679 (worktree, PathBuf::new())
680 };
681
682 let project_path = ProjectPath {
683 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
684 path: relative_path.into(),
685 };
686 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
687 .await
688 })
689 }
690
691 pub fn save_buffer_as(
692 &self,
693 buffer: ModelHandle<Buffer>,
694 abs_path: PathBuf,
695 cx: &mut ModelContext<Project>,
696 ) -> Task<Result<()>> {
697 let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
698 cx.spawn(|this, mut cx| async move {
699 let (worktree, path) = worktree_task.await?;
700 worktree
701 .update(&mut cx, |worktree, cx| {
702 worktree
703 .as_local_mut()
704 .unwrap()
705 .save_buffer_as(buffer.clone(), path, cx)
706 })
707 .await?;
708 this.update(&mut cx, |this, cx| {
709 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
710 });
711 Ok(())
712 })
713 }
714
715 #[cfg(any(test, feature = "test-support"))]
716 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
717 let path = path.into();
718 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
719 self.open_buffers.iter().any(|(_, buffer)| {
720 if let Some(buffer) = buffer.upgrade(cx) {
721 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
722 if file.worktree == worktree && file.path() == &path.path {
723 return true;
724 }
725 }
726 }
727 false
728 })
729 } else {
730 false
731 }
732 }
733
734 fn get_open_buffer(
735 &mut self,
736 path: &ProjectPath,
737 cx: &mut ModelContext<Self>,
738 ) -> Option<ModelHandle<Buffer>> {
739 let mut result = None;
740 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
741 self.open_buffers.retain(|_, buffer| {
742 if let Some(buffer) = buffer.upgrade(cx) {
743 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
744 if file.worktree == worktree && file.path() == &path.path {
745 result = Some(buffer);
746 }
747 }
748 true
749 } else {
750 false
751 }
752 });
753 result
754 }
755
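    // Records a newly opened buffer in `open_buffers`. Any operations that were
    // buffered for it while it was still loading are applied now, and registering the
    // same buffer twice is an error. Finally the buffer is handed a language (and
    // language server / stored diagnostics) via `assign_language_to_buffer`.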
756 fn register_buffer(
757 &mut self,
758 buffer: &ModelHandle<Buffer>,
759 worktree: Option<&ModelHandle<Worktree>>,
760 cx: &mut ModelContext<Self>,
761 ) -> Result<()> {
762 match self.open_buffers.insert(
763 buffer.read(cx).remote_id(),
764 OpenBuffer::Loaded(buffer.downgrade()),
765 ) {
766 None => {}
767 Some(OpenBuffer::Loading(operations)) => {
768 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
769 }
770 Some(OpenBuffer::Loaded(_)) => Err(anyhow!("registered the same buffer twice"))?,
771 }
772 self.assign_language_to_buffer(&buffer, worktree, cx);
773 Ok(())
774 }
775
776 fn assign_language_to_buffer(
777 &mut self,
778 buffer: &ModelHandle<Buffer>,
779 worktree: Option<&ModelHandle<Worktree>>,
780 cx: &mut ModelContext<Self>,
781 ) -> Option<()> {
782 let (path, full_path) = {
783 let file = buffer.read(cx).file()?;
784 (file.path().clone(), file.full_path(cx))
785 };
786
787 // If the buffer has a language, set it and start/assign the language server
788 if let Some(language) = self.languages.select_language(&full_path).cloned() {
789 buffer.update(cx, |buffer, cx| {
790 buffer.set_language(Some(language.clone()), cx);
791 });
792
793 // For local worktrees, start a language server if needed.
794 // Also assign the language server and any previously stored diagnostics to the buffer.
795 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
796 let worktree_id = local_worktree.id();
797 let worktree_abs_path = local_worktree.abs_path().clone();
798 let buffer = buffer.downgrade();
799 let language_server =
800 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
801
802 cx.spawn_weak(|_, mut cx| async move {
803 if let Some(language_server) = language_server.await {
804 if let Some(buffer) = buffer.upgrade(&cx) {
805 buffer.update(&mut cx, |buffer, cx| {
806 buffer.set_language_server(Some(language_server), cx);
807 });
808 }
809 }
810 })
811 .detach();
812 }
813 }
814
815 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
816 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
817 buffer.update(cx, |buffer, cx| {
818 buffer.update_diagnostics(diagnostics, None, cx).log_err();
819 });
820 }
821 }
822
823 None
824 }
825
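    // Starts, or reuses, the language server for a (worktree, language) pair. The
    // returned task is shared, so every buffer needing the same server awaits a single
    // startup. Once the server is running, its `PublishDiagnostics` and `Progress`
    // notifications are funneled through a channel and translated into diagnostic
    // updates plus disk-based-diagnostics start/finish events (broadcast to
    // collaborators when the project has a remote id).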
826 fn start_language_server(
827 &mut self,
828 worktree_id: WorktreeId,
829 worktree_path: Arc<Path>,
830 language: Arc<Language>,
831 cx: &mut ModelContext<Self>,
832 ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
833 enum LspEvent {
834 DiagnosticsStart,
835 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
836 DiagnosticsFinish,
837 }
838
839 let key = (worktree_id, language.name().to_string());
840 self.started_language_servers
841 .entry(key.clone())
842 .or_insert_with(|| {
843 let language_server = self.languages.start_language_server(
844 &language,
845 worktree_path,
846 self.client.http_client(),
847 cx,
848 );
849 let rpc = self.client.clone();
850 cx.spawn_weak(|this, mut cx| async move {
851 let language_server = language_server?.await.log_err()?;
852 if let Some(this) = this.upgrade(&cx) {
853 this.update(&mut cx, |this, _| {
854 this.language_servers.insert(key, language_server.clone());
855 });
856 }
857
858 let disk_based_sources = language
859 .disk_based_diagnostic_sources()
860 .cloned()
861 .unwrap_or_default();
862 let disk_based_diagnostics_progress_token =
863 language.disk_based_diagnostics_progress_token().cloned();
864 let has_disk_based_diagnostic_progress_token =
865 disk_based_diagnostics_progress_token.is_some();
866 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
867
868 // Listen for `PublishDiagnostics` notifications.
869 language_server
870 .on_notification::<lsp::notification::PublishDiagnostics, _>({
871 let diagnostics_tx = diagnostics_tx.clone();
872 move |params| {
873 if !has_disk_based_diagnostic_progress_token {
874 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
875 }
876 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
877 .ok();
878 if !has_disk_based_diagnostic_progress_token {
879 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
880 }
881 }
882 })
883 .detach();
884
885 // Listen for `Progress` notifications. Send an event when the language server
886 // transitions between running jobs and not running any jobs.
887 let mut running_jobs_for_this_server: i32 = 0;
888 language_server
889 .on_notification::<lsp::notification::Progress, _>(move |params| {
890 let token = match params.token {
891 lsp::NumberOrString::Number(_) => None,
892 lsp::NumberOrString::String(token) => Some(token),
893 };
894
895 if token == disk_based_diagnostics_progress_token {
896 match params.value {
897 lsp::ProgressParamsValue::WorkDone(progress) => {
898 match progress {
899 lsp::WorkDoneProgress::Begin(_) => {
900 running_jobs_for_this_server += 1;
901 if running_jobs_for_this_server == 1 {
902 block_on(
903 diagnostics_tx
904 .send(LspEvent::DiagnosticsStart),
905 )
906 .ok();
907 }
908 }
909 lsp::WorkDoneProgress::End(_) => {
910 running_jobs_for_this_server -= 1;
911 if running_jobs_for_this_server == 0 {
912 block_on(
913 diagnostics_tx
914 .send(LspEvent::DiagnosticsFinish),
915 )
916 .ok();
917 }
918 }
919 _ => {}
920 }
921 }
922 }
923 }
924 })
925 .detach();
926
927 // Process all the LSP events.
928 cx.spawn(|mut cx| async move {
929 while let Ok(message) = diagnostics_rx.recv().await {
930 let this = this.upgrade(&cx)?;
931 match message {
932 LspEvent::DiagnosticsStart => {
933 this.update(&mut cx, |this, cx| {
934 this.disk_based_diagnostics_started(cx);
935 if let Some(project_id) = this.remote_id() {
936 rpc.send(proto::DiskBasedDiagnosticsUpdating {
937 project_id,
938 })
939 .log_err();
940 }
941 });
942 }
943 LspEvent::DiagnosticsUpdate(mut params) => {
944 language.process_diagnostics(&mut params);
945 this.update(&mut cx, |this, cx| {
946 this.update_diagnostics(params, &disk_based_sources, cx)
947 .log_err();
948 });
949 }
950 LspEvent::DiagnosticsFinish => {
951 this.update(&mut cx, |this, cx| {
952 this.disk_based_diagnostics_finished(cx);
953 if let Some(project_id) = this.remote_id() {
954 rpc.send(proto::DiskBasedDiagnosticsUpdated {
955 project_id,
956 })
957 .log_err();
958 }
959 });
960 }
961 }
962 }
963 Some(())
964 })
965 .detach();
966
967 Some(language_server)
968 })
969 .shared()
970 })
971 .clone()
972 }
973
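    // Translates an LSP `PublishDiagnostics` notification into `DiagnosticEntry`
    // values. Diagnostics that merely point at another diagnostic via related
    // information are treated as supporting entries; every other diagnostic starts a
    // new group, and its own related information is added to that group as non-primary
    // entries. Severities reported separately for supporting diagnostics are copied
    // onto them before the entries are stored on the worktree.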
974 pub fn update_diagnostics(
975 &mut self,
976 params: lsp::PublishDiagnosticsParams,
977 disk_based_sources: &HashSet<String>,
978 cx: &mut ModelContext<Self>,
979 ) -> Result<()> {
980 let abs_path = params
981 .uri
982 .to_file_path()
983 .map_err(|_| anyhow!("URI is not a file"))?;
984 let mut next_group_id = 0;
985 let mut diagnostics = Vec::default();
986 let mut primary_diagnostic_group_ids = HashMap::default();
987 let mut sources_by_group_id = HashMap::default();
988 let mut supporting_diagnostic_severities = HashMap::default();
989 for diagnostic in &params.diagnostics {
990 let source = diagnostic.source.as_ref();
991 let code = diagnostic.code.as_ref().map(|code| match code {
992 lsp::NumberOrString::Number(code) => code.to_string(),
993 lsp::NumberOrString::String(code) => code.clone(),
994 });
995 let range = range_from_lsp(diagnostic.range);
996 let is_supporting = diagnostic
997 .related_information
998 .as_ref()
999 .map_or(false, |infos| {
1000 infos.iter().any(|info| {
1001 primary_diagnostic_group_ids.contains_key(&(
1002 source,
1003 code.clone(),
1004 range_from_lsp(info.location.range),
1005 ))
1006 })
1007 });
1008
1009 if is_supporting {
1010 if let Some(severity) = diagnostic.severity {
1011 supporting_diagnostic_severities
1012 .insert((source, code.clone(), range), severity);
1013 }
1014 } else {
1015 let group_id = post_inc(&mut next_group_id);
1016 let is_disk_based =
1017 source.map_or(false, |source| disk_based_sources.contains(source));
1018
1019 sources_by_group_id.insert(group_id, source);
1020 primary_diagnostic_group_ids
1021 .insert((source, code.clone(), range.clone()), group_id);
1022
1023 diagnostics.push(DiagnosticEntry {
1024 range,
1025 diagnostic: Diagnostic {
1026 code: code.clone(),
1027 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1028 message: diagnostic.message.clone(),
1029 group_id,
1030 is_primary: true,
1031 is_valid: true,
1032 is_disk_based,
1033 },
1034 });
1035 if let Some(infos) = &diagnostic.related_information {
1036 for info in infos {
1037 if info.location.uri == params.uri && !info.message.is_empty() {
1038 let range = range_from_lsp(info.location.range);
1039 diagnostics.push(DiagnosticEntry {
1040 range,
1041 diagnostic: Diagnostic {
1042 code: code.clone(),
1043 severity: DiagnosticSeverity::INFORMATION,
1044 message: info.message.clone(),
1045 group_id,
1046 is_primary: false,
1047 is_valid: true,
1048 is_disk_based,
1049 },
1050 });
1051 }
1052 }
1053 }
1054 }
1055 }
1056
1057 for entry in &mut diagnostics {
1058 let diagnostic = &mut entry.diagnostic;
1059 if !diagnostic.is_primary {
1060 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1061 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1062 source,
1063 diagnostic.code.clone(),
1064 entry.range.clone(),
1065 )) {
1066 diagnostic.severity = severity;
1067 }
1068 }
1069 }
1070
1071 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1072 Ok(())
1073 }
1074
1075 pub fn update_diagnostic_entries(
1076 &mut self,
1077 abs_path: PathBuf,
1078 version: Option<i32>,
1079 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1080 cx: &mut ModelContext<Project>,
1081 ) -> Result<(), anyhow::Error> {
1082 let (worktree, relative_path) = self
1083 .find_local_worktree(&abs_path, cx)
1084 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1085 let project_path = ProjectPath {
1086 worktree_id: worktree.read(cx).id(),
1087 path: relative_path.into(),
1088 };
1089
1090 for buffer in self.open_buffers.values() {
1091 if let Some(buffer) = buffer.upgrade(cx) {
1092 if buffer
1093 .read(cx)
1094 .file()
1095 .map_or(false, |file| *file.path() == project_path.path)
1096 {
1097 buffer.update(cx, |buffer, cx| {
1098 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1099 })?;
1100 break;
1101 }
1102 }
1103 }
1104 worktree.update(cx, |worktree, cx| {
1105 worktree
1106 .as_local_mut()
1107 .ok_or_else(|| anyhow!("not a local worktree"))?
1108 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1109 })?;
1110 cx.emit(Event::DiagnosticsUpdated(project_path));
1111 Ok(())
1112 }
1113
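    // Formats a set of buffers. Buffers that live on remote worktrees are formatted by
    // the host through a `FormatBuffers` request; local buffers get document-formatting
    // edits from their language server, applied in one transaction per buffer. All
    // resulting transactions are gathered into the returned `ProjectTransaction`.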
1114 pub fn format(
1115 &self,
1116 buffers: HashSet<ModelHandle<Buffer>>,
1117 push_to_history: bool,
1118 cx: &mut ModelContext<Project>,
1119 ) -> Task<Result<ProjectTransaction>> {
1120 let mut local_buffers = Vec::new();
1121 let mut remote_buffers = None;
1122 for buffer_handle in buffers {
1123 let buffer = buffer_handle.read(cx);
1124 let worktree;
1125 if let Some(file) = File::from_dyn(buffer.file()) {
1126 worktree = file.worktree.clone();
1127 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1128 let lang_server;
1129 if let Some(lang) = buffer.language() {
1130 if let Some(server) = self
1131 .language_servers
1132 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1133 {
1134 lang_server = server.clone();
1135 } else {
1136 return Task::ready(Ok(Default::default()));
1137 };
1138 } else {
1139 return Task::ready(Ok(Default::default()));
1140 }
1141
1142 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1143 } else {
1144 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1145 }
1146 } else {
1147 return Task::ready(Ok(Default::default()));
1148 }
1149 }
1150
1151 let remote_buffers = self.remote_id().zip(remote_buffers);
1152 let client = self.client.clone();
1153
1154 cx.spawn(|this, mut cx| async move {
1155 let mut project_transaction = ProjectTransaction::default();
1156
1157 if let Some((project_id, remote_buffers)) = remote_buffers {
1158 let response = client
1159 .request(proto::FormatBuffers {
1160 project_id,
1161 buffer_ids: remote_buffers
1162 .iter()
1163 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1164 .collect(),
1165 })
1166 .await?
1167 .transaction
1168 .ok_or_else(|| anyhow!("missing transaction"))?;
1169 project_transaction = this
1170 .update(&mut cx, |this, cx| {
1171 this.deserialize_project_transaction(response, push_to_history, cx)
1172 })
1173 .await?;
1174 }
1175
1176 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1177 let lsp_edits = lang_server
1178 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1179 text_document: lsp::TextDocumentIdentifier::new(
1180 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1181 ),
1182 options: Default::default(),
1183 work_done_progress_params: Default::default(),
1184 })
1185 .await?;
1186
1187 if let Some(lsp_edits) = lsp_edits {
1188 let edits = buffer
1189 .update(&mut cx, |buffer, cx| {
1190 buffer.edits_from_lsp(lsp_edits, None, cx)
1191 })
1192 .await?;
1193 buffer.update(&mut cx, |buffer, cx| {
1194 buffer.finalize_last_transaction();
1195 buffer.start_transaction();
1196 for (range, text) in edits {
1197 buffer.edit([range], text, cx);
1198 }
1199 if buffer.end_transaction(cx).is_some() {
1200 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1201 if !push_to_history {
1202 buffer.forget_transaction(transaction.id);
1203 }
1204 project_transaction.0.insert(cx.handle(), transaction);
1205 }
1206 });
1207 }
1208 }
1209
1210 Ok(project_transaction)
1211 })
1212 }
1213
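    // Resolves go-to-definition at `position` by issuing a `GetDefinition` command,
    // handled locally by the language server or remotely by the host via `request_lsp`.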
1214 pub fn definition<T: ToPointUtf16>(
1215 &self,
1216 buffer: &ModelHandle<Buffer>,
1217 position: T,
1218 cx: &mut ModelContext<Self>,
1219 ) -> Task<Result<Vec<Definition>>> {
1220 let position = position.to_point_utf16(buffer.read(cx));
1221 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1222 }
1223
1224 pub fn symbols(
1225 &self,
1226 query: &str,
1227 cx: &mut ModelContext<Self>,
1228 ) -> Task<Result<Vec<ProjectSymbol>>> {
1229 if self.is_local() {
1230 let mut language_servers = HashMap::default();
1231 for ((_, language_name), language_server) in self.language_servers.iter() {
1232 let language = self.languages.get_language(language_name).unwrap();
1233 language_servers
1234 .entry(Arc::as_ptr(language_server))
1235 .or_insert((language_server.clone(), language.clone()));
1236 }
1237
1238 let mut requests = Vec::new();
1239 for (language_server, _) in language_servers.values() {
1240 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
1241 lsp::WorkspaceSymbolParams {
1242 query: query.to_string(),
1243 ..Default::default()
1244 },
1245 ));
1246 }
1247
1248 cx.foreground().spawn(async move {
1249 let responses = futures::future::try_join_all(requests).await?;
1250 let mut symbols = Vec::new();
1251 for ((_, language), lsp_symbols) in language_servers.values().zip(responses) {
1252 for lsp_symbol in lsp_symbols.into_iter().flatten() {
1253 let label = language
1254 .label_for_symbol(&lsp_symbol)
1255 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
1256 symbols.push(ProjectSymbol { label, lsp_symbol });
1257 }
1258 }
1259 Ok(symbols)
1260 })
1261 } else if let Some(project_id) = self.remote_id() {
1262 todo!()
1263 } else {
1264 Task::ready(Ok(Default::default()))
1265 }
1266 }
1267
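    // Requests completions at `position`. Locally this sends `textDocument/completion`
    // and keeps only completions whose edit ranges clip cleanly onto the buffer;
    // remotely it asks the host and waits for the buffer to reach the returned version
    // before deserializing the results.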
1268 pub fn completions<T: ToPointUtf16>(
1269 &self,
1270 source_buffer_handle: &ModelHandle<Buffer>,
1271 position: T,
1272 cx: &mut ModelContext<Self>,
1273 ) -> Task<Result<Vec<Completion>>> {
1274 let source_buffer_handle = source_buffer_handle.clone();
1275 let source_buffer = source_buffer_handle.read(cx);
1276 let buffer_id = source_buffer.remote_id();
1277 let language = source_buffer.language().cloned();
1278 let worktree;
1279 let buffer_abs_path;
1280 if let Some(file) = File::from_dyn(source_buffer.file()) {
1281 worktree = file.worktree.clone();
1282 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1283 } else {
1284 return Task::ready(Ok(Default::default()));
1285 };
1286
1287 let position = position.to_point_utf16(source_buffer);
1288 let anchor = source_buffer.anchor_after(position);
1289
1290 if worktree.read(cx).as_local().is_some() {
1291 let buffer_abs_path = buffer_abs_path.unwrap();
1292 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1293 server
1294 } else {
1295 return Task::ready(Ok(Default::default()));
1296 };
1297
1298 cx.spawn(|_, cx| async move {
1299 let completions = lang_server
1300 .request::<lsp::request::Completion>(lsp::CompletionParams {
1301 text_document_position: lsp::TextDocumentPositionParams::new(
1302 lsp::TextDocumentIdentifier::new(
1303 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1304 ),
1305 position.to_lsp_position(),
1306 ),
1307 context: Default::default(),
1308 work_done_progress_params: Default::default(),
1309 partial_result_params: Default::default(),
1310 })
1311 .await
1312 .context("lsp completion request failed")?;
1313
1314 let completions = if let Some(completions) = completions {
1315 match completions {
1316 lsp::CompletionResponse::Array(completions) => completions,
1317 lsp::CompletionResponse::List(list) => list.items,
1318 }
1319 } else {
1320 Default::default()
1321 };
1322
1323 source_buffer_handle.read_with(&cx, |this, _| {
1324 Ok(completions
1325 .into_iter()
1326 .filter_map(|lsp_completion| {
1327 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1328 lsp::CompletionTextEdit::Edit(edit) => {
1329 (range_from_lsp(edit.range), edit.new_text.clone())
1330 }
1331 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1332 log::info!("unsupported insert/replace completion");
1333 return None;
1334 }
1335 };
1336
1337 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1338 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1339 if clipped_start == old_range.start && clipped_end == old_range.end {
1340 Some(Completion {
1341 old_range: this.anchor_before(old_range.start)
1342 ..this.anchor_after(old_range.end),
1343 new_text,
1344 label: language
1345 .as_ref()
1346 .and_then(|l| l.label_for_completion(&lsp_completion))
1347 .unwrap_or_else(|| {
1348 CodeLabel::plain(
1349 lsp_completion.label.clone(),
1350 lsp_completion.filter_text.as_deref(),
1351 )
1352 }),
1353 lsp_completion,
1354 })
1355 } else {
1356 None
1357 }
1358 })
1359 .collect())
1360 })
1361 })
1362 } else if let Some(project_id) = self.remote_id() {
1363 let rpc = self.client.clone();
1364 let message = proto::GetCompletions {
1365 project_id,
1366 buffer_id,
1367 position: Some(language::proto::serialize_anchor(&anchor)),
1368 version: (&source_buffer.version()).into(),
1369 };
1370 cx.spawn_weak(|_, mut cx| async move {
1371 let response = rpc.request(message).await?;
1372
1373 source_buffer_handle
1374 .update(&mut cx, |buffer, _| {
1375 buffer.wait_for_version(response.version.into())
1376 })
1377 .await;
1378
1379 response
1380 .completions
1381 .into_iter()
1382 .map(|completion| {
1383 language::proto::deserialize_completion(completion, language.as_ref())
1384 })
1385 .collect()
1386 })
1387 } else {
1388 Task::ready(Ok(Default::default()))
1389 }
1390 }
1391
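    // Resolves a completion item and applies its `additional_text_edits` in a single
    // transaction, optionally keeping that transaction out of the undo history. Remote
    // projects forward the request to the host and wait for the returned transaction.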
1392 pub fn apply_additional_edits_for_completion(
1393 &self,
1394 buffer_handle: ModelHandle<Buffer>,
1395 completion: Completion,
1396 push_to_history: bool,
1397 cx: &mut ModelContext<Self>,
1398 ) -> Task<Result<Option<Transaction>>> {
1399 let buffer = buffer_handle.read(cx);
1400 let buffer_id = buffer.remote_id();
1401
1402 if self.is_local() {
1403 let lang_server = if let Some(language_server) = buffer.language_server() {
1404 language_server.clone()
1405 } else {
1406 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1407 };
1408
1409 cx.spawn(|_, mut cx| async move {
1410 let resolved_completion = lang_server
1411 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1412 .await?;
1413 if let Some(edits) = resolved_completion.additional_text_edits {
1414 let edits = buffer_handle
1415 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1416 .await?;
1417 buffer_handle.update(&mut cx, |buffer, cx| {
1418 buffer.finalize_last_transaction();
1419 buffer.start_transaction();
1420 for (range, text) in edits {
1421 buffer.edit([range], text, cx);
1422 }
1423 let transaction = if buffer.end_transaction(cx).is_some() {
1424 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1425 if !push_to_history {
1426 buffer.forget_transaction(transaction.id);
1427 }
1428 Some(transaction)
1429 } else {
1430 None
1431 };
1432 Ok(transaction)
1433 })
1434 } else {
1435 Ok(None)
1436 }
1437 })
1438 } else if let Some(project_id) = self.remote_id() {
1439 let client = self.client.clone();
1440 cx.spawn(|_, mut cx| async move {
1441 let response = client
1442 .request(proto::ApplyCompletionAdditionalEdits {
1443 project_id,
1444 buffer_id,
1445 completion: Some(language::proto::serialize_completion(&completion)),
1446 })
1447 .await?;
1448
1449 if let Some(transaction) = response.transaction {
1450 let transaction = language::proto::deserialize_transaction(transaction)?;
1451 buffer_handle
1452 .update(&mut cx, |buffer, _| {
1453 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1454 })
1455 .await;
1456 if push_to_history {
1457 buffer_handle.update(&mut cx, |buffer, _| {
1458 buffer.push_transaction(transaction.clone(), Instant::now());
1459 });
1460 }
1461 Ok(Some(transaction))
1462 } else {
1463 Ok(None)
1464 }
1465 })
1466 } else {
1467 Task::ready(Err(anyhow!("project does not have a remote id")))
1468 }
1469 }
1470
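    // Fetches code actions for the given range, restricted to quickfix and refactor
    // kinds when asking the local language server; remote projects request them from
    // the host and wait for the buffer to catch up to the returned version.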
1471 pub fn code_actions<T: ToOffset>(
1472 &self,
1473 buffer_handle: &ModelHandle<Buffer>,
1474 range: Range<T>,
1475 cx: &mut ModelContext<Self>,
1476 ) -> Task<Result<Vec<CodeAction>>> {
1477 let buffer_handle = buffer_handle.clone();
1478 let buffer = buffer_handle.read(cx);
1479 let buffer_id = buffer.remote_id();
1480 let worktree;
1481 let buffer_abs_path;
1482 if let Some(file) = File::from_dyn(buffer.file()) {
1483 worktree = file.worktree.clone();
1484 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1485 } else {
1486 return Task::ready(Ok(Default::default()));
1487 };
1488 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1489
1490 if worktree.read(cx).as_local().is_some() {
1491 let buffer_abs_path = buffer_abs_path.unwrap();
1492 let lang_name;
1493 let lang_server;
1494 if let Some(lang) = buffer.language() {
1495 lang_name = lang.name().to_string();
1496 if let Some(server) = self
1497 .language_servers
1498 .get(&(worktree.read(cx).id(), lang_name.clone()))
1499 {
1500 lang_server = server.clone();
1501 } else {
1502 return Task::ready(Ok(Default::default()));
1503 };
1504 } else {
1505 return Task::ready(Ok(Default::default()));
1506 }
1507
1508 let lsp_range = lsp::Range::new(
1509 range.start.to_point_utf16(buffer).to_lsp_position(),
1510 range.end.to_point_utf16(buffer).to_lsp_position(),
1511 );
1512 cx.foreground().spawn(async move {
1513 Ok(lang_server
1514 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1515 text_document: lsp::TextDocumentIdentifier::new(
1516 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1517 ),
1518 range: lsp_range,
1519 work_done_progress_params: Default::default(),
1520 partial_result_params: Default::default(),
1521 context: lsp::CodeActionContext {
1522 diagnostics: Default::default(),
1523 only: Some(vec![
1524 lsp::CodeActionKind::QUICKFIX,
1525 lsp::CodeActionKind::REFACTOR,
1526 lsp::CodeActionKind::REFACTOR_EXTRACT,
1527 ]),
1528 },
1529 })
1530 .await?
1531 .unwrap_or_default()
1532 .into_iter()
1533 .filter_map(|entry| {
1534 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1535 Some(CodeAction {
1536 range: range.clone(),
1537 lsp_action,
1538 })
1539 } else {
1540 None
1541 }
1542 })
1543 .collect())
1544 })
1545 } else if let Some(project_id) = self.remote_id() {
1546 let rpc = self.client.clone();
1547 cx.spawn_weak(|_, mut cx| async move {
1548 let response = rpc
1549 .request(proto::GetCodeActions {
1550 project_id,
1551 buffer_id,
1552 start: Some(language::proto::serialize_anchor(&range.start)),
1553 end: Some(language::proto::serialize_anchor(&range.end)),
1554 })
1555 .await?;
1556
1557 buffer_handle
1558 .update(&mut cx, |buffer, _| {
1559 buffer.wait_for_version(response.version.into())
1560 })
1561 .await;
1562
1563 response
1564 .actions
1565 .into_iter()
1566 .map(language::proto::deserialize_code_action)
1567 .collect()
1568 })
1569 } else {
1570 Task::ready(Ok(Default::default()))
1571 }
1572 }
1573
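    // Applies a code action. Locally the action's stored range is refreshed and the
    // action is resolved via `codeAction/resolve` when the server returned resolve
    // data; otherwise the actions are re-requested and matched by title. The resulting
    // workspace edit is applied through `deserialize_workspace_edit`. Remote projects
    // send `ApplyCodeAction` and replay the host's transaction.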
1574 pub fn apply_code_action(
1575 &self,
1576 buffer_handle: ModelHandle<Buffer>,
1577 mut action: CodeAction,
1578 push_to_history: bool,
1579 cx: &mut ModelContext<Self>,
1580 ) -> Task<Result<ProjectTransaction>> {
1581 if self.is_local() {
1582 let buffer = buffer_handle.read(cx);
1583 let lang_name = if let Some(lang) = buffer.language() {
1584 lang.name().to_string()
1585 } else {
1586 return Task::ready(Ok(Default::default()));
1587 };
1588 let lang_server = if let Some(language_server) = buffer.language_server() {
1589 language_server.clone()
1590 } else {
1591 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1592 };
1593 let range = action.range.to_point_utf16(buffer);
1594
1595 cx.spawn(|this, mut cx| async move {
1596 if let Some(lsp_range) = action
1597 .lsp_action
1598 .data
1599 .as_mut()
1600 .and_then(|d| d.get_mut("codeActionParams"))
1601 .and_then(|d| d.get_mut("range"))
1602 {
1603 *lsp_range = serde_json::to_value(&lsp::Range::new(
1604 range.start.to_lsp_position(),
1605 range.end.to_lsp_position(),
1606 ))
1607 .unwrap();
1608 action.lsp_action = lang_server
1609 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1610 .await?;
1611 } else {
1612 let actions = this
1613 .update(&mut cx, |this, cx| {
1614 this.code_actions(&buffer_handle, action.range, cx)
1615 })
1616 .await?;
1617 action.lsp_action = actions
1618 .into_iter()
1619 .find(|a| a.lsp_action.title == action.lsp_action.title)
1620 .ok_or_else(|| anyhow!("code action is outdated"))?
1621 .lsp_action;
1622 }
1623
1624 if let Some(edit) = action.lsp_action.edit {
1625 Self::deserialize_workspace_edit(
1626 this,
1627 edit,
1628 push_to_history,
1629 lang_name,
1630 lang_server,
1631 &mut cx,
1632 )
1633 .await
1634 } else {
1635 Ok(ProjectTransaction::default())
1636 }
1637 })
1638 } else if let Some(project_id) = self.remote_id() {
1639 let client = self.client.clone();
1640 let request = proto::ApplyCodeAction {
1641 project_id,
1642 buffer_id: buffer_handle.read(cx).remote_id(),
1643 action: Some(language::proto::serialize_code_action(&action)),
1644 };
1645 cx.spawn(|this, mut cx| async move {
1646 let response = client
1647 .request(request)
1648 .await?
1649 .transaction
1650 .ok_or_else(|| anyhow!("missing transaction"))?;
1651 this.update(&mut cx, |this, cx| {
1652 this.deserialize_project_transaction(response, push_to_history, cx)
1653 })
1654 .await
1655 })
1656 } else {
1657 Task::ready(Err(anyhow!("project does not have a remote id")))
1658 }
1659 }
1660
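    // Applies an LSP `WorkspaceEdit`. Resource operations (create, rename, delete) act
    // directly on the filesystem; text edits open the target buffer through the
    // project (registering it if needed) and are applied in a transaction that is
    // recorded in the returned `ProjectTransaction`.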
1661 async fn deserialize_workspace_edit(
1662 this: ModelHandle<Self>,
1663 edit: lsp::WorkspaceEdit,
1664 push_to_history: bool,
1665 language_name: String,
1666 language_server: Arc<LanguageServer>,
1667 cx: &mut AsyncAppContext,
1668 ) -> Result<ProjectTransaction> {
1669 let fs = this.read_with(cx, |this, _| this.fs.clone());
1670 let mut operations = Vec::new();
1671 if let Some(document_changes) = edit.document_changes {
1672 match document_changes {
1673 lsp::DocumentChanges::Edits(edits) => {
1674 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
1675 }
1676 lsp::DocumentChanges::Operations(ops) => operations = ops,
1677 }
1678 } else if let Some(changes) = edit.changes {
1679 operations.extend(changes.into_iter().map(|(uri, edits)| {
1680 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1681 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1682 uri,
1683 version: None,
1684 },
1685 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1686 })
1687 }));
1688 }
1689
1690 let mut project_transaction = ProjectTransaction::default();
1691 for operation in operations {
1692 match operation {
1693 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1694 let abs_path = op
1695 .uri
1696 .to_file_path()
1697 .map_err(|_| anyhow!("can't convert URI to path"))?;
1698
1699 if let Some(parent_path) = abs_path.parent() {
1700 fs.create_dir(parent_path).await?;
1701 }
1702 if abs_path.ends_with("/") {
1703 fs.create_dir(&abs_path).await?;
1704 } else {
1705 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
1706 .await?;
1707 }
1708 }
1709 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1710 let source_abs_path = op
1711 .old_uri
1712 .to_file_path()
1713 .map_err(|_| anyhow!("can't convert URI to path"))?;
1714 let target_abs_path = op
1715 .new_uri
1716 .to_file_path()
1717 .map_err(|_| anyhow!("can't convert URI to path"))?;
1718 fs.rename(
1719 &source_abs_path,
1720 &target_abs_path,
1721 op.options.map(Into::into).unwrap_or_default(),
1722 )
1723 .await?;
1724 }
1725 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1726 let abs_path = op
1727 .uri
1728 .to_file_path()
1729 .map_err(|_| anyhow!("can't convert URI to path"))?;
1730 let options = op.options.map(Into::into).unwrap_or_default();
1731 if abs_path.ends_with("/") {
1732 fs.remove_dir(&abs_path, options).await?;
1733 } else {
1734 fs.remove_file(&abs_path, options).await?;
1735 }
1736 }
1737 lsp::DocumentChangeOperation::Edit(op) => {
1738 let buffer_to_edit = this
1739 .update(cx, |this, cx| {
1740 this.open_local_buffer_from_lsp_path(
1741 op.text_document.uri,
1742 language_name.clone(),
1743 language_server.clone(),
1744 cx,
1745 )
1746 })
1747 .await?;
1748
1749 let edits = buffer_to_edit
1750 .update(cx, |buffer, cx| {
1751 let edits = op.edits.into_iter().map(|edit| match edit {
1752 lsp::OneOf::Left(edit) => edit,
1753 lsp::OneOf::Right(edit) => edit.text_edit,
1754 });
1755 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1756 })
1757 .await?;
1758
1759 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
1760 buffer.finalize_last_transaction();
1761 buffer.start_transaction();
1762 for (range, text) in edits {
1763 buffer.edit([range], text, cx);
1764 }
1765 let transaction = if buffer.end_transaction(cx).is_some() {
1766 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1767 if !push_to_history {
1768 buffer.forget_transaction(transaction.id);
1769 }
1770 Some(transaction)
1771 } else {
1772 None
1773 };
1774
1775 transaction
1776 });
1777 if let Some(transaction) = transaction {
1778 project_transaction.0.insert(buffer_to_edit, transaction);
1779 }
1780 }
1781 }
1782 }
1783
1784 Ok(project_transaction)
1785 }
1786
1787 pub fn prepare_rename<T: ToPointUtf16>(
1788 &self,
1789 buffer: ModelHandle<Buffer>,
1790 position: T,
1791 cx: &mut ModelContext<Self>,
1792 ) -> Task<Result<Option<Range<Anchor>>>> {
1793 let position = position.to_point_utf16(buffer.read(cx));
1794 self.request_lsp(buffer, PrepareRename { position }, cx)
1795 }
1796
1797 pub fn perform_rename<T: ToPointUtf16>(
1798 &self,
1799 buffer: ModelHandle<Buffer>,
1800 position: T,
1801 new_name: String,
1802 push_to_history: bool,
1803 cx: &mut ModelContext<Self>,
1804 ) -> Task<Result<ProjectTransaction>> {
1805 let position = position.to_point_utf16(buffer.read(cx));
1806 self.request_lsp(
1807 buffer,
1808 PerformRename {
1809 position,
1810 new_name,
1811 push_to_history,
1812 },
1813 cx,
1814 )
1815 }
1816
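    // Dispatches an `LspCommand`: on a local project the request goes to the buffer's
    // language server and the response is converted back by the command; on a remote
    // project the command is serialized to a proto request answered by the host. If
    // neither path applies, a default response is returned.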
1817 fn request_lsp<R: LspCommand>(
1818 &self,
1819 buffer_handle: ModelHandle<Buffer>,
1820 request: R,
1821 cx: &mut ModelContext<Self>,
1822 ) -> Task<Result<R::Response>>
1823 where
1824 <R::LspRequest as lsp::request::Request>::Result: Send,
1825 {
1826 let buffer = buffer_handle.read(cx);
1827 if self.is_local() {
1828 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
1829 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
1830 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
1831 return cx.spawn(|this, cx| async move {
1832 let response = language_server
1833 .request::<R::LspRequest>(lsp_params)
1834 .await
1835 .context("lsp request failed")?;
1836 request
1837 .response_from_lsp(response, this, buffer_handle, cx)
1838 .await
1839 });
1840 }
1841 } else if let Some(project_id) = self.remote_id() {
1842 let rpc = self.client.clone();
1843 let message = request.to_proto(project_id, buffer);
1844 return cx.spawn(|this, cx| async move {
1845 let response = rpc.request(message).await?;
1846 request
1847 .response_from_proto(response, this, buffer_handle, cx)
1848 .await
1849 });
1850 }
1851 Task::ready(Ok(Default::default()))
1852 }
1853
1854 pub fn find_or_create_local_worktree(
1855 &self,
1856 abs_path: impl AsRef<Path>,
1857 weak: bool,
1858 cx: &mut ModelContext<Self>,
1859 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
1860 let abs_path = abs_path.as_ref();
1861 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
1862 Task::ready(Ok((tree.clone(), relative_path.into())))
1863 } else {
1864 let worktree = self.create_local_worktree(abs_path, weak, cx);
1865 cx.foreground()
1866 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
1867 }
1868 }
1869
1870 fn find_local_worktree(
1871 &self,
1872 abs_path: &Path,
1873 cx: &AppContext,
1874 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
1875 for tree in self.worktrees(cx) {
1876 if let Some(relative_path) = tree
1877 .read(cx)
1878 .as_local()
1879 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
1880 {
1881 return Some((tree.clone(), relative_path.into()));
1882 }
1883 }
1884 None
1885 }
1886
1887 pub fn is_shared(&self) -> bool {
1888 match &self.client_state {
1889 ProjectClientState::Local { is_shared, .. } => *is_shared,
1890 ProjectClientState::Remote { .. } => false,
1891 }
1892 }
1893
1894 fn create_local_worktree(
1895 &self,
1896 abs_path: impl AsRef<Path>,
1897 weak: bool,
1898 cx: &mut ModelContext<Self>,
1899 ) -> Task<Result<ModelHandle<Worktree>>> {
1900 let fs = self.fs.clone();
1901 let client = self.client.clone();
1902 let path = Arc::from(abs_path.as_ref());
1903 cx.spawn(|project, mut cx| async move {
1904 let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;
1905
1906 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
1907 project.add_worktree(&worktree, cx);
1908 (project.remote_id(), project.is_shared())
1909 });
1910
1911 if let Some(project_id) = remote_project_id {
1912 worktree
1913 .update(&mut cx, |worktree, cx| {
1914 worktree.as_local_mut().unwrap().register(project_id, cx)
1915 })
1916 .await?;
1917 if is_shared {
1918 worktree
1919 .update(&mut cx, |worktree, cx| {
1920 worktree.as_local_mut().unwrap().share(project_id, cx)
1921 })
1922 .await?;
1923 }
1924 }
1925
1926 Ok(worktree)
1927 })
1928 }
1929
1930 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
1931 self.worktrees.retain(|worktree| {
1932 worktree
1933 .upgrade(cx)
1934 .map_or(false, |w| w.read(cx).id() != id)
1935 });
1936 cx.notify();
1937 }
1938
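    // Starts tracking a worktree. Local worktrees are subscribed to so that
    // open buffers can be re-associated with their files after rescans. Weak
    // local worktrees are held via weak handles and pruned once released.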
1939 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
1940 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
1941 if worktree.read(cx).is_local() {
1942 cx.subscribe(&worktree, |this, worktree, _, cx| {
1943 this.update_local_worktree_buffers(worktree, cx);
1944 })
1945 .detach();
1946 }
1947
1948 let push_weak_handle = {
1949 let worktree = worktree.read(cx);
1950 worktree.is_local() && worktree.is_weak()
1951 };
1952 if push_weak_handle {
1953 cx.observe_release(&worktree, |this, cx| {
1954 this.worktrees
1955 .retain(|worktree| worktree.upgrade(cx).is_some());
1956 cx.notify();
1957 })
1958 .detach();
1959 self.worktrees
1960 .push(WorktreeHandle::Weak(worktree.downgrade()));
1961 } else {
1962 self.worktrees
1963 .push(WorktreeHandle::Strong(worktree.clone()));
1964 }
1965 cx.notify();
1966 }
1967
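    // After a local worktree rescan, points each open buffer at its new file:
    // resolved by entry id first, then by path, and marked as deleted (no
    // entry id) when neither exists. File changes are also sent to the server
    // when the project has a remote id.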
1968 fn update_local_worktree_buffers(
1969 &mut self,
1970 worktree_handle: ModelHandle<Worktree>,
1971 cx: &mut ModelContext<Self>,
1972 ) {
1973 let snapshot = worktree_handle.read(cx).snapshot();
1974 let mut buffers_to_delete = Vec::new();
1975 for (buffer_id, buffer) in &self.open_buffers {
1976 if let Some(buffer) = buffer.upgrade(cx) {
1977 buffer.update(cx, |buffer, cx| {
1978 if let Some(old_file) = File::from_dyn(buffer.file()) {
1979 if old_file.worktree != worktree_handle {
1980 return;
1981 }
1982
1983 let new_file = if let Some(entry) = old_file
1984 .entry_id
1985 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
1986 {
1987 File {
1988 is_local: true,
1989 entry_id: Some(entry.id),
1990 mtime: entry.mtime,
1991 path: entry.path.clone(),
1992 worktree: worktree_handle.clone(),
1993 }
1994 } else if let Some(entry) =
1995 snapshot.entry_for_path(old_file.path().as_ref())
1996 {
1997 File {
1998 is_local: true,
1999 entry_id: Some(entry.id),
2000 mtime: entry.mtime,
2001 path: entry.path.clone(),
2002 worktree: worktree_handle.clone(),
2003 }
2004 } else {
2005 File {
2006 is_local: true,
2007 entry_id: None,
2008 path: old_file.path().clone(),
2009 mtime: old_file.mtime(),
2010 worktree: worktree_handle.clone(),
2011 }
2012 };
2013
2014 if let Some(project_id) = self.remote_id() {
2015 self.client
2016 .send(proto::UpdateBufferFile {
2017 project_id,
2018 buffer_id: *buffer_id as u64,
2019 file: Some(new_file.to_proto()),
2020 })
2021 .log_err();
2022 }
2023 buffer.file_updated(Box::new(new_file), cx).detach();
2024 }
2025 });
2026 } else {
2027 buffers_to_delete.push(*buffer_id);
2028 }
2029 }
2030
2031 for buffer_id in buffers_to_delete {
2032 self.open_buffers.remove(&buffer_id);
2033 }
2034 }
2035
2036 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2037 let new_active_entry = entry.and_then(|project_path| {
2038 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2039 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2040 Some(ProjectEntry {
2041 worktree_id: project_path.worktree_id,
2042 entry_id: entry.id,
2043 })
2044 });
2045 if new_active_entry != self.active_entry {
2046 self.active_entry = new_active_entry;
2047 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2048 }
2049 }
2050
2051 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2052 self.language_servers_with_diagnostics_running > 0
2053 }
2054
2055 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2056 let mut summary = DiagnosticSummary::default();
2057 for (_, path_summary) in self.diagnostic_summaries(cx) {
2058 summary.error_count += path_summary.error_count;
2059 summary.warning_count += path_summary.warning_count;
2060 summary.info_count += path_summary.info_count;
2061 summary.hint_count += path_summary.hint_count;
2062 }
2063 summary
2064 }
2065
2066 pub fn diagnostic_summaries<'a>(
2067 &'a self,
2068 cx: &'a AppContext,
2069 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2070 self.worktrees(cx).flat_map(move |worktree| {
2071 let worktree = worktree.read(cx);
2072 let worktree_id = worktree.id();
2073 worktree
2074 .diagnostic_summaries()
2075 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2076 })
2077 }
2078
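    // Disk-based diagnostics are tracked with a counter across language
    // servers: `DiskBasedDiagnosticsStarted` is emitted when the first server
    // starts, `DiskBasedDiagnosticsUpdated` on every completion, and
    // `DiskBasedDiagnosticsFinished` once the last server completes.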
2079 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2080 self.language_servers_with_diagnostics_running += 1;
2081 if self.language_servers_with_diagnostics_running == 1 {
2082 cx.emit(Event::DiskBasedDiagnosticsStarted);
2083 }
2084 }
2085
2086 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2087 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2088 self.language_servers_with_diagnostics_running -= 1;
2089 if self.language_servers_with_diagnostics_running == 0 {
2090 cx.emit(Event::DiskBasedDiagnosticsFinished);
2091 }
2092 }
2093
2094 pub fn active_entry(&self) -> Option<ProjectEntry> {
2095 self.active_entry
2096 }
2097
2098 // RPC message handlers
2099
2100 async fn handle_unshare_project(
2101 this: ModelHandle<Self>,
2102 _: TypedEnvelope<proto::UnshareProject>,
2103 _: Arc<Client>,
2104 mut cx: AsyncAppContext,
2105 ) -> Result<()> {
2106 this.update(&mut cx, |this, cx| {
2107 if let ProjectClientState::Remote {
2108 sharing_has_stopped,
2109 ..
2110 } = &mut this.client_state
2111 {
2112 *sharing_has_stopped = true;
2113 this.collaborators.clear();
2114 cx.notify();
2115 } else {
2116 unreachable!()
2117 }
2118 });
2119
2120 Ok(())
2121 }
2122
2123 async fn handle_add_collaborator(
2124 this: ModelHandle<Self>,
2125 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2126 _: Arc<Client>,
2127 mut cx: AsyncAppContext,
2128 ) -> Result<()> {
2129 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2130 let collaborator = envelope
2131 .payload
2132 .collaborator
2133 .take()
2134 .ok_or_else(|| anyhow!("empty collaborator"))?;
2135
2136 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2137 this.update(&mut cx, |this, cx| {
2138 this.collaborators
2139 .insert(collaborator.peer_id, collaborator);
2140 cx.notify();
2141 });
2142
2143 Ok(())
2144 }
2145
2146 async fn handle_remove_collaborator(
2147 this: ModelHandle<Self>,
2148 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2149 _: Arc<Client>,
2150 mut cx: AsyncAppContext,
2151 ) -> Result<()> {
2152 this.update(&mut cx, |this, cx| {
2153 let peer_id = PeerId(envelope.payload.peer_id);
2154 let replica_id = this
2155 .collaborators
2156 .remove(&peer_id)
2157 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2158 .replica_id;
2159 this.shared_buffers.remove(&peer_id);
2160 for (_, buffer) in &this.open_buffers {
2161 if let Some(buffer) = buffer.upgrade(cx) {
2162 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2163 }
2164 }
2165 cx.notify();
2166 Ok(())
2167 })
2168 }
2169
2170 async fn handle_share_worktree(
2171 this: ModelHandle<Self>,
2172 envelope: TypedEnvelope<proto::ShareWorktree>,
2173 client: Arc<Client>,
2174 mut cx: AsyncAppContext,
2175 ) -> Result<()> {
2176 this.update(&mut cx, |this, cx| {
2177 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2178 let replica_id = this.replica_id();
2179 let worktree = envelope
2180 .payload
2181 .worktree
2182 .ok_or_else(|| anyhow!("invalid worktree"))?;
2183 let (worktree, load_task) =
2184 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2185 this.add_worktree(&worktree, cx);
2186 load_task.detach();
2187 Ok(())
2188 })
2189 }
2190
2191 async fn handle_unregister_worktree(
2192 this: ModelHandle<Self>,
2193 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2194 _: Arc<Client>,
2195 mut cx: AsyncAppContext,
2196 ) -> Result<()> {
2197 this.update(&mut cx, |this, cx| {
2198 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2199 this.remove_worktree(worktree_id, cx);
2200 Ok(())
2201 })
2202 }
2203
2204 async fn handle_update_worktree(
2205 this: ModelHandle<Self>,
2206 envelope: TypedEnvelope<proto::UpdateWorktree>,
2207 _: Arc<Client>,
2208 mut cx: AsyncAppContext,
2209 ) -> Result<()> {
2210 this.update(&mut cx, |this, cx| {
2211 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2212 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2213 worktree.update(cx, |worktree, _| {
2214 let worktree = worktree.as_remote_mut().unwrap();
2215 worktree.update_from_remote(envelope)
2216 })?;
2217 }
2218 Ok(())
2219 })
2220 }
2221
2222 async fn handle_update_diagnostic_summary(
2223 this: ModelHandle<Self>,
2224 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2225 _: Arc<Client>,
2226 mut cx: AsyncAppContext,
2227 ) -> Result<()> {
2228 this.update(&mut cx, |this, cx| {
2229 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2230 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2231 if let Some(summary) = envelope.payload.summary {
2232 let project_path = ProjectPath {
2233 worktree_id,
2234 path: Path::new(&summary.path).into(),
2235 };
2236 worktree.update(cx, |worktree, _| {
2237 worktree
2238 .as_remote_mut()
2239 .unwrap()
2240 .update_diagnostic_summary(project_path.path.clone(), &summary);
2241 });
2242 cx.emit(Event::DiagnosticsUpdated(project_path));
2243 }
2244 }
2245 Ok(())
2246 })
2247 }
2248
2249 async fn handle_disk_based_diagnostics_updating(
2250 this: ModelHandle<Self>,
2251 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2252 _: Arc<Client>,
2253 mut cx: AsyncAppContext,
2254 ) -> Result<()> {
2255 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2256 Ok(())
2257 }
2258
2259 async fn handle_disk_based_diagnostics_updated(
2260 this: ModelHandle<Self>,
2261 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2262 _: Arc<Client>,
2263 mut cx: AsyncAppContext,
2264 ) -> Result<()> {
2265 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2266 Ok(())
2267 }
2268
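    // Applies buffer operations received from a peer. Operations for a buffer
    // that is still loading are queued, and on a guest, operations for a
    // not-yet-open buffer are stashed while other buffers are loading so they
    // aren't lost.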
2269 async fn handle_update_buffer(
2270 this: ModelHandle<Self>,
2271 envelope: TypedEnvelope<proto::UpdateBuffer>,
2272 _: Arc<Client>,
2273 mut cx: AsyncAppContext,
2274 ) -> Result<()> {
2275 this.update(&mut cx, |this, cx| {
2276 let payload = envelope.payload.clone();
2277 let buffer_id = payload.buffer_id;
2278 let ops = payload
2279 .operations
2280 .into_iter()
2281 .map(|op| language::proto::deserialize_operation(op))
2282 .collect::<Result<Vec<_>, _>>()?;
2283 let is_remote = this.is_remote();
2284 match this.open_buffers.entry(buffer_id) {
2285 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2286 OpenBuffer::Loaded(buffer) => {
2287 if let Some(buffer) = buffer.upgrade(cx) {
2288 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2289 }
2290 }
2291 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2292 },
2293 hash_map::Entry::Vacant(e) => {
2294                     if is_remote && !this.loading_buffers.is_empty() {
2295 e.insert(OpenBuffer::Loading(ops));
2296 }
2297 }
2298 }
2299 Ok(())
2300 })
2301 }
2302
2303 async fn handle_update_buffer_file(
2304 this: ModelHandle<Self>,
2305 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2306 _: Arc<Client>,
2307 mut cx: AsyncAppContext,
2308 ) -> Result<()> {
2309 this.update(&mut cx, |this, cx| {
2310 let payload = envelope.payload.clone();
2311 let buffer_id = payload.buffer_id;
2312 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2313 let worktree = this
2314 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2315 .ok_or_else(|| anyhow!("no such worktree"))?;
2316 let file = File::from_proto(file, worktree.clone(), cx)?;
2317 let buffer = this
2318 .open_buffers
2319 .get_mut(&buffer_id)
2320 .and_then(|b| b.upgrade(cx))
2321 .ok_or_else(|| anyhow!("no such buffer"))?;
2322 buffer.update(cx, |buffer, cx| {
2323 buffer.file_updated(Box::new(file), cx).detach();
2324 });
2325 Ok(())
2326 })
2327 }
2328
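    // Saves a shared buffer on behalf of a guest, after verifying that every
    // edit referenced by the guest's requested version has been received.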
2329 async fn handle_save_buffer(
2330 this: ModelHandle<Self>,
2331 envelope: TypedEnvelope<proto::SaveBuffer>,
2332 _: Arc<Client>,
2333 mut cx: AsyncAppContext,
2334 ) -> Result<proto::BufferSaved> {
2335 let buffer_id = envelope.payload.buffer_id;
2336 let sender_id = envelope.original_sender_id()?;
2337 let requested_version = envelope.payload.version.try_into()?;
2338
2339 let (project_id, buffer) = this.update(&mut cx, |this, _| {
2340 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2341 let buffer = this
2342 .shared_buffers
2343 .get(&sender_id)
2344 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2345 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2346 Ok::<_, anyhow::Error>((project_id, buffer))
2347 })?;
2348
2349 if !buffer
2350 .read_with(&cx, |buffer, _| buffer.version())
2351 .observed_all(&requested_version)
2352 {
2353 Err(anyhow!("save request depends on unreceived edits"))?;
2354 }
2355
2356 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2357 Ok(proto::BufferSaved {
2358 project_id,
2359 buffer_id,
2360 version: (&saved_version).into(),
2361 mtime: Some(mtime.into()),
2362 })
2363 }
2364
2365 async fn handle_format_buffers(
2366 this: ModelHandle<Self>,
2367 envelope: TypedEnvelope<proto::FormatBuffers>,
2368 _: Arc<Client>,
2369 mut cx: AsyncAppContext,
2370 ) -> Result<proto::FormatBuffersResponse> {
2371 let sender_id = envelope.original_sender_id()?;
2372 let format = this.update(&mut cx, |this, cx| {
2373 let shared_buffers = this
2374 .shared_buffers
2375 .get(&sender_id)
2376 .ok_or_else(|| anyhow!("peer has no buffers"))?;
2377 let mut buffers = HashSet::default();
2378 for buffer_id in &envelope.payload.buffer_ids {
2379 buffers.insert(
2380 shared_buffers
2381 .get(buffer_id)
2382 .cloned()
2383 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2384 );
2385 }
2386 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2387 })?;
2388
2389 let project_transaction = format.await?;
2390 let project_transaction = this.update(&mut cx, |this, cx| {
2391 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2392 });
2393 Ok(proto::FormatBuffersResponse {
2394 transaction: Some(project_transaction),
2395 })
2396 }
2397
2398 async fn handle_get_completions(
2399 this: ModelHandle<Self>,
2400 envelope: TypedEnvelope<proto::GetCompletions>,
2401 _: Arc<Client>,
2402 mut cx: AsyncAppContext,
2403 ) -> Result<proto::GetCompletionsResponse> {
2404 let sender_id = envelope.original_sender_id()?;
2405 let position = envelope
2406 .payload
2407 .position
2408 .and_then(language::proto::deserialize_anchor)
2409 .ok_or_else(|| anyhow!("invalid position"))?;
2410 let version = clock::Global::from(envelope.payload.version);
2411 let buffer = this.read_with(&cx, |this, _| {
2412 this.shared_buffers
2413 .get(&sender_id)
2414 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2415 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2416 })?;
2417 if !buffer
2418 .read_with(&cx, |buffer, _| buffer.version())
2419 .observed_all(&version)
2420 {
2421 Err(anyhow!("completion request depends on unreceived edits"))?;
2422 }
2423 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2424 let completions = this
2425 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2426 .await?;
2427
2428 Ok(proto::GetCompletionsResponse {
2429 completions: completions
2430 .iter()
2431 .map(language::proto::serialize_completion)
2432 .collect(),
2433 version: (&version).into(),
2434 })
2435 }
2436
2437 async fn handle_apply_additional_edits_for_completion(
2438 this: ModelHandle<Self>,
2439 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2440 _: Arc<Client>,
2441 mut cx: AsyncAppContext,
2442 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2443 let sender_id = envelope.original_sender_id()?;
2444 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2445 let buffer = this
2446 .shared_buffers
2447 .get(&sender_id)
2448 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2449 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2450 let language = buffer.read(cx).language();
2451 let completion = language::proto::deserialize_completion(
2452 envelope
2453 .payload
2454 .completion
2455 .ok_or_else(|| anyhow!("invalid completion"))?,
2456 language,
2457 )?;
2458 Ok::<_, anyhow::Error>(
2459 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2460 )
2461 })?;
2462
2463 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2464 transaction: apply_additional_edits
2465 .await?
2466 .as_ref()
2467 .map(language::proto::serialize_transaction),
2468 })
2469 }
2470
2471 async fn handle_get_code_actions(
2472 this: ModelHandle<Self>,
2473 envelope: TypedEnvelope<proto::GetCodeActions>,
2474 _: Arc<Client>,
2475 mut cx: AsyncAppContext,
2476 ) -> Result<proto::GetCodeActionsResponse> {
2477 let sender_id = envelope.original_sender_id()?;
2478 let start = envelope
2479 .payload
2480 .start
2481 .and_then(language::proto::deserialize_anchor)
2482 .ok_or_else(|| anyhow!("invalid start"))?;
2483 let end = envelope
2484 .payload
2485 .end
2486 .and_then(language::proto::deserialize_anchor)
2487 .ok_or_else(|| anyhow!("invalid end"))?;
2488 let buffer = this.update(&mut cx, |this, _| {
2489 this.shared_buffers
2490 .get(&sender_id)
2491 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2492 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2493 })?;
2494 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2495 if !version.observed(start.timestamp) || !version.observed(end.timestamp) {
2496 Err(anyhow!("code action request references unreceived edits"))?;
2497 }
2498 let code_actions = this.update(&mut cx, |this, cx| {
2499 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2500 })?;
2501
2502 Ok(proto::GetCodeActionsResponse {
2503 actions: code_actions
2504 .await?
2505 .iter()
2506 .map(language::proto::serialize_code_action)
2507 .collect(),
2508 version: (&version).into(),
2509 })
2510 }
2511
2512 async fn handle_apply_code_action(
2513 this: ModelHandle<Self>,
2514 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2515 _: Arc<Client>,
2516 mut cx: AsyncAppContext,
2517 ) -> Result<proto::ApplyCodeActionResponse> {
2518 let sender_id = envelope.original_sender_id()?;
2519 let action = language::proto::deserialize_code_action(
2520 envelope
2521 .payload
2522 .action
2523 .ok_or_else(|| anyhow!("invalid action"))?,
2524 )?;
2525 let apply_code_action = this.update(&mut cx, |this, cx| {
2526 let buffer = this
2527 .shared_buffers
2528 .get(&sender_id)
2529 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2530 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2531 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2532 })?;
2533
2534 let project_transaction = apply_code_action.await?;
2535 let project_transaction = this.update(&mut cx, |this, cx| {
2536 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2537 });
2538 Ok(proto::ApplyCodeActionResponse {
2539 transaction: Some(project_transaction),
2540 })
2541 }
2542
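    // Generic handler for LSP-backed requests from guests: decodes the request
    // against the shared buffer, runs it through `request_lsp`, and converts
    // the response back to protobuf using the buffer version captured when the
    // request was decoded.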
2543 async fn handle_lsp_command<T: LspCommand>(
2544 this: ModelHandle<Self>,
2545 envelope: TypedEnvelope<T::ProtoRequest>,
2546 _: Arc<Client>,
2547 mut cx: AsyncAppContext,
2548 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
2549 where
2550 <T::LspRequest as lsp::request::Request>::Result: Send,
2551 {
2552 let sender_id = envelope.original_sender_id()?;
2553 let (request, buffer_version) = this.update(&mut cx, |this, cx| {
2554 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
2555 let buffer_handle = this
2556 .shared_buffers
2557 .get(&sender_id)
2558 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2559 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2560 let buffer = buffer_handle.read(cx);
2561 let buffer_version = buffer.version();
2562 let request = T::from_proto(envelope.payload, this, buffer)?;
2563 Ok::<_, anyhow::Error>((this.request_lsp(buffer_handle, request, cx), buffer_version))
2564 })?;
2565 let response = request.await?;
2566 this.update(&mut cx, |this, cx| {
2567 Ok(T::response_to_proto(
2568 response,
2569 this,
2570 sender_id,
2571 &buffer_version,
2572 cx,
2573 ))
2574 })
2575 }
2576
2577 async fn handle_open_buffer(
2578 this: ModelHandle<Self>,
2579 envelope: TypedEnvelope<proto::OpenBuffer>,
2580 _: Arc<Client>,
2581 mut cx: AsyncAppContext,
2582 ) -> anyhow::Result<proto::OpenBufferResponse> {
2583 let peer_id = envelope.original_sender_id()?;
2584 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2585 let open_buffer = this.update(&mut cx, |this, cx| {
2586 this.open_buffer(
2587 ProjectPath {
2588 worktree_id,
2589 path: PathBuf::from(envelope.payload.path).into(),
2590 },
2591 cx,
2592 )
2593 });
2594
2595 let buffer = open_buffer.await?;
2596 this.update(&mut cx, |this, cx| {
2597 Ok(proto::OpenBufferResponse {
2598 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
2599 })
2600 })
2601 }
2602
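    // Converts a `ProjectTransaction` into its protobuf form, serializing each
    // involved buffer for the given peer alongside its transaction.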
2603 fn serialize_project_transaction_for_peer(
2604 &mut self,
2605 project_transaction: ProjectTransaction,
2606 peer_id: PeerId,
2607 cx: &AppContext,
2608 ) -> proto::ProjectTransaction {
2609 let mut serialized_transaction = proto::ProjectTransaction {
2610 buffers: Default::default(),
2611 transactions: Default::default(),
2612 };
2613 for (buffer, transaction) in project_transaction.0 {
2614 serialized_transaction
2615 .buffers
2616 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
2617 serialized_transaction
2618 .transactions
2619 .push(language::proto::serialize_transaction(&transaction));
2620 }
2621 serialized_transaction
2622 }
2623
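    // Rebuilds a `ProjectTransaction` received over RPC: resolves each buffer,
    // waits for the referenced edits to arrive, and optionally pushes the
    // transactions onto the buffers' undo histories.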
2624 fn deserialize_project_transaction(
2625 &mut self,
2626 message: proto::ProjectTransaction,
2627 push_to_history: bool,
2628 cx: &mut ModelContext<Self>,
2629 ) -> Task<Result<ProjectTransaction>> {
2630 cx.spawn(|this, mut cx| async move {
2631 let mut project_transaction = ProjectTransaction::default();
2632 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
2633 let buffer = this
2634 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2635 .await?;
2636 let transaction = language::proto::deserialize_transaction(transaction)?;
2637 project_transaction.0.insert(buffer, transaction);
2638 }
2639 for (buffer, transaction) in &project_transaction.0 {
2640 buffer
2641 .update(&mut cx, |buffer, _| {
2642 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2643 })
2644 .await;
2645
2646 if push_to_history {
2647 buffer.update(&mut cx, |buffer, _| {
2648 buffer.push_transaction(transaction.clone(), Instant::now());
2649 });
2650 }
2651 }
2652
2653 Ok(project_transaction)
2654 })
2655 }
2656
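    // Serializes a buffer for a peer, sending the full buffer state the first
    // time that peer sees the buffer and only the buffer id afterwards.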
2657 fn serialize_buffer_for_peer(
2658 &mut self,
2659 buffer: &ModelHandle<Buffer>,
2660 peer_id: PeerId,
2661 cx: &AppContext,
2662 ) -> proto::Buffer {
2663 let buffer_id = buffer.read(cx).remote_id();
2664 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2665 match shared_buffers.entry(buffer_id) {
2666 hash_map::Entry::Occupied(_) => proto::Buffer {
2667 variant: Some(proto::buffer::Variant::Id(buffer_id)),
2668 },
2669 hash_map::Entry::Vacant(entry) => {
2670 entry.insert(buffer.clone());
2671 proto::Buffer {
2672 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
2673 }
2674 }
2675 }
2676 }
2677
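    // Resolves a `proto::Buffer` into a buffer handle. The id variant waits
    // until the corresponding buffer has been opened locally; the state
    // variant constructs a new buffer, restoring its file from the matching
    // worktree when present, and registers it with the project.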
2678 fn deserialize_buffer(
2679 &mut self,
2680 buffer: proto::Buffer,
2681 cx: &mut ModelContext<Self>,
2682 ) -> Task<Result<ModelHandle<Buffer>>> {
2683 let replica_id = self.replica_id();
2684
2685 let mut opened_buffer_tx = self.opened_buffer.clone();
2686 let mut opened_buffer_rx = self.opened_buffer.subscribe();
2687 cx.spawn(|this, mut cx| async move {
2688 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
2689 proto::buffer::Variant::Id(id) => {
2690 let buffer = loop {
2691 let buffer = this.read_with(&cx, |this, cx| {
2692 this.open_buffers
2693 .get(&id)
2694 .and_then(|buffer| buffer.upgrade(cx))
2695 });
2696 if let Some(buffer) = buffer {
2697 break buffer;
2698 }
2699 opened_buffer_rx
2700 .recv()
2701 .await
2702 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
2703 };
2704 Ok(buffer)
2705 }
2706 proto::buffer::Variant::State(mut buffer) => {
2707 let mut buffer_worktree = None;
2708 let mut buffer_file = None;
2709 if let Some(file) = buffer.file.take() {
2710 this.read_with(&cx, |this, cx| {
2711 let worktree_id = WorktreeId::from_proto(file.worktree_id);
2712 let worktree =
2713 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
2714 anyhow!("no worktree found for id {}", file.worktree_id)
2715 })?;
2716 buffer_file =
2717 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
2718 as Box<dyn language::File>);
2719 buffer_worktree = Some(worktree);
2720 Ok::<_, anyhow::Error>(())
2721 })?;
2722 }
2723
2724 let buffer = cx.add_model(|cx| {
2725 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
2726 });
2727 this.update(&mut cx, |this, cx| {
2728 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
2729 })?;
2730
2731 let _ = opened_buffer_tx.send(()).await;
2732 Ok(buffer)
2733 }
2734 }
2735 })
2736 }
2737
2738 async fn handle_close_buffer(
2739 this: ModelHandle<Self>,
2740 envelope: TypedEnvelope<proto::CloseBuffer>,
2741 _: Arc<Client>,
2742 mut cx: AsyncAppContext,
2743 ) -> anyhow::Result<()> {
2744 this.update(&mut cx, |this, cx| {
2745 if let Some(shared_buffers) =
2746 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
2747 {
2748 shared_buffers.remove(&envelope.payload.buffer_id);
2749 cx.notify();
2750 }
2751 Ok(())
2752 })
2753 }
2754
2755 async fn handle_buffer_saved(
2756 this: ModelHandle<Self>,
2757 envelope: TypedEnvelope<proto::BufferSaved>,
2758 _: Arc<Client>,
2759 mut cx: AsyncAppContext,
2760 ) -> Result<()> {
2761 let version = envelope.payload.version.try_into()?;
2762 let mtime = envelope
2763 .payload
2764 .mtime
2765 .ok_or_else(|| anyhow!("missing mtime"))?
2766 .into();
2767
2768 this.update(&mut cx, |this, cx| {
2769 let buffer = this
2770 .open_buffers
2771 .get(&envelope.payload.buffer_id)
2772 .and_then(|buffer| buffer.upgrade(cx));
2773 if let Some(buffer) = buffer {
2774 buffer.update(cx, |buffer, cx| {
2775 buffer.did_save(version, mtime, None, cx);
2776 });
2777 }
2778 Ok(())
2779 })
2780 }
2781
2782 async fn handle_buffer_reloaded(
2783 this: ModelHandle<Self>,
2784 envelope: TypedEnvelope<proto::BufferReloaded>,
2785 _: Arc<Client>,
2786 mut cx: AsyncAppContext,
2787 ) -> Result<()> {
2788 let payload = envelope.payload.clone();
2789 let version = payload.version.try_into()?;
2790 let mtime = payload
2791 .mtime
2792 .ok_or_else(|| anyhow!("missing mtime"))?
2793 .into();
2794 this.update(&mut cx, |this, cx| {
2795 let buffer = this
2796 .open_buffers
2797 .get(&payload.buffer_id)
2798 .and_then(|buffer| buffer.upgrade(cx));
2799 if let Some(buffer) = buffer {
2800 buffer.update(cx, |buffer, cx| {
2801 buffer.did_reload(version, mtime, cx);
2802 });
2803 }
2804 Ok(())
2805 })
2806 }
2807
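    // Fuzzy-matches `query` against the paths of all non-weak worktrees on the
    // background executor. Root names are included in match prefixes when the
    // project contains more than one worktree.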
2808 pub fn match_paths<'a>(
2809 &self,
2810 query: &'a str,
2811 include_ignored: bool,
2812 smart_case: bool,
2813 max_results: usize,
2814 cancel_flag: &'a AtomicBool,
2815 cx: &AppContext,
2816 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
2817 let worktrees = self
2818 .worktrees(cx)
2819 .filter(|worktree| !worktree.read(cx).is_weak())
2820 .collect::<Vec<_>>();
2821 let include_root_name = worktrees.len() > 1;
2822 let candidate_sets = worktrees
2823 .into_iter()
2824 .map(|worktree| CandidateSet {
2825 snapshot: worktree.read(cx).snapshot(),
2826 include_ignored,
2827 include_root_name,
2828 })
2829 .collect::<Vec<_>>();
2830
2831 let background = cx.background().clone();
2832 async move {
2833 fuzzy::match_paths(
2834 candidate_sets.as_slice(),
2835 query,
2836 smart_case,
2837 max_results,
2838 cancel_flag,
2839 background,
2840 )
2841 .await
2842 }
2843 }
2844}
2845
2846impl WorktreeHandle {
2847 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
2848 match self {
2849 WorktreeHandle::Strong(handle) => Some(handle.clone()),
2850 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
2851 }
2852 }
2853}
2854
2855impl OpenBuffer {
2856 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
2857 match self {
2858 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
2859 OpenBuffer::Loading(_) => None,
2860 }
2861 }
2862}
2863
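// Adapts a worktree snapshot to the fuzzy matcher's candidate-set interface.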
2864struct CandidateSet {
2865 snapshot: Snapshot,
2866 include_ignored: bool,
2867 include_root_name: bool,
2868}
2869
2870impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
2871 type Candidates = CandidateSetIter<'a>;
2872
2873 fn id(&self) -> usize {
2874 self.snapshot.id().to_usize()
2875 }
2876
2877 fn len(&self) -> usize {
2878 if self.include_ignored {
2879 self.snapshot.file_count()
2880 } else {
2881 self.snapshot.visible_file_count()
2882 }
2883 }
2884
2885 fn prefix(&self) -> Arc<str> {
2886 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
2887 self.snapshot.root_name().into()
2888 } else if self.include_root_name {
2889 format!("{}/", self.snapshot.root_name()).into()
2890 } else {
2891 "".into()
2892 }
2893 }
2894
2895 fn candidates(&'a self, start: usize) -> Self::Candidates {
2896 CandidateSetIter {
2897 traversal: self.snapshot.files(self.include_ignored, start),
2898 }
2899 }
2900}
2901
2902struct CandidateSetIter<'a> {
2903 traversal: Traversal<'a>,
2904}
2905
2906impl<'a> Iterator for CandidateSetIter<'a> {
2907 type Item = PathMatchCandidate<'a>;
2908
2909 fn next(&mut self) -> Option<Self::Item> {
2910 self.traversal.next().map(|entry| {
2911 if let EntryKind::File(char_bag) = entry.kind {
2912 PathMatchCandidate {
2913 path: &entry.path,
2914 char_bag,
2915 }
2916 } else {
2917 unreachable!()
2918 }
2919 })
2920 }
2921}
2922
2923impl Entity for Project {
2924 type Event = Event;
2925
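    // When the project is released, unregister it from the server if we are
    // the host, or leave it if we are a guest.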
2926 fn release(&mut self, _: &mut gpui::MutableAppContext) {
2927 match &self.client_state {
2928 ProjectClientState::Local { remote_id_rx, .. } => {
2929 if let Some(project_id) = *remote_id_rx.borrow() {
2930 self.client
2931 .send(proto::UnregisterProject { project_id })
2932 .log_err();
2933 }
2934 }
2935 ProjectClientState::Remote { remote_id, .. } => {
2936 self.client
2937 .send(proto::LeaveProject {
2938 project_id: *remote_id,
2939 })
2940 .log_err();
2941 }
2942 }
2943 }
2944
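    // Shut down all running language servers before the application exits.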
2945 fn app_will_quit(
2946 &mut self,
2947 _: &mut MutableAppContext,
2948 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
2949 let shutdown_futures = self
2950 .language_servers
2951 .drain()
2952 .filter_map(|(_, server)| server.shutdown())
2953 .collect::<Vec<_>>();
2954 Some(
2955 async move {
2956 futures::future::join_all(shutdown_futures).await;
2957 }
2958 .boxed(),
2959 )
2960 }
2961}
2962
2963impl Collaborator {
2964 fn from_proto(
2965 message: proto::Collaborator,
2966 user_store: &ModelHandle<UserStore>,
2967 cx: &mut AsyncAppContext,
2968 ) -> impl Future<Output = Result<Self>> {
2969 let user = user_store.update(cx, |user_store, cx| {
2970 user_store.fetch_user(message.user_id, cx)
2971 });
2972
2973 async move {
2974 Ok(Self {
2975 peer_id: PeerId(message.peer_id),
2976 user: user.await?,
2977 replica_id: message.replica_id as ReplicaId,
2978 })
2979 }
2980 }
2981}
2982
2983impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
2984 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
2985 Self {
2986 worktree_id,
2987 path: path.as_ref().into(),
2988 }
2989 }
2990}
2991
2992impl From<lsp::CreateFileOptions> for fs::CreateOptions {
2993 fn from(options: lsp::CreateFileOptions) -> Self {
2994 Self {
2995 overwrite: options.overwrite.unwrap_or(false),
2996 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
2997 }
2998 }
2999}
3000
3001impl From<lsp::RenameFileOptions> for fs::RenameOptions {
3002 fn from(options: lsp::RenameFileOptions) -> Self {
3003 Self {
3004 overwrite: options.overwrite.unwrap_or(false),
3005 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3006 }
3007 }
3008}
3009
3010impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
3011 fn from(options: lsp::DeleteFileOptions) -> Self {
3012 Self {
3013 recursive: options.recursive.unwrap_or(false),
3014 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
3015 }
3016 }
3017}
3018
3019#[cfg(test)]
3020mod tests {
3021 use super::{Event, *};
3022 use fs::RealFs;
3023 use futures::StreamExt;
3024 use gpui::test::subscribe;
3025 use language::{
3026 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
3027 };
3028 use lsp::Url;
3029 use serde_json::json;
3030 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
3031 use unindent::Unindent as _;
3032 use util::test::temp_tree;
3033 use worktree::WorktreeHandle as _;
3034
3035 #[gpui::test]
3036 async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
3037 let dir = temp_tree(json!({
3038 "root": {
3039 "apple": "",
3040 "banana": {
3041 "carrot": {
3042 "date": "",
3043 "endive": "",
3044 }
3045 },
3046 "fennel": {
3047 "grape": "",
3048 }
3049 }
3050 }));
3051
3052 let root_link_path = dir.path().join("root_link");
3053 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3054 unix::fs::symlink(
3055 &dir.path().join("root/fennel"),
3056 &dir.path().join("root/finnochio"),
3057 )
3058 .unwrap();
3059
3060 let project = Project::test(Arc::new(RealFs), &mut cx);
3061
3062 let (tree, _) = project
3063 .update(&mut cx, |project, cx| {
3064 project.find_or_create_local_worktree(&root_link_path, false, cx)
3065 })
3066 .await
3067 .unwrap();
3068
3069 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3070 .await;
3071 cx.read(|cx| {
3072 let tree = tree.read(cx);
3073 assert_eq!(tree.file_count(), 5);
3074 assert_eq!(
3075 tree.inode_for_path("fennel/grape"),
3076 tree.inode_for_path("finnochio/grape")
3077 );
3078 });
3079
3080 let cancel_flag = Default::default();
3081 let results = project
3082 .read_with(&cx, |project, cx| {
3083 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3084 })
3085 .await;
3086 assert_eq!(
3087 results
3088 .into_iter()
3089 .map(|result| result.path)
3090 .collect::<Vec<Arc<Path>>>(),
3091 vec![
3092 PathBuf::from("banana/carrot/date").into(),
3093 PathBuf::from("banana/carrot/endive").into(),
3094 ]
3095 );
3096 }
3097
3098 #[gpui::test]
3099 async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
3100 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3101 let progress_token = language_server_config
3102 .disk_based_diagnostics_progress_token
3103 .clone()
3104 .unwrap();
3105
3106 let language = Arc::new(Language::new(
3107 LanguageConfig {
3108 name: "Rust".to_string(),
3109 path_suffixes: vec!["rs".to_string()],
3110 language_server: Some(language_server_config),
3111 ..Default::default()
3112 },
3113 Some(tree_sitter_rust::language()),
3114 ));
3115
3116 let fs = FakeFs::new(cx.background());
3117 fs.insert_tree(
3118 "/dir",
3119 json!({
3120 "a.rs": "fn a() { A }",
3121 "b.rs": "const y: i32 = 1",
3122 }),
3123 )
3124 .await;
3125
3126 let project = Project::test(fs, &mut cx);
3127 project.update(&mut cx, |project, _| {
3128 Arc::get_mut(&mut project.languages).unwrap().add(language);
3129 });
3130
3131 let (tree, _) = project
3132 .update(&mut cx, |project, cx| {
3133 project.find_or_create_local_worktree("/dir", false, cx)
3134 })
3135 .await
3136 .unwrap();
3137 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3138
3139 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3140 .await;
3141
3142         // Cause the worktree to start the fake language server
3143 let _buffer = project
3144 .update(&mut cx, |project, cx| {
3145 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3146 })
3147 .await
3148 .unwrap();
3149
3150 let mut events = subscribe(&project, &mut cx);
3151
3152 let mut fake_server = fake_servers.next().await.unwrap();
3153 fake_server.start_progress(&progress_token).await;
3154 assert_eq!(
3155 events.next().await.unwrap(),
3156 Event::DiskBasedDiagnosticsStarted
3157 );
3158
3159 fake_server.start_progress(&progress_token).await;
3160 fake_server.end_progress(&progress_token).await;
3161 fake_server.start_progress(&progress_token).await;
3162
3163 fake_server
3164 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3165 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3166 version: None,
3167 diagnostics: vec![lsp::Diagnostic {
3168 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3169 severity: Some(lsp::DiagnosticSeverity::ERROR),
3170 message: "undefined variable 'A'".to_string(),
3171 ..Default::default()
3172 }],
3173 })
3174 .await;
3175 assert_eq!(
3176 events.next().await.unwrap(),
3177 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3178 );
3179
3180 fake_server.end_progress(&progress_token).await;
3181 fake_server.end_progress(&progress_token).await;
3182 assert_eq!(
3183 events.next().await.unwrap(),
3184 Event::DiskBasedDiagnosticsUpdated
3185 );
3186 assert_eq!(
3187 events.next().await.unwrap(),
3188 Event::DiskBasedDiagnosticsFinished
3189 );
3190
3191 let buffer = project
3192 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3193 .await
3194 .unwrap();
3195
3196 buffer.read_with(&cx, |buffer, _| {
3197 let snapshot = buffer.snapshot();
3198 let diagnostics = snapshot
3199 .diagnostics_in_range::<_, Point>(0..buffer.len())
3200 .collect::<Vec<_>>();
3201 assert_eq!(
3202 diagnostics,
3203 &[DiagnosticEntry {
3204 range: Point::new(0, 9)..Point::new(0, 10),
3205 diagnostic: Diagnostic {
3206 severity: lsp::DiagnosticSeverity::ERROR,
3207 message: "undefined variable 'A'".to_string(),
3208 group_id: 0,
3209 is_primary: true,
3210 ..Default::default()
3211 }
3212 }]
3213 )
3214 });
3215 }
3216
3217 #[gpui::test]
3218 async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
3219 let dir = temp_tree(json!({
3220 "root": {
3221 "dir1": {},
3222 "dir2": {
3223 "dir3": {}
3224 }
3225 }
3226 }));
3227
3228 let project = Project::test(Arc::new(RealFs), &mut cx);
3229 let (tree, _) = project
3230 .update(&mut cx, |project, cx| {
3231 project.find_or_create_local_worktree(&dir.path(), false, cx)
3232 })
3233 .await
3234 .unwrap();
3235
3236 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3237 .await;
3238
3239 let cancel_flag = Default::default();
3240 let results = project
3241 .read_with(&cx, |project, cx| {
3242 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3243 })
3244 .await;
3245
3246 assert!(results.is_empty());
3247 }
3248
3249 #[gpui::test]
3250 async fn test_definition(mut cx: gpui::TestAppContext) {
3251 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3252 let language = Arc::new(Language::new(
3253 LanguageConfig {
3254 name: "Rust".to_string(),
3255 path_suffixes: vec!["rs".to_string()],
3256 language_server: Some(language_server_config),
3257 ..Default::default()
3258 },
3259 Some(tree_sitter_rust::language()),
3260 ));
3261
3262 let fs = FakeFs::new(cx.background());
3263 fs.insert_tree(
3264 "/dir",
3265 json!({
3266 "a.rs": "const fn a() { A }",
3267 "b.rs": "const y: i32 = crate::a()",
3268 }),
3269 )
3270 .await;
3271
3272 let project = Project::test(fs, &mut cx);
3273 project.update(&mut cx, |project, _| {
3274 Arc::get_mut(&mut project.languages).unwrap().add(language);
3275 });
3276
3277 let (tree, _) = project
3278 .update(&mut cx, |project, cx| {
3279 project.find_or_create_local_worktree("/dir/b.rs", false, cx)
3280 })
3281 .await
3282 .unwrap();
3283 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3284 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3285 .await;
3286
3287 let buffer = project
3288 .update(&mut cx, |project, cx| {
3289 project.open_buffer(
3290 ProjectPath {
3291 worktree_id,
3292 path: Path::new("").into(),
3293 },
3294 cx,
3295 )
3296 })
3297 .await
3298 .unwrap();
3299
3300 let mut fake_server = fake_servers.next().await.unwrap();
3301 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params| {
3302 let params = params.text_document_position_params;
3303 assert_eq!(
3304 params.text_document.uri.to_file_path().unwrap(),
3305 Path::new("/dir/b.rs"),
3306 );
3307 assert_eq!(params.position, lsp::Position::new(0, 22));
3308
3309 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3310 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
3311 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3312 )))
3313 });
3314
3315 let mut definitions = project
3316 .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
3317 .await
3318 .unwrap();
3319
3320 assert_eq!(definitions.len(), 1);
3321 let definition = definitions.pop().unwrap();
3322 cx.update(|cx| {
3323 let target_buffer = definition.target_buffer.read(cx);
3324 assert_eq!(
3325 target_buffer
3326 .file()
3327 .unwrap()
3328 .as_local()
3329 .unwrap()
3330 .abs_path(cx),
3331 Path::new("/dir/a.rs"),
3332 );
3333 assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
3334 assert_eq!(
3335 list_worktrees(&project, cx),
3336 [("/dir/b.rs".as_ref(), false), ("/dir/a.rs".as_ref(), true)]
3337 );
3338
3339 drop(definition);
3340 });
3341 cx.read(|cx| {
3342 assert_eq!(
3343 list_worktrees(&project, cx),
3344 [("/dir/b.rs".as_ref(), false)]
3345 );
3346 });
3347
3348 fn list_worktrees<'a>(
3349 project: &'a ModelHandle<Project>,
3350 cx: &'a AppContext,
3351 ) -> Vec<(&'a Path, bool)> {
3352 project
3353 .read(cx)
3354 .worktrees(cx)
3355 .map(|worktree| {
3356 let worktree = worktree.read(cx);
3357 (
3358 worktree.as_local().unwrap().abs_path().as_ref(),
3359 worktree.is_weak(),
3360 )
3361 })
3362 .collect::<Vec<_>>()
3363 }
3364 }
3365
3366 #[gpui::test]
3367 async fn test_save_file(mut cx: gpui::TestAppContext) {
3368 let fs = FakeFs::new(cx.background());
3369 fs.insert_tree(
3370 "/dir",
3371 json!({
3372 "file1": "the old contents",
3373 }),
3374 )
3375 .await;
3376
3377 let project = Project::test(fs.clone(), &mut cx);
3378 let worktree_id = project
3379 .update(&mut cx, |p, cx| {
3380 p.find_or_create_local_worktree("/dir", false, cx)
3381 })
3382 .await
3383 .unwrap()
3384 .0
3385 .read_with(&cx, |tree, _| tree.id());
3386
3387 let buffer = project
3388 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3389 .await
3390 .unwrap();
3391 buffer
3392 .update(&mut cx, |buffer, cx| {
3393 assert_eq!(buffer.text(), "the old contents");
3394 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3395 buffer.save(cx)
3396 })
3397 .await
3398 .unwrap();
3399
3400 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3401 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3402 }
3403
3404 #[gpui::test]
3405 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
3406 let fs = FakeFs::new(cx.background());
3407 fs.insert_tree(
3408 "/dir",
3409 json!({
3410 "file1": "the old contents",
3411 }),
3412 )
3413 .await;
3414
3415 let project = Project::test(fs.clone(), &mut cx);
3416 let worktree_id = project
3417 .update(&mut cx, |p, cx| {
3418 p.find_or_create_local_worktree("/dir/file1", false, cx)
3419 })
3420 .await
3421 .unwrap()
3422 .0
3423 .read_with(&cx, |tree, _| tree.id());
3424
3425 let buffer = project
3426 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3427 .await
3428 .unwrap();
3429 buffer
3430 .update(&mut cx, |buffer, cx| {
3431 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3432 buffer.save(cx)
3433 })
3434 .await
3435 .unwrap();
3436
3437 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3438 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
3439 }
3440
3441 #[gpui::test(retries = 5)]
3442 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
3443 let dir = temp_tree(json!({
3444 "a": {
3445 "file1": "",
3446 "file2": "",
3447 "file3": "",
3448 },
3449 "b": {
3450 "c": {
3451 "file4": "",
3452 "file5": "",
3453 }
3454 }
3455 }));
3456
3457 let project = Project::test(Arc::new(RealFs), &mut cx);
3458 let rpc = project.read_with(&cx, |p, _| p.client.clone());
3459
3460 let (tree, _) = project
3461 .update(&mut cx, |p, cx| {
3462 p.find_or_create_local_worktree(dir.path(), false, cx)
3463 })
3464 .await
3465 .unwrap();
3466 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3467
3468 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3469 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
3470 async move { buffer.await.unwrap() }
3471 };
3472 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
3473 tree.read_with(cx, |tree, _| {
3474 tree.entry_for_path(path)
3475                     .unwrap_or_else(|| panic!("no entry for path {}", path))
3476 .id
3477 })
3478 };
3479
3480 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
3481 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
3482 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
3483 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
3484
3485 let file2_id = id_for_path("a/file2", &cx);
3486 let file3_id = id_for_path("a/file3", &cx);
3487 let file4_id = id_for_path("b/c/file4", &cx);
3488
3489 // Wait for the initial scan.
3490 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3491 .await;
3492
3493 // Create a remote copy of this worktree.
3494 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
3495 let (remote, load_task) = cx.update(|cx| {
3496 Worktree::remote(
3497 1,
3498 1,
3499 initial_snapshot.to_proto(&Default::default(), Default::default()),
3500 rpc.clone(),
3501 cx,
3502 )
3503 });
3504 load_task.await;
3505
3506 cx.read(|cx| {
3507 assert!(!buffer2.read(cx).is_dirty());
3508 assert!(!buffer3.read(cx).is_dirty());
3509 assert!(!buffer4.read(cx).is_dirty());
3510 assert!(!buffer5.read(cx).is_dirty());
3511 });
3512
3513 // Rename and delete files and directories.
3514 tree.flush_fs_events(&cx).await;
3515 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3516 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3517 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3518 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3519 tree.flush_fs_events(&cx).await;
3520
3521 let expected_paths = vec![
3522 "a",
3523 "a/file1",
3524 "a/file2.new",
3525 "b",
3526 "d",
3527 "d/file3",
3528 "d/file4",
3529 ];
3530
3531 cx.read(|app| {
3532 assert_eq!(
3533 tree.read(app)
3534 .paths()
3535 .map(|p| p.to_str().unwrap())
3536 .collect::<Vec<_>>(),
3537 expected_paths
3538 );
3539
3540 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
3541 assert_eq!(id_for_path("d/file3", &cx), file3_id);
3542 assert_eq!(id_for_path("d/file4", &cx), file4_id);
3543
3544 assert_eq!(
3545 buffer2.read(app).file().unwrap().path().as_ref(),
3546 Path::new("a/file2.new")
3547 );
3548 assert_eq!(
3549 buffer3.read(app).file().unwrap().path().as_ref(),
3550 Path::new("d/file3")
3551 );
3552 assert_eq!(
3553 buffer4.read(app).file().unwrap().path().as_ref(),
3554 Path::new("d/file4")
3555 );
3556 assert_eq!(
3557 buffer5.read(app).file().unwrap().path().as_ref(),
3558 Path::new("b/c/file5")
3559 );
3560
3561 assert!(!buffer2.read(app).file().unwrap().is_deleted());
3562 assert!(!buffer3.read(app).file().unwrap().is_deleted());
3563 assert!(!buffer4.read(app).file().unwrap().is_deleted());
3564 assert!(buffer5.read(app).file().unwrap().is_deleted());
3565 });
3566
3567 // Update the remote worktree. Check that it becomes consistent with the
3568 // local worktree.
3569 remote.update(&mut cx, |remote, cx| {
3570 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
3571 &initial_snapshot,
3572 1,
3573 1,
3574 0,
3575 true,
3576 );
3577 remote
3578 .as_remote_mut()
3579 .unwrap()
3580 .snapshot
3581 .apply_remote_update(update_message)
3582 .unwrap();
3583
3584 assert_eq!(
3585 remote
3586 .paths()
3587 .map(|p| p.to_str().unwrap())
3588 .collect::<Vec<_>>(),
3589 expected_paths
3590 );
3591 });
3592 }
3593
3594 #[gpui::test]
3595 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
3596 let fs = FakeFs::new(cx.background());
3597 fs.insert_tree(
3598 "/the-dir",
3599 json!({
3600 "a.txt": "a-contents",
3601 "b.txt": "b-contents",
3602 }),
3603 )
3604 .await;
3605
3606 let project = Project::test(fs.clone(), &mut cx);
3607 let worktree_id = project
3608 .update(&mut cx, |p, cx| {
3609 p.find_or_create_local_worktree("/the-dir", false, cx)
3610 })
3611 .await
3612 .unwrap()
3613 .0
3614 .read_with(&cx, |tree, _| tree.id());
3615
3616 // Spawn multiple tasks to open paths, repeating some paths.
3617 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
3618 (
3619 p.open_buffer((worktree_id, "a.txt"), cx),
3620 p.open_buffer((worktree_id, "b.txt"), cx),
3621 p.open_buffer((worktree_id, "a.txt"), cx),
3622 )
3623 });
3624
3625 let buffer_a_1 = buffer_a_1.await.unwrap();
3626 let buffer_a_2 = buffer_a_2.await.unwrap();
3627 let buffer_b = buffer_b.await.unwrap();
3628 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
3629 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
3630
3631 // There is only one buffer per path.
3632 let buffer_a_id = buffer_a_1.id();
3633 assert_eq!(buffer_a_2.id(), buffer_a_id);
3634
3635         // Open the same path again while another handle to it is still open.
3636 drop(buffer_a_1);
3637 let buffer_a_3 = project
3638 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
3639 .await
3640 .unwrap();
3641
3642 // There's still only one buffer per path.
3643 assert_eq!(buffer_a_3.id(), buffer_a_id);
3644 }
3645
3646 #[gpui::test]
3647 async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
3648 use std::fs;
3649
3650 let dir = temp_tree(json!({
3651 "file1": "abc",
3652 "file2": "def",
3653 "file3": "ghi",
3654 }));
3655
3656 let project = Project::test(Arc::new(RealFs), &mut cx);
3657 let (worktree, _) = project
3658 .update(&mut cx, |p, cx| {
3659 p.find_or_create_local_worktree(dir.path(), false, cx)
3660 })
3661 .await
3662 .unwrap();
3663 let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
3664
3665 worktree.flush_fs_events(&cx).await;
3666 worktree
3667 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
3668 .await;
3669
3670 let buffer1 = project
3671 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3672 .await
3673 .unwrap();
3674 let events = Rc::new(RefCell::new(Vec::new()));
3675
3676 // initially, the buffer isn't dirty.
3677 buffer1.update(&mut cx, |buffer, cx| {
3678 cx.subscribe(&buffer1, {
3679 let events = events.clone();
3680 move |_, _, event, _| events.borrow_mut().push(event.clone())
3681 })
3682 .detach();
3683
3684 assert!(!buffer.is_dirty());
3685 assert!(events.borrow().is_empty());
3686
3687 buffer.edit(vec![1..2], "", cx);
3688 });
3689
3690 // after the first edit, the buffer is dirty, and emits a dirtied event.
3691 buffer1.update(&mut cx, |buffer, cx| {
3692             assert_eq!(buffer.text(), "ac");
3693 assert!(buffer.is_dirty());
3694 assert_eq!(
3695 *events.borrow(),
3696 &[language::Event::Edited, language::Event::Dirtied]
3697 );
3698 events.borrow_mut().clear();
3699 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
3700 });
3701
3702 // after saving, the buffer is not dirty, and emits a saved event.
3703 buffer1.update(&mut cx, |buffer, cx| {
3704 assert!(!buffer.is_dirty());
3705 assert_eq!(*events.borrow(), &[language::Event::Saved]);
3706 events.borrow_mut().clear();
3707
3708 buffer.edit(vec![1..1], "B", cx);
3709 buffer.edit(vec![2..2], "D", cx);
3710 });
3711
3712 // after editing again, the buffer is dirty, and emits another dirty event.
3713 buffer1.update(&mut cx, |buffer, cx| {
3714             assert_eq!(buffer.text(), "aBDc");
3715 assert!(buffer.is_dirty());
3716 assert_eq!(
3717 *events.borrow(),
3718 &[
3719 language::Event::Edited,
3720 language::Event::Dirtied,
3721 language::Event::Edited,
3722 ],
3723 );
3724 events.borrow_mut().clear();
3725
3726 // TODO - currently, after restoring the buffer to its
3727             // previously-saved state, the buffer is still considered dirty.
3728 buffer.edit([1..3], "", cx);
3729             assert_eq!(buffer.text(), "ac");
3730 assert!(buffer.is_dirty());
3731 });
3732
3733 assert_eq!(*events.borrow(), &[language::Event::Edited]);
3734
3735 // When a file is deleted, the buffer is considered dirty.
3736 let events = Rc::new(RefCell::new(Vec::new()));
3737 let buffer2 = project
3738 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
3739 .await
3740 .unwrap();
3741 buffer2.update(&mut cx, |_, cx| {
3742 cx.subscribe(&buffer2, {
3743 let events = events.clone();
3744 move |_, _, event, _| events.borrow_mut().push(event.clone())
3745 })
3746 .detach();
3747 });
3748
3749 fs::remove_file(dir.path().join("file2")).unwrap();
3750 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
3751 assert_eq!(
3752 *events.borrow(),
3753 &[language::Event::Dirtied, language::Event::FileHandleChanged]
3754 );
3755
3756         // When a file that is already dirty is deleted, we don't emit a Dirtied event.
3757 let events = Rc::new(RefCell::new(Vec::new()));
3758 let buffer3 = project
3759 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
3760 .await
3761 .unwrap();
3762 buffer3.update(&mut cx, |_, cx| {
3763 cx.subscribe(&buffer3, {
3764 let events = events.clone();
3765 move |_, _, event, _| events.borrow_mut().push(event.clone())
3766 })
3767 .detach();
3768 });
3769
3770 worktree.flush_fs_events(&cx).await;
3771 buffer3.update(&mut cx, |buffer, cx| {
3772 buffer.edit(Some(0..0), "x", cx);
3773 });
3774 events.borrow_mut().clear();
3775 fs::remove_file(dir.path().join("file3")).unwrap();
3776 buffer3
3777 .condition(&cx, |_, _| !events.borrow().is_empty())
3778 .await;
3779 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
3780 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3781 }

    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
        use std::fs;

        let initial_contents = "aaa\nbbbbb\nc\n";
        let dir = temp_tree(json!({ "the-file": initial_contents }));

        let project = Project::test(Arc::new(RealFs), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        worktree
            .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let abs_path = dir.path().join("the-file");
        let buffer = project
            .update(&mut cx, |p, cx| {
                p.open_buffer((worktree_id, "the-file"), cx)
            })
            .await
            .unwrap();

        // TODO
        // Add a cursor on each row.
        // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
        //     assert!(!buffer.is_dirty());
        //     buffer.add_selection_set(
        //         &(0..3)
        //             .map(|row| Selection {
        //                 id: row as usize,
        //                 start: Point::new(row, 1),
        //                 end: Point::new(row, 1),
        //                 reversed: false,
        //                 goal: SelectionGoal::None,
        //             })
        //             .collect::<Vec<_>>(),
        //         cx,
        //     )
        // });

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(&cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(&mut cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer
        buffer.update(&mut cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }

    #[gpui::test]
    async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), &mut cx);
        let (worktree, _) = project
            .update(&mut cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());

        let buffer = project
            .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

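        // Build an LSP diagnostics message containing two groups: a warning ("error 1")
        // with one related hint, and an error ("error 2") with two related hints. The
        // hint-severity entries point back at their primary diagnostic through
        // related_information.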
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

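        // Ingest the diagnostics and snapshot the buffer. The entries should come back
        // in position order, with each hint carrying the group id of its primary
        // diagnostic.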
        project
            .update(&mut cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());

        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

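        // Diagnostics can also be queried by group: group 0 is "error 1" plus its hint,
        // and group 1 is "error 2" plus its two hints.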
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }

    #[gpui::test]
    async fn test_rename(mut cx: gpui::TestAppContext) {
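        // Set up a Rust language backed by a fake language server, so the test can
        // script the LSP responses below.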
        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), &mut cx);
        project.update(&mut cx, |project, _| {
            Arc::get_mut(&mut project.languages).unwrap().add(language);
        });

        let (tree, _) = project
            .update(&mut cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", false, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        let buffer = project
            .update(&mut cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("one.rs")), cx)
            })
            .await
            .unwrap();

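        // Opening the Rust buffer starts the fake language server; take the running
        // instance so the test can answer its requests.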
        let mut fake_server = fake_servers.next().await.unwrap();

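        // First, prepare the rename. The cursor is at offset 7, inside "ONE", and the
        // fake server reports that the range 6..9 can be renamed.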
        let response = project.update(&mut cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _>(|params| {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                )))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(&cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

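        // Then perform the rename. The fake server returns a WorkspaceEdit that touches
        // both files, and the resulting transaction should contain one entry per edited
        // buffer.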
        let response = project.update(&mut cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _>(|params| {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                })
            })
            .next()
            .await
            .unwrap();
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(&cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(&cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }
}