1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, serialize_anchor},
19 range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
20 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
21 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
22};
23use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
24use lsp_command::*;
25use postage::{broadcast, prelude::Stream, sink::Sink, watch};
26use rand::prelude::*;
27use search::SearchQuery;
28use sha2::{Digest, Sha256};
29use smol::block_on;
30use std::{
31 cell::RefCell,
32 cmp,
33 convert::TryInto,
34 hash::Hash,
35 mem,
36 ops::Range,
37 path::{Component, Path, PathBuf},
38 rc::Rc,
39 sync::{atomic::AtomicBool, Arc},
40 time::Instant,
41};
42use util::{post_inc, ResultExt, TryFutureExt as _};
43
44pub use fs::*;
45pub use worktree::*;
46
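/// The state for a single open project: the worktrees being edited, the
/// buffers that are open within them, the language servers started for those
/// worktrees, and the collaborators connected over RPC.
///
/// A project is either local (backed by the file system through [`Fs`]) or a
/// remote replica of another peer's project joined through the collaboration
/// server; see [`ProjectClientState`].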
47pub struct Project {
48 worktrees: Vec<WorktreeHandle>,
49 active_entry: Option<ProjectEntry>,
50 languages: Arc<LanguageRegistry>,
51 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
52 started_language_servers:
53 HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
54 client: Arc<client::Client>,
55 user_store: ModelHandle<UserStore>,
56 fs: Arc<dyn Fs>,
57 client_state: ProjectClientState,
58 collaborators: HashMap<PeerId, Collaborator>,
59 subscriptions: Vec<client::Subscription>,
60 language_servers_with_diagnostics_running: isize,
61 opened_buffer: broadcast::Sender<()>,
62 loading_buffers: HashMap<
63 ProjectPath,
64 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
65 >,
66 buffers_state: Rc<RefCell<ProjectBuffers>>,
67 shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
68 nonce: u128,
69}
70
71#[derive(Default)]
72struct ProjectBuffers {
73 buffer_request_count: usize,
74 preserved_buffers: Vec<ModelHandle<Buffer>>,
75 open_buffers: HashMap<u64, OpenBuffer>,
76}
77
78enum OpenBuffer {
79 Loaded(WeakModelHandle<Buffer>),
80 Loading(Vec<Operation>),
81}
82
83enum WorktreeHandle {
84 Strong(ModelHandle<Worktree>),
85 Weak(WeakModelHandle<Worktree>),
86}
87
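/// Whether this project is the local, authoritative copy or a remote replica,
/// along with the connection state each case needs.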
88enum ProjectClientState {
89 Local {
90 is_shared: bool,
91 remote_id_tx: watch::Sender<Option<u64>>,
92 remote_id_rx: watch::Receiver<Option<u64>>,
93 _maintain_remote_id_task: Task<Option<()>>,
94 },
95 Remote {
96 sharing_has_stopped: bool,
97 remote_id: u64,
98 replica_id: ReplicaId,
99 },
100}
101
102#[derive(Clone, Debug)]
103pub struct Collaborator {
104 pub user: Arc<User>,
105 pub peer_id: PeerId,
106 pub replica_id: ReplicaId,
107}
108
109#[derive(Clone, Debug, PartialEq)]
110pub enum Event {
111 ActiveEntryChanged(Option<ProjectEntry>),
112 WorktreeRemoved(WorktreeId),
113 DiskBasedDiagnosticsStarted,
114 DiskBasedDiagnosticsUpdated,
115 DiskBasedDiagnosticsFinished,
116 DiagnosticsUpdated(ProjectPath),
117}
118
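/// A path to an entry, relative to the root of the worktree that contains it.
///
/// An illustrative construction (assumes `worktree_id` was obtained from an
/// existing worktree; the relative path shown is arbitrary):
///
/// ```ignore
/// let project_path = ProjectPath {
///     worktree_id,
///     path: Path::new("src/main.rs").into(),
/// };
/// ```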
119#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
120pub struct ProjectPath {
121 pub worktree_id: WorktreeId,
122 pub path: Arc<Path>,
123}
124
125#[derive(Clone, Debug, Default, PartialEq)]
126pub struct DiagnosticSummary {
127 pub error_count: usize,
128 pub warning_count: usize,
129 pub info_count: usize,
130 pub hint_count: usize,
131}
132
133#[derive(Debug)]
134pub struct Location {
135 pub buffer: ModelHandle<Buffer>,
136 pub range: Range<language::Anchor>,
137}
138
139#[derive(Debug)]
140pub struct DocumentHighlight {
141 pub range: Range<language::Anchor>,
142 pub kind: DocumentHighlightKind,
143}
144
145#[derive(Clone, Debug)]
146pub struct Symbol {
147 pub source_worktree_id: WorktreeId,
148 pub worktree_id: WorktreeId,
149 pub language_name: String,
150 pub path: PathBuf,
151 pub label: CodeLabel,
152 pub name: String,
153 pub kind: lsp::SymbolKind,
154 pub range: Range<PointUtf16>,
155 pub signature: [u8; 32],
156}
157
158pub struct BufferRequestHandle(Rc<RefCell<ProjectBuffers>>);
159
160#[derive(Default)]
161pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
162
163impl DiagnosticSummary {
164 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
165 let mut this = Self {
166 error_count: 0,
167 warning_count: 0,
168 info_count: 0,
169 hint_count: 0,
170 };
171
172 for entry in diagnostics {
173 if entry.diagnostic.is_primary {
174 match entry.diagnostic.severity {
175 DiagnosticSeverity::ERROR => this.error_count += 1,
176 DiagnosticSeverity::WARNING => this.warning_count += 1,
177 DiagnosticSeverity::INFORMATION => this.info_count += 1,
178 DiagnosticSeverity::HINT => this.hint_count += 1,
179 _ => {}
180 }
181 }
182 }
183
184 this
185 }
186
187 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
188 proto::DiagnosticSummary {
189 path: path.to_string_lossy().to_string(),
190 error_count: self.error_count as u32,
191 warning_count: self.warning_count as u32,
192 info_count: self.info_count as u32,
193 hint_count: self.hint_count as u32,
194 }
195 }
196}
197
198#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
199pub struct ProjectEntry {
200 pub worktree_id: WorktreeId,
201 pub entry_id: usize,
202}
203
204impl Project {
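    /// Registers the RPC message and request handlers that back the `handle_*`
    /// methods below on the given client, e.g. `Project::init(&client)`.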
205 pub fn init(client: &Arc<Client>) {
206 client.add_entity_message_handler(Self::handle_add_collaborator);
207 client.add_entity_message_handler(Self::handle_buffer_reloaded);
208 client.add_entity_message_handler(Self::handle_buffer_saved);
209 client.add_entity_message_handler(Self::handle_close_buffer);
210 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
211 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
212 client.add_entity_message_handler(Self::handle_remove_collaborator);
213 client.add_entity_message_handler(Self::handle_register_worktree);
214 client.add_entity_message_handler(Self::handle_unregister_worktree);
215 client.add_entity_message_handler(Self::handle_unshare_project);
216 client.add_entity_message_handler(Self::handle_update_buffer_file);
217 client.add_entity_message_handler(Self::handle_update_buffer);
218 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
219 client.add_entity_message_handler(Self::handle_update_worktree);
220 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
221 client.add_entity_request_handler(Self::handle_apply_code_action);
222 client.add_entity_request_handler(Self::handle_format_buffers);
223 client.add_entity_request_handler(Self::handle_get_code_actions);
224 client.add_entity_request_handler(Self::handle_get_completions);
225 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
226 client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
227 client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
228 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
229 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
230 client.add_entity_request_handler(Self::handle_search_project);
231 client.add_entity_request_handler(Self::handle_get_project_symbols);
232 client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
233 client.add_entity_request_handler(Self::handle_open_buffer);
234 client.add_entity_request_handler(Self::handle_save_buffer);
235 }
236
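    /// Creates a new, empty local project. Whenever the client is connected,
    /// the project registers itself with the server and records the remote id
    /// it is assigned, which is what later allows it to be shared.
    ///
    /// A minimal construction sketch (assumes `client`, `user_store`,
    /// `languages`, and `fs` have already been set up):
    ///
    /// ```ignore
    /// let project = Project::local(client, user_store, languages, fs, cx);
    /// ```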
237 pub fn local(
238 client: Arc<Client>,
239 user_store: ModelHandle<UserStore>,
240 languages: Arc<LanguageRegistry>,
241 fs: Arc<dyn Fs>,
242 cx: &mut MutableAppContext,
243 ) -> ModelHandle<Self> {
244 cx.add_model(|cx: &mut ModelContext<Self>| {
245 let (remote_id_tx, remote_id_rx) = watch::channel();
246 let _maintain_remote_id_task = cx.spawn_weak({
247 let rpc = client.clone();
248 move |this, mut cx| {
249 async move {
250 let mut status = rpc.status();
251 while let Some(status) = status.recv().await {
252 if let Some(this) = this.upgrade(&cx) {
253 let remote_id = if let client::Status::Connected { .. } = status {
254 let response = rpc.request(proto::RegisterProject {}).await?;
255 Some(response.project_id)
256 } else {
257 None
258 };
259
260 if let Some(project_id) = remote_id {
261 let mut registrations = Vec::new();
262 this.update(&mut cx, |this, cx| {
263 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
264 registrations.push(worktree.update(
265 cx,
266 |worktree, cx| {
267 let worktree = worktree.as_local_mut().unwrap();
268 worktree.register(project_id, cx)
269 },
270 ));
271 }
272 });
273 for registration in registrations {
274 registration.await?;
275 }
276 }
277 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
278 }
279 }
280 Ok(())
281 }
282 .log_err()
283 }
284 });
285
286 Self {
287 worktrees: Default::default(),
288 collaborators: Default::default(),
289 buffers_state: Default::default(),
290 loading_buffers: Default::default(),
291 shared_buffers: Default::default(),
292 client_state: ProjectClientState::Local {
293 is_shared: false,
294 remote_id_tx,
295 remote_id_rx,
296 _maintain_remote_id_task,
297 },
298 opened_buffer: broadcast::channel(1).0,
299 subscriptions: Vec::new(),
300 active_entry: None,
301 languages,
302 client,
303 user_store,
304 fs,
305 language_servers_with_diagnostics_running: 0,
306 language_servers: Default::default(),
307 started_language_servers: Default::default(),
308 nonce: StdRng::from_entropy().gen(),
309 }
310 })
311 }
312
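    /// Joins the project identified by `remote_id` as a guest, building a
    /// remote replica of its worktrees and loading its current collaborators.
    /// The client is authenticated and connected as part of the call.
    ///
    /// An illustrative call (assumes an async context in which `project_id`
    /// and the other arguments are available):
    ///
    /// ```ignore
    /// let project = Project::remote(
    ///     project_id,
    ///     client,
    ///     user_store,
    ///     languages,
    ///     fs,
    ///     &mut cx,
    /// )
    /// .await?;
    /// ```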
313 pub async fn remote(
314 remote_id: u64,
315 client: Arc<Client>,
316 user_store: ModelHandle<UserStore>,
317 languages: Arc<LanguageRegistry>,
318 fs: Arc<dyn Fs>,
319 cx: &mut AsyncAppContext,
320 ) -> Result<ModelHandle<Self>> {
321 client.authenticate_and_connect(&cx).await?;
322
323 let response = client
324 .request(proto::JoinProject {
325 project_id: remote_id,
326 })
327 .await?;
328
329 let replica_id = response.replica_id as ReplicaId;
330
331 let mut worktrees = Vec::new();
332 for worktree in response.worktrees {
333 let (worktree, load_task) = cx
334 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
335 worktrees.push(worktree);
336 load_task.detach();
337 }
338
339 let this = cx.add_model(|cx| {
340 let mut this = Self {
341 worktrees: Vec::new(),
342 loading_buffers: Default::default(),
343 opened_buffer: broadcast::channel(1).0,
344 shared_buffers: Default::default(),
345 active_entry: None,
346 collaborators: Default::default(),
347 languages,
348 user_store: user_store.clone(),
349 fs,
350 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
351 client,
352 client_state: ProjectClientState::Remote {
353 sharing_has_stopped: false,
354 remote_id,
355 replica_id,
356 },
357 language_servers_with_diagnostics_running: 0,
358 language_servers: Default::default(),
359 started_language_servers: Default::default(),
360 buffers_state: Default::default(),
361 nonce: StdRng::from_entropy().gen(),
362 };
363 for worktree in worktrees {
364 this.add_worktree(&worktree, cx);
365 }
366 this
367 });
368
369 let user_ids = response
370 .collaborators
371 .iter()
372 .map(|peer| peer.user_id)
373 .collect();
374 user_store
375 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
376 .await?;
377 let mut collaborators = HashMap::default();
378 for message in response.collaborators {
379 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
380 collaborators.insert(collaborator.peer_id, collaborator);
381 }
382
383 this.update(cx, |this, _| {
384 this.collaborators = collaborators;
385 });
386
387 Ok(this)
388 }
389
390 #[cfg(any(test, feature = "test-support"))]
391 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
392 let languages = Arc::new(LanguageRegistry::new());
393 let http_client = client::test::FakeHttpClient::with_404_response();
394 let client = client::Client::new(http_client.clone());
395 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
396 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
397 }
398
399 #[cfg(any(test, feature = "test-support"))]
400 pub fn shared_buffer(&self, peer_id: PeerId, remote_id: u64) -> Option<ModelHandle<Buffer>> {
401 self.shared_buffers
402 .get(&peer_id)
403 .and_then(|buffers| buffers.get(&remote_id))
404 .cloned()
405 }
406
407 #[cfg(any(test, feature = "test-support"))]
408 pub fn has_buffered_operations(&self) -> bool {
409 self.buffers_state
410 .borrow()
411 .open_buffers
412 .values()
413 .any(|buffer| matches!(buffer, OpenBuffer::Loading(_)))
414 }
415
416 #[cfg(any(test, feature = "test-support"))]
417 pub fn languages(&self) -> &Arc<LanguageRegistry> {
418 &self.languages
419 }
420
421 pub fn fs(&self) -> &Arc<dyn Fs> {
422 &self.fs
423 }
424
425 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
426 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
427 *remote_id_tx.borrow_mut() = remote_id;
428 }
429
430 self.subscriptions.clear();
431 if let Some(remote_id) = remote_id {
432 self.subscriptions
433 .push(self.client.add_model_for_remote_entity(remote_id, cx));
434 }
435 }
436
437 pub fn remote_id(&self) -> Option<u64> {
438 match &self.client_state {
439 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
440 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
441 }
442 }
443
444 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
445 let mut id = None;
446 let mut watch = None;
447 match &self.client_state {
448 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
449 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
450 }
451
452 async move {
453 if let Some(id) = id {
454 return id;
455 }
456 let mut watch = watch.unwrap();
457 loop {
458 let id = *watch.borrow();
459 if let Some(id) = id {
460 return id;
461 }
462 watch.recv().await;
463 }
464 }
465 }
466
467 pub fn replica_id(&self) -> ReplicaId {
468 match &self.client_state {
469 ProjectClientState::Local { .. } => 0,
470 ProjectClientState::Remote { replica_id, .. } => *replica_id,
471 }
472 }
473
474 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
475 &self.collaborators
476 }
477
478 pub fn worktrees<'a>(
479 &'a self,
480 cx: &'a AppContext,
481 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
482 self.worktrees
483 .iter()
484 .filter_map(move |worktree| worktree.upgrade(cx))
485 }
486
487 pub fn strong_worktrees<'a>(
488 &'a self,
489 cx: &'a AppContext,
490 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
491 self.worktrees.iter().filter_map(|worktree| {
492 worktree.upgrade(cx).and_then(|worktree| {
493 if worktree.read(cx).is_weak() {
494 None
495 } else {
496 Some(worktree)
497 }
498 })
499 })
500 }
501
502 pub fn worktree_for_id(
503 &self,
504 id: WorktreeId,
505 cx: &AppContext,
506 ) -> Option<ModelHandle<Worktree>> {
507 self.worktrees(cx)
508 .find(|worktree| worktree.read(cx).id() == id)
509 }
510
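    /// Makes this local project visible to collaborators by sending a
    /// `ShareProject` request and then sharing each worktree. Fails for
    /// remote projects and for local projects that have not yet been assigned
    /// a remote id.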
511 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
512 let rpc = self.client.clone();
513 cx.spawn(|this, mut cx| async move {
514 let project_id = this.update(&mut cx, |this, _| {
515 if let ProjectClientState::Local {
516 is_shared,
517 remote_id_rx,
518 ..
519 } = &mut this.client_state
520 {
521 *is_shared = true;
522 remote_id_rx
523 .borrow()
524 .ok_or_else(|| anyhow!("no project id"))
525 } else {
526 Err(anyhow!("can't share a remote project"))
527 }
528 })?;
529
530 rpc.request(proto::ShareProject { project_id }).await?;
531 let mut tasks = Vec::new();
532 this.update(&mut cx, |this, cx| {
533 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
534 worktree.update(cx, |worktree, cx| {
535 let worktree = worktree.as_local_mut().unwrap();
536 tasks.push(worktree.share(project_id, cx));
537 });
538 }
539 });
540 for task in tasks {
541 task.await?;
542 }
543 this.update(&mut cx, |_, cx| cx.notify());
544 Ok(())
545 })
546 }
547
548 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
549 let rpc = self.client.clone();
550 cx.spawn(|this, mut cx| async move {
551 let project_id = this.update(&mut cx, |this, _| {
552 if let ProjectClientState::Local {
553 is_shared,
554 remote_id_rx,
555 ..
556 } = &mut this.client_state
557 {
558 *is_shared = false;
559 remote_id_rx
560 .borrow()
561 .ok_or_else(|| anyhow!("no project id"))
562 } else {
                    Err(anyhow!("can't unshare a remote project"))
564 }
565 })?;
566
567 rpc.send(proto::UnshareProject { project_id })?;
568 this.update(&mut cx, |this, cx| {
569 this.collaborators.clear();
570 this.shared_buffers.clear();
571 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
572 worktree.update(cx, |worktree, _| {
573 worktree.as_local_mut().unwrap().unshare();
574 });
575 }
576 cx.notify()
577 });
578 Ok(())
579 })
580 }
581
582 pub fn is_read_only(&self) -> bool {
583 match &self.client_state {
584 ProjectClientState::Local { .. } => false,
585 ProjectClientState::Remote {
586 sharing_has_stopped,
587 ..
588 } => *sharing_has_stopped,
589 }
590 }
591
592 pub fn is_local(&self) -> bool {
593 match &self.client_state {
594 ProjectClientState::Local { .. } => true,
595 ProjectClientState::Remote { .. } => false,
596 }
597 }
598
599 pub fn is_remote(&self) -> bool {
600 !self.is_local()
601 }
602
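    /// Opens the buffer at the given [`ProjectPath`], reusing an existing
    /// buffer if that path is already open or currently loading. Local paths
    /// are loaded from disk; remote paths are requested from the host over
    /// RPC.
    ///
    /// A usage sketch (illustrative only; assumes `project` is a
    /// `ModelHandle<Project>` and `worktree_id` refers to one of its
    /// worktrees):
    ///
    /// ```ignore
    /// let open_buffer = project.update(cx, |project, cx| {
    ///     project.open_buffer(
    ///         ProjectPath {
    ///             worktree_id,
    ///             path: Path::new("src/lib.rs").into(),
    ///         },
    ///         cx,
    ///     )
    /// });
    /// let buffer = open_buffer.await?;
    /// ```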
603 pub fn open_buffer(
604 &mut self,
605 path: impl Into<ProjectPath>,
606 cx: &mut ModelContext<Self>,
607 ) -> Task<Result<ModelHandle<Buffer>>> {
608 let project_path = path.into();
609 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
610 worktree
611 } else {
612 return Task::ready(Err(anyhow!("no such worktree")));
613 };
614
615 // If there is already a buffer for the given path, then return it.
616 let existing_buffer = self.get_open_buffer(&project_path, cx);
617 if let Some(existing_buffer) = existing_buffer {
618 return Task::ready(Ok(existing_buffer));
619 }
620
621 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
622 // If the given path is already being loaded, then wait for that existing
623 // task to complete and return the same buffer.
624 hash_map::Entry::Occupied(e) => e.get().clone(),
625
626 // Otherwise, record the fact that this path is now being loaded.
627 hash_map::Entry::Vacant(entry) => {
628 let (mut tx, rx) = postage::watch::channel();
629 entry.insert(rx.clone());
630
631 let load_buffer = if worktree.read(cx).is_local() {
632 self.open_local_buffer(&project_path.path, &worktree, cx)
633 } else {
634 self.open_remote_buffer(&project_path.path, &worktree, cx)
635 };
636
637 cx.spawn(move |this, mut cx| async move {
638 let load_result = load_buffer.await;
639 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
640 // Record the fact that the buffer is no longer loading.
641 this.loading_buffers.remove(&project_path);
642 let buffer = load_result.map_err(Arc::new)?;
643 Ok(buffer)
644 }));
645 })
646 .detach();
647 rx
648 }
649 };
650
651 cx.foreground().spawn(async move {
652 loop {
653 if let Some(result) = loading_watch.borrow().as_ref() {
654 match result {
655 Ok(buffer) => return Ok(buffer.clone()),
656 Err(error) => return Err(anyhow!("{}", error)),
657 }
658 }
659 loading_watch.recv().await;
660 }
661 })
662 }
663
664 fn open_local_buffer(
665 &mut self,
666 path: &Arc<Path>,
667 worktree: &ModelHandle<Worktree>,
668 cx: &mut ModelContext<Self>,
669 ) -> Task<Result<ModelHandle<Buffer>>> {
670 let load_buffer = worktree.update(cx, |worktree, cx| {
671 let worktree = worktree.as_local_mut().unwrap();
672 worktree.load_buffer(path, cx)
673 });
674 let worktree = worktree.downgrade();
675 cx.spawn(|this, mut cx| async move {
676 let buffer = load_buffer.await?;
677 let worktree = worktree
678 .upgrade(&cx)
679 .ok_or_else(|| anyhow!("worktree was removed"))?;
680 this.update(&mut cx, |this, cx| {
681 this.register_buffer(&buffer, Some(&worktree), cx)
682 })?;
683 Ok(buffer)
684 })
685 }
686
687 fn open_remote_buffer(
688 &mut self,
689 path: &Arc<Path>,
690 worktree: &ModelHandle<Worktree>,
691 cx: &mut ModelContext<Self>,
692 ) -> Task<Result<ModelHandle<Buffer>>> {
693 let rpc = self.client.clone();
694 let project_id = self.remote_id().unwrap();
695 let remote_worktree_id = worktree.read(cx).id();
696 let path = path.clone();
697 let path_string = path.to_string_lossy().to_string();
698 let request_handle = self.start_buffer_request(cx);
699 cx.spawn(|this, mut cx| async move {
700 let response = rpc
701 .request(proto::OpenBuffer {
702 project_id,
703 worktree_id: remote_worktree_id.to_proto(),
704 path: path_string,
705 })
706 .await?;
707 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
708
709 this.update(&mut cx, |this, cx| {
710 this.deserialize_buffer(buffer, request_handle, cx)
711 })
712 .await
713 })
714 }
715
716 fn open_local_buffer_via_lsp(
717 &mut self,
718 abs_path: lsp::Url,
719 lang_name: String,
720 lang_server: Arc<LanguageServer>,
721 cx: &mut ModelContext<Self>,
722 ) -> Task<Result<ModelHandle<Buffer>>> {
723 cx.spawn(|this, mut cx| async move {
724 let abs_path = abs_path
725 .to_file_path()
726 .map_err(|_| anyhow!("can't convert URI to path"))?;
727 let (worktree, relative_path) = if let Some(result) =
728 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
729 {
730 result
731 } else {
732 let worktree = this
733 .update(&mut cx, |this, cx| {
734 this.create_local_worktree(&abs_path, true, cx)
735 })
736 .await?;
737 this.update(&mut cx, |this, cx| {
738 this.language_servers
739 .insert((worktree.read(cx).id(), lang_name), lang_server);
740 });
741 (worktree, PathBuf::new())
742 };
743
744 let project_path = ProjectPath {
745 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
746 path: relative_path.into(),
747 };
748 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
749 .await
750 })
751 }
752
753 fn start_buffer_request(&self, cx: &AppContext) -> BufferRequestHandle {
754 BufferRequestHandle::new(self.buffers_state.clone(), cx)
755 }
756
757 pub fn save_buffer_as(
758 &self,
759 buffer: ModelHandle<Buffer>,
760 abs_path: PathBuf,
761 cx: &mut ModelContext<Project>,
762 ) -> Task<Result<()>> {
763 let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
764 cx.spawn(|this, mut cx| async move {
765 let (worktree, path) = worktree_task.await?;
766 worktree
767 .update(&mut cx, |worktree, cx| {
768 worktree
769 .as_local_mut()
770 .unwrap()
771 .save_buffer_as(buffer.clone(), path, cx)
772 })
773 .await?;
774 this.update(&mut cx, |this, cx| {
775 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
776 });
777 Ok(())
778 })
779 }
780
781 #[cfg(any(test, feature = "test-support"))]
782 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
783 let path = path.into();
784 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
785 self.buffers_state
786 .borrow()
787 .open_buffers
788 .iter()
789 .any(|(_, buffer)| {
790 if let Some(buffer) = buffer.upgrade(cx) {
791 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
792 if file.worktree == worktree && file.path() == &path.path {
793 return true;
794 }
795 }
796 }
797 false
798 })
799 } else {
800 false
801 }
802 }
803
804 pub fn get_open_buffer(
805 &mut self,
806 path: &ProjectPath,
807 cx: &mut ModelContext<Self>,
808 ) -> Option<ModelHandle<Buffer>> {
809 let mut result = None;
810 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
811 self.buffers_state
812 .borrow_mut()
813 .open_buffers
814 .retain(|_, buffer| {
815 if let Some(buffer) = buffer.upgrade(cx) {
816 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
817 if file.worktree == worktree && file.path() == &path.path {
818 result = Some(buffer);
819 }
820 }
821 true
822 } else {
823 false
824 }
825 });
826 result
827 }
828
829 fn register_buffer(
830 &mut self,
831 buffer: &ModelHandle<Buffer>,
832 worktree: Option<&ModelHandle<Worktree>>,
833 cx: &mut ModelContext<Self>,
834 ) -> Result<()> {
835 let remote_id = buffer.read(cx).remote_id();
836 match self
837 .buffers_state
838 .borrow_mut()
839 .open_buffers
840 .insert(remote_id, OpenBuffer::Loaded(buffer.downgrade()))
841 {
842 None => {}
843 Some(OpenBuffer::Loading(operations)) => {
844 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
845 }
846 Some(OpenBuffer::Loaded(existing_handle)) => {
847 if existing_handle.upgrade(cx).is_some() {
848 Err(anyhow!(
849 "already registered buffer with remote id {}",
850 remote_id
851 ))?
852 }
853 }
854 }
855 self.assign_language_to_buffer(&buffer, worktree, cx);
856 Ok(())
857 }
858
859 fn assign_language_to_buffer(
860 &mut self,
861 buffer: &ModelHandle<Buffer>,
862 worktree: Option<&ModelHandle<Worktree>>,
863 cx: &mut ModelContext<Self>,
864 ) -> Option<()> {
865 let (path, full_path) = {
866 let file = buffer.read(cx).file()?;
867 (file.path().clone(), file.full_path(cx))
868 };
869
870 // If the buffer has a language, set it and start/assign the language server
871 if let Some(language) = self.languages.select_language(&full_path) {
872 buffer.update(cx, |buffer, cx| {
873 buffer.set_language(Some(language.clone()), cx);
874 });
875
876 // For local worktrees, start a language server if needed.
877 // Also assign the language server and any previously stored diagnostics to the buffer.
878 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
879 let worktree_id = local_worktree.id();
880 let worktree_abs_path = local_worktree.abs_path().clone();
881 let buffer = buffer.downgrade();
882 let language_server =
883 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
884
885 cx.spawn_weak(|_, mut cx| async move {
886 if let Some(language_server) = language_server.await {
887 if let Some(buffer) = buffer.upgrade(&cx) {
888 buffer.update(&mut cx, |buffer, cx| {
889 buffer.set_language_server(Some(language_server), cx);
890 });
891 }
892 }
893 })
894 .detach();
895 }
896 }
897
898 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
899 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
900 buffer.update(cx, |buffer, cx| {
901 buffer.update_diagnostics(diagnostics, None, cx).log_err();
902 });
903 }
904 }
905
906 None
907 }
908
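    /// Returns the language server for the given worktree and language,
    /// starting one if it isn't already running. The spawned task also wires
    /// up the server's `PublishDiagnostics` and `Progress` notifications so
    /// that diagnostics are forwarded to the project, and to collaborators
    /// when the project is shared.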
909 fn start_language_server(
910 &mut self,
911 worktree_id: WorktreeId,
912 worktree_path: Arc<Path>,
913 language: Arc<Language>,
914 cx: &mut ModelContext<Self>,
915 ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
916 enum LspEvent {
917 DiagnosticsStart,
918 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
919 DiagnosticsFinish,
920 }
921
922 let key = (worktree_id, language.name().to_string());
923 self.started_language_servers
924 .entry(key.clone())
925 .or_insert_with(|| {
926 let language_server = self.languages.start_language_server(
927 &language,
928 worktree_path,
929 self.client.http_client(),
930 cx,
931 );
932 let rpc = self.client.clone();
933 cx.spawn_weak(|this, mut cx| async move {
934 let language_server = language_server?.await.log_err()?;
935 if let Some(this) = this.upgrade(&cx) {
936 this.update(&mut cx, |this, _| {
937 this.language_servers.insert(key, language_server.clone());
938 });
939 }
940
941 let disk_based_sources = language
942 .disk_based_diagnostic_sources()
943 .cloned()
944 .unwrap_or_default();
945 let disk_based_diagnostics_progress_token =
946 language.disk_based_diagnostics_progress_token().cloned();
947 let has_disk_based_diagnostic_progress_token =
948 disk_based_diagnostics_progress_token.is_some();
949 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
950
951 // Listen for `PublishDiagnostics` notifications.
952 language_server
953 .on_notification::<lsp::notification::PublishDiagnostics, _>({
954 let diagnostics_tx = diagnostics_tx.clone();
955 move |params| {
956 if !has_disk_based_diagnostic_progress_token {
957 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
958 }
959 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
960 .ok();
961 if !has_disk_based_diagnostic_progress_token {
962 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
963 }
964 }
965 })
966 .detach();
967
968 // Listen for `Progress` notifications. Send an event when the language server
969 // transitions between running jobs and not running any jobs.
970 let mut running_jobs_for_this_server: i32 = 0;
971 language_server
972 .on_notification::<lsp::notification::Progress, _>(move |params| {
973 let token = match params.token {
974 lsp::NumberOrString::Number(_) => None,
975 lsp::NumberOrString::String(token) => Some(token),
976 };
977
978 if token == disk_based_diagnostics_progress_token {
979 match params.value {
980 lsp::ProgressParamsValue::WorkDone(progress) => {
981 match progress {
982 lsp::WorkDoneProgress::Begin(_) => {
983 running_jobs_for_this_server += 1;
984 if running_jobs_for_this_server == 1 {
985 block_on(
986 diagnostics_tx
987 .send(LspEvent::DiagnosticsStart),
988 )
989 .ok();
990 }
991 }
992 lsp::WorkDoneProgress::End(_) => {
993 running_jobs_for_this_server -= 1;
994 if running_jobs_for_this_server == 0 {
995 block_on(
996 diagnostics_tx
997 .send(LspEvent::DiagnosticsFinish),
998 )
999 .ok();
1000 }
1001 }
1002 _ => {}
1003 }
1004 }
1005 }
1006 }
1007 })
1008 .detach();
1009
1010 // Process all the LSP events.
1011 cx.spawn(|mut cx| async move {
1012 while let Ok(message) = diagnostics_rx.recv().await {
1013 let this = this.upgrade(&cx)?;
1014 match message {
1015 LspEvent::DiagnosticsStart => {
1016 this.update(&mut cx, |this, cx| {
1017 this.disk_based_diagnostics_started(cx);
1018 if let Some(project_id) = this.remote_id() {
1019 rpc.send(proto::DiskBasedDiagnosticsUpdating {
1020 project_id,
1021 })
1022 .log_err();
1023 }
1024 });
1025 }
1026 LspEvent::DiagnosticsUpdate(mut params) => {
1027 language.process_diagnostics(&mut params);
1028 this.update(&mut cx, |this, cx| {
1029 this.update_diagnostics(params, &disk_based_sources, cx)
1030 .log_err();
1031 });
1032 }
1033 LspEvent::DiagnosticsFinish => {
1034 this.update(&mut cx, |this, cx| {
1035 this.disk_based_diagnostics_finished(cx);
1036 if let Some(project_id) = this.remote_id() {
1037 rpc.send(proto::DiskBasedDiagnosticsUpdated {
1038 project_id,
1039 })
1040 .log_err();
1041 }
1042 });
1043 }
1044 }
1045 }
1046 Some(())
1047 })
1048 .detach();
1049
1050 Some(language_server)
1051 })
1052 .shared()
1053 })
1054 .clone()
1055 }
1056
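    /// Converts an LSP `PublishDiagnostics` notification into this project's
    /// diagnostic entries. Each diagnostic that isn't merely related
    /// information for another one starts a new group; related entries in the
    /// same file are added to that group as non-primary diagnostics, and any
    /// severities reported for those supporting diagnostics are applied in a
    /// second pass.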
1057 pub fn update_diagnostics(
1058 &mut self,
1059 params: lsp::PublishDiagnosticsParams,
1060 disk_based_sources: &HashSet<String>,
1061 cx: &mut ModelContext<Self>,
1062 ) -> Result<()> {
1063 let abs_path = params
1064 .uri
1065 .to_file_path()
1066 .map_err(|_| anyhow!("URI is not a file"))?;
1067 let mut next_group_id = 0;
1068 let mut diagnostics = Vec::default();
1069 let mut primary_diagnostic_group_ids = HashMap::default();
1070 let mut sources_by_group_id = HashMap::default();
1071 let mut supporting_diagnostic_severities = HashMap::default();
        for diagnostic in &params.diagnostics {
1073 let source = diagnostic.source.as_ref();
1074 let code = diagnostic.code.as_ref().map(|code| match code {
1075 lsp::NumberOrString::Number(code) => code.to_string(),
1076 lsp::NumberOrString::String(code) => code.clone(),
1077 });
1078 let range = range_from_lsp(diagnostic.range);
1079 let is_supporting = diagnostic
1080 .related_information
1081 .as_ref()
1082 .map_or(false, |infos| {
1083 infos.iter().any(|info| {
1084 primary_diagnostic_group_ids.contains_key(&(
1085 source,
1086 code.clone(),
1087 range_from_lsp(info.location.range),
1088 ))
1089 })
1090 });
1091
1092 if is_supporting {
1093 if let Some(severity) = diagnostic.severity {
1094 supporting_diagnostic_severities
1095 .insert((source, code.clone(), range), severity);
1096 }
1097 } else {
1098 let group_id = post_inc(&mut next_group_id);
1099 let is_disk_based =
1100 source.map_or(false, |source| disk_based_sources.contains(source));
1101
1102 sources_by_group_id.insert(group_id, source);
1103 primary_diagnostic_group_ids
1104 .insert((source, code.clone(), range.clone()), group_id);
1105
1106 diagnostics.push(DiagnosticEntry {
1107 range,
1108 diagnostic: Diagnostic {
1109 code: code.clone(),
1110 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1111 message: diagnostic.message.clone(),
1112 group_id,
1113 is_primary: true,
1114 is_valid: true,
1115 is_disk_based,
1116 },
1117 });
1118 if let Some(infos) = &diagnostic.related_information {
1119 for info in infos {
1120 if info.location.uri == params.uri && !info.message.is_empty() {
1121 let range = range_from_lsp(info.location.range);
1122 diagnostics.push(DiagnosticEntry {
1123 range,
1124 diagnostic: Diagnostic {
1125 code: code.clone(),
1126 severity: DiagnosticSeverity::INFORMATION,
1127 message: info.message.clone(),
1128 group_id,
1129 is_primary: false,
1130 is_valid: true,
1131 is_disk_based,
1132 },
1133 });
1134 }
1135 }
1136 }
1137 }
1138 }
1139
1140 for entry in &mut diagnostics {
1141 let diagnostic = &mut entry.diagnostic;
1142 if !diagnostic.is_primary {
1143 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1144 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1145 source,
1146 diagnostic.code.clone(),
1147 entry.range.clone(),
1148 )) {
1149 diagnostic.severity = severity;
1150 }
1151 }
1152 }
1153
1154 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1155 Ok(())
1156 }
1157
1158 pub fn update_diagnostic_entries(
1159 &mut self,
1160 abs_path: PathBuf,
1161 version: Option<i32>,
1162 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1163 cx: &mut ModelContext<Project>,
1164 ) -> Result<(), anyhow::Error> {
1165 let (worktree, relative_path) = self
1166 .find_local_worktree(&abs_path, cx)
1167 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1168 let project_path = ProjectPath {
1169 worktree_id: worktree.read(cx).id(),
1170 path: relative_path.into(),
1171 };
1172
1173 for buffer in self.buffers_state.borrow().open_buffers.values() {
1174 if let Some(buffer) = buffer.upgrade(cx) {
1175 if buffer
1176 .read(cx)
1177 .file()
1178 .map_or(false, |file| *file.path() == project_path.path)
1179 {
1180 buffer.update(cx, |buffer, cx| {
1181 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1182 })?;
1183 break;
1184 }
1185 }
1186 }
1187 worktree.update(cx, |worktree, cx| {
1188 worktree
1189 .as_local_mut()
1190 .ok_or_else(|| anyhow!("not a local worktree"))?
1191 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1192 })?;
1193 cx.emit(Event::DiagnosticsUpdated(project_path));
1194 Ok(())
1195 }
1196
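    /// Formats the given buffers, returning one transaction per buffer that
    /// actually changed. Buffers backed by local files are formatted through
    /// their language server's `textDocument/formatting` request; buffers in a
    /// remote project are formatted by the host via a `FormatBuffers` request.
    ///
    /// A usage sketch (illustrative; assumes `buffer` is an open
    /// `ModelHandle<Buffer>` belonging to this project):
    ///
    /// ```ignore
    /// let format = project.update(cx, |project, cx| {
    ///     let mut buffers = HashSet::default();
    ///     buffers.insert(buffer.clone());
    ///     project.format(buffers, true, cx)
    /// });
    /// let project_transaction = format.await?;
    /// ```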
1197 pub fn format(
1198 &self,
1199 buffers: HashSet<ModelHandle<Buffer>>,
1200 push_to_history: bool,
1201 cx: &mut ModelContext<Project>,
1202 ) -> Task<Result<ProjectTransaction>> {
1203 let mut local_buffers = Vec::new();
1204 let mut remote_buffers = None;
1205 for buffer_handle in buffers {
1206 let buffer = buffer_handle.read(cx);
1207 let worktree;
1208 if let Some(file) = File::from_dyn(buffer.file()) {
1209 worktree = file.worktree.clone();
1210 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1211 let lang_server;
1212 if let Some(lang) = buffer.language() {
1213 if let Some(server) = self
1214 .language_servers
1215 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1216 {
1217 lang_server = server.clone();
1218 } else {
1219 return Task::ready(Ok(Default::default()));
1220 };
1221 } else {
1222 return Task::ready(Ok(Default::default()));
1223 }
1224
1225 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1226 } else {
1227 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1228 }
1229 } else {
1230 return Task::ready(Ok(Default::default()));
1231 }
1232 }
1233
1234 let remote_buffers = self.remote_id().zip(remote_buffers);
1235 let client = self.client.clone();
1236 let request_handle = self.start_buffer_request(cx);
1237
1238 cx.spawn(|this, mut cx| async move {
1239 let mut project_transaction = ProjectTransaction::default();
1240
1241 if let Some((project_id, remote_buffers)) = remote_buffers {
1242 let response = client
1243 .request(proto::FormatBuffers {
1244 project_id,
1245 buffer_ids: remote_buffers
1246 .iter()
1247 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1248 .collect(),
1249 })
1250 .await?
1251 .transaction
1252 .ok_or_else(|| anyhow!("missing transaction"))?;
1253 project_transaction = this
1254 .update(&mut cx, |this, cx| {
1255 this.deserialize_project_transaction(
1256 response,
1257 push_to_history,
1258 request_handle,
1259 cx,
1260 )
1261 })
1262 .await?;
1263 }
1264
1265 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1266 let lsp_edits = lang_server
1267 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1268 text_document: lsp::TextDocumentIdentifier::new(
1269 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1270 ),
1271 options: Default::default(),
1272 work_done_progress_params: Default::default(),
1273 })
1274 .await?;
1275
1276 if let Some(lsp_edits) = lsp_edits {
1277 let edits = buffer
1278 .update(&mut cx, |buffer, cx| {
1279 buffer.edits_from_lsp(lsp_edits, None, cx)
1280 })
1281 .await?;
1282 buffer.update(&mut cx, |buffer, cx| {
1283 buffer.finalize_last_transaction();
1284 buffer.start_transaction();
1285 for (range, text) in edits {
1286 buffer.edit([range], text, cx);
1287 }
1288 if buffer.end_transaction(cx).is_some() {
1289 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1290 if !push_to_history {
1291 buffer.forget_transaction(transaction.id);
1292 }
1293 project_transaction.0.insert(cx.handle(), transaction);
1294 }
1295 });
1296 }
1297 }
1298
1299 Ok(project_transaction)
1300 })
1301 }
1302
1303 pub fn definition<T: ToPointUtf16>(
1304 &self,
1305 buffer: &ModelHandle<Buffer>,
1306 position: T,
1307 cx: &mut ModelContext<Self>,
1308 ) -> Task<Result<Vec<Location>>> {
1309 let position = position.to_point_utf16(buffer.read(cx));
1310 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1311 }
1312
1313 pub fn references<T: ToPointUtf16>(
1314 &self,
1315 buffer: &ModelHandle<Buffer>,
1316 position: T,
1317 cx: &mut ModelContext<Self>,
1318 ) -> Task<Result<Vec<Location>>> {
1319 let position = position.to_point_utf16(buffer.read(cx));
1320 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1321 }
1322
1323 pub fn document_highlights<T: ToPointUtf16>(
1324 &self,
1325 buffer: &ModelHandle<Buffer>,
1326 position: T,
1327 cx: &mut ModelContext<Self>,
1328 ) -> Task<Result<Vec<DocumentHighlight>>> {
1329 let position = position.to_point_utf16(buffer.read(cx));
1330 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1331 }
1332
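    /// Searches for workspace symbols matching `query`. For a local project
    /// this fans out one `workspace/symbol` request per running language
    /// server; for a remote project the query is forwarded to the host as a
    /// `GetProjectSymbols` request.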
1333 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
1334 if self.is_local() {
1335 let mut language_servers = HashMap::default();
1336 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
1337 if let Some((worktree, language)) = self
1338 .worktree_for_id(*worktree_id, cx)
1339 .and_then(|worktree| worktree.read(cx).as_local())
1340 .zip(self.languages.get_language(language_name))
1341 {
1342 language_servers
1343 .entry(Arc::as_ptr(language_server))
1344 .or_insert((
1345 language_server.clone(),
1346 *worktree_id,
1347 worktree.abs_path().clone(),
1348 language.clone(),
1349 ));
1350 }
1351 }
1352
1353 let mut requests = Vec::new();
1354 for (language_server, _, _, _) in language_servers.values() {
1355 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
1356 lsp::WorkspaceSymbolParams {
1357 query: query.to_string(),
1358 ..Default::default()
1359 },
1360 ));
1361 }
1362
1363 cx.spawn_weak(|this, cx| async move {
1364 let responses = futures::future::try_join_all(requests).await?;
1365
1366 let mut symbols = Vec::new();
1367 if let Some(this) = this.upgrade(&cx) {
1368 this.read_with(&cx, |this, cx| {
1369 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
1370 language_servers.into_values().zip(responses)
1371 {
1372 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
1373 |lsp_symbol| {
1374 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
1375 let mut worktree_id = source_worktree_id;
1376 let path;
1377 if let Some((worktree, rel_path)) =
1378 this.find_local_worktree(&abs_path, cx)
1379 {
1380 worktree_id = worktree.read(cx).id();
1381 path = rel_path;
1382 } else {
1383 path = relativize_path(&worktree_abs_path, &abs_path);
1384 }
1385
1386 let label = language
1387 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
1388 .unwrap_or_else(|| {
1389 CodeLabel::plain(lsp_symbol.name.clone(), None)
1390 });
1391 let signature = this.symbol_signature(worktree_id, &path);
1392
1393 Some(Symbol {
1394 source_worktree_id,
1395 worktree_id,
1396 language_name: language.name().to_string(),
1397 name: lsp_symbol.name,
1398 kind: lsp_symbol.kind,
1399 label,
1400 path,
1401 range: range_from_lsp(lsp_symbol.location.range),
1402 signature,
1403 })
1404 },
1405 ));
1406 }
1407 })
1408 }
1409
1410 Ok(symbols)
1411 })
1412 } else if let Some(project_id) = self.remote_id() {
1413 let request = self.client.request(proto::GetProjectSymbols {
1414 project_id,
1415 query: query.to_string(),
1416 });
1417 cx.spawn_weak(|this, cx| async move {
1418 let response = request.await?;
1419 let mut symbols = Vec::new();
1420 if let Some(this) = this.upgrade(&cx) {
1421 this.read_with(&cx, |this, _| {
1422 symbols.extend(
1423 response
1424 .symbols
1425 .into_iter()
1426 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
1427 );
1428 })
1429 }
1430 Ok(symbols)
1431 })
1432 } else {
1433 Task::ready(Ok(Default::default()))
1434 }
1435 }
1436
1437 pub fn open_buffer_for_symbol(
1438 &mut self,
1439 symbol: &Symbol,
1440 cx: &mut ModelContext<Self>,
1441 ) -> Task<Result<ModelHandle<Buffer>>> {
1442 if self.is_local() {
1443 let language_server = if let Some(server) = self
1444 .language_servers
1445 .get(&(symbol.source_worktree_id, symbol.language_name.clone()))
1446 {
1447 server.clone()
1448 } else {
1449 return Task::ready(Err(anyhow!(
1450 "language server for worktree and language not found"
1451 )));
1452 };
1453
1454 let worktree_abs_path = if let Some(worktree_abs_path) = self
1455 .worktree_for_id(symbol.worktree_id, cx)
1456 .and_then(|worktree| worktree.read(cx).as_local())
1457 .map(|local_worktree| local_worktree.abs_path())
1458 {
1459 worktree_abs_path
1460 } else {
1461 return Task::ready(Err(anyhow!("worktree not found for symbol")));
1462 };
1463 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
1464 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
1465 uri
1466 } else {
1467 return Task::ready(Err(anyhow!("invalid symbol path")));
1468 };
1469
1470 self.open_local_buffer_via_lsp(
1471 symbol_uri,
1472 symbol.language_name.clone(),
1473 language_server,
1474 cx,
1475 )
1476 } else if let Some(project_id) = self.remote_id() {
1477 let request_handle = self.start_buffer_request(cx);
1478 let request = self.client.request(proto::OpenBufferForSymbol {
1479 project_id,
1480 symbol: Some(serialize_symbol(symbol)),
1481 });
1482 cx.spawn(|this, mut cx| async move {
1483 let response = request.await?;
1484 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
1485 this.update(&mut cx, |this, cx| {
1486 this.deserialize_buffer(buffer, request_handle, cx)
1487 })
1488 .await
1489 })
1490 } else {
1491 Task::ready(Err(anyhow!("project does not have a remote id")))
1492 }
1493 }
1494
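    /// Requests completions at the given position. Locally this issues a
    /// `textDocument/completion` request to the buffer's language server;
    /// remotely it asks the host via `GetCompletions`. Completions whose
    /// replacement range would be clipped by the buffer are filtered out.
    ///
    /// An illustrative call (assumes `buffer` is open in this project and
    /// `position` is a valid point within it):
    ///
    /// ```ignore
    /// let completions = project.update(cx, |project, cx| {
    ///     project.completions(&buffer, position, cx)
    /// });
    /// let completions = completions.await?;
    /// ```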
1495 pub fn completions<T: ToPointUtf16>(
1496 &self,
1497 source_buffer_handle: &ModelHandle<Buffer>,
1498 position: T,
1499 cx: &mut ModelContext<Self>,
1500 ) -> Task<Result<Vec<Completion>>> {
1501 let source_buffer_handle = source_buffer_handle.clone();
1502 let source_buffer = source_buffer_handle.read(cx);
1503 let buffer_id = source_buffer.remote_id();
1504 let language = source_buffer.language().cloned();
1505 let worktree;
1506 let buffer_abs_path;
1507 if let Some(file) = File::from_dyn(source_buffer.file()) {
1508 worktree = file.worktree.clone();
1509 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1510 } else {
1511 return Task::ready(Ok(Default::default()));
1512 };
1513
1514 let position = position.to_point_utf16(source_buffer);
1515 let anchor = source_buffer.anchor_after(position);
1516
1517 if worktree.read(cx).as_local().is_some() {
1518 let buffer_abs_path = buffer_abs_path.unwrap();
1519 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1520 server
1521 } else {
1522 return Task::ready(Ok(Default::default()));
1523 };
1524
1525 cx.spawn(|_, cx| async move {
1526 let completions = lang_server
1527 .request::<lsp::request::Completion>(lsp::CompletionParams {
1528 text_document_position: lsp::TextDocumentPositionParams::new(
1529 lsp::TextDocumentIdentifier::new(
1530 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1531 ),
1532 position.to_lsp_position(),
1533 ),
1534 context: Default::default(),
1535 work_done_progress_params: Default::default(),
1536 partial_result_params: Default::default(),
1537 })
1538 .await
1539 .context("lsp completion request failed")?;
1540
1541 let completions = if let Some(completions) = completions {
1542 match completions {
1543 lsp::CompletionResponse::Array(completions) => completions,
1544 lsp::CompletionResponse::List(list) => list.items,
1545 }
1546 } else {
1547 Default::default()
1548 };
1549
1550 source_buffer_handle.read_with(&cx, |this, _| {
1551 Ok(completions
1552 .into_iter()
1553 .filter_map(|lsp_completion| {
1554 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1555 lsp::CompletionTextEdit::Edit(edit) => {
1556 (range_from_lsp(edit.range), edit.new_text.clone())
1557 }
1558 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1559 log::info!("unsupported insert/replace completion");
1560 return None;
1561 }
1562 };
1563
1564 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1565 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1566 if clipped_start == old_range.start && clipped_end == old_range.end {
1567 Some(Completion {
1568 old_range: this.anchor_before(old_range.start)
1569 ..this.anchor_after(old_range.end),
1570 new_text,
1571 label: language
1572 .as_ref()
1573 .and_then(|l| l.label_for_completion(&lsp_completion))
1574 .unwrap_or_else(|| {
1575 CodeLabel::plain(
1576 lsp_completion.label.clone(),
1577 lsp_completion.filter_text.as_deref(),
1578 )
1579 }),
1580 lsp_completion,
1581 })
1582 } else {
1583 None
1584 }
1585 })
1586 .collect())
1587 })
1588 })
1589 } else if let Some(project_id) = self.remote_id() {
1590 let rpc = self.client.clone();
1591 let message = proto::GetCompletions {
1592 project_id,
1593 buffer_id,
1594 position: Some(language::proto::serialize_anchor(&anchor)),
1595 version: (&source_buffer.version()).into(),
1596 };
1597 cx.spawn_weak(|_, mut cx| async move {
1598 let response = rpc.request(message).await?;
1599
1600 source_buffer_handle
1601 .update(&mut cx, |buffer, _| {
1602 buffer.wait_for_version(response.version.into())
1603 })
1604 .await;
1605
1606 response
1607 .completions
1608 .into_iter()
1609 .map(|completion| {
1610 language::proto::deserialize_completion(completion, language.as_ref())
1611 })
1612 .collect()
1613 })
1614 } else {
1615 Task::ready(Ok(Default::default()))
1616 }
1617 }
1618
1619 pub fn apply_additional_edits_for_completion(
1620 &self,
1621 buffer_handle: ModelHandle<Buffer>,
1622 completion: Completion,
1623 push_to_history: bool,
1624 cx: &mut ModelContext<Self>,
1625 ) -> Task<Result<Option<Transaction>>> {
1626 let buffer = buffer_handle.read(cx);
1627 let buffer_id = buffer.remote_id();
1628
1629 if self.is_local() {
1630 let lang_server = if let Some(language_server) = buffer.language_server() {
1631 language_server.clone()
1632 } else {
1633 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1634 };
1635
1636 cx.spawn(|_, mut cx| async move {
1637 let resolved_completion = lang_server
1638 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1639 .await?;
1640 if let Some(edits) = resolved_completion.additional_text_edits {
1641 let edits = buffer_handle
1642 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1643 .await?;
1644 buffer_handle.update(&mut cx, |buffer, cx| {
1645 buffer.finalize_last_transaction();
1646 buffer.start_transaction();
1647 for (range, text) in edits {
1648 buffer.edit([range], text, cx);
1649 }
1650 let transaction = if buffer.end_transaction(cx).is_some() {
1651 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1652 if !push_to_history {
1653 buffer.forget_transaction(transaction.id);
1654 }
1655 Some(transaction)
1656 } else {
1657 None
1658 };
1659 Ok(transaction)
1660 })
1661 } else {
1662 Ok(None)
1663 }
1664 })
1665 } else if let Some(project_id) = self.remote_id() {
1666 let client = self.client.clone();
1667 cx.spawn(|_, mut cx| async move {
1668 let response = client
1669 .request(proto::ApplyCompletionAdditionalEdits {
1670 project_id,
1671 buffer_id,
1672 completion: Some(language::proto::serialize_completion(&completion)),
1673 })
1674 .await?;
1675
1676 if let Some(transaction) = response.transaction {
1677 let transaction = language::proto::deserialize_transaction(transaction)?;
1678 buffer_handle
1679 .update(&mut cx, |buffer, _| {
1680 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1681 })
1682 .await;
1683 if push_to_history {
1684 buffer_handle.update(&mut cx, |buffer, _| {
1685 buffer.push_transaction(transaction.clone(), Instant::now());
1686 });
1687 }
1688 Ok(Some(transaction))
1689 } else {
1690 Ok(None)
1691 }
1692 })
1693 } else {
1694 Task::ready(Err(anyhow!("project does not have a remote id")))
1695 }
1696 }
1697
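    /// Fetches the code actions available for the given range, currently
    /// limited to the quickfix, refactor, and refactor-extract kinds. Remote
    /// projects forward the request to the host as `GetCodeActions`.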
1698 pub fn code_actions<T: ToOffset>(
1699 &self,
1700 buffer_handle: &ModelHandle<Buffer>,
1701 range: Range<T>,
1702 cx: &mut ModelContext<Self>,
1703 ) -> Task<Result<Vec<CodeAction>>> {
1704 let buffer_handle = buffer_handle.clone();
1705 let buffer = buffer_handle.read(cx);
1706 let buffer_id = buffer.remote_id();
1707 let worktree;
1708 let buffer_abs_path;
1709 if let Some(file) = File::from_dyn(buffer.file()) {
1710 worktree = file.worktree.clone();
1711 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1712 } else {
1713 return Task::ready(Ok(Default::default()));
1714 };
1715 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1716
1717 if worktree.read(cx).as_local().is_some() {
1718 let buffer_abs_path = buffer_abs_path.unwrap();
1719 let lang_name;
1720 let lang_server;
1721 if let Some(lang) = buffer.language() {
1722 lang_name = lang.name().to_string();
1723 if let Some(server) = self
1724 .language_servers
1725 .get(&(worktree.read(cx).id(), lang_name.clone()))
1726 {
1727 lang_server = server.clone();
1728 } else {
1729 return Task::ready(Ok(Default::default()));
1730 };
1731 } else {
1732 return Task::ready(Ok(Default::default()));
1733 }
1734
1735 let lsp_range = lsp::Range::new(
1736 range.start.to_point_utf16(buffer).to_lsp_position(),
1737 range.end.to_point_utf16(buffer).to_lsp_position(),
1738 );
1739 cx.foreground().spawn(async move {
1740 Ok(lang_server
1741 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1742 text_document: lsp::TextDocumentIdentifier::new(
1743 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1744 ),
1745 range: lsp_range,
1746 work_done_progress_params: Default::default(),
1747 partial_result_params: Default::default(),
1748 context: lsp::CodeActionContext {
1749 diagnostics: Default::default(),
1750 only: Some(vec![
1751 lsp::CodeActionKind::QUICKFIX,
1752 lsp::CodeActionKind::REFACTOR,
1753 lsp::CodeActionKind::REFACTOR_EXTRACT,
1754 ]),
1755 },
1756 })
1757 .await?
1758 .unwrap_or_default()
1759 .into_iter()
1760 .filter_map(|entry| {
1761 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1762 Some(CodeAction {
1763 range: range.clone(),
1764 lsp_action,
1765 })
1766 } else {
1767 None
1768 }
1769 })
1770 .collect())
1771 })
1772 } else if let Some(project_id) = self.remote_id() {
1773 let rpc = self.client.clone();
1774 cx.spawn_weak(|_, mut cx| async move {
1775 let response = rpc
1776 .request(proto::GetCodeActions {
1777 project_id,
1778 buffer_id,
1779 start: Some(language::proto::serialize_anchor(&range.start)),
1780 end: Some(language::proto::serialize_anchor(&range.end)),
1781 })
1782 .await?;
1783
1784 buffer_handle
1785 .update(&mut cx, |buffer, _| {
1786 buffer.wait_for_version(response.version.into())
1787 })
1788 .await;
1789
1790 response
1791 .actions
1792 .into_iter()
1793 .map(language::proto::deserialize_code_action)
1794 .collect()
1795 })
1796 } else {
1797 Task::ready(Ok(Default::default()))
1798 }
1799 }
1800
1801 pub fn apply_code_action(
1802 &self,
1803 buffer_handle: ModelHandle<Buffer>,
1804 mut action: CodeAction,
1805 push_to_history: bool,
1806 cx: &mut ModelContext<Self>,
1807 ) -> Task<Result<ProjectTransaction>> {
1808 if self.is_local() {
1809 let buffer = buffer_handle.read(cx);
1810 let lang_name = if let Some(lang) = buffer.language() {
1811 lang.name().to_string()
1812 } else {
1813 return Task::ready(Ok(Default::default()));
1814 };
1815 let lang_server = if let Some(language_server) = buffer.language_server() {
1816 language_server.clone()
1817 } else {
1818 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1819 };
1820 let range = action.range.to_point_utf16(buffer);
1821
1822 cx.spawn(|this, mut cx| async move {
1823 if let Some(lsp_range) = action
1824 .lsp_action
1825 .data
1826 .as_mut()
1827 .and_then(|d| d.get_mut("codeActionParams"))
1828 .and_then(|d| d.get_mut("range"))
1829 {
1830 *lsp_range = serde_json::to_value(&lsp::Range::new(
1831 range.start.to_lsp_position(),
1832 range.end.to_lsp_position(),
1833 ))
1834 .unwrap();
1835 action.lsp_action = lang_server
1836 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1837 .await?;
1838 } else {
1839 let actions = this
1840 .update(&mut cx, |this, cx| {
1841 this.code_actions(&buffer_handle, action.range, cx)
1842 })
1843 .await?;
1844 action.lsp_action = actions
1845 .into_iter()
1846 .find(|a| a.lsp_action.title == action.lsp_action.title)
1847 .ok_or_else(|| anyhow!("code action is outdated"))?
1848 .lsp_action;
1849 }
1850
1851 if let Some(edit) = action.lsp_action.edit {
1852 Self::deserialize_workspace_edit(
1853 this,
1854 edit,
1855 push_to_history,
1856 lang_name,
1857 lang_server,
1858 &mut cx,
1859 )
1860 .await
1861 } else {
1862 Ok(ProjectTransaction::default())
1863 }
1864 })
1865 } else if let Some(project_id) = self.remote_id() {
1866 let client = self.client.clone();
1867 let request_handle = self.start_buffer_request(cx);
1868 let request = proto::ApplyCodeAction {
1869 project_id,
1870 buffer_id: buffer_handle.read(cx).remote_id(),
1871 action: Some(language::proto::serialize_code_action(&action)),
1872 };
1873 cx.spawn(|this, mut cx| async move {
1874 let response = client
1875 .request(request)
1876 .await?
1877 .transaction
1878 .ok_or_else(|| anyhow!("missing transaction"))?;
1879 this.update(&mut cx, |this, cx| {
1880 this.deserialize_project_transaction(
1881 response,
1882 push_to_history,
1883 request_handle,
1884 cx,
1885 )
1886 })
1887 .await
1888 })
1889 } else {
1890 Task::ready(Err(anyhow!("project does not have a remote id")))
1891 }
1892 }
1893
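    /// Applies an LSP [`lsp::WorkspaceEdit`] to the project: resource
    /// operations (create, rename, delete) are performed through the
    /// project's [`Fs`], while text edits are applied to buffers opened via
    /// [`Self::open_local_buffer_via_lsp`], accumulating the resulting
    /// transactions into a [`ProjectTransaction`].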
1894 async fn deserialize_workspace_edit(
1895 this: ModelHandle<Self>,
1896 edit: lsp::WorkspaceEdit,
1897 push_to_history: bool,
1898 language_name: String,
1899 language_server: Arc<LanguageServer>,
1900 cx: &mut AsyncAppContext,
1901 ) -> Result<ProjectTransaction> {
1902 let fs = this.read_with(cx, |this, _| this.fs.clone());
1903 let mut operations = Vec::new();
1904 if let Some(document_changes) = edit.document_changes {
1905 match document_changes {
1906 lsp::DocumentChanges::Edits(edits) => {
1907 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
1908 }
1909 lsp::DocumentChanges::Operations(ops) => operations = ops,
1910 }
1911 } else if let Some(changes) = edit.changes {
1912 operations.extend(changes.into_iter().map(|(uri, edits)| {
1913 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1914 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1915 uri,
1916 version: None,
1917 },
1918 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1919 })
1920 }));
1921 }
1922
1923 let mut project_transaction = ProjectTransaction::default();
1924 for operation in operations {
1925 match operation {
1926 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1927 let abs_path = op
1928 .uri
1929 .to_file_path()
1930 .map_err(|_| anyhow!("can't convert URI to path"))?;
1931
1932 if let Some(parent_path) = abs_path.parent() {
1933 fs.create_dir(parent_path).await?;
1934 }
                    // `Path::ends_with` matches whole path components, so a trailing
                    // slash (indicating a directory) must be detected on the URI itself.
                    if op.uri.as_str().ends_with('/') {
                        fs.create_dir(&abs_path).await?;
                    } else {
                        fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
                            .await?;
                    }
1941 }
1942 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1943 let source_abs_path = op
1944 .old_uri
1945 .to_file_path()
1946 .map_err(|_| anyhow!("can't convert URI to path"))?;
1947 let target_abs_path = op
1948 .new_uri
1949 .to_file_path()
1950 .map_err(|_| anyhow!("can't convert URI to path"))?;
1951 fs.rename(
1952 &source_abs_path,
1953 &target_abs_path,
1954 op.options.map(Into::into).unwrap_or_default(),
1955 )
1956 .await?;
1957 }
1958 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1959 let abs_path = op
1960 .uri
1961 .to_file_path()
1962 .map_err(|_| anyhow!("can't convert URI to path"))?;
1963 let options = op.options.map(Into::into).unwrap_or_default();
                    // Same component-matching caveat as above: check the URI for a trailing slash.
                    if op.uri.as_str().ends_with('/') {
1965 fs.remove_dir(&abs_path, options).await?;
1966 } else {
1967 fs.remove_file(&abs_path, options).await?;
1968 }
1969 }
1970 lsp::DocumentChangeOperation::Edit(op) => {
1971 let buffer_to_edit = this
1972 .update(cx, |this, cx| {
1973 this.open_local_buffer_via_lsp(
1974 op.text_document.uri,
1975 language_name.clone(),
1976 language_server.clone(),
1977 cx,
1978 )
1979 })
1980 .await?;
1981
1982 let edits = buffer_to_edit
1983 .update(cx, |buffer, cx| {
1984 let edits = op.edits.into_iter().map(|edit| match edit {
1985 lsp::OneOf::Left(edit) => edit,
1986 lsp::OneOf::Right(edit) => edit.text_edit,
1987 });
1988 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1989 })
1990 .await?;
1991
1992 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
1993 buffer.finalize_last_transaction();
1994 buffer.start_transaction();
1995 for (range, text) in edits {
1996 buffer.edit([range], text, cx);
1997 }
1998 let transaction = if buffer.end_transaction(cx).is_some() {
1999 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2000 if !push_to_history {
2001 buffer.forget_transaction(transaction.id);
2002 }
2003 Some(transaction)
2004 } else {
2005 None
2006 };
2007
2008 transaction
2009 });
2010 if let Some(transaction) = transaction {
2011 project_transaction.0.insert(buffer_to_edit, transaction);
2012 }
2013 }
2014 }
2015 }
2016
2017 Ok(project_transaction)
2018 }
2019
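    // Asks the language server whether the symbol at `position` can be renamed,
    // returning the range that a rename would affect.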
2020 pub fn prepare_rename<T: ToPointUtf16>(
2021 &self,
2022 buffer: ModelHandle<Buffer>,
2023 position: T,
2024 cx: &mut ModelContext<Self>,
2025 ) -> Task<Result<Option<Range<Anchor>>>> {
2026 let position = position.to_point_utf16(buffer.read(cx));
2027 self.request_lsp(buffer, PrepareRename { position }, cx)
2028 }
2029
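    // Renames the symbol at `position` to `new_name`, returning the project-wide
    // transaction produced by the language server's edits.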
2030 pub fn perform_rename<T: ToPointUtf16>(
2031 &self,
2032 buffer: ModelHandle<Buffer>,
2033 position: T,
2034 new_name: String,
2035 push_to_history: bool,
2036 cx: &mut ModelContext<Self>,
2037 ) -> Task<Result<ProjectTransaction>> {
2038 let position = position.to_point_utf16(buffer.read(cx));
2039 self.request_lsp(
2040 buffer,
2041 PerformRename {
2042 position,
2043 new_name,
2044 push_to_history,
2045 },
2046 cx,
2047 )
2048 }
2049
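    // Searches the project for `query`. Locally, worktree paths are scanned in parallel
    // on background threads, matching files are opened as buffers, and buffer contents
    // are searched; on a remote project the query is forwarded to the host.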
2050 pub fn search(
2051 &self,
2052 query: SearchQuery,
2053 cx: &mut ModelContext<Self>,
2054 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2055 if self.is_local() {
2056 let snapshots = self
2057 .strong_worktrees(cx)
2058 .filter_map(|tree| {
2059 let tree = tree.read(cx).as_local()?;
2060 Some(tree.snapshot())
2061 })
2062 .collect::<Vec<_>>();
2063
2064 let background = cx.background().clone();
2065 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2066 let workers = background.num_cpus().min(path_count);
2067 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2068 cx.background()
2069 .spawn({
2070 let fs = self.fs.clone();
2071 let background = cx.background().clone();
2072 let query = query.clone();
2073 async move {
2074 let fs = &fs;
2075 let query = &query;
2076 let matching_paths_tx = &matching_paths_tx;
                        // Guard against zero visible files, which would otherwise divide by zero.
                        let paths_per_worker = (path_count + workers - 1) / workers.max(1);
2078 let snapshots = &snapshots;
2079 background
2080 .scoped(|scope| {
2081 for worker_ix in 0..workers {
2082 let worker_start_ix = worker_ix * paths_per_worker;
2083 let worker_end_ix = worker_start_ix + paths_per_worker;
2084 scope.spawn(async move {
2085 let mut snapshot_start_ix = 0;
2086 let mut abs_path = PathBuf::new();
2087 for snapshot in snapshots {
2088 let snapshot_end_ix =
2089 snapshot_start_ix + snapshot.visible_file_count();
2090 if worker_end_ix <= snapshot_start_ix {
2091 break;
2092 } else if worker_start_ix > snapshot_end_ix {
2093 snapshot_start_ix = snapshot_end_ix;
2094 continue;
2095 } else {
2096 let start_in_snapshot = worker_start_ix
2097 .saturating_sub(snapshot_start_ix);
2098 let end_in_snapshot =
2099 cmp::min(worker_end_ix, snapshot_end_ix)
2100 - snapshot_start_ix;
2101
2102 for entry in snapshot
2103 .files(false, start_in_snapshot)
2104 .take(end_in_snapshot - start_in_snapshot)
2105 {
2106 if matching_paths_tx.is_closed() {
2107 break;
2108 }
2109
2110 abs_path.clear();
2111 abs_path.push(&snapshot.abs_path());
2112 abs_path.push(&entry.path);
2113 let matches = if let Some(file) =
2114 fs.open_sync(&abs_path).await.log_err()
2115 {
2116 query.detect(file).unwrap_or(false)
2117 } else {
2118 false
2119 };
2120
2121 if matches {
2122 let project_path =
2123 (snapshot.id(), entry.path.clone());
2124 if matching_paths_tx
2125 .send(project_path)
2126 .await
2127 .is_err()
2128 {
2129 break;
2130 }
2131 }
2132 }
2133
2134 snapshot_start_ix = snapshot_end_ix;
2135 }
2136 }
2137 });
2138 }
2139 })
2140 .await;
2141 }
2142 })
2143 .detach();
2144
2145 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2146 let open_buffers = self
2147 .buffers_state
2148 .borrow()
2149 .open_buffers
2150 .values()
2151 .filter_map(|b| b.upgrade(cx))
2152 .collect::<HashSet<_>>();
2153 cx.spawn(|this, cx| async move {
2154 for buffer in &open_buffers {
2155 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2156 buffers_tx.send((buffer.clone(), snapshot)).await?;
2157 }
2158
2159 let open_buffers = Rc::new(RefCell::new(open_buffers));
2160 while let Some(project_path) = matching_paths_rx.next().await {
2161 if buffers_tx.is_closed() {
2162 break;
2163 }
2164
2165 let this = this.clone();
2166 let open_buffers = open_buffers.clone();
2167 let buffers_tx = buffers_tx.clone();
2168 cx.spawn(|mut cx| async move {
2169 if let Some(buffer) = this
2170 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2171 .await
2172 .log_err()
2173 {
2174 if open_buffers.borrow_mut().insert(buffer.clone()) {
2175 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2176 buffers_tx.send((buffer, snapshot)).await?;
2177 }
2178 }
2179
2180 Ok::<_, anyhow::Error>(())
2181 })
2182 .detach();
2183 }
2184
2185 Ok::<_, anyhow::Error>(())
2186 })
2187 .detach_and_log_err(cx);
2188
2189 let background = cx.background().clone();
2190 cx.background().spawn(async move {
2191 let query = &query;
2192 let mut matched_buffers = Vec::new();
2193 for _ in 0..workers {
2194 matched_buffers.push(HashMap::default());
2195 }
2196 background
2197 .scoped(|scope| {
2198 for worker_matched_buffers in matched_buffers.iter_mut() {
2199 let mut buffers_rx = buffers_rx.clone();
2200 scope.spawn(async move {
2201 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2202 let buffer_matches = query
2203 .search(snapshot.as_rope())
2204 .await
2205 .iter()
2206 .map(|range| {
2207 snapshot.anchor_before(range.start)
2208 ..snapshot.anchor_after(range.end)
2209 })
2210 .collect::<Vec<_>>();
2211 if !buffer_matches.is_empty() {
2212 worker_matched_buffers
2213 .insert(buffer.clone(), buffer_matches);
2214 }
2215 }
2216 });
2217 }
2218 })
2219 .await;
2220 Ok(matched_buffers.into_iter().flatten().collect())
2221 })
2222 } else if let Some(project_id) = self.remote_id() {
2223 let request = self.client.request(query.to_proto(project_id));
2224 let request_handle = self.start_buffer_request(cx);
2225 cx.spawn(|this, mut cx| async move {
2226 let response = request.await?;
2227 let mut result = HashMap::default();
2228 for location in response.locations {
2229 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2230 let target_buffer = this
2231 .update(&mut cx, |this, cx| {
2232 this.deserialize_buffer(buffer, request_handle.clone(), cx)
2233 })
2234 .await?;
2235 let start = location
2236 .start
2237 .and_then(deserialize_anchor)
2238 .ok_or_else(|| anyhow!("missing target start"))?;
2239 let end = location
2240 .end
2241 .and_then(deserialize_anchor)
2242 .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_default()
                        .push(start..end);
2247 }
2248 Ok(result)
2249 })
2250 } else {
2251 Task::ready(Ok(Default::default()))
2252 }
2253 }
2254
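    // Dispatches `request` to the buffer's language server when the project is local,
    // forwards it to the host over RPC when remote, and otherwise resolves to a
    // default response.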
2255 fn request_lsp<R: LspCommand>(
2256 &self,
2257 buffer_handle: ModelHandle<Buffer>,
2258 request: R,
2259 cx: &mut ModelContext<Self>,
2260 ) -> Task<Result<R::Response>>
2261 where
2262 <R::LspRequest as lsp::request::Request>::Result: Send,
2263 {
2264 let buffer = buffer_handle.read(cx);
2265 if self.is_local() {
2266 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2267 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
2268 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2269 return cx.spawn(|this, cx| async move {
2270 let response = language_server
2271 .request::<R::LspRequest>(lsp_params)
2272 .await
2273 .context("lsp request failed")?;
2274 request
2275 .response_from_lsp(response, this, buffer_handle, cx)
2276 .await
2277 });
2278 }
2279 } else if let Some(project_id) = self.remote_id() {
2280 let rpc = self.client.clone();
2281 let request_handle = self.start_buffer_request(cx);
2282 let message = request.to_proto(project_id, buffer);
2283 return cx.spawn(|this, cx| async move {
2284 let response = rpc.request(message).await?;
2285 request
2286 .response_from_proto(response, this, buffer_handle, request_handle, cx)
2287 .await
2288 });
2289 }
2290 Task::ready(Ok(Default::default()))
2291 }
2292
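    // Returns the worktree containing `abs_path` together with the path relative to it,
    // creating a new local worktree when no existing worktree matches.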
2293 pub fn find_or_create_local_worktree(
2294 &self,
2295 abs_path: impl AsRef<Path>,
2296 weak: bool,
2297 cx: &mut ModelContext<Self>,
2298 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2299 let abs_path = abs_path.as_ref();
2300 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2301 Task::ready(Ok((tree.clone(), relative_path.into())))
2302 } else {
2303 let worktree = self.create_local_worktree(abs_path, weak, cx);
2304 cx.foreground()
2305 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2306 }
2307 }
2308
2309 pub fn find_local_worktree(
2310 &self,
2311 abs_path: &Path,
2312 cx: &AppContext,
2313 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2314 for tree in self.worktrees(cx) {
2315 if let Some(relative_path) = tree
2316 .read(cx)
2317 .as_local()
2318 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2319 {
2320 return Some((tree.clone(), relative_path.into()));
2321 }
2322 }
2323 None
2324 }
2325
2326 pub fn is_shared(&self) -> bool {
2327 match &self.client_state {
2328 ProjectClientState::Local { is_shared, .. } => *is_shared,
2329 ProjectClientState::Remote { .. } => false,
2330 }
2331 }
2332
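    // Creates a local worktree for `abs_path`, adds it to the project, and registers it
    // with the server when the project has a remote id (sharing it if the project is shared).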
2333 fn create_local_worktree(
2334 &self,
2335 abs_path: impl AsRef<Path>,
2336 weak: bool,
2337 cx: &mut ModelContext<Self>,
2338 ) -> Task<Result<ModelHandle<Worktree>>> {
2339 let fs = self.fs.clone();
2340 let client = self.client.clone();
2341 let path = Arc::from(abs_path.as_ref());
2342 cx.spawn(|project, mut cx| async move {
2343 let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;
2344
2345 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
2346 project.add_worktree(&worktree, cx);
2347 (project.remote_id(), project.is_shared())
2348 });
2349
2350 if let Some(project_id) = remote_project_id {
2351 worktree
2352 .update(&mut cx, |worktree, cx| {
2353 worktree.as_local_mut().unwrap().register(project_id, cx)
2354 })
2355 .await?;
2356 if is_shared {
2357 worktree
2358 .update(&mut cx, |worktree, cx| {
2359 worktree.as_local_mut().unwrap().share(project_id, cx)
2360 })
2361 .await?;
2362 }
2363 }
2364
2365 Ok(worktree)
2366 })
2367 }
2368
2369 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
2370 self.worktrees.retain(|worktree| {
2371 worktree
2372 .upgrade(cx)
2373 .map_or(false, |w| w.read(cx).id() != id)
2374 });
2375 cx.notify();
2376 }
2377
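    // Adds a worktree to the project, observing it for changes. Weak local worktrees are
    // held via weak handles and pruned from the list once they are released.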
2378 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
2379 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
2380 if worktree.read(cx).is_local() {
2381 cx.subscribe(&worktree, |this, worktree, _, cx| {
2382 this.update_local_worktree_buffers(worktree, cx);
2383 })
2384 .detach();
2385 }
2386
2387 let push_weak_handle = {
2388 let worktree = worktree.read(cx);
2389 worktree.is_local() && worktree.is_weak()
2390 };
2391 if push_weak_handle {
2392 cx.observe_release(&worktree, |this, cx| {
2393 this.worktrees
2394 .retain(|worktree| worktree.upgrade(cx).is_some());
2395 cx.notify();
2396 })
2397 .detach();
2398 self.worktrees
2399 .push(WorktreeHandle::Weak(worktree.downgrade()));
2400 } else {
2401 self.worktrees
2402 .push(WorktreeHandle::Strong(worktree.clone()));
2403 }
2404 cx.notify();
2405 }
2406
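    // Refreshes the `File` associated with each open buffer after a local worktree's
    // snapshot changes, sends `UpdateBufferFile` to the server when the project has a
    // remote id, and prunes buffers that have been dropped.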
2407 fn update_local_worktree_buffers(
2408 &mut self,
2409 worktree_handle: ModelHandle<Worktree>,
2410 cx: &mut ModelContext<Self>,
2411 ) {
2412 let snapshot = worktree_handle.read(cx).snapshot();
2413 let mut buffers_to_delete = Vec::new();
2414 for (buffer_id, buffer) in &self.buffers_state.borrow().open_buffers {
2415 if let Some(buffer) = buffer.upgrade(cx) {
2416 buffer.update(cx, |buffer, cx| {
2417 if let Some(old_file) = File::from_dyn(buffer.file()) {
2418 if old_file.worktree != worktree_handle {
2419 return;
2420 }
2421
2422 let new_file = if let Some(entry) = old_file
2423 .entry_id
2424 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
2425 {
2426 File {
2427 is_local: true,
2428 entry_id: Some(entry.id),
2429 mtime: entry.mtime,
2430 path: entry.path.clone(),
2431 worktree: worktree_handle.clone(),
2432 }
2433 } else if let Some(entry) =
2434 snapshot.entry_for_path(old_file.path().as_ref())
2435 {
2436 File {
2437 is_local: true,
2438 entry_id: Some(entry.id),
2439 mtime: entry.mtime,
2440 path: entry.path.clone(),
2441 worktree: worktree_handle.clone(),
2442 }
2443 } else {
2444 File {
2445 is_local: true,
2446 entry_id: None,
2447 path: old_file.path().clone(),
2448 mtime: old_file.mtime(),
2449 worktree: worktree_handle.clone(),
2450 }
2451 };
2452
2453 if let Some(project_id) = self.remote_id() {
2454 self.client
2455 .send(proto::UpdateBufferFile {
2456 project_id,
2457 buffer_id: *buffer_id as u64,
2458 file: Some(new_file.to_proto()),
2459 })
2460 .log_err();
2461 }
2462 buffer.file_updated(Box::new(new_file), cx).detach();
2463 }
2464 });
2465 } else {
2466 buffers_to_delete.push(*buffer_id);
2467 }
2468 }
2469
2470 for buffer_id in buffers_to_delete {
2471 self.buffers_state
2472 .borrow_mut()
2473 .open_buffers
2474 .remove(&buffer_id);
2475 }
2476 }
2477
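    // Resolves `entry` to a project entry and emits `ActiveEntryChanged` when the
    // active entry changes.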
2478 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2479 let new_active_entry = entry.and_then(|project_path| {
2480 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2481 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2482 Some(ProjectEntry {
2483 worktree_id: project_path.worktree_id,
2484 entry_id: entry.id,
2485 })
2486 });
2487 if new_active_entry != self.active_entry {
2488 self.active_entry = new_active_entry;
2489 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2490 }
2491 }
2492
2493 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2494 self.language_servers_with_diagnostics_running > 0
2495 }
2496
2497 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2498 let mut summary = DiagnosticSummary::default();
2499 for (_, path_summary) in self.diagnostic_summaries(cx) {
2500 summary.error_count += path_summary.error_count;
2501 summary.warning_count += path_summary.warning_count;
2502 summary.info_count += path_summary.info_count;
2503 summary.hint_count += path_summary.hint_count;
2504 }
2505 summary
2506 }
2507
2508 pub fn diagnostic_summaries<'a>(
2509 &'a self,
2510 cx: &'a AppContext,
2511 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2512 self.worktrees(cx).flat_map(move |worktree| {
2513 let worktree = worktree.read(cx);
2514 let worktree_id = worktree.id();
2515 worktree
2516 .diagnostic_summaries()
2517 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2518 })
2519 }
2520
2521 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2522 self.language_servers_with_diagnostics_running += 1;
2523 if self.language_servers_with_diagnostics_running == 1 {
2524 cx.emit(Event::DiskBasedDiagnosticsStarted);
2525 }
2526 }
2527
2528 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2529 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2530 self.language_servers_with_diagnostics_running -= 1;
2531 if self.language_servers_with_diagnostics_running == 0 {
2532 cx.emit(Event::DiskBasedDiagnosticsFinished);
2533 }
2534 }
2535
2536 pub fn active_entry(&self) -> Option<ProjectEntry> {
2537 self.active_entry
2538 }
2539
2540 // RPC message handlers
2541
2542 async fn handle_unshare_project(
2543 this: ModelHandle<Self>,
2544 _: TypedEnvelope<proto::UnshareProject>,
2545 _: Arc<Client>,
2546 mut cx: AsyncAppContext,
2547 ) -> Result<()> {
2548 this.update(&mut cx, |this, cx| {
2549 if let ProjectClientState::Remote {
2550 sharing_has_stopped,
2551 ..
2552 } = &mut this.client_state
2553 {
2554 *sharing_has_stopped = true;
2555 this.collaborators.clear();
2556 cx.notify();
2557 } else {
2558 unreachable!()
2559 }
2560 });
2561
2562 Ok(())
2563 }
2564
2565 async fn handle_add_collaborator(
2566 this: ModelHandle<Self>,
2567 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2568 _: Arc<Client>,
2569 mut cx: AsyncAppContext,
2570 ) -> Result<()> {
2571 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2572 let collaborator = envelope
2573 .payload
2574 .collaborator
2575 .take()
2576 .ok_or_else(|| anyhow!("empty collaborator"))?;
2577
2578 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2579 this.update(&mut cx, |this, cx| {
2580 this.collaborators
2581 .insert(collaborator.peer_id, collaborator);
2582 cx.notify();
2583 });
2584
2585 Ok(())
2586 }
2587
2588 async fn handle_remove_collaborator(
2589 this: ModelHandle<Self>,
2590 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2591 _: Arc<Client>,
2592 mut cx: AsyncAppContext,
2593 ) -> Result<()> {
2594 this.update(&mut cx, |this, cx| {
2595 let peer_id = PeerId(envelope.payload.peer_id);
2596 let replica_id = this
2597 .collaborators
2598 .remove(&peer_id)
2599 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2600 .replica_id;
2601 this.shared_buffers.remove(&peer_id);
            for buffer in this.buffers_state.borrow().open_buffers.values() {
2603 if let Some(buffer) = buffer.upgrade(cx) {
2604 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2605 }
2606 }
2607 cx.notify();
2608 Ok(())
2609 })
2610 }
2611
2612 async fn handle_register_worktree(
2613 this: ModelHandle<Self>,
2614 envelope: TypedEnvelope<proto::RegisterWorktree>,
2615 client: Arc<Client>,
2616 mut cx: AsyncAppContext,
2617 ) -> Result<()> {
2618 this.update(&mut cx, |this, cx| {
2619 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2620 let replica_id = this.replica_id();
2621 let worktree = proto::Worktree {
2622 id: envelope.payload.worktree_id,
2623 root_name: envelope.payload.root_name,
2624 entries: Default::default(),
2625 diagnostic_summaries: Default::default(),
2626 weak: envelope.payload.weak,
2627 };
2628 let (worktree, load_task) =
2629 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2630 this.add_worktree(&worktree, cx);
2631 load_task.detach();
2632 Ok(())
2633 })
2634 }
2635
2636 async fn handle_unregister_worktree(
2637 this: ModelHandle<Self>,
2638 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2639 _: Arc<Client>,
2640 mut cx: AsyncAppContext,
2641 ) -> Result<()> {
2642 this.update(&mut cx, |this, cx| {
2643 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2644 this.remove_worktree(worktree_id, cx);
2645 Ok(())
2646 })
2647 }
2648
2649 async fn handle_update_worktree(
2650 this: ModelHandle<Self>,
2651 envelope: TypedEnvelope<proto::UpdateWorktree>,
2652 _: Arc<Client>,
2653 mut cx: AsyncAppContext,
2654 ) -> Result<()> {
2655 this.update(&mut cx, |this, cx| {
2656 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2657 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2658 worktree.update(cx, |worktree, _| {
2659 let worktree = worktree.as_remote_mut().unwrap();
2660 worktree.update_from_remote(envelope)
2661 })?;
2662 }
2663 Ok(())
2664 })
2665 }
2666
2667 async fn handle_update_diagnostic_summary(
2668 this: ModelHandle<Self>,
2669 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2670 _: Arc<Client>,
2671 mut cx: AsyncAppContext,
2672 ) -> Result<()> {
2673 this.update(&mut cx, |this, cx| {
2674 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2675 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2676 if let Some(summary) = envelope.payload.summary {
2677 let project_path = ProjectPath {
2678 worktree_id,
2679 path: Path::new(&summary.path).into(),
2680 };
2681 worktree.update(cx, |worktree, _| {
2682 worktree
2683 .as_remote_mut()
2684 .unwrap()
2685 .update_diagnostic_summary(project_path.path.clone(), &summary);
2686 });
2687 cx.emit(Event::DiagnosticsUpdated(project_path));
2688 }
2689 }
2690 Ok(())
2691 })
2692 }
2693
2694 async fn handle_disk_based_diagnostics_updating(
2695 this: ModelHandle<Self>,
2696 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2697 _: Arc<Client>,
2698 mut cx: AsyncAppContext,
2699 ) -> Result<()> {
2700 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2701 Ok(())
2702 }
2703
2704 async fn handle_disk_based_diagnostics_updated(
2705 this: ModelHandle<Self>,
2706 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2707 _: Arc<Client>,
2708 mut cx: AsyncAppContext,
2709 ) -> Result<()> {
2710 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2711 Ok(())
2712 }
2713
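    // Applies buffer operations received from a peer. If the buffer isn't loaded yet
    // while a buffer-producing request is in flight on a remote project, the operations
    // are queued as `OpenBuffer::Loading` and applied once the buffer arrives.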
2714 async fn handle_update_buffer(
2715 this: ModelHandle<Self>,
2716 envelope: TypedEnvelope<proto::UpdateBuffer>,
2717 _: Arc<Client>,
2718 mut cx: AsyncAppContext,
2719 ) -> Result<()> {
2720 this.update(&mut cx, |this, cx| {
2721 let payload = envelope.payload.clone();
2722 let buffer_id = payload.buffer_id;
2723 let ops = payload
2724 .operations
2725 .into_iter()
                .map(language::proto::deserialize_operation)
2727 .collect::<Result<Vec<_>, _>>()?;
2728 let is_remote = this.is_remote();
2729 let mut buffers_state = this.buffers_state.borrow_mut();
2730 let buffer_request_count = buffers_state.buffer_request_count;
2731 match buffers_state.open_buffers.entry(buffer_id) {
2732 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2733 OpenBuffer::Loaded(buffer) => {
2734 if let Some(buffer) = buffer.upgrade(cx) {
2735 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2736 } else if is_remote && buffer_request_count > 0 {
2737 e.insert(OpenBuffer::Loading(ops));
2738 }
2739 }
2740 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2741 },
2742 hash_map::Entry::Vacant(e) => {
2743 if is_remote && buffer_request_count > 0 {
2744 e.insert(OpenBuffer::Loading(ops));
2745 }
2746 }
2747 }
2748 Ok(())
2749 })
2750 }
2751
2752 async fn handle_update_buffer_file(
2753 this: ModelHandle<Self>,
2754 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2755 _: Arc<Client>,
2756 mut cx: AsyncAppContext,
2757 ) -> Result<()> {
2758 this.update(&mut cx, |this, cx| {
2759 let payload = envelope.payload.clone();
2760 let buffer_id = payload.buffer_id;
2761 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2762 let worktree = this
2763 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2764 .ok_or_else(|| anyhow!("no such worktree"))?;
2765 let file = File::from_proto(file, worktree.clone(), cx)?;
2766 let buffer = this
2767 .buffers_state
2768 .borrow_mut()
2769 .open_buffers
2770 .get_mut(&buffer_id)
2771 .and_then(|b| b.upgrade(cx))
2772 .ok_or_else(|| anyhow!("no such buffer"))?;
2773 buffer.update(cx, |buffer, cx| {
2774 buffer.file_updated(Box::new(file), cx).detach();
2775 });
2776 Ok(())
2777 })
2778 }
2779
2780 async fn handle_save_buffer(
2781 this: ModelHandle<Self>,
2782 envelope: TypedEnvelope<proto::SaveBuffer>,
2783 _: Arc<Client>,
2784 mut cx: AsyncAppContext,
2785 ) -> Result<proto::BufferSaved> {
2786 let buffer_id = envelope.payload.buffer_id;
2787 let sender_id = envelope.original_sender_id()?;
2788 let requested_version = envelope.payload.version.try_into()?;
2789
2790 let (project_id, buffer) = this.update(&mut cx, |this, _| {
2791 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2792 let buffer = this
2793 .shared_buffers
2794 .get(&sender_id)
2795 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2796 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2797 Ok::<_, anyhow::Error>((project_id, buffer))
2798 })?;
2799
2800 if !buffer
2801 .read_with(&cx, |buffer, _| buffer.version())
2802 .observed_all(&requested_version)
2803 {
2804 Err(anyhow!("save request depends on unreceived edits"))?;
2805 }
2806
2807 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2808 Ok(proto::BufferSaved {
2809 project_id,
2810 buffer_id,
2811 version: (&saved_version).into(),
2812 mtime: Some(mtime.into()),
2813 })
2814 }
2815
2816 async fn handle_format_buffers(
2817 this: ModelHandle<Self>,
2818 envelope: TypedEnvelope<proto::FormatBuffers>,
2819 _: Arc<Client>,
2820 mut cx: AsyncAppContext,
2821 ) -> Result<proto::FormatBuffersResponse> {
2822 let sender_id = envelope.original_sender_id()?;
2823 let format = this.update(&mut cx, |this, cx| {
2824 let shared_buffers = this
2825 .shared_buffers
2826 .get(&sender_id)
2827 .ok_or_else(|| anyhow!("peer has no buffers"))?;
2828 let mut buffers = HashSet::default();
2829 for buffer_id in &envelope.payload.buffer_ids {
2830 buffers.insert(
2831 shared_buffers
2832 .get(buffer_id)
2833 .cloned()
2834 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2835 );
2836 }
2837 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2838 })?;
2839
2840 let project_transaction = format.await?;
2841 let project_transaction = this.update(&mut cx, |this, cx| {
2842 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2843 });
2844 Ok(proto::FormatBuffersResponse {
2845 transaction: Some(project_transaction),
2846 })
2847 }
2848
2849 async fn handle_get_completions(
2850 this: ModelHandle<Self>,
2851 envelope: TypedEnvelope<proto::GetCompletions>,
2852 _: Arc<Client>,
2853 mut cx: AsyncAppContext,
2854 ) -> Result<proto::GetCompletionsResponse> {
2855 let sender_id = envelope.original_sender_id()?;
2856 let position = envelope
2857 .payload
2858 .position
2859 .and_then(language::proto::deserialize_anchor)
2860 .ok_or_else(|| anyhow!("invalid position"))?;
2861 let version = clock::Global::from(envelope.payload.version);
2862 let buffer = this.read_with(&cx, |this, _| {
2863 this.shared_buffers
2864 .get(&sender_id)
2865 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2866 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2867 })?;
2868 if !buffer
2869 .read_with(&cx, |buffer, _| buffer.version())
2870 .observed_all(&version)
2871 {
2872 Err(anyhow!("completion request depends on unreceived edits"))?;
2873 }
2874 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2875 let completions = this
2876 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2877 .await?;
2878
2879 Ok(proto::GetCompletionsResponse {
2880 completions: completions
2881 .iter()
2882 .map(language::proto::serialize_completion)
2883 .collect(),
2884 version: (&version).into(),
2885 })
2886 }
2887
2888 async fn handle_apply_additional_edits_for_completion(
2889 this: ModelHandle<Self>,
2890 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2891 _: Arc<Client>,
2892 mut cx: AsyncAppContext,
2893 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2894 let sender_id = envelope.original_sender_id()?;
2895 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2896 let buffer = this
2897 .shared_buffers
2898 .get(&sender_id)
2899 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2900 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2901 let language = buffer.read(cx).language();
2902 let completion = language::proto::deserialize_completion(
2903 envelope
2904 .payload
2905 .completion
2906 .ok_or_else(|| anyhow!("invalid completion"))?,
2907 language,
2908 )?;
2909 Ok::<_, anyhow::Error>(
2910 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2911 )
2912 })?;
2913
2914 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2915 transaction: apply_additional_edits
2916 .await?
2917 .as_ref()
2918 .map(language::proto::serialize_transaction),
2919 })
2920 }
2921
2922 async fn handle_get_code_actions(
2923 this: ModelHandle<Self>,
2924 envelope: TypedEnvelope<proto::GetCodeActions>,
2925 _: Arc<Client>,
2926 mut cx: AsyncAppContext,
2927 ) -> Result<proto::GetCodeActionsResponse> {
2928 let sender_id = envelope.original_sender_id()?;
2929 let start = envelope
2930 .payload
2931 .start
2932 .and_then(language::proto::deserialize_anchor)
2933 .ok_or_else(|| anyhow!("invalid start"))?;
2934 let end = envelope
2935 .payload
2936 .end
2937 .and_then(language::proto::deserialize_anchor)
2938 .ok_or_else(|| anyhow!("invalid end"))?;
2939 let buffer = this.update(&mut cx, |this, _| {
2940 this.shared_buffers
2941 .get(&sender_id)
2942 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2943 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2944 })?;
2945 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2946 if !version.observed(start.timestamp) || !version.observed(end.timestamp) {
2947 Err(anyhow!("code action request references unreceived edits"))?;
2948 }
2949 let code_actions = this.update(&mut cx, |this, cx| {
2950 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2951 })?;
2952
2953 Ok(proto::GetCodeActionsResponse {
2954 actions: code_actions
2955 .await?
2956 .iter()
2957 .map(language::proto::serialize_code_action)
2958 .collect(),
2959 version: (&version).into(),
2960 })
2961 }
2962
2963 async fn handle_apply_code_action(
2964 this: ModelHandle<Self>,
2965 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2966 _: Arc<Client>,
2967 mut cx: AsyncAppContext,
2968 ) -> Result<proto::ApplyCodeActionResponse> {
2969 let sender_id = envelope.original_sender_id()?;
2970 let action = language::proto::deserialize_code_action(
2971 envelope
2972 .payload
2973 .action
2974 .ok_or_else(|| anyhow!("invalid action"))?,
2975 )?;
2976 let apply_code_action = this.update(&mut cx, |this, cx| {
2977 let buffer = this
2978 .shared_buffers
2979 .get(&sender_id)
2980 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2981 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2982 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2983 })?;
2984
2985 let project_transaction = apply_code_action.await?;
2986 let project_transaction = this.update(&mut cx, |this, cx| {
2987 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2988 });
2989 Ok(proto::ApplyCodeActionResponse {
2990 transaction: Some(project_transaction),
2991 })
2992 }
2993
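    // Generic handler for LSP-backed requests received over RPC: looks up the shared
    // buffer, performs the request via `request_lsp`, and serializes the response for
    // the original sender.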
2994 async fn handle_lsp_command<T: LspCommand>(
2995 this: ModelHandle<Self>,
2996 envelope: TypedEnvelope<T::ProtoRequest>,
2997 _: Arc<Client>,
2998 mut cx: AsyncAppContext,
2999 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3000 where
3001 <T::LspRequest as lsp::request::Request>::Result: Send,
3002 {
3003 let sender_id = envelope.original_sender_id()?;
3004 let (request, buffer_version) = this.update(&mut cx, |this, cx| {
3005 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3006 let buffer_handle = this
3007 .shared_buffers
3008 .get(&sender_id)
3009 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
3010 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3011 let buffer = buffer_handle.read(cx);
3012 let buffer_version = buffer.version();
3013 let request = T::from_proto(envelope.payload, this, buffer)?;
3014 Ok::<_, anyhow::Error>((this.request_lsp(buffer_handle, request, cx), buffer_version))
3015 })?;
3016 let response = request.await?;
3017 this.update(&mut cx, |this, cx| {
3018 Ok(T::response_to_proto(
3019 response,
3020 this,
3021 sender_id,
3022 &buffer_version,
3023 cx,
3024 ))
3025 })
3026 }
3027
3028 async fn handle_get_project_symbols(
3029 this: ModelHandle<Self>,
3030 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3031 _: Arc<Client>,
3032 mut cx: AsyncAppContext,
3033 ) -> Result<proto::GetProjectSymbolsResponse> {
3034 let symbols = this
3035 .update(&mut cx, |this, cx| {
3036 this.symbols(&envelope.payload.query, cx)
3037 })
3038 .await?;
3039
3040 Ok(proto::GetProjectSymbolsResponse {
3041 symbols: symbols.iter().map(serialize_symbol).collect(),
3042 })
3043 }
3044
3045 async fn handle_search_project(
3046 this: ModelHandle<Self>,
3047 envelope: TypedEnvelope<proto::SearchProject>,
3048 _: Arc<Client>,
3049 mut cx: AsyncAppContext,
3050 ) -> Result<proto::SearchProjectResponse> {
3051 let peer_id = envelope.original_sender_id()?;
3052 let query = SearchQuery::from_proto(envelope.payload)?;
3053 let result = this
3054 .update(&mut cx, |this, cx| this.search(query, cx))
3055 .await?;
3056
3057 this.update(&mut cx, |this, cx| {
3058 let mut locations = Vec::new();
3059 for (buffer, ranges) in result {
3060 for range in ranges {
3061 let start = serialize_anchor(&range.start);
3062 let end = serialize_anchor(&range.end);
3063 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3064 locations.push(proto::Location {
3065 buffer: Some(buffer),
3066 start: Some(start),
3067 end: Some(end),
3068 });
3069 }
3070 }
3071 Ok(proto::SearchProjectResponse { locations })
3072 })
3073 }
3074
3075 async fn handle_open_buffer_for_symbol(
3076 this: ModelHandle<Self>,
3077 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3078 _: Arc<Client>,
3079 mut cx: AsyncAppContext,
3080 ) -> Result<proto::OpenBufferForSymbolResponse> {
3081 let peer_id = envelope.original_sender_id()?;
3082 let symbol = envelope
3083 .payload
3084 .symbol
3085 .ok_or_else(|| anyhow!("invalid symbol"))?;
3086 let symbol = this.read_with(&cx, |this, _| {
3087 let symbol = this.deserialize_symbol(symbol)?;
3088 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3089 if signature == symbol.signature {
3090 Ok(symbol)
3091 } else {
3092 Err(anyhow!("invalid symbol signature"))
3093 }
3094 })?;
3095 let buffer = this
3096 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3097 .await?;
3098
3099 Ok(proto::OpenBufferForSymbolResponse {
3100 buffer: Some(this.update(&mut cx, |this, cx| {
3101 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3102 })),
3103 })
3104 }
3105
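    // Computes a SHA-256 over the worktree id, path, and the project's nonce; used in
    // `handle_open_buffer_for_symbol` to validate symbols received from peers.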
3106 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3107 let mut hasher = Sha256::new();
3108 hasher.update(worktree_id.to_proto().to_be_bytes());
3109 hasher.update(path.to_string_lossy().as_bytes());
3110 hasher.update(self.nonce.to_be_bytes());
3111 hasher.finalize().as_slice().try_into().unwrap()
3112 }
3113
3114 async fn handle_open_buffer(
3115 this: ModelHandle<Self>,
3116 envelope: TypedEnvelope<proto::OpenBuffer>,
3117 _: Arc<Client>,
3118 mut cx: AsyncAppContext,
3119 ) -> Result<proto::OpenBufferResponse> {
3120 let peer_id = envelope.original_sender_id()?;
3121 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3122 let open_buffer = this.update(&mut cx, |this, cx| {
3123 this.open_buffer(
3124 ProjectPath {
3125 worktree_id,
3126 path: PathBuf::from(envelope.payload.path).into(),
3127 },
3128 cx,
3129 )
3130 });
3131
3132 let buffer = open_buffer.await?;
3133 this.update(&mut cx, |this, cx| {
3134 Ok(proto::OpenBufferResponse {
3135 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3136 })
3137 })
3138 }
3139
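    // Serializes each buffer/transaction pair for `peer_id`, sending full buffer state
    // for any buffer that hasn't yet been shared with that peer.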
3140 fn serialize_project_transaction_for_peer(
3141 &mut self,
3142 project_transaction: ProjectTransaction,
3143 peer_id: PeerId,
3144 cx: &AppContext,
3145 ) -> proto::ProjectTransaction {
3146 let mut serialized_transaction = proto::ProjectTransaction {
3147 buffers: Default::default(),
3148 transactions: Default::default(),
3149 };
3150 for (buffer, transaction) in project_transaction.0 {
3151 serialized_transaction
3152 .buffers
3153 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3154 serialized_transaction
3155 .transactions
3156 .push(language::proto::serialize_transaction(&transaction));
3157 }
3158 serialized_transaction
3159 }
3160
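    // Reconstructs a project transaction received over RPC: resolves each buffer, waits
    // for the transaction's edits to arrive, and optionally pushes the transactions onto
    // the buffers' undo histories.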
3161 fn deserialize_project_transaction(
3162 &mut self,
3163 message: proto::ProjectTransaction,
3164 push_to_history: bool,
3165 request_handle: BufferRequestHandle,
3166 cx: &mut ModelContext<Self>,
3167 ) -> Task<Result<ProjectTransaction>> {
3168 cx.spawn(|this, mut cx| async move {
3169 let mut project_transaction = ProjectTransaction::default();
3170 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3171 let buffer = this
3172 .update(&mut cx, |this, cx| {
3173 this.deserialize_buffer(buffer, request_handle.clone(), cx)
3174 })
3175 .await?;
3176 let transaction = language::proto::deserialize_transaction(transaction)?;
3177 project_transaction.0.insert(buffer, transaction);
3178 }
3179
3180 for (buffer, transaction) in &project_transaction.0 {
3181 buffer
3182 .update(&mut cx, |buffer, _| {
3183 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3184 })
3185 .await;
3186
3187 if push_to_history {
3188 buffer.update(&mut cx, |buffer, _| {
3189 buffer.push_transaction(transaction.clone(), Instant::now());
3190 });
3191 }
3192 }
3193
3194 Ok(project_transaction)
3195 })
3196 }
3197
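    // Returns a proto::Buffer containing either just the buffer id (if the buffer was
    // already shared with the peer) or its full serialized state.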
3198 fn serialize_buffer_for_peer(
3199 &mut self,
3200 buffer: &ModelHandle<Buffer>,
3201 peer_id: PeerId,
3202 cx: &AppContext,
3203 ) -> proto::Buffer {
3204 let buffer_id = buffer.read(cx).remote_id();
3205 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3206 match shared_buffers.entry(buffer_id) {
3207 hash_map::Entry::Occupied(_) => proto::Buffer {
3208 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3209 },
3210 hash_map::Entry::Vacant(entry) => {
3211 entry.insert(buffer.clone());
3212 proto::Buffer {
3213 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3214 }
3215 }
3216 }
3217 }
3218
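    // Resolves a proto::Buffer into a handle: for an id-only message, waits until the
    // referenced buffer is opened locally; for full state, constructs the buffer,
    // preserves it for the duration of the request, and registers it with the project.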
3219 fn deserialize_buffer(
3220 &mut self,
3221 buffer: proto::Buffer,
3222 request_handle: BufferRequestHandle,
3223 cx: &mut ModelContext<Self>,
3224 ) -> Task<Result<ModelHandle<Buffer>>> {
3225 let replica_id = self.replica_id();
3226
3227 let mut opened_buffer_tx = self.opened_buffer.clone();
3228 let mut opened_buffer_rx = self.opened_buffer.subscribe();
3229 cx.spawn(|this, mut cx| async move {
3230 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3231 proto::buffer::Variant::Id(id) => {
3232 let buffer = loop {
3233 let buffer = this.read_with(&cx, |this, cx| {
3234 this.buffers_state
3235 .borrow()
3236 .open_buffers
3237 .get(&id)
3238 .and_then(|buffer| buffer.upgrade(cx))
3239 });
3240 if let Some(buffer) = buffer {
3241 break buffer;
3242 }
3243 opened_buffer_rx
3244 .recv()
3245 .await
3246 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3247 };
3248 Ok(buffer)
3249 }
3250 proto::buffer::Variant::State(mut buffer) => {
3251 let mut buffer_worktree = None;
3252 let mut buffer_file = None;
3253 if let Some(file) = buffer.file.take() {
3254 this.read_with(&cx, |this, cx| {
3255 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3256 let worktree =
3257 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3258 anyhow!("no worktree found for id {}", file.worktree_id)
3259 })?;
3260 buffer_file =
3261 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3262 as Box<dyn language::File>);
3263 buffer_worktree = Some(worktree);
3264 Ok::<_, anyhow::Error>(())
3265 })?;
3266 }
3267
3268 let buffer = cx.add_model(|cx| {
3269 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3270 });
3271
3272 request_handle.preserve_buffer(buffer.clone());
3273 this.update(&mut cx, |this, cx| {
3274 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
3275 })?;
3276
3277 let _ = opened_buffer_tx.send(()).await;
3278 Ok(buffer)
3279 }
3280 }
3281 })
3282 }
3283
3284 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3285 let language = self
3286 .languages
3287 .get_language(&serialized_symbol.language_name);
3288 let start = serialized_symbol
3289 .start
3290 .ok_or_else(|| anyhow!("invalid start"))?;
3291 let end = serialized_symbol
3292 .end
3293 .ok_or_else(|| anyhow!("invalid end"))?;
3294 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
3295 Ok(Symbol {
3296 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
3297 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
3298 language_name: serialized_symbol.language_name.clone(),
3299 label: language
3300 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
3301 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
3302 name: serialized_symbol.name,
3303 path: PathBuf::from(serialized_symbol.path),
3304 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
3305 kind,
3306 signature: serialized_symbol
3307 .signature
3308 .try_into()
3309 .map_err(|_| anyhow!("invalid signature"))?,
3310 })
3311 }
3312
3313 async fn handle_close_buffer(
3314 this: ModelHandle<Self>,
3315 envelope: TypedEnvelope<proto::CloseBuffer>,
3316 _: Arc<Client>,
3317 mut cx: AsyncAppContext,
3318 ) -> Result<()> {
3319 this.update(&mut cx, |this, cx| {
3320 if let Some(shared_buffers) =
3321 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
3322 {
3323 shared_buffers.remove(&envelope.payload.buffer_id);
3324 cx.notify();
3325 }
3326 Ok(())
3327 })
3328 }
3329
3330 async fn handle_buffer_saved(
3331 this: ModelHandle<Self>,
3332 envelope: TypedEnvelope<proto::BufferSaved>,
3333 _: Arc<Client>,
3334 mut cx: AsyncAppContext,
3335 ) -> Result<()> {
3336 let version = envelope.payload.version.try_into()?;
3337 let mtime = envelope
3338 .payload
3339 .mtime
3340 .ok_or_else(|| anyhow!("missing mtime"))?
3341 .into();
3342
3343 this.update(&mut cx, |this, cx| {
3344 let buffer = this
3345 .buffers_state
3346 .borrow()
3347 .open_buffers
3348 .get(&envelope.payload.buffer_id)
3349 .and_then(|buffer| buffer.upgrade(cx));
3350 if let Some(buffer) = buffer {
3351 buffer.update(cx, |buffer, cx| {
3352 buffer.did_save(version, mtime, None, cx);
3353 });
3354 }
3355 Ok(())
3356 })
3357 }
3358
3359 async fn handle_buffer_reloaded(
3360 this: ModelHandle<Self>,
3361 envelope: TypedEnvelope<proto::BufferReloaded>,
3362 _: Arc<Client>,
3363 mut cx: AsyncAppContext,
3364 ) -> Result<()> {
3365 let payload = envelope.payload.clone();
3366 let version = payload.version.try_into()?;
3367 let mtime = payload
3368 .mtime
3369 .ok_or_else(|| anyhow!("missing mtime"))?
3370 .into();
3371 this.update(&mut cx, |this, cx| {
3372 let buffer = this
3373 .buffers_state
3374 .borrow()
3375 .open_buffers
3376 .get(&payload.buffer_id)
3377 .and_then(|buffer| buffer.upgrade(cx));
3378 if let Some(buffer) = buffer {
3379 buffer.update(cx, |buffer, cx| {
3380 buffer.did_reload(version, mtime, cx);
3381 });
3382 }
3383 Ok(())
3384 })
3385 }
3386
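    // Fuzzy-matches `query` against the paths of all non-weak worktrees on background
    // threads, returning up to `max_results` matches.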
3387 pub fn match_paths<'a>(
3388 &self,
3389 query: &'a str,
3390 include_ignored: bool,
3391 smart_case: bool,
3392 max_results: usize,
3393 cancel_flag: &'a AtomicBool,
3394 cx: &AppContext,
3395 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
3396 let worktrees = self
3397 .worktrees(cx)
3398 .filter(|worktree| !worktree.read(cx).is_weak())
3399 .collect::<Vec<_>>();
3400 let include_root_name = worktrees.len() > 1;
3401 let candidate_sets = worktrees
3402 .into_iter()
3403 .map(|worktree| CandidateSet {
3404 snapshot: worktree.read(cx).snapshot(),
3405 include_ignored,
3406 include_root_name,
3407 })
3408 .collect::<Vec<_>>();
3409
3410 let background = cx.background().clone();
3411 async move {
3412 fuzzy::match_paths(
3413 candidate_sets.as_slice(),
3414 query,
3415 smart_case,
3416 max_results,
3417 cancel_flag,
3418 background,
3419 )
3420 .await
3421 }
3422 }
3423}
3424
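// Ref-counts in-flight buffer-producing requests. While at least one handle exists, open
// buffers are preserved so operations arriving for them aren't lost; when the last handle
// drops, preserved buffers are released and still-loading entries are discarded.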
3425impl BufferRequestHandle {
3426 fn new(state: Rc<RefCell<ProjectBuffers>>, cx: &AppContext) -> Self {
3427 {
3428 let state = &mut *state.borrow_mut();
3429 state.buffer_request_count += 1;
3430 if state.buffer_request_count == 1 {
3431 state.preserved_buffers.extend(
3432 state
3433 .open_buffers
3434 .values()
3435 .filter_map(|buffer| buffer.upgrade(cx)),
3436 )
3437 }
3438 }
3439 Self(state)
3440 }
3441
3442 fn preserve_buffer(&self, buffer: ModelHandle<Buffer>) {
3443 self.0.borrow_mut().preserved_buffers.push(buffer);
3444 }
3445}
3446
3447impl Clone for BufferRequestHandle {
3448 fn clone(&self) -> Self {
3449 self.0.borrow_mut().buffer_request_count += 1;
3450 Self(self.0.clone())
3451 }
3452}
3453
3454impl Drop for BufferRequestHandle {
3455 fn drop(&mut self) {
3456 let mut state = self.0.borrow_mut();
3457 state.buffer_request_count -= 1;
3458 if state.buffer_request_count == 0 {
3459 state.preserved_buffers.clear();
3460 state
3461 .open_buffers
3462 .retain(|_, buffer| matches!(buffer, OpenBuffer::Loaded(_)))
3463 }
3464 }
3465}
3466
3467impl WorktreeHandle {
3468 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
3469 match self {
3470 WorktreeHandle::Strong(handle) => Some(handle.clone()),
3471 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
3472 }
3473 }
3474}
3475
3476impl OpenBuffer {
3477 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
3478 match self {
3479 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
3480 OpenBuffer::Loading(_) => None,
3481 }
3482 }
3483}
3484
3485struct CandidateSet {
3486 snapshot: Snapshot,
3487 include_ignored: bool,
3488 include_root_name: bool,
3489}
3490
3491impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
3492 type Candidates = CandidateSetIter<'a>;
3493
3494 fn id(&self) -> usize {
3495 self.snapshot.id().to_usize()
3496 }
3497
3498 fn len(&self) -> usize {
3499 if self.include_ignored {
3500 self.snapshot.file_count()
3501 } else {
3502 self.snapshot.visible_file_count()
3503 }
3504 }
3505
3506 fn prefix(&self) -> Arc<str> {
3507 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
3508 self.snapshot.root_name().into()
3509 } else if self.include_root_name {
3510 format!("{}/", self.snapshot.root_name()).into()
3511 } else {
3512 "".into()
3513 }
3514 }
3515
3516 fn candidates(&'a self, start: usize) -> Self::Candidates {
3517 CandidateSetIter {
3518 traversal: self.snapshot.files(self.include_ignored, start),
3519 }
3520 }
3521}
3522
3523struct CandidateSetIter<'a> {
3524 traversal: Traversal<'a>,
3525}
3526
3527impl<'a> Iterator for CandidateSetIter<'a> {
3528 type Item = PathMatchCandidate<'a>;
3529
3530 fn next(&mut self) -> Option<Self::Item> {
3531 self.traversal.next().map(|entry| {
3532 if let EntryKind::File(char_bag) = entry.kind {
3533 PathMatchCandidate {
3534 path: &entry.path,
3535 char_bag,
3536 }
3537 } else {
3538 unreachable!()
3539 }
3540 })
3541 }
3542}
3543
3544impl Entity for Project {
3545 type Event = Event;
3546
3547 fn release(&mut self, _: &mut gpui::MutableAppContext) {
3548 match &self.client_state {
3549 ProjectClientState::Local { remote_id_rx, .. } => {
3550 if let Some(project_id) = *remote_id_rx.borrow() {
3551 self.client
3552 .send(proto::UnregisterProject { project_id })
3553 .log_err();
3554 }
3555 }
3556 ProjectClientState::Remote { remote_id, .. } => {
3557 self.client
3558 .send(proto::LeaveProject {
3559 project_id: *remote_id,
3560 })
3561 .log_err();
3562 }
3563 }
3564 }
3565
3566 fn app_will_quit(
3567 &mut self,
3568 _: &mut MutableAppContext,
3569 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
3570 let shutdown_futures = self
3571 .language_servers
3572 .drain()
3573 .filter_map(|(_, server)| server.shutdown())
3574 .collect::<Vec<_>>();
3575 Some(
3576 async move {
3577 futures::future::join_all(shutdown_futures).await;
3578 }
3579 .boxed(),
3580 )
3581 }
3582}
3583
3584impl Collaborator {
3585 fn from_proto(
3586 message: proto::Collaborator,
3587 user_store: &ModelHandle<UserStore>,
3588 cx: &mut AsyncAppContext,
3589 ) -> impl Future<Output = Result<Self>> {
3590 let user = user_store.update(cx, |user_store, cx| {
3591 user_store.fetch_user(message.user_id, cx)
3592 });
3593
3594 async move {
3595 Ok(Self {
3596 peer_id: PeerId(message.peer_id),
3597 user: user.await?,
3598 replica_id: message.replica_id as ReplicaId,
3599 })
3600 }
3601 }
3602}
3603
3604impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
3605 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
3606 Self {
3607 worktree_id,
3608 path: path.as_ref().into(),
3609 }
3610 }
3611}
3612
3613impl From<lsp::CreateFileOptions> for fs::CreateOptions {
3614 fn from(options: lsp::CreateFileOptions) -> Self {
3615 Self {
3616 overwrite: options.overwrite.unwrap_or(false),
3617 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3618 }
3619 }
3620}
3621
3622impl From<lsp::RenameFileOptions> for fs::RenameOptions {
3623 fn from(options: lsp::RenameFileOptions) -> Self {
3624 Self {
3625 overwrite: options.overwrite.unwrap_or(false),
3626 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3627 }
3628 }
3629}
3630
3631impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
3632 fn from(options: lsp::DeleteFileOptions) -> Self {
3633 Self {
3634 recursive: options.recursive.unwrap_or(false),
3635 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
3636 }
3637 }
3638}
3639
3640fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
3641 proto::Symbol {
3642 source_worktree_id: symbol.source_worktree_id.to_proto(),
3643 worktree_id: symbol.worktree_id.to_proto(),
3644 language_name: symbol.language_name.clone(),
3645 name: symbol.name.clone(),
3646 kind: unsafe { mem::transmute(symbol.kind) },
3647 path: symbol.path.to_string_lossy().to_string(),
3648 start: Some(proto::Point {
3649 row: symbol.range.start.row,
3650 column: symbol.range.start.column,
3651 }),
3652 end: Some(proto::Point {
3653 row: symbol.range.end.row,
3654 column: symbol.range.end.column,
3655 }),
3656 signature: symbol.signature.to_vec(),
3657 }
3658}
3659
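// Returns `path` expressed relative to `base`, inserting `..` components where the
// two paths diverge.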
3660fn relativize_path(base: &Path, path: &Path) -> PathBuf {
3661 let mut path_components = path.components();
3662 let mut base_components = base.components();
3663 let mut components: Vec<Component> = Vec::new();
3664 loop {
3665 match (path_components.next(), base_components.next()) {
3666 (None, None) => break,
3667 (Some(a), None) => {
3668 components.push(a);
3669 components.extend(path_components.by_ref());
3670 break;
3671 }
3672 (None, _) => components.push(Component::ParentDir),
3673 (Some(a), Some(b)) if components.is_empty() && a == b => (),
3674 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
3675 (Some(a), Some(_)) => {
3676 components.push(Component::ParentDir);
3677 for _ in base_components {
3678 components.push(Component::ParentDir);
3679 }
3680 components.push(a);
3681 components.extend(path_components.by_ref());
3682 break;
3683 }
3684 }
3685 }
3686 components.iter().map(|c| c.as_os_str()).collect()
3687}
3688
3689#[cfg(test)]
3690mod tests {
3691 use super::{Event, *};
3692 use fs::RealFs;
3693 use futures::StreamExt;
3694 use gpui::test::subscribe;
3695 use language::{
3696 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
3697 };
3698 use lsp::Url;
3699 use serde_json::json;
3700 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
3701 use unindent::Unindent as _;
3702 use util::test::temp_tree;
3703 use worktree::WorktreeHandle as _;
3704
3705 #[gpui::test]
3706 async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
3707 let dir = temp_tree(json!({
3708 "root": {
3709 "apple": "",
3710 "banana": {
3711 "carrot": {
3712 "date": "",
3713 "endive": "",
3714 }
3715 },
3716 "fennel": {
3717 "grape": "",
3718 }
3719 }
3720 }));
3721
3722 let root_link_path = dir.path().join("root_link");
3723 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3724 unix::fs::symlink(
3725 &dir.path().join("root/fennel"),
3726 &dir.path().join("root/finnochio"),
3727 )
3728 .unwrap();
3729
3730 let project = Project::test(Arc::new(RealFs), &mut cx);
3731
3732 let (tree, _) = project
3733 .update(&mut cx, |project, cx| {
3734 project.find_or_create_local_worktree(&root_link_path, false, cx)
3735 })
3736 .await
3737 .unwrap();
3738
3739 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3740 .await;
3741 cx.read(|cx| {
3742 let tree = tree.read(cx);
3743 assert_eq!(tree.file_count(), 5);
3744 assert_eq!(
3745 tree.inode_for_path("fennel/grape"),
3746 tree.inode_for_path("finnochio/grape")
3747 );
3748 });
3749
3750 let cancel_flag = Default::default();
3751 let results = project
3752 .read_with(&cx, |project, cx| {
3753 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3754 })
3755 .await;
3756 assert_eq!(
3757 results
3758 .into_iter()
3759 .map(|result| result.path)
3760 .collect::<Vec<Arc<Path>>>(),
3761 vec![
3762 PathBuf::from("banana/carrot/date").into(),
3763 PathBuf::from("banana/carrot/endive").into(),
3764 ]
3765 );
3766 }
3767
3768 #[gpui::test]
3769 async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
3770 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3771 let progress_token = language_server_config
3772 .disk_based_diagnostics_progress_token
3773 .clone()
3774 .unwrap();
3775
3776 let language = Arc::new(Language::new(
3777 LanguageConfig {
3778 name: "Rust".into(),
3779 path_suffixes: vec!["rs".to_string()],
3780 language_server: Some(language_server_config),
3781 ..Default::default()
3782 },
3783 Some(tree_sitter_rust::language()),
3784 ));
3785
3786 let fs = FakeFs::new(cx.background());
3787 fs.insert_tree(
3788 "/dir",
3789 json!({
3790 "a.rs": "fn a() { A }",
3791 "b.rs": "const y: i32 = 1",
3792 }),
3793 )
3794 .await;
3795
3796 let project = Project::test(fs, &mut cx);
3797 project.update(&mut cx, |project, _| {
3798 Arc::get_mut(&mut project.languages).unwrap().add(language);
3799 });
3800
3801 let (tree, _) = project
3802 .update(&mut cx, |project, cx| {
3803 project.find_or_create_local_worktree("/dir", false, cx)
3804 })
3805 .await
3806 .unwrap();
3807 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3808
3809 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3810 .await;
3811
        // Cause the worktree to start the fake language server.
3813 let _buffer = project
3814 .update(&mut cx, |project, cx| {
3815 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3816 })
3817 .await
3818 .unwrap();
3819
3820 let mut events = subscribe(&project, &mut cx);
3821
3822 let mut fake_server = fake_servers.next().await.unwrap();
3823 fake_server.start_progress(&progress_token).await;
3824 assert_eq!(
3825 events.next().await.unwrap(),
3826 Event::DiskBasedDiagnosticsStarted
3827 );
3828
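        // Nest additional progress cycles for the same token; no extra Started event
        // should be emitted while the first cycle is still in flight.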
3829 fake_server.start_progress(&progress_token).await;
3830 fake_server.end_progress(&progress_token).await;
3831 fake_server.start_progress(&progress_token).await;
3832
3833 fake_server
3834 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3835 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3836 version: None,
3837 diagnostics: vec![lsp::Diagnostic {
3838 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3839 severity: Some(lsp::DiagnosticSeverity::ERROR),
3840 message: "undefined variable 'A'".to_string(),
3841 ..Default::default()
3842 }],
3843 })
3844 .await;
3845 assert_eq!(
3846 events.next().await.unwrap(),
3847 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3848 );
3849
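        // Ending all outstanding progress cycles emits Updated followed by Finished.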
3850 fake_server.end_progress(&progress_token).await;
3851 fake_server.end_progress(&progress_token).await;
3852 assert_eq!(
3853 events.next().await.unwrap(),
3854 Event::DiskBasedDiagnosticsUpdated
3855 );
3856 assert_eq!(
3857 events.next().await.unwrap(),
3858 Event::DiskBasedDiagnosticsFinished
3859 );
3860
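        // The published diagnostic should now appear in the buffer's snapshot.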
3861 let buffer = project
3862 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3863 .await
3864 .unwrap();
3865
3866 buffer.read_with(&cx, |buffer, _| {
3867 let snapshot = buffer.snapshot();
3868 let diagnostics = snapshot
3869 .diagnostics_in_range::<_, Point>(0..buffer.len())
3870 .collect::<Vec<_>>();
3871 assert_eq!(
3872 diagnostics,
3873 &[DiagnosticEntry {
3874 range: Point::new(0, 9)..Point::new(0, 10),
3875 diagnostic: Diagnostic {
3876 severity: lsp::DiagnosticSeverity::ERROR,
3877 message: "undefined variable 'A'".to_string(),
3878 group_id: 0,
3879 is_primary: true,
3880 ..Default::default()
3881 }
3882 }]
3883 )
3884 });
3885 }
3886
3887 #[gpui::test]
3888 async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
3889 let dir = temp_tree(json!({
3890 "root": {
3891 "dir1": {},
3892 "dir2": {
3893 "dir3": {}
3894 }
3895 }
3896 }));
3897
3898 let project = Project::test(Arc::new(RealFs), &mut cx);
3899 let (tree, _) = project
3900 .update(&mut cx, |project, cx| {
3901 project.find_or_create_local_worktree(&dir.path(), false, cx)
3902 })
3903 .await
3904 .unwrap();
3905
3906 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3907 .await;
3908
3909 let cancel_flag = Default::default();
3910 let results = project
3911 .read_with(&cx, |project, cx| {
3912 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3913 })
3914 .await;
3915
3916 assert!(results.is_empty());
3917 }
3918
3919 #[gpui::test]
3920 async fn test_definition(mut cx: gpui::TestAppContext) {
3921 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3922 let language = Arc::new(Language::new(
3923 LanguageConfig {
3924 name: "Rust".into(),
3925 path_suffixes: vec!["rs".to_string()],
3926 language_server: Some(language_server_config),
3927 ..Default::default()
3928 },
3929 Some(tree_sitter_rust::language()),
3930 ));
3931
3932 let fs = FakeFs::new(cx.background());
3933 fs.insert_tree(
3934 "/dir",
3935 json!({
3936 "a.rs": "const fn a() { A }",
3937 "b.rs": "const y: i32 = crate::a()",
3938 }),
3939 )
3940 .await;
3941
3942 let project = Project::test(fs, &mut cx);
3943 project.update(&mut cx, |project, _| {
3944 Arc::get_mut(&mut project.languages).unwrap().add(language);
3945 });
3946
3947 let (tree, _) = project
3948 .update(&mut cx, |project, cx| {
3949 project.find_or_create_local_worktree("/dir/b.rs", false, cx)
3950 })
3951 .await
3952 .unwrap();
3953 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3954 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3955 .await;
3956
3957 let buffer = project
3958 .update(&mut cx, |project, cx| {
3959 project.open_buffer(
3960 ProjectPath {
3961 worktree_id,
3962 path: Path::new("").into(),
3963 },
3964 cx,
3965 )
3966 })
3967 .await
3968 .unwrap();
3969
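        // Respond to GotoDefinition with a location in `a.rs`, a file outside of the
        // single-file worktree that was opened for `b.rs`.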
3970 let mut fake_server = fake_servers.next().await.unwrap();
3971 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
3972 let params = params.text_document_position_params;
3973 assert_eq!(
3974 params.text_document.uri.to_file_path().unwrap(),
3975 Path::new("/dir/b.rs"),
3976 );
3977 assert_eq!(params.position, lsp::Position::new(0, 22));
3978
3979 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3980 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
3981 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3982 )))
3983 });
3984
3985 let mut definitions = project
3986 .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
3987 .await
3988 .unwrap();
3989
3990 assert_eq!(definitions.len(), 1);
3991 let definition = definitions.pop().unwrap();
3992 cx.update(|cx| {
3993 let target_buffer = definition.buffer.read(cx);
3994 assert_eq!(
3995 target_buffer
3996 .file()
3997 .unwrap()
3998 .as_local()
3999 .unwrap()
4000 .abs_path(cx),
4001 Path::new("/dir/a.rs"),
4002 );
4003 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
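            // Resolving the definition registers a weak worktree for `a.rs` alongside
            // the original worktree for `b.rs`.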
4004 assert_eq!(
4005 list_worktrees(&project, cx),
4006 [("/dir/b.rs".as_ref(), false), ("/dir/a.rs".as_ref(), true)]
4007 );
4008
4009 drop(definition);
4010 });
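        // Dropping the definition releases the weak worktree for `a.rs`.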
4011 cx.read(|cx| {
4012 assert_eq!(
4013 list_worktrees(&project, cx),
4014 [("/dir/b.rs".as_ref(), false)]
4015 );
4016 });
4017
4018 fn list_worktrees<'a>(
4019 project: &'a ModelHandle<Project>,
4020 cx: &'a AppContext,
4021 ) -> Vec<(&'a Path, bool)> {
4022 project
4023 .read(cx)
4024 .worktrees(cx)
4025 .map(|worktree| {
4026 let worktree = worktree.read(cx);
4027 (
4028 worktree.as_local().unwrap().abs_path().as_ref(),
4029 worktree.is_weak(),
4030 )
4031 })
4032 .collect::<Vec<_>>()
4033 }
4034 }
4035
4036 #[gpui::test]
4037 async fn test_save_file(mut cx: gpui::TestAppContext) {
4038 let fs = FakeFs::new(cx.background());
4039 fs.insert_tree(
4040 "/dir",
4041 json!({
4042 "file1": "the old contents",
4043 }),
4044 )
4045 .await;
4046
4047 let project = Project::test(fs.clone(), &mut cx);
4048 let worktree_id = project
4049 .update(&mut cx, |p, cx| {
4050 p.find_or_create_local_worktree("/dir", false, cx)
4051 })
4052 .await
4053 .unwrap()
4054 .0
4055 .read_with(&cx, |tree, _| tree.id());
4056
4057 let buffer = project
4058 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4059 .await
4060 .unwrap();
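        // Edit the buffer and save it; the file on disk should match the buffer's contents.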
4061 buffer
4062 .update(&mut cx, |buffer, cx| {
4063 assert_eq!(buffer.text(), "the old contents");
4064 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4065 buffer.save(cx)
4066 })
4067 .await
4068 .unwrap();
4069
4070 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4071 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
4072 }
4073
4074 #[gpui::test]
4075 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
4076 let fs = FakeFs::new(cx.background());
4077 fs.insert_tree(
4078 "/dir",
4079 json!({
4080 "file1": "the old contents",
4081 }),
4082 )
4083 .await;
4084
4085 let project = Project::test(fs.clone(), &mut cx);
4086 let worktree_id = project
4087 .update(&mut cx, |p, cx| {
4088 p.find_or_create_local_worktree("/dir/file1", false, cx)
4089 })
4090 .await
4091 .unwrap()
4092 .0
4093 .read_with(&cx, |tree, _| tree.id());
4094
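        // The worktree is rooted at the file itself, so the buffer is opened with an empty path.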
4095 let buffer = project
4096 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
4097 .await
4098 .unwrap();
4099 buffer
4100 .update(&mut cx, |buffer, cx| {
4101 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4102 buffer.save(cx)
4103 })
4104 .await
4105 .unwrap();
4106
4107 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4108 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
4109 }
4110
4111 #[gpui::test(retries = 5)]
4112 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
4113 let dir = temp_tree(json!({
4114 "a": {
4115 "file1": "",
4116 "file2": "",
4117 "file3": "",
4118 },
4119 "b": {
4120 "c": {
4121 "file4": "",
4122 "file5": "",
4123 }
4124 }
4125 }));
4126
4127 let project = Project::test(Arc::new(RealFs), &mut cx);
4128 let rpc = project.read_with(&cx, |p, _| p.client.clone());
4129
4130 let (tree, _) = project
4131 .update(&mut cx, |p, cx| {
4132 p.find_or_create_local_worktree(dir.path(), false, cx)
4133 })
4134 .await
4135 .unwrap();
4136 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
4137
4138 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4139 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
4140 async move { buffer.await.unwrap() }
4141 };
4142 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
4143 tree.read_with(cx, |tree, _| {
4144 tree.entry_for_path(path)
                    .unwrap_or_else(|| panic!("no entry for path {}", path))
4146 .id
4147 })
4148 };
4149
4150 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
4151 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
4152 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
4153 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
4154
4155 let file2_id = id_for_path("a/file2", &cx);
4156 let file3_id = id_for_path("a/file3", &cx);
4157 let file4_id = id_for_path("b/c/file4", &cx);
4158
4159 // Wait for the initial scan.
4160 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4161 .await;
4162
4163 // Create a remote copy of this worktree.
4164 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
4165 let (remote, load_task) = cx.update(|cx| {
4166 Worktree::remote(
4167 1,
4168 1,
4169 initial_snapshot.to_proto(&Default::default(), Default::default()),
4170 rpc.clone(),
4171 cx,
4172 )
4173 });
4174 load_task.await;
4175
4176 cx.read(|cx| {
4177 assert!(!buffer2.read(cx).is_dirty());
4178 assert!(!buffer3.read(cx).is_dirty());
4179 assert!(!buffer4.read(cx).is_dirty());
4180 assert!(!buffer5.read(cx).is_dirty());
4181 });
4182
4183 // Rename and delete files and directories.
4184 tree.flush_fs_events(&cx).await;
4185 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
4186 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
4187 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
4188 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
4189 tree.flush_fs_events(&cx).await;
4190
4191 let expected_paths = vec![
4192 "a",
4193 "a/file1",
4194 "a/file2.new",
4195 "b",
4196 "d",
4197 "d/file3",
4198 "d/file4",
4199 ];
4200
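        // The local worktree reflects the renames and deletions, and entry ids are
        // preserved across renames.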
4201 cx.read(|app| {
4202 assert_eq!(
4203 tree.read(app)
4204 .paths()
4205 .map(|p| p.to_str().unwrap())
4206 .collect::<Vec<_>>(),
4207 expected_paths
4208 );
4209
4210 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
4211 assert_eq!(id_for_path("d/file3", &cx), file3_id);
4212 assert_eq!(id_for_path("d/file4", &cx), file4_id);
4213
4214 assert_eq!(
4215 buffer2.read(app).file().unwrap().path().as_ref(),
4216 Path::new("a/file2.new")
4217 );
4218 assert_eq!(
4219 buffer3.read(app).file().unwrap().path().as_ref(),
4220 Path::new("d/file3")
4221 );
4222 assert_eq!(
4223 buffer4.read(app).file().unwrap().path().as_ref(),
4224 Path::new("d/file4")
4225 );
4226 assert_eq!(
4227 buffer5.read(app).file().unwrap().path().as_ref(),
4228 Path::new("b/c/file5")
4229 );
4230
4231 assert!(!buffer2.read(app).file().unwrap().is_deleted());
4232 assert!(!buffer3.read(app).file().unwrap().is_deleted());
4233 assert!(!buffer4.read(app).file().unwrap().is_deleted());
4234 assert!(buffer5.read(app).file().unwrap().is_deleted());
4235 });
4236
4237 // Update the remote worktree. Check that it becomes consistent with the
4238 // local worktree.
4239 remote.update(&mut cx, |remote, cx| {
4240 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
4241 &initial_snapshot,
4242 1,
4243 1,
4244 true,
4245 );
4246 remote
4247 .as_remote_mut()
4248 .unwrap()
4249 .snapshot
4250 .apply_remote_update(update_message)
4251 .unwrap();
4252
4253 assert_eq!(
4254 remote
4255 .paths()
4256 .map(|p| p.to_str().unwrap())
4257 .collect::<Vec<_>>(),
4258 expected_paths
4259 );
4260 });
4261 }
4262
4263 #[gpui::test]
4264 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
4265 let fs = FakeFs::new(cx.background());
4266 fs.insert_tree(
4267 "/the-dir",
4268 json!({
4269 "a.txt": "a-contents",
4270 "b.txt": "b-contents",
4271 }),
4272 )
4273 .await;
4274
4275 let project = Project::test(fs.clone(), &mut cx);
4276 let worktree_id = project
4277 .update(&mut cx, |p, cx| {
4278 p.find_or_create_local_worktree("/the-dir", false, cx)
4279 })
4280 .await
4281 .unwrap()
4282 .0
4283 .read_with(&cx, |tree, _| tree.id());
4284
4285 // Spawn multiple tasks to open paths, repeating some paths.
4286 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
4287 (
4288 p.open_buffer((worktree_id, "a.txt"), cx),
4289 p.open_buffer((worktree_id, "b.txt"), cx),
4290 p.open_buffer((worktree_id, "a.txt"), cx),
4291 )
4292 });
4293
4294 let buffer_a_1 = buffer_a_1.await.unwrap();
4295 let buffer_a_2 = buffer_a_2.await.unwrap();
4296 let buffer_b = buffer_b.await.unwrap();
4297 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
4298 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
4299
4300 // There is only one buffer per path.
4301 let buffer_a_id = buffer_a_1.id();
4302 assert_eq!(buffer_a_2.id(), buffer_a_id);
4303
4304 // Open the same path again while it is still open.
4305 drop(buffer_a_1);
4306 let buffer_a_3 = project
4307 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
4308 .await
4309 .unwrap();
4310
4311 // There's still only one buffer per path.
4312 assert_eq!(buffer_a_3.id(), buffer_a_id);
4313 }
4314
4315 #[gpui::test]
4316 async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
4317 use std::fs;
4318
4319 let dir = temp_tree(json!({
4320 "file1": "abc",
4321 "file2": "def",
4322 "file3": "ghi",
4323 }));
4324
4325 let project = Project::test(Arc::new(RealFs), &mut cx);
4326 let (worktree, _) = project
4327 .update(&mut cx, |p, cx| {
4328 p.find_or_create_local_worktree(dir.path(), false, cx)
4329 })
4330 .await
4331 .unwrap();
4332 let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
4333
4334 worktree.flush_fs_events(&cx).await;
4335 worktree
4336 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
4337 .await;
4338
4339 let buffer1 = project
4340 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4341 .await
4342 .unwrap();
4343 let events = Rc::new(RefCell::new(Vec::new()));
4344
        // Initially, the buffer isn't dirty.
4346 buffer1.update(&mut cx, |buffer, cx| {
4347 cx.subscribe(&buffer1, {
4348 let events = events.clone();
4349 move |_, _, event, _| events.borrow_mut().push(event.clone())
4350 })
4351 .detach();
4352
4353 assert!(!buffer.is_dirty());
4354 assert!(events.borrow().is_empty());
4355
4356 buffer.edit(vec![1..2], "", cx);
4357 });
4358
        // After the first edit, the buffer is dirty, and emits a dirtied event.
4360 buffer1.update(&mut cx, |buffer, cx| {
4361 assert!(buffer.text() == "ac");
4362 assert!(buffer.is_dirty());
4363 assert_eq!(
4364 *events.borrow(),
4365 &[language::Event::Edited, language::Event::Dirtied]
4366 );
4367 events.borrow_mut().clear();
4368 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
4369 });
4370
        // After saving, the buffer is not dirty, and emits a saved event.
4372 buffer1.update(&mut cx, |buffer, cx| {
4373 assert!(!buffer.is_dirty());
4374 assert_eq!(*events.borrow(), &[language::Event::Saved]);
4375 events.borrow_mut().clear();
4376
4377 buffer.edit(vec![1..1], "B", cx);
4378 buffer.edit(vec![2..2], "D", cx);
4379 });
4380
        // After editing again, the buffer is dirty, and emits another dirty event.
4382 buffer1.update(&mut cx, |buffer, cx| {
4383 assert!(buffer.text() == "aBDc");
4384 assert!(buffer.is_dirty());
4385 assert_eq!(
4386 *events.borrow(),
4387 &[
4388 language::Event::Edited,
4389 language::Event::Dirtied,
4390 language::Event::Edited,
4391 ],
4392 );
4393 events.borrow_mut().clear();
4394
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
4397 buffer.edit([1..3], "", cx);
4398 assert!(buffer.text() == "ac");
4399 assert!(buffer.is_dirty());
4400 });
4401
4402 assert_eq!(*events.borrow(), &[language::Event::Edited]);
4403
4404 // When a file is deleted, the buffer is considered dirty.
4405 let events = Rc::new(RefCell::new(Vec::new()));
4406 let buffer2 = project
4407 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
4408 .await
4409 .unwrap();
4410 buffer2.update(&mut cx, |_, cx| {
4411 cx.subscribe(&buffer2, {
4412 let events = events.clone();
4413 move |_, _, event, _| events.borrow_mut().push(event.clone())
4414 })
4415 .detach();
4416 });
4417
4418 fs::remove_file(dir.path().join("file2")).unwrap();
4419 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
4420 assert_eq!(
4421 *events.borrow(),
4422 &[language::Event::Dirtied, language::Event::FileHandleChanged]
4423 );
4424
        // When a file that is already dirty is deleted, no additional Dirtied event is emitted.
4426 let events = Rc::new(RefCell::new(Vec::new()));
4427 let buffer3 = project
4428 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
4429 .await
4430 .unwrap();
4431 buffer3.update(&mut cx, |_, cx| {
4432 cx.subscribe(&buffer3, {
4433 let events = events.clone();
4434 move |_, _, event, _| events.borrow_mut().push(event.clone())
4435 })
4436 .detach();
4437 });
4438
4439 worktree.flush_fs_events(&cx).await;
4440 buffer3.update(&mut cx, |buffer, cx| {
4441 buffer.edit(Some(0..0), "x", cx);
4442 });
4443 events.borrow_mut().clear();
4444 fs::remove_file(dir.path().join("file3")).unwrap();
4445 buffer3
4446 .condition(&cx, |_, _| !events.borrow().is_empty())
4447 .await;
4448 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
4449 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
4450 }
4451
4452 #[gpui::test]
4453 async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
4454 use std::fs;
4455
4456 let initial_contents = "aaa\nbbbbb\nc\n";
4457 let dir = temp_tree(json!({ "the-file": initial_contents }));
4458
4459 let project = Project::test(Arc::new(RealFs), &mut cx);
4460 let (worktree, _) = project
4461 .update(&mut cx, |p, cx| {
4462 p.find_or_create_local_worktree(dir.path(), false, cx)
4463 })
4464 .await
4465 .unwrap();
4466 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
4467
4468 worktree
4469 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
4470 .await;
4471
4472 let abs_path = dir.path().join("the-file");
4473 let buffer = project
4474 .update(&mut cx, |p, cx| {
4475 p.open_buffer((worktree_id, "the-file"), cx)
4476 })
4477 .await
4478 .unwrap();
4479
4480 // TODO
4481 // Add a cursor on each row.
4482 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
4483 // assert!(!buffer.is_dirty());
4484 // buffer.add_selection_set(
4485 // &(0..3)
4486 // .map(|row| Selection {
4487 // id: row as usize,
4488 // start: Point::new(row, 1),
4489 // end: Point::new(row, 1),
4490 // reversed: false,
4491 // goal: SelectionGoal::None,
4492 // })
4493 // .collect::<Vec<_>>(),
4494 // cx,
4495 // )
4496 // });
4497
4498 // Change the file on disk, adding two new lines of text, and removing
4499 // one line.
4500 buffer.read_with(&cx, |buffer, _| {
4501 assert!(!buffer.is_dirty());
4502 assert!(!buffer.has_conflict());
4503 });
4504 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
4505 fs::write(&abs_path, new_contents).unwrap();
4506
4507 // Because the buffer was not modified, it is reloaded from disk. Its
4508 // contents are edited according to the diff between the old and new
4509 // file contents.
4510 buffer
4511 .condition(&cx, |buffer, _| buffer.text() == new_contents)
4512 .await;
4513
4514 buffer.update(&mut cx, |buffer, _| {
4515 assert_eq!(buffer.text(), new_contents);
4516 assert!(!buffer.is_dirty());
4517 assert!(!buffer.has_conflict());
4518
4519 // TODO
4520 // let cursor_positions = buffer
4521 // .selection_set(selection_set_id)
4522 // .unwrap()
4523 // .selections::<Point>(&*buffer)
4524 // .map(|selection| {
4525 // assert_eq!(selection.start, selection.end);
4526 // selection.start
4527 // })
4528 // .collect::<Vec<_>>();
4529 // assert_eq!(
4530 // cursor_positions,
4531 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
4532 // );
4533 });
4534
        // Modify the buffer.
4536 buffer.update(&mut cx, |buffer, cx| {
4537 buffer.edit(vec![0..0], " ", cx);
4538 assert!(buffer.is_dirty());
4539 assert!(!buffer.has_conflict());
4540 });
4541
4542 // Change the file on disk again, adding blank lines to the beginning.
4543 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
4544
4545 // Because the buffer is modified, it doesn't reload from disk, but is
4546 // marked as having a conflict.
4547 buffer
4548 .condition(&cx, |buffer, _| buffer.has_conflict())
4549 .await;
4550 }
4551
4552 #[gpui::test]
4553 async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
4554 let fs = FakeFs::new(cx.background());
4555 fs.insert_tree(
4556 "/the-dir",
4557 json!({
4558 "a.rs": "
4559 fn foo(mut v: Vec<usize>) {
4560 for x in &v {
4561 v.push(1);
4562 }
4563 }
4564 "
4565 .unindent(),
4566 }),
4567 )
4568 .await;
4569
4570 let project = Project::test(fs.clone(), &mut cx);
4571 let (worktree, _) = project
4572 .update(&mut cx, |p, cx| {
4573 p.find_or_create_local_worktree("/the-dir", false, cx)
4574 })
4575 .await
4576 .unwrap();
4577 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
4578
4579 let buffer = project
4580 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4581 .await
4582 .unwrap();
4583
4584 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
4585 let message = lsp::PublishDiagnosticsParams {
4586 uri: buffer_uri.clone(),
4587 diagnostics: vec![
4588 lsp::Diagnostic {
4589 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4590 severity: Some(DiagnosticSeverity::WARNING),
4591 message: "error 1".to_string(),
4592 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4593 location: lsp::Location {
4594 uri: buffer_uri.clone(),
4595 range: lsp::Range::new(
4596 lsp::Position::new(1, 8),
4597 lsp::Position::new(1, 9),
4598 ),
4599 },
4600 message: "error 1 hint 1".to_string(),
4601 }]),
4602 ..Default::default()
4603 },
4604 lsp::Diagnostic {
4605 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4606 severity: Some(DiagnosticSeverity::HINT),
4607 message: "error 1 hint 1".to_string(),
4608 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4609 location: lsp::Location {
4610 uri: buffer_uri.clone(),
4611 range: lsp::Range::new(
4612 lsp::Position::new(1, 8),
4613 lsp::Position::new(1, 9),
4614 ),
4615 },
4616 message: "original diagnostic".to_string(),
4617 }]),
4618 ..Default::default()
4619 },
4620 lsp::Diagnostic {
4621 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4622 severity: Some(DiagnosticSeverity::ERROR),
4623 message: "error 2".to_string(),
4624 related_information: Some(vec![
4625 lsp::DiagnosticRelatedInformation {
4626 location: lsp::Location {
4627 uri: buffer_uri.clone(),
4628 range: lsp::Range::new(
4629 lsp::Position::new(1, 13),
4630 lsp::Position::new(1, 15),
4631 ),
4632 },
4633 message: "error 2 hint 1".to_string(),
4634 },
4635 lsp::DiagnosticRelatedInformation {
4636 location: lsp::Location {
4637 uri: buffer_uri.clone(),
4638 range: lsp::Range::new(
4639 lsp::Position::new(1, 13),
4640 lsp::Position::new(1, 15),
4641 ),
4642 },
4643 message: "error 2 hint 2".to_string(),
4644 },
4645 ]),
4646 ..Default::default()
4647 },
4648 lsp::Diagnostic {
4649 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4650 severity: Some(DiagnosticSeverity::HINT),
4651 message: "error 2 hint 1".to_string(),
4652 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4653 location: lsp::Location {
4654 uri: buffer_uri.clone(),
4655 range: lsp::Range::new(
4656 lsp::Position::new(2, 8),
4657 lsp::Position::new(2, 17),
4658 ),
4659 },
4660 message: "original diagnostic".to_string(),
4661 }]),
4662 ..Default::default()
4663 },
4664 lsp::Diagnostic {
4665 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4666 severity: Some(DiagnosticSeverity::HINT),
4667 message: "error 2 hint 2".to_string(),
4668 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4669 location: lsp::Location {
4670 uri: buffer_uri.clone(),
4671 range: lsp::Range::new(
4672 lsp::Position::new(2, 8),
4673 lsp::Position::new(2, 17),
4674 ),
4675 },
4676 message: "original diagnostic".to_string(),
4677 }]),
4678 ..Default::default()
4679 },
4680 ],
4681 version: None,
4682 };
4683
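        // Ingest the diagnostics; entries listed as related information should be
        // grouped with their primary diagnostics.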
4684 project
4685 .update(&mut cx, |p, cx| {
4686 p.update_diagnostics(message, &Default::default(), cx)
4687 })
4688 .unwrap();
4689 let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
4690
4691 assert_eq!(
4692 buffer
4693 .diagnostics_in_range::<_, Point>(0..buffer.len())
4694 .collect::<Vec<_>>(),
4695 &[
4696 DiagnosticEntry {
4697 range: Point::new(1, 8)..Point::new(1, 9),
4698 diagnostic: Diagnostic {
4699 severity: DiagnosticSeverity::WARNING,
4700 message: "error 1".to_string(),
4701 group_id: 0,
4702 is_primary: true,
4703 ..Default::default()
4704 }
4705 },
4706 DiagnosticEntry {
4707 range: Point::new(1, 8)..Point::new(1, 9),
4708 diagnostic: Diagnostic {
4709 severity: DiagnosticSeverity::HINT,
4710 message: "error 1 hint 1".to_string(),
4711 group_id: 0,
4712 is_primary: false,
4713 ..Default::default()
4714 }
4715 },
4716 DiagnosticEntry {
4717 range: Point::new(1, 13)..Point::new(1, 15),
4718 diagnostic: Diagnostic {
4719 severity: DiagnosticSeverity::HINT,
4720 message: "error 2 hint 1".to_string(),
4721 group_id: 1,
4722 is_primary: false,
4723 ..Default::default()
4724 }
4725 },
4726 DiagnosticEntry {
4727 range: Point::new(1, 13)..Point::new(1, 15),
4728 diagnostic: Diagnostic {
4729 severity: DiagnosticSeverity::HINT,
4730 message: "error 2 hint 2".to_string(),
4731 group_id: 1,
4732 is_primary: false,
4733 ..Default::default()
4734 }
4735 },
4736 DiagnosticEntry {
4737 range: Point::new(2, 8)..Point::new(2, 17),
4738 diagnostic: Diagnostic {
4739 severity: DiagnosticSeverity::ERROR,
4740 message: "error 2".to_string(),
4741 group_id: 1,
4742 is_primary: true,
4743 ..Default::default()
4744 }
4745 }
4746 ]
4747 );
4748
4749 assert_eq!(
4750 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4751 &[
4752 DiagnosticEntry {
4753 range: Point::new(1, 8)..Point::new(1, 9),
4754 diagnostic: Diagnostic {
4755 severity: DiagnosticSeverity::WARNING,
4756 message: "error 1".to_string(),
4757 group_id: 0,
4758 is_primary: true,
4759 ..Default::default()
4760 }
4761 },
4762 DiagnosticEntry {
4763 range: Point::new(1, 8)..Point::new(1, 9),
4764 diagnostic: Diagnostic {
4765 severity: DiagnosticSeverity::HINT,
4766 message: "error 1 hint 1".to_string(),
4767 group_id: 0,
4768 is_primary: false,
4769 ..Default::default()
4770 }
4771 },
4772 ]
4773 );
4774 assert_eq!(
4775 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4776 &[
4777 DiagnosticEntry {
4778 range: Point::new(1, 13)..Point::new(1, 15),
4779 diagnostic: Diagnostic {
4780 severity: DiagnosticSeverity::HINT,
4781 message: "error 2 hint 1".to_string(),
4782 group_id: 1,
4783 is_primary: false,
4784 ..Default::default()
4785 }
4786 },
4787 DiagnosticEntry {
4788 range: Point::new(1, 13)..Point::new(1, 15),
4789 diagnostic: Diagnostic {
4790 severity: DiagnosticSeverity::HINT,
4791 message: "error 2 hint 2".to_string(),
4792 group_id: 1,
4793 is_primary: false,
4794 ..Default::default()
4795 }
4796 },
4797 DiagnosticEntry {
4798 range: Point::new(2, 8)..Point::new(2, 17),
4799 diagnostic: Diagnostic {
4800 severity: DiagnosticSeverity::ERROR,
4801 message: "error 2".to_string(),
4802 group_id: 1,
4803 is_primary: true,
4804 ..Default::default()
4805 }
4806 }
4807 ]
4808 );
4809 }
4810
4811 #[gpui::test]
4812 async fn test_rename(mut cx: gpui::TestAppContext) {
4813 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4814 let language = Arc::new(Language::new(
4815 LanguageConfig {
4816 name: "Rust".into(),
4817 path_suffixes: vec!["rs".to_string()],
4818 language_server: Some(language_server_config),
4819 ..Default::default()
4820 },
4821 Some(tree_sitter_rust::language()),
4822 ));
4823
4824 let fs = FakeFs::new(cx.background());
4825 fs.insert_tree(
4826 "/dir",
4827 json!({
4828 "one.rs": "const ONE: usize = 1;",
4829 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4830 }),
4831 )
4832 .await;
4833
4834 let project = Project::test(fs.clone(), &mut cx);
4835 project.update(&mut cx, |project, _| {
4836 Arc::get_mut(&mut project.languages).unwrap().add(language);
4837 });
4838
4839 let (tree, _) = project
4840 .update(&mut cx, |project, cx| {
4841 project.find_or_create_local_worktree("/dir", false, cx)
4842 })
4843 .await
4844 .unwrap();
4845 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
4846 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4847 .await;
4848
4849 let buffer = project
4850 .update(&mut cx, |project, cx| {
4851 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
4852 })
4853 .await
4854 .unwrap();
4855
4856 let mut fake_server = fake_servers.next().await.unwrap();
4857
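        // prepare_rename resolves the full range of the symbol at the given offset.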
4858 let response = project.update(&mut cx, |project, cx| {
4859 project.prepare_rename(buffer.clone(), 7, cx)
4860 });
4861 fake_server
4862 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
4863 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
4864 assert_eq!(params.position, lsp::Position::new(0, 7));
4865 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4866 lsp::Position::new(0, 6),
4867 lsp::Position::new(0, 9),
4868 )))
4869 })
4870 .next()
4871 .await
4872 .unwrap();
4873 let range = response.await.unwrap().unwrap();
4874 let range = buffer.read_with(&cx, |buffer, _| range.to_offset(buffer));
4875 assert_eq!(range, 6..9);
4876
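        // perform_rename applies the returned workspace edit, which spans both
        // `one.rs` and `two.rs`.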
4877 let response = project.update(&mut cx, |project, cx| {
4878 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
4879 });
4880 fake_server
4881 .handle_request::<lsp::request::Rename, _>(|params, _| {
4882 assert_eq!(
4883 params.text_document_position.text_document.uri.as_str(),
4884 "file:///dir/one.rs"
4885 );
4886 assert_eq!(
4887 params.text_document_position.position,
4888 lsp::Position::new(0, 7)
4889 );
4890 assert_eq!(params.new_name, "THREE");
4891 Some(lsp::WorkspaceEdit {
4892 changes: Some(
4893 [
4894 (
4895 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
4896 vec![lsp::TextEdit::new(
4897 lsp::Range::new(
4898 lsp::Position::new(0, 6),
4899 lsp::Position::new(0, 9),
4900 ),
4901 "THREE".to_string(),
4902 )],
4903 ),
4904 (
4905 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
4906 vec![
4907 lsp::TextEdit::new(
4908 lsp::Range::new(
4909 lsp::Position::new(0, 24),
4910 lsp::Position::new(0, 27),
4911 ),
4912 "THREE".to_string(),
4913 ),
4914 lsp::TextEdit::new(
4915 lsp::Range::new(
4916 lsp::Position::new(0, 35),
4917 lsp::Position::new(0, 38),
4918 ),
4919 "THREE".to_string(),
4920 ),
4921 ],
4922 ),
4923 ]
4924 .into_iter()
4925 .collect(),
4926 ),
4927 ..Default::default()
4928 })
4929 })
4930 .next()
4931 .await
4932 .unwrap();
4933 let mut transaction = response.await.unwrap().0;
4934 assert_eq!(transaction.len(), 2);
4935 assert_eq!(
4936 transaction
4937 .remove_entry(&buffer)
4938 .unwrap()
4939 .0
4940 .read_with(&cx, |buffer, _| buffer.text()),
4941 "const THREE: usize = 1;"
4942 );
4943 assert_eq!(
4944 transaction
4945 .into_keys()
4946 .next()
4947 .unwrap()
4948 .read_with(&cx, |buffer, _| buffer.text()),
4949 "const TWO: usize = one::THREE + one::THREE;"
4950 );
4951 }
4952
4953 #[gpui::test]
4954 async fn test_search(mut cx: gpui::TestAppContext) {
4955 let fs = FakeFs::new(cx.background());
4956 fs.insert_tree(
4957 "/dir",
4958 json!({
4959 "one.rs": "const ONE: usize = 1;",
4960 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4961 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4962 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4963 }),
4964 )
4965 .await;
4966 let project = Project::test(fs.clone(), &mut cx);
4967 let (tree, _) = project
4968 .update(&mut cx, |project, cx| {
4969 project.find_or_create_local_worktree("/dir", false, cx)
4970 })
4971 .await
4972 .unwrap();
4973 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
4974 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4975 .await;
4976
4977 assert_eq!(
4978 search(&project, SearchQuery::text("TWO", false, true), &mut cx)
4979 .await
4980 .unwrap(),
4981 HashMap::from_iter([
4982 ("two.rs".to_string(), vec![6..9]),
4983 ("three.rs".to_string(), vec![37..40])
4984 ])
4985 );
4986
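        // Edit an open buffer; subsequent searches should reflect its in-memory contents.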
4987 let buffer_4 = project
4988 .update(&mut cx, |project, cx| {
4989 project.open_buffer((worktree_id, "four.rs"), cx)
4990 })
4991 .await
4992 .unwrap();
4993 buffer_4.update(&mut cx, |buffer, cx| {
4994 buffer.edit([20..28, 31..43], "two::TWO", cx);
4995 });
4996
4997 assert_eq!(
4998 search(&project, SearchQuery::text("TWO", false, true), &mut cx)
4999 .await
5000 .unwrap(),
5001 HashMap::from_iter([
5002 ("two.rs".to_string(), vec![6..9]),
5003 ("three.rs".to_string(), vec![37..40]),
5004 ("four.rs".to_string(), vec![25..28, 36..39])
5005 ])
5006 );
5007
5008 async fn search(
5009 project: &ModelHandle<Project>,
5010 query: SearchQuery,
5011 cx: &mut gpui::TestAppContext,
5012 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
5013 let results = project
5014 .update(cx, |project, cx| project.search(query, cx))
5015 .await?;
5016
5017 Ok(results
5018 .into_iter()
5019 .map(|(buffer, ranges)| {
5020 buffer.read_with(cx, |buffer, _| {
5021 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
5022 let ranges = ranges
5023 .into_iter()
5024 .map(|range| range.to_offset(buffer))
5025 .collect::<Vec<_>>();
5026 (path, ranges)
5027 })
5028 })
5029 .collect())
5030 }
5031 }
5032}