1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, serialize_anchor},
19 range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
20 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
21 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
22};
23use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
24use lsp_command::*;
25use postage::{broadcast, prelude::Stream, sink::Sink, watch};
26use rand::prelude::*;
27use search::SearchQuery;
28use sha2::{Digest, Sha256};
29use smol::block_on;
30use std::{
31 cell::RefCell,
32 cmp,
33 convert::TryInto,
34 hash::Hash,
35 mem,
36 ops::Range,
37 path::{Component, Path, PathBuf},
38 rc::Rc,
39 sync::{atomic::AtomicBool, Arc},
40 time::Instant,
41};
42use util::{post_inc, ResultExt, TryFutureExt as _};
43
44pub use fs::*;
45pub use worktree::*;
46
47pub struct Project {
48 worktrees: Vec<WorktreeHandle>,
49 active_entry: Option<ProjectEntry>,
50 languages: Arc<LanguageRegistry>,
51 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
52 started_language_servers:
53 HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
54 client: Arc<client::Client>,
55 user_store: ModelHandle<UserStore>,
56 fs: Arc<dyn Fs>,
57 client_state: ProjectClientState,
58 collaborators: HashMap<PeerId, Collaborator>,
59 subscriptions: Vec<client::Subscription>,
60 language_servers_with_diagnostics_running: isize,
61 opened_buffer: broadcast::Sender<()>,
62 loading_buffers: HashMap<
63 ProjectPath,
64 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
65 >,
66 buffers_state: Rc<RefCell<ProjectBuffers>>,
67 shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
68 nonce: u128,
69}
70
71#[derive(Default)]
72struct ProjectBuffers {
73 buffer_request_count: usize,
74 preserved_buffers: Vec<ModelHandle<Buffer>>,
75 open_buffers: HashMap<u64, OpenBuffer>,
76}
77
78enum OpenBuffer {
79 Loaded(WeakModelHandle<Buffer>),
80 Loading(Vec<Operation>),
81}
82
83enum WorktreeHandle {
84 Strong(ModelHandle<Worktree>),
85 Weak(WeakModelHandle<Worktree>),
86}
87
88enum ProjectClientState {
89 Local {
90 is_shared: bool,
91 remote_id_tx: watch::Sender<Option<u64>>,
92 remote_id_rx: watch::Receiver<Option<u64>>,
93 _maintain_remote_id_task: Task<Option<()>>,
94 },
95 Remote {
96 sharing_has_stopped: bool,
97 remote_id: u64,
98 replica_id: ReplicaId,
99 },
100}
101
102#[derive(Clone, Debug)]
103pub struct Collaborator {
104 pub user: Arc<User>,
105 pub peer_id: PeerId,
106 pub replica_id: ReplicaId,
107}
108
109#[derive(Clone, Debug, PartialEq)]
110pub enum Event {
111 ActiveEntryChanged(Option<ProjectEntry>),
112 WorktreeRemoved(WorktreeId),
113 DiskBasedDiagnosticsStarted,
114 DiskBasedDiagnosticsUpdated,
115 DiskBasedDiagnosticsFinished,
116 DiagnosticsUpdated(ProjectPath),
117}
118
119#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
120pub struct ProjectPath {
121 pub worktree_id: WorktreeId,
122 pub path: Arc<Path>,
123}
124
125#[derive(Clone, Debug, Default, PartialEq)]
126pub struct DiagnosticSummary {
127 pub error_count: usize,
128 pub warning_count: usize,
129 pub info_count: usize,
130 pub hint_count: usize,
131}
132
133#[derive(Debug)]
134pub struct Location {
135 pub buffer: ModelHandle<Buffer>,
136 pub range: Range<language::Anchor>,
137}
138
139#[derive(Debug)]
140pub struct DocumentHighlight {
141 pub range: Range<language::Anchor>,
142 pub kind: DocumentHighlightKind,
143}
144
145#[derive(Clone, Debug)]
146pub struct Symbol {
147 pub source_worktree_id: WorktreeId,
148 pub worktree_id: WorktreeId,
149 pub language_name: String,
150 pub path: PathBuf,
151 pub label: CodeLabel,
152 pub name: String,
153 pub kind: lsp::SymbolKind,
154 pub range: Range<PointUtf16>,
155 pub signature: [u8; 32],
156}
157
158pub struct BufferRequestHandle(Rc<RefCell<ProjectBuffers>>);
159
160#[derive(Default)]
161pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
162
163impl DiagnosticSummary {
164 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
165 let mut this = Self {
166 error_count: 0,
167 warning_count: 0,
168 info_count: 0,
169 hint_count: 0,
170 };
171
172 for entry in diagnostics {
173 if entry.diagnostic.is_primary {
174 match entry.diagnostic.severity {
175 DiagnosticSeverity::ERROR => this.error_count += 1,
176 DiagnosticSeverity::WARNING => this.warning_count += 1,
177 DiagnosticSeverity::INFORMATION => this.info_count += 1,
178 DiagnosticSeverity::HINT => this.hint_count += 1,
179 _ => {}
180 }
181 }
182 }
183
184 this
185 }
186
187 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
188 proto::DiagnosticSummary {
189 path: path.to_string_lossy().to_string(),
190 error_count: self.error_count as u32,
191 warning_count: self.warning_count as u32,
192 info_count: self.info_count as u32,
193 hint_count: self.hint_count as u32,
194 }
195 }
196}
197
198#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
199pub struct ProjectEntry {
200 pub worktree_id: WorktreeId,
201 pub entry_id: usize,
202}
203
204impl Project {
205 pub fn init(client: &Arc<Client>) {
206 client.add_entity_message_handler(Self::handle_add_collaborator);
207 client.add_entity_message_handler(Self::handle_buffer_reloaded);
208 client.add_entity_message_handler(Self::handle_buffer_saved);
209 client.add_entity_message_handler(Self::handle_close_buffer);
210 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
211 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
212 client.add_entity_message_handler(Self::handle_remove_collaborator);
213 client.add_entity_message_handler(Self::handle_register_worktree);
214 client.add_entity_message_handler(Self::handle_unregister_worktree);
215 client.add_entity_message_handler(Self::handle_unshare_project);
216 client.add_entity_message_handler(Self::handle_update_buffer_file);
217 client.add_entity_message_handler(Self::handle_update_buffer);
218 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
219 client.add_entity_message_handler(Self::handle_update_worktree);
220 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
221 client.add_entity_request_handler(Self::handle_apply_code_action);
222 client.add_entity_request_handler(Self::handle_format_buffers);
223 client.add_entity_request_handler(Self::handle_get_code_actions);
224 client.add_entity_request_handler(Self::handle_get_completions);
225 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
226 client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
227 client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
228 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
229 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
230 client.add_entity_request_handler(Self::handle_search_project);
231 client.add_entity_request_handler(Self::handle_get_project_symbols);
232 client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
233 client.add_entity_request_handler(Self::handle_open_buffer);
234 client.add_entity_request_handler(Self::handle_save_buffer);
235 }
236
237 pub fn local(
238 client: Arc<Client>,
239 user_store: ModelHandle<UserStore>,
240 languages: Arc<LanguageRegistry>,
241 fs: Arc<dyn Fs>,
242 cx: &mut MutableAppContext,
243 ) -> ModelHandle<Self> {
244 cx.add_model(|cx: &mut ModelContext<Self>| {
245 let (remote_id_tx, remote_id_rx) = watch::channel();
246 let _maintain_remote_id_task = cx.spawn_weak({
247 let rpc = client.clone();
248 move |this, mut cx| {
249 async move {
250 let mut status = rpc.status();
251 while let Some(status) = status.recv().await {
252 if let Some(this) = this.upgrade(&cx) {
253 let remote_id = if let client::Status::Connected { .. } = status {
254 let response = rpc.request(proto::RegisterProject {}).await?;
255 Some(response.project_id)
256 } else {
257 None
258 };
259
260 if let Some(project_id) = remote_id {
261 let mut registrations = Vec::new();
262 this.update(&mut cx, |this, cx| {
263 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
264 registrations.push(worktree.update(
265 cx,
266 |worktree, cx| {
267 let worktree = worktree.as_local_mut().unwrap();
268 worktree.register(project_id, cx)
269 },
270 ));
271 }
272 });
273 for registration in registrations {
274 registration.await?;
275 }
276 }
277 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
278 }
279 }
280 Ok(())
281 }
282 .log_err()
283 }
284 });
285
286 Self {
287 worktrees: Default::default(),
288 collaborators: Default::default(),
289 buffers_state: Default::default(),
290 loading_buffers: Default::default(),
291 shared_buffers: Default::default(),
292 client_state: ProjectClientState::Local {
293 is_shared: false,
294 remote_id_tx,
295 remote_id_rx,
296 _maintain_remote_id_task,
297 },
298 opened_buffer: broadcast::channel(1).0,
299 subscriptions: Vec::new(),
300 active_entry: None,
301 languages,
302 client,
303 user_store,
304 fs,
305 language_servers_with_diagnostics_running: 0,
306 language_servers: Default::default(),
307 started_language_servers: Default::default(),
308 nonce: StdRng::from_entropy().gen(),
309 }
310 })
311 }
312
313 pub async fn remote(
314 remote_id: u64,
315 client: Arc<Client>,
316 user_store: ModelHandle<UserStore>,
317 languages: Arc<LanguageRegistry>,
318 fs: Arc<dyn Fs>,
319 cx: &mut AsyncAppContext,
320 ) -> Result<ModelHandle<Self>> {
321 client.authenticate_and_connect(&cx).await?;
322
323 let response = client
324 .request(proto::JoinProject {
325 project_id: remote_id,
326 })
327 .await?;
328
329 let replica_id = response.replica_id as ReplicaId;
330
331 let mut worktrees = Vec::new();
332 for worktree in response.worktrees {
333 let (worktree, load_task) = cx
334 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
335 worktrees.push(worktree);
336 load_task.detach();
337 }
338
339 let this = cx.add_model(|cx| {
340 let mut this = Self {
341 worktrees: Vec::new(),
342 loading_buffers: Default::default(),
343 opened_buffer: broadcast::channel(1).0,
344 shared_buffers: Default::default(),
345 active_entry: None,
346 collaborators: Default::default(),
347 languages,
348 user_store: user_store.clone(),
349 fs,
350 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
351 client,
352 client_state: ProjectClientState::Remote {
353 sharing_has_stopped: false,
354 remote_id,
355 replica_id,
356 },
357 language_servers_with_diagnostics_running: 0,
358 language_servers: Default::default(),
359 started_language_servers: Default::default(),
360 buffers_state: Default::default(),
361 nonce: StdRng::from_entropy().gen(),
362 };
363 for worktree in worktrees {
364 this.add_worktree(&worktree, cx);
365 }
366 this
367 });
368
369 let user_ids = response
370 .collaborators
371 .iter()
372 .map(|peer| peer.user_id)
373 .collect();
374 user_store
375 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
376 .await?;
377 let mut collaborators = HashMap::default();
378 for message in response.collaborators {
379 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
380 collaborators.insert(collaborator.peer_id, collaborator);
381 }
382
383 this.update(cx, |this, _| {
384 this.collaborators = collaborators;
385 });
386
387 Ok(this)
388 }
389
390 #[cfg(any(test, feature = "test-support"))]
391 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
392 let languages = Arc::new(LanguageRegistry::new());
393 let http_client = client::test::FakeHttpClient::with_404_response();
394 let client = client::Client::new(http_client.clone());
395 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
396 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
397 }
398
399 #[cfg(any(test, feature = "test-support"))]
400 pub fn shared_buffer(&self, peer_id: PeerId, remote_id: u64) -> Option<ModelHandle<Buffer>> {
401 self.shared_buffers
402 .get(&peer_id)
403 .and_then(|buffers| buffers.get(&remote_id))
404 .cloned()
405 }
406
407 #[cfg(any(test, feature = "test-support"))]
408 pub fn has_buffered_operations(&self, cx: &AppContext) -> bool {
409 self.buffers_state
410 .borrow()
411 .open_buffers
412 .values()
413 .any(|buffer| match buffer {
414 OpenBuffer::Loaded(buffer) => buffer
415 .upgrade(cx)
416 .map_or(false, |buffer| buffer.read(cx).deferred_ops_len() > 0),
417 OpenBuffer::Loading(_) => true,
418 })
419 }
420
421 #[cfg(any(test, feature = "test-support"))]
422 pub fn languages(&self) -> &Arc<LanguageRegistry> {
423 &self.languages
424 }
425
426 pub fn fs(&self) -> &Arc<dyn Fs> {
427 &self.fs
428 }
429
430 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
431 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
432 *remote_id_tx.borrow_mut() = remote_id;
433 }
434
435 self.subscriptions.clear();
436 if let Some(remote_id) = remote_id {
437 self.subscriptions
438 .push(self.client.add_model_for_remote_entity(remote_id, cx));
439 }
440 }
441
442 pub fn remote_id(&self) -> Option<u64> {
443 match &self.client_state {
444 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
445 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
446 }
447 }
448
449 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
450 let mut id = None;
451 let mut watch = None;
452 match &self.client_state {
453 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
454 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
455 }
456
457 async move {
458 if let Some(id) = id {
459 return id;
460 }
461 let mut watch = watch.unwrap();
462 loop {
463 let id = *watch.borrow();
464 if let Some(id) = id {
465 return id;
466 }
467 watch.recv().await;
468 }
469 }
470 }
471
472 pub fn replica_id(&self) -> ReplicaId {
473 match &self.client_state {
474 ProjectClientState::Local { .. } => 0,
475 ProjectClientState::Remote { replica_id, .. } => *replica_id,
476 }
477 }
478
479 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
480 &self.collaborators
481 }
482
483 pub fn worktrees<'a>(
484 &'a self,
485 cx: &'a AppContext,
486 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
487 self.worktrees
488 .iter()
489 .filter_map(move |worktree| worktree.upgrade(cx))
490 }
491
492 pub fn strong_worktrees<'a>(
493 &'a self,
494 cx: &'a AppContext,
495 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
496 self.worktrees.iter().filter_map(|worktree| {
497 worktree.upgrade(cx).and_then(|worktree| {
498 if worktree.read(cx).is_weak() {
499 None
500 } else {
501 Some(worktree)
502 }
503 })
504 })
505 }
506
507 pub fn worktree_for_id(
508 &self,
509 id: WorktreeId,
510 cx: &AppContext,
511 ) -> Option<ModelHandle<Worktree>> {
512 self.worktrees(cx)
513 .find(|worktree| worktree.read(cx).id() == id)
514 }
515
516 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
517 let rpc = self.client.clone();
518 cx.spawn(|this, mut cx| async move {
519 let project_id = this.update(&mut cx, |this, _| {
520 if let ProjectClientState::Local {
521 is_shared,
522 remote_id_rx,
523 ..
524 } = &mut this.client_state
525 {
526 *is_shared = true;
527 remote_id_rx
528 .borrow()
529 .ok_or_else(|| anyhow!("no project id"))
530 } else {
531 Err(anyhow!("can't share a remote project"))
532 }
533 })?;
534
535 rpc.request(proto::ShareProject { project_id }).await?;
536 let mut tasks = Vec::new();
537 this.update(&mut cx, |this, cx| {
538 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
539 worktree.update(cx, |worktree, cx| {
540 let worktree = worktree.as_local_mut().unwrap();
541 tasks.push(worktree.share(project_id, cx));
542 });
543 }
544 });
545 for task in tasks {
546 task.await?;
547 }
548 this.update(&mut cx, |_, cx| cx.notify());
549 Ok(())
550 })
551 }
552
553 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
554 let rpc = self.client.clone();
555 cx.spawn(|this, mut cx| async move {
556 let project_id = this.update(&mut cx, |this, _| {
557 if let ProjectClientState::Local {
558 is_shared,
559 remote_id_rx,
560 ..
561 } = &mut this.client_state
562 {
563 *is_shared = false;
564 remote_id_rx
565 .borrow()
566 .ok_or_else(|| anyhow!("no project id"))
567 } else {
568 Err(anyhow!("can't share a remote project"))
569 }
570 })?;
571
572 rpc.send(proto::UnshareProject { project_id })?;
573 this.update(&mut cx, |this, cx| {
574 this.collaborators.clear();
575 this.shared_buffers.clear();
576 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
577 worktree.update(cx, |worktree, _| {
578 worktree.as_local_mut().unwrap().unshare();
579 });
580 }
581 cx.notify()
582 });
583 Ok(())
584 })
585 }
586
587 pub fn is_read_only(&self) -> bool {
588 match &self.client_state {
589 ProjectClientState::Local { .. } => false,
590 ProjectClientState::Remote {
591 sharing_has_stopped,
592 ..
593 } => *sharing_has_stopped,
594 }
595 }
596
597 pub fn is_local(&self) -> bool {
598 match &self.client_state {
599 ProjectClientState::Local { .. } => true,
600 ProjectClientState::Remote { .. } => false,
601 }
602 }
603
604 pub fn is_remote(&self) -> bool {
605 !self.is_local()
606 }
607
608 pub fn open_buffer(
609 &mut self,
610 path: impl Into<ProjectPath>,
611 cx: &mut ModelContext<Self>,
612 ) -> Task<Result<ModelHandle<Buffer>>> {
613 let project_path = path.into();
614 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
615 worktree
616 } else {
617 return Task::ready(Err(anyhow!("no such worktree")));
618 };
619
620 // If there is already a buffer for the given path, then return it.
621 let existing_buffer = self.get_open_buffer(&project_path, cx);
622 if let Some(existing_buffer) = existing_buffer {
623 return Task::ready(Ok(existing_buffer));
624 }
625
626 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
627 // If the given path is already being loaded, then wait for that existing
628 // task to complete and return the same buffer.
629 hash_map::Entry::Occupied(e) => e.get().clone(),
630
631 // Otherwise, record the fact that this path is now being loaded.
632 hash_map::Entry::Vacant(entry) => {
633 let (mut tx, rx) = postage::watch::channel();
634 entry.insert(rx.clone());
635
636 let load_buffer = if worktree.read(cx).is_local() {
637 self.open_local_buffer(&project_path.path, &worktree, cx)
638 } else {
639 self.open_remote_buffer(&project_path.path, &worktree, cx)
640 };
641
642 cx.spawn(move |this, mut cx| async move {
643 let load_result = load_buffer.await;
644 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
645 // Record the fact that the buffer is no longer loading.
646 this.loading_buffers.remove(&project_path);
647 let buffer = load_result.map_err(Arc::new)?;
648 Ok(buffer)
649 }));
650 })
651 .detach();
652 rx
653 }
654 };
655
656 cx.foreground().spawn(async move {
657 loop {
658 if let Some(result) = loading_watch.borrow().as_ref() {
659 match result {
660 Ok(buffer) => return Ok(buffer.clone()),
661 Err(error) => return Err(anyhow!("{}", error)),
662 }
663 }
664 loading_watch.recv().await;
665 }
666 })
667 }
668
669 fn open_local_buffer(
670 &mut self,
671 path: &Arc<Path>,
672 worktree: &ModelHandle<Worktree>,
673 cx: &mut ModelContext<Self>,
674 ) -> Task<Result<ModelHandle<Buffer>>> {
675 let load_buffer = worktree.update(cx, |worktree, cx| {
676 let worktree = worktree.as_local_mut().unwrap();
677 worktree.load_buffer(path, cx)
678 });
679 let worktree = worktree.downgrade();
680 cx.spawn(|this, mut cx| async move {
681 let buffer = load_buffer.await?;
682 let worktree = worktree
683 .upgrade(&cx)
684 .ok_or_else(|| anyhow!("worktree was removed"))?;
685 this.update(&mut cx, |this, cx| {
686 this.register_buffer(&buffer, Some(&worktree), cx)
687 })?;
688 Ok(buffer)
689 })
690 }
691
692 fn open_remote_buffer(
693 &mut self,
694 path: &Arc<Path>,
695 worktree: &ModelHandle<Worktree>,
696 cx: &mut ModelContext<Self>,
697 ) -> Task<Result<ModelHandle<Buffer>>> {
698 let rpc = self.client.clone();
699 let project_id = self.remote_id().unwrap();
700 let remote_worktree_id = worktree.read(cx).id();
701 let path = path.clone();
702 let path_string = path.to_string_lossy().to_string();
703 let request_handle = self.start_buffer_request(cx);
704 cx.spawn(|this, mut cx| async move {
705 let response = rpc
706 .request(proto::OpenBuffer {
707 project_id,
708 worktree_id: remote_worktree_id.to_proto(),
709 path: path_string,
710 })
711 .await?;
712 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
713
714 this.update(&mut cx, |this, cx| {
715 this.deserialize_buffer(buffer, request_handle, cx)
716 })
717 .await
718 })
719 }
720
721 fn open_local_buffer_via_lsp(
722 &mut self,
723 abs_path: lsp::Url,
724 lang_name: String,
725 lang_server: Arc<LanguageServer>,
726 cx: &mut ModelContext<Self>,
727 ) -> Task<Result<ModelHandle<Buffer>>> {
728 cx.spawn(|this, mut cx| async move {
729 let abs_path = abs_path
730 .to_file_path()
731 .map_err(|_| anyhow!("can't convert URI to path"))?;
732 let (worktree, relative_path) = if let Some(result) =
733 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
734 {
735 result
736 } else {
737 let worktree = this
738 .update(&mut cx, |this, cx| {
739 this.create_local_worktree(&abs_path, true, cx)
740 })
741 .await?;
742 this.update(&mut cx, |this, cx| {
743 this.language_servers
744 .insert((worktree.read(cx).id(), lang_name), lang_server);
745 });
746 (worktree, PathBuf::new())
747 };
748
749 let project_path = ProjectPath {
750 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
751 path: relative_path.into(),
752 };
753 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
754 .await
755 })
756 }
757
758 fn start_buffer_request(&self, cx: &AppContext) -> BufferRequestHandle {
759 BufferRequestHandle::new(self.buffers_state.clone(), cx)
760 }
761
762 pub fn save_buffer_as(
763 &self,
764 buffer: ModelHandle<Buffer>,
765 abs_path: PathBuf,
766 cx: &mut ModelContext<Project>,
767 ) -> Task<Result<()>> {
768 let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
769 cx.spawn(|this, mut cx| async move {
770 let (worktree, path) = worktree_task.await?;
771 worktree
772 .update(&mut cx, |worktree, cx| {
773 worktree
774 .as_local_mut()
775 .unwrap()
776 .save_buffer_as(buffer.clone(), path, cx)
777 })
778 .await?;
779 this.update(&mut cx, |this, cx| {
780 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
781 });
782 Ok(())
783 })
784 }
785
786 #[cfg(any(test, feature = "test-support"))]
787 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
788 let path = path.into();
789 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
790 self.buffers_state
791 .borrow()
792 .open_buffers
793 .iter()
794 .any(|(_, buffer)| {
795 if let Some(buffer) = buffer.upgrade(cx) {
796 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
797 if file.worktree == worktree && file.path() == &path.path {
798 return true;
799 }
800 }
801 }
802 false
803 })
804 } else {
805 false
806 }
807 }
808
809 pub fn get_open_buffer(
810 &mut self,
811 path: &ProjectPath,
812 cx: &mut ModelContext<Self>,
813 ) -> Option<ModelHandle<Buffer>> {
814 let mut result = None;
815 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
816 self.buffers_state
817 .borrow_mut()
818 .open_buffers
819 .retain(|_, buffer| {
820 if let Some(buffer) = buffer.upgrade(cx) {
821 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
822 if file.worktree == worktree && file.path() == &path.path {
823 result = Some(buffer);
824 }
825 }
826 true
827 } else {
828 false
829 }
830 });
831 result
832 }
833
834 fn register_buffer(
835 &mut self,
836 buffer: &ModelHandle<Buffer>,
837 worktree: Option<&ModelHandle<Worktree>>,
838 cx: &mut ModelContext<Self>,
839 ) -> Result<()> {
840 let remote_id = buffer.read(cx).remote_id();
841 match self
842 .buffers_state
843 .borrow_mut()
844 .open_buffers
845 .insert(remote_id, OpenBuffer::Loaded(buffer.downgrade()))
846 {
847 None => {}
848 Some(OpenBuffer::Loading(operations)) => {
849 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
850 }
851 Some(OpenBuffer::Loaded(existing_handle)) => {
852 if existing_handle.upgrade(cx).is_some() {
853 Err(anyhow!(
854 "already registered buffer with remote id {}",
855 remote_id
856 ))?
857 }
858 }
859 }
860 self.assign_language_to_buffer(&buffer, worktree, cx);
861 Ok(())
862 }
863
864 fn assign_language_to_buffer(
865 &mut self,
866 buffer: &ModelHandle<Buffer>,
867 worktree: Option<&ModelHandle<Worktree>>,
868 cx: &mut ModelContext<Self>,
869 ) -> Option<()> {
870 let (path, full_path) = {
871 let file = buffer.read(cx).file()?;
872 (file.path().clone(), file.full_path(cx))
873 };
874
875 // If the buffer has a language, set it and start/assign the language server
876 if let Some(language) = self.languages.select_language(&full_path) {
877 buffer.update(cx, |buffer, cx| {
878 buffer.set_language(Some(language.clone()), cx);
879 });
880
881 // For local worktrees, start a language server if needed.
882 // Also assign the language server and any previously stored diagnostics to the buffer.
883 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
884 let worktree_id = local_worktree.id();
885 let worktree_abs_path = local_worktree.abs_path().clone();
886 let buffer = buffer.downgrade();
887 let language_server =
888 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
889
890 cx.spawn_weak(|_, mut cx| async move {
891 if let Some(language_server) = language_server.await {
892 if let Some(buffer) = buffer.upgrade(&cx) {
893 buffer.update(&mut cx, |buffer, cx| {
894 buffer.set_language_server(Some(language_server), cx);
895 });
896 }
897 }
898 })
899 .detach();
900 }
901 }
902
903 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
904 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
905 buffer.update(cx, |buffer, cx| {
906 buffer.update_diagnostics(diagnostics, None, cx).log_err();
907 });
908 }
909 }
910
911 None
912 }
913
914 fn start_language_server(
915 &mut self,
916 worktree_id: WorktreeId,
917 worktree_path: Arc<Path>,
918 language: Arc<Language>,
919 cx: &mut ModelContext<Self>,
920 ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
921 enum LspEvent {
922 DiagnosticsStart,
923 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
924 DiagnosticsFinish,
925 }
926
927 let key = (worktree_id, language.name().to_string());
928 self.started_language_servers
929 .entry(key.clone())
930 .or_insert_with(|| {
931 let language_server = self.languages.start_language_server(
932 &language,
933 worktree_path,
934 self.client.http_client(),
935 cx,
936 );
937 let rpc = self.client.clone();
938 cx.spawn_weak(|this, mut cx| async move {
939 let language_server = language_server?.await.log_err()?;
940 if let Some(this) = this.upgrade(&cx) {
941 this.update(&mut cx, |this, _| {
942 this.language_servers.insert(key, language_server.clone());
943 });
944 }
945
946 let disk_based_sources = language
947 .disk_based_diagnostic_sources()
948 .cloned()
949 .unwrap_or_default();
950 let disk_based_diagnostics_progress_token =
951 language.disk_based_diagnostics_progress_token().cloned();
952 let has_disk_based_diagnostic_progress_token =
953 disk_based_diagnostics_progress_token.is_some();
954 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
955
956 // Listen for `PublishDiagnostics` notifications.
957 language_server
958 .on_notification::<lsp::notification::PublishDiagnostics, _>({
959 let diagnostics_tx = diagnostics_tx.clone();
960 move |params| {
961 if !has_disk_based_diagnostic_progress_token {
962 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
963 }
964 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
965 .ok();
966 if !has_disk_based_diagnostic_progress_token {
967 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
968 }
969 }
970 })
971 .detach();
972
973 // Listen for `Progress` notifications. Send an event when the language server
974 // transitions between running jobs and not running any jobs.
975 let mut running_jobs_for_this_server: i32 = 0;
976 language_server
977 .on_notification::<lsp::notification::Progress, _>(move |params| {
978 let token = match params.token {
979 lsp::NumberOrString::Number(_) => None,
980 lsp::NumberOrString::String(token) => Some(token),
981 };
982
983 if token == disk_based_diagnostics_progress_token {
984 match params.value {
985 lsp::ProgressParamsValue::WorkDone(progress) => {
986 match progress {
987 lsp::WorkDoneProgress::Begin(_) => {
988 running_jobs_for_this_server += 1;
989 if running_jobs_for_this_server == 1 {
990 block_on(
991 diagnostics_tx
992 .send(LspEvent::DiagnosticsStart),
993 )
994 .ok();
995 }
996 }
997 lsp::WorkDoneProgress::End(_) => {
998 running_jobs_for_this_server -= 1;
999 if running_jobs_for_this_server == 0 {
1000 block_on(
1001 diagnostics_tx
1002 .send(LspEvent::DiagnosticsFinish),
1003 )
1004 .ok();
1005 }
1006 }
1007 _ => {}
1008 }
1009 }
1010 }
1011 }
1012 })
1013 .detach();
1014
1015 // Process all the LSP events.
1016 cx.spawn(|mut cx| async move {
1017 while let Ok(message) = diagnostics_rx.recv().await {
1018 let this = this.upgrade(&cx)?;
1019 match message {
1020 LspEvent::DiagnosticsStart => {
1021 this.update(&mut cx, |this, cx| {
1022 this.disk_based_diagnostics_started(cx);
1023 if let Some(project_id) = this.remote_id() {
1024 rpc.send(proto::DiskBasedDiagnosticsUpdating {
1025 project_id,
1026 })
1027 .log_err();
1028 }
1029 });
1030 }
1031 LspEvent::DiagnosticsUpdate(mut params) => {
1032 language.process_diagnostics(&mut params);
1033 this.update(&mut cx, |this, cx| {
1034 this.update_diagnostics(params, &disk_based_sources, cx)
1035 .log_err();
1036 });
1037 }
1038 LspEvent::DiagnosticsFinish => {
1039 this.update(&mut cx, |this, cx| {
1040 this.disk_based_diagnostics_finished(cx);
1041 if let Some(project_id) = this.remote_id() {
1042 rpc.send(proto::DiskBasedDiagnosticsUpdated {
1043 project_id,
1044 })
1045 .log_err();
1046 }
1047 });
1048 }
1049 }
1050 }
1051 Some(())
1052 })
1053 .detach();
1054
1055 Some(language_server)
1056 })
1057 .shared()
1058 })
1059 .clone()
1060 }
1061
1062 pub fn update_diagnostics(
1063 &mut self,
1064 params: lsp::PublishDiagnosticsParams,
1065 disk_based_sources: &HashSet<String>,
1066 cx: &mut ModelContext<Self>,
1067 ) -> Result<()> {
1068 let abs_path = params
1069 .uri
1070 .to_file_path()
1071 .map_err(|_| anyhow!("URI is not a file"))?;
1072 let mut next_group_id = 0;
1073 let mut diagnostics = Vec::default();
1074 let mut primary_diagnostic_group_ids = HashMap::default();
1075 let mut sources_by_group_id = HashMap::default();
1076 let mut supporting_diagnostic_severities = HashMap::default();
1077 for diagnostic in ¶ms.diagnostics {
1078 let source = diagnostic.source.as_ref();
1079 let code = diagnostic.code.as_ref().map(|code| match code {
1080 lsp::NumberOrString::Number(code) => code.to_string(),
1081 lsp::NumberOrString::String(code) => code.clone(),
1082 });
1083 let range = range_from_lsp(diagnostic.range);
1084 let is_supporting = diagnostic
1085 .related_information
1086 .as_ref()
1087 .map_or(false, |infos| {
1088 infos.iter().any(|info| {
1089 primary_diagnostic_group_ids.contains_key(&(
1090 source,
1091 code.clone(),
1092 range_from_lsp(info.location.range),
1093 ))
1094 })
1095 });
1096
1097 if is_supporting {
1098 if let Some(severity) = diagnostic.severity {
1099 supporting_diagnostic_severities
1100 .insert((source, code.clone(), range), severity);
1101 }
1102 } else {
1103 let group_id = post_inc(&mut next_group_id);
1104 let is_disk_based =
1105 source.map_or(false, |source| disk_based_sources.contains(source));
1106
1107 sources_by_group_id.insert(group_id, source);
1108 primary_diagnostic_group_ids
1109 .insert((source, code.clone(), range.clone()), group_id);
1110
1111 diagnostics.push(DiagnosticEntry {
1112 range,
1113 diagnostic: Diagnostic {
1114 code: code.clone(),
1115 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1116 message: diagnostic.message.clone(),
1117 group_id,
1118 is_primary: true,
1119 is_valid: true,
1120 is_disk_based,
1121 },
1122 });
1123 if let Some(infos) = &diagnostic.related_information {
1124 for info in infos {
1125 if info.location.uri == params.uri && !info.message.is_empty() {
1126 let range = range_from_lsp(info.location.range);
1127 diagnostics.push(DiagnosticEntry {
1128 range,
1129 diagnostic: Diagnostic {
1130 code: code.clone(),
1131 severity: DiagnosticSeverity::INFORMATION,
1132 message: info.message.clone(),
1133 group_id,
1134 is_primary: false,
1135 is_valid: true,
1136 is_disk_based,
1137 },
1138 });
1139 }
1140 }
1141 }
1142 }
1143 }
1144
1145 for entry in &mut diagnostics {
1146 let diagnostic = &mut entry.diagnostic;
1147 if !diagnostic.is_primary {
1148 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1149 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1150 source,
1151 diagnostic.code.clone(),
1152 entry.range.clone(),
1153 )) {
1154 diagnostic.severity = severity;
1155 }
1156 }
1157 }
1158
1159 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1160 Ok(())
1161 }
1162
1163 pub fn update_diagnostic_entries(
1164 &mut self,
1165 abs_path: PathBuf,
1166 version: Option<i32>,
1167 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1168 cx: &mut ModelContext<Project>,
1169 ) -> Result<(), anyhow::Error> {
1170 let (worktree, relative_path) = self
1171 .find_local_worktree(&abs_path, cx)
1172 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1173 let project_path = ProjectPath {
1174 worktree_id: worktree.read(cx).id(),
1175 path: relative_path.into(),
1176 };
1177
1178 for buffer in self.buffers_state.borrow().open_buffers.values() {
1179 if let Some(buffer) = buffer.upgrade(cx) {
1180 if buffer
1181 .read(cx)
1182 .file()
1183 .map_or(false, |file| *file.path() == project_path.path)
1184 {
1185 buffer.update(cx, |buffer, cx| {
1186 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1187 })?;
1188 break;
1189 }
1190 }
1191 }
1192 worktree.update(cx, |worktree, cx| {
1193 worktree
1194 .as_local_mut()
1195 .ok_or_else(|| anyhow!("not a local worktree"))?
1196 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1197 })?;
1198 cx.emit(Event::DiagnosticsUpdated(project_path));
1199 Ok(())
1200 }
1201
1202 pub fn format(
1203 &self,
1204 buffers: HashSet<ModelHandle<Buffer>>,
1205 push_to_history: bool,
1206 cx: &mut ModelContext<Project>,
1207 ) -> Task<Result<ProjectTransaction>> {
1208 let mut local_buffers = Vec::new();
1209 let mut remote_buffers = None;
1210 for buffer_handle in buffers {
1211 let buffer = buffer_handle.read(cx);
1212 let worktree;
1213 if let Some(file) = File::from_dyn(buffer.file()) {
1214 worktree = file.worktree.clone();
1215 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1216 let lang_server;
1217 if let Some(lang) = buffer.language() {
1218 if let Some(server) = self
1219 .language_servers
1220 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1221 {
1222 lang_server = server.clone();
1223 } else {
1224 return Task::ready(Ok(Default::default()));
1225 };
1226 } else {
1227 return Task::ready(Ok(Default::default()));
1228 }
1229
1230 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1231 } else {
1232 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1233 }
1234 } else {
1235 return Task::ready(Ok(Default::default()));
1236 }
1237 }
1238
1239 let remote_buffers = self.remote_id().zip(remote_buffers);
1240 let client = self.client.clone();
1241 let request_handle = self.start_buffer_request(cx);
1242
1243 cx.spawn(|this, mut cx| async move {
1244 let mut project_transaction = ProjectTransaction::default();
1245
1246 if let Some((project_id, remote_buffers)) = remote_buffers {
1247 let response = client
1248 .request(proto::FormatBuffers {
1249 project_id,
1250 buffer_ids: remote_buffers
1251 .iter()
1252 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1253 .collect(),
1254 })
1255 .await?
1256 .transaction
1257 .ok_or_else(|| anyhow!("missing transaction"))?;
1258 project_transaction = this
1259 .update(&mut cx, |this, cx| {
1260 this.deserialize_project_transaction(
1261 response,
1262 push_to_history,
1263 request_handle,
1264 cx,
1265 )
1266 })
1267 .await?;
1268 }
1269
1270 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1271 let lsp_edits = lang_server
1272 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1273 text_document: lsp::TextDocumentIdentifier::new(
1274 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1275 ),
1276 options: Default::default(),
1277 work_done_progress_params: Default::default(),
1278 })
1279 .await?;
1280
1281 if let Some(lsp_edits) = lsp_edits {
1282 let edits = buffer
1283 .update(&mut cx, |buffer, cx| {
1284 buffer.edits_from_lsp(lsp_edits, None, cx)
1285 })
1286 .await?;
1287 buffer.update(&mut cx, |buffer, cx| {
1288 buffer.finalize_last_transaction();
1289 buffer.start_transaction();
1290 for (range, text) in edits {
1291 buffer.edit([range], text, cx);
1292 }
1293 if buffer.end_transaction(cx).is_some() {
1294 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1295 if !push_to_history {
1296 buffer.forget_transaction(transaction.id);
1297 }
1298 project_transaction.0.insert(cx.handle(), transaction);
1299 }
1300 });
1301 }
1302 }
1303
1304 Ok(project_transaction)
1305 })
1306 }
1307
1308 pub fn definition<T: ToPointUtf16>(
1309 &self,
1310 buffer: &ModelHandle<Buffer>,
1311 position: T,
1312 cx: &mut ModelContext<Self>,
1313 ) -> Task<Result<Vec<Location>>> {
1314 let position = position.to_point_utf16(buffer.read(cx));
1315 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1316 }
1317
1318 pub fn references<T: ToPointUtf16>(
1319 &self,
1320 buffer: &ModelHandle<Buffer>,
1321 position: T,
1322 cx: &mut ModelContext<Self>,
1323 ) -> Task<Result<Vec<Location>>> {
1324 let position = position.to_point_utf16(buffer.read(cx));
1325 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1326 }
1327
1328 pub fn document_highlights<T: ToPointUtf16>(
1329 &self,
1330 buffer: &ModelHandle<Buffer>,
1331 position: T,
1332 cx: &mut ModelContext<Self>,
1333 ) -> Task<Result<Vec<DocumentHighlight>>> {
1334 let position = position.to_point_utf16(buffer.read(cx));
1335 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1336 }
1337
1338 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
1339 if self.is_local() {
1340 let mut language_servers = HashMap::default();
1341 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
1342 if let Some((worktree, language)) = self
1343 .worktree_for_id(*worktree_id, cx)
1344 .and_then(|worktree| worktree.read(cx).as_local())
1345 .zip(self.languages.get_language(language_name))
1346 {
1347 language_servers
1348 .entry(Arc::as_ptr(language_server))
1349 .or_insert((
1350 language_server.clone(),
1351 *worktree_id,
1352 worktree.abs_path().clone(),
1353 language.clone(),
1354 ));
1355 }
1356 }
1357
1358 let mut requests = Vec::new();
1359 for (language_server, _, _, _) in language_servers.values() {
1360 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
1361 lsp::WorkspaceSymbolParams {
1362 query: query.to_string(),
1363 ..Default::default()
1364 },
1365 ));
1366 }
1367
1368 cx.spawn_weak(|this, cx| async move {
1369 let responses = futures::future::try_join_all(requests).await?;
1370
1371 let mut symbols = Vec::new();
1372 if let Some(this) = this.upgrade(&cx) {
1373 this.read_with(&cx, |this, cx| {
1374 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
1375 language_servers.into_values().zip(responses)
1376 {
1377 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
1378 |lsp_symbol| {
1379 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
1380 let mut worktree_id = source_worktree_id;
1381 let path;
1382 if let Some((worktree, rel_path)) =
1383 this.find_local_worktree(&abs_path, cx)
1384 {
1385 worktree_id = worktree.read(cx).id();
1386 path = rel_path;
1387 } else {
1388 path = relativize_path(&worktree_abs_path, &abs_path);
1389 }
1390
1391 let label = language
1392 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
1393 .unwrap_or_else(|| {
1394 CodeLabel::plain(lsp_symbol.name.clone(), None)
1395 });
1396 let signature = this.symbol_signature(worktree_id, &path);
1397
1398 Some(Symbol {
1399 source_worktree_id,
1400 worktree_id,
1401 language_name: language.name().to_string(),
1402 name: lsp_symbol.name,
1403 kind: lsp_symbol.kind,
1404 label,
1405 path,
1406 range: range_from_lsp(lsp_symbol.location.range),
1407 signature,
1408 })
1409 },
1410 ));
1411 }
1412 })
1413 }
1414
1415 Ok(symbols)
1416 })
1417 } else if let Some(project_id) = self.remote_id() {
1418 let request = self.client.request(proto::GetProjectSymbols {
1419 project_id,
1420 query: query.to_string(),
1421 });
1422 cx.spawn_weak(|this, cx| async move {
1423 let response = request.await?;
1424 let mut symbols = Vec::new();
1425 if let Some(this) = this.upgrade(&cx) {
1426 this.read_with(&cx, |this, _| {
1427 symbols.extend(
1428 response
1429 .symbols
1430 .into_iter()
1431 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
1432 );
1433 })
1434 }
1435 Ok(symbols)
1436 })
1437 } else {
1438 Task::ready(Ok(Default::default()))
1439 }
1440 }
1441
1442 pub fn open_buffer_for_symbol(
1443 &mut self,
1444 symbol: &Symbol,
1445 cx: &mut ModelContext<Self>,
1446 ) -> Task<Result<ModelHandle<Buffer>>> {
1447 if self.is_local() {
1448 let language_server = if let Some(server) = self
1449 .language_servers
1450 .get(&(symbol.source_worktree_id, symbol.language_name.clone()))
1451 {
1452 server.clone()
1453 } else {
1454 return Task::ready(Err(anyhow!(
1455 "language server for worktree and language not found"
1456 )));
1457 };
1458
1459 let worktree_abs_path = if let Some(worktree_abs_path) = self
1460 .worktree_for_id(symbol.worktree_id, cx)
1461 .and_then(|worktree| worktree.read(cx).as_local())
1462 .map(|local_worktree| local_worktree.abs_path())
1463 {
1464 worktree_abs_path
1465 } else {
1466 return Task::ready(Err(anyhow!("worktree not found for symbol")));
1467 };
1468 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
1469 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
1470 uri
1471 } else {
1472 return Task::ready(Err(anyhow!("invalid symbol path")));
1473 };
1474
1475 self.open_local_buffer_via_lsp(
1476 symbol_uri,
1477 symbol.language_name.clone(),
1478 language_server,
1479 cx,
1480 )
1481 } else if let Some(project_id) = self.remote_id() {
1482 let request_handle = self.start_buffer_request(cx);
1483 let request = self.client.request(proto::OpenBufferForSymbol {
1484 project_id,
1485 symbol: Some(serialize_symbol(symbol)),
1486 });
1487 cx.spawn(|this, mut cx| async move {
1488 let response = request.await?;
1489 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
1490 this.update(&mut cx, |this, cx| {
1491 this.deserialize_buffer(buffer, request_handle, cx)
1492 })
1493 .await
1494 })
1495 } else {
1496 Task::ready(Err(anyhow!("project does not have a remote id")))
1497 }
1498 }
1499
1500 pub fn completions<T: ToPointUtf16>(
1501 &self,
1502 source_buffer_handle: &ModelHandle<Buffer>,
1503 position: T,
1504 cx: &mut ModelContext<Self>,
1505 ) -> Task<Result<Vec<Completion>>> {
1506 let source_buffer_handle = source_buffer_handle.clone();
1507 let source_buffer = source_buffer_handle.read(cx);
1508 let buffer_id = source_buffer.remote_id();
1509 let language = source_buffer.language().cloned();
1510 let worktree;
1511 let buffer_abs_path;
1512 if let Some(file) = File::from_dyn(source_buffer.file()) {
1513 worktree = file.worktree.clone();
1514 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1515 } else {
1516 return Task::ready(Ok(Default::default()));
1517 };
1518
1519 let position = position.to_point_utf16(source_buffer);
1520 let anchor = source_buffer.anchor_after(position);
1521
1522 if worktree.read(cx).as_local().is_some() {
1523 let buffer_abs_path = buffer_abs_path.unwrap();
1524 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1525 server
1526 } else {
1527 return Task::ready(Ok(Default::default()));
1528 };
1529
1530 cx.spawn(|_, cx| async move {
1531 let completions = lang_server
1532 .request::<lsp::request::Completion>(lsp::CompletionParams {
1533 text_document_position: lsp::TextDocumentPositionParams::new(
1534 lsp::TextDocumentIdentifier::new(
1535 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1536 ),
1537 position.to_lsp_position(),
1538 ),
1539 context: Default::default(),
1540 work_done_progress_params: Default::default(),
1541 partial_result_params: Default::default(),
1542 })
1543 .await
1544 .context("lsp completion request failed")?;
1545
1546 let completions = if let Some(completions) = completions {
1547 match completions {
1548 lsp::CompletionResponse::Array(completions) => completions,
1549 lsp::CompletionResponse::List(list) => list.items,
1550 }
1551 } else {
1552 Default::default()
1553 };
1554
1555 source_buffer_handle.read_with(&cx, |this, _| {
1556 Ok(completions
1557 .into_iter()
1558 .filter_map(|lsp_completion| {
1559 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1560 lsp::CompletionTextEdit::Edit(edit) => {
1561 (range_from_lsp(edit.range), edit.new_text.clone())
1562 }
1563 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1564 log::info!("unsupported insert/replace completion");
1565 return None;
1566 }
1567 };
1568
1569 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1570 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1571 if clipped_start == old_range.start && clipped_end == old_range.end {
1572 Some(Completion {
1573 old_range: this.anchor_before(old_range.start)
1574 ..this.anchor_after(old_range.end),
1575 new_text,
1576 label: language
1577 .as_ref()
1578 .and_then(|l| l.label_for_completion(&lsp_completion))
1579 .unwrap_or_else(|| {
1580 CodeLabel::plain(
1581 lsp_completion.label.clone(),
1582 lsp_completion.filter_text.as_deref(),
1583 )
1584 }),
1585 lsp_completion,
1586 })
1587 } else {
1588 None
1589 }
1590 })
1591 .collect())
1592 })
1593 })
1594 } else if let Some(project_id) = self.remote_id() {
1595 let rpc = self.client.clone();
1596 let message = proto::GetCompletions {
1597 project_id,
1598 buffer_id,
1599 position: Some(language::proto::serialize_anchor(&anchor)),
1600 version: (&source_buffer.version()).into(),
1601 };
1602 cx.spawn_weak(|_, mut cx| async move {
1603 let response = rpc.request(message).await?;
1604
1605 source_buffer_handle
1606 .update(&mut cx, |buffer, _| {
1607 buffer.wait_for_version(response.version.into())
1608 })
1609 .await;
1610
1611 response
1612 .completions
1613 .into_iter()
1614 .map(|completion| {
1615 language::proto::deserialize_completion(completion, language.as_ref())
1616 })
1617 .collect()
1618 })
1619 } else {
1620 Task::ready(Ok(Default::default()))
1621 }
1622 }
1623
1624 pub fn apply_additional_edits_for_completion(
1625 &self,
1626 buffer_handle: ModelHandle<Buffer>,
1627 completion: Completion,
1628 push_to_history: bool,
1629 cx: &mut ModelContext<Self>,
1630 ) -> Task<Result<Option<Transaction>>> {
1631 let buffer = buffer_handle.read(cx);
1632 let buffer_id = buffer.remote_id();
1633
1634 if self.is_local() {
1635 let lang_server = if let Some(language_server) = buffer.language_server() {
1636 language_server.clone()
1637 } else {
1638 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1639 };
1640
1641 cx.spawn(|_, mut cx| async move {
1642 let resolved_completion = lang_server
1643 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1644 .await?;
1645 if let Some(edits) = resolved_completion.additional_text_edits {
1646 let edits = buffer_handle
1647 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1648 .await?;
1649 buffer_handle.update(&mut cx, |buffer, cx| {
1650 buffer.finalize_last_transaction();
1651 buffer.start_transaction();
1652 for (range, text) in edits {
1653 buffer.edit([range], text, cx);
1654 }
1655 let transaction = if buffer.end_transaction(cx).is_some() {
1656 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1657 if !push_to_history {
1658 buffer.forget_transaction(transaction.id);
1659 }
1660 Some(transaction)
1661 } else {
1662 None
1663 };
1664 Ok(transaction)
1665 })
1666 } else {
1667 Ok(None)
1668 }
1669 })
1670 } else if let Some(project_id) = self.remote_id() {
1671 let client = self.client.clone();
1672 cx.spawn(|_, mut cx| async move {
1673 let response = client
1674 .request(proto::ApplyCompletionAdditionalEdits {
1675 project_id,
1676 buffer_id,
1677 completion: Some(language::proto::serialize_completion(&completion)),
1678 })
1679 .await?;
1680
1681 if let Some(transaction) = response.transaction {
1682 let transaction = language::proto::deserialize_transaction(transaction)?;
1683 buffer_handle
1684 .update(&mut cx, |buffer, _| {
1685 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1686 })
1687 .await;
1688 if push_to_history {
1689 buffer_handle.update(&mut cx, |buffer, _| {
1690 buffer.push_transaction(transaction.clone(), Instant::now());
1691 });
1692 }
1693 Ok(Some(transaction))
1694 } else {
1695 Ok(None)
1696 }
1697 })
1698 } else {
1699 Task::ready(Err(anyhow!("project does not have a remote id")))
1700 }
1701 }
1702
1703 pub fn code_actions<T: ToOffset>(
1704 &self,
1705 buffer_handle: &ModelHandle<Buffer>,
1706 range: Range<T>,
1707 cx: &mut ModelContext<Self>,
1708 ) -> Task<Result<Vec<CodeAction>>> {
1709 let buffer_handle = buffer_handle.clone();
1710 let buffer = buffer_handle.read(cx);
1711 let buffer_id = buffer.remote_id();
1712 let worktree;
1713 let buffer_abs_path;
1714 if let Some(file) = File::from_dyn(buffer.file()) {
1715 worktree = file.worktree.clone();
1716 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1717 } else {
1718 return Task::ready(Ok(Default::default()));
1719 };
1720 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1721
1722 if worktree.read(cx).as_local().is_some() {
1723 let buffer_abs_path = buffer_abs_path.unwrap();
1724 let lang_name;
1725 let lang_server;
1726 if let Some(lang) = buffer.language() {
1727 lang_name = lang.name().to_string();
1728 if let Some(server) = self
1729 .language_servers
1730 .get(&(worktree.read(cx).id(), lang_name.clone()))
1731 {
1732 lang_server = server.clone();
1733 } else {
1734 return Task::ready(Ok(Default::default()));
1735 };
1736 } else {
1737 return Task::ready(Ok(Default::default()));
1738 }
1739
1740 let lsp_range = lsp::Range::new(
1741 range.start.to_point_utf16(buffer).to_lsp_position(),
1742 range.end.to_point_utf16(buffer).to_lsp_position(),
1743 );
1744 cx.foreground().spawn(async move {
1745 Ok(lang_server
1746 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1747 text_document: lsp::TextDocumentIdentifier::new(
1748 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1749 ),
1750 range: lsp_range,
1751 work_done_progress_params: Default::default(),
1752 partial_result_params: Default::default(),
1753 context: lsp::CodeActionContext {
1754 diagnostics: Default::default(),
1755 only: Some(vec![
1756 lsp::CodeActionKind::QUICKFIX,
1757 lsp::CodeActionKind::REFACTOR,
1758 lsp::CodeActionKind::REFACTOR_EXTRACT,
1759 ]),
1760 },
1761 })
1762 .await?
1763 .unwrap_or_default()
1764 .into_iter()
1765 .filter_map(|entry| {
1766 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1767 Some(CodeAction {
1768 range: range.clone(),
1769 lsp_action,
1770 })
1771 } else {
1772 None
1773 }
1774 })
1775 .collect())
1776 })
1777 } else if let Some(project_id) = self.remote_id() {
1778 let rpc = self.client.clone();
1779 cx.spawn_weak(|_, mut cx| async move {
1780 let response = rpc
1781 .request(proto::GetCodeActions {
1782 project_id,
1783 buffer_id,
1784 start: Some(language::proto::serialize_anchor(&range.start)),
1785 end: Some(language::proto::serialize_anchor(&range.end)),
1786 })
1787 .await?;
1788
1789 buffer_handle
1790 .update(&mut cx, |buffer, _| {
1791 buffer.wait_for_version(response.version.into())
1792 })
1793 .await;
1794
1795 response
1796 .actions
1797 .into_iter()
1798 .map(language::proto::deserialize_code_action)
1799 .collect()
1800 })
1801 } else {
1802 Task::ready(Ok(Default::default()))
1803 }
1804 }
1805
1806 pub fn apply_code_action(
1807 &self,
1808 buffer_handle: ModelHandle<Buffer>,
1809 mut action: CodeAction,
1810 push_to_history: bool,
1811 cx: &mut ModelContext<Self>,
1812 ) -> Task<Result<ProjectTransaction>> {
1813 if self.is_local() {
1814 let buffer = buffer_handle.read(cx);
1815 let lang_name = if let Some(lang) = buffer.language() {
1816 lang.name().to_string()
1817 } else {
1818 return Task::ready(Ok(Default::default()));
1819 };
1820 let lang_server = if let Some(language_server) = buffer.language_server() {
1821 language_server.clone()
1822 } else {
1823 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1824 };
1825 let range = action.range.to_point_utf16(buffer);
1826
1827 cx.spawn(|this, mut cx| async move {
1828 if let Some(lsp_range) = action
1829 .lsp_action
1830 .data
1831 .as_mut()
1832 .and_then(|d| d.get_mut("codeActionParams"))
1833 .and_then(|d| d.get_mut("range"))
1834 {
1835 *lsp_range = serde_json::to_value(&lsp::Range::new(
1836 range.start.to_lsp_position(),
1837 range.end.to_lsp_position(),
1838 ))
1839 .unwrap();
1840 action.lsp_action = lang_server
1841 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1842 .await?;
1843 } else {
1844 let actions = this
1845 .update(&mut cx, |this, cx| {
1846 this.code_actions(&buffer_handle, action.range, cx)
1847 })
1848 .await?;
1849 action.lsp_action = actions
1850 .into_iter()
1851 .find(|a| a.lsp_action.title == action.lsp_action.title)
1852 .ok_or_else(|| anyhow!("code action is outdated"))?
1853 .lsp_action;
1854 }
1855
1856 if let Some(edit) = action.lsp_action.edit {
1857 Self::deserialize_workspace_edit(
1858 this,
1859 edit,
1860 push_to_history,
1861 lang_name,
1862 lang_server,
1863 &mut cx,
1864 )
1865 .await
1866 } else {
1867 Ok(ProjectTransaction::default())
1868 }
1869 })
1870 } else if let Some(project_id) = self.remote_id() {
1871 let client = self.client.clone();
1872 let request_handle = self.start_buffer_request(cx);
1873 let request = proto::ApplyCodeAction {
1874 project_id,
1875 buffer_id: buffer_handle.read(cx).remote_id(),
1876 action: Some(language::proto::serialize_code_action(&action)),
1877 };
1878 cx.spawn(|this, mut cx| async move {
1879 let response = client
1880 .request(request)
1881 .await?
1882 .transaction
1883 .ok_or_else(|| anyhow!("missing transaction"))?;
1884 this.update(&mut cx, |this, cx| {
1885 this.deserialize_project_transaction(
1886 response,
1887 push_to_history,
1888 request_handle,
1889 cx,
1890 )
1891 })
1892 .await
1893 })
1894 } else {
1895 Task::ready(Err(anyhow!("project does not have a remote id")))
1896 }
1897 }
1898
1899 async fn deserialize_workspace_edit(
1900 this: ModelHandle<Self>,
1901 edit: lsp::WorkspaceEdit,
1902 push_to_history: bool,
1903 language_name: String,
1904 language_server: Arc<LanguageServer>,
1905 cx: &mut AsyncAppContext,
1906 ) -> Result<ProjectTransaction> {
1907 let fs = this.read_with(cx, |this, _| this.fs.clone());
1908 let mut operations = Vec::new();
1909 if let Some(document_changes) = edit.document_changes {
1910 match document_changes {
1911 lsp::DocumentChanges::Edits(edits) => {
1912 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
1913 }
1914 lsp::DocumentChanges::Operations(ops) => operations = ops,
1915 }
1916 } else if let Some(changes) = edit.changes {
1917 operations.extend(changes.into_iter().map(|(uri, edits)| {
1918 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1919 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1920 uri,
1921 version: None,
1922 },
1923 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1924 })
1925 }));
1926 }
1927
1928 let mut project_transaction = ProjectTransaction::default();
1929 for operation in operations {
1930 match operation {
1931 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1932 let abs_path = op
1933 .uri
1934 .to_file_path()
1935 .map_err(|_| anyhow!("can't convert URI to path"))?;
1936
1937 if let Some(parent_path) = abs_path.parent() {
1938 fs.create_dir(parent_path).await?;
1939 }
1940 if abs_path.ends_with("/") {
1941 fs.create_dir(&abs_path).await?;
1942 } else {
1943 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
1944 .await?;
1945 }
1946 }
1947 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1948 let source_abs_path = op
1949 .old_uri
1950 .to_file_path()
1951 .map_err(|_| anyhow!("can't convert URI to path"))?;
1952 let target_abs_path = op
1953 .new_uri
1954 .to_file_path()
1955 .map_err(|_| anyhow!("can't convert URI to path"))?;
1956 fs.rename(
1957 &source_abs_path,
1958 &target_abs_path,
1959 op.options.map(Into::into).unwrap_or_default(),
1960 )
1961 .await?;
1962 }
1963 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1964 let abs_path = op
1965 .uri
1966 .to_file_path()
1967 .map_err(|_| anyhow!("can't convert URI to path"))?;
1968 let options = op.options.map(Into::into).unwrap_or_default();
1969 if abs_path.ends_with("/") {
1970 fs.remove_dir(&abs_path, options).await?;
1971 } else {
1972 fs.remove_file(&abs_path, options).await?;
1973 }
1974 }
1975 lsp::DocumentChangeOperation::Edit(op) => {
1976 let buffer_to_edit = this
1977 .update(cx, |this, cx| {
1978 this.open_local_buffer_via_lsp(
1979 op.text_document.uri,
1980 language_name.clone(),
1981 language_server.clone(),
1982 cx,
1983 )
1984 })
1985 .await?;
1986
1987 let edits = buffer_to_edit
1988 .update(cx, |buffer, cx| {
1989 let edits = op.edits.into_iter().map(|edit| match edit {
1990 lsp::OneOf::Left(edit) => edit,
1991 lsp::OneOf::Right(edit) => edit.text_edit,
1992 });
1993 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1994 })
1995 .await?;
1996
1997 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
1998 buffer.finalize_last_transaction();
1999 buffer.start_transaction();
2000 for (range, text) in edits {
2001 buffer.edit([range], text, cx);
2002 }
2003 let transaction = if buffer.end_transaction(cx).is_some() {
2004 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2005 if !push_to_history {
2006 buffer.forget_transaction(transaction.id);
2007 }
2008 Some(transaction)
2009 } else {
2010 None
2011 };
2012
2013 transaction
2014 });
2015 if let Some(transaction) = transaction {
2016 project_transaction.0.insert(buffer_to_edit, transaction);
2017 }
2018 }
2019 }
2020 }
2021
2022 Ok(project_transaction)
2023 }
2024
2025 pub fn prepare_rename<T: ToPointUtf16>(
2026 &self,
2027 buffer: ModelHandle<Buffer>,
2028 position: T,
2029 cx: &mut ModelContext<Self>,
2030 ) -> Task<Result<Option<Range<Anchor>>>> {
2031 let position = position.to_point_utf16(buffer.read(cx));
2032 self.request_lsp(buffer, PrepareRename { position }, cx)
2033 }
2034
2035 pub fn perform_rename<T: ToPointUtf16>(
2036 &self,
2037 buffer: ModelHandle<Buffer>,
2038 position: T,
2039 new_name: String,
2040 push_to_history: bool,
2041 cx: &mut ModelContext<Self>,
2042 ) -> Task<Result<ProjectTransaction>> {
2043 let position = position.to_point_utf16(buffer.read(cx));
2044 self.request_lsp(
2045 buffer,
2046 PerformRename {
2047 position,
2048 new_name,
2049 push_to_history,
2050 },
2051 cx,
2052 )
2053 }
2054
2055 pub fn search(
2056 &self,
2057 query: SearchQuery,
2058 cx: &mut ModelContext<Self>,
2059 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2060 if self.is_local() {
2061 let snapshots = self
2062 .strong_worktrees(cx)
2063 .filter_map(|tree| {
2064 let tree = tree.read(cx).as_local()?;
2065 Some(tree.snapshot())
2066 })
2067 .collect::<Vec<_>>();
2068
2069 let background = cx.background().clone();
2070 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2071 if path_count == 0 {
2072 return Task::ready(Ok(Default::default()));
2073 }
2074 let workers = background.num_cpus().min(path_count);
2075 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2076 cx.background()
2077 .spawn({
2078 let fs = self.fs.clone();
2079 let background = cx.background().clone();
2080 let query = query.clone();
2081 async move {
2082 let fs = &fs;
2083 let query = &query;
2084 let matching_paths_tx = &matching_paths_tx;
2085 let paths_per_worker = (path_count + workers - 1) / workers;
2086 let snapshots = &snapshots;
2087 background
2088 .scoped(|scope| {
2089 for worker_ix in 0..workers {
2090 let worker_start_ix = worker_ix * paths_per_worker;
2091 let worker_end_ix = worker_start_ix + paths_per_worker;
2092 scope.spawn(async move {
2093 let mut snapshot_start_ix = 0;
2094 let mut abs_path = PathBuf::new();
2095 for snapshot in snapshots {
2096 let snapshot_end_ix =
2097 snapshot_start_ix + snapshot.visible_file_count();
2098 if worker_end_ix <= snapshot_start_ix {
2099 break;
2100 } else if worker_start_ix > snapshot_end_ix {
2101 snapshot_start_ix = snapshot_end_ix;
2102 continue;
2103 } else {
2104 let start_in_snapshot = worker_start_ix
2105 .saturating_sub(snapshot_start_ix);
2106 let end_in_snapshot =
2107 cmp::min(worker_end_ix, snapshot_end_ix)
2108 - snapshot_start_ix;
2109
2110 for entry in snapshot
2111 .files(false, start_in_snapshot)
2112 .take(end_in_snapshot - start_in_snapshot)
2113 {
2114 if matching_paths_tx.is_closed() {
2115 break;
2116 }
2117
2118 abs_path.clear();
2119 abs_path.push(&snapshot.abs_path());
2120 abs_path.push(&entry.path);
2121 let matches = if let Some(file) =
2122 fs.open_sync(&abs_path).await.log_err()
2123 {
2124 query.detect(file).unwrap_or(false)
2125 } else {
2126 false
2127 };
2128
2129 if matches {
2130 let project_path =
2131 (snapshot.id(), entry.path.clone());
2132 if matching_paths_tx
2133 .send(project_path)
2134 .await
2135 .is_err()
2136 {
2137 break;
2138 }
2139 }
2140 }
2141
2142 snapshot_start_ix = snapshot_end_ix;
2143 }
2144 }
2145 });
2146 }
2147 })
2148 .await;
2149 }
2150 })
2151 .detach();
2152
2153 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2154 let open_buffers = self
2155 .buffers_state
2156 .borrow()
2157 .open_buffers
2158 .values()
2159 .filter_map(|b| b.upgrade(cx))
2160 .collect::<HashSet<_>>();
2161 cx.spawn(|this, cx| async move {
2162 for buffer in &open_buffers {
2163 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2164 buffers_tx.send((buffer.clone(), snapshot)).await?;
2165 }
2166
2167 let open_buffers = Rc::new(RefCell::new(open_buffers));
2168 while let Some(project_path) = matching_paths_rx.next().await {
2169 if buffers_tx.is_closed() {
2170 break;
2171 }
2172
2173 let this = this.clone();
2174 let open_buffers = open_buffers.clone();
2175 let buffers_tx = buffers_tx.clone();
2176 cx.spawn(|mut cx| async move {
2177 if let Some(buffer) = this
2178 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2179 .await
2180 .log_err()
2181 {
2182 if open_buffers.borrow_mut().insert(buffer.clone()) {
2183 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2184 buffers_tx.send((buffer, snapshot)).await?;
2185 }
2186 }
2187
2188 Ok::<_, anyhow::Error>(())
2189 })
2190 .detach();
2191 }
2192
2193 Ok::<_, anyhow::Error>(())
2194 })
2195 .detach_and_log_err(cx);
2196
2197 let background = cx.background().clone();
2198 cx.background().spawn(async move {
2199 let query = &query;
2200 let mut matched_buffers = Vec::new();
2201 for _ in 0..workers {
2202 matched_buffers.push(HashMap::default());
2203 }
2204 background
2205 .scoped(|scope| {
2206 for worker_matched_buffers in matched_buffers.iter_mut() {
2207 let mut buffers_rx = buffers_rx.clone();
2208 scope.spawn(async move {
2209 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2210 let buffer_matches = query
2211 .search(snapshot.as_rope())
2212 .await
2213 .iter()
2214 .map(|range| {
2215 snapshot.anchor_before(range.start)
2216 ..snapshot.anchor_after(range.end)
2217 })
2218 .collect::<Vec<_>>();
2219 if !buffer_matches.is_empty() {
2220 worker_matched_buffers
2221 .insert(buffer.clone(), buffer_matches);
2222 }
2223 }
2224 });
2225 }
2226 })
2227 .await;
2228 Ok(matched_buffers.into_iter().flatten().collect())
2229 })
2230 } else if let Some(project_id) = self.remote_id() {
2231 let request = self.client.request(query.to_proto(project_id));
2232 let request_handle = self.start_buffer_request(cx);
2233 cx.spawn(|this, mut cx| async move {
2234 let response = request.await?;
2235 let mut result = HashMap::default();
2236 for location in response.locations {
2237 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2238 let target_buffer = this
2239 .update(&mut cx, |this, cx| {
2240 this.deserialize_buffer(buffer, request_handle.clone(), cx)
2241 })
2242 .await?;
2243 let start = location
2244 .start
2245 .and_then(deserialize_anchor)
2246 .ok_or_else(|| anyhow!("missing target start"))?;
2247 let end = location
2248 .end
2249 .and_then(deserialize_anchor)
2250 .ok_or_else(|| anyhow!("missing target end"))?;
2251 result
2252 .entry(target_buffer)
2253 .or_insert(Vec::new())
2254 .push(start..end)
2255 }
2256 Ok(result)
2257 })
2258 } else {
2259 Task::ready(Ok(Default::default()))
2260 }
2261 }
2262
2263 fn request_lsp<R: LspCommand>(
2264 &self,
2265 buffer_handle: ModelHandle<Buffer>,
2266 request: R,
2267 cx: &mut ModelContext<Self>,
2268 ) -> Task<Result<R::Response>>
2269 where
2270 <R::LspRequest as lsp::request::Request>::Result: Send,
2271 {
2272 let buffer = buffer_handle.read(cx);
2273 if self.is_local() {
2274 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2275 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
2276 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2277 return cx.spawn(|this, cx| async move {
2278 let response = language_server
2279 .request::<R::LspRequest>(lsp_params)
2280 .await
2281 .context("lsp request failed")?;
2282 request
2283 .response_from_lsp(response, this, buffer_handle, cx)
2284 .await
2285 });
2286 }
2287 } else if let Some(project_id) = self.remote_id() {
2288 let rpc = self.client.clone();
2289 let request_handle = self.start_buffer_request(cx);
2290 let message = request.to_proto(project_id, buffer);
2291 return cx.spawn(|this, cx| async move {
2292 let response = rpc.request(message).await?;
2293 request
2294 .response_from_proto(response, this, buffer_handle, request_handle, cx)
2295 .await
2296 });
2297 }
2298 Task::ready(Ok(Default::default()))
2299 }
2300
2301 pub fn find_or_create_local_worktree(
2302 &self,
2303 abs_path: impl AsRef<Path>,
2304 weak: bool,
2305 cx: &mut ModelContext<Self>,
2306 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2307 let abs_path = abs_path.as_ref();
2308 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2309 Task::ready(Ok((tree.clone(), relative_path.into())))
2310 } else {
2311 let worktree = self.create_local_worktree(abs_path, weak, cx);
2312 cx.foreground()
2313 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2314 }
2315 }
2316
2317 pub fn find_local_worktree(
2318 &self,
2319 abs_path: &Path,
2320 cx: &AppContext,
2321 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2322 for tree in self.worktrees(cx) {
2323 if let Some(relative_path) = tree
2324 .read(cx)
2325 .as_local()
2326 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2327 {
2328 return Some((tree.clone(), relative_path.into()));
2329 }
2330 }
2331 None
2332 }
2333
2334 pub fn is_shared(&self) -> bool {
2335 match &self.client_state {
2336 ProjectClientState::Local { is_shared, .. } => *is_shared,
2337 ProjectClientState::Remote { .. } => false,
2338 }
2339 }
2340
2341 fn create_local_worktree(
2342 &self,
2343 abs_path: impl AsRef<Path>,
2344 weak: bool,
2345 cx: &mut ModelContext<Self>,
2346 ) -> Task<Result<ModelHandle<Worktree>>> {
2347 let fs = self.fs.clone();
2348 let client = self.client.clone();
2349 let path = Arc::from(abs_path.as_ref());
2350 cx.spawn(|project, mut cx| async move {
2351 let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;
2352
2353 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
2354 project.add_worktree(&worktree, cx);
2355 (project.remote_id(), project.is_shared())
2356 });
2357
2358 if let Some(project_id) = remote_project_id {
2359 worktree
2360 .update(&mut cx, |worktree, cx| {
2361 worktree.as_local_mut().unwrap().register(project_id, cx)
2362 })
2363 .await?;
2364 if is_shared {
2365 worktree
2366 .update(&mut cx, |worktree, cx| {
2367 worktree.as_local_mut().unwrap().share(project_id, cx)
2368 })
2369 .await?;
2370 }
2371 }
2372
2373 Ok(worktree)
2374 })
2375 }
2376
2377 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
2378 self.worktrees.retain(|worktree| {
2379 worktree
2380 .upgrade(cx)
2381 .map_or(false, |w| w.read(cx).id() != id)
2382 });
2383 cx.notify();
2384 }
2385
2386 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
2387 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
2388 if worktree.read(cx).is_local() {
2389 cx.subscribe(&worktree, |this, worktree, _, cx| {
2390 this.update_local_worktree_buffers(worktree, cx);
2391 })
2392 .detach();
2393 }
2394
2395 let push_weak_handle = {
2396 let worktree = worktree.read(cx);
2397 worktree.is_local() && worktree.is_weak()
2398 };
2399 if push_weak_handle {
2400 cx.observe_release(&worktree, |this, cx| {
2401 this.worktrees
2402 .retain(|worktree| worktree.upgrade(cx).is_some());
2403 cx.notify();
2404 })
2405 .detach();
2406 self.worktrees
2407 .push(WorktreeHandle::Weak(worktree.downgrade()));
2408 } else {
2409 self.worktrees
2410 .push(WorktreeHandle::Strong(worktree.clone()));
2411 }
2412 cx.notify();
2413 }
2414
2415 fn update_local_worktree_buffers(
2416 &mut self,
2417 worktree_handle: ModelHandle<Worktree>,
2418 cx: &mut ModelContext<Self>,
2419 ) {
2420 let snapshot = worktree_handle.read(cx).snapshot();
2421 let mut buffers_to_delete = Vec::new();
2422 for (buffer_id, buffer) in &self.buffers_state.borrow().open_buffers {
2423 if let Some(buffer) = buffer.upgrade(cx) {
2424 buffer.update(cx, |buffer, cx| {
2425 if let Some(old_file) = File::from_dyn(buffer.file()) {
2426 if old_file.worktree != worktree_handle {
2427 return;
2428 }
2429
2430 let new_file = if let Some(entry) = old_file
2431 .entry_id
2432 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
2433 {
2434 File {
2435 is_local: true,
2436 entry_id: Some(entry.id),
2437 mtime: entry.mtime,
2438 path: entry.path.clone(),
2439 worktree: worktree_handle.clone(),
2440 }
2441 } else if let Some(entry) =
2442 snapshot.entry_for_path(old_file.path().as_ref())
2443 {
2444 File {
2445 is_local: true,
2446 entry_id: Some(entry.id),
2447 mtime: entry.mtime,
2448 path: entry.path.clone(),
2449 worktree: worktree_handle.clone(),
2450 }
2451 } else {
2452 File {
2453 is_local: true,
2454 entry_id: None,
2455 path: old_file.path().clone(),
2456 mtime: old_file.mtime(),
2457 worktree: worktree_handle.clone(),
2458 }
2459 };
2460
2461 if let Some(project_id) = self.remote_id() {
2462 self.client
2463 .send(proto::UpdateBufferFile {
2464 project_id,
2465 buffer_id: *buffer_id as u64,
2466 file: Some(new_file.to_proto()),
2467 })
2468 .log_err();
2469 }
2470 buffer.file_updated(Box::new(new_file), cx).detach();
2471 }
2472 });
2473 } else {
2474 buffers_to_delete.push(*buffer_id);
2475 }
2476 }
2477
2478 for buffer_id in buffers_to_delete {
2479 self.buffers_state
2480 .borrow_mut()
2481 .open_buffers
2482 .remove(&buffer_id);
2483 }
2484 }
2485
2486 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2487 let new_active_entry = entry.and_then(|project_path| {
2488 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2489 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2490 Some(ProjectEntry {
2491 worktree_id: project_path.worktree_id,
2492 entry_id: entry.id,
2493 })
2494 });
2495 if new_active_entry != self.active_entry {
2496 self.active_entry = new_active_entry;
2497 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2498 }
2499 }
2500
2501 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2502 self.language_servers_with_diagnostics_running > 0
2503 }
2504
2505 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2506 let mut summary = DiagnosticSummary::default();
2507 for (_, path_summary) in self.diagnostic_summaries(cx) {
2508 summary.error_count += path_summary.error_count;
2509 summary.warning_count += path_summary.warning_count;
2510 summary.info_count += path_summary.info_count;
2511 summary.hint_count += path_summary.hint_count;
2512 }
2513 summary
2514 }
2515
2516 pub fn diagnostic_summaries<'a>(
2517 &'a self,
2518 cx: &'a AppContext,
2519 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2520 self.worktrees(cx).flat_map(move |worktree| {
2521 let worktree = worktree.read(cx);
2522 let worktree_id = worktree.id();
2523 worktree
2524 .diagnostic_summaries()
2525 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2526 })
2527 }
2528
2529 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2530 self.language_servers_with_diagnostics_running += 1;
2531 if self.language_servers_with_diagnostics_running == 1 {
2532 cx.emit(Event::DiskBasedDiagnosticsStarted);
2533 }
2534 }
2535
2536 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2537 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2538 self.language_servers_with_diagnostics_running -= 1;
2539 if self.language_servers_with_diagnostics_running == 0 {
2540 cx.emit(Event::DiskBasedDiagnosticsFinished);
2541 }
2542 }
2543
2544 pub fn active_entry(&self) -> Option<ProjectEntry> {
2545 self.active_entry
2546 }
2547
2548 // RPC message handlers
2549
2550 async fn handle_unshare_project(
2551 this: ModelHandle<Self>,
2552 _: TypedEnvelope<proto::UnshareProject>,
2553 _: Arc<Client>,
2554 mut cx: AsyncAppContext,
2555 ) -> Result<()> {
2556 this.update(&mut cx, |this, cx| {
2557 if let ProjectClientState::Remote {
2558 sharing_has_stopped,
2559 ..
2560 } = &mut this.client_state
2561 {
2562 *sharing_has_stopped = true;
2563 this.collaborators.clear();
2564 cx.notify();
2565 } else {
2566 unreachable!()
2567 }
2568 });
2569
2570 Ok(())
2571 }
2572
2573 async fn handle_add_collaborator(
2574 this: ModelHandle<Self>,
2575 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2576 _: Arc<Client>,
2577 mut cx: AsyncAppContext,
2578 ) -> Result<()> {
2579 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2580 let collaborator = envelope
2581 .payload
2582 .collaborator
2583 .take()
2584 .ok_or_else(|| anyhow!("empty collaborator"))?;
2585
2586 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2587 this.update(&mut cx, |this, cx| {
2588 this.collaborators
2589 .insert(collaborator.peer_id, collaborator);
2590 cx.notify();
2591 });
2592
2593 Ok(())
2594 }
2595
2596 async fn handle_remove_collaborator(
2597 this: ModelHandle<Self>,
2598 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2599 _: Arc<Client>,
2600 mut cx: AsyncAppContext,
2601 ) -> Result<()> {
2602 this.update(&mut cx, |this, cx| {
2603 let peer_id = PeerId(envelope.payload.peer_id);
2604 let replica_id = this
2605 .collaborators
2606 .remove(&peer_id)
2607 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2608 .replica_id;
2609 this.shared_buffers.remove(&peer_id);
2610 for (_, buffer) in &this.buffers_state.borrow().open_buffers {
2611 if let Some(buffer) = buffer.upgrade(cx) {
2612 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2613 }
2614 }
2615 cx.notify();
2616 Ok(())
2617 })
2618 }
2619
2620 async fn handle_register_worktree(
2621 this: ModelHandle<Self>,
2622 envelope: TypedEnvelope<proto::RegisterWorktree>,
2623 client: Arc<Client>,
2624 mut cx: AsyncAppContext,
2625 ) -> Result<()> {
2626 this.update(&mut cx, |this, cx| {
2627 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2628 let replica_id = this.replica_id();
2629 let worktree = proto::Worktree {
2630 id: envelope.payload.worktree_id,
2631 root_name: envelope.payload.root_name,
2632 entries: Default::default(),
2633 diagnostic_summaries: Default::default(),
2634 weak: envelope.payload.weak,
2635 };
2636 let (worktree, load_task) =
2637 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2638 this.add_worktree(&worktree, cx);
2639 load_task.detach();
2640 Ok(())
2641 })
2642 }
2643
2644 async fn handle_unregister_worktree(
2645 this: ModelHandle<Self>,
2646 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2647 _: Arc<Client>,
2648 mut cx: AsyncAppContext,
2649 ) -> Result<()> {
2650 this.update(&mut cx, |this, cx| {
2651 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2652 this.remove_worktree(worktree_id, cx);
2653 Ok(())
2654 })
2655 }
2656
2657 async fn handle_update_worktree(
2658 this: ModelHandle<Self>,
2659 envelope: TypedEnvelope<proto::UpdateWorktree>,
2660 _: Arc<Client>,
2661 mut cx: AsyncAppContext,
2662 ) -> Result<()> {
2663 this.update(&mut cx, |this, cx| {
2664 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2665 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2666 worktree.update(cx, |worktree, _| {
2667 let worktree = worktree.as_remote_mut().unwrap();
2668 worktree.update_from_remote(envelope)
2669 })?;
2670 }
2671 Ok(())
2672 })
2673 }
2674
2675 async fn handle_update_diagnostic_summary(
2676 this: ModelHandle<Self>,
2677 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2678 _: Arc<Client>,
2679 mut cx: AsyncAppContext,
2680 ) -> Result<()> {
2681 this.update(&mut cx, |this, cx| {
2682 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2683 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2684 if let Some(summary) = envelope.payload.summary {
2685 let project_path = ProjectPath {
2686 worktree_id,
2687 path: Path::new(&summary.path).into(),
2688 };
2689 worktree.update(cx, |worktree, _| {
2690 worktree
2691 .as_remote_mut()
2692 .unwrap()
2693 .update_diagnostic_summary(project_path.path.clone(), &summary);
2694 });
2695 cx.emit(Event::DiagnosticsUpdated(project_path));
2696 }
2697 }
2698 Ok(())
2699 })
2700 }
2701
2702 async fn handle_disk_based_diagnostics_updating(
2703 this: ModelHandle<Self>,
2704 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2705 _: Arc<Client>,
2706 mut cx: AsyncAppContext,
2707 ) -> Result<()> {
2708 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2709 Ok(())
2710 }
2711
2712 async fn handle_disk_based_diagnostics_updated(
2713 this: ModelHandle<Self>,
2714 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2715 _: Arc<Client>,
2716 mut cx: AsyncAppContext,
2717 ) -> Result<()> {
2718 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2719 Ok(())
2720 }
2721
2722 async fn handle_update_buffer(
2723 this: ModelHandle<Self>,
2724 envelope: TypedEnvelope<proto::UpdateBuffer>,
2725 _: Arc<Client>,
2726 mut cx: AsyncAppContext,
2727 ) -> Result<()> {
2728 this.update(&mut cx, |this, cx| {
2729 let payload = envelope.payload.clone();
2730 let buffer_id = payload.buffer_id;
2731 let ops = payload
2732 .operations
2733 .into_iter()
2734 .map(|op| language::proto::deserialize_operation(op))
2735 .collect::<Result<Vec<_>, _>>()?;
2736 let is_remote = this.is_remote();
2737 let mut buffers_state = this.buffers_state.borrow_mut();
2738 let buffer_request_count = buffers_state.buffer_request_count;
2739 match buffers_state.open_buffers.entry(buffer_id) {
2740 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2741 OpenBuffer::Loaded(buffer) => {
2742 if let Some(buffer) = buffer.upgrade(cx) {
2743 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2744 } else if is_remote && buffer_request_count > 0 {
2745 e.insert(OpenBuffer::Loading(ops));
2746 }
2747 }
2748 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2749 },
2750 hash_map::Entry::Vacant(e) => {
2751 if is_remote && buffer_request_count > 0 {
2752 e.insert(OpenBuffer::Loading(ops));
2753 }
2754 }
2755 }
2756 Ok(())
2757 })
2758 }
2759
2760 async fn handle_update_buffer_file(
2761 this: ModelHandle<Self>,
2762 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2763 _: Arc<Client>,
2764 mut cx: AsyncAppContext,
2765 ) -> Result<()> {
2766 this.update(&mut cx, |this, cx| {
2767 let payload = envelope.payload.clone();
2768 let buffer_id = payload.buffer_id;
2769 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2770 let worktree = this
2771 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2772 .ok_or_else(|| anyhow!("no such worktree"))?;
2773 let file = File::from_proto(file, worktree.clone(), cx)?;
2774 let buffer = this
2775 .buffers_state
2776 .borrow_mut()
2777 .open_buffers
2778 .get_mut(&buffer_id)
2779 .and_then(|b| b.upgrade(cx))
2780 .ok_or_else(|| anyhow!("no such buffer"))?;
2781 buffer.update(cx, |buffer, cx| {
2782 buffer.file_updated(Box::new(file), cx).detach();
2783 });
2784 Ok(())
2785 })
2786 }
2787
2788 async fn handle_save_buffer(
2789 this: ModelHandle<Self>,
2790 envelope: TypedEnvelope<proto::SaveBuffer>,
2791 _: Arc<Client>,
2792 mut cx: AsyncAppContext,
2793 ) -> Result<proto::BufferSaved> {
2794 let buffer_id = envelope.payload.buffer_id;
2795 let sender_id = envelope.original_sender_id()?;
2796 let requested_version = envelope.payload.version.try_into()?;
2797
2798 let (project_id, buffer) = this.update(&mut cx, |this, _| {
2799 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2800 let buffer = this
2801 .shared_buffers
2802 .get(&sender_id)
2803 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2804 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2805 Ok::<_, anyhow::Error>((project_id, buffer))
2806 })?;
2807
2808 if !buffer
2809 .read_with(&cx, |buffer, _| buffer.version())
2810 .observed_all(&requested_version)
2811 {
2812 Err(anyhow!("save request depends on unreceived edits"))?;
2813 }
2814
2815 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2816 Ok(proto::BufferSaved {
2817 project_id,
2818 buffer_id,
2819 version: (&saved_version).into(),
2820 mtime: Some(mtime.into()),
2821 })
2822 }
2823
2824 async fn handle_format_buffers(
2825 this: ModelHandle<Self>,
2826 envelope: TypedEnvelope<proto::FormatBuffers>,
2827 _: Arc<Client>,
2828 mut cx: AsyncAppContext,
2829 ) -> Result<proto::FormatBuffersResponse> {
2830 let sender_id = envelope.original_sender_id()?;
2831 let format = this.update(&mut cx, |this, cx| {
2832 let shared_buffers = this
2833 .shared_buffers
2834 .get(&sender_id)
2835 .ok_or_else(|| anyhow!("peer has no buffers"))?;
2836 let mut buffers = HashSet::default();
2837 for buffer_id in &envelope.payload.buffer_ids {
2838 buffers.insert(
2839 shared_buffers
2840 .get(buffer_id)
2841 .cloned()
2842 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2843 );
2844 }
2845 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2846 })?;
2847
2848 let project_transaction = format.await?;
2849 let project_transaction = this.update(&mut cx, |this, cx| {
2850 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2851 });
2852 Ok(proto::FormatBuffersResponse {
2853 transaction: Some(project_transaction),
2854 })
2855 }
2856
2857 async fn handle_get_completions(
2858 this: ModelHandle<Self>,
2859 envelope: TypedEnvelope<proto::GetCompletions>,
2860 _: Arc<Client>,
2861 mut cx: AsyncAppContext,
2862 ) -> Result<proto::GetCompletionsResponse> {
2863 let sender_id = envelope.original_sender_id()?;
2864 let position = envelope
2865 .payload
2866 .position
2867 .and_then(language::proto::deserialize_anchor)
2868 .ok_or_else(|| anyhow!("invalid position"))?;
2869 let version = clock::Global::from(envelope.payload.version);
2870 let buffer = this.read_with(&cx, |this, _| {
2871 this.shared_buffers
2872 .get(&sender_id)
2873 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2874 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2875 })?;
2876 if !buffer
2877 .read_with(&cx, |buffer, _| buffer.version())
2878 .observed_all(&version)
2879 {
2880 Err(anyhow!("completion request depends on unreceived edits"))?;
2881 }
2882 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2883 let completions = this
2884 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2885 .await?;
2886
2887 Ok(proto::GetCompletionsResponse {
2888 completions: completions
2889 .iter()
2890 .map(language::proto::serialize_completion)
2891 .collect(),
2892 version: (&version).into(),
2893 })
2894 }
2895
2896 async fn handle_apply_additional_edits_for_completion(
2897 this: ModelHandle<Self>,
2898 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2899 _: Arc<Client>,
2900 mut cx: AsyncAppContext,
2901 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2902 let sender_id = envelope.original_sender_id()?;
2903 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2904 let buffer = this
2905 .shared_buffers
2906 .get(&sender_id)
2907 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2908 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2909 let language = buffer.read(cx).language();
2910 let completion = language::proto::deserialize_completion(
2911 envelope
2912 .payload
2913 .completion
2914 .ok_or_else(|| anyhow!("invalid completion"))?,
2915 language,
2916 )?;
2917 Ok::<_, anyhow::Error>(
2918 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2919 )
2920 })?;
2921
2922 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2923 transaction: apply_additional_edits
2924 .await?
2925 .as_ref()
2926 .map(language::proto::serialize_transaction),
2927 })
2928 }
2929
2930 async fn handle_get_code_actions(
2931 this: ModelHandle<Self>,
2932 envelope: TypedEnvelope<proto::GetCodeActions>,
2933 _: Arc<Client>,
2934 mut cx: AsyncAppContext,
2935 ) -> Result<proto::GetCodeActionsResponse> {
2936 let sender_id = envelope.original_sender_id()?;
2937 let start = envelope
2938 .payload
2939 .start
2940 .and_then(language::proto::deserialize_anchor)
2941 .ok_or_else(|| anyhow!("invalid start"))?;
2942 let end = envelope
2943 .payload
2944 .end
2945 .and_then(language::proto::deserialize_anchor)
2946 .ok_or_else(|| anyhow!("invalid end"))?;
2947 let buffer = this.update(&mut cx, |this, _| {
2948 this.shared_buffers
2949 .get(&sender_id)
2950 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2951 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2952 })?;
2953 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2954 if !version.observed(start.timestamp) || !version.observed(end.timestamp) {
2955 Err(anyhow!("code action request references unreceived edits"))?;
2956 }
2957 let code_actions = this.update(&mut cx, |this, cx| {
2958 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2959 })?;
2960
2961 Ok(proto::GetCodeActionsResponse {
2962 actions: code_actions
2963 .await?
2964 .iter()
2965 .map(language::proto::serialize_code_action)
2966 .collect(),
2967 version: (&version).into(),
2968 })
2969 }
2970
2971 async fn handle_apply_code_action(
2972 this: ModelHandle<Self>,
2973 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2974 _: Arc<Client>,
2975 mut cx: AsyncAppContext,
2976 ) -> Result<proto::ApplyCodeActionResponse> {
2977 let sender_id = envelope.original_sender_id()?;
2978 let action = language::proto::deserialize_code_action(
2979 envelope
2980 .payload
2981 .action
2982 .ok_or_else(|| anyhow!("invalid action"))?,
2983 )?;
2984 let apply_code_action = this.update(&mut cx, |this, cx| {
2985 let buffer = this
2986 .shared_buffers
2987 .get(&sender_id)
2988 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2989 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2990 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2991 })?;
2992
2993 let project_transaction = apply_code_action.await?;
2994 let project_transaction = this.update(&mut cx, |this, cx| {
2995 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2996 });
2997 Ok(proto::ApplyCodeActionResponse {
2998 transaction: Some(project_transaction),
2999 })
3000 }
3001
3002 async fn handle_lsp_command<T: LspCommand>(
3003 this: ModelHandle<Self>,
3004 envelope: TypedEnvelope<T::ProtoRequest>,
3005 _: Arc<Client>,
3006 mut cx: AsyncAppContext,
3007 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3008 where
3009 <T::LspRequest as lsp::request::Request>::Result: Send,
3010 {
3011 let sender_id = envelope.original_sender_id()?;
3012 let (request, buffer_version) = this.update(&mut cx, |this, cx| {
3013 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3014 let buffer_handle = this
3015 .shared_buffers
3016 .get(&sender_id)
3017 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
3018 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3019 let buffer = buffer_handle.read(cx);
3020 let buffer_version = buffer.version();
3021 let request = T::from_proto(envelope.payload, this, buffer)?;
3022 Ok::<_, anyhow::Error>((this.request_lsp(buffer_handle, request, cx), buffer_version))
3023 })?;
3024 let response = request.await?;
3025 this.update(&mut cx, |this, cx| {
3026 Ok(T::response_to_proto(
3027 response,
3028 this,
3029 sender_id,
3030 &buffer_version,
3031 cx,
3032 ))
3033 })
3034 }
3035
3036 async fn handle_get_project_symbols(
3037 this: ModelHandle<Self>,
3038 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3039 _: Arc<Client>,
3040 mut cx: AsyncAppContext,
3041 ) -> Result<proto::GetProjectSymbolsResponse> {
3042 let symbols = this
3043 .update(&mut cx, |this, cx| {
3044 this.symbols(&envelope.payload.query, cx)
3045 })
3046 .await?;
3047
3048 Ok(proto::GetProjectSymbolsResponse {
3049 symbols: symbols.iter().map(serialize_symbol).collect(),
3050 })
3051 }
3052
3053 async fn handle_search_project(
3054 this: ModelHandle<Self>,
3055 envelope: TypedEnvelope<proto::SearchProject>,
3056 _: Arc<Client>,
3057 mut cx: AsyncAppContext,
3058 ) -> Result<proto::SearchProjectResponse> {
3059 let peer_id = envelope.original_sender_id()?;
3060 let query = SearchQuery::from_proto(envelope.payload)?;
3061 let result = this
3062 .update(&mut cx, |this, cx| this.search(query, cx))
3063 .await?;
3064
3065 this.update(&mut cx, |this, cx| {
3066 let mut locations = Vec::new();
3067 for (buffer, ranges) in result {
3068 for range in ranges {
3069 let start = serialize_anchor(&range.start);
3070 let end = serialize_anchor(&range.end);
3071 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3072 locations.push(proto::Location {
3073 buffer: Some(buffer),
3074 start: Some(start),
3075 end: Some(end),
3076 });
3077 }
3078 }
3079 Ok(proto::SearchProjectResponse { locations })
3080 })
3081 }
3082
3083 async fn handle_open_buffer_for_symbol(
3084 this: ModelHandle<Self>,
3085 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3086 _: Arc<Client>,
3087 mut cx: AsyncAppContext,
3088 ) -> Result<proto::OpenBufferForSymbolResponse> {
3089 let peer_id = envelope.original_sender_id()?;
3090 let symbol = envelope
3091 .payload
3092 .symbol
3093 .ok_or_else(|| anyhow!("invalid symbol"))?;
3094 let symbol = this.read_with(&cx, |this, _| {
3095 let symbol = this.deserialize_symbol(symbol)?;
3096 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3097 if signature == symbol.signature {
3098 Ok(symbol)
3099 } else {
3100 Err(anyhow!("invalid symbol signature"))
3101 }
3102 })?;
3103 let buffer = this
3104 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3105 .await?;
3106
3107 Ok(proto::OpenBufferForSymbolResponse {
3108 buffer: Some(this.update(&mut cx, |this, cx| {
3109 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3110 })),
3111 })
3112 }
3113
3114 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3115 let mut hasher = Sha256::new();
3116 hasher.update(worktree_id.to_proto().to_be_bytes());
3117 hasher.update(path.to_string_lossy().as_bytes());
3118 hasher.update(self.nonce.to_be_bytes());
3119 hasher.finalize().as_slice().try_into().unwrap()
3120 }
3121
3122 async fn handle_open_buffer(
3123 this: ModelHandle<Self>,
3124 envelope: TypedEnvelope<proto::OpenBuffer>,
3125 _: Arc<Client>,
3126 mut cx: AsyncAppContext,
3127 ) -> Result<proto::OpenBufferResponse> {
3128 let peer_id = envelope.original_sender_id()?;
3129 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3130 let open_buffer = this.update(&mut cx, |this, cx| {
3131 this.open_buffer(
3132 ProjectPath {
3133 worktree_id,
3134 path: PathBuf::from(envelope.payload.path).into(),
3135 },
3136 cx,
3137 )
3138 });
3139
3140 let buffer = open_buffer.await?;
3141 this.update(&mut cx, |this, cx| {
3142 Ok(proto::OpenBufferResponse {
3143 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3144 })
3145 })
3146 }
3147
3148 fn serialize_project_transaction_for_peer(
3149 &mut self,
3150 project_transaction: ProjectTransaction,
3151 peer_id: PeerId,
3152 cx: &AppContext,
3153 ) -> proto::ProjectTransaction {
3154 let mut serialized_transaction = proto::ProjectTransaction {
3155 buffers: Default::default(),
3156 transactions: Default::default(),
3157 };
3158 for (buffer, transaction) in project_transaction.0 {
3159 serialized_transaction
3160 .buffers
3161 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3162 serialized_transaction
3163 .transactions
3164 .push(language::proto::serialize_transaction(&transaction));
3165 }
3166 serialized_transaction
3167 }
3168
3169 fn deserialize_project_transaction(
3170 &mut self,
3171 message: proto::ProjectTransaction,
3172 push_to_history: bool,
3173 request_handle: BufferRequestHandle,
3174 cx: &mut ModelContext<Self>,
3175 ) -> Task<Result<ProjectTransaction>> {
3176 cx.spawn(|this, mut cx| async move {
3177 let mut project_transaction = ProjectTransaction::default();
3178 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3179 let buffer = this
3180 .update(&mut cx, |this, cx| {
3181 this.deserialize_buffer(buffer, request_handle.clone(), cx)
3182 })
3183 .await?;
3184 let transaction = language::proto::deserialize_transaction(transaction)?;
3185 project_transaction.0.insert(buffer, transaction);
3186 }
3187
3188 for (buffer, transaction) in &project_transaction.0 {
3189 buffer
3190 .update(&mut cx, |buffer, _| {
3191 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3192 })
3193 .await;
3194
3195 if push_to_history {
3196 buffer.update(&mut cx, |buffer, _| {
3197 buffer.push_transaction(transaction.clone(), Instant::now());
3198 });
3199 }
3200 }
3201
3202 Ok(project_transaction)
3203 })
3204 }
3205
3206 fn serialize_buffer_for_peer(
3207 &mut self,
3208 buffer: &ModelHandle<Buffer>,
3209 peer_id: PeerId,
3210 cx: &AppContext,
3211 ) -> proto::Buffer {
3212 let buffer_id = buffer.read(cx).remote_id();
3213 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3214 match shared_buffers.entry(buffer_id) {
3215 hash_map::Entry::Occupied(_) => proto::Buffer {
3216 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3217 },
3218 hash_map::Entry::Vacant(entry) => {
3219 entry.insert(buffer.clone());
3220 proto::Buffer {
3221 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3222 }
3223 }
3224 }
3225 }
3226
3227 fn deserialize_buffer(
3228 &mut self,
3229 buffer: proto::Buffer,
3230 request_handle: BufferRequestHandle,
3231 cx: &mut ModelContext<Self>,
3232 ) -> Task<Result<ModelHandle<Buffer>>> {
3233 let replica_id = self.replica_id();
3234
3235 let mut opened_buffer_tx = self.opened_buffer.clone();
3236 let mut opened_buffer_rx = self.opened_buffer.subscribe();
3237 cx.spawn(|this, mut cx| async move {
3238 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3239 proto::buffer::Variant::Id(id) => {
3240 let buffer = loop {
3241 let buffer = this.read_with(&cx, |this, cx| {
3242 this.buffers_state
3243 .borrow()
3244 .open_buffers
3245 .get(&id)
3246 .and_then(|buffer| buffer.upgrade(cx))
3247 });
3248 if let Some(buffer) = buffer {
3249 break buffer;
3250 }
3251 opened_buffer_rx
3252 .recv()
3253 .await
3254 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3255 };
3256 Ok(buffer)
3257 }
3258 proto::buffer::Variant::State(mut buffer) => {
3259 let mut buffer_worktree = None;
3260 let mut buffer_file = None;
3261 if let Some(file) = buffer.file.take() {
3262 this.read_with(&cx, |this, cx| {
3263 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3264 let worktree =
3265 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3266 anyhow!("no worktree found for id {}", file.worktree_id)
3267 })?;
3268 buffer_file =
3269 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3270 as Box<dyn language::File>);
3271 buffer_worktree = Some(worktree);
3272 Ok::<_, anyhow::Error>(())
3273 })?;
3274 }
3275
3276 let buffer = cx.add_model(|cx| {
3277 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3278 });
3279
3280 request_handle.preserve_buffer(buffer.clone());
3281 this.update(&mut cx, |this, cx| {
3282 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
3283 })?;
3284
3285 let _ = opened_buffer_tx.send(()).await;
3286 Ok(buffer)
3287 }
3288 }
3289 })
3290 }
3291
3292 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3293 let language = self
3294 .languages
3295 .get_language(&serialized_symbol.language_name);
3296 let start = serialized_symbol
3297 .start
3298 .ok_or_else(|| anyhow!("invalid start"))?;
3299 let end = serialized_symbol
3300 .end
3301 .ok_or_else(|| anyhow!("invalid end"))?;
3302 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
3303 Ok(Symbol {
3304 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
3305 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
3306 language_name: serialized_symbol.language_name.clone(),
3307 label: language
3308 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
3309 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
3310 name: serialized_symbol.name,
3311 path: PathBuf::from(serialized_symbol.path),
3312 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
3313 kind,
3314 signature: serialized_symbol
3315 .signature
3316 .try_into()
3317 .map_err(|_| anyhow!("invalid signature"))?,
3318 })
3319 }
3320
3321 async fn handle_close_buffer(
3322 this: ModelHandle<Self>,
3323 envelope: TypedEnvelope<proto::CloseBuffer>,
3324 _: Arc<Client>,
3325 mut cx: AsyncAppContext,
3326 ) -> Result<()> {
3327 this.update(&mut cx, |this, cx| {
3328 if let Some(shared_buffers) =
3329 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
3330 {
3331 shared_buffers.remove(&envelope.payload.buffer_id);
3332 cx.notify();
3333 }
3334 Ok(())
3335 })
3336 }
3337
3338 async fn handle_buffer_saved(
3339 this: ModelHandle<Self>,
3340 envelope: TypedEnvelope<proto::BufferSaved>,
3341 _: Arc<Client>,
3342 mut cx: AsyncAppContext,
3343 ) -> Result<()> {
3344 let version = envelope.payload.version.try_into()?;
3345 let mtime = envelope
3346 .payload
3347 .mtime
3348 .ok_or_else(|| anyhow!("missing mtime"))?
3349 .into();
3350
3351 this.update(&mut cx, |this, cx| {
3352 let buffer = this
3353 .buffers_state
3354 .borrow()
3355 .open_buffers
3356 .get(&envelope.payload.buffer_id)
3357 .and_then(|buffer| buffer.upgrade(cx));
3358 if let Some(buffer) = buffer {
3359 buffer.update(cx, |buffer, cx| {
3360 buffer.did_save(version, mtime, None, cx);
3361 });
3362 }
3363 Ok(())
3364 })
3365 }
3366
3367 async fn handle_buffer_reloaded(
3368 this: ModelHandle<Self>,
3369 envelope: TypedEnvelope<proto::BufferReloaded>,
3370 _: Arc<Client>,
3371 mut cx: AsyncAppContext,
3372 ) -> Result<()> {
3373 let payload = envelope.payload.clone();
3374 let version = payload.version.try_into()?;
3375 let mtime = payload
3376 .mtime
3377 .ok_or_else(|| anyhow!("missing mtime"))?
3378 .into();
3379 this.update(&mut cx, |this, cx| {
3380 let buffer = this
3381 .buffers_state
3382 .borrow()
3383 .open_buffers
3384 .get(&payload.buffer_id)
3385 .and_then(|buffer| buffer.upgrade(cx));
3386 if let Some(buffer) = buffer {
3387 buffer.update(cx, |buffer, cx| {
3388 buffer.did_reload(version, mtime, cx);
3389 });
3390 }
3391 Ok(())
3392 })
3393 }
3394
3395 pub fn match_paths<'a>(
3396 &self,
3397 query: &'a str,
3398 include_ignored: bool,
3399 smart_case: bool,
3400 max_results: usize,
3401 cancel_flag: &'a AtomicBool,
3402 cx: &AppContext,
3403 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
3404 let worktrees = self
3405 .worktrees(cx)
3406 .filter(|worktree| !worktree.read(cx).is_weak())
3407 .collect::<Vec<_>>();
3408 let include_root_name = worktrees.len() > 1;
3409 let candidate_sets = worktrees
3410 .into_iter()
3411 .map(|worktree| CandidateSet {
3412 snapshot: worktree.read(cx).snapshot(),
3413 include_ignored,
3414 include_root_name,
3415 })
3416 .collect::<Vec<_>>();
3417
3418 let background = cx.background().clone();
3419 async move {
3420 fuzzy::match_paths(
3421 candidate_sets.as_slice(),
3422 query,
3423 smart_case,
3424 max_results,
3425 cancel_flag,
3426 background,
3427 )
3428 .await
3429 }
3430 }
3431}
3432
3433impl BufferRequestHandle {
3434 fn new(state: Rc<RefCell<ProjectBuffers>>, cx: &AppContext) -> Self {
3435 {
3436 let state = &mut *state.borrow_mut();
3437 state.buffer_request_count += 1;
3438 if state.buffer_request_count == 1 {
3439 state.preserved_buffers.extend(
3440 state
3441 .open_buffers
3442 .values()
3443 .filter_map(|buffer| buffer.upgrade(cx)),
3444 )
3445 }
3446 }
3447 Self(state)
3448 }
3449
3450 fn preserve_buffer(&self, buffer: ModelHandle<Buffer>) {
3451 self.0.borrow_mut().preserved_buffers.push(buffer);
3452 }
3453}
3454
3455impl Clone for BufferRequestHandle {
3456 fn clone(&self) -> Self {
3457 self.0.borrow_mut().buffer_request_count += 1;
3458 Self(self.0.clone())
3459 }
3460}
3461
3462impl Drop for BufferRequestHandle {
3463 fn drop(&mut self) {
3464 let mut state = self.0.borrow_mut();
3465 state.buffer_request_count -= 1;
3466 if state.buffer_request_count == 0 {
3467 state.preserved_buffers.clear();
3468 state
3469 .open_buffers
3470 .retain(|_, buffer| matches!(buffer, OpenBuffer::Loaded(_)))
3471 }
3472 }
3473}
3474
3475impl WorktreeHandle {
3476 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
3477 match self {
3478 WorktreeHandle::Strong(handle) => Some(handle.clone()),
3479 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
3480 }
3481 }
3482}
3483
3484impl OpenBuffer {
3485 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
3486 match self {
3487 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
3488 OpenBuffer::Loading(_) => None,
3489 }
3490 }
3491}
3492
3493struct CandidateSet {
3494 snapshot: Snapshot,
3495 include_ignored: bool,
3496 include_root_name: bool,
3497}
3498
3499impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
3500 type Candidates = CandidateSetIter<'a>;
3501
3502 fn id(&self) -> usize {
3503 self.snapshot.id().to_usize()
3504 }
3505
3506 fn len(&self) -> usize {
3507 if self.include_ignored {
3508 self.snapshot.file_count()
3509 } else {
3510 self.snapshot.visible_file_count()
3511 }
3512 }
3513
3514 fn prefix(&self) -> Arc<str> {
3515 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
3516 self.snapshot.root_name().into()
3517 } else if self.include_root_name {
3518 format!("{}/", self.snapshot.root_name()).into()
3519 } else {
3520 "".into()
3521 }
3522 }
3523
3524 fn candidates(&'a self, start: usize) -> Self::Candidates {
3525 CandidateSetIter {
3526 traversal: self.snapshot.files(self.include_ignored, start),
3527 }
3528 }
3529}
3530
3531struct CandidateSetIter<'a> {
3532 traversal: Traversal<'a>,
3533}
3534
3535impl<'a> Iterator for CandidateSetIter<'a> {
3536 type Item = PathMatchCandidate<'a>;
3537
3538 fn next(&mut self) -> Option<Self::Item> {
3539 self.traversal.next().map(|entry| {
3540 if let EntryKind::File(char_bag) = entry.kind {
3541 PathMatchCandidate {
3542 path: &entry.path,
3543 char_bag,
3544 }
3545 } else {
3546 unreachable!()
3547 }
3548 })
3549 }
3550}
3551
3552impl Entity for Project {
3553 type Event = Event;
3554
3555 fn release(&mut self, _: &mut gpui::MutableAppContext) {
3556 match &self.client_state {
3557 ProjectClientState::Local { remote_id_rx, .. } => {
3558 if let Some(project_id) = *remote_id_rx.borrow() {
3559 self.client
3560 .send(proto::UnregisterProject { project_id })
3561 .log_err();
3562 }
3563 }
3564 ProjectClientState::Remote { remote_id, .. } => {
3565 self.client
3566 .send(proto::LeaveProject {
3567 project_id: *remote_id,
3568 })
3569 .log_err();
3570 }
3571 }
3572 }
3573
3574 fn app_will_quit(
3575 &mut self,
3576 _: &mut MutableAppContext,
3577 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
3578 let shutdown_futures = self
3579 .language_servers
3580 .drain()
3581 .filter_map(|(_, server)| server.shutdown())
3582 .collect::<Vec<_>>();
3583 Some(
3584 async move {
3585 futures::future::join_all(shutdown_futures).await;
3586 }
3587 .boxed(),
3588 )
3589 }
3590}
3591
3592impl Collaborator {
3593 fn from_proto(
3594 message: proto::Collaborator,
3595 user_store: &ModelHandle<UserStore>,
3596 cx: &mut AsyncAppContext,
3597 ) -> impl Future<Output = Result<Self>> {
3598 let user = user_store.update(cx, |user_store, cx| {
3599 user_store.fetch_user(message.user_id, cx)
3600 });
3601
3602 async move {
3603 Ok(Self {
3604 peer_id: PeerId(message.peer_id),
3605 user: user.await?,
3606 replica_id: message.replica_id as ReplicaId,
3607 })
3608 }
3609 }
3610}
3611
3612impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
3613 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
3614 Self {
3615 worktree_id,
3616 path: path.as_ref().into(),
3617 }
3618 }
3619}
3620
3621impl From<lsp::CreateFileOptions> for fs::CreateOptions {
3622 fn from(options: lsp::CreateFileOptions) -> Self {
3623 Self {
3624 overwrite: options.overwrite.unwrap_or(false),
3625 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3626 }
3627 }
3628}
3629
3630impl From<lsp::RenameFileOptions> for fs::RenameOptions {
3631 fn from(options: lsp::RenameFileOptions) -> Self {
3632 Self {
3633 overwrite: options.overwrite.unwrap_or(false),
3634 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3635 }
3636 }
3637}
3638
3639impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
3640 fn from(options: lsp::DeleteFileOptions) -> Self {
3641 Self {
3642 recursive: options.recursive.unwrap_or(false),
3643 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
3644 }
3645 }
3646}
3647
3648fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
3649 proto::Symbol {
3650 source_worktree_id: symbol.source_worktree_id.to_proto(),
3651 worktree_id: symbol.worktree_id.to_proto(),
3652 language_name: symbol.language_name.clone(),
3653 name: symbol.name.clone(),
3654 kind: unsafe { mem::transmute(symbol.kind) },
3655 path: symbol.path.to_string_lossy().to_string(),
3656 start: Some(proto::Point {
3657 row: symbol.range.start.row,
3658 column: symbol.range.start.column,
3659 }),
3660 end: Some(proto::Point {
3661 row: symbol.range.end.row,
3662 column: symbol.range.end.column,
3663 }),
3664 signature: symbol.signature.to_vec(),
3665 }
3666}
3667
3668fn relativize_path(base: &Path, path: &Path) -> PathBuf {
3669 let mut path_components = path.components();
3670 let mut base_components = base.components();
3671 let mut components: Vec<Component> = Vec::new();
3672 loop {
3673 match (path_components.next(), base_components.next()) {
3674 (None, None) => break,
3675 (Some(a), None) => {
3676 components.push(a);
3677 components.extend(path_components.by_ref());
3678 break;
3679 }
3680 (None, _) => components.push(Component::ParentDir),
3681 (Some(a), Some(b)) if components.is_empty() && a == b => (),
3682 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
3683 (Some(a), Some(_)) => {
3684 components.push(Component::ParentDir);
3685 for _ in base_components {
3686 components.push(Component::ParentDir);
3687 }
3688 components.push(a);
3689 components.extend(path_components.by_ref());
3690 break;
3691 }
3692 }
3693 }
3694 components.iter().map(|c| c.as_os_str()).collect()
3695}
3696
3697#[cfg(test)]
3698mod tests {
3699 use super::{Event, *};
3700 use fs::RealFs;
3701 use futures::StreamExt;
3702 use gpui::test::subscribe;
3703 use language::{
3704 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
3705 };
3706 use lsp::Url;
3707 use serde_json::json;
3708 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
3709 use unindent::Unindent as _;
3710 use util::test::temp_tree;
3711 use worktree::WorktreeHandle as _;
3712
3713 #[gpui::test]
3714 async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
3715 let dir = temp_tree(json!({
3716 "root": {
3717 "apple": "",
3718 "banana": {
3719 "carrot": {
3720 "date": "",
3721 "endive": "",
3722 }
3723 },
3724 "fennel": {
3725 "grape": "",
3726 }
3727 }
3728 }));
3729
3730 let root_link_path = dir.path().join("root_link");
3731 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3732 unix::fs::symlink(
3733 &dir.path().join("root/fennel"),
3734 &dir.path().join("root/finnochio"),
3735 )
3736 .unwrap();
3737
3738 let project = Project::test(Arc::new(RealFs), &mut cx);
3739
3740 let (tree, _) = project
3741 .update(&mut cx, |project, cx| {
3742 project.find_or_create_local_worktree(&root_link_path, false, cx)
3743 })
3744 .await
3745 .unwrap();
3746
3747 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3748 .await;
3749 cx.read(|cx| {
3750 let tree = tree.read(cx);
3751 assert_eq!(tree.file_count(), 5);
3752 assert_eq!(
3753 tree.inode_for_path("fennel/grape"),
3754 tree.inode_for_path("finnochio/grape")
3755 );
3756 });
3757
3758 let cancel_flag = Default::default();
3759 let results = project
3760 .read_with(&cx, |project, cx| {
3761 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3762 })
3763 .await;
3764 assert_eq!(
3765 results
3766 .into_iter()
3767 .map(|result| result.path)
3768 .collect::<Vec<Arc<Path>>>(),
3769 vec![
3770 PathBuf::from("banana/carrot/date").into(),
3771 PathBuf::from("banana/carrot/endive").into(),
3772 ]
3773 );
3774 }
3775
3776 #[gpui::test]
3777 async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
3778 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3779 let progress_token = language_server_config
3780 .disk_based_diagnostics_progress_token
3781 .clone()
3782 .unwrap();
3783
3784 let language = Arc::new(Language::new(
3785 LanguageConfig {
3786 name: "Rust".into(),
3787 path_suffixes: vec!["rs".to_string()],
3788 language_server: Some(language_server_config),
3789 ..Default::default()
3790 },
3791 Some(tree_sitter_rust::language()),
3792 ));
3793
3794 let fs = FakeFs::new(cx.background());
3795 fs.insert_tree(
3796 "/dir",
3797 json!({
3798 "a.rs": "fn a() { A }",
3799 "b.rs": "const y: i32 = 1",
3800 }),
3801 )
3802 .await;
3803
3804 let project = Project::test(fs, &mut cx);
3805 project.update(&mut cx, |project, _| {
3806 Arc::get_mut(&mut project.languages).unwrap().add(language);
3807 });
3808
3809 let (tree, _) = project
3810 .update(&mut cx, |project, cx| {
3811 project.find_or_create_local_worktree("/dir", false, cx)
3812 })
3813 .await
3814 .unwrap();
3815 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3816
3817 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3818 .await;
3819
3820 // Cause worktree to start the fake language server
3821 let _buffer = project
3822 .update(&mut cx, |project, cx| {
3823 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3824 })
3825 .await
3826 .unwrap();
3827
3828 let mut events = subscribe(&project, &mut cx);
3829
3830 let mut fake_server = fake_servers.next().await.unwrap();
3831 fake_server.start_progress(&progress_token).await;
3832 assert_eq!(
3833 events.next().await.unwrap(),
3834 Event::DiskBasedDiagnosticsStarted
3835 );
3836
3837 fake_server.start_progress(&progress_token).await;
3838 fake_server.end_progress(&progress_token).await;
3839 fake_server.start_progress(&progress_token).await;
3840
3841 fake_server
3842 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3843 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3844 version: None,
3845 diagnostics: vec![lsp::Diagnostic {
3846 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3847 severity: Some(lsp::DiagnosticSeverity::ERROR),
3848 message: "undefined variable 'A'".to_string(),
3849 ..Default::default()
3850 }],
3851 })
3852 .await;
3853 assert_eq!(
3854 events.next().await.unwrap(),
3855 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3856 );
3857
3858 fake_server.end_progress(&progress_token).await;
3859 fake_server.end_progress(&progress_token).await;
3860 assert_eq!(
3861 events.next().await.unwrap(),
3862 Event::DiskBasedDiagnosticsUpdated
3863 );
3864 assert_eq!(
3865 events.next().await.unwrap(),
3866 Event::DiskBasedDiagnosticsFinished
3867 );
3868
3869 let buffer = project
3870 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3871 .await
3872 .unwrap();
3873
3874 buffer.read_with(&cx, |buffer, _| {
3875 let snapshot = buffer.snapshot();
3876 let diagnostics = snapshot
3877 .diagnostics_in_range::<_, Point>(0..buffer.len())
3878 .collect::<Vec<_>>();
3879 assert_eq!(
3880 diagnostics,
3881 &[DiagnosticEntry {
3882 range: Point::new(0, 9)..Point::new(0, 10),
3883 diagnostic: Diagnostic {
3884 severity: lsp::DiagnosticSeverity::ERROR,
3885 message: "undefined variable 'A'".to_string(),
3886 group_id: 0,
3887 is_primary: true,
3888 ..Default::default()
3889 }
3890 }]
3891 )
3892 });
3893 }
3894
3895 #[gpui::test]
3896 async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
3897 let dir = temp_tree(json!({
3898 "root": {
3899 "dir1": {},
3900 "dir2": {
3901 "dir3": {}
3902 }
3903 }
3904 }));
3905
3906 let project = Project::test(Arc::new(RealFs), &mut cx);
3907 let (tree, _) = project
3908 .update(&mut cx, |project, cx| {
3909 project.find_or_create_local_worktree(&dir.path(), false, cx)
3910 })
3911 .await
3912 .unwrap();
3913
3914 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3915 .await;
3916
3917 let cancel_flag = Default::default();
3918 let results = project
3919 .read_with(&cx, |project, cx| {
3920 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3921 })
3922 .await;
3923
3924 assert!(results.is_empty());
3925 }
3926
3927 #[gpui::test]
3928 async fn test_definition(mut cx: gpui::TestAppContext) {
3929 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3930 let language = Arc::new(Language::new(
3931 LanguageConfig {
3932 name: "Rust".into(),
3933 path_suffixes: vec!["rs".to_string()],
3934 language_server: Some(language_server_config),
3935 ..Default::default()
3936 },
3937 Some(tree_sitter_rust::language()),
3938 ));
3939
3940 let fs = FakeFs::new(cx.background());
3941 fs.insert_tree(
3942 "/dir",
3943 json!({
3944 "a.rs": "const fn a() { A }",
3945 "b.rs": "const y: i32 = crate::a()",
3946 }),
3947 )
3948 .await;
3949
3950 let project = Project::test(fs, &mut cx);
3951 project.update(&mut cx, |project, _| {
3952 Arc::get_mut(&mut project.languages).unwrap().add(language);
3953 });
3954
3955 let (tree, _) = project
3956 .update(&mut cx, |project, cx| {
3957 project.find_or_create_local_worktree("/dir/b.rs", false, cx)
3958 })
3959 .await
3960 .unwrap();
3961 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3962 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3963 .await;
3964
3965 let buffer = project
3966 .update(&mut cx, |project, cx| {
3967 project.open_buffer(
3968 ProjectPath {
3969 worktree_id,
3970 path: Path::new("").into(),
3971 },
3972 cx,
3973 )
3974 })
3975 .await
3976 .unwrap();
3977
3978 let mut fake_server = fake_servers.next().await.unwrap();
3979 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
3980 let params = params.text_document_position_params;
3981 assert_eq!(
3982 params.text_document.uri.to_file_path().unwrap(),
3983 Path::new("/dir/b.rs"),
3984 );
3985 assert_eq!(params.position, lsp::Position::new(0, 22));
3986
3987 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3988 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
3989 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3990 )))
3991 });
3992
3993 let mut definitions = project
3994 .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
3995 .await
3996 .unwrap();
3997
3998 assert_eq!(definitions.len(), 1);
3999 let definition = definitions.pop().unwrap();
4000 cx.update(|cx| {
4001 let target_buffer = definition.buffer.read(cx);
4002 assert_eq!(
4003 target_buffer
4004 .file()
4005 .unwrap()
4006 .as_local()
4007 .unwrap()
4008 .abs_path(cx),
4009 Path::new("/dir/a.rs"),
4010 );
4011 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
4012 assert_eq!(
4013 list_worktrees(&project, cx),
4014 [("/dir/b.rs".as_ref(), false), ("/dir/a.rs".as_ref(), true)]
4015 );
4016
4017 drop(definition);
4018 });
4019 cx.read(|cx| {
4020 assert_eq!(
4021 list_worktrees(&project, cx),
4022 [("/dir/b.rs".as_ref(), false)]
4023 );
4024 });
4025
4026 fn list_worktrees<'a>(
4027 project: &'a ModelHandle<Project>,
4028 cx: &'a AppContext,
4029 ) -> Vec<(&'a Path, bool)> {
4030 project
4031 .read(cx)
4032 .worktrees(cx)
4033 .map(|worktree| {
4034 let worktree = worktree.read(cx);
4035 (
4036 worktree.as_local().unwrap().abs_path().as_ref(),
4037 worktree.is_weak(),
4038 )
4039 })
4040 .collect::<Vec<_>>()
4041 }
4042 }
4043
4044 #[gpui::test]
4045 async fn test_save_file(mut cx: gpui::TestAppContext) {
4046 let fs = FakeFs::new(cx.background());
4047 fs.insert_tree(
4048 "/dir",
4049 json!({
4050 "file1": "the old contents",
4051 }),
4052 )
4053 .await;
4054
4055 let project = Project::test(fs.clone(), &mut cx);
4056 let worktree_id = project
4057 .update(&mut cx, |p, cx| {
4058 p.find_or_create_local_worktree("/dir", false, cx)
4059 })
4060 .await
4061 .unwrap()
4062 .0
4063 .read_with(&cx, |tree, _| tree.id());
4064
4065 let buffer = project
4066 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4067 .await
4068 .unwrap();
4069 buffer
4070 .update(&mut cx, |buffer, cx| {
4071 assert_eq!(buffer.text(), "the old contents");
4072 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4073 buffer.save(cx)
4074 })
4075 .await
4076 .unwrap();
4077
4078 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4079 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
4080 }
4081
4082 #[gpui::test]
4083 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
4084 let fs = FakeFs::new(cx.background());
4085 fs.insert_tree(
4086 "/dir",
4087 json!({
4088 "file1": "the old contents",
4089 }),
4090 )
4091 .await;
4092
4093 let project = Project::test(fs.clone(), &mut cx);
4094 let worktree_id = project
4095 .update(&mut cx, |p, cx| {
4096 p.find_or_create_local_worktree("/dir/file1", false, cx)
4097 })
4098 .await
4099 .unwrap()
4100 .0
4101 .read_with(&cx, |tree, _| tree.id());
4102
4103 let buffer = project
4104 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
4105 .await
4106 .unwrap();
4107 buffer
4108 .update(&mut cx, |buffer, cx| {
4109 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4110 buffer.save(cx)
4111 })
4112 .await
4113 .unwrap();
4114
4115 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4116 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
4117 }
4118
4119 #[gpui::test(retries = 5)]
4120 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
4121 let dir = temp_tree(json!({
4122 "a": {
4123 "file1": "",
4124 "file2": "",
4125 "file3": "",
4126 },
4127 "b": {
4128 "c": {
4129 "file4": "",
4130 "file5": "",
4131 }
4132 }
4133 }));
4134
4135 let project = Project::test(Arc::new(RealFs), &mut cx);
4136 let rpc = project.read_with(&cx, |p, _| p.client.clone());
4137
4138 let (tree, _) = project
4139 .update(&mut cx, |p, cx| {
4140 p.find_or_create_local_worktree(dir.path(), false, cx)
4141 })
4142 .await
4143 .unwrap();
4144 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
4145
4146 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4147 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
4148 async move { buffer.await.unwrap() }
4149 };
4150 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
4151 tree.read_with(cx, |tree, _| {
4152 tree.entry_for_path(path)
4153 .expect(&format!("no entry for path {}", path))
4154 .id
4155 })
4156 };
4157
4158 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
4159 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
4160 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
4161 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
4162
4163 let file2_id = id_for_path("a/file2", &cx);
4164 let file3_id = id_for_path("a/file3", &cx);
4165 let file4_id = id_for_path("b/c/file4", &cx);
4166
4167 // Wait for the initial scan.
4168 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4169 .await;
4170
4171 // Create a remote copy of this worktree.
4172 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
4173 let (remote, load_task) = cx.update(|cx| {
4174 Worktree::remote(
4175 1,
4176 1,
4177 initial_snapshot.to_proto(&Default::default(), Default::default()),
4178 rpc.clone(),
4179 cx,
4180 )
4181 });
4182 load_task.await;
4183
4184 cx.read(|cx| {
4185 assert!(!buffer2.read(cx).is_dirty());
4186 assert!(!buffer3.read(cx).is_dirty());
4187 assert!(!buffer4.read(cx).is_dirty());
4188 assert!(!buffer5.read(cx).is_dirty());
4189 });
4190
4191 // Rename and delete files and directories.
4192 tree.flush_fs_events(&cx).await;
4193 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
4194 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
4195 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
4196 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
4197 tree.flush_fs_events(&cx).await;
4198
4199 let expected_paths = vec![
4200 "a",
4201 "a/file1",
4202 "a/file2.new",
4203 "b",
4204 "d",
4205 "d/file3",
4206 "d/file4",
4207 ];
4208
4209 cx.read(|app| {
4210 assert_eq!(
4211 tree.read(app)
4212 .paths()
4213 .map(|p| p.to_str().unwrap())
4214 .collect::<Vec<_>>(),
4215 expected_paths
4216 );
4217
4218 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
4219 assert_eq!(id_for_path("d/file3", &cx), file3_id);
4220 assert_eq!(id_for_path("d/file4", &cx), file4_id);
4221
4222 assert_eq!(
4223 buffer2.read(app).file().unwrap().path().as_ref(),
4224 Path::new("a/file2.new")
4225 );
4226 assert_eq!(
4227 buffer3.read(app).file().unwrap().path().as_ref(),
4228 Path::new("d/file3")
4229 );
4230 assert_eq!(
4231 buffer4.read(app).file().unwrap().path().as_ref(),
4232 Path::new("d/file4")
4233 );
4234 assert_eq!(
4235 buffer5.read(app).file().unwrap().path().as_ref(),
4236 Path::new("b/c/file5")
4237 );
4238
4239 assert!(!buffer2.read(app).file().unwrap().is_deleted());
4240 assert!(!buffer3.read(app).file().unwrap().is_deleted());
4241 assert!(!buffer4.read(app).file().unwrap().is_deleted());
4242 assert!(buffer5.read(app).file().unwrap().is_deleted());
4243 });
4244
4245 // Update the remote worktree. Check that it becomes consistent with the
4246 // local worktree.
4247 remote.update(&mut cx, |remote, cx| {
4248 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
4249 &initial_snapshot,
4250 1,
4251 1,
4252 true,
4253 );
4254 remote
4255 .as_remote_mut()
4256 .unwrap()
4257 .snapshot
4258 .apply_remote_update(update_message)
4259 .unwrap();
4260
4261 assert_eq!(
4262 remote
4263 .paths()
4264 .map(|p| p.to_str().unwrap())
4265 .collect::<Vec<_>>(),
4266 expected_paths
4267 );
4268 });
4269 }
4270
4271 #[gpui::test]
4272 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
4273 let fs = FakeFs::new(cx.background());
4274 fs.insert_tree(
4275 "/the-dir",
4276 json!({
4277 "a.txt": "a-contents",
4278 "b.txt": "b-contents",
4279 }),
4280 )
4281 .await;
4282
4283 let project = Project::test(fs.clone(), &mut cx);
4284 let worktree_id = project
4285 .update(&mut cx, |p, cx| {
4286 p.find_or_create_local_worktree("/the-dir", false, cx)
4287 })
4288 .await
4289 .unwrap()
4290 .0
4291 .read_with(&cx, |tree, _| tree.id());
4292
4293 // Spawn multiple tasks to open paths, repeating some paths.
4294 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
4295 (
4296 p.open_buffer((worktree_id, "a.txt"), cx),
4297 p.open_buffer((worktree_id, "b.txt"), cx),
4298 p.open_buffer((worktree_id, "a.txt"), cx),
4299 )
4300 });
4301
4302 let buffer_a_1 = buffer_a_1.await.unwrap();
4303 let buffer_a_2 = buffer_a_2.await.unwrap();
4304 let buffer_b = buffer_b.await.unwrap();
4305 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
4306 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
4307
4308 // There is only one buffer per path.
4309 let buffer_a_id = buffer_a_1.id();
4310 assert_eq!(buffer_a_2.id(), buffer_a_id);
4311
4312 // Open the same path again while it is still open.
4313 drop(buffer_a_1);
4314 let buffer_a_3 = project
4315 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
4316 .await
4317 .unwrap();
4318
4319 // There's still only one buffer per path.
4320 assert_eq!(buffer_a_3.id(), buffer_a_id);
4321 }
4322
4323 #[gpui::test]
4324 async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
4325 use std::fs;
4326
4327 let dir = temp_tree(json!({
4328 "file1": "abc",
4329 "file2": "def",
4330 "file3": "ghi",
4331 }));
4332
4333 let project = Project::test(Arc::new(RealFs), &mut cx);
4334 let (worktree, _) = project
4335 .update(&mut cx, |p, cx| {
4336 p.find_or_create_local_worktree(dir.path(), false, cx)
4337 })
4338 .await
4339 .unwrap();
4340 let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
4341
4342 worktree.flush_fs_events(&cx).await;
4343 worktree
4344 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
4345 .await;
4346
4347 let buffer1 = project
4348 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4349 .await
4350 .unwrap();
4351 let events = Rc::new(RefCell::new(Vec::new()));
4352
4353 // initially, the buffer isn't dirty.
4354 buffer1.update(&mut cx, |buffer, cx| {
4355 cx.subscribe(&buffer1, {
4356 let events = events.clone();
4357 move |_, _, event, _| events.borrow_mut().push(event.clone())
4358 })
4359 .detach();
4360
4361 assert!(!buffer.is_dirty());
4362 assert!(events.borrow().is_empty());
4363
4364 buffer.edit(vec![1..2], "", cx);
4365 });
4366
4367 // after the first edit, the buffer is dirty, and emits a dirtied event.
4368 buffer1.update(&mut cx, |buffer, cx| {
4369 assert!(buffer.text() == "ac");
4370 assert!(buffer.is_dirty());
4371 assert_eq!(
4372 *events.borrow(),
4373 &[language::Event::Edited, language::Event::Dirtied]
4374 );
4375 events.borrow_mut().clear();
4376 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
4377 });
4378
4379 // after saving, the buffer is not dirty, and emits a saved event.
4380 buffer1.update(&mut cx, |buffer, cx| {
4381 assert!(!buffer.is_dirty());
4382 assert_eq!(*events.borrow(), &[language::Event::Saved]);
4383 events.borrow_mut().clear();
4384
4385 buffer.edit(vec![1..1], "B", cx);
4386 buffer.edit(vec![2..2], "D", cx);
4387 });
4388
4389 // after editing again, the buffer is dirty, and emits another dirty event.
4390 buffer1.update(&mut cx, |buffer, cx| {
4391 assert!(buffer.text() == "aBDc");
4392 assert!(buffer.is_dirty());
4393 assert_eq!(
4394 *events.borrow(),
4395 &[
4396 language::Event::Edited,
4397 language::Event::Dirtied,
4398 language::Event::Edited,
4399 ],
4400 );
4401 events.borrow_mut().clear();
4402
4403 // TODO - currently, after restoring the buffer to its
4404 // previously-saved state, the is still considered dirty.
4405 buffer.edit([1..3], "", cx);
4406 assert!(buffer.text() == "ac");
4407 assert!(buffer.is_dirty());
4408 });
4409
4410 assert_eq!(*events.borrow(), &[language::Event::Edited]);
4411
4412 // When a file is deleted, the buffer is considered dirty.
4413 let events = Rc::new(RefCell::new(Vec::new()));
4414 let buffer2 = project
4415 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
4416 .await
4417 .unwrap();
4418 buffer2.update(&mut cx, |_, cx| {
4419 cx.subscribe(&buffer2, {
4420 let events = events.clone();
4421 move |_, _, event, _| events.borrow_mut().push(event.clone())
4422 })
4423 .detach();
4424 });
4425
4426 fs::remove_file(dir.path().join("file2")).unwrap();
4427 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
4428 assert_eq!(
4429 *events.borrow(),
4430 &[language::Event::Dirtied, language::Event::FileHandleChanged]
4431 );
4432
4433 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4434 let events = Rc::new(RefCell::new(Vec::new()));
4435 let buffer3 = project
4436 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
4437 .await
4438 .unwrap();
4439 buffer3.update(&mut cx, |_, cx| {
4440 cx.subscribe(&buffer3, {
4441 let events = events.clone();
4442 move |_, _, event, _| events.borrow_mut().push(event.clone())
4443 })
4444 .detach();
4445 });
4446
4447 worktree.flush_fs_events(&cx).await;
4448 buffer3.update(&mut cx, |buffer, cx| {
4449 buffer.edit(Some(0..0), "x", cx);
4450 });
4451 events.borrow_mut().clear();
4452 fs::remove_file(dir.path().join("file3")).unwrap();
4453 buffer3
4454 .condition(&cx, |_, _| !events.borrow().is_empty())
4455 .await;
4456 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
4457 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
4458 }
4459
4460 #[gpui::test]
4461 async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
4462 use std::fs;
4463
4464 let initial_contents = "aaa\nbbbbb\nc\n";
4465 let dir = temp_tree(json!({ "the-file": initial_contents }));
4466
4467 let project = Project::test(Arc::new(RealFs), &mut cx);
4468 let (worktree, _) = project
4469 .update(&mut cx, |p, cx| {
4470 p.find_or_create_local_worktree(dir.path(), false, cx)
4471 })
4472 .await
4473 .unwrap();
4474 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
4475
4476 worktree
4477 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
4478 .await;
4479
4480 let abs_path = dir.path().join("the-file");
4481 let buffer = project
4482 .update(&mut cx, |p, cx| {
4483 p.open_buffer((worktree_id, "the-file"), cx)
4484 })
4485 .await
4486 .unwrap();
4487
4488 // TODO
4489 // Add a cursor on each row.
4490 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
4491 // assert!(!buffer.is_dirty());
4492 // buffer.add_selection_set(
4493 // &(0..3)
4494 // .map(|row| Selection {
4495 // id: row as usize,
4496 // start: Point::new(row, 1),
4497 // end: Point::new(row, 1),
4498 // reversed: false,
4499 // goal: SelectionGoal::None,
4500 // })
4501 // .collect::<Vec<_>>(),
4502 // cx,
4503 // )
4504 // });
4505
4506 // Change the file on disk, adding two new lines of text, and removing
4507 // one line.
4508 buffer.read_with(&cx, |buffer, _| {
4509 assert!(!buffer.is_dirty());
4510 assert!(!buffer.has_conflict());
4511 });
4512 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
4513 fs::write(&abs_path, new_contents).unwrap();
4514
4515 // Because the buffer was not modified, it is reloaded from disk. Its
4516 // contents are edited according to the diff between the old and new
4517 // file contents.
4518 buffer
4519 .condition(&cx, |buffer, _| buffer.text() == new_contents)
4520 .await;
4521
4522 buffer.update(&mut cx, |buffer, _| {
4523 assert_eq!(buffer.text(), new_contents);
4524 assert!(!buffer.is_dirty());
4525 assert!(!buffer.has_conflict());
4526
4527 // TODO
4528 // let cursor_positions = buffer
4529 // .selection_set(selection_set_id)
4530 // .unwrap()
4531 // .selections::<Point>(&*buffer)
4532 // .map(|selection| {
4533 // assert_eq!(selection.start, selection.end);
4534 // selection.start
4535 // })
4536 // .collect::<Vec<_>>();
4537 // assert_eq!(
4538 // cursor_positions,
4539 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
4540 // );
4541 });
4542
4543 // Modify the buffer
4544 buffer.update(&mut cx, |buffer, cx| {
4545 buffer.edit(vec![0..0], " ", cx);
4546 assert!(buffer.is_dirty());
4547 assert!(!buffer.has_conflict());
4548 });
4549
4550 // Change the file on disk again, adding blank lines to the beginning.
4551 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
4552
4553 // Because the buffer is modified, it doesn't reload from disk, but is
4554 // marked as having a conflict.
4555 buffer
4556 .condition(&cx, |buffer, _| buffer.has_conflict())
4557 .await;
4558 }
4559
4560 #[gpui::test]
4561 async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
4562 let fs = FakeFs::new(cx.background());
4563 fs.insert_tree(
4564 "/the-dir",
4565 json!({
4566 "a.rs": "
4567 fn foo(mut v: Vec<usize>) {
4568 for x in &v {
4569 v.push(1);
4570 }
4571 }
4572 "
4573 .unindent(),
4574 }),
4575 )
4576 .await;
4577
4578 let project = Project::test(fs.clone(), &mut cx);
4579 let (worktree, _) = project
4580 .update(&mut cx, |p, cx| {
4581 p.find_or_create_local_worktree("/the-dir", false, cx)
4582 })
4583 .await
4584 .unwrap();
4585 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
4586
4587 let buffer = project
4588 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4589 .await
4590 .unwrap();
4591
4592 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
4593 let message = lsp::PublishDiagnosticsParams {
4594 uri: buffer_uri.clone(),
4595 diagnostics: vec![
4596 lsp::Diagnostic {
4597 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4598 severity: Some(DiagnosticSeverity::WARNING),
4599 message: "error 1".to_string(),
4600 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4601 location: lsp::Location {
4602 uri: buffer_uri.clone(),
4603 range: lsp::Range::new(
4604 lsp::Position::new(1, 8),
4605 lsp::Position::new(1, 9),
4606 ),
4607 },
4608 message: "error 1 hint 1".to_string(),
4609 }]),
4610 ..Default::default()
4611 },
4612 lsp::Diagnostic {
4613 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4614 severity: Some(DiagnosticSeverity::HINT),
4615 message: "error 1 hint 1".to_string(),
4616 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4617 location: lsp::Location {
4618 uri: buffer_uri.clone(),
4619 range: lsp::Range::new(
4620 lsp::Position::new(1, 8),
4621 lsp::Position::new(1, 9),
4622 ),
4623 },
4624 message: "original diagnostic".to_string(),
4625 }]),
4626 ..Default::default()
4627 },
4628 lsp::Diagnostic {
4629 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4630 severity: Some(DiagnosticSeverity::ERROR),
4631 message: "error 2".to_string(),
4632 related_information: Some(vec![
4633 lsp::DiagnosticRelatedInformation {
4634 location: lsp::Location {
4635 uri: buffer_uri.clone(),
4636 range: lsp::Range::new(
4637 lsp::Position::new(1, 13),
4638 lsp::Position::new(1, 15),
4639 ),
4640 },
4641 message: "error 2 hint 1".to_string(),
4642 },
4643 lsp::DiagnosticRelatedInformation {
4644 location: lsp::Location {
4645 uri: buffer_uri.clone(),
4646 range: lsp::Range::new(
4647 lsp::Position::new(1, 13),
4648 lsp::Position::new(1, 15),
4649 ),
4650 },
4651 message: "error 2 hint 2".to_string(),
4652 },
4653 ]),
4654 ..Default::default()
4655 },
4656 lsp::Diagnostic {
4657 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4658 severity: Some(DiagnosticSeverity::HINT),
4659 message: "error 2 hint 1".to_string(),
4660 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4661 location: lsp::Location {
4662 uri: buffer_uri.clone(),
4663 range: lsp::Range::new(
4664 lsp::Position::new(2, 8),
4665 lsp::Position::new(2, 17),
4666 ),
4667 },
4668 message: "original diagnostic".to_string(),
4669 }]),
4670 ..Default::default()
4671 },
4672 lsp::Diagnostic {
4673 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4674 severity: Some(DiagnosticSeverity::HINT),
4675 message: "error 2 hint 2".to_string(),
4676 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4677 location: lsp::Location {
4678 uri: buffer_uri.clone(),
4679 range: lsp::Range::new(
4680 lsp::Position::new(2, 8),
4681 lsp::Position::new(2, 17),
4682 ),
4683 },
4684 message: "original diagnostic".to_string(),
4685 }]),
4686 ..Default::default()
4687 },
4688 ],
4689 version: None,
4690 };
4691
4692 project
4693 .update(&mut cx, |p, cx| {
4694 p.update_diagnostics(message, &Default::default(), cx)
4695 })
4696 .unwrap();
4697 let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
4698
4699 assert_eq!(
4700 buffer
4701 .diagnostics_in_range::<_, Point>(0..buffer.len())
4702 .collect::<Vec<_>>(),
4703 &[
4704 DiagnosticEntry {
4705 range: Point::new(1, 8)..Point::new(1, 9),
4706 diagnostic: Diagnostic {
4707 severity: DiagnosticSeverity::WARNING,
4708 message: "error 1".to_string(),
4709 group_id: 0,
4710 is_primary: true,
4711 ..Default::default()
4712 }
4713 },
4714 DiagnosticEntry {
4715 range: Point::new(1, 8)..Point::new(1, 9),
4716 diagnostic: Diagnostic {
4717 severity: DiagnosticSeverity::HINT,
4718 message: "error 1 hint 1".to_string(),
4719 group_id: 0,
4720 is_primary: false,
4721 ..Default::default()
4722 }
4723 },
4724 DiagnosticEntry {
4725 range: Point::new(1, 13)..Point::new(1, 15),
4726 diagnostic: Diagnostic {
4727 severity: DiagnosticSeverity::HINT,
4728 message: "error 2 hint 1".to_string(),
4729 group_id: 1,
4730 is_primary: false,
4731 ..Default::default()
4732 }
4733 },
4734 DiagnosticEntry {
4735 range: Point::new(1, 13)..Point::new(1, 15),
4736 diagnostic: Diagnostic {
4737 severity: DiagnosticSeverity::HINT,
4738 message: "error 2 hint 2".to_string(),
4739 group_id: 1,
4740 is_primary: false,
4741 ..Default::default()
4742 }
4743 },
4744 DiagnosticEntry {
4745 range: Point::new(2, 8)..Point::new(2, 17),
4746 diagnostic: Diagnostic {
4747 severity: DiagnosticSeverity::ERROR,
4748 message: "error 2".to_string(),
4749 group_id: 1,
4750 is_primary: true,
4751 ..Default::default()
4752 }
4753 }
4754 ]
4755 );
4756
4757 assert_eq!(
4758 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4759 &[
4760 DiagnosticEntry {
4761 range: Point::new(1, 8)..Point::new(1, 9),
4762 diagnostic: Diagnostic {
4763 severity: DiagnosticSeverity::WARNING,
4764 message: "error 1".to_string(),
4765 group_id: 0,
4766 is_primary: true,
4767 ..Default::default()
4768 }
4769 },
4770 DiagnosticEntry {
4771 range: Point::new(1, 8)..Point::new(1, 9),
4772 diagnostic: Diagnostic {
4773 severity: DiagnosticSeverity::HINT,
4774 message: "error 1 hint 1".to_string(),
4775 group_id: 0,
4776 is_primary: false,
4777 ..Default::default()
4778 }
4779 },
4780 ]
4781 );
4782 assert_eq!(
4783 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4784 &[
4785 DiagnosticEntry {
4786 range: Point::new(1, 13)..Point::new(1, 15),
4787 diagnostic: Diagnostic {
4788 severity: DiagnosticSeverity::HINT,
4789 message: "error 2 hint 1".to_string(),
4790 group_id: 1,
4791 is_primary: false,
4792 ..Default::default()
4793 }
4794 },
4795 DiagnosticEntry {
4796 range: Point::new(1, 13)..Point::new(1, 15),
4797 diagnostic: Diagnostic {
4798 severity: DiagnosticSeverity::HINT,
4799 message: "error 2 hint 2".to_string(),
4800 group_id: 1,
4801 is_primary: false,
4802 ..Default::default()
4803 }
4804 },
4805 DiagnosticEntry {
4806 range: Point::new(2, 8)..Point::new(2, 17),
4807 diagnostic: Diagnostic {
4808 severity: DiagnosticSeverity::ERROR,
4809 message: "error 2".to_string(),
4810 group_id: 1,
4811 is_primary: true,
4812 ..Default::default()
4813 }
4814 }
4815 ]
4816 );
4817 }
4818
4819 #[gpui::test]
4820 async fn test_rename(mut cx: gpui::TestAppContext) {
4821 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4822 let language = Arc::new(Language::new(
4823 LanguageConfig {
4824 name: "Rust".into(),
4825 path_suffixes: vec!["rs".to_string()],
4826 language_server: Some(language_server_config),
4827 ..Default::default()
4828 },
4829 Some(tree_sitter_rust::language()),
4830 ));
4831
4832 let fs = FakeFs::new(cx.background());
4833 fs.insert_tree(
4834 "/dir",
4835 json!({
4836 "one.rs": "const ONE: usize = 1;",
4837 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4838 }),
4839 )
4840 .await;
4841
4842 let project = Project::test(fs.clone(), &mut cx);
4843 project.update(&mut cx, |project, _| {
4844 Arc::get_mut(&mut project.languages).unwrap().add(language);
4845 });
4846
4847 let (tree, _) = project
4848 .update(&mut cx, |project, cx| {
4849 project.find_or_create_local_worktree("/dir", false, cx)
4850 })
4851 .await
4852 .unwrap();
4853 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
4854 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4855 .await;
4856
4857 let buffer = project
4858 .update(&mut cx, |project, cx| {
4859 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
4860 })
4861 .await
4862 .unwrap();
4863
4864 let mut fake_server = fake_servers.next().await.unwrap();
4865
4866 let response = project.update(&mut cx, |project, cx| {
4867 project.prepare_rename(buffer.clone(), 7, cx)
4868 });
4869 fake_server
4870 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
4871 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
4872 assert_eq!(params.position, lsp::Position::new(0, 7));
4873 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4874 lsp::Position::new(0, 6),
4875 lsp::Position::new(0, 9),
4876 )))
4877 })
4878 .next()
4879 .await
4880 .unwrap();
4881 let range = response.await.unwrap().unwrap();
4882 let range = buffer.read_with(&cx, |buffer, _| range.to_offset(buffer));
4883 assert_eq!(range, 6..9);
4884
4885 let response = project.update(&mut cx, |project, cx| {
4886 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
4887 });
4888 fake_server
4889 .handle_request::<lsp::request::Rename, _>(|params, _| {
4890 assert_eq!(
4891 params.text_document_position.text_document.uri.as_str(),
4892 "file:///dir/one.rs"
4893 );
4894 assert_eq!(
4895 params.text_document_position.position,
4896 lsp::Position::new(0, 7)
4897 );
4898 assert_eq!(params.new_name, "THREE");
4899 Some(lsp::WorkspaceEdit {
4900 changes: Some(
4901 [
4902 (
4903 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
4904 vec![lsp::TextEdit::new(
4905 lsp::Range::new(
4906 lsp::Position::new(0, 6),
4907 lsp::Position::new(0, 9),
4908 ),
4909 "THREE".to_string(),
4910 )],
4911 ),
4912 (
4913 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
4914 vec![
4915 lsp::TextEdit::new(
4916 lsp::Range::new(
4917 lsp::Position::new(0, 24),
4918 lsp::Position::new(0, 27),
4919 ),
4920 "THREE".to_string(),
4921 ),
4922 lsp::TextEdit::new(
4923 lsp::Range::new(
4924 lsp::Position::new(0, 35),
4925 lsp::Position::new(0, 38),
4926 ),
4927 "THREE".to_string(),
4928 ),
4929 ],
4930 ),
4931 ]
4932 .into_iter()
4933 .collect(),
4934 ),
4935 ..Default::default()
4936 })
4937 })
4938 .next()
4939 .await
4940 .unwrap();
4941 let mut transaction = response.await.unwrap().0;
4942 assert_eq!(transaction.len(), 2);
4943 assert_eq!(
4944 transaction
4945 .remove_entry(&buffer)
4946 .unwrap()
4947 .0
4948 .read_with(&cx, |buffer, _| buffer.text()),
4949 "const THREE: usize = 1;"
4950 );
4951 assert_eq!(
4952 transaction
4953 .into_keys()
4954 .next()
4955 .unwrap()
4956 .read_with(&cx, |buffer, _| buffer.text()),
4957 "const TWO: usize = one::THREE + one::THREE;"
4958 );
4959 }
4960
4961 #[gpui::test]
4962 async fn test_search(mut cx: gpui::TestAppContext) {
4963 let fs = FakeFs::new(cx.background());
4964 fs.insert_tree(
4965 "/dir",
4966 json!({
4967 "one.rs": "const ONE: usize = 1;",
4968 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4969 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4970 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4971 }),
4972 )
4973 .await;
4974 let project = Project::test(fs.clone(), &mut cx);
4975 let (tree, _) = project
4976 .update(&mut cx, |project, cx| {
4977 project.find_or_create_local_worktree("/dir", false, cx)
4978 })
4979 .await
4980 .unwrap();
4981 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
4982 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4983 .await;
4984
4985 assert_eq!(
4986 search(&project, SearchQuery::text("TWO", false, true), &mut cx)
4987 .await
4988 .unwrap(),
4989 HashMap::from_iter([
4990 ("two.rs".to_string(), vec![6..9]),
4991 ("three.rs".to_string(), vec![37..40])
4992 ])
4993 );
4994
4995 let buffer_4 = project
4996 .update(&mut cx, |project, cx| {
4997 project.open_buffer((worktree_id, "four.rs"), cx)
4998 })
4999 .await
5000 .unwrap();
5001 buffer_4.update(&mut cx, |buffer, cx| {
5002 buffer.edit([20..28, 31..43], "two::TWO", cx);
5003 });
5004
5005 assert_eq!(
5006 search(&project, SearchQuery::text("TWO", false, true), &mut cx)
5007 .await
5008 .unwrap(),
5009 HashMap::from_iter([
5010 ("two.rs".to_string(), vec![6..9]),
5011 ("three.rs".to_string(), vec![37..40]),
5012 ("four.rs".to_string(), vec![25..28, 36..39])
5013 ])
5014 );
5015
5016 async fn search(
5017 project: &ModelHandle<Project>,
5018 query: SearchQuery,
5019 cx: &mut gpui::TestAppContext,
5020 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
5021 let results = project
5022 .update(cx, |project, cx| project.search(query, cx))
5023 .await?;
5024
5025 Ok(results
5026 .into_iter()
5027 .map(|(buffer, ranges)| {
5028 buffer.read_with(cx, |buffer, _| {
5029 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
5030 let ranges = ranges
5031 .into_iter()
5032 .map(|range| range.to_offset(buffer))
5033 .collect::<Vec<_>>();
5034 (path, ranges)
5035 })
5036 })
5037 .collect())
5038 }
5039 }
5040}