use std::{
    io::{BufRead, BufReader},
    path::{Path, PathBuf},
    pin::pin,
    sync::{atomic::AtomicUsize, Arc},
};

use anyhow::{anyhow, Context as _, Result};
use collections::{HashMap, HashSet};
use fs::Fs;
use futures::{
    future::{BoxFuture, Shared},
    FutureExt, SinkExt,
};
use gpui::{App, AsyncApp, Context, Entity, EntityId, EventEmitter, Task, WeakEntity};
use postage::oneshot;
use rpc::{
    proto::{self, SSH_PROJECT_ID},
    AnyProtoClient, ErrorExt, TypedEnvelope,
};
use smol::{
    channel::{Receiver, Sender},
    stream::StreamExt,
};
use text::ReplicaId;
use util::{paths::SanitizedPath, ResultExt};
use worktree::{Entry, ProjectEntryId, UpdatedEntriesSet, Worktree, WorktreeId, WorktreeSettings};

use crate::{search::SearchQuery, ProjectPath};

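/// A file that passed the path-based filters during a search. `respond` is used to report the
/// file's project path back to the caller if its contents also match the query.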
struct MatchingEntry {
    worktree_path: Arc<Path>,
    path: ProjectPath,
    respond: oneshot::Sender<ProjectPath>,
}

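/// Whether this store manages worktrees on the local filesystem, or mirrors worktrees owned by an
/// upstream host (SSH or collab) through an RPC client.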
enum WorktreeStoreState {
    Local {
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
    },
}

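/// Tracks the set of worktrees that make up a project, whether they are backed by the local
/// filesystem or by an upstream remote, and relays worktree updates to any downstream client the
/// project is shared with.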
pub struct WorktreeStore {
    next_entry_id: Arc<AtomicUsize>,
    downstream_client: Option<(AnyProtoClient, u64)>,
    retain_worktrees: bool,
    worktrees: Vec<WorktreeHandle>,
    worktrees_reordered: bool,
    #[allow(clippy::type_complexity)]
    loading_worktrees:
        HashMap<SanitizedPath, Shared<Task<Result<Entity<Worktree>, Arc<anyhow::Error>>>>>,
    state: WorktreeStoreState,
}

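/// Events emitted as worktrees are added, removed, released, reordered, or updated.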
#[derive(Debug)]
pub enum WorktreeStoreEvent {
    WorktreeAdded(Entity<Worktree>),
    WorktreeRemoved(EntityId, WorktreeId),
    WorktreeReleased(EntityId, WorktreeId),
    WorktreeOrderChanged,
    WorktreeUpdateSent(Entity<Worktree>),
    WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
    WorktreeUpdatedGitRepositories(WorktreeId),
    WorktreeDeletedEntry(WorktreeId, ProjectEntryId),
}

impl EventEmitter<WorktreeStoreEvent> for WorktreeStore {}

impl WorktreeStore {
    pub fn init(client: &AnyProtoClient) {
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_expand_project_entry);
        client.add_model_request_handler(Self::handle_expand_all_for_project_entry);
        client.add_model_request_handler(Self::handle_git_branches);
        client.add_model_request_handler(Self::handle_update_branch);
    }

    pub fn local(retain_worktrees: bool, fs: Arc<dyn Fs>) -> Self {
        Self {
            next_entry_id: Default::default(),
            loading_worktrees: Default::default(),
            downstream_client: None,
            worktrees: Vec::new(),
            worktrees_reordered: false,
            retain_worktrees,
            state: WorktreeStoreState::Local { fs },
        }
    }

    pub fn remote(
        retain_worktrees: bool,
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
    ) -> Self {
        Self {
            next_entry_id: Default::default(),
            loading_worktrees: Default::default(),
            downstream_client: None,
            worktrees: Vec::new(),
            worktrees_reordered: false,
            retain_worktrees,
            state: WorktreeStoreState::Remote {
                upstream_client,
                upstream_project_id,
            },
        }
    }

    /// Iterates through all worktrees, including ones that don't appear in the project panel
    pub fn worktrees(&self) -> impl '_ + DoubleEndedIterator<Item = Entity<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade())
    }

    /// Iterates through all user-visible worktrees, the ones that appear in the project panel.
    pub fn visible_worktrees<'a>(
        &'a self,
        cx: &'a App,
    ) -> impl 'a + DoubleEndedIterator<Item = Entity<Worktree>> {
        self.worktrees()
            .filter(|worktree| worktree.read(cx).is_visible())
    }

    pub fn worktree_for_id(&self, id: WorktreeId, cx: &App) -> Option<Entity<Worktree>> {
        self.worktrees()
            .find(|worktree| worktree.read(cx).id() == id)
    }

    pub fn current_branch(&self, repository: ProjectPath, cx: &App) -> Option<Arc<str>> {
        self.worktree_for_id(repository.worktree_id, cx)?
            .read(cx)
            .git_entry(repository.path)?
            .branch()
    }

    pub fn worktree_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &App,
    ) -> Option<Entity<Worktree>> {
        self.worktrees()
            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
    }

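    /// Returns the worktree whose root contains `abs_path`, along with the path relative to that
    /// worktree's root.
    ///
    /// A minimal usage sketch (the `store`, `cx`, and path below are hypothetical, shown only to
    /// illustrate the return value):
    ///
    /// ```ignore
    /// if let Some((worktree, relative_path)) = store.find_worktree(Path::new("/repo/src/main.rs"), cx) {
    ///     // `relative_path` is the part of the path below the worktree root, e.g. "src/main.rs".
    /// }
    /// ```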
    pub fn find_worktree(
        &self,
        abs_path: impl Into<SanitizedPath>,
        cx: &App,
    ) -> Option<(Entity<Worktree>, PathBuf)> {
        let abs_path: SanitizedPath = abs_path.into();
        for tree in self.worktrees() {
            if let Ok(relative_path) = abs_path.as_path().strip_prefix(tree.read(cx).abs_path()) {
                return Some((tree.clone(), relative_path.into()));
            }
        }
        None
    }

    pub fn find_or_create_worktree(
        &mut self,
        abs_path: impl AsRef<Path>,
        visible: bool,
        cx: &mut Context<Self>,
    ) -> Task<Result<(Entity<Worktree>, PathBuf)>> {
        let abs_path = abs_path.as_ref();
        if let Some((tree, relative_path)) = self.find_worktree(abs_path, cx) {
            Task::ready(Ok((tree, relative_path)))
        } else {
            let worktree = self.create_worktree(abs_path, visible, cx);
            cx.background_executor()
                .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
        }
    }

    pub fn entry_for_id<'a>(&'a self, entry_id: ProjectEntryId, cx: &'a App) -> Option<&'a Entry> {
        self.worktrees()
            .find_map(|worktree| worktree.read(cx).entry_for_id(entry_id))
    }

    pub fn worktree_and_entry_for_id<'a>(
        &'a self,
        entry_id: ProjectEntryId,
        cx: &'a App,
    ) -> Option<(Entity<Worktree>, &'a Entry)> {
        self.worktrees().find_map(|worktree| {
            worktree
                .read(cx)
                .entry_for_id(entry_id)
                .map(|e| (worktree.clone(), e))
        })
    }

    pub fn entry_for_path(&self, path: &ProjectPath, cx: &App) -> Option<Entry> {
        self.worktree_for_id(path.worktree_id, cx)?
            .read(cx)
            .entry_for_path(&path.path)
            .cloned()
    }

    pub fn create_worktree(
        &mut self,
        abs_path: impl Into<SanitizedPath>,
        visible: bool,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Worktree>>> {
        let abs_path: SanitizedPath = abs_path.into();
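        // Deduplicate concurrent requests: if a worktree for this path is already being loaded,
        // every caller shares the same `Shared` task instead of starting a new load.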
        if !self.loading_worktrees.contains_key(&abs_path) {
            let task = match &self.state {
                WorktreeStoreState::Remote {
                    upstream_client, ..
                } => {
                    if upstream_client.is_via_collab() {
                        Task::ready(Err(Arc::new(anyhow!("cannot create worktrees via collab"))))
                    } else {
                        self.create_ssh_worktree(
                            upstream_client.clone(),
                            abs_path.clone(),
                            visible,
                            cx,
                        )
                    }
                }
                WorktreeStoreState::Local { fs } => {
                    self.create_local_worktree(fs.clone(), abs_path.clone(), visible, cx)
                }
            };

            self.loading_worktrees
                .insert(abs_path.clone(), task.shared());
        }
        let task = self.loading_worktrees.get(&abs_path).unwrap().clone();
        cx.spawn(|this, mut cx| async move {
            let result = task.await;
            this.update(&mut cx, |this, _| this.loading_worktrees.remove(&abs_path))
                .ok();
            match result {
                Ok(worktree) => Ok(worktree),
                Err(err) => Err((*err).cloned()),
            }
        })
    }

    fn create_ssh_worktree(
        &mut self,
        client: AnyProtoClient,
        abs_path: impl Into<SanitizedPath>,
        visible: bool,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Worktree>, Arc<anyhow::Error>>> {
        let mut abs_path = Into::<SanitizedPath>::into(abs_path).to_string();
        // If we start with `/~` that means the ssh path was something like `ssh://user@host/~/home-dir-folder/`
        // in which case we want to strip the leading `/`.
        // On the host-side, the `~` will get expanded.
        // That's what git does too: https://github.com/libgit2/libgit2/issues/3345#issuecomment-127050850
        if abs_path.starts_with("/~") {
            abs_path = abs_path[1..].to_string();
        }
        if abs_path.is_empty() || abs_path == "/" {
            abs_path = "~/".to_string();
        }
        cx.spawn(|this, mut cx| async move {
            let this = this.upgrade().context("Dropped worktree store")?;

            let response = client
                .request(proto::AddWorktree {
                    project_id: SSH_PROJECT_ID,
                    path: abs_path.clone(),
                    visible,
                })
                .await?;

            if let Some(existing_worktree) = this.read_with(&cx, |this, cx| {
                this.worktree_for_id(WorktreeId::from_proto(response.worktree_id), cx)
            })? {
                return Ok(existing_worktree);
            }

            let root_name = PathBuf::from(&response.canonicalized_path)
                .file_name()
                .map(|n| n.to_string_lossy().to_string())
                .unwrap_or(response.canonicalized_path.to_string());

            let worktree = cx.update(|cx| {
                Worktree::remote(
                    SSH_PROJECT_ID,
                    0,
                    proto::WorktreeMetadata {
                        id: response.worktree_id,
                        root_name,
                        visible,
                        abs_path: response.canonicalized_path,
                    },
                    client,
                    cx,
                )
            })?;

            this.update(&mut cx, |this, cx| {
                this.add(&worktree, cx);
            })?;
            Ok(worktree)
        })
    }

    fn create_local_worktree(
        &mut self,
        fs: Arc<dyn Fs>,
        abs_path: impl Into<SanitizedPath>,
        visible: bool,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Worktree>, Arc<anyhow::Error>>> {
        let next_entry_id = self.next_entry_id.clone();
        let path: SanitizedPath = abs_path.into();

        cx.spawn(move |this, mut cx| async move {
            let worktree = Worktree::local(path.clone(), visible, fs, next_entry_id, &mut cx).await;

            let worktree = worktree?;

            this.update(&mut cx, |this, cx| this.add(&worktree, cx))?;

            if visible {
                cx.update(|cx| {
                    cx.add_recent_document(path.as_path());
                })
                .log_err();
            }

            Ok(worktree)
        })
    }

    pub fn add(&mut self, worktree: &Entity<Worktree>, cx: &mut Context<Self>) {
        let worktree_id = worktree.read(cx).id();
        debug_assert!(self.worktrees().all(|w| w.read(cx).id() != worktree_id));

        let push_strong_handle = self.retain_worktrees || worktree.read(cx).is_visible();
        let handle = if push_strong_handle {
            WorktreeHandle::Strong(worktree.clone())
        } else {
            WorktreeHandle::Weak(worktree.downgrade())
        };
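        // Keep the list sorted by absolute path unless the user has explicitly reordered
        // worktrees, in which case new ones are simply appended.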
        if self.worktrees_reordered {
            self.worktrees.push(handle);
        } else {
            let i = match self
                .worktrees
                .binary_search_by_key(&Some(worktree.read(cx).abs_path()), |other| {
                    other.upgrade().map(|worktree| worktree.read(cx).abs_path())
                }) {
                Ok(i) | Err(i) => i,
            };
            self.worktrees.insert(i, handle);
        }

        cx.emit(WorktreeStoreEvent::WorktreeAdded(worktree.clone()));
        self.send_project_updates(cx);

        let handle_id = worktree.entity_id();
        cx.subscribe(worktree, |_, worktree, event, cx| {
            let worktree_id = worktree.update(cx, |worktree, _| worktree.id());
            match event {
                worktree::Event::UpdatedEntries(changes) => {
                    cx.emit(WorktreeStoreEvent::WorktreeUpdatedEntries(
                        worktree.read(cx).id(),
                        changes.clone(),
                    ));
                }
                worktree::Event::UpdatedGitRepositories(_) => {
                    cx.emit(WorktreeStoreEvent::WorktreeUpdatedGitRepositories(
                        worktree_id,
                    ));
                }
                worktree::Event::DeletedEntry(id) => {
                    cx.emit(WorktreeStoreEvent::WorktreeDeletedEntry(worktree_id, *id))
                }
            }
        })
        .detach();
        cx.observe_release(worktree, move |this, worktree, cx| {
            cx.emit(WorktreeStoreEvent::WorktreeReleased(
                handle_id,
                worktree.id(),
            ));
            cx.emit(WorktreeStoreEvent::WorktreeRemoved(
                handle_id,
                worktree.id(),
            ));
            this.send_project_updates(cx);
        })
        .detach();
    }

    pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut Context<Self>) {
        self.worktrees.retain(|worktree| {
            if let Some(worktree) = worktree.upgrade() {
                if worktree.read(cx).id() == id_to_remove {
                    cx.emit(WorktreeStoreEvent::WorktreeRemoved(
                        worktree.entity_id(),
                        id_to_remove,
                    ));
                    false
                } else {
                    true
                }
            } else {
                false
            }
        });
        self.send_project_updates(cx);
    }

    pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool) {
        self.worktrees_reordered = worktrees_reordered;
    }

    fn upstream_client(&self) -> Option<(AnyProtoClient, u64)> {
        match &self.state {
            WorktreeStoreState::Remote {
                upstream_client,
                upstream_project_id,
                ..
            } => Some((upstream_client.clone(), *upstream_project_id)),
            WorktreeStoreState::Local { .. } => None,
        }
    }

    pub fn set_worktrees_from_proto(
        &mut self,
        worktrees: Vec<proto::WorktreeMetadata>,
        replica_id: ReplicaId,
        cx: &mut Context<Self>,
    ) -> Result<()> {
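        // Rebuild the worktree list from the upstream metadata, reusing any worktrees we already
        // hold and creating remote worktrees for the rest.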
        let mut old_worktrees_by_id = self
            .worktrees
            .drain(..)
            .filter_map(|worktree| {
                let worktree = worktree.upgrade()?;
                Some((worktree.read(cx).id(), worktree))
            })
            .collect::<HashMap<_, _>>();

        let (client, project_id) = self
            .upstream_client()
            .clone()
            .ok_or_else(|| anyhow!("invalid project"))?;

        for worktree in worktrees {
            if let Some(old_worktree) =
                old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
            {
                let push_strong_handle =
                    self.retain_worktrees || old_worktree.read(cx).is_visible();
                let handle = if push_strong_handle {
                    WorktreeHandle::Strong(old_worktree.clone())
                } else {
                    WorktreeHandle::Weak(old_worktree.downgrade())
                };
                self.worktrees.push(handle);
            } else {
                self.add(
                    &Worktree::remote(project_id, replica_id, worktree, client.clone(), cx),
                    cx,
                );
            }
        }
        self.send_project_updates(cx);

        Ok(())
    }

    pub fn move_worktree(
        &mut self,
        source: WorktreeId,
        destination: WorktreeId,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if source == destination {
            return Ok(());
        }

        let mut source_index = None;
        let mut destination_index = None;
        for (i, worktree) in self.worktrees.iter().enumerate() {
            if let Some(worktree) = worktree.upgrade() {
                let worktree_id = worktree.read(cx).id();
                if worktree_id == source {
                    source_index = Some(i);
                    if destination_index.is_some() {
                        break;
                    }
                } else if worktree_id == destination {
                    destination_index = Some(i);
                    if source_index.is_some() {
                        break;
                    }
                }
            }
        }

        let source_index =
            source_index.with_context(|| format!("Missing worktree for id {source}"))?;
        let destination_index =
            destination_index.with_context(|| format!("Missing worktree for id {destination}"))?;

        if source_index == destination_index {
            return Ok(());
        }

        let worktree_to_move = self.worktrees.remove(source_index);
        self.worktrees.insert(destination_index, worktree_to_move);
        self.worktrees_reordered = true;
        cx.emit(WorktreeStoreEvent::WorktreeOrderChanged);
        cx.notify();
        Ok(())
    }

    pub fn disconnected_from_host(&mut self, cx: &mut App) {
        for worktree in &self.worktrees {
            if let Some(worktree) = worktree.upgrade() {
                worktree.update(cx, |worktree, _| {
                    if let Some(worktree) = worktree.as_remote_mut() {
                        worktree.disconnected_from_host();
                    }
                });
            }
        }
    }

    pub fn send_project_updates(&mut self, cx: &mut Context<Self>) {
        let Some((downstream_client, project_id)) = self.downstream_client.clone() else {
            return;
        };

        let update = proto::UpdateProject {
            project_id,
            worktrees: self.worktree_metadata_protos(cx),
        };

        // collab has bad concurrency guarantees, so we send requests serially.
        let update_project = if downstream_client.is_via_collab() {
            Some(downstream_client.request(update))
        } else {
            downstream_client.send(update).log_err();
            None
        };
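        // After the initial project metadata is sent, each worktree streams its own entry updates
        // to the downstream client via `observe_updates`.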
        cx.spawn(|this, mut cx| async move {
            if let Some(update_project) = update_project {
                update_project.await?;
            }

            this.update(&mut cx, |this, cx| {
                let worktrees = this.worktrees().collect::<Vec<_>>();

                for worktree in worktrees {
                    worktree.update(cx, |worktree, cx| {
                        let client = downstream_client.clone();
                        worktree.observe_updates(project_id, cx, {
                            move |update| {
                                let client = client.clone();
                                async move {
                                    if client.is_via_collab() {
                                        client
                                            .request(update)
                                            .map(|result| result.log_err().is_some())
                                            .await
                                    } else {
                                        client.send(update).log_err().is_some()
                                    }
                                }
                            }
                        });
                    });

                    cx.emit(WorktreeStoreEvent::WorktreeUpdateSent(worktree.clone()))
                }

                anyhow::Ok(())
            })
        })
        .detach_and_log_err(cx);
    }

    pub fn worktree_metadata_protos(&self, cx: &App) -> Vec<proto::WorktreeMetadata> {
        self.worktrees()
            .map(|worktree| {
                let worktree = worktree.read(cx);
                proto::WorktreeMetadata {
                    id: worktree.id().to_proto(),
                    root_name: worktree.root_name().into(),
                    visible: worktree.is_visible(),
                    abs_path: worktree.abs_path().to_string_lossy().into(),
                }
            })
            .collect()
    }

    pub fn shared(
        &mut self,
        remote_id: u64,
        downstream_client: AnyProtoClient,
        cx: &mut Context<Self>,
    ) {
        self.retain_worktrees = true;
        self.downstream_client = Some((downstream_client, remote_id));

        // When shared, retain all worktrees
        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade() {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }
        self.send_project_updates(cx);
    }

    pub fn unshared(&mut self, cx: &mut Context<Self>) {
        self.retain_worktrees = false;
        self.downstream_client.take();

        // When not shared, only retain the visible worktrees
        for worktree_handle in self.worktrees.iter_mut() {
            if let WorktreeHandle::Strong(worktree) = worktree_handle {
                let is_visible = worktree.update(cx, |worktree, _| {
                    worktree.stop_observing_updates();
                    worktree.is_visible()
                });
                if !is_visible {
                    *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                }
            }
        }
    }

    /// Searches all visible worktrees and returns the paths of files that *might* match the query.
    pub fn find_search_candidates(
        &self,
        query: SearchQuery,
        limit: usize,
        open_entries: HashSet<ProjectEntryId>,
        fs: Arc<dyn Fs>,
        cx: &Context<Self>,
    ) -> Receiver<ProjectPath> {
        let snapshots = self
            .visible_worktrees(cx)
            .filter_map(|tree| {
                let tree = tree.read(cx);
                Some((tree.snapshot(), tree.as_local()?.settings()))
            })
            .collect::<Vec<_>>();

        let executor = cx.background_executor().clone();

        // We want to return entries in the order they appear in the worktrees, so we have one
        // task that iterates over the worktrees (and ignored directories) as necessary, and
        // pushes a oneshot::Receiver onto the output channel and a oneshot::Sender onto the
        // filter channel.
        // We spawn a number of workers that take items from the filter channel and check the query
        // against the version of the file on disk.
        let (filter_tx, filter_rx) = smol::channel::bounded(64);
        let (output_tx, output_rx) = smol::channel::bounded(64);
        let (matching_paths_tx, matching_paths_rx) = smol::channel::unbounded();

        let input = cx.background_executor().spawn({
            let fs = fs.clone();
            let query = query.clone();
            async move {
                Self::find_candidate_paths(
                    fs,
                    snapshots,
                    open_entries,
                    query,
                    filter_tx,
                    output_tx,
                )
                .await
                .log_err();
            }
        });
        const MAX_CONCURRENT_FILE_SCANS: usize = 64;
        let filters = cx.background_executor().spawn(async move {
            let fs = &fs;
            let query = &query;
            executor
                .scoped(move |scope| {
                    for _ in 0..MAX_CONCURRENT_FILE_SCANS {
                        let filter_rx = filter_rx.clone();
                        scope.spawn(async move {
                            Self::filter_paths(fs, filter_rx, query)
                                .await
                                .log_with_level(log::Level::Debug);
                        })
                    }
                })
                .await;
        });
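        // Forward matches in worktree order until the limit is reached; dropping `input` and
        // `filters` tears down the rest of the pipeline.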
        cx.background_executor()
            .spawn(async move {
                let mut matched = 0;
                while let Ok(mut receiver) = output_rx.recv().await {
                    let Some(path) = receiver.next().await else {
                        continue;
                    };
                    let Ok(_) = matching_paths_tx.send(path).await else {
                        break;
                    };
                    matched += 1;
                    if matched == limit {
                        break;
                    }
                }
                drop(input);
                drop(filters);
            })
            .detach();
        matching_paths_rx
    }

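    /// Recursively walks an ignored directory directly on disk (its contents may not be present
    /// in the worktree snapshot), feeding files that pass the path filters into the search
    /// pipeline.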
    fn scan_ignored_dir<'a>(
        fs: &'a Arc<dyn Fs>,
        snapshot: &'a worktree::Snapshot,
        path: &'a Path,
        query: &'a SearchQuery,
        include_root: bool,
        filter_tx: &'a Sender<MatchingEntry>,
        output_tx: &'a Sender<oneshot::Receiver<ProjectPath>>,
    ) -> BoxFuture<'a, Result<()>> {
        async move {
            let abs_path = snapshot.abs_path().join(path);
            let Some(mut files) = fs
                .read_dir(&abs_path)
                .await
                .with_context(|| format!("listing ignored path {abs_path:?}"))
                .log_err()
            else {
                return Ok(());
            };

            let mut results = Vec::new();

            while let Some(Ok(file)) = files.next().await {
                let Some(metadata) = fs
                    .metadata(&file)
                    .await
                    .with_context(|| format!("fetching fs metadata for {abs_path:?}"))
                    .log_err()
                    .flatten()
                else {
                    continue;
                };
                if metadata.is_symlink || metadata.is_fifo {
                    continue;
                }
                results.push((
                    file.strip_prefix(snapshot.abs_path())?.to_path_buf(),
                    !metadata.is_dir,
                ))
            }
            results.sort_by(|(a_path, _), (b_path, _)| a_path.cmp(b_path));
            for (path, is_file) in results {
                if is_file {
                    if query.filters_path() {
                        let matched_path = if include_root {
                            let mut full_path = PathBuf::from(snapshot.root_name());
                            full_path.push(&path);
                            query.file_matches(&full_path)
                        } else {
                            query.file_matches(&path)
                        };
                        if !matched_path {
                            continue;
                        }
                    }
                    let (tx, rx) = oneshot::channel();
                    output_tx.send(rx).await?;
                    filter_tx
                        .send(MatchingEntry {
                            respond: tx,
                            worktree_path: snapshot.abs_path().clone(),
                            path: ProjectPath {
                                worktree_id: snapshot.id(),
                                path: Arc::from(path),
                            },
                        })
                        .await?;
                } else {
                    Self::scan_ignored_dir(
                        fs,
                        snapshot,
                        &path,
                        query,
                        include_root,
                        filter_tx,
                        output_tx,
                    )
                    .await?;
                }
            }
            Ok(())
        }
        .boxed()
    }

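    /// Walks each worktree snapshot in order, sending candidate files to the filter workers and a
    /// matching receiver to the output channel so results come back in worktree order. Files that
    /// are already open skip content filtering and are reported directly.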
    async fn find_candidate_paths(
        fs: Arc<dyn Fs>,
        snapshots: Vec<(worktree::Snapshot, WorktreeSettings)>,
        open_entries: HashSet<ProjectEntryId>,
        query: SearchQuery,
        filter_tx: Sender<MatchingEntry>,
        output_tx: Sender<oneshot::Receiver<ProjectPath>>,
    ) -> Result<()> {
        let include_root = snapshots.len() > 1;
        for (snapshot, settings) in snapshots {
            for entry in snapshot.entries(query.include_ignored(), 0) {
                if entry.is_dir() && entry.is_ignored {
                    if !settings.is_path_excluded(&entry.path) {
                        Self::scan_ignored_dir(
                            &fs,
                            &snapshot,
                            &entry.path,
                            &query,
                            include_root,
                            &filter_tx,
                            &output_tx,
                        )
                        .await?;
                    }
                    continue;
                }

                if entry.is_fifo || !entry.is_file() {
                    continue;
                }

                if query.filters_path() {
                    let matched_path = if include_root {
                        let mut full_path = PathBuf::from(snapshot.root_name());
                        full_path.push(&entry.path);
                        query.file_matches(&full_path)
                    } else {
                        query.file_matches(&entry.path)
                    };
                    if !matched_path {
                        continue;
                    }
                }

                let (mut tx, rx) = oneshot::channel();

                if open_entries.contains(&entry.id) {
                    tx.send(ProjectPath {
                        worktree_id: snapshot.id(),
                        path: entry.path.clone(),
                    })
                    .await?;
                } else {
                    filter_tx
                        .send(MatchingEntry {
                            respond: tx,
                            worktree_path: snapshot.abs_path().clone(),
                            path: ProjectPath {
                                worktree_id: snapshot.id(),
                                path: entry.path.clone(),
                            },
                        })
                        .await?;
                }

                output_tx.send(rx).await?;
            }
        }
        Ok(())
    }

    pub fn branches(
        &self,
        project_path: ProjectPath,
        cx: &App,
    ) -> Task<Result<Vec<git::repository::Branch>>> {
        let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
            return Task::ready(Err(anyhow!("No worktree found for ProjectPath")));
        };

        match worktree.read(cx) {
            Worktree::Local(local_worktree) => {
                let branches = util::maybe!({
                    let worktree_error = |error| {
                        format!(
                            "{} for worktree {}",
                            error,
                            local_worktree.abs_path().to_string_lossy()
                        )
                    };

                    let entry = local_worktree
                        .git_entry(project_path.path)
                        .with_context(|| worktree_error("No git entry found"))?;

                    let repo = local_worktree
                        .get_local_repo(&entry)
                        .with_context(|| worktree_error("No repository found"))?
                        .repo()
                        .clone();

                    repo.branches()
                });

                Task::ready(branches)
            }
            Worktree::Remote(remote_worktree) => {
                let request = remote_worktree.client().request(proto::GitBranches {
                    project_id: remote_worktree.project_id(),
                    repository: Some(proto::ProjectPath {
                        worktree_id: project_path.worktree_id.to_proto(),
                        path: project_path.path.to_string_lossy().to_string(), // Root path
                    }),
                });

                cx.background_executor().spawn(async move {
                    let response = request.await?;

                    let branches = response
                        .branches
                        .into_iter()
                        .map(|proto_branch| git::repository::Branch {
                            is_head: proto_branch.is_head,
                            name: proto_branch.name.into(),
                            unix_timestamp: proto_branch
                                .unix_timestamp
                                .map(|timestamp| timestamp as i64),
                        })
                        .collect();

                    Ok(branches)
                })
            }
        }
    }

    pub fn update_or_create_branch(
        &self,
        repository: ProjectPath,
        new_branch: String,
        cx: &App,
    ) -> Task<Result<()>> {
        let Some(worktree) = self.worktree_for_id(repository.worktree_id, cx) else {
            return Task::ready(Err(anyhow!("No worktree found for ProjectPath")));
        };

        match worktree.read(cx) {
            Worktree::Local(local_worktree) => {
                let result = util::maybe!({
                    let worktree_error = |error| {
                        format!(
                            "{} for worktree {}",
                            error,
                            local_worktree.abs_path().to_string_lossy()
                        )
                    };

                    let entry = local_worktree
                        .git_entry(repository.path)
                        .with_context(|| worktree_error("No git entry found"))?;

                    let repo = local_worktree
                        .get_local_repo(&entry)
                        .with_context(|| worktree_error("No repository found"))?
                        .repo()
                        .clone();

                    if !repo.branch_exits(&new_branch)? {
                        repo.create_branch(&new_branch)?;
                    }

                    repo.change_branch(&new_branch)?;
                    Ok(())
                });

                Task::ready(result)
            }
            Worktree::Remote(remote_worktree) => {
                let request = remote_worktree.client().request(proto::UpdateGitBranch {
                    project_id: remote_worktree.project_id(),
                    repository: Some(proto::ProjectPath {
                        worktree_id: repository.worktree_id.to_proto(),
                        path: repository.path.to_string_lossy().to_string(), // Root path
                    }),
                    branch_name: new_branch,
                });

                cx.background_executor().spawn(async move {
                    request.await?;
                    Ok(())
                })
            }
        }
    }

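    /// Checks each candidate file's on-disk contents against the query, skipping files whose
    /// leading bytes are not valid UTF-8.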
    async fn filter_paths(
        fs: &Arc<dyn Fs>,
        mut input: Receiver<MatchingEntry>,
        query: &SearchQuery,
    ) -> Result<()> {
        let mut input = pin!(input);
        while let Some(mut entry) = input.next().await {
            let abs_path = entry.worktree_path.join(&entry.path.path);
            let Some(file) = fs.open_sync(&abs_path).await.log_err() else {
                continue;
            };

            let mut file = BufReader::new(file);
            let file_start = file.fill_buf()?;

            if let Err(Some(starting_position)) =
                std::str::from_utf8(file_start).map_err(|e| e.error_len())
            {
                // Before attempting to match the file content, throw away files that have invalid UTF-8 sequences early on.
                // That way we can still match files in a streaming fashion without having to look at "obviously binary" files.
                log::debug!(
                    "Invalid UTF-8 sequence in file {abs_path:?} at byte position {starting_position}"
                );
                continue;
            }

            if query.detect(file).unwrap_or(false) {
                entry.respond.send(entry.path).await?
            }
        }

        Ok(())
    }

    pub async fn handle_create_project_entry(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::CreateProjectEntry>,
        mut cx: AsyncApp,
    ) -> Result<proto::ProjectEntryResponse> {
        let worktree = this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            this.worktree_for_id(worktree_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })??;
        Worktree::handle_create_entry(worktree, envelope.payload, cx).await
    }

    pub async fn handle_copy_project_entry(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::CopyProjectEntry>,
        mut cx: AsyncApp,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.update(&mut cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })??;
        Worktree::handle_copy_entry(worktree, envelope.payload, cx).await
    }

    pub async fn handle_delete_project_entry(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::DeleteProjectEntry>,
        mut cx: AsyncApp,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.update(&mut cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })??;
        Worktree::handle_delete_entry(worktree, envelope.payload, cx).await
    }

    pub async fn handle_expand_project_entry(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::ExpandProjectEntry>,
        mut cx: AsyncApp,
    ) -> Result<proto::ExpandProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this
            .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
            .ok_or_else(|| anyhow!("invalid request"))?;
        Worktree::handle_expand_entry(worktree, envelope.payload, cx).await
    }

    pub async fn handle_expand_all_for_project_entry(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::ExpandAllForProjectEntry>,
        mut cx: AsyncApp,
    ) -> Result<proto::ExpandAllForProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this
            .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
            .ok_or_else(|| anyhow!("invalid request"))?;
        Worktree::handle_expand_all_for_entry(worktree, envelope.payload, cx).await
    }

    pub async fn handle_git_branches(
        this: Entity<Self>,
        branches: TypedEnvelope<proto::GitBranches>,
        cx: AsyncApp,
    ) -> Result<proto::GitBranchesResponse> {
        let project_path = branches
            .payload
            .repository
            .clone()
            .context("Invalid GitBranches call")?;
        let project_path = ProjectPath {
            worktree_id: WorktreeId::from_proto(project_path.worktree_id),
            path: Path::new(&project_path.path).into(),
        };

        let branches = this
            .read_with(&cx, |this, cx| this.branches(project_path, cx))?
            .await?;

        Ok(proto::GitBranchesResponse {
            branches: branches
                .into_iter()
                .map(|branch| proto::Branch {
                    is_head: branch.is_head,
                    name: branch.name.to_string(),
                    unix_timestamp: branch.unix_timestamp.map(|timestamp| timestamp as u64),
                })
                .collect(),
        })
    }

    pub async fn handle_update_branch(
        this: Entity<Self>,
        update_branch: TypedEnvelope<proto::UpdateGitBranch>,
        cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let project_path = update_branch
            .payload
            .repository
            .clone()
            .context("Invalid UpdateGitBranch call")?;
        let project_path = ProjectPath {
            worktree_id: WorktreeId::from_proto(project_path.worktree_id),
            path: Path::new(&project_path.path).into(),
        };
        let new_branch = update_branch.payload.branch_name;

        this.read_with(&cx, |this, cx| {
            this.update_or_create_branch(project_path, new_branch, cx)
        })?
        .await?;

        Ok(proto::Ack {})
    }
}

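/// A strong or weak reference to a worktree. Visible worktrees (and all worktrees while the
/// project is shared) are held strongly; hidden worktrees are held weakly so they can be dropped
/// once nothing else uses them.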
#[derive(Clone, Debug)]
enum WorktreeHandle {
    Strong(Entity<Worktree>),
    Weak(WeakEntity<Worktree>),
}

impl WorktreeHandle {
    fn upgrade(&self) -> Option<Entity<Worktree>> {
        match self {
            WorktreeHandle::Strong(handle) => Some(handle.clone()),
            WorktreeHandle::Weak(handle) => handle.upgrade(),
        }
    }
}