use std::{
    io::{BufRead, BufReader},
    path::{Path, PathBuf},
    pin::pin,
    sync::{atomic::AtomicUsize, Arc},
};

use anyhow::{anyhow, Context as _, Result};
use collections::{HashMap, HashSet};
use fs::Fs;
use futures::{
    future::{BoxFuture, Shared},
    FutureExt, SinkExt,
};
use gpui::{
    AppContext, AsyncAppContext, EntityId, EventEmitter, Model, ModelContext, Task, WeakModel,
};
use postage::oneshot;
use rpc::{
    proto::{self, SSH_PROJECT_ID},
    AnyProtoClient, ErrorExt, TypedEnvelope,
};
use smol::{
    channel::{Receiver, Sender},
    stream::StreamExt,
};
use text::ReplicaId;
use util::{paths::SanitizedPath, ResultExt};
use worktree::{Entry, ProjectEntryId, UpdatedEntriesSet, Worktree, WorktreeId, WorktreeSettings};

use crate::{search::SearchQuery, ProjectPath};

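/// A candidate file produced while scanning worktrees for a search: a filter
/// worker opens the file at `worktree_path.join(path.path)` and, if the query
/// matches its contents, sends `path` back through `respond`.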
struct MatchingEntry {
    worktree_path: Arc<Path>,
    path: ProjectPath,
    respond: oneshot::Sender<ProjectPath>,
}

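/// Whether this store manages worktrees on the local filesystem or mirrors
/// worktrees owned by an upstream (SSH or collab) project.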
enum WorktreeStoreState {
    Local {
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
    },
}

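/// Tracks the set of [`Worktree`]s for a project, handles loading new ones,
/// and forwards worktree metadata and updates to a downstream client when the
/// project is shared.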
pub struct WorktreeStore {
    next_entry_id: Arc<AtomicUsize>,
    downstream_client: Option<(AnyProtoClient, u64)>,
    retain_worktrees: bool,
    worktrees: Vec<WorktreeHandle>,
    worktrees_reordered: bool,
    #[allow(clippy::type_complexity)]
    loading_worktrees:
        HashMap<SanitizedPath, Shared<Task<Result<Model<Worktree>, Arc<anyhow::Error>>>>>,
    state: WorktreeStoreState,
}

pub enum WorktreeStoreEvent {
    WorktreeAdded(Model<Worktree>),
    WorktreeRemoved(EntityId, WorktreeId),
    WorktreeReleased(EntityId, WorktreeId),
    WorktreeOrderChanged,
    WorktreeUpdateSent(Model<Worktree>),
    WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
    WorktreeUpdatedGitRepositories(WorktreeId),
    WorktreeDeletedEntry(WorktreeId, ProjectEntryId),
}

impl EventEmitter<WorktreeStoreEvent> for WorktreeStore {}

impl WorktreeStore {
    pub fn init(client: &AnyProtoClient) {
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_expand_project_entry);
        client.add_model_request_handler(Self::handle_git_branches);
        client.add_model_request_handler(Self::handle_update_branch);
    }

    pub fn local(retain_worktrees: bool, fs: Arc<dyn Fs>) -> Self {
        Self {
            next_entry_id: Default::default(),
            loading_worktrees: Default::default(),
            downstream_client: None,
            worktrees: Vec::new(),
            worktrees_reordered: false,
            retain_worktrees,
            state: WorktreeStoreState::Local { fs },
        }
    }

    pub fn remote(
        retain_worktrees: bool,
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
    ) -> Self {
        Self {
            next_entry_id: Default::default(),
            loading_worktrees: Default::default(),
            downstream_client: None,
            worktrees: Vec::new(),
            worktrees_reordered: false,
            retain_worktrees,
            state: WorktreeStoreState::Remote {
                upstream_client,
                upstream_project_id,
            },
        }
    }

    /// Iterates through all worktrees, including ones that don't appear in the project panel
    pub fn worktrees(&self) -> impl '_ + DoubleEndedIterator<Item = Model<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade())
    }

    /// Iterates through all user-visible worktrees, the ones that appear in the project panel.
    pub fn visible_worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
        self.worktrees()
            .filter(|worktree| worktree.read(cx).is_visible())
    }

    pub fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option<Model<Worktree>> {
        self.worktrees()
            .find(|worktree| worktree.read(cx).id() == id)
    }

    pub fn current_branch(&self, repository: ProjectPath, cx: &AppContext) -> Option<Arc<str>> {
        self.worktree_for_id(repository.worktree_id, cx)?
            .read(cx)
            .git_entry(repository.path)?
            .branch()
    }

    pub fn worktree_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<Model<Worktree>> {
        self.worktrees()
            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
    }

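    /// Returns the worktree whose root contains `abs_path`, along with the
    /// remainder of `abs_path` relative to that worktree's root.
    ///
    /// A minimal usage sketch (hypothetical path; assumes a `WorktreeStore`
    /// and an `&AppContext` are in scope):
    ///
    /// ```ignore
    /// if let Some((worktree, relative_path)) =
    ///     store.find_worktree(Path::new("/home/me/project/src/main.rs"), cx)
    /// {
    ///     println!("{}: {:?}", worktree.read(cx).root_name(), relative_path);
    /// }
    /// ```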
    pub fn find_worktree(
        &self,
        abs_path: impl Into<SanitizedPath>,
        cx: &AppContext,
    ) -> Option<(Model<Worktree>, PathBuf)> {
        let abs_path: SanitizedPath = abs_path.into();
        for tree in self.worktrees() {
            if let Ok(relative_path) = abs_path.as_path().strip_prefix(tree.read(cx).abs_path()) {
                return Some((tree.clone(), relative_path.into()));
            }
        }
        None
    }

    pub fn find_or_create_worktree(
        &mut self,
        abs_path: impl AsRef<Path>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(Model<Worktree>, PathBuf)>> {
        let abs_path = abs_path.as_ref();
        if let Some((tree, relative_path)) = self.find_worktree(abs_path, cx) {
            Task::ready(Ok((tree, relative_path)))
        } else {
            let worktree = self.create_worktree(abs_path, visible, cx);
            cx.background_executor()
                .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
        }
    }

    pub fn entry_for_id<'a>(
        &'a self,
        entry_id: ProjectEntryId,
        cx: &'a AppContext,
    ) -> Option<&'a Entry> {
        self.worktrees()
            .find_map(|worktree| worktree.read(cx).entry_for_id(entry_id))
    }

    pub fn worktree_and_entry_for_id<'a>(
        &'a self,
        entry_id: ProjectEntryId,
        cx: &'a AppContext,
    ) -> Option<(Model<Worktree>, &'a Entry)> {
        self.worktrees().find_map(|worktree| {
            worktree
                .read(cx)
                .entry_for_id(entry_id)
                .map(|e| (worktree.clone(), e))
        })
    }

    pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Entry> {
        self.worktree_for_id(path.worktree_id, cx)?
            .read(cx)
            .entry_for_path(&path.path)
            .cloned()
    }

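    /// Creates (or begins loading) a worktree rooted at `abs_path`. Concurrent
    /// calls for the same path share a single loading task via
    /// `loading_worktrees`; the entry is removed once the load completes.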
    pub fn create_worktree(
        &mut self,
        abs_path: impl Into<SanitizedPath>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Model<Worktree>>> {
        let abs_path: SanitizedPath = abs_path.into();
        if !self.loading_worktrees.contains_key(&abs_path) {
            let task = match &self.state {
                WorktreeStoreState::Remote {
                    upstream_client, ..
                } => {
                    if upstream_client.is_via_collab() {
                        Task::ready(Err(Arc::new(anyhow!("cannot create worktrees via collab"))))
                    } else {
                        self.create_ssh_worktree(
                            upstream_client.clone(),
                            abs_path.clone(),
                            visible,
                            cx,
                        )
                    }
                }
                WorktreeStoreState::Local { fs } => {
                    self.create_local_worktree(fs.clone(), abs_path.clone(), visible, cx)
                }
            };

            self.loading_worktrees
                .insert(abs_path.clone(), task.shared());
        }
        let task = self.loading_worktrees.get(&abs_path).unwrap().clone();
        cx.spawn(|this, mut cx| async move {
            let result = task.await;
            this.update(&mut cx, |this, _| this.loading_worktrees.remove(&abs_path))
                .ok();
            match result {
                Ok(worktree) => Ok(worktree),
                Err(err) => Err((*err).cloned()),
            }
        })
    }

    fn create_ssh_worktree(
        &mut self,
        client: AnyProtoClient,
        abs_path: impl Into<SanitizedPath>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Model<Worktree>, Arc<anyhow::Error>>> {
        let mut abs_path = Into::<SanitizedPath>::into(abs_path).to_string();
        // If the path starts with `/~`, the SSH path was something like `ssh://user@host/~/home-dir-folder/`,
        // in which case we want to strip the leading `/`.
        // On the host side, the `~` will get expanded.
        // That's what git does too: https://github.com/libgit2/libgit2/issues/3345#issuecomment-127050850
        if abs_path.starts_with("/~") {
            abs_path = abs_path[1..].to_string();
        }
        if abs_path.is_empty() || abs_path == "/" {
            abs_path = "~/".to_string();
        }
        cx.spawn(|this, mut cx| async move {
            let this = this.upgrade().context("Dropped worktree store")?;

            let response = client
                .request(proto::AddWorktree {
                    project_id: SSH_PROJECT_ID,
                    path: abs_path.clone(),
                    visible,
                })
                .await?;

            if let Some(existing_worktree) = this.read_with(&cx, |this, cx| {
                this.worktree_for_id(WorktreeId::from_proto(response.worktree_id), cx)
            })? {
                return Ok(existing_worktree);
            }

            let root_name = PathBuf::from(&response.canonicalized_path)
                .file_name()
                .map(|n| n.to_string_lossy().to_string())
                .unwrap_or(response.canonicalized_path.to_string());

            let worktree = cx.update(|cx| {
                Worktree::remote(
                    SSH_PROJECT_ID,
                    0,
                    proto::WorktreeMetadata {
                        id: response.worktree_id,
                        root_name,
                        visible,
                        abs_path: response.canonicalized_path,
                    },
                    client,
                    cx,
                )
            })?;

            this.update(&mut cx, |this, cx| {
                this.add(&worktree, cx);
            })?;
            Ok(worktree)
        })
    }

    fn create_local_worktree(
        &mut self,
        fs: Arc<dyn Fs>,
        abs_path: impl Into<SanitizedPath>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Model<Worktree>, Arc<anyhow::Error>>> {
        let next_entry_id = self.next_entry_id.clone();
        let path: SanitizedPath = abs_path.into();

        cx.spawn(move |this, mut cx| async move {
            let worktree = Worktree::local(path.clone(), visible, fs, next_entry_id, &mut cx).await;

            let worktree = worktree?;

            this.update(&mut cx, |this, cx| this.add(&worktree, cx))?;

            if visible {
                cx.update(|cx| {
                    cx.add_recent_document(path.as_path());
                })
                .log_err();
            }

            Ok(worktree)
        })
    }

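    /// Registers a worktree with the store, holding it strongly only if it is
    /// visible or the store is retaining worktrees. Unless the user has
    /// reordered worktrees, the handle is inserted in `abs_path` order.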
    pub fn add(&mut self, worktree: &Model<Worktree>, cx: &mut ModelContext<Self>) {
        let worktree_id = worktree.read(cx).id();
        debug_assert!(self.worktrees().all(|w| w.read(cx).id() != worktree_id));

        let push_strong_handle = self.retain_worktrees || worktree.read(cx).is_visible();
        let handle = if push_strong_handle {
            WorktreeHandle::Strong(worktree.clone())
        } else {
            WorktreeHandle::Weak(worktree.downgrade())
        };
        if self.worktrees_reordered {
            self.worktrees.push(handle);
        } else {
            let i = match self
                .worktrees
                .binary_search_by_key(&Some(worktree.read(cx).abs_path()), |other| {
                    other.upgrade().map(|worktree| worktree.read(cx).abs_path())
                }) {
                Ok(i) | Err(i) => i,
            };
            self.worktrees.insert(i, handle);
        }

        cx.emit(WorktreeStoreEvent::WorktreeAdded(worktree.clone()));
        self.send_project_updates(cx);

        let handle_id = worktree.entity_id();
        cx.subscribe(worktree, |_, worktree, event, cx| {
            let worktree_id = worktree.update(cx, |worktree, _| worktree.id());
            match event {
                worktree::Event::UpdatedEntries(changes) => {
                    cx.emit(WorktreeStoreEvent::WorktreeUpdatedEntries(
                        worktree.read(cx).id(),
                        changes.clone(),
                    ));
                }
                worktree::Event::UpdatedGitRepositories(_) => {
                    cx.emit(WorktreeStoreEvent::WorktreeUpdatedGitRepositories(
                        worktree_id,
                    ));
                }
                worktree::Event::DeletedEntry(id) => {
                    cx.emit(WorktreeStoreEvent::WorktreeDeletedEntry(worktree_id, *id))
                }
            }
        })
        .detach();
        cx.observe_release(worktree, move |this, worktree, cx| {
            cx.emit(WorktreeStoreEvent::WorktreeReleased(
                handle_id,
                worktree.id(),
            ));
            cx.emit(WorktreeStoreEvent::WorktreeRemoved(
                handle_id,
                worktree.id(),
            ));
            this.send_project_updates(cx);
        })
        .detach();
    }

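    /// Drops the handle for the worktree with the given id (along with any
    /// handles that can no longer be upgraded) and emits `WorktreeRemoved`.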
    pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
        self.worktrees.retain(|worktree| {
            if let Some(worktree) = worktree.upgrade() {
                if worktree.read(cx).id() == id_to_remove {
                    cx.emit(WorktreeStoreEvent::WorktreeRemoved(
                        worktree.entity_id(),
                        id_to_remove,
                    ));
                    false
                } else {
                    true
                }
            } else {
                false
            }
        });
        self.send_project_updates(cx);
    }

    pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool) {
        self.worktrees_reordered = worktrees_reordered;
    }

    fn upstream_client(&self) -> Option<(AnyProtoClient, u64)> {
        match &self.state {
            WorktreeStoreState::Remote {
                upstream_client,
                upstream_project_id,
                ..
            } => Some((upstream_client.clone(), *upstream_project_id)),
            WorktreeStoreState::Local { .. } => None,
        }
    }

    pub fn set_worktrees_from_proto(
        &mut self,
        worktrees: Vec<proto::WorktreeMetadata>,
        replica_id: ReplicaId,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let mut old_worktrees_by_id = self
            .worktrees
            .drain(..)
            .filter_map(|worktree| {
                let worktree = worktree.upgrade()?;
                Some((worktree.read(cx).id(), worktree))
            })
            .collect::<HashMap<_, _>>();

        let (client, project_id) = self
            .upstream_client()
            .clone()
            .ok_or_else(|| anyhow!("invalid project"))?;

        for worktree in worktrees {
            if let Some(old_worktree) =
                old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
            {
                let push_strong_handle =
                    self.retain_worktrees || old_worktree.read(cx).is_visible();
                let handle = if push_strong_handle {
                    WorktreeHandle::Strong(old_worktree.clone())
                } else {
                    WorktreeHandle::Weak(old_worktree.downgrade())
                };
                self.worktrees.push(handle);
            } else {
                self.add(
                    &Worktree::remote(project_id, replica_id, worktree, client.clone(), cx),
                    cx,
                );
            }
        }
        self.send_project_updates(cx);

        Ok(())
    }

    pub fn move_worktree(
        &mut self,
        source: WorktreeId,
        destination: WorktreeId,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        if source == destination {
            return Ok(());
        }

        let mut source_index = None;
        let mut destination_index = None;
        for (i, worktree) in self.worktrees.iter().enumerate() {
            if let Some(worktree) = worktree.upgrade() {
                let worktree_id = worktree.read(cx).id();
                if worktree_id == source {
                    source_index = Some(i);
                    if destination_index.is_some() {
                        break;
                    }
                } else if worktree_id == destination {
                    destination_index = Some(i);
                    if source_index.is_some() {
                        break;
                    }
                }
            }
        }

        let source_index =
            source_index.with_context(|| format!("Missing worktree for id {source}"))?;
        let destination_index =
            destination_index.with_context(|| format!("Missing worktree for id {destination}"))?;

        if source_index == destination_index {
            return Ok(());
        }

        let worktree_to_move = self.worktrees.remove(source_index);
        self.worktrees.insert(destination_index, worktree_to_move);
        self.worktrees_reordered = true;
        cx.emit(WorktreeStoreEvent::WorktreeOrderChanged);
        cx.notify();
        Ok(())
    }

    pub fn disconnected_from_host(&mut self, cx: &mut AppContext) {
        for worktree in &self.worktrees {
            if let Some(worktree) = worktree.upgrade() {
                worktree.update(cx, |worktree, _| {
                    if let Some(worktree) = worktree.as_remote_mut() {
                        worktree.disconnected_from_host();
                    }
                });
            }
        }
    }

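    /// Pushes the current worktree metadata to the downstream client (if any)
    /// and starts forwarding each worktree's entry updates to it.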
    pub fn send_project_updates(&mut self, cx: &mut ModelContext<Self>) {
        let Some((downstream_client, project_id)) = self.downstream_client.clone() else {
            return;
        };

        let update = proto::UpdateProject {
            project_id,
            worktrees: self.worktree_metadata_protos(cx),
        };

        // collab has bad concurrency guarantees, so we send requests in serial.
        let update_project = if downstream_client.is_via_collab() {
            Some(downstream_client.request(update))
        } else {
            downstream_client.send(update).log_err();
            None
        };
        cx.spawn(|this, mut cx| async move {
            if let Some(update_project) = update_project {
                update_project.await?;
            }

            this.update(&mut cx, |this, cx| {
                let worktrees = this.worktrees().collect::<Vec<_>>();

                for worktree in worktrees {
                    worktree.update(cx, |worktree, cx| {
                        let client = downstream_client.clone();
                        worktree.observe_updates(project_id, cx, {
                            move |update| {
                                let client = client.clone();
                                async move {
                                    if client.is_via_collab() {
                                        client
                                            .request(update)
                                            .map(|result| result.log_err().is_some())
                                            .await
                                    } else {
                                        client.send(update).log_err().is_some()
                                    }
                                }
                            }
                        });
                    });

                    cx.emit(WorktreeStoreEvent::WorktreeUpdateSent(worktree.clone()))
                }

                anyhow::Ok(())
            })
        })
        .detach_and_log_err(cx);
    }

    pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec<proto::WorktreeMetadata> {
        self.worktrees()
            .map(|worktree| {
                let worktree = worktree.read(cx);
                proto::WorktreeMetadata {
                    id: worktree.id().to_proto(),
                    root_name: worktree.root_name().into(),
                    visible: worktree.is_visible(),
                    abs_path: worktree.abs_path().to_string_lossy().into(),
                }
            })
            .collect()
    }

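    /// Starts sharing with a downstream client: upgrades all weak worktree
    /// handles to strong ones and sends the initial project update.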
    pub fn shared(
        &mut self,
        remote_id: u64,
        downstream_client: AnyProtoClient,
        cx: &mut ModelContext<Self>,
    ) {
        self.retain_worktrees = true;
        self.downstream_client = Some((downstream_client, remote_id));

        // When shared, retain all worktrees
        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade() {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }
        self.send_project_updates(cx);
    }

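    /// Stops sharing: drops the downstream client, stops observing worktree
    /// updates, and downgrades handles for non-visible worktrees so they can
    /// be released.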
    pub fn unshared(&mut self, cx: &mut ModelContext<Self>) {
        self.retain_worktrees = false;
        self.downstream_client.take();

        // When not shared, only retain the visible worktrees
        for worktree_handle in self.worktrees.iter_mut() {
            if let WorktreeHandle::Strong(worktree) = worktree_handle {
                let is_visible = worktree.update(cx, |worktree, _| {
                    worktree.stop_observing_updates();
                    worktree.is_visible()
                });
                if !is_visible {
                    *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                }
            }
        }
    }

    /// Searches all visible worktrees and returns the paths of files that *might* match the query.
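    /// Already-open entries (`open_entries`) skip the on-disk filtering step,
    /// and at most `limit` paths are produced.
    ///
    /// A minimal consumption sketch (hypothetical `store`, `query`, and `fs`;
    /// assumes a `ModelContext<WorktreeStore>` is in scope):
    ///
    /// ```ignore
    /// let rx = store.find_search_candidates(query, 100, Default::default(), fs.clone(), cx);
    /// cx.background_executor()
    ///     .spawn(async move {
    ///         while let Ok(project_path) = rx.recv().await {
    ///             println!("candidate: {:?}", project_path.path);
    ///         }
    ///     })
    ///     .detach();
    /// ```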
    pub fn find_search_candidates(
        &self,
        query: SearchQuery,
        limit: usize,
        open_entries: HashSet<ProjectEntryId>,
        fs: Arc<dyn Fs>,
        cx: &ModelContext<Self>,
    ) -> Receiver<ProjectPath> {
        let snapshots = self
            .visible_worktrees(cx)
            .filter_map(|tree| {
                let tree = tree.read(cx);
                Some((tree.snapshot(), tree.as_local()?.settings()))
            })
            .collect::<Vec<_>>();

        let executor = cx.background_executor().clone();

        // We want to return entries in the order they are in the worktrees, so we have one
        // thread that iterates over the worktrees (and ignored directories) as necessary,
        // and pushes a oneshot::Receiver to the output channel and a oneshot::Sender to the filter
        // channel.
        // We spawn a number of workers that take items from the filter channel and check the query
        // against the version of the file on disk.
        let (filter_tx, filter_rx) = smol::channel::bounded(64);
        let (output_tx, output_rx) = smol::channel::bounded(64);
        let (matching_paths_tx, matching_paths_rx) = smol::channel::unbounded();

        let input = cx.background_executor().spawn({
            let fs = fs.clone();
            let query = query.clone();
            async move {
                Self::find_candidate_paths(
                    fs,
                    snapshots,
                    open_entries,
                    query,
                    filter_tx,
                    output_tx,
                )
                .await
                .log_err();
            }
        });
        const MAX_CONCURRENT_FILE_SCANS: usize = 64;
        let filters = cx.background_executor().spawn(async move {
            let fs = &fs;
            let query = &query;
            executor
                .scoped(move |scope| {
                    for _ in 0..MAX_CONCURRENT_FILE_SCANS {
                        let filter_rx = filter_rx.clone();
                        scope.spawn(async move {
                            Self::filter_paths(fs, filter_rx, query).await.log_err();
                        })
                    }
                })
                .await;
        });
        cx.background_executor()
            .spawn(async move {
                let mut matched = 0;
                while let Ok(mut receiver) = output_rx.recv().await {
                    let Some(path) = receiver.next().await else {
                        continue;
                    };
                    let Ok(_) = matching_paths_tx.send(path).await else {
                        break;
                    };
                    matched += 1;
                    if matched == limit {
                        break;
                    }
                }
                drop(input);
                drop(filters);
            })
            .detach();
        matching_paths_rx
    }

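    /// Recursively walks an ignored directory on disk (its contents are not in
    /// the worktree snapshot), forwarding candidate files to the filter
    /// workers in path order and skipping symlinks and FIFOs.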
    fn scan_ignored_dir<'a>(
        fs: &'a Arc<dyn Fs>,
        snapshot: &'a worktree::Snapshot,
        path: &'a Path,
        query: &'a SearchQuery,
        include_root: bool,
        filter_tx: &'a Sender<MatchingEntry>,
        output_tx: &'a Sender<oneshot::Receiver<ProjectPath>>,
    ) -> BoxFuture<'a, Result<()>> {
        async move {
            let abs_path = snapshot.abs_path().join(path);
            let Some(mut files) = fs
                .read_dir(&abs_path)
                .await
                .with_context(|| format!("listing ignored path {abs_path:?}"))
                .log_err()
            else {
                return Ok(());
            };

            let mut results = Vec::new();

            while let Some(Ok(file)) = files.next().await {
                let Some(metadata) = fs
                    .metadata(&file)
                    .await
                    .with_context(|| format!("fetching fs metadata for {abs_path:?}"))
                    .log_err()
                    .flatten()
                else {
                    continue;
                };
                if metadata.is_symlink || metadata.is_fifo {
                    continue;
                }
                results.push((
                    file.strip_prefix(snapshot.abs_path())?.to_path_buf(),
                    !metadata.is_dir,
                ))
            }
            results.sort_by(|(a_path, _), (b_path, _)| a_path.cmp(b_path));
            for (path, is_file) in results {
                if is_file {
                    if query.filters_path() {
                        let matched_path = if include_root {
                            let mut full_path = PathBuf::from(snapshot.root_name());
                            full_path.push(&path);
                            query.file_matches(&full_path)
                        } else {
                            query.file_matches(&path)
                        };
                        if !matched_path {
                            continue;
                        }
                    }
                    let (tx, rx) = oneshot::channel();
                    output_tx.send(rx).await?;
                    filter_tx
                        .send(MatchingEntry {
                            respond: tx,
                            worktree_path: snapshot.abs_path().clone(),
                            path: ProjectPath {
                                worktree_id: snapshot.id(),
                                path: Arc::from(path),
                            },
                        })
                        .await?;
                } else {
                    Self::scan_ignored_dir(
                        fs,
                        snapshot,
                        &path,
                        query,
                        include_root,
                        filter_tx,
                        output_tx,
                    )
                    .await?;
                }
            }
            Ok(())
        }
        .boxed()
    }

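    /// Walks each worktree snapshot in order, applying the query's path
    /// filters. Entries that are already open are answered immediately;
    /// everything else is handed to the filter workers for a content check.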
    async fn find_candidate_paths(
        fs: Arc<dyn Fs>,
        snapshots: Vec<(worktree::Snapshot, WorktreeSettings)>,
        open_entries: HashSet<ProjectEntryId>,
        query: SearchQuery,
        filter_tx: Sender<MatchingEntry>,
        output_tx: Sender<oneshot::Receiver<ProjectPath>>,
    ) -> Result<()> {
        let include_root = snapshots.len() > 1;
        for (snapshot, settings) in snapshots {
            for entry in snapshot.entries(query.include_ignored(), 0) {
                if entry.is_dir() && entry.is_ignored {
                    if !settings.is_path_excluded(&entry.path) {
                        Self::scan_ignored_dir(
                            &fs,
                            &snapshot,
                            &entry.path,
                            &query,
                            include_root,
                            &filter_tx,
                            &output_tx,
                        )
                        .await?;
                    }
                    continue;
                }

                if entry.is_fifo || !entry.is_file() {
                    continue;
                }

                if query.filters_path() {
                    let matched_path = if include_root {
                        let mut full_path = PathBuf::from(snapshot.root_name());
                        full_path.push(&entry.path);
                        query.file_matches(&full_path)
                    } else {
                        query.file_matches(&entry.path)
                    };
                    if !matched_path {
                        continue;
                    }
                }

                let (mut tx, rx) = oneshot::channel();

                if open_entries.contains(&entry.id) {
                    tx.send(ProjectPath {
                        worktree_id: snapshot.id(),
                        path: entry.path.clone(),
                    })
                    .await?;
                } else {
                    filter_tx
                        .send(MatchingEntry {
                            respond: tx,
                            worktree_path: snapshot.abs_path().clone(),
                            path: ProjectPath {
                                worktree_id: snapshot.id(),
                                path: entry.path.clone(),
                            },
                        })
                        .await?;
                }

                output_tx.send(rx).await?;
            }
        }
        Ok(())
    }

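    /// Lists the branches of the repository containing `project_path`, reading
    /// the repository directly for local worktrees and issuing a `GitBranches`
    /// request for remote ones.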
    pub fn branches(
        &self,
        project_path: ProjectPath,
        cx: &AppContext,
    ) -> Task<Result<Vec<git::repository::Branch>>> {
        let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
            return Task::ready(Err(anyhow!("No worktree found for ProjectPath")));
        };

        match worktree.read(cx) {
            Worktree::Local(local_worktree) => {
                let branches = util::maybe!({
                    let worktree_error = |error| {
                        format!(
                            "{} for worktree {}",
                            error,
                            local_worktree.abs_path().to_string_lossy()
                        )
                    };

                    let entry = local_worktree
                        .git_entry(project_path.path)
                        .with_context(|| worktree_error("No git entry found"))?;

                    let repo = local_worktree
                        .get_local_repo(&entry)
                        .with_context(|| worktree_error("No repository found"))?
                        .repo()
                        .clone();

                    repo.branches()
                });

                Task::ready(branches)
            }
            Worktree::Remote(remote_worktree) => {
                let request = remote_worktree.client().request(proto::GitBranches {
                    project_id: remote_worktree.project_id(),
                    repository: Some(proto::ProjectPath {
                        worktree_id: project_path.worktree_id.to_proto(),
                        path: project_path.path.to_string_lossy().to_string(), // Root path
                    }),
                });

                cx.background_executor().spawn(async move {
                    let response = request.await?;

                    let branches = response
                        .branches
                        .into_iter()
                        .map(|proto_branch| git::repository::Branch {
                            is_head: proto_branch.is_head,
                            name: proto_branch.name.into(),
                            unix_timestamp: proto_branch
                                .unix_timestamp
                                .map(|timestamp| timestamp as i64),
                        })
                        .collect();

                    Ok(branches)
                })
            }
        }
    }

    pub fn update_or_create_branch(
        &self,
        repository: ProjectPath,
        new_branch: String,
        cx: &AppContext,
    ) -> Task<Result<()>> {
        let Some(worktree) = self.worktree_for_id(repository.worktree_id, cx) else {
            return Task::ready(Err(anyhow!("No worktree found for ProjectPath")));
        };

        match worktree.read(cx) {
            Worktree::Local(local_worktree) => {
                let result = util::maybe!({
                    let worktree_error = |error| {
                        format!(
                            "{} for worktree {}",
                            error,
                            local_worktree.abs_path().to_string_lossy()
                        )
                    };

                    let entry = local_worktree
                        .git_entry(repository.path)
                        .with_context(|| worktree_error("No git entry found"))?;

                    let repo = local_worktree
                        .get_local_repo(&entry)
                        .with_context(|| worktree_error("No repository found"))?
                        .repo()
                        .clone();

                    if !repo.branch_exits(&new_branch)? {
                        repo.create_branch(&new_branch)?;
                    }

                    repo.change_branch(&new_branch)?;
                    Ok(())
                });

                Task::ready(result)
            }
            Worktree::Remote(remote_worktree) => {
                let request = remote_worktree.client().request(proto::UpdateGitBranch {
                    project_id: remote_worktree.project_id(),
                    repository: Some(proto::ProjectPath {
                        worktree_id: repository.worktree_id.to_proto(),
                        path: repository.path.to_string_lossy().to_string(), // Root path
                    }),
                    branch_name: new_branch,
                });

                cx.background_executor().spawn(async move {
                    request.await?;
                    Ok(())
                })
            }
        }
    }

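    /// Filter-worker loop: reads each candidate file from disk and responds
    /// with its path if the query matches the file's contents. Candidate files
    /// whose leading bytes are not valid UTF-8 are rejected as "obviously binary".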
    async fn filter_paths(
        fs: &Arc<dyn Fs>,
        mut input: Receiver<MatchingEntry>,
        query: &SearchQuery,
    ) -> Result<()> {
        let mut input = pin!(input);
        while let Some(mut entry) = input.next().await {
            let abs_path = entry.worktree_path.join(&entry.path.path);
            let Some(file) = fs.open_sync(&abs_path).await.log_err() else {
                continue;
            };

            let mut file = BufReader::new(file);
            let file_start = file.fill_buf()?;

            if let Err(Some(starting_position)) =
                std::str::from_utf8(file_start).map_err(|e| e.error_len())
            {
                // Before attempting to match the file content, throw away files that contain
                // invalid UTF-8 sequences early on. That way we can still match files in a
                // streaming fashion without having to look at "obviously binary" files.
                return Err(anyhow!(
                    "Invalid UTF-8 sequence at position {starting_position}"
                ));
            }

            if query.detect(file).unwrap_or(false) {
                entry.respond.send(entry.path).await?
            }
        }

        Ok(())
    }

    pub async fn handle_create_project_entry(
        this: Model<Self>,
        envelope: TypedEnvelope<proto::CreateProjectEntry>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let worktree = this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            this.worktree_for_id(worktree_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })??;
        Worktree::handle_create_entry(worktree, envelope.payload, cx).await
    }

    pub async fn handle_copy_project_entry(
        this: Model<Self>,
        envelope: TypedEnvelope<proto::CopyProjectEntry>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.update(&mut cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })??;
        Worktree::handle_copy_entry(worktree, envelope.payload, cx).await
    }

    pub async fn handle_delete_project_entry(
        this: Model<Self>,
        envelope: TypedEnvelope<proto::DeleteProjectEntry>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.update(&mut cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })??;
        Worktree::handle_delete_entry(worktree, envelope.payload, cx).await
    }

    pub async fn handle_expand_project_entry(
        this: Model<Self>,
        envelope: TypedEnvelope<proto::ExpandProjectEntry>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ExpandProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this
            .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
            .ok_or_else(|| anyhow!("invalid request"))?;
        Worktree::handle_expand_entry(worktree, envelope.payload, cx).await
    }

    pub async fn handle_git_branches(
        this: Model<Self>,
        branches: TypedEnvelope<proto::GitBranches>,
        cx: AsyncAppContext,
    ) -> Result<proto::GitBranchesResponse> {
        let project_path = branches
            .payload
            .repository
            .clone()
            .context("Invalid GitBranches call")?;
        let project_path = ProjectPath {
            worktree_id: WorktreeId::from_proto(project_path.worktree_id),
            path: Path::new(&project_path.path).into(),
        };

        let branches = this
            .read_with(&cx, |this, cx| this.branches(project_path, cx))?
            .await?;

        Ok(proto::GitBranchesResponse {
            branches: branches
                .into_iter()
                .map(|branch| proto::Branch {
                    is_head: branch.is_head,
                    name: branch.name.to_string(),
                    unix_timestamp: branch.unix_timestamp.map(|timestamp| timestamp as u64),
                })
                .collect(),
        })
    }

    pub async fn handle_update_branch(
        this: Model<Self>,
        update_branch: TypedEnvelope<proto::UpdateGitBranch>,
        cx: AsyncAppContext,
    ) -> Result<proto::Ack> {
        let project_path = update_branch
            .payload
            .repository
            .clone()
            .context("Invalid UpdateGitBranch call")?;
        let project_path = ProjectPath {
            worktree_id: WorktreeId::from_proto(project_path.worktree_id),
            path: Path::new(&project_path.path).into(),
        };
        let new_branch = update_branch.payload.branch_name;

        this.read_with(&cx, |this, cx| {
            this.update_or_create_branch(project_path, new_branch, cx)
        })?
        .await?;

        Ok(proto::Ack {})
    }
}

#[derive(Clone, Debug)]
enum WorktreeHandle {
    Strong(Model<Worktree>),
    Weak(WeakModel<Worktree>),
}

impl WorktreeHandle {
    fn upgrade(&self) -> Option<Model<Worktree>> {
        match self {
            WorktreeHandle::Strong(handle) => Some(handle.clone()),
            WorktreeHandle::Weak(handle) => handle.upgrade(),
        }
    }
}