use std::{
    io::{BufRead, BufReader},
    path::{Path, PathBuf},
    pin::pin,
    sync::{atomic::AtomicUsize, Arc},
};

use anyhow::{anyhow, Context as _, Result};
use collections::{HashMap, HashSet};
use fs::Fs;
use futures::{
    future::{BoxFuture, Shared},
    FutureExt, SinkExt,
};
use gpui::{
    AppContext, AsyncAppContext, EntityId, EventEmitter, Model, ModelContext, Task, WeakModel,
};
use postage::oneshot;
use rpc::{
    proto::{self, SSH_PROJECT_ID},
    AnyProtoClient, ErrorExt, TypedEnvelope,
};
use smol::{
    channel::{Receiver, Sender},
    stream::StreamExt,
};
use text::ReplicaId;
use util::{paths::SanitizedPath, ResultExt};
use worktree::{Entry, ProjectEntryId, UpdatedEntriesSet, Worktree, WorktreeId, WorktreeSettings};

use crate::{search::SearchQuery, ProjectPath};

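/// A search candidate handed from the path-scanning task to the filter workers,
/// along with a channel on which a confirmed match is reported back.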
struct MatchingEntry {
    worktree_path: Arc<Path>,
    path: ProjectPath,
    respond: oneshot::Sender<ProjectPath>,
}

enum WorktreeStoreState {
    Local {
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
    },
}

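/// Manages the set of worktrees belonging to a project, whether the worktrees
/// live on the local filesystem or on a remote host, and forwards their
/// updates to any downstream collaborators.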
pub struct WorktreeStore {
    next_entry_id: Arc<AtomicUsize>,
    downstream_client: Option<(AnyProtoClient, u64)>,
    retain_worktrees: bool,
    worktrees: Vec<WorktreeHandle>,
    worktrees_reordered: bool,
    #[allow(clippy::type_complexity)]
    loading_worktrees:
        HashMap<SanitizedPath, Shared<Task<Result<Model<Worktree>, Arc<anyhow::Error>>>>>,
    state: WorktreeStoreState,
}

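/// Events emitted by a [`WorktreeStore`] as worktrees are added, removed,
/// reordered, or updated.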
pub enum WorktreeStoreEvent {
    WorktreeAdded(Model<Worktree>),
    WorktreeRemoved(EntityId, WorktreeId),
    WorktreeReleased(EntityId, WorktreeId),
    WorktreeOrderChanged,
    WorktreeUpdateSent(Model<Worktree>),
    WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
    WorktreeUpdatedGitRepositories(WorktreeId),
    WorktreeDeletedEntry(WorktreeId, ProjectEntryId),
}

impl EventEmitter<WorktreeStoreEvent> for WorktreeStore {}

impl WorktreeStore {
    pub fn init(client: &AnyProtoClient) {
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_expand_project_entry);
        client.add_model_request_handler(Self::handle_git_branches);
        client.add_model_request_handler(Self::handle_update_branch);
    }

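    /// Creates a store whose worktrees are backed by the local filesystem.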
    pub fn local(retain_worktrees: bool, fs: Arc<dyn Fs>) -> Self {
        Self {
            next_entry_id: Default::default(),
            loading_worktrees: Default::default(),
            downstream_client: None,
            worktrees: Vec::new(),
            worktrees_reordered: false,
            retain_worktrees,
            state: WorktreeStoreState::Local { fs },
        }
    }

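    /// Creates a store whose worktrees live on a remote host and are accessed
    /// through the given upstream client.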
    pub fn remote(
        retain_worktrees: bool,
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
    ) -> Self {
        Self {
            next_entry_id: Default::default(),
            loading_worktrees: Default::default(),
            downstream_client: None,
            worktrees: Vec::new(),
            worktrees_reordered: false,
            retain_worktrees,
            state: WorktreeStoreState::Remote {
                upstream_client,
                upstream_project_id,
            },
        }
    }

    /// Iterates through all worktrees, including ones that don't appear in the project panel.
    pub fn worktrees(&self) -> impl '_ + DoubleEndedIterator<Item = Model<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade())
    }

    /// Iterates through all user-visible worktrees, the ones that appear in the project panel.
    pub fn visible_worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
        self.worktrees()
            .filter(|worktree| worktree.read(cx).is_visible())
    }

    pub fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option<Model<Worktree>> {
        self.worktrees()
            .find(|worktree| worktree.read(cx).id() == id)
    }

    pub fn current_branch(&self, repository: ProjectPath, cx: &AppContext) -> Option<Arc<str>> {
        self.worktree_for_id(repository.worktree_id, cx)?
            .read(cx)
            .git_entry(repository.path)?
            .branch()
    }

    pub fn worktree_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<Model<Worktree>> {
        self.worktrees()
            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
    }

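    /// Returns the worktree whose root contains the given absolute path, along
    /// with the path relative to that worktree's root.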
    pub fn find_worktree(
        &self,
        abs_path: impl Into<SanitizedPath>,
        cx: &AppContext,
    ) -> Option<(Model<Worktree>, PathBuf)> {
        let abs_path: SanitizedPath = abs_path.into();
        for tree in self.worktrees() {
            if let Ok(relative_path) = abs_path.as_path().strip_prefix(tree.read(cx).abs_path()) {
                return Some((tree.clone(), relative_path.into()));
            }
        }
        None
    }

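    /// Returns the worktree containing the given path, creating a new worktree
    /// rooted at that path if none exists yet.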
    pub fn find_or_create_worktree(
        &mut self,
        abs_path: impl AsRef<Path>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(Model<Worktree>, PathBuf)>> {
        let abs_path = abs_path.as_ref();
        if let Some((tree, relative_path)) = self.find_worktree(abs_path, cx) {
            Task::ready(Ok((tree, relative_path)))
        } else {
            let worktree = self.create_worktree(abs_path, visible, cx);
            cx.background_executor()
                .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
        }
    }

    pub fn entry_for_id<'a>(
        &'a self,
        entry_id: ProjectEntryId,
        cx: &'a AppContext,
    ) -> Option<&'a Entry> {
        self.worktrees()
            .find_map(|worktree| worktree.read(cx).entry_for_id(entry_id))
    }

    pub fn worktree_and_entry_for_id<'a>(
        &'a self,
        entry_id: ProjectEntryId,
        cx: &'a AppContext,
    ) -> Option<(Model<Worktree>, &'a Entry)> {
        self.worktrees().find_map(|worktree| {
            worktree
                .read(cx)
                .entry_for_id(entry_id)
                .map(|e| (worktree.clone(), e))
        })
    }

    pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Entry> {
        self.worktree_for_id(path.worktree_id, cx)?
            .read(cx)
            .entry_for_path(&path.path)
            .cloned()
    }

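    /// Creates a worktree rooted at the given path. Concurrent calls for the
    /// same path share a single loading task, so each path is loaded at most once.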
    pub fn create_worktree(
        &mut self,
        abs_path: impl Into<SanitizedPath>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Model<Worktree>>> {
        let abs_path: SanitizedPath = abs_path.into();
        if !self.loading_worktrees.contains_key(&abs_path) {
            let task = match &self.state {
                WorktreeStoreState::Remote {
                    upstream_client, ..
                } => {
                    if upstream_client.is_via_collab() {
                        Task::ready(Err(Arc::new(anyhow!("cannot create worktrees via collab"))))
                    } else {
                        self.create_ssh_worktree(
                            upstream_client.clone(),
                            abs_path.clone(),
                            visible,
                            cx,
                        )
                    }
                }
                WorktreeStoreState::Local { fs } => {
                    self.create_local_worktree(fs.clone(), abs_path.clone(), visible, cx)
                }
            };

            self.loading_worktrees
                .insert(abs_path.clone(), task.shared());
        }
        let task = self.loading_worktrees.get(&abs_path).unwrap().clone();
        cx.spawn(|this, mut cx| async move {
            let result = task.await;
            this.update(&mut cx, |this, _| this.loading_worktrees.remove(&abs_path))
                .ok();
            match result {
                Ok(worktree) => Ok(worktree),
                Err(err) => Err((*err).cloned()),
            }
        })
    }

    fn create_ssh_worktree(
        &mut self,
        client: AnyProtoClient,
        abs_path: impl Into<SanitizedPath>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Model<Worktree>, Arc<anyhow::Error>>> {
        let mut abs_path = Into::<SanitizedPath>::into(abs_path).to_string();
        // If we start with `/~`, the ssh path was something like `ssh://user@host/~/home-dir-folder/`,
        // in which case we want to strip the leading `/`.
        // On the host side, the `~` will get expanded.
        // That's what git does too: https://github.com/libgit2/libgit2/issues/3345#issuecomment-127050850
        if abs_path.starts_with("/~") {
            abs_path = abs_path[1..].to_string();
        }
        if abs_path.is_empty() || abs_path == "/" {
            abs_path = "~/".to_string();
        }
        cx.spawn(|this, mut cx| async move {
            let this = this.upgrade().context("Dropped worktree store")?;

            let response = client
                .request(proto::AddWorktree {
                    project_id: SSH_PROJECT_ID,
                    path: abs_path.clone(),
                    visible,
                })
                .await?;

            if let Some(existing_worktree) = this.read_with(&cx, |this, cx| {
                this.worktree_for_id(WorktreeId::from_proto(response.worktree_id), cx)
            })? {
                return Ok(existing_worktree);
            }

            let root_name = PathBuf::from(&response.canonicalized_path)
                .file_name()
                .map(|n| n.to_string_lossy().to_string())
                .unwrap_or(response.canonicalized_path.to_string());

            let worktree = cx.update(|cx| {
                Worktree::remote(
                    SSH_PROJECT_ID,
                    0,
                    proto::WorktreeMetadata {
                        id: response.worktree_id,
                        root_name,
                        visible,
                        abs_path: response.canonicalized_path,
                    },
                    client,
                    cx,
                )
            })?;

            this.update(&mut cx, |this, cx| {
                this.add(&worktree, cx);
            })?;
            Ok(worktree)
        })
    }

    fn create_local_worktree(
        &mut self,
        fs: Arc<dyn Fs>,
        abs_path: impl Into<SanitizedPath>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Model<Worktree>, Arc<anyhow::Error>>> {
        let next_entry_id = self.next_entry_id.clone();
        let path: SanitizedPath = abs_path.into();

        cx.spawn(move |this, mut cx| async move {
            let worktree = Worktree::local(path.clone(), visible, fs, next_entry_id, &mut cx).await;

            let worktree = worktree?;

            this.update(&mut cx, |this, cx| this.add(&worktree, cx))?;

            if visible {
                cx.update(|cx| {
                    cx.add_recent_document(path.as_path());
                })
                .log_err();
            }

            Ok(worktree)
        })
    }

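    /// Adds a worktree to the store, keeping the list sorted by absolute path
    /// unless the worktrees have been manually reordered, and sets up
    /// forwarding of the worktree's events.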
    pub fn add(&mut self, worktree: &Model<Worktree>, cx: &mut ModelContext<Self>) {
        let worktree_id = worktree.read(cx).id();
        debug_assert!(self.worktrees().all(|w| w.read(cx).id() != worktree_id));

        let push_strong_handle = self.retain_worktrees || worktree.read(cx).is_visible();
        let handle = if push_strong_handle {
            WorktreeHandle::Strong(worktree.clone())
        } else {
            WorktreeHandle::Weak(worktree.downgrade())
        };
        if self.worktrees_reordered {
            self.worktrees.push(handle);
        } else {
            let i = match self
                .worktrees
                .binary_search_by_key(&Some(worktree.read(cx).abs_path()), |other| {
                    other.upgrade().map(|worktree| worktree.read(cx).abs_path())
                }) {
                Ok(i) | Err(i) => i,
            };
            self.worktrees.insert(i, handle);
        }

        cx.emit(WorktreeStoreEvent::WorktreeAdded(worktree.clone()));
        self.send_project_updates(cx);

        let handle_id = worktree.entity_id();
        cx.subscribe(worktree, |_, worktree, event, cx| {
            let worktree_id = worktree.update(cx, |worktree, _| worktree.id());
            match event {
                worktree::Event::UpdatedEntries(changes) => {
                    cx.emit(WorktreeStoreEvent::WorktreeUpdatedEntries(
                        worktree.read(cx).id(),
                        changes.clone(),
                    ));
                }
                worktree::Event::UpdatedGitRepositories(_) => {
                    cx.emit(WorktreeStoreEvent::WorktreeUpdatedGitRepositories(
                        worktree_id,
                    ));
                }
                worktree::Event::DeletedEntry(id) => {
                    cx.emit(WorktreeStoreEvent::WorktreeDeletedEntry(worktree_id, *id))
                }
            }
        })
        .detach();
        cx.observe_release(worktree, move |this, worktree, cx| {
            cx.emit(WorktreeStoreEvent::WorktreeReleased(
                handle_id,
                worktree.id(),
            ));
            cx.emit(WorktreeStoreEvent::WorktreeRemoved(
                handle_id,
                worktree.id(),
            ));
            this.send_project_updates(cx);
        })
        .detach();
    }

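    /// Removes the worktree with the given id and notifies subscribers and any
    /// downstream collaborators.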
    pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
        self.worktrees.retain(|worktree| {
            if let Some(worktree) = worktree.upgrade() {
                if worktree.read(cx).id() == id_to_remove {
                    cx.emit(WorktreeStoreEvent::WorktreeRemoved(
                        worktree.entity_id(),
                        id_to_remove,
                    ));
                    false
                } else {
                    true
                }
            } else {
                false
            }
        });
        self.send_project_updates(cx);
    }

    pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool) {
        self.worktrees_reordered = worktrees_reordered;
    }

    fn upstream_client(&self) -> Option<(AnyProtoClient, u64)> {
        match &self.state {
            WorktreeStoreState::Remote {
                upstream_client,
                upstream_project_id,
                ..
            } => Some((upstream_client.clone(), *upstream_project_id)),
            WorktreeStoreState::Local { .. } => None,
        }
    }

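    /// Replaces the store's worktrees with the set described by the given
    /// protobuf metadata, reusing worktrees that are already open.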
    pub fn set_worktrees_from_proto(
        &mut self,
        worktrees: Vec<proto::WorktreeMetadata>,
        replica_id: ReplicaId,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let mut old_worktrees_by_id = self
            .worktrees
            .drain(..)
            .filter_map(|worktree| {
                let worktree = worktree.upgrade()?;
                Some((worktree.read(cx).id(), worktree))
            })
            .collect::<HashMap<_, _>>();

        let (client, project_id) = self
            .upstream_client()
            .clone()
            .ok_or_else(|| anyhow!("invalid project"))?;

        for worktree in worktrees {
            if let Some(old_worktree) =
                old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
            {
                let push_strong_handle =
                    self.retain_worktrees || old_worktree.read(cx).is_visible();
                let handle = if push_strong_handle {
                    WorktreeHandle::Strong(old_worktree.clone())
                } else {
                    WorktreeHandle::Weak(old_worktree.downgrade())
                };
                self.worktrees.push(handle);
            } else {
                self.add(
                    &Worktree::remote(project_id, replica_id, worktree, client.clone(), cx),
                    cx,
                );
            }
        }
        self.send_project_updates(cx);

        Ok(())
    }

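    /// Moves the worktree with id `source` to the position currently occupied
    /// by the worktree with id `destination`, marking the order as manually changed.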
    pub fn move_worktree(
        &mut self,
        source: WorktreeId,
        destination: WorktreeId,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        if source == destination {
            return Ok(());
        }

        let mut source_index = None;
        let mut destination_index = None;
        for (i, worktree) in self.worktrees.iter().enumerate() {
            if let Some(worktree) = worktree.upgrade() {
                let worktree_id = worktree.read(cx).id();
                if worktree_id == source {
                    source_index = Some(i);
                    if destination_index.is_some() {
                        break;
                    }
                } else if worktree_id == destination {
                    destination_index = Some(i);
                    if source_index.is_some() {
                        break;
                    }
                }
            }
        }

        let source_index =
            source_index.with_context(|| format!("Missing worktree for id {source}"))?;
        let destination_index =
            destination_index.with_context(|| format!("Missing worktree for id {destination}"))?;

        if source_index == destination_index {
            return Ok(());
        }

        let worktree_to_move = self.worktrees.remove(source_index);
        self.worktrees.insert(destination_index, worktree_to_move);
        self.worktrees_reordered = true;
        cx.emit(WorktreeStoreEvent::WorktreeOrderChanged);
        cx.notify();
        Ok(())
    }

    pub fn disconnected_from_host(&mut self, cx: &mut AppContext) {
        for worktree in &self.worktrees {
            if let Some(worktree) = worktree.upgrade() {
                worktree.update(cx, |worktree, _| {
                    if let Some(worktree) = worktree.as_remote_mut() {
                        worktree.disconnected_from_host();
                    }
                });
            }
        }
    }

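    /// Sends the current worktree metadata to the downstream client, if any,
    /// and starts observing each worktree so that subsequent updates are
    /// forwarded as well.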
    pub fn send_project_updates(&mut self, cx: &mut ModelContext<Self>) {
        let Some((downstream_client, project_id)) = self.downstream_client.clone() else {
            return;
        };

        let update = proto::UpdateProject {
            project_id,
            worktrees: self.worktree_metadata_protos(cx),
        };

        // collab has bad concurrency guarantees, so we send requests in serial.
        let update_project = if downstream_client.is_via_collab() {
            Some(downstream_client.request(update))
        } else {
            downstream_client.send(update).log_err();
            None
        };
        cx.spawn(|this, mut cx| async move {
            if let Some(update_project) = update_project {
                update_project.await?;
            }

            this.update(&mut cx, |this, cx| {
                let worktrees = this.worktrees().collect::<Vec<_>>();

                for worktree in worktrees {
                    worktree.update(cx, |worktree, cx| {
                        let client = downstream_client.clone();
                        worktree.observe_updates(project_id, cx, {
                            move |update| {
                                let client = client.clone();
                                async move {
                                    if client.is_via_collab() {
                                        client
                                            .request(update)
                                            .map(|result| result.log_err().is_some())
                                            .await
                                    } else {
                                        client.send(update).log_err().is_some()
                                    }
                                }
                            }
                        });
                    });

                    cx.emit(WorktreeStoreEvent::WorktreeUpdateSent(worktree.clone()))
                }

                anyhow::Ok(())
            })
        })
        .detach_and_log_err(cx);
    }

    pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec<proto::WorktreeMetadata> {
        self.worktrees()
            .map(|worktree| {
                let worktree = worktree.read(cx);
                proto::WorktreeMetadata {
                    id: worktree.id().to_proto(),
                    root_name: worktree.root_name().into(),
                    visible: worktree.is_visible(),
                    abs_path: worktree.abs_path().to_string_lossy().into(),
                }
            })
            .collect()
    }

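    /// Marks the project as shared with the given downstream client, upgrading
    /// all worktree handles to strong handles and pushing the initial project
    /// state downstream.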
    pub fn shared(
        &mut self,
        remote_id: u64,
        downstream_client: AnyProtoClient,
        cx: &mut ModelContext<Self>,
    ) {
        self.retain_worktrees = true;
        self.downstream_client = Some((downstream_client, remote_id));

        // When shared, retain all worktrees
        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade() {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }
        self.send_project_updates(cx);
    }

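    /// Stops sharing the project: worktree updates are no longer observed, and
    /// handles to non-visible worktrees are downgraded so they can be released.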
    pub fn unshared(&mut self, cx: &mut ModelContext<Self>) {
        self.retain_worktrees = false;
        self.downstream_client.take();

        // When not shared, only retain the visible worktrees
        for worktree_handle in self.worktrees.iter_mut() {
            if let WorktreeHandle::Strong(worktree) = worktree_handle {
                let is_visible = worktree.update(cx, |worktree, _| {
                    worktree.stop_observing_updates();
                    worktree.is_visible()
                });
                if !is_visible {
                    *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                }
            }
        }
    }

    /// Searches the visible worktrees and returns paths to files that *might* match the search query.
    pub fn find_search_candidates(
        &self,
        query: SearchQuery,
        limit: usize,
        open_entries: HashSet<ProjectEntryId>,
        fs: Arc<dyn Fs>,
        cx: &ModelContext<Self>,
    ) -> Receiver<ProjectPath> {
        let snapshots = self
            .visible_worktrees(cx)
            .filter_map(|tree| {
                let tree = tree.read(cx);
                Some((tree.snapshot(), tree.as_local()?.settings()))
            })
            .collect::<Vec<_>>();

        let executor = cx.background_executor().clone();

        // We want to return entries in the order they are in the worktrees, so we have one
        // thread that iterates over the worktrees (and ignored directories) as necessary,
        // and pushes a oneshot::Receiver to the output channel and a oneshot::Sender to the filter
        // channel.
        // We spawn a number of workers that take items from the filter channel and check the query
        // against the version of the file on disk.
        let (filter_tx, filter_rx) = smol::channel::bounded(64);
        let (output_tx, output_rx) = smol::channel::bounded(64);
        let (matching_paths_tx, matching_paths_rx) = smol::channel::unbounded();

        let input = cx.background_executor().spawn({
            let fs = fs.clone();
            let query = query.clone();
            async move {
                Self::find_candidate_paths(
                    fs,
                    snapshots,
                    open_entries,
                    query,
                    filter_tx,
                    output_tx,
                )
                .await
                .log_err();
            }
        });
        const MAX_CONCURRENT_FILE_SCANS: usize = 64;
        let filters = cx.background_executor().spawn(async move {
            let fs = &fs;
            let query = &query;
            executor
                .scoped(move |scope| {
                    for _ in 0..MAX_CONCURRENT_FILE_SCANS {
                        let filter_rx = filter_rx.clone();
                        scope.spawn(async move {
                            Self::filter_paths(fs, filter_rx, query)
                                .await
                                .log_with_level(log::Level::Debug);
                        })
                    }
                })
                .await;
        });
        cx.background_executor()
            .spawn(async move {
                let mut matched = 0;
                while let Ok(mut receiver) = output_rx.recv().await {
                    let Some(path) = receiver.next().await else {
                        continue;
                    };
                    let Ok(_) = matching_paths_tx.send(path).await else {
                        break;
                    };
                    matched += 1;
                    if matched == limit {
                        break;
                    }
                }
                drop(input);
                drop(filters);
            })
            .detach();
        matching_paths_rx
    }

    fn scan_ignored_dir<'a>(
        fs: &'a Arc<dyn Fs>,
        snapshot: &'a worktree::Snapshot,
        path: &'a Path,
        query: &'a SearchQuery,
        include_root: bool,
        filter_tx: &'a Sender<MatchingEntry>,
        output_tx: &'a Sender<oneshot::Receiver<ProjectPath>>,
    ) -> BoxFuture<'a, Result<()>> {
        async move {
            let abs_path = snapshot.abs_path().join(path);
            let Some(mut files) = fs
                .read_dir(&abs_path)
                .await
                .with_context(|| format!("listing ignored path {abs_path:?}"))
                .log_err()
            else {
                return Ok(());
            };

            let mut results = Vec::new();

            while let Some(Ok(file)) = files.next().await {
                let Some(metadata) = fs
                    .metadata(&file)
                    .await
                    .with_context(|| format!("fetching fs metadata for {abs_path:?}"))
                    .log_err()
                    .flatten()
                else {
                    continue;
                };
                if metadata.is_symlink || metadata.is_fifo {
                    continue;
                }
                results.push((
                    file.strip_prefix(snapshot.abs_path())?.to_path_buf(),
                    !metadata.is_dir,
                ))
            }
            results.sort_by(|(a_path, _), (b_path, _)| a_path.cmp(b_path));
            for (path, is_file) in results {
                if is_file {
                    if query.filters_path() {
                        let matched_path = if include_root {
                            let mut full_path = PathBuf::from(snapshot.root_name());
                            full_path.push(&path);
                            query.file_matches(&full_path)
                        } else {
                            query.file_matches(&path)
                        };
                        if !matched_path {
                            continue;
                        }
                    }
                    let (tx, rx) = oneshot::channel();
                    output_tx.send(rx).await?;
                    filter_tx
                        .send(MatchingEntry {
                            respond: tx,
                            worktree_path: snapshot.abs_path().clone(),
                            path: ProjectPath {
                                worktree_id: snapshot.id(),
                                path: Arc::from(path),
                            },
                        })
                        .await?;
                } else {
                    Self::scan_ignored_dir(
                        fs,
                        snapshot,
                        &path,
                        query,
                        include_root,
                        filter_tx,
                        output_tx,
                    )
                    .await?;
                }
            }
            Ok(())
        }
        .boxed()
    }

    async fn find_candidate_paths(
        fs: Arc<dyn Fs>,
        snapshots: Vec<(worktree::Snapshot, WorktreeSettings)>,
        open_entries: HashSet<ProjectEntryId>,
        query: SearchQuery,
        filter_tx: Sender<MatchingEntry>,
        output_tx: Sender<oneshot::Receiver<ProjectPath>>,
    ) -> Result<()> {
        let include_root = snapshots.len() > 1;
        for (snapshot, settings) in snapshots {
            for entry in snapshot.entries(query.include_ignored(), 0) {
                if entry.is_dir() && entry.is_ignored {
                    if !settings.is_path_excluded(&entry.path) {
                        Self::scan_ignored_dir(
                            &fs,
                            &snapshot,
                            &entry.path,
                            &query,
                            include_root,
                            &filter_tx,
                            &output_tx,
                        )
                        .await?;
                    }
                    continue;
                }

                if entry.is_fifo || !entry.is_file() {
                    continue;
                }

                if query.filters_path() {
                    let matched_path = if include_root {
                        let mut full_path = PathBuf::from(snapshot.root_name());
                        full_path.push(&entry.path);
                        query.file_matches(&full_path)
                    } else {
                        query.file_matches(&entry.path)
                    };
                    if !matched_path {
                        continue;
                    }
                }

                let (mut tx, rx) = oneshot::channel();

                if open_entries.contains(&entry.id) {
                    tx.send(ProjectPath {
                        worktree_id: snapshot.id(),
                        path: entry.path.clone(),
                    })
                    .await?;
                } else {
                    filter_tx
                        .send(MatchingEntry {
                            respond: tx,
                            worktree_path: snapshot.abs_path().clone(),
                            path: ProjectPath {
                                worktree_id: snapshot.id(),
                                path: entry.path.clone(),
                            },
                        })
                        .await?;
                }

                output_tx.send(rx).await?;
            }
        }
        Ok(())
    }

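    /// Returns the branches of the git repository containing the given project
    /// path, reading the local repository directly or asking the remote host.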
    pub fn branches(
        &self,
        project_path: ProjectPath,
        cx: &AppContext,
    ) -> Task<Result<Vec<git::repository::Branch>>> {
        let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
            return Task::ready(Err(anyhow!("No worktree found for ProjectPath")));
        };

        match worktree.read(cx) {
            Worktree::Local(local_worktree) => {
                let branches = util::maybe!({
                    let worktree_error = |error| {
                        format!(
                            "{} for worktree {}",
                            error,
                            local_worktree.abs_path().to_string_lossy()
                        )
                    };

                    let entry = local_worktree
                        .git_entry(project_path.path)
                        .with_context(|| worktree_error("No git entry found"))?;

                    let repo = local_worktree
                        .get_local_repo(&entry)
                        .with_context(|| worktree_error("No repository found"))?
                        .repo()
                        .clone();

                    repo.branches()
                });

                Task::ready(branches)
            }
            Worktree::Remote(remote_worktree) => {
                let request = remote_worktree.client().request(proto::GitBranches {
                    project_id: remote_worktree.project_id(),
                    repository: Some(proto::ProjectPath {
                        worktree_id: project_path.worktree_id.to_proto(),
                        path: project_path.path.to_string_lossy().to_string(), // Root path
                    }),
                });

                cx.background_executor().spawn(async move {
                    let response = request.await?;

                    let branches = response
                        .branches
                        .into_iter()
                        .map(|proto_branch| git::repository::Branch {
                            is_head: proto_branch.is_head,
                            name: proto_branch.name.into(),
                            unix_timestamp: proto_branch
                                .unix_timestamp
                                .map(|timestamp| timestamp as i64),
                        })
                        .collect();

                    Ok(branches)
                })
            }
        }
    }

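    /// Switches the repository containing the given project path to the named
    /// branch, creating the branch first if it does not exist.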
    pub fn update_or_create_branch(
        &self,
        repository: ProjectPath,
        new_branch: String,
        cx: &AppContext,
    ) -> Task<Result<()>> {
        let Some(worktree) = self.worktree_for_id(repository.worktree_id, cx) else {
            return Task::ready(Err(anyhow!("No worktree found for ProjectPath")));
        };

        match worktree.read(cx) {
            Worktree::Local(local_worktree) => {
                let result = util::maybe!({
                    let worktree_error = |error| {
                        format!(
                            "{} for worktree {}",
                            error,
                            local_worktree.abs_path().to_string_lossy()
                        )
                    };

                    let entry = local_worktree
                        .git_entry(repository.path)
                        .with_context(|| worktree_error("No git entry found"))?;

                    let repo = local_worktree
                        .get_local_repo(&entry)
                        .with_context(|| worktree_error("No repository found"))?
                        .repo()
                        .clone();

                    if !repo.branch_exits(&new_branch)? {
                        repo.create_branch(&new_branch)?;
                    }

                    repo.change_branch(&new_branch)?;
                    Ok(())
                });

                Task::ready(result)
            }
            Worktree::Remote(remote_worktree) => {
                let request = remote_worktree.client().request(proto::UpdateGitBranch {
                    project_id: remote_worktree.project_id(),
                    repository: Some(proto::ProjectPath {
                        worktree_id: repository.worktree_id.to_proto(),
                        path: repository.path.to_string_lossy().to_string(), // Root path
                    }),
                    branch_name: new_branch,
                });

                cx.background_executor().spawn(async move {
                    request.await?;
                    Ok(())
                })
            }
        }
    }

    async fn filter_paths(
        fs: &Arc<dyn Fs>,
        mut input: Receiver<MatchingEntry>,
        query: &SearchQuery,
    ) -> Result<()> {
        let mut input = pin!(input);
        while let Some(mut entry) = input.next().await {
            let abs_path = entry.worktree_path.join(&entry.path.path);
            let Some(file) = fs.open_sync(&abs_path).await.log_err() else {
                continue;
            };

            let mut file = BufReader::new(file);
            let file_start = file.fill_buf()?;

            if let Err(Some(starting_position)) =
                std::str::from_utf8(file_start).map_err(|e| e.error_len())
            {
                // Before attempting to match the file content, throw away files that have invalid
                // UTF-8 sequences early on. That way we can still match files in a streaming
                // fashion without having to look at "obviously binary" files.
                return Err(anyhow!(
                    "Invalid UTF-8 sequence in file {abs_path:?} at byte position {starting_position}"
                ));
            }

            if query.detect(file).unwrap_or(false) {
                entry.respond.send(entry.path).await?
            }
        }

        Ok(())
    }

    pub async fn handle_create_project_entry(
        this: Model<Self>,
        envelope: TypedEnvelope<proto::CreateProjectEntry>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let worktree = this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            this.worktree_for_id(worktree_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })??;
        Worktree::handle_create_entry(worktree, envelope.payload, cx).await
    }

    pub async fn handle_copy_project_entry(
        this: Model<Self>,
        envelope: TypedEnvelope<proto::CopyProjectEntry>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.update(&mut cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })??;
        Worktree::handle_copy_entry(worktree, envelope.payload, cx).await
    }

    pub async fn handle_delete_project_entry(
        this: Model<Self>,
        envelope: TypedEnvelope<proto::DeleteProjectEntry>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.update(&mut cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })??;
        Worktree::handle_delete_entry(worktree, envelope.payload, cx).await
    }

    pub async fn handle_expand_project_entry(
        this: Model<Self>,
        envelope: TypedEnvelope<proto::ExpandProjectEntry>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ExpandProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this
            .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
            .ok_or_else(|| anyhow!("invalid request"))?;
        Worktree::handle_expand_entry(worktree, envelope.payload, cx).await
    }

    pub async fn handle_git_branches(
        this: Model<Self>,
        branches: TypedEnvelope<proto::GitBranches>,
        cx: AsyncAppContext,
    ) -> Result<proto::GitBranchesResponse> {
        let project_path = branches
            .payload
            .repository
            .clone()
            .context("Invalid GitBranches call")?;
        let project_path = ProjectPath {
            worktree_id: WorktreeId::from_proto(project_path.worktree_id),
            path: Path::new(&project_path.path).into(),
        };

        let branches = this
            .read_with(&cx, |this, cx| this.branches(project_path, cx))?
            .await?;

        Ok(proto::GitBranchesResponse {
            branches: branches
                .into_iter()
                .map(|branch| proto::Branch {
                    is_head: branch.is_head,
                    name: branch.name.to_string(),
                    unix_timestamp: branch.unix_timestamp.map(|timestamp| timestamp as u64),
                })
                .collect(),
        })
    }

    pub async fn handle_update_branch(
        this: Model<Self>,
        update_branch: TypedEnvelope<proto::UpdateGitBranch>,
        cx: AsyncAppContext,
    ) -> Result<proto::Ack> {
        let project_path = update_branch
            .payload
            .repository
            .clone()
            .context("Invalid UpdateGitBranch call")?;
        let project_path = ProjectPath {
            worktree_id: WorktreeId::from_proto(project_path.worktree_id),
            path: Path::new(&project_path.path).into(),
        };
        let new_branch = update_branch.payload.branch_name;

        this.read_with(&cx, |this, cx| {
            this.update_or_create_branch(project_path, new_branch, cx)
        })?
        .await?;

        Ok(proto::Ack {})
    }
}

#[derive(Clone, Debug)]
enum WorktreeHandle {
    Strong(Model<Worktree>),
    Weak(WeakModel<Worktree>),
}

impl WorktreeHandle {
    fn upgrade(&self) -> Option<Model<Worktree>> {
        match self {
            WorktreeHandle::Strong(handle) => Some(handle.clone()),
            WorktreeHandle::Weak(handle) => handle.upgrade(),
        }
    }
}