use std::{
    path::{Path, PathBuf},
    sync::{atomic::AtomicUsize, Arc},
};

use anyhow::{anyhow, Context as _, Result};
use collections::{HashMap, HashSet};
use fs::Fs;
use futures::{
    future::{BoxFuture, Shared},
    FutureExt, SinkExt,
};
use gpui::{
    AppContext, AsyncAppContext, EntityId, EventEmitter, Model, ModelContext, Task, WeakModel,
};
use postage::oneshot;
use rpc::{
    proto::{self, SSH_PROJECT_ID},
    AnyProtoClient, ErrorExt, TypedEnvelope,
};
use smol::{
    channel::{Receiver, Sender},
    stream::StreamExt,
};
use text::ReplicaId;
use util::{paths::SanitizedPath, ResultExt};
use worktree::{Entry, ProjectEntryId, Worktree, WorktreeId, WorktreeSettings};

use crate::{search::SearchQuery, LspStore, ProjectPath};

struct MatchingEntry {
    worktree_path: Arc<Path>,
    path: ProjectPath,
    respond: oneshot::Sender<ProjectPath>,
}

enum WorktreeStoreState {
    Local {
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
    },
}

pub struct WorktreeStore {
    next_entry_id: Arc<AtomicUsize>,
    downstream_client: Option<(AnyProtoClient, u64)>,
    retain_worktrees: bool,
    worktrees: Vec<WorktreeHandle>,
    worktrees_reordered: bool,
    #[allow(clippy::type_complexity)]
    loading_worktrees:
        HashMap<SanitizedPath, Shared<Task<Result<Model<Worktree>, Arc<anyhow::Error>>>>>,
    state: WorktreeStoreState,
}

pub enum WorktreeStoreEvent {
    WorktreeAdded(Model<Worktree>),
    WorktreeRemoved(EntityId, WorktreeId),
    WorktreeReleased(EntityId, WorktreeId),
    WorktreeOrderChanged,
    WorktreeUpdateSent(Model<Worktree>),
    GitRepositoryUpdated,
}

impl EventEmitter<WorktreeStoreEvent> for WorktreeStore {}

impl WorktreeStore {
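    /// Registers the RPC message handlers that this store responds to on the given client.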
    pub fn init(client: &AnyProtoClient) {
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_expand_project_entry);
        client.add_model_request_handler(Self::handle_git_branches);
        client.add_model_request_handler(Self::handle_update_branch);
    }

    pub fn local(retain_worktrees: bool, fs: Arc<dyn Fs>) -> Self {
        Self {
            next_entry_id: Default::default(),
            loading_worktrees: Default::default(),
            downstream_client: None,
            worktrees: Vec::new(),
            worktrees_reordered: false,
            retain_worktrees,
            state: WorktreeStoreState::Local { fs },
        }
    }

    pub fn remote(
        retain_worktrees: bool,
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
    ) -> Self {
        Self {
            next_entry_id: Default::default(),
            loading_worktrees: Default::default(),
            downstream_client: None,
            worktrees: Vec::new(),
            worktrees_reordered: false,
            retain_worktrees,
            state: WorktreeStoreState::Remote {
                upstream_client,
                upstream_project_id,
            },
        }
    }

    /// Iterates through all worktrees, including ones that don't appear in the project panel.
    pub fn worktrees(&self) -> impl '_ + DoubleEndedIterator<Item = Model<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade())
    }

    /// Iterates through all user-visible worktrees, the ones that appear in the project panel.
    pub fn visible_worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
        self.worktrees()
            .filter(|worktree| worktree.read(cx).is_visible())
    }

    pub fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option<Model<Worktree>> {
        self.worktrees()
            .find(|worktree| worktree.read(cx).id() == id)
    }

    pub fn current_branch(&self, repository: ProjectPath, cx: &AppContext) -> Option<Arc<str>> {
        self.worktree_for_id(repository.worktree_id, cx)?
            .read(cx)
            .git_entry(repository.path)?
            .branch()
    }

    pub fn worktree_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<Model<Worktree>> {
        self.worktrees()
            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
    }

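    /// Returns the worktree whose absolute path contains `abs_path`, along with the
    /// remainder of `abs_path` relative to that worktree's root.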
    pub fn find_worktree(
        &self,
        abs_path: impl Into<SanitizedPath>,
        cx: &AppContext,
    ) -> Option<(Model<Worktree>, PathBuf)> {
        let abs_path: SanitizedPath = abs_path.into();
        for tree in self.worktrees() {
            if let Ok(relative_path) = abs_path.as_path().strip_prefix(tree.read(cx).abs_path()) {
                return Some((tree.clone(), relative_path.into()));
            }
        }
        None
    }

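    /// Like [`Self::find_worktree`], but creates a new worktree rooted at `abs_path` if no
    /// existing worktree contains that path.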
    pub fn find_or_create_worktree(
        &mut self,
        abs_path: impl AsRef<Path>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(Model<Worktree>, PathBuf)>> {
        let abs_path = abs_path.as_ref();
        if let Some((tree, relative_path)) = self.find_worktree(abs_path, cx) {
            Task::ready(Ok((tree, relative_path)))
        } else {
            let worktree = self.create_worktree(abs_path, visible, cx);
            cx.background_executor()
                .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
        }
    }

    pub fn entry_for_id<'a>(
        &'a self,
        entry_id: ProjectEntryId,
        cx: &'a AppContext,
    ) -> Option<&'a Entry> {
        self.worktrees()
            .find_map(|worktree| worktree.read(cx).entry_for_id(entry_id))
    }

    pub fn worktree_and_entry_for_id<'a>(
        &'a self,
        entry_id: ProjectEntryId,
        cx: &'a AppContext,
    ) -> Option<(Model<Worktree>, &'a Entry)> {
        self.worktrees().find_map(|worktree| {
            worktree
                .read(cx)
                .entry_for_id(entry_id)
                .map(|e| (worktree.clone(), e))
        })
    }

    pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Entry> {
        self.worktree_for_id(path.worktree_id, cx)?
            .read(cx)
            .entry_for_path(&path.path)
            .cloned()
    }

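    /// Creates a worktree rooted at `abs_path`, deduplicating concurrent requests for the
    /// same path through the `loading_worktrees` map.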
    pub fn create_worktree(
        &mut self,
        abs_path: impl Into<SanitizedPath>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Model<Worktree>>> {
        let abs_path: SanitizedPath = abs_path.into();
        if !self.loading_worktrees.contains_key(&abs_path) {
            let task = match &self.state {
                WorktreeStoreState::Remote {
                    upstream_client, ..
                } => {
                    if upstream_client.is_via_collab() {
                        Task::ready(Err(Arc::new(anyhow!("cannot create worktrees via collab"))))
                    } else {
                        self.create_ssh_worktree(
                            upstream_client.clone(),
                            abs_path.clone(),
                            visible,
                            cx,
                        )
                    }
                }
                WorktreeStoreState::Local { fs } => {
                    self.create_local_worktree(fs.clone(), abs_path.clone(), visible, cx)
                }
            };

            self.loading_worktrees
                .insert(abs_path.clone(), task.shared());
        }
        let task = self.loading_worktrees.get(&abs_path).unwrap().clone();
        cx.spawn(|this, mut cx| async move {
            let result = task.await;
            this.update(&mut cx, |this, _| this.loading_worktrees.remove(&abs_path))
                .ok();
            match result {
                Ok(worktree) => Ok(worktree),
                Err(err) => Err((*err).cloned()),
            }
        })
    }

    fn create_ssh_worktree(
        &mut self,
        client: AnyProtoClient,
        abs_path: impl Into<SanitizedPath>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Model<Worktree>, Arc<anyhow::Error>>> {
        let mut abs_path = Into::<SanitizedPath>::into(abs_path).to_string();
        // If the path starts with `/~`, the ssh path was something like `ssh://user@host/~/home-dir-folder/`,
        // in which case we want to strip the leading `/`.
        // On the host side, the `~` will get expanded.
        // That's what git does too: https://github.com/libgit2/libgit2/issues/3345#issuecomment-127050850
        if abs_path.starts_with("/~") {
            abs_path = abs_path[1..].to_string();
        }
        if abs_path.is_empty() || abs_path == "/" {
            abs_path = "~/".to_string();
        }
        cx.spawn(|this, mut cx| async move {
            let this = this.upgrade().context("Dropped worktree store")?;

            let response = client
                .request(proto::AddWorktree {
                    project_id: SSH_PROJECT_ID,
                    path: abs_path.clone(),
                    visible,
                })
                .await?;

            if let Some(existing_worktree) = this.read_with(&cx, |this, cx| {
                this.worktree_for_id(WorktreeId::from_proto(response.worktree_id), cx)
            })? {
                return Ok(existing_worktree);
            }

            let root_name = PathBuf::from(&response.canonicalized_path)
                .file_name()
                .map(|n| n.to_string_lossy().to_string())
                .unwrap_or(response.canonicalized_path.to_string());

            let worktree = cx.update(|cx| {
                Worktree::remote(
                    SSH_PROJECT_ID,
                    0,
                    proto::WorktreeMetadata {
                        id: response.worktree_id,
                        root_name,
                        visible,
                        abs_path: response.canonicalized_path,
                    },
                    client,
                    cx,
                )
            })?;

            this.update(&mut cx, |this, cx| {
                this.add(&worktree, cx);
            })?;
            Ok(worktree)
        })
    }

    fn create_local_worktree(
        &mut self,
        fs: Arc<dyn Fs>,
        abs_path: impl Into<SanitizedPath>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Model<Worktree>, Arc<anyhow::Error>>> {
        let next_entry_id = self.next_entry_id.clone();
        let path: SanitizedPath = abs_path.into();

        cx.spawn(move |this, mut cx| async move {
            let worktree = Worktree::local(path.clone(), visible, fs, next_entry_id, &mut cx).await;

            let worktree = worktree?;

            this.update(&mut cx, |this, cx| this.add(&worktree, cx))?;

            if visible {
                cx.update(|cx| {
                    cx.add_recent_document(path.as_path());
                })
                .log_err();
            }

            Ok(worktree)
        })
    }

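    /// Registers a worktree with the store, keeping the list sorted by absolute path unless
    /// the worktrees have been manually reordered, and wires up release and event forwarding.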
    pub fn add(&mut self, worktree: &Model<Worktree>, cx: &mut ModelContext<Self>) {
        let worktree_id = worktree.read(cx).id();
        debug_assert!(self.worktrees().all(|w| w.read(cx).id() != worktree_id));

        let push_strong_handle = self.retain_worktrees || worktree.read(cx).is_visible();
        let handle = if push_strong_handle {
            WorktreeHandle::Strong(worktree.clone())
        } else {
            WorktreeHandle::Weak(worktree.downgrade())
        };
        if self.worktrees_reordered {
            self.worktrees.push(handle);
        } else {
            let i = match self
                .worktrees
                .binary_search_by_key(&Some(worktree.read(cx).abs_path()), |other| {
                    other.upgrade().map(|worktree| worktree.read(cx).abs_path())
                }) {
                Ok(i) | Err(i) => i,
            };
            self.worktrees.insert(i, handle);
        }

        cx.emit(WorktreeStoreEvent::WorktreeAdded(worktree.clone()));
        self.send_project_updates(cx);

        let handle_id = worktree.entity_id();
        cx.observe_release(worktree, move |this, worktree, cx| {
            cx.emit(WorktreeStoreEvent::WorktreeReleased(
                handle_id,
                worktree.id(),
            ));
            cx.emit(WorktreeStoreEvent::WorktreeRemoved(
                handle_id,
                worktree.id(),
            ));
            this.send_project_updates(cx);
        })
        .detach();

        cx.subscribe(
            worktree,
            |_this, _, event: &worktree::Event, cx| match event {
                worktree::Event::UpdatedGitRepositories(_) => {
                    cx.emit(WorktreeStoreEvent::GitRepositoryUpdated);
                }
                worktree::Event::DeletedEntry(_) | worktree::Event::UpdatedEntries(_) => {}
            },
        )
        .detach();
    }

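    /// Drops the worktree with the given id, emitting `WorktreeRemoved` and resending the
    /// project metadata to any downstream client.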
    pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
        self.worktrees.retain(|worktree| {
            if let Some(worktree) = worktree.upgrade() {
                if worktree.read(cx).id() == id_to_remove {
                    cx.emit(WorktreeStoreEvent::WorktreeRemoved(
                        worktree.entity_id(),
                        id_to_remove,
                    ));
                    false
                } else {
                    true
                }
            } else {
                false
            }
        });
        self.send_project_updates(cx);
    }

    pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool) {
        self.worktrees_reordered = worktrees_reordered;
    }

    fn upstream_client(&self) -> Option<(AnyProtoClient, u64)> {
        match &self.state {
            WorktreeStoreState::Remote {
                upstream_client,
                upstream_project_id,
                ..
            } => Some((upstream_client.clone(), *upstream_project_id)),
            WorktreeStoreState::Local { .. } => None,
        }
    }

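    /// Replaces the current set of worktrees with the metadata received from the upstream
    /// host, reusing existing worktree models whose ids match.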
    pub fn set_worktrees_from_proto(
        &mut self,
        worktrees: Vec<proto::WorktreeMetadata>,
        replica_id: ReplicaId,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let mut old_worktrees_by_id = self
            .worktrees
            .drain(..)
            .filter_map(|worktree| {
                let worktree = worktree.upgrade()?;
                Some((worktree.read(cx).id(), worktree))
            })
            .collect::<HashMap<_, _>>();

        let (client, project_id) = self
            .upstream_client()
            .clone()
            .ok_or_else(|| anyhow!("invalid project"))?;

        for worktree in worktrees {
            if let Some(old_worktree) =
                old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
            {
                let push_strong_handle =
                    self.retain_worktrees || old_worktree.read(cx).is_visible();
                let handle = if push_strong_handle {
                    WorktreeHandle::Strong(old_worktree.clone())
                } else {
                    WorktreeHandle::Weak(old_worktree.downgrade())
                };
                self.worktrees.push(handle);
            } else {
                self.add(
                    &Worktree::remote(project_id, replica_id, worktree, client.clone(), cx),
                    cx,
                );
            }
        }
        self.send_project_updates(cx);

        Ok(())
    }

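    /// Moves the worktree identified by `source` to the position currently occupied by
    /// `destination`, marking the list as manually reordered.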
    pub fn move_worktree(
        &mut self,
        source: WorktreeId,
        destination: WorktreeId,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        if source == destination {
            return Ok(());
        }

        let mut source_index = None;
        let mut destination_index = None;
        for (i, worktree) in self.worktrees.iter().enumerate() {
            if let Some(worktree) = worktree.upgrade() {
                let worktree_id = worktree.read(cx).id();
                if worktree_id == source {
                    source_index = Some(i);
                    if destination_index.is_some() {
                        break;
                    }
                } else if worktree_id == destination {
                    destination_index = Some(i);
                    if source_index.is_some() {
                        break;
                    }
                }
            }
        }

        let source_index =
            source_index.with_context(|| format!("Missing worktree for id {source}"))?;
        let destination_index =
            destination_index.with_context(|| format!("Missing worktree for id {destination}"))?;

        if source_index == destination_index {
            return Ok(());
        }

        let worktree_to_move = self.worktrees.remove(source_index);
        self.worktrees.insert(destination_index, worktree_to_move);
        self.worktrees_reordered = true;
        cx.emit(WorktreeStoreEvent::WorktreeOrderChanged);
        cx.notify();
        Ok(())
    }

    pub fn disconnected_from_host(&mut self, cx: &mut AppContext) {
        for worktree in &self.worktrees {
            if let Some(worktree) = worktree.upgrade() {
                worktree.update(cx, |worktree, _| {
                    if let Some(worktree) = worktree.as_remote_mut() {
                        worktree.disconnected_from_host();
                    }
                });
            }
        }
    }

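    /// Sends the current project and worktree metadata to the downstream client, if any, and
    /// begins observing each worktree so that subsequent updates are forwarded as well.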
    pub fn send_project_updates(&mut self, cx: &mut ModelContext<Self>) {
        let Some((downstream_client, project_id)) = self.downstream_client.clone() else {
            return;
        };

        let update = proto::UpdateProject {
            project_id,
            worktrees: self.worktree_metadata_protos(cx),
        };

        // collab has bad concurrency guarantees, so we send requests in serial.
        let update_project = if downstream_client.is_via_collab() {
            Some(downstream_client.request(update))
        } else {
            downstream_client.send(update).log_err();
            None
        };
        cx.spawn(|this, mut cx| async move {
            if let Some(update_project) = update_project {
                update_project.await?;
            }

            this.update(&mut cx, |this, cx| {
                let worktrees = this.worktrees().collect::<Vec<_>>();

                for worktree in worktrees {
                    worktree.update(cx, |worktree, cx| {
                        let client = downstream_client.clone();
                        worktree.observe_updates(project_id, cx, {
                            move |update| {
                                let client = client.clone();
                                async move {
                                    if client.is_via_collab() {
                                        client
                                            .request(update)
                                            .map(|result| result.log_err().is_some())
                                            .await
                                    } else {
                                        client.send(update).log_err().is_some()
                                    }
                                }
                            }
                        });
                    });

                    cx.emit(WorktreeStoreEvent::WorktreeUpdateSent(worktree.clone()))
                }

                anyhow::Ok(())
            })
        })
        .detach_and_log_err(cx);
    }

    pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec<proto::WorktreeMetadata> {
        self.worktrees()
            .map(|worktree| {
                let worktree = worktree.read(cx);
                proto::WorktreeMetadata {
                    id: worktree.id().to_proto(),
                    root_name: worktree.root_name().into(),
                    visible: worktree.is_visible(),
                    abs_path: worktree.abs_path().to_string_lossy().into(),
                }
            })
            .collect()
    }

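    /// Marks the project as shared: holds strong handles to all worktrees and starts
    /// forwarding updates to the given downstream client.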
    pub fn shared(
        &mut self,
        remote_id: u64,
        downstream_client: AnyProtoClient,
        cx: &mut ModelContext<Self>,
    ) {
        self.retain_worktrees = true;
        self.downstream_client = Some((downstream_client, remote_id));

        // When shared, retain all worktrees
        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade() {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }
        self.send_project_updates(cx);
    }

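    /// Marks the project as no longer shared: stops observing worktree updates and downgrades
    /// handles to worktrees that aren't visible.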
    pub fn unshared(&mut self, cx: &mut ModelContext<Self>) {
        self.retain_worktrees = false;
        self.downstream_client.take();

        // When not shared, only retain the visible worktrees
        for worktree_handle in self.worktrees.iter_mut() {
            if let WorktreeHandle::Strong(worktree) = worktree_handle {
                let is_visible = worktree.update(cx, |worktree, _| {
                    worktree.stop_observing_updates();
                    worktree.is_visible()
                });
                if !is_visible {
                    *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                }
            }
        }
    }

    /// Scans the visible, local worktrees and returns the paths of files that *might* match
    /// the search query.
    pub fn find_search_candidates(
        &self,
        query: SearchQuery,
        limit: usize,
        open_entries: HashSet<ProjectEntryId>,
        fs: Arc<dyn Fs>,
        cx: &ModelContext<Self>,
    ) -> Receiver<ProjectPath> {
        let snapshots = self
            .visible_worktrees(cx)
            .filter_map(|tree| {
                let tree = tree.read(cx);
                Some((tree.snapshot(), tree.as_local()?.settings()))
            })
            .collect::<Vec<_>>();

        let executor = cx.background_executor().clone();

        // We want to return entries in the order they are in the worktrees, so we have one
        // thread that iterates over the worktrees (and ignored directories) as necessary,
        // and pushes a oneshot::Receiver to the output channel and a oneshot::Sender to the filter
        // channel.
        // We spawn a number of workers that take items from the filter channel and check the query
        // against the version of the file on disk.
        let (filter_tx, filter_rx) = smol::channel::bounded(64);
        let (output_tx, mut output_rx) = smol::channel::bounded(64);
        let (matching_paths_tx, matching_paths_rx) = smol::channel::unbounded();

        let input = cx.background_executor().spawn({
            let fs = fs.clone();
            let query = query.clone();
            async move {
                Self::find_candidate_paths(
                    fs,
                    snapshots,
                    open_entries,
                    query,
                    filter_tx,
                    output_tx,
                )
                .await
                .log_err();
            }
        });
        const MAX_CONCURRENT_FILE_SCANS: usize = 64;
        let filters = cx.background_executor().spawn(async move {
            let fs = &fs;
            let query = &query;
            executor
                .scoped(move |scope| {
                    for _ in 0..MAX_CONCURRENT_FILE_SCANS {
                        let filter_rx = filter_rx.clone();
                        scope.spawn(async move {
                            Self::filter_paths(fs, filter_rx, query).await.log_err();
                        })
                    }
                })
                .await;
        });
        cx.background_executor()
            .spawn(async move {
                let mut matched = 0;
                while let Some(mut receiver) = output_rx.next().await {
                    let Some(path) = receiver.next().await else {
                        continue;
                    };
                    let Ok(_) = matching_paths_tx.send(path).await else {
                        break;
                    };
                    matched += 1;
                    if matched == limit {
                        break;
                    }
                }
                drop(input);
                drop(filters);
            })
            .detach();
        matching_paths_rx
    }

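    /// Recursively lists an ignored directory on disk (its contents aren't tracked in the
    /// worktree snapshot), feeding candidate files into the same filter/output channels used
    /// by `find_candidate_paths`.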
    fn scan_ignored_dir<'a>(
        fs: &'a Arc<dyn Fs>,
        snapshot: &'a worktree::Snapshot,
        path: &'a Path,
        query: &'a SearchQuery,
        include_root: bool,
        filter_tx: &'a Sender<MatchingEntry>,
        output_tx: &'a Sender<oneshot::Receiver<ProjectPath>>,
    ) -> BoxFuture<'a, Result<()>> {
        async move {
            let abs_path = snapshot.abs_path().join(path);
            let Some(mut files) = fs
                .read_dir(&abs_path)
                .await
                .with_context(|| format!("listing ignored path {abs_path:?}"))
                .log_err()
            else {
                return Ok(());
            };

            let mut results = Vec::new();

            while let Some(Ok(file)) = files.next().await {
                let Some(metadata) = fs
                    .metadata(&file)
                    .await
                    .with_context(|| format!("fetching fs metadata for {abs_path:?}"))
                    .log_err()
                    .flatten()
                else {
                    continue;
                };
                if metadata.is_symlink || metadata.is_fifo {
                    continue;
                }
                results.push((
                    file.strip_prefix(snapshot.abs_path())?.to_path_buf(),
                    !metadata.is_dir,
                ))
            }
            results.sort_by(|(a_path, _), (b_path, _)| a_path.cmp(b_path));
            for (path, is_file) in results {
                if is_file {
                    if query.filters_path() {
                        let matched_path = if include_root {
                            let mut full_path = PathBuf::from(snapshot.root_name());
                            full_path.push(&path);
                            query.file_matches(&full_path)
                        } else {
                            query.file_matches(&path)
                        };
                        if !matched_path {
                            continue;
                        }
                    }
                    let (tx, rx) = oneshot::channel();
                    output_tx.send(rx).await?;
                    filter_tx
                        .send(MatchingEntry {
                            respond: tx,
                            worktree_path: snapshot.abs_path().clone(),
                            path: ProjectPath {
                                worktree_id: snapshot.id(),
                                path: Arc::from(path),
                            },
                        })
                        .await?;
                } else {
                    Self::scan_ignored_dir(
                        fs,
                        snapshot,
                        &path,
                        query,
                        include_root,
                        filter_tx,
                        output_tx,
                    )
                    .await?;
                }
            }
            Ok(())
        }
        .boxed()
    }

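    /// Walks the worktree snapshots in order, sending entries that pass the query's path
    /// filters either straight to the output (for already-open entries) or to the filter
    /// workers for a content check.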
    async fn find_candidate_paths(
        fs: Arc<dyn Fs>,
        snapshots: Vec<(worktree::Snapshot, WorktreeSettings)>,
        open_entries: HashSet<ProjectEntryId>,
        query: SearchQuery,
        filter_tx: Sender<MatchingEntry>,
        output_tx: Sender<oneshot::Receiver<ProjectPath>>,
    ) -> Result<()> {
        let include_root = snapshots.len() > 1;
        for (snapshot, settings) in snapshots {
            for entry in snapshot.entries(query.include_ignored(), 0) {
                if entry.is_dir() && entry.is_ignored {
                    if !settings.is_path_excluded(&entry.path) {
                        Self::scan_ignored_dir(
                            &fs,
                            &snapshot,
                            &entry.path,
                            &query,
                            include_root,
                            &filter_tx,
                            &output_tx,
                        )
                        .await?;
                    }
                    continue;
                }

                if entry.is_fifo || !entry.is_file() {
                    continue;
                }

                if query.filters_path() {
                    let matched_path = if include_root {
                        let mut full_path = PathBuf::from(snapshot.root_name());
                        full_path.push(&entry.path);
                        query.file_matches(&full_path)
                    } else {
                        query.file_matches(&entry.path)
                    };
                    if !matched_path {
                        continue;
                    }
                }

                let (mut tx, rx) = oneshot::channel();

                if open_entries.contains(&entry.id) {
                    tx.send(ProjectPath {
                        worktree_id: snapshot.id(),
                        path: entry.path.clone(),
                    })
                    .await?;
                } else {
                    filter_tx
                        .send(MatchingEntry {
                            respond: tx,
                            worktree_path: snapshot.abs_path().clone(),
                            path: ProjectPath {
                                worktree_id: snapshot.id(),
                                path: entry.path.clone(),
                            },
                        })
                        .await?;
                }

                output_tx.send(rx).await?;
            }
        }
        Ok(())
    }

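    /// Lists the branches of the git repository containing `project_path`, reading the local
    /// repository directly or asking the remote host over RPC.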
    pub fn branches(
        &self,
        project_path: ProjectPath,
        cx: &AppContext,
    ) -> Task<Result<Vec<git::repository::Branch>>> {
        let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
            return Task::ready(Err(anyhow!("No worktree found for ProjectPath")));
        };

        match worktree.read(cx) {
            Worktree::Local(local_worktree) => {
                let branches = util::maybe!({
                    let worktree_error = |error| {
                        format!(
                            "{} for worktree {}",
                            error,
                            local_worktree.abs_path().to_string_lossy()
                        )
                    };

                    let entry = local_worktree
                        .git_entry(project_path.path)
                        .with_context(|| worktree_error("No git entry found"))?;

                    let repo = local_worktree
                        .get_local_repo(&entry)
                        .with_context(|| worktree_error("No repository found"))?
                        .repo()
                        .clone();

                    repo.branches()
                });

                Task::ready(branches)
            }
            Worktree::Remote(remote_worktree) => {
                let request = remote_worktree.client().request(proto::GitBranches {
                    project_id: remote_worktree.project_id(),
                    repository: Some(proto::ProjectPath {
                        worktree_id: project_path.worktree_id.to_proto(),
                        path: project_path.path.to_string_lossy().to_string(), // Root path
                    }),
                });

                cx.background_executor().spawn(async move {
                    let response = request.await?;

                    let branches = response
                        .branches
                        .into_iter()
                        .map(|proto_branch| git::repository::Branch {
                            is_head: proto_branch.is_head,
                            name: proto_branch.name.into(),
                            unix_timestamp: proto_branch
                                .unix_timestamp
                                .map(|timestamp| timestamp as i64),
                        })
                        .collect();

                    Ok(branches)
                })
            }
        }
    }

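    /// Checks out `new_branch` in the repository containing `repository`, creating the branch
    /// first if it doesn't exist. For remote worktrees this is delegated to the host.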
    pub fn update_or_create_branch(
        &self,
        repository: ProjectPath,
        new_branch: String,
        cx: &AppContext,
    ) -> Task<Result<()>> {
        let Some(worktree) = self.worktree_for_id(repository.worktree_id, cx) else {
            return Task::ready(Err(anyhow!("No worktree found for ProjectPath")));
        };

        match worktree.read(cx) {
            Worktree::Local(local_worktree) => {
                let result = util::maybe!({
                    let worktree_error = |error| {
                        format!(
                            "{} for worktree {}",
                            error,
                            local_worktree.abs_path().to_string_lossy()
                        )
                    };

                    let entry = local_worktree
                        .git_entry(repository.path)
                        .with_context(|| worktree_error("No git entry found"))?;

                    let repo = local_worktree
                        .get_local_repo(&entry)
                        .with_context(|| worktree_error("No repository found"))?
                        .repo()
                        .clone();

                    if !repo.branch_exits(&new_branch)? {
                        repo.create_branch(&new_branch)?;
                    }

                    repo.change_branch(&new_branch)?;

                    Ok(())
                });

                Task::ready(result)
            }
            Worktree::Remote(remote_worktree) => {
                let request = remote_worktree.client().request(proto::UpdateGitBranch {
                    project_id: remote_worktree.project_id(),
                    repository: Some(proto::ProjectPath {
                        worktree_id: repository.worktree_id.to_proto(),
                        path: repository.path.to_string_lossy().to_string(), // Root path
                    }),
                    branch_name: new_branch,
                });

                cx.background_executor().spawn(async move {
                    request.await?;
                    Ok(())
                })
            }
        }
    }

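    /// Filter-worker loop: for each candidate entry, opens the file on disk and responds with
    /// the path if the query matches its contents.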
    async fn filter_paths(
        fs: &Arc<dyn Fs>,
        mut input: Receiver<MatchingEntry>,
        query: &SearchQuery,
    ) -> Result<()> {
        while let Some(mut entry) = input.next().await {
            let abs_path = entry.worktree_path.join(&entry.path.path);
            let Some(file) = fs.open_sync(&abs_path).await.log_err() else {
                continue;
            };
            if query.detect(file).unwrap_or(false) {
                entry.respond.send(entry.path).await?
            }
        }

        Ok(())
    }

    pub async fn handle_create_project_entry(
        this: Model<Self>,
        envelope: TypedEnvelope<proto::CreateProjectEntry>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let worktree = this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            this.worktree_for_id(worktree_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })??;
        Worktree::handle_create_entry(worktree, envelope.payload, cx).await
    }

    pub async fn handle_rename_project_entry(
        this: Model<super::Project>,
        envelope: TypedEnvelope<proto::RenameProjectEntry>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let (worktree_id, worktree, old_path, is_dir) = this
            .update(&mut cx, |this, cx| {
                this.worktree_store
                    .read(cx)
                    .worktree_and_entry_for_id(entry_id, cx)
                    .map(|(worktree, entry)| {
                        (
                            worktree.read(cx).id(),
                            worktree,
                            entry.path.clone(),
                            entry.is_dir(),
                        )
                    })
            })?
            .ok_or_else(|| anyhow!("worktree not found"))?;
        let (old_abs_path, new_abs_path) = {
            let root_path = worktree.update(&mut cx, |this, _| this.abs_path())?;
            (
                root_path.join(&old_path),
                root_path.join(&envelope.payload.new_path),
            )
        };
        let lsp_store = this
            .update(&mut cx, |this, _| this.lsp_store())?
            .downgrade();
        LspStore::will_rename_entry(
            lsp_store,
            worktree_id,
            &old_abs_path,
            &new_abs_path,
            is_dir,
            cx.clone(),
        )
        .await;
        let response = Worktree::handle_rename_entry(worktree, envelope.payload, cx.clone()).await;
        this.update(&mut cx, |this, cx| {
            this.lsp_store().read(cx).did_rename_entry(
                worktree_id,
                &old_abs_path,
                &new_abs_path,
                is_dir,
            );
        })
        .ok();
        response
    }

    pub async fn handle_copy_project_entry(
        this: Model<Self>,
        envelope: TypedEnvelope<proto::CopyProjectEntry>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.update(&mut cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })??;
        Worktree::handle_copy_entry(worktree, envelope.payload, cx).await
    }

    pub async fn handle_delete_project_entry(
        this: Model<Self>,
        envelope: TypedEnvelope<proto::DeleteProjectEntry>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.update(&mut cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })??;
        Worktree::handle_delete_entry(worktree, envelope.payload, cx).await
    }

    pub async fn handle_expand_project_entry(
        this: Model<Self>,
        envelope: TypedEnvelope<proto::ExpandProjectEntry>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ExpandProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this
            .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
            .ok_or_else(|| anyhow!("invalid request"))?;
        Worktree::handle_expand_entry(worktree, envelope.payload, cx).await
    }

    pub async fn handle_git_branches(
        this: Model<Self>,
        branches: TypedEnvelope<proto::GitBranches>,
        cx: AsyncAppContext,
    ) -> Result<proto::GitBranchesResponse> {
        let project_path = branches
            .payload
            .repository
            .clone()
            .context("Invalid GitBranches call")?;
        let project_path = ProjectPath {
            worktree_id: WorktreeId::from_proto(project_path.worktree_id),
            path: Path::new(&project_path.path).into(),
        };

        let branches = this
            .read_with(&cx, |this, cx| this.branches(project_path, cx))?
            .await?;

        Ok(proto::GitBranchesResponse {
            branches: branches
                .into_iter()
                .map(|branch| proto::Branch {
                    is_head: branch.is_head,
                    name: branch.name.to_string(),
                    unix_timestamp: branch.unix_timestamp.map(|timestamp| timestamp as u64),
                })
                .collect(),
        })
    }

    pub async fn handle_update_branch(
        this: Model<Self>,
        update_branch: TypedEnvelope<proto::UpdateGitBranch>,
        cx: AsyncAppContext,
    ) -> Result<proto::Ack> {
        let project_path = update_branch
            .payload
            .repository
            .clone()
            .context("Invalid UpdateGitBranch call")?;
        let project_path = ProjectPath {
            worktree_id: WorktreeId::from_proto(project_path.worktree_id),
            path: Path::new(&project_path.path).into(),
        };
        let new_branch = update_branch.payload.branch_name;

        this.read_with(&cx, |this, cx| {
            this.update_or_create_branch(project_path, new_branch, cx)
        })?
        .await?;

        Ok(proto::Ack {})
    }
}

#[derive(Clone, Debug)]
enum WorktreeHandle {
    Strong(Model<Worktree>),
    Weak(WeakModel<Worktree>),
}

impl WorktreeHandle {
    fn upgrade(&self) -> Option<Model<Worktree>> {
        match self {
            WorktreeHandle::Strong(handle) => Some(handle.clone()),
            WorktreeHandle::Weak(handle) => handle.upgrade(),
        }
    }
}