worktree.rs

   1mod ignore;
   2mod worktree_settings;
   3#[cfg(test)]
   4mod worktree_tests;
   5
   6use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
   7use anyhow::{anyhow, Context as _, Result};
   8use clock::ReplicaId;
   9use collections::{HashMap, HashSet, VecDeque};
  10use fs::{copy_recursive, Fs, MTime, PathEvent, RemoveOptions, Watcher};
  11use futures::{
  12    channel::{
  13        mpsc::{self, UnboundedSender},
  14        oneshot,
  15    },
  16    select_biased,
  17    task::Poll,
  18    FutureExt as _, Stream, StreamExt,
  19};
  20use fuzzy::CharBag;
  21use git::{
  22    repository::{GitRepository, RepoPath},
  23    status::{
  24        FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
  25    },
  26    GitHostingProviderRegistry, COOKIES, DOT_GIT, FSMONITOR_DAEMON, GITIGNORE,
  27};
  28use gpui::{
  29    App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, EventEmitter, Task,
  30};
  31use ignore::IgnoreStack;
  32use language::DiskState;
  33
  34use parking_lot::Mutex;
  35use paths::local_settings_folder_relative_path;
  36use postage::{
  37    barrier,
  38    prelude::{Sink as _, Stream as _},
  39    watch,
  40};
  41use rpc::{
  42    proto::{self, split_worktree_update},
  43    AnyProtoClient,
  44};
  45pub use settings::WorktreeId;
  46use settings::{Settings, SettingsLocation, SettingsStore};
  47use smallvec::{smallvec, SmallVec};
  48use smol::channel::{self, Sender};
  49use std::{
  50    any::Any,
  51    cmp::Ordering,
  52    collections::hash_map,
  53    convert::TryFrom,
  54    ffi::OsStr,
  55    fmt,
  56    future::Future,
  57    mem::{self},
  58    ops::{Deref, DerefMut},
  59    path::{Path, PathBuf},
  60    pin::Pin,
  61    sync::{
  62        atomic::{AtomicUsize, Ordering::SeqCst},
  63        Arc,
  64    },
  65    time::{Duration, Instant},
  66};
  67use sum_tree::{
  68    Bias, Cursor, Edit, KeyedItem, SeekTarget, SumTree, Summary, TreeMap, TreeSet, Unit,
  69};
  70use text::{LineEnding, Rope};
  71use util::{
  72    paths::{home_dir, PathMatcher, SanitizedPath},
  73    ResultExt,
  74};
  75pub use worktree_settings::WorktreeSettings;
  76
  77#[cfg(feature = "test-support")]
  78pub const FS_WATCH_LATENCY: Duration = Duration::from_millis(100);
  79#[cfg(not(feature = "test-support"))]
  80pub const FS_WATCH_LATENCY: Duration = Duration::from_millis(100);
  81
  82/// A set of local or remote files that are being opened as part of a project.
  83/// Responsible for tracking related FS (for local)/collab (for remote) events and corresponding updates.
  84/// Stores git repositories data and the diagnostics for the file(s).
  85///
  86/// Has an absolute path, and may be set to be visible in Zed UI or not.
  87/// May correspond to a directory or a single file.
  88/// Possible examples:
  89/// * a drag and dropped file — may be added as an invisible, "ephemeral" entry to the current worktree
  90/// * a directory opened in Zed — may be added as a visible entry to the current worktree
  91///
  92/// Uses [`Entry`] to track the state of each file/directory, can look up absolute paths for entries.
  93pub enum Worktree {
  94    Local(LocalWorktree),
  95    Remote(RemoteWorktree),
  96}
  97
  98/// An entry, created in the worktree.
  99#[derive(Debug)]
 100pub enum CreatedEntry {
 101    /// Got created and indexed by the worktree, receiving a corresponding entry.
 102    Included(Entry),
 103    /// Got created, but not indexed due to falling under exclusion filters.
 104    Excluded { abs_path: PathBuf },
 105}
 106
 107pub struct LoadedFile {
 108    pub file: Arc<File>,
 109    pub text: String,
 110}
 111
 112pub struct LoadedBinaryFile {
 113    pub file: Arc<File>,
 114    pub content: Vec<u8>,
 115}
 116
 117pub struct LocalWorktree {
 118    snapshot: LocalSnapshot,
 119    scan_requests_tx: channel::Sender<ScanRequest>,
 120    path_prefixes_to_scan_tx: channel::Sender<Arc<Path>>,
 121    is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
 122    _background_scanner_tasks: Vec<Task<()>>,
 123    update_observer: Option<UpdateObservationState>,
 124    fs: Arc<dyn Fs>,
 125    fs_case_sensitive: bool,
 126    visible: bool,
 127    next_entry_id: Arc<AtomicUsize>,
 128    settings: WorktreeSettings,
 129    share_private_files: bool,
 130}
 131
 132struct ScanRequest {
 133    relative_paths: Vec<Arc<Path>>,
 134    done: SmallVec<[barrier::Sender; 1]>,
 135}
 136
 137pub struct RemoteWorktree {
 138    snapshot: Snapshot,
 139    background_snapshot: Arc<Mutex<(Snapshot, Vec<proto::UpdateWorktree>)>>,
 140    project_id: u64,
 141    client: AnyProtoClient,
 142    file_scan_inclusions: PathMatcher,
 143    updates_tx: Option<UnboundedSender<proto::UpdateWorktree>>,
 144    update_observer: Option<mpsc::UnboundedSender<proto::UpdateWorktree>>,
 145    snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>,
 146    replica_id: ReplicaId,
 147    visible: bool,
 148    disconnected: bool,
 149}
 150
 151#[derive(Clone)]
 152pub struct Snapshot {
 153    id: WorktreeId,
 154    abs_path: SanitizedPath,
 155    root_name: String,
 156    root_char_bag: CharBag,
 157    entries_by_path: SumTree<Entry>,
 158    entries_by_id: SumTree<PathEntry>,
 159    always_included_entries: Vec<Arc<Path>>,
 160    repositories: SumTree<RepositoryEntry>,
 161
 162    /// A number that increases every time the worktree begins scanning
 163    /// a set of paths from the filesystem. This scanning could be caused
 164    /// by some operation performed on the worktree, such as reading or
 165    /// writing a file, or by an event reported by the filesystem.
 166    scan_id: usize,
 167
 168    /// The latest scan id that has completed, and whose preceding scans
 169    /// have all completed. The current `scan_id` could be more than one
 170    /// greater than the `completed_scan_id` if operations are performed
 171    /// on the worktree while it is processing a file-system event.
 172    completed_scan_id: usize,
 173}
 174
 175#[derive(Clone, Debug, PartialEq, Eq)]
 176pub struct RepositoryEntry {
 177    /// The git status entries for this repository.
 178    /// Note that the paths on this repository are relative to the git work directory.
 179    /// If the .git folder is external to Zed, these paths will be relative to that folder,
 180    /// and this data structure might reference files external to this worktree.
 181    ///
 182    /// For example:
 183    ///
 184    ///     my_root_folder/          <-- repository root
 185    ///       .git
 186    ///       my_sub_folder_1/
 187    ///         project_root/        <-- Project root, Zed opened here
 188    ///           changed_file_1     <-- File with changes, in worktree
 189    ///       my_sub_folder_2/
 190    ///         changed_file_2       <-- File with changes, out of worktree
 191    ///           ...
 192    ///
 193    /// With this setup, this field would contain 2 entries, like so:
 194    ///     - my_sub_folder_1/project_root/changed_file_1
 195    ///     - my_sub_folder_2/changed_file_2
 196    pub(crate) statuses_by_path: SumTree<StatusEntry>,
 197    pub work_directory_id: ProjectEntryId,
 198    pub work_directory: WorkDirectory,
 199    pub(crate) branch: Option<Arc<str>>,
 200}
 201
 202impl Deref for RepositoryEntry {
 203    type Target = WorkDirectory;
 204
 205    fn deref(&self) -> &Self::Target {
 206        &self.work_directory
 207    }
 208}
 209
 210impl AsRef<Path> for RepositoryEntry {
 211    fn as_ref(&self) -> &Path {
 212        &self.path
 213    }
 214}
 215
 216impl RepositoryEntry {
 217    pub fn branch(&self) -> Option<Arc<str>> {
 218        self.branch.clone()
 219    }
 220
 221    pub fn work_directory_id(&self) -> ProjectEntryId {
 222        self.work_directory_id
 223    }
 224
 225    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
 226        self.statuses_by_path.iter().cloned()
 227    }
 228
 229    pub fn status_len(&self) -> usize {
 230        self.statuses_by_path.summary().item_summary.count
 231    }
 232
 233    pub fn status_summary(&self) -> GitSummary {
 234        self.statuses_by_path.summary().item_summary
 235    }
 236
 237    pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
 238        self.statuses_by_path
 239            .get(&PathKey(path.0.clone()), &())
 240            .cloned()
 241    }
 242
 243    pub fn initial_update(&self) -> proto::RepositoryEntry {
 244        proto::RepositoryEntry {
 245            work_directory_id: self.work_directory_id.to_proto(),
 246            branch: self.branch.as_ref().map(|branch| branch.to_string()),
 247            updated_statuses: self
 248                .statuses_by_path
 249                .iter()
 250                .map(|entry| entry.to_proto())
 251                .collect(),
 252            removed_statuses: Default::default(),
 253        }
 254    }
 255
 256    pub fn build_update(&self, old: &Self) -> proto::RepositoryEntry {
 257        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
 258        let mut removed_statuses: Vec<String> = Vec::new();
 259
 260        let mut new_statuses = self.statuses_by_path.iter().peekable();
 261        let mut old_statuses = old.statuses_by_path.iter().peekable();
 262
 263        let mut current_new_entry = new_statuses.next();
 264        let mut current_old_entry = old_statuses.next();
 265        loop {
 266            match (current_new_entry, current_old_entry) {
 267                (Some(new_entry), Some(old_entry)) => {
 268                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
 269                        Ordering::Less => {
 270                            updated_statuses.push(new_entry.to_proto());
 271                            current_new_entry = new_statuses.next();
 272                        }
 273                        Ordering::Equal => {
 274                            if new_entry.status != old_entry.status {
 275                                updated_statuses.push(new_entry.to_proto());
 276                            }
 277                            current_old_entry = old_statuses.next();
 278                            current_new_entry = new_statuses.next();
 279                        }
 280                        Ordering::Greater => {
 281                            removed_statuses.push(old_entry.repo_path.to_proto());
 282                            current_old_entry = old_statuses.next();
 283                        }
 284                    }
 285                }
 286                (None, Some(old_entry)) => {
 287                    removed_statuses.push(old_entry.repo_path.to_proto());
 288                    current_old_entry = old_statuses.next();
 289                }
 290                (Some(new_entry), None) => {
 291                    updated_statuses.push(new_entry.to_proto());
 292                    current_new_entry = new_statuses.next();
 293                }
 294                (None, None) => break,
 295            }
 296        }
 297
 298        proto::RepositoryEntry {
 299            work_directory_id: self.work_directory_id.to_proto(),
 300            branch: self.branch.as_ref().map(|branch| branch.to_string()),
 301            updated_statuses,
 302            removed_statuses,
 303        }
 304    }
 305}
 306
 307/// This path corresponds to the 'content path' of a repository in relation
 308/// to Zed's project root.
 309/// In the majority of the cases, this is the folder that contains the .git folder.
 310/// But if a sub-folder of a git repository is opened, this corresponds to the
 311/// project root and the .git folder is located in a parent directory.
 312#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
 313pub struct WorkDirectory {
 314    path: Arc<Path>,
 315
 316    /// If location_in_repo is set, it means the .git folder is external
 317    /// and in a parent folder of the project root.
 318    /// In that case, the work_directory field will point to the
 319    /// project-root and location_in_repo contains the location of the
 320    /// project-root in the repository.
 321    ///
 322    /// Example:
 323    ///
 324    ///     my_root_folder/          <-- repository root
 325    ///       .git
 326    ///       my_sub_folder_1/
 327    ///         project_root/        <-- Project root, Zed opened here
 328    ///           ...
 329    ///
 330    /// For this setup, the attributes will have the following values:
 331    ///
 332    ///     work_directory: pointing to "" entry
 333    ///     location_in_repo: Some("my_sub_folder_1/project_root")
 334    pub(crate) location_in_repo: Option<Arc<Path>>,
 335}
 336
 337impl WorkDirectory {
 338    pub fn path_key(&self) -> PathKey {
 339        PathKey(self.path.clone())
 340    }
 341
 342    /// Returns true if the given path is a child of the work directory.
 343    ///
 344    /// Note that the path may not be a member of this repository, if there
 345    /// is a repository in a directory between these two paths
 346    /// external .git folder in a parent folder of the project root.
 347    pub fn directory_contains(&self, path: impl AsRef<Path>) -> bool {
 348        let path = path.as_ref();
 349        path.starts_with(&self.path)
 350    }
 351
 352    /// relativize returns the given project path relative to the root folder of the
 353    /// repository.
 354    /// If the root of the repository (and its .git folder) are located in a parent folder
 355    /// of the project root folder, then the returned RepoPath is relative to the root
 356    /// of the repository and not a valid path inside the project.
 357    pub fn relativize(&self, path: &Path) -> Result<RepoPath> {
 358        if let Some(location_in_repo) = &self.location_in_repo {
 359            Ok(location_in_repo.join(path).into())
 360        } else {
 361            let relativized_path = path
 362                .strip_prefix(&self.path)
 363                .map_err(|_| anyhow!("could not relativize {:?} against {:?}", path, self.path))?;
 364
 365            Ok(relativized_path.into())
 366        }
 367    }
 368
 369    /// This is the opposite operation to `relativize` above
 370    pub fn unrelativize(&self, path: &RepoPath) -> Option<Arc<Path>> {
 371        if let Some(location) = &self.location_in_repo {
 372            // If we fail to strip the prefix, that means this status entry is
 373            // external to this worktree, and we definitely won't have an entry_id
 374            path.strip_prefix(location).ok().map(Into::into)
 375        } else {
 376            Some(self.path.join(path).into())
 377        }
 378    }
 379}
 380
 381impl Default for WorkDirectory {
 382    fn default() -> Self {
 383        Self {
 384            path: Arc::from(Path::new("")),
 385            location_in_repo: None,
 386        }
 387    }
 388}
 389
 390impl Deref for WorkDirectory {
 391    type Target = Path;
 392
 393    fn deref(&self) -> &Self::Target {
 394        self.as_ref()
 395    }
 396}
 397
 398impl AsRef<Path> for WorkDirectory {
 399    fn as_ref(&self) -> &Path {
 400        self.path.as_ref()
 401    }
 402}
 403
 404#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
 405pub struct WorkDirectoryEntry(ProjectEntryId);
 406
 407impl Deref for WorkDirectoryEntry {
 408    type Target = ProjectEntryId;
 409
 410    fn deref(&self) -> &Self::Target {
 411        &self.0
 412    }
 413}
 414
 415impl From<ProjectEntryId> for WorkDirectoryEntry {
 416    fn from(value: ProjectEntryId) -> Self {
 417        WorkDirectoryEntry(value)
 418    }
 419}
 420
 421#[derive(Debug, Clone)]
 422pub struct LocalSnapshot {
 423    snapshot: Snapshot,
 424    /// All of the gitignore files in the worktree, indexed by their relative path.
 425    /// The boolean indicates whether the gitignore needs to be updated.
 426    ignores_by_parent_abs_path: HashMap<Arc<Path>, (Arc<Gitignore>, bool)>,
 427    /// All of the git repositories in the worktree, indexed by the project entry
 428    /// id of their parent directory.
 429    git_repositories: TreeMap<ProjectEntryId, LocalRepositoryEntry>,
 430    /// The file handle of the root dir
 431    /// (so we can find it after it's been moved)
 432    root_file_handle: Option<Arc<dyn fs::FileHandle>>,
 433}
 434
 435struct BackgroundScannerState {
 436    snapshot: LocalSnapshot,
 437    scanned_dirs: HashSet<ProjectEntryId>,
 438    path_prefixes_to_scan: HashSet<Arc<Path>>,
 439    paths_to_scan: HashSet<Arc<Path>>,
 440    /// The ids of all of the entries that were removed from the snapshot
 441    /// as part of the current update. These entry ids may be re-used
 442    /// if the same inode is discovered at a new path, or if the given
 443    /// path is re-created after being deleted.
 444    removed_entries: HashMap<u64, Entry>,
 445    changed_paths: Vec<Arc<Path>>,
 446    prev_snapshot: Snapshot,
 447    git_hosting_provider_registry: Option<Arc<GitHostingProviderRegistry>>,
 448}
 449
 450#[derive(Debug, Clone)]
 451pub struct LocalRepositoryEntry {
 452    pub(crate) work_directory: WorkDirectory,
 453    pub(crate) git_dir_scan_id: usize,
 454    pub(crate) status_scan_id: usize,
 455    pub(crate) repo_ptr: Arc<dyn GitRepository>,
 456    /// Absolute path to the actual .git folder.
 457    /// Note: if .git is a file, this points to the folder indicated by the .git file
 458    pub(crate) dot_git_dir_abs_path: Arc<Path>,
 459    /// Absolute path to the .git file, if we're in a git worktree.
 460    pub(crate) dot_git_worktree_abs_path: Option<Arc<Path>>,
 461}
 462
 463impl sum_tree::Item for LocalRepositoryEntry {
 464    type Summary = PathSummary<Unit>;
 465
 466    fn summary(&self, _: &<Self::Summary as Summary>::Context) -> Self::Summary {
 467        PathSummary {
 468            max_path: self.work_directory.path.clone(),
 469            item_summary: Unit,
 470        }
 471    }
 472}
 473
 474impl KeyedItem for LocalRepositoryEntry {
 475    type Key = PathKey;
 476
 477    fn key(&self) -> Self::Key {
 478        PathKey(self.work_directory.path.clone())
 479    }
 480}
 481
 482impl LocalRepositoryEntry {
 483    pub fn repo(&self) -> &Arc<dyn GitRepository> {
 484        &self.repo_ptr
 485    }
 486}
 487
 488impl Deref for LocalRepositoryEntry {
 489    type Target = WorkDirectory;
 490
 491    fn deref(&self) -> &Self::Target {
 492        &self.work_directory
 493    }
 494}
 495
 496impl Deref for LocalSnapshot {
 497    type Target = Snapshot;
 498
 499    fn deref(&self) -> &Self::Target {
 500        &self.snapshot
 501    }
 502}
 503
 504impl DerefMut for LocalSnapshot {
 505    fn deref_mut(&mut self) -> &mut Self::Target {
 506        &mut self.snapshot
 507    }
 508}
 509
 510enum ScanState {
 511    Started,
 512    Updated {
 513        snapshot: LocalSnapshot,
 514        changes: UpdatedEntriesSet,
 515        barrier: SmallVec<[barrier::Sender; 1]>,
 516        scanning: bool,
 517    },
 518    RootUpdated {
 519        new_path: Option<SanitizedPath>,
 520    },
 521}
 522
 523struct UpdateObservationState {
 524    snapshots_tx:
 525        mpsc::UnboundedSender<(LocalSnapshot, UpdatedEntriesSet, UpdatedGitRepositoriesSet)>,
 526    resume_updates: watch::Sender<()>,
 527    _maintain_remote_snapshot: Task<Option<()>>,
 528}
 529
 530#[derive(Clone)]
 531pub enum Event {
 532    UpdatedEntries(UpdatedEntriesSet),
 533    UpdatedGitRepositories(UpdatedGitRepositoriesSet),
 534    DeletedEntry(ProjectEntryId),
 535}
 536
 537const EMPTY_PATH: &str = "";
 538
 539impl EventEmitter<Event> for Worktree {}
 540
 541impl Worktree {
 542    pub async fn local(
 543        path: impl Into<Arc<Path>>,
 544        visible: bool,
 545        fs: Arc<dyn Fs>,
 546        next_entry_id: Arc<AtomicUsize>,
 547        cx: &mut AsyncApp,
 548    ) -> Result<Entity<Self>> {
 549        let abs_path = path.into();
 550        let metadata = fs
 551            .metadata(&abs_path)
 552            .await
 553            .context("failed to stat worktree path")?;
 554
 555        let fs_case_sensitive = fs.is_case_sensitive().await.unwrap_or_else(|e| {
 556            log::error!(
 557                "Failed to determine whether filesystem is case sensitive (falling back to true) due to error: {e:#}"
 558            );
 559            true
 560        });
 561
 562        let root_file_handle = fs.open_handle(&abs_path).await.log_err();
 563
 564        cx.new(move |cx: &mut Context<Worktree>| {
 565            let mut snapshot = LocalSnapshot {
 566                ignores_by_parent_abs_path: Default::default(),
 567                git_repositories: Default::default(),
 568                snapshot: Snapshot::new(
 569                    cx.entity_id().as_u64(),
 570                    abs_path
 571                        .file_name()
 572                        .map_or(String::new(), |f| f.to_string_lossy().to_string()),
 573                    abs_path.clone(),
 574                ),
 575                root_file_handle,
 576            };
 577
 578            let worktree_id = snapshot.id();
 579            let settings_location = Some(SettingsLocation {
 580                worktree_id,
 581                path: Path::new(EMPTY_PATH),
 582            });
 583
 584            let settings = WorktreeSettings::get(settings_location, cx).clone();
 585            cx.observe_global::<SettingsStore>(move |this, cx| {
 586                if let Self::Local(this) = this {
 587                    let settings = WorktreeSettings::get(settings_location, cx).clone();
 588                    if this.settings != settings {
 589                        this.settings = settings;
 590                        this.restart_background_scanners(cx);
 591                    }
 592                }
 593            })
 594            .detach();
 595
 596            let share_private_files = false;
 597            if let Some(metadata) = metadata {
 598                let mut entry = Entry::new(
 599                    Arc::from(Path::new("")),
 600                    &metadata,
 601                    &next_entry_id,
 602                    snapshot.root_char_bag,
 603                    None,
 604                );
 605                if !metadata.is_dir {
 606                    entry.is_private = !share_private_files
 607                        && settings.is_path_private(abs_path.file_name().unwrap().as_ref());
 608                }
 609                snapshot.insert_entry(entry, fs.as_ref());
 610            }
 611
 612            let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
 613            let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
 614            let mut worktree = LocalWorktree {
 615                share_private_files,
 616                next_entry_id,
 617                snapshot,
 618                is_scanning: watch::channel_with(true),
 619                update_observer: None,
 620                scan_requests_tx,
 621                path_prefixes_to_scan_tx,
 622                _background_scanner_tasks: Vec::new(),
 623                fs,
 624                fs_case_sensitive,
 625                visible,
 626                settings,
 627            };
 628            worktree.start_background_scanner(scan_requests_rx, path_prefixes_to_scan_rx, cx);
 629            Worktree::Local(worktree)
 630        })
 631    }
 632
 633    pub fn remote(
 634        project_id: u64,
 635        replica_id: ReplicaId,
 636        worktree: proto::WorktreeMetadata,
 637        client: AnyProtoClient,
 638        cx: &mut App,
 639    ) -> Entity<Self> {
 640        cx.new(|cx: &mut Context<Self>| {
 641            let snapshot = Snapshot::new(
 642                worktree.id,
 643                worktree.root_name,
 644                Arc::from(PathBuf::from(worktree.abs_path)),
 645            );
 646
 647            let background_snapshot = Arc::new(Mutex::new((snapshot.clone(), Vec::new())));
 648            let (background_updates_tx, mut background_updates_rx) = mpsc::unbounded();
 649            let (mut snapshot_updated_tx, mut snapshot_updated_rx) = watch::channel();
 650
 651            let worktree_id = snapshot.id();
 652            let settings_location = Some(SettingsLocation {
 653                worktree_id,
 654                path: Path::new(EMPTY_PATH),
 655            });
 656
 657            let settings = WorktreeSettings::get(settings_location, cx).clone();
 658            let worktree = RemoteWorktree {
 659                client,
 660                project_id,
 661                replica_id,
 662                snapshot,
 663                file_scan_inclusions: settings.file_scan_inclusions.clone(),
 664                background_snapshot: background_snapshot.clone(),
 665                updates_tx: Some(background_updates_tx),
 666                update_observer: None,
 667                snapshot_subscriptions: Default::default(),
 668                visible: worktree.visible,
 669                disconnected: false,
 670            };
 671
 672            // Apply updates to a separate snapshot in a background task, then
 673            // send them to a foreground task which updates the model.
 674            cx.background_executor()
 675                .spawn(async move {
 676                    while let Some(update) = background_updates_rx.next().await {
 677                        {
 678                            let mut lock = background_snapshot.lock();
 679                            if let Err(error) = lock
 680                                .0
 681                                .apply_remote_update(update.clone(), &settings.file_scan_inclusions)
 682                            {
 683                                log::error!("error applying worktree update: {}", error);
 684                            }
 685                            lock.1.push(update);
 686                        }
 687                        snapshot_updated_tx.send(()).await.ok();
 688                    }
 689                })
 690                .detach();
 691
 692            // On the foreground task, update to the latest snapshot and notify
 693            // any update observer of all updates that led to that snapshot.
 694            cx.spawn(|this, mut cx| async move {
 695                while (snapshot_updated_rx.recv().await).is_some() {
 696                    this.update(&mut cx, |this, cx| {
 697                        let this = this.as_remote_mut().unwrap();
 698                        {
 699                            let mut lock = this.background_snapshot.lock();
 700                            this.snapshot = lock.0.clone();
 701                            if let Some(tx) = &this.update_observer {
 702                                for update in lock.1.drain(..) {
 703                                    tx.unbounded_send(update).ok();
 704                                }
 705                            }
 706                        };
 707                        cx.emit(Event::UpdatedEntries(Arc::default()));
 708                        cx.notify();
 709                        while let Some((scan_id, _)) = this.snapshot_subscriptions.front() {
 710                            if this.observed_snapshot(*scan_id) {
 711                                let (_, tx) = this.snapshot_subscriptions.pop_front().unwrap();
 712                                let _ = tx.send(());
 713                            } else {
 714                                break;
 715                            }
 716                        }
 717                    })?;
 718                }
 719                anyhow::Ok(())
 720            })
 721            .detach();
 722
 723            Worktree::Remote(worktree)
 724        })
 725    }
 726
 727    pub fn as_local(&self) -> Option<&LocalWorktree> {
 728        if let Worktree::Local(worktree) = self {
 729            Some(worktree)
 730        } else {
 731            None
 732        }
 733    }
 734
 735    pub fn as_remote(&self) -> Option<&RemoteWorktree> {
 736        if let Worktree::Remote(worktree) = self {
 737            Some(worktree)
 738        } else {
 739            None
 740        }
 741    }
 742
 743    pub fn as_local_mut(&mut self) -> Option<&mut LocalWorktree> {
 744        if let Worktree::Local(worktree) = self {
 745            Some(worktree)
 746        } else {
 747            None
 748        }
 749    }
 750
 751    pub fn as_remote_mut(&mut self) -> Option<&mut RemoteWorktree> {
 752        if let Worktree::Remote(worktree) = self {
 753            Some(worktree)
 754        } else {
 755            None
 756        }
 757    }
 758
 759    pub fn is_local(&self) -> bool {
 760        matches!(self, Worktree::Local(_))
 761    }
 762
 763    pub fn is_remote(&self) -> bool {
 764        !self.is_local()
 765    }
 766
 767    pub fn settings_location(&self, _: &Context<Self>) -> SettingsLocation<'static> {
 768        SettingsLocation {
 769            worktree_id: self.id(),
 770            path: Path::new(EMPTY_PATH),
 771        }
 772    }
 773
 774    pub fn snapshot(&self) -> Snapshot {
 775        match self {
 776            Worktree::Local(worktree) => worktree.snapshot.snapshot.clone(),
 777            Worktree::Remote(worktree) => worktree.snapshot.clone(),
 778        }
 779    }
 780
 781    pub fn scan_id(&self) -> usize {
 782        match self {
 783            Worktree::Local(worktree) => worktree.snapshot.scan_id,
 784            Worktree::Remote(worktree) => worktree.snapshot.scan_id,
 785        }
 786    }
 787
 788    pub fn metadata_proto(&self) -> proto::WorktreeMetadata {
 789        proto::WorktreeMetadata {
 790            id: self.id().to_proto(),
 791            root_name: self.root_name().to_string(),
 792            visible: self.is_visible(),
 793            abs_path: self.abs_path().as_os_str().to_string_lossy().into(),
 794        }
 795    }
 796
 797    pub fn completed_scan_id(&self) -> usize {
 798        match self {
 799            Worktree::Local(worktree) => worktree.snapshot.completed_scan_id,
 800            Worktree::Remote(worktree) => worktree.snapshot.completed_scan_id,
 801        }
 802    }
 803
 804    pub fn is_visible(&self) -> bool {
 805        match self {
 806            Worktree::Local(worktree) => worktree.visible,
 807            Worktree::Remote(worktree) => worktree.visible,
 808        }
 809    }
 810
 811    pub fn replica_id(&self) -> ReplicaId {
 812        match self {
 813            Worktree::Local(_) => 0,
 814            Worktree::Remote(worktree) => worktree.replica_id,
 815        }
 816    }
 817
 818    pub fn abs_path(&self) -> Arc<Path> {
 819        match self {
 820            Worktree::Local(worktree) => worktree.abs_path.clone().into(),
 821            Worktree::Remote(worktree) => worktree.abs_path.clone().into(),
 822        }
 823    }
 824
 825    pub fn root_file(&self, cx: &Context<Self>) -> Option<Arc<File>> {
 826        let entry = self.root_entry()?;
 827        Some(File::for_entry(entry.clone(), cx.entity()))
 828    }
 829
 830    pub fn observe_updates<F, Fut>(&mut self, project_id: u64, cx: &Context<Worktree>, callback: F)
 831    where
 832        F: 'static + Send + Fn(proto::UpdateWorktree) -> Fut,
 833        Fut: 'static + Send + Future<Output = bool>,
 834    {
 835        match self {
 836            Worktree::Local(this) => this.observe_updates(project_id, cx, callback),
 837            Worktree::Remote(this) => this.observe_updates(project_id, cx, callback),
 838        }
 839    }
 840
 841    pub fn stop_observing_updates(&mut self) {
 842        match self {
 843            Worktree::Local(this) => {
 844                this.update_observer.take();
 845            }
 846            Worktree::Remote(this) => {
 847                this.update_observer.take();
 848            }
 849        }
 850    }
 851
 852    #[cfg(any(test, feature = "test-support"))]
 853    pub fn has_update_observer(&self) -> bool {
 854        match self {
 855            Worktree::Local(this) => this.update_observer.is_some(),
 856            Worktree::Remote(this) => this.update_observer.is_some(),
 857        }
 858    }
 859
 860    pub fn load_file(&self, path: &Path, cx: &Context<Worktree>) -> Task<Result<LoadedFile>> {
 861        match self {
 862            Worktree::Local(this) => this.load_file(path, cx),
 863            Worktree::Remote(_) => {
 864                Task::ready(Err(anyhow!("remote worktrees can't yet load files")))
 865            }
 866        }
 867    }
 868
 869    pub fn load_staged_file(&self, path: &Path, cx: &App) -> Task<Result<Option<String>>> {
 870        match self {
 871            Worktree::Local(this) => {
 872                let path = Arc::from(path);
 873                let snapshot = this.snapshot();
 874                cx.background_executor().spawn(async move {
 875                    if let Some(repo) = snapshot.repository_for_path(&path) {
 876                        if let Some(repo_path) = repo.relativize(&path).log_err() {
 877                            if let Some(git_repo) =
 878                                snapshot.git_repositories.get(&repo.work_directory_id)
 879                            {
 880                                return Ok(git_repo.repo_ptr.load_index_text(&repo_path));
 881                            }
 882                        }
 883                    }
 884                    Ok(None)
 885                })
 886            }
 887            Worktree::Remote(_) => {
 888                Task::ready(Err(anyhow!("remote worktrees can't yet load staged files")))
 889            }
 890        }
 891    }
 892
 893    pub fn load_binary_file(
 894        &self,
 895        path: &Path,
 896        cx: &Context<Worktree>,
 897    ) -> Task<Result<LoadedBinaryFile>> {
 898        match self {
 899            Worktree::Local(this) => this.load_binary_file(path, cx),
 900            Worktree::Remote(_) => {
 901                Task::ready(Err(anyhow!("remote worktrees can't yet load binary files")))
 902            }
 903        }
 904    }
 905
 906    pub fn write_file(
 907        &self,
 908        path: &Path,
 909        text: Rope,
 910        line_ending: LineEnding,
 911        cx: &Context<Worktree>,
 912    ) -> Task<Result<Arc<File>>> {
 913        match self {
 914            Worktree::Local(this) => this.write_file(path, text, line_ending, cx),
 915            Worktree::Remote(_) => {
 916                Task::ready(Err(anyhow!("remote worktree can't yet write files")))
 917            }
 918        }
 919    }
 920
 921    pub fn create_entry(
 922        &mut self,
 923        path: impl Into<Arc<Path>>,
 924        is_directory: bool,
 925        cx: &Context<Worktree>,
 926    ) -> Task<Result<CreatedEntry>> {
 927        let path = path.into();
 928        let worktree_id = self.id();
 929        match self {
 930            Worktree::Local(this) => this.create_entry(path, is_directory, cx),
 931            Worktree::Remote(this) => {
 932                let project_id = this.project_id;
 933                let request = this.client.request(proto::CreateProjectEntry {
 934                    worktree_id: worktree_id.to_proto(),
 935                    project_id,
 936                    path: path.to_string_lossy().into(),
 937                    is_directory,
 938                });
 939                cx.spawn(move |this, mut cx| async move {
 940                    let response = request.await?;
 941                    match response.entry {
 942                        Some(entry) => this
 943                            .update(&mut cx, |worktree, cx| {
 944                                worktree.as_remote_mut().unwrap().insert_entry(
 945                                    entry,
 946                                    response.worktree_scan_id as usize,
 947                                    cx,
 948                                )
 949                            })?
 950                            .await
 951                            .map(CreatedEntry::Included),
 952                        None => {
 953                            let abs_path = this.update(&mut cx, |worktree, _| {
 954                                worktree
 955                                    .absolutize(&path)
 956                                    .with_context(|| format!("absolutizing {path:?}"))
 957                            })??;
 958                            Ok(CreatedEntry::Excluded { abs_path })
 959                        }
 960                    }
 961                })
 962            }
 963        }
 964    }
 965
 966    pub fn delete_entry(
 967        &mut self,
 968        entry_id: ProjectEntryId,
 969        trash: bool,
 970        cx: &mut Context<Worktree>,
 971    ) -> Option<Task<Result<()>>> {
 972        let task = match self {
 973            Worktree::Local(this) => this.delete_entry(entry_id, trash, cx),
 974            Worktree::Remote(this) => this.delete_entry(entry_id, trash, cx),
 975        }?;
 976
 977        let entry = match self {
 978            Worktree::Local(ref this) => this.entry_for_id(entry_id),
 979            Worktree::Remote(ref this) => this.entry_for_id(entry_id),
 980        }?;
 981
 982        let mut ids = vec![entry_id];
 983        let path = &*entry.path;
 984
 985        self.get_children_ids_recursive(path, &mut ids);
 986
 987        for id in ids {
 988            cx.emit(Event::DeletedEntry(id));
 989        }
 990        Some(task)
 991    }
 992
 993    fn get_children_ids_recursive(&self, path: &Path, ids: &mut Vec<ProjectEntryId>) {
 994        let children_iter = self.child_entries(path);
 995        for child in children_iter {
 996            ids.push(child.id);
 997            self.get_children_ids_recursive(&child.path, ids);
 998        }
 999    }
1000
1001    pub fn rename_entry(
1002        &mut self,
1003        entry_id: ProjectEntryId,
1004        new_path: impl Into<Arc<Path>>,
1005        cx: &Context<Self>,
1006    ) -> Task<Result<CreatedEntry>> {
1007        let new_path = new_path.into();
1008        match self {
1009            Worktree::Local(this) => this.rename_entry(entry_id, new_path, cx),
1010            Worktree::Remote(this) => this.rename_entry(entry_id, new_path, cx),
1011        }
1012    }
1013
1014    pub fn copy_entry(
1015        &mut self,
1016        entry_id: ProjectEntryId,
1017        relative_worktree_source_path: Option<PathBuf>,
1018        new_path: impl Into<Arc<Path>>,
1019        cx: &Context<Self>,
1020    ) -> Task<Result<Option<Entry>>> {
1021        let new_path = new_path.into();
1022        match self {
1023            Worktree::Local(this) => {
1024                this.copy_entry(entry_id, relative_worktree_source_path, new_path, cx)
1025            }
1026            Worktree::Remote(this) => {
1027                let relative_worktree_source_path =
1028                    relative_worktree_source_path.map(|relative_worktree_source_path| {
1029                        relative_worktree_source_path.to_string_lossy().into()
1030                    });
1031                let response = this.client.request(proto::CopyProjectEntry {
1032                    project_id: this.project_id,
1033                    entry_id: entry_id.to_proto(),
1034                    relative_worktree_source_path,
1035                    new_path: new_path.to_string_lossy().into(),
1036                });
1037                cx.spawn(move |this, mut cx| async move {
1038                    let response = response.await?;
1039                    match response.entry {
1040                        Some(entry) => this
1041                            .update(&mut cx, |worktree, cx| {
1042                                worktree.as_remote_mut().unwrap().insert_entry(
1043                                    entry,
1044                                    response.worktree_scan_id as usize,
1045                                    cx,
1046                                )
1047                            })?
1048                            .await
1049                            .map(Some),
1050                        None => Ok(None),
1051                    }
1052                })
1053            }
1054        }
1055    }
1056
1057    pub fn copy_external_entries(
1058        &mut self,
1059        target_directory: PathBuf,
1060        paths: Vec<Arc<Path>>,
1061        overwrite_existing_files: bool,
1062        cx: &Context<Worktree>,
1063    ) -> Task<Result<Vec<ProjectEntryId>>> {
1064        match self {
1065            Worktree::Local(this) => {
1066                this.copy_external_entries(target_directory, paths, overwrite_existing_files, cx)
1067            }
1068            _ => Task::ready(Err(anyhow!(
1069                "Copying external entries is not supported for remote worktrees"
1070            ))),
1071        }
1072    }
1073
1074    pub fn expand_entry(
1075        &mut self,
1076        entry_id: ProjectEntryId,
1077        cx: &Context<Worktree>,
1078    ) -> Option<Task<Result<()>>> {
1079        match self {
1080            Worktree::Local(this) => this.expand_entry(entry_id, cx),
1081            Worktree::Remote(this) => {
1082                let response = this.client.request(proto::ExpandProjectEntry {
1083                    project_id: this.project_id,
1084                    entry_id: entry_id.to_proto(),
1085                });
1086                Some(cx.spawn(move |this, mut cx| async move {
1087                    let response = response.await?;
1088                    this.update(&mut cx, |this, _| {
1089                        this.as_remote_mut()
1090                            .unwrap()
1091                            .wait_for_snapshot(response.worktree_scan_id as usize)
1092                    })?
1093                    .await?;
1094                    Ok(())
1095                }))
1096            }
1097        }
1098    }
1099
1100    pub async fn handle_create_entry(
1101        this: Entity<Self>,
1102        request: proto::CreateProjectEntry,
1103        mut cx: AsyncApp,
1104    ) -> Result<proto::ProjectEntryResponse> {
1105        let (scan_id, entry) = this.update(&mut cx, |this, cx| {
1106            (
1107                this.scan_id(),
1108                this.create_entry(PathBuf::from(request.path), request.is_directory, cx),
1109            )
1110        })?;
1111        Ok(proto::ProjectEntryResponse {
1112            entry: match &entry.await? {
1113                CreatedEntry::Included(entry) => Some(entry.into()),
1114                CreatedEntry::Excluded { .. } => None,
1115            },
1116            worktree_scan_id: scan_id as u64,
1117        })
1118    }
1119
1120    pub async fn handle_delete_entry(
1121        this: Entity<Self>,
1122        request: proto::DeleteProjectEntry,
1123        mut cx: AsyncApp,
1124    ) -> Result<proto::ProjectEntryResponse> {
1125        let (scan_id, task) = this.update(&mut cx, |this, cx| {
1126            (
1127                this.scan_id(),
1128                this.delete_entry(
1129                    ProjectEntryId::from_proto(request.entry_id),
1130                    request.use_trash,
1131                    cx,
1132                ),
1133            )
1134        })?;
1135        task.ok_or_else(|| anyhow!("invalid entry"))?.await?;
1136        Ok(proto::ProjectEntryResponse {
1137            entry: None,
1138            worktree_scan_id: scan_id as u64,
1139        })
1140    }
1141
1142    pub async fn handle_expand_entry(
1143        this: Entity<Self>,
1144        request: proto::ExpandProjectEntry,
1145        mut cx: AsyncApp,
1146    ) -> Result<proto::ExpandProjectEntryResponse> {
1147        let task = this.update(&mut cx, |this, cx| {
1148            this.expand_entry(ProjectEntryId::from_proto(request.entry_id), cx)
1149        })?;
1150        task.ok_or_else(|| anyhow!("no such entry"))?.await?;
1151        let scan_id = this.read_with(&cx, |this, _| this.scan_id())?;
1152        Ok(proto::ExpandProjectEntryResponse {
1153            worktree_scan_id: scan_id as u64,
1154        })
1155    }
1156
1157    pub async fn handle_rename_entry(
1158        this: Entity<Self>,
1159        request: proto::RenameProjectEntry,
1160        mut cx: AsyncApp,
1161    ) -> Result<proto::ProjectEntryResponse> {
1162        let (scan_id, task) = this.update(&mut cx, |this, cx| {
1163            (
1164                this.scan_id(),
1165                this.rename_entry(
1166                    ProjectEntryId::from_proto(request.entry_id),
1167                    PathBuf::from(request.new_path),
1168                    cx,
1169                ),
1170            )
1171        })?;
1172        Ok(proto::ProjectEntryResponse {
1173            entry: match &task.await? {
1174                CreatedEntry::Included(entry) => Some(entry.into()),
1175                CreatedEntry::Excluded { .. } => None,
1176            },
1177            worktree_scan_id: scan_id as u64,
1178        })
1179    }
1180
1181    pub async fn handle_copy_entry(
1182        this: Entity<Self>,
1183        request: proto::CopyProjectEntry,
1184        mut cx: AsyncApp,
1185    ) -> Result<proto::ProjectEntryResponse> {
1186        let (scan_id, task) = this.update(&mut cx, |this, cx| {
1187            let relative_worktree_source_path =
1188                request.relative_worktree_source_path.map(PathBuf::from);
1189            (
1190                this.scan_id(),
1191                this.copy_entry(
1192                    ProjectEntryId::from_proto(request.entry_id),
1193                    relative_worktree_source_path,
1194                    PathBuf::from(request.new_path),
1195                    cx,
1196                ),
1197            )
1198        })?;
1199        Ok(proto::ProjectEntryResponse {
1200            entry: task.await?.as_ref().map(|e| e.into()),
1201            worktree_scan_id: scan_id as u64,
1202        })
1203    }
1204}
1205
1206impl LocalWorktree {
1207    pub fn fs(&self) -> &Arc<dyn Fs> {
1208        &self.fs
1209    }
1210
1211    pub fn contains_abs_path(&self, path: &Path) -> bool {
1212        let path = SanitizedPath::from(path);
1213        path.starts_with(&self.abs_path)
1214    }
1215
1216    pub fn is_path_private(&self, path: &Path) -> bool {
1217        !self.share_private_files && self.settings.is_path_private(path)
1218    }
1219
1220    fn restart_background_scanners(&mut self, cx: &Context<Worktree>) {
1221        let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
1222        let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
1223        self.scan_requests_tx = scan_requests_tx;
1224        self.path_prefixes_to_scan_tx = path_prefixes_to_scan_tx;
1225
1226        self.start_background_scanner(scan_requests_rx, path_prefixes_to_scan_rx, cx);
1227        let always_included_entries = mem::take(&mut self.snapshot.always_included_entries);
1228        log::debug!(
1229            "refreshing entries for the following always included paths: {:?}",
1230            always_included_entries
1231        );
1232
1233        // Cleans up old always included entries to ensure they get updated properly. Otherwise,
1234        // nested always included entries may not get updated and will result in out-of-date info.
1235        self.refresh_entries_for_paths(always_included_entries);
1236    }
1237
1238    fn start_background_scanner(
1239        &mut self,
1240        scan_requests_rx: channel::Receiver<ScanRequest>,
1241        path_prefixes_to_scan_rx: channel::Receiver<Arc<Path>>,
1242        cx: &Context<Worktree>,
1243    ) {
1244        let snapshot = self.snapshot();
1245        let share_private_files = self.share_private_files;
1246        let next_entry_id = self.next_entry_id.clone();
1247        let fs = self.fs.clone();
1248        let git_hosting_provider_registry = GitHostingProviderRegistry::try_global(cx);
1249        let settings = self.settings.clone();
1250        let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
1251        let background_scanner = cx.background_executor().spawn({
1252            let abs_path = &snapshot.abs_path;
1253            #[cfg(target_os = "windows")]
1254            let abs_path = abs_path
1255                .as_path()
1256                .canonicalize()
1257                .unwrap_or_else(|_| abs_path.as_path().to_path_buf());
1258            #[cfg(not(target_os = "windows"))]
1259            let abs_path = abs_path.as_path().to_path_buf();
1260            let background = cx.background_executor().clone();
1261            async move {
1262                let (events, watcher) = fs.watch(&abs_path, FS_WATCH_LATENCY).await;
1263                let fs_case_sensitive = fs.is_case_sensitive().await.unwrap_or_else(|e| {
1264                    log::error!("Failed to determine whether filesystem is case sensitive: {e:#}");
1265                    true
1266                });
1267
1268                let mut scanner = BackgroundScanner {
1269                    fs,
1270                    fs_case_sensitive,
1271                    status_updates_tx: scan_states_tx,
1272                    executor: background,
1273                    scan_requests_rx,
1274                    path_prefixes_to_scan_rx,
1275                    next_entry_id,
1276                    state: Mutex::new(BackgroundScannerState {
1277                        prev_snapshot: snapshot.snapshot.clone(),
1278                        snapshot,
1279                        scanned_dirs: Default::default(),
1280                        path_prefixes_to_scan: Default::default(),
1281                        paths_to_scan: Default::default(),
1282                        removed_entries: Default::default(),
1283                        changed_paths: Default::default(),
1284                        git_hosting_provider_registry,
1285                    }),
1286                    phase: BackgroundScannerPhase::InitialScan,
1287                    share_private_files,
1288                    settings,
1289                    watcher,
1290                };
1291
1292                scanner
1293                    .run(Box::pin(
1294                        events.map(|events| events.into_iter().map(Into::into).collect()),
1295                    ))
1296                    .await;
1297            }
1298        });
1299        let scan_state_updater = cx.spawn(|this, mut cx| async move {
1300            while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade()) {
1301                this.update(&mut cx, |this, cx| {
1302                    let this = this.as_local_mut().unwrap();
1303                    match state {
1304                        ScanState::Started => {
1305                            *this.is_scanning.0.borrow_mut() = true;
1306                        }
1307                        ScanState::Updated {
1308                            snapshot,
1309                            changes,
1310                            barrier,
1311                            scanning,
1312                        } => {
1313                            *this.is_scanning.0.borrow_mut() = scanning;
1314                            this.set_snapshot(snapshot, changes, cx);
1315                            drop(barrier);
1316                        }
1317                        ScanState::RootUpdated { new_path } => {
1318                            if let Some(new_path) = new_path {
1319                                this.snapshot.git_repositories = Default::default();
1320                                this.snapshot.ignores_by_parent_abs_path = Default::default();
1321                                let root_name = new_path
1322                                    .as_path()
1323                                    .file_name()
1324                                    .map_or(String::new(), |f| f.to_string_lossy().to_string());
1325                                this.snapshot.update_abs_path(new_path, root_name);
1326                            }
1327                            this.restart_background_scanners(cx);
1328                        }
1329                    }
1330                    cx.notify();
1331                })
1332                .ok();
1333            }
1334        });
1335        self._background_scanner_tasks = vec![background_scanner, scan_state_updater];
1336        self.is_scanning = watch::channel_with(true);
1337    }
1338
1339    fn set_snapshot(
1340        &mut self,
1341        new_snapshot: LocalSnapshot,
1342        entry_changes: UpdatedEntriesSet,
1343        cx: &mut Context<Worktree>,
1344    ) {
1345        let repo_changes = self.changed_repos(&self.snapshot, &new_snapshot);
1346        self.snapshot = new_snapshot;
1347
1348        if let Some(share) = self.update_observer.as_mut() {
1349            share
1350                .snapshots_tx
1351                .unbounded_send((
1352                    self.snapshot.clone(),
1353                    entry_changes.clone(),
1354                    repo_changes.clone(),
1355                ))
1356                .ok();
1357        }
1358
1359        if !entry_changes.is_empty() {
1360            cx.emit(Event::UpdatedEntries(entry_changes));
1361        }
1362        if !repo_changes.is_empty() {
1363            cx.emit(Event::UpdatedGitRepositories(repo_changes));
1364        }
1365    }
1366
1367    fn changed_repos(
1368        &self,
1369        old_snapshot: &LocalSnapshot,
1370        new_snapshot: &LocalSnapshot,
1371    ) -> UpdatedGitRepositoriesSet {
1372        let mut changes = Vec::new();
1373        let mut old_repos = old_snapshot.git_repositories.iter().peekable();
1374        let mut new_repos = new_snapshot.git_repositories.iter().peekable();
1375
1376        loop {
1377            match (new_repos.peek().map(clone), old_repos.peek().map(clone)) {
1378                (Some((new_entry_id, new_repo)), Some((old_entry_id, old_repo))) => {
1379                    match Ord::cmp(&new_entry_id, &old_entry_id) {
1380                        Ordering::Less => {
1381                            if let Some(entry) = new_snapshot.entry_for_id(new_entry_id) {
1382                                changes.push((
1383                                    entry.path.clone(),
1384                                    GitRepositoryChange {
1385                                        old_repository: None,
1386                                    },
1387                                ));
1388                            }
1389                            new_repos.next();
1390                        }
1391                        Ordering::Equal => {
1392                            if new_repo.git_dir_scan_id != old_repo.git_dir_scan_id
1393                                || new_repo.status_scan_id != old_repo.status_scan_id
1394                            {
1395                                if let Some(entry) = new_snapshot.entry_for_id(new_entry_id) {
1396                                    let old_repo = old_snapshot
1397                                        .repositories
1398                                        .get(&PathKey(entry.path.clone()), &())
1399                                        .cloned();
1400                                    changes.push((
1401                                        entry.path.clone(),
1402                                        GitRepositoryChange {
1403                                            old_repository: old_repo,
1404                                        },
1405                                    ));
1406                                }
1407                            }
1408                            new_repos.next();
1409                            old_repos.next();
1410                        }
1411                        Ordering::Greater => {
1412                            if let Some(entry) = old_snapshot.entry_for_id(old_entry_id) {
1413                                let old_repo = old_snapshot
1414                                    .repositories
1415                                    .get(&PathKey(entry.path.clone()), &())
1416                                    .cloned();
1417                                changes.push((
1418                                    entry.path.clone(),
1419                                    GitRepositoryChange {
1420                                        old_repository: old_repo,
1421                                    },
1422                                ));
1423                            }
1424                            old_repos.next();
1425                        }
1426                    }
1427                }
1428                (Some((entry_id, _)), None) => {
1429                    if let Some(entry) = new_snapshot.entry_for_id(entry_id) {
1430                        changes.push((
1431                            entry.path.clone(),
1432                            GitRepositoryChange {
1433                                old_repository: None,
1434                            },
1435                        ));
1436                    }
1437                    new_repos.next();
1438                }
1439                (None, Some((entry_id, _))) => {
1440                    if let Some(entry) = old_snapshot.entry_for_id(entry_id) {
1441                        let old_repo = old_snapshot
1442                            .repositories
1443                            .get(&PathKey(entry.path.clone()), &())
1444                            .cloned();
1445                        changes.push((
1446                            entry.path.clone(),
1447                            GitRepositoryChange {
1448                                old_repository: old_repo,
1449                            },
1450                        ));
1451                    }
1452                    old_repos.next();
1453                }
1454                (None, None) => break,
1455            }
1456        }
1457
1458        fn clone<T: Clone, U: Clone>(value: &(&T, &U)) -> (T, U) {
1459            (value.0.clone(), value.1.clone())
1460        }
1461
1462        changes.into()
1463    }
1464
1465    pub fn scan_complete(&self) -> impl Future<Output = ()> {
1466        let mut is_scanning_rx = self.is_scanning.1.clone();
1467        async move {
1468            let mut is_scanning = *is_scanning_rx.borrow();
1469            while is_scanning {
1470                if let Some(value) = is_scanning_rx.recv().await {
1471                    is_scanning = value;
1472                } else {
1473                    break;
1474                }
1475            }
1476        }
1477    }
1478
1479    pub fn snapshot(&self) -> LocalSnapshot {
1480        self.snapshot.clone()
1481    }
1482
1483    pub fn settings(&self) -> WorktreeSettings {
1484        self.settings.clone()
1485    }
1486
1487    pub fn local_git_repo(&self, path: &Path) -> Option<Arc<dyn GitRepository>> {
1488        self.local_repo_for_path(path)
1489            .map(|local_repo| local_repo.repo_ptr.clone())
1490    }
1491
1492    pub fn get_local_repo(&self, repo: &RepositoryEntry) -> Option<&LocalRepositoryEntry> {
1493        self.git_repositories.get(&repo.work_directory_id)
1494    }
1495
1496    fn load_binary_file(
1497        &self,
1498        path: &Path,
1499        cx: &Context<Worktree>,
1500    ) -> Task<Result<LoadedBinaryFile>> {
1501        let path = Arc::from(path);
1502        let abs_path = self.absolutize(&path);
1503        let fs = self.fs.clone();
1504        let entry = self.refresh_entry(path.clone(), None, cx);
1505        let is_private = self.is_path_private(path.as_ref());
1506
1507        let worktree = cx.weak_entity();
1508        cx.background_executor().spawn(async move {
1509            let abs_path = abs_path?;
1510            let content = fs.load_bytes(&abs_path).await?;
1511
1512            let worktree = worktree
1513                .upgrade()
1514                .ok_or_else(|| anyhow!("worktree was dropped"))?;
1515            let file = match entry.await? {
1516                Some(entry) => File::for_entry(entry, worktree),
1517                None => {
1518                    let metadata = fs
1519                        .metadata(&abs_path)
1520                        .await
1521                        .with_context(|| {
1522                            format!("Loading metadata for excluded file {abs_path:?}")
1523                        })?
1524                        .with_context(|| {
1525                            format!("Excluded file {abs_path:?} got removed during loading")
1526                        })?;
1527                    Arc::new(File {
1528                        entry_id: None,
1529                        worktree,
1530                        path,
1531                        disk_state: DiskState::Present {
1532                            mtime: metadata.mtime,
1533                        },
1534                        is_local: true,
1535                        is_private,
1536                    })
1537                }
1538            };
1539
1540            Ok(LoadedBinaryFile { file, content })
1541        })
1542    }
1543
1544    fn load_file(&self, path: &Path, cx: &Context<Worktree>) -> Task<Result<LoadedFile>> {
1545        let path = Arc::from(path);
1546        let abs_path = self.absolutize(&path);
1547        let fs = self.fs.clone();
1548        let entry = self.refresh_entry(path.clone(), None, cx);
1549        let is_private = self.is_path_private(path.as_ref());
1550
1551        cx.spawn(|this, _cx| async move {
1552            let abs_path = abs_path?;
1553            let text = fs.load(&abs_path).await?;
1554
1555            let worktree = this
1556                .upgrade()
1557                .ok_or_else(|| anyhow!("worktree was dropped"))?;
1558            let file = match entry.await? {
1559                Some(entry) => File::for_entry(entry, worktree),
1560                None => {
1561                    let metadata = fs
1562                        .metadata(&abs_path)
1563                        .await
1564                        .with_context(|| {
1565                            format!("Loading metadata for excluded file {abs_path:?}")
1566                        })?
1567                        .with_context(|| {
1568                            format!("Excluded file {abs_path:?} got removed during loading")
1569                        })?;
1570                    Arc::new(File {
1571                        entry_id: None,
1572                        worktree,
1573                        path,
1574                        disk_state: DiskState::Present {
1575                            mtime: metadata.mtime,
1576                        },
1577                        is_local: true,
1578                        is_private,
1579                    })
1580                }
1581            };
1582
1583            Ok(LoadedFile { file, text })
1584        })
1585    }
1586
1587    /// Find the deepest ancestor of `path` that already has an entry in this worktree's data structures.
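        ///
        /// Illustrative example (hypothetical paths): if the worktree has an entry
        /// for `src` but nothing under `src/new_dir`, then
        /// `lowest_ancestor("src/new_dir/file.rs")` returns `src`; if no ancestor is
        /// indexed at all, the empty path is returned.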
1588    fn lowest_ancestor(&self, path: &Path) -> PathBuf {
1589        let mut lowest_ancestor = None;
1590        for path in path.ancestors() {
1591            if self.entry_for_path(path).is_some() {
1592                lowest_ancestor = Some(path.to_path_buf());
1593                break;
1594            }
1595        }
1596
1597        lowest_ancestor.unwrap_or_else(|| PathBuf::from(""))
1598    }
1599
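        /// Create a file or directory at `path` on disk, then refresh the snapshot.
        ///
        /// Besides the new entry itself, every path component between the lowest
        /// already-indexed ancestor and the new entry is refreshed, so implicitly
        /// created parent directories show up as well. Illustrative example
        /// (hypothetical paths): if only `src` is indexed and `src/a/b/file.rs` is
        /// created, the entries for `src/a`, `src/a/b`, and `src/a/b/file.rs` are all
        /// refreshed. If the path is excluded by the worktree settings, it is still
        /// created on disk but `CreatedEntry::Excluded` is returned.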
1600    fn create_entry(
1601        &self,
1602        path: impl Into<Arc<Path>>,
1603        is_dir: bool,
1604        cx: &Context<Worktree>,
1605    ) -> Task<Result<CreatedEntry>> {
1606        let path = path.into();
1607        let abs_path = match self.absolutize(&path) {
1608            Ok(path) => path,
1609            Err(e) => return Task::ready(Err(e.context(format!("absolutizing path {path:?}")))),
1610        };
1611        let path_excluded = self.settings.is_path_excluded(&abs_path);
1612        let fs = self.fs.clone();
1613        let task_abs_path = abs_path.clone();
1614        let write = cx.background_executor().spawn(async move {
1615            if is_dir {
1616                fs.create_dir(&task_abs_path)
1617                    .await
1618                    .with_context(|| format!("creating directory {task_abs_path:?}"))
1619            } else {
1620                fs.save(&task_abs_path, &Rope::default(), LineEnding::default())
1621                    .await
1622                    .with_context(|| format!("creating file {task_abs_path:?}"))
1623            }
1624        });
1625
1626        let lowest_ancestor = self.lowest_ancestor(&path);
1627        cx.spawn(|this, mut cx| async move {
1628            write.await?;
1629            if path_excluded {
1630                return Ok(CreatedEntry::Excluded { abs_path });
1631            }
1632
1633            let (result, refreshes) = this.update(&mut cx, |this, cx| {
1634                let mut refreshes = Vec::new();
1635                let refresh_paths = path.strip_prefix(&lowest_ancestor).unwrap();
1636                for refresh_path in refresh_paths.ancestors() {
1637                    if refresh_path == Path::new("") {
1638                        continue;
1639                    }
1640                    let refresh_full_path = lowest_ancestor.join(refresh_path);
1641
1642                    refreshes.push(this.as_local_mut().unwrap().refresh_entry(
1643                        refresh_full_path.into(),
1644                        None,
1645                        cx,
1646                    ));
1647                }
1648                (
1649                    this.as_local_mut().unwrap().refresh_entry(path, None, cx),
1650                    refreshes,
1651                )
1652            })?;
1653            for refresh in refreshes {
1654                refresh.await.log_err();
1655            }
1656
1657            Ok(result
1658                .await?
1659                .map(CreatedEntry::Included)
1660                .unwrap_or_else(|| CreatedEntry::Excluded { abs_path }))
1661        })
1662    }
1663
1664    fn write_file(
1665        &self,
1666        path: impl Into<Arc<Path>>,
1667        text: Rope,
1668        line_ending: LineEnding,
1669        cx: &Context<Worktree>,
1670    ) -> Task<Result<Arc<File>>> {
1671        let path = path.into();
1672        let fs = self.fs.clone();
1673        let is_private = self.is_path_private(&path);
1674        let Ok(abs_path) = self.absolutize(&path) else {
1675            return Task::ready(Err(anyhow!("invalid path {path:?}")));
1676        };
1677
1678        let write = cx.background_executor().spawn({
1679            let fs = fs.clone();
1680            let abs_path = abs_path.clone();
1681            async move { fs.save(&abs_path, &text, line_ending).await }
1682        });
1683
1684        cx.spawn(move |this, mut cx| async move {
1685            write.await?;
1686            let entry = this
1687                .update(&mut cx, |this, cx| {
1688                    this.as_local_mut()
1689                        .unwrap()
1690                        .refresh_entry(path.clone(), None, cx)
1691                })?
1692                .await?;
1693            let worktree = this.upgrade().ok_or_else(|| anyhow!("worktree dropped"))?;
1694            if let Some(entry) = entry {
1695                Ok(File::for_entry(entry, worktree))
1696            } else {
1697                let metadata = fs
1698                    .metadata(&abs_path)
1699                    .await
1700                    .with_context(|| {
1701                        format!("Fetching metadata after saving the excluded buffer {abs_path:?}")
1702                    })?
1703                    .with_context(|| {
1704                        format!("Excluded buffer {path:?} got removed during saving")
1705                    })?;
1706                Ok(Arc::new(File {
1707                    worktree,
1708                    path,
1709                    disk_state: DiskState::Present {
1710                        mtime: metadata.mtime,
1711                    },
1712                    entry_id: None,
1713                    is_local: true,
1714                    is_private,
1715                }))
1716            }
1717        })
1718    }
1719
1720    fn delete_entry(
1721        &self,
1722        entry_id: ProjectEntryId,
1723        trash: bool,
1724        cx: &Context<Worktree>,
1725    ) -> Option<Task<Result<()>>> {
1726        let entry = self.entry_for_id(entry_id)?.clone();
1727        let abs_path = self.absolutize(&entry.path);
1728        let fs = self.fs.clone();
1729
1730        let delete = cx.background_executor().spawn(async move {
1731            if entry.is_file() {
1732                if trash {
1733                    fs.trash_file(&abs_path?, Default::default()).await?;
1734                } else {
1735                    fs.remove_file(&abs_path?, Default::default()).await?;
1736                }
1737            } else if trash {
1738                fs.trash_dir(
1739                    &abs_path?,
1740                    RemoveOptions {
1741                        recursive: true,
1742                        ignore_if_not_exists: false,
1743                    },
1744                )
1745                .await?;
1746            } else {
1747                fs.remove_dir(
1748                    &abs_path?,
1749                    RemoveOptions {
1750                        recursive: true,
1751                        ignore_if_not_exists: false,
1752                    },
1753                )
1754                .await?;
1755            }
1756            anyhow::Ok(entry.path)
1757        });
1758
1759        Some(cx.spawn(|this, mut cx| async move {
1760            let path = delete.await?;
1761            this.update(&mut cx, |this, _| {
1762                this.as_local_mut()
1763                    .unwrap()
1764                    .refresh_entries_for_paths(vec![path])
1765            })?
1766            .recv()
1767            .await;
1768            Ok(())
1769        }))
1770    }
1771
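        /// Rename the entry with `entry_id` to `new_path` on disk, then refresh the
        /// renamed entry. On a case-insensitive file system, a case-only rename (for
        /// example `readme.md` to `README.md`) targets a path the OS considers to
        /// already exist, so the rename is performed with `overwrite: true` in that
        /// case to avoid a file-already-exists error.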
1772    fn rename_entry(
1773        &self,
1774        entry_id: ProjectEntryId,
1775        new_path: impl Into<Arc<Path>>,
1776        cx: &Context<Worktree>,
1777    ) -> Task<Result<CreatedEntry>> {
1778        let old_path = match self.entry_for_id(entry_id) {
1779            Some(entry) => entry.path.clone(),
1780            None => return Task::ready(Err(anyhow!("no entry to rename for id {entry_id:?}"))),
1781        };
1782        let new_path = new_path.into();
1783        let abs_old_path = self.absolutize(&old_path);
1784        let Ok(abs_new_path) = self.absolutize(&new_path) else {
1785            return Task::ready(Err(anyhow!("absolutizing path {new_path:?}")));
1786        };
1787        let abs_path = abs_new_path.clone();
1788        let fs = self.fs.clone();
1789        let case_sensitive = self.fs_case_sensitive;
1790        let rename = cx.background_executor().spawn(async move {
1791            let abs_old_path = abs_old_path?;
1792            let abs_new_path = abs_new_path;
1793
1794            let abs_old_path_lower = abs_old_path.to_str().map(|p| p.to_lowercase());
1795            let abs_new_path_lower = abs_new_path.to_str().map(|p| p.to_lowercase());
1796
1797            // If we're on a case-insensitive FS and we're doing a case-only rename (e.g. `foobar` to `FOOBAR`),
1798            // we want to overwrite, because otherwise we run into a file-already-exists error.
1799            let overwrite = !case_sensitive
1800                && abs_old_path != abs_new_path
1801                && abs_old_path_lower == abs_new_path_lower;
1802
1803            fs.rename(
1804                &abs_old_path,
1805                &abs_new_path,
1806                fs::RenameOptions {
1807                    overwrite,
1808                    ..Default::default()
1809                },
1810            )
1811            .await
1812            .with_context(|| format!("Renaming {abs_old_path:?} into {abs_new_path:?}"))
1813        });
1814
1815        cx.spawn(|this, mut cx| async move {
1816            rename.await?;
1817            Ok(this
1818                .update(&mut cx, |this, cx| {
1819                    this.as_local_mut()
1820                        .unwrap()
1821                        .refresh_entry(new_path.clone(), Some(old_path), cx)
1822                })?
1823                .await?
1824                .map(CreatedEntry::Included)
1825                .unwrap_or_else(|| CreatedEntry::Excluded { abs_path }))
1826        })
1827    }
1828
1829    fn copy_entry(
1830        &self,
1831        entry_id: ProjectEntryId,
1832        relative_worktree_source_path: Option<PathBuf>,
1833        new_path: impl Into<Arc<Path>>,
1834        cx: &Context<Worktree>,
1835    ) -> Task<Result<Option<Entry>>> {
1836        let old_path = match self.entry_for_id(entry_id) {
1837            Some(entry) => entry.path.clone(),
1838            None => return Task::ready(Ok(None)),
1839        };
1840        let new_path = new_path.into();
1841        let abs_old_path =
1842            if let Some(relative_worktree_source_path) = relative_worktree_source_path {
1843                Ok(self.abs_path().join(relative_worktree_source_path))
1844            } else {
1845                self.absolutize(&old_path)
1846            };
1847        let abs_new_path = self.absolutize(&new_path);
1848        let fs = self.fs.clone();
1849        let copy = cx.background_executor().spawn(async move {
1850            copy_recursive(
1851                fs.as_ref(),
1852                &abs_old_path?,
1853                &abs_new_path?,
1854                Default::default(),
1855            )
1856            .await
1857        });
1858
1859        cx.spawn(|this, mut cx| async move {
1860            copy.await?;
1861            this.update(&mut cx, |this, cx| {
1862                this.as_local_mut()
1863                    .unwrap()
1864                    .refresh_entry(new_path.clone(), None, cx)
1865            })?
1866            .await
1867        })
1868    }
1869
1870    pub fn copy_external_entries(
1871        &self,
1872        target_directory: PathBuf,
1873        paths: Vec<Arc<Path>>,
1874        overwrite_existing_files: bool,
1875        cx: &Context<Worktree>,
1876    ) -> Task<Result<Vec<ProjectEntryId>>> {
1877        let worktree_path = self.abs_path().clone();
1878        let fs = self.fs.clone();
1879        let paths = paths
1880            .into_iter()
1881            .filter_map(|source| {
1882                let file_name = source.file_name()?;
1883                let mut target = target_directory.clone();
1884                target.push(file_name);
1885
1886                // Do not allow copying the same file to itself.
1887                if source.as_ref() != target.as_path() {
1888                    Some((source, target))
1889                } else {
1890                    None
1891                }
1892            })
1893            .collect::<Vec<_>>();
1894
1895        let paths_to_refresh = paths
1896            .iter()
1897            .filter_map(|(_, target)| Some(target.strip_prefix(&worktree_path).ok()?.into()))
1898            .collect::<Vec<_>>();
1899
1900        cx.spawn(|this, cx| async move {
1901            cx.background_executor()
1902                .spawn(async move {
1903                    for (source, target) in paths {
1904                        copy_recursive(
1905                            fs.as_ref(),
1906                            &source,
1907                            &target,
1908                            fs::CopyOptions {
1909                                overwrite: overwrite_existing_files,
1910                                ..Default::default()
1911                            },
1912                        )
1913                        .await
1914                        .with_context(|| {
1915                            anyhow!("Failed to copy file from {source:?} to {target:?}")
1916                        })?;
1917                    }
1918                    Ok::<(), anyhow::Error>(())
1919                })
1920                .await
1921                .log_err();
1922            let mut refresh = cx.read_entity(
1923                &this.upgrade().with_context(|| "Dropped worktree")?,
1924                |this, _| {
1925                    Ok::<postage::barrier::Receiver, anyhow::Error>(
1926                        this.as_local()
1927                            .with_context(|| "Worktree is not local")?
1928                            .refresh_entries_for_paths(paths_to_refresh.clone()),
1929                    )
1930                },
1931            )??;
1932
1933            cx.background_executor()
1934                .spawn(async move {
1935                    refresh.next().await;
1936                    Ok::<(), anyhow::Error>(())
1937                })
1938                .await
1939                .log_err();
1940
1941            let this = this.upgrade().with_context(|| "Dropped worktree")?;
1942            cx.read_entity(&this, |this, _| {
1943                paths_to_refresh
1944                    .iter()
1945                    .filter_map(|path| Some(this.entry_for_path(path)?.id))
1946                    .collect()
1947            })
1948        })
1949    }
1950
1951    fn expand_entry(
1952        &self,
1953        entry_id: ProjectEntryId,
1954        cx: &Context<Worktree>,
1955    ) -> Option<Task<Result<()>>> {
1956        let path = self.entry_for_id(entry_id)?.path.clone();
1957        let mut refresh = self.refresh_entries_for_paths(vec![path]);
1958        Some(cx.background_executor().spawn(async move {
1959            refresh.next().await;
1960            Ok(())
1961        }))
1962    }
1963
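        /// Ask the background scanner to rescan the given worktree-relative paths,
        /// returning a barrier receiver that resolves once the scan request has been
        /// processed. Sketch of a typical await pattern (the `done` binding is
        /// hypothetical):
        ///
        /// ```ignore
        /// let mut done = worktree.refresh_entries_for_paths(vec![path.clone()]);
        /// done.recv().await; // the snapshot now reflects the rescanned paths
        /// ```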
1964    fn refresh_entries_for_paths(&self, paths: Vec<Arc<Path>>) -> barrier::Receiver {
1965        let (tx, rx) = barrier::channel();
1966        self.scan_requests_tx
1967            .try_send(ScanRequest {
1968                relative_paths: paths,
1969                done: smallvec![tx],
1970            })
1971            .ok();
1972        rx
1973    }
1974
1975    pub fn add_path_prefix_to_scan(&self, path_prefix: Arc<Path>) {
1976        self.path_prefixes_to_scan_tx.try_send(path_prefix).ok();
1977    }
1978
1979    fn refresh_entry(
1980        &self,
1981        path: Arc<Path>,
1982        old_path: Option<Arc<Path>>,
1983        cx: &Context<Worktree>,
1984    ) -> Task<Result<Option<Entry>>> {
1985        if self.settings.is_path_excluded(&path) {
1986            return Task::ready(Ok(None));
1987        }
1988        let paths = if let Some(old_path) = old_path.as_ref() {
1989            vec![old_path.clone(), path.clone()]
1990        } else {
1991            vec![path.clone()]
1992        };
1993        let t0 = Instant::now();
1994        let mut refresh = self.refresh_entries_for_paths(paths);
1995        cx.spawn(move |this, mut cx| async move {
1996            refresh.recv().await;
1997            log::trace!("refreshed entry {path:?} in {:?}", t0.elapsed());
1998            let new_entry = this.update(&mut cx, |this, _| {
1999                this.entry_for_path(path)
2000                    .cloned()
2001                    .ok_or_else(|| anyhow!("failed to read path after update"))
2002            })??;
2003            Ok(Some(new_entry))
2004        })
2005    }
2006
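        /// Begin streaming snapshot updates for this worktree to `callback`. If an
        /// observer is already installed, this only signals it to resume. The first
        /// snapshot produces a full initial update and later snapshots produce
        /// incremental ones; each update is split into size-limited chunks, and a
        /// chunk for which `callback` returns `false` is retried once updates are
        /// resumed.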
2007    fn observe_updates<F, Fut>(&mut self, project_id: u64, cx: &Context<Worktree>, callback: F)
2008    where
2009        F: 'static + Send + Fn(proto::UpdateWorktree) -> Fut,
2010        Fut: Send + Future<Output = bool>,
2011    {
2012        if let Some(observer) = self.update_observer.as_mut() {
2013            *observer.resume_updates.borrow_mut() = ();
2014            return;
2015        }
2016
2017        let (resume_updates_tx, mut resume_updates_rx) = watch::channel::<()>();
2018        let (snapshots_tx, mut snapshots_rx) =
2019            mpsc::unbounded::<(LocalSnapshot, UpdatedEntriesSet, UpdatedGitRepositoriesSet)>();
2020        snapshots_tx
2021            .unbounded_send((self.snapshot(), Arc::default(), Arc::default()))
2022            .ok();
2023
2024        let worktree_id = cx.entity_id().as_u64();
2025        let _maintain_remote_snapshot = cx.background_executor().spawn(async move {
2026            let mut is_first = true;
2027            while let Some((snapshot, entry_changes, repo_changes)) = snapshots_rx.next().await {
2028                let update;
2029                if is_first {
2030                    update = snapshot.build_initial_update(project_id, worktree_id);
2031                    is_first = false;
2032                } else {
2033                    update =
2034                        snapshot.build_update(project_id, worktree_id, entry_changes, repo_changes);
2035                }
2036
2037                for update in proto::split_worktree_update(update) {
2038                    let _ = resume_updates_rx.try_recv();
2039                    loop {
2040                        let result = callback(update.clone());
2041                        if result.await {
2042                            break;
2043                        } else {
2044                            log::info!("waiting to resume updates");
2045                            if resume_updates_rx.next().await.is_none() {
2046                                return Some(());
2047                            }
2048                        }
2049                    }
2050                }
2051            }
2052            Some(())
2053        });
2054
2055        self.update_observer = Some(UpdateObservationState {
2056            snapshots_tx,
2057            resume_updates: resume_updates_tx,
2058            _maintain_remote_snapshot,
2059        });
2060    }
2061
2062    pub fn share_private_files(&mut self, cx: &Context<Worktree>) {
2063        self.share_private_files = true;
2064        self.restart_background_scanners(cx);
2065    }
2066}
2067
2068impl RemoteWorktree {
2069    pub fn project_id(&self) -> u64 {
2070        self.project_id
2071    }
2072
2073    pub fn client(&self) -> AnyProtoClient {
2074        self.client.clone()
2075    }
2076
2077    pub fn disconnected_from_host(&mut self) {
2078        self.updates_tx.take();
2079        self.snapshot_subscriptions.clear();
2080        self.disconnected = true;
2081    }
2082
2083    pub fn update_from_remote(&self, update: proto::UpdateWorktree) {
2084        if let Some(updates_tx) = &self.updates_tx {
2085            updates_tx
2086                .unbounded_send(update)
2087                .expect("consumer runs to completion");
2088        }
2089    }
2090
2091    fn observe_updates<F, Fut>(&mut self, project_id: u64, cx: &Context<Worktree>, callback: F)
2092    where
2093        F: 'static + Send + Fn(proto::UpdateWorktree) -> Fut,
2094        Fut: 'static + Send + Future<Output = bool>,
2095    {
2096        let (tx, mut rx) = mpsc::unbounded();
2097        let initial_update = self
2098            .snapshot
2099            .build_initial_update(project_id, self.id().to_proto());
2100        self.update_observer = Some(tx);
2101        cx.spawn(|this, mut cx| async move {
2102            let mut update = initial_update;
2103            'outer: loop {
2104                // SSH projects use a special project ID of 0, and we need to
2105                // remap it to the correct one here.
2106                update.project_id = project_id;
2107
2108                for chunk in split_worktree_update(update) {
2109                    if !callback(chunk).await {
2110                        break 'outer;
2111                    }
2112                }
2113
2114                if let Some(next_update) = rx.next().await {
2115                    update = next_update;
2116                } else {
2117                    break;
2118                }
2119            }
2120            this.update(&mut cx, |this, _| {
2121                let this = this.as_remote_mut().unwrap();
2122                this.update_observer.take();
2123            })
2124        })
2125        .detach();
2126    }
2127
2128    fn observed_snapshot(&self, scan_id: usize) -> bool {
2129        self.completed_scan_id >= scan_id
2130    }
2131
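        /// Return a future that resolves once a snapshot with at least `scan_id` has
        /// been observed. Pending waiters are kept sorted by scan id (hence the
        /// binary-search insert below) so they can be completed in order as updates
        /// arrive; if the worktree has already disconnected, the future resolves with
        /// an error instead.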
2132    pub fn wait_for_snapshot(&mut self, scan_id: usize) -> impl Future<Output = Result<()>> {
2133        let (tx, rx) = oneshot::channel();
2134        if self.observed_snapshot(scan_id) {
2135            let _ = tx.send(());
2136        } else if self.disconnected {
2137            drop(tx);
2138        } else {
2139            match self
2140                .snapshot_subscriptions
2141                .binary_search_by_key(&scan_id, |probe| probe.0)
2142            {
2143                Ok(ix) | Err(ix) => self.snapshot_subscriptions.insert(ix, (scan_id, tx)),
2144            }
2145        }
2146
2147        async move {
2148            rx.await?;
2149            Ok(())
2150        }
2151    }
2152
2153    fn insert_entry(
2154        &mut self,
2155        entry: proto::Entry,
2156        scan_id: usize,
2157        cx: &Context<Worktree>,
2158    ) -> Task<Result<Entry>> {
2159        let wait_for_snapshot = self.wait_for_snapshot(scan_id);
2160        cx.spawn(|this, mut cx| async move {
2161            wait_for_snapshot.await?;
2162            this.update(&mut cx, |worktree, _| {
2163                let worktree = worktree.as_remote_mut().unwrap();
2164                let snapshot = &mut worktree.background_snapshot.lock().0;
2165                let entry = snapshot.insert_entry(entry, &worktree.file_scan_inclusions);
2166                worktree.snapshot = snapshot.clone();
2167                entry
2168            })?
2169        })
2170    }
2171
2172    fn delete_entry(
2173        &self,
2174        entry_id: ProjectEntryId,
2175        trash: bool,
2176        cx: &Context<Worktree>,
2177    ) -> Option<Task<Result<()>>> {
2178        let response = self.client.request(proto::DeleteProjectEntry {
2179            project_id: self.project_id,
2180            entry_id: entry_id.to_proto(),
2181            use_trash: trash,
2182        });
2183        Some(cx.spawn(move |this, mut cx| async move {
2184            let response = response.await?;
2185            let scan_id = response.worktree_scan_id as usize;
2186
2187            this.update(&mut cx, move |this, _| {
2188                this.as_remote_mut().unwrap().wait_for_snapshot(scan_id)
2189            })?
2190            .await?;
2191
2192            this.update(&mut cx, |this, _| {
2193                let this = this.as_remote_mut().unwrap();
2194                let snapshot = &mut this.background_snapshot.lock().0;
2195                snapshot.delete_entry(entry_id);
2196                this.snapshot = snapshot.clone();
2197            })
2198        }))
2199    }
2200
2201    fn rename_entry(
2202        &self,
2203        entry_id: ProjectEntryId,
2204        new_path: impl Into<Arc<Path>>,
2205        cx: &Context<Worktree>,
2206    ) -> Task<Result<CreatedEntry>> {
2207        let new_path = new_path.into();
2208        let response = self.client.request(proto::RenameProjectEntry {
2209            project_id: self.project_id,
2210            entry_id: entry_id.to_proto(),
2211            new_path: new_path.to_string_lossy().into(),
2212        });
2213        cx.spawn(move |this, mut cx| async move {
2214            let response = response.await?;
2215            match response.entry {
2216                Some(entry) => this
2217                    .update(&mut cx, |this, cx| {
2218                        this.as_remote_mut().unwrap().insert_entry(
2219                            entry,
2220                            response.worktree_scan_id as usize,
2221                            cx,
2222                        )
2223                    })?
2224                    .await
2225                    .map(CreatedEntry::Included),
2226                None => {
2227                    let abs_path = this.update(&mut cx, |worktree, _| {
2228                        worktree
2229                            .absolutize(&new_path)
2230                            .with_context(|| format!("absolutizing {new_path:?}"))
2231                    })??;
2232                    Ok(CreatedEntry::Excluded { abs_path })
2233                }
2234            }
2235        })
2236    }
2237}
2238
2239impl Snapshot {
2240    pub fn new(id: u64, root_name: String, abs_path: Arc<Path>) -> Self {
2241        Snapshot {
2242            id: WorktreeId::from_usize(id as usize),
2243            abs_path: abs_path.into(),
2244            root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
2245            root_name,
2246            always_included_entries: Default::default(),
2247            entries_by_path: Default::default(),
2248            entries_by_id: Default::default(),
2249            repositories: Default::default(),
2250            scan_id: 1,
2251            completed_scan_id: 0,
2252        }
2253    }
2254
2255    pub fn id(&self) -> WorktreeId {
2256        self.id
2257    }
2258
2259    // TODO:
2260    // Consider the following:
2261    //
2262    // ```rust
2263    // let abs_path: Arc<Path> = snapshot.abs_path(); // e.g. "C:\Users\user\Desktop\project"
2264    // let some_non_trimmed_path = Path::new("\\\\?\\C:\\Users\\user\\Desktop\\project\\main.rs");
2265    // The caller performs some actions here:
2266    // some_non_trimmed_path.strip_prefix(abs_path);  // This fails
2267    // some_non_trimmed_path.starts_with(abs_path);   // This fails too
2268    // ```
2269    //
2270    // This is definitely a bug, but it's not clear if we should handle it here or not.
2271    pub fn abs_path(&self) -> &Arc<Path> {
2272        self.abs_path.as_path()
2273    }
2274
2275    fn build_initial_update(&self, project_id: u64, worktree_id: u64) -> proto::UpdateWorktree {
2276        let mut updated_entries = self
2277            .entries_by_path
2278            .iter()
2279            .map(proto::Entry::from)
2280            .collect::<Vec<_>>();
2281        updated_entries.sort_unstable_by_key(|e| e.id);
2282
2283        let mut updated_repositories = self
2284            .repositories
2285            .iter()
2286            .map(|repository| repository.initial_update())
2287            .collect::<Vec<_>>();
2288        updated_repositories.sort_unstable_by_key(|e| e.work_directory_id);
2289
2290        proto::UpdateWorktree {
2291            project_id,
2292            worktree_id,
2293            abs_path: self.abs_path().to_string_lossy().into(),
2294            root_name: self.root_name().to_string(),
2295            updated_entries,
2296            removed_entries: Vec::new(),
2297            scan_id: self.scan_id as u64,
2298            is_last_update: self.completed_scan_id == self.scan_id,
2299            updated_repositories,
2300            removed_repositories: Vec::new(),
2301        }
2302    }
2303
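        /// Convert a worktree-relative path into an absolute path, rejecting any path
        /// that contains non-normal components such as `..` or a root prefix. An
        /// empty path (one with no file name) resolves to the worktree root itself.
        /// Illustrative behavior (hypothetical paths, not a doc test):
        ///
        /// ```ignore
        /// snapshot.absolutize(Path::new("src/main.rs")); // Ok(<worktree root>/src/main.rs)
        /// snapshot.absolutize(Path::new("../secret"));   // Err("invalid path")
        /// ```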
2304    pub fn absolutize(&self, path: &Path) -> Result<PathBuf> {
2305        if path
2306            .components()
2307            .any(|component| !matches!(component, std::path::Component::Normal(_)))
2308        {
2309            return Err(anyhow!("invalid path"));
2310        }
2311        if path.file_name().is_some() {
2312            Ok(self.abs_path.as_path().join(path))
2313        } else {
2314            Ok(self.abs_path.as_path().to_path_buf())
2315        }
2316    }
2317
2318    pub fn contains_entry(&self, entry_id: ProjectEntryId) -> bool {
2319        self.entries_by_id.get(&entry_id, &()).is_some()
2320    }
2321
2322    fn insert_entry(
2323        &mut self,
2324        entry: proto::Entry,
2325        always_included_paths: &PathMatcher,
2326    ) -> Result<Entry> {
2327        let entry = Entry::try_from((&self.root_char_bag, always_included_paths, entry))?;
2328        let old_entry = self.entries_by_id.insert_or_replace(
2329            PathEntry {
2330                id: entry.id,
2331                path: entry.path.clone(),
2332                is_ignored: entry.is_ignored,
2333                scan_id: 0,
2334            },
2335            &(),
2336        );
2337        if let Some(old_entry) = old_entry {
2338            self.entries_by_path.remove(&PathKey(old_entry.path), &());
2339        }
2340        self.entries_by_path.insert_or_replace(entry.clone(), &());
2341        Ok(entry)
2342    }
2343
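        /// Remove the entry with `entry_id` and every entry beneath its path from the
        /// snapshot, splicing `entries_by_path` around the removed subtree, and return
        /// the removed entry's path.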
2344    fn delete_entry(&mut self, entry_id: ProjectEntryId) -> Option<Arc<Path>> {
2345        let removed_entry = self.entries_by_id.remove(&entry_id, &())?;
2346        self.entries_by_path = {
2347            let mut cursor = self.entries_by_path.cursor::<TraversalProgress>(&());
2348            let mut new_entries_by_path =
2349                cursor.slice(&TraversalTarget::path(&removed_entry.path), Bias::Left, &());
2350            while let Some(entry) = cursor.item() {
2351                if entry.path.starts_with(&removed_entry.path) {
2352                    self.entries_by_id.remove(&entry.id, &());
2353                    cursor.next(&());
2354                } else {
2355                    break;
2356                }
2357            }
2358            new_entries_by_path.append(cursor.suffix(&()), &());
2359            new_entries_by_path
2360        };
2361
2362        Some(removed_entry.path)
2363    }
2364
2365    pub fn status_for_file(&self, path: impl AsRef<Path>) -> Option<FileStatus> {
2366        let path = path.as_ref();
2367        self.repository_for_path(path).and_then(|repo| {
2368            let repo_path = repo.relativize(path).unwrap();
2369            repo.statuses_by_path
2370                .get(&PathKey(repo_path.0), &())
2371                .map(|entry| entry.status)
2372        })
2373    }
2374
2375    fn update_abs_path(&mut self, abs_path: SanitizedPath, root_name: String) {
2376        self.abs_path = abs_path;
2377        if root_name != self.root_name {
2378            self.root_char_bag = root_name.chars().map(|c| c.to_ascii_lowercase()).collect();
2379            self.root_name = root_name;
2380        }
2381    }
2382
2383    pub(crate) fn apply_remote_update(
2384        &mut self,
2385        mut update: proto::UpdateWorktree,
2386        always_included_paths: &PathMatcher,
2387    ) -> Result<()> {
2388        log::trace!(
2389            "applying remote worktree update. {} entries updated, {} removed",
2390            update.updated_entries.len(),
2391            update.removed_entries.len()
2392        );
2393        self.update_abs_path(
2394            SanitizedPath::from(PathBuf::from(update.abs_path)),
2395            update.root_name,
2396        );
2397
2398        let mut entries_by_path_edits = Vec::new();
2399        let mut entries_by_id_edits = Vec::new();
2400
2401        for entry_id in update.removed_entries {
2402            let entry_id = ProjectEntryId::from_proto(entry_id);
2403            entries_by_id_edits.push(Edit::Remove(entry_id));
2404            if let Some(entry) = self.entry_for_id(entry_id) {
2405                entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone())));
2406            }
2407        }
2408
2409        for entry in update.updated_entries {
2410            let entry = Entry::try_from((&self.root_char_bag, always_included_paths, entry))?;
2411            if let Some(PathEntry { path, .. }) = self.entries_by_id.get(&entry.id, &()) {
2412                entries_by_path_edits.push(Edit::Remove(PathKey(path.clone())));
2413            }
2414            if let Some(old_entry) = self.entries_by_path.get(&PathKey(entry.path.clone()), &()) {
2415                if old_entry.id != entry.id {
2416                    entries_by_id_edits.push(Edit::Remove(old_entry.id));
2417                }
2418            }
2419            entries_by_id_edits.push(Edit::Insert(PathEntry {
2420                id: entry.id,
2421                path: entry.path.clone(),
2422                is_ignored: entry.is_ignored,
2423                scan_id: 0,
2424            }));
2425            entries_by_path_edits.push(Edit::Insert(entry));
2426        }
2427
2428        self.entries_by_path.edit(entries_by_path_edits, &());
2429        self.entries_by_id.edit(entries_by_id_edits, &());
2430
2431        update.removed_repositories.sort_unstable();
2432        self.repositories.retain(&(), |entry: &RepositoryEntry| {
2433            update
2434                .removed_repositories
2435                .binary_search(&entry.work_directory_id.to_proto())
2436                .is_err()
2437        });
2438
2439        for repository in update.updated_repositories {
2440            let work_directory_id = ProjectEntryId::from_proto(repository.work_directory_id);
2441            if let Some(work_dir_entry) = self.entry_for_id(work_directory_id) {
2442                if self
2443                    .repositories
2444                    .contains(&PathKey(work_dir_entry.path.clone()), &())
2445                {
2446                    let edits = repository
2447                        .removed_statuses
2448                        .into_iter()
2449                        .map(|path| Edit::Remove(PathKey(Path::new(&path).into())))
2450                        .chain(repository.updated_statuses.into_iter().filter_map(
2451                            |updated_status| {
2452                                Some(Edit::Insert(updated_status.try_into().log_err()?))
2453                            },
2454                        ))
2455                        .collect::<Vec<_>>();
2456
2457                    self.repositories
2458                        .update(&PathKey(work_dir_entry.path.clone()), &(), |repo| {
2459                            repo.branch = repository.branch.map(Into::into);
2460                            repo.statuses_by_path.edit(edits, &());
2461                        });
2462                } else {
2463                    let statuses = SumTree::from_iter(
2464                        repository
2465                            .updated_statuses
2466                            .into_iter()
2467                            .filter_map(|updated_status| updated_status.try_into().log_err()),
2468                        &(),
2469                    );
2470
2471                    self.repositories.insert_or_replace(
2472                        RepositoryEntry {
2473                            work_directory_id,
2474                            work_directory: WorkDirectory {
2475                                path: work_dir_entry.path.clone(),
2476                                // When syncing repository entries from a peer, we don't need
2477                                // the location_in_repo field, since git operations don't happen locally
2478                                // anyway.
2479                                location_in_repo: None,
2480                            },
2481                            branch: repository.branch.map(Into::into),
2482                            statuses_by_path: statuses,
2483                        },
2484                        &(),
2485                    );
2486                }
2487            } else {
2488                log::error!(
2489                    "no work directory entry for repository {:?}",
2490                    repository.work_directory_id
2491                )
2492            }
2493        }
2494
2495        self.scan_id = update.scan_id as usize;
2496        if update.is_last_update {
2497            self.completed_scan_id = update.scan_id as usize;
2498        }
2499
2500        Ok(())
2501    }
2502
2503    pub fn entry_count(&self) -> usize {
2504        self.entries_by_path.summary().count
2505    }
2506
2507    pub fn visible_entry_count(&self) -> usize {
2508        self.entries_by_path.summary().non_ignored_count
2509    }
2510
2511    pub fn dir_count(&self) -> usize {
2512        let summary = self.entries_by_path.summary();
2513        summary.count - summary.file_count
2514    }
2515
2516    pub fn visible_dir_count(&self) -> usize {
2517        let summary = self.entries_by_path.summary();
2518        summary.non_ignored_count - summary.non_ignored_file_count
2519    }
2520
2521    pub fn file_count(&self) -> usize {
2522        self.entries_by_path.summary().file_count
2523    }
2524
2525    pub fn visible_file_count(&self) -> usize {
2526        self.entries_by_path.summary().non_ignored_file_count
2527    }
2528
2529    fn traverse_from_offset(
2530        &self,
2531        include_files: bool,
2532        include_dirs: bool,
2533        include_ignored: bool,
2534        start_offset: usize,
2535    ) -> Traversal {
2536        let mut cursor = self.entries_by_path.cursor(&());
2537        cursor.seek(
2538            &TraversalTarget::Count {
2539                count: start_offset,
2540                include_files,
2541                include_dirs,
2542                include_ignored,
2543            },
2544            Bias::Right,
2545            &(),
2546        );
2547        Traversal {
2548            snapshot: self,
2549            cursor,
2550            include_files,
2551            include_dirs,
2552            include_ignored,
2553        }
2554    }
2555
2556    pub fn traverse_from_path(
2557        &self,
2558        include_files: bool,
2559        include_dirs: bool,
2560        include_ignored: bool,
2561        path: &Path,
2562    ) -> Traversal {
2563        Traversal::new(self, include_files, include_dirs, include_ignored, path)
2564    }
2565
2566    pub fn files(&self, include_ignored: bool, start: usize) -> Traversal {
2567        self.traverse_from_offset(true, false, include_ignored, start)
2568    }
2569
2570    pub fn directories(&self, include_ignored: bool, start: usize) -> Traversal {
2571        self.traverse_from_offset(false, true, include_ignored, start)
2572    }
2573
2574    pub fn entries(&self, include_ignored: bool, start: usize) -> Traversal {
2575        self.traverse_from_offset(true, true, include_ignored, start)
2576    }
2577
2578    #[cfg(any(feature = "test-support", test))]
2579    pub fn git_status(&self, work_dir: &Path) -> Option<Vec<StatusEntry>> {
2580        self.repositories
2581            .get(&PathKey(work_dir.into()), &())
2582            .map(|repo| repo.status().collect())
2583    }
2584
2585    pub fn repositories(&self) -> &SumTree<RepositoryEntry> {
2586        &self.repositories
2587    }
2588
2589    pub fn repositories_with_abs_paths(
2590        &self,
2591    ) -> impl '_ + Iterator<Item = (&RepositoryEntry, PathBuf)> {
2592        let base = self.abs_path();
2593        self.repositories.iter().map(|repo| {
2594            let path = repo.work_directory.location_in_repo.as_deref();
2595            let path = path.unwrap_or(repo.work_directory.as_ref());
2596            (repo, base.join(path))
2597        })
2598    }
2599
2600    /// Get the repository whose work directory corresponds to the given path.
2601    pub(crate) fn repository(&self, work_directory: PathKey) -> Option<RepositoryEntry> {
2602        self.repositories.get(&work_directory, &()).cloned()
2603    }
2604
2605    /// Get the repository whose work directory contains the given path.
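        ///
        /// For nested repositories the deepest containing work directory wins: with
        /// work directories at `""` and `vendor/lib` (illustrative paths), a query
        /// for `vendor/lib/src/a.rs` returns the `vendor/lib` repository.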
2606    pub fn repository_for_path(&self, path: &Path) -> Option<&RepositoryEntry> {
2607        let mut cursor = self.repositories.cursor::<PathProgress>(&());
2608        let mut repository = None;
2609
2610        // Git repositories may contain other git repositories. Because entries are
2611        // sorted lexicographically by path, deeper repositories come after the ones
2612        // that contain them, so we keep advancing through every matching repository
2613        // to find the deepest one that could contain this path.
2614        while cursor.seek_forward(&PathTarget::Contains(path), Bias::Left, &())
2615            && cursor.item().is_some()
2616        {
2617            repository = cursor.item();
2618            cursor.next(&());
2619        }
2620
2621        repository
2622    }
2623
2624    /// Given an ordered iterator of entries, returns an iterator of those entries,
2625    /// along with their containing git repository.
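        ///
        /// Both the entries and the repositories are visited in path order, so a
        /// small stack of "repositories containing the current entry" can be
        /// maintained in a single pass without re-scanning the repository set.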
2626    pub fn entries_with_repositories<'a>(
2627        &'a self,
2628        entries: impl 'a + Iterator<Item = &'a Entry>,
2629    ) -> impl 'a + Iterator<Item = (&'a Entry, Option<&'a RepositoryEntry>)> {
2630        let mut containing_repos = Vec::<&RepositoryEntry>::new();
2631        let mut repositories = self.repositories().iter().peekable();
2632        entries.map(move |entry| {
2633            while let Some(repository) = containing_repos.last() {
2634                if repository.directory_contains(&entry.path) {
2635                    break;
2636                } else {
2637                    containing_repos.pop();
2638                }
2639            }
2640            while let Some(repository) = repositories.peek() {
2641                if repository.directory_contains(&entry.path) {
2642                    containing_repos.push(repositories.next().unwrap());
2643                } else {
2644                    break;
2645                }
2646            }
2647            let repo = containing_repos.last().copied();
2648            (entry, repo)
2649        })
2650    }
2651
2652    pub fn paths(&self) -> impl Iterator<Item = &Arc<Path>> {
2653        let empty_path = Path::new("");
2654        self.entries_by_path
2655            .cursor::<()>(&())
2656            .filter(move |entry| entry.path.as_ref() != empty_path)
2657            .map(|entry| &entry.path)
2658    }
2659
2660    pub fn child_entries<'a>(&'a self, parent_path: &'a Path) -> ChildEntriesIter<'a> {
2661        let mut cursor = self.entries_by_path.cursor(&());
2662        cursor.seek(&TraversalTarget::path(parent_path), Bias::Right, &());
2663        let traversal = Traversal {
2664            snapshot: self,
2665            cursor,
2666            include_files: true,
2667            include_dirs: true,
2668            include_ignored: true,
2669        };
2670        ChildEntriesIter {
2671            traversal,
2672            parent_path,
2673        }
2674    }
2675
2676    pub fn root_entry(&self) -> Option<&Entry> {
2677        self.entry_for_path("")
2678    }
2679
2680    pub fn root_dir(&self) -> Option<Arc<Path>> {
2681        self.root_entry()
2682            .filter(|entry| entry.is_dir())
2683            .map(|_| self.abs_path().clone())
2684    }
2685
2686    pub fn root_name(&self) -> &str {
2687        &self.root_name
2688    }
2689
2690    pub fn root_git_entry(&self) -> Option<RepositoryEntry> {
2691        self.repositories
2692            .get(&PathKey(Path::new("").into()), &())
2693            .map(|entry| entry.to_owned())
2694    }
2695
2696    pub fn git_entry(&self, work_directory_path: Arc<Path>) -> Option<RepositoryEntry> {
2697        self.repositories
2698            .get(&PathKey(work_directory_path), &())
2699            .map(|entry| entry.to_owned())
2700    }
2701
2702    pub fn git_entries(&self) -> impl Iterator<Item = &RepositoryEntry> {
2703        self.repositories.iter()
2704    }
2705
2706    pub fn scan_id(&self) -> usize {
2707        self.scan_id
2708    }
2709
2710    pub fn entry_for_path(&self, path: impl AsRef<Path>) -> Option<&Entry> {
2711        let path = path.as_ref();
2712        self.traverse_from_path(true, true, true, path)
2713            .entry()
2714            .and_then(|entry| {
2715                if entry.path.as_ref() == path {
2716                    Some(entry)
2717                } else {
2718                    None
2719                }
2720            })
2721    }
2722
2723    pub fn entry_for_id(&self, id: ProjectEntryId) -> Option<&Entry> {
2724        let entry = self.entries_by_id.get(&id, &())?;
2725        self.entry_for_path(&entry.path)
2726    }
2727
2728    pub fn inode_for_path(&self, path: impl AsRef<Path>) -> Option<u64> {
2729        self.entry_for_path(path.as_ref()).map(|e| e.inode)
2730    }
2731}
2732
2733impl LocalSnapshot {
2734    pub fn local_repo_for_path(&self, path: &Path) -> Option<&LocalRepositoryEntry> {
2735        let repository_entry = self.repository_for_path(path)?;
2736        let work_directory_id = repository_entry.work_directory_id();
2737        self.git_repositories.get(&work_directory_id)
2738    }
2739
2740    fn build_update(
2741        &self,
2742        project_id: u64,
2743        worktree_id: u64,
2744        entry_changes: UpdatedEntriesSet,
2745        repo_changes: UpdatedGitRepositoriesSet,
2746    ) -> proto::UpdateWorktree {
2747        let mut updated_entries = Vec::new();
2748        let mut removed_entries = Vec::new();
2749        let mut updated_repositories = Vec::new();
2750        let mut removed_repositories = Vec::new();
2751
2752        for (_, entry_id, path_change) in entry_changes.iter() {
2753            if let PathChange::Removed = path_change {
2754                removed_entries.push(entry_id.0 as u64);
2755            } else if let Some(entry) = self.entry_for_id(*entry_id) {
2756                updated_entries.push(proto::Entry::from(entry));
2757            }
2758        }
2759
2760        for (work_dir_path, change) in repo_changes.iter() {
2761            let new_repo = self.repositories.get(&PathKey(work_dir_path.clone()), &());
2762            match (&change.old_repository, new_repo) {
2763                (Some(old_repo), Some(new_repo)) => {
2764                    updated_repositories.push(new_repo.build_update(old_repo));
2765                }
2766                (None, Some(new_repo)) => {
2767                    updated_repositories.push(new_repo.initial_update());
2768                }
2769                (Some(old_repo), None) => {
2770                    removed_repositories.push(old_repo.work_directory_id.to_proto());
2771                }
2772                _ => {}
2773            }
2774        }
2775
2776        removed_entries.sort_unstable();
2777        updated_entries.sort_unstable_by_key(|e| e.id);
2778        removed_repositories.sort_unstable();
2779        updated_repositories.sort_unstable_by_key(|e| e.work_directory_id);
2780
2781        // TODO - optimize, knowing that `removed_entries` is sorted.
2782        removed_entries.retain(|id| updated_entries.binary_search_by_key(id, |e| e.id).is_err());
2783
2784        proto::UpdateWorktree {
2785            project_id,
2786            worktree_id,
2787            abs_path: self.abs_path().to_string_lossy().into(),
2788            root_name: self.root_name().to_string(),
2789            updated_entries,
2790            removed_entries,
2791            scan_id: self.scan_id as u64,
2792            is_last_update: self.completed_scan_id == self.scan_id,
2793            updated_repositories,
2794            removed_repositories,
2795        }
2796    }
2797
2798    fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs) -> Entry {
2799        if entry.is_file() && entry.path.file_name() == Some(&GITIGNORE) {
2800            let abs_path = self.abs_path.as_path().join(&entry.path);
2801            match smol::block_on(build_gitignore(&abs_path, fs)) {
2802                Ok(ignore) => {
2803                    self.ignores_by_parent_abs_path
2804                        .insert(abs_path.parent().unwrap().into(), (Arc::new(ignore), true));
2805                }
2806                Err(error) => {
2807                    log::error!(
2808                        "error loading .gitignore file {:?} - {:?}",
2809                        &entry.path,
2810                        error
2811                    );
2812                }
2813            }
2814        }
2815
2816        if entry.kind == EntryKind::PendingDir {
2817            if let Some(existing_entry) =
2818                self.entries_by_path.get(&PathKey(entry.path.clone()), &())
2819            {
2820                entry.kind = existing_entry.kind;
2821            }
2822        }
2823
2824        let scan_id = self.scan_id;
2825        let removed = self.entries_by_path.insert_or_replace(entry.clone(), &());
2826        if let Some(removed) = removed {
2827            if removed.id != entry.id {
2828                self.entries_by_id.remove(&removed.id, &());
2829            }
2830        }
2831        self.entries_by_id.insert_or_replace(
2832            PathEntry {
2833                id: entry.id,
2834                path: entry.path.clone(),
2835                is_ignored: entry.is_ignored,
2836                scan_id,
2837            },
2838            &(),
2839        );
2840
2841        entry
2842    }
2843
2844    fn ancestor_inodes_for_path(&self, path: &Path) -> TreeSet<u64> {
2845        let mut inodes = TreeSet::default();
2846        for ancestor in path.ancestors().skip(1) {
2847            if let Some(entry) = self.entry_for_path(ancestor) {
2848                inodes.insert(entry.inode);
2849            }
2850        }
2851        inodes
2852    }
2853
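        /// Build the stack of `.gitignore` rules that apply to `abs_path`, walking up
        /// its ancestors until a repository root (a directory containing `.git`) is
        /// reached, then layering the collected ignore files from the outermost
        /// ancestor inward. If any ancestor is itself ignored, the entire subtree is
        /// treated as ignored.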
2854    fn ignore_stack_for_abs_path(&self, abs_path: &Path, is_dir: bool) -> Arc<IgnoreStack> {
2855        let mut new_ignores = Vec::new();
2856        for (index, ancestor) in abs_path.ancestors().enumerate() {
2857            if index > 0 {
2858                if let Some((ignore, _)) = self.ignores_by_parent_abs_path.get(ancestor) {
2859                    new_ignores.push((ancestor, Some(ignore.clone())));
2860                } else {
2861                    new_ignores.push((ancestor, None));
2862                }
2863            }
2864            if ancestor.join(*DOT_GIT).exists() {
2865                break;
2866            }
2867        }
2868
2869        let mut ignore_stack = IgnoreStack::none();
2870        for (parent_abs_path, ignore) in new_ignores.into_iter().rev() {
2871            if ignore_stack.is_abs_path_ignored(parent_abs_path, true) {
2872                ignore_stack = IgnoreStack::all();
2873                break;
2874            } else if let Some(ignore) = ignore {
2875                ignore_stack = ignore_stack.append(parent_abs_path.into(), ignore);
2876            }
2877        }
2878
2879        if ignore_stack.is_abs_path_ignored(abs_path, is_dir) {
2880            ignore_stack = IgnoreStack::all();
2881        }
2882
2883        ignore_stack
2884    }
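        // Rough sketch of the assembly above, using a hypothetical layout: for
        // `/repo/crates/foo`, the loop walks the path's ancestors outward (`/repo/crates/foo`,
        // `/repo/crates`, `/repo`, ...), collecting any loaded `.gitignore` for each strict
        // ancestor and stopping at the first directory containing `.git`. The collected
        // ignores are then applied from the repository root down toward the path, and if an
        // ancestor directory is itself ignored, the whole subtree is ignored (`IgnoreStack::all`).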
2885
2886    #[cfg(test)]
2887    pub(crate) fn expanded_entries(&self) -> impl Iterator<Item = &Entry> {
2888        self.entries_by_path
2889            .cursor::<()>(&())
2890            .filter(|entry| entry.kind == EntryKind::Dir && (entry.is_external || entry.is_ignored))
2891    }
2892
2893    #[cfg(test)]
2894    pub fn check_invariants(&self, git_state: bool) {
2895        use pretty_assertions::assert_eq;
2896
2897        assert_eq!(
2898            self.entries_by_path
2899                .cursor::<()>(&())
2900                .map(|e| (&e.path, e.id))
2901                .collect::<Vec<_>>(),
2902            self.entries_by_id
2903                .cursor::<()>(&())
2904                .map(|e| (&e.path, e.id))
2905                .collect::<collections::BTreeSet<_>>()
2906                .into_iter()
2907                .collect::<Vec<_>>(),
2908            "entries_by_path and entries_by_id are inconsistent"
2909        );
2910
2911        let mut files = self.files(true, 0);
2912        let mut visible_files = self.files(false, 0);
2913        for entry in self.entries_by_path.cursor::<()>(&()) {
2914            if entry.is_file() {
2915                assert_eq!(files.next().unwrap().inode, entry.inode);
2916                if (!entry.is_ignored && !entry.is_external) || entry.is_always_included {
2917                    assert_eq!(visible_files.next().unwrap().inode, entry.inode);
2918                }
2919            }
2920        }
2921
2922        assert!(files.next().is_none());
2923        assert!(visible_files.next().is_none());
2924
2925        let mut bfs_paths = Vec::new();
2926        let mut stack = self
2927            .root_entry()
2928            .map(|e| e.path.as_ref())
2929            .into_iter()
2930            .collect::<Vec<_>>();
2931        while let Some(path) = stack.pop() {
2932            bfs_paths.push(path);
2933            let ix = stack.len();
2934            for child_entry in self.child_entries(path) {
2935                stack.insert(ix, &child_entry.path);
2936            }
2937        }
2938
2939        let dfs_paths_via_iter = self
2940            .entries_by_path
2941            .cursor::<()>(&())
2942            .map(|e| e.path.as_ref())
2943            .collect::<Vec<_>>();
2944        assert_eq!(bfs_paths, dfs_paths_via_iter);
2945
2946        let dfs_paths_via_traversal = self
2947            .entries(true, 0)
2948            .map(|e| e.path.as_ref())
2949            .collect::<Vec<_>>();
2950        assert_eq!(dfs_paths_via_traversal, dfs_paths_via_iter);
2951
2952        if git_state {
2953            for ignore_parent_abs_path in self.ignores_by_parent_abs_path.keys() {
2954                let ignore_parent_path = ignore_parent_abs_path
2955                    .strip_prefix(self.abs_path.as_path())
2956                    .unwrap();
2957                assert!(self.entry_for_path(ignore_parent_path).is_some());
2958                assert!(self
2959                    .entry_for_path(ignore_parent_path.join(*GITIGNORE))
2960                    .is_some());
2961            }
2962        }
2963    }
2964
2965    #[cfg(test)]
2966    fn check_git_invariants(&self) {
2967        let dotgit_paths = self
2968            .git_repositories
2969            .iter()
2970            .map(|repo| repo.1.dot_git_dir_abs_path.clone())
2971            .collect::<HashSet<_>>();
2972        let work_dir_paths = self
2973            .repositories
2974            .iter()
2975            .map(|repo| repo.work_directory.path.clone())
2976            .collect::<HashSet<_>>();
2977        assert_eq!(dotgit_paths.len(), work_dir_paths.len());
2978        assert_eq!(self.repositories.iter().count(), work_dir_paths.len());
2979        assert_eq!(self.git_repositories.iter().count(), work_dir_paths.len());
2980        for entry in self.repositories.iter() {
2981            self.git_repositories.get(&entry.work_directory_id).unwrap();
2982        }
2983    }
2984
2985    #[cfg(test)]
2986    pub fn entries_without_ids(&self, include_ignored: bool) -> Vec<(&Path, u64, bool)> {
2987        let mut paths = Vec::new();
2988        for entry in self.entries_by_path.cursor::<()>(&()) {
2989            if include_ignored || !entry.is_ignored {
2990                paths.push((entry.path.as_ref(), entry.inode, entry.is_ignored));
2991            }
2992        }
2993        paths.sort_by(|a, b| a.0.cmp(b.0));
2994        paths
2995    }
2996}
2997
2998impl BackgroundScannerState {
2999    fn should_scan_directory(&self, entry: &Entry) -> bool {
3000        (!entry.is_external && (!entry.is_ignored || entry.is_always_included))
3001            || entry.path.file_name() == Some(*DOT_GIT)
3002            || entry.path.file_name() == Some(local_settings_folder_relative_path().as_os_str())
3003            || self.scanned_dirs.contains(&entry.id) // If we've ever scanned it, keep scanning
3004            || self
3005                .paths_to_scan
3006                .iter()
3007                .any(|p| p.starts_with(&entry.path))
3008            || self
3009                .path_prefixes_to_scan
3010                .iter()
3011                .any(|p| entry.path.starts_with(p))
3012    }
3013
3014    fn enqueue_scan_dir(&self, abs_path: Arc<Path>, entry: &Entry, scan_job_tx: &Sender<ScanJob>) {
3015        let path = entry.path.clone();
3016        let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true);
3017        let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path);
3018
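            // If this directory's inode already appears among its ancestors' inodes, descending
            // into it again would presumably recurse forever (e.g. via a symlink cycle), so it is
            // only enqueued when that is not the case.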
3019        if !ancestor_inodes.contains(&entry.inode) {
3020            ancestor_inodes.insert(entry.inode);
3021            scan_job_tx
3022                .try_send(ScanJob {
3023                    abs_path,
3024                    path,
3025                    ignore_stack,
3026                    scan_queue: scan_job_tx.clone(),
3027                    ancestor_inodes,
3028                    is_external: entry.is_external,
3029                })
3030                .unwrap();
3031        }
3032    }
3033
3034    fn reuse_entry_id(&mut self, entry: &mut Entry) {
3035        if let Some(mtime) = entry.mtime {
3036            // If an entry with the same inode was removed from the worktree during this scan,
3037            // then it *might* represent the same file or directory. But the OS might also have
3038            // re-used the inode for a completely different file or directory.
3039            //
3040            // Conditionally reuse the old entry's id:
3041            // * if the mtime is the same, the file was probably renamed.
3042            // * if the path is the same, the file may just have been updated.
3043            if let Some(removed_entry) = self.removed_entries.remove(&entry.inode) {
3044                if removed_entry.mtime == Some(mtime) || removed_entry.path == entry.path {
3045                    entry.id = removed_entry.id;
3046                }
3047            } else if let Some(existing_entry) = self.snapshot.entry_for_path(&entry.path) {
3048                entry.id = existing_entry.id;
3049            }
3050        }
3051    }
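        // For example (hypothetical paths): if `src/a.rs` is renamed to `src/b.rs` within a
        // single scan, the new entry has a different path but the same inode and mtime, so the
        // removed entry's id can be reused and the change surfaces as one entry moving rather
        // than as an unrelated delete plus create.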
3052
3053    fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs, watcher: &dyn Watcher) -> Entry {
3054        self.reuse_entry_id(&mut entry);
3055        let entry = self.snapshot.insert_entry(entry, fs);
3056        if entry.path.file_name() == Some(&DOT_GIT) {
3057            self.insert_git_repository(entry.path.clone(), fs, watcher);
3058        }
3059
3060        #[cfg(test)]
3061        self.snapshot.check_invariants(false);
3062
3063        entry
3064    }
3065
3066    fn populate_dir(
3067        &mut self,
3068        parent_path: &Arc<Path>,
3069        entries: impl IntoIterator<Item = Entry>,
3070        ignore: Option<Arc<Gitignore>>,
3071    ) {
3072        let mut parent_entry = if let Some(parent_entry) = self
3073            .snapshot
3074            .entries_by_path
3075            .get(&PathKey(parent_path.clone()), &())
3076        {
3077            parent_entry.clone()
3078        } else {
3079            log::warn!(
3080                "populating a directory {:?} that has been removed",
3081                parent_path
3082            );
3083            return;
3084        };
3085
3086        match parent_entry.kind {
3087            EntryKind::PendingDir | EntryKind::UnloadedDir => parent_entry.kind = EntryKind::Dir,
3088            EntryKind::Dir => {}
3089            _ => return,
3090        }
3091
3092        if let Some(ignore) = ignore {
3093            let abs_parent_path = self.snapshot.abs_path.as_path().join(parent_path).into();
3094            self.snapshot
3095                .ignores_by_parent_abs_path
3096                .insert(abs_parent_path, (ignore, false));
3097        }
3098
3099        let parent_entry_id = parent_entry.id;
3100        self.scanned_dirs.insert(parent_entry_id);
3101        let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)];
3102        let mut entries_by_id_edits = Vec::new();
3103
3104        for entry in entries {
3105            entries_by_id_edits.push(Edit::Insert(PathEntry {
3106                id: entry.id,
3107                path: entry.path.clone(),
3108                is_ignored: entry.is_ignored,
3109                scan_id: self.snapshot.scan_id,
3110            }));
3111            entries_by_path_edits.push(Edit::Insert(entry));
3112        }
3113
3114        self.snapshot
3115            .entries_by_path
3116            .edit(entries_by_path_edits, &());
3117        self.snapshot.entries_by_id.edit(entries_by_id_edits, &());
3118
3119        if let Err(ix) = self.changed_paths.binary_search(parent_path) {
3120            self.changed_paths.insert(ix, parent_path.clone());
3121        }
3122
3123        #[cfg(test)]
3124        self.snapshot.check_invariants(false);
3125    }
3126
3127    fn remove_path(&mut self, path: &Path) {
3128        let mut new_entries;
3129        let removed_entries;
3130        {
3131            let mut cursor = self
3132                .snapshot
3133                .entries_by_path
3134                .cursor::<TraversalProgress>(&());
3135            new_entries = cursor.slice(&TraversalTarget::path(path), Bias::Left, &());
3136            removed_entries = cursor.slice(&TraversalTarget::successor(path), Bias::Left, &());
3137            new_entries.append(cursor.suffix(&()), &());
3138        }
3139        self.snapshot.entries_by_path = new_entries;
3140
3141        let mut removed_ids = Vec::with_capacity(removed_entries.summary().count);
3142        for entry in removed_entries.cursor::<()>(&()) {
3143            match self.removed_entries.entry(entry.inode) {
3144                hash_map::Entry::Occupied(mut e) => {
3145                    let prev_removed_entry = e.get_mut();
3146                    if entry.id > prev_removed_entry.id {
3147                        *prev_removed_entry = entry.clone();
3148                    }
3149                }
3150                hash_map::Entry::Vacant(e) => {
3151                    e.insert(entry.clone());
3152                }
3153            }
3154
3155            if entry.path.file_name() == Some(&GITIGNORE) {
3156                let abs_parent_path = self
3157                    .snapshot
3158                    .abs_path
3159                    .as_path()
3160                    .join(entry.path.parent().unwrap());
3161                if let Some((_, needs_update)) = self
3162                    .snapshot
3163                    .ignores_by_parent_abs_path
3164                    .get_mut(abs_parent_path.as_path())
3165                {
3166                    *needs_update = true;
3167                }
3168            }
3169
3170            if let Err(ix) = removed_ids.binary_search(&entry.id) {
3171                removed_ids.insert(ix, entry.id);
3172            }
3173        }
3174
3175        self.snapshot.entries_by_id.edit(
3176            removed_ids.iter().map(|&id| Edit::Remove(id)).collect(),
3177            &(),
3178        );
3179        self.snapshot
3180            .git_repositories
3181            .retain(|id, _| removed_ids.binary_search(id).is_err());
3182        self.snapshot.repositories.retain(&(), |repository| {
3183            !repository.work_directory.starts_with(path)
3184        });
3185
3186        #[cfg(test)]
3187        self.snapshot.check_invariants(false);
3188    }
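        // Sketch of what the method above does for a removed directory such as a hypothetical
        // `src/`: one cursor slice drops `src/` and every descendant from `entries_by_path`; the
        // dropped entries are remembered in `removed_entries` keyed by inode so their ids can be
        // reused if the same files reappear during this scan; and if a `.gitignore` file itself
        // was removed, its parent directory's ignore data is flagged as needing a refresh.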
3189
3190    fn insert_git_repository(
3191        &mut self,
3192        dot_git_path: Arc<Path>,
3193        fs: &dyn Fs,
3194        watcher: &dyn Watcher,
3195    ) -> Option<LocalRepositoryEntry> {
3196        let work_dir_path: Arc<Path> = match dot_git_path.parent() {
3197            Some(parent_dir) => {
3198                // Guard against repositories nested inside another repository's .git metadata
3199                if parent_dir.iter().any(|component| component == *DOT_GIT) {
3200                    log::info!(
3201                        "not building git repository for nested `.git` directory, `.git` path in the worktree: {dot_git_path:?}"
3202                    );
3203                    return None;
3204                };
3205                log::info!(
3206                    "building git repository, `.git` path in the worktree: {dot_git_path:?}"
3207                );
3208
3209                parent_dir.into()
3210            }
3211            None => {
3212                // `dot_git_path.parent().is_none()` means the `.git` directory is the opened worktree itself;
3213                // no files inside that directory are tracked by git, so there is no need to build a repo around it.
3214                log::info!(
3215                    "not building git repository for the worktree itself, `.git` path in the worktree: {dot_git_path:?}"
3216                );
3217                return None;
3218            }
3219        };
3220
3221        self.insert_git_repository_for_path(work_dir_path, dot_git_path, None, fs, watcher)
3222    }
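        // For example (hypothetical paths): a `.git` at `crates/foo/.git` yields a work
        // directory of `crates/foo`, while something like `vendor/other/.git/modules/x/.git` is
        // skipped by the guard above because one of its parent components is already `.git`.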
3223
3224    fn insert_git_repository_for_path(
3225        &mut self,
3226        work_dir_path: Arc<Path>,
3227        dot_git_path: Arc<Path>,
3228        location_in_repo: Option<Arc<Path>>,
3229        fs: &dyn Fs,
3230        watcher: &dyn Watcher,
3231    ) -> Option<LocalRepositoryEntry> {
3232        let work_dir_id = self
3233            .snapshot
3234            .entry_for_path(work_dir_path.clone())
3235            .map(|entry| entry.id)?;
3236
3237        if self.snapshot.git_repositories.get(&work_dir_id).is_some() {
3238            return None;
3239        }
3240
3241        let dot_git_abs_path = self.snapshot.abs_path.as_path().join(&dot_git_path);
3242
3243        let t0 = Instant::now();
3244        let repository = fs.open_repo(&dot_git_abs_path)?;
3245
3246        let actual_repo_path = repository.dot_git_dir();
3247
3248        let actual_dot_git_dir_abs_path = smol::block_on(find_git_dir(&actual_repo_path, fs))?;
3249        watcher.add(&actual_repo_path).log_err()?;
3250
3251        let dot_git_worktree_abs_path = if actual_dot_git_dir_abs_path.as_ref() == dot_git_abs_path
3252        {
3253            None
3254        } else {
3255            // The two paths could be different because we opened a git worktree.
3256            // When that happens, the .git path in the worktree (`dot_git_abs_path`) is a file that
3257            // points to the worktree-specific subdirectory inside the actual .git directory.
3258            watcher.add(&dot_git_abs_path).log_err()?;
3259            Some(Arc::from(dot_git_abs_path))
3260        };
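            // Concretely, for a linked worktree created with `git worktree add`, the worktree's
            // `.git` entry is a small file containing a line like
            // `gitdir: /path/to/main/.git/worktrees/<name>`, which is why both that file and the
            // resolved `.git` directory need to be watched.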
3261
3262        log::trace!("constructed libgit2 repo in {:?}", t0.elapsed());
3263        let work_directory = WorkDirectory {
3264            path: work_dir_path.clone(),
3265            location_in_repo,
3266        };
3267
3268        if let Some(git_hosting_provider_registry) = self.git_hosting_provider_registry.clone() {
3269            git_hosting_providers::register_additional_providers(
3270                git_hosting_provider_registry,
3271                repository.clone(),
3272            );
3273        }
3274
3275        self.snapshot.repositories.insert_or_replace(
3276            RepositoryEntry {
3277                work_directory_id: work_dir_id,
3278                work_directory: work_directory.clone(),
3279                branch: repository.branch_name().map(Into::into),
3280                statuses_by_path: Default::default(),
3281            },
3282            &(),
3283        );
3284
3285        let local_repository = LocalRepositoryEntry {
3286            work_directory: work_directory.clone(),
3287            git_dir_scan_id: 0,
3288            status_scan_id: 0,
3289            repo_ptr: repository.clone(),
3290            dot_git_dir_abs_path: actual_dot_git_dir_abs_path,
3291            dot_git_worktree_abs_path,
3292        };
3293
3294        self.snapshot
3295            .git_repositories
3296            .insert(work_dir_id, local_repository.clone());
3297
3298        Some(local_repository)
3299    }
3300}
3301
3302async fn is_git_dir(path: &Path, fs: &dyn Fs) -> bool {
3303    if path.file_name() == Some(&*DOT_GIT) {
3304        return true;
3305    }
3306
3307    // If we're in a bare repository, we are not inside a `.git` folder. In a
3308    // bare repository, the root folder contains what would normally be in the
3309    // `.git` folder.
3310    let head_metadata = fs.metadata(&path.join("HEAD")).await;
3311    if !matches!(head_metadata, Ok(Some(_))) {
3312        return false;
3313    }
3314    let config_metadata = fs.metadata(&path.join("config")).await;
3315    matches!(config_metadata, Ok(Some(_)))
3316}
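    // For instance, a bare repository such as a hypothetical `/srv/project.git` keeps `HEAD`
    // and `config` directly in its root rather than under a `.git` subdirectory, which is
    // exactly what the two metadata probes above detect.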
3317
3318async fn find_git_dir(path: &Path, fs: &dyn Fs) -> Option<Arc<Path>> {
3319    for ancestor in path.ancestors() {
3320        if is_git_dir(ancestor, fs).await {
3321            return Some(Arc::from(ancestor));
3322        }
3323    }
3324    None
3325}
3326
3327async fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result<Gitignore> {
3328    let contents = fs.load(abs_path).await?;
3329    let parent = abs_path.parent().unwrap_or_else(|| Path::new("/"));
3330    let mut builder = GitignoreBuilder::new(parent);
3331    for line in contents.lines() {
3332        builder.add_line(Some(abs_path.into()), line)?;
3333    }
3334    Ok(builder.build()?)
3335}
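    // The resulting `Gitignore` is rooted at the file's parent directory; a caller can then,
    // presumably, test paths against it with something along the lines of
    // `ignore.matched(relative_path, is_dir)` from the `ignore` crate.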
3336
3337impl Deref for Worktree {
3338    type Target = Snapshot;
3339
3340    fn deref(&self) -> &Self::Target {
3341        match self {
3342            Worktree::Local(worktree) => &worktree.snapshot,
3343            Worktree::Remote(worktree) => &worktree.snapshot,
3344        }
3345    }
3346}
3347
3348impl Deref for LocalWorktree {
3349    type Target = LocalSnapshot;
3350
3351    fn deref(&self) -> &Self::Target {
3352        &self.snapshot
3353    }
3354}
3355
3356impl Deref for RemoteWorktree {
3357    type Target = Snapshot;
3358
3359    fn deref(&self) -> &Self::Target {
3360        &self.snapshot
3361    }
3362}
3363
3364impl fmt::Debug for LocalWorktree {
3365    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3366        self.snapshot.fmt(f)
3367    }
3368}
3369
3370impl fmt::Debug for Snapshot {
3371    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3372        struct EntriesById<'a>(&'a SumTree<PathEntry>);
3373        struct EntriesByPath<'a>(&'a SumTree<Entry>);
3374
3375        impl<'a> fmt::Debug for EntriesByPath<'a> {
3376            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3377                f.debug_map()
3378                    .entries(self.0.iter().map(|entry| (&entry.path, entry.id)))
3379                    .finish()
3380            }
3381        }
3382
3383        impl<'a> fmt::Debug for EntriesById<'a> {
3384            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3385                f.debug_list().entries(self.0.iter()).finish()
3386            }
3387        }
3388
3389        f.debug_struct("Snapshot")
3390            .field("id", &self.id)
3391            .field("root_name", &self.root_name)
3392            .field("entries_by_path", &EntriesByPath(&self.entries_by_path))
3393            .field("entries_by_id", &EntriesById(&self.entries_by_id))
3394            .finish()
3395    }
3396}
3397
3398#[derive(Clone, PartialEq)]
3399pub struct File {
3400    pub worktree: Entity<Worktree>,
3401    pub path: Arc<Path>,
3402    pub disk_state: DiskState,
3403    pub entry_id: Option<ProjectEntryId>,
3404    pub is_local: bool,
3405    pub is_private: bool,
3406}
3407
3408impl language::File for File {
3409    fn as_local(&self) -> Option<&dyn language::LocalFile> {
3410        if self.is_local {
3411            Some(self)
3412        } else {
3413            None
3414        }
3415    }
3416
3417    fn disk_state(&self) -> DiskState {
3418        self.disk_state
3419    }
3420
3421    fn path(&self) -> &Arc<Path> {
3422        &self.path
3423    }
3424
3425    fn full_path(&self, cx: &App) -> PathBuf {
3426        let mut full_path = PathBuf::new();
3427        let worktree = self.worktree.read(cx);
3428
3429        if worktree.is_visible() {
3430            full_path.push(worktree.root_name());
3431        } else {
3432            let path = worktree.abs_path();
3433
3434            if worktree.is_local() && path.starts_with(home_dir().as_path()) {
3435                full_path.push("~");
3436                full_path.push(path.strip_prefix(home_dir().as_path()).unwrap());
3437            } else {
3438                full_path.push(path)
3439            }
3440        }
3441
3442        if self.path.components().next().is_some() {
3443            full_path.push(&self.path);
3444        }
3445
3446        full_path
3447    }
3448
3449    /// Returns the last component of this handle's absolute path. If this handle refers to the root
3450    /// of its worktree, then this method will return the name of the worktree itself.
3451    fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr {
3452        self.path
3453            .file_name()
3454            .unwrap_or_else(|| OsStr::new(&self.worktree.read(cx).root_name))
3455    }
3456
3457    fn worktree_id(&self, cx: &App) -> WorktreeId {
3458        self.worktree.read(cx).id()
3459    }
3460
3461    fn as_any(&self) -> &dyn Any {
3462        self
3463    }
3464
3465    fn to_proto(&self, cx: &App) -> rpc::proto::File {
3466        rpc::proto::File {
3467            worktree_id: self.worktree.read(cx).id().to_proto(),
3468            entry_id: self.entry_id.map(|id| id.to_proto()),
3469            path: self.path.to_string_lossy().into(),
3470            mtime: self.disk_state.mtime().map(|time| time.into()),
3471            is_deleted: self.disk_state == DiskState::Deleted,
3472        }
3473    }
3474
3475    fn is_private(&self) -> bool {
3476        self.is_private
3477    }
3478}
3479
3480impl language::LocalFile for File {
3481    fn abs_path(&self, cx: &App) -> PathBuf {
3482        let worktree_path = &self.worktree.read(cx).as_local().unwrap().abs_path;
3483        if self.path.as_ref() == Path::new("") {
3484            worktree_path.as_path().to_path_buf()
3485        } else {
3486            worktree_path.as_path().join(&self.path)
3487        }
3488    }
3489
3490    fn load(&self, cx: &App) -> Task<Result<String>> {
3491        let worktree = self.worktree.read(cx).as_local().unwrap();
3492        let abs_path = worktree.absolutize(&self.path);
3493        let fs = worktree.fs.clone();
3494        cx.background_executor()
3495            .spawn(async move { fs.load(&abs_path?).await })
3496    }
3497
3498    fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>> {
3499        let worktree = self.worktree.read(cx).as_local().unwrap();
3500        let abs_path = worktree.absolutize(&self.path);
3501        let fs = worktree.fs.clone();
3502        cx.background_executor()
3503            .spawn(async move { fs.load_bytes(&abs_path?).await })
3504    }
3505}
3506
3507impl File {
3508    pub fn for_entry(entry: Entry, worktree: Entity<Worktree>) -> Arc<Self> {
3509        Arc::new(Self {
3510            worktree,
3511            path: entry.path.clone(),
3512            disk_state: if let Some(mtime) = entry.mtime {
3513                DiskState::Present { mtime }
3514            } else {
3515                DiskState::New
3516            },
3517            entry_id: Some(entry.id),
3518            is_local: true,
3519            is_private: entry.is_private,
3520        })
3521    }
3522
3523    pub fn from_proto(
3524        proto: rpc::proto::File,
3525        worktree: Entity<Worktree>,
3526        cx: &App,
3527    ) -> Result<Self> {
3528        let worktree_id = worktree
3529            .read(cx)
3530            .as_remote()
3531            .ok_or_else(|| anyhow!("not remote"))?
3532            .id();
3533
3534        if worktree_id.to_proto() != proto.worktree_id {
3535            return Err(anyhow!("worktree id does not match file"));
3536        }
3537
3538        let disk_state = if proto.is_deleted {
3539            DiskState::Deleted
3540        } else {
3541            if let Some(mtime) = proto.mtime.map(&Into::into) {
3542                DiskState::Present { mtime }
3543            } else {
3544                DiskState::New
3545            }
3546        };
3547
3548        Ok(Self {
3549            worktree,
3550            path: Path::new(&proto.path).into(),
3551            disk_state,
3552            entry_id: proto.entry_id.map(ProjectEntryId::from_proto),
3553            is_local: false,
3554            is_private: false,
3555        })
3556    }
3557
3558    pub fn from_dyn(file: Option<&Arc<dyn language::File>>) -> Option<&Self> {
3559        file.and_then(|f| f.as_any().downcast_ref())
3560    }
3561
3562    pub fn worktree_id(&self, cx: &App) -> WorktreeId {
3563        self.worktree.read(cx).id()
3564    }
3565
3566    pub fn project_entry_id(&self, _: &App) -> Option<ProjectEntryId> {
3567        match self.disk_state {
3568            DiskState::Deleted => None,
3569            _ => self.entry_id,
3570        }
3571    }
3572}
3573
3574#[derive(Clone, Debug, PartialEq, Eq)]
3575pub struct Entry {
3576    pub id: ProjectEntryId,
3577    pub kind: EntryKind,
3578    pub path: Arc<Path>,
3579    pub inode: u64,
3580    pub mtime: Option<MTime>,
3581
3582    pub canonical_path: Option<Box<Path>>,
3583    /// Whether this entry is ignored by Git.
3584    ///
3585    /// We only scan ignored entries once the directory is expanded and
3586    /// exclude them from searches.
3587    pub is_ignored: bool,
3588
3589    /// Whether this entry is always included in searches.
3590    ///
3591    /// This is used for entries that should be included in searches even
3592    /// if they are ignored by git. Overridden by `file_scan_exclusions`.
3593    pub is_always_included: bool,
3594
3595    /// Whether this entry's canonical path is outside of the worktree.
3596    /// This means the entry is only accessible from the worktree root via a
3597    /// symlink.
3598    ///
3599    /// We only scan entries outside of the worktree once the symlinked
3600    /// directory is expanded. External entries are treated like gitignored
3601    /// entries in that they are not included in searches.
3602    pub is_external: bool,
3603
3604    /// Whether this entry is considered to be a `.env` file.
3605    pub is_private: bool,
3606    /// The entry's size on disk, in bytes.
3607    pub size: u64,
3608    pub char_bag: CharBag,
3609    pub is_fifo: bool,
3610}
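    // Illustrative example of how the flags above combine: a file matching a `.gitignore` rule
    // is indexed with `is_ignored = true`, excluded from searches, and only scanned once its
    // parent directory is expanded, unless `is_always_included` marks it as searchable anyway.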
3611
3612#[derive(Clone, Copy, Debug, PartialEq, Eq)]
3613pub enum EntryKind {
3614    UnloadedDir,
3615    PendingDir,
3616    Dir,
3617    File,
3618}
3619
3620#[derive(Clone, Copy, Debug, PartialEq)]
3621pub enum PathChange {
3622    /// A filesystem entry was created.
3623    Added,
3624    /// A filesystem entry was removed.
3625    Removed,
3626    /// A filesystem entry was updated.
3627    Updated,
3628    /// A filesystem entry was either updated or added. We don't know
3629    /// whether or not it already existed, because the path had not
3630    /// been loaded before the event.
3631    AddedOrUpdated,
3632    /// A filesystem entry was found during the initial scan of the worktree.
3633    Loaded,
3634}
3635
3636#[derive(Debug)]
3637pub struct GitRepositoryChange {
3638    /// The previous state of the repository, if it already existed.
3639    pub old_repository: Option<RepositoryEntry>,
3640}
3641
3642pub type UpdatedEntriesSet = Arc<[(Arc<Path>, ProjectEntryId, PathChange)]>;
3643pub type UpdatedGitRepositoriesSet = Arc<[(Arc<Path>, GitRepositoryChange)]>;
3644
3645#[derive(Clone, Debug, PartialEq, Eq)]
3646pub struct StatusEntry {
3647    pub repo_path: RepoPath,
3648    pub status: FileStatus,
3649}
3650
3651impl StatusEntry {
3652    pub fn is_staged(&self) -> Option<bool> {
3653        self.status.is_staged()
3654    }
3655
3656    fn to_proto(&self) -> proto::StatusEntry {
3657        let simple_status = match self.status {
3658            FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
3659            FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
3660            FileStatus::Tracked(TrackedStatus {
3661                index_status,
3662                worktree_status,
3663            }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
3664                worktree_status
3665            } else {
3666                index_status
3667            }),
3668        };
3669        proto::StatusEntry {
3670            repo_path: self.repo_path.to_proto(),
3671            simple_status,
3672            status: Some(status_to_proto(self.status)),
3673        }
3674    }
3675}
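    // For example: a tracked file with staged changes that was then modified again in the
    // working tree reports its worktree status, while a file whose only change is already
    // staged falls back to reporting the index status.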
3676
3677impl TryFrom<proto::StatusEntry> for StatusEntry {
3678    type Error = anyhow::Error;
3679
3680    fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
3681        let repo_path = RepoPath(Path::new(&value.repo_path).into());
3682        let status = status_from_proto(value.simple_status, value.status)?;
3683        Ok(Self { repo_path, status })
3684    }
3685}
3686
3687#[derive(Clone, Debug)]
3688struct PathProgress<'a> {
3689    max_path: &'a Path,
3690}
3691
3692#[derive(Clone, Debug)]
3693pub struct PathSummary<S> {
3694    max_path: Arc<Path>,
3695    item_summary: S,
3696}
3697
3698impl<S: Summary> Summary for PathSummary<S> {
3699    type Context = S::Context;
3700
3701    fn zero(cx: &Self::Context) -> Self {
3702        Self {
3703            max_path: Path::new("").into(),
3704            item_summary: S::zero(cx),
3705        }
3706    }
3707
3708    fn add_summary(&mut self, rhs: &Self, cx: &Self::Context) {
3709        self.max_path = rhs.max_path.clone();
3710        self.item_summary.add_summary(&rhs.item_summary, cx);
3711    }
3712}
3713
3714impl<'a, S: Summary> sum_tree::Dimension<'a, PathSummary<S>> for PathProgress<'a> {
3715    fn zero(_: &<PathSummary<S> as Summary>::Context) -> Self {
3716        Self {
3717            max_path: Path::new(""),
3718        }
3719    }
3720
3721    fn add_summary(
3722        &mut self,
3723        summary: &'a PathSummary<S>,
3724        _: &<PathSummary<S> as Summary>::Context,
3725    ) {
3726        self.max_path = summary.max_path.as_ref()
3727    }
3728}
3729
3730impl sum_tree::Item for RepositoryEntry {
3731    type Summary = PathSummary<Unit>;
3732
3733    fn summary(&self, _: &<Self::Summary as Summary>::Context) -> Self::Summary {
3734        PathSummary {
3735            max_path: self.work_directory.path.clone(),
3736            item_summary: Unit,
3737        }
3738    }
3739}
3740
3741impl sum_tree::KeyedItem for RepositoryEntry {
3742    type Key = PathKey;
3743
3744    fn key(&self) -> Self::Key {
3745        PathKey(self.work_directory.path.clone())
3746    }
3747}
3748
3749impl sum_tree::Item for StatusEntry {
3750    type Summary = PathSummary<GitSummary>;
3751
3752    fn summary(&self, _: &<Self::Summary as Summary>::Context) -> Self::Summary {
3753        PathSummary {
3754            max_path: self.repo_path.0.clone(),
3755            item_summary: self.status.summary(),
3756        }
3757    }
3758}
3759
3760impl sum_tree::KeyedItem for StatusEntry {
3761    type Key = PathKey;
3762
3763    fn key(&self) -> Self::Key {
3764        PathKey(self.repo_path.0.clone())
3765    }
3766}
3767
3768impl<'a> sum_tree::Dimension<'a, PathSummary<GitSummary>> for GitSummary {
3769    fn zero(_cx: &()) -> Self {
3770        Default::default()
3771    }
3772
3773    fn add_summary(&mut self, summary: &'a PathSummary<GitSummary>, _: &()) {
3774        *self += summary.item_summary
3775    }
3776}
3777
3778impl<'a, S: Summary> sum_tree::Dimension<'a, PathSummary<S>> for PathKey {
3779    fn zero(_: &S::Context) -> Self {
3780        Default::default()
3781    }
3782
3783    fn add_summary(&mut self, summary: &'a PathSummary<S>, _: &S::Context) {
3784        self.0 = summary.max_path.clone();
3785    }
3786}
3787
3788impl<'a, S: Summary> sum_tree::Dimension<'a, PathSummary<S>> for TraversalProgress<'a> {
3789    fn zero(_cx: &S::Context) -> Self {
3790        Default::default()
3791    }
3792
3793    fn add_summary(&mut self, summary: &'a PathSummary<S>, _: &S::Context) {
3794        self.max_path = summary.max_path.as_ref();
3795    }
3796}
3797
3798impl Entry {
3799    fn new(
3800        path: Arc<Path>,
3801        metadata: &fs::Metadata,
3802        next_entry_id: &AtomicUsize,
3803        root_char_bag: CharBag,
3804        canonical_path: Option<Box<Path>>,
3805    ) -> Self {
3806        let char_bag = char_bag_for_path(root_char_bag, &path);
3807        Self {
3808            id: ProjectEntryId::new(next_entry_id),
3809            kind: if metadata.is_dir {
3810                EntryKind::PendingDir
3811            } else {
3812                EntryKind::File
3813            },
3814            path,
3815            inode: metadata.inode,
3816            mtime: Some(metadata.mtime),
3817            size: metadata.len,
3818            canonical_path,
3819            is_ignored: false,
3820            is_always_included: false,
3821            is_external: false,
3822            is_private: false,
3823            char_bag,
3824            is_fifo: metadata.is_fifo,
3825        }
3826    }
3827
3828    pub fn is_created(&self) -> bool {
3829        self.mtime.is_some()
3830    }
3831
3832    pub fn is_dir(&self) -> bool {
3833        self.kind.is_dir()
3834    }
3835
3836    pub fn is_file(&self) -> bool {
3837        self.kind.is_file()
3838    }
3839}
3840
3841impl EntryKind {
3842    pub fn is_dir(&self) -> bool {
3843        matches!(
3844            self,
3845            EntryKind::Dir | EntryKind::PendingDir | EntryKind::UnloadedDir
3846        )
3847    }
3848
3849    pub fn is_unloaded(&self) -> bool {
3850        matches!(self, EntryKind::UnloadedDir)
3851    }
3852
3853    pub fn is_file(&self) -> bool {
3854        matches!(self, EntryKind::File)
3855    }
3856}
3857
3858impl sum_tree::Item for Entry {
3859    type Summary = EntrySummary;
3860
3861    fn summary(&self, _cx: &()) -> Self::Summary {
3862        let non_ignored_count = if (self.is_ignored || self.is_external) && !self.is_always_included
3863        {
3864            0
3865        } else {
3866            1
3867        };
3868        let file_count;
3869        let non_ignored_file_count;
3870        if self.is_file() {
3871            file_count = 1;
3872            non_ignored_file_count = non_ignored_count;
3873        } else {
3874            file_count = 0;
3875            non_ignored_file_count = 0;
3876        }
3877
3878        EntrySummary {
3879            max_path: self.path.clone(),
3880            count: 1,
3881            non_ignored_count,
3882            file_count,
3883            non_ignored_file_count,
3884        }
3885    }
3886}
3887
3888impl sum_tree::KeyedItem for Entry {
3889    type Key = PathKey;
3890
3891    fn key(&self) -> Self::Key {
3892        PathKey(self.path.clone())
3893    }
3894}
3895
3896#[derive(Clone, Debug)]
3897pub struct EntrySummary {
3898    max_path: Arc<Path>,
3899    count: usize,
3900    non_ignored_count: usize,
3901    file_count: usize,
3902    non_ignored_file_count: usize,
3903}
3904
3905impl Default for EntrySummary {
3906    fn default() -> Self {
3907        Self {
3908            max_path: Arc::from(Path::new("")),
3909            count: 0,
3910            non_ignored_count: 0,
3911            file_count: 0,
3912            non_ignored_file_count: 0,
3913        }
3914    }
3915}
3916
3917impl sum_tree::Summary for EntrySummary {
3918    type Context = ();
3919
3920    fn zero(_cx: &()) -> Self {
3921        Default::default()
3922    }
3923
3924    fn add_summary(&mut self, rhs: &Self, _: &()) {
3925        self.max_path = rhs.max_path.clone();
3926        self.count += rhs.count;
3927        self.non_ignored_count += rhs.non_ignored_count;
3928        self.file_count += rhs.file_count;
3929        self.non_ignored_file_count += rhs.non_ignored_file_count;
3930    }
3931}
3932
3933#[derive(Clone, Debug)]
3934struct PathEntry {
3935    id: ProjectEntryId,
3936    path: Arc<Path>,
3937    is_ignored: bool,
3938    scan_id: usize,
3939}
3940
3941impl sum_tree::Item for PathEntry {
3942    type Summary = PathEntrySummary;
3943
3944    fn summary(&self, _cx: &()) -> Self::Summary {
3945        PathEntrySummary { max_id: self.id }
3946    }
3947}
3948
3949impl sum_tree::KeyedItem for PathEntry {
3950    type Key = ProjectEntryId;
3951
3952    fn key(&self) -> Self::Key {
3953        self.id
3954    }
3955}
3956
3957#[derive(Clone, Debug, Default)]
3958struct PathEntrySummary {
3959    max_id: ProjectEntryId,
3960}
3961
3962impl sum_tree::Summary for PathEntrySummary {
3963    type Context = ();
3964
3965    fn zero(_cx: &Self::Context) -> Self {
3966        Default::default()
3967    }
3968
3969    fn add_summary(&mut self, summary: &Self, _: &Self::Context) {
3970        self.max_id = summary.max_id;
3971    }
3972}
3973
3974impl<'a> sum_tree::Dimension<'a, PathEntrySummary> for ProjectEntryId {
3975    fn zero(_cx: &()) -> Self {
3976        Default::default()
3977    }
3978
3979    fn add_summary(&mut self, summary: &'a PathEntrySummary, _: &()) {
3980        *self = summary.max_id;
3981    }
3982}
3983
3984#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
3985pub struct PathKey(Arc<Path>);
3986
3987impl Default for PathKey {
3988    fn default() -> Self {
3989        Self(Path::new("").into())
3990    }
3991}
3992
3993impl<'a> sum_tree::Dimension<'a, EntrySummary> for PathKey {
3994    fn zero(_cx: &()) -> Self {
3995        Default::default()
3996    }
3997
3998    fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
3999        self.0 = summary.max_path.clone();
4000    }
4001}
4002
4003struct BackgroundScanner {
4004    state: Mutex<BackgroundScannerState>,
4005    fs: Arc<dyn Fs>,
4006    fs_case_sensitive: bool,
4007    status_updates_tx: UnboundedSender<ScanState>,
4008    executor: BackgroundExecutor,
4009    scan_requests_rx: channel::Receiver<ScanRequest>,
4010    path_prefixes_to_scan_rx: channel::Receiver<Arc<Path>>,
4011    next_entry_id: Arc<AtomicUsize>,
4012    phase: BackgroundScannerPhase,
4013    watcher: Arc<dyn Watcher>,
4014    settings: WorktreeSettings,
4015    share_private_files: bool,
4016}
4017
4018#[derive(PartialEq)]
4019enum BackgroundScannerPhase {
4020    InitialScan,
4021    EventsReceivedDuringInitialScan,
4022    Events,
4023}
4024
4025impl BackgroundScanner {
4026    async fn run(&mut self, mut fs_events_rx: Pin<Box<dyn Send + Stream<Item = Vec<PathEvent>>>>) {
4027        use futures::FutureExt as _;
4028
4029        // If the worktree root does not contain a git repository, then find
4030        // the git repository in an ancestor directory. Find any gitignore files
4031        // in ancestor directories.
4032        let root_abs_path = self.state.lock().snapshot.abs_path.clone();
4033        for (index, ancestor) in root_abs_path.as_path().ancestors().enumerate() {
4034            if index != 0 {
4035                if let Ok(ignore) =
4036                    build_gitignore(&ancestor.join(*GITIGNORE), self.fs.as_ref()).await
4037                {
4038                    self.state
4039                        .lock()
4040                        .snapshot
4041                        .ignores_by_parent_abs_path
4042                        .insert(ancestor.into(), (ignore.into(), false));
4043                }
4044            }
4045
4046            let ancestor_dot_git = ancestor.join(*DOT_GIT);
4047            // Check whether the directory or file called `.git` exists (in the
4048            // case of git worktrees, it's a file).
4049            if self
4050                .fs
4051                .metadata(&ancestor_dot_git)
4052                .await
4053                .is_ok_and(|metadata| metadata.is_some())
4054            {
4055                if index != 0 {
4056                    // We canonicalize, since the FS events use the canonicalized path.
4057                    if let Some(ancestor_dot_git) =
4058                        self.fs.canonicalize(&ancestor_dot_git).await.log_err()
4059                    {
4060                        // We associate the external git repo with our root folder and
4061                        // also mark where in the git repo the root folder is located.
4062                        self.state.lock().insert_git_repository_for_path(
4063                            Path::new("").into(),
4064                            ancestor_dot_git.into(),
4065                            Some(
4066                                root_abs_path
4067                                    .as_path()
4068                                    .strip_prefix(ancestor)
4069                                    .unwrap()
4070                                    .into(),
4071                            ),
4072                            self.fs.as_ref(),
4073                            self.watcher.as_ref(),
4074                        );
4075                    };
4076                }
4077
4078                // Reached root of git repository.
4079                break;
4080            }
4081        }
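            // At this point, if the worktree root lives inside a larger repository (say a
            // hypothetical `~/code/repo/crates/foo` opened on its own), the repository at
            // `~/code/repo/.git` has been registered against the worktree root with
            // `location_in_repo` set to `crates/foo`, and any `.gitignore` found in the ancestor
            // directories has been loaded.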
4082
4083        let (scan_job_tx, scan_job_rx) = channel::unbounded();
4084        {
4085            let mut state = self.state.lock();
4086            state.snapshot.scan_id += 1;
4087            if let Some(mut root_entry) = state.snapshot.root_entry().cloned() {
4088                let ignore_stack = state
4089                    .snapshot
4090                    .ignore_stack_for_abs_path(root_abs_path.as_path(), true);
4091                if ignore_stack.is_abs_path_ignored(root_abs_path.as_path(), true) {
4092                    root_entry.is_ignored = true;
4093                    state.insert_entry(root_entry.clone(), self.fs.as_ref(), self.watcher.as_ref());
4094                }
4095                state.enqueue_scan_dir(root_abs_path.into(), &root_entry, &scan_job_tx);
4096            }
4097        };
4098
4099        // Perform an initial scan of the directory.
4100        drop(scan_job_tx);
4101        self.scan_dirs(true, scan_job_rx).await;
4102        {
4103            let mut state = self.state.lock();
4104            state.snapshot.completed_scan_id = state.snapshot.scan_id;
4105        }
4106
4107        self.send_status_update(false, SmallVec::new());
4108
4109        // Process any FS events that occurred while performing the initial scan.
4110        // For these events, the reported changes cannot be as precise, because we didn't
4111        // have the previous state loaded yet.
4112        self.phase = BackgroundScannerPhase::EventsReceivedDuringInitialScan;
4113        if let Poll::Ready(Some(mut paths)) = futures::poll!(fs_events_rx.next()) {
4114            while let Poll::Ready(Some(more_paths)) = futures::poll!(fs_events_rx.next()) {
4115                paths.extend(more_paths);
4116            }
4117            self.process_events(paths.into_iter().map(Into::into).collect())
4118                .await;
4119        }
4120
4121        // Continue processing events until the worktree is dropped.
4122        self.phase = BackgroundScannerPhase::Events;
4123
4124        loop {
4125            select_biased! {
4126                // Process any path refresh requests from the worktree. Prioritize
4127                // these before handling changes reported by the filesystem.
4128                request = self.next_scan_request().fuse() => {
4129                    let Ok(request) = request else { break };
4130                    if !self.process_scan_request(request, false).await {
4131                        return;
4132                    }
4133                }
4134
4135                path_prefix = self.path_prefixes_to_scan_rx.recv().fuse() => {
4136                    let Ok(path_prefix) = path_prefix else { break };
4137                    log::trace!("adding path prefix {:?}", path_prefix);
4138
4139                    let did_scan = self.forcibly_load_paths(&[path_prefix.clone()]).await;
4140                    if did_scan {
4141                        let abs_path =
4142                        {
4143                            let mut state = self.state.lock();
4144                            state.path_prefixes_to_scan.insert(path_prefix.clone());
4145                            state.snapshot.abs_path.as_path().join(&path_prefix)
4146                        };
4147
4148                        if let Some(abs_path) = self.fs.canonicalize(&abs_path).await.log_err() {
4149                            self.process_events(vec![abs_path]).await;
4150                        }
4151                    }
4152                }
4153
4154                paths = fs_events_rx.next().fuse() => {
4155                    let Some(mut paths) = paths else { break };
4156                    while let Poll::Ready(Some(more_paths)) = futures::poll!(fs_events_rx.next()) {
4157                        paths.extend(more_paths);
4158                    }
4159                    self.process_events(paths.into_iter().map(Into::into).collect()).await;
4160                }
4161            }
4162        }
4163    }
4164
4165    async fn process_scan_request(&self, mut request: ScanRequest, scanning: bool) -> bool {
4166        log::debug!("rescanning paths {:?}", request.relative_paths);
4167
4168        request.relative_paths.sort_unstable();
4169        self.forcibly_load_paths(&request.relative_paths).await;
4170
4171        let root_path = self.state.lock().snapshot.abs_path.clone();
4172        let root_canonical_path = match self.fs.canonicalize(root_path.as_path()).await {
4173            Ok(path) => SanitizedPath::from(path),
4174            Err(err) => {
4175                log::error!("failed to canonicalize root path: {}", err);
4176                return true;
4177            }
4178        };
4179        let abs_paths = request
4180            .relative_paths
4181            .iter()
4182            .map(|path| {
4183                if path.file_name().is_some() {
4184                    root_canonical_path.as_path().join(path).to_path_buf()
4185                } else {
4186                    root_canonical_path.as_path().to_path_buf()
4187                }
4188            })
4189            .collect::<Vec<_>>();
4190
4191        {
4192            let mut state = self.state.lock();
4193            let is_idle = state.snapshot.completed_scan_id == state.snapshot.scan_id;
4194            state.snapshot.scan_id += 1;
4195            if is_idle {
4196                state.snapshot.completed_scan_id = state.snapshot.scan_id;
4197            }
4198        }
4199
4200        self.reload_entries_for_paths(
4201            root_path,
4202            root_canonical_path,
4203            &request.relative_paths,
4204            abs_paths,
4205            None,
4206        )
4207        .await;
4208
4209        self.send_status_update(scanning, request.done)
4210    }
4211
4212    async fn process_events(&self, mut abs_paths: Vec<PathBuf>) {
4213        let root_path = self.state.lock().snapshot.abs_path.clone();
4214        let root_canonical_path = match self.fs.canonicalize(root_path.as_path()).await {
4215            Ok(path) => SanitizedPath::from(path),
4216            Err(err) => {
4217                let new_path = self
4218                    .state
4219                    .lock()
4220                    .snapshot
4221                    .root_file_handle
4222                    .clone()
4223                    .and_then(|handle| handle.current_path(&self.fs).log_err())
4224                    .map(SanitizedPath::from)
4225                    .filter(|new_path| *new_path != root_path);
4226
4227                if let Some(new_path) = new_path.as_ref() {
4228                    log::info!(
4229                        "root renamed from {} to {}",
4230                        root_path.as_path().display(),
4231                        new_path.as_path().display()
4232                    )
4233                } else {
4234                    log::warn!("root path could not be canonicalized: {}", err);
4235                }
4236                self.status_updates_tx
4237                    .unbounded_send(ScanState::RootUpdated { new_path })
4238                    .ok();
4239                return;
4240            }
4241        };
4242
4243        let mut relative_paths = Vec::with_capacity(abs_paths.len());
4244        let mut dot_git_abs_paths = Vec::new();
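            // After sorting, any path contained in another path in the list follows it, so the
            // `starts_with` dedup below drops events that are already covered by an ancestor's
            // event.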
4245        abs_paths.sort_unstable();
4246        abs_paths.dedup_by(|a, b| a.starts_with(b));
4247        abs_paths.retain(|abs_path| {
4248            let abs_path = SanitizedPath::from(abs_path);
4249            let snapshot = &self.state.lock().snapshot;
4250            {
4251                let mut is_git_related = false;
4252
4253                // We don't want to trigger a .git rescan for events within the .git/fsmonitor--daemon/cookies directory.
4254                #[derive(PartialEq)]
4255                enum FsMonitorParseState {
4256                    Cookies,
4257                    FsMonitor
4258                }
4259                let mut fsmonitor_parse_state = None;
4260                if let Some(dot_git_abs_path) = abs_path.as_path()
4261                    .ancestors()
4262                    .find(|ancestor| {
4263                        let file_name = ancestor.file_name();
4264                        if file_name == Some(*COOKIES) {
4265                            fsmonitor_parse_state = Some(FsMonitorParseState::Cookies);
4266                            false
4267                        } else if fsmonitor_parse_state == Some(FsMonitorParseState::Cookies) && file_name == Some(*FSMONITOR_DAEMON) {
4268                            fsmonitor_parse_state = Some(FsMonitorParseState::FsMonitor);
4269                            false
4270                        } else if fsmonitor_parse_state != Some(FsMonitorParseState::FsMonitor) && smol::block_on(is_git_dir(ancestor, self.fs.as_ref())) {
4271                            true
4272                        } else {
4273                            fsmonitor_parse_state.take();
4274                            false
4275                        }
4276
4277                    })
4278                {
4279                    let dot_git_abs_path = dot_git_abs_path.to_path_buf();
4280                    if !dot_git_abs_paths.contains(&dot_git_abs_path) {
4281                        dot_git_abs_paths.push(dot_git_abs_path);
4282                    }
4283                    is_git_related = true;
4284                }
4285
4286                let relative_path: Arc<Path> =
4287                    if let Ok(path) = abs_path.strip_prefix(&root_canonical_path) {
4288                        path.into()
4289                    } else {
4290                        if is_git_related {
4291                            log::debug!(
4292                              "ignoring event {abs_path:?}, since it's in git dir outside of root path {root_canonical_path:?}",
4293                            );
4294                        } else {
4295                            log::error!(
4296                              "ignoring event {abs_path:?} outside of root path {root_canonical_path:?}",
4297                            );
4298                        }
4299                        return false;
4300                    };
4301
4302                let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
4303                    snapshot
4304                        .entry_for_path(parent)
4305                        .map_or(false, |entry| entry.kind == EntryKind::Dir)
4306                });
4307                if !parent_dir_is_loaded {
4308                    log::debug!("ignoring event {relative_path:?} within unloaded directory");
4309                    return false;
4310                }
4311
4312                if self.settings.is_path_excluded(&relative_path) {
4313                    if !is_git_related {
4314                        log::debug!("ignoring FS event for excluded path {relative_path:?}");
4315                    }
4316                    return false;
4317                }
4318
4319                relative_paths.push(relative_path);
4320                true
4321            }
4322        });
4323
4324        if relative_paths.is_empty() && dot_git_abs_paths.is_empty() {
4325            return;
4326        }
4327
4328        self.state.lock().snapshot.scan_id += 1;
4329
4330        let (scan_job_tx, scan_job_rx) = channel::unbounded();
4331        log::debug!("received fs events {:?}", relative_paths);
4332        self.reload_entries_for_paths(
4333            root_path,
4334            root_canonical_path,
4335            &relative_paths,
4336            abs_paths,
4337            Some(scan_job_tx.clone()),
4338        )
4339        .await;
4340
4341        self.update_ignore_statuses(scan_job_tx).await;
4342        self.scan_dirs(false, scan_job_rx).await;
4343
4344        if !dot_git_abs_paths.is_empty() {
4345            self.update_git_repositories(dot_git_abs_paths).await;
4346        }
4347
4348        {
4349            let mut state = self.state.lock();
4350            state.snapshot.completed_scan_id = state.snapshot.scan_id;
4351            for (_, entry) in mem::take(&mut state.removed_entries) {
4352                state.scanned_dirs.remove(&entry.id);
4353            }
4354        }
4355
4356        #[cfg(test)]
4357        self.state.lock().snapshot.check_git_invariants();
4358
4359        self.send_status_update(false, SmallVec::new());
4360    }
4361
4362    async fn forcibly_load_paths(&self, paths: &[Arc<Path>]) -> bool {
4363        let (scan_job_tx, scan_job_rx) = channel::unbounded();
4364        {
4365            let mut state = self.state.lock();
4366            let root_path = state.snapshot.abs_path.clone();
4367            for path in paths {
4368                for ancestor in path.ancestors() {
4369                    if let Some(entry) = state.snapshot.entry_for_path(ancestor) {
4370                        if entry.kind == EntryKind::UnloadedDir {
4371                            let abs_path = root_path.as_path().join(ancestor);
4372                            state.enqueue_scan_dir(abs_path.into(), entry, &scan_job_tx);
4373                            state.paths_to_scan.insert(path.clone());
4374                            break;
4375                        }
4376                    }
4377                }
4378            }
4379            drop(scan_job_tx);
4380        }
4381        while let Ok(job) = scan_job_rx.recv().await {
4382            self.scan_dir(&job).await.log_err();
4383        }
4384
4385        !mem::take(&mut self.state.lock().paths_to_scan).is_empty()
4386    }
4387
4388    async fn scan_dirs(
4389        &self,
4390        enable_progress_updates: bool,
4391        scan_jobs_rx: channel::Receiver<ScanJob>,
4392    ) {
4393        use futures::FutureExt as _;
4394
4395        if self
4396            .status_updates_tx
4397            .unbounded_send(ScanState::Started)
4398            .is_err()
4399        {
4400            return;
4401        }
4402
4403        let progress_update_count = AtomicUsize::new(0);
4404        self.executor
4405            .scoped(|scope| {
4406                for _ in 0..self.executor.num_cpus() {
4407                    scope.spawn(async {
4408                        let mut last_progress_update_count = 0;
4409                        let progress_update_timer = self.progress_timer(enable_progress_updates).fuse();
4410                        futures::pin_mut!(progress_update_timer);
4411
4412                        loop {
4413                            select_biased! {
4414                                // Process any path refresh requests before moving on to process
4415                                // the scan queue, so that user operations are prioritized.
4416                                request = self.next_scan_request().fuse() => {
4417                                    let Ok(request) = request else { break };
4418                                    if !self.process_scan_request(request, true).await {
4419                                        return;
4420                                    }
4421                                }
4422
4423                                // Send periodic progress updates to the worktree. Use an atomic counter
4424                                // to ensure that only one of the workers sends a progress update after
4425                                // the update interval elapses.
4426                                _ = progress_update_timer => {
4427                                    match progress_update_count.compare_exchange(
4428                                        last_progress_update_count,
4429                                        last_progress_update_count + 1,
4430                                        SeqCst,
4431                                        SeqCst
4432                                    ) {
4433                                        Ok(_) => {
4434                                            last_progress_update_count += 1;
4435                                            self.send_status_update(true, SmallVec::new());
4436                                        }
4437                                        Err(count) => {
4438                                            last_progress_update_count = count;
4439                                        }
4440                                    }
4441                                    progress_update_timer.set(self.progress_timer(enable_progress_updates).fuse());
4442                                }
4443
4444                                // Recursively load directories from the file system.
4445                                job = scan_jobs_rx.recv().fuse() => {
4446                                    let Ok(job) = job else { break };
4447                                    if let Err(err) = self.scan_dir(&job).await {
4448                                        if job.path.as_ref() != Path::new("") {
4449                                            log::error!("error scanning directory {:?}: {}", job.abs_path, err);
4450                                        }
4451                                    }
4452                                }
4453                            }
4454                        }
4455                    })
4456                }
4457            })
4458            .await;
4459    }
4460
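        /// Builds the change set since the previous snapshot and sends a `ScanState::Updated`
        /// message on the status updates channel. Returns false if the receiver was dropped.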
4461    fn send_status_update(&self, scanning: bool, barrier: SmallVec<[barrier::Sender; 1]>) -> bool {
4462        let mut state = self.state.lock();
4463        if state.changed_paths.is_empty() && scanning {
4464            return true;
4465        }
4466
4467        let new_snapshot = state.snapshot.clone();
4468        let old_snapshot = mem::replace(&mut state.prev_snapshot, new_snapshot.snapshot.clone());
4469        let changes = self.build_change_set(&old_snapshot, &new_snapshot, &state.changed_paths);
4470        state.changed_paths.clear();
4471
4472        self.status_updates_tx
4473            .unbounded_send(ScanState::Updated {
4474                snapshot: new_snapshot,
4475                changes,
4476                scanning,
4477                barrier,
4478            })
4479            .is_ok()
4480    }
4481
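        /// Scans a single directory: reads its children, processing `.git` and `.gitignore`
        /// first, builds entries for the remaining children, and enqueues scan jobs for any
        /// subdirectories that should be descended into.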
4482    async fn scan_dir(&self, job: &ScanJob) -> Result<()> {
4483        let root_abs_path;
4484        let root_char_bag;
4485        {
4486            let snapshot = &self.state.lock().snapshot;
4487            if self.settings.is_path_excluded(&job.path) {
4488                log::error!("skipping excluded directory {:?}", job.path);
4489                return Ok(());
4490            }
4491            log::debug!("scanning directory {:?}", job.path);
4492            root_abs_path = snapshot.abs_path().clone();
4493            root_char_bag = snapshot.root_char_bag;
4494        }
4495
4496        let next_entry_id = self.next_entry_id.clone();
4497        let mut ignore_stack = job.ignore_stack.clone();
4498        let mut new_ignore = None;
4499        let mut root_canonical_path = None;
4500        let mut new_entries: Vec<Entry> = Vec::new();
4501        let mut new_jobs: Vec<Option<ScanJob>> = Vec::new();
4502        let mut child_paths = self
4503            .fs
4504            .read_dir(&job.abs_path)
4505            .await?
4506            .filter_map(|entry| async {
4507                match entry {
4508                    Ok(entry) => Some(entry),
4509                    Err(error) => {
4510                        log::error!("error processing entry {:?}", error);
4511                        None
4512                    }
4513                }
4514            })
4515            .collect::<Vec<_>>()
4516            .await;
4517
4518        // Ensure that .git and .gitignore are processed first.
4519        swap_to_front(&mut child_paths, *GITIGNORE);
4520        swap_to_front(&mut child_paths, *DOT_GIT);
4521
4522        for child_abs_path in child_paths {
4523            let child_abs_path: Arc<Path> = child_abs_path.into();
4524            let child_name = child_abs_path.file_name().unwrap();
4525            let child_path: Arc<Path> = job.path.join(child_name).into();
4526
4527            if child_name == *DOT_GIT {
4528                let repo = self.state.lock().insert_git_repository(
4529                    child_path.clone(),
4530                    self.fs.as_ref(),
4531                    self.watcher.as_ref(),
4532                );
4533
4534                if let Some(local_repo) = repo {
4535                    self.update_git_statuses(UpdateGitStatusesJob {
4536                        local_repository: local_repo,
4537                    });
4538                }
4539            } else if child_name == *GITIGNORE {
4540                match build_gitignore(&child_abs_path, self.fs.as_ref()).await {
4541                    Ok(ignore) => {
4542                        let ignore = Arc::new(ignore);
4543                        ignore_stack = ignore_stack.append(job.abs_path.clone(), ignore.clone());
4544                        new_ignore = Some(ignore);
4545                    }
4546                    Err(error) => {
4547                        log::error!(
4548                            "error loading .gitignore file {:?} - {:?}",
4549                            child_name,
4550                            error
4551                        );
4552                    }
4553                }
4554            }
4555
4556            if self.settings.is_path_excluded(&child_path) {
4557                log::debug!("skipping excluded child entry {child_path:?}");
4558                self.state.lock().remove_path(&child_path);
4559                continue;
4560            }
4561
4562            let child_metadata = match self.fs.metadata(&child_abs_path).await {
4563                Ok(Some(metadata)) => metadata,
4564                Ok(None) => continue,
4565                Err(err) => {
4566                    log::error!("error processing {child_abs_path:?}: {err:?}");
4567                    continue;
4568                }
4569            };
4570
4571            let mut child_entry = Entry::new(
4572                child_path.clone(),
4573                &child_metadata,
4574                &next_entry_id,
4575                root_char_bag,
4576                None,
4577            );
4578
4579            if job.is_external {
4580                child_entry.is_external = true;
4581            } else if child_metadata.is_symlink {
4582                let canonical_path = match self.fs.canonicalize(&child_abs_path).await {
4583                    Ok(path) => path,
4584                    Err(err) => {
4585                        log::error!(
4586                            "error reading target of symlink {:?}: {:?}",
4587                            child_abs_path,
4588                            err
4589                        );
4590                        continue;
4591                    }
4592                };
4593
4594                // lazily canonicalize the root path in order to determine if
4595                // symlinks point outside of the worktree.
4596                let root_canonical_path = match &root_canonical_path {
4597                    Some(path) => path,
4598                    None => match self.fs.canonicalize(&root_abs_path).await {
4599                        Ok(path) => root_canonical_path.insert(path),
4600                        Err(err) => {
4601                            log::error!("error canonicalizing root {:?}: {:?}", root_abs_path, err);
4602                            continue;
4603                        }
4604                    },
4605                };
4606
4607                if !canonical_path.starts_with(root_canonical_path) {
4608                    child_entry.is_external = true;
4609                }
4610
4611                child_entry.canonical_path = Some(canonical_path.into());
4612            }
4613
4614            if child_entry.is_dir() {
4615                child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, true);
4616                child_entry.is_always_included = self.settings.is_path_always_included(&child_path);
4617
4618                // Avoid unbounded recursion in the case of a recursive symlink
4619                if job.ancestor_inodes.contains(&child_entry.inode) {
4620                    new_jobs.push(None);
4621                } else {
4622                    let mut ancestor_inodes = job.ancestor_inodes.clone();
4623                    ancestor_inodes.insert(child_entry.inode);
4624
4625                    new_jobs.push(Some(ScanJob {
4626                        abs_path: child_abs_path.clone(),
4627                        path: child_path,
4628                        is_external: child_entry.is_external,
4629                        ignore_stack: if child_entry.is_ignored {
4630                            IgnoreStack::all()
4631                        } else {
4632                            ignore_stack.clone()
4633                        },
4634                        ancestor_inodes,
4635                        scan_queue: job.scan_queue.clone(),
4636                    }));
4637                }
4638            } else {
4639                child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false);
4640                child_entry.is_always_included = self.settings.is_path_always_included(&child_path);
4641            }
4642
4643            {
4644                let relative_path = job.path.join(child_name);
4645                if self.is_path_private(&relative_path) {
4646                    log::debug!("detected private file: {relative_path:?}");
4647                    child_entry.is_private = true;
4648                }
4649            }
4650
4651            new_entries.push(child_entry);
4652        }
4653
4654        let mut state = self.state.lock();
4655
4656        // Identify any subdirectories that should not be scanned.
4657        let mut job_ix = 0;
4658        for entry in &mut new_entries {
4659            state.reuse_entry_id(entry);
4660            if entry.is_dir() {
4661                if state.should_scan_directory(entry) {
4662                    job_ix += 1;
4663                } else {
4664                    log::debug!("defer scanning directory {:?}", entry.path);
4665                    entry.kind = EntryKind::UnloadedDir;
4666                    new_jobs.remove(job_ix);
4667                }
4668            }
4669            if entry.is_always_included {
4670                state
4671                    .snapshot
4672                    .always_included_entries
4673                    .push(entry.path.clone());
4674            }
4675        }
4676
4677        state.populate_dir(&job.path, new_entries, new_ignore);
4678        self.watcher.add(job.abs_path.as_ref()).log_err();
4679
4680        for new_job in new_jobs.into_iter().flatten() {
4681            job.scan_queue
4682                .try_send(new_job)
4683                .expect("channel is unbounded");
4684        }
4685
4686        Ok(())
4687    }
4688
4689    /// All list arguments should be sorted before calling this function
4690    async fn reload_entries_for_paths(
4691        &self,
4692        root_abs_path: SanitizedPath,
4693        root_canonical_path: SanitizedPath,
4694        relative_paths: &[Arc<Path>],
4695        abs_paths: Vec<PathBuf>,
4696        scan_queue_tx: Option<Sender<ScanJob>>,
4697    ) {
4698        // grab metadata for all requested paths
4699        let metadata = futures::future::join_all(
4700            abs_paths
4701                .iter()
4702                .map(|abs_path| async move {
4703                    let metadata = self.fs.metadata(abs_path).await?;
4704                    if let Some(metadata) = metadata {
4705                        let canonical_path = self.fs.canonicalize(abs_path).await?;
4706
4707                        // On a case-insensitive filesystem (the default on macOS), only keep the
4708                        // metadata for a non-symlink file if its absolute path matches its
4709                        // canonical path.
4710                        // If they differ, this might be a case-only rename (`mv test.txt TEST.TXT`),
4711                        // and we want to discard the metadata for the old path (`test.txt`) so it is
4712                        // treated as removed.
4713                        if !self.fs_case_sensitive && !metadata.is_symlink {
4714                            let canonical_file_name = canonical_path.file_name();
4715                            let file_name = abs_path.file_name();
4716                            if canonical_file_name != file_name {
4717                                return Ok(None);
4718                            }
4719                        }
4720
4721                        anyhow::Ok(Some((metadata, SanitizedPath::from(canonical_path))))
4722                    } else {
4723                        Ok(None)
4724                    }
4725                })
4726                .collect::<Vec<_>>(),
4727        )
4728        .await;
4729
4730        let mut state = self.state.lock();
4731        let doing_recursive_update = scan_queue_tx.is_some();
4732
4733        // Remove any entries for paths that no longer exist or are being recursively
4734        // refreshed. Do this before adding any new entries, so that renames can be
4735        // detected regardless of the order of the paths.
4736        for (path, metadata) in relative_paths.iter().zip(metadata.iter()) {
4737            if matches!(metadata, Ok(None)) || doing_recursive_update {
4738                log::trace!("remove path {:?}", path);
4739                state.remove_path(path);
4740            }
4741        }
4742
4743        // Group all relative paths by their git repository.
4744        let mut paths_by_git_repo = HashMap::default();
4745        for relative_path in relative_paths.iter() {
4746            let repository_data = state
4747                .snapshot
4748                .local_repo_for_path(relative_path)
4749                .zip(state.snapshot.repository_for_path(relative_path));
4750            if let Some((local_repo, entry)) = repository_data {
4751                if let Ok(repo_path) = local_repo.relativize(relative_path) {
4752                    paths_by_git_repo
4753                        .entry(local_repo.work_directory.clone())
4754                        .or_insert_with(|| RepoPaths {
4755                            entry: entry.clone(),
4756                            repo: local_repo.repo_ptr.clone(),
4757                            repo_paths: Default::default(),
4758                        })
4759                        .add_path(repo_path);
4760                }
4761            }
4762        }
4763
4764        for (work_directory, mut paths) in paths_by_git_repo {
4765            if let Ok(status) = paths.repo.status(&paths.repo_paths) {
4766                let mut changed_path_statuses = Vec::new();
4767                let statuses = paths.entry.statuses_by_path.clone();
4768                let mut cursor = statuses.cursor::<PathProgress>(&());
4769
4770                for (repo_path, status) in &*status.entries {
4771                    paths.remove_repo_path(repo_path);
4772                    if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left, &()) {
4773                        if &cursor.item().unwrap().status == status {
4774                            continue;
4775                        }
4776                    }
4777
4778                    changed_path_statuses.push(Edit::Insert(StatusEntry {
4779                        repo_path: repo_path.clone(),
4780                        status: *status,
4781                    }));
4782                }
4783
4784                let mut cursor = statuses.cursor::<PathProgress>(&());
4785                for path in paths.repo_paths {
4786                    if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left, &()) {
4787                        changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
4788                    }
4789                }
4790
4791                if !changed_path_statuses.is_empty() {
4792                    let work_directory_id = state.snapshot.repositories.update(
4793                        &work_directory.path_key(),
4794                        &(),
4795                        move |repository_entry| {
4796                            repository_entry
4797                                .statuses_by_path
4798                                .edit(changed_path_statuses, &());
4799
4800                            repository_entry.work_directory_id
4801                        },
4802                    );
4803
4804                    if let Some(work_directory_id) = work_directory_id {
4805                        let scan_id = state.snapshot.scan_id;
4806                        state.snapshot.git_repositories.update(
4807                            &work_directory_id,
4808                            |local_repository_entry| {
4809                                local_repository_entry.status_scan_id = scan_id;
4810                            },
4811                        );
4812                    }
4813                }
4814            }
4815        }
4816
4817        for (path, metadata) in relative_paths.iter().zip(metadata.into_iter()) {
4818            let abs_path: Arc<Path> = root_abs_path.as_path().join(path).into();
4819            match metadata {
4820                Ok(Some((metadata, canonical_path))) => {
4821                    let ignore_stack = state
4822                        .snapshot
4823                        .ignore_stack_for_abs_path(&abs_path, metadata.is_dir);
4824                    let is_external = !canonical_path.starts_with(&root_canonical_path);
4825                    let mut fs_entry = Entry::new(
4826                        path.clone(),
4827                        &metadata,
4828                        self.next_entry_id.as_ref(),
4829                        state.snapshot.root_char_bag,
4830                        if metadata.is_symlink {
4831                            Some(canonical_path.as_path().to_path_buf().into())
4832                        } else {
4833                            None
4834                        },
4835                    );
4836
4837                    let is_dir = fs_entry.is_dir();
4838                    fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir);
4839                    fs_entry.is_external = is_external;
4840                    fs_entry.is_private = self.is_path_private(path);
4841                    fs_entry.is_always_included = self.settings.is_path_always_included(path);
4842
4843                    if let (Some(scan_queue_tx), true) = (&scan_queue_tx, is_dir) {
4844                        if state.should_scan_directory(&fs_entry)
4845                            || (fs_entry.path.as_os_str().is_empty()
4846                                && abs_path.file_name() == Some(*DOT_GIT))
4847                        {
4848                            state.enqueue_scan_dir(abs_path, &fs_entry, scan_queue_tx);
4849                        } else {
4850                            fs_entry.kind = EntryKind::UnloadedDir;
4851                        }
4852                    }
4853
4854                    state.insert_entry(fs_entry.clone(), self.fs.as_ref(), self.watcher.as_ref());
4855                }
4856                Ok(None) => {
4857                    self.remove_repo_path(path, &mut state.snapshot);
4858                }
4859                Err(err) => {
4860                    log::error!("error reading file {abs_path:?} on event: {err:#}");
4861                }
4862            }
4863        }
4864
4865        util::extend_sorted(
4866            &mut state.changed_paths,
4867            relative_paths.iter().cloned(),
4868            usize::MAX,
4869            Ord::cmp,
4870        );
4871    }
4872
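        /// If the removed path corresponds to the work directory of a known git repository,
        /// removes that repository's state from the snapshot.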
4873    fn remove_repo_path(&self, path: &Arc<Path>, snapshot: &mut LocalSnapshot) -> Option<()> {
4874        if !path
4875            .components()
4876            .any(|component| component.as_os_str() == *DOT_GIT)
4877        {
4878            if let Some(repository) = snapshot.repository(PathKey(path.clone())) {
4879                snapshot
4880                    .git_repositories
4881                    .remove(&repository.work_directory_id);
4882                snapshot
4883                    .snapshot
4884                    .repositories
4885                    .remove(&PathKey(repository.work_directory.path.clone()), &());
4886                return Some(());
4887            }
4888        }
4889
4890        Some(())
4891    }
4892
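        /// Re-evaluates ignore status for all directories whose `.gitignore` files changed,
        /// distributing the work across worker tasks while still servicing scan requests.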
4893    async fn update_ignore_statuses(&self, scan_job_tx: Sender<ScanJob>) {
4894        use futures::FutureExt as _;
4895
4896        let mut ignores_to_update = Vec::new();
4897        let (ignore_queue_tx, ignore_queue_rx) = channel::unbounded();
4898        let prev_snapshot;
4899        {
4900            let snapshot = &mut self.state.lock().snapshot;
4901            let abs_path = snapshot.abs_path.clone();
4902            snapshot
4903                .ignores_by_parent_abs_path
4904                .retain(|parent_abs_path, (_, needs_update)| {
4905                    if let Ok(parent_path) = parent_abs_path.strip_prefix(abs_path.as_path()) {
4906                        if *needs_update {
4907                            *needs_update = false;
4908                            if snapshot.snapshot.entry_for_path(parent_path).is_some() {
4909                                ignores_to_update.push(parent_abs_path.clone());
4910                            }
4911                        }
4912
4913                        let ignore_path = parent_path.join(*GITIGNORE);
4914                        if snapshot.snapshot.entry_for_path(ignore_path).is_none() {
4915                            return false;
4916                        }
4917                    }
4918                    true
4919                });
4920
4921            ignores_to_update.sort_unstable();
4922            let mut ignores_to_update = ignores_to_update.into_iter().peekable();
4923            while let Some(parent_abs_path) = ignores_to_update.next() {
4924                while ignores_to_update
4925                    .peek()
4926                    .map_or(false, |p| p.starts_with(&parent_abs_path))
4927                {
4928                    ignores_to_update.next().unwrap();
4929                }
4930
4931                let ignore_stack = snapshot.ignore_stack_for_abs_path(&parent_abs_path, true);
4932                ignore_queue_tx
4933                    .send_blocking(UpdateIgnoreStatusJob {
4934                        abs_path: parent_abs_path,
4935                        ignore_stack,
4936                        ignore_queue: ignore_queue_tx.clone(),
4937                        scan_queue: scan_job_tx.clone(),
4938                    })
4939                    .unwrap();
4940            }
4941
4942            prev_snapshot = snapshot.clone();
4943        }
4944        drop(ignore_queue_tx);
4945
4946        self.executor
4947            .scoped(|scope| {
4948                for _ in 0..self.executor.num_cpus() {
4949                    scope.spawn(async {
4950                        loop {
4951                            select_biased! {
4952                                // Process any path refresh requests before moving on to process
4953                                // the queue of ignore statuses.
4954                                request = self.next_scan_request().fuse() => {
4955                                    let Ok(request) = request else { break };
4956                                    if !self.process_scan_request(request, true).await {
4957                                        return;
4958                                    }
4959                                }
4960
4961                                // Recursively process directories whose ignores have changed.
4962                                job = ignore_queue_rx.recv().fuse() => {
4963                                    let Ok(job) = job else { break };
4964                                    self.update_ignore_status(job, &prev_snapshot).await;
4965                                }
4966                            }
4967                        }
4968                    });
4969                }
4970            })
4971            .await;
4972    }
4973
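        /// Applies an updated ignore stack to a single directory: recomputes the ignored flag
        /// for each child entry, schedules scans for directories that are no longer ignored,
        /// and enqueues follow-up jobs for subdirectories.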
4974    async fn update_ignore_status(&self, job: UpdateIgnoreStatusJob, snapshot: &LocalSnapshot) {
4975        log::trace!("update ignore status {:?}", job.abs_path);
4976
4977        let mut ignore_stack = job.ignore_stack;
4978        if let Some((ignore, _)) = snapshot.ignores_by_parent_abs_path.get(&job.abs_path) {
4979            ignore_stack = ignore_stack.append(job.abs_path.clone(), ignore.clone());
4980        }
4981
4982        let mut entries_by_id_edits = Vec::new();
4983        let mut entries_by_path_edits = Vec::new();
4984        let path = job
4985            .abs_path
4986            .strip_prefix(snapshot.abs_path.as_path())
4987            .unwrap();
4988
4989        for mut entry in snapshot.child_entries(path).cloned() {
4990            let was_ignored = entry.is_ignored;
4991            let abs_path: Arc<Path> = snapshot.abs_path().join(&entry.path).into();
4992            entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, entry.is_dir());
4993
4994            if entry.is_dir() {
4995                let child_ignore_stack = if entry.is_ignored {
4996                    IgnoreStack::all()
4997                } else {
4998                    ignore_stack.clone()
4999                };
5000
5001                // Scan any directories that were previously ignored and weren't previously scanned.
5002                if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() {
5003                    let state = self.state.lock();
5004                    if state.should_scan_directory(&entry) {
5005                        state.enqueue_scan_dir(abs_path.clone(), &entry, &job.scan_queue);
5006                    }
5007                }
5008
5009                job.ignore_queue
5010                    .send(UpdateIgnoreStatusJob {
5011                        abs_path: abs_path.clone(),
5012                        ignore_stack: child_ignore_stack,
5013                        ignore_queue: job.ignore_queue.clone(),
5014                        scan_queue: job.scan_queue.clone(),
5015                    })
5016                    .await
5017                    .unwrap();
5018            }
5019
5020            if entry.is_ignored != was_ignored {
5021                let mut path_entry = snapshot.entries_by_id.get(&entry.id, &()).unwrap().clone();
5022                path_entry.scan_id = snapshot.scan_id;
5023                path_entry.is_ignored = entry.is_ignored;
5024                entries_by_id_edits.push(Edit::Insert(path_entry));
5025                entries_by_path_edits.push(Edit::Insert(entry));
5026            }
5027        }
5028
5029        let state = &mut self.state.lock();
5030        for edit in &entries_by_path_edits {
5031            if let Edit::Insert(entry) = edit {
5032                if let Err(ix) = state.changed_paths.binary_search(&entry.path) {
5033                    state.changed_paths.insert(ix, entry.path.clone());
5034                }
5035            }
5036        }
5037
5038        state
5039            .snapshot
5040            .entries_by_path
5041            .edit(entries_by_path_edits, &());
5042        state.snapshot.entries_by_id.edit(entries_by_id_edits, &());
5043    }
5044
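        /// Reloads git state for the given `.git` paths: registers newly discovered
        /// repositories, refreshes existing ones whose scan is stale, prunes repositories
        /// whose `.git` entries no longer exist, and recomputes their statuses.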
5045    async fn update_git_repositories(&self, dot_git_paths: Vec<PathBuf>) {
5046        log::debug!("reloading repositories: {dot_git_paths:?}");
5047
5048        let mut repo_updates = Vec::new();
5049        {
5050            let mut state = self.state.lock();
5051            let scan_id = state.snapshot.scan_id;
5052            for dot_git_dir in dot_git_paths {
5053                let existing_repository_entry =
5054                    state
5055                        .snapshot
5056                        .git_repositories
5057                        .iter()
5058                        .find_map(|(entry_id, repo)| {
5059                            if repo.dot_git_dir_abs_path.as_ref() == &dot_git_dir
5060                                || repo.dot_git_worktree_abs_path.as_deref() == Some(&dot_git_dir)
5061                            {
5062                                Some((*entry_id, repo.clone()))
5063                            } else {
5064                                None
5065                            }
5066                        });
5067
5068                let local_repository = match existing_repository_entry {
5069                    None => {
5070                        match state.insert_git_repository(
5071                            dot_git_dir.into(),
5072                            self.fs.as_ref(),
5073                            self.watcher.as_ref(),
5074                        ) {
5075                            Some(output) => output,
5076                            None => continue,
5077                        }
5078                    }
5079                    Some((entry_id, local_repository)) => {
5080                        if local_repository.git_dir_scan_id == scan_id {
5081                            continue;
5082                        }
5083                        let Some(work_dir) = state
5084                            .snapshot
5085                            .entry_for_id(entry_id)
5086                            .map(|entry| entry.path.clone())
5087                        else {
5088                            continue;
5089                        };
5090
5091                        let branch = local_repository.repo_ptr.branch_name();
5092                        local_repository.repo_ptr.reload_index();
5093
5094                        state.snapshot.git_repositories.update(&entry_id, |entry| {
5095                            entry.git_dir_scan_id = scan_id;
5096                            entry.status_scan_id = scan_id;
5097                        });
5098                        state.snapshot.snapshot.repositories.update(
5099                            &PathKey(work_dir.clone()),
5100                            &(),
5101                            |entry| entry.branch = branch.map(Into::into),
5102                        );
5103
5104                        local_repository
5105                    }
5106                };
5107
5108                repo_updates.push(UpdateGitStatusesJob { local_repository });
5109            }
5110
5111            // Remove any git repositories whose .git entry no longer exists.
5112            let snapshot = &mut state.snapshot;
5113            let mut ids_to_preserve = HashSet::default();
5114            for (&work_directory_id, entry) in snapshot.git_repositories.iter() {
5115                let exists_in_snapshot = snapshot
5116                    .entry_for_id(work_directory_id)
5117                    .map_or(false, |entry| {
5118                        snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some()
5119                    });
5120
5121                if exists_in_snapshot
5122                    || matches!(
5123                        smol::block_on(self.fs.metadata(&entry.dot_git_dir_abs_path)),
5124                        Ok(Some(_))
5125                    )
5126                {
5127                    ids_to_preserve.insert(work_directory_id);
5128                }
5129            }
5130
5131            snapshot
5132                .git_repositories
5133                .retain(|work_directory_id, _| ids_to_preserve.contains(work_directory_id));
5134            snapshot.repositories.retain(&(), |entry| {
5135                ids_to_preserve.contains(&entry.work_directory_id)
5136            });
5137        }
5138
5139        let (mut updates_done_tx, mut updates_done_rx) = barrier::channel();
5140        self.executor
5141            .scoped(|scope| {
5142                scope.spawn(async {
5143                    for repo_update in repo_updates {
5144                        self.update_git_statuses(repo_update);
5145                    }
5146                    updates_done_tx.blocking_send(()).ok();
5147                });
5148
5149                scope.spawn(async {
5150                    loop {
5151                        select_biased! {
5152                            // Process any path refresh requests before moving on to process
5153                            // the queue of git statuses.
5154                            request = self.next_scan_request().fuse() => {
5155                                let Ok(request) = request else { break };
5156                                if !self.process_scan_request(request, true).await {
5157                                    return;
5158                                }
5159                            }
5160                            _ = updates_done_rx.recv().fuse() => break,
5161                        }
5162                    }
5163                });
5164            })
5165            .await;
5166    }
5167
5168    /// Update the git statuses for a given batch of entries.
5169    fn update_git_statuses(&self, job: UpdateGitStatusesJob) {
5170        log::trace!(
5171            "updating git statuses for repo {:?}",
5172            job.local_repository.work_directory.path
5173        );
5174        let t0 = Instant::now();
5175
5176        let Some(statuses) = job
5177            .local_repository
5178            .repo()
5179            .status(&[git::WORK_DIRECTORY_REPO_PATH.clone()])
5180            .log_err()
5181        else {
5182            return;
5183        };
5184        log::trace!(
5185            "computed git statuses for repo {:?} in {:?}",
5186            job.local_repository.work_directory.path,
5187            t0.elapsed()
5188        );
5189
5190        let t0 = Instant::now();
5191        let mut changed_paths = Vec::new();
5192        let snapshot = self.state.lock().snapshot.snapshot.clone();
5193
5194        let Some(mut repository) =
5195            snapshot.repository(job.local_repository.work_directory.path_key())
5196        else {
5197            log::error!("Got an UpdateGitStatusesJob for a repository that isn't in the snapshot");
5198            debug_assert!(false);
5199            return;
5200        };
5201
5202        let mut new_entries_by_path = SumTree::new(&());
5203        for (repo_path, status) in statuses.entries.iter() {
5204            let project_path = repository.work_directory.unrelativize(repo_path);
5205
5206            new_entries_by_path.insert_or_replace(
5207                StatusEntry {
5208                    repo_path: repo_path.clone(),
5209                    status: *status,
5210                },
5211                &(),
5212            );
5213
5214            if let Some(path) = project_path {
5215                changed_paths.push(path);
5216            }
5217        }
5218
5219        repository.statuses_by_path = new_entries_by_path;
5220        let mut state = self.state.lock();
5221        state
5222            .snapshot
5223            .repositories
5224            .insert_or_replace(repository, &());
5225
5226        util::extend_sorted(
5227            &mut state.changed_paths,
5228            changed_paths,
5229            usize::MAX,
5230            Ord::cmp,
5231        );
5232
5233        log::trace!(
5234            "applied git status updates for repo {:?} in {:?}",
5235            job.local_repository.work_directory.path,
5236            t0.elapsed(),
5237        );
5238    }
5239
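        /// Compares the old and new snapshots around each changed path and produces the
        /// set of added, removed, updated, and newly loaded entries.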
5240    fn build_change_set(
5241        &self,
5242        old_snapshot: &Snapshot,
5243        new_snapshot: &Snapshot,
5244        event_paths: &[Arc<Path>],
5245    ) -> UpdatedEntriesSet {
5246        use BackgroundScannerPhase::*;
5247        use PathChange::{Added, AddedOrUpdated, Loaded, Removed, Updated};
5248
5249        // Identify which paths have changed. Use the known set of changed
5250        // parent paths to optimize the search.
5251        let mut changes = Vec::new();
5252        let mut old_paths = old_snapshot.entries_by_path.cursor::<PathKey>(&());
5253        let mut new_paths = new_snapshot.entries_by_path.cursor::<PathKey>(&());
5254        let mut last_newly_loaded_dir_path = None;
5255        old_paths.next(&());
5256        new_paths.next(&());
5257        for path in event_paths {
5258            let path = PathKey(path.clone());
5259            if old_paths.item().map_or(false, |e| e.path < path.0) {
5260                old_paths.seek_forward(&path, Bias::Left, &());
5261            }
5262            if new_paths.item().map_or(false, |e| e.path < path.0) {
5263                new_paths.seek_forward(&path, Bias::Left, &());
5264            }
5265            loop {
5266                match (old_paths.item(), new_paths.item()) {
5267                    (Some(old_entry), Some(new_entry)) => {
5268                        if old_entry.path > path.0
5269                            && new_entry.path > path.0
5270                            && !old_entry.path.starts_with(&path.0)
5271                            && !new_entry.path.starts_with(&path.0)
5272                        {
5273                            break;
5274                        }
5275
5276                        match Ord::cmp(&old_entry.path, &new_entry.path) {
5277                            Ordering::Less => {
5278                                changes.push((old_entry.path.clone(), old_entry.id, Removed));
5279                                old_paths.next(&());
5280                            }
5281                            Ordering::Equal => {
5282                                if self.phase == EventsReceivedDuringInitialScan {
5283                                    if old_entry.id != new_entry.id {
5284                                        changes.push((
5285                                            old_entry.path.clone(),
5286                                            old_entry.id,
5287                                            Removed,
5288                                        ));
5289                                    }
5290                                    // If the worktree was not fully initialized when this event was generated,
5291                                    // we can't know whether this entry was added during the scan or whether
5292                                    // it was merely updated.
5293                                    changes.push((
5294                                        new_entry.path.clone(),
5295                                        new_entry.id,
5296                                        AddedOrUpdated,
5297                                    ));
5298                                } else if old_entry.id != new_entry.id {
5299                                    changes.push((old_entry.path.clone(), old_entry.id, Removed));
5300                                    changes.push((new_entry.path.clone(), new_entry.id, Added));
5301                                } else if old_entry != new_entry {
5302                                    if old_entry.kind.is_unloaded() {
5303                                        last_newly_loaded_dir_path = Some(&new_entry.path);
5304                                        changes.push((
5305                                            new_entry.path.clone(),
5306                                            new_entry.id,
5307                                            Loaded,
5308                                        ));
5309                                    } else {
5310                                        changes.push((
5311                                            new_entry.path.clone(),
5312                                            new_entry.id,
5313                                            Updated,
5314                                        ));
5315                                    }
5316                                }
5317                                old_paths.next(&());
5318                                new_paths.next(&());
5319                            }
5320                            Ordering::Greater => {
5321                                let is_newly_loaded = self.phase == InitialScan
5322                                    || last_newly_loaded_dir_path
5323                                        .as_ref()
5324                                        .map_or(false, |dir| new_entry.path.starts_with(dir));
5325                                changes.push((
5326                                    new_entry.path.clone(),
5327                                    new_entry.id,
5328                                    if is_newly_loaded { Loaded } else { Added },
5329                                ));
5330                                new_paths.next(&());
5331                            }
5332                        }
5333                    }
5334                    (Some(old_entry), None) => {
5335                        changes.push((old_entry.path.clone(), old_entry.id, Removed));
5336                        old_paths.next(&());
5337                    }
5338                    (None, Some(new_entry)) => {
5339                        let is_newly_loaded = self.phase == InitialScan
5340                            || last_newly_loaded_dir_path
5341                                .as_ref()
5342                                .map_or(false, |dir| new_entry.path.starts_with(dir));
5343                        changes.push((
5344                            new_entry.path.clone(),
5345                            new_entry.id,
5346                            if is_newly_loaded { Loaded } else { Added },
5347                        ));
5348                        new_paths.next(&());
5349                    }
5350                    (None, None) => break,
5351                }
5352            }
5353        }
5354
5355        changes.into()
5356    }
5357
5358    async fn progress_timer(&self, running: bool) {
5359        if !running {
5360            return futures::future::pending().await;
5361        }
5362
5363        #[cfg(any(test, feature = "test-support"))]
5364        if self.fs.is_fake() {
5365            return self.executor.simulate_random_delay().await;
5366        }
5367
5368        smol::Timer::after(FS_WATCH_LATENCY).await;
5369    }
5370
5371    fn is_path_private(&self, path: &Path) -> bool {
5372        !self.share_private_files && self.settings.is_path_private(path)
5373    }
5374
5375    async fn next_scan_request(&self) -> Result<ScanRequest> {
5376        let mut request = self.scan_requests_rx.recv().await?;
5377        while let Ok(next_request) = self.scan_requests_rx.try_recv() {
5378            request.relative_paths.extend(next_request.relative_paths);
5379            request.done.extend(next_request.done);
5380        }
5381        Ok(request)
5382    }
5383}
5384
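    /// Moves the child path with the given file name (if present) to the front of the list.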
5385fn swap_to_front(child_paths: &mut Vec<PathBuf>, file: &OsStr) {
5386    let position = child_paths
5387        .iter()
5388        .position(|path| path.file_name().unwrap() == file);
5389    if let Some(position) = position {
5390        let temp = child_paths.remove(position);
5391        child_paths.insert(0, temp);
5392    }
5393}
5394
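    /// Extends the root's character bag with the lowercased characters of the given path,
    /// for use in fuzzy matching.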
5395fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
5396    let mut result = root_char_bag;
5397    result.extend(
5398        path.to_string_lossy()
5399            .chars()
5400            .map(|c| c.to_ascii_lowercase()),
5401    );
5402    result
5403}
5404
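    /// A batch of repository-relative paths to query from a single git repository, along
    /// with that repository's entry and handle.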
5405#[derive(Debug)]
5406struct RepoPaths {
5407    repo: Arc<dyn GitRepository>,
5408    entry: RepositoryEntry,
5409    // sorted
5410    repo_paths: Vec<RepoPath>,
5411}
5412
5413impl RepoPaths {
5414    fn add_path(&mut self, repo_path: RepoPath) {
5415        match self.repo_paths.binary_search(&repo_path) {
5416            Ok(_) => {}
5417            Err(ix) => self.repo_paths.insert(ix, repo_path),
5418        }
5419    }
5420
5421    fn remove_repo_path(&mut self, repo_path: &RepoPath) {
5422        match self.repo_paths.binary_search(repo_path) {
5423            Ok(ix) => {
5424                self.repo_paths.remove(ix);
5425            }
5426            Err(_) => {}
5427        }
5428    }
5429}
5430
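    /// A unit of directory-scanning work: the directory's absolute and worktree-relative
    /// paths, the ignore stack in effect for it, and the inodes of ancestor directories
    /// (used to detect symlink cycles).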
5431struct ScanJob {
5432    abs_path: Arc<Path>,
5433    path: Arc<Path>,
5434    ignore_stack: Arc<IgnoreStack>,
5435    scan_queue: Sender<ScanJob>,
5436    ancestor_inodes: TreeSet<u64>,
5437    is_external: bool,
5438}
5439
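    /// A request to re-evaluate ignore status for the directory at `abs_path` using the
    /// given ignore stack.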
5440struct UpdateIgnoreStatusJob {
5441    abs_path: Arc<Path>,
5442    ignore_stack: Arc<IgnoreStack>,
5443    ignore_queue: Sender<UpdateIgnoreStatusJob>,
5444    scan_queue: Sender<ScanJob>,
5445}
5446
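    /// A request to recompute git statuses for a single local repository.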
5447struct UpdateGitStatusesJob {
5448    local_repository: LocalRepositoryEntry,
5449}
5450
5451pub trait WorktreeModelHandle {
5452    #[cfg(any(test, feature = "test-support"))]
5453    fn flush_fs_events<'a>(
5454        &self,
5455        cx: &'a mut gpui::TestAppContext,
5456    ) -> futures::future::LocalBoxFuture<'a, ()>;
5457
5458    #[cfg(any(test, feature = "test-support"))]
5459    fn flush_fs_events_in_root_git_repository<'a>(
5460        &self,
5461        cx: &'a mut gpui::TestAppContext,
5462    ) -> futures::future::LocalBoxFuture<'a, ()>;
5463}
5464
5465impl WorktreeModelHandle for Entity<Worktree> {
5466    // The worktree's FS event stream sometimes delivers "redundant" events for FS changes that
5467    // occurred before the worktree was constructed. These events can cause the worktree to perform
5468    // extra directory scans, and emit extra scan-state notifications.
5469    //
5470    // This function mutates the worktree's directory and waits for those mutations to be picked up,
5471    // to ensure that all redundant FS events have already been processed.
5472    #[cfg(any(test, feature = "test-support"))]
5473    fn flush_fs_events<'a>(
5474        &self,
5475        cx: &'a mut gpui::TestAppContext,
5476    ) -> futures::future::LocalBoxFuture<'a, ()> {
5477        let file_name = "fs-event-sentinel";
5478
5479        let tree = self.clone();
5480        let (fs, root_path) = self.update(cx, |tree, _| {
5481            let tree = tree.as_local().unwrap();
5482            (tree.fs.clone(), tree.abs_path().clone())
5483        });
5484
5485        async move {
5486            fs.create_file(&root_path.join(file_name), Default::default())
5487                .await
5488                .unwrap();
5489
5490            cx.condition(&tree, |tree, _| tree.entry_for_path(file_name).is_some())
5491                .await;
5492
5493            fs.remove_file(&root_path.join(file_name), Default::default())
5494                .await
5495                .unwrap();
5496            cx.condition(&tree, |tree, _| tree.entry_for_path(file_name).is_none())
5497                .await;
5498
5499            cx.update(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5500                .await;
5501        }
5502        .boxed_local()
5503    }
5504
5505    // This function is similar to flush_fs_events, except that it waits for events to be flushed in
5506    // the .git folder of the root repository.
5507    // The reason for its existence is that a repository's .git folder might live *outside* of the
5508    // worktree and thus its FS events might go through a different path.
5509    // In order to flush those, we need to create artificial events in the .git folder and wait
5510    // for the repository to be reloaded.
5511    #[cfg(any(test, feature = "test-support"))]
5512    fn flush_fs_events_in_root_git_repository<'a>(
5513        &self,
5514        cx: &'a mut gpui::TestAppContext,
5515    ) -> futures::future::LocalBoxFuture<'a, ()> {
5516        let file_name = "fs-event-sentinel";
5517
5518        let tree = self.clone();
5519        let (fs, root_path, mut git_dir_scan_id) = self.update(cx, |tree, _| {
5520            let tree = tree.as_local().unwrap();
5521            let root_entry = tree.root_git_entry().unwrap();
5522            let local_repo_entry = tree.get_local_repo(&root_entry).unwrap();
5523            (
5524                tree.fs.clone(),
5525                local_repo_entry.dot_git_dir_abs_path.clone(),
5526                local_repo_entry.git_dir_scan_id,
5527            )
5528        });
5529
5530        let scan_id_increased = |tree: &mut Worktree, git_dir_scan_id: &mut usize| {
5531            let root_entry = tree.root_git_entry().unwrap();
5532            let local_repo_entry = tree
5533                .as_local()
5534                .unwrap()
5535                .get_local_repo(&root_entry)
5536                .unwrap();
5537
5538            if local_repo_entry.git_dir_scan_id > *git_dir_scan_id {
5539                *git_dir_scan_id = local_repo_entry.git_dir_scan_id;
5540                true
5541            } else {
5542                false
5543            }
5544        };
5545
5546        async move {
5547            fs.create_file(&root_path.join(file_name), Default::default())
5548                .await
5549                .unwrap();
5550
5551            cx.condition(&tree, |tree, _| {
5552                scan_id_increased(tree, &mut git_dir_scan_id)
5553            })
5554            .await;
5555
5556            fs.remove_file(&root_path.join(file_name), Default::default())
5557                .await
5558                .unwrap();
5559
5560            cx.condition(&tree, |tree, _| {
5561                scan_id_increased(tree, &mut git_dir_scan_id)
5562            })
5563            .await;
5564
5565            cx.update(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5566                .await;
5567        }
5568        .boxed_local()
5569    }
5570}
5571
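    /// A sum-tree dimension tracking entry counts (total, non-ignored, files) up to a
    /// given path, used to seek and count entries during traversal.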
5572#[derive(Clone, Debug)]
5573struct TraversalProgress<'a> {
5574    max_path: &'a Path,
5575    count: usize,
5576    non_ignored_count: usize,
5577    file_count: usize,
5578    non_ignored_file_count: usize,
5579}
5580
5581impl<'a> TraversalProgress<'a> {
5582    fn count(&self, include_files: bool, include_dirs: bool, include_ignored: bool) -> usize {
5583        match (include_files, include_dirs, include_ignored) {
5584            (true, true, true) => self.count,
5585            (true, true, false) => self.non_ignored_count,
5586            (true, false, true) => self.file_count,
5587            (true, false, false) => self.non_ignored_file_count,
5588            (false, true, true) => self.count - self.file_count,
5589            (false, true, false) => self.non_ignored_count - self.non_ignored_file_count,
5590            (false, false, _) => 0,
5591        }
5592    }
5593}
5594
5595impl<'a> sum_tree::Dimension<'a, EntrySummary> for TraversalProgress<'a> {
5596    fn zero(_cx: &()) -> Self {
5597        Default::default()
5598    }
5599
5600    fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
5601        self.max_path = summary.max_path.as_ref();
5602        self.count += summary.count;
5603        self.non_ignored_count += summary.non_ignored_count;
5604        self.file_count += summary.file_count;
5605        self.non_ignored_file_count += summary.non_ignored_file_count;
5606    }
5607}
5608
5609impl<'a> Default for TraversalProgress<'a> {
5610    fn default() -> Self {
5611        Self {
5612            max_path: Path::new(""),
5613            count: 0,
5614            non_ignored_count: 0,
5615            file_count: 0,
5616            non_ignored_file_count: 0,
5617        }
5618    }
5619}
5620
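    /// A borrowed worktree entry paired with the git status summary for its path.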
5621#[derive(Debug, Clone, Copy)]
5622pub struct GitEntryRef<'a> {
5623    pub entry: &'a Entry,
5624    pub git_summary: GitSummary,
5625}
5626
5627impl<'a> GitEntryRef<'a> {
5628    pub fn to_owned(&self) -> GitEntry {
5629        GitEntry {
5630            entry: self.entry.clone(),
5631            git_summary: self.git_summary,
5632        }
5633    }
5634}
5635
5636impl<'a> Deref for GitEntryRef<'a> {
5637    type Target = Entry;
5638
5639    fn deref(&self) -> &Self::Target {
5640        &self.entry
5641    }
5642}
5643
5644impl<'a> AsRef<Entry> for GitEntryRef<'a> {
5645    fn as_ref(&self) -> &Entry {
5646        self.entry
5647    }
5648}
5649
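    /// An owned worktree entry paired with the git status summary for its path.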
5650#[derive(Debug, Clone, PartialEq, Eq)]
5651pub struct GitEntry {
5652    pub entry: Entry,
5653    pub git_summary: GitSummary,
5654}
5655
5656impl GitEntry {
5657    pub fn to_ref(&self) -> GitEntryRef {
5658        GitEntryRef {
5659            entry: &self.entry,
5660            git_summary: self.git_summary,
5661        }
5662    }
5663}
5664
5665impl Deref for GitEntry {
5666    type Target = Entry;
5667
5668    fn deref(&self) -> &Self::Target {
5669        &self.entry
5670    }
5671}
5672
5673impl AsRef<Entry> for GitEntry {
5674    fn as_ref(&self) -> &Entry {
5675        &self.entry
5676    }
5677}
5678
5679/// Walks the worktree entries and their associated git statuses.
5680pub struct GitTraversal<'a> {
5681    traversal: Traversal<'a>,
5682    current_entry_summary: Option<GitSummary>,
5683    repo_location: Option<(
5684        &'a RepositoryEntry,
5685        Cursor<'a, StatusEntry, PathProgress<'a>>,
5686    )>,
5687}
5688
5689impl<'a> GitTraversal<'a> {
5690    fn synchronize_statuses(&mut self, reset: bool) {
5691        self.current_entry_summary = None;
5692
5693        let Some(entry) = self.traversal.cursor.item() else {
5694            return;
5695        };
5696
5697        let Some(repo) = self.traversal.snapshot.repository_for_path(&entry.path) else {
5698            self.repo_location = None;
5699            return;
5700        };
5701
5702        // Update our state if we changed repositories.
5703        if reset || self.repo_location.as_ref().map(|(prev_repo, _)| prev_repo) != Some(&repo) {
5704            self.repo_location = Some((repo, repo.statuses_by_path.cursor::<PathProgress>(&())));
5705        }
5706
5707        let Some((repo, statuses)) = &mut self.repo_location else {
5708            return;
5709        };
5710
5711        let repo_path = repo.relativize(&entry.path).unwrap();
5712
5713        if entry.is_dir() {
5714            let mut statuses = statuses.clone();
5715            statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &());
5716            let summary =
5717                statuses.summary(&PathTarget::Successor(repo_path.as_ref()), Bias::Left, &());
5718
5719            self.current_entry_summary = Some(summary);
5720        } else if entry.is_file() {
5721            // For a file entry, park the cursor on the file's own status entry, if one exists.
5722            if statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &()) {
5723                // TODO: Investigate why statuses.item() can be None here even though the seek succeeded.
5724                self.current_entry_summary = statuses.item().map(|item| item.status.into());
5725            } else {
5726                self.current_entry_summary = Some(GitSummary::UNCHANGED);
5727            }
5728        }
5729    }
5730
5731    pub fn advance(&mut self) -> bool {
5732        self.advance_by(1)
5733    }
5734
5735    pub fn advance_by(&mut self, count: usize) -> bool {
5736        let found = self.traversal.advance_by(count);
5737        self.synchronize_statuses(false);
5738        found
5739    }
5740
5741    pub fn advance_to_sibling(&mut self) -> bool {
5742        let found = self.traversal.advance_to_sibling();
5743        self.synchronize_statuses(false);
5744        found
5745    }
5746
5747    pub fn back_to_parent(&mut self) -> bool {
5748        let found = self.traversal.back_to_parent();
5749        self.synchronize_statuses(true);
5750        found
5751    }
5752
5753    pub fn start_offset(&self) -> usize {
5754        self.traversal.start_offset()
5755    }
5756
5757    pub fn end_offset(&self) -> usize {
5758        self.traversal.end_offset()
5759    }
5760
5761    pub fn entry(&self) -> Option<GitEntryRef<'a>> {
5762        let entry = self.traversal.cursor.item()?;
5763        let git_summary = self.current_entry_summary.unwrap_or(GitSummary::UNCHANGED);
5764        Some(GitEntryRef { entry, git_summary })
5765    }
5766}
5767
5768impl<'a> Iterator for GitTraversal<'a> {
5769    type Item = GitEntryRef<'a>;
5770    fn next(&mut self) -> Option<Self::Item> {
5771        if let Some(item) = self.entry() {
5772            self.advance();
5773            Some(item)
5774        } else {
5775            None
5776        }
5777    }
5778}
5779
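/// A cursor-based, path-ordered walk over the entries of a worktree [`Snapshot`], optionally
/// filtered to include files, directories, and/or ignored entries.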
5780#[derive(Debug)]
5781pub struct Traversal<'a> {
5782    snapshot: &'a Snapshot,
5783    cursor: sum_tree::Cursor<'a, Entry, TraversalProgress<'a>>,
5784    include_ignored: bool,
5785    include_files: bool,
5786    include_dirs: bool,
5787}
5788
5789impl<'a> Traversal<'a> {
5790    fn new(
5791        snapshot: &'a Snapshot,
5792        include_files: bool,
5793        include_dirs: bool,
5794        include_ignored: bool,
5795        start_path: &Path,
5796    ) -> Self {
5797        let mut cursor = snapshot.entries_by_path.cursor(&());
5798        cursor.seek(&TraversalTarget::path(start_path), Bias::Left, &());
5799        let mut traversal = Self {
5800            snapshot,
5801            cursor,
5802            include_files,
5803            include_dirs,
5804            include_ignored,
5805        };
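        // If the entry the seek landed on does not match the traversal's filters, it
        // contributes nothing to the filtered count; skip ahead to the first entry that does.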
5806        if traversal.end_offset() == traversal.start_offset() {
5807            traversal.next();
5808        }
5809        traversal
5810    }
5811
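    /// Converts this traversal into a [`GitTraversal`] that pairs each yielded entry with its
    /// aggregated [`GitSummary`].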
5812    pub fn with_git_statuses(self) -> GitTraversal<'a> {
5813        let mut this = GitTraversal {
5814            traversal: self,
5815            current_entry_summary: None,
5816            repo_location: None,
5817        };
5818        this.synchronize_statuses(true);
5819        this
5820    }
5821
5822    pub fn advance(&mut self) -> bool {
5823        self.advance_by(1)
5824    }
5825
5826    pub fn advance_by(&mut self, count: usize) -> bool {
5827        self.cursor.seek_forward(
5828            &TraversalTarget::Count {
5829                count: self.end_offset() + count,
5830                include_dirs: self.include_dirs,
5831                include_files: self.include_files,
5832                include_ignored: self.include_ignored,
5833            },
5834            Bias::Left,
5835            &(),
5836        )
5837    }
5838
5839    pub fn advance_to_sibling(&mut self) -> bool {
5840        while let Some(entry) = self.cursor.item() {
5841            self.cursor
5842                .seek_forward(&TraversalTarget::successor(&entry.path), Bias::Left, &());
5843            if let Some(entry) = self.cursor.item() {
5844                if (self.include_files || !entry.is_file())
5845                    && (self.include_dirs || !entry.is_dir())
5846                    && (self.include_ignored || !entry.is_ignored || entry.is_always_included)
5847                {
5848                    return true;
5849                }
5850            }
5851        }
5852        false
5853    }
5854
5855    pub fn back_to_parent(&mut self) -> bool {
5856        let Some(parent_path) = self.cursor.item().and_then(|entry| entry.path.parent()) else {
5857            return false;
5858        };
5859        self.cursor
5860            .seek(&TraversalTarget::path(parent_path), Bias::Left, &())
5861    }
5862
5863    pub fn entry(&self) -> Option<&'a Entry> {
5864        self.cursor.item()
5865    }
5866
5867    pub fn start_offset(&self) -> usize {
5868        self.cursor
5869            .start()
5870            .count(self.include_files, self.include_dirs, self.include_ignored)
5871    }
5872
5873    pub fn end_offset(&self) -> usize {
5874        self.cursor
5875            .end(&())
5876            .count(self.include_files, self.include_dirs, self.include_ignored)
5877    }
5878}
5879
5880impl<'a> Iterator for Traversal<'a> {
5881    type Item = &'a Entry;
5882
5883    fn next(&mut self) -> Option<Self::Item> {
5884        if let Some(item) = self.entry() {
5885            self.advance();
5886            Some(item)
5887        } else {
5888            None
5889        }
5890    }
5891}
5892
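/// A path-based seek target.
///
/// `Path` compares against an exact path, `Successor` orders after a path and all of its
/// descendants, and `Contains` matches paths that are ancestors of (or equal to) the target.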
5893#[derive(Debug, Clone, Copy)]
5894enum PathTarget<'a> {
5895    Path(&'a Path),
5896    Successor(&'a Path),
5897    Contains(&'a Path),
5898}
5899
5900impl<'a> PathTarget<'a> {
5901    fn cmp_path(&self, other: &Path) -> Ordering {
5902        match self {
5903            PathTarget::Path(path) => path.cmp(&other),
5904            PathTarget::Successor(path) => {
5905                if other.starts_with(path) {
5906                    Ordering::Greater
5907                } else {
5908                    Ordering::Equal
5909                }
5910            }
5911            PathTarget::Contains(path) => {
5912                if path.starts_with(other) {
5913                    Ordering::Equal
5914                } else {
5915                    Ordering::Greater
5916                }
5917            }
5918        }
5919    }
5920}
5921
5922impl<'a, 'b, S: Summary> SeekTarget<'a, PathSummary<S>, PathProgress<'a>> for PathTarget<'b> {
5923    fn cmp(&self, cursor_location: &PathProgress<'a>, _: &S::Context) -> Ordering {
5924        self.cmp_path(&cursor_location.max_path)
5925    }
5926}
5927
5928impl<'a, 'b, S: Summary> SeekTarget<'a, PathSummary<S>, TraversalProgress<'a>> for PathTarget<'b> {
5929    fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &S::Context) -> Ordering {
5930        self.cmp_path(&cursor_location.max_path)
5931    }
5932}
5933
5934impl<'a, 'b> SeekTarget<'a, PathSummary<GitSummary>, (TraversalProgress<'a>, GitSummary)>
5935    for PathTarget<'b>
5936{
5937    fn cmp(&self, cursor_location: &(TraversalProgress<'a>, GitSummary), _: &()) -> Ordering {
5938        self.cmp_path(&cursor_location.0.max_path)
5939    }
5940}
5941
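/// A seek target for [`Traversal`]: either a path-based target or an absolute entry count,
/// measured under the traversal's file/directory/ignored filters.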
5942#[derive(Debug)]
5943enum TraversalTarget<'a> {
5944    Path(PathTarget<'a>),
5945    Count {
5946        count: usize,
5947        include_files: bool,
5948        include_ignored: bool,
5949        include_dirs: bool,
5950    },
5951}
5952
5953impl<'a> TraversalTarget<'a> {
5954    fn path(path: &'a Path) -> Self {
5955        Self::Path(PathTarget::Path(path))
5956    }
5957
5958    fn successor(path: &'a Path) -> Self {
5959        Self::Path(PathTarget::Successor(path))
5960    }
5961
5962    fn cmp_progress(&self, progress: &TraversalProgress) -> Ordering {
5963        match self {
5964            TraversalTarget::Path(path) => path.cmp_path(&progress.max_path),
5965            TraversalTarget::Count {
5966                count,
5967                include_files,
5968                include_dirs,
5969                include_ignored,
5970            } => Ord::cmp(
5971                count,
5972                &progress.count(*include_files, *include_dirs, *include_ignored),
5973            ),
5974        }
5975    }
5976}
5977
5978impl<'a, 'b> SeekTarget<'a, EntrySummary, TraversalProgress<'a>> for TraversalTarget<'b> {
5979    fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &()) -> Ordering {
5980        self.cmp_progress(cursor_location)
5981    }
5982}
5983
5984impl<'a, 'b> SeekTarget<'a, PathSummary<Unit>, TraversalProgress<'a>> for TraversalTarget<'b> {
5985    fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &()) -> Ordering {
5986        self.cmp_progress(cursor_location)
5987    }
5988}
5989
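/// Iterates over the immediate children of a directory entry, advancing to the next sibling
/// after each yielded child so that grandchildren are skipped.
///
/// A minimal sketch (illustrative only, not compiled as a doctest; it assumes a
/// `Snapshot::child_entries(parent_path)` helper constructs this iterator):
///
/// ```ignore
/// for child in snapshot.child_entries(Path::new("src")) {
///     println!("{:?}", child.path);
/// }
/// ```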
5990pub struct ChildEntriesIter<'a> {
5991    parent_path: &'a Path,
5992    traversal: Traversal<'a>,
5993}
5994
5995impl<'a> ChildEntriesIter<'a> {
5996    pub fn with_git_statuses(self) -> ChildEntriesGitIter<'a> {
5997        ChildEntriesGitIter {
5998            parent_path: self.parent_path,
5999            traversal: self.traversal.with_git_statuses(),
6000        }
6001    }
6002}
6003
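/// Like [`ChildEntriesIter`], but each child entry is yielded together with its git status
/// summary.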
6004pub struct ChildEntriesGitIter<'a> {
6005    parent_path: &'a Path,
6006    traversal: GitTraversal<'a>,
6007}
6008
6009impl<'a> Iterator for ChildEntriesIter<'a> {
6010    type Item = &'a Entry;
6011
6012    fn next(&mut self) -> Option<Self::Item> {
6013        if let Some(item) = self.traversal.entry() {
6014            if item.path.starts_with(self.parent_path) {
6015                self.traversal.advance_to_sibling();
6016                return Some(item);
6017            }
6018        }
6019        None
6020    }
6021}
6022
6023impl<'a> Iterator for ChildEntriesGitIter<'a> {
6024    type Item = GitEntryRef<'a>;
6025
6026    fn next(&mut self) -> Option<Self::Item> {
6027        if let Some(item) = self.traversal.entry() {
6028            if item.path.starts_with(self.parent_path) {
6029                self.traversal.advance_to_sibling();
6030                return Some(item);
6031            }
6032        }
6033        None
6034    }
6035}
6036
6037impl<'a> From<&'a Entry> for proto::Entry {
6038    fn from(entry: &'a Entry) -> Self {
6039        Self {
6040            id: entry.id.to_proto(),
6041            is_dir: entry.is_dir(),
6042            path: entry.path.to_string_lossy().into(),
6043            inode: entry.inode,
6044            mtime: entry.mtime.map(|time| time.into()),
6045            is_ignored: entry.is_ignored,
6046            is_external: entry.is_external,
6047            is_fifo: entry.is_fifo,
6048            size: Some(entry.size),
6049            canonical_path: entry
6050                .canonical_path
6051                .as_ref()
6052                .map(|path| path.to_string_lossy().to_string()),
6053        }
6054    }
6055}
6056
6057impl<'a> TryFrom<(&'a CharBag, &PathMatcher, proto::Entry)> for Entry {
6058    type Error = anyhow::Error;
6059
6060    fn try_from(
6061        (root_char_bag, always_included, entry): (&'a CharBag, &PathMatcher, proto::Entry),
6062    ) -> Result<Self> {
6063        let kind = if entry.is_dir {
6064            EntryKind::Dir
6065        } else {
6066            EntryKind::File
6067        };
6068        let path: Arc<Path> = PathBuf::from(entry.path).into();
6069        let char_bag = char_bag_for_path(*root_char_bag, &path);
6070        Ok(Entry {
6071            id: ProjectEntryId::from_proto(entry.id),
6072            kind,
6073            path: path.clone(),
6074            inode: entry.inode,
6075            mtime: entry.mtime.map(|time| time.into()),
6076            size: entry.size.unwrap_or(0),
6077            canonical_path: entry
6078                .canonical_path
6079                .map(|path_string| Box::from(Path::new(&path_string))),
6080            is_ignored: entry.is_ignored,
6081            is_always_included: always_included.is_match(path.as_ref()),
6082            is_external: entry.is_external,
6083            is_private: false,
6084            char_bag,
6085            is_fifo: entry.is_fifo,
6086        })
6087    }
6088}
6089
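/// Decodes a git file status from its protobuf representation.
///
/// When the structured `variant` is absent, the flat `simple_status` code is decoded instead.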
6090fn status_from_proto(
6091    simple_status: i32,
6092    status: Option<proto::GitFileStatus>,
6093) -> anyhow::Result<FileStatus> {
6094    use proto::git_file_status::Variant;
6095
6096    let Some(variant) = status.and_then(|status| status.variant) else {
6097        let code = proto::GitStatus::from_i32(simple_status)
6098            .ok_or_else(|| anyhow!("Invalid git status code: {simple_status}"))?;
6099        let result = match code {
6100            proto::GitStatus::Added => TrackedStatus {
6101                worktree_status: StatusCode::Added,
6102                index_status: StatusCode::Unmodified,
6103            }
6104            .into(),
6105            proto::GitStatus::Modified => TrackedStatus {
6106                worktree_status: StatusCode::Modified,
6107                index_status: StatusCode::Unmodified,
6108            }
6109            .into(),
6110            proto::GitStatus::Conflict => UnmergedStatus {
6111                first_head: UnmergedStatusCode::Updated,
6112                second_head: UnmergedStatusCode::Updated,
6113            }
6114            .into(),
6115            proto::GitStatus::Deleted => TrackedStatus {
6116                worktree_status: StatusCode::Deleted,
6117                index_status: StatusCode::Unmodified,
6118            }
6119            .into(),
6120            _ => return Err(anyhow!("Invalid code for simple status: {simple_status}")),
6121        };
6122        return Ok(result);
6123    };
6124
6125    let result = match variant {
6126        Variant::Untracked(_) => FileStatus::Untracked,
6127        Variant::Ignored(_) => FileStatus::Ignored,
6128        Variant::Unmerged(unmerged) => {
6129            let [first_head, second_head] =
6130                [unmerged.first_head, unmerged.second_head].map(|head| {
6131                    let code = proto::GitStatus::from_i32(head)
6132                        .ok_or_else(|| anyhow!("Invalid git status code: {head}"))?;
6133                    let result = match code {
6134                        proto::GitStatus::Added => UnmergedStatusCode::Added,
6135                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
6136                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
6137                        _ => return Err(anyhow!("Invalid code for unmerged status: {code:?}")),
6138                    };
6139                    Ok(result)
6140                });
6141            let [first_head, second_head] = [first_head?, second_head?];
6142            UnmergedStatus {
6143                first_head,
6144                second_head,
6145            }
6146            .into()
6147        }
6148        Variant::Tracked(tracked) => {
6149            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
6150                .map(|status| {
6151                    let code = proto::GitStatus::from_i32(status)
6152                        .ok_or_else(|| anyhow!("Invalid git status code: {status}"))?;
6153                    let result = match code {
6154                        proto::GitStatus::Modified => StatusCode::Modified,
6155                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
6156                        proto::GitStatus::Added => StatusCode::Added,
6157                        proto::GitStatus::Deleted => StatusCode::Deleted,
6158                        proto::GitStatus::Renamed => StatusCode::Renamed,
6159                        proto::GitStatus::Copied => StatusCode::Copied,
6160                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
6161                        _ => return Err(anyhow!("Invalid code for tracked status: {code:?}")),
6162                    };
6163                    Ok(result)
6164                });
6165            let [index_status, worktree_status] = [index_status?, worktree_status?];
6166            TrackedStatus {
6167                index_status,
6168                worktree_status,
6169            }
6170            .into()
6171        }
6172    };
6173    Ok(result)
6174}
6175
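/// Encodes a git file status into its structured protobuf representation.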
6176fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
6177    use proto::git_file_status::{Tracked, Unmerged, Variant};
6178
6179    let variant = match status {
6180        FileStatus::Untracked => Variant::Untracked(Default::default()),
6181        FileStatus::Ignored => Variant::Ignored(Default::default()),
6182        FileStatus::Unmerged(UnmergedStatus {
6183            first_head,
6184            second_head,
6185        }) => Variant::Unmerged(Unmerged {
6186            first_head: unmerged_status_to_proto(first_head),
6187            second_head: unmerged_status_to_proto(second_head),
6188        }),
6189        FileStatus::Tracked(TrackedStatus {
6190            index_status,
6191            worktree_status,
6192        }) => Variant::Tracked(Tracked {
6193            index_status: tracked_status_to_proto(index_status),
6194            worktree_status: tracked_status_to_proto(worktree_status),
6195        }),
6196    };
6197    proto::GitFileStatus {
6198        variant: Some(variant),
6199    }
6200}
6201
6202fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
6203    match code {
6204        UnmergedStatusCode::Added => proto::GitStatus::Added as _,
6205        UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
6206        UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
6207    }
6208}
6209
6210fn tracked_status_to_proto(code: StatusCode) -> i32 {
6211    match code {
6212        StatusCode::Added => proto::GitStatus::Added as _,
6213        StatusCode::Deleted => proto::GitStatus::Deleted as _,
6214        StatusCode::Modified => proto::GitStatus::Modified as _,
6215        StatusCode::Renamed => proto::GitStatus::Renamed as _,
6216        StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
6217        StatusCode::Copied => proto::GitStatus::Copied as _,
6218        StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
6219    }
6220}
6221
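/// An identifier for a worktree entry, allocated from a shared atomic counter so that ids
/// are unique across the entries of a project.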
6222#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
6223pub struct ProjectEntryId(usize);
6224
6225impl ProjectEntryId {
6226    pub const MAX: Self = Self(usize::MAX);
6227    pub const MIN: Self = Self(usize::MIN);
6228
6229    pub fn new(counter: &AtomicUsize) -> Self {
6230        Self(counter.fetch_add(1, SeqCst))
6231    }
6232
6233    pub fn from_proto(id: u64) -> Self {
6234        Self(id as usize)
6235    }
6236
6237    pub fn to_proto(&self) -> u64 {
6238        self.0 as u64
6239    }
6240
6241    pub fn to_usize(&self) -> usize {
6242        self.0
6243    }
6244}
6245
6246#[cfg(any(test, feature = "test-support"))]
6247impl CreatedEntry {
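    /// Test helper that returns the created [`Entry`] if it was included in the worktree,
    /// or `None` if it fell under exclusion filters.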
6248    pub fn to_included(self) -> Option<Entry> {
6249        match self {
6250            CreatedEntry::Included(entry) => Some(entry),
6251            CreatedEntry::Excluded { .. } => None,
6252        }
6253    }
6254}