worktree.rs

   1mod ignore;
   2mod worktree_settings;
   3#[cfg(test)]
   4mod worktree_tests;
   5
   6use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
   7use anyhow::{anyhow, Context as _, Result};
   8use clock::ReplicaId;
   9use collections::{HashMap, HashSet, VecDeque};
  10use fs::{copy_recursive, Fs, MTime, PathEvent, RemoveOptions, Watcher};
  11use futures::{
  12    channel::{
  13        mpsc::{self, UnboundedSender},
  14        oneshot,
  15    },
  16    select_biased,
  17    task::Poll,
  18    FutureExt as _, Stream, StreamExt,
  19};
  20use fuzzy::CharBag;
  21use git::{
  22    repository::{Branch, GitRepository, RepoPath},
  23    status::{
  24        FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
  25    },
  26    GitHostingProviderRegistry, COMMIT_MESSAGE, DOT_GIT, FSMONITOR_DAEMON, GITIGNORE, INDEX_LOCK,
  27};
  28use gpui::{
  29    App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, EventEmitter, Task,
  30};
  31use ignore::IgnoreStack;
  32use language::DiskState;
  33
  34use parking_lot::Mutex;
  35use paths::local_settings_folder_relative_path;
  36use postage::{
  37    barrier,
  38    prelude::{Sink as _, Stream as _},
  39    watch,
  40};
  41use rpc::{
  42    proto::{self, split_worktree_update, FromProto, ToProto},
  43    AnyProtoClient,
  44};
  45pub use settings::WorktreeId;
  46use settings::{Settings, SettingsLocation, SettingsStore};
  47use smallvec::{smallvec, SmallVec};
  48use smol::channel::{self, Sender};
  49use std::{
  50    any::Any,
  51    cmp::Ordering,
  52    collections::hash_map,
  53    convert::TryFrom,
  54    ffi::OsStr,
  55    fmt,
  56    future::Future,
  57    mem::{self},
  58    ops::{Deref, DerefMut},
  59    path::{Path, PathBuf},
  60    pin::Pin,
  61    sync::{
  62        atomic::{AtomicUsize, Ordering::SeqCst},
  63        Arc,
  64    },
  65    time::{Duration, Instant},
  66};
  67use sum_tree::{
  68    Bias, Cursor, Edit, KeyedItem, SeekTarget, SumTree, Summary, TreeMap, TreeSet, Unit,
  69};
  70use text::{LineEnding, Rope};
  71use util::{
  72    paths::{home_dir, PathMatcher, SanitizedPath},
  73    ResultExt,
  74};
  75pub use worktree_settings::WorktreeSettings;
  76
  77#[cfg(feature = "test-support")]
  78pub const FS_WATCH_LATENCY: Duration = Duration::from_millis(100);
  79#[cfg(not(feature = "test-support"))]
  80pub const FS_WATCH_LATENCY: Duration = Duration::from_millis(100);
  81
  82/// A set of local or remote files that are being opened as part of a project.
  83/// Responsible for tracking related FS (for local)/collab (for remote) events and corresponding updates.
  84/// Stores git repository data and diagnostics for its files.
  85///
  86/// Has an absolute path, and may be set to be visible in Zed UI or not.
  87/// May correspond to a directory or a single file.
  88/// Possible examples:
  89/// * a drag and dropped file — may be added as an invisible, "ephemeral" entry to the current worktree
  90/// * a directory opened in Zed — may be added as a visible entry to the current worktree
  91///
  92/// Uses [`Entry`] to track the state of each file/directory, can look up absolute paths for entries.
  93pub enum Worktree {
  94    Local(LocalWorktree),
  95    Remote(RemoteWorktree),
  96}
  97
  98/// An entry, created in the worktree.
  99#[derive(Debug)]
 100pub enum CreatedEntry {
 101    /// Created and indexed by the worktree, yielding the corresponding entry.
 102    Included(Entry),
 103    /// Created, but not indexed because it falls under the exclusion filters.
 104    Excluded { abs_path: PathBuf },
 105}
 106
 107pub struct LoadedFile {
 108    pub file: Arc<File>,
 109    pub text: String,
 110}
 111
 112pub struct LoadedBinaryFile {
 113    pub file: Arc<File>,
 114    pub content: Vec<u8>,
 115}
 116
 117pub struct LocalWorktree {
 118    snapshot: LocalSnapshot,
 119    scan_requests_tx: channel::Sender<ScanRequest>,
 120    path_prefixes_to_scan_tx: channel::Sender<PathPrefixScanRequest>,
 121    is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
 122    _background_scanner_tasks: Vec<Task<()>>,
 123    update_observer: Option<UpdateObservationState>,
 124    fs: Arc<dyn Fs>,
 125    fs_case_sensitive: bool,
 126    visible: bool,
 127    next_entry_id: Arc<AtomicUsize>,
 128    settings: WorktreeSettings,
 129    share_private_files: bool,
 130}
 131
 132pub struct PathPrefixScanRequest {
 133    path: Arc<Path>,
 134    done: SmallVec<[barrier::Sender; 1]>,
 135}
 136
 137struct ScanRequest {
 138    relative_paths: Vec<Arc<Path>>,
 139    done: SmallVec<[barrier::Sender; 1]>,
 140}
 141
 142pub struct RemoteWorktree {
 143    snapshot: Snapshot,
 144    background_snapshot: Arc<Mutex<(Snapshot, Vec<proto::UpdateWorktree>)>>,
 145    project_id: u64,
 146    client: AnyProtoClient,
 147    file_scan_inclusions: PathMatcher,
 148    updates_tx: Option<UnboundedSender<proto::UpdateWorktree>>,
 149    update_observer: Option<mpsc::UnboundedSender<proto::UpdateWorktree>>,
 150    snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>,
 151    replica_id: ReplicaId,
 152    visible: bool,
 153    disconnected: bool,
 154}
 155
 156#[derive(Clone)]
 157pub struct Snapshot {
 158    id: WorktreeId,
 159    abs_path: SanitizedPath,
 160    root_name: String,
 161    root_char_bag: CharBag,
 162    entries_by_path: SumTree<Entry>,
 163    entries_by_id: SumTree<PathEntry>,
 164    always_included_entries: Vec<Arc<Path>>,
 165    repositories: SumTree<RepositoryEntry>,
 166
 167    /// A number that increases every time the worktree begins scanning
 168    /// a set of paths from the filesystem. This scanning could be caused
 169    /// by some operation performed on the worktree, such as reading or
 170    /// writing a file, or by an event reported by the filesystem.
 171    scan_id: usize,
 172
 173    /// The latest scan id that has completed, and whose preceding scans
 174    /// have all completed. The current `scan_id` could be more than one
 175    /// greater than the `completed_scan_id` if operations are performed
 176    /// on the worktree while it is processing a file-system event.
 177    completed_scan_id: usize,
 178}
 179
 180#[derive(Debug, Clone, PartialEq, Eq)]
 181pub struct RepositoryEntry {
 182    /// The git status entries for this repository.
 183    /// Note that the paths in this repository entry are relative to the git work directory.
 184    /// If the .git folder is external to Zed, these paths will be relative to that folder,
 185    /// and this data structure might reference files external to this worktree.
 186    ///
 187    /// For example:
 188    ///
 189    ///     my_root_folder/          <-- repository root
 190    ///       .git
 191    ///       my_sub_folder_1/
 192    ///         project_root/        <-- Project root, Zed opened here
 193    ///           changed_file_1     <-- File with changes, in worktree
 194    ///       my_sub_folder_2/
 195    ///         changed_file_2       <-- File with changes, out of worktree
 196    ///           ...
 197    ///
 198    /// With this setup, this field would contain 2 entries, like so:
 199    ///     - my_sub_folder_1/project_root/changed_file_1
 200    ///     - my_sub_folder_2/changed_file_2
 201    pub(crate) statuses_by_path: SumTree<StatusEntry>,
 202    work_directory_id: ProjectEntryId,
 203    pub work_directory: WorkDirectory,
 204    pub(crate) branch: Option<Branch>,
 205    pub current_merge_conflicts: TreeSet<RepoPath>,
 206}
 207
 208impl Deref for RepositoryEntry {
 209    type Target = WorkDirectory;
 210
 211    fn deref(&self) -> &Self::Target {
 212        &self.work_directory
 213    }
 214}
 215
 216impl RepositoryEntry {
 217    pub fn branch(&self) -> Option<&Branch> {
 218        self.branch.as_ref()
 219    }
 220
 221    pub fn work_directory_id(&self) -> ProjectEntryId {
 222        self.work_directory_id
 223    }
 224
 225    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
 226        self.statuses_by_path.iter().cloned()
 227    }
 228
 229    pub fn status_len(&self) -> usize {
 230        self.statuses_by_path.summary().item_summary.count
 231    }
 232
 233    pub fn status_summary(&self) -> GitSummary {
 234        self.statuses_by_path.summary().item_summary
 235    }
 236
 237    pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
 238        self.statuses_by_path
 239            .get(&PathKey(path.0.clone()), &())
 240            .cloned()
 241    }
 242
 243    pub fn initial_update(&self) -> proto::RepositoryEntry {
 244        proto::RepositoryEntry {
 245            work_directory_id: self.work_directory_id.to_proto(),
 246            branch: self.branch.as_ref().map(|branch| branch.name.to_string()),
 247            branch_summary: self.branch.as_ref().map(branch_to_proto),
 248            updated_statuses: self
 249                .statuses_by_path
 250                .iter()
 251                .map(|entry| entry.to_proto())
 252                .collect(),
 253            removed_statuses: Default::default(),
 254            current_merge_conflicts: self
 255                .current_merge_conflicts
 256                .iter()
 257                .map(|repo_path| repo_path.to_proto())
 258                .collect(),
 259        }
 260    }
 261
 262    pub fn build_update(&self, old: &Self) -> proto::RepositoryEntry {
 263        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
 264        let mut removed_statuses: Vec<String> = Vec::new();
 265
 266        let mut new_statuses = self.statuses_by_path.iter().peekable();
 267        let mut old_statuses = old.statuses_by_path.iter().peekable();
 268
 269        let mut current_new_entry = new_statuses.next();
 270        let mut current_old_entry = old_statuses.next();
 271        loop {
 272            match (current_new_entry, current_old_entry) {
 273                (Some(new_entry), Some(old_entry)) => {
 274                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
 275                        Ordering::Less => {
 276                            updated_statuses.push(new_entry.to_proto());
 277                            current_new_entry = new_statuses.next();
 278                        }
 279                        Ordering::Equal => {
 280                            if new_entry.status != old_entry.status {
 281                                updated_statuses.push(new_entry.to_proto());
 282                            }
 283                            current_old_entry = old_statuses.next();
 284                            current_new_entry = new_statuses.next();
 285                        }
 286                        Ordering::Greater => {
 287                            removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
 288                            current_old_entry = old_statuses.next();
 289                        }
 290                    }
 291                }
 292                (None, Some(old_entry)) => {
 293                    removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
 294                    current_old_entry = old_statuses.next();
 295                }
 296                (Some(new_entry), None) => {
 297                    updated_statuses.push(new_entry.to_proto());
 298                    current_new_entry = new_statuses.next();
 299                }
 300                (None, None) => break,
 301            }
 302        }
 303
 304        proto::RepositoryEntry {
 305            work_directory_id: self.work_directory_id.to_proto(),
 306            branch: self.branch.as_ref().map(|branch| branch.name.to_string()),
 307            branch_summary: self.branch.as_ref().map(branch_to_proto),
 308            updated_statuses,
 309            removed_statuses,
 310            current_merge_conflicts: self
 311                .current_merge_conflicts
 312                .iter()
 313                .map(|path| path.as_ref().to_proto())
 314                .collect(),
 315        }
 316    }
 317}
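
// A self-contained sketch of the sorted merge walk that `build_update` performs
// above: two path-ordered sequences are traversed in lockstep, emitting updated
// entries for items that are new or changed and removed entries for items that
// disappeared. Plain `(&str, i32)` tuples stand in for `StatusEntry` here; this
// is illustrative only and is not used by the worktree itself.
#[cfg(test)]
mod build_update_merge_sketch {
    use std::cmp::Ordering;

    fn diff_sorted(new: &[(&str, i32)], old: &[(&str, i32)]) -> (Vec<(String, i32)>, Vec<String>) {
        let mut updated = Vec::new();
        let mut removed = Vec::new();
        let (mut i, mut j) = (0, 0);
        loop {
            match (new.get(i), old.get(j)) {
                (Some(n), Some(o)) => match n.0.cmp(o.0) {
                    // Only in `new`: report as updated.
                    Ordering::Less => {
                        updated.push((n.0.to_string(), n.1));
                        i += 1;
                    }
                    // In both: report only if the value changed.
                    Ordering::Equal => {
                        if n.1 != o.1 {
                            updated.push((n.0.to_string(), n.1));
                        }
                        i += 1;
                        j += 1;
                    }
                    // Only in `old`: report as removed.
                    Ordering::Greater => {
                        removed.push(o.0.to_string());
                        j += 1;
                    }
                },
                (Some(n), None) => {
                    updated.push((n.0.to_string(), n.1));
                    i += 1;
                }
                (None, Some(o)) => {
                    removed.push(o.0.to_string());
                    j += 1;
                }
                (None, None) => break,
            }
        }
        (updated, removed)
    }

    #[test]
    fn reports_updates_and_removals() {
        let old = [("a.rs", 1), ("b.rs", 2), ("c.rs", 3)];
        let new = [("a.rs", 1), ("b.rs", 9), ("d.rs", 4)];
        let (updated, removed) = diff_sorted(&new, &old);
        assert_eq!(
            updated,
            vec![("b.rs".to_string(), 9), ("d.rs".to_string(), 4)]
        );
        assert_eq!(removed, vec!["c.rs".to_string()]);
    }
}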
 318
 319pub fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
 320    proto::Branch {
 321        is_head: branch.is_head,
 322        name: branch.name.to_string(),
 323        unix_timestamp: branch
 324            .most_recent_commit
 325            .as_ref()
 326            .map(|commit| commit.commit_timestamp as u64),
 327        upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
 328            ref_name: upstream.ref_name.to_string(),
 329            tracking: upstream
 330                .tracking
 331                .as_ref()
 332                .map(|upstream| proto::UpstreamTracking {
 333                    ahead: upstream.ahead as u64,
 334                    behind: upstream.behind as u64,
 335                }),
 336        }),
 337        most_recent_commit: branch
 338            .most_recent_commit
 339            .as_ref()
 340            .map(|commit| proto::CommitSummary {
 341                sha: commit.sha.to_string(),
 342                subject: commit.subject.to_string(),
 343                commit_timestamp: commit.commit_timestamp,
 344            }),
 345    }
 346}
 347
 348pub fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
 349    git::repository::Branch {
 350        is_head: proto.is_head,
 351        name: proto.name.clone().into(),
 352        upstream: proto
 353            .upstream
 354            .as_ref()
 355            .map(|upstream| git::repository::Upstream {
 356                ref_name: upstream.ref_name.to_string().into(),
 357                tracking: upstream.tracking.as_ref().map(|tracking| {
 358                    git::repository::UpstreamTracking {
 359                        ahead: tracking.ahead as u32,
 360                        behind: tracking.behind as u32,
 361                    }
 362                }),
 363            }),
 364        most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
 365            git::repository::CommitSummary {
 366                sha: commit.sha.to_string().into(),
 367                subject: commit.subject.to_string().into(),
 368                commit_timestamp: commit.commit_timestamp,
 369            }
 370        }),
 371    }
 372}
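
// A small sketch of the round trip between `proto::Branch` and
// `git::repository::Branch` via the two conversion functions above. The field
// values are illustrative; it assumes the proto types derive `PartialEq` and
// `Debug` (as prost-generated messages normally do). Note that `ahead`/`behind`
// are narrowed to `u32` on the way in and widened back to `u64`, and
// `unix_timestamp` is re-derived from the most recent commit.
#[cfg(test)]
mod branch_proto_round_trip_example {
    use super::*;

    #[test]
    fn round_trip_preserves_branch_data() {
        let original = proto::Branch {
            is_head: true,
            name: "main".to_string(),
            unix_timestamp: Some(1_700_000_000),
            upstream: Some(proto::GitUpstream {
                ref_name: "refs/remotes/origin/main".to_string(),
                tracking: Some(proto::UpstreamTracking { ahead: 1, behind: 2 }),
            }),
            most_recent_commit: Some(proto::CommitSummary {
                sha: "abc123".to_string(),
                subject: "Initial commit".to_string(),
                commit_timestamp: 1_700_000_000,
            }),
        };

        // Convert to the in-memory representation and back again.
        let round_tripped = branch_to_proto(&proto_to_branch(&original));
        assert_eq!(round_tripped, original);
    }
}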
 373
 374/// This path corresponds to the 'content path' of a repository in relation
 375/// to Zed's project root.
 376/// In the majority of cases, this is the folder that contains the .git folder.
 377/// But if a sub-folder of a git repository is opened, this corresponds to the
 378/// project root and the .git folder is located in a parent directory.
 379#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
 380pub enum WorkDirectory {
 381    InProject {
 382        relative_path: Arc<Path>,
 383    },
 384    AboveProject {
 385        absolute_path: Arc<Path>,
 386        location_in_repo: Arc<Path>,
 387    },
 388}
 389
 390impl WorkDirectory {
 391    #[cfg(test)]
 392    fn in_project(path: &str) -> Self {
 393        let path = Path::new(path);
 394        Self::InProject {
 395            relative_path: path.into(),
 396        }
 397    }
 398
 399    #[cfg(test)]
 400    fn canonicalize(&self) -> Self {
 401        match self {
 402            WorkDirectory::InProject { relative_path } => WorkDirectory::InProject {
 403                relative_path: relative_path.clone(),
 404            },
 405            WorkDirectory::AboveProject {
 406                absolute_path,
 407                location_in_repo,
 408            } => WorkDirectory::AboveProject {
 409                absolute_path: absolute_path.canonicalize().unwrap().into(),
 410                location_in_repo: location_in_repo.clone(),
 411            },
 412        }
 413    }
 414
 415    pub fn is_above_project(&self) -> bool {
 416        match self {
 417            WorkDirectory::InProject { .. } => false,
 418            WorkDirectory::AboveProject { .. } => true,
 419        }
 420    }
 421
 422    fn path_key(&self) -> PathKey {
 423        match self {
 424            WorkDirectory::InProject { relative_path } => PathKey(relative_path.clone()),
 425            WorkDirectory::AboveProject { .. } => PathKey(Path::new("").into()),
 426        }
 427    }
 428
 429    /// Returns true if the given path is a child of the work directory.
 430    ///
 431    /// Note that the path may not be a member of this repository if there is a
 432    /// nested repository in a directory between these two paths, as can happen
 433    /// when an external .git folder lives in a parent folder of the project root.
 434    #[track_caller]
 435    pub fn directory_contains(&self, path: impl AsRef<Path>) -> bool {
 436        let path = path.as_ref();
 437        debug_assert!(path.is_relative());
 438        match self {
 439            WorkDirectory::InProject { relative_path } => path.starts_with(relative_path),
 440            WorkDirectory::AboveProject { .. } => true,
 441        }
 442    }
 443
 444    /// relativize returns the given project path relative to the root folder of the
 445    /// repository.
 446    /// If the root of the repository (and its .git folder) is located in a parent folder
 447    /// of the project root folder, then the returned RepoPath is relative to the root
 448    /// of the repository and not a valid path inside the project.
 449    pub fn relativize(&self, path: &Path) -> Result<RepoPath> {
 450        // path is assumed to be relative to worktree root.
 451        debug_assert!(path.is_relative());
 452        match self {
 453            WorkDirectory::InProject { relative_path } => Ok(path
 454                .strip_prefix(relative_path)
 455                .map_err(|_| {
 456                    anyhow!(
 457                        "could not relativize {:?} against {:?}",
 458                        path,
 459                        relative_path
 460                    )
 461                })?
 462                .into()),
 463            WorkDirectory::AboveProject {
 464                location_in_repo, ..
 465            } => {
 466                // Avoid joining a `/` to location_in_repo in the case of a single-file worktree.
 467                if path == Path::new("") {
 468                    Ok(RepoPath(location_in_repo.clone()))
 469                } else {
 470                    Ok(location_in_repo.join(path).into())
 471                }
 472            }
 473        }
 474    }
 475
 476    /// This is the inverse of `relativize` above.
 477    pub fn unrelativize(&self, path: &RepoPath) -> Option<Arc<Path>> {
 478        match self {
 479            WorkDirectory::InProject { relative_path } => Some(relative_path.join(path).into()),
 480            WorkDirectory::AboveProject {
 481                location_in_repo, ..
 482            } => {
 483                // If we fail to strip the prefix, that means this status entry is
 484                // external to this worktree, and we definitely won't have an entry_id
 485                path.strip_prefix(location_in_repo).ok().map(Into::into)
 486            }
 487        }
 488    }
 489
 490    pub fn display_name(&self) -> String {
 491        match self {
 492            WorkDirectory::InProject { relative_path } => relative_path.display().to_string(),
 493            WorkDirectory::AboveProject {
 494                absolute_path,
 495                location_in_repo,
 496            } => {
 497                let num_of_dots = location_in_repo.components().count();
 498
 499                "../".repeat(num_of_dots)
 500                    + &absolute_path
 501                        .file_name()
 502                        .map(|s| s.to_string_lossy())
 503                        .unwrap_or_default()
 504                    + "/"
 505            }
 506        }
 507    }
 508}
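
// A minimal sketch of how `relativize` and `unrelativize` behave for the two
// `WorkDirectory` variants, mirroring the example in the `RepositoryEntry`
// docs above. The `RepoPath` conversions are assumed to be the same `From`
// impls used by the methods themselves.
#[cfg(test)]
mod work_directory_examples {
    use super::*;

    #[test]
    fn relativize_in_project() {
        // The repository root lives inside the worktree, at `backend/`.
        let work_dir = WorkDirectory::InProject {
            relative_path: Path::new("backend").into(),
        };
        // A worktree-relative path maps to a path relative to the repository root.
        let repo_path = work_dir
            .relativize(Path::new("backend/src/main.rs"))
            .unwrap();
        let expected: RepoPath = Path::new("src/main.rs").into();
        assert_eq!(repo_path, expected);
        // `unrelativize` maps it back to a worktree-relative path.
        let round_trip: Arc<Path> = Path::new("backend/src/main.rs").into();
        assert_eq!(work_dir.unrelativize(&repo_path), Some(round_trip));
    }

    #[test]
    fn relativize_above_project() {
        // The .git folder lives in a parent of the project root, so worktree
        // paths are prefixed with the project's location inside the repository.
        let work_dir = WorkDirectory::AboveProject {
            absolute_path: Path::new("/home/user/my_root_folder").into(),
            location_in_repo: Path::new("my_sub_folder_1/project_root").into(),
        };
        let repo_path = work_dir.relativize(Path::new("changed_file_1")).unwrap();
        let expected: RepoPath =
            Path::new("my_sub_folder_1/project_root/changed_file_1").into();
        assert_eq!(repo_path, expected);
        // A repository path outside the worktree cannot be mapped back.
        let external: RepoPath = Path::new("my_sub_folder_2/changed_file_2").into();
        assert_eq!(work_dir.unrelativize(&external), None);
    }
}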
 509
 510impl Default for WorkDirectory {
 511    fn default() -> Self {
 512        Self::InProject {
 513            relative_path: Arc::from(Path::new("")),
 514        }
 515    }
 516}
 517
 518#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
 519pub struct WorkDirectoryEntry(ProjectEntryId);
 520
 521impl Deref for WorkDirectoryEntry {
 522    type Target = ProjectEntryId;
 523
 524    fn deref(&self) -> &Self::Target {
 525        &self.0
 526    }
 527}
 528
 529impl From<ProjectEntryId> for WorkDirectoryEntry {
 530    fn from(value: ProjectEntryId) -> Self {
 531        WorkDirectoryEntry(value)
 532    }
 533}
 534
 535#[derive(Debug, Clone)]
 536pub struct LocalSnapshot {
 537    snapshot: Snapshot,
 538    /// All of the gitignore files in the worktree, indexed by the absolute path of their parent directory.
 539    /// The boolean indicates whether the gitignore needs to be updated.
 540    ignores_by_parent_abs_path: HashMap<Arc<Path>, (Arc<Gitignore>, bool)>,
 541    /// All of the git repositories in the worktree, indexed by the project entry
 542    /// id of their parent directory.
 543    git_repositories: TreeMap<ProjectEntryId, LocalRepositoryEntry>,
 544    /// The file handle of the root dir
 545    /// (so we can find it after it's been moved)
 546    root_file_handle: Option<Arc<dyn fs::FileHandle>>,
 547}
 548
 549struct BackgroundScannerState {
 550    snapshot: LocalSnapshot,
 551    scanned_dirs: HashSet<ProjectEntryId>,
 552    path_prefixes_to_scan: HashSet<Arc<Path>>,
 553    paths_to_scan: HashSet<Arc<Path>>,
 554    /// The ids of all of the entries that were removed from the snapshot
 555    /// as part of the current update. These entry ids may be re-used
 556    /// if the same inode is discovered at a new path, or if the given
 557    /// path is re-created after being deleted.
 558    removed_entries: HashMap<u64, Entry>,
 559    changed_paths: Vec<Arc<Path>>,
 560    prev_snapshot: Snapshot,
 561    git_hosting_provider_registry: Option<Arc<GitHostingProviderRegistry>>,
 562}
 563
 564#[derive(Debug, Clone)]
 565pub struct LocalRepositoryEntry {
 566    pub(crate) work_directory_id: ProjectEntryId,
 567    pub(crate) work_directory: WorkDirectory,
 568    pub(crate) git_dir_scan_id: usize,
 569    pub(crate) status_scan_id: usize,
 570    pub(crate) repo_ptr: Arc<dyn GitRepository>,
 571    /// Absolute path to the actual .git folder.
 572    /// Note: if .git is a file, this points to the folder indicated by the .git file
 573    pub(crate) dot_git_dir_abs_path: Arc<Path>,
 574    /// Absolute path to the .git file, if we're in a git worktree.
 575    pub(crate) dot_git_worktree_abs_path: Option<Arc<Path>>,
 576    pub current_merge_head_shas: Vec<String>,
 577}
 578
 579impl sum_tree::Item for LocalRepositoryEntry {
 580    type Summary = PathSummary<Unit>;
 581
 582    fn summary(&self, _: &<Self::Summary as Summary>::Context) -> Self::Summary {
 583        PathSummary {
 584            max_path: self.work_directory.path_key().0,
 585            item_summary: Unit,
 586        }
 587    }
 588}
 589
 590impl KeyedItem for LocalRepositoryEntry {
 591    type Key = PathKey;
 592
 593    fn key(&self) -> Self::Key {
 594        self.work_directory.path_key()
 595    }
 596}
 597
 598impl LocalRepositoryEntry {
 599    pub fn repo(&self) -> &Arc<dyn GitRepository> {
 600        &self.repo_ptr
 601    }
 602}
 603
 604impl Deref for LocalRepositoryEntry {
 605    type Target = WorkDirectory;
 606
 607    fn deref(&self) -> &Self::Target {
 608        &self.work_directory
 609    }
 610}
 611
 612impl Deref for LocalSnapshot {
 613    type Target = Snapshot;
 614
 615    fn deref(&self) -> &Self::Target {
 616        &self.snapshot
 617    }
 618}
 619
 620impl DerefMut for LocalSnapshot {
 621    fn deref_mut(&mut self) -> &mut Self::Target {
 622        &mut self.snapshot
 623    }
 624}
 625
 626enum ScanState {
 627    Started,
 628    Updated {
 629        snapshot: LocalSnapshot,
 630        changes: UpdatedEntriesSet,
 631        barrier: SmallVec<[barrier::Sender; 1]>,
 632        scanning: bool,
 633    },
 634    RootUpdated {
 635        new_path: Option<SanitizedPath>,
 636    },
 637}
 638
 639struct UpdateObservationState {
 640    snapshots_tx:
 641        mpsc::UnboundedSender<(LocalSnapshot, UpdatedEntriesSet, UpdatedGitRepositoriesSet)>,
 642    resume_updates: watch::Sender<()>,
 643    _maintain_remote_snapshot: Task<Option<()>>,
 644}
 645
 646#[derive(Clone)]
 647pub enum Event {
 648    UpdatedEntries(UpdatedEntriesSet),
 649    UpdatedGitRepositories(UpdatedGitRepositoriesSet),
 650    DeletedEntry(ProjectEntryId),
 651}
 652
 653const EMPTY_PATH: &str = "";
 654
 655impl EventEmitter<Event> for Worktree {}
 656
 657impl Worktree {
 658    pub async fn local(
 659        path: impl Into<Arc<Path>>,
 660        visible: bool,
 661        fs: Arc<dyn Fs>,
 662        next_entry_id: Arc<AtomicUsize>,
 663        cx: &mut AsyncApp,
 664    ) -> Result<Entity<Self>> {
 665        let abs_path = path.into();
 666        let metadata = fs
 667            .metadata(&abs_path)
 668            .await
 669            .context("failed to stat worktree path")?;
 670
 671        let fs_case_sensitive = fs.is_case_sensitive().await.unwrap_or_else(|e| {
 672            log::error!(
 673                "Failed to determine whether filesystem is case sensitive (falling back to true) due to error: {e:#}"
 674            );
 675            true
 676        });
 677
 678        let root_file_handle = fs.open_handle(&abs_path).await.log_err();
 679
 680        cx.new(move |cx: &mut Context<Worktree>| {
 681            let mut snapshot = LocalSnapshot {
 682                ignores_by_parent_abs_path: Default::default(),
 683                git_repositories: Default::default(),
 684                snapshot: Snapshot::new(
 685                    cx.entity_id().as_u64(),
 686                    abs_path
 687                        .file_name()
 688                        .map_or(String::new(), |f| f.to_string_lossy().to_string()),
 689                    abs_path.clone(),
 690                ),
 691                root_file_handle,
 692            };
 693
 694            let worktree_id = snapshot.id();
 695            let settings_location = Some(SettingsLocation {
 696                worktree_id,
 697                path: Path::new(EMPTY_PATH),
 698            });
 699
 700            let settings = WorktreeSettings::get(settings_location, cx).clone();
 701            cx.observe_global::<SettingsStore>(move |this, cx| {
 702                if let Self::Local(this) = this {
 703                    let settings = WorktreeSettings::get(settings_location, cx).clone();
 704                    if this.settings != settings {
 705                        this.settings = settings;
 706                        this.restart_background_scanners(cx);
 707                    }
 708                }
 709            })
 710            .detach();
 711
 712            let share_private_files = false;
 713            if let Some(metadata) = metadata {
 714                let mut entry = Entry::new(
 715                    Arc::from(Path::new("")),
 716                    &metadata,
 717                    &next_entry_id,
 718                    snapshot.root_char_bag,
 719                    None,
 720                );
 721                if !metadata.is_dir {
 722                    entry.is_private = !share_private_files
 723                        && settings.is_path_private(abs_path.file_name().unwrap().as_ref());
 724                }
 725                snapshot.insert_entry(entry, fs.as_ref());
 726            }
 727
 728            let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
 729            let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
 730            let mut worktree = LocalWorktree {
 731                share_private_files,
 732                next_entry_id,
 733                snapshot,
 734                is_scanning: watch::channel_with(true),
 735                update_observer: None,
 736                scan_requests_tx,
 737                path_prefixes_to_scan_tx,
 738                _background_scanner_tasks: Vec::new(),
 739                fs,
 740                fs_case_sensitive,
 741                visible,
 742                settings,
 743            };
 744            worktree.start_background_scanner(scan_requests_rx, path_prefixes_to_scan_rx, cx);
 745            Worktree::Local(worktree)
 746        })
 747    }
 748
 749    pub fn remote(
 750        project_id: u64,
 751        replica_id: ReplicaId,
 752        worktree: proto::WorktreeMetadata,
 753        client: AnyProtoClient,
 754        cx: &mut App,
 755    ) -> Entity<Self> {
 756        cx.new(|cx: &mut Context<Self>| {
 757            let snapshot = Snapshot::new(
 758                worktree.id,
 759                worktree.root_name,
 760                Arc::<Path>::from_proto(worktree.abs_path),
 761            );
 762
 763            let background_snapshot = Arc::new(Mutex::new((snapshot.clone(), Vec::new())));
 764            let (background_updates_tx, mut background_updates_rx) = mpsc::unbounded();
 765            let (mut snapshot_updated_tx, mut snapshot_updated_rx) = watch::channel();
 766
 767            let worktree_id = snapshot.id();
 768            let settings_location = Some(SettingsLocation {
 769                worktree_id,
 770                path: Path::new(EMPTY_PATH),
 771            });
 772
 773            let settings = WorktreeSettings::get(settings_location, cx).clone();
 774            let worktree = RemoteWorktree {
 775                client,
 776                project_id,
 777                replica_id,
 778                snapshot,
 779                file_scan_inclusions: settings.file_scan_inclusions.clone(),
 780                background_snapshot: background_snapshot.clone(),
 781                updates_tx: Some(background_updates_tx),
 782                update_observer: None,
 783                snapshot_subscriptions: Default::default(),
 784                visible: worktree.visible,
 785                disconnected: false,
 786            };
 787
 788            // Apply updates to a separate snapshot in a background task, then
 789            // send them to a foreground task which updates the model.
 790            cx.background_executor()
 791                .spawn(async move {
 792                    while let Some(update) = background_updates_rx.next().await {
 793                        {
 794                            let mut lock = background_snapshot.lock();
 795                            if let Err(error) = lock
 796                                .0
 797                                .apply_remote_update(update.clone(), &settings.file_scan_inclusions)
 798                            {
 799                                log::error!("error applying worktree update: {}", error);
 800                            }
 801                            lock.1.push(update);
 802                        }
 803                        snapshot_updated_tx.send(()).await.ok();
 804                    }
 805                })
 806                .detach();
 807
 808            // On the foreground task, update to the latest snapshot and notify
 809            // any update observer of all updates that led to that snapshot.
 810            cx.spawn(|this, mut cx| async move {
 811                while (snapshot_updated_rx.recv().await).is_some() {
 812                    this.update(&mut cx, |this, cx| {
 813                        let this = this.as_remote_mut().unwrap();
 814                        {
 815                            let mut lock = this.background_snapshot.lock();
 816                            this.snapshot = lock.0.clone();
 817                            if let Some(tx) = &this.update_observer {
 818                                for update in lock.1.drain(..) {
 819                                    tx.unbounded_send(update).ok();
 820                                }
 821                            }
 822                        };
 823                        cx.emit(Event::UpdatedEntries(Arc::default()));
 824                        cx.notify();
 825                        while let Some((scan_id, _)) = this.snapshot_subscriptions.front() {
 826                            if this.observed_snapshot(*scan_id) {
 827                                let (_, tx) = this.snapshot_subscriptions.pop_front().unwrap();
 828                                let _ = tx.send(());
 829                            } else {
 830                                break;
 831                            }
 832                        }
 833                    })?;
 834                }
 835                anyhow::Ok(())
 836            })
 837            .detach();
 838
 839            Worktree::Remote(worktree)
 840        })
 841    }
 842
 843    pub fn as_local(&self) -> Option<&LocalWorktree> {
 844        if let Worktree::Local(worktree) = self {
 845            Some(worktree)
 846        } else {
 847            None
 848        }
 849    }
 850
 851    pub fn as_remote(&self) -> Option<&RemoteWorktree> {
 852        if let Worktree::Remote(worktree) = self {
 853            Some(worktree)
 854        } else {
 855            None
 856        }
 857    }
 858
 859    pub fn as_local_mut(&mut self) -> Option<&mut LocalWorktree> {
 860        if let Worktree::Local(worktree) = self {
 861            Some(worktree)
 862        } else {
 863            None
 864        }
 865    }
 866
 867    pub fn as_remote_mut(&mut self) -> Option<&mut RemoteWorktree> {
 868        if let Worktree::Remote(worktree) = self {
 869            Some(worktree)
 870        } else {
 871            None
 872        }
 873    }
 874
 875    pub fn is_local(&self) -> bool {
 876        matches!(self, Worktree::Local(_))
 877    }
 878
 879    pub fn is_remote(&self) -> bool {
 880        !self.is_local()
 881    }
 882
 883    pub fn settings_location(&self, _: &Context<Self>) -> SettingsLocation<'static> {
 884        SettingsLocation {
 885            worktree_id: self.id(),
 886            path: Path::new(EMPTY_PATH),
 887        }
 888    }
 889
 890    pub fn snapshot(&self) -> Snapshot {
 891        match self {
 892            Worktree::Local(worktree) => worktree.snapshot.snapshot.clone(),
 893            Worktree::Remote(worktree) => worktree.snapshot.clone(),
 894        }
 895    }
 896
 897    pub fn scan_id(&self) -> usize {
 898        match self {
 899            Worktree::Local(worktree) => worktree.snapshot.scan_id,
 900            Worktree::Remote(worktree) => worktree.snapshot.scan_id,
 901        }
 902    }
 903
 904    pub fn metadata_proto(&self) -> proto::WorktreeMetadata {
 905        proto::WorktreeMetadata {
 906            id: self.id().to_proto(),
 907            root_name: self.root_name().to_string(),
 908            visible: self.is_visible(),
 909            abs_path: self.abs_path().to_proto(),
 910        }
 911    }
 912
 913    pub fn completed_scan_id(&self) -> usize {
 914        match self {
 915            Worktree::Local(worktree) => worktree.snapshot.completed_scan_id,
 916            Worktree::Remote(worktree) => worktree.snapshot.completed_scan_id,
 917        }
 918    }
 919
 920    pub fn is_visible(&self) -> bool {
 921        match self {
 922            Worktree::Local(worktree) => worktree.visible,
 923            Worktree::Remote(worktree) => worktree.visible,
 924        }
 925    }
 926
 927    pub fn replica_id(&self) -> ReplicaId {
 928        match self {
 929            Worktree::Local(_) => 0,
 930            Worktree::Remote(worktree) => worktree.replica_id,
 931        }
 932    }
 933
 934    pub fn abs_path(&self) -> Arc<Path> {
 935        match self {
 936            Worktree::Local(worktree) => worktree.abs_path.clone().into(),
 937            Worktree::Remote(worktree) => worktree.abs_path.clone().into(),
 938        }
 939    }
 940
 941    pub fn root_file(&self, cx: &Context<Self>) -> Option<Arc<File>> {
 942        let entry = self.root_entry()?;
 943        Some(File::for_entry(entry.clone(), cx.entity()))
 944    }
 945
 946    pub fn observe_updates<F, Fut>(&mut self, project_id: u64, cx: &Context<Worktree>, callback: F)
 947    where
 948        F: 'static + Send + Fn(proto::UpdateWorktree) -> Fut,
 949        Fut: 'static + Send + Future<Output = bool>,
 950    {
 951        match self {
 952            Worktree::Local(this) => this.observe_updates(project_id, cx, callback),
 953            Worktree::Remote(this) => this.observe_updates(project_id, cx, callback),
 954        }
 955    }
 956
 957    pub fn stop_observing_updates(&mut self) {
 958        match self {
 959            Worktree::Local(this) => {
 960                this.update_observer.take();
 961            }
 962            Worktree::Remote(this) => {
 963                this.update_observer.take();
 964            }
 965        }
 966    }
 967
 968    #[cfg(any(test, feature = "test-support"))]
 969    pub fn has_update_observer(&self) -> bool {
 970        match self {
 971            Worktree::Local(this) => this.update_observer.is_some(),
 972            Worktree::Remote(this) => this.update_observer.is_some(),
 973        }
 974    }
 975
 976    pub fn load_file(&self, path: &Path, cx: &Context<Worktree>) -> Task<Result<LoadedFile>> {
 977        match self {
 978            Worktree::Local(this) => this.load_file(path, cx),
 979            Worktree::Remote(_) => {
 980                Task::ready(Err(anyhow!("remote worktrees can't yet load files")))
 981            }
 982        }
 983    }
 984
 985    pub fn load_staged_file(&self, path: &Path, cx: &App) -> Task<Result<Option<String>>> {
 986        match self {
 987            Worktree::Local(this) => {
 988                let path = Arc::from(path);
 989                let snapshot = this.snapshot();
 990                cx.background_executor().spawn(async move {
 991                    if let Some(repo) = snapshot.repository_for_path(&path) {
 992                        if let Some(repo_path) = repo.relativize(&path).log_err() {
 993                            if let Some(git_repo) =
 994                                snapshot.git_repositories.get(&repo.work_directory_id)
 995                            {
 996                                return Ok(git_repo.repo_ptr.load_index_text(&repo_path));
 997                            }
 998                        }
 999                    }
1000                    Err(anyhow!("No repository found for {path:?}"))
1001                })
1002            }
1003            Worktree::Remote(_) => {
1004                Task::ready(Err(anyhow!("remote worktrees can't yet load staged files")))
1005            }
1006        }
1007    }
1008
1009    pub fn load_committed_file(&self, path: &Path, cx: &App) -> Task<Result<Option<String>>> {
1010        match self {
1011            Worktree::Local(this) => {
1012                let path = Arc::from(path);
1013                let snapshot = this.snapshot();
1014                cx.background_executor().spawn(async move {
1015                    if let Some(repo) = snapshot.repository_for_path(&path) {
1016                        if let Some(repo_path) = repo.relativize(&path).log_err() {
1017                            if let Some(git_repo) =
1018                                snapshot.git_repositories.get(&repo.work_directory_id)
1019                            {
1020                                return Ok(git_repo.repo_ptr.load_committed_text(&repo_path));
1021                            }
1022                        }
1023                    }
1024                    Err(anyhow!("No repository found for {path:?}"))
1025                })
1026            }
1027            Worktree::Remote(_) => Task::ready(Err(anyhow!(
1028                "remote worktrees can't yet load committed files"
1029            ))),
1030        }
1031    }
1032
1033    pub fn load_binary_file(
1034        &self,
1035        path: &Path,
1036        cx: &Context<Worktree>,
1037    ) -> Task<Result<LoadedBinaryFile>> {
1038        match self {
1039            Worktree::Local(this) => this.load_binary_file(path, cx),
1040            Worktree::Remote(_) => {
1041                Task::ready(Err(anyhow!("remote worktrees can't yet load binary files")))
1042            }
1043        }
1044    }
1045
1046    pub fn write_file(
1047        &self,
1048        path: &Path,
1049        text: Rope,
1050        line_ending: LineEnding,
1051        cx: &Context<Worktree>,
1052    ) -> Task<Result<Arc<File>>> {
1053        match self {
1054            Worktree::Local(this) => this.write_file(path, text, line_ending, cx),
1055            Worktree::Remote(_) => {
1056                Task::ready(Err(anyhow!("remote worktree can't yet write files")))
1057            }
1058        }
1059    }
1060
1061    pub fn create_entry(
1062        &mut self,
1063        path: impl Into<Arc<Path>>,
1064        is_directory: bool,
1065        cx: &Context<Worktree>,
1066    ) -> Task<Result<CreatedEntry>> {
1067        let path: Arc<Path> = path.into();
1068        let worktree_id = self.id();
1069        match self {
1070            Worktree::Local(this) => this.create_entry(path, is_directory, cx),
1071            Worktree::Remote(this) => {
1072                let project_id = this.project_id;
1073                let request = this.client.request(proto::CreateProjectEntry {
1074                    worktree_id: worktree_id.to_proto(),
1075                    project_id,
1076                    path: path.as_ref().to_proto(),
1077                    is_directory,
1078                });
1079                cx.spawn(move |this, mut cx| async move {
1080                    let response = request.await?;
1081                    match response.entry {
1082                        Some(entry) => this
1083                            .update(&mut cx, |worktree, cx| {
1084                                worktree.as_remote_mut().unwrap().insert_entry(
1085                                    entry,
1086                                    response.worktree_scan_id as usize,
1087                                    cx,
1088                                )
1089                            })?
1090                            .await
1091                            .map(CreatedEntry::Included),
1092                        None => {
1093                            let abs_path = this.update(&mut cx, |worktree, _| {
1094                                worktree
1095                                    .absolutize(&path)
1096                                    .with_context(|| format!("absolutizing {path:?}"))
1097                            })??;
1098                            Ok(CreatedEntry::Excluded { abs_path })
1099                        }
1100                    }
1101                })
1102            }
1103        }
1104    }
1105
1106    pub fn delete_entry(
1107        &mut self,
1108        entry_id: ProjectEntryId,
1109        trash: bool,
1110        cx: &mut Context<Worktree>,
1111    ) -> Option<Task<Result<()>>> {
1112        let task = match self {
1113            Worktree::Local(this) => this.delete_entry(entry_id, trash, cx),
1114            Worktree::Remote(this) => this.delete_entry(entry_id, trash, cx),
1115        }?;
1116
1117        let entry = match self {
1118            Worktree::Local(ref this) => this.entry_for_id(entry_id),
1119            Worktree::Remote(ref this) => this.entry_for_id(entry_id),
1120        }?;
1121
1122        let mut ids = vec![entry_id];
1123        let path = &*entry.path;
1124
1125        self.get_children_ids_recursive(path, &mut ids);
1126
1127        for id in ids {
1128            cx.emit(Event::DeletedEntry(id));
1129        }
1130        Some(task)
1131    }
1132
1133    fn get_children_ids_recursive(&self, path: &Path, ids: &mut Vec<ProjectEntryId>) {
1134        let children_iter = self.child_entries(path);
1135        for child in children_iter {
1136            ids.push(child.id);
1137            self.get_children_ids_recursive(&child.path, ids);
1138        }
1139    }
1140
1141    pub fn rename_entry(
1142        &mut self,
1143        entry_id: ProjectEntryId,
1144        new_path: impl Into<Arc<Path>>,
1145        cx: &Context<Self>,
1146    ) -> Task<Result<CreatedEntry>> {
1147        let new_path = new_path.into();
1148        match self {
1149            Worktree::Local(this) => this.rename_entry(entry_id, new_path, cx),
1150            Worktree::Remote(this) => this.rename_entry(entry_id, new_path, cx),
1151        }
1152    }
1153
1154    pub fn copy_entry(
1155        &mut self,
1156        entry_id: ProjectEntryId,
1157        relative_worktree_source_path: Option<PathBuf>,
1158        new_path: impl Into<Arc<Path>>,
1159        cx: &Context<Self>,
1160    ) -> Task<Result<Option<Entry>>> {
1161        let new_path: Arc<Path> = new_path.into();
1162        match self {
1163            Worktree::Local(this) => {
1164                this.copy_entry(entry_id, relative_worktree_source_path, new_path, cx)
1165            }
1166            Worktree::Remote(this) => {
1167                let relative_worktree_source_path = relative_worktree_source_path
1168                    .map(|relative_worktree_source_path| relative_worktree_source_path.to_proto());
1169                let response = this.client.request(proto::CopyProjectEntry {
1170                    project_id: this.project_id,
1171                    entry_id: entry_id.to_proto(),
1172                    relative_worktree_source_path,
1173                    new_path: new_path.to_proto(),
1174                });
1175                cx.spawn(move |this, mut cx| async move {
1176                    let response = response.await?;
1177                    match response.entry {
1178                        Some(entry) => this
1179                            .update(&mut cx, |worktree, cx| {
1180                                worktree.as_remote_mut().unwrap().insert_entry(
1181                                    entry,
1182                                    response.worktree_scan_id as usize,
1183                                    cx,
1184                                )
1185                            })?
1186                            .await
1187                            .map(Some),
1188                        None => Ok(None),
1189                    }
1190                })
1191            }
1192        }
1193    }
1194
1195    pub fn copy_external_entries(
1196        &mut self,
1197        target_directory: PathBuf,
1198        paths: Vec<Arc<Path>>,
1199        overwrite_existing_files: bool,
1200        cx: &Context<Worktree>,
1201    ) -> Task<Result<Vec<ProjectEntryId>>> {
1202        match self {
1203            Worktree::Local(this) => {
1204                this.copy_external_entries(target_directory, paths, overwrite_existing_files, cx)
1205            }
1206            _ => Task::ready(Err(anyhow!(
1207                "Copying external entries is not supported for remote worktrees"
1208            ))),
1209        }
1210    }
1211
1212    pub fn expand_entry(
1213        &mut self,
1214        entry_id: ProjectEntryId,
1215        cx: &Context<Worktree>,
1216    ) -> Option<Task<Result<()>>> {
1217        match self {
1218            Worktree::Local(this) => this.expand_entry(entry_id, cx),
1219            Worktree::Remote(this) => {
1220                let response = this.client.request(proto::ExpandProjectEntry {
1221                    project_id: this.project_id,
1222                    entry_id: entry_id.to_proto(),
1223                });
1224                Some(cx.spawn(move |this, mut cx| async move {
1225                    let response = response.await?;
1226                    this.update(&mut cx, |this, _| {
1227                        this.as_remote_mut()
1228                            .unwrap()
1229                            .wait_for_snapshot(response.worktree_scan_id as usize)
1230                    })?
1231                    .await?;
1232                    Ok(())
1233                }))
1234            }
1235        }
1236    }
1237
1238    pub fn expand_all_for_entry(
1239        &mut self,
1240        entry_id: ProjectEntryId,
1241        cx: &Context<Worktree>,
1242    ) -> Option<Task<Result<()>>> {
1243        match self {
1244            Worktree::Local(this) => this.expand_all_for_entry(entry_id, cx),
1245            Worktree::Remote(this) => {
1246                let response = this.client.request(proto::ExpandAllForProjectEntry {
1247                    project_id: this.project_id,
1248                    entry_id: entry_id.to_proto(),
1249                });
1250                Some(cx.spawn(move |this, mut cx| async move {
1251                    let response = response.await?;
1252                    this.update(&mut cx, |this, _| {
1253                        this.as_remote_mut()
1254                            .unwrap()
1255                            .wait_for_snapshot(response.worktree_scan_id as usize)
1256                    })?
1257                    .await?;
1258                    Ok(())
1259                }))
1260            }
1261        }
1262    }
1263
1264    pub async fn handle_create_entry(
1265        this: Entity<Self>,
1266        request: proto::CreateProjectEntry,
1267        mut cx: AsyncApp,
1268    ) -> Result<proto::ProjectEntryResponse> {
1269        let (scan_id, entry) = this.update(&mut cx, |this, cx| {
1270            (
1271                this.scan_id(),
1272                this.create_entry(
1273                    Arc::<Path>::from_proto(request.path),
1274                    request.is_directory,
1275                    cx,
1276                ),
1277            )
1278        })?;
1279        Ok(proto::ProjectEntryResponse {
1280            entry: match &entry.await? {
1281                CreatedEntry::Included(entry) => Some(entry.into()),
1282                CreatedEntry::Excluded { .. } => None,
1283            },
1284            worktree_scan_id: scan_id as u64,
1285        })
1286    }
1287
1288    pub async fn handle_delete_entry(
1289        this: Entity<Self>,
1290        request: proto::DeleteProjectEntry,
1291        mut cx: AsyncApp,
1292    ) -> Result<proto::ProjectEntryResponse> {
1293        let (scan_id, task) = this.update(&mut cx, |this, cx| {
1294            (
1295                this.scan_id(),
1296                this.delete_entry(
1297                    ProjectEntryId::from_proto(request.entry_id),
1298                    request.use_trash,
1299                    cx,
1300                ),
1301            )
1302        })?;
1303        task.ok_or_else(|| anyhow!("invalid entry"))?.await?;
1304        Ok(proto::ProjectEntryResponse {
1305            entry: None,
1306            worktree_scan_id: scan_id as u64,
1307        })
1308    }
1309
1310    pub async fn handle_expand_entry(
1311        this: Entity<Self>,
1312        request: proto::ExpandProjectEntry,
1313        mut cx: AsyncApp,
1314    ) -> Result<proto::ExpandProjectEntryResponse> {
1315        let task = this.update(&mut cx, |this, cx| {
1316            this.expand_entry(ProjectEntryId::from_proto(request.entry_id), cx)
1317        })?;
1318        task.ok_or_else(|| anyhow!("no such entry"))?.await?;
1319        let scan_id = this.read_with(&cx, |this, _| this.scan_id())?;
1320        Ok(proto::ExpandProjectEntryResponse {
1321            worktree_scan_id: scan_id as u64,
1322        })
1323    }
1324
1325    pub async fn handle_expand_all_for_entry(
1326        this: Entity<Self>,
1327        request: proto::ExpandAllForProjectEntry,
1328        mut cx: AsyncApp,
1329    ) -> Result<proto::ExpandAllForProjectEntryResponse> {
1330        let task = this.update(&mut cx, |this, cx| {
1331            this.expand_all_for_entry(ProjectEntryId::from_proto(request.entry_id), cx)
1332        })?;
1333        task.ok_or_else(|| anyhow!("no such entry"))?.await?;
1334        let scan_id = this.read_with(&cx, |this, _| this.scan_id())?;
1335        Ok(proto::ExpandAllForProjectEntryResponse {
1336            worktree_scan_id: scan_id as u64,
1337        })
1338    }
1339
1340    pub async fn handle_rename_entry(
1341        this: Entity<Self>,
1342        request: proto::RenameProjectEntry,
1343        mut cx: AsyncApp,
1344    ) -> Result<proto::ProjectEntryResponse> {
1345        let (scan_id, task) = this.update(&mut cx, |this, cx| {
1346            (
1347                this.scan_id(),
1348                this.rename_entry(
1349                    ProjectEntryId::from_proto(request.entry_id),
1350                    Arc::<Path>::from_proto(request.new_path),
1351                    cx,
1352                ),
1353            )
1354        })?;
1355        Ok(proto::ProjectEntryResponse {
1356            entry: match &task.await? {
1357                CreatedEntry::Included(entry) => Some(entry.into()),
1358                CreatedEntry::Excluded { .. } => None,
1359            },
1360            worktree_scan_id: scan_id as u64,
1361        })
1362    }
1363
1364    pub async fn handle_copy_entry(
1365        this: Entity<Self>,
1366        request: proto::CopyProjectEntry,
1367        mut cx: AsyncApp,
1368    ) -> Result<proto::ProjectEntryResponse> {
1369        let (scan_id, task) = this.update(&mut cx, |this, cx| {
1370            let relative_worktree_source_path = request
1371                .relative_worktree_source_path
1372                .map(PathBuf::from_proto);
1373            (
1374                this.scan_id(),
1375                this.copy_entry(
1376                    ProjectEntryId::from_proto(request.entry_id),
1377                    relative_worktree_source_path,
1378                    PathBuf::from_proto(request.new_path),
1379                    cx,
1380                ),
1381            )
1382        })?;
1383        Ok(proto::ProjectEntryResponse {
1384            entry: task.await?.as_ref().map(|e| e.into()),
1385            worktree_scan_id: scan_id as u64,
1386        })
1387    }
1388}
1389
1390impl LocalWorktree {
1391    pub fn fs(&self) -> &Arc<dyn Fs> {
1392        &self.fs
1393    }
1394
1395    pub fn is_path_private(&self, path: &Path) -> bool {
1396        !self.share_private_files && self.settings.is_path_private(path)
1397    }
1398
1399    fn restart_background_scanners(&mut self, cx: &Context<Worktree>) {
1400        let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
1401        let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
1402        self.scan_requests_tx = scan_requests_tx;
1403        self.path_prefixes_to_scan_tx = path_prefixes_to_scan_tx;
1404
1405        self.start_background_scanner(scan_requests_rx, path_prefixes_to_scan_rx, cx);
1406        let always_included_entries = mem::take(&mut self.snapshot.always_included_entries);
1407        log::debug!(
1408            "refreshing entries for the following always included paths: {:?}",
1409            always_included_entries
1410        );
1411
1412        // Cleans up old always included entries to ensure they get updated properly. Otherwise,
1413        // nested always included entries may not get updated and will result in out-of-date info.
1414        self.refresh_entries_for_paths(always_included_entries);
1415    }
1416
1417    fn start_background_scanner(
1418        &mut self,
1419        scan_requests_rx: channel::Receiver<ScanRequest>,
1420        path_prefixes_to_scan_rx: channel::Receiver<PathPrefixScanRequest>,
1421        cx: &Context<Worktree>,
1422    ) {
1423        let snapshot = self.snapshot();
1424        let share_private_files = self.share_private_files;
1425        let next_entry_id = self.next_entry_id.clone();
1426        let fs = self.fs.clone();
1427        let git_hosting_provider_registry = GitHostingProviderRegistry::try_global(cx);
1428        let settings = self.settings.clone();
1429        let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
1430        let background_scanner = cx.background_executor().spawn({
1431            let abs_path = snapshot.abs_path.as_path().to_path_buf();
1432            let background = cx.background_executor().clone();
1433            async move {
1434                let (events, watcher) = fs.watch(&abs_path, FS_WATCH_LATENCY).await;
1435                let fs_case_sensitive = fs.is_case_sensitive().await.unwrap_or_else(|e| {
1436                    log::error!("Failed to determine whether filesystem is case sensitive: {e:#}");
1437                    true
1438                });
1439
1440                let mut scanner = BackgroundScanner {
1441                    fs,
1442                    fs_case_sensitive,
1443                    status_updates_tx: scan_states_tx,
1444                    executor: background,
1445                    scan_requests_rx,
1446                    path_prefixes_to_scan_rx,
1447                    next_entry_id,
1448                    state: Mutex::new(BackgroundScannerState {
1449                        prev_snapshot: snapshot.snapshot.clone(),
1450                        snapshot,
1451                        scanned_dirs: Default::default(),
1452                        path_prefixes_to_scan: Default::default(),
1453                        paths_to_scan: Default::default(),
1454                        removed_entries: Default::default(),
1455                        changed_paths: Default::default(),
1456                        git_hosting_provider_registry,
1457                    }),
1458                    phase: BackgroundScannerPhase::InitialScan,
1459                    share_private_files,
1460                    settings,
1461                    watcher,
1462                };
1463
1464                scanner
1465                    .run(Box::pin(
1466                        events.map(|events| events.into_iter().map(Into::into).collect()),
1467                    ))
1468                    .await;
1469            }
1470        });
1471        let scan_state_updater = cx.spawn(|this, mut cx| async move {
1472            while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade()) {
1473                this.update(&mut cx, |this, cx| {
1474                    let this = this.as_local_mut().unwrap();
1475                    match state {
1476                        ScanState::Started => {
1477                            *this.is_scanning.0.borrow_mut() = true;
1478                        }
1479                        ScanState::Updated {
1480                            snapshot,
1481                            changes,
1482                            barrier,
1483                            scanning,
1484                        } => {
1485                            *this.is_scanning.0.borrow_mut() = scanning;
1486                            this.set_snapshot(snapshot, changes, cx);
1487                            drop(barrier);
1488                        }
1489                        ScanState::RootUpdated { new_path } => {
1490                            this.update_abs_path_and_refresh(new_path, cx);
1491                        }
1492                    }
1493                    cx.notify();
1494                })
1495                .ok();
1496            }
1497        });
1498        self._background_scanner_tasks = vec![background_scanner, scan_state_updater];
1499        self.is_scanning = watch::channel_with(true);
1500    }
1501
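        /// Replaces the current snapshot with one produced by the background scanner,
        /// forwards the entry and repository changes to any update observer, and emits
        /// the corresponding `UpdatedEntries` / `UpdatedGitRepositories` events.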
1502    fn set_snapshot(
1503        &mut self,
1504        new_snapshot: LocalSnapshot,
1505        entry_changes: UpdatedEntriesSet,
1506        cx: &mut Context<Worktree>,
1507    ) {
1508        let repo_changes = self.changed_repos(&self.snapshot, &new_snapshot);
1509        self.snapshot = new_snapshot;
1510
1511        if let Some(share) = self.update_observer.as_mut() {
1512            share
1513                .snapshots_tx
1514                .unbounded_send((
1515                    self.snapshot.clone(),
1516                    entry_changes.clone(),
1517                    repo_changes.clone(),
1518                ))
1519                .ok();
1520        }
1521
1522        if !entry_changes.is_empty() {
1523            cx.emit(Event::UpdatedEntries(entry_changes));
1524        }
1525        if !repo_changes.is_empty() {
1526            cx.emit(Event::UpdatedGitRepositories(repo_changes));
1527        }
1528    }
1529
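        /// Diffs the git repositories of two snapshots. Both repository lists are
        /// iterated in entry-id order, so they are walked in lockstep: ids present only
        /// in the new snapshot are additions, ids present only in the old snapshot are
        /// removals, and matching ids are reported only when their scan ids show that
        /// the repository was rescanned.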
1530    fn changed_repos(
1531        &self,
1532        old_snapshot: &LocalSnapshot,
1533        new_snapshot: &LocalSnapshot,
1534    ) -> UpdatedGitRepositoriesSet {
1535        let mut changes = Vec::new();
1536        let mut old_repos = old_snapshot.git_repositories.iter().peekable();
1537        let mut new_repos = new_snapshot.git_repositories.iter().peekable();
1538
1539        loop {
1540            match (new_repos.peek().map(clone), old_repos.peek().map(clone)) {
1541                (Some((new_entry_id, new_repo)), Some((old_entry_id, old_repo))) => {
1542                    match Ord::cmp(&new_entry_id, &old_entry_id) {
1543                        Ordering::Less => {
1544                            if let Some(entry) = new_snapshot.entry_for_id(new_entry_id) {
1545                                changes.push((
1546                                    entry.path.clone(),
1547                                    GitRepositoryChange {
1548                                        old_repository: None,
1549                                    },
1550                                ));
1551                            }
1552                            new_repos.next();
1553                        }
1554                        Ordering::Equal => {
1555                            if new_repo.git_dir_scan_id != old_repo.git_dir_scan_id
1556                                || new_repo.status_scan_id != old_repo.status_scan_id
1557                            {
1558                                if let Some(entry) = new_snapshot.entry_for_id(new_entry_id) {
1559                                    let old_repo = old_snapshot
1560                                        .repositories
1561                                        .get(&PathKey(entry.path.clone()), &())
1562                                        .cloned();
1563                                    changes.push((
1564                                        entry.path.clone(),
1565                                        GitRepositoryChange {
1566                                            old_repository: old_repo,
1567                                        },
1568                                    ));
1569                                }
1570                            }
1571                            new_repos.next();
1572                            old_repos.next();
1573                        }
1574                        Ordering::Greater => {
1575                            if let Some(entry) = old_snapshot.entry_for_id(old_entry_id) {
1576                                let old_repo = old_snapshot
1577                                    .repositories
1578                                    .get(&PathKey(entry.path.clone()), &())
1579                                    .cloned();
1580                                changes.push((
1581                                    entry.path.clone(),
1582                                    GitRepositoryChange {
1583                                        old_repository: old_repo,
1584                                    },
1585                                ));
1586                            }
1587                            old_repos.next();
1588                        }
1589                    }
1590                }
1591                (Some((entry_id, _)), None) => {
1592                    if let Some(entry) = new_snapshot.entry_for_id(entry_id) {
1593                        changes.push((
1594                            entry.path.clone(),
1595                            GitRepositoryChange {
1596                                old_repository: None,
1597                            },
1598                        ));
1599                    }
1600                    new_repos.next();
1601                }
1602                (None, Some((entry_id, _))) => {
1603                    if let Some(entry) = old_snapshot.entry_for_id(entry_id) {
1604                        let old_repo = old_snapshot
1605                            .repositories
1606                            .get(&PathKey(entry.path.clone()), &())
1607                            .cloned();
1608                        changes.push((
1609                            entry.path.clone(),
1610                            GitRepositoryChange {
1611                                old_repository: old_repo,
1612                            },
1613                        ));
1614                    }
1615                    old_repos.next();
1616                }
1617                (None, None) => break,
1618            }
1619        }
1620
1621        fn clone<T: Clone, U: Clone>(value: &(&T, &U)) -> (T, U) {
1622            (value.0.clone(), value.1.clone())
1623        }
1624
1625        changes.into()
1626    }
1627
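        /// Returns a future that resolves once the background scanner reports that it
        /// is no longer scanning (or its status channel is closed).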
1628    pub fn scan_complete(&self) -> impl Future<Output = ()> {
1629        let mut is_scanning_rx = self.is_scanning.1.clone();
1630        async move {
1631            let mut is_scanning = *is_scanning_rx.borrow();
1632            while is_scanning {
1633                if let Some(value) = is_scanning_rx.recv().await {
1634                    is_scanning = value;
1635                } else {
1636                    break;
1637                }
1638            }
1639        }
1640    }
1641
1642    pub fn snapshot(&self) -> LocalSnapshot {
1643        self.snapshot.clone()
1644    }
1645
1646    pub fn settings(&self) -> WorktreeSettings {
1647        self.settings.clone()
1648    }
1649
1650    pub fn get_local_repo(&self, repo: &RepositoryEntry) -> Option<&LocalRepositoryEntry> {
1651        self.git_repositories.get(&repo.work_directory_id)
1652    }
1653
1654    fn load_binary_file(
1655        &self,
1656        path: &Path,
1657        cx: &Context<Worktree>,
1658    ) -> Task<Result<LoadedBinaryFile>> {
1659        let path = Arc::from(path);
1660        let abs_path = self.absolutize(&path);
1661        let fs = self.fs.clone();
1662        let entry = self.refresh_entry(path.clone(), None, cx);
1663        let is_private = self.is_path_private(path.as_ref());
1664
1665        let worktree = cx.weak_entity();
1666        cx.background_executor().spawn(async move {
1667            let abs_path = abs_path?;
1668            let content = fs.load_bytes(&abs_path).await?;
1669
1670            let worktree = worktree
1671                .upgrade()
1672                .ok_or_else(|| anyhow!("worktree was dropped"))?;
1673            let file = match entry.await? {
1674                Some(entry) => File::for_entry(entry, worktree),
1675                None => {
1676                    let metadata = fs
1677                        .metadata(&abs_path)
1678                        .await
1679                        .with_context(|| {
1680                            format!("Loading metadata for excluded file {abs_path:?}")
1681                        })?
1682                        .with_context(|| {
1683                            format!("Excluded file {abs_path:?} got removed during loading")
1684                        })?;
1685                    Arc::new(File {
1686                        entry_id: None,
1687                        worktree,
1688                        path,
1689                        disk_state: DiskState::Present {
1690                            mtime: metadata.mtime,
1691                        },
1692                        is_local: true,
1693                        is_private,
1694                    })
1695                }
1696            };
1697
1698            Ok(LoadedBinaryFile { file, content })
1699        })
1700    }
1701
1702    fn load_file(&self, path: &Path, cx: &Context<Worktree>) -> Task<Result<LoadedFile>> {
1703        let path = Arc::from(path);
1704        let abs_path = self.absolutize(&path);
1705        let fs = self.fs.clone();
1706        let entry = self.refresh_entry(path.clone(), None, cx);
1707        let is_private = self.is_path_private(path.as_ref());
1708
1709        cx.spawn(|this, _cx| async move {
1710            let abs_path = abs_path?;
1711            let text = fs.load(&abs_path).await?;
1712
1713            let worktree = this
1714                .upgrade()
1715                .ok_or_else(|| anyhow!("worktree was dropped"))?;
1716            let file = match entry.await? {
1717                Some(entry) => File::for_entry(entry, worktree),
1718                None => {
1719                    let metadata = fs
1720                        .metadata(&abs_path)
1721                        .await
1722                        .with_context(|| {
1723                            format!("Loading metadata for excluded file {abs_path:?}")
1724                        })?
1725                        .with_context(|| {
1726                            format!("Excluded file {abs_path:?} got removed during loading")
1727                        })?;
1728                    Arc::new(File {
1729                        entry_id: None,
1730                        worktree,
1731                        path,
1732                        disk_state: DiskState::Present {
1733                            mtime: metadata.mtime,
1734                        },
1735                        is_local: true,
1736                        is_private,
1737                    })
1738                }
1739            };
1740
1741            Ok(LoadedFile { file, text })
1742        })
1743    }
1744
1745    /// Finds the lowest (deepest) path in the worktree's data structures that is an ancestor of the given path.
1746    fn lowest_ancestor(&self, path: &Path) -> PathBuf {
1747        let mut lowest_ancestor = None;
1748        for path in path.ancestors() {
1749            if self.entry_for_path(path).is_some() {
1750                lowest_ancestor = Some(path.to_path_buf());
1751                break;
1752            }
1753        }
1754
1755        lowest_ancestor.unwrap_or_else(|| PathBuf::from(""))
1756    }
1757
1758    fn create_entry(
1759        &self,
1760        path: impl Into<Arc<Path>>,
1761        is_dir: bool,
1762        cx: &Context<Worktree>,
1763    ) -> Task<Result<CreatedEntry>> {
1764        let path = path.into();
1765        let abs_path = match self.absolutize(&path) {
1766            Ok(path) => path,
1767            Err(e) => return Task::ready(Err(e.context(format!("absolutizing path {path:?}")))),
1768        };
1769        let path_excluded = self.settings.is_path_excluded(&abs_path);
1770        let fs = self.fs.clone();
1771        let task_abs_path = abs_path.clone();
1772        let write = cx.background_executor().spawn(async move {
1773            if is_dir {
1774                fs.create_dir(&task_abs_path)
1775                    .await
1776                    .with_context(|| format!("creating directory {task_abs_path:?}"))
1777            } else {
1778                fs.save(&task_abs_path, &Rope::default(), LineEnding::default())
1779                    .await
1780                    .with_context(|| format!("creating file {task_abs_path:?}"))
1781            }
1782        });
1783
1784        let lowest_ancestor = self.lowest_ancestor(&path);
1785        cx.spawn(|this, mut cx| async move {
1786            write.await?;
1787            if path_excluded {
1788                return Ok(CreatedEntry::Excluded { abs_path });
1789            }
1790
1791            let (result, refreshes) = this.update(&mut cx, |this, cx| {
1792                let mut refreshes = Vec::new();
1793                let refresh_paths = path.strip_prefix(&lowest_ancestor).unwrap();
1794                for refresh_path in refresh_paths.ancestors() {
1795                    if refresh_path == Path::new("") {
1796                        continue;
1797                    }
1798                    let refresh_full_path = lowest_ancestor.join(refresh_path);
1799
1800                    refreshes.push(this.as_local_mut().unwrap().refresh_entry(
1801                        refresh_full_path.into(),
1802                        None,
1803                        cx,
1804                    ));
1805                }
1806                (
1807                    this.as_local_mut().unwrap().refresh_entry(path, None, cx),
1808                    refreshes,
1809                )
1810            })?;
1811            for refresh in refreshes {
1812                refresh.await.log_err();
1813            }
1814
1815            Ok(result
1816                .await?
1817                .map(CreatedEntry::Included)
1818                .unwrap_or_else(|| CreatedEntry::Excluded { abs_path }))
1819        })
1820    }
1821
1822    fn write_file(
1823        &self,
1824        path: impl Into<Arc<Path>>,
1825        text: Rope,
1826        line_ending: LineEnding,
1827        cx: &Context<Worktree>,
1828    ) -> Task<Result<Arc<File>>> {
1829        let path = path.into();
1830        let fs = self.fs.clone();
1831        let is_private = self.is_path_private(&path);
1832        let Ok(abs_path) = self.absolutize(&path) else {
1833            return Task::ready(Err(anyhow!("invalid path {path:?}")));
1834        };
1835
1836        let write = cx.background_executor().spawn({
1837            let fs = fs.clone();
1838            let abs_path = abs_path.clone();
1839            async move { fs.save(&abs_path, &text, line_ending).await }
1840        });
1841
1842        cx.spawn(move |this, mut cx| async move {
1843            write.await?;
1844            let entry = this
1845                .update(&mut cx, |this, cx| {
1846                    this.as_local_mut()
1847                        .unwrap()
1848                        .refresh_entry(path.clone(), None, cx)
1849                })?
1850                .await?;
1851            let worktree = this.upgrade().ok_or_else(|| anyhow!("worktree dropped"))?;
1852            if let Some(entry) = entry {
1853                Ok(File::for_entry(entry, worktree))
1854            } else {
1855                let metadata = fs
1856                    .metadata(&abs_path)
1857                    .await
1858                    .with_context(|| {
1859                        format!("Fetching metadata after saving the excluded buffer {abs_path:?}")
1860                    })?
1861                    .with_context(|| {
1862                        format!("Excluded buffer {path:?} got removed during saving")
1863                    })?;
1864                Ok(Arc::new(File {
1865                    worktree,
1866                    path,
1867                    disk_state: DiskState::Present {
1868                        mtime: metadata.mtime,
1869                    },
1870                    entry_id: None,
1871                    is_local: true,
1872                    is_private,
1873                }))
1874            }
1875        })
1876    }
1877
1878    fn delete_entry(
1879        &self,
1880        entry_id: ProjectEntryId,
1881        trash: bool,
1882        cx: &Context<Worktree>,
1883    ) -> Option<Task<Result<()>>> {
1884        let entry = self.entry_for_id(entry_id)?.clone();
1885        let abs_path = self.absolutize(&entry.path);
1886        let fs = self.fs.clone();
1887
1888        let delete = cx.background_executor().spawn(async move {
1889            if entry.is_file() {
1890                if trash {
1891                    fs.trash_file(&abs_path?, Default::default()).await?;
1892                } else {
1893                    fs.remove_file(&abs_path?, Default::default()).await?;
1894                }
1895            } else if trash {
1896                fs.trash_dir(
1897                    &abs_path?,
1898                    RemoveOptions {
1899                        recursive: true,
1900                        ignore_if_not_exists: false,
1901                    },
1902                )
1903                .await?;
1904            } else {
1905                fs.remove_dir(
1906                    &abs_path?,
1907                    RemoveOptions {
1908                        recursive: true,
1909                        ignore_if_not_exists: false,
1910                    },
1911                )
1912                .await?;
1913            }
1914            anyhow::Ok(entry.path)
1915        });
1916
1917        Some(cx.spawn(|this, mut cx| async move {
1918            let path = delete.await?;
1919            this.update(&mut cx, |this, _| {
1920                this.as_local_mut()
1921                    .unwrap()
1922                    .refresh_entries_for_paths(vec![path])
1923            })?
1924            .recv()
1925            .await;
1926            Ok(())
1927        }))
1928    }
1929
1930    /// Rename an entry.
1931    ///
1932    /// `new_path` is the new path, relative to the worktree root.
1933    /// If the root entry is renamed then `new_path` is the new root name instead.
1934    fn rename_entry(
1935        &self,
1936        entry_id: ProjectEntryId,
1937        new_path: impl Into<Arc<Path>>,
1938        cx: &Context<Worktree>,
1939    ) -> Task<Result<CreatedEntry>> {
1940        let old_path = match self.entry_for_id(entry_id) {
1941            Some(entry) => entry.path.clone(),
1942            None => return Task::ready(Err(anyhow!("no entry to rename for id {entry_id:?}"))),
1943        };
1944        let new_path = new_path.into();
1945        let abs_old_path = self.absolutize(&old_path);
1946
1947        let is_root_entry = self.root_entry().is_some_and(|e| e.id == entry_id);
1948        let abs_new_path = if is_root_entry {
1949            let Some(root_parent_path) = self.abs_path().parent() else {
1950                return Task::ready(Err(anyhow!("no parent for path {:?}", self.abs_path)));
1951            };
1952            root_parent_path.join(&new_path)
1953        } else {
1954            let Ok(absolutize_path) = self.absolutize(&new_path) else {
1955                return Task::ready(Err(anyhow!("absolutizing path {new_path:?}")));
1956            };
1957            absolutize_path
1958        };
1959        let abs_path = abs_new_path.clone();
1960        let fs = self.fs.clone();
1961        let case_sensitive = self.fs_case_sensitive;
1962        let rename = cx.background_executor().spawn(async move {
1963            let abs_old_path = abs_old_path?;
1964            let abs_new_path = abs_new_path;
1965
1966            let abs_old_path_lower = abs_old_path.to_str().map(|p| p.to_lowercase());
1967            let abs_new_path_lower = abs_new_path.to_str().map(|p| p.to_lowercase());
1968
1969            // If we're on a case-insensitive FS and we're doing a case-only rename (i.e. `foobar` to `FOOBAR`)
1970            // we want to overwrite, because otherwise we run into a file-already-exists error.
1971            let overwrite = !case_sensitive
1972                && abs_old_path != abs_new_path
1973                && abs_old_path_lower == abs_new_path_lower;
1974
1975            fs.rename(
1976                &abs_old_path,
1977                &abs_new_path,
1978                fs::RenameOptions {
1979                    overwrite,
1980                    ..Default::default()
1981                },
1982            )
1983            .await
1984            .with_context(|| format!("Renaming {abs_old_path:?} into {abs_new_path:?}"))
1985        });
1986
1987        cx.spawn(|this, mut cx| async move {
1988            rename.await?;
1989            Ok(this
1990                .update(&mut cx, |this, cx| {
1991                    let local = this.as_local_mut().unwrap();
1992                    if is_root_entry {
1993                        // We eagerly update `abs_path` and refresh this worktree.
1994                        // Otherwise, the FS watcher would do it on the `RootUpdated` event,
1995                        // but with a noticeable delay, so we handle it proactively.
1996                        local.update_abs_path_and_refresh(
1997                            Some(SanitizedPath::from(abs_path.clone())),
1998                            cx,
1999                        );
2000                        Task::ready(Ok(this.root_entry().cloned()))
2001                    } else {
2002                        local.refresh_entry(new_path.clone(), Some(old_path), cx)
2003                    }
2004                })?
2005                .await?
2006                .map(CreatedEntry::Included)
2007                .unwrap_or_else(|| CreatedEntry::Excluded { abs_path }))
2008        })
2009    }
2010
2011    fn copy_entry(
2012        &self,
2013        entry_id: ProjectEntryId,
2014        relative_worktree_source_path: Option<PathBuf>,
2015        new_path: impl Into<Arc<Path>>,
2016        cx: &Context<Worktree>,
2017    ) -> Task<Result<Option<Entry>>> {
2018        let old_path = match self.entry_for_id(entry_id) {
2019            Some(entry) => entry.path.clone(),
2020            None => return Task::ready(Ok(None)),
2021        };
2022        let new_path = new_path.into();
2023        let abs_old_path =
2024            if let Some(relative_worktree_source_path) = relative_worktree_source_path {
2025                Ok(self.abs_path().join(relative_worktree_source_path))
2026            } else {
2027                self.absolutize(&old_path)
2028            };
2029        let abs_new_path = self.absolutize(&new_path);
2030        let fs = self.fs.clone();
2031        let copy = cx.background_executor().spawn(async move {
2032            copy_recursive(
2033                fs.as_ref(),
2034                &abs_old_path?,
2035                &abs_new_path?,
2036                Default::default(),
2037            )
2038            .await
2039        });
2040
2041        cx.spawn(|this, mut cx| async move {
2042            copy.await?;
2043            this.update(&mut cx, |this, cx| {
2044                this.as_local_mut()
2045                    .unwrap()
2046                    .refresh_entry(new_path.clone(), None, cx)
2047            })?
2048            .await
2049        })
2050    }
2051
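        /// Copies external files into `target_directory` inside the worktree, skipping
        /// any source that would be copied onto itself, then rescans the copied paths
        /// and returns the project entry ids that now correspond to them.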
2052    pub fn copy_external_entries(
2053        &self,
2054        target_directory: PathBuf,
2055        paths: Vec<Arc<Path>>,
2056        overwrite_existing_files: bool,
2057        cx: &Context<Worktree>,
2058    ) -> Task<Result<Vec<ProjectEntryId>>> {
2059        let worktree_path = self.abs_path().clone();
2060        let fs = self.fs.clone();
2061        let paths = paths
2062            .into_iter()
2063            .filter_map(|source| {
2064                let file_name = source.file_name()?;
2065                let mut target = target_directory.clone();
2066                target.push(file_name);
2067
2068                // Do not allow copying the same file to itself.
2069                if source.as_ref() != target.as_path() {
2070                    Some((source, target))
2071                } else {
2072                    None
2073                }
2074            })
2075            .collect::<Vec<_>>();
2076
2077        let paths_to_refresh = paths
2078            .iter()
2079            .filter_map(|(_, target)| Some(target.strip_prefix(&worktree_path).ok()?.into()))
2080            .collect::<Vec<_>>();
2081
2082        cx.spawn(|this, cx| async move {
2083            cx.background_executor()
2084                .spawn(async move {
2085                    for (source, target) in paths {
2086                        copy_recursive(
2087                            fs.as_ref(),
2088                            &source,
2089                            &target,
2090                            fs::CopyOptions {
2091                                overwrite: overwrite_existing_files,
2092                                ..Default::default()
2093                            },
2094                        )
2095                        .await
2096                        .with_context(|| {
2097                            anyhow!("Failed to copy file from {source:?} to {target:?}")
2098                        })?;
2099                    }
2100                    Ok::<(), anyhow::Error>(())
2101                })
2102                .await
2103                .log_err();
2104            let mut refresh = cx.read_entity(
2105                &this.upgrade().with_context(|| "Dropped worktree")?,
2106                |this, _| {
2107                    Ok::<postage::barrier::Receiver, anyhow::Error>(
2108                        this.as_local()
2109                            .with_context(|| "Worktree is not local")?
2110                            .refresh_entries_for_paths(paths_to_refresh.clone()),
2111                    )
2112                },
2113            )??;
2114
2115            cx.background_executor()
2116                .spawn(async move {
2117                    refresh.next().await;
2118                    Ok::<(), anyhow::Error>(())
2119                })
2120                .await
2121                .log_err();
2122
2123            let this = this.upgrade().with_context(|| "Dropped worktree")?;
2124            cx.read_entity(&this, |this, _| {
2125                paths_to_refresh
2126                    .iter()
2127                    .filter_map(|path| Some(this.entry_for_path(path)?.id))
2128                    .collect()
2129            })
2130        })
2131    }
2132
2133    fn expand_entry(
2134        &self,
2135        entry_id: ProjectEntryId,
2136        cx: &Context<Worktree>,
2137    ) -> Option<Task<Result<()>>> {
2138        let path = self.entry_for_id(entry_id)?.path.clone();
2139        let mut refresh = self.refresh_entries_for_paths(vec![path]);
2140        Some(cx.background_executor().spawn(async move {
2141            refresh.next().await;
2142            Ok(())
2143        }))
2144    }
2145
2146    fn expand_all_for_entry(
2147        &self,
2148        entry_id: ProjectEntryId,
2149        cx: &Context<Worktree>,
2150    ) -> Option<Task<Result<()>>> {
2151        let path = self.entry_for_id(entry_id)?.path.clone();
2152        let mut rx = self.add_path_prefix_to_scan(path.clone());
2153        Some(cx.background_executor().spawn(async move {
2154            rx.next().await;
2155            Ok(())
2156        }))
2157    }
2158
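        /// Requests that the background scanner rescan the given worktree-relative
        /// paths. The returned barrier receiver completes once the scanner has
        /// processed the request.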
2159    fn refresh_entries_for_paths(&self, paths: Vec<Arc<Path>>) -> barrier::Receiver {
2160        let (tx, rx) = barrier::channel();
2161        self.scan_requests_tx
2162            .try_send(ScanRequest {
2163                relative_paths: paths,
2164                done: smallvec![tx],
2165            })
2166            .ok();
2167        rx
2168    }
2169
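        /// Requests that the background scanner scan everything under the given path
        /// prefix, returning a barrier receiver that completes once the request has
        /// been handled.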
2170    pub fn add_path_prefix_to_scan(&self, path_prefix: Arc<Path>) -> barrier::Receiver {
2171        let (tx, rx) = barrier::channel();
2172        self.path_prefixes_to_scan_tx
2173            .try_send(PathPrefixScanRequest {
2174                path: path_prefix,
2175                done: smallvec![tx],
2176            })
2177            .ok();
2178        rx
2179    }
2180
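        /// Rescans `path` (and `old_path`, if given). Returns `Ok(None)` if the path is
        /// excluded from the worktree, and the refreshed entry otherwise.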
2181    fn refresh_entry(
2182        &self,
2183        path: Arc<Path>,
2184        old_path: Option<Arc<Path>>,
2185        cx: &Context<Worktree>,
2186    ) -> Task<Result<Option<Entry>>> {
2187        if self.settings.is_path_excluded(&path) {
2188            return Task::ready(Ok(None));
2189        }
2190        let paths = if let Some(old_path) = old_path.as_ref() {
2191            vec![old_path.clone(), path.clone()]
2192        } else {
2193            vec![path.clone()]
2194        };
2195        let t0 = Instant::now();
2196        let mut refresh = self.refresh_entries_for_paths(paths);
2197        cx.spawn(move |this, mut cx| async move {
2198            refresh.recv().await;
2199            log::trace!("refreshed entry {path:?} in {:?}", t0.elapsed());
2200            let new_entry = this.update(&mut cx, |this, _| {
2201                this.entry_for_path(path)
2202                    .cloned()
2203                    .ok_or_else(|| anyhow!("failed to read path after update"))
2204            })??;
2205            Ok(Some(new_entry))
2206        })
2207    }
2208
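        /// Streams this worktree's state to `callback` as `UpdateWorktree` messages:
        /// first a full snapshot, then incremental updates as new snapshots arrive.
        /// When the callback reports failure, sending pauses until updates are resumed
        /// and the same chunk is retried.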
2209    fn observe_updates<F, Fut>(&mut self, project_id: u64, cx: &Context<Worktree>, callback: F)
2210    where
2211        F: 'static + Send + Fn(proto::UpdateWorktree) -> Fut,
2212        Fut: Send + Future<Output = bool>,
2213    {
2214        if let Some(observer) = self.update_observer.as_mut() {
2215            *observer.resume_updates.borrow_mut() = ();
2216            return;
2217        }
2218
2219        let (resume_updates_tx, mut resume_updates_rx) = watch::channel::<()>();
2220        let (snapshots_tx, mut snapshots_rx) =
2221            mpsc::unbounded::<(LocalSnapshot, UpdatedEntriesSet, UpdatedGitRepositoriesSet)>();
2222        snapshots_tx
2223            .unbounded_send((self.snapshot(), Arc::default(), Arc::default()))
2224            .ok();
2225
2226        let worktree_id = cx.entity_id().as_u64();
2227        let _maintain_remote_snapshot = cx.background_executor().spawn(async move {
2228            let mut is_first = true;
2229            while let Some((snapshot, entry_changes, repo_changes)) = snapshots_rx.next().await {
2230                let update;
2231                if is_first {
2232                    update = snapshot.build_initial_update(project_id, worktree_id);
2233                    is_first = false;
2234                } else {
2235                    update =
2236                        snapshot.build_update(project_id, worktree_id, entry_changes, repo_changes);
2237                }
2238
2239                for update in proto::split_worktree_update(update) {
2240                    let _ = resume_updates_rx.try_recv();
2241                    loop {
2242                        let result = callback(update.clone());
2243                        if result.await {
2244                            break;
2245                        } else {
2246                            log::info!("waiting to resume updates");
2247                            if resume_updates_rx.next().await.is_none() {
2248                                return Some(());
2249                            }
2250                        }
2251                    }
2252                }
2253            }
2254            Some(())
2255        });
2256
2257        self.update_observer = Some(UpdateObservationState {
2258            snapshots_tx,
2259            resume_updates: resume_updates_tx,
2260            _maintain_remote_snapshot,
2261        });
2262    }
2263
2264    pub fn share_private_files(&mut self, cx: &Context<Worktree>) {
2265        self.share_private_files = true;
2266        self.restart_background_scanners(cx);
2267    }
2268
2269    fn update_abs_path_and_refresh(
2270        &mut self,
2271        new_path: Option<SanitizedPath>,
2272        cx: &Context<Worktree>,
2273    ) {
2274        if let Some(new_path) = new_path {
2275            self.snapshot.git_repositories = Default::default();
2276            self.snapshot.ignores_by_parent_abs_path = Default::default();
2277            let root_name = new_path
2278                .as_path()
2279                .file_name()
2280                .map_or(String::new(), |f| f.to_string_lossy().to_string());
2281            self.snapshot.update_abs_path(new_path, root_name);
2282        }
2283        self.restart_background_scanners(cx);
2284    }
2285}
2286
2287impl RemoteWorktree {
2288    pub fn project_id(&self) -> u64 {
2289        self.project_id
2290    }
2291
2292    pub fn client(&self) -> AnyProtoClient {
2293        self.client.clone()
2294    }
2295
2296    pub fn disconnected_from_host(&mut self) {
2297        self.updates_tx.take();
2298        self.snapshot_subscriptions.clear();
2299        self.disconnected = true;
2300    }
2301
2302    pub fn update_from_remote(&self, update: proto::UpdateWorktree) {
2303        if let Some(updates_tx) = &self.updates_tx {
2304            updates_tx
2305                .unbounded_send(update)
2306                .expect("consumer runs to completion");
2307        }
2308    }
2309
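        /// Forwards this remote worktree's state to `callback`, starting with a full
        /// snapshot and then relaying each update received from the host, until the
        /// callback declines a chunk or the update stream ends.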
2310    fn observe_updates<F, Fut>(&mut self, project_id: u64, cx: &Context<Worktree>, callback: F)
2311    where
2312        F: 'static + Send + Fn(proto::UpdateWorktree) -> Fut,
2313        Fut: 'static + Send + Future<Output = bool>,
2314    {
2315        let (tx, mut rx) = mpsc::unbounded();
2316        let initial_update = self
2317            .snapshot
2318            .build_initial_update(project_id, self.id().to_proto());
2319        self.update_observer = Some(tx);
2320        cx.spawn(|this, mut cx| async move {
2321            let mut update = initial_update;
2322            'outer: loop {
2323                // SSH projects use a special project ID of 0, and we need to
2324                // remap it to the correct one here.
2325                update.project_id = project_id;
2326
2327                for chunk in split_worktree_update(update) {
2328                    if !callback(chunk).await {
2329                        break 'outer;
2330                    }
2331                }
2332
2333                if let Some(next_update) = rx.next().await {
2334                    update = next_update;
2335                } else {
2336                    break;
2337                }
2338            }
2339            this.update(&mut cx, |this, _| {
2340                let this = this.as_remote_mut().unwrap();
2341                this.update_observer.take();
2342            })
2343        })
2344        .detach();
2345    }
2346
2347    fn observed_snapshot(&self, scan_id: usize) -> bool {
2348        self.completed_scan_id >= scan_id
2349    }
2350
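        /// Returns a future that resolves once a snapshot with at least the given scan
        /// id has been observed, or fails if the worktree disconnects from the host first.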
2351    pub fn wait_for_snapshot(&mut self, scan_id: usize) -> impl Future<Output = Result<()>> {
2352        let (tx, rx) = oneshot::channel();
2353        if self.observed_snapshot(scan_id) {
2354            let _ = tx.send(());
2355        } else if self.disconnected {
2356            drop(tx);
2357        } else {
2358            match self
2359                .snapshot_subscriptions
2360                .binary_search_by_key(&scan_id, |probe| probe.0)
2361            {
2362                Ok(ix) | Err(ix) => self.snapshot_subscriptions.insert(ix, (scan_id, tx)),
2363            }
2364        }
2365
2366        async move {
2367            rx.await?;
2368            Ok(())
2369        }
2370    }
2371
2372    fn insert_entry(
2373        &mut self,
2374        entry: proto::Entry,
2375        scan_id: usize,
2376        cx: &Context<Worktree>,
2377    ) -> Task<Result<Entry>> {
2378        let wait_for_snapshot = self.wait_for_snapshot(scan_id);
2379        cx.spawn(|this, mut cx| async move {
2380            wait_for_snapshot.await?;
2381            this.update(&mut cx, |worktree, _| {
2382                let worktree = worktree.as_remote_mut().unwrap();
2383                let snapshot = &mut worktree.background_snapshot.lock().0;
2384                let entry = snapshot.insert_entry(entry, &worktree.file_scan_inclusions);
2385                worktree.snapshot = snapshot.clone();
2386                entry
2387            })?
2388        })
2389    }
2390
2391    fn delete_entry(
2392        &self,
2393        entry_id: ProjectEntryId,
2394        trash: bool,
2395        cx: &Context<Worktree>,
2396    ) -> Option<Task<Result<()>>> {
2397        let response = self.client.request(proto::DeleteProjectEntry {
2398            project_id: self.project_id,
2399            entry_id: entry_id.to_proto(),
2400            use_trash: trash,
2401        });
2402        Some(cx.spawn(move |this, mut cx| async move {
2403            let response = response.await?;
2404            let scan_id = response.worktree_scan_id as usize;
2405
2406            this.update(&mut cx, move |this, _| {
2407                this.as_remote_mut().unwrap().wait_for_snapshot(scan_id)
2408            })?
2409            .await?;
2410
2411            this.update(&mut cx, |this, _| {
2412                let this = this.as_remote_mut().unwrap();
2413                let snapshot = &mut this.background_snapshot.lock().0;
2414                snapshot.delete_entry(entry_id);
2415                this.snapshot = snapshot.clone();
2416            })
2417        }))
2418    }
2419
2420    fn rename_entry(
2421        &self,
2422        entry_id: ProjectEntryId,
2423        new_path: impl Into<Arc<Path>>,
2424        cx: &Context<Worktree>,
2425    ) -> Task<Result<CreatedEntry>> {
2426        let new_path: Arc<Path> = new_path.into();
2427        let response = self.client.request(proto::RenameProjectEntry {
2428            project_id: self.project_id,
2429            entry_id: entry_id.to_proto(),
2430            new_path: new_path.as_ref().to_proto(),
2431        });
2432        cx.spawn(move |this, mut cx| async move {
2433            let response = response.await?;
2434            match response.entry {
2435                Some(entry) => this
2436                    .update(&mut cx, |this, cx| {
2437                        this.as_remote_mut().unwrap().insert_entry(
2438                            entry,
2439                            response.worktree_scan_id as usize,
2440                            cx,
2441                        )
2442                    })?
2443                    .await
2444                    .map(CreatedEntry::Included),
2445                None => {
2446                    let abs_path = this.update(&mut cx, |worktree, _| {
2447                        worktree
2448                            .absolutize(&new_path)
2449                            .with_context(|| format!("absolutizing {new_path:?}"))
2450                    })??;
2451                    Ok(CreatedEntry::Excluded { abs_path })
2452                }
2453            }
2454        })
2455    }
2456}
2457
2458impl Snapshot {
2459    pub fn new(id: u64, root_name: String, abs_path: Arc<Path>) -> Self {
2460        Snapshot {
2461            id: WorktreeId::from_usize(id as usize),
2462            abs_path: abs_path.into(),
2463            root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
2464            root_name,
2465            always_included_entries: Default::default(),
2466            entries_by_path: Default::default(),
2467            entries_by_id: Default::default(),
2468            repositories: Default::default(),
2469            scan_id: 1,
2470            completed_scan_id: 0,
2471        }
2472    }
2473
2474    pub fn id(&self) -> WorktreeId {
2475        self.id
2476    }
2477
2478    // TODO:
2479    // Consider the following:
2480    //
2481    // ```rust
2482    // let abs_path: Arc<Path> = snapshot.abs_path(); // e.g. "C:\Users\user\Desktop\project"
2483    // let some_non_trimmed_path = Path::new("\\\\?\\C:\\Users\\user\\Desktop\\project\\main.rs");
2484    // The caller performs some actions here:
2485    // some_non_trimmed_path.strip_prefix(abs_path);  // This fails
2486    // some_non_trimmed_path.starts_with(abs_path);   // This fails too
2487    // ```
2488    //
2489    // This is definitely a bug, but it's not clear if we should handle it here or not.
2490    pub fn abs_path(&self) -> &Arc<Path> {
2491        self.abs_path.as_path()
2492    }
2493
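        /// Builds the first `UpdateWorktree` message for a remote peer, containing every
        /// entry and repository in this snapshot, sorted by id.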
2494    fn build_initial_update(&self, project_id: u64, worktree_id: u64) -> proto::UpdateWorktree {
2495        let mut updated_entries = self
2496            .entries_by_path
2497            .iter()
2498            .map(proto::Entry::from)
2499            .collect::<Vec<_>>();
2500        updated_entries.sort_unstable_by_key(|e| e.id);
2501
2502        let mut updated_repositories = self
2503            .repositories
2504            .iter()
2505            .map(|repository| repository.initial_update())
2506            .collect::<Vec<_>>();
2507        updated_repositories.sort_unstable_by_key(|e| e.work_directory_id);
2508
2509        proto::UpdateWorktree {
2510            project_id,
2511            worktree_id,
2512            abs_path: self.abs_path().to_proto(),
2513            root_name: self.root_name().to_string(),
2514            updated_entries,
2515            removed_entries: Vec::new(),
2516            scan_id: self.scan_id as u64,
2517            is_last_update: self.completed_scan_id == self.scan_id,
2518            updated_repositories,
2519            removed_repositories: Vec::new(),
2520        }
2521    }
2522
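        /// Converts a worktree-relative path into an absolute path, rejecting paths that
        /// contain `..` or other non-normal components. An empty path resolves to the
        /// worktree's root.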
2523    pub fn absolutize(&self, path: &Path) -> Result<PathBuf> {
2524        if path
2525            .components()
2526            .any(|component| !matches!(component, std::path::Component::Normal(_)))
2527        {
2528            return Err(anyhow!("invalid path"));
2529        }
2530        if path.file_name().is_some() {
2531            Ok(self.abs_path.as_path().join(path))
2532        } else {
2533            Ok(self.abs_path.as_path().to_path_buf())
2534        }
2535    }
2536
2537    pub fn contains_entry(&self, entry_id: ProjectEntryId) -> bool {
2538        self.entries_by_id.get(&entry_id, &()).is_some()
2539    }
2540
2541    fn insert_entry(
2542        &mut self,
2543        entry: proto::Entry,
2544        always_included_paths: &PathMatcher,
2545    ) -> Result<Entry> {
2546        let entry = Entry::try_from((&self.root_char_bag, always_included_paths, entry))?;
2547        let old_entry = self.entries_by_id.insert_or_replace(
2548            PathEntry {
2549                id: entry.id,
2550                path: entry.path.clone(),
2551                is_ignored: entry.is_ignored,
2552                scan_id: 0,
2553            },
2554            &(),
2555        );
2556        if let Some(old_entry) = old_entry {
2557            self.entries_by_path.remove(&PathKey(old_entry.path), &());
2558        }
2559        self.entries_by_path.insert_or_replace(entry.clone(), &());
2560        Ok(entry)
2561    }
2562
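        /// Removes the entry with the given id, along with all of its descendant entries,
        /// and returns the removed entry's path.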
2563    fn delete_entry(&mut self, entry_id: ProjectEntryId) -> Option<Arc<Path>> {
2564        let removed_entry = self.entries_by_id.remove(&entry_id, &())?;
2565        self.entries_by_path = {
2566            let mut cursor = self.entries_by_path.cursor::<TraversalProgress>(&());
2567            let mut new_entries_by_path =
2568                cursor.slice(&TraversalTarget::path(&removed_entry.path), Bias::Left, &());
2569            while let Some(entry) = cursor.item() {
2570                if entry.path.starts_with(&removed_entry.path) {
2571                    self.entries_by_id.remove(&entry.id, &());
2572                    cursor.next(&());
2573                } else {
2574                    break;
2575                }
2576            }
2577            new_entries_by_path.append(cursor.suffix(&()), &());
2578            new_entries_by_path
2579        };
2580
2581        Some(removed_entry.path)
2582    }
2583
2584    pub fn status_for_file(&self, path: impl AsRef<Path>) -> Option<FileStatus> {
2585        let path = path.as_ref();
2586        self.repository_for_path(path).and_then(|repo| {
2587            let repo_path = repo.relativize(path).unwrap();
2588            repo.statuses_by_path
2589                .get(&PathKey(repo_path.0), &())
2590                .map(|entry| entry.status)
2591        })
2592    }
2593
2594    fn update_abs_path(&mut self, abs_path: SanitizedPath, root_name: String) {
2595        self.abs_path = abs_path;
2596        if root_name != self.root_name {
2597            self.root_char_bag = root_name.chars().map(|c| c.to_ascii_lowercase()).collect();
2598            self.root_name = root_name;
2599        }
2600    }
2601
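        /// Applies an `UpdateWorktree` message received from the host, bringing this
        /// snapshot's entries, repositories, and scan ids in line with the sender's state.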
2602    pub(crate) fn apply_remote_update(
2603        &mut self,
2604        mut update: proto::UpdateWorktree,
2605        always_included_paths: &PathMatcher,
2606    ) -> Result<()> {
2607        log::trace!(
2608            "applying remote worktree update. {} entries updated, {} removed",
2609            update.updated_entries.len(),
2610            update.removed_entries.len()
2611        );
2612        self.update_abs_path(
2613            SanitizedPath::from(PathBuf::from_proto(update.abs_path)),
2614            update.root_name,
2615        );
2616
2617        let mut entries_by_path_edits = Vec::new();
2618        let mut entries_by_id_edits = Vec::new();
2619
2620        for entry_id in update.removed_entries {
2621            let entry_id = ProjectEntryId::from_proto(entry_id);
2622            entries_by_id_edits.push(Edit::Remove(entry_id));
2623            if let Some(entry) = self.entry_for_id(entry_id) {
2624                entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone())));
2625            }
2626        }
2627
2628        for entry in update.updated_entries {
2629            let entry = Entry::try_from((&self.root_char_bag, always_included_paths, entry))?;
2630            if let Some(PathEntry { path, .. }) = self.entries_by_id.get(&entry.id, &()) {
2631                entries_by_path_edits.push(Edit::Remove(PathKey(path.clone())));
2632            }
2633            if let Some(old_entry) = self.entries_by_path.get(&PathKey(entry.path.clone()), &()) {
2634                if old_entry.id != entry.id {
2635                    entries_by_id_edits.push(Edit::Remove(old_entry.id));
2636                }
2637            }
2638            entries_by_id_edits.push(Edit::Insert(PathEntry {
2639                id: entry.id,
2640                path: entry.path.clone(),
2641                is_ignored: entry.is_ignored,
2642                scan_id: 0,
2643            }));
2644            entries_by_path_edits.push(Edit::Insert(entry));
2645        }
2646
2647        self.entries_by_path.edit(entries_by_path_edits, &());
2648        self.entries_by_id.edit(entries_by_id_edits, &());
2649
2650        update.removed_repositories.sort_unstable();
2651        self.repositories.retain(&(), |entry: &RepositoryEntry| {
2652            update
2653                .removed_repositories
2654                .binary_search(&entry.work_directory_id.to_proto())
2655                .is_err()
2656        });
2657
2658        for repository in update.updated_repositories {
2659            let work_directory_id = ProjectEntryId::from_proto(repository.work_directory_id);
2660            if let Some(work_dir_entry) = self.entry_for_id(work_directory_id) {
2661                let conflicted_paths = TreeSet::from_ordered_entries(
2662                    repository
2663                        .current_merge_conflicts
2664                        .into_iter()
2665                        .map(|path| RepoPath(Path::new(&path).into())),
2666                );
2667
2668                if self
2669                    .repositories
2670                    .contains(&PathKey(work_dir_entry.path.clone()), &())
2671                {
2672                    let edits = repository
2673                        .removed_statuses
2674                        .into_iter()
2675                        .map(|path| Edit::Remove(PathKey(FromProto::from_proto(path))))
2676                        .chain(repository.updated_statuses.into_iter().filter_map(
2677                            |updated_status| {
2678                                Some(Edit::Insert(updated_status.try_into().log_err()?))
2679                            },
2680                        ))
2681                        .collect::<Vec<_>>();
2682
2683                    self.repositories
2684                        .update(&PathKey(work_dir_entry.path.clone()), &(), |repo| {
2685                            repo.branch = repository.branch_summary.as_ref().map(proto_to_branch);
2686                            repo.statuses_by_path.edit(edits, &());
2687                            repo.current_merge_conflicts = conflicted_paths
2688                        });
2689                } else {
2690                    let statuses = SumTree::from_iter(
2691                        repository
2692                            .updated_statuses
2693                            .into_iter()
2694                            .filter_map(|updated_status| updated_status.try_into().log_err()),
2695                        &(),
2696                    );
2697
2698                    self.repositories.insert_or_replace(
2699                        RepositoryEntry {
2700                            work_directory_id,
2701                            // When syncing repository entries from a peer, we don't need
2702                            // the location_in_repo field, since git operations don't happen locally
2703                            // anyway.
2704                            work_directory: WorkDirectory::InProject {
2705                                relative_path: work_dir_entry.path.clone(),
2706                            },
2707                            branch: repository.branch_summary.as_ref().map(proto_to_branch),
2708                            statuses_by_path: statuses,
2709                            current_merge_conflicts: conflicted_paths,
2710                        },
2711                        &(),
2712                    );
2713                }
2714            } else {
2715                log::error!(
2716                    "no work directory entry for repository {:?}",
2717                    repository.work_directory_id
2718                )
2719            }
2720        }
2721
2722        self.scan_id = update.scan_id as usize;
2723        if update.is_last_update {
2724            self.completed_scan_id = update.scan_id as usize;
2725        }
2726
2727        Ok(())
2728    }
2729
2730    pub fn entry_count(&self) -> usize {
2731        self.entries_by_path.summary().count
2732    }
2733
2734    pub fn visible_entry_count(&self) -> usize {
2735        self.entries_by_path.summary().non_ignored_count
2736    }
2737
2738    pub fn dir_count(&self) -> usize {
2739        let summary = self.entries_by_path.summary();
2740        summary.count - summary.file_count
2741    }
2742
2743    pub fn visible_dir_count(&self) -> usize {
2744        let summary = self.entries_by_path.summary();
2745        summary.non_ignored_count - summary.non_ignored_file_count
2746    }
2747
2748    pub fn file_count(&self) -> usize {
2749        self.entries_by_path.summary().file_count
2750    }
2751
2752    pub fn visible_file_count(&self) -> usize {
2753        self.entries_by_path.summary().non_ignored_file_count
2754    }
2755
2756    fn traverse_from_offset(
2757        &self,
2758        include_files: bool,
2759        include_dirs: bool,
2760        include_ignored: bool,
2761        start_offset: usize,
2762    ) -> Traversal {
2763        let mut cursor = self.entries_by_path.cursor(&());
2764        cursor.seek(
2765            &TraversalTarget::Count {
2766                count: start_offset,
2767                include_files,
2768                include_dirs,
2769                include_ignored,
2770            },
2771            Bias::Right,
2772            &(),
2773        );
2774        Traversal {
2775            snapshot: self,
2776            cursor,
2777            include_files,
2778            include_dirs,
2779            include_ignored,
2780        }
2781    }
2782
2783    pub fn traverse_from_path(
2784        &self,
2785        include_files: bool,
2786        include_dirs: bool,
2787        include_ignored: bool,
2788        path: &Path,
2789    ) -> Traversal {
2790        Traversal::new(self, include_files, include_dirs, include_ignored, path)
2791    }
2792
2793    pub fn files(&self, include_ignored: bool, start: usize) -> Traversal {
2794        self.traverse_from_offset(true, false, include_ignored, start)
2795    }
2796
2797    pub fn directories(&self, include_ignored: bool, start: usize) -> Traversal {
2798        self.traverse_from_offset(false, true, include_ignored, start)
2799    }
2800
2801    pub fn entries(&self, include_ignored: bool, start: usize) -> Traversal {
2802        self.traverse_from_offset(true, true, include_ignored, start)
2803    }
2804
2805    #[cfg(any(feature = "test-support", test))]
2806    pub fn git_status(&self, work_dir: &Path) -> Option<Vec<StatusEntry>> {
2807        self.repositories
2808            .get(&PathKey(work_dir.into()), &())
2809            .map(|repo| repo.status().collect())
2810    }
2811
2812    pub fn repositories(&self) -> &SumTree<RepositoryEntry> {
2813        &self.repositories
2814    }
2815
2816    /// Get the repository whose work directory corresponds to the given path.
2817    pub(crate) fn repository(&self, work_directory: PathKey) -> Option<RepositoryEntry> {
2818        self.repositories.get(&work_directory, &()).cloned()
2819    }
2820
2821    /// Get the repository whose work directory contains the given path.
2822    #[track_caller]
2823    pub fn repository_for_path(&self, path: &Path) -> Option<&RepositoryEntry> {
2824        self.repositories
2825            .iter()
2826            .filter(|repo| repo.work_directory.directory_contains(path))
2827            .last()
2828    }
2829
2830    /// Given an ordered iterator of entries, returns an iterator of those entries,
2831    /// along with their containing git repository.
2832    #[track_caller]
2833    pub fn entries_with_repositories<'a>(
2834        &'a self,
2835        entries: impl 'a + Iterator<Item = &'a Entry>,
2836    ) -> impl 'a + Iterator<Item = (&'a Entry, Option<&'a RepositoryEntry>)> {
2837        let mut containing_repos = Vec::<&RepositoryEntry>::new();
2838        let mut repositories = self.repositories().iter().peekable();
2839        entries.map(move |entry| {
2840            while let Some(repository) = containing_repos.last() {
2841                if repository.directory_contains(&entry.path) {
2842                    break;
2843                } else {
2844                    containing_repos.pop();
2845                }
2846            }
2847            while let Some(repository) = repositories.peek() {
2848                if repository.directory_contains(&entry.path) {
2849                    containing_repos.push(repositories.next().unwrap());
2850                } else {
2851                    break;
2852                }
2853            }
2854            let repo = containing_repos.last().copied();
2855            (entry, repo)
2856        })
2857    }
2858
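    // Illustrative sketch (not in the original source): pairing each entry with its
    // containing repository. `snapshot` is a hypothetical, already-populated `&Snapshot`.
    //
    //     for (entry, repo) in snapshot.entries_with_repositories(snapshot.entries(false, 0)) {
    //         if let Some(repo) = repo {
    //             // `entry` lives inside `repo`'s work directory.
    //         }
    //     }
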
2859    pub fn paths(&self) -> impl Iterator<Item = &Arc<Path>> {
2860        let empty_path = Path::new("");
2861        self.entries_by_path
2862            .cursor::<()>(&())
2863            .filter(move |entry| entry.path.as_ref() != empty_path)
2864            .map(|entry| &entry.path)
2865    }
2866
2867    pub fn child_entries<'a>(&'a self, parent_path: &'a Path) -> ChildEntriesIter<'a> {
2868        let options = ChildEntriesOptions {
2869            include_files: true,
2870            include_dirs: true,
2871            include_ignored: true,
2872        };
2873        self.child_entries_with_options(parent_path, options)
2874    }
2875
2876    pub fn child_entries_with_options<'a>(
2877        &'a self,
2878        parent_path: &'a Path,
2879        options: ChildEntriesOptions,
2880    ) -> ChildEntriesIter<'a> {
2881        let mut cursor = self.entries_by_path.cursor(&());
2882        cursor.seek(&TraversalTarget::path(parent_path), Bias::Right, &());
2883        let traversal = Traversal {
2884            snapshot: self,
2885            cursor,
2886            include_files: options.include_files,
2887            include_dirs: options.include_dirs,
2888            include_ignored: options.include_ignored,
2889        };
2890        ChildEntriesIter {
2891            traversal,
2892            parent_path,
2893        }
2894    }
2895
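    // Usage sketch (illustrative): listing the immediate children of a directory,
    // relative to the worktree root. `snapshot` is a hypothetical `&Snapshot`.
    //
    //     for child in snapshot.child_entries(Path::new("src")) {
    //         let _is_dir = child.is_dir();
    //     }
    //
    // `child_entries_with_options` is the same iterator, restricted to files,
    // directories, and/or ignored entries according to `ChildEntriesOptions`.
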
2896    pub fn root_entry(&self) -> Option<&Entry> {
2897        self.entry_for_path("")
2898    }
2899
2900    /// TODO: What's the difference between `root_dir` and `abs_path`?
2901    /// Is there any? If so, document it.
2902    pub fn root_dir(&self) -> Option<Arc<Path>> {
2903        self.root_entry()
2904            .filter(|entry| entry.is_dir())
2905            .map(|_| self.abs_path().clone())
2906    }
2907
2908    pub fn root_name(&self) -> &str {
2909        &self.root_name
2910    }
2911
2912    pub fn root_git_entry(&self) -> Option<RepositoryEntry> {
2913        self.repositories
2914            .get(&PathKey(Path::new("").into()), &())
2915            .map(|entry| entry.to_owned())
2916    }
2917
2918    pub fn git_entry(&self, work_directory_path: Arc<Path>) -> Option<RepositoryEntry> {
2919        self.repositories
2920            .get(&PathKey(work_directory_path), &())
2921            .map(|entry| entry.to_owned())
2922    }
2923
2924    pub fn git_entries(&self) -> impl Iterator<Item = &RepositoryEntry> {
2925        self.repositories.iter()
2926    }
2927
2928    pub fn scan_id(&self) -> usize {
2929        self.scan_id
2930    }
2931
2932    pub fn entry_for_path(&self, path: impl AsRef<Path>) -> Option<&Entry> {
2933        let path = path.as_ref();
2934        debug_assert!(path.is_relative());
2935        self.traverse_from_path(true, true, true, path)
2936            .entry()
2937            .and_then(|entry| {
2938                if entry.path.as_ref() == path {
2939                    Some(entry)
2940                } else {
2941                    None
2942                }
2943            })
2944    }
2945
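    // Sketch (illustrative): lookups are by worktree-relative path, per the
    // `debug_assert!` above; an absolute path must be stripped of the worktree root
    // before calling this. `snapshot` is a hypothetical `&Snapshot`.
    //
    //     let entry_id = snapshot
    //         .entry_for_path("src/main.rs")
    //         .map(|entry| entry.id);
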
2946    pub fn entry_for_id(&self, id: ProjectEntryId) -> Option<&Entry> {
2947        let entry = self.entries_by_id.get(&id, &())?;
2948        self.entry_for_path(&entry.path)
2949    }
2950
2951    pub fn inode_for_path(&self, path: impl AsRef<Path>) -> Option<u64> {
2952        self.entry_for_path(path.as_ref()).map(|e| e.inode)
2953    }
2954}
2955
2956impl LocalSnapshot {
2957    pub fn local_repo_for_path(&self, path: &Path) -> Option<&LocalRepositoryEntry> {
2958        let repository_entry = self.repository_for_path(path)?;
2959        let work_directory_id = repository_entry.work_directory_id();
2960        self.git_repositories.get(&work_directory_id)
2961    }
2962
2963    fn build_update(
2964        &self,
2965        project_id: u64,
2966        worktree_id: u64,
2967        entry_changes: UpdatedEntriesSet,
2968        repo_changes: UpdatedGitRepositoriesSet,
2969    ) -> proto::UpdateWorktree {
2970        let mut updated_entries = Vec::new();
2971        let mut removed_entries = Vec::new();
2972        let mut updated_repositories = Vec::new();
2973        let mut removed_repositories = Vec::new();
2974
2975        for (_, entry_id, path_change) in entry_changes.iter() {
2976            if let PathChange::Removed = path_change {
2977                removed_entries.push(entry_id.0 as u64);
2978            } else if let Some(entry) = self.entry_for_id(*entry_id) {
2979                updated_entries.push(proto::Entry::from(entry));
2980            }
2981        }
2982
2983        for (work_dir_path, change) in repo_changes.iter() {
2984            let new_repo = self.repositories.get(&PathKey(work_dir_path.clone()), &());
2985            match (&change.old_repository, new_repo) {
2986                (Some(old_repo), Some(new_repo)) => {
2987                    updated_repositories.push(new_repo.build_update(old_repo));
2988                }
2989                (None, Some(new_repo)) => {
2990                    updated_repositories.push(new_repo.initial_update());
2991                }
2992                (Some(old_repo), None) => {
2993                    removed_repositories.push(old_repo.work_directory_id.to_proto());
2994                }
2995                _ => {}
2996            }
2997        }
2998
2999        removed_entries.sort_unstable();
3000        updated_entries.sort_unstable_by_key(|e| e.id);
3001        removed_repositories.sort_unstable();
3002        updated_repositories.sort_unstable_by_key(|e| e.work_directory_id);
3003
3004        // TODO - optimize, knowing that `removed_entries` is sorted.
3005        removed_entries.retain(|id| updated_entries.binary_search_by_key(id, |e| e.id).is_err());
3006
3007        proto::UpdateWorktree {
3008            project_id,
3009            worktree_id,
3010            abs_path: self.abs_path().to_proto(),
3011            root_name: self.root_name().to_string(),
3012            updated_entries,
3013            removed_entries,
3014            scan_id: self.scan_id as u64,
3015            is_last_update: self.completed_scan_id == self.scan_id,
3016            updated_repositories,
3017            removed_repositories,
3018        }
3019    }
3020
3021    fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs) -> Entry {
3022        if entry.is_file() && entry.path.file_name() == Some(&GITIGNORE) {
3023            let abs_path = self.abs_path.as_path().join(&entry.path);
3024            match smol::block_on(build_gitignore(&abs_path, fs)) {
3025                Ok(ignore) => {
3026                    self.ignores_by_parent_abs_path
3027                        .insert(abs_path.parent().unwrap().into(), (Arc::new(ignore), true));
3028                }
3029                Err(error) => {
3030                    log::error!(
3031                        "error loading .gitignore file {:?} - {:?}",
3032                        &entry.path,
3033                        error
3034                    );
3035                }
3036            }
3037        }
3038
3039        if entry.kind == EntryKind::PendingDir {
3040            if let Some(existing_entry) =
3041                self.entries_by_path.get(&PathKey(entry.path.clone()), &())
3042            {
3043                entry.kind = existing_entry.kind;
3044            }
3045        }
3046
3047        let scan_id = self.scan_id;
3048        let removed = self.entries_by_path.insert_or_replace(entry.clone(), &());
3049        if let Some(removed) = removed {
3050            if removed.id != entry.id {
3051                self.entries_by_id.remove(&removed.id, &());
3052            }
3053        }
3054        self.entries_by_id.insert_or_replace(
3055            PathEntry {
3056                id: entry.id,
3057                path: entry.path.clone(),
3058                is_ignored: entry.is_ignored,
3059                scan_id,
3060            },
3061            &(),
3062        );
3063
3064        entry
3065    }
3066
3067    fn ancestor_inodes_for_path(&self, path: &Path) -> TreeSet<u64> {
3068        let mut inodes = TreeSet::default();
3069        for ancestor in path.ancestors().skip(1) {
3070            if let Some(entry) = self.entry_for_path(ancestor) {
3071                inodes.insert(entry.inode);
3072            }
3073        }
3074        inodes
3075    }
3076
3077    fn ignore_stack_for_abs_path(&self, abs_path: &Path, is_dir: bool) -> Arc<IgnoreStack> {
3078        let mut new_ignores = Vec::new();
3079        for (index, ancestor) in abs_path.ancestors().enumerate() {
3080            if index > 0 {
3081                if let Some((ignore, _)) = self.ignores_by_parent_abs_path.get(ancestor) {
3082                    new_ignores.push((ancestor, Some(ignore.clone())));
3083                } else {
3084                    new_ignores.push((ancestor, None));
3085                }
3086            }
3087            if ancestor.join(*DOT_GIT).exists() {
3088                break;
3089            }
3090        }
3091
3092        let mut ignore_stack = IgnoreStack::none();
3093        for (parent_abs_path, ignore) in new_ignores.into_iter().rev() {
3094            if ignore_stack.is_abs_path_ignored(parent_abs_path, true) {
3095                ignore_stack = IgnoreStack::all();
3096                break;
3097            } else if let Some(ignore) = ignore {
3098                ignore_stack = ignore_stack.append(parent_abs_path.into(), ignore);
3099            }
3100        }
3101
3102        if ignore_stack.is_abs_path_ignored(abs_path, is_dir) {
3103            ignore_stack = IgnoreStack::all();
3104        }
3105
3106        ignore_stack
3107    }
3108
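    // Illustrative note (not in the original source): `Path::ancestors()` yields the
    // path itself first, which is why the loop above only consults `.gitignore` data
    // for `index > 0`; a directory's own `.gitignore` does not determine whether that
    // directory itself is ignored.
    //
    //     let mut ancestors = Path::new("/repo/a/b").ancestors();
    //     assert_eq!(ancestors.next(), Some(Path::new("/repo/a/b")));
    //     assert_eq!(ancestors.next(), Some(Path::new("/repo/a")));
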
3109    #[cfg(test)]
3110    pub(crate) fn expanded_entries(&self) -> impl Iterator<Item = &Entry> {
3111        self.entries_by_path
3112            .cursor::<()>(&())
3113            .filter(|entry| entry.kind == EntryKind::Dir && (entry.is_external || entry.is_ignored))
3114    }
3115
3116    #[cfg(test)]
3117    pub fn check_invariants(&self, git_state: bool) {
3118        use pretty_assertions::assert_eq;
3119
3120        assert_eq!(
3121            self.entries_by_path
3122                .cursor::<()>(&())
3123                .map(|e| (&e.path, e.id))
3124                .collect::<Vec<_>>(),
3125            self.entries_by_id
3126                .cursor::<()>(&())
3127                .map(|e| (&e.path, e.id))
3128                .collect::<collections::BTreeSet<_>>()
3129                .into_iter()
3130                .collect::<Vec<_>>(),
3131            "entries_by_path and entries_by_id are inconsistent"
3132        );
3133
3134        let mut files = self.files(true, 0);
3135        let mut visible_files = self.files(false, 0);
3136        for entry in self.entries_by_path.cursor::<()>(&()) {
3137            if entry.is_file() {
3138                assert_eq!(files.next().unwrap().inode, entry.inode);
3139                if (!entry.is_ignored && !entry.is_external) || entry.is_always_included {
3140                    assert_eq!(visible_files.next().unwrap().inode, entry.inode);
3141                }
3142            }
3143        }
3144
3145        assert!(files.next().is_none());
3146        assert!(visible_files.next().is_none());
3147
3148        let mut dfs_paths_via_children = Vec::new();
3149        let mut stack = self
3150            .root_entry()
3151            .map(|e| e.path.as_ref())
3152            .into_iter()
3153            .collect::<Vec<_>>();
3154        while let Some(path) = stack.pop() {
3155            dfs_paths_via_children.push(path);
3156            let ix = stack.len();
3157            for child_entry in self.child_entries(path) {
3158                stack.insert(ix, &child_entry.path);
3159            }
3160        }
3161
3162        let dfs_paths_via_iter = self
3163            .entries_by_path
3164            .cursor::<()>(&())
3165            .map(|e| e.path.as_ref())
3166            .collect::<Vec<_>>();
3167        assert_eq!(dfs_paths_via_children, dfs_paths_via_iter);
3168
3169        let dfs_paths_via_traversal = self
3170            .entries(true, 0)
3171            .map(|e| e.path.as_ref())
3172            .collect::<Vec<_>>();
3173        assert_eq!(dfs_paths_via_traversal, dfs_paths_via_iter);
3174
3175        if git_state {
3176            for ignore_parent_abs_path in self.ignores_by_parent_abs_path.keys() {
3177                let ignore_parent_path = ignore_parent_abs_path
3178                    .strip_prefix(self.abs_path.as_path())
3179                    .unwrap();
3180                assert!(self.entry_for_path(ignore_parent_path).is_some());
3181                assert!(self
3182                    .entry_for_path(ignore_parent_path.join(*GITIGNORE))
3183                    .is_some());
3184            }
3185        }
3186    }
3187
3188    #[cfg(test)]
3189    fn check_git_invariants(&self) {
3190        let dotgit_paths = self
3191            .git_repositories
3192            .iter()
3193            .map(|repo| repo.1.dot_git_dir_abs_path.clone())
3194            .collect::<HashSet<_>>();
3195        let work_dir_paths = self
3196            .repositories
3197            .iter()
3198            .map(|repo| repo.work_directory.path_key())
3199            .collect::<HashSet<_>>();
3200        assert_eq!(dotgit_paths.len(), work_dir_paths.len());
3201        assert_eq!(self.repositories.iter().count(), work_dir_paths.len());
3202        assert_eq!(self.git_repositories.iter().count(), work_dir_paths.len());
3203        for entry in self.repositories.iter() {
3204            self.git_repositories.get(&entry.work_directory_id).unwrap();
3205        }
3206    }
3207
3208    #[cfg(test)]
3209    pub fn entries_without_ids(&self, include_ignored: bool) -> Vec<(&Path, u64, bool)> {
3210        let mut paths = Vec::new();
3211        for entry in self.entries_by_path.cursor::<()>(&()) {
3212            if include_ignored || !entry.is_ignored {
3213                paths.push((entry.path.as_ref(), entry.inode, entry.is_ignored));
3214            }
3215        }
3216        paths.sort_by(|a, b| a.0.cmp(b.0));
3217        paths
3218    }
3219}
3220
3221impl BackgroundScannerState {
3222    fn should_scan_directory(&self, entry: &Entry) -> bool {
3223        (!entry.is_external && (!entry.is_ignored || entry.is_always_included))
3224            || entry.path.file_name() == Some(*DOT_GIT)
3225            || entry.path.file_name() == Some(local_settings_folder_relative_path().as_os_str())
3226            || self.scanned_dirs.contains(&entry.id) // If we've ever scanned it, keep scanning
3227            || self
3228                .paths_to_scan
3229                .iter()
3230                .any(|p| p.starts_with(&entry.path))
3231            || self
3232                .path_prefixes_to_scan
3233                .iter()
3234                .any(|p| entry.path.starts_with(p))
3235    }
3236
3237    fn enqueue_scan_dir(&self, abs_path: Arc<Path>, entry: &Entry, scan_job_tx: &Sender<ScanJob>) {
3238        let path = entry.path.clone();
3239        let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true);
3240        let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path);
3241
3242        if !ancestor_inodes.contains(&entry.inode) {
3243            ancestor_inodes.insert(entry.inode);
3244            scan_job_tx
3245                .try_send(ScanJob {
3246                    abs_path,
3247                    path,
3248                    ignore_stack,
3249                    scan_queue: scan_job_tx.clone(),
3250                    ancestor_inodes,
3251                    is_external: entry.is_external,
3252                })
3253                .unwrap();
3254        }
3255    }
3256
3257    fn reuse_entry_id(&mut self, entry: &mut Entry) {
3258        if let Some(mtime) = entry.mtime {
3259            // If an entry with the same inode was removed from the worktree during this scan,
3260            // then it *might* represent the same file or directory. But the OS might also have
3261            // re-used the inode for a completely different file or directory.
3262            //
3263            // Conditionally reuse the old entry's id:
3264            // * if the mtime is the same, the file was probably renamed.
3265            // * if the path is the same, the file may just have been updated.
3266            if let Some(removed_entry) = self.removed_entries.remove(&entry.inode) {
3267                if removed_entry.mtime == Some(mtime) || removed_entry.path == entry.path {
3268                    entry.id = removed_entry.id;
3269                }
3270            } else if let Some(existing_entry) = self.snapshot.entry_for_path(&entry.path) {
3271                entry.id = existing_entry.id;
3272            }
3273        }
3274    }
3275
3276    fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs, watcher: &dyn Watcher) -> Entry {
3277        self.reuse_entry_id(&mut entry);
3278        let entry = self.snapshot.insert_entry(entry, fs);
3279        if entry.path.file_name() == Some(&DOT_GIT) {
3280            self.insert_git_repository(entry.path.clone(), fs, watcher);
3281        }
3282
3283        #[cfg(test)]
3284        self.snapshot.check_invariants(false);
3285
3286        entry
3287    }
3288
3289    fn populate_dir(
3290        &mut self,
3291        parent_path: &Arc<Path>,
3292        entries: impl IntoIterator<Item = Entry>,
3293        ignore: Option<Arc<Gitignore>>,
3294    ) {
3295        let mut parent_entry = if let Some(parent_entry) = self
3296            .snapshot
3297            .entries_by_path
3298            .get(&PathKey(parent_path.clone()), &())
3299        {
3300            parent_entry.clone()
3301        } else {
3302            log::warn!(
3303                "populating a directory {:?} that has been removed",
3304                parent_path
3305            );
3306            return;
3307        };
3308
3309        match parent_entry.kind {
3310            EntryKind::PendingDir | EntryKind::UnloadedDir => parent_entry.kind = EntryKind::Dir,
3311            EntryKind::Dir => {}
3312            _ => return,
3313        }
3314
3315        if let Some(ignore) = ignore {
3316            let abs_parent_path = self.snapshot.abs_path.as_path().join(parent_path).into();
3317            self.snapshot
3318                .ignores_by_parent_abs_path
3319                .insert(abs_parent_path, (ignore, false));
3320        }
3321
3322        let parent_entry_id = parent_entry.id;
3323        self.scanned_dirs.insert(parent_entry_id);
3324        let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)];
3325        let mut entries_by_id_edits = Vec::new();
3326
3327        for entry in entries {
3328            entries_by_id_edits.push(Edit::Insert(PathEntry {
3329                id: entry.id,
3330                path: entry.path.clone(),
3331                is_ignored: entry.is_ignored,
3332                scan_id: self.snapshot.scan_id,
3333            }));
3334            entries_by_path_edits.push(Edit::Insert(entry));
3335        }
3336
3337        self.snapshot
3338            .entries_by_path
3339            .edit(entries_by_path_edits, &());
3340        self.snapshot.entries_by_id.edit(entries_by_id_edits, &());
3341
3342        if let Err(ix) = self.changed_paths.binary_search(parent_path) {
3343            self.changed_paths.insert(ix, parent_path.clone());
3344        }
3345
3346        #[cfg(test)]
3347        self.snapshot.check_invariants(false);
3348    }
3349
3350    fn remove_path(&mut self, path: &Path) {
3351        let mut new_entries;
3352        let removed_entries;
3353        {
3354            let mut cursor = self
3355                .snapshot
3356                .entries_by_path
3357                .cursor::<TraversalProgress>(&());
3358            new_entries = cursor.slice(&TraversalTarget::path(path), Bias::Left, &());
3359            removed_entries = cursor.slice(&TraversalTarget::successor(path), Bias::Left, &());
3360            new_entries.append(cursor.suffix(&()), &());
3361        }
3362        self.snapshot.entries_by_path = new_entries;
3363
3364        let mut removed_ids = Vec::with_capacity(removed_entries.summary().count);
3365        for entry in removed_entries.cursor::<()>(&()) {
3366            match self.removed_entries.entry(entry.inode) {
3367                hash_map::Entry::Occupied(mut e) => {
3368                    let prev_removed_entry = e.get_mut();
3369                    if entry.id > prev_removed_entry.id {
3370                        *prev_removed_entry = entry.clone();
3371                    }
3372                }
3373                hash_map::Entry::Vacant(e) => {
3374                    e.insert(entry.clone());
3375                }
3376            }
3377
3378            if entry.path.file_name() == Some(&GITIGNORE) {
3379                let abs_parent_path = self
3380                    .snapshot
3381                    .abs_path
3382                    .as_path()
3383                    .join(entry.path.parent().unwrap());
3384                if let Some((_, needs_update)) = self
3385                    .snapshot
3386                    .ignores_by_parent_abs_path
3387                    .get_mut(abs_parent_path.as_path())
3388                {
3389                    *needs_update = true;
3390                }
3391            }
3392
3393            if let Err(ix) = removed_ids.binary_search(&entry.id) {
3394                removed_ids.insert(ix, entry.id);
3395            }
3396        }
3397
3398        self.snapshot.entries_by_id.edit(
3399            removed_ids.iter().map(|&id| Edit::Remove(id)).collect(),
3400            &(),
3401        );
3402        self.snapshot
3403            .git_repositories
3404            .retain(|id, _| removed_ids.binary_search(id).is_err());
3405        self.snapshot.repositories.retain(&(), |repository| {
3406            !repository.work_directory.path_key().0.starts_with(path)
3407        });
3408
3409        #[cfg(test)]
3410        self.snapshot.check_invariants(false);
3411    }
3412
3413    fn insert_git_repository(
3414        &mut self,
3415        dot_git_path: Arc<Path>,
3416        fs: &dyn Fs,
3417        watcher: &dyn Watcher,
3418    ) -> Option<LocalRepositoryEntry> {
3419        let work_dir_path: Arc<Path> = match dot_git_path.parent() {
3420            Some(parent_dir) => {
3421                // Guard against repositories inside the repository metadata
3422                if parent_dir.iter().any(|component| component == *DOT_GIT) {
3423                    log::info!(
3424                        "not building git repository for nested `.git` directory, `.git` path in the worktree: {dot_git_path:?}"
3425                    );
3426                    return None;
3427                };
3428                log::info!(
3429                    "building git repository, `.git` path in the worktree: {dot_git_path:?}"
3430                );
3431
3432                parent_dir.into()
3433            }
3434            None => {
3435                // `dot_git_path.parent().is_none()` means the `.git` directory is the opened worktree itself;
3436                // no files inside that directory are tracked by git, so there is no need to build a repo around it.
3437                log::info!(
3438                    "not building git repository for the worktree itself, `.git` path in the worktree: {dot_git_path:?}"
3439                );
3440                return None;
3441            }
3442        };
3443
3444        self.insert_git_repository_for_path(
3445            WorkDirectory::InProject {
3446                relative_path: work_dir_path,
3447            },
3448            dot_git_path,
3449            fs,
3450            watcher,
3451        )
3452    }
3453
3454    fn insert_git_repository_for_path(
3455        &mut self,
3456        work_directory: WorkDirectory,
3457        dot_git_path: Arc<Path>,
3458        fs: &dyn Fs,
3459        watcher: &dyn Watcher,
3460    ) -> Option<LocalRepositoryEntry> {
3461        let work_dir_id = self
3462            .snapshot
3463            .entry_for_path(work_directory.path_key().0)
3464            .map(|entry| entry.id)?;
3465
3466        if self.snapshot.git_repositories.get(&work_dir_id).is_some() {
3467            return None;
3468        }
3469
3470        let dot_git_abs_path = self.snapshot.abs_path.as_path().join(&dot_git_path);
3471
3472        let t0 = Instant::now();
3473        let repository = fs.open_repo(&dot_git_abs_path)?;
3474
3475        let repository_path = repository.path();
3476        watcher.add(&repository_path).log_err()?;
3477
3478        let actual_dot_git_dir_abs_path = repository.main_repository_path();
3479        let dot_git_worktree_abs_path = if actual_dot_git_dir_abs_path == dot_git_abs_path {
3480            None
3481        } else {
3482            // The two paths could be different because we opened a git worktree.
3483            // When that happens:
3484            //
3485            // * `dot_git_abs_path` is a file that points to the worktree-subdirectory in the actual
3486            // .git directory.
3487            //
3488            // * `repository_path` is the worktree-subdirectory.
3489            //
3490            // * `actual_dot_git_dir_abs_path` is the path to the actual .git directory. In git
3491            // documentation this is called the "commondir".
3492            watcher.add(&dot_git_abs_path).log_err()?;
3493            Some(Arc::from(dot_git_abs_path))
3494        };
3495
3496        log::trace!("constructed libgit2 repo in {:?}", t0.elapsed());
3497
3498        if let Some(git_hosting_provider_registry) = self.git_hosting_provider_registry.clone() {
3499            git_hosting_providers::register_additional_providers(
3500                git_hosting_provider_registry,
3501                repository.clone(),
3502            );
3503        }
3504
3505        self.snapshot.repositories.insert_or_replace(
3506            RepositoryEntry {
3507                work_directory_id: work_dir_id,
3508                work_directory: work_directory.clone(),
3509                branch: None,
3510                statuses_by_path: Default::default(),
3511                current_merge_conflicts: Default::default(),
3512            },
3513            &(),
3514        );
3515
3516        let local_repository = LocalRepositoryEntry {
3517            work_directory_id: work_dir_id,
3518            work_directory: work_directory.clone(),
3519            git_dir_scan_id: 0,
3520            status_scan_id: 0,
3521            repo_ptr: repository.clone(),
3522            dot_git_dir_abs_path: actual_dot_git_dir_abs_path.into(),
3523            dot_git_worktree_abs_path,
3524            current_merge_head_shas: Default::default(),
3525        };
3526
3527        self.snapshot
3528            .git_repositories
3529            .insert(work_dir_id, local_repository.clone());
3530
3531        Some(local_repository)
3532    }
3533}
3534
3535async fn is_git_dir(path: &Path, fs: &dyn Fs) -> bool {
3536    if path.file_name() == Some(&*DOT_GIT) {
3537        return true;
3538    }
3539
3540    // If we're in a bare repository, we are not inside a `.git` folder. In a
3541    // bare repository, the root folder contains what would normally be in the
3542    // `.git` folder.
3543    let head_metadata = fs.metadata(&path.join("HEAD")).await;
3544    if !matches!(head_metadata, Ok(Some(_))) {
3545        return false;
3546    }
3547    let config_metadata = fs.metadata(&path.join("config")).await;
3548    matches!(config_metadata, Ok(Some(_)))
3549}
3550
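// Illustrative usage sketch (not in the original source); `fs` is assumed to be an
// `Arc<dyn Fs>` and the call happens inside an async context:
//
//     if is_git_dir(Path::new("/srv/my-repo.git"), fs.as_ref()).await {
//         // The path is either a `.git` directory or the root of a bare repository.
//     }
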
3551async fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result<Gitignore> {
3552    let contents = fs.load(abs_path).await?;
3553    let parent = abs_path.parent().unwrap_or_else(|| Path::new("/"));
3554    let mut builder = GitignoreBuilder::new(parent);
3555    for line in contents.lines() {
3556        builder.add_line(Some(abs_path.into()), line)?;
3557    }
3558    Ok(builder.build()?)
3559}
3560
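// Sketch (illustrative, hypothetical paths): the returned `Gitignore` can then be
// queried with the `ignore` crate's `matched` API.
//
//     let gitignore = build_gitignore(Path::new("/repo/.gitignore"), fs.as_ref()).await?;
//     let is_ignored = gitignore.matched("/repo/target/debug", true).is_ignore();
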
3561impl Deref for Worktree {
3562    type Target = Snapshot;
3563
3564    fn deref(&self) -> &Self::Target {
3565        match self {
3566            Worktree::Local(worktree) => &worktree.snapshot,
3567            Worktree::Remote(worktree) => &worktree.snapshot,
3568        }
3569    }
3570}
3571
3572impl Deref for LocalWorktree {
3573    type Target = LocalSnapshot;
3574
3575    fn deref(&self) -> &Self::Target {
3576        &self.snapshot
3577    }
3578}
3579
3580impl Deref for RemoteWorktree {
3581    type Target = Snapshot;
3582
3583    fn deref(&self) -> &Self::Target {
3584        &self.snapshot
3585    }
3586}
3587
3588impl fmt::Debug for LocalWorktree {
3589    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3590        self.snapshot.fmt(f)
3591    }
3592}
3593
3594impl fmt::Debug for Snapshot {
3595    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3596        struct EntriesById<'a>(&'a SumTree<PathEntry>);
3597        struct EntriesByPath<'a>(&'a SumTree<Entry>);
3598
3599        impl<'a> fmt::Debug for EntriesByPath<'a> {
3600            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3601                f.debug_map()
3602                    .entries(self.0.iter().map(|entry| (&entry.path, entry.id)))
3603                    .finish()
3604            }
3605        }
3606
3607        impl<'a> fmt::Debug for EntriesById<'a> {
3608            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3609                f.debug_list().entries(self.0.iter()).finish()
3610            }
3611        }
3612
3613        f.debug_struct("Snapshot")
3614            .field("id", &self.id)
3615            .field("root_name", &self.root_name)
3616            .field("entries_by_path", &EntriesByPath(&self.entries_by_path))
3617            .field("entries_by_id", &EntriesById(&self.entries_by_id))
3618            .finish()
3619    }
3620}
3621
3622#[derive(Clone, PartialEq)]
3623pub struct File {
3624    pub worktree: Entity<Worktree>,
3625    pub path: Arc<Path>,
3626    pub disk_state: DiskState,
3627    pub entry_id: Option<ProjectEntryId>,
3628    pub is_local: bool,
3629    pub is_private: bool,
3630}
3631
3632impl language::File for File {
3633    fn as_local(&self) -> Option<&dyn language::LocalFile> {
3634        if self.is_local {
3635            Some(self)
3636        } else {
3637            None
3638        }
3639    }
3640
3641    fn disk_state(&self) -> DiskState {
3642        self.disk_state
3643    }
3644
3645    fn path(&self) -> &Arc<Path> {
3646        &self.path
3647    }
3648
3649    fn full_path(&self, cx: &App) -> PathBuf {
3650        let mut full_path = PathBuf::new();
3651        let worktree = self.worktree.read(cx);
3652
3653        if worktree.is_visible() {
3654            full_path.push(worktree.root_name());
3655        } else {
3656            let path = worktree.abs_path();
3657
3658            if worktree.is_local() && path.starts_with(home_dir().as_path()) {
3659                full_path.push("~");
3660                full_path.push(path.strip_prefix(home_dir().as_path()).unwrap());
3661            } else {
3662                full_path.push(path)
3663            }
3664        }
3665
3666        if self.path.components().next().is_some() {
3667            full_path.push(&self.path);
3668        }
3669
3670        full_path
3671    }
3672
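    // Illustrative examples of the shape produced by `full_path` (hypothetical paths,
    // assuming a home directory of `/Users/me`):
    //
    //     // visible worktree "zed", file "crates/worktree/src/worktree.rs"
    //     //   => "zed/crates/worktree/src/worktree.rs"
    //     // invisible local worktree at "/Users/me/notes", file "todo.md"
    //     //   => "~/notes/todo.md"
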
3673    /// Returns the last component of this handle's absolute path. If this handle refers to the root
3674    /// of its worktree, then this method will return the name of the worktree itself.
3675    fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr {
3676        self.path
3677            .file_name()
3678            .unwrap_or_else(|| OsStr::new(&self.worktree.read(cx).root_name))
3679    }
3680
3681    fn worktree_id(&self, cx: &App) -> WorktreeId {
3682        self.worktree.read(cx).id()
3683    }
3684
3685    fn as_any(&self) -> &dyn Any {
3686        self
3687    }
3688
3689    fn to_proto(&self, cx: &App) -> rpc::proto::File {
3690        rpc::proto::File {
3691            worktree_id: self.worktree.read(cx).id().to_proto(),
3692            entry_id: self.entry_id.map(|id| id.to_proto()),
3693            path: self.path.as_ref().to_proto(),
3694            mtime: self.disk_state.mtime().map(|time| time.into()),
3695            is_deleted: self.disk_state == DiskState::Deleted,
3696        }
3697    }
3698
3699    fn is_private(&self) -> bool {
3700        self.is_private
3701    }
3702}
3703
3704impl language::LocalFile for File {
3705    fn abs_path(&self, cx: &App) -> PathBuf {
3706        let worktree_path = &self.worktree.read(cx).as_local().unwrap().abs_path;
3707        if self.path.as_ref() == Path::new("") {
3708            worktree_path.as_path().to_path_buf()
3709        } else {
3710            worktree_path.as_path().join(&self.path)
3711        }
3712    }
3713
3714    fn load(&self, cx: &App) -> Task<Result<String>> {
3715        let worktree = self.worktree.read(cx).as_local().unwrap();
3716        let abs_path = worktree.absolutize(&self.path);
3717        let fs = worktree.fs.clone();
3718        cx.background_executor()
3719            .spawn(async move { fs.load(&abs_path?).await })
3720    }
3721
3722    fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>> {
3723        let worktree = self.worktree.read(cx).as_local().unwrap();
3724        let abs_path = worktree.absolutize(&self.path);
3725        let fs = worktree.fs.clone();
3726        cx.background_executor()
3727            .spawn(async move { fs.load_bytes(&abs_path?).await })
3728    }
3729}
3730
3731impl File {
3732    pub fn for_entry(entry: Entry, worktree: Entity<Worktree>) -> Arc<Self> {
3733        Arc::new(Self {
3734            worktree,
3735            path: entry.path.clone(),
3736            disk_state: if let Some(mtime) = entry.mtime {
3737                DiskState::Present { mtime }
3738            } else {
3739                DiskState::New
3740            },
3741            entry_id: Some(entry.id),
3742            is_local: true,
3743            is_private: entry.is_private,
3744        })
3745    }
3746
3747    pub fn from_proto(
3748        proto: rpc::proto::File,
3749        worktree: Entity<Worktree>,
3750        cx: &App,
3751    ) -> Result<Self> {
3752        let worktree_id = worktree
3753            .read(cx)
3754            .as_remote()
3755            .ok_or_else(|| anyhow!("not remote"))?
3756            .id();
3757
3758        if worktree_id.to_proto() != proto.worktree_id {
3759            return Err(anyhow!("worktree id does not match file"));
3760        }
3761
3762        let disk_state = if proto.is_deleted {
3763            DiskState::Deleted
3764        } else if let Some(mtime) = proto.mtime.map(Into::into) {
3765            DiskState::Present { mtime }
3766        } else {
3767            DiskState::New
3768        };
3771
3772        Ok(Self {
3773            worktree,
3774            path: Arc::<Path>::from_proto(proto.path),
3775            disk_state,
3776            entry_id: proto.entry_id.map(ProjectEntryId::from_proto),
3777            is_local: false,
3778            is_private: false,
3779        })
3780    }
3781
3782    pub fn from_dyn(file: Option<&Arc<dyn language::File>>) -> Option<&Self> {
3783        file.and_then(|f| f.as_any().downcast_ref())
3784    }
3785
3786    pub fn worktree_id(&self, cx: &App) -> WorktreeId {
3787        self.worktree.read(cx).id()
3788    }
3789
3790    pub fn project_entry_id(&self, _: &App) -> Option<ProjectEntryId> {
3791        match self.disk_state {
3792            DiskState::Deleted => None,
3793            _ => self.entry_id,
3794        }
3795    }
3796}
3797
3798#[derive(Clone, Debug, PartialEq, Eq)]
3799pub struct Entry {
3800    pub id: ProjectEntryId,
3801    pub kind: EntryKind,
3802    pub path: Arc<Path>,
3803    pub inode: u64,
3804    pub mtime: Option<MTime>,
3805
3806    pub canonical_path: Option<Box<Path>>,
3807    /// Whether this entry is ignored by Git.
3808    ///
3809    /// We only scan ignored entries once the directory is expanded and
3810    /// exclude them from searches.
3811    pub is_ignored: bool,
3812
3813    /// Whether this entry is always included in searches.
3814    ///
3815    /// Such entries are surfaced in searches even if they are ignored by git;
3816    /// `file_scan_exclusions` still takes precedence over this flag.
3817    pub is_always_included: bool,
3818
3819    /// Whether this entry's canonical path is outside of the worktree.
3820    /// This means the entry is only accessible from the worktree root via a
3821    /// symlink.
3822    ///
3823    /// We only scan entries outside of the worktree once the symlinked
3824    /// directory is expanded. External entries are treated like gitignored
3825    /// entries in that they are not included in searches.
3826    pub is_external: bool,
3827
3828    /// Whether this entry is considered to be a `.env` file.
3829    pub is_private: bool,
3830    /// The entry's size on disk, in bytes.
3831    pub size: u64,
3832    pub char_bag: CharBag,
3833    pub is_fifo: bool,
3834}
3835
3836#[derive(Clone, Copy, Debug, PartialEq, Eq)]
3837pub enum EntryKind {
3838    UnloadedDir,
3839    PendingDir,
3840    Dir,
3841    File,
3842}
3843
3844#[derive(Clone, Copy, Debug, PartialEq)]
3845pub enum PathChange {
3846    /// A filesystem entry was created.
3847    Added,
3848    /// A filesystem entry was removed.
3849    Removed,
3850    /// A filesystem entry was updated.
3851    Updated,
3852    /// A filesystem entry was either updated or added. We don't know
3853    /// whether or not it already existed, because the path had not
3854    /// been loaded before the event.
3855    AddedOrUpdated,
3856    /// A filesystem entry was found during the initial scan of the worktree.
3857    Loaded,
3858}
3859
3860#[derive(Debug)]
3861pub struct GitRepositoryChange {
3862    /// The previous state of the repository, if it already existed.
3863    pub old_repository: Option<RepositoryEntry>,
3864}
3865
3866pub type UpdatedEntriesSet = Arc<[(Arc<Path>, ProjectEntryId, PathChange)]>;
3867pub type UpdatedGitRepositoriesSet = Arc<[(Arc<Path>, GitRepositoryChange)]>;
3868
3869#[derive(Clone, Debug, PartialEq, Eq)]
3870pub struct StatusEntry {
3871    pub repo_path: RepoPath,
3872    pub status: FileStatus,
3873}
3874
3875impl StatusEntry {
3876    pub fn is_staged(&self) -> Option<bool> {
3877        self.status.is_staged()
3878    }
3879
3880    fn to_proto(&self) -> proto::StatusEntry {
3881        let simple_status = match self.status {
3882            FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
3883            FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
3884            FileStatus::Tracked(TrackedStatus {
3885                index_status,
3886                worktree_status,
3887            }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
3888                worktree_status
3889            } else {
3890                index_status
3891            }),
3892        };
3893
3894        proto::StatusEntry {
3895            repo_path: self.repo_path.as_ref().to_proto(),
3896            simple_status,
3897            status: Some(status_to_proto(self.status)),
3898        }
3899    }
3900}
3901
3902impl TryFrom<proto::StatusEntry> for StatusEntry {
3903    type Error = anyhow::Error;
3904
3905    fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
3906        let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
3907        let status = status_from_proto(value.simple_status, value.status)?;
3908        Ok(Self { repo_path, status })
3909    }
3910}
3911
3912#[derive(Clone, Debug)]
3913struct PathProgress<'a> {
3914    max_path: &'a Path,
3915}
3916
3917#[derive(Clone, Debug)]
3918pub struct PathSummary<S> {
3919    max_path: Arc<Path>,
3920    item_summary: S,
3921}
3922
3923impl<S: Summary> Summary for PathSummary<S> {
3924    type Context = S::Context;
3925
3926    fn zero(cx: &Self::Context) -> Self {
3927        Self {
3928            max_path: Path::new("").into(),
3929            item_summary: S::zero(cx),
3930        }
3931    }
3932
3933    fn add_summary(&mut self, rhs: &Self, cx: &Self::Context) {
3934        self.max_path = rhs.max_path.clone();
3935        self.item_summary.add_summary(&rhs.item_summary, cx);
3936    }
3937}
3938
3939impl<'a, S: Summary> sum_tree::Dimension<'a, PathSummary<S>> for PathProgress<'a> {
3940    fn zero(_: &<PathSummary<S> as Summary>::Context) -> Self {
3941        Self {
3942            max_path: Path::new(""),
3943        }
3944    }
3945
3946    fn add_summary(
3947        &mut self,
3948        summary: &'a PathSummary<S>,
3949        _: &<PathSummary<S> as Summary>::Context,
3950    ) {
3951        self.max_path = summary.max_path.as_ref()
3952    }
3953}
3954
3955impl sum_tree::Item for RepositoryEntry {
3956    type Summary = PathSummary<Unit>;
3957
3958    fn summary(&self, _: &<Self::Summary as Summary>::Context) -> Self::Summary {
3959        PathSummary {
3960            max_path: self.work_directory.path_key().0,
3961            item_summary: Unit,
3962        }
3963    }
3964}
3965
3966impl sum_tree::KeyedItem for RepositoryEntry {
3967    type Key = PathKey;
3968
3969    fn key(&self) -> Self::Key {
3970        self.work_directory.path_key()
3971    }
3972}
3973
3974impl sum_tree::Item for StatusEntry {
3975    type Summary = PathSummary<GitSummary>;
3976
3977    fn summary(&self, _: &<Self::Summary as Summary>::Context) -> Self::Summary {
3978        PathSummary {
3979            max_path: self.repo_path.0.clone(),
3980            item_summary: self.status.summary(),
3981        }
3982    }
3983}
3984
3985impl sum_tree::KeyedItem for StatusEntry {
3986    type Key = PathKey;
3987
3988    fn key(&self) -> Self::Key {
3989        PathKey(self.repo_path.0.clone())
3990    }
3991}
3992
3993impl<'a> sum_tree::Dimension<'a, PathSummary<GitSummary>> for GitSummary {
3994    fn zero(_cx: &()) -> Self {
3995        Default::default()
3996    }
3997
3998    fn add_summary(&mut self, summary: &'a PathSummary<GitSummary>, _: &()) {
3999        *self += summary.item_summary
4000    }
4001}
4002
4003impl<'a, S: Summary> sum_tree::Dimension<'a, PathSummary<S>> for PathKey {
4004    fn zero(_: &S::Context) -> Self {
4005        Default::default()
4006    }
4007
4008    fn add_summary(&mut self, summary: &'a PathSummary<S>, _: &S::Context) {
4009        self.0 = summary.max_path.clone();
4010    }
4011}
4012
4013impl<'a, S: Summary> sum_tree::Dimension<'a, PathSummary<S>> for TraversalProgress<'a> {
4014    fn zero(_cx: &S::Context) -> Self {
4015        Default::default()
4016    }
4017
4018    fn add_summary(&mut self, summary: &'a PathSummary<S>, _: &S::Context) {
4019        self.max_path = summary.max_path.as_ref();
4020    }
4021}
4022
4023impl Entry {
4024    fn new(
4025        path: Arc<Path>,
4026        metadata: &fs::Metadata,
4027        next_entry_id: &AtomicUsize,
4028        root_char_bag: CharBag,
4029        canonical_path: Option<Box<Path>>,
4030    ) -> Self {
4031        let char_bag = char_bag_for_path(root_char_bag, &path);
4032        Self {
4033            id: ProjectEntryId::new(next_entry_id),
4034            kind: if metadata.is_dir {
4035                EntryKind::PendingDir
4036            } else {
4037                EntryKind::File
4038            },
4039            path,
4040            inode: metadata.inode,
4041            mtime: Some(metadata.mtime),
4042            size: metadata.len,
4043            canonical_path,
4044            is_ignored: false,
4045            is_always_included: false,
4046            is_external: false,
4047            is_private: false,
4048            char_bag,
4049            is_fifo: metadata.is_fifo,
4050        }
4051    }
4052
4053    pub fn is_created(&self) -> bool {
4054        self.mtime.is_some()
4055    }
4056
4057    pub fn is_dir(&self) -> bool {
4058        self.kind.is_dir()
4059    }
4060
4061    pub fn is_file(&self) -> bool {
4062        self.kind.is_file()
4063    }
4064}
4065
4066impl EntryKind {
4067    pub fn is_dir(&self) -> bool {
4068        matches!(
4069            self,
4070            EntryKind::Dir | EntryKind::PendingDir | EntryKind::UnloadedDir
4071        )
4072    }
4073
4074    pub fn is_unloaded(&self) -> bool {
4075        matches!(self, EntryKind::UnloadedDir)
4076    }
4077
4078    pub fn is_file(&self) -> bool {
4079        matches!(self, EntryKind::File)
4080    }
4081}
4082
4083impl sum_tree::Item for Entry {
4084    type Summary = EntrySummary;
4085
4086    fn summary(&self, _cx: &()) -> Self::Summary {
4087        let non_ignored_count = if (self.is_ignored || self.is_external) && !self.is_always_included
4088        {
4089            0
4090        } else {
4091            1
4092        };
4093        let file_count;
4094        let non_ignored_file_count;
4095        if self.is_file() {
4096            file_count = 1;
4097            non_ignored_file_count = non_ignored_count;
4098        } else {
4099            file_count = 0;
4100            non_ignored_file_count = 0;
4101        }
4102
4103        EntrySummary {
4104            max_path: self.path.clone(),
4105            count: 1,
4106            non_ignored_count,
4107            file_count,
4108            non_ignored_file_count,
4109        }
4110    }
4111}
4112
4113impl sum_tree::KeyedItem for Entry {
4114    type Key = PathKey;
4115
4116    fn key(&self) -> Self::Key {
4117        PathKey(self.path.clone())
4118    }
4119}
4120
4121#[derive(Clone, Debug)]
4122pub struct EntrySummary {
4123    max_path: Arc<Path>,
4124    count: usize,
4125    non_ignored_count: usize,
4126    file_count: usize,
4127    non_ignored_file_count: usize,
4128}
4129
4130impl Default for EntrySummary {
4131    fn default() -> Self {
4132        Self {
4133            max_path: Arc::from(Path::new("")),
4134            count: 0,
4135            non_ignored_count: 0,
4136            file_count: 0,
4137            non_ignored_file_count: 0,
4138        }
4139    }
4140}
4141
4142impl sum_tree::Summary for EntrySummary {
4143    type Context = ();
4144
4145    fn zero(_cx: &()) -> Self {
4146        Default::default()
4147    }
4148
4149    fn add_summary(&mut self, rhs: &Self, _: &()) {
4150        self.max_path = rhs.max_path.clone();
4151        self.count += rhs.count;
4152        self.non_ignored_count += rhs.non_ignored_count;
4153        self.file_count += rhs.file_count;
4154        self.non_ignored_file_count += rhs.non_ignored_file_count;
4155    }
4156}
4157
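// Worked example (illustrative, hypothetical entries) of how `add_summary` above
// combines two summaries: counts are additive and `max_path` comes from the
// right-hand side.
//
//     // dir  "a"                    => count: 1, non_ignored_count: 1, file_count: 0
//     // file "a/b.rs" (not ignored) => count: 1, non_ignored_count: 1, file_count: 1
//     // combined                    => count: 2, non_ignored_count: 2, file_count: 1,
//     //                                non_ignored_file_count: 1, max_path: "a/b.rs"
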
4158#[derive(Clone, Debug)]
4159struct PathEntry {
4160    id: ProjectEntryId,
4161    path: Arc<Path>,
4162    is_ignored: bool,
4163    scan_id: usize,
4164}
4165
4166impl sum_tree::Item for PathEntry {
4167    type Summary = PathEntrySummary;
4168
4169    fn summary(&self, _cx: &()) -> Self::Summary {
4170        PathEntrySummary { max_id: self.id }
4171    }
4172}
4173
4174impl sum_tree::KeyedItem for PathEntry {
4175    type Key = ProjectEntryId;
4176
4177    fn key(&self) -> Self::Key {
4178        self.id
4179    }
4180}
4181
4182#[derive(Clone, Debug, Default)]
4183struct PathEntrySummary {
4184    max_id: ProjectEntryId,
4185}
4186
4187impl sum_tree::Summary for PathEntrySummary {
4188    type Context = ();
4189
4190    fn zero(_cx: &Self::Context) -> Self {
4191        Default::default()
4192    }
4193
4194    fn add_summary(&mut self, summary: &Self, _: &Self::Context) {
4195        self.max_id = summary.max_id;
4196    }
4197}
4198
4199impl<'a> sum_tree::Dimension<'a, PathEntrySummary> for ProjectEntryId {
4200    fn zero(_cx: &()) -> Self {
4201        Default::default()
4202    }
4203
4204    fn add_summary(&mut self, summary: &'a PathEntrySummary, _: &()) {
4205        *self = summary.max_id;
4206    }
4207}
4208
4209#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
4210pub struct PathKey(Arc<Path>);
4211
4212impl Default for PathKey {
4213    fn default() -> Self {
4214        Self(Path::new("").into())
4215    }
4216}
4217
4218impl<'a> sum_tree::Dimension<'a, EntrySummary> for PathKey {
4219    fn zero(_cx: &()) -> Self {
4220        Default::default()
4221    }
4222
4223    fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
4224        self.0 = summary.max_path.clone();
4225    }
4226}
4227
4228struct BackgroundScanner {
4229    state: Mutex<BackgroundScannerState>,
4230    fs: Arc<dyn Fs>,
4231    fs_case_sensitive: bool,
4232    status_updates_tx: UnboundedSender<ScanState>,
4233    executor: BackgroundExecutor,
4234    scan_requests_rx: channel::Receiver<ScanRequest>,
4235    path_prefixes_to_scan_rx: channel::Receiver<PathPrefixScanRequest>,
4236    next_entry_id: Arc<AtomicUsize>,
4237    phase: BackgroundScannerPhase,
4238    watcher: Arc<dyn Watcher>,
4239    settings: WorktreeSettings,
4240    share_private_files: bool,
4241}
4242
4243#[derive(PartialEq)]
4244enum BackgroundScannerPhase {
4245    InitialScan,
4246    EventsReceivedDuringInitialScan,
4247    Events,
4248}
4249
4250impl BackgroundScanner {
4251    async fn run(&mut self, mut fs_events_rx: Pin<Box<dyn Send + Stream<Item = Vec<PathEvent>>>>) {
4252        use futures::FutureExt as _;
4253
4254        // If the worktree root does not contain a git repository, then find
4255        // the git repository in an ancestor directory. Find any gitignore files
4256        // in ancestor directories.
4257        let root_abs_path = self.state.lock().snapshot.abs_path.clone();
4258        let mut containing_git_repository = None;
4259        for (index, ancestor) in root_abs_path.as_path().ancestors().enumerate() {
4260            if index != 0 {
4261                if let Ok(ignore) =
4262                    build_gitignore(&ancestor.join(*GITIGNORE), self.fs.as_ref()).await
4263                {
4264                    self.state
4265                        .lock()
4266                        .snapshot
4267                        .ignores_by_parent_abs_path
4268                        .insert(ancestor.into(), (ignore.into(), false));
4269                }
4270            }
4271
4272            let ancestor_dot_git = ancestor.join(*DOT_GIT);
4273            // Check whether the directory or file called `.git` exists (in the
4274            // case of git worktrees it's a file).
4275            if self
4276                .fs
4277                .metadata(&ancestor_dot_git)
4278                .await
4279                .is_ok_and(|metadata| metadata.is_some())
4280            {
4281                if index != 0 {
4282                    // We canonicalize, since the FS events use the canonicalized path.
4283                    if let Some(ancestor_dot_git) =
4284                        self.fs.canonicalize(&ancestor_dot_git).await.log_err()
4285                    {
4286                        // We associate the external git repo with our root folder and
4287                        // also mark where in the git repo the root folder is located.
4288                        let local_repository = self.state.lock().insert_git_repository_for_path(
4289                            WorkDirectory::AboveProject {
4290                                absolute_path: ancestor.into(),
4291                                location_in_repo: root_abs_path
4292                                    .as_path()
4293                                    .strip_prefix(ancestor)
4294                                    .unwrap()
4295                                    .into(),
4296                            },
4297                            ancestor_dot_git.clone().into(),
4298                            self.fs.as_ref(),
4299                            self.watcher.as_ref(),
4300                        );
4301
4302                        if local_repository.is_some() {
4303                            containing_git_repository = Some(ancestor_dot_git)
4304                        }
4305                    };
4306                }
4307
4308                // Reached root of git repository.
4309                break;
4310            }
4311        }
4312
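        // Illustrative example of the ancestor case above (hypothetical paths): if the
        // worktree root is "/home/me/repo/crates/foo" and the repository root is
        // "/home/me/repo", the repository is recorded as:
        //
        //     WorkDirectory::AboveProject {
        //         absolute_path: "/home/me/repo".into(),
        //         location_in_repo: "crates/foo".into(),
        //     }
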
4313        let (scan_job_tx, scan_job_rx) = channel::unbounded();
4314        {
4315            let mut state = self.state.lock();
4316            state.snapshot.scan_id += 1;
4317            if let Some(mut root_entry) = state.snapshot.root_entry().cloned() {
4318                let ignore_stack = state
4319                    .snapshot
4320                    .ignore_stack_for_abs_path(root_abs_path.as_path(), true);
4321                if ignore_stack.is_abs_path_ignored(root_abs_path.as_path(), true) {
4322                    root_entry.is_ignored = true;
4323                    state.insert_entry(root_entry.clone(), self.fs.as_ref(), self.watcher.as_ref());
4324                }
4325                state.enqueue_scan_dir(root_abs_path.into(), &root_entry, &scan_job_tx);
4326            }
4327        };
4328
4329        // Perform an initial scan of the directory.
4330        drop(scan_job_tx);
4331        self.scan_dirs(true, scan_job_rx).await;
4332        {
4333            let mut state = self.state.lock();
4334            state.snapshot.completed_scan_id = state.snapshot.scan_id;
4335        }
4336
4337        self.send_status_update(false, SmallVec::new());
4338
4339        // Process any FS events that occurred while performing the initial scan.
4340        // For these events, the reported changes cannot be as precise, because we didn't
4341        // have the previous state loaded yet.
4342        self.phase = BackgroundScannerPhase::EventsReceivedDuringInitialScan;
4343        if let Poll::Ready(Some(mut paths)) = futures::poll!(fs_events_rx.next()) {
4344            while let Poll::Ready(Some(more_paths)) = futures::poll!(fs_events_rx.next()) {
4345                paths.extend(more_paths);
4346            }
4347            self.process_events(paths.into_iter().map(Into::into).collect())
4348                .await;
4349        }
4350        if let Some(abs_path) = containing_git_repository {
4351            self.process_events(vec![abs_path]).await;
4352        }
4353
4354        // Continue processing events until the worktree is dropped.
4355        self.phase = BackgroundScannerPhase::Events;
4356
4357        loop {
4358            select_biased! {
4359                // Process any path refresh requests from the worktree. Prioritize
4360                // these before handling changes reported by the filesystem.
4361                request = self.next_scan_request().fuse() => {
4362                    let Ok(request) = request else { break };
4363                    if !self.process_scan_request(request, false).await {
4364                        return;
4365                    }
4366                }
4367
4368                path_prefix_request = self.path_prefixes_to_scan_rx.recv().fuse() => {
4369                    let Ok(request) = path_prefix_request else { break };
4370                    log::trace!("adding path prefix {:?}", request.path);
4371
4372                    let did_scan = self.forcibly_load_paths(&[request.path.clone()]).await;
4373                    if did_scan {
4374                        let abs_path =
4375                        {
4376                            let mut state = self.state.lock();
4377                            state.path_prefixes_to_scan.insert(request.path.clone());
4378                            state.snapshot.abs_path.as_path().join(&request.path)
4379                        };
4380
4381                        if let Some(abs_path) = self.fs.canonicalize(&abs_path).await.log_err() {
4382                            self.process_events(vec![abs_path]).await;
4383                        }
4384                    }
4385                    self.send_status_update(false, request.done);
4386                }
4387
4388                paths = fs_events_rx.next().fuse() => {
4389                    let Some(mut paths) = paths else { break };
4390                    while let Poll::Ready(Some(more_paths)) = futures::poll!(fs_events_rx.next()) {
4391                        paths.extend(more_paths);
4392                    }
4393                    self.process_events(paths.into_iter().map(Into::into).collect()).await;
4394                }
4395            }
4396        }
4397    }
4398
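        // Rescan the paths named in the request: force-load any unloaded ancestors, bump the
        // scan id, reload the entries from disk, and report the result on the status channel.
        // Returns false when the status channel has been dropped, signaling the scanner to stop.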
4399    async fn process_scan_request(&self, mut request: ScanRequest, scanning: bool) -> bool {
4400        log::debug!("rescanning paths {:?}", request.relative_paths);
4401
4402        request.relative_paths.sort_unstable();
4403        self.forcibly_load_paths(&request.relative_paths).await;
4404
4405        let root_path = self.state.lock().snapshot.abs_path.clone();
4406        let root_canonical_path = match self.fs.canonicalize(root_path.as_path()).await {
4407            Ok(path) => SanitizedPath::from(path),
4408            Err(err) => {
4409                log::error!("failed to canonicalize root path: {}", err);
4410                return true;
4411            }
4412        };
4413        let abs_paths = request
4414            .relative_paths
4415            .iter()
4416            .map(|path| {
4417                if path.file_name().is_some() {
4418                    root_canonical_path.as_path().join(path).to_path_buf()
4419                } else {
4420                    root_canonical_path.as_path().to_path_buf()
4421                }
4422            })
4423            .collect::<Vec<_>>();
4424
4425        {
4426            let mut state = self.state.lock();
4427            let is_idle = state.snapshot.completed_scan_id == state.snapshot.scan_id;
4428            state.snapshot.scan_id += 1;
4429            if is_idle {
4430                state.snapshot.completed_scan_id = state.snapshot.scan_id;
4431            }
4432        }
4433
4434        self.reload_entries_for_paths(
4435            root_path,
4436            root_canonical_path,
4437            &request.relative_paths,
4438            abs_paths,
4439            None,
4440        )
4441        .await;
4442
4443        self.send_status_update(scanning, request.done)
4444    }
4445
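        // Handle a batch of raw FS events: dedupe and filter the paths, reload the affected
        // entries, refresh ignore statuses and git repositories, then mark the scan complete.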
4446    async fn process_events(&self, mut abs_paths: Vec<PathBuf>) {
4447        let root_path = self.state.lock().snapshot.abs_path.clone();
4448        let root_canonical_path = match self.fs.canonicalize(root_path.as_path()).await {
4449            Ok(path) => SanitizedPath::from(path),
4450            Err(err) => {
4451                let new_path = self
4452                    .state
4453                    .lock()
4454                    .snapshot
4455                    .root_file_handle
4456                    .clone()
4457                    .and_then(|handle| handle.current_path(&self.fs).log_err())
4458                    .map(SanitizedPath::from)
4459                    .filter(|new_path| *new_path != root_path);
4460
4461                if let Some(new_path) = new_path.as_ref() {
4462                    log::info!(
4463                        "root renamed from {} to {}",
4464                        root_path.as_path().display(),
4465                        new_path.as_path().display()
4466                    )
4467                } else {
4468                    log::warn!("root path could not be canonicalized: {}", err);
4469                }
4470                self.status_updates_tx
4471                    .unbounded_send(ScanState::RootUpdated { new_path })
4472                    .ok();
4473                return;
4474            }
4475        };
4476
4477        // Certain files and directories inside .git may change without affecting git data that Zed cares about.
4478        // Ignore events for these, to avoid unnecessarily rescanning git metadata.
4479        let skipped_files_in_dot_git = HashSet::from_iter([*COMMIT_MESSAGE, *INDEX_LOCK]);
4480        let skipped_dirs_in_dot_git = [*FSMONITOR_DAEMON];
4481
4482        let mut relative_paths = Vec::with_capacity(abs_paths.len());
4483        let mut dot_git_abs_paths = Vec::new();
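            // Sort the paths and drop any path whose ancestor is also present, since rescanning
            // the ancestor already covers its descendants.
            // e.g. ["/a", "/a/b", "/a/b/c", "/d"] collapses to ["/a", "/d"].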
4484        abs_paths.sort_unstable();
4485        abs_paths.dedup_by(|a, b| a.starts_with(b));
4486        abs_paths.retain(|abs_path| {
4487            let abs_path = SanitizedPath::from(abs_path);
4488
4489            let snapshot = &self.state.lock().snapshot;
4490            {
4491                let mut is_git_related = false;
4492
4493                let dot_git_paths = abs_path.as_path().ancestors().find_map(|ancestor| {
4494                    if smol::block_on(is_git_dir(ancestor, self.fs.as_ref())) {
4495                        let path_in_git_dir = abs_path.as_path().strip_prefix(ancestor).expect("stripping off the ancestor");
4496                        Some((ancestor.to_owned(), path_in_git_dir.to_owned()))
4497                    } else {
4498                        None
4499                    }
4500                });
4501
4502                if let Some((dot_git_abs_path, path_in_git_dir)) = dot_git_paths {
4503                    if skipped_files_in_dot_git.contains(path_in_git_dir.as_os_str()) || skipped_dirs_in_dot_git.iter().any(|skipped_git_subdir| path_in_git_dir.starts_with(skipped_git_subdir)) {
4504                        log::debug!("ignoring event {abs_path:?}: it matches a skipped file or directory inside the .git directory");
4505                        return false;
4506                    }
4507
4508                    is_git_related = true;
4509                    if !dot_git_abs_paths.contains(&dot_git_abs_path) {
4510                        dot_git_abs_paths.push(dot_git_abs_path);
4511                    }
4512                }
4513
4514                let relative_path: Arc<Path> =
4515                    if let Ok(path) = abs_path.strip_prefix(&root_canonical_path) {
4516                        path.into()
4517                    } else {
4518                        if is_git_related {
4519                            log::debug!(
4520                              "ignoring event {abs_path:?}, since it's in git dir outside of root path {root_canonical_path:?}",
4521                            );
4522                        } else {
4523                            log::error!(
4524                              "ignoring event {abs_path:?} outside of root path {root_canonical_path:?}",
4525                            );
4526                        }
4527                        return false;
4528                    };
4529
4530                if abs_path.0.file_name() == Some(*GITIGNORE) {
4531                    for (_, repo) in snapshot.git_repositories.iter().filter(|(_, repo)| repo.directory_contains(&relative_path)) {
4532                        if !dot_git_abs_paths.iter().any(|dot_git_abs_path| dot_git_abs_path == repo.dot_git_dir_abs_path.as_ref()) {
4533                            dot_git_abs_paths.push(repo.dot_git_dir_abs_path.to_path_buf());
4534                        }
4535                    }
4536                }
4537
4538                let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
4539                    snapshot
4540                        .entry_for_path(parent)
4541                        .map_or(false, |entry| entry.kind == EntryKind::Dir)
4542                });
4543                if !parent_dir_is_loaded {
4544                    log::debug!("ignoring event {relative_path:?} within unloaded directory");
4545                    return false;
4546                }
4547
4548                if self.settings.is_path_excluded(&relative_path) {
4549                    if !is_git_related {
4550                        log::debug!("ignoring FS event for excluded path {relative_path:?}");
4551                    }
4552                    return false;
4553                }
4554
4555                relative_paths.push(relative_path);
4556                true
4557            }
4558        });
4559
4560        if relative_paths.is_empty() && dot_git_abs_paths.is_empty() {
4561            return;
4562        }
4563
4564        self.state.lock().snapshot.scan_id += 1;
4565
4566        let (scan_job_tx, scan_job_rx) = channel::unbounded();
4567        log::debug!("received fs events {:?}", relative_paths);
4568        self.reload_entries_for_paths(
4569            root_path,
4570            root_canonical_path,
4571            &relative_paths,
4572            abs_paths,
4573            Some(scan_job_tx.clone()),
4574        )
4575        .await;
4576
4577        self.update_ignore_statuses(scan_job_tx).await;
4578        self.scan_dirs(false, scan_job_rx).await;
4579
4580        if !dot_git_abs_paths.is_empty() {
4581            self.update_git_repositories(dot_git_abs_paths).await;
4582        }
4583
4584        {
4585            let mut state = self.state.lock();
4586            state.snapshot.completed_scan_id = state.snapshot.scan_id;
4587            for (_, entry) in mem::take(&mut state.removed_entries) {
4588                state.scanned_dirs.remove(&entry.id);
4589            }
4590        }
4591
4592        #[cfg(test)]
4593        self.state.lock().snapshot.check_git_invariants();
4594
4595        self.send_status_update(false, SmallVec::new());
4596    }
4597
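        // Ensure that the given paths (and any unloaded ancestor directories) are loaded into the
        // snapshot, scanning them synchronously if needed. Returns true if any scanning was done.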
4598    async fn forcibly_load_paths(&self, paths: &[Arc<Path>]) -> bool {
4599        let (scan_job_tx, scan_job_rx) = channel::unbounded();
4600        {
4601            let mut state = self.state.lock();
4602            let root_path = state.snapshot.abs_path.clone();
4603            for path in paths {
4604                for ancestor in path.ancestors() {
4605                    if let Some(entry) = state.snapshot.entry_for_path(ancestor) {
4606                        if entry.kind == EntryKind::UnloadedDir {
4607                            let abs_path = root_path.as_path().join(ancestor);
4608                            state.enqueue_scan_dir(abs_path.into(), entry, &scan_job_tx);
4609                            state.paths_to_scan.insert(path.clone());
4610                            break;
4611                        }
4612                    }
4613                }
4614            }
4615            drop(scan_job_tx);
4616        }
4617        while let Ok(job) = scan_job_rx.recv().await {
4618            self.scan_dir(&job).await.log_err();
4619        }
4620
4621        !mem::take(&mut self.state.lock().paths_to_scan).is_empty()
4622    }
4623
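        // Drain the scan queue using one worker task per CPU. Workers prioritize user-initiated
        // scan requests over queued directory jobs, and an atomic counter ensures that only one
        // worker emits each periodic progress update.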
4624    async fn scan_dirs(
4625        &self,
4626        enable_progress_updates: bool,
4627        scan_jobs_rx: channel::Receiver<ScanJob>,
4628    ) {
4629        use futures::FutureExt as _;
4630
4631        if self
4632            .status_updates_tx
4633            .unbounded_send(ScanState::Started)
4634            .is_err()
4635        {
4636            return;
4637        }
4638
4639        let progress_update_count = AtomicUsize::new(0);
4640        self.executor
4641            .scoped(|scope| {
4642                for _ in 0..self.executor.num_cpus() {
4643                    scope.spawn(async {
4644                        let mut last_progress_update_count = 0;
4645                        let progress_update_timer = self.progress_timer(enable_progress_updates).fuse();
4646                        futures::pin_mut!(progress_update_timer);
4647
4648                        loop {
4649                            select_biased! {
4650                                // Process any path refresh requests before moving on to process
4651                                // the scan queue, so that user operations are prioritized.
4652                                request = self.next_scan_request().fuse() => {
4653                                    let Ok(request) = request else { break };
4654                                    if !self.process_scan_request(request, true).await {
4655                                        return;
4656                                    }
4657                                }
4658
4659                                // Send periodic progress updates to the worktree. Use an atomic counter
4660                                // to ensure that only one of the workers sends a progress update after
4661                                // the update interval elapses.
4662                                _ = progress_update_timer => {
4663                                    match progress_update_count.compare_exchange(
4664                                        last_progress_update_count,
4665                                        last_progress_update_count + 1,
4666                                        SeqCst,
4667                                        SeqCst
4668                                    ) {
4669                                        Ok(_) => {
4670                                            last_progress_update_count += 1;
4671                                            self.send_status_update(true, SmallVec::new());
4672                                        }
4673                                        Err(count) => {
4674                                            last_progress_update_count = count;
4675                                        }
4676                                    }
4677                                    progress_update_timer.set(self.progress_timer(enable_progress_updates).fuse());
4678                                }
4679
4680                                // Recursively load directories from the file system.
4681                                job = scan_jobs_rx.recv().fuse() => {
4682                                    let Ok(job) = job else { break };
4683                                    if let Err(err) = self.scan_dir(&job).await {
4684                                        if job.path.as_ref() != Path::new("") {
4685                                            log::error!("error scanning directory {:?}: {}", job.abs_path, err);
4686                                        }
4687                                    }
4688                                }
4689                            }
4690                        }
4691                    })
4692                }
4693            })
4694            .await;
4695    }
4696
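        // Diff the previous snapshot against the current one over the accumulated changed paths
        // and publish the result. Returns false if the receiving side has been dropped.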
4697    fn send_status_update(&self, scanning: bool, barrier: SmallVec<[barrier::Sender; 1]>) -> bool {
4698        let mut state = self.state.lock();
4699        if state.changed_paths.is_empty() && scanning {
4700            return true;
4701        }
4702
4703        let new_snapshot = state.snapshot.clone();
4704        let old_snapshot = mem::replace(&mut state.prev_snapshot, new_snapshot.snapshot.clone());
4705        let changes = self.build_change_set(&old_snapshot, &new_snapshot, &state.changed_paths);
4706        state.changed_paths.clear();
4707
4708        self.status_updates_tx
4709            .unbounded_send(ScanState::Updated {
4710                snapshot: new_snapshot,
4711                changes,
4712                scanning,
4713                barrier,
4714            })
4715            .is_ok()
4716    }
4717
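        // Scan a single directory: read its children (handling .git and .gitignore first), build
        // entries for them, enqueue jobs for subdirectories, and populate the snapshot.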
4718    async fn scan_dir(&self, job: &ScanJob) -> Result<()> {
4719        let root_abs_path;
4720        let root_char_bag;
4721        {
4722            let snapshot = &self.state.lock().snapshot;
4723            if self.settings.is_path_excluded(&job.path) {
4724                log::error!("skipping excluded directory {:?}", job.path);
4725                return Ok(());
4726            }
4727            log::debug!("scanning directory {:?}", job.path);
4728            root_abs_path = snapshot.abs_path().clone();
4729            root_char_bag = snapshot.root_char_bag;
4730        }
4731
4732        let next_entry_id = self.next_entry_id.clone();
4733        let mut ignore_stack = job.ignore_stack.clone();
4734        let mut new_ignore = None;
4735        let mut root_canonical_path = None;
4736        let mut new_entries: Vec<Entry> = Vec::new();
4737        let mut new_jobs: Vec<Option<ScanJob>> = Vec::new();
4738        let mut child_paths = self
4739            .fs
4740            .read_dir(&job.abs_path)
4741            .await?
4742            .filter_map(|entry| async {
4743                match entry {
4744                    Ok(entry) => Some(entry),
4745                    Err(error) => {
4746                        log::error!("error processing entry {:?}", error);
4747                        None
4748                    }
4749                }
4750            })
4751            .collect::<Vec<_>>()
4752            .await;
4753
4754        // Ensure that .git and .gitignore are processed first.
4755        swap_to_front(&mut child_paths, *GITIGNORE);
4756        swap_to_front(&mut child_paths, *DOT_GIT);
4757
4758        for child_abs_path in child_paths {
4759            let child_abs_path: Arc<Path> = child_abs_path.into();
4760            let child_name = child_abs_path.file_name().unwrap();
4761            let child_path: Arc<Path> = job.path.join(child_name).into();
4762
4763            if child_name == *DOT_GIT {
4764                let repo = self.state.lock().insert_git_repository(
4765                    child_path.clone(),
4766                    self.fs.as_ref(),
4767                    self.watcher.as_ref(),
4768                );
4769
4770                if let Some(local_repo) = repo {
4771                    self.update_git_repository(UpdateGitRepoJob {
4772                        local_repository: local_repo,
4773                    });
4774                }
4775            } else if child_name == *GITIGNORE {
4776                match build_gitignore(&child_abs_path, self.fs.as_ref()).await {
4777                    Ok(ignore) => {
4778                        let ignore = Arc::new(ignore);
4779                        ignore_stack = ignore_stack.append(job.abs_path.clone(), ignore.clone());
4780                        new_ignore = Some(ignore);
4781                    }
4782                    Err(error) => {
4783                        log::error!(
4784                            "error loading .gitignore file {:?} - {:?}",
4785                            child_name,
4786                            error
4787                        );
4788                    }
4789                }
4790            }
4791
4792            if self.settings.is_path_excluded(&child_path) {
4793                log::debug!("skipping excluded child entry {child_path:?}");
4794                self.state.lock().remove_path(&child_path);
4795                continue;
4796            }
4797
4798            let child_metadata = match self.fs.metadata(&child_abs_path).await {
4799                Ok(Some(metadata)) => metadata,
4800                Ok(None) => continue,
4801                Err(err) => {
4802                    log::error!("error processing {child_abs_path:?}: {err:?}");
4803                    continue;
4804                }
4805            };
4806
4807            let mut child_entry = Entry::new(
4808                child_path.clone(),
4809                &child_metadata,
4810                &next_entry_id,
4811                root_char_bag,
4812                None,
4813            );
4814
4815            if job.is_external {
4816                child_entry.is_external = true;
4817            } else if child_metadata.is_symlink {
4818                let canonical_path = match self.fs.canonicalize(&child_abs_path).await {
4819                    Ok(path) => path,
4820                    Err(err) => {
4821                        log::error!(
4822                            "error reading target of symlink {:?}: {:?}",
4823                            child_abs_path,
4824                            err
4825                        );
4826                        continue;
4827                    }
4828                };
4829
4830                // lazily canonicalize the root path in order to determine if
4831                // symlinks point outside of the worktree.
4832                let root_canonical_path = match &root_canonical_path {
4833                    Some(path) => path,
4834                    None => match self.fs.canonicalize(&root_abs_path).await {
4835                        Ok(path) => root_canonical_path.insert(path),
4836                        Err(err) => {
4837                            log::error!("error canonicalizing root {:?}: {:?}", root_abs_path, err);
4838                            continue;
4839                        }
4840                    },
4841                };
4842
4843                if !canonical_path.starts_with(root_canonical_path) {
4844                    child_entry.is_external = true;
4845                }
4846
4847                child_entry.canonical_path = Some(canonical_path.into());
4848            }
4849
4850            if child_entry.is_dir() {
4851                child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, true);
4852                child_entry.is_always_included = self.settings.is_path_always_included(&child_path);
4853
4854                // Avoid recursing forever in the case of a recursive symlink
4855                if job.ancestor_inodes.contains(&child_entry.inode) {
4856                    new_jobs.push(None);
4857                } else {
4858                    let mut ancestor_inodes = job.ancestor_inodes.clone();
4859                    ancestor_inodes.insert(child_entry.inode);
4860
4861                    new_jobs.push(Some(ScanJob {
4862                        abs_path: child_abs_path.clone(),
4863                        path: child_path,
4864                        is_external: child_entry.is_external,
4865                        ignore_stack: if child_entry.is_ignored {
4866                            IgnoreStack::all()
4867                        } else {
4868                            ignore_stack.clone()
4869                        },
4870                        ancestor_inodes,
4871                        scan_queue: job.scan_queue.clone(),
4872                    }));
4873                }
4874            } else {
4875                child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false);
4876                child_entry.is_always_included = self.settings.is_path_always_included(&child_path);
4877            }
4878
4879            {
4880                let relative_path = job.path.join(child_name);
4881                if self.is_path_private(&relative_path) {
4882                    log::debug!("detected private file: {relative_path:?}");
4883                    child_entry.is_private = true;
4884                }
4885            }
4886
4887            new_entries.push(child_entry);
4888        }
4889
4890        let mut state = self.state.lock();
4891
4892        // Identify any subdirectories that should not be scanned.
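            // `new_jobs` holds one (possibly skipped) job per directory in `new_entries`, in the
            // same order, so `job_ix` tracks the job corresponding to the current directory entry.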
4893        let mut job_ix = 0;
4894        for entry in &mut new_entries {
4895            state.reuse_entry_id(entry);
4896            if entry.is_dir() {
4897                if state.should_scan_directory(entry) {
4898                    job_ix += 1;
4899                } else {
4900                    log::debug!("defer scanning directory {:?}", entry.path);
4901                    entry.kind = EntryKind::UnloadedDir;
4902                    new_jobs.remove(job_ix);
4903                }
4904            }
4905            if entry.is_always_included {
4906                state
4907                    .snapshot
4908                    .always_included_entries
4909                    .push(entry.path.clone());
4910            }
4911        }
4912
4913        state.populate_dir(&job.path, new_entries, new_ignore);
4914        self.watcher.add(job.abs_path.as_ref()).log_err();
4915
4916        for new_job in new_jobs.into_iter().flatten() {
4917            job.scan_queue
4918                .try_send(new_job)
4919                .expect("channel is unbounded");
4920        }
4921
4922        Ok(())
4923    }
4924
4925    /// All list arguments should be sorted before calling this function.
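        /// Refreshes the entries for the given paths from the filesystem, re-queries git statuses
        /// for any repositories containing those paths, and records the paths as changed.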
4926    async fn reload_entries_for_paths(
4927        &self,
4928        root_abs_path: SanitizedPath,
4929        root_canonical_path: SanitizedPath,
4930        relative_paths: &[Arc<Path>],
4931        abs_paths: Vec<PathBuf>,
4932        scan_queue_tx: Option<Sender<ScanJob>>,
4933    ) {
4934        // grab metadata for all requested paths
4935        let metadata = futures::future::join_all(
4936            abs_paths
4937                .iter()
4938                .map(|abs_path| async move {
4939                    let metadata = self.fs.metadata(abs_path).await?;
4940                    if let Some(metadata) = metadata {
4941                        let canonical_path = self.fs.canonicalize(abs_path).await?;
4942
4943                        // If we're on a case-insensitive filesystem (default on macOS), we only keep
4944                        // the metadata for a non-symlink file if its absolute path's file name matches
4945                        // the canonical path's file name.
4946                        // If they differ, this might be a case-only rename (`mv test.txt TEST.TXT`),
4947                        // and we want to ignore the metadata for the old path (`test.txt`) so it's
4948                        // treated as removed.
4949                        if !self.fs_case_sensitive && !metadata.is_symlink {
4950                            let canonical_file_name = canonical_path.file_name();
4951                            let file_name = abs_path.file_name();
4952                            if canonical_file_name != file_name {
4953                                return Ok(None);
4954                            }
4955                        }
4956
4957                        anyhow::Ok(Some((metadata, SanitizedPath::from(canonical_path))))
4958                    } else {
4959                        Ok(None)
4960                    }
4961                })
4962                .collect::<Vec<_>>(),
4963        )
4964        .await;
4965
4966        let mut state = self.state.lock();
4967        let doing_recursive_update = scan_queue_tx.is_some();
4968
4969        // Remove any entries for paths that no longer exist or are being recursively
4970        // refreshed. Do this before adding any new entries, so that renames can be
4971        // detected regardless of the order of the paths.
4972        for (path, metadata) in relative_paths.iter().zip(metadata.iter()) {
4973            if matches!(metadata, Ok(None)) || doing_recursive_update {
4974                log::trace!("remove path {:?}", path);
4975                state.remove_path(path);
4976            }
4977        }
4978
4979        // Group all relative paths by their git repository.
4980        let mut paths_by_git_repo = HashMap::default();
4981        for relative_path in relative_paths.iter() {
4982            let repository_data = state
4983                .snapshot
4984                .local_repo_for_path(relative_path)
4985                .zip(state.snapshot.repository_for_path(relative_path));
4986            if let Some((local_repo, entry)) = repository_data {
4987                if let Ok(repo_path) = local_repo.relativize(relative_path) {
4988                    paths_by_git_repo
4989                        .entry(local_repo.work_directory.clone())
4990                        .or_insert_with(|| RepoPaths {
4991                            entry: entry.clone(),
4992                            repo: local_repo.repo_ptr.clone(),
4993                            repo_paths: Default::default(),
4994                        })
4995                        .add_path(repo_path);
4996                }
4997            }
4998        }
4999
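            // For each affected repository, re-query git status for just the changed paths and
            // apply the minimal set of edits (inserts for changed statuses, removals for paths
            // with no remaining status) to that repository's status tree.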
5000        for (work_directory, mut paths) in paths_by_git_repo {
5001            if let Ok(status) = paths.repo.status(&paths.repo_paths) {
5002                let mut changed_path_statuses = Vec::new();
5003                let statuses = paths.entry.statuses_by_path.clone();
5004                let mut cursor = statuses.cursor::<PathProgress>(&());
5005
5006                for (repo_path, status) in &*status.entries {
5007                    paths.remove_repo_path(repo_path);
5008                    if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left, &()) {
5009                        if &cursor.item().unwrap().status == status {
5010                            continue;
5011                        }
5012                    }
5013
5014                    changed_path_statuses.push(Edit::Insert(StatusEntry {
5015                        repo_path: repo_path.clone(),
5016                        status: *status,
5017                    }));
5018                }
5019
5020                let mut cursor = statuses.cursor::<PathProgress>(&());
5021                for path in paths.repo_paths {
5022                    if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left, &()) {
5023                        changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
5024                    }
5025                }
5026
5027                if !changed_path_statuses.is_empty() {
5028                    let work_directory_id = state.snapshot.repositories.update(
5029                        &work_directory.path_key(),
5030                        &(),
5031                        move |repository_entry| {
5032                            repository_entry
5033                                .statuses_by_path
5034                                .edit(changed_path_statuses, &());
5035
5036                            repository_entry.work_directory_id
5037                        },
5038                    );
5039
5040                    if let Some(work_directory_id) = work_directory_id {
5041                        let scan_id = state.snapshot.scan_id;
5042                        state.snapshot.git_repositories.update(
5043                            &work_directory_id,
5044                            |local_repository_entry| {
5045                                local_repository_entry.status_scan_id = scan_id;
5046                            },
5047                        );
5048                    }
5049                }
5050            }
5051        }
5052
5053        for (path, metadata) in relative_paths.iter().zip(metadata.into_iter()) {
5054            let abs_path: Arc<Path> = root_abs_path.as_path().join(path).into();
5055            match metadata {
5056                Ok(Some((metadata, canonical_path))) => {
5057                    let ignore_stack = state
5058                        .snapshot
5059                        .ignore_stack_for_abs_path(&abs_path, metadata.is_dir);
5060                    let is_external = !canonical_path.starts_with(&root_canonical_path);
5061                    let mut fs_entry = Entry::new(
5062                        path.clone(),
5063                        &metadata,
5064                        self.next_entry_id.as_ref(),
5065                        state.snapshot.root_char_bag,
5066                        if metadata.is_symlink {
5067                            Some(canonical_path.as_path().to_path_buf().into())
5068                        } else {
5069                            None
5070                        },
5071                    );
5072
5073                    let is_dir = fs_entry.is_dir();
5074                    fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir);
5075                    fs_entry.is_external = is_external;
5076                    fs_entry.is_private = self.is_path_private(path);
5077                    fs_entry.is_always_included = self.settings.is_path_always_included(path);
5078
5079                    if let (Some(scan_queue_tx), true) = (&scan_queue_tx, is_dir) {
5080                        if state.should_scan_directory(&fs_entry)
5081                            || (fs_entry.path.as_os_str().is_empty()
5082                                && abs_path.file_name() == Some(*DOT_GIT))
5083                        {
5084                            state.enqueue_scan_dir(abs_path, &fs_entry, scan_queue_tx);
5085                        } else {
5086                            fs_entry.kind = EntryKind::UnloadedDir;
5087                        }
5088                    }
5089
5090                    state.insert_entry(fs_entry.clone(), self.fs.as_ref(), self.watcher.as_ref());
5091                }
5092                Ok(None) => {
5093                    self.remove_repo_path(path, &mut state.snapshot);
5094                }
5095                Err(err) => {
5096                    log::error!("error reading file {abs_path:?} on event: {err:#}");
5097                }
5098            }
5099        }
5100
5101        util::extend_sorted(
5102            &mut state.changed_paths,
5103            relative_paths.iter().cloned(),
5104            usize::MAX,
5105            Ord::cmp,
5106        );
5107    }
5108
5109    fn remove_repo_path(&self, path: &Arc<Path>, snapshot: &mut LocalSnapshot) -> Option<()> {
5110        if !path
5111            .components()
5112            .any(|component| component.as_os_str() == *DOT_GIT)
5113        {
5114            if let Some(repository) = snapshot.repository(PathKey(path.clone())) {
5115                snapshot
5116                    .git_repositories
5117                    .remove(&repository.work_directory_id);
5118                snapshot
5119                    .snapshot
5120                    .repositories
5121                    .remove(&repository.work_directory.path_key(), &());
5122                return Some(());
5123            }
5124        }
5125
5126        Some(())
5127    }
5128
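        // Recompute ignore state after .gitignore changes: drop ignores whose files were deleted,
        // queue an UpdateIgnoreStatusJob for each top-most affected directory, and process the
        // queue with one worker per CPU.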
5129    async fn update_ignore_statuses(&self, scan_job_tx: Sender<ScanJob>) {
5130        use futures::FutureExt as _;
5131
5132        let mut ignores_to_update = Vec::new();
5133        let (ignore_queue_tx, ignore_queue_rx) = channel::unbounded();
5134        let prev_snapshot;
5135        {
5136            let snapshot = &mut self.state.lock().snapshot;
5137            let abs_path = snapshot.abs_path.clone();
5138            snapshot
5139                .ignores_by_parent_abs_path
5140                .retain(|parent_abs_path, (_, needs_update)| {
5141                    if let Ok(parent_path) = parent_abs_path.strip_prefix(abs_path.as_path()) {
5142                        if *needs_update {
5143                            *needs_update = false;
5144                            if snapshot.snapshot.entry_for_path(parent_path).is_some() {
5145                                ignores_to_update.push(parent_abs_path.clone());
5146                            }
5147                        }
5148
5149                        let ignore_path = parent_path.join(*GITIGNORE);
5150                        if snapshot.snapshot.entry_for_path(ignore_path).is_none() {
5151                            return false;
5152                        }
5153                    }
5154                    true
5155                });
5156
5157            ignores_to_update.sort_unstable();
5158            let mut ignores_to_update = ignores_to_update.into_iter().peekable();
5159            while let Some(parent_abs_path) = ignores_to_update.next() {
5160                while ignores_to_update
5161                    .peek()
5162                    .map_or(false, |p| p.starts_with(&parent_abs_path))
5163                {
5164                    ignores_to_update.next().unwrap();
5165                }
5166
5167                let ignore_stack = snapshot.ignore_stack_for_abs_path(&parent_abs_path, true);
5168                ignore_queue_tx
5169                    .send_blocking(UpdateIgnoreStatusJob {
5170                        abs_path: parent_abs_path,
5171                        ignore_stack,
5172                        ignore_queue: ignore_queue_tx.clone(),
5173                        scan_queue: scan_job_tx.clone(),
5174                    })
5175                    .unwrap();
5176            }
5177
5178            prev_snapshot = snapshot.clone();
5179        }
5180        drop(ignore_queue_tx);
5181
5182        self.executor
5183            .scoped(|scope| {
5184                for _ in 0..self.executor.num_cpus() {
5185                    scope.spawn(async {
5186                        loop {
5187                            select_biased! {
5188                                // Process any path refresh requests before moving on to process
5189                                // the queue of ignore statuses.
5190                                request = self.next_scan_request().fuse() => {
5191                                    let Ok(request) = request else { break };
5192                                    if !self.process_scan_request(request, true).await {
5193                                        return;
5194                                    }
5195                                }
5196
5197                                // Recursively process directories whose ignores have changed.
5198                                job = ignore_queue_rx.recv().fuse() => {
5199                                    let Ok(job) = job else { break };
5200                                    self.update_ignore_status(job, &prev_snapshot).await;
5201                                }
5202                            }
5203                        }
5204                    });
5205                }
5206            })
5207            .await;
5208    }
5209
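        // Re-evaluate the ignored flag for each entry under `job.abs_path`, enqueue scans for
        // directories that just became unignored, and recurse into child directories via the
        // ignore queue.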
5210    async fn update_ignore_status(&self, job: UpdateIgnoreStatusJob, snapshot: &LocalSnapshot) {
5211        log::trace!("update ignore status {:?}", job.abs_path);
5212
5213        let mut ignore_stack = job.ignore_stack;
5214        if let Some((ignore, _)) = snapshot.ignores_by_parent_abs_path.get(&job.abs_path) {
5215            ignore_stack = ignore_stack.append(job.abs_path.clone(), ignore.clone());
5216        }
5217
5218        let mut entries_by_id_edits = Vec::new();
5219        let mut entries_by_path_edits = Vec::new();
5220        let path = job
5221            .abs_path
5222            .strip_prefix(snapshot.abs_path.as_path())
5223            .unwrap();
5224
5225        for mut entry in snapshot.child_entries(path).cloned() {
5226            let was_ignored = entry.is_ignored;
5227            let abs_path: Arc<Path> = snapshot.abs_path().join(&entry.path).into();
5228            entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, entry.is_dir());
5229
5230            if entry.is_dir() {
5231                let child_ignore_stack = if entry.is_ignored {
5232                    IgnoreStack::all()
5233                } else {
5234                    ignore_stack.clone()
5235                };
5236
5237                // Scan any directories that just became unignored and weren't previously loaded.
5238                if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() {
5239                    let state = self.state.lock();
5240                    if state.should_scan_directory(&entry) {
5241                        state.enqueue_scan_dir(abs_path.clone(), &entry, &job.scan_queue);
5242                    }
5243                }
5244
5245                job.ignore_queue
5246                    .send(UpdateIgnoreStatusJob {
5247                        abs_path: abs_path.clone(),
5248                        ignore_stack: child_ignore_stack,
5249                        ignore_queue: job.ignore_queue.clone(),
5250                        scan_queue: job.scan_queue.clone(),
5251                    })
5252                    .await
5253                    .unwrap();
5254            }
5255
5256            if entry.is_ignored != was_ignored {
5257                let mut path_entry = snapshot.entries_by_id.get(&entry.id, &()).unwrap().clone();
5258                path_entry.scan_id = snapshot.scan_id;
5259                path_entry.is_ignored = entry.is_ignored;
5260                entries_by_id_edits.push(Edit::Insert(path_entry));
5261                entries_by_path_edits.push(Edit::Insert(entry));
5262            }
5263        }
5264
5265        let state = &mut self.state.lock();
5266        for edit in &entries_by_path_edits {
5267            if let Edit::Insert(entry) = edit {
5268                if let Err(ix) = state.changed_paths.binary_search(&entry.path) {
5269                    state.changed_paths.insert(ix, entry.path.clone());
5270                }
5271            }
5272        }
5273
5274        state
5275            .snapshot
5276            .entries_by_path
5277            .edit(entries_by_path_edits, &());
5278        state.snapshot.entries_by_id.edit(entries_by_id_edits, &());
5279    }
5280
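        // Reload git metadata for the given `.git` paths: register repositories that are new,
        // refresh existing ones whose scan id is stale, and prune repositories whose `.git`
        // directory no longer exists. Repository updates run concurrently with scan requests.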
5281    async fn update_git_repositories(&self, dot_git_paths: Vec<PathBuf>) {
5282        log::debug!("reloading repositories: {dot_git_paths:?}");
5283
5284        let mut repo_updates = Vec::new();
5285        {
5286            let mut state = self.state.lock();
5287            let scan_id = state.snapshot.scan_id;
5288            for dot_git_dir in dot_git_paths {
5289                let existing_repository_entry =
5290                    state
5291                        .snapshot
5292                        .git_repositories
5293                        .iter()
5294                        .find_map(|(_, repo)| {
5295                            if repo.dot_git_dir_abs_path.as_ref() == &dot_git_dir
5296                                || repo.dot_git_worktree_abs_path.as_deref() == Some(&dot_git_dir)
5297                            {
5298                                Some(repo.clone())
5299                            } else {
5300                                None
5301                            }
5302                        });
5303
5304                let local_repository = match existing_repository_entry {
5305                    None => {
5306                        let Ok(relative) = dot_git_dir.strip_prefix(state.snapshot.abs_path())
5307                        else {
5308                            return;
5309                        };
5310                        match state.insert_git_repository(
5311                            relative.into(),
5312                            self.fs.as_ref(),
5313                            self.watcher.as_ref(),
5314                        ) {
5315                            Some(output) => output,
5316                            None => continue,
5317                        }
5318                    }
5319                    Some(local_repository) => {
5320                        if local_repository.git_dir_scan_id == scan_id {
5321                            continue;
5322                        }
5323                        local_repository.repo_ptr.reload_index();
5324
5325                        state.snapshot.git_repositories.update(
5326                            &local_repository.work_directory_id,
5327                            |entry| {
5328                                entry.git_dir_scan_id = scan_id;
5329                                entry.status_scan_id = scan_id;
5330                            },
5331                        );
5332
5333                        local_repository
5334                    }
5335                };
5336
5337                repo_updates.push(UpdateGitRepoJob { local_repository });
5338            }
5339
5340            // Remove any git repositories whose .git entry no longer exists.
5341            let snapshot = &mut state.snapshot;
5342            let mut ids_to_preserve = HashSet::default();
5343            for (&work_directory_id, entry) in snapshot.git_repositories.iter() {
5344                let exists_in_snapshot = snapshot
5345                    .entry_for_id(work_directory_id)
5346                    .map_or(false, |entry| {
5347                        snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some()
5348                    });
5349
5350                if exists_in_snapshot
5351                    || matches!(
5352                        smol::block_on(self.fs.metadata(&entry.dot_git_dir_abs_path)),
5353                        Ok(Some(_))
5354                    )
5355                {
5356                    ids_to_preserve.insert(work_directory_id);
5357                }
5358            }
5359
5360            snapshot
5361                .git_repositories
5362                .retain(|work_directory_id, _| ids_to_preserve.contains(work_directory_id));
5363            snapshot.repositories.retain(&(), |entry| {
5364                ids_to_preserve.contains(&entry.work_directory_id)
5365            });
5366        }
5367
5368        let (mut updates_done_tx, mut updates_done_rx) = barrier::channel();
5369        self.executor
5370            .scoped(|scope| {
5371                scope.spawn(async {
5372                    for repo_update in repo_updates {
5373                        self.update_git_repository(repo_update);
5374                    }
5375                    updates_done_tx.blocking_send(()).ok();
5376                });
5377
5378                scope.spawn(async {
5379                    loop {
5380                        select_biased! {
5381                            // Process any path refresh requests before moving on to process
5382                            // the queue of git statuses.
5383                            request = self.next_scan_request().fuse() => {
5384                                let Ok(request) = request else { break };
5385                                if !self.process_scan_request(request, true).await {
5386                                    return;
5387                                }
5388                            }
5389                            _ = updates_done_rx.recv().fuse() =>  break,
5390                        }
5391                    }
5392                });
5393            })
5394            .await;
5395    }
5396
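        // Refresh the repository's current branch (the branch whose `is_head` flag is set).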
5397    fn update_branches(&self, job: &UpdateGitRepoJob) -> Result<()> {
5398        let branches = job.local_repository.repo().branches()?;
5399        let snapshot = self.state.lock().snapshot.snapshot.clone();
5400
5401        let mut repository = snapshot
5402            .repository(job.local_repository.work_directory.path_key())
5403            .context("Missing repository")?;
5404
5405        repository.branch = branches.into_iter().find(|branch| branch.is_head);
5406
5407        let mut state = self.state.lock();
5408        state
5409            .snapshot
5410            .repositories
5411            .insert_or_replace(repository, &());
5412
5413        Ok(())
5414    }
5415
5416    fn update_statuses(&self, job: &UpdateGitRepoJob) -> Result<()> {
5417        log::trace!(
5418            "updating git statuses for repo {:?}",
5419            job.local_repository.work_directory.display_name()
5420        );
5421        let t0 = Instant::now();
5422
5423        let statuses = job
5424            .local_repository
5425            .repo()
5426            .status(&[git::WORK_DIRECTORY_REPO_PATH.clone()])?;
5427
5428        log::trace!(
5429            "computed git statuses for repo {:?} in {:?}",
5430            job.local_repository.work_directory.display_name(),
5431            t0.elapsed()
5432        );
5433
5434        let t0 = Instant::now();
5435        let mut changed_paths = Vec::new();
5436        let snapshot = self.state.lock().snapshot.snapshot.clone();
5437
5438        let mut repository = snapshot
5439            .repository(job.local_repository.work_directory.path_key())
5440            .context("Got an UpdateGitRepoJob for a repository that isn't in the snapshot")?;
5441
5442        let merge_head_shas = job.local_repository.repo().merge_head_shas();
5443        if merge_head_shas != job.local_repository.current_merge_head_shas {
5444            mem::take(&mut repository.current_merge_conflicts);
5445        }
5446
5447        let mut new_entries_by_path = SumTree::new(&());
5448        for (repo_path, status) in statuses.entries.iter() {
5449            let project_path = repository.work_directory.unrelativize(repo_path);
5450
5451            new_entries_by_path.insert_or_replace(
5452                StatusEntry {
5453                    repo_path: repo_path.clone(),
5454                    status: *status,
5455                },
5456                &(),
5457            );
5458            if status.is_conflicted() {
5459                repository.current_merge_conflicts.insert(repo_path.clone());
5460            }
5461
5462            if let Some(path) = project_path {
5463                changed_paths.push(path);
5464            }
5465        }
5466
5467        repository.statuses_by_path = new_entries_by_path;
5468
5469        let mut state = self.state.lock();
5470        state
5471            .snapshot
5472            .repositories
5473            .insert_or_replace(repository, &());
5474
5475        state
5476            .snapshot
5477            .git_repositories
5478            .update(&job.local_repository.work_directory_id, |entry| {
5479                entry.current_merge_head_shas = merge_head_shas;
5480            });
5481
5482        util::extend_sorted(
5483            &mut state.changed_paths,
5484            changed_paths,
5485            usize::MAX,
5486            Ord::cmp,
5487        );
5488
5489        log::trace!(
5490            "applied git status updates for repo {:?} in {:?}",
5491            job.local_repository.work_directory.display_name(),
5492            t0.elapsed(),
5493        );
5494        Ok(())
5495    }
5496
5497    /// Update the branch and git statuses for the given repository.
5498    fn update_git_repository(&self, job: UpdateGitRepoJob) {
5499        self.update_branches(&job).log_err();
5500        self.update_statuses(&job).log_err();
5501    }
5502
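        // Walk the old and new snapshots in lockstep over the changed paths, classifying each
        // entry as Added, Removed, Updated, Loaded, or AddedOrUpdated (for events received
        // during the initial scan).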
5503    fn build_change_set(
5504        &self,
5505        old_snapshot: &Snapshot,
5506        new_snapshot: &Snapshot,
5507        event_paths: &[Arc<Path>],
5508    ) -> UpdatedEntriesSet {
5509        use BackgroundScannerPhase::*;
5510        use PathChange::{Added, AddedOrUpdated, Loaded, Removed, Updated};
5511
5512        // Identify which paths have changed. Use the known set of changed
5513        // parent paths to optimize the search.
5514        let mut changes = Vec::new();
5515        let mut old_paths = old_snapshot.entries_by_path.cursor::<PathKey>(&());
5516        let mut new_paths = new_snapshot.entries_by_path.cursor::<PathKey>(&());
5517        let mut last_newly_loaded_dir_path = None;
5518        old_paths.next(&());
5519        new_paths.next(&());
5520        for path in event_paths {
5521            let path = PathKey(path.clone());
5522            if old_paths.item().map_or(false, |e| e.path < path.0) {
5523                old_paths.seek_forward(&path, Bias::Left, &());
5524            }
5525            if new_paths.item().map_or(false, |e| e.path < path.0) {
5526                new_paths.seek_forward(&path, Bias::Left, &());
5527            }
5528            loop {
5529                match (old_paths.item(), new_paths.item()) {
5530                    (Some(old_entry), Some(new_entry)) => {
5531                        if old_entry.path > path.0
5532                            && new_entry.path > path.0
5533                            && !old_entry.path.starts_with(&path.0)
5534                            && !new_entry.path.starts_with(&path.0)
5535                        {
5536                            break;
5537                        }
5538
5539                        match Ord::cmp(&old_entry.path, &new_entry.path) {
5540                            Ordering::Less => {
5541                                changes.push((old_entry.path.clone(), old_entry.id, Removed));
5542                                old_paths.next(&());
5543                            }
5544                            Ordering::Equal => {
5545                                if self.phase == EventsReceivedDuringInitialScan {
5546                                    if old_entry.id != new_entry.id {
5547                                        changes.push((
5548                                            old_entry.path.clone(),
5549                                            old_entry.id,
5550                                            Removed,
5551                                        ));
5552                                    }
5553                                    // If the worktree was not fully initialized when this event was generated,
5554                                    // we can't know whether this entry was added during the scan or whether
5555                                    // it was merely updated.
5556                                    changes.push((
5557                                        new_entry.path.clone(),
5558                                        new_entry.id,
5559                                        AddedOrUpdated,
5560                                    ));
5561                                } else if old_entry.id != new_entry.id {
5562                                    changes.push((old_entry.path.clone(), old_entry.id, Removed));
5563                                    changes.push((new_entry.path.clone(), new_entry.id, Added));
5564                                } else if old_entry != new_entry {
5565                                    if old_entry.kind.is_unloaded() {
5566                                        last_newly_loaded_dir_path = Some(&new_entry.path);
5567                                        changes.push((
5568                                            new_entry.path.clone(),
5569                                            new_entry.id,
5570                                            Loaded,
5571                                        ));
5572                                    } else {
5573                                        changes.push((
5574                                            new_entry.path.clone(),
5575                                            new_entry.id,
5576                                            Updated,
5577                                        ));
5578                                    }
5579                                }
5580                                old_paths.next(&());
5581                                new_paths.next(&());
5582                            }
5583                            Ordering::Greater => {
5584                                let is_newly_loaded = self.phase == InitialScan
5585                                    || last_newly_loaded_dir_path
5586                                        .as_ref()
5587                                        .map_or(false, |dir| new_entry.path.starts_with(dir));
5588                                changes.push((
5589                                    new_entry.path.clone(),
5590                                    new_entry.id,
5591                                    if is_newly_loaded { Loaded } else { Added },
5592                                ));
5593                                new_paths.next(&());
5594                            }
5595                        }
5596                    }
5597                    (Some(old_entry), None) => {
5598                        changes.push((old_entry.path.clone(), old_entry.id, Removed));
5599                        old_paths.next(&());
5600                    }
5601                    (None, Some(new_entry)) => {
5602                        let is_newly_loaded = self.phase == InitialScan
5603                            || last_newly_loaded_dir_path
5604                                .as_ref()
5605                                .map_or(false, |dir| new_entry.path.starts_with(dir));
5606                        changes.push((
5607                            new_entry.path.clone(),
5608                            new_entry.id,
5609                            if is_newly_loaded { Loaded } else { Added },
5610                        ));
5611                        new_paths.next(&());
5612                    }
5613                    (None, None) => break,
5614                }
5615            }
5616        }
5617
5618        changes.into()
5619    }
5620
5621    async fn progress_timer(&self, running: bool) {
5622        if !running {
5623            return futures::future::pending().await;
5624        }
5625
5626        #[cfg(any(test, feature = "test-support"))]
5627        if self.fs.is_fake() {
5628            return self.executor.simulate_random_delay().await;
5629        }
5630
5631        smol::Timer::after(FS_WATCH_LATENCY).await;
5632    }
5633
5634    fn is_path_private(&self, path: &Path) -> bool {
5635        !self.share_private_files && self.settings.is_path_private(path)
5636    }
5637
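        // Wait for the next scan request, folding any additional pending requests into it so
        // they are processed as a single batch.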
5638    async fn next_scan_request(&self) -> Result<ScanRequest> {
5639        let mut request = self.scan_requests_rx.recv().await?;
5640        while let Ok(next_request) = self.scan_requests_rx.try_recv() {
5641            request.relative_paths.extend(next_request.relative_paths);
5642            request.done.extend(next_request.done);
5643        }
5644        Ok(request)
5645    }
5646}
5647
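    // Move the entry whose file name matches `file` to the front of `child_paths`, if present.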
5648fn swap_to_front(child_paths: &mut Vec<PathBuf>, file: &OsStr) {
5649    let position = child_paths
5650        .iter()
5651        .position(|path| path.file_name().unwrap() == file);
5652    if let Some(position) = position {
5653        let temp = child_paths.remove(position);
5654        child_paths.insert(0, temp);
5655    }
5656}
5657
5658fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
5659    let mut result = root_char_bag;
5660    result.extend(
5661        path.to_string_lossy()
5662            .chars()
5663            .map(|c| c.to_ascii_lowercase()),
5664    );
5665    result
5666}
5667
5668#[derive(Debug)]
5669struct RepoPaths {
5670    repo: Arc<dyn GitRepository>,
5671    entry: RepositoryEntry,
5672    // Sorted by repo path, so lookups and insertions can use binary search.
5673    repo_paths: Vec<RepoPath>,
5674}
5675
5676impl RepoPaths {
5677    fn add_path(&mut self, repo_path: RepoPath) {
5678        match self.repo_paths.binary_search(&repo_path) {
5679            Ok(_) => {}
5680            Err(ix) => self.repo_paths.insert(ix, repo_path),
5681        }
5682    }
5683
5684    fn remove_repo_path(&mut self, repo_path: &RepoPath) {
5685        match self.repo_paths.binary_search(repo_path) {
5686            Ok(ix) => {
5687                self.repo_paths.remove(ix);
5688            }
5689            Err(_) => {}
5690        }
5691    }
5692}
5693
5694struct ScanJob {
5695    abs_path: Arc<Path>,
5696    path: Arc<Path>,
5697    ignore_stack: Arc<IgnoreStack>,
5698    scan_queue: Sender<ScanJob>,
5699    ancestor_inodes: TreeSet<u64>,
5700    is_external: bool,
5701}
5702
5703struct UpdateIgnoreStatusJob {
5704    abs_path: Arc<Path>,
5705    ignore_stack: Arc<IgnoreStack>,
5706    ignore_queue: Sender<UpdateIgnoreStatusJob>,
5707    scan_queue: Sender<ScanJob>,
5708}
5709
5710struct UpdateGitRepoJob {
5711    local_repository: LocalRepositoryEntry,
5712}
5713
5714pub trait WorktreeModelHandle {
5715    #[cfg(any(test, feature = "test-support"))]
5716    fn flush_fs_events<'a>(
5717        &self,
5718        cx: &'a mut gpui::TestAppContext,
5719    ) -> futures::future::LocalBoxFuture<'a, ()>;
5720
5721    #[cfg(any(test, feature = "test-support"))]
5722    fn flush_fs_events_in_root_git_repository<'a>(
5723        &self,
5724        cx: &'a mut gpui::TestAppContext,
5725    ) -> futures::future::LocalBoxFuture<'a, ()>;
5726}
5727
5728impl WorktreeModelHandle for Entity<Worktree> {
5729    // The worktree's FS event stream sometimes delivers "redundant" events for FS changes that
5730    // occurred before the worktree was constructed. These events can cause the worktree to perform
5731    // extra directory scans and emit extra scan-state notifications.
5732    //
5733    // This function mutates the worktree's directory and waits for those mutations to be picked up,
5734    // to ensure that all redundant FS events have already been processed.
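    //
    // A minimal usage sketch (hypothetical test code; assumes an `Entity<Worktree>` named `tree`
    // and a `gpui::TestAppContext` named `cx` set up elsewhere in the test):
    //
    //     tree.flush_fs_events(cx).await;
    //     // Any events predating the worktree's construction have now been processed, so
    //     // subsequent assertions about entries won't race with redundant rescans.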
5735    #[cfg(any(test, feature = "test-support"))]
5736    fn flush_fs_events<'a>(
5737        &self,
5738        cx: &'a mut gpui::TestAppContext,
5739    ) -> futures::future::LocalBoxFuture<'a, ()> {
5740        let file_name = "fs-event-sentinel";
5741
5742        let tree = self.clone();
5743        let (fs, root_path) = self.update(cx, |tree, _| {
5744            let tree = tree.as_local().unwrap();
5745            (tree.fs.clone(), tree.abs_path().clone())
5746        });
5747
5748        async move {
5749            fs.create_file(&root_path.join(file_name), Default::default())
5750                .await
5751                .unwrap();
5752
5753            cx.condition(&tree, |tree, _| tree.entry_for_path(file_name).is_some())
5754                .await;
5755
5756            fs.remove_file(&root_path.join(file_name), Default::default())
5757                .await
5758                .unwrap();
5759            cx.condition(&tree, |tree, _| tree.entry_for_path(file_name).is_none())
5760                .await;
5761
5762            cx.update(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5763                .await;
5764        }
5765        .boxed_local()
5766    }
5767
5768    // This function is similar to flush_fs_events, except that it waits for events to be flushed in
5769    // the .git folder of the root repository.
5770    // It exists because a repository's .git folder might live *outside* of the worktree, so its FS
5771    // events might go through a different path.
5772    // To flush those, we create artificial events in the .git folder and wait for the repository
5773    // to be reloaded.
5774    #[cfg(any(test, feature = "test-support"))]
5775    fn flush_fs_events_in_root_git_repository<'a>(
5776        &self,
5777        cx: &'a mut gpui::TestAppContext,
5778    ) -> futures::future::LocalBoxFuture<'a, ()> {
5779        let file_name = "fs-event-sentinel";
5780
5781        let tree = self.clone();
5782        let (fs, root_path, mut git_dir_scan_id) = self.update(cx, |tree, _| {
5783            let tree = tree.as_local().unwrap();
5784            let root_entry = tree.root_git_entry().unwrap();
5785            let local_repo_entry = tree.get_local_repo(&root_entry).unwrap();
5786            (
5787                tree.fs.clone(),
5788                local_repo_entry.dot_git_dir_abs_path.clone(),
5789                local_repo_entry.git_dir_scan_id,
5790            )
5791        });
5792
5793        let scan_id_increased = |tree: &mut Worktree, git_dir_scan_id: &mut usize| {
5794            let root_entry = tree.root_git_entry().unwrap();
5795            let local_repo_entry = tree
5796                .as_local()
5797                .unwrap()
5798                .get_local_repo(&root_entry)
5799                .unwrap();
5800
5801            if local_repo_entry.git_dir_scan_id > *git_dir_scan_id {
5802                *git_dir_scan_id = local_repo_entry.git_dir_scan_id;
5803                true
5804            } else {
5805                false
5806            }
5807        };
5808
5809        async move {
5810            fs.create_file(&root_path.join(file_name), Default::default())
5811                .await
5812                .unwrap();
5813
5814            cx.condition(&tree, |tree, _| {
5815                scan_id_increased(tree, &mut git_dir_scan_id)
5816            })
5817            .await;
5818
5819            fs.remove_file(&root_path.join(file_name), Default::default())
5820                .await
5821                .unwrap();
5822
5823            cx.condition(&tree, |tree, _| {
5824                scan_id_increased(tree, &mut git_dir_scan_id)
5825            })
5826            .await;
5827
5828            cx.update(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5829                .await;
5830        }
5831        .boxed_local()
5832    }
5833}
5834
5835#[derive(Clone, Debug)]
5836struct TraversalProgress<'a> {
5837    max_path: &'a Path,
5838    count: usize,
5839    non_ignored_count: usize,
5840    file_count: usize,
5841    non_ignored_file_count: usize,
5842}
5843
5844impl<'a> TraversalProgress<'a> {
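    /// Returns the number of entries summarized so far, restricted to the kinds of entries
    /// selected by the flags (files and/or directories, optionally including ignored entries).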
5845    fn count(&self, include_files: bool, include_dirs: bool, include_ignored: bool) -> usize {
5846        match (include_files, include_dirs, include_ignored) {
5847            (true, true, true) => self.count,
5848            (true, true, false) => self.non_ignored_count,
5849            (true, false, true) => self.file_count,
5850            (true, false, false) => self.non_ignored_file_count,
5851            (false, true, true) => self.count - self.file_count,
5852            (false, true, false) => self.non_ignored_count - self.non_ignored_file_count,
5853            (false, false, _) => 0,
5854        }
5855    }
5856}
5857
5858impl<'a> sum_tree::Dimension<'a, EntrySummary> for TraversalProgress<'a> {
5859    fn zero(_cx: &()) -> Self {
5860        Default::default()
5861    }
5862
5863    fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
5864        self.max_path = summary.max_path.as_ref();
5865        self.count += summary.count;
5866        self.non_ignored_count += summary.non_ignored_count;
5867        self.file_count += summary.file_count;
5868        self.non_ignored_file_count += summary.non_ignored_file_count;
5869    }
5870}
5871
5872impl<'a> Default for TraversalProgress<'a> {
5873    fn default() -> Self {
5874        Self {
5875            max_path: Path::new(""),
5876            count: 0,
5877            non_ignored_count: 0,
5878            file_count: 0,
5879            non_ignored_file_count: 0,
5880        }
5881    }
5882}
5883
5884#[derive(Debug, Clone, Copy)]
5885pub struct GitEntryRef<'a> {
5886    pub entry: &'a Entry,
5887    pub git_summary: GitSummary,
5888}
5889
5890impl<'a> GitEntryRef<'a> {
5891    pub fn to_owned(&self) -> GitEntry {
5892        GitEntry {
5893            entry: self.entry.clone(),
5894            git_summary: self.git_summary,
5895        }
5896    }
5897}
5898
5899impl<'a> Deref for GitEntryRef<'a> {
5900    type Target = Entry;
5901
5902    fn deref(&self) -> &Self::Target {
5903        &self.entry
5904    }
5905}
5906
5907impl<'a> AsRef<Entry> for GitEntryRef<'a> {
5908    fn as_ref(&self) -> &Entry {
5909        self.entry
5910    }
5911}
5912
5913#[derive(Debug, Clone, PartialEq, Eq)]
5914pub struct GitEntry {
5915    pub entry: Entry,
5916    pub git_summary: GitSummary,
5917}
5918
5919impl GitEntry {
5920    pub fn to_ref(&self) -> GitEntryRef {
5921        GitEntryRef {
5922            entry: &self.entry,
5923            git_summary: self.git_summary,
5924        }
5925    }
5926}
5927
5928impl Deref for GitEntry {
5929    type Target = Entry;
5930
5931    fn deref(&self) -> &Self::Target {
5932        &self.entry
5933    }
5934}
5935
5936impl AsRef<Entry> for GitEntry {
5937    fn as_ref(&self) -> &Entry {
5938        &self.entry
5939    }
5940}
5941
5942/// Walks the worktree entries and their associated git statuses.
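///
/// A sketch of typical use (illustrative only; assumes a `Traversal` named `traversal` was
/// obtained from a snapshot elsewhere, e.g. via one of the snapshot's traversal constructors):
///
/// ```ignore
/// for entry in traversal.with_git_statuses() {
///     // Each item is a `GitEntryRef`, pairing a worktree `Entry` with its `GitSummary`.
///     println!("{:?}: {:?}", entry.path, entry.git_summary);
/// }
/// ```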
5943pub struct GitTraversal<'a> {
5944    traversal: Traversal<'a>,
5945    current_entry_summary: Option<GitSummary>,
5946    repo_location: Option<(
5947        &'a RepositoryEntry,
5948        Cursor<'a, StatusEntry, PathProgress<'a>>,
5949    )>,
5950}
5951
5952impl<'a> GitTraversal<'a> {
5953    fn synchronize_statuses(&mut self, reset: bool) {
5954        self.current_entry_summary = None;
5955
5956        let Some(entry) = self.traversal.cursor.item() else {
5957            return;
5958        };
5959
5960        let Some(repo) = self.traversal.snapshot.repository_for_path(&entry.path) else {
5961            self.repo_location = None;
5962            return;
5963        };
5964
5965        // Update our state if we changed repositories.
5966        if reset || self.repo_location.as_ref().map(|(prev_repo, _)| prev_repo) != Some(&repo) {
5967            self.repo_location = Some((repo, repo.statuses_by_path.cursor::<PathProgress>(&())));
5968        }
5969
5970        let Some((repo, statuses)) = &mut self.repo_location else {
5971            return;
5972        };
5973
5974        let repo_path = repo.relativize(&entry.path).unwrap();
5975
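        // For a directory, sum the statuses of every entry beneath it: seek to the directory's
        // repo path, then take the summary up to the first path outside of its subtree.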
5976        if entry.is_dir() {
5977            let mut statuses = statuses.clone();
5978            statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &());
5979            let summary =
5980                statuses.summary(&PathTarget::Successor(repo_path.as_ref()), Bias::Left, &());
5981
5982            self.current_entry_summary = Some(summary);
5983        } else if entry.is_file() {
5984            // For a file entry, park the cursor on the corresponding status
5985            if statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &()) {
5986                // TODO: Investigate statuses.item() being None here.
5987                self.current_entry_summary = statuses.item().map(|item| item.status.into());
5988            } else {
5989                self.current_entry_summary = Some(GitSummary::UNCHANGED);
5990            }
5991        }
5992    }
5993
5994    pub fn advance(&mut self) -> bool {
5995        self.advance_by(1)
5996    }
5997
5998    pub fn advance_by(&mut self, count: usize) -> bool {
5999        let found = self.traversal.advance_by(count);
6000        self.synchronize_statuses(false);
6001        found
6002    }
6003
6004    pub fn advance_to_sibling(&mut self) -> bool {
6005        let found = self.traversal.advance_to_sibling();
6006        self.synchronize_statuses(false);
6007        found
6008    }
6009
6010    pub fn back_to_parent(&mut self) -> bool {
6011        let found = self.traversal.back_to_parent();
6012        self.synchronize_statuses(true);
6013        found
6014    }
6015
6016    pub fn start_offset(&self) -> usize {
6017        self.traversal.start_offset()
6018    }
6019
6020    pub fn end_offset(&self) -> usize {
6021        self.traversal.end_offset()
6022    }
6023
6024    pub fn entry(&self) -> Option<GitEntryRef<'a>> {
6025        let entry = self.traversal.cursor.item()?;
6026        let git_summary = self.current_entry_summary.unwrap_or(GitSummary::UNCHANGED);
6027        Some(GitEntryRef { entry, git_summary })
6028    }
6029}
6030
6031impl<'a> Iterator for GitTraversal<'a> {
6032    type Item = GitEntryRef<'a>;
6033    fn next(&mut self) -> Option<Self::Item> {
6034        if let Some(item) = self.entry() {
6035            self.advance();
6036            Some(item)
6037        } else {
6038            None
6039        }
6040    }
6041}
6042
6043#[derive(Debug)]
6044pub struct Traversal<'a> {
6045    snapshot: &'a Snapshot,
6046    cursor: sum_tree::Cursor<'a, Entry, TraversalProgress<'a>>,
6047    include_ignored: bool,
6048    include_files: bool,
6049    include_dirs: bool,
6050}
6051
6052impl<'a> Traversal<'a> {
6053    fn new(
6054        snapshot: &'a Snapshot,
6055        include_files: bool,
6056        include_dirs: bool,
6057        include_ignored: bool,
6058        start_path: &Path,
6059    ) -> Self {
6060        let mut cursor = snapshot.entries_by_path.cursor(&());
6061        cursor.seek(&TraversalTarget::path(start_path), Bias::Left, &());
6062        let mut traversal = Self {
6063            snapshot,
6064            cursor,
6065            include_files,
6066            include_dirs,
6067            include_ignored,
6068        };
6069        if traversal.end_offset() == traversal.start_offset() {
6070            traversal.next();
6071        }
6072        traversal
6073    }
6074
6075    pub fn with_git_statuses(self) -> GitTraversal<'a> {
6076        let mut this = GitTraversal {
6077            traversal: self,
6078            current_entry_summary: None,
6079            repo_location: None,
6080        };
6081        this.synchronize_statuses(true);
6082        this
6083    }
6084
6085    pub fn advance(&mut self) -> bool {
6086        self.advance_by(1)
6087    }
6088
6089    pub fn advance_by(&mut self, count: usize) -> bool {
6090        self.cursor.seek_forward(
6091            &TraversalTarget::Count {
6092                count: self.end_offset() + count,
6093                include_dirs: self.include_dirs,
6094                include_files: self.include_files,
6095                include_ignored: self.include_ignored,
6096            },
6097            Bias::Left,
6098            &(),
6099        )
6100    }
6101
6102    pub fn advance_to_sibling(&mut self) -> bool {
6103        while let Some(entry) = self.cursor.item() {
6104            self.cursor
6105                .seek_forward(&TraversalTarget::successor(&entry.path), Bias::Left, &());
6106            if let Some(entry) = self.cursor.item() {
6107                if (self.include_files || !entry.is_file())
6108                    && (self.include_dirs || !entry.is_dir())
6109                    && (self.include_ignored || !entry.is_ignored || entry.is_always_included)
6110                {
6111                    return true;
6112                }
6113            }
6114        }
6115        false
6116    }
6117
6118    pub fn back_to_parent(&mut self) -> bool {
6119        let Some(parent_path) = self.cursor.item().and_then(|entry| entry.path.parent()) else {
6120            return false;
6121        };
6122        self.cursor
6123            .seek(&TraversalTarget::path(parent_path), Bias::Left, &())
6124    }
6125
6126    pub fn entry(&self) -> Option<&'a Entry> {
6127        self.cursor.item()
6128    }
6129
6130    pub fn start_offset(&self) -> usize {
6131        self.cursor
6132            .start()
6133            .count(self.include_files, self.include_dirs, self.include_ignored)
6134    }
6135
6136    pub fn end_offset(&self) -> usize {
6137        self.cursor
6138            .end(&())
6139            .count(self.include_files, self.include_dirs, self.include_ignored)
6140    }
6141}
6142
6143impl<'a> Iterator for Traversal<'a> {
6144    type Item = &'a Entry;
6145
6146    fn next(&mut self) -> Option<Self::Item> {
6147        if let Some(item) = self.entry() {
6148            self.advance();
6149            Some(item)
6150        } else {
6151            None
6152        }
6153    }
6154}
6155
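// A seek target expressed in terms of entry paths:
// - `Path(p)` stops at `p` itself (or, if `p` is absent, at the first path ordered after it).
// - `Successor(p)` stops at the first path that does not have `p` as a prefix, i.e. just past the
//   subtree rooted at `p`.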
6156#[derive(Debug, Clone, Copy)]
6157enum PathTarget<'a> {
6158    Path(&'a Path),
6159    Successor(&'a Path),
6160}
6161
6162impl<'a> PathTarget<'a> {
6163    fn cmp_path(&self, other: &Path) -> Ordering {
6164        match self {
6165            PathTarget::Path(path) => path.cmp(&other),
6166            PathTarget::Successor(path) => {
6167                if other.starts_with(path) {
6168                    Ordering::Greater
6169                } else {
6170                    Ordering::Equal
6171                }
6172            }
6173        }
6174    }
6175}
6176
6177impl<'a, 'b, S: Summary> SeekTarget<'a, PathSummary<S>, PathProgress<'a>> for PathTarget<'b> {
6178    fn cmp(&self, cursor_location: &PathProgress<'a>, _: &S::Context) -> Ordering {
6179        self.cmp_path(&cursor_location.max_path)
6180    }
6181}
6182
6183impl<'a, 'b, S: Summary> SeekTarget<'a, PathSummary<S>, TraversalProgress<'a>> for PathTarget<'b> {
6184    fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &S::Context) -> Ordering {
6185        self.cmp_path(&cursor_location.max_path)
6186    }
6187}
6188
6189impl<'a, 'b> SeekTarget<'a, PathSummary<GitSummary>, (TraversalProgress<'a>, GitSummary)>
6190    for PathTarget<'b>
6191{
6192    fn cmp(&self, cursor_location: &(TraversalProgress<'a>, GitSummary), _: &()) -> Ordering {
6193        self.cmp_path(&cursor_location.0.max_path)
6194    }
6195}
6196
6197#[derive(Debug)]
6198enum TraversalTarget<'a> {
6199    Path(PathTarget<'a>),
6200    Count {
6201        count: usize,
6202        include_files: bool,
6203        include_ignored: bool,
6204        include_dirs: bool,
6205    },
6206}
6207
6208impl<'a> TraversalTarget<'a> {
6209    fn path(path: &'a Path) -> Self {
6210        Self::Path(PathTarget::Path(path))
6211    }
6212
6213    fn successor(path: &'a Path) -> Self {
6214        Self::Path(PathTarget::Successor(path))
6215    }
6216
6217    fn cmp_progress(&self, progress: &TraversalProgress) -> Ordering {
6218        match self {
6219            TraversalTarget::Path(path) => path.cmp_path(&progress.max_path),
6220            TraversalTarget::Count {
6221                count,
6222                include_files,
6223                include_dirs,
6224                include_ignored,
6225            } => Ord::cmp(
6226                count,
6227                &progress.count(*include_files, *include_dirs, *include_ignored),
6228            ),
6229        }
6230    }
6231}
6232
6233impl<'a, 'b> SeekTarget<'a, EntrySummary, TraversalProgress<'a>> for TraversalTarget<'b> {
6234    fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &()) -> Ordering {
6235        self.cmp_progress(cursor_location)
6236    }
6237}
6238
6239impl<'a, 'b> SeekTarget<'a, PathSummary<Unit>, TraversalProgress<'a>> for TraversalTarget<'b> {
6240    fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &()) -> Ordering {
6241        self.cmp_progress(cursor_location)
6242    }
6243}
6244
6245pub struct ChildEntriesOptions {
6246    pub include_files: bool,
6247    pub include_dirs: bool,
6248    pub include_ignored: bool,
6249}
6250
6251pub struct ChildEntriesIter<'a> {
6252    parent_path: &'a Path,
6253    traversal: Traversal<'a>,
6254}
6255
6256impl<'a> ChildEntriesIter<'a> {
6257    pub fn with_git_statuses(self) -> ChildEntriesGitIter<'a> {
6258        ChildEntriesGitIter {
6259            parent_path: self.parent_path,
6260            traversal: self.traversal.with_git_statuses(),
6261        }
6262    }
6263}
6264
6265pub struct ChildEntriesGitIter<'a> {
6266    parent_path: &'a Path,
6267    traversal: GitTraversal<'a>,
6268}
6269
6270impl<'a> Iterator for ChildEntriesIter<'a> {
6271    type Item = &'a Entry;
6272
6273    fn next(&mut self) -> Option<Self::Item> {
6274        if let Some(item) = self.traversal.entry() {
6275            if item.path.starts_with(self.parent_path) {
6276                self.traversal.advance_to_sibling();
6277                return Some(item);
6278            }
6279        }
6280        None
6281    }
6282}
6283
6284impl<'a> Iterator for ChildEntriesGitIter<'a> {
6285    type Item = GitEntryRef<'a>;
6286
6287    fn next(&mut self) -> Option<Self::Item> {
6288        if let Some(item) = self.traversal.entry() {
6289            if item.path.starts_with(self.parent_path) {
6290                self.traversal.advance_to_sibling();
6291                return Some(item);
6292            }
6293        }
6294        None
6295    }
6296}
6297
6298impl<'a> From<&'a Entry> for proto::Entry {
6299    fn from(entry: &'a Entry) -> Self {
6300        Self {
6301            id: entry.id.to_proto(),
6302            is_dir: entry.is_dir(),
6303            path: entry.path.as_ref().to_proto(),
6304            inode: entry.inode,
6305            mtime: entry.mtime.map(|time| time.into()),
6306            is_ignored: entry.is_ignored,
6307            is_external: entry.is_external,
6308            is_fifo: entry.is_fifo,
6309            size: Some(entry.size),
6310            canonical_path: entry
6311                .canonical_path
6312                .as_ref()
6313                .map(|path| path.as_ref().to_proto()),
6314        }
6315    }
6316}
6317
6318impl<'a> TryFrom<(&'a CharBag, &PathMatcher, proto::Entry)> for Entry {
6319    type Error = anyhow::Error;
6320
6321    fn try_from(
6322        (root_char_bag, always_included, entry): (&'a CharBag, &PathMatcher, proto::Entry),
6323    ) -> Result<Self> {
6324        let kind = if entry.is_dir {
6325            EntryKind::Dir
6326        } else {
6327            EntryKind::File
6328        };
6329
6330        let path = Arc::<Path>::from_proto(entry.path);
6331        let char_bag = char_bag_for_path(*root_char_bag, &path);
6332        let is_always_included = always_included.is_match(path.as_ref());
6333        Ok(Entry {
6334            id: ProjectEntryId::from_proto(entry.id),
6335            kind,
6336            path,
6337            inode: entry.inode,
6338            mtime: entry.mtime.map(|time| time.into()),
6339            size: entry.size.unwrap_or(0),
6340            canonical_path: entry
6341                .canonical_path
6342                .map(|path_string| Box::from(PathBuf::from_proto(path_string))),
6343            is_ignored: entry.is_ignored,
6344            is_always_included,
6345            is_external: entry.is_external,
6346            is_private: false,
6347            char_bag,
6348            is_fifo: entry.is_fifo,
6349        })
6350    }
6351}
6352
6353fn status_from_proto(
6354    simple_status: i32,
6355    status: Option<proto::GitFileStatus>,
6356) -> anyhow::Result<FileStatus> {
6357    use proto::git_file_status::Variant;
6358
6359    let Some(variant) = status.and_then(|status| status.variant) else {
6360        let code = proto::GitStatus::from_i32(simple_status)
6361            .ok_or_else(|| anyhow!("Invalid git status code: {simple_status}"))?;
6362        let result = match code {
6363            proto::GitStatus::Added => TrackedStatus {
6364                worktree_status: StatusCode::Added,
6365                index_status: StatusCode::Unmodified,
6366            }
6367            .into(),
6368            proto::GitStatus::Modified => TrackedStatus {
6369                worktree_status: StatusCode::Modified,
6370                index_status: StatusCode::Unmodified,
6371            }
6372            .into(),
6373            proto::GitStatus::Conflict => UnmergedStatus {
6374                first_head: UnmergedStatusCode::Updated,
6375                second_head: UnmergedStatusCode::Updated,
6376            }
6377            .into(),
6378            proto::GitStatus::Deleted => TrackedStatus {
6379                worktree_status: StatusCode::Deleted,
6380                index_status: StatusCode::Unmodified,
6381            }
6382            .into(),
6383            _ => return Err(anyhow!("Invalid code for simple status: {simple_status}")),
6384        };
6385        return Ok(result);
6386    };
6387
6388    let result = match variant {
6389        Variant::Untracked(_) => FileStatus::Untracked,
6390        Variant::Ignored(_) => FileStatus::Ignored,
6391        Variant::Unmerged(unmerged) => {
6392            let [first_head, second_head] =
6393                [unmerged.first_head, unmerged.second_head].map(|head| {
6394                    let code = proto::GitStatus::from_i32(head)
6395                        .ok_or_else(|| anyhow!("Invalid git status code: {head}"))?;
6396                    let result = match code {
6397                        proto::GitStatus::Added => UnmergedStatusCode::Added,
6398                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
6399                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
6400                        _ => return Err(anyhow!("Invalid code for unmerged status: {code:?}")),
6401                    };
6402                    Ok(result)
6403                });
6404            let [first_head, second_head] = [first_head?, second_head?];
6405            UnmergedStatus {
6406                first_head,
6407                second_head,
6408            }
6409            .into()
6410        }
6411        Variant::Tracked(tracked) => {
6412            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
6413                .map(|status| {
6414                    let code = proto::GitStatus::from_i32(status)
6415                        .ok_or_else(|| anyhow!("Invalid git status code: {status}"))?;
6416                    let result = match code {
6417                        proto::GitStatus::Modified => StatusCode::Modified,
6418                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
6419                        proto::GitStatus::Added => StatusCode::Added,
6420                        proto::GitStatus::Deleted => StatusCode::Deleted,
6421                        proto::GitStatus::Renamed => StatusCode::Renamed,
6422                        proto::GitStatus::Copied => StatusCode::Copied,
6423                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
6424                        _ => return Err(anyhow!("Invalid code for tracked status: {code:?}")),
6425                    };
6426                    Ok(result)
6427                });
6428            let [index_status, worktree_status] = [index_status?, worktree_status?];
6429            TrackedStatus {
6430                index_status,
6431                worktree_status,
6432            }
6433            .into()
6434        }
6435    };
6436    Ok(result)
6437}
6438
6439fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
6440    use proto::git_file_status::{Tracked, Unmerged, Variant};
6441
6442    let variant = match status {
6443        FileStatus::Untracked => Variant::Untracked(Default::default()),
6444        FileStatus::Ignored => Variant::Ignored(Default::default()),
6445        FileStatus::Unmerged(UnmergedStatus {
6446            first_head,
6447            second_head,
6448        }) => Variant::Unmerged(Unmerged {
6449            first_head: unmerged_status_to_proto(first_head),
6450            second_head: unmerged_status_to_proto(second_head),
6451        }),
6452        FileStatus::Tracked(TrackedStatus {
6453            index_status,
6454            worktree_status,
6455        }) => Variant::Tracked(Tracked {
6456            index_status: tracked_status_to_proto(index_status),
6457            worktree_status: tracked_status_to_proto(worktree_status),
6458        }),
6459    };
6460    proto::GitFileStatus {
6461        variant: Some(variant),
6462    }
6463}
6464
6465fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
6466    match code {
6467        UnmergedStatusCode::Added => proto::GitStatus::Added as _,
6468        UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
6469        UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
6470    }
6471}
6472
6473fn tracked_status_to_proto(code: StatusCode) -> i32 {
6474    match code {
6475        StatusCode::Added => proto::GitStatus::Added as _,
6476        StatusCode::Deleted => proto::GitStatus::Deleted as _,
6477        StatusCode::Modified => proto::GitStatus::Modified as _,
6478        StatusCode::Renamed => proto::GitStatus::Renamed as _,
6479        StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
6480        StatusCode::Copied => proto::GitStatus::Copied as _,
6481        StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
6482    }
6483}
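
// A round-trip sketch (illustrative only; assumes `FileStatus` supports equality comparison): a
// detailed status converted to proto and back should be preserved, and `status_from_proto` ignores
// its `simple_status` argument whenever a detailed variant is present.
//
//     let status: FileStatus = TrackedStatus {
//         index_status: StatusCode::Modified,
//         worktree_status: StatusCode::Unmodified,
//     }
//     .into();
//     let proto_status = status_to_proto(status);
//     assert_eq!(status_from_proto(0, Some(proto_status)).unwrap(), status);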
6484
6485#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
6486pub struct ProjectEntryId(usize);
6487
6488impl ProjectEntryId {
6489    pub const MAX: Self = Self(usize::MAX);
6490    pub const MIN: Self = Self(usize::MIN);
6491
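    /// Allocates the next id from the given shared counter.
    ///
    /// A minimal sketch (the counter here is hypothetical; real callers supply their own):
    ///
    /// ```ignore
    /// let counter = AtomicUsize::new(0);
    /// let first = ProjectEntryId::new(&counter);
    /// let second = ProjectEntryId::new(&counter);
    /// assert_ne!(first, second);
    /// ```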
6492    pub fn new(counter: &AtomicUsize) -> Self {
6493        Self(counter.fetch_add(1, SeqCst))
6494    }
6495
6496    pub fn from_proto(id: u64) -> Self {
6497        Self(id as usize)
6498    }
6499
6500    pub fn to_proto(&self) -> u64 {
6501        self.0 as u64
6502    }
6503
6504    pub fn to_usize(&self) -> usize {
6505        self.0
6506    }
6507}
6508
6509#[cfg(any(test, feature = "test-support"))]
6510impl CreatedEntry {
6511    pub fn to_included(self) -> Option<Entry> {
6512        match self {
6513            CreatedEntry::Included(entry) => Some(entry),
6514            CreatedEntry::Excluded { .. } => None,
6515        }
6516    }
6517}