Merge pull request #15 from zed-industries/rescan

Created by Nathan Sobo

Update Worktrees when the file system changes

Change summary

.vscode/launch.json                    |    8 
Cargo.lock                             |  403 ++---
Cargo.toml                             |    5 
fsevent/Cargo.toml                     |   16 
fsevent/examples/events.rs             |   16 
fsevent/src/lib.rs                     |  354 +++++
gpui/Cargo.toml                        |    5 
gpui/src/app.rs                        |  440 ++++--
gpui/src/elements/uniform_list.rs      |   19 
gpui/src/test.rs                       |    4 
gpui/src/util.rs                       |   15 
scoped_pool/Cargo.toml                 |    8 
scoped_pool/src/lib.rs                 |  188 +++
zed/Cargo.toml                         |    8 
zed/src/editor/buffer/mod.rs           |  926 ++++++++------
zed/src/editor/buffer_view.rs          |   34 
zed/src/editor/display_map/fold_map.rs |   12 
zed/src/editor/display_map/mod.rs      |    4 
zed/src/file_finder.rs                 |  197 +-
zed/src/lib.rs                         |    1 
zed/src/main.rs                        |    3 
zed/src/operation_queue.rs             |    6 
zed/src/sum_tree/cursor.rs             |    5 
zed/src/sum_tree/mod.rs                |   88 +
zed/src/test.rs                        |    7 
zed/src/timer.rs                       |   42 
zed/src/workspace/pane.rs              |    4 
zed/src/workspace/workspace.rs         |   85 
zed/src/workspace/workspace_view.rs    |  160 +-
zed/src/worktree.rs                    | 1720 ++++++++++++++++++++++++++++
zed/src/worktree/char_bag.rs           |   20 
zed/src/worktree/fuzzy.rs              |  276 +++-
zed/src/worktree/ignore.rs             |   57 
zed/src/worktree/mod.rs                |    5 
zed/src/worktree/worktree.rs           |  725 -----------
35 files changed, 3,920 insertions(+), 1,946 deletions(-)

Detailed changes

.vscode/launch.json 🔗

@@ -7,15 +7,15 @@
         {
             "type": "lldb",
             "request": "launch",
-            "name": "Debug executable 'zed'",
+            "name": "Debug executable 'Zed'",
             "cargo": {
                 "args": [
                     "build",
-                    "--bin=zed",
+                    "--bin=Zed",
                     "--package=zed"
                 ],
                 "filter": {
-                    "name": "zed",
+                    "name": "Zed",
                     "kind": "bin"
                 }
             },
@@ -63,4 +63,4 @@
             "cwd": "${workspaceFolder}"
         }
     ]
-}
+}

Cargo.lock 🔗

@@ -84,22 +84,6 @@ dependencies = [
  "futures-lite",
 ]
 
-[[package]]
-name = "async-global-executor"
-version = "2.0.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9586ec52317f36de58453159d48351bc244bc24ced3effc1fce22f3d48664af6"
-dependencies = [
- "async-channel",
- "async-executor",
- "async-io",
- "async-mutex",
- "blocking",
- "futures-lite",
- "num_cpus",
- "once_cell",
-]
-
 [[package]]
 name = "async-io"
 version = "1.3.1"
@@ -129,15 +113,6 @@ dependencies = [
  "event-listener",
 ]
 
-[[package]]
-name = "async-mutex"
-version = "1.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "479db852db25d9dbf6204e6cb6253698f175c15726470f78af0d918e99d6156e"
-dependencies = [
- "event-listener",
-]
-
 [[package]]
 name = "async-net"
 version = "1.5.0"
@@ -166,39 +141,20 @@ dependencies = [
  "winapi",
 ]
 
-[[package]]
-name = "async-std"
-version = "1.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9f06685bad74e0570f5213741bea82158279a4103d988e57bfada11ad230341"
-dependencies = [
- "async-channel",
- "async-global-executor",
- "async-io",
- "async-lock",
- "async-process",
- "crossbeam-utils 0.8.2",
- "futures-channel",
- "futures-core",
- "futures-io",
- "futures-lite",
- "gloo-timers",
- "kv-log-macro",
- "log",
- "memchr",
- "num_cpus",
- "once_cell",
- "pin-project-lite",
- "pin-utils",
- "slab",
- "wasm-bindgen-futures",
-]
-
 [[package]]
 name = "async-task"
 version = "4.0.3"
 source = "git+https://github.com/zed-industries/async-task?rev=341b57d6de98cdfd7b418567b8de2022ca993a6e#341b57d6de98cdfd7b418567b8de2022ca993a6e"
 
+[[package]]
+name = "atomic"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c3410529e8288c463bedb5930f82833bc0c90e5d2fe639a56582a4d09220b281"
+dependencies = [
+ "autocfg",
+]
+
 [[package]]
 name = "atomic-waker"
 version = "1.0.0"
@@ -297,12 +253,6 @@ dependencies = [
  "memchr",
 ]
 
-[[package]]
-name = "bumpalo"
-version = "3.6.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "63396b8a4b9de3f4fdfb320ab6080762242f66a8ef174c49d8e19b674db4cdbe"
-
 [[package]]
 name = "bytemuck"
 version = "1.5.1"
@@ -498,22 +448,6 @@ dependencies = [
  "cfg-if 1.0.0",
 ]
 
-[[package]]
-name = "crossbeam"
-version = "0.2.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd66663db5a988098a89599d4857919b3acf7f61402e61365acfd3919857b9be"
-
-[[package]]
-name = "crossbeam-channel"
-version = "0.4.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b153fe7cbef478c567df0f972e02e6d736db11affe43dfc9c56a9374d1adfb87"
-dependencies = [
- "crossbeam-utils 0.7.2",
- "maybe-uninit",
-]
-
 [[package]]
 name = "crossbeam-channel"
 version = "0.5.0"
@@ -521,18 +455,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dca26ee1f8d361640700bde38b2c37d8c22b3ce2d360e1fc1c74ea4b0aa7d775"
 dependencies = [
  "cfg-if 1.0.0",
- "crossbeam-utils 0.8.2",
+ "crossbeam-utils",
 ]
 
 [[package]]
-name = "crossbeam-utils"
-version = "0.7.2"
+name = "crossbeam-queue"
+version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8"
+checksum = "0f6cb3c7f5b8e51bc3ebb73a2327ad4abdbd119dc13223f14f961d2f38486756"
 dependencies = [
- "autocfg",
- "cfg-if 0.1.10",
- "lazy_static",
+ "cfg-if 1.0.0",
+ "crossbeam-utils",
 ]
 
 [[package]]
@@ -549,9 +482,9 @@ dependencies = [
 
 [[package]]
 name = "ctor"
-version = "0.1.19"
+version = "0.1.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e8f45d9ad417bcef4817d614a501ab55cdd96a6fdb24f49aab89a54acfd66b19"
+checksum = "5e98e2ad1a782e33928b96fc3948e7c355e5af34ba4de7670fe8bac2a3b2006d"
 dependencies = [
  "quote",
  "syn",
@@ -793,12 +726,45 @@ dependencies = [
  "pkg-config",
 ]
 
+[[package]]
+name = "fsevent"
+version = "2.0.2"
+dependencies = [
+ "bitflags",
+ "fsevent-sys",
+ "parking_lot",
+ "tempdir",
+]
+
+[[package]]
+name = "fsevent-sys"
+version = "3.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77a29c77f1ca394c3e73a9a5d24cfcabb734682d9634fc398f2204a63c994120"
+dependencies = [
+ "libc",
+]
+
 [[package]]
 name = "fuchsia-cprng"
 version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
 
+[[package]]
+name = "futures"
+version = "0.3.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da9052a1a50244d8d5aa9bf55cbc2fb6f357c86cc52e46c62ed390a7180cf150"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-io",
+ "futures-sink",
+ "futures-task",
+ "futures-util",
+]
+
 [[package]]
 name = "futures-channel"
 version = "0.3.12"
@@ -806,6 +772,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f2d31b7ec7efab6eefc7c57233bb10b847986139d88cc2f5a02a1ae6871a1846"
 dependencies = [
  "futures-core",
+ "futures-sink",
 ]
 
 [[package]]
@@ -835,6 +802,31 @@ dependencies = [
  "waker-fn",
 ]
 
+[[package]]
+name = "futures-sink"
+version = "0.3.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c5629433c555de3d82861a7a4e3794a4c40040390907cfbfd7143a92a426c23"
+
+[[package]]
+name = "futures-task"
+version = "0.3.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba7aa51095076f3ba6d9a1f702f74bd05ec65f555d70d2033d55ba8d69f581bc"
+
+[[package]]
+name = "futures-util"
+version = "0.3.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "632a8cd0f2a4b3fdea1657f08bde063848c3bd00f9bbf6e256b8be78802e624b"
+dependencies = [
+ "futures-core",
+ "futures-sink",
+ "futures-task",
+ "pin-project-lite",
+ "pin-utils",
+]
+
 [[package]]
 name = "generator"
 version = "0.6.23"
@@ -878,8 +870,9 @@ checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
 
 [[package]]
 name = "globset"
-version = "0.4.4"
-source = "git+https://github.com/zed-industries/ripgrep?rev=1d152118f35b3e3590216709b86277062d79b8a0#1d152118f35b3e3590216709b86277062d79b8a0"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c152169ef1e421390738366d2f796655fec62621dabbd0fd476f905934061e4a"
 dependencies = [
  "aho-corasick",
  "bstr",
@@ -888,25 +881,11 @@ dependencies = [
  "regex",
 ]
 
-[[package]]
-name = "gloo-timers"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "47204a46aaff920a1ea58b11d03dec6f704287d27561724a4631e450654a891f"
-dependencies = [
- "futures-channel",
- "futures-core",
- "js-sys",
- "wasm-bindgen",
- "web-sys",
-]
-
 [[package]]
 name = "gpui"
 version = "0.1.0"
 dependencies = [
  "anyhow",
- "async-std",
  "async-task",
  "bindgen",
  "block",
@@ -916,6 +895,7 @@ dependencies = [
  "core-graphics",
  "core-text",
  "ctor",
+ "env_logger",
  "etagere",
  "font-kit",
  "foreign-types",
@@ -928,6 +908,7 @@ dependencies = [
  "pathfinder_color",
  "pathfinder_geometry",
  "png",
+ "postage",
  "rand 0.8.3",
  "replace_with",
  "resvg",
@@ -966,11 +947,11 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
 
 [[package]]
 name = "ignore"
-version = "0.4.11"
-source = "git+https://github.com/zed-industries/ripgrep?rev=1d152118f35b3e3590216709b86277062d79b8a0#1d152118f35b3e3590216709b86277062d79b8a0"
+version = "0.4.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b287fb45c60bb826a0dc68ff08742b9d88a2fea13d6e0c286b3172065aaf878c"
 dependencies = [
- "crossbeam-channel 0.4.4",
- "crossbeam-utils 0.7.2",
+ "crossbeam-utils",
  "globset",
  "lazy_static",
  "log",
@@ -1013,15 +994,6 @@ version = "0.1.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "229d53d58899083193af11e15917b5640cd40b29ff475a1fe4ef725deb02d0f2"
 
-[[package]]
-name = "js-sys"
-version = "0.3.50"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2d99f9e3e84b8f67f846ef5b4cbbc3b1c29f6c759fcbce6f01aa0e73d932a24c"
-dependencies = [
- "wasm-bindgen",
-]
-
 [[package]]
 name = "kurbo"
 version = "0.8.1"
@@ -1031,15 +1003,6 @@ dependencies = [
  "arrayvec",
 ]
 
-[[package]]
-name = "kv-log-macro"
-version = "1.0.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f"
-dependencies = [
- "log",
-]
-
 [[package]]
 name = "lazy_static"
 version = "1.4.0"
@@ -1074,7 +1037,7 @@ version = "0.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dd96ffd135b2fd7b973ac026d28085defbe8983df057ced3eb4f2130b0831312"
 dependencies = [
- "scopeguard 1.1.0",
+ "scopeguard",
 ]
 
 [[package]]
@@ -1084,7 +1047,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710"
 dependencies = [
  "cfg-if 1.0.0",
- "value-bag",
 ]
 
 [[package]]
@@ -1113,12 +1075,6 @@ version = "0.1.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08"
 
-[[package]]
-name = "maybe-uninit"
-version = "2.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00"
-
 [[package]]
 name = "memchr"
 version = "2.3.4"
@@ -1330,6 +1286,26 @@ version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d70072c20945e1ab871c472a285fc772aefd4f5407723c206242f2c6f94595d6"
 
+[[package]]
+name = "pin-project"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc174859768806e91ae575187ada95c91a29e96a98dc5d2cd9a1fed039501ba6"
+dependencies = [
+ "pin-project-internal",
+]
+
+[[package]]
+name = "pin-project-internal"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a490329918e856ed1b083f244e3bfe2d8c4f336407e4ea9e1a9f479ff09049e5"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "pin-project-lite"
 version = "0.2.4"
@@ -1373,6 +1349,28 @@ dependencies = [
  "winapi",
 ]
 
+[[package]]
+name = "pollster"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6cce106fd2646acbe31a0e4006f75779d535c26a44f153ada196e9edcfc6d944"
+
+[[package]]
+name = "postage"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a63d25391d04a097954b76aba742b6b5b74f213dfe3dbaeeb36e8ddc1c657f0b"
+dependencies = [
+ "atomic",
+ "crossbeam-queue",
+ "futures",
+ "log",
+ "pin-project",
+ "pollster",
+ "static_assertions",
+ "thiserror",
+]
+
 [[package]]
 name = "ppv-lite86"
 version = "0.2.10"
@@ -1592,7 +1590,7 @@ dependencies = [
  "base64",
  "blake2b_simd",
  "constant_time_eq",
- "crossbeam-utils 0.8.2",
+ "crossbeam-utils",
 ]
 
 [[package]]
@@ -1685,13 +1683,9 @@ dependencies = [
 
 [[package]]
 name = "scoped-pool"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "817a3a15e704545ce59ed2b5c60a5d32bda4d7869befb8b36667b658a6c00b43"
+version = "0.0.1"
 dependencies = [
- "crossbeam",
- "scopeguard 0.1.2",
- "variance",
+ "crossbeam-channel",
 ]
 
 [[package]]
@@ -1700,12 +1694,6 @@ version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ea6a9290e3c9cf0f18145ef7ffa62d68ee0bf5fcd651017e586dc7fd5da448c2"
 
-[[package]]
-name = "scopeguard"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "59a076157c1e2dc561d8de585151ee6965d910dd4dcb5dabb7ae3e83981a6c57"
-
 [[package]]
 name = "scopeguard"
 version = "1.1.0"
@@ -1837,12 +1825,6 @@ version = "0.2.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0b8de496cf83d4ed58b6be86c3a275b8602f6ffe98d3024a869e124147a9a3ac"
 
-[[package]]
-name = "slab"
-version = "0.4.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8"
-
 [[package]]
 name = "smallvec"
 version = "1.6.1"
@@ -1878,6 +1860,12 @@ dependencies = [
  "winapi",
 ]
 
+[[package]]
+name = "static_assertions"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
+
 [[package]]
 name = "strsim"
 version = "0.8.0"
@@ -1949,6 +1937,26 @@ dependencies = [
  "unicode-width",
 ]
 
+[[package]]
+name = "thiserror"
+version = "1.0.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e0f4a65597094d4483ddaed134f409b2cb7c1beccf25201a9f73c719254fa98e"
+dependencies = [
+ "thiserror-impl",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "1.0.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7765189610d8241a44529806d6fd1f2e0a08734313a35d5b3a556f92b381f3c0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "thread_local"
 version = "1.1.3"
@@ -2089,21 +2097,6 @@ dependencies = [
  "xmlwriter",
 ]
 
-[[package]]
-name = "value-bag"
-version = "1.0.0-alpha.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6b676010e055c99033117c2343b33a40a30b91fecd6c49055ac9cd2d6c305ab1"
-dependencies = [
- "ctor",
-]
-
-[[package]]
-name = "variance"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3abfc2be1fb59663871379ea884fd81de80c496f2274e021c01d6fe56cd77b05"
-
 [[package]]
 name = "vec-arena"
 version = "1.0.0"
@@ -2151,82 +2144,6 @@ version = "0.10.0+wasi-snapshot-preview1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f"
 
-[[package]]
-name = "wasm-bindgen"
-version = "0.2.73"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "83240549659d187488f91f33c0f8547cbfef0b2088bc470c116d1d260ef623d9"
-dependencies = [
- "cfg-if 1.0.0",
- "wasm-bindgen-macro",
-]
-
-[[package]]
-name = "wasm-bindgen-backend"
-version = "0.2.73"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ae70622411ca953215ca6d06d3ebeb1e915f0f6613e3b495122878d7ebec7dae"
-dependencies = [
- "bumpalo",
- "lazy_static",
- "log",
- "proc-macro2",
- "quote",
- "syn",
- "wasm-bindgen-shared",
-]
-
-[[package]]
-name = "wasm-bindgen-futures"
-version = "0.4.23"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81b8b767af23de6ac18bf2168b690bed2902743ddf0fb39252e36f9e2bfc63ea"
-dependencies = [
- "cfg-if 1.0.0",
- "js-sys",
- "wasm-bindgen",
- "web-sys",
-]
-
-[[package]]
-name = "wasm-bindgen-macro"
-version = "0.2.73"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3e734d91443f177bfdb41969de821e15c516931c3c3db3d318fa1b68975d0f6f"
-dependencies = [
- "quote",
- "wasm-bindgen-macro-support",
-]
-
-[[package]]
-name = "wasm-bindgen-macro-support"
-version = "0.2.73"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d53739ff08c8a68b0fdbcd54c372b8ab800b1449ab3c9d706503bc7dd1621b2c"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
- "wasm-bindgen-backend",
- "wasm-bindgen-shared",
-]
-
-[[package]]
-name = "wasm-bindgen-shared"
-version = "0.2.73"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9a543ae66aa233d14bb765ed9af4a33e81b8b58d1584cf1b47ff8cd0b9e4489"
-
-[[package]]
-name = "web-sys"
-version = "0.3.50"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a905d57e488fec8861446d3393670fb50d27a262344013181c2cdf9fff5481be"
-dependencies = [
- "js-sys",
- "wasm-bindgen",
-]
-
 [[package]]
 name = "wepoll-sys"
 version = "3.0.1"
@@ -2303,9 +2220,12 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "arrayvec",
- "crossbeam-channel 0.5.0",
+ "crossbeam-channel",
+ "ctor",
  "dirs",
  "easy-parallel",
+ "env_logger",
+ "fsevent",
  "futures-core",
  "gpui",
  "ignore",
@@ -2314,6 +2234,7 @@ dependencies = [
  "log",
  "num_cpus",
  "parking_lot",
+ "postage",
  "rand 0.8.3",
  "rust-embed",
  "seahash",

Cargo.toml 🔗

@@ -1,5 +1,5 @@
 [workspace]
-members = ["zed", "gpui"]
+members = ["zed", "gpui", "fsevent", "scoped_pool"]
 
 [patch.crates-io]
 async-task = {git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e"}
@@ -9,3 +9,6 @@ cocoa = {git = "https://github.com/servo/core-foundation-rs", rev = "e9a65bb15d5
 cocoa-foundation = {git = "https://github.com/servo/core-foundation-rs", rev = "e9a65bb15d591ec22649e03659db8095d4f2dd60"}
 core-foundation = {git = "https://github.com/servo/core-foundation-rs", rev = "e9a65bb15d591ec22649e03659db8095d4f2dd60"}
 core-graphics = {git = "https://github.com/servo/core-foundation-rs", rev = "e9a65bb15d591ec22649e03659db8095d4f2dd60"}
+
+[profile.dev]
+split-debuginfo = "unpacked"

fsevent/Cargo.toml 🔗

@@ -0,0 +1,16 @@
+[package]
+name = "fsevent"
+version = "2.0.2"
+license = "MIT"
+edition = "2018"
+
+[dependencies]
+bitflags = "1"
+fsevent-sys = "3.0.2"
+parking_lot = "0.11.1"
+
+[dev-dependencies]
+tempdir = "0.3.7"
+
+[package.metadata.docs.rs]
+targets = ["x86_64-apple-darwin"]

fsevent/examples/events.rs 🔗

@@ -0,0 +1,16 @@
+use fsevent::EventStream;
+use std::{env::args, path::Path, time::Duration};
+
+fn main() {
+    let paths = args().skip(1).collect::<Vec<_>>();
+    let paths = paths.iter().map(Path::new).collect::<Vec<_>>();
+    assert!(paths.len() > 0, "Must pass 1 or more paths as arguments");
+    let (stream, _handle) = EventStream::new(&paths, Duration::from_millis(100));
+    stream.run(|events| {
+        eprintln!("event batch");
+        for event in events {
+            eprintln!("  {:?}", event);
+        }
+        true
+    });
+}

fsevent/src/lib.rs 🔗

@@ -0,0 +1,354 @@
+#![cfg(target_os = "macos")]
+
+use bitflags::bitflags;
+use fsevent_sys::{self as fs, core_foundation as cf};
+use parking_lot::Mutex;
+use std::{
+    convert::AsRef,
+    ffi::{c_void, CStr, OsStr},
+    os::unix::ffi::OsStrExt,
+    path::{Path, PathBuf},
+    slice,
+    sync::Arc,
+    time::Duration,
+};
+
+#[derive(Clone, Debug)]
+pub struct Event {
+    pub event_id: u64,
+    pub flags: StreamFlags,
+    pub path: PathBuf,
+}
+
+pub struct EventStream {
+    stream: fs::FSEventStreamRef,
+    state: Arc<Mutex<Lifecycle>>,
+    callback: Box<Option<RunCallback>>,
+}
+
+type RunCallback = Box<dyn FnMut(Vec<Event>) -> bool>;
+
+enum Lifecycle {
+    New,
+    Running(cf::CFRunLoopRef),
+    Stopped,
+}
+
+pub struct Handle(Arc<Mutex<Lifecycle>>);
+
+unsafe impl Send for EventStream {}
+unsafe impl Send for Lifecycle {}
+
+impl EventStream {
+    pub fn new(paths: &[&Path], latency: Duration) -> (Self, Handle) {
+        unsafe {
+            let callback = Box::new(None);
+            let stream_context = fs::FSEventStreamContext {
+                version: 0,
+                info: callback.as_ref() as *const _ as *mut c_void,
+                retain: None,
+                release: None,
+                copy_description: None,
+            };
+
+            let cf_paths =
+                cf::CFArrayCreateMutable(cf::kCFAllocatorDefault, 0, &cf::kCFTypeArrayCallBacks);
+            assert!(!cf_paths.is_null());
+
+            for path in paths {
+                let path_bytes = path.as_os_str().as_bytes();
+                let cf_url = cf::CFURLCreateFromFileSystemRepresentation(
+                    cf::kCFAllocatorDefault,
+                    path_bytes.as_ptr() as *const i8,
+                    path_bytes.len() as cf::CFIndex,
+                    false,
+                );
+                let cf_path = cf::CFURLCopyFileSystemPath(cf_url, cf::kCFURLPOSIXPathStyle);
+                cf::CFArrayAppendValue(cf_paths, cf_path);
+                cf::CFRelease(cf_path);
+                cf::CFRelease(cf_url);
+            }
+
+            let stream = fs::FSEventStreamCreate(
+                cf::kCFAllocatorDefault,
+                Self::trampoline,
+                &stream_context,
+                cf_paths,
+                fs::kFSEventStreamEventIdSinceNow,
+                latency.as_secs_f64(),
+                fs::kFSEventStreamCreateFlagFileEvents
+                    | fs::kFSEventStreamCreateFlagNoDefer
+                    | fs::kFSEventStreamCreateFlagWatchRoot,
+            );
+            cf::CFRelease(cf_paths);
+
+            let state = Arc::new(Mutex::new(Lifecycle::New));
+
+            (
+                EventStream {
+                    stream,
+                    state: state.clone(),
+                    callback,
+                },
+                Handle(state),
+            )
+        }
+    }
+
+    pub fn run<F>(mut self, f: F)
+    where
+        F: FnMut(Vec<Event>) -> bool + 'static,
+    {
+        *self.callback = Some(Box::new(f));
+        unsafe {
+            let run_loop = cf::CFRunLoopGetCurrent();
+            {
+                let mut state = self.state.lock();
+                match *state {
+                    Lifecycle::New => *state = Lifecycle::Running(run_loop),
+                    Lifecycle::Running(_) => unreachable!(),
+                    Lifecycle::Stopped => return,
+                }
+            }
+            fs::FSEventStreamScheduleWithRunLoop(self.stream, run_loop, cf::kCFRunLoopDefaultMode);
+
+            fs::FSEventStreamStart(self.stream);
+            cf::CFRunLoopRun();
+
+            fs::FSEventStreamFlushSync(self.stream);
+            fs::FSEventStreamStop(self.stream);
+            fs::FSEventStreamRelease(self.stream);
+        }
+    }
+
+    extern "C" fn trampoline(
+        stream_ref: fs::FSEventStreamRef,
+        info: *mut ::std::os::raw::c_void,
+        num: usize,                                 // size_t numEvents
+        event_paths: *mut ::std::os::raw::c_void,   // void *eventPaths
+        event_flags: *const ::std::os::raw::c_void, // const FSEventStreamEventFlags eventFlags[]
+        event_ids: *const ::std::os::raw::c_void,   // const FSEventStreamEventId eventIds[]
+    ) {
+        unsafe {
+            let event_paths = event_paths as *const *const ::std::os::raw::c_char;
+            let e_ptr = event_flags as *mut u32;
+            let i_ptr = event_ids as *mut u64;
+            let callback = (info as *mut Option<RunCallback>)
+                .as_mut()
+                .unwrap()
+                .as_mut()
+                .unwrap();
+
+            let paths = slice::from_raw_parts(event_paths, num);
+            let flags = slice::from_raw_parts_mut(e_ptr, num);
+            let ids = slice::from_raw_parts_mut(i_ptr, num);
+
+            let mut events = Vec::with_capacity(num);
+            for p in 0..num {
+                let path_c_str = CStr::from_ptr(paths[p]);
+                let path = PathBuf::from(OsStr::from_bytes(path_c_str.to_bytes()));
+                if let Some(flag) = StreamFlags::from_bits(flags[p]) {
+                    events.push(Event {
+                        event_id: ids[p],
+                        flags: flag,
+                        path,
+                    });
+                } else {
+                    debug_assert!(false, "unknown flag set for fs event: {}", flags[p]);
+                }
+            }
+
+            if !callback(events) {
+                fs::FSEventStreamStop(stream_ref);
+                cf::CFRunLoopStop(cf::CFRunLoopGetCurrent());
+            }
+        }
+    }
+}
+
+impl Drop for Handle {
+    fn drop(&mut self) {
+        let mut state = self.0.lock();
+        if let Lifecycle::Running(run_loop) = *state {
+            unsafe {
+                cf::CFRunLoopStop(run_loop);
+            }
+        }
+        *state = Lifecycle::Stopped;
+    }
+}
+
+// Synchronize with
+// /System/Library/Frameworks/CoreServices.framework/Versions/A/Frameworks/FSEvents.framework/Versions/A/Headers/FSEvents.h
+bitflags! {
+  #[repr(C)]
+  pub struct StreamFlags: u32 {
+    const NONE = 0x00000000;
+    const MUST_SCAN_SUBDIRS = 0x00000001;
+    const USER_DROPPED = 0x00000002;
+    const KERNEL_DROPPED = 0x00000004;
+    const IDS_WRAPPED = 0x00000008;
+    const HISTORY_DONE = 0x00000010;
+    const ROOT_CHANGED = 0x00000020;
+    const MOUNT = 0x00000040;
+    const UNMOUNT = 0x00000080;
+    const ITEM_CREATED = 0x00000100;
+    const ITEM_REMOVED = 0x00000200;
+    const INODE_META_MOD = 0x00000400;
+    const ITEM_RENAMED = 0x00000800;
+    const ITEM_MODIFIED = 0x00001000;
+    const FINDER_INFO_MOD = 0x00002000;
+    const ITEM_CHANGE_OWNER = 0x00004000;
+    const ITEM_XATTR_MOD = 0x00008000;
+    const IS_FILE = 0x00010000;
+    const IS_DIR = 0x00020000;
+    const IS_SYMLINK = 0x00040000;
+    const OWN_EVENT = 0x00080000;
+    const IS_HARDLINK = 0x00100000;
+    const IS_LAST_HARDLINK = 0x00200000;
+    const ITEM_CLONED = 0x400000;
+  }
+}
+
+impl std::fmt::Display for StreamFlags {
+    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+        if self.contains(StreamFlags::MUST_SCAN_SUBDIRS) {
+            let _d = write!(f, "MUST_SCAN_SUBDIRS ");
+        }
+        if self.contains(StreamFlags::USER_DROPPED) {
+            let _d = write!(f, "USER_DROPPED ");
+        }
+        if self.contains(StreamFlags::KERNEL_DROPPED) {
+            let _d = write!(f, "KERNEL_DROPPED ");
+        }
+        if self.contains(StreamFlags::IDS_WRAPPED) {
+            let _d = write!(f, "IDS_WRAPPED ");
+        }
+        if self.contains(StreamFlags::HISTORY_DONE) {
+            let _d = write!(f, "HISTORY_DONE ");
+        }
+        if self.contains(StreamFlags::ROOT_CHANGED) {
+            let _d = write!(f, "ROOT_CHANGED ");
+        }
+        if self.contains(StreamFlags::MOUNT) {
+            let _d = write!(f, "MOUNT ");
+        }
+        if self.contains(StreamFlags::UNMOUNT) {
+            let _d = write!(f, "UNMOUNT ");
+        }
+        if self.contains(StreamFlags::ITEM_CREATED) {
+            let _d = write!(f, "ITEM_CREATED ");
+        }
+        if self.contains(StreamFlags::ITEM_REMOVED) {
+            let _d = write!(f, "ITEM_REMOVED ");
+        }
+        if self.contains(StreamFlags::INODE_META_MOD) {
+            let _d = write!(f, "INODE_META_MOD ");
+        }
+        if self.contains(StreamFlags::ITEM_RENAMED) {
+            let _d = write!(f, "ITEM_RENAMED ");
+        }
+        if self.contains(StreamFlags::ITEM_MODIFIED) {
+            let _d = write!(f, "ITEM_MODIFIED ");
+        }
+        if self.contains(StreamFlags::FINDER_INFO_MOD) {
+            let _d = write!(f, "FINDER_INFO_MOD ");
+        }
+        if self.contains(StreamFlags::ITEM_CHANGE_OWNER) {
+            let _d = write!(f, "ITEM_CHANGE_OWNER ");
+        }
+        if self.contains(StreamFlags::ITEM_XATTR_MOD) {
+            let _d = write!(f, "ITEM_XATTR_MOD ");
+        }
+        if self.contains(StreamFlags::IS_FILE) {
+            let _d = write!(f, "IS_FILE ");
+        }
+        if self.contains(StreamFlags::IS_DIR) {
+            let _d = write!(f, "IS_DIR ");
+        }
+        if self.contains(StreamFlags::IS_SYMLINK) {
+            let _d = write!(f, "IS_SYMLINK ");
+        }
+        if self.contains(StreamFlags::OWN_EVENT) {
+            let _d = write!(f, "OWN_EVENT ");
+        }
+        if self.contains(StreamFlags::IS_LAST_HARDLINK) {
+            let _d = write!(f, "IS_LAST_HARDLINK ");
+        }
+        if self.contains(StreamFlags::IS_HARDLINK) {
+            let _d = write!(f, "IS_HARDLINK ");
+        }
+        if self.contains(StreamFlags::ITEM_CLONED) {
+            let _d = write!(f, "ITEM_CLONED ");
+        }
+        write!(f, "")
+    }
+}
+
+#[test]
+fn test_event_stream() {
+    use std::{fs, sync::mpsc, time::Duration};
+    use tempdir::TempDir;
+
+    let dir = TempDir::new("test_observe").unwrap();
+    let path = dir.path().canonicalize().unwrap();
+    fs::write(path.join("a"), "a contents").unwrap();
+
+    let (tx, rx) = mpsc::channel();
+    let (stream, handle) = EventStream::new(&[&path], Duration::from_millis(50));
+    std::thread::spawn(move || stream.run(move |events| tx.send(events.to_vec()).is_ok()));
+
+    fs::write(path.join("b"), "b contents").unwrap();
+    let events = rx.recv_timeout(Duration::from_millis(500)).unwrap();
+    let event = events.last().unwrap();
+    assert_eq!(event.path, path.join("b"));
+    assert!(event.flags.contains(StreamFlags::ITEM_CREATED));
+
+    fs::remove_file(path.join("a")).unwrap();
+    let events = rx.recv_timeout(Duration::from_millis(500)).unwrap();
+    let event = events.last().unwrap();
+    assert_eq!(event.path, path.join("a"));
+    assert!(event.flags.contains(StreamFlags::ITEM_REMOVED));
+    drop(handle);
+}
+
+#[test]
+fn test_event_stream_shutdown() {
+    use std::{fs, sync::mpsc, time::Duration};
+    use tempdir::TempDir;
+
+    let dir = TempDir::new("test_observe").unwrap();
+    let path = dir.path().canonicalize().unwrap();
+
+    let (tx, rx) = mpsc::channel();
+    let (stream, handle) = EventStream::new(&[&path], Duration::from_millis(50));
+    std::thread::spawn(move || {
+        stream.run({
+            let tx = tx.clone();
+            move |_| {
+                tx.send(()).unwrap();
+                true
+            }
+        });
+        tx.send(()).unwrap();
+    });
+
+    fs::write(path.join("b"), "b contents").unwrap();
+    rx.recv_timeout(Duration::from_millis(500)).unwrap();
+
+    drop(handle);
+    rx.recv_timeout(Duration::from_millis(500)).unwrap();
+}
+
+#[test]
+fn test_event_stream_shutdown_before_run() {
+    use std::time::Duration;
+    use tempdir::TempDir;
+
+    let dir = TempDir::new("test_observe").unwrap();
+    let path = dir.path().canonicalize().unwrap();
+
+    let (stream, handle) = EventStream::new(&[&path], Duration::from_millis(50));
+    drop(handle);
+    stream.run(|_| true);
+}

gpui/Cargo.toml 🔗

@@ -5,7 +5,6 @@ name = "gpui"
 version = "0.1.0"
 
 [dependencies]
-async-std = {version = "1.9.0", features = ["unstable"]}
 async-task = "4.0.3"
 ctor = "0.1"
 etagere = "0.2"
@@ -15,10 +14,11 @@ ordered-float = "2.1.1"
 parking_lot = "0.11.1"
 pathfinder_color = "0.5"
 pathfinder_geometry = "0.5"
+postage = {version = "0.4.1", features = ["futures-traits"]}
 rand = "0.8.3"
 replace_with = "0.1.7"
 resvg = "0.14"
-scoped-pool = "1.0.0"
+scoped-pool = {path = "../scoped_pool"}
 seahash = "4.1"
 serde = {version = "1.0.125", features = ["derive"]}
 serde_json = "1.0.64"
@@ -33,6 +33,7 @@ bindgen = "0.57"
 cc = "1.0.67"
 
 [dev-dependencies]
+env_logger = "0.8"
 png = "0.16"
 simplelog = "0.9"
 

gpui/src/app.rs 🔗

@@ -4,26 +4,27 @@ use crate::{
     keymap::{self, Keystroke},
     platform::{self, WindowOptions},
     presenter::Presenter,
-    util::post_inc,
+    util::{post_inc, timeout},
     AssetCache, AssetSource, ClipboardItem, FontCache, PathPromptOptions, TextLayoutCache,
 };
 use anyhow::{anyhow, Result};
-use async_std::sync::Condvar;
 use keymap::MatchResult;
 use parking_lot::Mutex;
 use pathfinder_geometry::{rect::RectF, vector::vec2f};
 use platform::Event;
+use postage::{sink::Sink as _, stream::Stream as _};
 use smol::prelude::*;
 use std::{
     any::{type_name, Any, TypeId},
     cell::RefCell,
-    collections::{HashMap, HashSet, VecDeque},
+    collections::{hash_map::Entry, HashMap, HashSet, VecDeque},
     fmt::{self, Debug},
     hash::{Hash, Hasher},
     marker::PhantomData,
     path::PathBuf,
     rc::{self, Rc},
     sync::{Arc, Weak},
+    time::Duration,
 };
 
 pub trait Entity: 'static + Send + Sync {
@@ -324,10 +325,6 @@ impl TestAppContext {
         result
     }
 
-    pub fn finish_pending_tasks(&self) -> impl Future<Output = ()> {
-        self.0.borrow().finish_pending_tasks()
-    }
-
     pub fn font_cache(&self) -> Arc<FontCache> {
         self.0.borrow().font_cache.clone()
     }
@@ -384,6 +381,7 @@ pub struct MutableAppContext {
     next_task_id: usize,
     subscriptions: HashMap<usize, Vec<Subscription>>,
     observations: HashMap<usize, Vec<Observation>>,
+    async_observations: HashMap<usize, postage::broadcast::Sender<()>>,
     window_invalidations: HashMap<usize, WindowInvalidation>,
     presenters_and_platform_windows:
         HashMap<usize, (Rc<RefCell<Presenter>>, Box<dyn platform::Window>)>,
@@ -391,7 +389,6 @@ pub struct MutableAppContext {
     foreground: Rc<executor::Foreground>,
     future_handlers: Rc<RefCell<HashMap<usize, FutureHandler>>>,
     stream_handlers: Rc<RefCell<HashMap<usize, StreamHandler>>>,
-    task_done: Arc<Condvar>,
     pending_effects: VecDeque<Effect>,
     pending_flushes: usize,
     flushing_effects: bool,
@@ -414,7 +411,7 @@ impl MutableAppContext {
                 windows: HashMap::new(),
                 ref_counts: Arc::new(Mutex::new(RefCounts::default())),
                 background: Arc::new(executor::Background::new()),
-                scoped_pool: scoped_pool::Pool::new(num_cpus::get()),
+                thread_pool: scoped_pool::Pool::new(num_cpus::get(), "app"),
             },
             actions: HashMap::new(),
             global_actions: HashMap::new(),
@@ -424,13 +421,13 @@ impl MutableAppContext {
             next_task_id: 0,
             subscriptions: HashMap::new(),
             observations: HashMap::new(),
+            async_observations: HashMap::new(),
             window_invalidations: HashMap::new(),
             presenters_and_platform_windows: HashMap::new(),
             debug_elements_callbacks: HashMap::new(),
             foreground,
             future_handlers: Default::default(),
             stream_handlers: Default::default(),
-            task_done: Default::default(),
             pending_effects: VecDeque::new(),
             pending_flushes: 0,
             flushing_effects: false,
@@ -877,11 +874,13 @@ impl MutableAppContext {
                 self.ctx.models.remove(&model_id);
                 self.subscriptions.remove(&model_id);
                 self.observations.remove(&model_id);
+                self.async_observations.remove(&model_id);
             }
 
             for (window_id, view_id) in dropped_views {
                 self.subscriptions.remove(&view_id);
                 self.observations.remove(&view_id);
+                self.async_observations.remove(&view_id);
                 if let Some(window) = self.ctx.windows.get_mut(&window_id) {
                     self.window_invalidations
                         .entry(window_id)
@@ -1047,6 +1046,12 @@ impl MutableAppContext {
                 }
             }
         }
+
+        if let Entry::Occupied(mut entry) = self.async_observations.entry(observed_id) {
+            if entry.get_mut().blocking_send(()).is_err() {
+                entry.remove_entry();
+            }
+        }
     }
 
     fn notify_view_observers(&mut self, window_id: usize, view_id: usize) {
@@ -1055,6 +1060,12 @@ impl MutableAppContext {
             .or_default()
             .updated
             .insert(view_id);
+
+        if let Entry::Occupied(mut entry) = self.async_observations.entry(view_id) {
+            if entry.get_mut().blocking_send(()).is_err() {
+                entry.remove_entry();
+            }
+        }
     }
 
     fn focus(&mut self, window_id: usize, focused_id: usize) {
@@ -1125,7 +1136,6 @@ impl MutableAppContext {
             task_id,
             task,
             TaskHandlerMap::Future(self.future_handlers.clone()),
-            self.task_done.clone(),
         )
     }
 
@@ -1161,7 +1171,6 @@ impl MutableAppContext {
             task_id,
             task,
             TaskHandlerMap::Stream(self.stream_handlers.clone()),
-            self.task_done.clone(),
         )
     }
 
@@ -1170,7 +1179,6 @@ impl MutableAppContext {
         let future_callback = self.future_handlers.borrow_mut().remove(&task_id).unwrap();
         let result = future_callback(output, self);
         self.flush_effects();
-        self.task_done.notify_all();
         result
     }
 
@@ -1192,44 +1200,9 @@ impl MutableAppContext {
         let result = (handler.done_callback)(self);
 
         self.flush_effects();
-        self.task_done.notify_all();
         result
     }
 
-    pub fn finish_pending_tasks(&self) -> impl Future<Output = ()> {
-        let mut pending_tasks = self
-            .future_handlers
-            .borrow()
-            .keys()
-            .cloned()
-            .collect::<HashSet<_>>();
-        pending_tasks.extend(self.stream_handlers.borrow().keys());
-
-        let task_done = self.task_done.clone();
-        let future_handlers = self.future_handlers.clone();
-        let stream_handlers = self.stream_handlers.clone();
-
-        async move {
-            // A Condvar expects the condition to be protected by a Mutex, but in this case we know
-            // that this logic will always run on the main thread.
-            let mutex = async_std::sync::Mutex::new(());
-            loop {
-                {
-                    let future_handlers = future_handlers.borrow();
-                    let stream_handlers = stream_handlers.borrow();
-                    pending_tasks.retain(|task_id| {
-                        future_handlers.contains_key(task_id)
-                            || stream_handlers.contains_key(task_id)
-                    });
-                    if pending_tasks.is_empty() {
-                        break;
-                    }
-                }
-                task_done.wait(mutex.lock().await).await;
-            }
-        }
-    }
-
     pub fn write_to_clipboard(&self, item: ClipboardItem) {
         self.platform.write_to_clipboard(item);
     }
@@ -1337,7 +1310,7 @@ pub struct AppContext {
     windows: HashMap<usize, Window>,
     background: Arc<executor::Background>,
     ref_counts: Arc<Mutex<RefCounts>>,
-    scoped_pool: scoped_pool::Pool,
+    thread_pool: scoped_pool::Pool,
 }
 
 impl AppContext {
@@ -1377,8 +1350,8 @@ impl AppContext {
         &self.background
     }
 
-    pub fn scoped_pool(&self) -> &scoped_pool::Pool {
-        &self.scoped_pool
+    pub fn thread_pool(&self) -> &scoped_pool::Pool {
+        &self.thread_pool
     }
 }
 
@@ -1526,6 +1499,10 @@ impl<'a, T: Entity> ModelContext<'a, T> {
         &self.app.ctx.background
     }
 
+    pub fn thread_pool(&self) -> &scoped_pool::Pool {
+        &self.app.ctx.thread_pool
+    }
+
     pub fn halt_stream(&mut self) {
         self.halt_stream = true;
     }
@@ -2008,6 +1985,47 @@ impl<T: Entity> ModelHandle<T> {
     {
         app.update_model(self, update)
     }
+
+    pub fn condition(
+        &self,
+        ctx: &TestAppContext,
+        mut predicate: impl 'static + FnMut(&T, &AppContext) -> bool,
+    ) -> impl 'static + Future<Output = ()> {
+        let mut ctx = ctx.0.borrow_mut();
+        let tx = ctx
+            .async_observations
+            .entry(self.id())
+            .or_insert_with(|| postage::broadcast::channel(128).0);
+        let mut rx = tx.subscribe();
+        let ctx = ctx.weak_self.as_ref().unwrap().upgrade().unwrap();
+        let handle = self.downgrade();
+
+        async move {
+            timeout(Duration::from_millis(200), async move {
+                loop {
+                    {
+                        let ctx = ctx.borrow();
+                        let ctx = ctx.as_ref();
+                        if predicate(
+                            handle
+                                .upgrade(ctx)
+                                .expect("model dropped with pending condition")
+                                .read(ctx),
+                            ctx,
+                        ) {
+                            break;
+                        }
+                    }
+
+                    rx.recv()
+                        .await
+                        .expect("model dropped with pending condition");
+                }
+            })
+            .await
+            .expect("condition timed out");
+        }
+    }
 }
 
 impl<T> Clone for ModelHandle<T> {
@@ -2141,6 +2159,47 @@ impl<T: View> ViewHandle<T> {
         app.focused_view_id(self.window_id)
             .map_or(false, |focused_id| focused_id == self.view_id)
     }
+
+    pub fn condition(
+        &self,
+        ctx: &TestAppContext,
+        mut predicate: impl 'static + FnMut(&T, &AppContext) -> bool,
+    ) -> impl 'static + Future<Output = ()> {
+        let mut ctx = ctx.0.borrow_mut();
+        let tx = ctx
+            .async_observations
+            .entry(self.id())
+            .or_insert_with(|| postage::broadcast::channel(128).0);
+        let mut rx = tx.subscribe();
+        let ctx = ctx.weak_self.as_ref().unwrap().upgrade().unwrap();
+        let handle = self.downgrade();
+
+        async move {
+            timeout(Duration::from_millis(200), async move {
+                loop {
+                    {
+                        let ctx = ctx.borrow();
+                        let ctx = ctx.as_ref();
+                        if predicate(
+                            handle
+                                .upgrade(ctx)
+                                .expect("model dropped with pending condition")
+                                .read(ctx),
+                            ctx,
+                        ) {
+                            break;
+                        }
+                    }
+
+                    rx.recv()
+                        .await
+                        .expect("model dropped with pending condition");
+                }
+            })
+            .await
+            .expect("condition timed out");
+        }
+    }
 }
 
 impl<T> Clone for ViewHandle<T> {
@@ -2364,7 +2423,6 @@ pub struct EntityTask<T> {
     id: usize,
     task: Option<executor::Task<T>>,
     handler_map: TaskHandlerMap,
-    task_done: Arc<Condvar>,
 }
 
 enum TaskHandlerMap {
@@ -2374,17 +2432,11 @@ enum TaskHandlerMap {
 }
 
 impl<T> EntityTask<T> {
-    fn new(
-        id: usize,
-        task: executor::Task<T>,
-        handler_map: TaskHandlerMap,
-        task_done: Arc<Condvar>,
-    ) -> Self {
+    fn new(id: usize, task: executor::Task<T>, handler_map: TaskHandlerMap) -> Self {
         Self {
             id,
             task: Some(task),
             handler_map,
-            task_done,
         }
     }
 
@@ -2424,7 +2476,6 @@ impl<T> Drop for EntityTask<T> {
                 map.borrow_mut().remove(&self.id);
             }
         }
-        self.task_done.notify_all();
     }
 }
 
@@ -2432,6 +2483,7 @@ impl<T> Drop for EntityTask<T> {
 mod tests {
     use super::*;
     use crate::elements::*;
+    use smol::future::poll_once;
 
     #[test]
     fn test_model_handles() {
@@ -3233,6 +3285,180 @@ mod tests {
         });
     }
 
+    #[test]
+    fn test_model_condition() {
+        struct Counter(usize);
+
+        impl super::Entity for Counter {
+            type Event = ();
+        }
+
+        impl Counter {
+            fn inc(&mut self, ctx: &mut ModelContext<Self>) {
+                self.0 += 1;
+                ctx.notify();
+            }
+        }
+
+        App::test_async((), |mut app| async move {
+            let model = app.add_model(|_| Counter(0));
+
+            let condition1 = model.condition(&app, |model, _| model.0 == 2);
+            let condition2 = model.condition(&app, |model, _| model.0 == 3);
+            smol::pin!(condition1, condition2);
+
+            model.update(&mut app, |model, ctx| model.inc(ctx));
+            assert_eq!(poll_once(&mut condition1).await, None);
+            assert_eq!(poll_once(&mut condition2).await, None);
+
+            model.update(&mut app, |model, ctx| model.inc(ctx));
+            assert_eq!(poll_once(&mut condition1).await, Some(()));
+            assert_eq!(poll_once(&mut condition2).await, None);
+
+            model.update(&mut app, |model, ctx| model.inc(ctx));
+            assert_eq!(poll_once(&mut condition2).await, Some(()));
+
+            // Broadcast channel should be removed if no conditions remain on next notification.
+            model.update(&mut app, |_, ctx| ctx.notify());
+            app.update(|ctx| assert!(ctx.async_observations.get(&model.id()).is_none()));
+        });
+    }
+
+    #[test]
+    #[should_panic]
+    fn test_model_condition_timeout() {
+        struct Model;
+
+        impl super::Entity for Model {
+            type Event = ();
+        }
+
+        App::test_async((), |mut app| async move {
+            let model = app.add_model(|_| Model);
+            model.condition(&app, |_, _| false).await;
+        });
+    }
+
+    #[test]
+    #[should_panic(expected = "model dropped with pending condition")]
+    fn test_model_condition_panic_on_drop() {
+        struct Model;
+
+        impl super::Entity for Model {
+            type Event = ();
+        }
+
+        App::test_async((), |mut app| async move {
+            let model = app.add_model(|_| Model);
+            let condition = model.condition(&app, |_, _| false);
+            app.update(|_| drop(model));
+            condition.await;
+        });
+    }
+
+    #[test]
+    fn test_view_condition() {
+        struct Counter(usize);
+
+        impl super::Entity for Counter {
+            type Event = ();
+        }
+
+        impl super::View for Counter {
+            fn ui_name() -> &'static str {
+                "test view"
+            }
+
+            fn render(&self, _: &AppContext) -> ElementBox {
+                Empty::new().boxed()
+            }
+        }
+
+        impl Counter {
+            fn inc(&mut self, ctx: &mut ViewContext<Self>) {
+                self.0 += 1;
+                ctx.notify();
+            }
+        }
+
+        App::test_async((), |mut app| async move {
+            let (_, view) = app.add_window(|_| Counter(0));
+
+            let condition1 = view.condition(&app, |view, _| view.0 == 2);
+            let condition2 = view.condition(&app, |view, _| view.0 == 3);
+            smol::pin!(condition1, condition2);
+
+            view.update(&mut app, |view, ctx| view.inc(ctx));
+            assert_eq!(poll_once(&mut condition1).await, None);
+            assert_eq!(poll_once(&mut condition2).await, None);
+
+            view.update(&mut app, |view, ctx| view.inc(ctx));
+            assert_eq!(poll_once(&mut condition1).await, Some(()));
+            assert_eq!(poll_once(&mut condition2).await, None);
+
+            view.update(&mut app, |view, ctx| view.inc(ctx));
+            assert_eq!(poll_once(&mut condition2).await, Some(()));
+
+            // Broadcast channel should be removed if no conditions remain on next notification.
+            view.update(&mut app, |_, ctx| ctx.notify());
+            app.update(|ctx| assert!(ctx.async_observations.get(&view.id()).is_none()));
+        });
+    }
+
+    #[test]
+    #[should_panic]
+    fn test_view_condition_timeout() {
+        struct View;
+
+        impl super::Entity for View {
+            type Event = ();
+        }
+
+        impl super::View for View {
+            fn ui_name() -> &'static str {
+                "test view"
+            }
+
+            fn render(&self, _: &AppContext) -> ElementBox {
+                Empty::new().boxed()
+            }
+        }
+
+        App::test_async((), |mut app| async move {
+            let (_, view) = app.add_window(|_| View);
+            view.condition(&app, |_, _| false).await;
+        });
+    }
+
+    #[test]
+    #[should_panic(expected = "model dropped with pending condition")]
+    fn test_view_condition_panic_on_drop() {
+        struct View;
+
+        impl super::Entity for View {
+            type Event = ();
+        }
+
+        impl super::View for View {
+            fn ui_name() -> &'static str {
+                "test view"
+            }
+
+            fn render(&self, _: &AppContext) -> ElementBox {
+                Empty::new().boxed()
+            }
+        }
+
+        App::test_async((), |mut app| async move {
+            let window_id = app.add_window(|_| View).0;
+            let view = app.add_view(window_id, |_| View);
+
+            let condition = view.condition(&app, |_, _| false);
+            app.update(|_| drop(view));
+            condition.await;
+        });
+    }
+
     // #[test]
     // fn test_ui_and_window_updates() {
     //     struct View {
@@ -3313,98 +3539,4 @@ mod tests {
     //         assert!(invalidation.removed.is_empty());
     //     });
     // }
-
-    #[test]
-    fn test_finish_pending_tasks() {
-        struct View;
-
-        impl Entity for View {
-            type Event = ();
-        }
-
-        impl super::View for View {
-            fn render<'a>(&self, _: &AppContext) -> ElementBox {
-                Empty::new().boxed()
-            }
-
-            fn ui_name() -> &'static str {
-                "View"
-            }
-        }
-
-        struct Model;
-
-        impl Entity for Model {
-            type Event = ();
-        }
-
-        App::test_async((), |mut app| async move {
-            let model = app.add_model(|_| Model);
-            let (_, view) = app.add_window(|_| View);
-
-            model.update(&mut app, |_, ctx| {
-                ctx.spawn(async {}, |_, _, _| {}).detach();
-                // Cancel this task
-                drop(ctx.spawn(async {}, |_, _, _| {}));
-            });
-
-            view.update(&mut app, |_, ctx| {
-                ctx.spawn(async {}, |_, _, _| {}).detach();
-                // Cancel this task
-                drop(ctx.spawn(async {}, |_, _, _| {}));
-            });
-
-            assert!(!app.0.borrow().future_handlers.borrow().is_empty());
-            app.finish_pending_tasks().await;
-            assert!(app.0.borrow().future_handlers.borrow().is_empty());
-            app.finish_pending_tasks().await; // Don't block if there are no tasks
-
-            model.update(&mut app, |_, ctx| {
-                ctx.spawn_stream(smol::stream::iter(vec![1, 2, 3]), |_, _, _| {}, |_, _| {})
-                    .detach();
-                // Cancel this task
-                drop(ctx.spawn_stream(smol::stream::iter(vec![1, 2, 3]), |_, _, _| {}, |_, _| {}));
-            });
-
-            view.update(&mut app, |_, ctx| {
-                ctx.spawn_stream(smol::stream::iter(vec![1, 2, 3]), |_, _, _| {}, |_, _| {})
-                    .detach();
-                // Cancel this task
-                drop(ctx.spawn_stream(smol::stream::iter(vec![1, 2, 3]), |_, _, _| {}, |_, _| {}));
-            });
-
-            assert!(!app.0.borrow().stream_handlers.borrow().is_empty());
-            app.finish_pending_tasks().await;
-            assert!(app.0.borrow().stream_handlers.borrow().is_empty());
-            app.finish_pending_tasks().await; // Don't block if there are no tasks
-
-            // Tasks are considered finished when we drop handles
-            let mut tasks = Vec::new();
-            model.update(&mut app, |_, ctx| {
-                tasks.push(Box::new(ctx.spawn(async {}, |_, _, _| {})));
-                tasks.push(Box::new(ctx.spawn_stream(
-                    smol::stream::iter(vec![1, 2, 3]),
-                    |_, _, _| {},
-                    |_, _| {},
-                )));
-            });
-
-            view.update(&mut app, |_, ctx| {
-                tasks.push(Box::new(ctx.spawn(async {}, |_, _, _| {})));
-                tasks.push(Box::new(ctx.spawn_stream(
-                    smol::stream::iter(vec![1, 2, 3]),
-                    |_, _, _| {},
-                    |_, _| {},
-                )));
-            });
-
-            assert!(!app.0.borrow().stream_handlers.borrow().is_empty());
-
-            let finish_pending_tasks = app.finish_pending_tasks();
-            drop(tasks);
-            finish_pending_tasks.await;
-            assert!(app.0.borrow().stream_handlers.borrow().is_empty());
-            app.finish_pending_tasks().await; // Don't block if there are no tasks
-        });
-    }
 }

gpui/src/elements/uniform_list.rs 🔗

@@ -68,16 +68,12 @@ where
 
     fn scroll(
         &self,
-        position: Vector2F,
+        _: Vector2F,
         delta: Vector2F,
         precise: bool,
         scroll_max: f32,
         ctx: &mut EventContext,
     ) -> bool {
-        if !self.rect().unwrap().contains_point(position) {
-            return false;
-        }
-
         if !precise {
             todo!("still need to handle non-precise scroll events from a mouse wheel");
         }
@@ -111,11 +107,6 @@ where
     fn scroll_top(&self) -> f32 {
         self.state.0.lock().scroll_top
     }
-
-    fn rect(&self) -> Option<RectF> {
-        todo!()
-        // try_rect(self.origin, self.size)
-    }
 }
 
 impl<F> Element for UniformList<F>
@@ -213,7 +204,7 @@ where
     fn dispatch_event(
         &mut self,
         event: &Event,
-        _: RectF,
+        bounds: RectF,
         layout: &mut Self::LayoutState,
         _: &mut Self::PaintState,
         ctx: &mut EventContext,
@@ -229,8 +220,10 @@ where
                 delta,
                 precise,
             } => {
-                if self.scroll(*position, *delta, *precise, layout.scroll_max, ctx) {
-                    handled = true;
+                if bounds.contains_point(*position) {
+                    if self.scroll(*position, *delta, *precise, layout.scroll_max, ctx) {
+                        handled = true;
+                    }
                 }
             }
             _ => {}

gpui/src/test.rs 🔗

@@ -1,8 +1,6 @@
 use ctor::ctor;
-use simplelog::SimpleLogger;
-use log::LevelFilter;
 
 #[ctor]
 fn init_logger() {
-    SimpleLogger::init(LevelFilter::Info, Default::default()).expect("could not initialize logger");
+    env_logger::init();
 }

gpui/src/util.rs 🔗

@@ -1,5 +1,20 @@
+use smol::future::FutureExt;
+use std::{future::Future, time::Duration};
+
 pub fn post_inc(value: &mut usize) -> usize {
     let prev = *value;
     *value += 1;
     prev
 }
+
+pub async fn timeout<F, T>(timeout: Duration, f: F) -> Result<T, ()>
+where
+    F: Future<Output = T>,
+{
+    let timer = async {
+        smol::Timer::after(timeout).await;
+        Err(())
+    };
+    let future = async move { Ok(f.await) };
+    timer.race(future).await
+}

scoped_pool/Cargo.toml 🔗

@@ -0,0 +1,8 @@
+[package]
+name = "scoped-pool"
+version = "0.0.1"
+license = "MIT"
+edition = "2018"
+
+[dependencies]
+crossbeam-channel = "0.5"

scoped_pool/src/lib.rs 🔗

@@ -0,0 +1,188 @@
+use crossbeam_channel as chan;
+use std::{marker::PhantomData, mem::transmute, thread};
+
+#[derive(Clone)]
+pub struct Pool {
+    req_tx: chan::Sender<Request>,
+    thread_count: usize,
+}
+
+pub struct Scope<'a> {
+    req_count: usize,
+    req_tx: chan::Sender<Request>,
+    resp_tx: chan::Sender<()>,
+    resp_rx: chan::Receiver<()>,
+    phantom: PhantomData<&'a ()>,
+}
+
+struct Request {
+    callback: Box<dyn FnOnce() + Send + 'static>,
+    resp_tx: chan::Sender<()>,
+}
+
+impl Pool {
+    pub fn new(thread_count: usize, name: impl AsRef<str>) -> Self {
+        let (req_tx, req_rx) = chan::unbounded();
+        for i in 0..thread_count {
+            thread::Builder::new()
+                .name(format!("scoped_pool {} {}", name.as_ref(), i))
+                .spawn({
+                    let req_rx = req_rx.clone();
+                    move || loop {
+                        match req_rx.recv() {
+                            Err(_) => break,
+                            Ok(Request { callback, resp_tx }) => {
+                                callback();
+                                resp_tx.send(()).ok();
+                            }
+                        }
+                    }
+                })
+                .expect("scoped_pool: failed to spawn thread");
+        }
+        Self {
+            req_tx,
+            thread_count,
+        }
+    }
+
+    pub fn thread_count(&self) -> usize {
+        self.thread_count
+    }
+
+    pub fn scoped<'scope, F, R>(&self, scheduler: F) -> R
+    where
+        F: FnOnce(&mut Scope<'scope>) -> R,
+    {
+        let (resp_tx, resp_rx) = chan::bounded(1);
+        let mut scope = Scope {
+            resp_tx,
+            resp_rx,
+            req_count: 0,
+            phantom: PhantomData,
+            req_tx: self.req_tx.clone(),
+        };
+        let result = scheduler(&mut scope);
+        scope.wait();
+        result
+    }
+}
+
+impl<'scope> Scope<'scope> {
+    pub fn execute<F>(&mut self, callback: F)
+    where
+        F: FnOnce() + Send + 'scope,
+    {
+        // Transmute the callback's lifetime to 'static: sound only because `Pool::scoped` calls
+        // `::wait` before returning. NOTE(review): a panic in the scheduler skips `wait`, and a
+        let callback = unsafe {
+            transmute::<Box<dyn FnOnce() + Send + 'scope>, Box<dyn FnOnce() + Send + 'static>>(
+                Box::new(callback),
+            )
+        };
+
+        self.req_count += 1;
+        self.req_tx
+            .send(Request {
+                callback,
+                resp_tx: self.resp_tx.clone(),
+            })
+            .unwrap();
+    }
+
+    fn wait(&self) {
+        for _ in 0..self.req_count {
+            self.resp_rx.recv().unwrap();
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use std::sync::{Arc, Mutex};
+
+    #[test]
+    fn test_execute() {
+        let pool = Pool::new(3, "test");
+
+        {
+            let vec = Mutex::new(Vec::new());
+            pool.scoped(|scope| {
+                for _ in 0..3 {
+                    scope.execute(|| {
+                        for i in 0..5 {
+                            vec.lock().unwrap().push(i);
+                        }
+                    });
+                }
+            });
+
+            let mut vec = vec.into_inner().unwrap();
+            vec.sort_unstable();
+            assert_eq!(vec, [0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4])
+        }
+    }
+
+    #[test]
+    fn test_clone_send_and_execute() {
+        let pool = Pool::new(3, "test");
+
+        let mut threads = Vec::new();
+        for _ in 0..3 {
+            threads.push(thread::spawn({
+                let pool = pool.clone();
+                move || {
+                    let vec = Mutex::new(Vec::new());
+                    pool.scoped(|scope| {
+                        for _ in 0..3 {
+                            scope.execute(|| {
+                                for i in 0..5 {
+                                    vec.lock().unwrap().push(i);
+                                }
+                            });
+                        }
+                    });
+                    let mut vec = vec.into_inner().unwrap();
+                    vec.sort_unstable();
+                    assert_eq!(vec, [0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4])
+                }
+            }));
+        }
+
+        for thread in threads {
+            thread.join().unwrap();
+        }
+    }
+
+    #[test]
+    fn test_share_and_execute() {
+        let pool = Arc::new(Pool::new(3, "test"));
+
+        let mut threads = Vec::new();
+        for _ in 0..3 {
+            threads.push(thread::spawn({
+                let pool = pool.clone();
+                move || {
+                    let vec = Mutex::new(Vec::new());
+                    pool.scoped(|scope| {
+                        for _ in 0..3 {
+                            scope.execute(|| {
+                                for i in 0..5 {
+                                    vec.lock().unwrap().push(i);
+                                }
+                            });
+                        }
+                    });
+                    let mut vec = vec.into_inner().unwrap();
+                    vec.sort_unstable();
+                    assert_eq!(vec, [0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4])
+                }
+            }));
+        }
+
+        for thread in threads {
+            thread.join().unwrap();
+        }
+    }
+}

zed/Cargo.toml 🔗

@@ -16,25 +16,29 @@ path = "src/main.rs"
 anyhow = "1.0.38"
 arrayvec = "0.5.2"
 crossbeam-channel = "0.5.0"
+ctor = "0.1.20"
 dirs = "3.0"
 easy-parallel = "3.1.0"
+fsevent = {path = "../fsevent"}
 futures-core = "0.3"
 gpui = {path = "../gpui"}
-ignore = {git = "https://github.com/zed-industries/ripgrep", rev = "1d152118f35b3e3590216709b86277062d79b8a0"}
+ignore = "0.4"
 lazy_static = "1.4.0"
 libc = "0.2"
 log = "0.4"
 num_cpus = "1.13.0"
 parking_lot = "0.11.1"
+postage = {version = "0.4.1", features = ["futures-traits"]}
 rand = "0.8.3"
 rust-embed = "5.9.0"
 seahash = "4.1"
+serde = {version = "1", features = ["derive"]}
 simplelog = "0.9"
-serde = { version = "1", features = ["derive"] }
 smallvec = "1.6.1"
 smol = "1.2.5"
 
 [dev-dependencies]
+env_logger = "0.8"
 serde_json = {version = "1.0.64", features = ["preserve_order"]}
 tempdir = "0.3.7"
 unindent = "0.1.7"

zed/src/editor/buffer/mod.rs 🔗

@@ -18,7 +18,7 @@ use crate::{
     worktree::FileHandle,
 };
 use anyhow::{anyhow, Result};
-use gpui::{AppContext, Entity, ModelContext};
+use gpui::{Entity, ModelContext};
 use lazy_static::lazy_static;
 use rand::prelude::*;
 use std::{
@@ -26,7 +26,7 @@ use std::{
     hash::BuildHasher,
     iter::{self, Iterator},
     ops::{AddAssign, Range},
-    path::PathBuf,
+    path::Path,
     str,
     sync::Arc,
     time::{Duration, Instant},
@@ -353,15 +353,29 @@ pub struct UndoOperation {
 }
 
 impl Buffer {
-    pub fn new<T: Into<Arc<str>>>(replica_id: ReplicaId, base_text: T) -> Self {
-        Self::build(replica_id, None, History::new(base_text.into()))
-    }
-
-    pub fn from_history(replica_id: ReplicaId, file: FileHandle, history: History) -> Self {
-        Self::build(replica_id, Some(file), history)
-    }
-
-    fn build(replica_id: ReplicaId, file: Option<FileHandle>, history: History) -> Self {
+    pub fn new<T: Into<Arc<str>>>(
+        replica_id: ReplicaId,
+        base_text: T,
+        ctx: &mut ModelContext<Self>,
+    ) -> Self {
+        Self::build(replica_id, None, History::new(base_text.into()), ctx)
+    }
+
+    pub fn from_history(
+        replica_id: ReplicaId,
+        file: FileHandle,
+        history: History,
+        ctx: &mut ModelContext<Self>,
+    ) -> Self {
+        Self::build(replica_id, Some(file), history, ctx)
+    }
+
+    fn build(
+        replica_id: ReplicaId,
+        file: Option<FileHandle>,
+        history: History,
+        ctx: &mut ModelContext<Self>,
+    ) -> Self {
         let mut insertion_splits = HashMap::default();
         let mut fragments = SumTree::new();
 
@@ -410,6 +424,10 @@ impl Buffer {
             });
         }
 
+        if let Some(file) = file.as_ref() {
+            file.observe_from_model(ctx, |_, _, ctx| ctx.emit(Event::FileHandleChanged));
+        }
+
         Self {
             file,
             fragments,
@@ -429,11 +447,11 @@ impl Buffer {
         }
     }
 
-    pub fn path(&self, app: &AppContext) -> Option<PathBuf> {
-        self.file.as_ref().map(|file| file.path(app))
+    pub fn path(&self) -> Option<Arc<Path>> {
+        self.file.as_ref().map(|file| file.path())
     }
 
-    pub fn entry_id(&self) -> Option<(usize, usize)> {
+    pub fn entry_id(&self) -> Option<(usize, Arc<Path>)> {
         self.file.as_ref().map(|file| file.entry_id())
     }
 
@@ -445,6 +463,8 @@ impl Buffer {
 
     pub fn save(&mut self, ctx: &mut ModelContext<Self>) -> LocalBoxFuture<'static, Result<()>> {
         if let Some(file) = &self.file {
+            dbg!(file.path());
+
             let snapshot = self.snapshot();
             let version = self.version.clone();
             let save_task = file.save(snapshot, ctx.as_ref());
@@ -1772,6 +1792,7 @@ pub enum Event {
     Edited(Vec<Edit>),
     Dirtied,
     Saved,
+    FileHandleChanged,
 }
 
 impl Entity for Buffer {
@@ -2305,21 +2326,24 @@ mod tests {
     use std::{cell::RefCell, rc::Rc};
 
     #[test]
-    fn test_edit() -> Result<()> {
-        let mut buffer = Buffer::new(0, "abc");
-        assert_eq!(buffer.text(), "abc");
-        buffer.edit(vec![3..3], "def", None)?;
-        assert_eq!(buffer.text(), "abcdef");
-        buffer.edit(vec![0..0], "ghi", None)?;
-        assert_eq!(buffer.text(), "ghiabcdef");
-        buffer.edit(vec![5..5], "jkl", None)?;
-        assert_eq!(buffer.text(), "ghiabjklcdef");
-        buffer.edit(vec![6..7], "", None)?;
-        assert_eq!(buffer.text(), "ghiabjlcdef");
-        buffer.edit(vec![4..9], "mno", None)?;
-        assert_eq!(buffer.text(), "ghiamnoef");
-
-        Ok(())
+    fn test_edit() {
+        App::test((), |ctx| {
+            ctx.add_model(|ctx| {
+                let mut buffer = Buffer::new(0, "abc", ctx);
+                assert_eq!(buffer.text(), "abc");
+                buffer.edit(vec![3..3], "def", None).unwrap();
+                assert_eq!(buffer.text(), "abcdef");
+                buffer.edit(vec![0..0], "ghi", None).unwrap();
+                assert_eq!(buffer.text(), "ghiabcdef");
+                buffer.edit(vec![5..5], "jkl", None).unwrap();
+                assert_eq!(buffer.text(), "ghiabjklcdef");
+                buffer.edit(vec![6..7], "", None).unwrap();
+                assert_eq!(buffer.text(), "ghiabjlcdef");
+                buffer.edit(vec![4..9], "mno", None).unwrap();
+                assert_eq!(buffer.text(), "ghiamnoef");
+                buffer
+            });
+        })
     }
 
     #[test]
@@ -2329,8 +2353,8 @@ mod tests {
             let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
             let buffer_2_events = Rc::new(RefCell::new(Vec::new()));
 
-            let buffer1 = app.add_model(|_| Buffer::new(0, "abcdef"));
-            let buffer2 = app.add_model(|_| Buffer::new(1, "abcdef"));
+            let buffer1 = app.add_model(|ctx| Buffer::new(0, "abcdef", ctx));
+            let buffer2 = app.add_model(|ctx| Buffer::new(1, "abcdef", ctx));
             let mut buffer_ops = Vec::new();
             buffer1.update(app, |buffer, ctx| {
                 let buffer_1_events = buffer_1_events.clone();
@@ -2408,187 +2432,207 @@ mod tests {
     #[test]
     fn test_random_edits() {
         for seed in 0..100 {
-            println!("{:?}", seed);
-            let mut rng = &mut StdRng::seed_from_u64(seed);
+            App::test((), |ctx| {
+                println!("{:?}", seed);
+                let mut rng = &mut StdRng::seed_from_u64(seed);
+
+                let reference_string_len = rng.gen_range(0..3);
+                let mut reference_string = RandomCharIter::new(&mut rng)
+                    .take(reference_string_len)
+                    .collect::<String>();
+                ctx.add_model(|ctx| {
+                    let mut buffer = Buffer::new(0, reference_string.as_str(), ctx);
+                    let mut buffer_versions = Vec::new();
+                    for _i in 0..10 {
+                        let (old_ranges, new_text, _) = buffer.randomly_mutate(rng, None);
+                        for old_range in old_ranges.iter().rev() {
+                            reference_string = [
+                                &reference_string[0..old_range.start],
+                                new_text.as_str(),
+                                &reference_string[old_range.end..],
+                            ]
+                            .concat();
+                        }
+                        assert_eq!(buffer.text(), reference_string);
 
-            let reference_string_len = rng.gen_range(0..3);
-            let mut reference_string = RandomCharIter::new(&mut rng)
-                .take(reference_string_len)
-                .collect::<String>();
-            let mut buffer = Buffer::new(0, reference_string.as_str());
-            let mut buffer_versions = Vec::new();
-
-            for _i in 0..10 {
-                let (old_ranges, new_text, _) = buffer.randomly_mutate(rng, None);
-                for old_range in old_ranges.iter().rev() {
-                    reference_string = [
-                        &reference_string[0..old_range.start],
-                        new_text.as_str(),
-                        &reference_string[old_range.end..],
-                    ]
-                    .concat();
-                }
-                assert_eq!(buffer.text(), reference_string);
+                        if rng.gen_bool(0.25) {
+                            buffer.randomly_undo_redo(rng);
+                            reference_string = buffer.text();
+                        }
 
-                if rng.gen_bool(0.25) {
-                    buffer.randomly_undo_redo(rng);
-                    reference_string = buffer.text();
-                }
+                        {
+                            let line_lengths = line_lengths_in_range(&buffer, 0..buffer.len());
 
-                {
-                    let line_lengths = line_lengths_in_range(&buffer, 0..buffer.len());
+                            for (len, rows) in &line_lengths {
+                                for row in rows {
+                                    assert_eq!(buffer.line_len(*row).unwrap(), *len);
+                                }
+                            }
 
-                    for (len, rows) in &line_lengths {
-                        for row in rows {
-                            assert_eq!(buffer.line_len(*row).unwrap(), *len);
+                            let (longest_column, longest_rows) =
+                                line_lengths.iter().next_back().unwrap();
+                            let rightmost_point = buffer.rightmost_point();
+                            assert_eq!(rightmost_point.column, *longest_column);
+                            assert!(longest_rows.contains(&rightmost_point.row));
                         }
-                    }
 
-                    let (longest_column, longest_rows) = line_lengths.iter().next_back().unwrap();
-                    let rightmost_point = buffer.rightmost_point();
-                    assert_eq!(rightmost_point.column, *longest_column);
-                    assert!(longest_rows.contains(&rightmost_point.row));
-                }
+                        for _ in 0..5 {
+                            let end = rng.gen_range(0..buffer.len() + 1);
+                            let start = rng.gen_range(0..end + 1);
+
+                            let line_lengths = line_lengths_in_range(&buffer, start..end);
+                            let (longest_column, longest_rows) =
+                                line_lengths.iter().next_back().unwrap();
+                            let range_sum = buffer.text_summary_for_range(start..end);
+                            assert_eq!(range_sum.rightmost_point.column, *longest_column);
+                            assert!(longest_rows.contains(&range_sum.rightmost_point.row));
+                            let range_text = &buffer.text()[start..end];
+                            assert_eq!(range_sum.chars, range_text.chars().count());
+                            assert_eq!(range_sum.bytes, range_text.len());
+                        }
 
-                for _ in 0..5 {
-                    let end = rng.gen_range(0..buffer.len() + 1);
-                    let start = rng.gen_range(0..end + 1);
-
-                    let line_lengths = line_lengths_in_range(&buffer, start..end);
-                    let (longest_column, longest_rows) = line_lengths.iter().next_back().unwrap();
-                    let range_sum = buffer.text_summary_for_range(start..end);
-                    assert_eq!(range_sum.rightmost_point.column, *longest_column);
-                    assert!(longest_rows.contains(&range_sum.rightmost_point.row));
-                    let range_text = &buffer.text()[start..end];
-                    assert_eq!(range_sum.chars, range_text.chars().count());
-                    assert_eq!(range_sum.bytes, range_text.len());
-                }
+                        if rng.gen_bool(0.3) {
+                            buffer_versions.push(buffer.clone());
+                        }
+                    }
 
-                if rng.gen_bool(0.3) {
-                    buffer_versions.push(buffer.clone());
-                }
-            }
+                    for mut old_buffer in buffer_versions {
+                        let mut delta = 0_isize;
+                        for Edit {
+                            old_range,
+                            new_range,
+                        } in buffer.edits_since(old_buffer.version.clone())
+                        {
+                            let old_len = old_range.end - old_range.start;
+                            let new_len = new_range.end - new_range.start;
+                            let old_start = (old_range.start as isize + delta) as usize;
+                            let new_text: String =
+                                buffer.text_for_range(new_range).unwrap().collect();
+                            old_buffer
+                                .edit(Some(old_start..old_start + old_len), new_text, None)
+                                .unwrap();
+
+                            delta += new_len as isize - old_len as isize;
+                        }
+                        assert_eq!(old_buffer.text(), buffer.text());
+                    }
 
-            for mut old_buffer in buffer_versions {
-                let mut delta = 0_isize;
-                for Edit {
-                    old_range,
-                    new_range,
-                } in buffer.edits_since(old_buffer.version.clone())
-                {
-                    let old_len = old_range.end - old_range.start;
-                    let new_len = new_range.end - new_range.start;
-                    let old_start = (old_range.start as isize + delta) as usize;
-                    let new_text: String = buffer.text_for_range(new_range).unwrap().collect();
-                    old_buffer
-                        .edit(Some(old_start..old_start + old_len), new_text, None)
-                        .unwrap();
-
-                    delta += new_len as isize - old_len as isize;
-                }
-                assert_eq!(old_buffer.text(), buffer.text());
-            }
+                    buffer
+                })
+            });
         }
     }
 
     #[test]
-    fn test_line_len() -> Result<()> {
-        let mut buffer = Buffer::new(0, "");
-        buffer.edit(vec![0..0], "abcd\nefg\nhij", None)?;
-        buffer.edit(vec![12..12], "kl\nmno", None)?;
-        buffer.edit(vec![18..18], "\npqrs\n", None)?;
-        buffer.edit(vec![18..21], "\nPQ", None)?;
-
-        assert_eq!(buffer.line_len(0)?, 4);
-        assert_eq!(buffer.line_len(1)?, 3);
-        assert_eq!(buffer.line_len(2)?, 5);
-        assert_eq!(buffer.line_len(3)?, 3);
-        assert_eq!(buffer.line_len(4)?, 4);
-        assert_eq!(buffer.line_len(5)?, 0);
-        assert!(buffer.line_len(6).is_err());
-
-        Ok(())
+    fn test_line_len() {
+        App::test((), |ctx| {
+            ctx.add_model(|ctx| {
+                let mut buffer = Buffer::new(0, "", ctx);
+                buffer.edit(vec![0..0], "abcd\nefg\nhij", None).unwrap();
+                buffer.edit(vec![12..12], "kl\nmno", None).unwrap();
+                buffer.edit(vec![18..18], "\npqrs\n", None).unwrap();
+                buffer.edit(vec![18..21], "\nPQ", None).unwrap();
+
+                assert_eq!(buffer.line_len(0).unwrap(), 4);
+                assert_eq!(buffer.line_len(1).unwrap(), 3);
+                assert_eq!(buffer.line_len(2).unwrap(), 5);
+                assert_eq!(buffer.line_len(3).unwrap(), 3);
+                assert_eq!(buffer.line_len(4).unwrap(), 4);
+                assert_eq!(buffer.line_len(5).unwrap(), 0);
+                assert!(buffer.line_len(6).is_err());
+                buffer
+            });
+        });
     }
 
     #[test]
-    fn test_rightmost_point() -> Result<()> {
-        let mut buffer = Buffer::new(0, "");
-        assert_eq!(buffer.rightmost_point().row, 0);
-        buffer.edit(vec![0..0], "abcd\nefg\nhij", None)?;
-        assert_eq!(buffer.rightmost_point().row, 0);
-        buffer.edit(vec![12..12], "kl\nmno", None)?;
-        assert_eq!(buffer.rightmost_point().row, 2);
-        buffer.edit(vec![18..18], "\npqrs", None)?;
-        assert_eq!(buffer.rightmost_point().row, 2);
-        buffer.edit(vec![10..12], "", None)?;
-        assert_eq!(buffer.rightmost_point().row, 0);
-        buffer.edit(vec![24..24], "tuv", None)?;
-        assert_eq!(buffer.rightmost_point().row, 4);
-
-        println!("{:?}", buffer.text());
-
-        Ok(())
+    fn test_rightmost_point() {
+        App::test((), |ctx| {
+            ctx.add_model(|ctx| {
+                let mut buffer = Buffer::new(0, "", ctx);
+                assert_eq!(buffer.rightmost_point().row, 0);
+                buffer.edit(vec![0..0], "abcd\nefg\nhij", None).unwrap();
+                assert_eq!(buffer.rightmost_point().row, 0);
+                buffer.edit(vec![12..12], "kl\nmno", None).unwrap();
+                assert_eq!(buffer.rightmost_point().row, 2);
+                buffer.edit(vec![18..18], "\npqrs", None).unwrap();
+                assert_eq!(buffer.rightmost_point().row, 2);
+                buffer.edit(vec![10..12], "", None).unwrap();
+                assert_eq!(buffer.rightmost_point().row, 0);
+                buffer.edit(vec![24..24], "tuv", None).unwrap();
+                assert_eq!(buffer.rightmost_point().row, 4);
+                buffer
+            });
+        });
     }
 
     #[test]
     fn test_text_summary_for_range() {
-        let buffer = Buffer::new(0, "ab\nefg\nhklm\nnopqrs\ntuvwxyz");
-        let text = Text::from(buffer.text());
-
-        assert_eq!(
-            buffer.text_summary_for_range(1..3),
-            text.slice(1..3).summary()
-        );
-        assert_eq!(
-            buffer.text_summary_for_range(1..12),
-            text.slice(1..12).summary()
-        );
-        assert_eq!(
-            buffer.text_summary_for_range(0..20),
-            text.slice(0..20).summary()
-        );
-        assert_eq!(
-            buffer.text_summary_for_range(0..22),
-            text.slice(0..22).summary()
-        );
-        assert_eq!(
-            buffer.text_summary_for_range(7..22),
-            text.slice(7..22).summary()
-        );
+        App::test((), |ctx| {
+            ctx.add_model(|ctx| {
+                let buffer = Buffer::new(0, "ab\nefg\nhklm\nnopqrs\ntuvwxyz", ctx);
+                let text = Text::from(buffer.text());
+                assert_eq!(
+                    buffer.text_summary_for_range(1..3),
+                    text.slice(1..3).summary()
+                );
+                assert_eq!(
+                    buffer.text_summary_for_range(1..12),
+                    text.slice(1..12).summary()
+                );
+                assert_eq!(
+                    buffer.text_summary_for_range(0..20),
+                    text.slice(0..20).summary()
+                );
+                assert_eq!(
+                    buffer.text_summary_for_range(0..22),
+                    text.slice(0..22).summary()
+                );
+                assert_eq!(
+                    buffer.text_summary_for_range(7..22),
+                    text.slice(7..22).summary()
+                );
+                buffer
+            });
+        });
     }
 
     #[test]
-    fn test_chars_at() -> Result<()> {
-        let mut buffer = Buffer::new(0, "");
-        buffer.edit(vec![0..0], "abcd\nefgh\nij", None)?;
-        buffer.edit(vec![12..12], "kl\nmno", None)?;
-        buffer.edit(vec![18..18], "\npqrs", None)?;
-        buffer.edit(vec![18..21], "\nPQ", None)?;
+    fn test_chars_at() {
+        App::test((), |ctx| {
+            ctx.add_model(|ctx| {
+                let mut buffer = Buffer::new(0, "", ctx);
+                buffer.edit(vec![0..0], "abcd\nefgh\nij", None).unwrap();
+                buffer.edit(vec![12..12], "kl\nmno", None).unwrap();
+                buffer.edit(vec![18..18], "\npqrs", None).unwrap();
+                buffer.edit(vec![18..21], "\nPQ", None).unwrap();
 
-        let chars = buffer.chars_at(Point::new(0, 0))?;
-        assert_eq!(chars.collect::<String>(), "abcd\nefgh\nijkl\nmno\nPQrs");
+                let chars = buffer.chars_at(Point::new(0, 0)).unwrap();
+                assert_eq!(chars.collect::<String>(), "abcd\nefgh\nijkl\nmno\nPQrs");
 
-        let chars = buffer.chars_at(Point::new(1, 0))?;
-        assert_eq!(chars.collect::<String>(), "efgh\nijkl\nmno\nPQrs");
+                let chars = buffer.chars_at(Point::new(1, 0)).unwrap();
+                assert_eq!(chars.collect::<String>(), "efgh\nijkl\nmno\nPQrs");
 
-        let chars = buffer.chars_at(Point::new(2, 0))?;
-        assert_eq!(chars.collect::<String>(), "ijkl\nmno\nPQrs");
+                let chars = buffer.chars_at(Point::new(2, 0)).unwrap();
+                assert_eq!(chars.collect::<String>(), "ijkl\nmno\nPQrs");
 
-        let chars = buffer.chars_at(Point::new(3, 0))?;
-        assert_eq!(chars.collect::<String>(), "mno\nPQrs");
+                let chars = buffer.chars_at(Point::new(3, 0)).unwrap();
+                assert_eq!(chars.collect::<String>(), "mno\nPQrs");
 
-        let chars = buffer.chars_at(Point::new(4, 0))?;
-        assert_eq!(chars.collect::<String>(), "PQrs");
+                let chars = buffer.chars_at(Point::new(4, 0)).unwrap();
+                assert_eq!(chars.collect::<String>(), "PQrs");
 
-        // Regression test:
-        let mut buffer = Buffer::new(0, "");
-        buffer.edit(vec![0..0], "[workspace]\nmembers = [\n    \"xray_core\",\n    \"xray_server\",\n    \"xray_cli\",\n    \"xray_wasm\",\n]\n", None)?;
-        buffer.edit(vec![60..60], "\n", None)?;
+                // Regression test:
+                let mut buffer = Buffer::new(0, "", ctx);
+                buffer.edit(vec![0..0], "[workspace]\nmembers = [\n    \"xray_core\",\n    \"xray_server\",\n    \"xray_cli\",\n    \"xray_wasm\",\n]\n", None).unwrap();
+                buffer.edit(vec![60..60], "\n", None).unwrap();
 
-        let chars = buffer.chars_at(Point::new(6, 0))?;
-        assert_eq!(chars.collect::<String>(), "    \"xray_wasm\",\n]\n");
+                let chars = buffer.chars_at(Point::new(6, 0)).unwrap();
+                assert_eq!(chars.collect::<String>(), "    \"xray_wasm\",\n]\n");
 
-        Ok(())
+                buffer
+            });
+        });
     }
 
     // #[test]
@@ -2706,177 +2750,202 @@ mod tests {
     }
 
     #[test]
-    fn test_anchors() -> Result<()> {
-        let mut buffer = Buffer::new(0, "");
-        buffer.edit(vec![0..0], "abc", None)?;
-        let left_anchor = buffer.anchor_before(2).unwrap();
-        let right_anchor = buffer.anchor_after(2).unwrap();
-
-        buffer.edit(vec![1..1], "def\n", None)?;
-        assert_eq!(buffer.text(), "adef\nbc");
-        assert_eq!(left_anchor.to_offset(&buffer).unwrap(), 6);
-        assert_eq!(right_anchor.to_offset(&buffer).unwrap(), 6);
-        assert_eq!(
-            left_anchor.to_point(&buffer).unwrap(),
-            Point { row: 1, column: 1 }
-        );
-        assert_eq!(
-            right_anchor.to_point(&buffer).unwrap(),
-            Point { row: 1, column: 1 }
-        );
+    fn test_anchors() {
+        App::test((), |ctx| {
+            ctx.add_model(|ctx| {
+                let mut buffer = Buffer::new(0, "", ctx);
+                buffer.edit(vec![0..0], "abc", None).unwrap();
+                let left_anchor = buffer.anchor_before(2).unwrap();
+                let right_anchor = buffer.anchor_after(2).unwrap();
+
+                buffer.edit(vec![1..1], "def\n", None).unwrap();
+                assert_eq!(buffer.text(), "adef\nbc");
+                assert_eq!(left_anchor.to_offset(&buffer).unwrap(), 6);
+                assert_eq!(right_anchor.to_offset(&buffer).unwrap(), 6);
+                assert_eq!(
+                    left_anchor.to_point(&buffer).unwrap(),
+                    Point { row: 1, column: 1 }
+                );
+                assert_eq!(
+                    right_anchor.to_point(&buffer).unwrap(),
+                    Point { row: 1, column: 1 }
+                );
 
-        buffer.edit(vec![2..3], "", None)?;
-        assert_eq!(buffer.text(), "adf\nbc");
-        assert_eq!(left_anchor.to_offset(&buffer).unwrap(), 5);
-        assert_eq!(right_anchor.to_offset(&buffer).unwrap(), 5);
-        assert_eq!(
-            left_anchor.to_point(&buffer).unwrap(),
-            Point { row: 1, column: 1 }
-        );
-        assert_eq!(
-            right_anchor.to_point(&buffer).unwrap(),
-            Point { row: 1, column: 1 }
-        );
+                buffer.edit(vec![2..3], "", None).unwrap();
+                assert_eq!(buffer.text(), "adf\nbc");
+                assert_eq!(left_anchor.to_offset(&buffer).unwrap(), 5);
+                assert_eq!(right_anchor.to_offset(&buffer).unwrap(), 5);
+                assert_eq!(
+                    left_anchor.to_point(&buffer).unwrap(),
+                    Point { row: 1, column: 1 }
+                );
+                assert_eq!(
+                    right_anchor.to_point(&buffer).unwrap(),
+                    Point { row: 1, column: 1 }
+                );
 
-        buffer.edit(vec![5..5], "ghi\n", None)?;
-        assert_eq!(buffer.text(), "adf\nbghi\nc");
-        assert_eq!(left_anchor.to_offset(&buffer).unwrap(), 5);
-        assert_eq!(right_anchor.to_offset(&buffer).unwrap(), 9);
-        assert_eq!(
-            left_anchor.to_point(&buffer).unwrap(),
-            Point { row: 1, column: 1 }
-        );
-        assert_eq!(
-            right_anchor.to_point(&buffer).unwrap(),
-            Point { row: 2, column: 0 }
-        );
+                buffer.edit(vec![5..5], "ghi\n", None).unwrap();
+                assert_eq!(buffer.text(), "adf\nbghi\nc");
+                assert_eq!(left_anchor.to_offset(&buffer).unwrap(), 5);
+                assert_eq!(right_anchor.to_offset(&buffer).unwrap(), 9);
+                assert_eq!(
+                    left_anchor.to_point(&buffer).unwrap(),
+                    Point { row: 1, column: 1 }
+                );
+                assert_eq!(
+                    right_anchor.to_point(&buffer).unwrap(),
+                    Point { row: 2, column: 0 }
+                );
 
-        buffer.edit(vec![7..9], "", None)?;
-        assert_eq!(buffer.text(), "adf\nbghc");
-        assert_eq!(left_anchor.to_offset(&buffer).unwrap(), 5);
-        assert_eq!(right_anchor.to_offset(&buffer).unwrap(), 7);
-        assert_eq!(
-            left_anchor.to_point(&buffer).unwrap(),
-            Point { row: 1, column: 1 },
-        );
-        assert_eq!(
-            right_anchor.to_point(&buffer).unwrap(),
-            Point { row: 1, column: 3 }
-        );
+                buffer.edit(vec![7..9], "", None).unwrap();
+                assert_eq!(buffer.text(), "adf\nbghc");
+                assert_eq!(left_anchor.to_offset(&buffer).unwrap(), 5);
+                assert_eq!(right_anchor.to_offset(&buffer).unwrap(), 7);
+                assert_eq!(
+                    left_anchor.to_point(&buffer).unwrap(),
+                    Point { row: 1, column: 1 },
+                );
+                assert_eq!(
+                    right_anchor.to_point(&buffer).unwrap(),
+                    Point { row: 1, column: 3 }
+                );
 
-        // Ensure anchoring to a point is equivalent to anchoring to an offset.
-        assert_eq!(
-            buffer.anchor_before(Point { row: 0, column: 0 })?,
-            buffer.anchor_before(0)?
-        );
-        assert_eq!(
-            buffer.anchor_before(Point { row: 0, column: 1 })?,
-            buffer.anchor_before(1)?
-        );
-        assert_eq!(
-            buffer.anchor_before(Point { row: 0, column: 2 })?,
-            buffer.anchor_before(2)?
-        );
-        assert_eq!(
-            buffer.anchor_before(Point { row: 0, column: 3 })?,
-            buffer.anchor_before(3)?
-        );
-        assert_eq!(
-            buffer.anchor_before(Point { row: 1, column: 0 })?,
-            buffer.anchor_before(4)?
-        );
-        assert_eq!(
-            buffer.anchor_before(Point { row: 1, column: 1 })?,
-            buffer.anchor_before(5)?
-        );
-        assert_eq!(
-            buffer.anchor_before(Point { row: 1, column: 2 })?,
-            buffer.anchor_before(6)?
-        );
-        assert_eq!(
-            buffer.anchor_before(Point { row: 1, column: 3 })?,
-            buffer.anchor_before(7)?
-        );
-        assert_eq!(
-            buffer.anchor_before(Point { row: 1, column: 4 })?,
-            buffer.anchor_before(8)?
-        );
+                // Ensure anchoring to a point is equivalent to anchoring to an offset.
+                assert_eq!(
+                    buffer.anchor_before(Point { row: 0, column: 0 }).unwrap(),
+                    buffer.anchor_before(0).unwrap()
+                );
+                assert_eq!(
+                    buffer.anchor_before(Point { row: 0, column: 1 }).unwrap(),
+                    buffer.anchor_before(1).unwrap()
+                );
+                assert_eq!(
+                    buffer.anchor_before(Point { row: 0, column: 2 }).unwrap(),
+                    buffer.anchor_before(2).unwrap()
+                );
+                assert_eq!(
+                    buffer.anchor_before(Point { row: 0, column: 3 }).unwrap(),
+                    buffer.anchor_before(3).unwrap()
+                );
+                assert_eq!(
+                    buffer.anchor_before(Point { row: 1, column: 0 }).unwrap(),
+                    buffer.anchor_before(4).unwrap()
+                );
+                assert_eq!(
+                    buffer.anchor_before(Point { row: 1, column: 1 }).unwrap(),
+                    buffer.anchor_before(5).unwrap()
+                );
+                assert_eq!(
+                    buffer.anchor_before(Point { row: 1, column: 2 }).unwrap(),
+                    buffer.anchor_before(6).unwrap()
+                );
+                assert_eq!(
+                    buffer.anchor_before(Point { row: 1, column: 3 }).unwrap(),
+                    buffer.anchor_before(7).unwrap()
+                );
+                assert_eq!(
+                    buffer.anchor_before(Point { row: 1, column: 4 }).unwrap(),
+                    buffer.anchor_before(8).unwrap()
+                );
 
-        // Comparison between anchors.
-        let anchor_at_offset_0 = buffer.anchor_before(0).unwrap();
-        let anchor_at_offset_1 = buffer.anchor_before(1).unwrap();
-        let anchor_at_offset_2 = buffer.anchor_before(2).unwrap();
+                // Comparison between anchors.
+                let anchor_at_offset_0 = buffer.anchor_before(0).unwrap();
+                let anchor_at_offset_1 = buffer.anchor_before(1).unwrap();
+                let anchor_at_offset_2 = buffer.anchor_before(2).unwrap();
 
-        assert_eq!(
-            anchor_at_offset_0.cmp(&anchor_at_offset_0, &buffer)?,
-            Ordering::Equal
-        );
-        assert_eq!(
-            anchor_at_offset_1.cmp(&anchor_at_offset_1, &buffer)?,
-            Ordering::Equal
-        );
-        assert_eq!(
-            anchor_at_offset_2.cmp(&anchor_at_offset_2, &buffer)?,
-            Ordering::Equal
-        );
+                assert_eq!(
+                    anchor_at_offset_0
+                        .cmp(&anchor_at_offset_0, &buffer)
+                        .unwrap(),
+                    Ordering::Equal
+                );
+                assert_eq!(
+                    anchor_at_offset_1
+                        .cmp(&anchor_at_offset_1, &buffer)
+                        .unwrap(),
+                    Ordering::Equal
+                );
+                assert_eq!(
+                    anchor_at_offset_2
+                        .cmp(&anchor_at_offset_2, &buffer)
+                        .unwrap(),
+                    Ordering::Equal
+                );
 
-        assert_eq!(
-            anchor_at_offset_0.cmp(&anchor_at_offset_1, &buffer)?,
-            Ordering::Less
-        );
-        assert_eq!(
-            anchor_at_offset_1.cmp(&anchor_at_offset_2, &buffer)?,
-            Ordering::Less
-        );
-        assert_eq!(
-            anchor_at_offset_0.cmp(&anchor_at_offset_2, &buffer)?,
-            Ordering::Less
-        );
+                assert_eq!(
+                    anchor_at_offset_0
+                        .cmp(&anchor_at_offset_1, &buffer)
+                        .unwrap(),
+                    Ordering::Less
+                );
+                assert_eq!(
+                    anchor_at_offset_1
+                        .cmp(&anchor_at_offset_2, &buffer)
+                        .unwrap(),
+                    Ordering::Less
+                );
+                assert_eq!(
+                    anchor_at_offset_0
+                        .cmp(&anchor_at_offset_2, &buffer)
+                        .unwrap(),
+                    Ordering::Less
+                );
 
-        assert_eq!(
-            anchor_at_offset_1.cmp(&anchor_at_offset_0, &buffer)?,
-            Ordering::Greater
-        );
-        assert_eq!(
-            anchor_at_offset_2.cmp(&anchor_at_offset_1, &buffer)?,
-            Ordering::Greater
-        );
-        assert_eq!(
-            anchor_at_offset_2.cmp(&anchor_at_offset_0, &buffer)?,
-            Ordering::Greater
-        );
-        Ok(())
+                assert_eq!(
+                    anchor_at_offset_1
+                        .cmp(&anchor_at_offset_0, &buffer)
+                        .unwrap(),
+                    Ordering::Greater
+                );
+                assert_eq!(
+                    anchor_at_offset_2
+                        .cmp(&anchor_at_offset_1, &buffer)
+                        .unwrap(),
+                    Ordering::Greater
+                );
+                assert_eq!(
+                    anchor_at_offset_2
+                        .cmp(&anchor_at_offset_0, &buffer)
+                        .unwrap(),
+                    Ordering::Greater
+                );
+                buffer
+            });
+        });
     }
 
     #[test]
-    fn test_anchors_at_start_and_end() -> Result<()> {
-        let mut buffer = Buffer::new(0, "");
-        let before_start_anchor = buffer.anchor_before(0).unwrap();
-        let after_end_anchor = buffer.anchor_after(0).unwrap();
-
-        buffer.edit(vec![0..0], "abc", None)?;
-        assert_eq!(buffer.text(), "abc");
-        assert_eq!(before_start_anchor.to_offset(&buffer).unwrap(), 0);
-        assert_eq!(after_end_anchor.to_offset(&buffer).unwrap(), 3);
-
-        let after_start_anchor = buffer.anchor_after(0).unwrap();
-        let before_end_anchor = buffer.anchor_before(3).unwrap();
-
-        buffer.edit(vec![3..3], "def", None)?;
-        buffer.edit(vec![0..0], "ghi", None)?;
-        assert_eq!(buffer.text(), "ghiabcdef");
-        assert_eq!(before_start_anchor.to_offset(&buffer).unwrap(), 0);
-        assert_eq!(after_start_anchor.to_offset(&buffer).unwrap(), 3);
-        assert_eq!(before_end_anchor.to_offset(&buffer).unwrap(), 6);
-        assert_eq!(after_end_anchor.to_offset(&buffer).unwrap(), 9);
-
-        Ok(())
+    fn test_anchors_at_start_and_end() {
+        App::test((), |ctx| {
+            ctx.add_model(|ctx| {
+                let mut buffer = Buffer::new(0, "", ctx);
+                let before_start_anchor = buffer.anchor_before(0).unwrap();
+                let after_end_anchor = buffer.anchor_after(0).unwrap();
+
+                buffer.edit(vec![0..0], "abc", None).unwrap();
+                assert_eq!(buffer.text(), "abc");
+                assert_eq!(before_start_anchor.to_offset(&buffer).unwrap(), 0);
+                assert_eq!(after_end_anchor.to_offset(&buffer).unwrap(), 3);
+
+                let after_start_anchor = buffer.anchor_after(0).unwrap();
+                let before_end_anchor = buffer.anchor_before(3).unwrap();
+
+                buffer.edit(vec![3..3], "def", None).unwrap();
+                buffer.edit(vec![0..0], "ghi", None).unwrap();
+                assert_eq!(buffer.text(), "ghiabcdef");
+                assert_eq!(before_start_anchor.to_offset(&buffer).unwrap(), 0);
+                assert_eq!(after_start_anchor.to_offset(&buffer).unwrap(), 3);
+                assert_eq!(before_end_anchor.to_offset(&buffer).unwrap(), 6);
+                assert_eq!(after_end_anchor.to_offset(&buffer).unwrap(), 9);
+                buffer
+            });
+        });
     }
 
     #[test]
-    fn test_is_modified() -> Result<()> {
+    fn test_is_modified() {
         App::test((), |app| {
-            let model = app.add_model(|_| Buffer::new(0, "abc"));
+            let model = app.add_model(|ctx| Buffer::new(0, "abc", ctx));
             let events = Rc::new(RefCell::new(Vec::new()));
 
             // initially, the buffer isn't dirty.
@@ -2958,94 +3027,113 @@ mod tests {
                 );
             });
         });
-        Ok(())
     }
 
     #[test]
-    fn test_undo_redo() -> Result<()> {
-        let mut buffer = Buffer::new(0, "1234");
-
-        let edit1 = buffer.edit(vec![1..1], "abx", None)?;
-        let edit2 = buffer.edit(vec![3..4], "yzef", None)?;
-        let edit3 = buffer.edit(vec![3..5], "cd", None)?;
-        assert_eq!(buffer.text(), "1abcdef234");
-
-        buffer.undo_or_redo(edit1[0].edit_id().unwrap())?;
-        assert_eq!(buffer.text(), "1cdef234");
-        buffer.undo_or_redo(edit1[0].edit_id().unwrap())?;
-        assert_eq!(buffer.text(), "1abcdef234");
-
-        buffer.undo_or_redo(edit2[0].edit_id().unwrap())?;
-        assert_eq!(buffer.text(), "1abcdx234");
-        buffer.undo_or_redo(edit3[0].edit_id().unwrap())?;
-        assert_eq!(buffer.text(), "1abx234");
-        buffer.undo_or_redo(edit2[0].edit_id().unwrap())?;
-        assert_eq!(buffer.text(), "1abyzef234");
-        buffer.undo_or_redo(edit3[0].edit_id().unwrap())?;
-        assert_eq!(buffer.text(), "1abcdef234");
-
-        buffer.undo_or_redo(edit3[0].edit_id().unwrap())?;
-        assert_eq!(buffer.text(), "1abyzef234");
-        buffer.undo_or_redo(edit1[0].edit_id().unwrap())?;
-        assert_eq!(buffer.text(), "1yzef234");
-        buffer.undo_or_redo(edit2[0].edit_id().unwrap())?;
-        assert_eq!(buffer.text(), "1234");
-
-        Ok(())
+    fn test_undo_redo() {
+        App::test((), |app| {
+            app.add_model(|ctx| {
+                let mut buffer = Buffer::new(0, "1234", ctx);
+
+                let edit1 = buffer.edit(vec![1..1], "abx", None).unwrap();
+                let edit2 = buffer.edit(vec![3..4], "yzef", None).unwrap();
+                let edit3 = buffer.edit(vec![3..5], "cd", None).unwrap();
+                assert_eq!(buffer.text(), "1abcdef234");
+
+                buffer.undo_or_redo(edit1[0].edit_id().unwrap()).unwrap();
+                assert_eq!(buffer.text(), "1cdef234");
+                buffer.undo_or_redo(edit1[0].edit_id().unwrap()).unwrap();
+                assert_eq!(buffer.text(), "1abcdef234");
+
+                buffer.undo_or_redo(edit2[0].edit_id().unwrap()).unwrap();
+                assert_eq!(buffer.text(), "1abcdx234");
+                buffer.undo_or_redo(edit3[0].edit_id().unwrap()).unwrap();
+                assert_eq!(buffer.text(), "1abx234");
+                buffer.undo_or_redo(edit2[0].edit_id().unwrap()).unwrap();
+                assert_eq!(buffer.text(), "1abyzef234");
+                buffer.undo_or_redo(edit3[0].edit_id().unwrap()).unwrap();
+                assert_eq!(buffer.text(), "1abcdef234");
+
+                buffer.undo_or_redo(edit3[0].edit_id().unwrap()).unwrap();
+                assert_eq!(buffer.text(), "1abyzef234");
+                buffer.undo_or_redo(edit1[0].edit_id().unwrap()).unwrap();
+                assert_eq!(buffer.text(), "1yzef234");
+                buffer.undo_or_redo(edit2[0].edit_id().unwrap()).unwrap();
+                assert_eq!(buffer.text(), "1234");
+
+                buffer
+            });
+        });
     }
 
     #[test]
-    fn test_history() -> Result<()> {
-        let mut now = Instant::now();
-        let mut buffer = Buffer::new(0, "123456");
-
-        let (set_id, _) =
-            buffer.add_selection_set(buffer.selections_from_ranges(vec![4..4])?, None);
-        buffer.start_transaction_at(Some(set_id), now)?;
-        buffer.edit(vec![2..4], "cd", None)?;
-        buffer.end_transaction_at(Some(set_id), now, None)?;
-        assert_eq!(buffer.text(), "12cd56");
-        assert_eq!(buffer.selection_ranges(set_id)?, vec![4..4]);
-
-        buffer.start_transaction_at(Some(set_id), now)?;
-        buffer.update_selection_set(set_id, buffer.selections_from_ranges(vec![1..3])?, None)?;
-        buffer.edit(vec![4..5], "e", None)?;
-        buffer.end_transaction_at(Some(set_id), now, None)?;
-        assert_eq!(buffer.text(), "12cde6");
-        assert_eq!(buffer.selection_ranges(set_id)?, vec![1..3]);
-
-        now += UNDO_GROUP_INTERVAL + Duration::from_millis(1);
-        buffer.start_transaction_at(Some(set_id), now)?;
-        buffer.update_selection_set(set_id, buffer.selections_from_ranges(vec![2..2])?, None)?;
-        buffer.edit(vec![0..1], "a", None)?;
-        buffer.edit(vec![1..1], "b", None)?;
-        buffer.end_transaction_at(Some(set_id), now, None)?;
-        assert_eq!(buffer.text(), "ab2cde6");
-        assert_eq!(buffer.selection_ranges(set_id)?, vec![3..3]);
-
-        // Last transaction happened past the group interval, undo it on its
-        // own.
-        buffer.undo(None);
-        assert_eq!(buffer.text(), "12cde6");
-        assert_eq!(buffer.selection_ranges(set_id)?, vec![1..3]);
-
-        // First two transactions happened within the group interval, undo them
-        // together.
-        buffer.undo(None);
-        assert_eq!(buffer.text(), "123456");
-        assert_eq!(buffer.selection_ranges(set_id)?, vec![4..4]);
-
-        // Redo the first two transactions together.
-        buffer.redo(None);
-        assert_eq!(buffer.text(), "12cde6");
-        assert_eq!(buffer.selection_ranges(set_id)?, vec![1..3]);
-
-        // Redo the last transaction on its own.
-        buffer.redo(None);
-        assert_eq!(buffer.text(), "ab2cde6");
-        assert_eq!(buffer.selection_ranges(set_id)?, vec![3..3]);
-
-        Ok(())
+    fn test_history() {
+        App::test((), |app| {
+            app.add_model(|ctx| {
+                let mut now = Instant::now();
+                let mut buffer = Buffer::new(0, "123456", ctx);
+
+                let (set_id, _) = buffer
+                    .add_selection_set(buffer.selections_from_ranges(vec![4..4]).unwrap(), None);
+                buffer.start_transaction_at(Some(set_id), now).unwrap();
+                buffer.edit(vec![2..4], "cd", None).unwrap();
+                buffer.end_transaction_at(Some(set_id), now, None).unwrap();
+                assert_eq!(buffer.text(), "12cd56");
+                assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]);
+
+                buffer.start_transaction_at(Some(set_id), now).unwrap();
+                buffer
+                    .update_selection_set(
+                        set_id,
+                        buffer.selections_from_ranges(vec![1..3]).unwrap(),
+                        None,
+                    )
+                    .unwrap();
+                buffer.edit(vec![4..5], "e", None).unwrap();
+                buffer.end_transaction_at(Some(set_id), now, None).unwrap();
+                assert_eq!(buffer.text(), "12cde6");
+                assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]);
+
+                now += UNDO_GROUP_INTERVAL + Duration::from_millis(1);
+                buffer.start_transaction_at(Some(set_id), now).unwrap();
+                buffer
+                    .update_selection_set(
+                        set_id,
+                        buffer.selections_from_ranges(vec![2..2]).unwrap(),
+                        None,
+                    )
+                    .unwrap();
+                buffer.edit(vec![0..1], "a", None).unwrap();
+                buffer.edit(vec![1..1], "b", None).unwrap();
+                buffer.end_transaction_at(Some(set_id), now, None).unwrap();
+                assert_eq!(buffer.text(), "ab2cde6");
+                assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]);
+
+                // Last transaction happened past the group interval, undo it on its
+                // own.
+                buffer.undo(None);
+                assert_eq!(buffer.text(), "12cde6");
+                assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]);
+
+                // First two transactions happened within the group interval, undo them
+                // together.
+                buffer.undo(None);
+                assert_eq!(buffer.text(), "123456");
+                assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]);
+
+                // Redo the first two transactions together.
+                buffer.redo(None);
+                assert_eq!(buffer.text(), "12cde6");
+                assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]);
+
+                // Redo the last transaction on its own.
+                buffer.redo(None);
+                assert_eq!(buffer.text(), "ab2cde6");
+                assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]);
+
+                buffer
+            });
+        });
     }
 
     #[test]

zed/src/editor/buffer_view.rs 🔗

@@ -20,6 +20,7 @@ use std::{
     fmt::Write,
     iter::FromIterator,
     ops::Range,
+    path::Path,
     sync::Arc,
     time::Duration,
 };
@@ -118,7 +119,7 @@ struct ClipboardSelection {
 
 impl BufferView {
     pub fn single_line(settings: watch::Receiver<Settings>, ctx: &mut ViewContext<Self>) -> Self {
-        let buffer = ctx.add_model(|_| Buffer::new(0, String::new()));
+        let buffer = ctx.add_model(|ctx| Buffer::new(0, String::new(), ctx));
         let mut view = Self::for_buffer(buffer, settings, ctx);
         view.single_line = true;
         view
@@ -1315,6 +1316,7 @@ impl BufferView {
             buffer::Event::Edited(_) => ctx.emit(Event::Edited),
             buffer::Event::Dirtied => ctx.emit(Event::Dirtied),
             buffer::Event::Saved => ctx.emit(Event::Saved),
+            buffer::Event::FileHandleChanged => ctx.emit(Event::FileHandleChanged),
         }
     }
 }
@@ -1325,6 +1327,7 @@ pub enum Event {
     Blurred,
     Dirtied,
     Saved,
+    FileHandleChanged,
 }
 
 impl Entity for BufferView {
@@ -1371,11 +1374,14 @@ impl workspace::ItemView for BufferView {
     }
 
     fn should_update_tab_on_event(event: &Self::Event) -> bool {
-        matches!(event, Event::Saved | Event::Dirtied)
+        matches!(
+            event,
+            Event::Saved | Event::Dirtied | Event::FileHandleChanged
+        )
     }
 
     fn title(&self, app: &AppContext) -> std::string::String {
-        if let Some(path) = self.buffer.read(app).path(app) {
+        if let Some(path) = self.buffer.read(app).path() {
             path.file_name()
                 .expect("buffer's path is always to a file")
                 .to_string_lossy()
@@ -1385,7 +1391,7 @@ impl workspace::ItemView for BufferView {
         }
     }
 
-    fn entry_id(&self, app: &AppContext) -> Option<(usize, usize)> {
+    fn entry_id(&self, app: &AppContext) -> Option<(usize, Arc<Path>)> {
         self.buffer.read(app).entry_id()
     }
 
@@ -1418,7 +1424,8 @@ mod tests {
     #[test]
     fn test_selection_with_mouse() {
         App::test((), |app| {
-            let buffer = app.add_model(|_| Buffer::new(0, "aaaaaa\nbbbbbb\ncccccc\ndddddd\n"));
+            let buffer =
+                app.add_model(|ctx| Buffer::new(0, "aaaaaa\nbbbbbb\ncccccc\ndddddd\n", ctx));
             let settings = settings::channel(&app.font_cache()).unwrap().1;
             let (_, buffer_view) =
                 app.add_window(|ctx| BufferView::for_buffer(buffer, settings, ctx));
@@ -1532,7 +1539,7 @@ mod tests {
             let layout_cache = TextLayoutCache::new(app.platform().fonts());
             let font_cache = app.font_cache().clone();
 
-            let buffer = app.add_model(|_| Buffer::new(0, sample_text(6, 6)));
+            let buffer = app.add_model(|ctx| Buffer::new(0, sample_text(6, 6), ctx));
 
             let settings = settings::channel(&font_cache).unwrap().1;
             let (_, view) =
@@ -1549,7 +1556,7 @@ mod tests {
     #[test]
     fn test_fold() {
         App::test((), |app| {
-            let buffer = app.add_model(|_| {
+            let buffer = app.add_model(|ctx| {
                 Buffer::new(
                     0,
                     "
@@ -1570,6 +1577,7 @@ mod tests {
                     }
                 "
                     .unindent(),
+                    ctx,
                 )
             });
             let settings = settings::channel(&app.font_cache()).unwrap().1;
@@ -1643,7 +1651,7 @@ mod tests {
     #[test]
     fn test_move_cursor() -> Result<()> {
         App::test((), |app| {
-            let buffer = app.add_model(|_| Buffer::new(0, sample_text(6, 6)));
+            let buffer = app.add_model(|ctx| Buffer::new(0, sample_text(6, 6), ctx));
             let settings = settings::channel(&app.font_cache()).unwrap().1;
             let (_, view) =
                 app.add_window(|ctx| BufferView::for_buffer(buffer.clone(), settings, ctx));
@@ -1680,8 +1688,12 @@ mod tests {
     #[test]
     fn test_backspace() {
         App::test((), |app| {
-            let buffer = app.add_model(|_| {
-                Buffer::new(0, "one two three\nfour five six\nseven eight nine\nten\n")
+            let buffer = app.add_model(|ctx| {
+                Buffer::new(
+                    0,
+                    "one two three\nfour five six\nseven eight nine\nten\n",
+                    ctx,
+                )
             });
             let settings = settings::channel(&app.font_cache()).unwrap().1;
             let (_, view) =
@@ -1713,7 +1725,7 @@ mod tests {
     #[test]
     fn test_clipboard() {
         App::test((), |app| {
-            let buffer = app.add_model(|_| Buffer::new(0, "one two three four five six "));
+            let buffer = app.add_model(|ctx| Buffer::new(0, "one two three four five six ", ctx));
             let settings = settings::channel(&app.font_cache()).unwrap().1;
             let view = app
                 .add_window(|ctx| BufferView::for_buffer(buffer.clone(), settings, ctx))

zed/src/editor/display_map/fold_map.rs 🔗

@@ -471,7 +471,7 @@ mod tests {
     #[test]
     fn test_basic_folds() {
         App::test((), |app| {
-            let buffer = app.add_model(|_| Buffer::new(0, sample_text(5, 6)));
+            let buffer = app.add_model(|ctx| Buffer::new(0, sample_text(5, 6), ctx));
             let mut map = FoldMap::new(buffer.clone(), app.as_ref());
 
             map.fold(
@@ -522,7 +522,7 @@ mod tests {
     #[test]
     fn test_overlapping_folds() {
         App::test((), |app| {
-            let buffer = app.add_model(|_| Buffer::new(0, sample_text(5, 6)));
+            let buffer = app.add_model(|ctx| Buffer::new(0, sample_text(5, 6), ctx));
             let mut map = FoldMap::new(buffer.clone(), app.as_ref());
             map.fold(
                 vec![
@@ -541,7 +541,7 @@ mod tests {
     #[test]
     fn test_merging_folds_via_edit() {
         App::test((), |app| {
-            let buffer = app.add_model(|_| Buffer::new(0, sample_text(5, 6)));
+            let buffer = app.add_model(|ctx| Buffer::new(0, sample_text(5, 6), ctx));
             let mut map = FoldMap::new(buffer.clone(), app.as_ref());
 
             map.fold(
@@ -589,10 +589,10 @@ mod tests {
             let mut rng = StdRng::seed_from_u64(seed);
 
             App::test((), |app| {
-                let buffer = app.add_model(|_| {
+                let buffer = app.add_model(|ctx| {
                     let len = rng.gen_range(0..10);
                     let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
-                    Buffer::new(0, text)
+                    Buffer::new(0, text, ctx)
                 });
                 let mut map = FoldMap::new(buffer.clone(), app.as_ref());
 
@@ -664,7 +664,7 @@ mod tests {
     fn test_buffer_rows() {
         App::test((), |app| {
             let text = sample_text(6, 6) + "\n";
-            let buffer = app.add_model(|_| Buffer::new(0, text));
+            let buffer = app.add_model(|ctx| Buffer::new(0, text, ctx));
 
             let mut map = FoldMap::new(buffer.clone(), app.as_ref());
 

zed/src/editor/display_map/mod.rs 🔗

@@ -298,7 +298,7 @@ mod tests {
     fn test_chars_at() {
         App::test((), |app| {
             let text = sample_text(6, 6);
-            let buffer = app.add_model(|_| Buffer::new(0, text));
+            let buffer = app.add_model(|ctx| Buffer::new(0, text, ctx));
             let map = app.add_model(|ctx| DisplayMap::new(buffer.clone(), 4, ctx));
             buffer
                 .update(app, |buffer, ctx| {
@@ -365,7 +365,7 @@ mod tests {
     #[test]
     fn test_max_point() {
         App::test((), |app| {
-            let buffer = app.add_model(|_| Buffer::new(0, "aaa\n\t\tbbb"));
+            let buffer = app.add_model(|ctx| Buffer::new(0, "aaa\n\t\tbbb", ctx));
             let map = app.add_model(|ctx| DisplayMap::new(buffer.clone(), 4, ctx));
             assert_eq!(
                 map.read(app).max_point(app.as_ref()),

zed/src/file_finder.rs 🔗

@@ -14,7 +14,14 @@ use gpui::{
     AppContext, Axis, Border, Entity, ModelHandle, MutableAppContext, View, ViewContext,
     ViewHandle, WeakViewHandle,
 };
-use std::cmp;
+use std::{
+    cmp,
+    path::Path,
+    sync::{
+        atomic::{self, AtomicBool},
+        Arc,
+    },
+};
 
 pub struct FileFinder {
     handle: WeakViewHandle<Self>,
@@ -24,7 +31,9 @@ pub struct FileFinder {
     search_count: usize,
     latest_search_id: usize,
     matches: Vec<PathMatch>,
-    selected: usize,
+    include_root_name: bool,
+    selected: Option<Arc<Path>>,
+    cancel_flag: Arc<AtomicBool>,
     list_state: UniformListState,
 }
 
@@ -32,8 +41,8 @@ pub fn init(app: &mut MutableAppContext) {
     app.add_action("file_finder:toggle", FileFinder::toggle);
     app.add_action("file_finder:confirm", FileFinder::confirm);
     app.add_action("file_finder:select", FileFinder::select);
-    app.add_action("buffer:move_up", FileFinder::select_prev);
-    app.add_action("buffer:move_down", FileFinder::select_next);
+    app.add_action("menu:select_prev", FileFinder::select_prev);
+    app.add_action("menu:select_next", FileFinder::select_next);
     app.add_action("uniform_list:scroll", FileFinder::scroll);
 
     app.add_bindings(vec![
@@ -44,7 +53,7 @@ pub fn init(app: &mut MutableAppContext) {
 }
 
 pub enum Event {
-    Selected(usize, usize),
+    Selected(usize, Arc<Path>),
     Dismissed,
 }
 
@@ -137,24 +146,24 @@ impl FileFinder {
         app: &AppContext,
     ) -> Option<ElementBox> {
         let tree_id = path_match.tree_id;
-        let entry_id = path_match.entry_id;
 
         self.worktree(tree_id, app).map(|tree| {
-            let path = tree.entry_path(entry_id).unwrap();
-            let file_name = path
+            let prefix = if self.include_root_name {
+                tree.root_name()
+            } else {
+                ""
+            };
+            let path = path_match.path.clone();
+            let path_string = path_match.path.to_string_lossy();
+            let file_name = path_match
+                .path
                 .file_name()
                 .unwrap_or_default()
-                .to_string_lossy()
-                .to_string();
-
-            let mut path = path.to_string_lossy().to_string();
-            if path_match.skipped_prefix_len > 0 {
-                let mut i = 0;
-                path.retain(|_| util::post_inc(&mut i) >= path_match.skipped_prefix_len)
-            }
+                .to_string_lossy();
 
             let path_positions = path_match.positions.clone();
-            let file_name_start = path.chars().count() - file_name.chars().count();
+            let file_name_start =
+                prefix.len() + path_string.chars().count() - file_name.chars().count();
             let mut file_name_positions = Vec::new();
             file_name_positions.extend(path_positions.iter().filter_map(|pos| {
                 if pos >= &file_name_start {
@@ -168,6 +177,9 @@ impl FileFinder {
             let highlight_color = ColorU::from_u32(0x304ee2ff);
             let bold = *Properties::new().weight(Weight::BOLD);
 
+            let mut full_path = prefix.to_string();
+            full_path.push_str(&path_string);
+
             let mut container = Container::new(
                 Flex::row()
                     .with_child(
@@ -188,7 +200,7 @@ impl FileFinder {
                             Flex::column()
                                 .with_child(
                                     Label::new(
-                                        file_name,
+                                        file_name.to_string(),
                                         settings.ui_font_family,
                                         settings.ui_font_size,
                                     )
@@ -197,7 +209,7 @@ impl FileFinder {
                                 )
                                 .with_child(
                                     Label::new(
-                                        path.into(),
+                                        full_path,
                                         settings.ui_font_family,
                                         settings.ui_font_size,
                                     )
@@ -212,18 +224,19 @@ impl FileFinder {
             )
             .with_uniform_padding(6.0);
 
-            if index == self.selected || index < self.matches.len() - 1 {
+            let selected_index = self.selected_index();
+            if index == selected_index || index < self.matches.len() - 1 {
                 container =
                     container.with_border(Border::bottom(1.0, ColorU::from_u32(0xdbdbdcff)));
             }
 
-            if index == self.selected {
+            if index == selected_index {
                 container = container.with_background_color(ColorU::from_u32(0xdbdbdcff));
             }
 
             EventHandler::new(container.boxed())
                 .on_mouse_down(move |ctx| {
-                    ctx.dispatch_action("file_finder:select", (tree_id, entry_id));
+                    ctx.dispatch_action("file_finder:select", (tree_id, path.clone()));
                     true
                 })
                 .named("match")
@@ -251,8 +264,8 @@ impl FileFinder {
         ctx: &mut ViewContext<WorkspaceView>,
     ) {
         match event {
-            Event::Selected(tree_id, entry_id) => {
-                workspace_view.open_entry((*tree_id, *entry_id), ctx);
+            Event::Selected(tree_id, path) => {
+                workspace_view.open_entry((*tree_id, path.clone()), ctx);
                 workspace_view.dismiss_modal(ctx);
             }
             Event::Dismissed => {
@@ -281,7 +294,9 @@ impl FileFinder {
             search_count: 0,
             latest_search_id: 0,
             matches: Vec::new(),
-            selected: 0,
+            include_root_name: false,
+            selected: None,
+            cancel_flag: Arc::new(AtomicBool::new(false)),
             list_state: UniformListState::new(),
         }
     }
@@ -313,19 +328,34 @@ impl FileFinder {
         }
     }
 
+    fn selected_index(&self) -> usize {
+        if let Some(selected) = self.selected.as_ref() {
+            for (ix, path_match) in self.matches.iter().enumerate() {
+                if path_match.path.as_ref() == selected.as_ref() {
+                    return ix;
+                }
+            }
+        }
+        0
+    }
+
     fn select_prev(&mut self, _: &(), ctx: &mut ViewContext<Self>) {
-        if self.selected > 0 {
-            self.selected -= 1;
+        let mut selected_index = self.selected_index();
+        if selected_index > 0 {
+            selected_index -= 1;
+            self.selected = Some(self.matches[selected_index].path.clone());
         }
-        self.list_state.scroll_to(self.selected);
+        self.list_state.scroll_to(selected_index);
         ctx.notify();
     }
 
     fn select_next(&mut self, _: &(), ctx: &mut ViewContext<Self>) {
-        if self.selected + 1 < self.matches.len() {
-            self.selected += 1;
+        let mut selected_index = self.selected_index();
+        if selected_index + 1 < self.matches.len() {
+            selected_index += 1;
+            self.selected = Some(self.matches[selected_index].path.clone());
         }
-        self.list_state.scroll_to(self.selected);
+        self.list_state.scroll_to(selected_index);
         ctx.notify();
     }
 
@@ -334,23 +364,41 @@ impl FileFinder {
     }
 
     fn confirm(&mut self, _: &(), ctx: &mut ViewContext<Self>) {
-        if let Some(m) = self.matches.get(self.selected) {
-            ctx.emit(Event::Selected(m.tree_id, m.entry_id));
+        if let Some(m) = self.matches.get(self.selected_index()) {
+            ctx.emit(Event::Selected(m.tree_id, m.path.clone()));
         }
     }
 
-    fn select(&mut self, entry: &(usize, usize), ctx: &mut ViewContext<Self>) {
-        let (tree_id, entry_id) = *entry;
-        ctx.emit(Event::Selected(tree_id, entry_id));
+    fn select(&mut self, (tree_id, path): &(usize, Arc<Path>), ctx: &mut ViewContext<Self>) {
+        ctx.emit(Event::Selected(*tree_id, path.clone()));
     }
 
     fn spawn_search(&mut self, query: String, ctx: &mut ViewContext<Self>) {
-        let worktrees = self.worktrees(ctx.as_ref());
+        let snapshots = self
+            .workspace
+            .read(ctx)
+            .worktrees()
+            .iter()
+            .map(|tree| tree.read(ctx).snapshot())
+            .collect::<Vec<_>>();
         let search_id = util::post_inc(&mut self.search_count);
-        let pool = ctx.as_ref().scoped_pool().clone();
+        let pool = ctx.as_ref().thread_pool().clone();
+        self.cancel_flag.store(true, atomic::Ordering::Relaxed);
+        self.cancel_flag = Arc::new(AtomicBool::new(false));
+        let cancel_flag = self.cancel_flag.clone();
         let task = ctx.background_executor().spawn(async move {
-            let matches = match_paths(worktrees.as_slice(), &query, false, false, 100, pool);
-            (search_id, matches)
+            let include_root_name = snapshots.len() > 1;
+            let matches = match_paths(
+                snapshots.iter(),
+                &query,
+                include_root_name,
+                false,
+                false,
+                100,
+                cancel_flag,
+                pool,
+            );
+            (search_id, include_root_name, matches)
         });
 
         ctx.spawn(task, Self::update_matches).detach();
@@ -358,14 +406,14 @@ impl FileFinder {
 
     fn update_matches(
         &mut self,
-        (search_id, matches): (usize, Vec<PathMatch>),
+        (search_id, include_root_name, matches): (usize, bool, Vec<PathMatch>),
         ctx: &mut ViewContext<Self>,
     ) {
         if search_id >= self.latest_search_id {
             self.latest_search_id = search_id;
             self.matches = matches;
-            self.selected = 0;
-            self.list_state.scroll_to(0);
+            self.include_root_name = include_root_name;
+            self.list_state.scroll_to(self.selected_index());
             ctx.notify();
         }
     }
@@ -377,15 +425,6 @@ impl FileFinder {
             .get(&tree_id)
             .map(|worktree| worktree.read(app))
     }
-
-    fn worktrees(&self, app: &AppContext) -> Vec<Worktree> {
-        self.workspace
-            .read(app)
-            .worktrees()
-            .iter()
-            .map(|worktree| worktree.read(app).clone())
-            .collect()
-    }
 }
 
 #[cfg(test)]
@@ -419,7 +458,8 @@ mod tests {
             let workspace = app.add_model(|ctx| Workspace::new(vec![tmp_dir.path().into()], ctx));
             let (window_id, workspace_view) =
                 app.add_window(|ctx| WorkspaceView::new(workspace.clone(), settings, ctx));
-            app.finish_pending_tasks().await; // Open and populate worktree.
+            app.read(|ctx| workspace.read(ctx).worktree_scans_complete(ctx))
+                .await;
             app.dispatch_action(
                 window_id,
                 vec![workspace_view.id()],
@@ -442,33 +482,30 @@ mod tests {
             app.dispatch_action(window_id, chain.clone(), "buffer:insert", "b".to_string());
             app.dispatch_action(window_id, chain.clone(), "buffer:insert", "n".to_string());
             app.dispatch_action(window_id, chain.clone(), "buffer:insert", "a".to_string());
-            app.finish_pending_tasks().await; // Complete path search.
-
-            // let view_state = finder.state(&app);
-            // assert!(view_state.matches.len() > 1);
-            // app.dispatch_action(
-            //     window_id,
-            //     vec![workspace_view.id(), finder.id()],
-            //     "menu:select_next",
-            //     (),
-            // );
-            // app.dispatch_action(
-            //     window_id,
-            //     vec![workspace_view.id(), finder.id()],
-            //     "file_finder:confirm",
-            //     (),
-            // );
-            // app.finish_pending_tasks().await; // Load Buffer and open BufferView.
-            // let active_pane = workspace_view.as_ref(app).active_pane().clone();
-            // assert_eq!(
-            //     active_pane.state(&app),
-            //     pane::State {
-            //         tabs: vec![pane::TabState {
-            //             title: "bandana".into(),
-            //             active: true,
-            //         }]
-            //     }
-            // );
+            finder
+                .condition(&app, |finder, _| finder.matches.len() == 2)
+                .await;
+
+            let active_pane = app.read(|ctx| workspace_view.read(ctx).active_pane().clone());
+            app.dispatch_action(
+                window_id,
+                vec![workspace_view.id(), finder.id()],
+                "menu:select_next",
+                (),
+            );
+            app.dispatch_action(
+                window_id,
+                vec![workspace_view.id(), finder.id()],
+                "file_finder:confirm",
+                (),
+            );
+            active_pane
+                .condition(&app, |pane, _| pane.active_item().is_some())
+                .await;
+            app.read(|ctx| {
+                let active_item = active_pane.read(ctx).active_item().unwrap();
+                assert_eq!(active_item.title(ctx), "bandana");
+            });
         });
     }
 }

zed/src/lib.rs 🔗

@@ -8,7 +8,6 @@ mod sum_tree;
 #[cfg(test)]
 mod test;
 mod time;
-mod timer;
 mod util;
 pub mod watch;
 pub mod workspace;

zed/src/main.rs 🔗

@@ -1,3 +1,6 @@
+// Allow binary to be called Zed for a nice application menu when running executable directly
+#![allow(non_snake_case)]
+
 use fs::OpenOptions;
 use log::LevelFilter;
 use simplelog::SimpleLogger;

zed/src/operation_queue.rs 🔗

@@ -35,11 +35,7 @@ impl<T: Operation> OperationQueue<T> {
     pub fn insert(&mut self, mut ops: Vec<T>) {
         ops.sort_by_key(|op| op.timestamp());
         ops.dedup_by_key(|op| op.timestamp());
-        let mut edits = ops
-            .into_iter()
-            .map(|op| Edit::Insert(op))
-            .collect::<Vec<_>>();
-        self.0.edit(&mut edits);
+        self.0.edit(ops.into_iter().map(Edit::Insert).collect());
     }
 
     pub fn drain(&mut self) -> Self {

zed/src/sum_tree/cursor.rs 🔗

@@ -199,6 +199,9 @@ where
     }
 
     pub fn next(&mut self) {
+        if !self.did_seek {
+            self.descend_to_first_item(self.tree, |_| true)
+        }
         self.next_internal(|_| true)
     }
 
@@ -271,6 +274,7 @@ where
         }
 
         self.at_end = self.stack.is_empty();
+        debug_assert!(self.stack.is_empty() || self.stack.last().unwrap().tree.0.is_leaf());
     }
 
     pub fn descend_to_first_item<F>(&mut self, mut subtree: &'a SumTree<T>, filter_node: F)
@@ -656,6 +660,7 @@ where
         }
 
         self.at_end = self.stack.is_empty();
+        debug_assert!(self.stack.is_empty() || self.stack.last().unwrap().tree.0.is_leaf());
         if bias == SeekBias::Left {
             let mut end = self.seek_dimension.clone();
             if let Some(summary) = self.item_summary() {

zed/src/sum_tree/mod.rs 🔗

@@ -10,7 +10,7 @@ const TREE_BASE: usize = 2;
 #[cfg(not(test))]
 const TREE_BASE: usize = 6;
 
-pub trait Item: Clone + Eq + fmt::Debug {
+pub trait Item: Clone + fmt::Debug {
     type Summary: for<'a> AddAssign<&'a Self::Summary> + Default + Clone + fmt::Debug;
 
     fn summary(&self) -> Self::Summary;
@@ -22,7 +22,7 @@ pub trait KeyedItem: Item {
     fn key(&self) -> Self::Key;
 }
 
-pub trait Dimension<'a, Summary: Default>: 'a + Clone + fmt::Debug + Default {
+pub trait Dimension<'a, Summary: Default>: Clone + fmt::Debug + Default {
     fn add_summary(&mut self, summary: &'a Summary);
 }
 
@@ -332,11 +332,12 @@ impl<T: KeyedItem> SumTree<T> {
         };
     }
 
-    pub fn edit(&mut self, edits: &mut [Edit<T>]) {
+    pub fn edit(&mut self, mut edits: Vec<Edit<T>>) -> Vec<T> {
         if edits.is_empty() {
-            return;
+            return Vec::new();
         }
 
+        let mut removed = Vec::new();
         edits.sort_unstable_by_key(|item| item.key());
 
         *self = {
@@ -358,13 +359,19 @@ impl<T: KeyedItem> SumTree<T> {
                     new_tree.push_tree(slice);
                     old_item = cursor.item();
                 }
-                if old_item.map_or(false, |old_item| old_item.key() == new_key) {
-                    cursor.next();
+
+                if let Some(old_item) = old_item {
+                    if old_item.key() == new_key {
+                        removed.push(old_item.clone());
+                        cursor.next();
+                    }
                 }
+
                 match edit {
                     Edit::Insert(item) => {
-                        buffered_items.push(item.clone());
+                        buffered_items.push(item);
                     }
+                    Edit::Remove(_) => {}
                 }
             }
 
@@ -372,6 +379,23 @@ impl<T: KeyedItem> SumTree<T> {
             new_tree.push_tree(cursor.suffix());
             new_tree
         };
+
+        removed
+    }
+
+    pub fn get(&self, key: &T::Key) -> Option<&T> {
+        let mut cursor = self.cursor::<T::Key, ()>();
+        if cursor.seek(key, SeekBias::Left) {
+            cursor.item()
+        } else {
+            None
+        }
+    }
+}
+
+impl<T: Item> Default for SumTree<T> {
+    fn default() -> Self {
+        Self::new()
     }
 }
 
@@ -446,12 +470,14 @@ impl<T: Item> Node<T> {
 #[derive(Debug)]
 pub enum Edit<T: KeyedItem> {
     Insert(T),
+    Remove(T::Key),
 }
 
 impl<T: KeyedItem> Edit<T> {
     fn key(&self) -> T::Key {
         match self {
             Edit::Insert(item) => item.key(),
+            Edit::Remove(key) => key.clone(),
         }
     }
 }
@@ -471,6 +497,7 @@ where
 #[cfg(test)]
 mod tests {
     use super::*;
+    use std::cmp;
     use std::ops::Add;
 
     #[test]
@@ -754,11 +781,33 @@ mod tests {
         assert_eq!(cursor.slice(&Count(6), SeekBias::Right).items(), vec![6]);
     }
 
+    #[test]
+    fn test_edit() {
+        let mut tree = SumTree::<u8>::new();
+
+        let removed = tree.edit(vec![Edit::Insert(1), Edit::Insert(2), Edit::Insert(0)]);
+        assert_eq!(tree.items(), vec![0, 1, 2]);
+        assert_eq!(removed, Vec::<u8>::new());
+        assert_eq!(tree.get(&0), Some(&0));
+        assert_eq!(tree.get(&1), Some(&1));
+        assert_eq!(tree.get(&2), Some(&2));
+        assert_eq!(tree.get(&4), None);
+
+        let removed = tree.edit(vec![Edit::Insert(2), Edit::Insert(4), Edit::Remove(0)]);
+        assert_eq!(tree.items(), vec![1, 2, 4]);
+        assert_eq!(removed, vec![0, 2]);
+        assert_eq!(tree.get(&0), None);
+        assert_eq!(tree.get(&1), Some(&1));
+        assert_eq!(tree.get(&2), Some(&2));
+        assert_eq!(tree.get(&4), Some(&4));
+    }
+
     #[derive(Clone, Default, Debug)]
     pub struct IntegersSummary {
         count: Count,
         sum: Sum,
         contains_even: bool,
+        max: u8,
     }
 
     #[derive(Ord, PartialOrd, Default, Eq, PartialEq, Clone, Debug)]
@@ -775,15 +824,31 @@ mod tests {
                 count: Count(1),
                 sum: Sum(*self as usize),
                 contains_even: (*self & 1) == 0,
+                max: *self,
             }
         }
     }
 
+    impl KeyedItem for u8 {
+        type Key = u8;
+
+        fn key(&self) -> Self::Key {
+            *self
+        }
+    }
+
+    impl<'a> Dimension<'a, IntegersSummary> for u8 {
+        fn add_summary(&mut self, summary: &IntegersSummary) {
+            *self = summary.max;
+        }
+    }
+
     impl<'a> AddAssign<&'a Self> for IntegersSummary {
         fn add_assign(&mut self, other: &Self) {
             self.count.0 += &other.count.0;
             self.sum.0 += &other.sum.0;
             self.contains_even |= other.contains_even;
+            self.max = cmp::max(self.max, other.max);
         }
     }
 
@@ -793,15 +858,6 @@ mod tests {
         }
     }
 
-    // impl<'a> Add<&'a Self> for Count {
-    //     type Output = Self;
-    //
-    //     fn add(mut self, other: &Self) -> Self {
-    //         self.0 += other.0;
-    //         self
-    //     }
-    // }
-
     impl<'a> Dimension<'a, IntegersSummary> for Sum {
         fn add_summary(&mut self, summary: &IntegersSummary) {
             self.0 += summary.sum.0;

zed/src/test.rs 🔗

@@ -1,3 +1,5 @@
+use crate::time::ReplicaId;
+use ctor::ctor;
 use rand::Rng;
 use std::{
     collections::BTreeMap,
@@ -5,7 +7,10 @@ use std::{
 };
 use tempdir::TempDir;
 
-use crate::time::ReplicaId;
+#[ctor]
+fn init_logger() {
+    env_logger::init();
+}
 
 #[derive(Clone)]
 struct Envelope<T: Clone> {

zed/src/timer.rs 🔗

@@ -1,42 +0,0 @@
-use smol::prelude::*;
-use std::{
-    pin::Pin,
-    task::Poll,
-    time::{Duration, Instant},
-};
-
-pub struct Repeat {
-    timer: smol::Timer,
-    period: Duration,
-}
-
-impl Stream for Repeat {
-    type Item = Instant;
-
-    fn poll_next(
-        mut self: std::pin::Pin<&mut Self>,
-        cx: &mut std::task::Context<'_>,
-    ) -> Poll<Option<Self::Item>> {
-        match self.as_mut().timer().poll(cx) {
-            Poll::Ready(instant) => {
-                let period = self.as_ref().period;
-                self.as_mut().timer().set_after(period);
-                Poll::Ready(Some(instant))
-            }
-            Poll::Pending => Poll::Pending,
-        }
-    }
-}
-
-impl Repeat {
-    fn timer(self: std::pin::Pin<&mut Self>) -> Pin<&mut smol::Timer> {
-        unsafe { self.map_unchecked_mut(|s| &mut s.timer) }
-    }
-}
-
-pub fn repeat(period: Duration) -> Repeat {
-    Repeat {
-        timer: smol::Timer::after(period),
-        period,
-    }
-}

zed/src/workspace/pane.rs 🔗

@@ -7,7 +7,7 @@ use gpui::{
     keymap::Binding,
     AppContext, Border, Entity, MutableAppContext, Quad, View, ViewContext,
 };
-use std::cmp;
+use std::{cmp, path::Path, sync::Arc};
 
 pub fn init(app: &mut MutableAppContext) {
     app.add_action(
@@ -107,7 +107,7 @@ impl Pane {
 
     pub fn activate_entry(
         &mut self,
-        entry_id: (usize, usize),
+        entry_id: (usize, Arc<Path>),
         ctx: &mut ViewContext<Self>,
     ) -> bool {
         if let Some(index) = self.items.iter().position(|item| {

zed/src/workspace/workspace.rs 🔗

@@ -1,6 +1,6 @@
 use super::{ItemView, ItemViewHandle};
 use crate::{
-    editor::Buffer,
+    editor::{Buffer, History},
     settings::Settings,
     time::ReplicaId,
     watch,
@@ -76,7 +76,7 @@ enum OpenedItem {
 pub struct Workspace {
     replica_id: ReplicaId,
     worktrees: HashSet<ModelHandle<Worktree>>,
-    items: HashMap<(usize, usize), OpenedItem>,
+    items: HashMap<(usize, u64), OpenedItem>,
 }
 
 impl Workspace {
@@ -94,6 +94,19 @@ impl Workspace {
         &self.worktrees
     }
 
+    pub fn worktree_scans_complete(&self, ctx: &AppContext) -> impl Future<Output = ()> + 'static {
+        let futures = self
+            .worktrees
+            .iter()
+            .map(|worktree| worktree.read(ctx).scan_complete())
+            .collect::<Vec<_>>();
+        async move {
+            for future in futures {
+                future.await;
+            }
+        }
+    }
+
     pub fn contains_paths(&self, paths: &[PathBuf], app: &AppContext) -> bool {
         paths.iter().all(|path| self.contains_path(&path, app))
     }
@@ -101,7 +114,7 @@ impl Workspace {
     pub fn contains_path(&self, path: &Path, app: &AppContext) -> bool {
         self.worktrees
             .iter()
-            .any(|worktree| worktree.read(app).contains_path(path))
+            .any(|worktree| worktree.read(app).contains_abs_path(path))
     }
 
     pub fn open_paths(&mut self, paths: &[PathBuf], ctx: &mut ModelContext<Self>) {
@@ -112,12 +125,12 @@ impl Workspace {
 
     pub fn open_path<'a>(&'a mut self, path: PathBuf, ctx: &mut ModelContext<Self>) {
         for tree in self.worktrees.iter() {
-            if tree.read(ctx).contains_path(&path) {
+            if tree.read(ctx).contains_abs_path(&path) {
                 return;
             }
         }
 
-        let worktree = ctx.add_model(|ctx| Worktree::new(ctx.model_id(), path, Some(ctx)));
+        let worktree = ctx.add_model(|ctx| Worktree::new(path, ctx));
         ctx.observe(&worktree, Self::on_worktree_updated);
         self.worktrees.insert(worktree);
         ctx.notify();
@@ -125,10 +138,22 @@ impl Workspace {
 
     pub fn open_entry(
         &mut self,
-        entry: (usize, usize),
+        (worktree_id, path): (usize, Arc<Path>),
         ctx: &mut ModelContext<'_, Self>,
     ) -> anyhow::Result<Pin<Box<dyn Future<Output = OpenResult> + Send>>> {
-        if let Some(item) = self.items.get(&entry).cloned() {
+        let worktree = self
+            .worktrees
+            .get(&worktree_id)
+            .cloned()
+            .ok_or_else(|| anyhow!("worktree {} does not exist", worktree_id,))?;
+
+        let inode = worktree
+            .read(ctx)
+            .inode_for_path(&path)
+            .ok_or_else(|| anyhow!("path {:?} does not exist", path))?;
+
+        let item_key = (worktree_id, inode);
+        if let Some(item) = self.items.get(&item_key).cloned() {
             return Ok(async move {
                 match item {
                     OpenedItem::Loaded(handle) => {
@@ -146,25 +171,22 @@ impl Workspace {
             .boxed());
         }
 
-        let worktree = self
-            .worktrees
-            .get(&entry.0)
-            .cloned()
-            .ok_or(anyhow!("worktree {} does not exist", entry.0,))?;
-
         let replica_id = self.replica_id;
-        let file = worktree.file(entry.1, ctx.as_ref())?;
+        let file = worktree.file(path.clone(), ctx.as_ref())?;
         let history = file.load_history(ctx.as_ref());
-        let buffer = async move { Ok(Buffer::from_history(replica_id, file, history.await?)) };
+        // let buffer = async move { Ok(Buffer::from_history(replica_id, file, history.await?)) };
 
         let (mut tx, rx) = watch::channel(None);
-        self.items.insert(entry, OpenedItem::Loading(rx));
+        self.items.insert(item_key, OpenedItem::Loading(rx));
         ctx.spawn(
-            buffer,
-            move |me, buffer: anyhow::Result<Buffer>, ctx| match buffer {
-                Ok(buffer) => {
-                    let handle = Box::new(ctx.add_model(|_| buffer)) as Box<dyn ItemHandle>;
-                    me.items.insert(entry, OpenedItem::Loaded(handle.clone()));
+            history,
+            move |me, history: anyhow::Result<History>, ctx| match history {
+                Ok(history) => {
+                    let handle = Box::new(
+                        ctx.add_model(|ctx| Buffer::from_history(replica_id, file, history, ctx)),
+                    ) as Box<dyn ItemHandle>;
+                    me.items
+                        .insert(item_key, OpenedItem::Loaded(handle.clone()));
                     ctx.spawn(
                         async move {
                             tx.update(|value| *value = Some(Ok(handle))).await;
@@ -186,7 +208,7 @@ impl Workspace {
         )
         .detach();
 
-        self.open_entry(entry, ctx)
+        self.open_entry((worktree_id, path), ctx)
     }
 
     fn on_worktree_updated(&mut self, _: ModelHandle<Worktree>, ctx: &mut ModelContext<Self>) {
@@ -200,20 +222,20 @@ impl Entity for Workspace {
 
 #[cfg(test)]
 pub trait WorkspaceHandle {
-    fn file_entries(&self, app: &AppContext) -> Vec<(usize, usize)>;
+    fn file_entries(&self, app: &AppContext) -> Vec<(usize, Arc<Path>)>;
 }
 
 #[cfg(test)]
 impl WorkspaceHandle for ModelHandle<Workspace> {
-    fn file_entries(&self, app: &AppContext) -> Vec<(usize, usize)> {
+    fn file_entries(&self, app: &AppContext) -> Vec<(usize, Arc<Path>)> {
         self.read(app)
             .worktrees()
             .iter()
             .flat_map(|tree| {
                 let tree_id = tree.id();
                 tree.read(app)
-                    .files()
-                    .map(move |file| (tree_id, file.entry_id))
+                    .files(0)
+                    .map(move |f| (tree_id, f.path().clone()))
             })
             .collect::<Vec<_>>()
     }
@@ -237,18 +259,19 @@ mod tests {
             }));
 
             let workspace = app.add_model(|ctx| Workspace::new(vec![dir.path().into()], ctx));
-            app.finish_pending_tasks().await; // Open and populate worktree.
+            app.read(|ctx| workspace.read(ctx).worktree_scans_complete(ctx))
+                .await;
 
             // Get the first file entry.
             let tree = app.read(|ctx| workspace.read(ctx).worktrees.iter().next().unwrap().clone());
-            let entry_id = app.read(|ctx| tree.read(ctx).files().next().unwrap().entry_id);
-            let entry = (tree.id(), entry_id);
+            let path = app.read(|ctx| tree.read(ctx).files(0).next().unwrap().path().clone());
+            let entry = (tree.id(), path);
 
             // Open the same entry twice before it finishes loading.
             let (future_1, future_2) = workspace.update(&mut app, |w, app| {
                 (
-                    w.open_entry(entry, app).unwrap(),
-                    w.open_entry(entry, app).unwrap(),
+                    w.open_entry(entry.clone(), app).unwrap(),
+                    w.open_entry(entry.clone(), app).unwrap(),
                 )
             });
 

zed/src/workspace/workspace_view.rs 🔗

@@ -5,8 +5,12 @@ use gpui::{
     color::rgbu, elements::*, json::to_string_pretty, keymap::Binding, AnyViewHandle, AppContext,
     ClipboardItem, Entity, ModelHandle, MutableAppContext, View, ViewContext, ViewHandle,
 };
-use log::{error, info};
-use std::{collections::HashSet, path::PathBuf};
+use log::error;
+use std::{
+    collections::HashSet,
+    path::{Path, PathBuf},
+    sync::Arc,
+};
 
 pub fn init(app: &mut MutableAppContext) {
     app.add_action("workspace:save", WorkspaceView::save_active_item);
@@ -19,7 +23,7 @@ pub fn init(app: &mut MutableAppContext) {
 
 pub trait ItemView: View {
     fn title(&self, app: &AppContext) -> String;
-    fn entry_id(&self, app: &AppContext) -> Option<(usize, usize)>;
+    fn entry_id(&self, app: &AppContext) -> Option<(usize, Arc<Path>)>;
     fn clone_on_split(&self, _: &mut ViewContext<Self>) -> Option<Self>
     where
         Self: Sized,
@@ -42,7 +46,7 @@ pub trait ItemView: View {
 
 pub trait ItemViewHandle: Send + Sync {
     fn title(&self, app: &AppContext) -> String;
-    fn entry_id(&self, app: &AppContext) -> Option<(usize, usize)>;
+    fn entry_id(&self, app: &AppContext) -> Option<(usize, Arc<Path>)>;
     fn boxed_clone(&self) -> Box<dyn ItemViewHandle>;
     fn clone_on_split(&self, app: &mut MutableAppContext) -> Option<Box<dyn ItemViewHandle>>;
     fn set_parent_pane(&self, pane: &ViewHandle<Pane>, app: &mut MutableAppContext);
@@ -57,7 +61,7 @@ impl<T: ItemView> ItemViewHandle for ViewHandle<T> {
         self.read(app).title(app)
     }
 
-    fn entry_id(&self, app: &AppContext) -> Option<(usize, usize)> {
+    fn entry_id(&self, app: &AppContext) -> Option<(usize, Arc<Path>)> {
         self.read(app).entry_id(app)
     }
 
@@ -124,7 +128,7 @@ pub struct WorkspaceView {
     center: PaneGroup,
     panes: Vec<ViewHandle<Pane>>,
     active_pane: ViewHandle<Pane>,
-    loading_entries: HashSet<(usize, usize)>,
+    loading_entries: HashSet<(usize, Arc<Path>)>,
 }
 
 impl WorkspaceView {
@@ -189,24 +193,23 @@ impl WorkspaceView {
         }
     }
 
-    pub fn open_entry(&mut self, entry: (usize, usize), ctx: &mut ViewContext<Self>) {
+    pub fn open_entry(&mut self, entry: (usize, Arc<Path>), ctx: &mut ViewContext<Self>) {
         if self.loading_entries.contains(&entry) {
             return;
         }
 
         if self
             .active_pane()
-            .update(ctx, |pane, ctx| pane.activate_entry(entry, ctx))
+            .update(ctx, |pane, ctx| pane.activate_entry(entry.clone(), ctx))
         {
             return;
         }
 
-        self.loading_entries.insert(entry);
+        self.loading_entries.insert(entry.clone());
 
-        match self
-            .workspace
-            .update(ctx, |workspace, ctx| workspace.open_entry(entry, ctx))
-        {
+        match self.workspace.update(ctx, |workspace, ctx| {
+            workspace.open_entry(entry.clone(), ctx)
+        }) {
             Err(error) => error!("{}", error),
             Ok(item) => {
                 let settings = self.settings.clone();
@@ -227,19 +230,6 @@ impl WorkspaceView {
         }
     }
 
-    pub fn open_example_entry(&mut self, ctx: &mut ViewContext<Self>) {
-        if let Some(tree) = self.workspace.read(ctx).worktrees().iter().next() {
-            if let Some(file) = tree.read(ctx).files().next() {
-                info!("open_entry ({}, {})", tree.id(), file.entry_id);
-                self.open_entry((tree.id(), file.entry_id), ctx);
-            } else {
-                error!("No example file found for worktree {}", tree.id());
-            }
-        } else {
-            error!("No worktree found while opening example entry");
-        }
-    }
-
     pub fn save_active_item(&mut self, _: &(), ctx: &mut ViewContext<Self>) {
         self.active_pane.update(ctx, |pane, ctx| {
             if let Some(item) = pane.active_item() {
@@ -398,80 +388,59 @@ mod tests {
         App::test_async((), |mut app| async move {
             let dir = temp_tree(json!({
                 "a": {
-                    "aa": "aa contents",
-                    "ab": "ab contents",
-                    "ac": "ab contents",
+                    "file1": "contents 1",
+                    "file2": "contents 2",
+                    "file3": "contents 3",
                 },
             }));
 
             let settings = settings::channel(&app.font_cache()).unwrap().1;
             let workspace = app.add_model(|ctx| Workspace::new(vec![dir.path().into()], ctx));
-            app.finish_pending_tasks().await; // Open and populate worktree.
+            app.read(|ctx| workspace.read(ctx).worktree_scans_complete(ctx))
+                .await;
             let entries = app.read(|ctx| workspace.file_entries(ctx));
+            let file1 = entries[0].clone();
+            let file2 = entries[1].clone();
+            let file3 = entries[2].clone();
 
             let (_, workspace_view) =
                 app.add_window(|ctx| WorkspaceView::new(workspace.clone(), settings, ctx));
+            let pane = app.read(|ctx| workspace_view.read(ctx).active_pane().clone());
 
             // Open the first entry
-            workspace_view.update(&mut app, |w, ctx| w.open_entry(entries[0], ctx));
-            app.finish_pending_tasks().await;
-
-            app.read(|ctx| {
-                assert_eq!(
-                    workspace_view
-                        .read(ctx)
-                        .active_pane()
-                        .read(ctx)
-                        .items()
-                        .len(),
-                    1
-                )
-            });
+            workspace_view.update(&mut app, |w, ctx| w.open_entry(file1.clone(), ctx));
+            pane.condition(&app, |pane, _| pane.items().len() == 1)
+                .await;
 
             // Open the second entry
-            workspace_view.update(&mut app, |w, ctx| w.open_entry(entries[1], ctx));
-            app.finish_pending_tasks().await;
-
+            workspace_view.update(&mut app, |w, ctx| w.open_entry(file2.clone(), ctx));
+            pane.condition(&app, |pane, _| pane.items().len() == 2)
+                .await;
             app.read(|ctx| {
-                let active_pane = workspace_view.read(ctx).active_pane().read(ctx);
-                assert_eq!(active_pane.items().len(), 2);
+                let pane = pane.read(ctx);
                 assert_eq!(
-                    active_pane.active_item().unwrap().entry_id(ctx),
-                    Some(entries[1])
+                    pane.active_item().unwrap().entry_id(ctx),
+                    Some(file2.clone())
                 );
             });
 
             // Open the first entry again
-            workspace_view.update(&mut app, |w, ctx| w.open_entry(entries[0], ctx));
-            app.finish_pending_tasks().await;
-
+            workspace_view.update(&mut app, |w, ctx| w.open_entry(file1.clone(), ctx));
+            pane.condition(&app, move |pane, ctx| {
+                pane.active_item().unwrap().entry_id(ctx) == Some(file1.clone())
+            })
+            .await;
             app.read(|ctx| {
-                let active_pane = workspace_view.read(ctx).active_pane().read(ctx);
-                assert_eq!(active_pane.items().len(), 2);
-                assert_eq!(
-                    active_pane.active_item().unwrap().entry_id(ctx),
-                    Some(entries[0])
-                );
+                assert_eq!(pane.read(ctx).items().len(), 2);
             });
 
             // Open the third entry twice concurrently
             workspace_view.update(&mut app, |w, ctx| {
-                w.open_entry(entries[2], ctx);
-                w.open_entry(entries[2], ctx);
-            });
-            app.finish_pending_tasks().await;
-
-            app.read(|ctx| {
-                assert_eq!(
-                    workspace_view
-                        .read(ctx)
-                        .active_pane()
-                        .read(ctx)
-                        .items()
-                        .len(),
-                    3
-                );
+                w.open_entry(file3.clone(), ctx);
+                w.open_entry(file3.clone(), ctx);
             });
+            pane.condition(&app, |pane, _| pane.items().len() == 3)
+                .await;
         });
     }
 
@@ -482,44 +451,45 @@ mod tests {
 
             let dir = temp_tree(json!({
                 "a": {
-                    "aa": "aa contents",
-                    "ab": "ab contents",
-                    "ac": "ab contents",
+                    "file1": "contents 1",
+                    "file2": "contents 2",
+                    "file3": "contents 3",
                 },
             }));
 
             let settings = settings::channel(&app.font_cache()).unwrap().1;
             let workspace = app.add_model(|ctx| Workspace::new(vec![dir.path().into()], ctx));
-            app.finish_pending_tasks().await; // Open and populate worktree.
+            app.read(|ctx| workspace.read(ctx).worktree_scans_complete(ctx))
+                .await;
             let entries = app.read(|ctx| workspace.file_entries(ctx));
+            let file1 = entries[0].clone();
 
             let (window_id, workspace_view) =
                 app.add_window(|ctx| WorkspaceView::new(workspace.clone(), settings, ctx));
-
-            workspace_view.update(&mut app, |w, ctx| w.open_entry(entries[0], ctx));
-            app.finish_pending_tasks().await;
-
             let pane_1 = app.read(|ctx| workspace_view.read(ctx).active_pane().clone());
 
+            workspace_view.update(&mut app, |w, ctx| w.open_entry(file1.clone(), ctx));
+            {
+                let file1 = file1.clone();
+                pane_1
+                    .condition(&app, move |pane, ctx| {
+                        pane.active_item().and_then(|i| i.entry_id(ctx)) == Some(file1.clone())
+                    })
+                    .await;
+            }
+
             app.dispatch_action(window_id, vec![pane_1.id()], "pane:split_right", ());
             app.update(|ctx| {
                 let pane_2 = workspace_view.read(ctx).active_pane().clone();
                 assert_ne!(pane_1, pane_2);
 
-                assert_eq!(
-                    pane_2
-                        .read(ctx)
-                        .active_item()
-                        .unwrap()
-                        .entry_id(ctx.as_ref()),
-                    Some(entries[0])
-                );
+                let pane2_item = pane_2.read(ctx).active_item().unwrap();
+                assert_eq!(pane2_item.entry_id(ctx.as_ref()), Some(file1.clone()));
 
                 ctx.dispatch_action(window_id, vec![pane_2.id()], "pane:close_active_item", ());
-
-                let w = workspace_view.read(ctx);
-                assert_eq!(w.panes.len(), 1);
-                assert_eq!(w.active_pane(), &pane_1);
+                let workspace_view = workspace_view.read(ctx);
+                assert_eq!(workspace_view.panes.len(), 1);
+                assert_eq!(workspace_view.active_pane(), &pane_1);
             });
         });
     }

zed/src/worktree.rs 🔗

@@ -0,0 +1,1720 @@
+mod char_bag;
+mod fuzzy;
+mod ignore;
+
+use crate::{
+    editor::{History, Snapshot as BufferSnapshot},
+    sum_tree::{self, Cursor, Edit, SeekBias, SumTree},
+};
+use ::ignore::gitignore::Gitignore;
+use anyhow::{anyhow, Context, Result};
+pub use fuzzy::{match_paths, PathMatch};
+use gpui::{scoped_pool, AppContext, Entity, ModelContext, ModelHandle, Task};
+use lazy_static::lazy_static;
+use parking_lot::Mutex;
+use postage::{
+    prelude::{Sink, Stream},
+    watch,
+};
+use smol::{channel::Sender, Timer};
+use std::{
+    cmp,
+    collections::{HashMap, HashSet},
+    ffi::{CStr, OsStr},
+    fmt, fs,
+    future::Future,
+    io::{self, Read, Write},
+    ops::{AddAssign, Deref},
+    os::unix::{ffi::OsStrExt, fs::MetadataExt},
+    path::{Path, PathBuf},
+    sync::{Arc, Weak},
+    time::Duration,
+};
+
+use self::{char_bag::CharBag, ignore::IgnoreStack};
+
+lazy_static! {
+    static ref GITIGNORE: &'static OsStr = OsStr::new(".gitignore");
+}
+
+#[derive(Clone, Debug)]
+enum ScanState {
+    Idle,
+    Scanning,
+    Err(Arc<io::Error>),
+}
+
+pub struct Worktree {
+    snapshot: Snapshot,
+    background_snapshot: Arc<Mutex<Snapshot>>,
+    handles: Arc<Mutex<HashMap<Arc<Path>, Weak<Mutex<FileHandleState>>>>>,
+    scan_state: (watch::Sender<ScanState>, watch::Receiver<ScanState>),
+    _event_stream_handle: fsevent::Handle,
+    poll_scheduled: bool,
+}
+
+#[derive(Clone)]
+pub struct FileHandle {
+    worktree: ModelHandle<Worktree>,
+    state: Arc<Mutex<FileHandleState>>,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+struct FileHandleState {
+    path: Arc<Path>,
+    is_deleted: bool,
+}
+
+impl Worktree {
+    pub fn new(path: impl Into<Arc<Path>>, ctx: &mut ModelContext<Self>) -> Self {
+        let abs_path = path.into();
+        let root_name = abs_path
+            .file_name()
+            .map_or(String::new(), |n| n.to_string_lossy().to_string() + "/");
+        let (scan_state_tx, scan_state_rx) = smol::channel::unbounded();
+        let id = ctx.model_id();
+        let snapshot = Snapshot {
+            id,
+            scan_id: 0,
+            abs_path,
+            root_name,
+            ignores: Default::default(),
+            entries: Default::default(),
+        };
+        let (event_stream, event_stream_handle) =
+            fsevent::EventStream::new(&[snapshot.abs_path.as_ref()], Duration::from_millis(100));
+
+        let background_snapshot = Arc::new(Mutex::new(snapshot.clone()));
+        let handles = Arc::new(Mutex::new(Default::default()));
+
+        let tree = Self {
+            snapshot,
+            background_snapshot: background_snapshot.clone(),
+            handles: handles.clone(),
+            scan_state: watch::channel_with(ScanState::Scanning),
+            _event_stream_handle: event_stream_handle,
+            poll_scheduled: false,
+        };
+
+        std::thread::spawn(move || {
+            let scanner = BackgroundScanner::new(background_snapshot, handles, scan_state_tx, id);
+            scanner.run(event_stream)
+        });
+
+        ctx.spawn_stream(scan_state_rx, Self::observe_scan_state, |_, _| {})
+            .detach();
+
+        tree
+    }
+
+    pub fn scan_complete(&self) -> impl Future<Output = ()> {
+        let mut scan_state_rx = self.scan_state.1.clone();
+        async move {
+            let mut scan_state = Some(scan_state_rx.borrow().clone());
+            while let Some(ScanState::Scanning) = scan_state {
+                scan_state = scan_state_rx.recv().await;
+            }
+        }
+    }
+
+    pub fn next_scan_complete(&self) -> impl Future<Output = ()> {
+        let mut scan_state_rx = self.scan_state.1.clone();
+        let mut did_scan = matches!(*scan_state_rx.borrow(), ScanState::Scanning);
+        async move {
+            loop {
+                if let ScanState::Scanning = *scan_state_rx.borrow() {
+                    did_scan = true;
+                } else if did_scan {
+                    break;
+                }
+                scan_state_rx.recv().await;
+            }
+        }
+    }
+
+    fn observe_scan_state(&mut self, scan_state: ScanState, ctx: &mut ModelContext<Self>) {
+        let _ = self.scan_state.0.blocking_send(scan_state);
+        self.poll_entries(ctx);
+    }
+
+    fn poll_entries(&mut self, ctx: &mut ModelContext<Self>) {
+        self.snapshot = self.background_snapshot.lock().clone();
+        ctx.notify();
+
+        if self.is_scanning() && !self.poll_scheduled {
+            ctx.spawn(Timer::after(Duration::from_millis(100)), |this, _, ctx| {
+                this.poll_scheduled = false;
+                this.poll_entries(ctx);
+            })
+            .detach();
+            self.poll_scheduled = true;
+        }
+    }
+
+    fn is_scanning(&self) -> bool {
+        if let ScanState::Scanning = *self.scan_state.1.borrow() {
+            true
+        } else {
+            false
+        }
+    }
+
+    pub fn snapshot(&self) -> Snapshot {
+        self.snapshot.clone()
+    }
+
+    pub fn contains_abs_path(&self, path: &Path) -> bool {
+        path.starts_with(&self.snapshot.abs_path)
+    }
+
+    pub fn load_history(
+        &self,
+        path: &Path,
+        ctx: &AppContext,
+    ) -> impl Future<Output = Result<History>> {
+        let abs_path = self.snapshot.abs_path.join(path);
+        ctx.background_executor().spawn(async move {
+            let mut file = std::fs::File::open(&abs_path)?;
+            let mut base_text = String::new();
+            file.read_to_string(&mut base_text)?;
+            Ok(History::new(Arc::from(base_text)))
+        })
+    }
+
+    pub fn save<'a>(
+        &self,
+        path: &Path,
+        content: BufferSnapshot,
+        ctx: &AppContext,
+    ) -> Task<Result<()>> {
+        let abs_path = self.snapshot.abs_path.join(path);
+        ctx.background_executor().spawn(async move {
+            let buffer_size = content.text_summary().bytes.min(10 * 1024);
+            let file = std::fs::File::create(&abs_path)?;
+            let mut writer = std::io::BufWriter::with_capacity(buffer_size, file);
+            for chunk in content.fragments() {
+                writer.write(chunk.as_bytes())?;
+            }
+            writer.flush()?;
+            Ok(())
+        })
+    }
+}
+
+impl Entity for Worktree {
+    type Event = ();
+}
+
impl Deref for Worktree {
    type Target = Snapshot;

    // Dereferencing a worktree yields its current snapshot, so all
    // `Snapshot` accessors can be called directly on a `Worktree`.
    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
+
+impl fmt::Debug for Worktree {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.snapshot.fmt(f)
+    }
+}
+
/// A point-in-time view of a worktree's entries.
#[derive(Clone)]
pub struct Snapshot {
    id: usize,
    // Incremented on every scan pass (initial scan and each event batch);
    // used to detect which ignore files changed during the current pass.
    scan_id: usize,
    // Absolute path of the worktree root on disk.
    abs_path: Arc<Path>,
    root_name: String,
    // Parsed `.gitignore` data keyed by the containing directory's relative
    // path, paired with the scan_id at which it was last (re)loaded.
    ignores: HashMap<Arc<Path>, (Arc<Gitignore>, usize)>,
    // All entries, ordered by relative path.
    entries: SumTree<Entry>,
}
+
impl Snapshot {
    /// Total number of file entries, including gitignored ones.
    pub fn file_count(&self) -> usize {
        self.entries.summary().file_count
    }

    /// Number of file entries not excluded by gitignore rules.
    pub fn visible_file_count(&self) -> usize {
        self.entries.summary().visible_file_count
    }

    /// Iterates all files in path order, beginning at file index `start`.
    pub fn files(&self, start: usize) -> FileIter {
        FileIter::all(self, start)
    }

    #[cfg(test)]
    pub fn paths(&self) -> impl Iterator<Item = &Arc<Path>> {
        // The cursor starts before the first entry; advance once so the
        // `map` below visits every entry from the beginning.
        let mut cursor = self.entries.cursor::<(), ()>();
        cursor.next();
        cursor.map(|entry| entry.path())
    }

    /// Iterates non-ignored files in path order, beginning at visible-file
    /// index `start`.
    pub fn visible_files(&self, start: usize) -> FileIter {
        FileIter::visible(self, start)
    }

    /// Iterates the direct children of the directory at `path`.
    fn child_entries<'a>(&'a self, path: &'a Path) -> ChildEntriesIter<'a> {
        ChildEntriesIter::new(path, self)
    }

    /// The entry for the worktree root (the empty relative path).
    ///
    /// Panics if the root entry has not been inserted yet.
    pub fn root_entry(&self) -> &Entry {
        self.entry_for_path("").unwrap()
    }

    /// Returns the filename of the snapshot's root directory,
    /// with a trailing slash.
    pub fn root_name(&self) -> &str {
        &self.root_name
    }

    /// Finds the entry whose relative path is exactly `path`, if any.
    fn entry_for_path(&self, path: impl AsRef<Path>) -> Option<&Entry> {
        let mut cursor = self.entries.cursor::<_, ()>();
        if cursor.seek(&PathSearch::Exact(path.as_ref()), SeekBias::Left) {
            cursor.item()
        } else {
            None
        }
    }

    /// The inode of the entry at `path`, if the path exists in the tree.
    pub fn inode_for_path(&self, path: impl AsRef<Path>) -> Option<u64> {
        self.entry_for_path(path.as_ref()).map(|e| e.inode())
    }

    /// Inserts (or replaces) a single entry. If the entry is a `.gitignore`
    /// file, its rules are parsed and recorded for the containing directory
    /// under the current scan_id.
    fn insert_entry(&mut self, entry: Entry) {
        if !entry.is_dir() && entry.path().file_name() == Some(&GITIGNORE) {
            let (ignore, err) = Gitignore::new(self.abs_path.join(entry.path()));
            if let Some(err) = err {
                log::error!("error in ignore file {:?} - {:?}", entry.path(), err);
            }

            let ignore_dir_path = entry.path().parent().unwrap();
            self.ignores
                .insert(ignore_dir_path.into(), (Arc::new(ignore), self.scan_id));
        }
        self.entries.insert(entry);
    }

    /// Records the scanned children of `parent_path`, transitioning the
    /// parent from `PendingDir` to `Dir` and storing its `.gitignore`
    /// (if one was found) under the current scan_id.
    ///
    /// Panics if `parent_path` is missing or is not a `PendingDir`.
    fn populate_dir(
        &mut self,
        parent_path: Arc<Path>,
        entries: impl IntoIterator<Item = Entry>,
        ignore: Option<Arc<Gitignore>>,
    ) {
        let mut edits = Vec::new();

        let mut parent_entry = self
            .entries
            .get(&PathKey(parent_path.clone()))
            .unwrap()
            .clone();
        if let Some(ignore) = ignore {
            self.ignores.insert(parent_path, (ignore, self.scan_id));
        }
        if matches!(parent_entry.kind, EntryKind::PendingDir) {
            parent_entry.kind = EntryKind::Dir;
        } else {
            unreachable!();
        }
        edits.push(Edit::Insert(parent_entry));

        for entry in entries {
            edits.push(Edit::Insert(entry));
        }
        self.entries.edit(edits);
    }

    /// Removes the entry at `path` along with its entire subtree. If the
    /// removed file was a `.gitignore`, the parent directory's ignore data
    /// is stamped with the current scan_id so its statuses get refreshed.
    fn remove_path(&mut self, path: &Path) {
        let new_entries = {
            // Keep everything before `path`, skip over `path`'s subtree,
            // then append everything after it.
            let mut cursor = self.entries.cursor::<_, ()>();
            let mut new_entries = cursor.slice(&PathSearch::Exact(path), SeekBias::Left);
            cursor.seek_forward(&PathSearch::Successor(path), SeekBias::Left);
            new_entries.push_tree(cursor.suffix());
            new_entries
        };
        self.entries = new_entries;

        if path.file_name() == Some(&GITIGNORE) {
            if let Some((_, scan_id)) = self.ignores.get_mut(path.parent().unwrap()) {
                *scan_id = self.scan_id;
            }
        }
    }

    /// Builds the stack of gitignore rules that apply at `path` by walking
    /// its ancestors from the root downward.
    fn ignore_stack_for_path(&self, path: &Path, is_dir: bool) -> Arc<IgnoreStack> {
        let mut new_ignores = Vec::new();
        for ancestor in path.ancestors().skip(1) {
            if let Some((ignore, _)) = self.ignores.get(ancestor) {
                new_ignores.push((ancestor, Some(ignore.clone())));
            } else {
                new_ignores.push((ancestor, None));
            }
        }

        let mut ignore_stack = IgnoreStack::none();
        for (parent_path, ignore) in new_ignores.into_iter().rev() {
            // Once any ancestor directory is itself ignored, everything
            // below it is ignored; no further rules need to be consulted.
            if ignore_stack.is_path_ignored(&parent_path, true) {
                ignore_stack = IgnoreStack::all();
                break;
            } else if let Some(ignore) = ignore {
                ignore_stack = ignore_stack.append(Arc::from(parent_path), ignore);
            }
        }

        if ignore_stack.is_path_ignored(path, is_dir) {
            ignore_stack = IgnoreStack::all();
        }

        ignore_stack
    }
}
+
+impl fmt::Debug for Snapshot {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        for entry in self.entries.cursor::<(), ()>() {
+            for _ in entry.path().ancestors().skip(1) {
+                write!(f, " ")?;
+            }
+            writeln!(f, "{:?} (inode: {})", entry.path(), entry.inode())?;
+        }
+        Ok(())
+    }
+}
+
impl FileHandle {
    /// The file's current path, relative to its worktree root. This can
    /// change over time if the file is renamed on disk.
    pub fn path(&self) -> Arc<Path> {
        self.state.lock().path.clone()
    }

    /// Whether the background scanner has observed this file's deletion.
    pub fn is_deleted(&self) -> bool {
        self.state.lock().is_deleted
    }

    /// Loads the file's contents as a buffer `History` via the worktree.
    pub fn load_history(&self, ctx: &AppContext) -> impl Future<Output = Result<History>> {
        self.worktree.read(ctx).load_history(&self.path(), ctx)
    }

    /// Saves `content` to the file via the worktree.
    // NOTE(review): the `'a` lifetime parameter is unused and could be removed.
    pub fn save<'a>(&self, content: BufferSnapshot, ctx: &AppContext) -> Task<Result<()>> {
        let worktree = self.worktree.read(ctx);
        worktree.save(&self.path(), content, ctx)
    }

    /// A (worktree id, path) pair identifying the underlying entry.
    pub fn entry_id(&self) -> (usize, Arc<Path>) {
        (self.worktree.id(), self.path())
    }

    /// Invokes `callback` whenever the worktree updates *and* this file's
    /// state (path or deletion flag) has changed since the last invocation.
    pub fn observe_from_model<T: Entity>(
        &self,
        ctx: &mut ModelContext<T>,
        mut callback: impl FnMut(&mut T, FileHandle, &mut ModelContext<T>) + 'static,
    ) {
        let mut prev_state = self.state.lock().clone();
        // Hold only a weak reference so this observation does not keep the
        // handle's state alive after all strong handles are dropped.
        let cur_state = Arc::downgrade(&self.state);
        ctx.observe(&self.worktree, move |observer, worktree, ctx| {
            if let Some(cur_state) = cur_state.upgrade() {
                let cur_state_unlocked = cur_state.lock();
                if *cur_state_unlocked != prev_state {
                    prev_state = cur_state_unlocked.clone();
                    // Release the lock before invoking the callback, which
                    // receives a handle to the same state and may lock it.
                    drop(cur_state_unlocked);
                    callback(
                        observer,
                        FileHandle {
                            worktree,
                            state: cur_state,
                        },
                        ctx,
                    );
                }
            }
        });
    }
}
+
/// A single file or directory within a worktree snapshot.
#[derive(Clone, Debug)]
pub struct Entry {
    kind: EntryKind,
    // Path relative to the worktree root.
    path: Arc<Path>,
    inode: u64,
    is_symlink: bool,
    // True when some `.gitignore` rule excludes this entry.
    is_ignored: bool,
}
+
#[derive(Clone, Debug)]
pub enum EntryKind {
    /// A directory that has been discovered but whose children have not
    /// yet been scanned.
    PendingDir,
    /// A fully scanned directory.
    Dir,
    /// A file, carrying the lowercased characters of its path
    /// (see `BackgroundScanner::char_bag`).
    File(CharBag),
}
+
+impl Entry {
+    pub fn path(&self) -> &Arc<Path> {
+        &self.path
+    }
+
+    pub fn inode(&self) -> u64 {
+        self.inode
+    }
+
+    pub fn is_ignored(&self) -> bool {
+        self.is_ignored
+    }
+
+    fn is_dir(&self) -> bool {
+        matches!(self.kind, EntryKind::Dir | EntryKind::PendingDir)
+    }
+}
+
+impl sum_tree::Item for Entry {
+    type Summary = EntrySummary;
+
+    fn summary(&self) -> Self::Summary {
+        let file_count;
+        let visible_file_count;
+        if matches!(self.kind, EntryKind::File(_)) {
+            file_count = 1;
+            if self.is_ignored {
+                visible_file_count = 0;
+            } else {
+                visible_file_count = 1;
+            }
+        } else {
+            file_count = 0;
+            visible_file_count = 0;
+        }
+
+        EntrySummary {
+            max_path: self.path().clone(),
+            file_count,
+            visible_file_count,
+        }
+    }
+}
+
impl sum_tree::KeyedItem for Entry {
    type Key = PathKey;

    // Entries are keyed (and therefore ordered in the tree) by relative path.
    fn key(&self) -> Self::Key {
        PathKey(self.path().clone())
    }
}
+
/// Aggregate data over a subtree of entries: the greatest path it contains
/// plus file counts, enabling cursor seeks by path or by file index.
#[derive(Clone, Debug)]
pub struct EntrySummary {
    max_path: Arc<Path>,
    file_count: usize,
    // Count of files not excluded by gitignore rules.
    visible_file_count: usize,
}
+
+impl Default for EntrySummary {
+    fn default() -> Self {
+        Self {
+            max_path: Arc::from(Path::new("")),
+            file_count: 0,
+            visible_file_count: 0,
+        }
+    }
+}
+
impl<'a> AddAssign<&'a EntrySummary> for EntrySummary {
    // Summaries combine left-to-right: the right-hand side's `max_path`
    // wins (entries are path-ordered), and the counts accumulate.
    fn add_assign(&mut self, rhs: &'a EntrySummary) {
        self.max_path = rhs.max_path.clone();
        self.file_count += rhs.file_count;
        self.visible_file_count += rhs.visible_file_count;
    }
}
+
/// Ordering key for entries in the sum tree: the entry's relative path.
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
pub struct PathKey(Arc<Path>);
+
+impl Default for PathKey {
+    fn default() -> Self {
+        Self(Path::new("").into())
+    }
+}
+
impl<'a> sum_tree::Dimension<'a, EntrySummary> for PathKey {
    // The accumulated key is simply the maximum path seen so far.
    fn add_summary(&mut self, summary: &'a EntrySummary) {
        self.0 = summary.max_path.clone();
    }
}
+
/// Seek target for path-based cursor operations.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum PathSearch<'a> {
    /// Seek to the entry with exactly this path.
    Exact(&'a Path),
    /// Seek just past the entire subtree rooted at this path
    /// (see the `Ord` impl below).
    Successor(&'a Path),
}
+
impl<'a> Ord for PathSearch<'a> {
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        match (self, other) {
            (Self::Exact(a), Self::Exact(b)) => a.cmp(b),
            (Self::Successor(a), Self::Exact(b)) => {
                // The successor of `a` sorts after every path within `a`'s
                // subtree; outside that subtree it compares like `a` itself.
                if b.starts_with(a) {
                    cmp::Ordering::Greater
                } else {
                    a.cmp(b)
                }
            }
            // NOTE(review): Exact-vs-Successor and Successor-vs-Successor
            // comparisons are unimplemented and will panic if ever reached;
            // current call sites apparently never produce them — verify
            // before adding new uses of `PathSearch`.
            _ => todo!("not sure we need the other two cases"),
        }
    }
}
+
impl<'a> PartialOrd for PathSearch<'a> {
    // Delegates to `Ord`, keeping the two orderings consistent.
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        Some(self.cmp(other))
    }
}
+
+impl<'a> Default for PathSearch<'a> {
+    fn default() -> Self {
+        Self::Exact(Path::new("").into())
+    }
+}
+
impl<'a: 'b, 'b> sum_tree::Dimension<'a, EntrySummary> for PathSearch<'b> {
    // Tracks the maximum path seen so far, represented as an `Exact` target.
    fn add_summary(&mut self, summary: &'a EntrySummary) {
        *self = Self::Exact(summary.max_path.as_ref());
    }
}
+
/// Cursor dimension counting all file entries (ignored or not).
#[derive(Copy, Clone, Default, Debug, Eq, PartialEq, Ord, PartialOrd)]
pub struct FileCount(usize);
+
impl<'a> sum_tree::Dimension<'a, EntrySummary> for FileCount {
    // Accumulates the number of files to the left of the cursor.
    fn add_summary(&mut self, summary: &'a EntrySummary) {
        self.0 += summary.file_count;
    }
}
+
/// Cursor dimension counting only file entries not excluded by gitignore.
#[derive(Copy, Clone, Default, Debug, Eq, PartialEq, Ord, PartialOrd)]
pub struct VisibleFileCount(usize);
+
impl<'a> sum_tree::Dimension<'a, EntrySummary> for VisibleFileCount {
    // Accumulates the number of non-ignored files to the left of the cursor.
    fn add_summary(&mut self, summary: &'a EntrySummary) {
        self.0 += summary.visible_file_count;
    }
}
+
/// Performs the initial recursive scan of the worktree and then applies
/// file system events to the shared snapshot, using a dedicated thread pool.
struct BackgroundScanner {
    // Snapshot shared with the foreground `Worktree`.
    snapshot: Arc<Mutex<Snapshot>>,
    // Channel used to report scan progress back to the `Worktree`.
    notify: Sender<ScanState>,
    // Live file handles keyed by path, updated on renames and deletions.
    handles: Arc<Mutex<HashMap<Arc<Path>, Weak<Mutex<FileHandleState>>>>>,
    // Mount points of other volumes; the scanner refuses to descend into them.
    other_mount_paths: HashSet<PathBuf>,
    thread_pool: scoped_pool::Pool,
    // Lowercased characters of the root name, seeded into every file's CharBag.
    root_char_bag: CharBag,
}
+
impl BackgroundScanner {
    /// Creates a scanner that shares `snapshot` and `handles` with the
    /// foreground `Worktree` and reports progress through `notify`.
    fn new(
        snapshot: Arc<Mutex<Snapshot>>,
        handles: Arc<Mutex<HashMap<Arc<Path>, Weak<Mutex<FileHandleState>>>>>,
        notify: Sender<ScanState>,
        worktree_id: usize,
    ) -> Self {
        // Seed every file's CharBag with the lowercased root name.
        let root_char_bag = snapshot
            .lock()
            .root_name
            .chars()
            .map(|c| c.to_ascii_lowercase())
            .collect();
        let mut scanner = Self {
            root_char_bag,
            snapshot,
            notify,
            handles,
            other_mount_paths: Default::default(),
            thread_pool: scoped_pool::Pool::new(16, format!("worktree-{}-scanner", worktree_id)),
        };
        scanner.update_other_mount_paths();
        scanner
    }

    /// Refreshes the set of mount points belonging to volumes other than
    /// the one containing this worktree's root.
    fn update_other_mount_paths(&mut self) {
        let path = self.snapshot.lock().abs_path.clone();
        self.other_mount_paths.clear();
        self.other_mount_paths.extend(
            mounted_volume_paths()
                .into_iter()
                .filter(|mount_path| !path.starts_with(mount_path)),
        );
    }

    /// The worktree root's absolute path.
    fn abs_path(&self) -> Arc<Path> {
        self.snapshot.lock().abs_path.clone()
    }

    /// An owned copy of the current shared snapshot.
    fn snapshot(&self) -> Snapshot {
        self.snapshot.lock().clone()
    }

    /// Performs the initial scan, then processes file system events from
    /// `event_stream`. A failed `notify.send` means the receiving worktree
    /// is gone, so each send error terminates the scanner.
    fn run(mut self, event_stream: fsevent::EventStream) {
        if smol::block_on(self.notify.send(ScanState::Scanning)).is_err() {
            return;
        }

        if let Err(err) = self.scan_dirs() {
            if smol::block_on(self.notify.send(ScanState::Err(Arc::new(err)))).is_err() {
                return;
            }
        }

        if smol::block_on(self.notify.send(ScanState::Idle)).is_err() {
            return;
        }

        // Returning false from the callback stops the event stream.
        event_stream.run(move |events| {
            if smol::block_on(self.notify.send(ScanState::Scanning)).is_err() {
                return false;
            }

            if !self.process_events(events) {
                return false;
            }

            if smol::block_on(self.notify.send(ScanState::Idle)).is_err() {
                return false;
            }

            true
        });
    }

    /// Scans the whole worktree from the root, fanning directory scans out
    /// across the thread pool through a shared work queue.
    fn scan_dirs(&self) -> io::Result<()> {
        self.snapshot.lock().scan_id += 1;

        let path: Arc<Path> = Arc::from(Path::new(""));
        let abs_path = self.abs_path();
        let metadata = fs::metadata(&abs_path)?;
        let inode = metadata.ino();
        let is_symlink = fs::symlink_metadata(&abs_path)?.file_type().is_symlink();

        if metadata.file_type().is_dir() {
            let dir_entry = Entry {
                kind: EntryKind::PendingDir,
                path: path.clone(),
                inode,
                is_symlink,
                is_ignored: false,
            };
            self.snapshot.lock().insert_entry(dir_entry);

            let (tx, rx) = crossbeam_channel::unbounded();

            tx.send(ScanJob {
                abs_path: abs_path.to_path_buf(),
                path,
                ignore_stack: IgnoreStack::none(),
                scan_queue: tx.clone(),
            })
            .unwrap();
            // Dropping our sender lets the workers' `recv` loops terminate
            // once all in-flight jobs (which hold sender clones) complete.
            drop(tx);

            self.thread_pool.scoped(|pool| {
                for _ in 0..self.thread_pool.thread_count() {
                    pool.execute(|| {
                        while let Ok(job) = rx.recv() {
                            if let Err(err) = self.scan_dir(&job) {
                                log::error!("error scanning {:?}: {}", job.abs_path, err);
                            }
                        }
                    });
                }
            });
        } else {
            // The worktree root is a single file.
            self.snapshot.lock().insert_entry(Entry {
                kind: EntryKind::File(self.char_bag(&path)),
                path,
                inode,
                is_symlink,
                is_ignored: false,
            });
        }

        Ok(())
    }

    /// Reads one directory, records its children in the snapshot, and
    /// enqueues scan jobs for any child directories it contains.
    fn scan_dir(&self, job: &ScanJob) -> io::Result<()> {
        let mut new_entries: Vec<Entry> = Vec::new();
        let mut new_jobs: Vec<ScanJob> = Vec::new();
        let mut ignore_stack = job.ignore_stack.clone();
        let mut new_ignore = None;

        for child_entry in fs::read_dir(&job.abs_path)? {
            let child_entry = child_entry?;
            let child_name = child_entry.file_name();
            let child_abs_path = job.abs_path.join(&child_name);
            let child_path: Arc<Path> = job.path.join(&child_name).into();
            let child_metadata = child_entry.metadata()?;
            let child_inode = child_metadata.ino();
            let child_is_symlink = child_metadata.file_type().is_symlink();

            // Disallow mount points outside the file system containing the root of this worktree
            if self.other_mount_paths.contains(&child_abs_path) {
                continue;
            }

            // If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored
            if child_name == *GITIGNORE {
                let (ignore, err) = Gitignore::new(&child_abs_path);
                if let Some(err) = err {
                    log::error!("error in ignore file {:?} - {:?}", child_path, err);
                }
                let ignore = Arc::new(ignore);
                ignore_stack = ignore_stack.append(job.path.clone(), ignore.clone());
                new_ignore = Some(ignore);

                // Update the ignore status of any child entries we've already
                // processed to reflect the ignore file in this directory.
                // Because `.gitignore` starts with a `.`, such entries should
                // rarely be numerous. Also update the ignore stack associated
                // with any new jobs created so far (entries and jobs were
                // pushed in the same order, so the iterators stay in sync).
                let mut new_jobs = new_jobs.iter_mut();
                for entry in &mut new_entries {
                    entry.is_ignored = ignore_stack.is_path_ignored(&entry.path, entry.is_dir());
                    if entry.is_dir() {
                        new_jobs.next().unwrap().ignore_stack = if entry.is_ignored {
                            IgnoreStack::all()
                        } else {
                            ignore_stack.clone()
                        };
                    }
                }
            }

            if child_metadata.is_dir() {
                let is_ignored = ignore_stack.is_path_ignored(&child_path, true);
                new_entries.push(Entry {
                    kind: EntryKind::PendingDir,
                    path: child_path.clone(),
                    inode: child_inode,
                    is_symlink: child_is_symlink,
                    is_ignored,
                });
                new_jobs.push(ScanJob {
                    abs_path: child_abs_path,
                    path: child_path,
                    ignore_stack: if is_ignored {
                        IgnoreStack::all()
                    } else {
                        ignore_stack.clone()
                    },
                    scan_queue: job.scan_queue.clone(),
                });
            } else {
                let is_ignored = ignore_stack.is_path_ignored(&child_path, false);
                new_entries.push(Entry {
                    kind: EntryKind::File(self.char_bag(&child_path)),
                    path: child_path,
                    inode: child_inode,
                    is_symlink: child_is_symlink,
                    is_ignored,
                });
            };
        }

        self.snapshot
            .lock()
            .populate_dir(job.path.clone(), new_entries, new_ignore);
        for new_job in new_jobs {
            job.scan_queue.send(new_job).unwrap();
        }

        Ok(())
    }

    /// Applies a batch of fsevents to the snapshot: updates renamed file
    /// handles, removes and rescans affected subtrees, refreshes ignore
    /// statuses, and flags deleted files' handles. Returns false if the
    /// worktree root itself can no longer be resolved.
    fn process_events(&mut self, mut events: Vec<fsevent::Event>) -> bool {
        self.update_other_mount_paths();

        // Mutate a private copy and publish it when done, so readers never
        // see a half-applied event batch.
        let mut snapshot = self.snapshot();
        snapshot.scan_id += 1;

        let root_abs_path = if let Ok(abs_path) = snapshot.abs_path.canonicalize() {
            abs_path
        } else {
            return false;
        };

        // Pair rename events: the first sighting of an inode records its old
        // path; a later event for the same inode that still exists on disk
        // reveals the new path, and any open handles under the old path are
        // re-pointed at the new one.
        let mut renamed_paths: HashMap<u64, PathBuf> = HashMap::new();
        let mut updated_handles = HashMap::new();
        for event in &events {
            if event.flags.contains(fsevent::StreamFlags::ITEM_RENAMED) {
                if let Ok(path) = event.path.strip_prefix(&root_abs_path) {
                    if let Some(inode) = snapshot.inode_for_path(path) {
                        renamed_paths.insert(inode, path.to_path_buf());
                    } else if let Ok(metadata) = fs::metadata(&event.path) {
                        let new_path = path;
                        let mut handles = self.handles.lock();
                        if let Some(old_path) = renamed_paths.get(&metadata.ino()) {
                            handles.retain(|handle_path, handle_state| {
                                if let Ok(path_suffix) = handle_path.strip_prefix(&old_path) {
                                    let new_handle_path: Arc<Path> =
                                        if path_suffix.file_name().is_some() {
                                            new_path.join(path_suffix)
                                        } else {
                                            new_path.to_path_buf()
                                        }
                                        .into();
                                    if let Some(handle_state) = Weak::upgrade(&handle_state) {
                                        handle_state.lock().path = new_handle_path.clone();
                                        updated_handles
                                            .insert(new_handle_path, Arc::downgrade(&handle_state));
                                    }
                                    false
                                } else {
                                    true
                                }
                            });
                            handles.extend(updated_handles.drain());
                        }
                    }
                }
            }
        }

        events.sort_unstable_by(|a, b| a.path.cmp(&b.path));
        let mut abs_paths = events.into_iter().map(|e| e.path).peekable();
        let (scan_queue_tx, scan_queue_rx) = crossbeam_channel::unbounded();

        while let Some(abs_path) = abs_paths.next() {
            let path = match abs_path.strip_prefix(&root_abs_path) {
                Ok(path) => Arc::from(path.to_path_buf()),
                Err(_) => {
                    log::error!(
                        "unexpected event {:?} for root path {:?}",
                        abs_path,
                        root_abs_path
                    );
                    continue;
                }
            };

            // Events inside a subtree we are about to rescan are redundant;
            // sorting above made them adjacent, so skip them here.
            while abs_paths.peek().map_or(false, |p| p.starts_with(&abs_path)) {
                abs_paths.next();
            }

            snapshot.remove_path(&path);

            match self.fs_entry_for_path(path.clone(), &abs_path) {
                Ok(Some(mut fs_entry)) => {
                    let is_dir = fs_entry.is_dir();
                    let ignore_stack = snapshot.ignore_stack_for_path(&path, is_dir);
                    fs_entry.is_ignored = ignore_stack.is_all();
                    snapshot.insert_entry(fs_entry);
                    if is_dir {
                        scan_queue_tx
                            .send(ScanJob {
                                abs_path,
                                path,
                                ignore_stack,
                                scan_queue: scan_queue_tx.clone(),
                            })
                            .unwrap();
                    }
                }
                Ok(None) => {}
                Err(err) => {
                    // TODO - create a special 'error' entry in the entries tree to mark this
                    log::error!("error reading file on event {:?}", err);
                }
            }
        }

        *self.snapshot.lock() = snapshot;

        // Scan any directories that were created as part of this event batch.
        drop(scan_queue_tx);
        self.thread_pool.scoped(|pool| {
            for _ in 0..self.thread_pool.thread_count() {
                pool.execute(|| {
                    while let Ok(job) = scan_queue_rx.recv() {
                        if let Err(err) = self.scan_dir(&job) {
                            log::error!("error scanning {:?}: {}", job.abs_path, err);
                        }
                    }
                });
            }
        });

        self.update_ignore_statuses();

        // Mark handles whose paths vanished as deleted, and drop dead
        // (no longer referenced) handle entries from the map.
        let mut handles = self.handles.lock();
        let snapshot = self.snapshot.lock();
        handles.retain(|path, handle_state| {
            if let Some(handle_state) = Weak::upgrade(&handle_state) {
                if snapshot.entry_for_path(&path).is_none() {
                    handle_state.lock().is_deleted = true;
                }
                true
            } else {
                false
            }
        });

        true
    }

    /// Recomputes `is_ignored` flags after this scan pass: drops ignore data
    /// whose `.gitignore` file disappeared, and re-walks the subtrees of
    /// directories whose ignore file changed in this pass.
    fn update_ignore_statuses(&self) {
        let mut snapshot = self.snapshot();

        let mut ignores_to_update = Vec::new();
        let mut ignores_to_delete = Vec::new();
        for (parent_path, (_, scan_id)) in &snapshot.ignores {
            // scan_id equal to the current pass means this ignore file was
            // (re)loaded or touched during this pass.
            if *scan_id == snapshot.scan_id && snapshot.entry_for_path(parent_path).is_some() {
                ignores_to_update.push(parent_path.clone());
            }

            let ignore_path = parent_path.join(&*GITIGNORE);
            if snapshot.entry_for_path(ignore_path).is_none() {
                ignores_to_delete.push(parent_path.clone());
            }
        }

        for parent_path in ignores_to_delete {
            snapshot.ignores.remove(&parent_path);
            self.snapshot.lock().ignores.remove(&parent_path);
        }

        let (ignore_queue_tx, ignore_queue_rx) = crossbeam_channel::unbounded();
        ignores_to_update.sort_unstable();
        let mut ignores_to_update = ignores_to_update.into_iter().peekable();
        while let Some(parent_path) = ignores_to_update.next() {
            // Nested directories will be visited while processing their
            // ancestor, so skip them here to avoid duplicate work.
            while ignores_to_update
                .peek()
                .map_or(false, |p| p.starts_with(&parent_path))
            {
                ignores_to_update.next().unwrap();
            }

            let ignore_stack = snapshot.ignore_stack_for_path(&parent_path, true);
            ignore_queue_tx
                .send(UpdateIgnoreStatusJob {
                    path: parent_path,
                    ignore_stack,
                    ignore_queue: ignore_queue_tx.clone(),
                })
                .unwrap();
        }
        drop(ignore_queue_tx);

        self.thread_pool.scoped(|scope| {
            for _ in 0..self.thread_pool.thread_count() {
                scope.execute(|| {
                    while let Ok(job) = ignore_queue_rx.recv() {
                        self.update_ignore_status(job, &snapshot);
                    }
                });
            }
        });
    }

    /// Re-evaluates the ignore status of the direct children of `job.path`,
    /// writing back only the entries whose status changed, and enqueues a
    /// job for each child directory.
    fn update_ignore_status(&self, job: UpdateIgnoreStatusJob, snapshot: &Snapshot) {
        let mut ignore_stack = job.ignore_stack;
        if let Some((ignore, _)) = snapshot.ignores.get(&job.path) {
            ignore_stack = ignore_stack.append(job.path.clone(), ignore.clone());
        }

        let mut edits = Vec::new();
        for mut entry in snapshot.child_entries(&job.path).cloned() {
            let was_ignored = entry.is_ignored;
            entry.is_ignored = ignore_stack.is_path_ignored(entry.path(), entry.is_dir());
            if entry.is_dir() {
                let child_ignore_stack = if entry.is_ignored {
                    IgnoreStack::all()
                } else {
                    ignore_stack.clone()
                };
                job.ignore_queue
                    .send(UpdateIgnoreStatusJob {
                        path: entry.path().clone(),
                        ignore_stack: child_ignore_stack,
                        ignore_queue: job.ignore_queue.clone(),
                    })
                    .unwrap();
            }

            if entry.is_ignored != was_ignored {
                edits.push(Edit::Insert(entry));
            }
        }
        self.snapshot.lock().entries.edit(edits);
    }

    /// Stats `abs_path` and builds an `Entry` for it. Returns `Ok(None)`
    /// when the path no longer exists (or a path component turned out not
    /// to be a directory), which is normal while processing stale events.
    fn fs_entry_for_path(&self, path: Arc<Path>, abs_path: &Path) -> Result<Option<Entry>> {
        let metadata = match fs::metadata(&abs_path) {
            Err(err) => {
                return match (err.kind(), err.raw_os_error()) {
                    (io::ErrorKind::NotFound, _) => Ok(None),
                    (io::ErrorKind::Other, Some(libc::ENOTDIR)) => Ok(None),
                    _ => Err(anyhow::Error::new(err)),
                }
            }
            Ok(metadata) => metadata,
        };
        let inode = metadata.ino();
        let is_symlink = fs::symlink_metadata(&abs_path)
            .context("failed to read symlink metadata")?
            .file_type()
            .is_symlink();

        let entry = Entry {
            kind: if metadata.file_type().is_dir() {
                EntryKind::PendingDir
            } else {
                EntryKind::File(self.char_bag(&path))
            },
            path,
            inode,
            is_symlink,
            is_ignored: false,
        };

        Ok(Some(entry))
    }

    /// Builds a CharBag for `path`: the root name's characters plus the
    /// path's characters, all lowercased.
    fn char_bag(&self, path: &Path) -> CharBag {
        let mut result = self.root_char_bag;
        result.extend(
            path.to_string_lossy()
                .chars()
                .map(|c| c.to_ascii_lowercase()),
        );
        result
    }
}
+
/// Unit of work for the parallel directory scan: one directory to read.
struct ScanJob {
    abs_path: PathBuf,
    // Path relative to the worktree root.
    path: Arc<Path>,
    // Ignore rules inherited from ancestor directories.
    ignore_stack: Arc<IgnoreStack>,
    // Sender used to enqueue child directories discovered during the scan.
    scan_queue: crossbeam_channel::Sender<ScanJob>,
}
+
/// Unit of work for the parallel ignore-status refresh: one directory whose
/// children must have their `is_ignored` flags re-evaluated.
struct UpdateIgnoreStatusJob {
    path: Arc<Path>,
    // Ignore rules in effect above `path`.
    ignore_stack: Arc<IgnoreStack>,
    // Sender used to enqueue jobs for child directories.
    ignore_queue: crossbeam_channel::Sender<UpdateIgnoreStatusJob>,
}
+
/// Convenience extension methods on `ModelHandle<Worktree>`.
pub trait WorktreeHandle {
    /// Returns a `FileHandle` for `path`, failing if the path is not
    /// present in the tree.
    fn file(&self, path: impl AsRef<Path>, app: &AppContext) -> Result<FileHandle>;
}
+
+impl WorktreeHandle for ModelHandle<Worktree> {
+    fn file(&self, path: impl AsRef<Path>, app: &AppContext) -> Result<FileHandle> {
+        let tree = self.read(app);
+        let entry = tree
+            .entry_for_path(&path)
+            .ok_or_else(|| anyhow!("path does not exist in tree"))?;
+        let path = entry.path().clone();
+        let mut handles = tree.handles.lock();
+        let state = if let Some(state) = handles.get(&path).and_then(Weak::upgrade) {
+            state
+        } else {
+            let state = Arc::new(Mutex::new(FileHandleState {
+                path: path.clone(),
+                is_deleted: false,
+            }));
+            handles.insert(path, Arc::downgrade(&state));
+            state
+        };
+
+        Ok(FileHandle {
+            worktree: self.clone(),
+            state,
+        })
+    }
+}
+
/// Iterator over a snapshot's file entries in path order — either every
/// file, or only those not excluded by gitignore rules.
pub enum FileIter<'a> {
    All(Cursor<'a, Entry, FileCount, FileCount>),
    Visible(Cursor<'a, Entry, VisibleFileCount, VisibleFileCount>),
}
+
impl<'a> FileIter<'a> {
    /// Iterator over every file entry, positioned at file index `start`.
    fn all(snapshot: &'a Snapshot, start: usize) -> Self {
        let mut cursor = snapshot.entries.cursor();
        cursor.seek(&FileCount(start), SeekBias::Right);
        Self::All(cursor)
    }

    /// Iterator over non-ignored file entries, positioned at visible-file
    /// index `start`.
    fn visible(snapshot: &'a Snapshot, start: usize) -> Self {
        let mut cursor = snapshot.entries.cursor();
        cursor.seek(&VisibleFileCount(start), SeekBias::Right);
        Self::Visible(cursor)
    }

    /// Advances the cursor to the next file counted by this iterator's
    /// dimension (all files, or visible files only).
    fn next_internal(&mut self) {
        match self {
            Self::All(cursor) => {
                let ix = *cursor.start();
                cursor.seek_forward(&FileCount(ix.0 + 1), SeekBias::Right);
            }
            Self::Visible(cursor) => {
                let ix = *cursor.start();
                cursor.seek_forward(&VisibleFileCount(ix.0 + 1), SeekBias::Right);
            }
        }
    }

    /// The entry currently under the cursor, if any.
    fn item(&self) -> Option<&'a Entry> {
        match self {
            Self::All(cursor) => cursor.item(),
            Self::Visible(cursor) => cursor.item(),
        }
    }
}
+
+impl<'a> Iterator for FileIter<'a> {
+    type Item = &'a Entry;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        // Yield the entry under the cursor, then advance for the next call.
+        if let Some(entry) = self.item() {
+            self.next_internal();
+            Some(entry)
+        } else {
+            None
+        }
+    }
+}
+
+/// Iterator over the immediate children of a directory entry in a `Snapshot`
+/// (subtrees of each child are skipped, not descended into).
+struct ChildEntriesIter<'a> {
+    parent_path: &'a Path,
+    cursor: Cursor<'a, Entry, PathSearch<'a>, ()>,
+}
+
+impl<'a> ChildEntriesIter<'a> {
+    fn new(parent_path: &'a Path, snapshot: &'a Snapshot) -> Self {
+        let mut cursor = snapshot.entries.cursor();
+        // Seek just past the parent entry itself so iteration begins at its
+        // first child (if any).
+        cursor.seek(&PathSearch::Exact(parent_path), SeekBias::Right);
+        Self {
+            parent_path,
+            cursor,
+        }
+    }
+}
+
+impl<'a> Iterator for ChildEntriesIter<'a> {
+    type Item = &'a Entry;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if let Some(item) = self.cursor.item() {
+            // Only entries beneath the parent are children; anything else
+            // means we've walked past the parent's subtree.
+            if item.path().starts_with(self.parent_path) {
+                // Seek to the successor of this child's path, which skips the
+                // child's entire subtree so only direct children are yielded.
+                self.cursor
+                    .seek_forward(&PathSearch::Successor(item.path()), SeekBias::Left);
+                Some(item)
+            } else {
+                None
+            }
+        } else {
+            None
+        }
+    }
+}
+
+/// Returns the mount points of all currently-mounted volumes, via the
+/// BSD/macOS `getmntinfo(3)` call.
+///
+/// # Panics
+/// Panics if `getmntinfo` fails. Per its man page, failure is reported by a
+/// return value of 0, not a negative value.
+fn mounted_volume_paths() -> Vec<PathBuf> {
+    unsafe {
+        let mut stat_ptr: *mut libc::statfs = std::ptr::null_mut();
+        let count = libc::getmntinfo(&mut stat_ptr as *mut _, libc::MNT_WAIT);
+        // `count >= 0` would treat failure (0) as success and pass a null
+        // pointer to `slice::from_raw_parts`, which is undefined behavior
+        // even for a zero-length slice. A successful call always reports at
+        // least the root filesystem, so `count > 0` is the success condition.
+        if count > 0 {
+            // SAFETY: on success `stat_ptr` points to `count` statfs records
+            // owned by libc, and `f_mntonname` is a NUL-terminated C string.
+            std::slice::from_raw_parts(stat_ptr, count as usize)
+                .iter()
+                .map(|stat| {
+                    PathBuf::from(OsStr::from_bytes(
+                        CStr::from_ptr(&stat.f_mntonname[0]).to_bytes(),
+                    ))
+                })
+                .collect()
+        } else {
+            panic!("failed to run getmntinfo");
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::editor::Buffer;
+    use crate::test::*;
+    use anyhow::Result;
+    use gpui::App;
+    use rand::prelude::*;
+    use serde_json::json;
+    use std::env;
+    use std::fmt::Write;
+    use std::os::unix;
+    use std::time::{SystemTime, UNIX_EPOCH};
+
+    // Verifies that the initial scan indexes a tree reached through a
+    // symlinked root, and that fuzzy path matching works on the snapshot.
+    #[test]
+    fn test_populate_and_search() {
+        App::test_async((), |mut app| async move {
+            let dir = temp_tree(json!({
+                "root": {
+                    "apple": "",
+                    "banana": {
+                        "carrot": {
+                            "date": "",
+                            "endive": "",
+                        }
+                    },
+                    "fennel": {
+                        "grape": "",
+                    }
+                }
+            }));
+
+            // Open the worktree through a symlink so the scanner must resolve
+            // the link to the real root directory.
+            let root_link_path = dir.path().join("root_link");
+            unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
+
+            let tree = app.add_model(|ctx| Worktree::new(root_link_path, ctx));
+
+            // Wait for the initial background scan before querying.
+            app.read(|ctx| tree.read(ctx).scan_complete()).await;
+            app.read(|ctx| {
+                let tree = tree.read(ctx);
+                assert_eq!(tree.file_count(), 4);
+                // Fuzzy-match "bna" against the single snapshot's file paths.
+                let results = match_paths(
+                    Some(tree.snapshot()).iter(),
+                    "bna",
+                    false,
+                    false,
+                    false,
+                    10,
+                    Default::default(),
+                    ctx.thread_pool().clone(),
+                )
+                .into_iter()
+                .map(|result| result.path)
+                .collect::<Vec<Arc<Path>>>();
+                assert_eq!(
+                    results,
+                    vec![
+                        PathBuf::from("banana/carrot/date").into(),
+                        PathBuf::from("banana/carrot/endive").into(),
+                    ]
+                );
+            })
+        });
+    }
+
+    // Saves a buffer over an existing file and verifies the file's new
+    // contents by loading its history back from disk.
+    #[test]
+    fn test_save_file() {
+        App::test_async((), |mut app| async move {
+            let dir = temp_tree(json!({
+                "file1": "the old contents",
+            }));
+
+            let tree = app.add_model(|ctx| Worktree::new(dir.path(), ctx));
+            app.read(|ctx| tree.read(ctx).scan_complete()).await;
+            app.read(|ctx| assert_eq!(tree.read(ctx).file_count(), 1));
+
+            // A large buffer (~160 KB) exercises the chunked write path.
+            let buffer =
+                app.add_model(|ctx| Buffer::new(1, "a line of text.\n".repeat(10 * 1024), ctx));
+
+            let path = tree.update(&mut app, |tree, ctx| {
+                let path = tree.files(0).next().unwrap().path().clone();
+                assert_eq!(path.file_name().unwrap(), "file1");
+                smol::block_on(tree.save(&path, buffer.read(ctx).snapshot(), ctx.as_ref()))
+                    .unwrap();
+                path
+            });
+
+            // Reloading from disk should now reflect the buffer's text.
+            let history = app
+                .read(|ctx| tree.read(ctx).load_history(&path, ctx))
+                .await
+                .unwrap();
+            app.read(|ctx| {
+                assert_eq!(history.base_text.as_ref(), buffer.read(ctx).text());
+            });
+        });
+    }
+
+    // Mutates the file system after the initial scan and verifies that the
+    // rescan updates both the snapshot's paths and any outstanding
+    // `FileHandle`s (tracking renames, and flagging deletions).
+    #[test]
+    fn test_rescan_simple() {
+        App::test_async((), |mut app| async move {
+            let dir = temp_tree(json!({
+                "a": {
+                    "file1": "",
+                    "file2": "",
+                    "file3": "",
+                },
+                "b": {
+                    "c": {
+                        "file4": "",
+                        "file5": "",
+                    }
+                }
+            }));
+
+            let tree = app.add_model(|ctx| Worktree::new(dir.path(), ctx));
+            app.read(|ctx| tree.read(ctx).scan_complete()).await;
+            app.read(|ctx| assert_eq!(tree.read(ctx).file_count(), 5));
+
+            // Grab handles before mutating, so we can observe how each one
+            // is updated by the rescan.
+            let (file2, file3, file4, file5) = app.read(|ctx| {
+                (
+                    tree.file("a/file2", ctx).unwrap(),
+                    tree.file("a/file3", ctx).unwrap(),
+                    tree.file("b/c/file4", ctx).unwrap(),
+                    tree.file("b/c/file5", ctx).unwrap(),
+                )
+            });
+
+            // Move a file across directories, delete one, rename one in
+            // place, and rename a whole directory.
+            std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
+            std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
+            std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
+            std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
+            app.read(|ctx| tree.read(ctx).next_scan_complete()).await;
+
+            app.read(|ctx| {
+                assert_eq!(
+                    tree.read(ctx)
+                        .paths()
+                        .map(|p| p.to_str().unwrap())
+                        .collect::<Vec<_>>(),
+                    vec![
+                        "a",
+                        "a/file1",
+                        "a/file2.new",
+                        "b",
+                        "d",
+                        "d/file3",
+                        "d/file4"
+                    ]
+                );
+
+                // The in-place rename and the directory rename are reflected
+                // in the surviving handles; the deleted file is flagged.
+                assert_eq!(file2.path().to_str().unwrap(), "a/file2.new");
+                assert_eq!(file4.path().as_ref(), Path::new("d/file4"));
+                assert_eq!(file5.path().as_ref(), Path::new("d/file5"));
+                assert!(!file2.is_deleted());
+                assert!(!file4.is_deleted());
+                assert!(file5.is_deleted());
+
+                // Known limitation: this cross-directory rename isn't detected
+                // because the target path ("b/c/file3") no longer exists on
+                // the file system by the time the rename event is processed
+                // (its parent dir was itself renamed to "d"), so the handle
+                // is treated as deleted instead.
+                assert_eq!(file3.path().as_ref(), Path::new("a/file3"));
+                assert!(file3.is_deleted());
+            });
+        });
+    }
+
+    // Verifies that .gitignore rules are applied both during the initial
+    // scan and to files discovered later by a rescan, and that ".git" itself
+    // is always treated as ignored.
+    #[test]
+    fn test_rescan_with_gitignore() {
+        App::test_async((), |mut app| async move {
+            let dir = temp_tree(json!({
+                ".git": {},
+                ".gitignore": "ignored-dir\n",
+                "tracked-dir": {
+                    "tracked-file1": "tracked contents",
+                },
+                "ignored-dir": {
+                    "ignored-file1": "ignored contents",
+                }
+            }));
+
+            let tree = app.add_model(|ctx| Worktree::new(dir.path(), ctx));
+            app.read(|ctx| tree.read(ctx).scan_complete()).await;
+            app.read(|ctx| {
+                let tree = tree.read(ctx);
+                let tracked = tree.entry_for_path("tracked-dir/tracked-file1").unwrap();
+                let ignored = tree.entry_for_path("ignored-dir/ignored-file1").unwrap();
+                assert_eq!(tracked.is_ignored(), false);
+                assert_eq!(ignored.is_ignored(), true);
+            });
+
+            // Files created after the initial scan must inherit the ignore
+            // status of their parent directories.
+            fs::write(dir.path().join("tracked-dir/tracked-file2"), "").unwrap();
+            fs::write(dir.path().join("ignored-dir/ignored-file2"), "").unwrap();
+            app.read(|ctx| tree.read(ctx).next_scan_complete()).await;
+            app.read(|ctx| {
+                let tree = tree.read(ctx);
+                let dot_git = tree.entry_for_path(".git").unwrap();
+                let tracked = tree.entry_for_path("tracked-dir/tracked-file2").unwrap();
+                let ignored = tree.entry_for_path("ignored-dir/ignored-file2").unwrap();
+                assert_eq!(tracked.is_ignored(), false);
+                assert_eq!(ignored.is_ignored(), true);
+                assert_eq!(dot_git.is_ignored(), true);
+            });
+        });
+    }
+
+    // Sanity-check that getmntinfo reports at least the root filesystem.
+    #[test]
+    fn test_mounted_volume_paths() {
+        let paths = mounted_volume_paths();
+        assert!(paths.contains(&"/".into()));
+    }
+
+    // Randomized stress test for the background scanner: applies random file
+    // system mutations, delivers the corresponding events in arbitrary
+    // batches, and checks that the incrementally-updated snapshot always
+    // satisfies its invariants and ultimately converges to what a fresh scan
+    // of the final tree produces.
+    //
+    // Tunable via env vars: ITERATIONS, OPERATIONS, INITIAL_ENTRIES, and
+    // SEED (to replay a single failing seed).
+    #[test]
+    fn test_random() {
+        let iterations = env::var("ITERATIONS")
+            .map(|i| i.parse().unwrap())
+            .unwrap_or(100);
+        let operations = env::var("OPERATIONS")
+            .map(|o| o.parse().unwrap())
+            .unwrap_or(40);
+        let initial_entries = env::var("INITIAL_ENTRIES")
+            .map(|o| o.parse().unwrap())
+            .unwrap_or(20);
+        let seeds = if let Ok(seed) = env::var("SEED").map(|s| s.parse().unwrap()) {
+            seed..seed + 1
+        } else {
+            0..iterations
+        };
+
+        for seed in seeds {
+            dbg!(seed);
+            let mut rng = StdRng::seed_from_u64(seed);
+
+            // Build a random initial tree (probability 1.0 forces inserts).
+            let root_dir = tempdir::TempDir::new(&format!("test-{}", seed)).unwrap();
+            for _ in 0..initial_entries {
+                randomly_mutate_tree(root_dir.path(), 1.0, &mut rng).unwrap();
+            }
+            log::info!("Generated initial tree");
+
+            // Drive a scanner directly (no event stream) from an empty snapshot.
+            let (notify_tx, _notify_rx) = smol::channel::unbounded();
+            let mut scanner = BackgroundScanner::new(
+                Arc::new(Mutex::new(Snapshot {
+                    id: 0,
+                    scan_id: 0,
+                    abs_path: root_dir.path().into(),
+                    entries: Default::default(),
+                    ignores: Default::default(),
+                    root_name: Default::default(),
+                })),
+                Arc::new(Mutex::new(Default::default())),
+                notify_tx,
+                0,
+            );
+            scanner.scan_dirs().unwrap();
+            scanner.snapshot().check_invariants();
+
+            // Interleave mutations with randomly-sized event deliveries to
+            // simulate how the OS coalesces and batches fs events.
+            let mut events = Vec::new();
+            let mut mutations_len = operations;
+            while mutations_len > 1 {
+                if !events.is_empty() && rng.gen_bool(0.4) {
+                    let len = rng.gen_range(0..=events.len());
+                    let to_deliver = events.drain(0..len).collect::<Vec<_>>();
+                    log::info!("Delivering events: {:#?}", to_deliver);
+                    scanner.process_events(to_deliver);
+                    scanner.snapshot().check_invariants();
+                } else {
+                    events.extend(randomly_mutate_tree(root_dir.path(), 0.6, &mut rng).unwrap());
+                    mutations_len -= 1;
+                }
+            }
+            // Flush any remaining undelivered events.
+            log::info!("Quiescing: {:#?}", events);
+            scanner.process_events(events);
+            scanner.snapshot().check_invariants();
+
+            // A fresh scan of the final tree must agree with the
+            // incrementally-maintained snapshot.
+            let (notify_tx, _notify_rx) = smol::channel::unbounded();
+            let new_scanner = BackgroundScanner::new(
+                Arc::new(Mutex::new(Snapshot {
+                    id: 0,
+                    scan_id: 0,
+                    abs_path: root_dir.path().into(),
+                    entries: Default::default(),
+                    ignores: Default::default(),
+                    root_name: Default::default(),
+                })),
+                Arc::new(Mutex::new(Default::default())),
+                notify_tx,
+                1,
+            );
+            new_scanner.scan_dirs().unwrap();
+            assert_eq!(scanner.snapshot().to_vec(), new_scanner.snapshot().to_vec());
+        }
+    }
+
+    /// Applies one random mutation to the tree rooted at `root_path` and
+    /// returns synthetic `fsevent::Event`s describing every touched path.
+    ///
+    /// With probability `insertion_probability` (or always, when the tree
+    /// contains nothing but the root) a new file or directory is created.
+    /// Otherwise, rarely (5%) a `.gitignore` covering random entries is
+    /// written; in the remaining cases an existing entry is either renamed
+    /// (possibly overwriting an existing directory) or deleted.
+    fn randomly_mutate_tree(
+        root_path: &Path,
+        insertion_probability: f64,
+        rng: &mut impl Rng,
+    ) -> Result<Vec<fsevent::Event>> {
+        let root_path = root_path.canonicalize().unwrap();
+        let (dirs, files) = read_dir_recursive(root_path.clone());
+
+        let mut events = Vec::new();
+        let mut record_event = |path: PathBuf| {
+            events.push(fsevent::Event {
+                // Event ids only need to be monotonic-ish; wall-clock seconds
+                // are good enough for the test.
+                event_id: SystemTime::now()
+                    .duration_since(UNIX_EPOCH)
+                    .unwrap()
+                    .as_secs(),
+                flags: fsevent::StreamFlags::empty(),
+                path,
+            });
+        };
+
+        if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
+            // Create a new file or directory under a random existing dir.
+            let path = dirs.choose(rng).unwrap();
+            let new_path = path.join(gen_name(rng));
+
+            if rng.gen() {
+                log::info!("Creating dir {:?}", new_path.strip_prefix(root_path)?);
+                fs::create_dir(&new_path)?;
+            } else {
+                log::info!("Creating file {:?}", new_path.strip_prefix(root_path)?);
+                fs::write(&new_path, "")?;
+            }
+            record_event(new_path);
+        } else if rng.gen_bool(0.05) {
+            // Write a .gitignore listing a random subset of a directory's
+            // contents (paths relative to the ignore file's directory).
+            let ignore_dir_path = dirs.choose(rng).unwrap();
+            let ignore_path = ignore_dir_path.join(&*GITIGNORE);
+
+            // NOTE: subdirs always contains at least ignore_dir_path itself
+            // (read_dir_recursive includes the root), so the 0..len range
+            // below is never empty.
+            let (subdirs, subfiles) = read_dir_recursive(ignore_dir_path.clone());
+            let files_to_ignore = {
+                let len = rng.gen_range(0..=subfiles.len());
+                subfiles.choose_multiple(rng, len)
+            };
+            let dirs_to_ignore = {
+                let len = rng.gen_range(0..subdirs.len());
+                subdirs.choose_multiple(rng, len)
+            };
+
+            let mut ignore_contents = String::new();
+            for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
+                write!(
+                    ignore_contents,
+                    "{}\n",
+                    path_to_ignore
+                        .strip_prefix(&ignore_dir_path)?
+                        .to_str()
+                        .unwrap()
+                )
+                .unwrap();
+            }
+            log::info!(
+                "Creating {:?} with contents:\n{}",
+                ignore_path.strip_prefix(&root_path)?,
+                ignore_contents
+            );
+            fs::write(&ignore_path, ignore_contents).unwrap();
+            record_event(ignore_path);
+        } else {
+            // Pick an existing file or non-root directory to rename or delete.
+            let old_path = {
+                let file_path = files.choose(rng);
+                // dirs[0] is the root itself; never rename/delete the root.
+                let dir_path = dirs[1..].choose(rng);
+                file_path.into_iter().chain(dir_path).choose(rng).unwrap()
+            };
+
+            let is_rename = rng.gen();
+            if is_rename {
+                // Choose a destination dir that isn't inside the source, to
+                // avoid renaming a directory into its own subtree.
+                let new_path_parent = dirs
+                    .iter()
+                    .filter(|d| !d.starts_with(old_path))
+                    .choose(rng)
+                    .unwrap();
+
+                // Sometimes replace an existing directory wholesale rather
+                // than creating a fresh name under it.
+                let overwrite_existing_dir =
+                    !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
+                let new_path = if overwrite_existing_dir {
+                    fs::remove_dir_all(&new_path_parent).ok();
+                    new_path_parent.to_path_buf()
+                } else {
+                    new_path_parent.join(gen_name(rng))
+                };
+
+                log::info!(
+                    "Renaming {:?} to {}{:?}",
+                    old_path.strip_prefix(&root_path)?,
+                    if overwrite_existing_dir {
+                        "overwrite "
+                    } else {
+                        ""
+                    },
+                    new_path.strip_prefix(&root_path)?
+                );
+                fs::rename(&old_path, &new_path)?;
+                // A rename produces events for both the source and target.
+                record_event(old_path.clone());
+                record_event(new_path);
+            } else if old_path.is_dir() {
+                // Deleting a directory emits events for everything inside it.
+                let (dirs, files) = read_dir_recursive(old_path.clone());
+
+                log::info!("Deleting dir {:?}", old_path.strip_prefix(&root_path)?);
+                fs::remove_dir_all(&old_path).unwrap();
+                for file in files {
+                    record_event(file);
+                }
+                for dir in dirs {
+                    record_event(dir);
+                }
+            } else {
+                log::info!("Deleting file {:?}", old_path.strip_prefix(&root_path)?);
+                fs::remove_file(old_path).unwrap();
+                record_event(old_path.clone());
+            }
+        }
+
+        Ok(events)
+    }
+
+    /// Recursively lists `path`, returning `(dirs, files)`. The returned
+    /// `dirs` list always includes `path` itself as its first element, so it
+    /// is never empty.
+    fn read_dir_recursive(path: PathBuf) -> (Vec<PathBuf>, Vec<PathBuf>) {
+        let child_entries = fs::read_dir(&path).unwrap();
+        let mut dirs = vec![path];
+        let mut files = Vec::new();
+        for child_entry in child_entries {
+            let child_path = child_entry.unwrap().path();
+            if child_path.is_dir() {
+                let (child_dirs, child_files) = read_dir_recursive(child_path);
+                dirs.extend(child_dirs);
+                files.extend(child_files);
+            } else {
+                files.push(child_path);
+            }
+        }
+        (dirs, files)
+    }
+
+    /// Generates a random 6-character alphanumeric file/directory name.
+    fn gen_name(rng: &mut impl Rng) -> String {
+        (0..6)
+            .map(|_| rng.sample(rand::distributions::Alphanumeric))
+            .map(char::from)
+            .collect()
+    }
+
+    impl Snapshot {
+        /// Asserts the snapshot's internal invariants: the file iterators
+        /// agree with the entry tree, traversal order is consistent, and
+        /// every tracked .gitignore still has matching entries.
+        fn check_invariants(&self) {
+            // Every file entry must be yielded by files(); every non-ignored
+            // file entry must also be yielded by visible_files().
+            let mut files = self.files(0);
+            let mut visible_files = self.visible_files(0);
+            for entry in self.entries.cursor::<(), ()>() {
+                if matches!(entry.kind, EntryKind::File(_)) {
+                    assert_eq!(files.next().unwrap().inode(), entry.inode);
+                    if !entry.is_ignored {
+                        assert_eq!(visible_files.next().unwrap().inode(), entry.inode);
+                    }
+                }
+            }
+            assert!(files.next().is_none());
+            assert!(visible_files.next().is_none());
+
+            // Rebuild the entry order by walking child_entries with an
+            // explicit stack (inserting each node's children at a fixed index
+            // preserves their relative order). Despite the variable name,
+            // this walk is expected to visit entries in the same depth-first
+            // order as the entries cursor — the assertion below relies on it.
+            let mut bfs_paths = Vec::new();
+            let mut stack = vec![Path::new("")];
+            while let Some(path) = stack.pop() {
+                bfs_paths.push(path);
+                let ix = stack.len();
+                for child_entry in self.child_entries(path) {
+                    stack.insert(ix, child_entry.path());
+                }
+            }
+
+            let dfs_paths = self
+                .entries
+                .cursor::<(), ()>()
+                .map(|e| e.path().as_ref())
+                .collect::<Vec<_>>();
+            assert_eq!(bfs_paths, dfs_paths);
+
+            // Every directory with a tracked ignore file must still exist,
+            // along with its .gitignore entry.
+            for (ignore_parent_path, _) in &self.ignores {
+                assert!(self.entry_for_path(ignore_parent_path).is_some());
+                assert!(self
+                    .entry_for_path(ignore_parent_path.join(&*GITIGNORE))
+                    .is_some());
+            }
+        }
+
+        /// Flattens the snapshot into a sorted list of
+        /// (path, inode, is_ignored) tuples, for order-insensitive
+        /// comparison between snapshots.
+        fn to_vec(&self) -> Vec<(&Path, u64, bool)> {
+            let mut paths = Vec::new();
+            for entry in self.entries.cursor::<(), ()>() {
+                paths.push((entry.path().as_ref(), entry.inode(), entry.is_ignored()));
+            }
+            paths.sort_by(|a, b| a.0.cmp(&b.0));
+            paths
+        }
+    }
+}

zed/src/worktree/char_bag.rs 🔗

@@ -1,4 +1,6 @@
-#[derive(Copy, Clone, Debug)]
+use std::iter::FromIterator;
+
+#[derive(Copy, Clone, Debug, Default)]
 pub struct CharBag(u64);
 
 impl CharBag {
@@ -23,6 +25,22 @@ impl CharBag {
     }
 }
 
+impl Extend<char> for CharBag {
+    /// Inserts every character yielded by `iter` into the bag.
+    fn extend<T: IntoIterator<Item = char>>(&mut self, iter: T) {
+        for c in iter {
+            self.insert(c);
+        }
+    }
+}
+
+impl FromIterator<char> for CharBag {
+    /// Builds a bag from a character iterator by extending an empty bag.
+    fn from_iter<T: IntoIterator<Item = char>>(iter: T) -> Self {
+        let mut result = Self::default();
+        result.extend(iter);
+        result
+    }
+}
+
 impl From<&str> for CharBag {
     fn from(s: &str) -> Self {
         let mut bag = Self(0);

zed/src/worktree/fuzzy.rs 🔗

@@ -1,22 +1,21 @@
+use super::{char_bag::CharBag, EntryKind, Snapshot};
 use gpui::scoped_pool;
-
-use super::char_bag::CharBag;
-
 use std::{
     cmp::{max, min, Ordering, Reverse},
     collections::BinaryHeap,
+    path::Path,
+    sync::atomic::{self, AtomicBool},
+    sync::Arc,
 };
 
 const BASE_DISTANCE_PENALTY: f64 = 0.6;
 const ADDITIONAL_DISTANCE_PENALTY: f64 = 0.05;
 const MIN_DISTANCE_PENALTY: f64 = 0.2;
 
-pub struct PathEntry {
-    pub entry_id: usize,
-    pub path_chars: CharBag,
-    pub path: Vec<char>,
-    pub lowercase_path: Vec<char>,
-    pub is_ignored: bool,
+#[derive(Clone, Debug)]
+pub struct MatchCandidate<'a> {
+    pub path: &'a Arc<Path>,
+    pub char_bag: CharBag,
 }
 
 #[derive(Clone, Debug)]
@@ -24,8 +23,7 @@ pub struct PathMatch {
     pub score: f64,
     pub positions: Vec<usize>,
     pub tree_id: usize,
-    pub entry_id: usize,
-    pub skipped_prefix_len: usize,
+    pub path: Arc<Path>,
 }
 
 impl PartialEq for PathMatch {
@@ -48,29 +46,40 @@ impl Ord for PathMatch {
     }
 }
 
-pub fn match_paths(
-    paths_by_tree_id: &[(usize, usize, &[PathEntry])],
+pub fn match_paths<'a, T>(
+    snapshots: T,
     query: &str,
+    include_root_name: bool,
     include_ignored: bool,
     smart_case: bool,
     max_results: usize,
+    cancel_flag: Arc<AtomicBool>,
     pool: scoped_pool::Pool,
-) -> Vec<PathMatch> {
+) -> Vec<PathMatch>
+where
+    T: Clone + Send + Iterator<Item = &'a Snapshot> + 'a,
+{
     let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
     let query = query.chars().collect::<Vec<_>>();
+
     let lowercase_query = &lowercase_query;
     let query = &query;
     let query_chars = CharBag::from(&lowercase_query[..]);
 
     let cpus = num_cpus::get();
-    let path_count = paths_by_tree_id
-        .iter()
-        .fold(0, |sum, (_, _, paths)| sum + paths.len());
+    let path_count: usize = if include_ignored {
+        snapshots.clone().map(Snapshot::file_count).sum()
+    } else {
+        snapshots.clone().map(Snapshot::visible_file_count).sum()
+    };
+
     let segment_size = (path_count + cpus - 1) / cpus;
     let mut segment_results = (0..cpus).map(|_| BinaryHeap::new()).collect::<Vec<_>>();
 
     pool.scoped(|scope| {
         for (segment_idx, results) in segment_results.iter_mut().enumerate() {
+            let trees = snapshots.clone();
+            let cancel_flag = &cancel_flag;
             scope.execute(move || {
                 let segment_start = segment_idx * segment_size;
                 let segment_end = segment_start + segment_size;
@@ -84,22 +93,38 @@ pub fn match_paths(
                 let mut best_position_matrix = Vec::new();
 
                 let mut tree_start = 0;
-                for (tree_id, skipped_prefix_len, paths) in paths_by_tree_id {
-                    let tree_end = tree_start + paths.len();
+                for snapshot in trees {
+                    let tree_end = if include_ignored {
+                        tree_start + snapshot.file_count()
+                    } else {
+                        tree_start + snapshot.visible_file_count()
+                    };
                     if tree_start < segment_end && segment_start < tree_end {
                         let start = max(tree_start, segment_start) - tree_start;
                         let end = min(tree_end, segment_end) - tree_start;
+                        let entries = if include_ignored {
+                            snapshot.files(start).take(end - start)
+                        } else {
+                            snapshot.visible_files(start).take(end - start)
+                        };
+                        let paths = entries.map(|entry| {
+                            if let EntryKind::File(char_bag) = entry.kind {
+                                MatchCandidate {
+                                    path: &entry.path,
+                                    char_bag,
+                                }
+                            } else {
+                                unreachable!()
+                            }
+                        });
 
                         match_single_tree_paths(
-                            *tree_id,
-                            *skipped_prefix_len,
+                            snapshot,
+                            include_root_name,
                             paths,
-                            start,
-                            end,
                             query,
                             lowercase_query,
                             query_chars,
-                            include_ignored,
                             smart_case,
                             results,
                             max_results,
@@ -108,6 +133,7 @@ pub fn match_paths(
                             &mut last_positions,
                             &mut score_matrix,
                             &mut best_position_matrix,
+                            &cancel_flag,
                         );
                     }
                     if tree_end >= segment_end {
@@ -129,16 +155,13 @@ pub fn match_paths(
     results
 }
 
-fn match_single_tree_paths(
-    tree_id: usize,
-    skipped_prefix_len: usize,
-    path_entries: &[PathEntry],
-    start: usize,
-    end: usize,
+fn match_single_tree_paths<'a>(
+    snapshot: &Snapshot,
+    include_root_name: bool,
+    path_entries: impl Iterator<Item = MatchCandidate<'a>>,
     query: &[char],
     lowercase_query: &[char],
     query_chars: CharBag,
-    include_ignored: bool,
     smart_case: bool,
     results: &mut BinaryHeap<Reverse<PathMatch>>,
     max_results: usize,
@@ -147,39 +170,61 @@ fn match_single_tree_paths(
     last_positions: &mut Vec<usize>,
     score_matrix: &mut Vec<Option<f64>>,
     best_position_matrix: &mut Vec<usize>,
+    cancel_flag: &AtomicBool,
 ) {
-    for i in start..end {
-        let path_entry = unsafe { &path_entries.get_unchecked(i) };
+    let mut path_chars = Vec::new();
+    let mut lowercase_path_chars = Vec::new();
 
-        if !include_ignored && path_entry.is_ignored {
+    let prefix = if include_root_name {
+        snapshot.root_name()
+    } else {
+        ""
+    }
+    .chars()
+    .collect::<Vec<_>>();
+    let lowercase_prefix = prefix
+        .iter()
+        .map(|c| c.to_ascii_lowercase())
+        .collect::<Vec<_>>();
+
+    for candidate in path_entries {
+        if !candidate.char_bag.is_superset(query_chars) {
             continue;
         }
 
-        if !path_entry.path_chars.is_superset(query_chars) {
-            continue;
+        if cancel_flag.load(atomic::Ordering::Relaxed) {
+            break;
+        }
+
+        path_chars.clear();
+        lowercase_path_chars.clear();
+        for c in candidate.path.to_string_lossy().chars() {
+            path_chars.push(c);
+            lowercase_path_chars.push(c.to_ascii_lowercase());
         }
 
         if !find_last_positions(
             last_positions,
-            skipped_prefix_len,
-            &path_entry.lowercase_path,
+            &lowercase_prefix,
+            &lowercase_path_chars,
             &lowercase_query[..],
         ) {
             continue;
         }
 
-        let matrix_len = query.len() * (path_entry.path.len() - skipped_prefix_len);
+        let matrix_len = query.len() * (path_chars.len() + prefix.len());
         score_matrix.clear();
         score_matrix.resize(matrix_len, None);
         best_position_matrix.clear();
-        best_position_matrix.resize(matrix_len, skipped_prefix_len);
+        best_position_matrix.resize(matrix_len, 0);
 
         let score = score_match(
             &query[..],
             &lowercase_query[..],
-            &path_entry.path,
-            &path_entry.lowercase_path,
-            skipped_prefix_len,
+            &path_chars,
+            &lowercase_path_chars,
+            &prefix,
+            &lowercase_prefix,
             smart_case,
             &last_positions,
             score_matrix,
@@ -190,11 +235,10 @@ fn match_single_tree_paths(
 
         if score > 0.0 {
             results.push(Reverse(PathMatch {
-                tree_id,
-                entry_id: path_entry.entry_id,
+                tree_id: snapshot.id,
+                path: candidate.path.clone(),
                 score,
                 positions: match_positions.clone(),
-                skipped_prefix_len,
             }));
             if results.len() == max_results {
                 *min_score = results.peek().unwrap().0.score;
@@ -205,18 +249,17 @@ fn match_single_tree_paths(
 
 fn find_last_positions(
     last_positions: &mut Vec<usize>,
-    skipped_prefix_len: usize,
+    prefix: &[char],
     path: &[char],
     query: &[char],
 ) -> bool {
     let mut path = path.iter();
+    let mut prefix_iter = prefix.iter();
     for (i, char) in query.iter().enumerate().rev() {
         if let Some(j) = path.rposition(|c| c == char) {
-            if j >= skipped_prefix_len {
-                last_positions[i] = j;
-            } else {
-                return false;
-            }
+            last_positions[i] = j + prefix.len();
+        } else if let Some(j) = prefix_iter.rposition(|c| c == char) {
+            last_positions[i] = j;
         } else {
             return false;
         }
@@ -229,7 +272,8 @@ fn score_match(
     query_cased: &[char],
     path: &[char],
     path_cased: &[char],
-    skipped_prefix_len: usize,
+    prefix: &[char],
+    lowercase_prefix: &[char],
     smart_case: bool,
     last_positions: &[usize],
     score_matrix: &mut [Option<f64>],
@@ -242,14 +286,15 @@ fn score_match(
         query_cased,
         path,
         path_cased,
-        skipped_prefix_len,
+        prefix,
+        lowercase_prefix,
         smart_case,
         last_positions,
         score_matrix,
         best_position_matrix,
         min_score,
         0,
-        skipped_prefix_len,
+        0,
         query.len() as f64,
     ) * query.len() as f64;
 
@@ -257,10 +302,10 @@ fn score_match(
         return 0.0;
     }
 
-    let path_len = path.len() - skipped_prefix_len;
+    let path_len = path.len() + prefix.len();
     let mut cur_start = 0;
     for i in 0..query.len() {
-        match_positions[i] = best_position_matrix[i * path_len + cur_start] - skipped_prefix_len;
+        match_positions[i] = best_position_matrix[i * path_len + cur_start];
         cur_start = match_positions[i] + 1;
     }
 
@@ -272,7 +317,8 @@ fn recursive_score_match(
     query_cased: &[char],
     path: &[char],
     path_cased: &[char],
-    skipped_prefix_len: usize,
+    prefix: &[char],
+    lowercase_prefix: &[char],
     smart_case: bool,
     last_positions: &[usize],
     score_matrix: &mut [Option<f64>],
@@ -286,9 +332,9 @@ fn recursive_score_match(
         return 1.0;
     }
 
-    let path_len = path.len() - skipped_prefix_len;
+    let path_len = prefix.len() + path.len();
 
-    if let Some(memoized) = score_matrix[query_idx * path_len + path_idx - skipped_prefix_len] {
+    if let Some(memoized) = score_matrix[query_idx * path_len + path_idx] {
         return memoized;
     }
 
@@ -300,7 +346,11 @@ fn recursive_score_match(
 
     let mut last_slash = 0;
     for j in path_idx..=limit {
-        let path_char = path_cased[j];
+        let path_char = if j < prefix.len() {
+            lowercase_prefix[j]
+        } else {
+            path_cased[j - prefix.len()]
+        };
         let is_path_sep = path_char == '/' || path_char == '\\';
 
         if query_idx == 0 && is_path_sep {
@@ -308,10 +358,19 @@ fn recursive_score_match(
         }
 
         if query_char == path_char || (is_path_sep && query_char == '_' || query_char == '\\') {
+            let curr = if j < prefix.len() {
+                prefix[j]
+            } else {
+                path[j - prefix.len()]
+            };
+
             let mut char_score = 1.0;
             if j > path_idx {
-                let last = path[j - 1];
-                let curr = path[j];
+                let last = if j - 1 < prefix.len() {
+                    prefix[j - 1]
+                } else {
+                    path[j - 1 - prefix.len()]
+                };
 
                 if last == '/' {
                     char_score = 0.9;
@@ -334,15 +393,15 @@ fn recursive_score_match(
             // Apply a severe penalty if the case doesn't match.
             // This will make the exact matches have higher score than the case-insensitive and the
             // path insensitive matches.
-            if (smart_case || path[j] == '/') && query[query_idx] != path[j] {
+            if (smart_case || curr == '/') && query[query_idx] != curr {
                 char_score *= 0.001;
             }
 
             let mut multiplier = char_score;
 
-            // Scale the score based on how deep within the patch we found the match.
+            // Scale the score based on how deep within the path we found the match.
             if query_idx == 0 {
-                multiplier /= (path.len() - last_slash) as f64;
+                multiplier /= ((prefix.len() + path.len()) - last_slash) as f64;
             }
 
             let mut next_score = 1.0;
@@ -363,7 +422,8 @@ fn recursive_score_match(
                 query_cased,
                 path,
                 path_cased,
-                skipped_prefix_len,
+                prefix,
+                lowercase_prefix,
                 smart_case,
                 last_positions,
                 score_matrix,
@@ -386,16 +446,49 @@ fn recursive_score_match(
     }
 
     if best_position != 0 {
-        best_position_matrix[query_idx * path_len + path_idx - skipped_prefix_len] = best_position;
+        best_position_matrix[query_idx * path_len + path_idx] = best_position;
     }
 
-    score_matrix[query_idx * path_len + path_idx - skipped_prefix_len] = Some(score);
+    score_matrix[query_idx * path_len + path_idx] = Some(score);
     score
 }
 
 #[cfg(test)]
 mod tests {
     use super::*;
+    use std::path::PathBuf;
+
+    #[test]
+    fn test_get_last_positions() {
+        let mut last_positions = vec![0; 2];
+        let result = find_last_positions(
+            &mut last_positions,
+            &['a', 'b', 'c'],
+            &['b', 'd', 'e', 'f'],
+            &['d', 'c'],
+        );
+        assert_eq!(result, false);
+
+        last_positions.resize(2, 0);
+        let result = find_last_positions(
+            &mut last_positions,
+            &['a', 'b', 'c'],
+            &['b', 'd', 'e', 'f'],
+            &['c', 'd'],
+        );
+        assert_eq!(result, true);
+        assert_eq!(last_positions, vec![2, 4]);
+
+        last_positions.resize(4, 0);
+        let result = find_last_positions(
+            &mut last_positions,
+            &['z', 'e', 'd', '/'],
+            &['z', 'e', 'd', '/', 'f'],
+            &['z', '/', 'z', 'f'],
+        );
+        assert_eq!(result, true);
+        assert_eq!(last_positions, vec![0, 3, 4, 8]);
+    }
 
     #[test]
     fn test_match_path_entries() {
@@ -447,17 +540,17 @@ mod tests {
         let query = query.chars().collect::<Vec<_>>();
         let query_chars = CharBag::from(&lowercase_query[..]);
 
+        let path_arcs = paths
+            .iter()
+            .map(|path| Arc::from(PathBuf::from(path)))
+            .collect::<Vec<_>>();
         let mut path_entries = Vec::new();
         for (i, path) in paths.iter().enumerate() {
             let lowercase_path = path.to_lowercase().chars().collect::<Vec<_>>();
-            let path_chars = CharBag::from(&lowercase_path[..]);
-            let path = path.chars().collect();
-            path_entries.push(PathEntry {
-                entry_id: i,
-                path_chars,
-                path,
-                lowercase_path,
-                is_ignored: false,
+            let char_bag = CharBag::from(lowercase_path.as_slice());
+            path_entries.push(MatchCandidate {
+                char_bag,
+                path: path_arcs.get(i).unwrap(),
             });
         }
 
@@ -466,17 +559,22 @@ mod tests {
         match_positions.resize(query.len(), 0);
         last_positions.resize(query.len(), 0);
 
+        let cancel_flag = AtomicBool::new(false);
         let mut results = BinaryHeap::new();
         match_single_tree_paths(
-            0,
-            0,
-            &path_entries,
-            0,
-            path_entries.len(),
+            &Snapshot {
+                id: 0,
+                scan_id: 0,
+                abs_path: PathBuf::new().into(),
+                ignores: Default::default(),
+                entries: Default::default(),
+                root_name: Default::default(),
+            },
+            false,
+            path_entries.into_iter(),
             &query[..],
             &lowercase_query[..],
             query_chars,
-            true,
             smart_case,
             &mut results,
             100,
@@ -485,12 +583,22 @@ mod tests {
             &mut last_positions,
             &mut Vec::new(),
             &mut Vec::new(),
+            &cancel_flag,
         );
 
         results
             .into_iter()
             .rev()
-            .map(|result| (paths[result.0.entry_id].clone(), result.0.positions))
+            .map(|result| {
+                (
+                    paths
+                        .iter()
+                        .copied()
+                        .find(|p| result.0.path.as_ref() == Path::new(p))
+                        .unwrap(),
+                    result.0.positions,
+                )
+            })
             .collect()
     }
 }

zed/src/worktree/ignore.rs 🔗

@@ -0,0 +1,57 @@
+use ignore::gitignore::Gitignore;
+use std::{ffi::OsStr, path::Path, sync::Arc};
+
/// A persistent, shareable stack of `.gitignore` rule sets that apply to a
/// path, ordered from outermost directory (bottom / `parent` end) to the
/// innermost directory (top). Lookups consult the top-most ignore file first
/// and fall back to `parent` when it has no opinion.
pub enum IgnoreStack {
    /// Empty stack: no ignore files apply, so nothing is ignored.
    None,
    /// One `.gitignore` layered over the rest of the stack.
    Some {
        /// Directory containing the ignore file; matched paths are
        /// interpreted relative to this base.
        base: Arc<Path>,
        /// Parsed rules from the `.gitignore` at `base`.
        ignore: Arc<Gitignore>,
        /// The enclosing (outer) ignore stack to fall back to.
        parent: Arc<IgnoreStack>,
    },
    /// Sentinel meaning "ignore everything" — used for subtrees that are
    /// already known to be ignored, so no further matching is needed.
    All,
}
+
+impl IgnoreStack {
+    pub fn none() -> Arc<Self> {
+        Arc::new(Self::None)
+    }
+
+    pub fn all() -> Arc<Self> {
+        Arc::new(Self::All)
+    }
+
+    pub fn is_all(&self) -> bool {
+        matches!(self, IgnoreStack::All)
+    }
+
+    pub fn append(self: Arc<Self>, base: Arc<Path>, ignore: Arc<Gitignore>) -> Arc<Self> {
+        match self.as_ref() {
+            IgnoreStack::All => self,
+            _ => Arc::new(Self::Some {
+                base,
+                ignore,
+                parent: self,
+            }),
+        }
+    }
+
+    pub fn is_path_ignored(&self, path: &Path, is_dir: bool) -> bool {
+        if is_dir && path.file_name() == Some(OsStr::new(".git")) {
+            return true;
+        }
+
+        match self {
+            Self::None => false,
+            Self::All => true,
+            Self::Some {
+                base,
+                ignore,
+                parent: prev,
+            } => match ignore.matched(path.strip_prefix(base).unwrap(), is_dir) {
+                ignore::Match::None => prev.is_path_ignored(path, is_dir),
+                ignore::Match::Ignore(_) => true,
+                ignore::Match::Whitelist(_) => false,
+            },
+        }
+    }
+}

zed/src/worktree/mod.rs 🔗

@@ -1,5 +0,0 @@
-mod char_bag;
-mod fuzzy;
-mod worktree;
-
-pub use worktree::{match_paths, FileHandle, PathMatch, Worktree, WorktreeHandle};

zed/src/worktree/worktree.rs 🔗

@@ -1,725 +0,0 @@
-pub use super::fuzzy::PathMatch;
-use super::{
-    char_bag::CharBag,
-    fuzzy::{self, PathEntry},
-};
-use crate::{
-    editor::{History, Snapshot},
-    timer,
-    util::post_inc,
-};
-use anyhow::{anyhow, Result};
-use crossbeam_channel as channel;
-use easy_parallel::Parallel;
-use gpui::{scoped_pool, AppContext, Entity, ModelContext, ModelHandle, Task};
-use ignore::dir::{Ignore, IgnoreBuilder};
-use parking_lot::RwLock;
-use smol::prelude::*;
-use std::{
-    collections::HashMap,
-    ffi::{OsStr, OsString},
-    fmt, fs,
-    io::{self, Write},
-    os::unix::fs::MetadataExt,
-    path::Path,
-    path::PathBuf,
-    sync::Arc,
-    time::Duration,
-};
-
-#[derive(Clone)]
-pub struct Worktree(Arc<RwLock<WorktreeState>>);
-
-struct WorktreeState {
-    id: usize,
-    path: PathBuf,
-    entries: Vec<Entry>,
-    file_paths: Vec<PathEntry>,
-    histories: HashMap<usize, History>,
-    scanning: bool,
-}
-
-struct DirToScan {
-    id: usize,
-    path: PathBuf,
-    relative_path: PathBuf,
-    ignore: Option<Ignore>,
-    dirs_to_scan: channel::Sender<io::Result<DirToScan>>,
-}
-
-impl Worktree {
-    pub fn new<T>(id: usize, path: T, ctx: Option<&mut ModelContext<Self>>) -> Self
-    where
-        T: Into<PathBuf>,
-    {
-        let tree = Self(Arc::new(RwLock::new(WorktreeState {
-            id,
-            path: path.into(),
-            entries: Vec::new(),
-            file_paths: Vec::new(),
-            histories: HashMap::new(),
-            scanning: ctx.is_some(),
-        })));
-
-        if let Some(ctx) = ctx {
-            tree.0.write().scanning = true;
-
-            let tree = tree.clone();
-            let task = ctx.background_executor().spawn(async move {
-                tree.scan_dirs()?;
-                Ok(())
-            });
-
-            ctx.spawn(task, Self::done_scanning).detach();
-
-            ctx.spawn_stream(
-                timer::repeat(Duration::from_millis(100)).map(|_| ()),
-                Self::scanning,
-                |_, _| {},
-            )
-            .detach();
-        }
-
-        tree
-    }
-
-    fn scan_dirs(&self) -> io::Result<()> {
-        let path = self.0.read().path.clone();
-        let metadata = fs::metadata(&path)?;
-        let ino = metadata.ino();
-        let is_symlink = fs::symlink_metadata(&path)?.file_type().is_symlink();
-        let name = path
-            .file_name()
-            .map(|name| OsString::from(name))
-            .unwrap_or(OsString::from("/"));
-        let relative_path = PathBuf::from(&name);
-
-        let mut ignore = IgnoreBuilder::new().build().add_parents(&path).unwrap();
-        if metadata.is_dir() {
-            ignore = ignore.add_child(&path).unwrap();
-        }
-        let is_ignored = ignore.matched(&path, metadata.is_dir()).is_ignore();
-
-        if metadata.file_type().is_dir() {
-            let is_ignored = is_ignored || name == ".git";
-            let id = self.push_dir(None, name, ino, is_symlink, is_ignored);
-            let (tx, rx) = channel::unbounded();
-
-            let tx_ = tx.clone();
-            tx.send(Ok(DirToScan {
-                id,
-                path,
-                relative_path,
-                ignore: Some(ignore),
-                dirs_to_scan: tx_,
-            }))
-            .unwrap();
-            drop(tx);
-
-            Parallel::<io::Result<()>>::new()
-                .each(0..16, |_| {
-                    while let Ok(result) = rx.recv() {
-                        self.scan_dir(result?)?;
-                    }
-                    Ok(())
-                })
-                .run()
-                .into_iter()
-                .collect::<io::Result<()>>()?;
-        } else {
-            self.push_file(None, name, ino, is_symlink, is_ignored, relative_path);
-        }
-
-        Ok(())
-    }
-
-    fn scan_dir(&self, to_scan: DirToScan) -> io::Result<()> {
-        let mut new_children = Vec::new();
-
-        for child_entry in fs::read_dir(&to_scan.path)? {
-            let child_entry = child_entry?;
-            let name = child_entry.file_name();
-            let relative_path = to_scan.relative_path.join(&name);
-            let metadata = child_entry.metadata()?;
-            let ino = metadata.ino();
-            let is_symlink = metadata.file_type().is_symlink();
-
-            if metadata.is_dir() {
-                let path = to_scan.path.join(&name);
-                let mut is_ignored = true;
-                let mut ignore = None;
-
-                if let Some(parent_ignore) = to_scan.ignore.as_ref() {
-                    let child_ignore = parent_ignore.add_child(&path).unwrap();
-                    is_ignored = child_ignore.matched(&path, true).is_ignore() || name == ".git";
-                    if !is_ignored {
-                        ignore = Some(child_ignore);
-                    }
-                }
-
-                let id = self.push_dir(Some(to_scan.id), name, ino, is_symlink, is_ignored);
-                new_children.push(id);
-
-                let dirs_to_scan = to_scan.dirs_to_scan.clone();
-                let _ = to_scan.dirs_to_scan.send(Ok(DirToScan {
-                    id,
-                    path,
-                    relative_path,
-                    ignore,
-                    dirs_to_scan,
-                }));
-            } else {
-                let is_ignored = to_scan.ignore.as_ref().map_or(true, |i| {
-                    i.matched(to_scan.path.join(&name), false).is_ignore()
-                });
-
-                new_children.push(self.push_file(
-                    Some(to_scan.id),
-                    name,
-                    ino,
-                    is_symlink,
-                    is_ignored,
-                    relative_path,
-                ));
-            };
-        }
-
-        if let Entry::Dir { children, .. } = &mut self.0.write().entries[to_scan.id] {
-            *children = new_children.clone();
-        }
-
-        Ok(())
-    }
-
-    fn push_dir(
-        &self,
-        parent: Option<usize>,
-        name: OsString,
-        ino: u64,
-        is_symlink: bool,
-        is_ignored: bool,
-    ) -> usize {
-        let entries = &mut self.0.write().entries;
-        let dir_id = entries.len();
-        entries.push(Entry::Dir {
-            parent,
-            name,
-            ino,
-            is_symlink,
-            is_ignored,
-            children: Vec::new(),
-        });
-        dir_id
-    }
-
-    fn push_file(
-        &self,
-        parent: Option<usize>,
-        name: OsString,
-        ino: u64,
-        is_symlink: bool,
-        is_ignored: bool,
-        path: PathBuf,
-    ) -> usize {
-        let path = path.to_string_lossy();
-        let lowercase_path = path.to_lowercase().chars().collect::<Vec<_>>();
-        let path = path.chars().collect::<Vec<_>>();
-        let path_chars = CharBag::from(&path[..]);
-
-        let mut state = self.0.write();
-        let entry_id = state.entries.len();
-        state.entries.push(Entry::File {
-            parent,
-            name,
-            ino,
-            is_symlink,
-            is_ignored,
-        });
-        state.file_paths.push(PathEntry {
-            entry_id,
-            path_chars,
-            path,
-            lowercase_path,
-            is_ignored,
-        });
-        entry_id
-    }
-
-    pub fn entry_path(&self, mut entry_id: usize) -> Result<PathBuf> {
-        let state = self.0.read();
-
-        if entry_id >= state.entries.len() {
-            return Err(anyhow!("Entry does not exist in tree"));
-        }
-
-        let mut entries = Vec::new();
-        loop {
-            let entry = &state.entries[entry_id];
-            entries.push(entry);
-            if let Some(parent_id) = entry.parent() {
-                entry_id = parent_id;
-            } else {
-                break;
-            }
-        }
-
-        let mut path = PathBuf::new();
-        for entry in entries.into_iter().rev() {
-            path.push(entry.name());
-        }
-        Ok(path)
-    }
-
-    pub fn abs_entry_path(&self, entry_id: usize) -> Result<PathBuf> {
-        let mut path = self.0.read().path.clone();
-        path.pop();
-        Ok(path.join(self.entry_path(entry_id)?))
-    }
-
-    fn fmt_entry(&self, f: &mut fmt::Formatter<'_>, entry_id: usize, indent: usize) -> fmt::Result {
-        match &self.0.read().entries[entry_id] {
-            Entry::Dir { name, children, .. } => {
-                write!(
-                    f,
-                    "{}{}/ ({})\n",
-                    " ".repeat(indent),
-                    name.to_string_lossy(),
-                    entry_id
-                )?;
-                for child_id in children.iter() {
-                    self.fmt_entry(f, *child_id, indent + 2)?;
-                }
-                Ok(())
-            }
-            Entry::File { name, .. } => write!(
-                f,
-                "{}{} ({})\n",
-                " ".repeat(indent),
-                name.to_string_lossy(),
-                entry_id
-            ),
-        }
-    }
-
-    pub fn path(&self) -> PathBuf {
-        PathBuf::from(&self.0.read().path)
-    }
-
-    pub fn contains_path(&self, path: &Path) -> bool {
-        path.starts_with(self.path())
-    }
-
-    pub fn iter(&self) -> Iter {
-        Iter {
-            tree: self.clone(),
-            stack: Vec::new(),
-            started: false,
-        }
-    }
-
-    pub fn files(&self) -> FilesIter {
-        FilesIter {
-            iter: self.iter(),
-            path: PathBuf::new(),
-        }
-    }
-
-    pub fn entry_count(&self) -> usize {
-        self.0.read().entries.len()
-    }
-
-    pub fn file_count(&self) -> usize {
-        self.0.read().file_paths.len()
-    }
-
-    pub fn load_history(&self, entry_id: usize) -> impl Future<Output = Result<History>> {
-        let tree = self.clone();
-
-        async move {
-            if let Some(history) = tree.0.read().histories.get(&entry_id) {
-                return Ok(history.clone());
-            }
-
-            let path = tree.abs_entry_path(entry_id)?;
-
-            let mut file = smol::fs::File::open(&path).await?;
-            let mut base_text = String::new();
-            file.read_to_string(&mut base_text).await?;
-            let history = History::new(Arc::from(base_text));
-            tree.0.write().histories.insert(entry_id, history.clone());
-            Ok(history)
-        }
-    }
-
-    pub fn save<'a>(
-        &self,
-        entry_id: usize,
-        content: Snapshot,
-        ctx: &AppContext,
-    ) -> Task<Result<()>> {
-        let path = self.abs_entry_path(entry_id);
-        ctx.background_executor().spawn(async move {
-            let buffer_size = content.text_summary().bytes.min(10 * 1024);
-            let file = std::fs::File::create(&path?)?;
-            let mut writer = std::io::BufWriter::with_capacity(buffer_size, file);
-            for chunk in content.fragments() {
-                writer.write(chunk.as_bytes())?;
-            }
-            writer.flush()?;
-            Ok(())
-        })
-    }
-
-    fn scanning(&mut self, _: (), ctx: &mut ModelContext<Self>) {
-        if self.0.read().scanning {
-            ctx.notify();
-        } else {
-            ctx.halt_stream();
-        }
-    }
-
-    fn done_scanning(&mut self, result: io::Result<()>, ctx: &mut ModelContext<Self>) {
-        log::info!("done scanning");
-        self.0.write().scanning = false;
-        if let Err(error) = result {
-            log::error!("error populating worktree: {}", error);
-        } else {
-            ctx.notify();
-        }
-    }
-}
-
-impl fmt::Debug for Worktree {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        if self.entry_count() == 0 {
-            write!(f, "Empty tree\n")
-        } else {
-            self.fmt_entry(f, 0, 0)
-        }
-    }
-}
-
-impl Entity for Worktree {
-    type Event = ();
-}
-
-pub trait WorktreeHandle {
-    fn file(&self, entry_id: usize, app: &AppContext) -> Result<FileHandle>;
-}
-
-impl WorktreeHandle for ModelHandle<Worktree> {
-    fn file(&self, entry_id: usize, app: &AppContext) -> Result<FileHandle> {
-        if entry_id >= self.read(app).entry_count() {
-            return Err(anyhow!("Entry does not exist in tree"));
-        }
-
-        Ok(FileHandle {
-            worktree: self.clone(),
-            entry_id,
-        })
-    }
-}
-
-#[derive(Clone, Debug)]
-pub enum Entry {
-    Dir {
-        parent: Option<usize>,
-        name: OsString,
-        ino: u64,
-        is_symlink: bool,
-        is_ignored: bool,
-        children: Vec<usize>,
-    },
-    File {
-        parent: Option<usize>,
-        name: OsString,
-        ino: u64,
-        is_symlink: bool,
-        is_ignored: bool,
-    },
-}
-
-impl Entry {
-    fn parent(&self) -> Option<usize> {
-        match self {
-            Entry::Dir { parent, .. } | Entry::File { parent, .. } => *parent,
-        }
-    }
-
-    fn name(&self) -> &OsStr {
-        match self {
-            Entry::Dir { name, .. } | Entry::File { name, .. } => name,
-        }
-    }
-}
-
-#[derive(Clone)]
-pub struct FileHandle {
-    worktree: ModelHandle<Worktree>,
-    entry_id: usize,
-}
-
-impl FileHandle {
-    pub fn path(&self, app: &AppContext) -> PathBuf {
-        self.worktree.read(app).entry_path(self.entry_id).unwrap()
-    }
-
-    pub fn load_history(&self, app: &AppContext) -> impl Future<Output = Result<History>> {
-        self.worktree.read(app).load_history(self.entry_id)
-    }
-
-    pub fn save<'a>(&self, content: Snapshot, ctx: &AppContext) -> Task<Result<()>> {
-        let worktree = self.worktree.read(ctx);
-        worktree.save(self.entry_id, content, ctx)
-    }
-
-    pub fn entry_id(&self) -> (usize, usize) {
-        (self.worktree.id(), self.entry_id)
-    }
-}
-
-struct IterStackEntry {
-    entry_id: usize,
-    child_idx: usize,
-}
-
-pub struct Iter {
-    tree: Worktree,
-    stack: Vec<IterStackEntry>,
-    started: bool,
-}
-
-impl Iterator for Iter {
-    type Item = Traversal;
-
-    fn next(&mut self) -> Option<Self::Item> {
-        let state = self.tree.0.read();
-
-        if !self.started {
-            self.started = true;
-
-            return if let Some(entry) = state.entries.first().cloned() {
-                self.stack.push(IterStackEntry {
-                    entry_id: 0,
-                    child_idx: 0,
-                });
-
-                Some(Traversal::Push { entry_id: 0, entry })
-            } else {
-                None
-            };
-        }
-
-        while let Some(parent) = self.stack.last_mut() {
-            if let Entry::Dir { children, .. } = &state.entries[parent.entry_id] {
-                if parent.child_idx < children.len() {
-                    let child_id = children[post_inc(&mut parent.child_idx)];
-
-                    self.stack.push(IterStackEntry {
-                        entry_id: child_id,
-                        child_idx: 0,
-                    });
-
-                    return Some(Traversal::Push {
-                        entry_id: child_id,
-                        entry: state.entries[child_id].clone(),
-                    });
-                } else {
-                    self.stack.pop();
-
-                    return Some(Traversal::Pop);
-                }
-            } else {
-                self.stack.pop();
-
-                return Some(Traversal::Pop);
-            }
-        }
-
-        None
-    }
-}
-
-#[derive(Debug)]
-pub enum Traversal {
-    Push { entry_id: usize, entry: Entry },
-    Pop,
-}
-
-pub struct FilesIter {
-    iter: Iter,
-    path: PathBuf,
-}
-
-pub struct FilesIterItem {
-    pub entry_id: usize,
-    pub path: PathBuf,
-}
-
-impl Iterator for FilesIter {
-    type Item = FilesIterItem;
-
-    fn next(&mut self) -> Option<Self::Item> {
-        loop {
-            match self.iter.next() {
-                Some(Traversal::Push {
-                    entry_id, entry, ..
-                }) => match entry {
-                    Entry::Dir { name, .. } => {
-                        self.path.push(name);
-                    }
-                    Entry::File { name, .. } => {
-                        self.path.push(name);
-                        return Some(FilesIterItem {
-                            entry_id,
-                            path: self.path.clone(),
-                        });
-                    }
-                },
-                Some(Traversal::Pop) => {
-                    self.path.pop();
-                }
-                None => {
-                    return None;
-                }
-            }
-        }
-    }
-}
-
-trait UnwrapIgnoreTuple {
-    fn unwrap(self) -> Ignore;
-}
-
-impl UnwrapIgnoreTuple for (Ignore, Option<ignore::Error>) {
-    fn unwrap(self) -> Ignore {
-        if let Some(error) = self.1 {
-            log::error!("error loading gitignore data: {}", error);
-        }
-        self.0
-    }
-}
-
-pub fn match_paths(
-    trees: &[Worktree],
-    query: &str,
-    include_ignored: bool,
-    smart_case: bool,
-    max_results: usize,
-    pool: scoped_pool::Pool,
-) -> Vec<PathMatch> {
-    let tree_states = trees.iter().map(|tree| tree.0.read()).collect::<Vec<_>>();
-    fuzzy::match_paths(
-        &tree_states
-            .iter()
-            .map(|tree| {
-                let skip_prefix = if trees.len() == 1 {
-                    if let Some(Entry::Dir { name, .. }) = tree.entries.get(0) {
-                        let name = name.to_string_lossy();
-                        if name == "/" {
-                            1
-                        } else {
-                            name.chars().count() + 1
-                        }
-                    } else {
-                        0
-                    }
-                } else {
-                    0
-                };
-
-                (tree.id, skip_prefix, &tree.file_paths[..])
-            })
-            .collect::<Vec<_>>()[..],
-        query,
-        include_ignored,
-        smart_case,
-        max_results,
-        pool,
-    )
-}
-
-#[cfg(test)]
-mod test {
-    use super::*;
-    use crate::editor::Buffer;
-    use crate::test::*;
-    use anyhow::Result;
-    use gpui::App;
-    use serde_json::json;
-    use std::os::unix;
-
-    #[test]
-    fn test_populate_and_search() {
-        App::test_async((), |mut app| async move {
-            let dir = temp_tree(json!({
-                "root": {
-                    "apple": "",
-                    "banana": {
-                        "carrot": {
-                            "date": "",
-                            "endive": "",
-                        }
-                    },
-                    "fennel": {
-                        "grape": "",
-                    }
-                }
-            }));
-
-            let root_link_path = dir.path().join("root_link");
-            unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
-
-            let tree = app.add_model(|ctx| Worktree::new(1, root_link_path, Some(ctx)));
-            app.finish_pending_tasks().await;
-
-            app.read(|ctx| {
-                let tree = tree.read(ctx);
-                assert_eq!(tree.file_count(), 4);
-                let results = match_paths(&[tree.clone()], "bna", false, false, 10, ctx.scoped_pool().clone())
-                    .iter()
-                    .map(|result| tree.entry_path(result.entry_id))
-                    .collect::<Result<Vec<PathBuf>, _>>()
-                    .unwrap();
-                assert_eq!(
-                    results,
-                    vec![
-                        PathBuf::from("root_link/banana/carrot/date"),
-                        PathBuf::from("root_link/banana/carrot/endive"),
-                    ]
-                );
-            })
-        });
-    }
-
-    #[test]
-    fn test_save_file() {
-        App::test_async((), |mut app| async move {
-            let dir = temp_tree(json!({
-                "file1": "the old contents",
-            }));
-
-            let tree = app.add_model(|ctx| Worktree::new(1, dir.path(), Some(ctx)));
-            app.finish_pending_tasks().await;
-
-            let buffer = Buffer::new(1, "a line of text.\n".repeat(10 * 1024));
-
-            let entry = app.read(|ctx| {
-                let entry = tree.read(ctx).files().next().unwrap();
-                assert_eq!(entry.path.file_name().unwrap(), "file1");
-                entry
-            });
-            let file_id = entry.entry_id;
-
-            tree.update(&mut app, |tree, ctx| {
-                smol::block_on(tree.save(file_id, buffer.snapshot(), ctx.as_ref())).unwrap()
-            });
-
-            let history = app
-                .read(|ctx| tree.read(ctx).load_history(file_id))
-                .await
-                .unwrap();
-            assert_eq!(history.base_text.as_ref(), buffer.text());
-        });
-    }
-}