Merge branch 'main' into multibuffer-following

Created by Max Brunsfeld

Change summary

.github/workflows/release_actions.yml                                                |    2 
.gitignore                                                                           |    1 
Cargo.lock                                                                           |  406 
Cargo.toml                                                                           |    3 
assets/keymaps/vim.json                                                              |   74 
crates/activity_indicator/src/activity_indicator.rs                                  |    2 
crates/auto_update/Cargo.toml                                                        |    1 
crates/auto_update/src/auto_update.rs                                                |   43 
crates/auto_update/src/update_notification.rs                                        |    9 
crates/breadcrumbs/src/breadcrumbs.rs                                                |    5 
crates/call/src/call.rs                                                              |   20 
crates/call/src/participant.rs                                                       |   10 
crates/call/src/room.rs                                                              |  137 
crates/client/src/client.rs                                                          |  298 
crates/client/src/telemetry.rs                                                       |   26 
crates/client/src/user.rs                                                            |    1 
crates/collab/Cargo.toml                                                             |   21 
crates/collab/migrations.sqlite/20221109000000_test_schema.sql                       |  114 
crates/collab/migrations/20221111092550_reconnection_support.sql                     |   90 
crates/collab/migrations/20221125192125_add_added_to_mailing_list_to_signups.sql     |    2 
crates/collab/migrations/20221207165001_add_connection_lost_to_room_participants.sql |    7 
crates/collab/src/api.rs                                                             |    6 
crates/collab/src/auth.rs                                                            |    2 
crates/collab/src/bin/seed.rs                                                        |   10 
crates/collab/src/db.rs                                                              |  968 
crates/collab/src/db/access_token.rs                                                 |   29 
crates/collab/src/db/contact.rs                                                      |   58 
crates/collab/src/db/language_server.rs                                              |   30 
crates/collab/src/db/project.rs                                                      |   67 
crates/collab/src/db/project_collaborator.rs                                         |   33 
crates/collab/src/db/room.rs                                                         |   32 
crates/collab/src/db/room_participant.rs                                             |   50 
crates/collab/src/db/signup.rs                                                       |   57 
crates/collab/src/db/tests.rs                                                        |  312 
crates/collab/src/db/user.rs                                                         |   49 
crates/collab/src/db/worktree.rs                                                     |   34 
crates/collab/src/db/worktree_diagnostic_summary.rs                                  |   21 
crates/collab/src/db/worktree_entry.rs                                               |   25 
crates/collab/src/executor.rs                                                        |   36 
crates/collab/src/integration_tests.rs                                               |  452 
crates/collab/src/lib.rs                                                             |   78 
crates/collab/src/main.rs                                                            |  136 
crates/collab/src/rpc.rs                                                             | 2558 
crates/collab/src/rpc/connection_pool.rs                                             |   93 
crates/collab/src/rpc/store.rs                                                       | 1182 
crates/collab_ui/src/collab_ui.rs                                                    |    9 
crates/collab_ui/src/contact_notification.rs                                         |    2 
crates/collab_ui/src/incoming_call_notification.rs                                   |    6 
crates/command_palette/src/command_palette.rs                                        |    5 
crates/db/Cargo.toml                                                                 |   12 
crates/db/README.md                                                                  |    5 
crates/db/src/db.rs                                                                  |  430 
crates/db/src/items.rs                                                               |  311 
crates/db/src/kvp.rs                                                                 |  110 
crates/db/src/migrations.rs                                                          |   15 
crates/db/src/query.rs                                                               |  314 
crates/diagnostics/src/diagnostics.rs                                                |   45 
crates/diagnostics/src/items.rs                                                      |    4 
crates/drag_and_drop/src/drag_and_drop.rs                                            |   44 
crates/editor/Cargo.toml                                                             |    2 
crates/editor/src/editor.rs                                                          |  617 
crates/editor/src/editor_tests.rs                                                    |   26 
crates/editor/src/element.rs                                                         |   15 
crates/editor/src/items.rs                                                           |  139 
crates/editor/src/persistence.rs                                                     |   36 
crates/editor/src/scroll.rs                                                          |  348 
crates/editor/src/scroll/actions.rs                                                  |  159 
crates/editor/src/scroll/autoscroll.rs                                               |  246 
crates/editor/src/scroll/scroll_amount.rs                                            |   48 
crates/editor/src/selections_collection.rs                                           |    2 
crates/editor/src/test/editor_lsp_test_context.rs                                    |   11 
crates/file_finder/src/file_finder.rs                                                |   30 
crates/go_to_line/src/go_to_line.rs                                                  |    2 
crates/gpui/Cargo.toml                                                               |    1 
crates/gpui/grammars/context-predicate/bindings/node/binding.cc                      |   12 
crates/gpui/src/app.rs                                                               |  178 
crates/gpui/src/elements/tooltip.rs                                                  |    1 
crates/gpui/src/executor.rs                                                          |   84 
crates/gpui/src/keymap.rs                                                            |   15 
crates/gpui/src/presenter.rs                                                         |   33 
crates/gpui/src/test.rs                                                              |   88 
crates/gpui_macros/Cargo.toml                                                        |    1 
crates/gpui_macros/src/gpui_macros.rs                                                |   27 
crates/journal/src/journal.rs                                                        |    8 
crates/outline/src/outline.rs                                                        |    4 
crates/project/Cargo.toml                                                            |    1 
crates/project/src/project.rs                                                        |  427 
crates/project/src/project_tests.rs                                                  |    6 
crates/project/src/worktree.rs                                                       |   65 
crates/project_panel/src/project_panel.rs                                            |   22 
crates/project_symbols/src/project_symbols.rs                                        |    3 
crates/rope/Cargo.toml                                                               |    2 
crates/rope/src/rope.rs                                                              |   39 
crates/rpc/proto/zed.proto                                                           |   43 
crates/rpc/src/peer.rs                                                               |    2 
crates/rpc/src/proto.rs                                                              |    7 
crates/rpc/src/rpc.rs                                                                |    2 
crates/search/src/buffer_search.rs                                                   |    3 
crates/search/src/project_search.rs                                                  |   26 
crates/settings/Cargo.toml                                                           |    1 
crates/settings/src/settings.rs                                                      |   85 
crates/sqlez/.gitignore                                                              |    2 
crates/sqlez/Cargo.lock                                                              |  150 
crates/sqlez/Cargo.toml                                                              |   16 
crates/sqlez/src/bindable.rs                                                         |  352 
crates/sqlez/src/connection.rs                                                       |  334 
crates/sqlez/src/domain.rs                                                           |   56 
crates/sqlez/src/lib.rs                                                              |   11 
crates/sqlez/src/migrations.rs                                                       |  260 
crates/sqlez/src/savepoint.rs                                                        |  148 
crates/sqlez/src/statement.rs                                                        |  491 
crates/sqlez/src/thread_safe_connection.rs                                           |  359 
crates/sqlez/src/typed_statements.rs                                                 |   60 
crates/sqlez/src/util.rs                                                             |   32 
crates/sqlez_macros/Cargo.toml                                                       |   17 
crates/sqlez_macros/src/sqlez_macros.rs                                              |   93 
crates/terminal/Cargo.toml                                                           |   13 
crates/terminal/src/terminal.rs                                                      |  159 
crates/terminal/src/terminal_container_view.rs                                       |  676 
crates/terminal/src/terminal_view.rs                                                 |  471 
crates/terminal/src/tests/terminal_test_context.rs                                   |  137 
crates/terminal_view/Cargo.toml                                                      |   44 
crates/terminal_view/README.md                                                       |    0 
crates/terminal_view/scripts/print256color.sh                                        |    0 
crates/terminal_view/scripts/truecolor.sh                                            |    0 
crates/terminal_view/src/persistence.rs                                              |   51 
crates/terminal_view/src/terminal_element.rs                                         |   24 
crates/terminal_view/src/terminal_view.rs                                            | 1091 
crates/theme/src/theme.rs                                                            |    8 
crates/theme_testbench/src/theme_testbench.rs                                        |   26 
crates/util/Cargo.toml                                                               |    2 
crates/util/src/channel.rs                                                           |   40 
crates/util/src/lib.rs                                                               |   57 
crates/util/src/paths.rs                                                             |    0 
crates/vim/src/editor_events.rs                                                      |   17 
crates/vim/src/insert.rs                                                             |    2 
crates/vim/src/normal.rs                                                             |   61 
crates/vim/src/normal/change.rs                                                      |    4 
crates/vim/src/normal/delete.rs                                                      |    2 
crates/vim/src/state.rs                                                              |    2 
crates/vim/src/test/vim_test_context.rs                                              |   14 
crates/vim/src/vim.rs                                                                |   19 
crates/vim/src/visual.rs                                                             |    4 
crates/workspace/Cargo.toml                                                          |    7 
crates/workspace/src/dock.rs                                                         |  107 
crates/workspace/src/item.rs                                                         |  918 
crates/workspace/src/notifications.rs                                                |  334 
crates/workspace/src/pane.rs                                                         |   22 
crates/workspace/src/pane_group.rs                                                   |   14 
crates/workspace/src/persistence.rs                                                  |  836 
crates/workspace/src/persistence/model.rs                                            |  315 
crates/workspace/src/searchable.rs                                                   |    2 
crates/workspace/src/shared_screen.rs                                                |   32 
crates/workspace/src/workspace.rs                                                    |  979 
crates/workspace/test.db                                                             |    0 
crates/zed/Cargo.toml                                                                |    7 
crates/zed/src/feedback.rs                                                           |    4 
crates/zed/src/languages.rs                                                          |    3 
crates/zed/src/languages/c/outline.scm                                               |   64 
crates/zed/src/languages/cpp/outline.scm                                             |   48 
crates/zed/src/languages/racket/brackets.scm                                         |    3 
crates/zed/src/languages/racket/config.toml                                          |    9 
crates/zed/src/languages/racket/highlights.scm                                       |   34 
crates/zed/src/languages/racket/indents.scm                                          |    3 
crates/zed/src/languages/racket/outline.scm                                          |   10 
crates/zed/src/languages/ruby/brackets.scm                                           |    2 
crates/zed/src/languages/scheme/brackets.scm                                         |    3 
crates/zed/src/languages/scheme/config.toml                                          |    9 
crates/zed/src/languages/scheme/highlights.scm                                       |   28 
crates/zed/src/languages/scheme/indents.scm                                          |    3 
crates/zed/src/languages/scheme/outline.scm                                          |   10 
crates/zed/src/main.rs                                                               |   91 
crates/zed/src/zed.rs                                                                |  208 
styles/src/styleTree/app.ts                                                          |    2 
styles/src/styleTree/simpleMessageNotification.ts                                    |   31 
175 files changed, 14,004 insertions(+), 7,208 deletions(-)

Detailed changes

.github/workflows/release_actions.yml

@@ -14,7 +14,7 @@ jobs:
         content: |
           📣 Zed ${{ github.event.release.tag_name }} was just released!
           
-          Restart your Zed or head to https://zed.dev/releases to grab it.
+          Restart your Zed or head to https://zed.dev/releases/latest to grab it.
         
           ```md
           # Changelog

.gitignore

@@ -18,3 +18,4 @@ DerivedData/
 .swiftpm/config/registries.json
 .swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
 .netrc
+**/*.db

Cargo.lock

@@ -2,13 +2,19 @@
 # It is not intended for manual editing.
 version = 3
 
+[[package]]
+name = "Inflector"
+version = "0.11.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3"
+
 [[package]]
 name = "activity_indicator"
 version = "0.1.0"
 dependencies = [
  "auto_update",
  "editor",
- "futures 0.3.24",
+ "futures 0.3.25",
  "gpui",
  "language",
  "project",
@@ -45,16 +51,16 @@ version = "0.7.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47"
 dependencies = [
- "getrandom 0.2.7",
+ "getrandom 0.2.8",
  "once_cell",
  "version_check",
 ]
 
 [[package]]
 name = "aho-corasick"
-version = "0.7.19"
+version = "0.7.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e"
+checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac"
 dependencies = [
  "memchr",
 ]
@@ -107,6 +113,12 @@ dependencies = [
  "winapi 0.3.9",
 ]
 
+[[package]]
+name = "aliasable"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "250f629c0161ad8107cf89319e990051fae62832fd343083bea452d93e2205fd"
+
 [[package]]
 name = "ambient-authority"
 version = "0.0.1"
@@ -133,9 +145,12 @@ dependencies = [
 
 [[package]]
 name = "anyhow"
-version = "1.0.65"
+version = "1.0.66"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "98161a4e3e2184da77bb14f02184cdd111e83bbbcc9979dfee3c44b9a85f5602"
+checksum = "216261ddc8289130e551ddcd5ce8a064710c0d064a4d2895c67151c92b5443f6"
+dependencies = [
+ "backtrace",
+]
 
 [[package]]
 name = "arrayref"
@@ -183,9 +198,9 @@ dependencies = [
 
 [[package]]
 name = "async-channel"
-version = "1.7.1"
+version = "1.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e14485364214912d3b19cc3435dde4df66065127f05fa0d75c712f36f12c2f28"
+checksum = "cf46fee83e5ccffc220104713af3292ff9bc7c64c7de289f66dae8e38d826833"
 dependencies = [
  "concurrent-queue",
  "event-listener",
@@ -220,15 +235,15 @@ dependencies = [
 
 [[package]]
 name = "async-executor"
-version = "1.4.1"
+version = "1.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "871f9bb5e0a22eeb7e8cf16641feb87c9dc67032ccf8ff49e772eb9941d3a965"
+checksum = "17adb73da160dfb475c183343c8cccd80721ea5a605d3eb57125f0a7b7a92d0b"
 dependencies = [
+ "async-lock",
  "async-task",
  "concurrent-queue",
  "fastrand",
  "futures-lite",
- "once_cell",
  "slab",
 ]
 
@@ -246,31 +261,32 @@ dependencies = [
 
 [[package]]
 name = "async-io"
-version = "1.9.0"
+version = "1.12.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "83e21f3a490c72b3b0cf44962180e60045de2925d8dff97918f7ee43c8f637c7"
+checksum = "8c374dda1ed3e7d8f0d9ba58715f924862c63eae6849c92d3a18e7fbde9e2794"
 dependencies = [
+ "async-lock",
  "autocfg 1.1.0",
  "concurrent-queue",
  "futures-lite",
  "libc",
  "log",
- "once_cell",
  "parking",
  "polling",
  "slab",
  "socket2",
  "waker-fn",
- "winapi 0.3.9",
+ "windows-sys 0.42.0",
 ]
 
 [[package]]
 name = "async-lock"
-version = "2.5.0"
+version = "2.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e97a171d191782fba31bb902b14ad94e24a68145032b7eedf871ab0bc0d077b6"
+checksum = "c8101efe8695a6c17e02911402145357e718ac92d3ff88ae8419e84b1707b685"
 dependencies = [
  "event-listener",
+ "futures-lite",
 ]
 
 [[package]]
@@ -290,26 +306,26 @@ name = "async-pipe"
 version = "0.1.3"
 source = "git+https://github.com/zed-industries/async-pipe-rs?rev=82d00a04211cf4e1236029aa03e6b6ce2a74c553#82d00a04211cf4e1236029aa03e6b6ce2a74c553"
 dependencies = [
- "futures 0.3.24",
+ "futures 0.3.25",
  "log",
 ]
 
 [[package]]
 name = "async-process"
-version = "1.5.0"
+version = "1.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "02111fd8655a613c25069ea89fc8d9bb89331fa77486eb3bc059ee757cfa481c"
+checksum = "6381ead98388605d0d9ff86371043b5aa922a3905824244de40dc263a14fcba4"
 dependencies = [
  "async-io",
+ "async-lock",
  "autocfg 1.1.0",
  "blocking",
  "cfg-if 1.0.0",
  "event-listener",
  "futures-lite",
  "libc",
- "once_cell",
  "signal-hook",
- "winapi 0.3.9",
+ "windows-sys 0.42.0",
 ]
 
 [[package]]
@@ -323,6 +339,17 @@ dependencies = [
  "syn",
 ]
 
+[[package]]
+name = "async-recursion"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2cda8f4bcc10624c4e85bc66b3f452cca98cfa5ca002dc83a16aad2367641bea"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "async-stream"
 version = "0.3.3"
@@ -364,9 +391,9 @@ dependencies = [
 
 [[package]]
 name = "async-trait"
-version = "0.1.57"
+version = "0.1.59"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "76464446b8bc32758d7e88ee1a804d9914cd9b1cb264c029899680b0be29826f"
+checksum = "31e6e93155431f3931513b243d371981bb2770112b370c82745a1d19d2f99364"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -428,6 +455,7 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "client",
+ "db",
  "gpui",
  "isahc",
  "lazy_static",
@@ -461,15 +489,15 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
 
 [[package]]
 name = "axum"
-version = "0.5.16"
+version = "0.5.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c9e3356844c4d6a6d6467b8da2cffb4a2820be256f50a3a386c9d152bab31043"
+checksum = "acee9fd5073ab6b045a275b3e709c163dd36c90685219cb21804a147b58dba43"
 dependencies = [
  "async-trait",
  "axum-core",
  "base64",
  "bitflags",
- "bytes 1.2.1",
+ "bytes 1.3.0",
  "futures-util",
  "headers",
  "http",
@@ -484,7 +512,7 @@ dependencies = [
  "serde",
  "serde_json",
  "serde_urlencoded",
- "sha-1 0.10.0",
+ "sha-1 0.10.1",
  "sync_wrapper",
  "tokio",
  "tokio-tungstenite",
@@ -496,12 +524,12 @@ dependencies = [
 
 [[package]]
 name = "axum-core"
-version = "0.2.8"
+version = "0.2.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9f0c0a60006f2a293d82d571f635042a72edf927539b7685bd62d361963839b"
+checksum = "37e5939e02c56fecd5c017c37df4238c0a839fa76b7f97acdd7efb804fd181cc"
 dependencies = [
  "async-trait",
- "bytes 1.2.1",
+ "bytes 1.3.0",
  "futures-util",
  "http",
  "http-body",
@@ -517,7 +545,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "69034b3b0fd97923eee2ce8a47540edb21e07f48f87f67d44bb4271cec622bdb"
 dependencies = [
  "axum",
- "bytes 1.2.1",
+ "bytes 1.3.0",
  "futures-util",
  "http",
  "mime",
@@ -546,17 +574,30 @@ dependencies = [
  "rustc-demangle",
 ]
 
+[[package]]
+name = "bae"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "33b8de67cc41132507eeece2584804efcb15f85ba516e34c944b7667f480397a"
+dependencies = [
+ "heck 0.3.3",
+ "proc-macro-error",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "base64"
-version = "0.13.0"
+version = "0.13.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd"
+checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
 
 [[package]]
 name = "base64ct"
-version = "1.5.2"
+version = "1.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ea2b2456fd614d856680dcd9fcc660a51a820fa09daef2e49772b56a193c8474"
+checksum = "b645a089122eccb6111b4f81cbc1a49f5900ac4666bb93ac027feaecf15607bf"
 
 [[package]]
 name = "bincode"
@@ -622,16 +663,61 @@ dependencies = [
 
 [[package]]
 name = "blocking"
-version = "1.2.0"
+version = "1.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c6ccb65d468978a086b69884437ded69a90faab3bbe6e67f242173ea728acccc"
+checksum = "3c67b173a56acffd6d2326fb7ab938ba0b00a71480e14902b2591c87bc5741e8"
 dependencies = [
  "async-channel",
+ "async-lock",
  "async-task",
  "atomic-waker",
  "fastrand",
  "futures-lite",
- "once_cell",
+]
+
+[[package]]
+name = "borsh"
+version = "0.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "15bf3650200d8bffa99015595e10f1fbd17de07abbc25bb067da79e769939bfa"
+dependencies = [
+ "borsh-derive",
+ "hashbrown 0.11.2",
+]
+
+[[package]]
+name = "borsh-derive"
+version = "0.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6441c552f230375d18e3cc377677914d2ca2b0d36e52129fe15450a2dce46775"
+dependencies = [
+ "borsh-derive-internal",
+ "borsh-schema-derive-internal",
+ "proc-macro-crate",
+ "proc-macro2",
+ "syn",
+]
+
+[[package]]
+name = "borsh-derive-internal"
+version = "0.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5449c28a7b352f2d1e592a8a28bf139bc71afb0764a14f3c02500935d8c44065"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "borsh-schema-derive-internal"
+version = "0.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cdbd5696d8bfa21d53d9fe39a714a18538bad11492a42d066dbbc395fb1951c0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -673,15 +759,36 @@ dependencies = [
 
 [[package]]
 name = "bumpalo"
-version = "3.11.0"
+version = "3.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba"
+
+[[package]]
+name = "bytecheck"
+version = "0.6.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d11cac2c12b5adc6570dad2ee1b87eff4955dac476fe12d81e5fdd352e52406f"
+dependencies = [
+ "bytecheck_derive",
+ "ptr_meta",
+]
+
+[[package]]
+name = "bytecheck_derive"
+version = "0.6.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1ad822118d20d2c234f427000d5acc36eabe1e29a348c89b63dd60b13f28e5d"
+checksum = "13e576ebe98e605500b3c8041bb888e966653577172df6dd97398714eb30b9bf"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
 
 [[package]]
 name = "bytemuck"
-version = "1.12.1"
+version = "1.12.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2f5715e491b5a1598fc2bef5a606847b5dc1d48ea625bd3c02c00de8285591da"
+checksum = "aaa3a8d9a1ca92e282c96a32d6511b695d7d994d1d102ba85d279f9b2756947f"
 
 [[package]]
 name = "byteorder"
@@ -701,15 +808,9 @@ dependencies = [
 
 [[package]]
 name = "bytes"
-version = "1.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ec8a7b6a70fde80372154c65702f00a0f56f3e1c36abbc6c440484be248856db"
-
-[[package]]
-name = "cache-padded"
-version = "1.2.0"
+version = "1.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1db59621ec70f09c5e9b597b220c7a2b43611f4710dc03ceb8748637775692c"
+checksum = "dfb24e866b15a1af2a1b663f10c6b6b8f397a84aadb828f12e5b289ec23a3a3c"
 
 [[package]]
 name = "call"
@@ -719,7 +820,7 @@ dependencies = [
  "async-broadcast",
  "client",
  "collections",
- "futures 0.3.24",
+ "futures 0.3.25",
  "gpui",
  "live_kit_client",
  "media",
@@ -802,9 +903,9 @@ checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6"
 
 [[package]]
 name = "cc"
-version = "1.0.73"
+version = "1.0.77"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11"
+checksum = "e9f73505338f7d905b19d18738976aae232eb46b8efc15554ffc56deb5d9ebe4"
 dependencies = [
  "jobserver",
 ]
@@ -832,15 +933,16 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
 [[package]]
 name = "chrono"
-version = "0.4.22"
+version = "0.4.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bfd4d1b31faaa3a89d7934dbded3111da0d2ef28e3ebccdb4f0179f5929d1ef1"
+checksum = "16b0a3d9ed01224b22057780a37bb8c5dbfe1be8ba48678e7bf57ec4b385411f"
 dependencies = [
  "iana-time-zone",
  "js-sys",
  "num-integer",
  "num-traits",
- "time 0.1.44",
+ "serde",
+ "time 0.1.45",
  "wasm-bindgen",
  "winapi 0.3.9",
 ]
@@ -888,9 +990,9 @@ dependencies = [
 
 [[package]]
 name = "clap"
-version = "3.2.22"
+version = "3.2.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "86447ad904c7fb335a790c9d7fe3d0d971dc523b8ccd1561a520de9a85302750"
+checksum = "71655c45cb9845d3270c9d6df84ebe72b4dad3c2ba3f7023ad47c144e4e473a5"
 dependencies = [
  "atty",
  "bitflags",
@@ -900,7 +1002,7 @@ dependencies = [
  "once_cell",
  "strsim 0.10.0",
  "termcolor",
- "textwrap 0.15.1",
+ "textwrap 0.16.0",
 ]
 
 [[package]]
@@ -930,7 +1032,7 @@ name = "cli"
 version = "0.1.0"
 dependencies = [
  "anyhow",
- "clap 3.2.22",
+ "clap 3.2.23",
  "core-foundation",
  "core-services",
  "dirs 3.0.2",
@@ -944,11 +1046,11 @@ name = "client"
 version = "0.1.0"
 dependencies = [
  "anyhow",
- "async-recursion",
+ "async-recursion 0.3.2",
  "async-tungstenite",
  "collections",
  "db",
- "futures 0.3.24",
+ "futures 0.3.25",
  "gpui",
  "image",
  "isahc",
@@ -964,11 +1066,11 @@ dependencies = [
  "sum_tree",
  "tempfile",
  "thiserror",
- "time 0.3.15",
+ "time 0.3.17",
  "tiny_http",
  "url",
  "util",
- "uuid 1.2.1",
+ "uuid 1.2.2",
 ]
 
 [[package]]
@@ -980,9 +1082,9 @@ dependencies = [
 
 [[package]]
 name = "cmake"
-version = "0.1.48"
+version = "0.1.49"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e8ad8cef104ac57b68b89df3208164d228503abbdce70f6880ffa3d970e7443a"
+checksum = "db34956e100b30725f2eb215f90d4871051239535632f84fea3bc92722c66b7c"
 dependencies = [
  "cc",
 ]
@@ -1028,24 +1130,24 @@ dependencies = [
 
 [[package]]
 name = "collab"
-version = "0.2.4"
+version = "0.3.1"
 dependencies = [
  "anyhow",
- "async-trait",
  "async-tungstenite",
  "axum",
  "axum-extra",
  "base64",
  "call",
- "clap 3.2.22",
+ "clap 3.2.23",
  "client",
  "collections",
  "ctor",
+ "dashmap",
  "editor",
  "env_logger",
  "envy",
  "fs",
- "futures 0.3.24",
+ "futures 0.3.25",
  "git",
  "gpui",
  "hyper",
@@ -1058,19 +1160,22 @@ dependencies = [
  "lsp",
  "nanoid",
  "parking_lot 0.11.2",
+ "pretty_assertions",
  "project",
  "prometheus",
  "rand 0.8.5",
  "reqwest",
  "rpc",
  "scrypt",
+ "sea-orm",
+ "sea-query",
  "serde",
  "serde_json",
  "settings",
  "sha-1 0.9.8",
  "sqlx",
  "theme",
- "time 0.3.15",
+ "time 0.3.17",
  "tokio",
  "tokio-tungstenite",
  "toml",
@@ -1094,7 +1199,7 @@ dependencies = [
  "clock",
  "collections",
  "editor",
- "futures 0.3.24",
+ "futures 0.3.25",
  "fuzzy",
  "gpui",
  "log",
@@ -1143,11 +1248,11 @@ dependencies = [
 
 [[package]]
 name = "concurrent-queue"
-version = "1.2.4"
+version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "af4780a44ab5696ea9e28294517f1fffb421a83a25af521333c838635509db9c"
+checksum = "bd7bef69dc86e3c610e4e7aed41035e2a7ed12e72dd7530f61327a6579a4390b"
 dependencies = [
- "cache-padded",
+ "crossbeam-utils 0.8.14",
 ]
 
 [[package]]
@@ -1374,7 +1479,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521"
 dependencies = [
  "cfg-if 1.0.0",
- "crossbeam-utils 0.8.12",
+ "crossbeam-utils 0.8.14",
 ]
 
 [[package]]
@@ -1385,30 +1490,30 @@ checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc"
 dependencies = [
  "cfg-if 1.0.0",
  "crossbeam-epoch",
- "crossbeam-utils 0.8.12",
+ "crossbeam-utils 0.8.14",
 ]
 
 [[package]]
 name = "crossbeam-epoch"
-version = "0.9.11"
+version = "0.9.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f916dfc5d356b0ed9dae65f1db9fc9770aa2851d2662b988ccf4fe3516e86348"
+checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a"
 dependencies = [
  "autocfg 1.1.0",
  "cfg-if 1.0.0",
- "crossbeam-utils 0.8.12",
- "memoffset",
+ "crossbeam-utils 0.8.14",
+ "memoffset 0.7.1",
  "scopeguard",
 ]
 
 [[package]]
 name = "crossbeam-queue"
-version = "0.3.6"
+version = "0.3.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1cd42583b04998a5363558e5f9291ee5a5ff6b49944332103f251e7479a82aa7"
+checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add"
 dependencies = [
  "cfg-if 1.0.0",
- "crossbeam-utils 0.8.12",
+ "crossbeam-utils 0.8.14",
 ]
 
 [[package]]
@@ -1424,9 +1529,9 @@ dependencies = [
 
 [[package]]
 name = "crossbeam-utils"
-version = "0.8.12"
+version = "0.8.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "edbafec5fa1f196ca66527c1b12c2ec4745ca14b50f1ad8f9f6f720b55d11fac"
+checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f"
 dependencies = [
  "cfg-if 1.0.0",
 ]
@@ -1453,9 +1558,9 @@ dependencies = [
 
 [[package]]
 name = "ctor"
-version = "0.1.23"
+version = "0.1.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cdffe87e1d521a10f9696f833fe502293ea446d7f256c06128293a4119bdf4cb"
+checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096"
 dependencies = [
  "quote",
  "syn",
@@ -1478,9 +1583,9 @@ dependencies = [
 
 [[package]]
 name = "curl-sys"
-version = "0.4.56+curl-7.83.1"
+version = "0.4.59+curl-7.86.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6093e169dd4de29e468fa649fbae11cdcd5551c81fe5bf1b0677adad7ef3d26f"
+checksum = "6cfce34829f448b08f55b7db6d0009e23e2e86a34e8c2b366269bf5799b4a407"
 dependencies = [
  "cc",
  "libc",
@@ -1494,9 +1599,9 @@ dependencies = [
 
 [[package]]
 name = "cxx"
-version = "1.0.79"
+version = "1.0.83"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f83d0ebf42c6eafb8d7c52f7e5f2d3003b89c7aa4fd2b79229209459a849af8"
+checksum = "bdf07d07d6531bfcdbe9b8b739b104610c6508dcc4d63b410585faf338241daf"
 dependencies = [
  "cc",
  "cxxbridge-flags",
@@ -1506,9 +1611,9 @@ dependencies = [
 
 [[package]]
 name = "cxx-build"
-version = "1.0.79"
+version = "1.0.83"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "07d050484b55975889284352b0ffc2ecbda25c0c55978017c132b29ba0818a86"
+checksum = "d2eb5b96ecdc99f72657332953d4d9c50135af1bac34277801cc3937906ebd39"
 dependencies = [
  "cc",
  "codespan-reporting",
@@ -1521,21 +1626,34 @@ dependencies = [
 
 [[package]]
 name = "cxxbridge-flags"
-version = "1.0.79"
+version = "1.0.83"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "99d2199b00553eda8012dfec8d3b1c75fce747cf27c169a270b3b99e3448ab78"
+checksum = "ac040a39517fd1674e0f32177648334b0f4074625b5588a64519804ba0553b12"
 
 [[package]]
 name = "cxxbridge-macro"
-version = "1.0.79"
+version = "1.0.83"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dcb67a6de1f602736dd7eaead0080cf3435df806c61b24b13328db128c58868f"
+checksum = "1362b0ddcfc4eb0a1f57b68bd77dd99f0e826958a96abd0ae9bd092e114ffed6"
 dependencies = [
  "proc-macro2",
  "quote",
  "syn",
 ]
 
+[[package]]
+name = "dashmap"
+version = "5.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc"
+dependencies = [
+ "cfg-if 1.0.0",
+ "hashbrown 0.12.3",
+ "lock_api",
+ "once_cell",
+ "parking_lot_core 0.9.5",
+]
+
 [[package]]
 name = "data-url"
 version = "0.1.1"
@@ -1552,15 +1670,18 @@ dependencies = [
  "anyhow",
  "async-trait",
  "collections",
+ "env_logger",
  "gpui",
+ "indoc",
  "lazy_static",
  "log",
  "parking_lot 0.11.2",
- "rusqlite",
- "rusqlite_migration",
  "serde",
- "serde_rusqlite",
+ "smol",
+ "sqlez",
+ "sqlez_macros",
  "tempdir",
+ "util",
 ]
 
 [[package]]
@@ -1575,12 +1696,13 @@ dependencies = [
 
 [[package]]
 name = "dhat"
-version = "0.3.1"
+version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0684eaa19a59be283a6f99369917b679bd4d1d06604b2eb2e2f87b4bbd67668d"
+checksum = "4f2aaf837aaf456f6706cb46386ba8dffd4013a757e36f4ea05c20dd46b209a3"
 dependencies = [
  "backtrace",
  "lazy_static",
+ "mintex",
  "parking_lot 0.12.1",
  "rustc-hash",
  "serde",
@@ -1609,6 +1731,12 @@ dependencies = [
  "workspace",
 ]
 
+[[package]]
+name = "diff"
+version = "0.1.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
+
 [[package]]
 name = "digest"
 version = "0.9.0"
@@ -1620,9 +1748,9 @@ dependencies = [
 
 [[package]]
 name = "digest"
-version = "0.10.5"
+version = "0.10.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c"
+checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f"
 dependencies = [
  "block-buffer 0.10.3",
  "crypto-common",
@@ -1737,9 +1865,10 @@ dependencies = [
  "collections",
  "context_menu",
  "ctor",
+ "db",
  "drag_and_drop",
  "env_logger",
- "futures 0.3.24",
+ "futures 0.3.25",
  "fuzzy",
  "git",
  "gpui",
@@ -1760,6 +1889,7 @@ dependencies = [
  "smallvec",
  "smol",
  "snippet",
+ "sqlez",
  "sum_tree",
  "text",
  "theme",
@@ -1789,9 +1919,9 @@ dependencies = [
 
 [[package]]
 name = "env_logger"
-version = "0.9.1"
+version = "0.9.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c90bf5f19754d10198ccb95b70664fc925bd1fc090a0fd9a6ebc54acc8cd6272"
+checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7"
 dependencies = [
  "atty",
  "humantime",
@@ -1880,12 +2010,6 @@ version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"
 
-[[package]]
-name = "fallible-streaming-iterator"
-version = "0.1.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"
-
 [[package]]
 name = "fastrand"
 version = "1.8.0"
@@ -1933,12 +2057,12 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
 
 [[package]]
 name = "flate2"
-version = "1.0.24"
+version = "1.0.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6"
+checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841"
 dependencies = [
  "crc32fast",
- "miniz_oxide 0.5.4",
+ "miniz_oxide 0.6.2",
 ]
 
 [[package]]
@@ -2059,7 +2183,7 @@ dependencies = [
  "async-trait",
  "collections",
  "fsevent",
- "futures 0.3.24",
+ "futures 0.3.25",
  "git2",
  "gpui",
  "lazy_static",
@@ -2136,9 +2260,9 @@ checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678"
 
 [[package]]
 name = "futures"
-version = "0.3.24"
+version = "0.3.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7f21eda599937fba36daeb58a22e8f5cee2d14c4a17b5b7739c7c8e5e3b8230c"
+checksum = "38390104763dc37a5145a53c29c63c1290b5d316d6086ec32c293f6736051bb0"
 dependencies = [
  "futures-channel",
  "futures-core",
@@ -2151,9 +2275,9 @@ dependencies = [
 
 [[package]]
 name = "futures-channel"
-version = "0.3.24"
+version = "0.3.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "30bdd20c28fadd505d0fd6712cdfcb0d4b5648baf45faef7f852afb2399bb050"
+checksum = "52ba265a92256105f45b719605a571ffe2d1f0fea3807304b522c1d778f79eed"
 dependencies = [
  "futures-core",
  "futures-sink",
@@ -2161,15 +2285,15 @@ dependencies = [
 
 [[package]]
 name = "futures-core"
-version = "0.3.24"
+version = "0.3.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4e5aa3de05362c3fb88de6531e6296e85cde7739cccad4b9dfeeb7f6ebce56bf"
+checksum = "04909a7a7e4633ae6c4a9ab280aeb86da1236243a77b694a49eacd659a4bd3ac"
 
 [[package]]
 name = "futures-executor"
-version = "0.3.24"
+version = "0.3.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9ff63c23854bee61b6e9cd331d523909f238fc7636290b96826e9cfa5faa00ab"
+checksum = "7acc85df6714c176ab5edf386123fafe217be88c0840ec11f199441134a074e2"
 dependencies = [
  "futures-core",
  "futures-task",
@@ -2178,9 +2302,9 @@ dependencies = [
 
 [[package]]
 name = "futures-intrusive"
-version = "0.4.0"
+version = "0.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "62007592ac46aa7c2b6416f7deb9a8a8f63a01e0f1d6e1787d5630170db2b63e"
+checksum = "a604f7a68fbf8103337523b1fadc8ade7361ee3f112f7c680ad179651616aed5"
 dependencies = [
  "futures-core",
  "lock_api",
@@ -2189,9 +2313,9 @@ dependencies = [
 
 [[package]]
 name = "futures-io"
-version = "0.3.24"
+version = "0.3.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbf4d2a7a308fd4578637c0b17c7e1c7ba127b8f6ba00b29f717e9655d85eb68"
+checksum = "00f5fb52a06bdcadeb54e8d3671f8888a39697dcb0b81b23b55174030427f4eb"
 
 [[package]]
 name = "futures-lite"
@@ -2210,9 +2334,9 @@ dependencies = [
 
 [[package]]
 name = "futures-macro"
-version = "0.3.24"
+version = "0.3.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "42cd15d1c7456c04dbdf7e88bcd69760d74f3a798d6444e16974b505b0e62f17"
+checksum = "bdfb8ce053d86b91919aad980c220b1fb8401a9394410e1c289ed7e66b61835d"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2221,21 +2345,21 @@ dependencies = [
 
 [[package]]
 name = "futures-sink"
-version = "0.3.24"
+version = "0.3.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "21b20ba5a92e727ba30e72834706623d94ac93a725410b6a6b6fbc1b07f7ba56"
+checksum = "39c15cf1a4aa79df40f1bb462fb39676d0ad9e366c2a33b590d7c66f4f81fcf9"
 
 [[package]]
 name = "futures-task"
-version = "0.3.24"
+version = "0.3.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a6508c467c73851293f390476d4491cf4d227dbabcd4170f3bb6044959b294f1"
+checksum = "2ffb393ac5d9a6eaa9d3fdf37ae2776656b706e200c8e16b1bdb227f5198e6ea"
 
 [[package]]
 name = "futures-util"
-version = "0.3.24"
+version = "0.3.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "44fb6cb1be61cc1d2e43b262516aafcf63b241cffdb1d3fa115f91d9c7b09c90"
+checksum = "197676987abd2f9cadff84926f410af1c183608d36641465df73ae8211dc65d6"
 dependencies = [
  "futures 0.1.31",
  "futures-channel",
@@ -2291,9 +2415,9 @@ dependencies = [
 
 [[package]]
 name = "getrandom"
-version = "0.2.7"
+version = "0.2.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4eb1a864a501629691edf6c15a593b7a51eebaa1e8468e9ddc623de7c9b58ec6"
+checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31"
 dependencies = [
  "cfg-if 1.0.0",
  "libc",

Cargo.toml

@@ -45,6 +45,8 @@ members = [
     "crates/search",
     "crates/settings",
     "crates/snippet",
+    "crates/sqlez",
+    "crates/sqlez_macros",
     "crates/sum_tree",
     "crates/terminal",
     "crates/text",
@@ -81,3 +83,4 @@ split-debuginfo = "unpacked"
 [profile.release]
 debug = true
 
+

assets/keymaps/vim.json

@@ -8,6 +8,22 @@
                     "Namespace": "G"
                 }
             ],
+            "i": [
+                "vim::PushOperator",
+                {
+                    "Object": {
+                        "around": false
+                    }
+                }
+            ],
+            "a": [
+                "vim::PushOperator",
+                {
+                    "Object": {
+                        "around": true
+                    }
+                }
+            ],
             "h": "vim::Left",
             "backspace": "vim::Backspace",
             "j": "vim::Down",
@@ -38,22 +54,6 @@
             ],
             "%": "vim::Matching",
             "escape": "editor::Cancel",
-            "i": [
-                "vim::PushOperator",
-                {
-                    "Object": {
-                        "around": false
-                    }
-                }
-            ],
-            "a": [
-                "vim::PushOperator",
-                {
-                    "Object": {
-                        "around": true
-                    }
-                }
-            ],
             "0": "vim::StartOfLine", // When no number operator present, use start of line motion
             "1": [
                 "vim::Number",
@@ -110,6 +110,12 @@
                 "vim::PushOperator",
                 "Yank"
             ],
+            "z": [
+                "vim::PushOperator",
+                {
+                    "Namespace": "Z"
+                }
+            ],
             "i": [
                 "vim::SwitchMode",
                 "Insert"
@@ -147,6 +153,30 @@
                 {
                     "focus": true
                 }
+            ],
+            "ctrl-f": [
+                "vim::Scroll",
+                "PageDown"
+            ],
+            "ctrl-b": [
+                "vim::Scroll",
+                "PageUp"
+            ],
+            "ctrl-d": [
+                "vim::Scroll",
+                "HalfPageDown"
+            ],
+            "ctrl-u": [
+                "vim::Scroll",
+                "HalfPageUp"
+            ],
+            "ctrl-e": [
+                "vim::Scroll",
+                "LineDown"
+            ],
+            "ctrl-y": [
+                "vim::Scroll",
+                "LineUp"
             ]
         }
     },
@@ -188,6 +218,18 @@
             "y": "vim::CurrentLine"
         }
     },
+    {
+        "context": "Editor && vim_operator == z",
+        "bindings": {
+            "t": "editor::ScrollCursorTop",
+            "z": "editor::ScrollCursorCenter",
+            "b": "editor::ScrollCursorBottom",
+            "escape": [
+                "vim::SwitchMode",
+                "Normal"
+            ]
+        }
+    },
     {
         "context": "Editor && VimObject",
         "bindings": {

crates/activity_indicator/src/activity_indicator.rs

@@ -11,7 +11,7 @@ use settings::Settings;
 use smallvec::SmallVec;
 use std::{cmp::Reverse, fmt::Write, sync::Arc};
 use util::ResultExt;
-use workspace::{ItemHandle, StatusItemView, Workspace};
+use workspace::{item::ItemHandle, StatusItemView, Workspace};
 
 actions!(lsp_status, [ShowErrorMessage]);
 

crates/auto_update/Cargo.toml

@@ -8,6 +8,7 @@ path = "src/auto_update.rs"
 doctest = false
 
 [dependencies]
+db = { path = "../db" }
 client = { path = "../client" }
 gpui = { path = "../gpui" }
 menu = { path = "../menu" }

crates/auto_update/src/auto_update.rs

@@ -1,17 +1,18 @@
 mod update_notification;
 
 use anyhow::{anyhow, Context, Result};
-use client::{http::HttpClient, ZED_SECRET_CLIENT_TOKEN, ZED_SERVER_URL};
+use client::{http::HttpClient, ZED_SECRET_CLIENT_TOKEN};
+use db::kvp::KEY_VALUE_STORE;
 use gpui::{
     actions, platform::AppVersion, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
     MutableAppContext, Task, WeakViewHandle,
 };
 use lazy_static::lazy_static;
 use serde::Deserialize;
-use settings::ReleaseChannel;
 use smol::{fs::File, io::AsyncReadExt, process::Command};
 use std::{env, ffi::OsString, path::PathBuf, sync::Arc, time::Duration};
 use update_notification::UpdateNotification;
+use util::channel::ReleaseChannel;
 use workspace::Workspace;
 
 const SHOULD_SHOW_UPDATE_NOTIFICATION_KEY: &str = "auto-updater-should-show-updated-notification";
@@ -41,7 +42,6 @@ pub struct AutoUpdater {
     current_version: AppVersion,
     http_client: Arc<dyn HttpClient>,
     pending_poll: Option<Task<()>>,
-    db: project::Db,
     server_url: String,
 }
 
@@ -55,11 +55,11 @@ impl Entity for AutoUpdater {
     type Event = ();
 }
 
-pub fn init(db: project::Db, http_client: Arc<dyn HttpClient>, cx: &mut MutableAppContext) {
+pub fn init(http_client: Arc<dyn HttpClient>, server_url: String, cx: &mut MutableAppContext) {
     if let Some(version) = (*ZED_APP_VERSION).or_else(|| cx.platform().app_version().ok()) {
-        let server_url = ZED_SERVER_URL.to_string();
+        let server_url = server_url;
         let auto_updater = cx.add_model(|cx| {
-            let updater = AutoUpdater::new(version, db.clone(), http_client, server_url.clone());
+            let updater = AutoUpdater::new(version, http_client, server_url.clone());
             updater.start_polling(cx).detach();
             updater
         });
@@ -70,7 +70,14 @@ pub fn init(db: project::Db, http_client: Arc<dyn HttpClient>, cx: &mut MutableA
             }
         });
         cx.add_global_action(move |_: &ViewReleaseNotes, cx| {
-            cx.platform().open_url(&format!("{server_url}/releases"));
+            let latest_release_url = if cx.has_global::<ReleaseChannel>()
+                && *cx.global::<ReleaseChannel>() == ReleaseChannel::Preview
+            {
+                format!("{server_url}/releases/preview/latest")
+            } else {
+                format!("{server_url}/releases/latest")
+            };
+            cx.platform().open_url(&latest_release_url);
         });
         cx.add_action(UpdateNotification::dismiss);
     }
@@ -113,14 +120,12 @@ impl AutoUpdater {
 
     fn new(
         current_version: AppVersion,
-        db: project::Db,
         http_client: Arc<dyn HttpClient>,
         server_url: String,
     ) -> Self {
         Self {
             status: AutoUpdateStatus::Idle,
             current_version,
-            db,
             http_client,
             server_url,
             pending_poll: None,
@@ -290,20 +295,28 @@ impl AutoUpdater {
         should_show: bool,
         cx: &AppContext,
     ) -> Task<Result<()>> {
-        let db = self.db.clone();
         cx.background().spawn(async move {
             if should_show {
-                db.write_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY, "")?;
+                KEY_VALUE_STORE
+                    .write_kvp(
+                        SHOULD_SHOW_UPDATE_NOTIFICATION_KEY.to_string(),
+                        "".to_string(),
+                    )
+                    .await?;
             } else {
-                db.delete_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY)?;
+                KEY_VALUE_STORE
+                    .delete_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY.to_string())
+                    .await?;
             }
             Ok(())
         })
     }
 
     fn should_show_update_notification(&self, cx: &AppContext) -> Task<Result<bool>> {
-        let db = self.db.clone();
-        cx.background()
-            .spawn(async move { Ok(db.read_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY)?.is_some()) })
+        cx.background().spawn(async move {
+            Ok(KEY_VALUE_STORE
+                .read_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY)?
+                .is_some())
+        })
     }
 }

crates/auto_update/src/update_notification.rs

@@ -5,8 +5,9 @@ use gpui::{
     Element, Entity, MouseButton, View, ViewContext,
 };
 use menu::Cancel;
-use settings::{ReleaseChannel, Settings};
-use workspace::Notification;
+use settings::Settings;
+use util::channel::ReleaseChannel;
+use workspace::notifications::Notification;
 
 pub struct UpdateNotification {
     version: AppVersion,
@@ -27,9 +28,9 @@ impl View for UpdateNotification {
 
     fn render(&mut self, cx: &mut gpui::RenderContext<'_, Self>) -> gpui::ElementBox {
         let theme = cx.global::<Settings>().theme.clone();
-        let theme = &theme.update_notification;
+        let theme = &theme.simple_message_notification;
 
-        let app_name = cx.global::<ReleaseChannel>().name();
+        let app_name = cx.global::<ReleaseChannel>().display_name();
 
         MouseEventHandler::<ViewReleaseNotes>::new(0, cx, |state, cx| {
             Flex::column()

crates/breadcrumbs/src/breadcrumbs.rs

@@ -4,7 +4,10 @@ use gpui::{
 use itertools::Itertools;
 use search::ProjectSearchView;
 use settings::Settings;
-use workspace::{ItemEvent, ItemHandle, ToolbarItemLocation, ToolbarItemView};
+use workspace::{
+    item::{ItemEvent, ItemHandle},
+    ToolbarItemLocation, ToolbarItemView,
+};
 
 pub enum Event {
     UpdateLocation,

crates/call/src/call.rs

@@ -22,7 +22,7 @@ pub fn init(client: Arc<Client>, user_store: ModelHandle<UserStore>, cx: &mut Mu
 #[derive(Clone)]
 pub struct IncomingCall {
     pub room_id: u64,
-    pub caller: Arc<User>,
+    pub calling_user: Arc<User>,
     pub participants: Vec<Arc<User>>,
     pub initial_project: Option<proto::ParticipantProject>,
 }
@@ -78,9 +78,9 @@ impl ActiveCall {
                     user_store.get_users(envelope.payload.participant_user_ids, cx)
                 })
                 .await?,
-            caller: user_store
+            calling_user: user_store
                 .update(&mut cx, |user_store, cx| {
-                    user_store.get_user(envelope.payload.caller_user_id, cx)
+                    user_store.get_user(envelope.payload.calling_user_id, cx)
                 })
                 .await?,
             initial_project: envelope.payload.initial_project,
@@ -110,13 +110,13 @@ impl ActiveCall {
 
     pub fn invite(
         &mut self,
-        recipient_user_id: u64,
+        called_user_id: u64,
         initial_project: Option<ModelHandle<Project>>,
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<()>> {
         let client = self.client.clone();
         let user_store = self.user_store.clone();
-        if !self.pending_invites.insert(recipient_user_id) {
+        if !self.pending_invites.insert(called_user_id) {
             return Task::ready(Err(anyhow!("user was already invited")));
         }
 
@@ -136,13 +136,13 @@ impl ActiveCall {
                     };
 
                     room.update(&mut cx, |room, cx| {
-                        room.call(recipient_user_id, initial_project_id, cx)
+                        room.call(called_user_id, initial_project_id, cx)
                     })
                     .await?;
                 } else {
                     let room = cx
                         .update(|cx| {
-                            Room::create(recipient_user_id, initial_project, client, user_store, cx)
+                            Room::create(called_user_id, initial_project, client, user_store, cx)
                         })
                         .await?;
 
@@ -155,7 +155,7 @@ impl ActiveCall {
 
             let result = invite.await;
             this.update(&mut cx, |this, cx| {
-                this.pending_invites.remove(&recipient_user_id);
+                this.pending_invites.remove(&called_user_id);
                 cx.notify();
             });
             result
@@ -164,7 +164,7 @@ impl ActiveCall {
 
     pub fn cancel_invite(
         &mut self,
-        recipient_user_id: u64,
+        called_user_id: u64,
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<()>> {
         let room_id = if let Some(room) = self.room() {
@@ -178,7 +178,7 @@ impl ActiveCall {
             client
                 .request(proto::CancelCall {
                     room_id,
-                    recipient_user_id,
+                    called_user_id,
                 })
                 .await?;
             anyhow::Ok(())

crates/call/src/participant.rs

@@ -4,7 +4,7 @@ use collections::HashMap;
 use gpui::WeakModelHandle;
 pub use live_kit_client::Frame;
 use project::Project;
-use std::sync::Arc;
+use std::{fmt, sync::Arc};
 
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
 pub enum ParticipantLocation {
@@ -36,7 +36,7 @@ pub struct LocalParticipant {
     pub active_project: Option<WeakModelHandle<Project>>,
 }
 
-#[derive(Clone)]
+#[derive(Clone, Debug)]
 pub struct RemoteParticipant {
     pub user: Arc<User>,
     pub projects: Vec<proto::ParticipantProject>,
@@ -49,6 +49,12 @@ pub struct RemoteVideoTrack {
     pub(crate) live_kit_track: Arc<live_kit_client::RemoteVideoTrack>,
 }
 
+impl fmt::Debug for RemoteVideoTrack {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RemoteVideoTrack").finish()
+    }
+}
+
 impl RemoteVideoTrack {
     pub fn frames(&self) -> async_broadcast::Receiver<Frame> {
         self.live_kit_track.frames()
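
The hunk above derives Debug for RemoteParticipant, which requires every field to be Debug, so RemoteVideoTrack gets a hand-written impl that simply names the struct and skips the LiveKit track. A standalone sketch of that pattern (OpaqueTrack and VideoTrack are placeholder names, not the live_kit_client types):

use std::fmt;
use std::sync::Arc;

// Stand-in for a third-party type that does not implement Debug.
struct OpaqueTrack;

struct VideoTrack {
    inner: Arc<OpaqueTrack>,
}

// Hand-written Debug that names the struct and omits the opaque field,
// which is enough to let containing types #[derive(Debug)].
impl fmt::Debug for VideoTrack {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("VideoTrack").finish()
    }
}

#[derive(Debug)]
struct Participant {
    tracks: Vec<VideoTrack>,
}

fn main() {
    let participant = Participant {
        tracks: vec![VideoTrack { inner: Arc::new(OpaqueTrack) }],
    };
    println!("{:?}", participant);
}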

crates/call/src/room.rs 🔗

@@ -5,14 +5,18 @@ use crate::{
 use anyhow::{anyhow, Result};
 use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
 use collections::{BTreeMap, HashSet};
-use futures::StreamExt;
-use gpui::{AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task};
+use futures::{FutureExt, StreamExt};
+use gpui::{
+    AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, WeakModelHandle,
+};
 use live_kit_client::{LocalTrackPublication, LocalVideoTrack, RemoteVideoTrackUpdate};
 use postage::stream::Stream;
 use project::Project;
-use std::{mem, os::unix::prelude::OsStrExt, sync::Arc};
+use std::{mem, sync::Arc, time::Duration};
 use util::{post_inc, ResultExt};
 
+pub const RECONNECTION_TIMEOUT: Duration = client::RECEIVE_TIMEOUT;
+
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub enum Event {
     ParticipantLocationChanged {
@@ -46,6 +50,7 @@ pub struct Room {
     user_store: ModelHandle<UserStore>,
     subscriptions: Vec<client::Subscription>,
     pending_room_update: Option<Task<()>>,
+    _maintain_connection: Task<Result<()>>,
 }
 
 impl Entity for Room {
@@ -53,7 +58,7 @@ impl Entity for Room {
 
     fn release(&mut self, _: &mut MutableAppContext) {
         if self.status.is_online() {
-            self.client.send(proto::LeaveRoom { id: self.id }).log_err();
+            self.client.send(proto::LeaveRoom {}).log_err();
         }
     }
 }
@@ -66,21 +71,6 @@ impl Room {
         user_store: ModelHandle<UserStore>,
         cx: &mut ModelContext<Self>,
     ) -> Self {
-        let mut client_status = client.status();
-        cx.spawn_weak(|this, mut cx| async move {
-            let is_connected = client_status
-                .next()
-                .await
-                .map_or(false, |s| s.is_connected());
-            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
-            if !is_connected || client_status.next().await.is_some() {
-                if let Some(this) = this.upgrade(&cx) {
-                    let _ = this.update(&mut cx, |this, cx| this.leave(cx));
-                }
-            }
-        })
-        .detach();
-
         let live_kit_room = if let Some(connection_info) = live_kit_connection_info {
             let room = live_kit_client::Room::new();
             let mut status = room.status();
@@ -131,6 +121,9 @@ impl Room {
             None
         };
 
+        let _maintain_connection =
+            cx.spawn_weak(|this, cx| Self::maintain_connection(this, client.clone(), cx));
+
         Self {
             id,
             live_kit: live_kit_room,
@@ -145,11 +138,12 @@ impl Room {
             pending_room_update: None,
             client,
             user_store,
+            _maintain_connection,
         }
     }
 
     pub(crate) fn create(
-        recipient_user_id: u64,
+        called_user_id: u64,
         initial_project: Option<ModelHandle<Project>>,
         client: Arc<Client>,
         user_store: ModelHandle<UserStore>,
@@ -182,7 +176,7 @@ impl Room {
             match room
                 .update(&mut cx, |room, cx| {
                     room.leave_when_empty = true;
-                    room.call(recipient_user_id, initial_project_id, cx)
+                    room.call(called_user_id, initial_project_id, cx)
                 })
                 .await
             {
@@ -241,10 +235,87 @@ impl Room {
         self.participant_user_ids.clear();
         self.subscriptions.clear();
         self.live_kit.take();
-        self.client.send(proto::LeaveRoom { id: self.id })?;
+        self.client.send(proto::LeaveRoom {})?;
         Ok(())
     }
 
+    async fn maintain_connection(
+        this: WeakModelHandle<Self>,
+        client: Arc<Client>,
+        mut cx: AsyncAppContext,
+    ) -> Result<()> {
+        let mut client_status = client.status();
+        loop {
+            let is_connected = client_status
+                .next()
+                .await
+                .map_or(false, |s| s.is_connected());
+            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
+            if !is_connected || client_status.next().await.is_some() {
+                let room_id = this
+                    .upgrade(&cx)
+                    .ok_or_else(|| anyhow!("room was dropped"))?
+                    .update(&mut cx, |this, cx| {
+                        this.status = RoomStatus::Rejoining;
+                        cx.notify();
+                        this.id
+                    });
+
+                // Wait for client to re-establish a connection to the server.
+                let mut reconnection_timeout = cx.background().timer(RECONNECTION_TIMEOUT).fuse();
+                let client_reconnection = async {
+                    loop {
+                        if let Some(status) = client_status.next().await {
+                            if status.is_connected() {
+                                return true;
+                            }
+                        } else {
+                            return false;
+                        }
+                    }
+                }
+                .fuse();
+                futures::pin_mut!(client_reconnection);
+
+                futures::select_biased! {
+                    reconnected = client_reconnection => {
+                        if reconnected {
+                            // Client managed to reconnect to the server. Now attempt to join the room.
+                            let rejoin_room = async {
+                                let response = client.request(proto::JoinRoom { id: room_id }).await?;
+                                let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
+                                this.upgrade(&cx)
+                                    .ok_or_else(|| anyhow!("room was dropped"))?
+                                    .update(&mut cx, |this, cx| {
+                                        this.status = RoomStatus::Online;
+                                        this.apply_room_update(room_proto, cx)
+                                    })?;
+                                anyhow::Ok(())
+                            };
+
+                            // If we successfully joined the room, go back around the loop
+                            // waiting for future connection status changes.
+                            if rejoin_room.await.log_err().is_some() {
+                                continue;
+                            }
+                        }
+                    }
+                    _ = reconnection_timeout => {}
+                }
+
+                // The client failed to re-establish a connection to the server
+                // or an error occurred while trying to re-join the room. Either way
+                // we leave the room and return an error.
+                if let Some(this) = this.upgrade(&cx) {
+                    let _ = this.update(&mut cx, |this, cx| this.leave(cx));
+                }
+                return Err(anyhow!(
+                    "can't reconnect to room: client failed to re-establish connection"
+                ));
+            }
+        }
+    }
+
     pub fn id(&self) -> u64 {
         self.id
     }
@@ -294,6 +365,11 @@ impl Room {
             .position(|participant| Some(participant.user_id) == self.client.user_id());
         let local_participant = local_participant_ix.map(|ix| room.participants.swap_remove(ix));
 
+        let pending_participant_user_ids = room
+            .pending_participants
+            .iter()
+            .map(|p| p.user_id)
+            .collect::<Vec<_>>();
         let remote_participant_user_ids = room
             .participants
             .iter()
@@ -303,7 +379,7 @@ impl Room {
             self.user_store.update(cx, move |user_store, cx| {
                 (
                     user_store.get_users(remote_participant_user_ids, cx),
-                    user_store.get_users(room.pending_participant_user_ids, cx),
+                    user_store.get_users(pending_participant_user_ids, cx),
                 )
             });
         self.pending_room_update = Some(cx.spawn(|this, mut cx| async move {
@@ -320,9 +396,11 @@ impl Room {
                 }
 
                 if let Some(participants) = remote_participants.log_err() {
+                    let mut participant_peer_ids = HashSet::default();
                     for (participant, user) in room.participants.into_iter().zip(participants) {
                         let peer_id = PeerId(participant.peer_id);
                         this.participant_user_ids.insert(participant.user_id);
+                        participant_peer_ids.insert(peer_id);
 
                         let old_projects = this
                             .remote_participants
@@ -389,8 +467,8 @@ impl Room {
                         }
                     }
 
-                    this.remote_participants.retain(|_, participant| {
-                        if this.participant_user_ids.contains(&participant.user.id) {
+                    this.remote_participants.retain(|peer_id, participant| {
+                        if participant_peer_ids.contains(peer_id) {
                             true
                         } else {
                             for project in &participant.projects {
@@ -472,10 +550,12 @@ impl Room {
         {
             for participant in self.remote_participants.values() {
                 assert!(self.participant_user_ids.contains(&participant.user.id));
+                assert_ne!(participant.user.id, self.client.user_id().unwrap());
             }
 
             for participant in &self.pending_participants {
                 assert!(self.participant_user_ids.contains(&participant.id));
+                assert_ne!(participant.id, self.client.user_id().unwrap());
             }
 
             assert_eq!(
@@ -487,7 +567,7 @@ impl Room {
 
     pub(crate) fn call(
         &mut self,
-        recipient_user_id: u64,
+        called_user_id: u64,
         initial_project_id: Option<u64>,
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<()>> {
@@ -503,7 +583,7 @@ impl Room {
             let result = client
                 .request(proto::Call {
                     room_id,
-                    recipient_user_id,
+                    called_user_id,
                     initial_project_id,
                 })
                 .await;
@@ -538,7 +618,7 @@ impl Room {
                         id: worktree.id().to_proto(),
                         root_name: worktree.root_name().into(),
                         visible: worktree.is_visible(),
-                        abs_path: worktree.abs_path().as_os_str().as_bytes().to_vec(),
+                        abs_path: worktree.abs_path().to_string_lossy().into(),
                     }
                 })
                 .collect(),
@@ -746,6 +826,7 @@ impl Default for ScreenTrack {
 #[derive(Copy, Clone, PartialEq, Eq)]
 pub enum RoomStatus {
     Online,
+    Rejoining,
     Offline,
 }
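
The new maintain_connection task above races the client's reconnection against RECONNECTION_TIMEOUT with select_biased!, rejoining the room on success and leaving it otherwise. A compressed standalone sketch of that shape, using only the futures crate (the timer and status check here are placeholders; the real code drives them through the client status stream and cx.background().timer, and loops back to wait for further status changes after a successful rejoin):

use futures::{pin_mut, select_biased, FutureExt};
use std::time::Duration;

// Stand-in for polling the client status stream until it reports a
// connected state; returns false if the stream ends.
async fn wait_for_reconnect() -> bool {
    true
}

// Placeholder timer; the real code uses cx.background().timer(...).
async fn sleep(_duration: Duration) {}

async fn rejoin_or_give_up() -> Result<(), &'static str> {
    let timeout = sleep(Duration::from_secs(30)).fuse();
    let reconnected = wait_for_reconnect().fuse();
    pin_mut!(timeout, reconnected);

    select_biased! {
        ok = reconnected => {
            if ok {
                // Re-request proto::JoinRoom and apply the returned room state.
                return Ok(());
            }
        }
        _ = timeout => {}
    }

    // Either the connection never came back or rejoining failed: leave the room.
    Err("can't reconnect to room: client failed to re-establish connection")
}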
 

crates/client/src/client.rs 🔗

@@ -11,14 +11,12 @@ use async_tungstenite::tungstenite::{
     error::Error as WebsocketError,
     http::{Request, StatusCode},
 };
-use db::Db;
 use futures::{future::LocalBoxFuture, AsyncReadExt, FutureExt, SinkExt, StreamExt, TryStreamExt};
 use gpui::{
     actions,
     serde_json::{self, Value},
     AnyModelHandle, AnyViewHandle, AnyWeakModelHandle, AnyWeakViewHandle, AppContext,
-    AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, View, ViewContext,
-    ViewHandle,
+    AsyncAppContext, Entity, ModelHandle, MutableAppContext, Task, View, ViewContext, ViewHandle,
 };
 use http::HttpClient;
 use lazy_static::lazy_static;
@@ -27,13 +25,13 @@ use postage::watch;
 use rand::prelude::*;
 use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, RequestMessage};
 use serde::Deserialize;
-use settings::ReleaseChannel;
 use std::{
     any::TypeId,
     collections::HashMap,
     convert::TryFrom,
     fmt::Write as _,
     future::Future,
+    marker::PhantomData,
     path::PathBuf,
     sync::{Arc, Weak},
     time::{Duration, Instant},
@@ -41,6 +39,7 @@ use std::{
 use telemetry::Telemetry;
 use thiserror::Error;
 use url::Url;
+use util::channel::ReleaseChannel;
 use util::{ResultExt, TryFutureExt};
 
 pub use rpc::*;
@@ -172,7 +171,7 @@ struct ClientState {
     entity_id_extractors: HashMap<TypeId, fn(&dyn AnyTypedEnvelope) -> u64>,
     _reconnect_task: Option<Task<()>>,
     reconnect_interval: Duration,
-    entities_by_type_and_remote_id: HashMap<(TypeId, u64), AnyWeakEntityHandle>,
+    entities_by_type_and_remote_id: HashMap<(TypeId, u64), WeakSubscriber>,
     models_by_message_type: HashMap<TypeId, AnyWeakModelHandle>,
     entity_types_by_message_type: HashMap<TypeId, TypeId>,
     #[allow(clippy::type_complexity)]
@@ -182,7 +181,7 @@ struct ClientState {
             dyn Send
                 + Sync
                 + Fn(
-                    AnyEntityHandle,
+                    Subscriber,
                     Box<dyn AnyTypedEnvelope>,
                     &Arc<Client>,
                     AsyncAppContext,
@@ -191,12 +190,13 @@ struct ClientState {
     >,
 }
 
-enum AnyWeakEntityHandle {
+enum WeakSubscriber {
     Model(AnyWeakModelHandle),
     View(AnyWeakViewHandle),
+    Pending(Vec<Box<dyn AnyTypedEnvelope>>),
 }
 
-enum AnyEntityHandle {
+enum Subscriber {
     Model(AnyModelHandle),
     View(AnyViewHandle),
 }
@@ -254,6 +254,54 @@ impl Drop for Subscription {
     }
 }
 
+pub struct PendingEntitySubscription<T: Entity> {
+    client: Arc<Client>,
+    remote_id: u64,
+    _entity_type: PhantomData<T>,
+    consumed: bool,
+}
+
+impl<T: Entity> PendingEntitySubscription<T> {
+    pub fn set_model(mut self, model: &ModelHandle<T>, cx: &mut AsyncAppContext) -> Subscription {
+        self.consumed = true;
+        let mut state = self.client.state.write();
+        let id = (TypeId::of::<T>(), self.remote_id);
+        let Some(WeakSubscriber::Pending(messages)) =
+            state.entities_by_type_and_remote_id.remove(&id)
+        else {
+            unreachable!()
+        };
+
+        state
+            .entities_by_type_and_remote_id
+            .insert(id, WeakSubscriber::Model(model.downgrade().into()));
+        drop(state);
+        for message in messages {
+            self.client.handle_message(message, cx);
+        }
+        Subscription::Entity {
+            client: Arc::downgrade(&self.client),
+            id,
+        }
+    }
+}
+
+impl<T: Entity> Drop for PendingEntitySubscription<T> {
+    fn drop(&mut self) {
+        if !self.consumed {
+            let mut state = self.client.state.write();
+            if let Some(WeakSubscriber::Pending(messages)) = state
+                .entities_by_type_and_remote_id
+                .remove(&(TypeId::of::<T>(), self.remote_id))
+            {
+                for message in messages {
+                    log::info!("unhandled message {}", message.payload_type_name());
+                }
+            }
+        }
+    }
+}
+
 impl Client {
     pub fn new(http: Arc<dyn HttpClient>, cx: &AppContext) -> Arc<Self> {
         Arc::new(Self {
@@ -349,7 +397,11 @@ impl Client {
                 let this = self.clone();
                 let reconnect_interval = state.reconnect_interval;
                 state._reconnect_task = Some(cx.spawn(|cx| async move {
+                    #[cfg(any(test, feature = "test-support"))]
+                    let mut rng = StdRng::seed_from_u64(0);
+                    #[cfg(not(any(test, feature = "test-support")))]
                     let mut rng = StdRng::from_entropy();
+
                     let mut delay = INITIAL_RECONNECTION_DELAY;
                     while let Err(error) = this.authenticate_and_connect(true, &cx).await {
                         log::error!("failed to connect {}", error);
@@ -387,26 +439,28 @@ impl Client {
         self.state
             .write()
             .entities_by_type_and_remote_id
-            .insert(id, AnyWeakEntityHandle::View(cx.weak_handle().into()));
+            .insert(id, WeakSubscriber::View(cx.weak_handle().into()));
         Subscription::Entity {
             client: Arc::downgrade(self),
             id,
         }
     }
 
-    pub fn add_model_for_remote_entity<T: Entity>(
+    pub fn subscribe_to_entity<T: Entity>(
         self: &Arc<Self>,
         remote_id: u64,
-        cx: &mut ModelContext<T>,
-    ) -> Subscription {
+    ) -> PendingEntitySubscription<T> {
         let id = (TypeId::of::<T>(), remote_id);
         self.state
             .write()
             .entities_by_type_and_remote_id
-            .insert(id, AnyWeakEntityHandle::Model(cx.weak_handle().into()));
-        Subscription::Entity {
-            client: Arc::downgrade(self),
-            id,
+            .insert(id, WeakSubscriber::Pending(Default::default()));
+
+        PendingEntitySubscription {
+            client: self.clone(),
+            remote_id,
+            consumed: false,
+            _entity_type: PhantomData,
         }
     }
 
@@ -434,7 +488,7 @@ impl Client {
         let prev_handler = state.message_handlers.insert(
             message_type_id,
             Arc::new(move |handle, envelope, client, cx| {
-                let handle = if let AnyEntityHandle::Model(handle) = handle {
+                let handle = if let Subscriber::Model(handle) = handle {
                     handle
                 } else {
                     unreachable!();
@@ -488,7 +542,7 @@ impl Client {
         F: 'static + Future<Output = Result<()>>,
     {
         self.add_entity_message_handler::<M, E, _, _>(move |handle, message, client, cx| {
-            if let AnyEntityHandle::View(handle) = handle {
+            if let Subscriber::View(handle) = handle {
                 handler(handle.downcast::<E>().unwrap(), message, client, cx)
             } else {
                 unreachable!();
@@ -507,7 +561,7 @@ impl Client {
         F: 'static + Future<Output = Result<()>>,
     {
         self.add_entity_message_handler::<M, E, _, _>(move |handle, message, client, cx| {
-            if let AnyEntityHandle::Model(handle) = handle {
+            if let Subscriber::Model(handle) = handle {
                 handler(handle.downcast::<E>().unwrap(), message, client, cx)
             } else {
                 unreachable!();
@@ -522,7 +576,7 @@ impl Client {
         H: 'static
             + Send
             + Sync
-            + Fn(AnyEntityHandle, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F,
+            + Fn(Subscriber, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F,
         F: 'static + Future<Output = Result<()>>,
     {
         let model_type_id = TypeId::of::<E>();
@@ -784,94 +838,8 @@ impl Client {
                 let cx = cx.clone();
                 let this = self.clone();
                 async move {
-                    let mut message_id = 0_usize;
                     while let Some(message) = incoming.next().await {
-                        let mut state = this.state.write();
-                        message_id += 1;
-                        let type_name = message.payload_type_name();
-                        let payload_type_id = message.payload_type_id();
-                        let sender_id = message.original_sender_id().map(|id| id.0);
-
-                        let model = state
-                            .models_by_message_type
-                            .get(&payload_type_id)
-                            .and_then(|model| model.upgrade(&cx))
-                            .map(AnyEntityHandle::Model)
-                            .or_else(|| {
-                                let entity_type_id =
-                                    *state.entity_types_by_message_type.get(&payload_type_id)?;
-                                let entity_id = state
-                                    .entity_id_extractors
-                                    .get(&message.payload_type_id())
-                                    .map(|extract_entity_id| {
-                                        (extract_entity_id)(message.as_ref())
-                                    })?;
-
-                                let entity = state
-                                    .entities_by_type_and_remote_id
-                                    .get(&(entity_type_id, entity_id))?;
-                                if let Some(entity) = entity.upgrade(&cx) {
-                                    Some(entity)
-                                } else {
-                                    state
-                                        .entities_by_type_and_remote_id
-                                        .remove(&(entity_type_id, entity_id));
-                                    None
-                                }
-                            });
-
-                        let model = if let Some(model) = model {
-                            model
-                        } else {
-                            log::info!("unhandled message {}", type_name);
-                            continue;
-                        };
-
-                        let handler = state.message_handlers.get(&payload_type_id).cloned();
-                        // Dropping the state prevents deadlocks if the handler interacts with rpc::Client.
-                        // It also ensures we don't hold the lock while yielding back to the executor, as
-                        // that might cause the executor thread driving this future to block indefinitely.
-                        drop(state);
-
-                        if let Some(handler) = handler {
-                            let future = handler(model, message, &this, cx.clone());
-                            let client_id = this.id;
-                            log::debug!(
-                                "rpc message received. client_id:{}, message_id:{}, sender_id:{:?}, type:{}",
-                                client_id,
-                                message_id,
-                                sender_id,
-                                type_name
-                            );
-                            cx.foreground()
-                                .spawn(async move {
-                                    match future.await {
-                                        Ok(()) => {
-                                            log::debug!(
-                                                "rpc message handled. client_id:{}, message_id:{}, sender_id:{:?}, type:{}",
-                                                client_id,
-                                                message_id,
-                                                sender_id,
-                                                type_name
-                                            );
-                                        }
-                                        Err(error) => {
-                                            log::error!(
-                                                "error handling message. client_id:{}, message_id:{}, sender_id:{:?}, type:{}, error:{:?}",
-                                                client_id,
-                                                message_id,
-                                                sender_id,
-                                                type_name,
-                                                error
-                                            );
-                                        }
-                                    }
-                                })
-                                .detach();
-                        } else {
-                            log::info!("unhandled message {}", type_name);
-                        }
-
+                        this.handle_message(message, &cx);
                         // Don't starve the main thread when receiving lots of messages at once.
                         smol::future::yield_now().await;
                     }
@@ -1218,8 +1186,99 @@ impl Client {
         self.peer.respond_with_error(receipt, error)
     }
 
-    pub fn start_telemetry(&self, db: Db) {
-        self.telemetry.start(db.clone());
+    fn handle_message(
+        self: &Arc<Client>,
+        message: Box<dyn AnyTypedEnvelope>,
+        cx: &AsyncAppContext,
+    ) {
+        let mut state = self.state.write();
+        let type_name = message.payload_type_name();
+        let payload_type_id = message.payload_type_id();
+        let sender_id = message.original_sender_id().map(|id| id.0);
+
+        let mut subscriber = None;
+
+        if let Some(message_model) = state
+            .models_by_message_type
+            .get(&payload_type_id)
+            .and_then(|model| model.upgrade(cx))
+        {
+            subscriber = Some(Subscriber::Model(message_model));
+        } else if let Some((extract_entity_id, entity_type_id)) =
+            state.entity_id_extractors.get(&payload_type_id).zip(
+                state
+                    .entity_types_by_message_type
+                    .get(&payload_type_id)
+                    .copied(),
+            )
+        {
+            let entity_id = (extract_entity_id)(message.as_ref());
+
+            match state
+                .entities_by_type_and_remote_id
+                .get_mut(&(entity_type_id, entity_id))
+            {
+                Some(WeakSubscriber::Pending(pending)) => {
+                    pending.push(message);
+                    return;
+                }
+                Some(weak_subscriber @ _) => subscriber = weak_subscriber.upgrade(cx),
+                _ => {}
+            }
+        }
+
+        let subscriber = if let Some(subscriber) = subscriber {
+            subscriber
+        } else {
+            log::info!("unhandled message {}", type_name);
+            return;
+        };
+
+        let handler = state.message_handlers.get(&payload_type_id).cloned();
+        // Dropping the state prevents deadlocks if the handler interacts with rpc::Client.
+        // It also ensures we don't hold the lock while yielding back to the executor, as
+        // that might cause the executor thread driving this future to block indefinitely.
+        drop(state);
+
+        if let Some(handler) = handler {
+            let future = handler(subscriber, message, &self, cx.clone());
+            let client_id = self.id;
+            log::debug!(
+                "rpc message received. client_id:{}, sender_id:{:?}, type:{}",
+                client_id,
+                sender_id,
+                type_name
+            );
+            cx.foreground()
+                .spawn(async move {
+                    match future.await {
+                        Ok(()) => {
+                            log::debug!(
+                                "rpc message handled. client_id:{}, sender_id:{:?}, type:{}",
+                                client_id,
+                                sender_id,
+                                type_name
+                            );
+                        }
+                        Err(error) => {
+                            log::error!(
+                                "error handling message. client_id:{}, sender_id:{:?}, type:{}, error:{:?}",
+                                client_id,
+                                sender_id,
+                                type_name,
+                                error
+                            );
+                        }
+                    }
+                })
+                .detach();
+        } else {
+            log::info!("unhandled message {}", type_name);
+        }
+    }
+
+    pub fn start_telemetry(&self) {
+        self.telemetry.start();
     }
 
     pub fn report_event(&self, kind: &str, properties: Value) {
@@ -1231,11 +1290,12 @@ impl Client {
     }
 }
 
-impl AnyWeakEntityHandle {
-    fn upgrade(&self, cx: &AsyncAppContext) -> Option<AnyEntityHandle> {
+impl WeakSubscriber {
+    fn upgrade(&self, cx: &AsyncAppContext) -> Option<Subscriber> {
         match self {
-            AnyWeakEntityHandle::Model(handle) => handle.upgrade(cx).map(AnyEntityHandle::Model),
-            AnyWeakEntityHandle::View(handle) => handle.upgrade(cx).map(AnyEntityHandle::View),
+            WeakSubscriber::Model(handle) => handle.upgrade(cx).map(Subscriber::Model),
+            WeakSubscriber::View(handle) => handle.upgrade(cx).map(Subscriber::View),
+            WeakSubscriber::Pending(_) => None,
         }
     }
 }
@@ -1480,11 +1540,17 @@ mod tests {
             subscription: None,
         });
 
-        let _subscription1 = model1.update(cx, |_, cx| client.add_model_for_remote_entity(1, cx));
-        let _subscription2 = model2.update(cx, |_, cx| client.add_model_for_remote_entity(2, cx));
+        let _subscription1 = client
+            .subscribe_to_entity(1)
+            .set_model(&model1, &mut cx.to_async());
+        let _subscription2 = client
+            .subscribe_to_entity(2)
+            .set_model(&model2, &mut cx.to_async());
         // Ensure dropping a subscription for the same entity type still allows receiving of
         // messages for other entity IDs of the same type.
-        let subscription3 = model3.update(cx, |_, cx| client.add_model_for_remote_entity(3, cx));
+        let subscription3 = client
+            .subscribe_to_entity(3)
+            .set_model(&model3, &mut cx.to_async());
         drop(subscription3);
 
         server.send(proto::JoinProject { project_id: 1 });
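
The client.rs rewrite above splits entity subscription into two phases: subscribe_to_entity reserves the slot with WeakSubscriber::Pending so messages that arrive before the model exists are buffered rather than dropped, and set_model swaps in the real handle and replays the buffer (the updated test at the end of the hunk shows the call shape). A std-only sketch of that buffer-then-replay idea (Router, Subscriber, and Message are simplified stand-ins, not the real Client types):

use std::collections::HashMap;

type Message = String;

enum Subscriber {
    Pending(Vec<Message>),
    Ready(/* model handle */ String),
}

struct Router {
    subscribers: HashMap<u64, Subscriber>,
}

impl Router {
    // Phase 1: reserve the slot before the entity exists, so incoming
    // messages for this id are buffered instead of dropped.
    fn subscribe(&mut self, remote_id: u64) {
        self.subscribers.insert(remote_id, Subscriber::Pending(Vec::new()));
    }

    fn deliver(&mut self, remote_id: u64, message: Message) {
        match self.subscribers.get_mut(&remote_id) {
            Some(Subscriber::Pending(buffer)) => buffer.push(message),
            Some(Subscriber::Ready(model)) => println!("{model} handles {message}"),
            None => println!("unhandled message {message}"),
        }
    }

    // Phase 2: once the model exists, swap it in and replay the buffer.
    fn set_model(&mut self, remote_id: u64, model: String) {
        let buffered = match self.subscribers.insert(remote_id, Subscriber::Ready(model.clone())) {
            Some(Subscriber::Pending(buffer)) => buffer,
            _ => Vec::new(),
        };
        for message in buffered {
            println!("{model} handles buffered {message}");
        }
    }
}

In the diff itself, a PendingEntitySubscription that is dropped without being consumed logs and discards anything that was buffered for it.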

crates/client/src/telemetry.rs 🔗

@@ -1,5 +1,5 @@
 use crate::http::HttpClient;
-use db::Db;
+use db::kvp::KEY_VALUE_STORE;
 use gpui::{
     executor::Background,
     serde_json::{self, value::Map, Value},
@@ -10,7 +10,6 @@ use lazy_static::lazy_static;
 use parking_lot::Mutex;
 use serde::Serialize;
 use serde_json::json;
-use settings::ReleaseChannel;
 use std::{
     io::Write,
     mem,
@@ -19,7 +18,7 @@ use std::{
     time::{Duration, SystemTime, UNIX_EPOCH},
 };
 use tempfile::NamedTempFile;
-use util::{post_inc, ResultExt, TryFutureExt};
+use util::{channel::ReleaseChannel, post_inc, ResultExt, TryFutureExt};
 use uuid::Uuid;
 
 pub struct Telemetry {
@@ -107,7 +106,7 @@ impl Telemetry {
     pub fn new(client: Arc<dyn HttpClient>, cx: &AppContext) -> Arc<Self> {
         let platform = cx.platform();
         let release_channel = if cx.has_global::<ReleaseChannel>() {
-            Some(cx.global::<ReleaseChannel>().name())
+            Some(cx.global::<ReleaseChannel>().display_name())
         } else {
             None
         };
@@ -148,18 +147,21 @@ impl Telemetry {
         Some(self.state.lock().log_file.as_ref()?.path().to_path_buf())
     }
 
-    pub fn start(self: &Arc<Self>, db: Db) {
+    pub fn start(self: &Arc<Self>) {
         let this = self.clone();
         self.executor
             .spawn(
                 async move {
-                    let device_id = if let Ok(Some(device_id)) = db.read_kvp("device_id") {
-                        device_id
-                    } else {
-                        let device_id = Uuid::new_v4().to_string();
-                        db.write_kvp("device_id", &device_id)?;
-                        device_id
-                    };
+                    let device_id =
+                        if let Ok(Some(device_id)) = KEY_VALUE_STORE.read_kvp("device_id") {
+                            device_id
+                        } else {
+                            let device_id = Uuid::new_v4().to_string();
+                            KEY_VALUE_STORE
+                                .write_kvp("device_id".to_string(), device_id.clone())
+                                .await?;
+                            device_id
+                        };
 
                     let device_id: Arc<str> = device_id.into();
                     let mut state = this.state.lock();

crates/client/src/user.rs 🔗

@@ -150,7 +150,6 @@ impl UserStore {
                                     client.telemetry.set_authenticated_user_info(None, false);
                                 }
 
-                                client.telemetry.report_event("sign in", Default::default());
                                 current_user_tx.send(user).await.ok();
                             }
                         }

crates/collab/Cargo.toml 🔗

@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
 default-run = "collab"
 edition = "2021"
 name = "collab"
-version = "0.2.4"
+version = "0.3.1"
 
 [[bin]]
 name = "collab"
@@ -19,12 +19,12 @@ rpc = { path = "../rpc" }
 util = { path = "../util" }
 
 anyhow = "1.0.40"
-async-trait = "0.1.50"
 async-tungstenite = "0.16"
 axum = { version = "0.5", features = ["json", "headers", "ws"] }
 axum-extra = { version = "0.3", features = ["erased-json"] }
 base64 = "0.13"
 clap = { version = "3.1", features = ["derive"], optional = true }
+dashmap = "5.4"
 envy = "0.4.2"
 futures = "0.3"
 hyper = "0.14"
@@ -36,9 +36,13 @@ prometheus = "0.13"
 rand = "0.8"
 reqwest = { version = "0.11", features = ["json"], optional = true }
 scrypt = "0.7"
+# Remove fork dependency when a version with https://github.com/SeaQL/sea-orm/pull/1283 is released.
+sea-orm = { git = "https://github.com/zed-industries/sea-orm", rev = "18f4c691085712ad014a51792af75a9044bacee6", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls"] }
+sea-query = "0.27"
 serde = { version = "1.0", features = ["derive", "rc"] }
 serde_json = "1.0"
 sha-1 = "0.9"
+sqlx = { version = "0.6", features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid", "any"] }
 time = { version = "0.3", features = ["serde", "serde-well-known"] }
 tokio = { version = "1", features = ["full"] }
 tokio-tungstenite = "0.17"
@@ -49,11 +53,6 @@ tracing = "0.1.34"
 tracing-log = "0.1.3"
 tracing-subscriber = { version = "0.3.11", features = ["env-filter", "json"] }
 
-[dependencies.sqlx]
-git = "https://github.com/launchbadge/sqlx"
-rev = "4b7053807c705df312bcb9b6281e184bf7534eb3"
-features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid"]
-
 [dev-dependencies]
 collections = { path = "../collections", features = ["test-support"] }
 gpui = { path = "../gpui", features = ["test-support"] }
@@ -65,6 +64,7 @@ fs = { path = "../fs", features = ["test-support"] }
 git = { path = "../git", features = ["test-support"] }
 live_kit_client = { path = "../live_kit_client", features = ["test-support"] }
 lsp = { path = "../lsp", features = ["test-support"] }
+pretty_assertions = "1.3.0"
 project = { path = "../project", features = ["test-support"] }
 rpc = { path = "../rpc", features = ["test-support"] }
 settings = { path = "../settings", features = ["test-support"] }
@@ -76,13 +76,10 @@ env_logger = "0.9"
 log = { version = "0.4.16", features = ["kv_unstable_serde"] }
 util = { path = "../util" }
 lazy_static = "1.4"
+sea-orm = { git = "https://github.com/zed-industries/sea-orm", rev = "18f4c691085712ad014a51792af75a9044bacee6", features = ["sqlx-sqlite"] }
 serde_json = { version = "1.0", features = ["preserve_order"] }
+sqlx = { version = "0.6", features = ["sqlite"] }
 unindent = "0.1"
 
-[dev-dependencies.sqlx]
-git = "https://github.com/launchbadge/sqlx"
-rev = "4b7053807c705df312bcb9b6281e184bf7534eb3"
-features = ["sqlite"]
-
 [features]
 seed-support = ["clap", "lipsum", "reqwest"]

crates/collab/migrations.sqlite/20221109000000_test_schema.sql 🔗

@@ -1,5 +1,5 @@
-CREATE TABLE IF NOT EXISTS "users" (
-    "id" INTEGER PRIMARY KEY,
+CREATE TABLE "users" (
+    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
     "github_login" VARCHAR,
     "admin" BOOLEAN,
     "email_address" VARCHAR(255) DEFAULT NULL,
@@ -8,7 +8,7 @@ CREATE TABLE IF NOT EXISTS "users" (
     "inviter_id" INTEGER REFERENCES users (id),
     "connected_once" BOOLEAN NOT NULL DEFAULT false,
     "created_at" TIMESTAMP NOT NULL DEFAULT now,
-    "metrics_id" VARCHAR(255),
+    "metrics_id" TEXT,
     "github_user_id" INTEGER
 );
 CREATE UNIQUE INDEX "index_users_github_login" ON "users" ("github_login");
@@ -16,15 +16,15 @@ CREATE UNIQUE INDEX "index_invite_code_users" ON "users" ("invite_code");
 CREATE INDEX "index_users_on_email_address" ON "users" ("email_address");
 CREATE INDEX "index_users_on_github_user_id" ON "users" ("github_user_id");
 
-CREATE TABLE IF NOT EXISTS "access_tokens" (
-    "id" INTEGER PRIMARY KEY,
+CREATE TABLE "access_tokens" (
+    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
     "user_id" INTEGER REFERENCES users (id),
     "hash" VARCHAR(128)
 );
 CREATE INDEX "index_access_tokens_user_id" ON "access_tokens" ("user_id");
 
-CREATE TABLE IF NOT EXISTS "contacts" (
-    "id" INTEGER PRIMARY KEY,
+CREATE TABLE "contacts" (
+    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
     "user_id_a" INTEGER REFERENCES users (id) NOT NULL,
     "user_id_b" INTEGER REFERENCES users (id) NOT NULL,
     "a_to_b" BOOLEAN NOT NULL,
@@ -34,8 +34,102 @@ CREATE TABLE IF NOT EXISTS "contacts" (
 CREATE UNIQUE INDEX "index_contacts_user_ids" ON "contacts" ("user_id_a", "user_id_b");
 CREATE INDEX "index_contacts_user_id_b" ON "contacts" ("user_id_b");
 
-CREATE TABLE IF NOT EXISTS "projects" (
-    "id" INTEGER PRIMARY KEY,
+CREATE TABLE "rooms" (
+    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
+    "live_kit_room" VARCHAR NOT NULL
+);
+
+CREATE TABLE "projects" (
+    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
+    "room_id" INTEGER REFERENCES rooms (id) NOT NULL,
     "host_user_id" INTEGER REFERENCES users (id) NOT NULL,
-    "unregistered" BOOLEAN NOT NULL DEFAULT false
+    "host_connection_id" INTEGER NOT NULL,
+    "host_connection_epoch" TEXT NOT NULL,
+    "unregistered" BOOLEAN NOT NULL DEFAULT FALSE
+);
+CREATE INDEX "index_projects_on_host_connection_epoch" ON "projects" ("host_connection_epoch");
+
+CREATE TABLE "worktrees" (
+    "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
+    "id" INTEGER NOT NULL,
+    "root_name" VARCHAR NOT NULL,
+    "abs_path" VARCHAR NOT NULL,
+    "visible" BOOL NOT NULL,
+    "scan_id" INTEGER NOT NULL,
+    "is_complete" BOOL NOT NULL,
+    PRIMARY KEY(project_id, id)
+);
+CREATE INDEX "index_worktrees_on_project_id" ON "worktrees" ("project_id");
+
+CREATE TABLE "worktree_entries" (
+    "project_id" INTEGER NOT NULL,
+    "worktree_id" INTEGER NOT NULL,
+    "id" INTEGER NOT NULL,
+    "is_dir" BOOL NOT NULL,
+    "path" VARCHAR NOT NULL,
+    "inode" INTEGER NOT NULL,
+    "mtime_seconds" INTEGER NOT NULL,
+    "mtime_nanos" INTEGER NOT NULL,
+    "is_symlink" BOOL NOT NULL,
+    "is_ignored" BOOL NOT NULL,
+    PRIMARY KEY(project_id, worktree_id, id),
+    FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
+);
+CREATE INDEX "index_worktree_entries_on_project_id" ON "worktree_entries" ("project_id");
+CREATE INDEX "index_worktree_entries_on_project_id_and_worktree_id" ON "worktree_entries" ("project_id", "worktree_id");
+
+CREATE TABLE "worktree_diagnostic_summaries" (
+    "project_id" INTEGER NOT NULL,
+    "worktree_id" INTEGER NOT NULL,
+    "path" VARCHAR NOT NULL,
+    "language_server_id" INTEGER NOT NULL,
+    "error_count" INTEGER NOT NULL,
+    "warning_count" INTEGER NOT NULL,
+    PRIMARY KEY(project_id, worktree_id, path),
+    FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
+);
+CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id" ON "worktree_diagnostic_summaries" ("project_id");
+CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id_and_worktree_id" ON "worktree_diagnostic_summaries" ("project_id", "worktree_id");
+
+CREATE TABLE "language_servers" (
+    "id" INTEGER NOT NULL,
+    "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
+    "name" VARCHAR NOT NULL,
+    PRIMARY KEY(project_id, id)
+);
+CREATE INDEX "index_language_servers_on_project_id" ON "language_servers" ("project_id");
+
+CREATE TABLE "project_collaborators" (
+    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
+    "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
+    "connection_id" INTEGER NOT NULL,
+    "connection_epoch" TEXT NOT NULL,
+    "user_id" INTEGER NOT NULL,
+    "replica_id" INTEGER NOT NULL,
+    "is_host" BOOLEAN NOT NULL
+);
+CREATE INDEX "index_project_collaborators_on_project_id" ON "project_collaborators" ("project_id");
+CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_and_replica_id" ON "project_collaborators" ("project_id", "replica_id");
+CREATE INDEX "index_project_collaborators_on_connection_epoch" ON "project_collaborators" ("connection_epoch");
+CREATE INDEX "index_project_collaborators_on_connection_id" ON "project_collaborators" ("connection_id");
+CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_connection_id_and_epoch" ON "project_collaborators" ("project_id", "connection_id", "connection_epoch");
+
+CREATE TABLE "room_participants" (
+    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
+    "room_id" INTEGER NOT NULL REFERENCES rooms (id),
+    "user_id" INTEGER NOT NULL REFERENCES users (id),
+    "answering_connection_id" INTEGER,
+    "answering_connection_epoch" TEXT,
+    "answering_connection_lost" BOOLEAN NOT NULL,
+    "location_kind" INTEGER,
+    "location_project_id" INTEGER,
+    "initial_project_id" INTEGER,
+    "calling_user_id" INTEGER NOT NULL REFERENCES users (id),
+    "calling_connection_id" INTEGER NOT NULL,
+    "calling_connection_epoch" TEXT NOT NULL
 );
+CREATE UNIQUE INDEX "index_room_participants_on_user_id" ON "room_participants" ("user_id");
+CREATE INDEX "index_room_participants_on_answering_connection_epoch" ON "room_participants" ("answering_connection_epoch");
+CREATE INDEX "index_room_participants_on_calling_connection_epoch" ON "room_participants" ("calling_connection_epoch");
+CREATE INDEX "index_room_participants_on_answering_connection_id" ON "room_participants" ("answering_connection_id");
+CREATE UNIQUE INDEX "index_room_participants_on_answering_connection_id_and_answering_connection_epoch" ON "room_participants" ("answering_connection_id", "answering_connection_epoch");

crates/collab/migrations/20221111092550_reconnection_support.sql 🔗

@@ -0,0 +1,90 @@
+CREATE TABLE IF NOT EXISTS "rooms" (
+    "id" SERIAL PRIMARY KEY,
+    "live_kit_room" VARCHAR NOT NULL
+);
+
+ALTER TABLE "projects"
+    ADD "room_id" INTEGER REFERENCES rooms (id),
+    ADD "host_connection_id" INTEGER,
+    ADD "host_connection_epoch" UUID;
+CREATE INDEX "index_projects_on_host_connection_epoch" ON "projects" ("host_connection_epoch");
+
+CREATE TABLE "worktrees" (
+    "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
+    "id" INT8 NOT NULL,
+    "root_name" VARCHAR NOT NULL,
+    "abs_path" VARCHAR NOT NULL,
+    "visible" BOOL NOT NULL,
+    "scan_id" INT8 NOT NULL,
+    "is_complete" BOOL NOT NULL,
+    PRIMARY KEY(project_id, id)
+);
+CREATE INDEX "index_worktrees_on_project_id" ON "worktrees" ("project_id");
+
+CREATE TABLE "worktree_entries" (
+    "project_id" INTEGER NOT NULL,
+    "worktree_id" INT8 NOT NULL,
+    "id" INT8 NOT NULL,
+    "is_dir" BOOL NOT NULL,
+    "path" VARCHAR NOT NULL,
+    "inode" INT8 NOT NULL,
+    "mtime_seconds" INT8 NOT NULL,
+    "mtime_nanos" INTEGER NOT NULL,
+    "is_symlink" BOOL NOT NULL,
+    "is_ignored" BOOL NOT NULL,
+    PRIMARY KEY(project_id, worktree_id, id),
+    FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
+);
+CREATE INDEX "index_worktree_entries_on_project_id" ON "worktree_entries" ("project_id");
+CREATE INDEX "index_worktree_entries_on_project_id_and_worktree_id" ON "worktree_entries" ("project_id", "worktree_id");
+
+CREATE TABLE "worktree_diagnostic_summaries" (
+    "project_id" INTEGER NOT NULL,
+    "worktree_id" INT8 NOT NULL,
+    "path" VARCHAR NOT NULL,
+    "language_server_id" INT8 NOT NULL,
+    "error_count" INTEGER NOT NULL,
+    "warning_count" INTEGER NOT NULL,
+    PRIMARY KEY(project_id, worktree_id, path),
+    FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
+);
+CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id" ON "worktree_diagnostic_summaries" ("project_id");
+CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id_and_worktree_id" ON "worktree_diagnostic_summaries" ("project_id", "worktree_id");
+
+CREATE TABLE "language_servers" (
+    "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
+    "id" INT8 NOT NULL,
+    "name" VARCHAR NOT NULL,
+    PRIMARY KEY(project_id, id)
+);
+CREATE INDEX "index_language_servers_on_project_id" ON "language_servers" ("project_id");
+
+CREATE TABLE "project_collaborators" (
+    "id" SERIAL PRIMARY KEY,
+    "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
+    "connection_id" INTEGER NOT NULL,
+    "connection_epoch" UUID NOT NULL,
+    "user_id" INTEGER NOT NULL,
+    "replica_id" INTEGER NOT NULL,
+    "is_host" BOOLEAN NOT NULL
+);
+CREATE INDEX "index_project_collaborators_on_project_id" ON "project_collaborators" ("project_id");
+CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_and_replica_id" ON "project_collaborators" ("project_id", "replica_id");
+CREATE INDEX "index_project_collaborators_on_connection_epoch" ON "project_collaborators" ("connection_epoch");
+
+CREATE TABLE "room_participants" (
+    "id" SERIAL PRIMARY KEY,
+    "room_id" INTEGER NOT NULL REFERENCES rooms (id),
+    "user_id" INTEGER NOT NULL REFERENCES users (id),
+    "answering_connection_id" INTEGER,
+    "answering_connection_epoch" UUID,
+    "location_kind" INTEGER,
+    "location_project_id" INTEGER,
+    "initial_project_id" INTEGER,
+    "calling_user_id" INTEGER NOT NULL REFERENCES users (id),
+    "calling_connection_id" INTEGER NOT NULL,
+    "calling_connection_epoch" UUID NOT NULL
+);
+CREATE UNIQUE INDEX "index_room_participants_on_user_id" ON "room_participants" ("user_id");
+CREATE INDEX "index_room_participants_on_answering_connection_epoch" ON "room_participants" ("answering_connection_epoch");
+CREATE INDEX "index_room_participants_on_calling_connection_epoch" ON "room_participants" ("calling_connection_epoch");

crates/collab/migrations/20221207165001_add_connection_lost_to_room_participants.sql 🔗

@@ -0,0 +1,7 @@
+ALTER TABLE "room_participants"
+    ADD "answering_connection_lost" BOOLEAN NOT NULL DEFAULT FALSE;
+
+CREATE INDEX "index_project_collaborators_on_connection_id" ON "project_collaborators" ("connection_id");
+CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_connection_id_and_epoch" ON "project_collaborators" ("project_id", "connection_id", "connection_epoch");
+CREATE INDEX "index_room_participants_on_answering_connection_id" ON "room_participants" ("answering_connection_id");
+CREATE UNIQUE INDEX "index_room_participants_on_answering_connection_id_and_answering_connection_epoch" ON "room_participants" ("answering_connection_id", "answering_connection_epoch");

crates/collab/src/api.rs 🔗

@@ -1,6 +1,6 @@
 use crate::{
     auth,
-    db::{Invite, NewUserParams, Signup, User, UserId, WaitlistSummary},
+    db::{Invite, NewSignup, NewUserParams, User, UserId, WaitlistSummary},
     rpc::{self, ResultExt},
     AppState, Error, Result,
 };
@@ -204,7 +204,7 @@ async fn create_user(
 #[derive(Deserialize)]
 struct UpdateUserParams {
     admin: Option<bool>,
-    invite_count: Option<u32>,
+    invite_count: Option<i32>,
 }
 
 async fn update_user(
@@ -335,7 +335,7 @@ async fn get_user_for_invite_code(
 }
 
 async fn create_signup(
-    Json(params): Json<Signup>,
+    Json(params): Json<NewSignup>,
     Extension(app): Extension<Arc<AppState>>,
 ) -> Result<()> {
     app.db.create_signup(&params).await?;

crates/collab/src/auth.rs 🔗

@@ -75,7 +75,7 @@ pub async fn validate_header<B>(mut req: Request<B>, next: Next<B>) -> impl Into
 
 const MAX_ACCESS_TOKENS_TO_STORE: usize = 8;
 
-pub async fn create_access_token(db: &db::DefaultDb, user_id: UserId) -> Result<String> {
+pub async fn create_access_token(db: &db::Database, user_id: UserId) -> Result<String> {
     let access_token = rpc::auth::random_token();
     let access_token_hash =
         hash_access_token(&access_token).context("failed to hash access token")?;

crates/collab/src/bin/seed.rs 🔗

@@ -1,12 +1,8 @@
-use collab::{Error, Result};
-use db::DefaultDb;
+use collab::db;
+use db::{ConnectOptions, Database};
 use serde::{de::DeserializeOwned, Deserialize};
 use std::fmt::Write;
 
-#[allow(unused)]
-#[path = "../db.rs"]
-mod db;
-
 #[derive(Debug, Deserialize)]
 struct GitHubUser {
     id: i32,
@@ -17,7 +13,7 @@ struct GitHubUser {
 #[tokio::main]
 async fn main() {
     let database_url = std::env::var("DATABASE_URL").expect("missing DATABASE_URL env var");
-    let db = DefaultDb::new(&database_url, 5)
+    let db = Database::new(ConnectOptions::new(database_url))
         .await
         .expect("failed to connect to postgres database");
     let github_token = std::env::var("GITHUB_TOKEN").expect("missing GITHUB_TOKEN env var");

crates/collab/src/db.rs 🔗

@@ -1,214 +1,633 @@
+mod access_token;
+mod contact;
+mod language_server;
+mod project;
+mod project_collaborator;
+mod room;
+mod room_participant;
+mod signup;
+#[cfg(test)]
+mod tests;
+mod user;
+mod worktree;
+mod worktree_diagnostic_summary;
+mod worktree_entry;
+
 use crate::{Error, Result};
 use anyhow::anyhow;
-use axum::http::StatusCode;
-use collections::HashMap;
+use collections::{BTreeMap, HashMap, HashSet};
+pub use contact::Contact;
+use dashmap::DashMap;
 use futures::StreamExt;
-use serde::{Deserialize, Serialize};
-use sqlx::{
-    migrate::{Migrate as _, Migration, MigrationSource},
-    types::Uuid,
-    FromRow,
+use hyper::StatusCode;
+use rpc::{proto, ConnectionId};
+pub use sea_orm::ConnectOptions;
+use sea_orm::{
+    entity::prelude::*, ActiveValue, ConnectionTrait, DatabaseConnection, DatabaseTransaction,
+    DbErr, FromQueryResult, IntoActiveModel, IsolationLevel, JoinType, QueryOrder, QuerySelect,
+    Statement, TransactionTrait,
 };
-use std::{path::Path, time::Duration};
-use time::{OffsetDateTime, PrimitiveDateTime};
-
-#[cfg(test)]
-pub type DefaultDb = Db<sqlx::Sqlite>;
-
-#[cfg(not(test))]
-pub type DefaultDb = Db<sqlx::Postgres>;
-
-pub struct Db<D: sqlx::Database> {
-    pool: sqlx::Pool<D>,
+use sea_query::{Alias, Expr, OnConflict, Query};
+use serde::{Deserialize, Serialize};
+pub use signup::{Invite, NewSignup, WaitlistSummary};
+use sqlx::migrate::{Migrate, Migration, MigrationSource};
+use sqlx::Connection;
+use std::ops::{Deref, DerefMut};
+use std::path::Path;
+use std::time::Duration;
+use std::{future::Future, marker::PhantomData, rc::Rc, sync::Arc};
+use tokio::sync::{Mutex, OwnedMutexGuard};
+pub use user::Model as User;
+
+pub struct Database {
+    options: ConnectOptions,
+    pool: DatabaseConnection,
+    rooms: DashMap<RoomId, Arc<Mutex<()>>>,
     #[cfg(test)]
     background: Option<std::sync::Arc<gpui::executor::Background>>,
     #[cfg(test)]
     runtime: Option<tokio::runtime::Runtime>,
+    epoch: Uuid,
 }
 
-macro_rules! test_support {
-    ($self:ident, { $($token:tt)* }) => {{
-        let body = async {
-            $($token)*
-        };
-
-        if cfg!(test) {
-            #[cfg(not(test))]
-            unreachable!();
-
+impl Database {
+    pub async fn new(options: ConnectOptions) -> Result<Self> {
+        Ok(Self {
+            options: options.clone(),
+            pool: sea_orm::Database::connect(options).await?,
+            rooms: DashMap::with_capacity(16384),
             #[cfg(test)]
-            if let Some(background) = $self.background.as_ref() {
-                background.simulate_random_delay().await;
-            }
-
+            background: None,
             #[cfg(test)]
-            $self.runtime.as_ref().unwrap().block_on(body)
-        } else {
-            body.await
-        }
-    }};
-}
+            runtime: None,
+            epoch: Uuid::new_v4(),
+        })
+    }
 
-pub trait RowsAffected {
-    fn rows_affected(&self) -> u64;
-}
+    pub async fn migrate(
+        &self,
+        migrations_path: &Path,
+        ignore_checksum_mismatch: bool,
+    ) -> anyhow::Result<Vec<(Migration, Duration)>> {
+        let migrations = MigrationSource::resolve(migrations_path)
+            .await
+            .map_err(|err| anyhow!("failed to load migrations: {err:?}"))?;
 
-#[cfg(test)]
-impl RowsAffected for sqlx::sqlite::SqliteQueryResult {
-    fn rows_affected(&self) -> u64 {
-        self.rows_affected()
-    }
-}
+        let mut connection = sqlx::AnyConnection::connect(self.options.get_url()).await?;
 
-impl RowsAffected for sqlx::postgres::PgQueryResult {
-    fn rows_affected(&self) -> u64 {
-        self.rows_affected()
-    }
-}
+        connection.ensure_migrations_table().await?;
+        let applied_migrations: HashMap<_, _> = connection
+            .list_applied_migrations()
+            .await?
+            .into_iter()
+            .map(|m| (m.version, m))
+            .collect();
 
-#[cfg(test)]
-impl Db<sqlx::Sqlite> {
-    pub async fn new(url: &str, max_connections: u32) -> Result<Self> {
-        use std::str::FromStr as _;
-        let options = sqlx::sqlite::SqliteConnectOptions::from_str(url)
-            .unwrap()
-            .create_if_missing(true)
-            .shared_cache(true);
-        let pool = sqlx::sqlite::SqlitePoolOptions::new()
-            .min_connections(2)
-            .max_connections(max_connections)
-            .connect_with(options)
-            .await?;
-        Ok(Self {
-            pool,
-            background: None,
-            runtime: None,
-        })
-    }
+        let mut new_migrations = Vec::new();
+        for migration in migrations {
+            match applied_migrations.get(&migration.version) {
+                Some(applied_migration) => {
+                    if migration.checksum != applied_migration.checksum && !ignore_checksum_mismatch
+                    {
+                        Err(anyhow!(
+                            "checksum mismatch for applied migration {}",
+                            migration.description
+                        ))?;
+                    }
+                }
+                None => {
+                    let elapsed = connection.apply(&migration).await?;
+                    new_migrations.push((migration, elapsed));
+                }
+            }
+        }
 
-    pub async fn get_users_by_ids(&self, ids: Vec<UserId>) -> Result<Vec<User>> {
-        test_support!(self, {
-            let query = "
-                SELECT users.*
-                FROM users
-                WHERE users.id IN (SELECT value from json_each($1))
-            ";
-            Ok(sqlx::query_as(query)
-                .bind(&serde_json::json!(ids))
-                .fetch_all(&self.pool)
-                .await?)
-        })
+        Ok(new_migrations)
     }
 
-    pub async fn get_user_metrics_id(&self, id: UserId) -> Result<String> {
-        test_support!(self, {
-            let query = "
-                SELECT metrics_id
-                FROM users
-                WHERE id = $1
-            ";
-            Ok(sqlx::query_scalar(query)
-                .bind(id)
-                .fetch_one(&self.pool)
-                .await?)
+    pub async fn clear_stale_data(&self) -> Result<()> {
+        self.transaction(|tx| async move {
+            project_collaborator::Entity::delete_many()
+                .filter(project_collaborator::Column::ConnectionEpoch.ne(self.epoch))
+                .exec(&*tx)
+                .await?;
+            room_participant::Entity::delete_many()
+                .filter(
+                    room_participant::Column::AnsweringConnectionEpoch
+                        .ne(self.epoch)
+                        .or(room_participant::Column::CallingConnectionEpoch.ne(self.epoch)),
+                )
+                .exec(&*tx)
+                .await?;
+            project::Entity::delete_many()
+                .filter(project::Column::HostConnectionEpoch.ne(self.epoch))
+                .exec(&*tx)
+                .await?;
+            room::Entity::delete_many()
+                .filter(
+                    room::Column::Id.not_in_subquery(
+                        Query::select()
+                            .column(room_participant::Column::RoomId)
+                            .from(room_participant::Entity)
+                            .distinct()
+                            .to_owned(),
+                    ),
+                )
+                .exec(&*tx)
+                .await?;
+            Ok(())
         })
+        .await
     }
 
+    // users
+
     pub async fn create_user(
         &self,
         email_address: &str,
         admin: bool,
         params: NewUserParams,
     ) -> Result<NewUserResult> {
-        test_support!(self, {
-            let query = "
-                INSERT INTO users (email_address, github_login, github_user_id, admin, metrics_id)
-                VALUES ($1, $2, $3, $4, $5)
-                ON CONFLICT (github_login) DO UPDATE SET github_login = excluded.github_login
-                RETURNING id, metrics_id
-            ";
+        self.transaction(|tx| async {
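+            // Rebinding `tx` moves it into this non-`move` async block, so the returned
+            // future owns the transaction rather than borrowing it from the closure.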
+            let tx = tx;
+            let user = user::Entity::insert(user::ActiveModel {
+                email_address: ActiveValue::set(Some(email_address.into())),
+                github_login: ActiveValue::set(params.github_login.clone()),
+                github_user_id: ActiveValue::set(Some(params.github_user_id)),
+                admin: ActiveValue::set(admin),
+                metrics_id: ActiveValue::set(Uuid::new_v4()),
+                ..Default::default()
+            })
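+            // The DO UPDATE arm rewrites github_login to its existing value, which makes the
+            // upsert still return the conflicting row, so creating an already-existing
+            // github_login yields the same user id instead of failing.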
+            .on_conflict(
+                OnConflict::column(user::Column::GithubLogin)
+                    .update_column(user::Column::GithubLogin)
+                    .to_owned(),
+            )
+            .exec_with_returning(&*tx)
+            .await?;
 
-            let (user_id, metrics_id): (UserId, String) = sqlx::query_as(query)
-                .bind(email_address)
-                .bind(params.github_login)
-                .bind(params.github_user_id)
-                .bind(admin)
-                .bind(Uuid::new_v4().to_string())
-                .fetch_one(&self.pool)
-                .await?;
             Ok(NewUserResult {
-                user_id,
-                metrics_id,
+                user_id: user.id,
+                metrics_id: user.metrics_id.to_string(),
                 signup_device_id: None,
                 inviting_user_id: None,
             })
         })
+        .await
     }
 
-    pub async fn fuzzy_search_users(&self, _name_query: &str, _limit: u32) -> Result<Vec<User>> {
-        unimplemented!()
+    pub async fn get_user_by_id(&self, id: UserId) -> Result<Option<user::Model>> {
+        self.transaction(|tx| async move { Ok(user::Entity::find_by_id(id).one(&*tx).await?) })
+            .await
     }
 
-    pub async fn create_user_from_invite(
+    pub async fn get_users_by_ids(&self, ids: Vec<UserId>) -> Result<Vec<user::Model>> {
+        self.transaction(|tx| async {
+            let tx = tx;
+            Ok(user::Entity::find()
+                .filter(user::Column::Id.is_in(ids.iter().copied()))
+                .all(&*tx)
+                .await?)
+        })
+        .await
+    }
+
+    pub async fn get_user_by_github_account(
         &self,
-        _invite: &Invite,
-        _user: NewUserParams,
-    ) -> Result<Option<NewUserResult>> {
-        unimplemented!()
+        github_login: &str,
+        github_user_id: Option<i32>,
+    ) -> Result<Option<User>> {
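+        // Prefer matching on the stable github_user_id (refreshing the stored login);
+        // otherwise fall back to matching on the login and backfill the id.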
+        self.transaction(|tx| async move {
+            let tx = &*tx;
+            if let Some(github_user_id) = github_user_id {
+                if let Some(user_by_github_user_id) = user::Entity::find()
+                    .filter(user::Column::GithubUserId.eq(github_user_id))
+                    .one(tx)
+                    .await?
+                {
+                    let mut user_by_github_user_id = user_by_github_user_id.into_active_model();
+                    user_by_github_user_id.github_login = ActiveValue::set(github_login.into());
+                    Ok(Some(user_by_github_user_id.update(tx).await?))
+                } else if let Some(user_by_github_login) = user::Entity::find()
+                    .filter(user::Column::GithubLogin.eq(github_login))
+                    .one(tx)
+                    .await?
+                {
+                    let mut user_by_github_login = user_by_github_login.into_active_model();
+                    user_by_github_login.github_user_id = ActiveValue::set(Some(github_user_id));
+                    Ok(Some(user_by_github_login.update(tx).await?))
+                } else {
+                    Ok(None)
+                }
+            } else {
+                Ok(user::Entity::find()
+                    .filter(user::Column::GithubLogin.eq(github_login))
+                    .one(tx)
+                    .await?)
+            }
+        })
+        .await
     }
 
-    pub async fn create_signup(&self, _signup: &Signup) -> Result<()> {
-        unimplemented!()
+    pub async fn get_all_users(&self, page: u32, limit: u32) -> Result<Vec<User>> {
+        self.transaction(|tx| async move {
+            Ok(user::Entity::find()
+                .order_by_asc(user::Column::GithubLogin)
+                .limit(limit as u64)
+                .offset(page as u64 * limit as u64)
+                .all(&*tx)
+                .await?)
+        })
+        .await
     }
 
-    pub async fn create_invite_from_code(
+    pub async fn get_users_with_no_invites(
         &self,
-        _code: &str,
-        _email_address: &str,
-        _device_id: Option<&str>,
-    ) -> Result<Invite> {
-        unimplemented!()
+        invited_by_another_user: bool,
+    ) -> Result<Vec<User>> {
+        self.transaction(|tx| async move {
+            Ok(user::Entity::find()
+                .filter(
+                    user::Column::InviteCount
+                        .eq(0)
+                        .and(if invited_by_another_user {
+                            user::Column::InviterId.is_not_null()
+                        } else {
+                            user::Column::InviterId.is_null()
+                        }),
+                )
+                .all(&*tx)
+                .await?)
+        })
+        .await
     }
 
-    pub async fn record_sent_invites(&self, _invites: &[Invite]) -> Result<()> {
-        unimplemented!()
+    pub async fn get_user_metrics_id(&self, id: UserId) -> Result<String> {
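+        // `QueryAs` is the column-selector enum SeaORM's `into_values` needs, letting the
+        // query fetch only the metrics_id column as a typed value.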
+        #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
+        enum QueryAs {
+            MetricsId,
+        }
+
+        self.transaction(|tx| async move {
+            let metrics_id: Uuid = user::Entity::find_by_id(id)
+                .select_only()
+                .column(user::Column::MetricsId)
+                .into_values::<_, QueryAs>()
+                .one(&*tx)
+                .await?
+                .ok_or_else(|| anyhow!("could not find user"))?;
+            Ok(metrics_id.to_string())
+        })
+        .await
+    }
+
+    pub async fn set_user_is_admin(&self, id: UserId, is_admin: bool) -> Result<()> {
+        self.transaction(|tx| async move {
+            user::Entity::update_many()
+                .filter(user::Column::Id.eq(id))
+                .set(user::ActiveModel {
+                    admin: ActiveValue::set(is_admin),
+                    ..Default::default()
+                })
+                .exec(&*tx)
+                .await?;
+            Ok(())
+        })
+        .await
+    }
+
+    pub async fn set_user_connected_once(&self, id: UserId, connected_once: bool) -> Result<()> {
+        self.transaction(|tx| async move {
+            user::Entity::update_many()
+                .filter(user::Column::Id.eq(id))
+                .set(user::ActiveModel {
+                    connected_once: ActiveValue::set(connected_once),
+                    ..Default::default()
+                })
+                .exec(&*tx)
+                .await?;
+            Ok(())
+        })
+        .await
+    }
+
+    pub async fn destroy_user(&self, id: UserId) -> Result<()> {
+        self.transaction(|tx| async move {
+            access_token::Entity::delete_many()
+                .filter(access_token::Column::UserId.eq(id))
+                .exec(&*tx)
+                .await?;
+            user::Entity::delete_by_id(id).exec(&*tx).await?;
+            Ok(())
+        })
+        .await
+    }
+
+    // contacts
+
+    pub async fn get_contacts(&self, user_id: UserId) -> Result<Vec<Contact>> {
+        #[derive(Debug, FromQueryResult)]
+        struct ContactWithUserBusyStatuses {
+            user_id_a: UserId,
+            user_id_b: UserId,
+            a_to_b: bool,
+            accepted: bool,
+            should_notify: bool,
+            user_a_busy: bool,
+            user_b_busy: bool,
+        }
+
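+        // Each contacts row stores the pair ordered so that user_id_a < user_id_b; `a_to_b`
+        // records who sent the request and `accepted` whether it was accepted. The two LEFT
+        // JOINs below mark either side as busy when they currently have a room_participant
+        // row, and the loop re-orients each row from `user_id`'s perspective.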
+        self.transaction(|tx| async move {
+            let user_a_participant = Alias::new("user_a_participant");
+            let user_b_participant = Alias::new("user_b_participant");
+            let mut db_contacts = contact::Entity::find()
+                .column_as(
+                    Expr::tbl(user_a_participant.clone(), room_participant::Column::Id)
+                        .is_not_null(),
+                    "user_a_busy",
+                )
+                .column_as(
+                    Expr::tbl(user_b_participant.clone(), room_participant::Column::Id)
+                        .is_not_null(),
+                    "user_b_busy",
+                )
+                .filter(
+                    contact::Column::UserIdA
+                        .eq(user_id)
+                        .or(contact::Column::UserIdB.eq(user_id)),
+                )
+                .join_as(
+                    JoinType::LeftJoin,
+                    contact::Relation::UserARoomParticipant.def(),
+                    user_a_participant,
+                )
+                .join_as(
+                    JoinType::LeftJoin,
+                    contact::Relation::UserBRoomParticipant.def(),
+                    user_b_participant,
+                )
+                .into_model::<ContactWithUserBusyStatuses>()
+                .stream(&*tx)
+                .await?;
+
+            let mut contacts = Vec::new();
+            while let Some(db_contact) = db_contacts.next().await {
+                let db_contact = db_contact?;
+                if db_contact.user_id_a == user_id {
+                    if db_contact.accepted {
+                        contacts.push(Contact::Accepted {
+                            user_id: db_contact.user_id_b,
+                            should_notify: db_contact.should_notify && db_contact.a_to_b,
+                            busy: db_contact.user_b_busy,
+                        });
+                    } else if db_contact.a_to_b {
+                        contacts.push(Contact::Outgoing {
+                            user_id: db_contact.user_id_b,
+                        })
+                    } else {
+                        contacts.push(Contact::Incoming {
+                            user_id: db_contact.user_id_b,
+                            should_notify: db_contact.should_notify,
+                        });
+                    }
+                } else if db_contact.accepted {
+                    contacts.push(Contact::Accepted {
+                        user_id: db_contact.user_id_a,
+                        should_notify: db_contact.should_notify && !db_contact.a_to_b,
+                        busy: db_contact.user_a_busy,
+                    });
+                } else if db_contact.a_to_b {
+                    contacts.push(Contact::Incoming {
+                        user_id: db_contact.user_id_a,
+                        should_notify: db_contact.should_notify,
+                    });
+                } else {
+                    contacts.push(Contact::Outgoing {
+                        user_id: db_contact.user_id_a,
+                    });
+                }
+            }
+
+            contacts.sort_unstable_by_key(|contact| contact.user_id());
+
+            Ok(contacts)
+        })
+        .await
+    }
+
+    pub async fn is_user_busy(&self, user_id: UserId) -> Result<bool> {
+        self.transaction(|tx| async move {
+            let participant = room_participant::Entity::find()
+                .filter(room_participant::Column::UserId.eq(user_id))
+                .one(&*tx)
+                .await?;
+            Ok(participant.is_some())
+        })
+        .await
+    }
+
+    pub async fn has_contact(&self, user_id_1: UserId, user_id_2: UserId) -> Result<bool> {
+        self.transaction(|tx| async move {
+            let (id_a, id_b) = if user_id_1 < user_id_2 {
+                (user_id_1, user_id_2)
+            } else {
+                (user_id_2, user_id_1)
+            };
+
+            Ok(contact::Entity::find()
+                .filter(
+                    contact::Column::UserIdA
+                        .eq(id_a)
+                        .and(contact::Column::UserIdB.eq(id_b))
+                        .and(contact::Column::Accepted.eq(true)),
+                )
+                .one(&*tx)
+                .await?
+                .is_some())
+        })
+        .await
     }
-}
 
-impl Db<sqlx::Postgres> {
-    pub async fn new(url: &str, max_connections: u32) -> Result<Self> {
-        let pool = sqlx::postgres::PgPoolOptions::new()
-            .max_connections(max_connections)
-            .connect(url)
+    pub async fn send_contact_request(&self, sender_id: UserId, receiver_id: UserId) -> Result<()> {
+        self.transaction(|tx| async move {
+            let (id_a, id_b, a_to_b) = if sender_id < receiver_id {
+                (sender_id, receiver_id, true)
+            } else {
+                (receiver_id, sender_id, false)
+            };
+
+            let rows_affected = contact::Entity::insert(contact::ActiveModel {
+                user_id_a: ActiveValue::set(id_a),
+                user_id_b: ActiveValue::set(id_b),
+                a_to_b: ActiveValue::set(a_to_b),
+                accepted: ActiveValue::set(false),
+                should_notify: ActiveValue::set(true),
+                ..Default::default()
+            })
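+            // The conflict arm only fires when the other user already has an unaccepted
+            // request in the opposite direction; in that case the existing request is
+            // promoted to accepted instead of reporting an error.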
+            .on_conflict(
+                OnConflict::columns([contact::Column::UserIdA, contact::Column::UserIdB])
+                    .values([
+                        (contact::Column::Accepted, true.into()),
+                        (contact::Column::ShouldNotify, false.into()),
+                    ])
+                    .action_and_where(
+                        contact::Column::Accepted.eq(false).and(
+                            contact::Column::AToB
+                                .eq(a_to_b)
+                                .and(contact::Column::UserIdA.eq(id_b))
+                                .or(contact::Column::AToB
+                                    .ne(a_to_b)
+                                    .and(contact::Column::UserIdA.eq(id_a))),
+                        ),
+                    )
+                    .to_owned(),
+            )
+            .exec_without_returning(&*tx)
             .await?;
-        Ok(Self {
-            pool,
-            #[cfg(test)]
-            background: None,
-            #[cfg(test)]
-            runtime: None,
+
+            if rows_affected == 1 {
+                Ok(())
+            } else {
+                Err(anyhow!("contact already requested"))?
+            }
         })
+        .await
     }
 
-    #[cfg(test)]
-    pub fn teardown(&self, url: &str) {
-        self.runtime.as_ref().unwrap().block_on(async {
-            use util::ResultExt;
-            let query = "
-                SELECT pg_terminate_backend(pg_stat_activity.pid)
-                FROM pg_stat_activity
-                WHERE pg_stat_activity.datname = current_database() AND pid <> pg_backend_pid();
-            ";
-            sqlx::query(query).execute(&self.pool).await.log_err();
-            self.pool.close().await;
-            <sqlx::Sqlite as sqlx::migrate::MigrateDatabase>::drop_database(url)
-                .await
-                .log_err();
+    pub async fn remove_contact(&self, requester_id: UserId, responder_id: UserId) -> Result<()> {
+        self.transaction(|tx| async move {
+            let (id_a, id_b) = if responder_id < requester_id {
+                (responder_id, requester_id)
+            } else {
+                (requester_id, responder_id)
+            };
+
+            let result = contact::Entity::delete_many()
+                .filter(
+                    contact::Column::UserIdA
+                        .eq(id_a)
+                        .and(contact::Column::UserIdB.eq(id_b)),
+                )
+                .exec(&*tx)
+                .await?;
+
+            if result.rows_affected == 1 {
+                Ok(())
+            } else {
+                Err(anyhow!("no such contact"))?
+            }
         })
+        .await
+    }
+
+    pub async fn dismiss_contact_notification(
+        &self,
+        user_id: UserId,
+        contact_user_id: UserId,
+    ) -> Result<()> {
+        self.transaction(|tx| async move {
+            let (id_a, id_b, a_to_b) = if user_id < contact_user_id {
+                (user_id, contact_user_id, true)
+            } else {
+                (contact_user_id, user_id, false)
+            };
+
+            let result = contact::Entity::update_many()
+                .set(contact::ActiveModel {
+                    should_notify: ActiveValue::set(false),
+                    ..Default::default()
+                })
+                .filter(
+                    contact::Column::UserIdA
+                        .eq(id_a)
+                        .and(contact::Column::UserIdB.eq(id_b))
+                        .and(
+                            contact::Column::AToB
+                                .eq(a_to_b)
+                                .and(contact::Column::Accepted.eq(true))
+                                .or(contact::Column::AToB
+                                    .ne(a_to_b)
+                                    .and(contact::Column::Accepted.eq(false))),
+                        ),
+                )
+                .exec(&*tx)
+                .await?;
+            if result.rows_affected == 0 {
+                Err(anyhow!("no such contact request"))?
+            } else {
+                Ok(())
+            }
+        })
+        .await
+    }
+
+    pub async fn respond_to_contact_request(
+        &self,
+        responder_id: UserId,
+        requester_id: UserId,
+        accept: bool,
+    ) -> Result<()> {
+        self.transaction(|tx| async move {
+            let (id_a, id_b, a_to_b) = if responder_id < requester_id {
+                (responder_id, requester_id, false)
+            } else {
+                (requester_id, responder_id, true)
+            };
+            let rows_affected = if accept {
+                let result = contact::Entity::update_many()
+                    .set(contact::ActiveModel {
+                        accepted: ActiveValue::set(true),
+                        should_notify: ActiveValue::set(true),
+                        ..Default::default()
+                    })
+                    .filter(
+                        contact::Column::UserIdA
+                            .eq(id_a)
+                            .and(contact::Column::UserIdB.eq(id_b))
+                            .and(contact::Column::AToB.eq(a_to_b)),
+                    )
+                    .exec(&*tx)
+                    .await?;
+                result.rows_affected
+            } else {
+                let result = contact::Entity::delete_many()
+                    .filter(
+                        contact::Column::UserIdA
+                            .eq(id_a)
+                            .and(contact::Column::UserIdB.eq(id_b))
+                            .and(contact::Column::AToB.eq(a_to_b))
+                            .and(contact::Column::Accepted.eq(false)),
+                    )
+                    .exec(&*tx)
+                    .await?;
+
+                result.rows_affected
+            };
+
+            if rows_affected == 1 {
+                Ok(())
+            } else {
+                Err(anyhow!("no such contact request"))?
+            }
+        })
+        .await
+    }
+
+    pub fn fuzzy_like_string(string: &str) -> String {
+        let mut result = String::with_capacity(string.len() * 2 + 1);
+        for c in string.chars() {
+            if c.is_alphanumeric() {
+                result.push('%');
+                result.push(c);
+            }
+        }
+        result.push('%');
+        result
     }
 
     pub async fn fuzzy_search_users(&self, name_query: &str, limit: u32) -> Result<Vec<User>> {
-        test_support!(self, {
+        self.transaction(|tx| async {
+            let tx = tx;
             let like_string = Self::fuzzy_like_string(name_query);
             let query = "
                 SELECT users.*
@@ -217,71 +636,218 @@ impl Db<sqlx::Postgres> {
                 ORDER BY github_login <-> $2
                 LIMIT $3
             ";
-            Ok(sqlx::query_as(query)
-                .bind(like_string)
-                .bind(name_query)
-                .bind(limit as i32)
-                .fetch_all(&self.pool)
+
+            Ok(user::Entity::find()
+                .from_raw_sql(Statement::from_sql_and_values(
+                    self.pool.get_database_backend(),
+                    query.into(),
+                    vec![like_string.into(), name_query.into(), limit.into()],
+                ))
+                .all(&*tx)
                 .await?)
         })
+        .await
     }
 
-    pub async fn get_users_by_ids(&self, ids: Vec<UserId>) -> Result<Vec<User>> {
-        test_support!(self, {
-            let query = "
-                SELECT users.*
-                FROM users
-                WHERE users.id = ANY ($1)
-            ";
-            Ok(sqlx::query_as(query)
-                .bind(&ids.into_iter().map(|id| id.0).collect::<Vec<_>>())
-                .fetch_all(&self.pool)
-                .await?)
+    // signups
+
+    pub async fn create_signup(&self, signup: &NewSignup) -> Result<()> {
+        self.transaction(|tx| async move {
+            signup::Entity::insert(signup::ActiveModel {
+                email_address: ActiveValue::set(signup.email_address.clone()),
+                email_confirmation_code: ActiveValue::set(random_email_confirmation_code()),
+                email_confirmation_sent: ActiveValue::set(false),
+                platform_mac: ActiveValue::set(signup.platform_mac),
+                platform_windows: ActiveValue::set(signup.platform_windows),
+                platform_linux: ActiveValue::set(signup.platform_linux),
+                platform_unknown: ActiveValue::set(false),
+                editor_features: ActiveValue::set(Some(signup.editor_features.clone())),
+                programming_languages: ActiveValue::set(Some(signup.programming_languages.clone())),
+                device_id: ActiveValue::set(signup.device_id.clone()),
+                added_to_mailing_list: ActiveValue::set(signup.added_to_mailing_list),
+                ..Default::default()
+            })
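+            // Re-signing up only refreshes platform and preference columns; created_at and
+            // the confirmation code are left out of the update so an existing signup keeps
+            // its place in the waitlist.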
+            .on_conflict(
+                OnConflict::column(signup::Column::EmailAddress)
+                    .update_columns([
+                        signup::Column::PlatformMac,
+                        signup::Column::PlatformWindows,
+                        signup::Column::PlatformLinux,
+                        signup::Column::EditorFeatures,
+                        signup::Column::ProgrammingLanguages,
+                        signup::Column::DeviceId,
+                        signup::Column::AddedToMailingList,
+                    ])
+                    .to_owned(),
+            )
+            .exec(&*tx)
+            .await?;
+            Ok(())
         })
+        .await
     }
 
-    pub async fn get_user_metrics_id(&self, id: UserId) -> Result<String> {
-        test_support!(self, {
+    pub async fn get_signup(&self, email_address: &str) -> Result<signup::Model> {
+        self.transaction(|tx| async move {
+            let signup = signup::Entity::find()
+                .filter(signup::Column::EmailAddress.eq(email_address))
+                .one(&*tx)
+                .await?
+                .ok_or_else(|| {
+                    anyhow!("signup with email address {} doesn't exist", email_address)
+                })?;
+
+            Ok(signup)
+        })
+        .await
+    }
+
+    pub async fn get_waitlist_summary(&self) -> Result<WaitlistSummary> {
+        self.transaction(|tx| async move {
             let query = "
-                SELECT metrics_id::text
-                FROM users
-                WHERE id = $1
+                SELECT
+                    COUNT(*) as count,
+                    COALESCE(SUM(CASE WHEN platform_linux THEN 1 ELSE 0 END), 0) as linux_count,
+                    COALESCE(SUM(CASE WHEN platform_mac THEN 1 ELSE 0 END), 0) as mac_count,
+                    COALESCE(SUM(CASE WHEN platform_windows THEN 1 ELSE 0 END), 0) as windows_count,
+                    COALESCE(SUM(CASE WHEN platform_unknown THEN 1 ELSE 0 END), 0) as unknown_count
+                FROM (
+                    SELECT *
+                    FROM signups
+                    WHERE
+                        NOT email_confirmation_sent
+                ) AS unsent
             ";
-            Ok(sqlx::query_scalar(query)
-                .bind(id)
-                .fetch_one(&self.pool)
+            Ok(
+                WaitlistSummary::find_by_statement(Statement::from_sql_and_values(
+                    self.pool.get_database_backend(),
+                    query.into(),
+                    vec![],
+                ))
+                .one(&*tx)
+                .await?
+                .ok_or_else(|| anyhow!("invalid result"))?,
+            )
+        })
+        .await
+    }
+
+    pub async fn record_sent_invites(&self, invites: &[Invite]) -> Result<()> {
+        let emails = invites
+            .iter()
+            .map(|s| s.email_address.as_str())
+            .collect::<Vec<_>>();
+        self.transaction(|tx| async {
+            let tx = tx;
+            signup::Entity::update_many()
+                .filter(signup::Column::EmailAddress.is_in(emails.iter().copied()))
+                .set(signup::ActiveModel {
+                    email_confirmation_sent: ActiveValue::set(true),
+                    ..Default::default()
+                })
+                .exec(&*tx)
+                .await?;
+            Ok(())
+        })
+        .await
+    }
+
+    pub async fn get_unsent_invites(&self, count: usize) -> Result<Vec<Invite>> {
+        self.transaction(|tx| async move {
+            Ok(signup::Entity::find()
+                .select_only()
+                .column(signup::Column::EmailAddress)
+                .column(signup::Column::EmailConfirmationCode)
+                .filter(
+                    signup::Column::EmailConfirmationSent.eq(false).and(
+                        signup::Column::PlatformMac
+                            .eq(true)
+                            .or(signup::Column::PlatformUnknown.eq(true)),
+                    ),
+                )
+                .order_by_asc(signup::Column::CreatedAt)
+                .limit(count as u64)
+                .into_model()
+                .all(&*tx)
                 .await?)
         })
+        .await
     }
 
-    pub async fn create_user(
+    // invite codes
+
+    pub async fn create_invite_from_code(
         &self,
+        code: &str,
         email_address: &str,
-        admin: bool,
-        params: NewUserParams,
-    ) -> Result<NewUserResult> {
-        test_support!(self, {
-            let query = "
-                INSERT INTO users (email_address, github_login, github_user_id, admin)
-                VALUES ($1, $2, $3, $4)
-                ON CONFLICT (github_login) DO UPDATE SET github_login = excluded.github_login
-                RETURNING id, metrics_id::text
-            ";
+        device_id: Option<&str>,
+    ) -> Result<Invite> {
+        self.transaction(|tx| async move {
+            let existing_user = user::Entity::find()
+                .filter(user::Column::EmailAddress.eq(email_address))
+                .one(&*tx)
+                .await?;
 
-            let (user_id, metrics_id): (UserId, String) = sqlx::query_as(query)
-                .bind(email_address)
-                .bind(params.github_login)
-                .bind(params.github_user_id)
-                .bind(admin)
-                .fetch_one(&self.pool)
+            if existing_user.is_some() {
+                Err(anyhow!("email address is already in use"))?;
+            }
+
+            let inviting_user_with_invites = match user::Entity::find()
+                .filter(
+                    user::Column::InviteCode
+                        .eq(code)
+                        .and(user::Column::InviteCount.gt(0)),
+                )
+                .one(&*tx)
+                .await?
+            {
+                Some(inviting_user) => inviting_user,
+                None => {
+                    return Err(Error::Http(
+                        StatusCode::UNAUTHORIZED,
+                        "unable to find an invite code with invites remaining".to_string(),
+                    ))?
+                }
+            };
+            user::Entity::update_many()
+                .filter(
+                    user::Column::Id
+                        .eq(inviting_user_with_invites.id)
+                        .and(user::Column::InviteCount.gt(0)),
+                )
+                .col_expr(
+                    user::Column::InviteCount,
+                    Expr::col(user::Column::InviteCount).sub(1),
+                )
+                .exec(&*tx)
                 .await?;
-            Ok(NewUserResult {
-                user_id,
-                metrics_id,
-                signup_device_id: None,
-                inviting_user_id: None,
+
+            let signup = signup::Entity::insert(signup::ActiveModel {
+                email_address: ActiveValue::set(email_address.into()),
+                email_confirmation_code: ActiveValue::set(random_email_confirmation_code()),
+                email_confirmation_sent: ActiveValue::set(false),
+                inviting_user_id: ActiveValue::set(Some(inviting_user_with_invites.id)),
+                platform_linux: ActiveValue::set(false),
+                platform_mac: ActiveValue::set(false),
+                platform_windows: ActiveValue::set(false),
+                platform_unknown: ActiveValue::set(true),
+                device_id: ActiveValue::set(device_id.map(|device_id| device_id.into())),
+                ..Default::default()
+            })
+            .on_conflict(
+                OnConflict::column(signup::Column::EmailAddress)
+                    .update_column(signup::Column::InvitingUserId)
+                    .to_owned(),
+            )
+            .exec_with_returning(&*tx)
+            .await?;
+
+            Ok(Invite {
+                email_address: signup.email_address,
+                email_confirmation_code: signup.email_confirmation_code,
             })
         })
+        .await
     }
 
     pub async fn create_user_from_invite(

crates/collab/src/db/access_token.rs 🔗

@@ -0,0 +1,29 @@
+use super::{AccessTokenId, UserId};
+use sea_orm::entity::prelude::*;
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "access_tokens")]
+pub struct Model {
+    #[sea_orm(primary_key)]
+    pub id: AccessTokenId,
+    pub user_id: UserId,
+    pub hash: String,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {
+    #[sea_orm(
+        belongs_to = "super::user::Entity",
+        from = "Column::UserId",
+        to = "super::user::Column::Id"
+    )]
+    User,
+}
+
+impl Related<super::user::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::User.def()
+    }
+}
+
+impl ActiveModelBehavior for ActiveModel {}
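
The new entity modules added here (access_token above, and contact, project, room, and the rest below) all follow SeaORM's derive pattern: a `Model` struct mapped to a table, a `Relation` enum describing foreign keys, and `Related` impls that allow joins across tables. As a rough usage sketch (a hypothetical helper, not part of this change, assuming it lives inside the same `db` module so `access_token` and `UserId` are in scope; it only reuses calls that already appear in db.rs above), fetching a user's access tokens with this entity could look like:

    use sea_orm::{ColumnTrait, DatabaseTransaction, EntityTrait, QueryFilter};

    // Hypothetical helper: list all access tokens belonging to `user_id`.
    async fn tokens_for_user(
        tx: &DatabaseTransaction,
        user_id: UserId,
    ) -> Result<Vec<access_token::Model>, sea_orm::DbErr> {
        access_token::Entity::find()
            .filter(access_token::Column::UserId.eq(user_id))
            .all(tx)
            .await
    }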

crates/collab/src/db/contact.rs 🔗

@@ -0,0 +1,58 @@
+use super::{ContactId, UserId};
+use sea_orm::entity::prelude::*;
+
+#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "contacts")]
+pub struct Model {
+    #[sea_orm(primary_key)]
+    pub id: ContactId,
+    pub user_id_a: UserId,
+    pub user_id_b: UserId,
+    pub a_to_b: bool,
+    pub should_notify: bool,
+    pub accepted: bool,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {
+    #[sea_orm(
+        belongs_to = "super::room_participant::Entity",
+        from = "Column::UserIdA",
+        to = "super::room_participant::Column::UserId"
+    )]
+    UserARoomParticipant,
+    #[sea_orm(
+        belongs_to = "super::room_participant::Entity",
+        from = "Column::UserIdB",
+        to = "super::room_participant::Column::UserId"
+    )]
+    UserBRoomParticipant,
+}
+
+impl ActiveModelBehavior for ActiveModel {}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum Contact {
+    Accepted {
+        user_id: UserId,
+        should_notify: bool,
+        busy: bool,
+    },
+    Outgoing {
+        user_id: UserId,
+    },
+    Incoming {
+        user_id: UserId,
+        should_notify: bool,
+    },
+}
+
+impl Contact {
+    pub fn user_id(&self) -> UserId {
+        match self {
+            Contact::Accepted { user_id, .. } => *user_id,
+            Contact::Outgoing { user_id } => *user_id,
+            Contact::Incoming { user_id, .. } => *user_id,
+        }
+    }
+}

crates/collab/src/db/language_server.rs 🔗

@@ -0,0 +1,30 @@
+use super::ProjectId;
+use sea_orm::entity::prelude::*;
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "language_servers")]
+pub struct Model {
+    #[sea_orm(primary_key)]
+    pub project_id: ProjectId,
+    #[sea_orm(primary_key)]
+    pub id: i64,
+    pub name: String,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {
+    #[sea_orm(
+        belongs_to = "super::project::Entity",
+        from = "Column::ProjectId",
+        to = "super::project::Column::Id"
+    )]
+    Project,
+}
+
+impl Related<super::project::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::Project.def()
+    }
+}
+
+impl ActiveModelBehavior for ActiveModel {}

crates/collab/src/db/project.rs 🔗

@@ -0,0 +1,67 @@
+use super::{ProjectId, RoomId, UserId};
+use sea_orm::entity::prelude::*;
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "projects")]
+pub struct Model {
+    #[sea_orm(primary_key)]
+    pub id: ProjectId,
+    pub room_id: RoomId,
+    pub host_user_id: UserId,
+    pub host_connection_id: i32,
+    pub host_connection_epoch: Uuid,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {
+    #[sea_orm(
+        belongs_to = "super::user::Entity",
+        from = "Column::HostUserId",
+        to = "super::user::Column::Id"
+    )]
+    HostUser,
+    #[sea_orm(
+        belongs_to = "super::room::Entity",
+        from = "Column::RoomId",
+        to = "super::room::Column::Id"
+    )]
+    Room,
+    #[sea_orm(has_many = "super::worktree::Entity")]
+    Worktrees,
+    #[sea_orm(has_many = "super::project_collaborator::Entity")]
+    Collaborators,
+    #[sea_orm(has_many = "super::language_server::Entity")]
+    LanguageServers,
+}
+
+impl Related<super::user::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::HostUser.def()
+    }
+}
+
+impl Related<super::room::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::Room.def()
+    }
+}
+
+impl Related<super::worktree::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::Worktrees.def()
+    }
+}
+
+impl Related<super::project_collaborator::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::Collaborators.def()
+    }
+}
+
+impl Related<super::language_server::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::LanguageServers.def()
+    }
+}
+
+impl ActiveModelBehavior for ActiveModel {}

crates/collab/src/db/project_collaborator.rs 🔗

@@ -0,0 +1,33 @@
+use super::{ProjectCollaboratorId, ProjectId, ReplicaId, UserId};
+use sea_orm::entity::prelude::*;
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "project_collaborators")]
+pub struct Model {
+    #[sea_orm(primary_key)]
+    pub id: ProjectCollaboratorId,
+    pub project_id: ProjectId,
+    pub connection_id: i32,
+    pub connection_epoch: Uuid,
+    pub user_id: UserId,
+    pub replica_id: ReplicaId,
+    pub is_host: bool,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {
+    #[sea_orm(
+        belongs_to = "super::project::Entity",
+        from = "Column::ProjectId",
+        to = "super::project::Column::Id"
+    )]
+    Project,
+}
+
+impl Related<super::project::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::Project.def()
+    }
+}
+
+impl ActiveModelBehavior for ActiveModel {}

crates/collab/src/db/room.rs 🔗

@@ -0,0 +1,32 @@
+use super::RoomId;
+use sea_orm::entity::prelude::*;
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "rooms")]
+pub struct Model {
+    #[sea_orm(primary_key)]
+    pub id: RoomId,
+    pub live_kit_room: String,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {
+    #[sea_orm(has_many = "super::room_participant::Entity")]
+    RoomParticipant,
+    #[sea_orm(has_many = "super::project::Entity")]
+    Project,
+}
+
+impl Related<super::room_participant::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::RoomParticipant.def()
+    }
+}
+
+impl Related<super::project::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::Project.def()
+    }
+}
+
+impl ActiveModelBehavior for ActiveModel {}

crates/collab/src/db/room_participant.rs 🔗

@@ -0,0 +1,50 @@
+use super::{ProjectId, RoomId, RoomParticipantId, UserId};
+use sea_orm::entity::prelude::*;
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "room_participants")]
+pub struct Model {
+    #[sea_orm(primary_key)]
+    pub id: RoomParticipantId,
+    pub room_id: RoomId,
+    pub user_id: UserId,
+    pub answering_connection_id: Option<i32>,
+    pub answering_connection_epoch: Option<Uuid>,
+    pub answering_connection_lost: bool,
+    pub location_kind: Option<i32>,
+    pub location_project_id: Option<ProjectId>,
+    pub initial_project_id: Option<ProjectId>,
+    pub calling_user_id: UserId,
+    pub calling_connection_id: i32,
+    pub calling_connection_epoch: Uuid,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {
+    #[sea_orm(
+        belongs_to = "super::user::Entity",
+        from = "Column::UserId",
+        to = "super::user::Column::Id"
+    )]
+    User,
+    #[sea_orm(
+        belongs_to = "super::room::Entity",
+        from = "Column::RoomId",
+        to = "super::room::Column::Id"
+    )]
+    Room,
+}
+
+impl Related<super::user::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::User.def()
+    }
+}
+
+impl Related<super::room::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::Room.def()
+    }
+}
+
+impl ActiveModelBehavior for ActiveModel {}

crates/collab/src/db/signup.rs 🔗

@@ -0,0 +1,57 @@
+use super::{SignupId, UserId};
+use sea_orm::{entity::prelude::*, FromQueryResult};
+use serde::{Deserialize, Serialize};
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "signups")]
+pub struct Model {
+    #[sea_orm(primary_key)]
+    pub id: SignupId,
+    pub email_address: String,
+    pub email_confirmation_code: String,
+    pub email_confirmation_sent: bool,
+    pub created_at: DateTime,
+    pub device_id: Option<String>,
+    pub user_id: Option<UserId>,
+    pub inviting_user_id: Option<UserId>,
+    pub platform_mac: bool,
+    pub platform_linux: bool,
+    pub platform_windows: bool,
+    pub platform_unknown: bool,
+    pub editor_features: Option<Vec<String>>,
+    pub programming_languages: Option<Vec<String>>,
+    pub added_to_mailing_list: bool,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {}
+
+impl ActiveModelBehavior for ActiveModel {}
+
+#[derive(Clone, Debug, PartialEq, Eq, FromQueryResult, Serialize, Deserialize)]
+pub struct Invite {
+    pub email_address: String,
+    pub email_confirmation_code: String,
+}
+
+#[derive(Clone, Debug, Deserialize)]
+pub struct NewSignup {
+    pub email_address: String,
+    pub platform_mac: bool,
+    pub platform_windows: bool,
+    pub platform_linux: bool,
+    pub editor_features: Vec<String>,
+    pub programming_languages: Vec<String>,
+    pub device_id: Option<String>,
+    pub added_to_mailing_list: bool,
+    pub created_at: Option<DateTime>,
+}
+
+#[derive(Clone, Debug, PartialEq, Deserialize, Serialize, FromQueryResult)]
+pub struct WaitlistSummary {
+    pub count: i64,
+    pub linux_count: i64,
+    pub mac_count: i64,
+    pub windows_count: i64,
+    pub unknown_count: i64,
+}

crates/collab/src/db_tests.rs → crates/collab/src/db/tests.rs 🔗

@@ -1,19 +1,22 @@
-use super::db::*;
+use super::*;
 use gpui::executor::{Background, Deterministic};
 use std::sync::Arc;
 
+#[cfg(test)]
+use pretty_assertions::{assert_eq, assert_ne};
+
 macro_rules! test_both_dbs {
     ($postgres_test_name:ident, $sqlite_test_name:ident, $db:ident, $body:block) => {
         #[gpui::test]
         async fn $postgres_test_name() {
-            let test_db = PostgresTestDb::new(Deterministic::new(0).build_background());
+            let test_db = TestDb::postgres(Deterministic::new(0).build_background());
             let $db = test_db.db();
             $body
         }
 
         #[gpui::test]
         async fn $sqlite_test_name() {
-            let test_db = SqliteTestDb::new(Deterministic::new(0).build_background());
+            let test_db = TestDb::sqlite(Deterministic::new(0).build_background());
             let $db = test_db.db();
             $body
         }
@@ -26,9 +29,10 @@ test_both_dbs!(
     db,
     {
         let mut user_ids = Vec::new();
+        let mut user_metric_ids = Vec::new();
         for i in 1..=4 {
-            user_ids.push(
-                db.create_user(
+            let user = db
+                .create_user(
                     &format!("user{i}@example.com"),
                     false,
                     NewUserParams {
@@ -38,9 +42,9 @@ test_both_dbs!(
                     },
                 )
                 .await
-                .unwrap()
-                .user_id,
-            );
+                .unwrap();
+            user_ids.push(user.user_id);
+            user_metric_ids.push(user.metrics_id);
         }
 
         assert_eq!(
@@ -52,6 +56,7 @@ test_both_dbs!(
                     github_user_id: Some(1),
                     email_address: Some("user1@example.com".to_string()),
                     admin: false,
+                    metrics_id: user_metric_ids[0].parse().unwrap(),
                     ..Default::default()
                 },
                 User {
@@ -60,6 +65,7 @@ test_both_dbs!(
                     github_user_id: Some(2),
                     email_address: Some("user2@example.com".to_string()),
                     admin: false,
+                    metrics_id: user_metric_ids[1].parse().unwrap(),
                     ..Default::default()
                 },
                 User {
@@ -68,6 +74,7 @@ test_both_dbs!(
                     github_user_id: Some(3),
                     email_address: Some("user3@example.com".to_string()),
                     admin: false,
+                    metrics_id: user_metric_ids[2].parse().unwrap(),
                     ..Default::default()
                 },
                 User {
@@ -76,6 +83,7 @@ test_both_dbs!(
                     github_user_id: Some(4),
                     email_address: Some("user4@example.com".to_string()),
                     admin: false,
+                    metrics_id: user_metric_ids[3].parse().unwrap(),
                     ..Default::default()
                 }
             ]
@@ -258,7 +266,8 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
         db.get_contacts(user_1).await.unwrap(),
         &[Contact::Accepted {
             user_id: user_2,
-            should_notify: true
+            should_notify: true,
+            busy: false,
         }],
     );
     assert!(db.has_contact(user_1, user_2).await.unwrap());
@@ -268,6 +277,7 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
         &[Contact::Accepted {
             user_id: user_1,
             should_notify: false,
+            busy: false,
         }]
     );
 
@@ -284,6 +294,7 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
         &[Contact::Accepted {
             user_id: user_2,
             should_notify: true,
+            busy: false,
         }]
     );
 
@@ -296,6 +307,7 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
         &[Contact::Accepted {
             user_id: user_2,
             should_notify: false,
+            busy: false,
         }]
     );
 
@@ -309,10 +321,12 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
             Contact::Accepted {
                 user_id: user_2,
                 should_notify: false,
+                busy: false,
             },
             Contact::Accepted {
                 user_id: user_3,
-                should_notify: false
+                should_notify: false,
+                busy: false,
             }
         ]
     );
@@ -320,7 +334,8 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
         db.get_contacts(user_3).await.unwrap(),
         &[Contact::Accepted {
             user_id: user_1,
-            should_notify: false
+            should_notify: false,
+            busy: false,
         }],
     );
 
@@ -335,14 +350,16 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
         db.get_contacts(user_2).await.unwrap(),
         &[Contact::Accepted {
             user_id: user_1,
-            should_notify: false
+            should_notify: false,
+            busy: false,
         }]
     );
     assert_eq!(
         db.get_contacts(user_3).await.unwrap(),
         &[Contact::Accepted {
             user_id: user_1,
-            should_notify: false
+            should_notify: false,
+            busy: false,
         }],
     );
 });
@@ -388,16 +405,81 @@ test_both_dbs!(test_metrics_id_postgres, test_metrics_id_sqlite, db, {
     assert_ne!(metrics_id1, metrics_id2);
 });
 
+test_both_dbs!(
+    test_project_count_postgres,
+    test_project_count_sqlite,
+    db,
+    {
+        let user1 = db
+            .create_user(
+                &format!("admin@example.com"),
+                true,
+                NewUserParams {
+                    github_login: "admin".into(),
+                    github_user_id: 0,
+                    invite_count: 0,
+                },
+            )
+            .await
+            .unwrap();
+        let user2 = db
+            .create_user(
+                &format!("user@example.com"),
+                false,
+                NewUserParams {
+                    github_login: "user".into(),
+                    github_user_id: 1,
+                    invite_count: 0,
+                },
+            )
+            .await
+            .unwrap();
+
+        let room_id = RoomId::from_proto(
+            db.create_room(user1.user_id, ConnectionId(0), "")
+                .await
+                .unwrap()
+                .id,
+        );
+        db.call(room_id, user1.user_id, ConnectionId(0), user2.user_id, None)
+            .await
+            .unwrap();
+        db.join_room(room_id, user2.user_id, ConnectionId(1))
+            .await
+            .unwrap();
+        assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
+
+        db.share_project(room_id, ConnectionId(1), &[])
+            .await
+            .unwrap();
+        assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1);
+
+        db.share_project(room_id, ConnectionId(1), &[])
+            .await
+            .unwrap();
+        assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
+
+        // Projects shared by admins aren't counted.
+        db.share_project(room_id, ConnectionId(0), &[])
+            .await
+            .unwrap();
+        assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
+
+        db.leave_room(ConnectionId(1)).await.unwrap();
+        assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
+    }
+);
+
 #[test]
 fn test_fuzzy_like_string() {
-    assert_eq!(DefaultDb::fuzzy_like_string("abcd"), "%a%b%c%d%");
-    assert_eq!(DefaultDb::fuzzy_like_string("x y"), "%x%y%");
-    assert_eq!(DefaultDb::fuzzy_like_string(" z  "), "%z%");
+    assert_eq!(Database::fuzzy_like_string("abcd"), "%a%b%c%d%");
+    assert_eq!(Database::fuzzy_like_string("x y"), "%x%y%");
+    assert_eq!(Database::fuzzy_like_string(" z  "), "%z%");
 }
 
 #[gpui::test]
 async fn test_fuzzy_search_users() {
-    let test_db = PostgresTestDb::new(build_background_executor());
+    let test_db = TestDb::postgres(build_background_executor());
     let db = test_db.db();
     for (i, github_login) in [
         "California",
@@ -433,7 +515,7 @@ async fn test_fuzzy_search_users() {
         &["rhode-island", "colorado", "oregon"],
     );
 
-    async fn fuzzy_search_user_names(db: &Db<sqlx::Postgres>, query: &str) -> Vec<String> {
+    async fn fuzzy_search_user_names(db: &Database, query: &str) -> Vec<String> {
         db.fuzzy_search_users(query, 10)
             .await
             .unwrap()
@@ -445,7 +527,7 @@ async fn test_fuzzy_search_users() {
 
 #[gpui::test]
 async fn test_invite_codes() {
-    let test_db = PostgresTestDb::new(build_background_executor());
+    let test_db = TestDb::postgres(build_background_executor());
     let db = test_db.db();
 
     let NewUserResult { user_id: user1, .. } = db
@@ -504,16 +586,20 @@ async fn test_invite_codes() {
         db.get_contacts(user1).await.unwrap(),
         [Contact::Accepted {
             user_id: user2,
-            should_notify: true
+            should_notify: true,
+            busy: false,
         }]
     );
     assert_eq!(
         db.get_contacts(user2).await.unwrap(),
         [Contact::Accepted {
             user_id: user1,
-            should_notify: false
+            should_notify: false,
+            busy: false,
         }]
     );
+    assert!(db.has_contact(user1, user2).await.unwrap());
+    assert!(db.has_contact(user2, user1).await.unwrap());
     assert_eq!(
         db.get_invite_code_for_user(user2).await.unwrap().unwrap().1,
         7
@@ -550,11 +636,13 @@ async fn test_invite_codes() {
         [
             Contact::Accepted {
                 user_id: user2,
-                should_notify: true
+                should_notify: true,
+                busy: false,
             },
             Contact::Accepted {
                 user_id: user3,
-                should_notify: true
+                should_notify: true,
+                busy: false,
             }
         ]
     );
@@ -562,9 +650,12 @@ async fn test_invite_codes() {
         db.get_contacts(user3).await.unwrap(),
         [Contact::Accepted {
             user_id: user1,
-            should_notify: false
+            should_notify: false,
+            busy: false,
         }]
     );
+    assert!(db.has_contact(user1, user3).await.unwrap());
+    assert!(db.has_contact(user3, user1).await.unwrap());
     assert_eq!(
         db.get_invite_code_for_user(user3).await.unwrap().unwrap().1,
         3
@@ -607,15 +698,18 @@ async fn test_invite_codes() {
         [
             Contact::Accepted {
                 user_id: user2,
-                should_notify: true
+                should_notify: true,
+                busy: false,
             },
             Contact::Accepted {
                 user_id: user3,
-                should_notify: true
+                should_notify: true,
+                busy: false,
             },
             Contact::Accepted {
                 user_id: user4,
-                should_notify: true
+                should_notify: true,
+                busy: false,
             }
         ]
     );
@@ -623,9 +717,12 @@ async fn test_invite_codes() {
         db.get_contacts(user4).await.unwrap(),
         [Contact::Accepted {
             user_id: user1,
-            should_notify: false
+            should_notify: false,
+            busy: false,
         }]
     );
+    assert!(db.has_contact(user1, user4).await.unwrap());
+    assert!(db.has_contact(user4, user1).await.unwrap());
     assert_eq!(
         db.get_invite_code_for_user(user4).await.unwrap().unwrap().1,
         5
@@ -637,11 +734,162 @@ async fn test_invite_codes() {
         .unwrap_err();
     let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap();
     assert_eq!(invite_count, 1);
+
+    // A newer user can invite an existing one via a different email address
+    // than the one they used to sign up.
+    let user5 = db
+        .create_user(
+            "user5@example.com",
+            false,
+            NewUserParams {
+                github_login: "user5".into(),
+                github_user_id: 5,
+                invite_count: 0,
+            },
+        )
+        .await
+        .unwrap()
+        .user_id;
+    db.set_invite_count_for_user(user5, 5).await.unwrap();
+    let (user5_invite_code, _) = db.get_invite_code_for_user(user5).await.unwrap().unwrap();
+    let user5_invite_to_user1 = db
+        .create_invite_from_code(&user5_invite_code, "user1@different.com", None)
+        .await
+        .unwrap();
+    let user1_2 = db
+        .create_user_from_invite(
+            &user5_invite_to_user1,
+            NewUserParams {
+                github_login: "user1".into(),
+                github_user_id: 1,
+                invite_count: 5,
+            },
+        )
+        .await
+        .unwrap()
+        .unwrap()
+        .user_id;
+    assert_eq!(user1_2, user1);
+    assert_eq!(
+        db.get_contacts(user1).await.unwrap(),
+        [
+            Contact::Accepted {
+                user_id: user2,
+                should_notify: true,
+                busy: false,
+            },
+            Contact::Accepted {
+                user_id: user3,
+                should_notify: true,
+                busy: false,
+            },
+            Contact::Accepted {
+                user_id: user4,
+                should_notify: true,
+                busy: false,
+            },
+            Contact::Accepted {
+                user_id: user5,
+                should_notify: false,
+                busy: false,
+            }
+        ]
+    );
+    assert_eq!(
+        db.get_contacts(user5).await.unwrap(),
+        [Contact::Accepted {
+            user_id: user1,
+            should_notify: true,
+            busy: false,
+        }]
+    );
+    assert!(db.has_contact(user1, user5).await.unwrap());
+    assert!(db.has_contact(user5, user1).await.unwrap());
+}
+
+#[gpui::test]
+async fn test_multiple_signup_overwrite() {
+    let test_db = TestDb::postgres(build_background_executor());
+    let db = test_db.db();
+
+    let email_address = "user_1@example.com".to_string();
+
+    let initial_signup_created_at_milliseconds = 0;
+
+    let initial_signup = NewSignup {
+        email_address: email_address.clone(),
+        platform_mac: false,
+        platform_linux: true,
+        platform_windows: false,
+        editor_features: vec!["speed".into()],
+        programming_languages: vec!["rust".into(), "c".into()],
+        device_id: Some(format!("device_id")),
+        added_to_mailing_list: false,
+        created_at: Some(
+            DateTime::from_timestamp_millis(initial_signup_created_at_milliseconds).unwrap(),
+        ),
+    };
+
+    db.create_signup(&initial_signup).await.unwrap();
+
+    let initial_signup_from_db = db.get_signup(&email_address).await.unwrap();
+
+    assert_eq!(
+        initial_signup_from_db.clone(),
+        signup::Model {
+            email_address: initial_signup.email_address,
+            platform_mac: initial_signup.platform_mac,
+            platform_linux: initial_signup.platform_linux,
+            platform_windows: initial_signup.platform_windows,
+            editor_features: Some(initial_signup.editor_features),
+            programming_languages: Some(initial_signup.programming_languages),
+            added_to_mailing_list: initial_signup.added_to_mailing_list,
+            ..initial_signup_from_db
+        }
+    );
+
+    let subsequent_signup = NewSignup {
+        email_address: email_address.clone(),
+        platform_mac: true,
+        platform_linux: false,
+        platform_windows: true,
+        editor_features: vec!["git integration".into(), "clean design".into()],
+        programming_languages: vec!["d".into(), "elm".into()],
+        device_id: Some(format!("different_device_id")),
+        added_to_mailing_list: true,
+        // The subsequent signup happens the next day
+        created_at: Some(
+            DateTime::from_timestamp_millis(
+                initial_signup_created_at_milliseconds + (1000 * 60 * 60 * 24),
+            )
+            .unwrap(),
+        ),
+    };
+
+    db.create_signup(&subsequent_signup).await.unwrap();
+
+    let subsequent_signup_from_db = db.get_signup(&email_address).await.unwrap();
+
+    assert_eq!(
+        subsequent_signup_from_db.clone(),
+        signup::Model {
+            platform_mac: subsequent_signup.platform_mac,
+            platform_linux: subsequent_signup.platform_linux,
+            platform_windows: subsequent_signup.platform_windows,
+            editor_features: Some(subsequent_signup.editor_features),
+            programming_languages: Some(subsequent_signup.programming_languages),
+            device_id: subsequent_signup.device_id,
+            added_to_mailing_list: subsequent_signup.added_to_mailing_list,
+            // Shouldn't overwrite their creation DateTime - the user shouldn't lose their spot in line
+            created_at: initial_signup_from_db.created_at,
+            ..subsequent_signup_from_db
+        }
+    );
 }
 
 #[gpui::test]
 async fn test_signups() {
-    let test_db = PostgresTestDb::new(build_background_executor());
+    let test_db = TestDb::postgres(build_background_executor());
     let db = test_db.db();
 
     let usernames = (0..8).map(|i| format!("person-{i}")).collect::<Vec<_>>();
@@ -649,7 +897,7 @@ async fn test_signups() {
     let all_signups = usernames
         .iter()
         .enumerate()
-        .map(|(i, username)| Signup {
+        .map(|(i, username)| NewSignup {
             email_address: format!("{username}@example.com"),
             platform_mac: true,
             platform_linux: i % 2 == 0,
@@ -657,8 +905,10 @@ async fn test_signups() {
             editor_features: vec!["speed".into()],
             programming_languages: vec!["rust".into(), "c".into()],
             device_id: Some(format!("device_id_{i}")),
+            added_to_mailing_list: i != 0, // One user failed to subscribe
+            created_at: Some(DateTime::from_timestamp_millis(i as i64).unwrap()), // Signups are consecutive
         })
-        .collect::<Vec<Signup>>();
+        .collect::<Vec<NewSignup>>();
 
     // people sign up on the waitlist
     for signup in &all_signups {

crates/collab/src/db/user.rs 🔗

@@ -0,0 +1,49 @@
+use super::UserId;
+use sea_orm::entity::prelude::*;
+use serde::Serialize;
+
+#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel, Serialize)]
+#[sea_orm(table_name = "users")]
+pub struct Model {
+    #[sea_orm(primary_key)]
+    pub id: UserId,
+    pub github_login: String,
+    pub github_user_id: Option<i32>,
+    pub email_address: Option<String>,
+    pub admin: bool,
+    pub invite_code: Option<String>,
+    pub invite_count: i32,
+    pub inviter_id: Option<UserId>,
+    pub connected_once: bool,
+    pub metrics_id: Uuid,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {
+    #[sea_orm(has_many = "super::access_token::Entity")]
+    AccessToken,
+    #[sea_orm(has_one = "super::room_participant::Entity")]
+    RoomParticipant,
+    #[sea_orm(has_many = "super::project::Entity")]
+    HostedProjects,
+}
+
+impl Related<super::access_token::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::AccessToken.def()
+    }
+}
+
+impl Related<super::room_participant::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::RoomParticipant.def()
+    }
+}
+
+impl Related<super::project::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::HostedProjects.def()
+    }
+}
+
+impl ActiveModelBehavior for ActiveModel {}
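
For context, the `Related` impls above are what let callers traverse these associations through SeaORM's query builder. A minimal sketch, assuming a `sea_orm::DatabaseConnection` named `db` and the sibling `access_token` module from this diff; `count_user_tokens` is a hypothetical helper, not part of this change:

    use sea_orm::{DatabaseConnection, EntityTrait, ModelTrait};

    // Hypothetical helper, for illustration only.
    async fn count_user_tokens(
        db: &DatabaseConnection,
        id: UserId,
    ) -> Result<usize, sea_orm::error::DbErr> {
        // Fetch the user by primary key.
        let user = match user::Entity::find_by_id(id).one(db).await? {
            Some(user) => user,
            None => return Ok(0),
        };
        // Follow the `has_many` relation to the user's access tokens.
        let tokens = user.find_related(access_token::Entity).all(db).await?;
        Ok(tokens.len())
    }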

crates/collab/src/db/worktree.rs 🔗

@@ -0,0 +1,34 @@
+use super::ProjectId;
+use sea_orm::entity::prelude::*;
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "worktrees")]
+pub struct Model {
+    #[sea_orm(primary_key)]
+    pub id: i64,
+    #[sea_orm(primary_key)]
+    pub project_id: ProjectId,
+    pub abs_path: String,
+    pub root_name: String,
+    pub visible: bool,
+    pub scan_id: i64,
+    pub is_complete: bool,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {
+    #[sea_orm(
+        belongs_to = "super::project::Entity",
+        from = "Column::ProjectId",
+        to = "super::project::Column::Id"
+    )]
+    Project,
+}
+
+impl Related<super::project::Entity> for Entity {
+    fn to() -> RelationDef {
+        Relation::Project.def()
+    }
+}
+
+impl ActiveModelBehavior for ActiveModel {}
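
Both `id` and `project_id` are primary-key columns, so a single row is addressed by the composite key `(id, project_id)`. A rough sketch of querying this entity, assuming a `DatabaseConnection` named `db` and that `ProjectId` converts into a SeaORM `Value` (as the column definition above requires); the function is illustrative only:

    use sea_orm::{ColumnTrait, DatabaseConnection, EntityTrait, QueryFilter};

    // Illustrative only, not part of this diff.
    async fn worktrees_for_project(
        db: &DatabaseConnection,
        project_id: ProjectId,
    ) -> Result<Vec<worktree::Model>, sea_orm::error::DbErr> {
        // A single worktree would be fetched with
        // `worktree::Entity::find_by_id((id, project_id))`; here we list all
        // worktrees that belong to one project.
        worktree::Entity::find()
            .filter(worktree::Column::ProjectId.eq(project_id))
            .all(db)
            .await
    }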

crates/collab/src/db/worktree_diagnostic_summary.rs 🔗

@@ -0,0 +1,21 @@
+use super::ProjectId;
+use sea_orm::entity::prelude::*;
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "worktree_diagnostic_summaries")]
+pub struct Model {
+    #[sea_orm(primary_key)]
+    pub project_id: ProjectId,
+    #[sea_orm(primary_key)]
+    pub worktree_id: i64,
+    #[sea_orm(primary_key)]
+    pub path: String,
+    pub language_server_id: i64,
+    pub error_count: i32,
+    pub warning_count: i32,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {}
+
+impl ActiveModelBehavior for ActiveModel {}

crates/collab/src/db/worktree_entry.rs 🔗

@@ -0,0 +1,25 @@
+use super::ProjectId;
+use sea_orm::entity::prelude::*;
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "worktree_entries")]
+pub struct Model {
+    #[sea_orm(primary_key)]
+    pub project_id: ProjectId,
+    #[sea_orm(primary_key)]
+    pub worktree_id: i64,
+    #[sea_orm(primary_key)]
+    pub id: i64,
+    pub is_dir: bool,
+    pub path: String,
+    pub inode: i64,
+    pub mtime_seconds: i64,
+    pub mtime_nanos: i32,
+    pub is_symlink: bool,
+    pub is_ignored: bool,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {}
+
+impl ActiveModelBehavior for ActiveModel {}
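
The modification time is stored split across `mtime_seconds` and `mtime_nanos`; reassembling it into a `SystemTime` is a one-liner. A small sketch (the helper name is hypothetical and the cast assumes the stored values are non-negative):

    use std::time::{Duration, SystemTime, UNIX_EPOCH};

    // Hypothetical helper; rebuilds the mtime from the two columns above.
    // Assumes the stored values are non-negative, as filesystem mtimes are here.
    fn entry_mtime(mtime_seconds: i64, mtime_nanos: i32) -> SystemTime {
        UNIX_EPOCH + Duration::new(mtime_seconds as u64, mtime_nanos as u32)
    }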

crates/collab/src/executor.rs 🔗

@@ -0,0 +1,36 @@
+use std::{future::Future, time::Duration};
+
+#[derive(Clone)]
+pub enum Executor {
+    Production,
+    #[cfg(test)]
+    Deterministic(std::sync::Arc<gpui::executor::Background>),
+}
+
+impl Executor {
+    pub fn spawn_detached<F>(&self, future: F)
+    where
+        F: 'static + Send + Future<Output = ()>,
+    {
+        match self {
+            Executor::Production => {
+                tokio::spawn(future);
+            }
+            #[cfg(test)]
+            Executor::Deterministic(background) => {
+                background.spawn(future).detach();
+            }
+        }
+    }
+
+    pub fn sleep(&self, duration: Duration) -> impl Future<Output = ()> {
+        let this = self.clone();
+        async move {
+            match this {
+                Executor::Production => tokio::time::sleep(duration).await,
+                #[cfg(test)]
+                Executor::Deterministic(background) => background.timer(duration).await,
+            }
+        }
+    }
+}
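
The enum lets the production server spawn work on Tokio while tests drive the same futures on GPUI's deterministic background executor, so timers only advance when a test advances the clock. A minimal usage sketch (`schedule_ping` is a hypothetical caller, named only for illustration):

    use std::time::Duration;

    // Hypothetical caller: `executor` is Executor::Production on the server
    // and Executor::Deterministic(..) inside tests.
    fn schedule_ping(executor: Executor) {
        let timer = executor.clone();
        executor.spawn_detached(async move {
            // In tests this only resolves when the deterministic clock advances.
            timer.sleep(Duration::from_secs(5)).await;
            // ...send the ping here...
        });
    }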

crates/collab/src/integration_tests.rs 🔗

@@ -1,6 +1,7 @@
 use crate::{
-    db::{NewUserParams, ProjectId, SqliteTestDb as TestDb, UserId},
-    rpc::{Executor, Server},
+    db::{self, NewUserParams, TestDb, UserId},
+    executor::Executor,
+    rpc::{Server, RECONNECT_TIMEOUT},
     AppState,
 };
 use ::rpc::Peer;
@@ -16,7 +17,7 @@ use editor::{
     ToggleCodeActions, Undo,
 };
 use fs::{FakeFs, Fs as _, HomeDir, LineEnding};
-use futures::{channel::oneshot, Future, StreamExt as _};
+use futures::{channel::oneshot, StreamExt as _};
 use gpui::{
     executor::{self, Deterministic},
     geometry::vector::vec2f,
@@ -30,9 +31,7 @@ use language::{
 use live_kit_client::MacOSDisplay;
 use lsp::{self, FakeLanguageServer};
 use parking_lot::Mutex;
-use project::{
-    search::SearchQuery, DiagnosticSummary, Project, ProjectPath, ProjectStore, WorktreeId,
-};
+use project::{search::SearchQuery, DiagnosticSummary, Project, ProjectPath, WorktreeId};
 use rand::prelude::*;
 use serde_json::json;
 use settings::{Formatter, Settings};
@@ -46,12 +45,11 @@ use std::{
         atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
         Arc,
     },
-    time::Duration,
 };
 use theme::ThemeRegistry;
 use unindent::Unindent as _;
 use util::post_inc;
-use workspace::{shared_screen::SharedScreen, Item, SplitDirection, ToggleFollow, Workspace};
+use workspace::{item::Item, shared_screen::SharedScreen, SplitDirection, ToggleFollow, Workspace};
 
 #[ctor::ctor]
 fn init_logger() {
@@ -71,8 +69,6 @@ async fn test_basic_calls(
     deterministic.forbid_parking();
     let mut server = TestServer::start(cx_a.background()).await;
 
-    let start = std::time::Instant::now();
-
     let client_a = server.create_client(cx_a, "user_a").await;
     let client_b = server.create_client(cx_b, "user_b").await;
     let client_c = server.create_client(cx_c, "user_c").await;
@@ -104,7 +100,7 @@ async fn test_basic_calls(
     // User B receives the call.
     let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
     let call_b = incoming_call_b.next().await.unwrap().unwrap();
-    assert_eq!(call_b.caller.github_login, "user_a");
+    assert_eq!(call_b.calling_user.github_login, "user_a");
 
     // User B connects via another client and also receives a ring on the newly-connected client.
     let _client_b2 = server.create_client(cx_b2, "user_b").await;
@@ -112,7 +108,7 @@ async fn test_basic_calls(
     let mut incoming_call_b2 = active_call_b2.read_with(cx_b2, |call, _| call.incoming());
     deterministic.run_until_parked();
     let call_b2 = incoming_call_b2.next().await.unwrap().unwrap();
-    assert_eq!(call_b2.caller.github_login, "user_a");
+    assert_eq!(call_b2.calling_user.github_login, "user_a");
 
     // User B joins the room using the first client.
     active_call_b
@@ -165,7 +161,7 @@ async fn test_basic_calls(
 
     // User C receives the call, but declines it.
     let call_c = incoming_call_c.next().await.unwrap().unwrap();
-    assert_eq!(call_c.caller.github_login, "user_b");
+    assert_eq!(call_c.calling_user.github_login, "user_b");
     active_call_c.update(cx_c, |call, _| call.decline_incoming().unwrap());
     assert!(incoming_call_c.next().await.unwrap().is_none());
 
@@ -258,8 +254,6 @@ async fn test_basic_calls(
             pending: Default::default()
         }
     );
-
-    eprintln!("finished test {:?}", start.elapsed());
 }
 
 #[gpui::test(iterations = 10)]
@@ -308,7 +302,7 @@ async fn test_room_uniqueness(
     // User B receives the call from user A.
     let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
     let call_b1 = incoming_call_b.next().await.unwrap().unwrap();
-    assert_eq!(call_b1.caller.github_login, "user_a");
+    assert_eq!(call_b1.calling_user.github_login, "user_a");
 
     // Ensure calling users A and B from client C fails.
     active_call_c
@@ -367,11 +361,11 @@ async fn test_room_uniqueness(
         .unwrap();
     deterministic.run_until_parked();
     let call_b2 = incoming_call_b.next().await.unwrap().unwrap();
-    assert_eq!(call_b2.caller.github_login, "user_c");
+    assert_eq!(call_b2.calling_user.github_login, "user_c");
 }
 
 #[gpui::test(iterations = 10)]
-async fn test_leaving_room_on_disconnection(
+async fn test_disconnecting_from_room(
     deterministic: Arc<Deterministic>,
     cx_a: &mut TestAppContext,
     cx_b: &mut TestAppContext,
@@ -420,9 +414,29 @@ async fn test_leaving_room_on_disconnection(
         }
     );
 
+    // User A automatically reconnects to the room upon disconnection.
+    server.disconnect_client(client_a.peer_id().unwrap());
+    deterministic.advance_clock(RECEIVE_TIMEOUT);
+    deterministic.run_until_parked();
+    assert_eq!(
+        room_participants(&room_a, cx_a),
+        RoomParticipants {
+            remote: vec!["user_b".to_string()],
+            pending: Default::default()
+        }
+    );
+    assert_eq!(
+        room_participants(&room_b, cx_b),
+        RoomParticipants {
+            remote: vec!["user_a".to_string()],
+            pending: Default::default()
+        }
+    );
+
     // When user A disconnects, both client A and B clear their room on the active call.
+    server.forbid_connections();
     server.disconnect_client(client_a.peer_id().unwrap());
-    cx_a.foreground().advance_clock(rpc::RECEIVE_TIMEOUT);
+    deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
     active_call_a.read_with(cx_a, |call, _| assert!(call.room().is_none()));
     active_call_b.read_with(cx_b, |call, _| assert!(call.room().is_none()));
     assert_eq!(
@@ -440,6 +454,10 @@ async fn test_leaving_room_on_disconnection(
         }
     );
 
+    // Allow user A to reconnect to the server.
+    server.allow_connections();
+    deterministic.advance_clock(RECEIVE_TIMEOUT);
+
     // Call user B again from client A.
     active_call_a
         .update(cx_a, |call, cx| {
@@ -563,7 +581,7 @@ async fn test_calls_on_multiple_connections(
 
     // User B disconnects the client that is not on the call. Everything should be fine.
     client_b1.disconnect(&cx_b1.to_async()).unwrap();
-    deterministic.advance_clock(rpc::RECEIVE_TIMEOUT);
+    deterministic.advance_clock(RECEIVE_TIMEOUT);
     client_b1
         .authenticate_and_connect(false, &cx_b1.to_async())
         .await
@@ -622,12 +640,15 @@ async fn test_calls_on_multiple_connections(
     assert!(incoming_call_b2.next().await.unwrap().is_some());
 
     // User A disconnects, causing both connections to stop ringing.
+    server.forbid_connections();
     server.disconnect_client(client_a.peer_id().unwrap());
-    deterministic.advance_clock(rpc::RECEIVE_TIMEOUT);
+    deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
     assert!(incoming_call_b1.next().await.unwrap().is_none());
     assert!(incoming_call_b2.next().await.unwrap().is_none());
 
     // User A reconnects automatically, then calls user B again.
+    server.allow_connections();
+    deterministic.advance_clock(RECEIVE_TIMEOUT);
     active_call_a
         .update(cx_a, |call, cx| {
             call.invite(client_b1.user_id().unwrap(), None, cx)
@@ -642,7 +663,7 @@ async fn test_calls_on_multiple_connections(
     server.forbid_connections();
     server.disconnect_client(client_b1.peer_id().unwrap());
     server.disconnect_client(client_b2.peer_id().unwrap());
-    deterministic.advance_clock(rpc::RECEIVE_TIMEOUT);
+    deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
     active_call_a.read_with(cx_a, |call, _| assert!(call.room().is_none()));
 }
 
@@ -695,7 +716,7 @@ async fn test_share_project(
     let incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
     deterministic.run_until_parked();
     let call = incoming_call_b.borrow().clone().unwrap();
-    assert_eq!(call.caller.github_login, "user_a");
+    assert_eq!(call.calling_user.github_login, "user_a");
     let initial_project = call.initial_project.unwrap();
     active_call_b
         .update(cx_b, |call, cx| call.accept_incoming(cx))
@@ -766,7 +787,7 @@ async fn test_share_project(
     let incoming_call_c = active_call_c.read_with(cx_c, |call, _| call.incoming());
     deterministic.run_until_parked();
     let call = incoming_call_c.borrow().clone().unwrap();
-    assert_eq!(call.caller.github_login, "user_b");
+    assert_eq!(call.calling_user.github_login, "user_b");
     let initial_project = call.initial_project.unwrap();
     active_call_c
         .update(cx_c, |call, cx| call.accept_incoming(cx))
@@ -905,8 +926,15 @@ async fn test_host_disconnect(
     let project_b = client_b.build_remote_project(project_id, cx_b).await;
     assert!(worktree_a.read_with(cx_a, |tree, _| tree.as_local().unwrap().is_shared()));
 
-    let (_, workspace_b) =
-        cx_b.add_window(|cx| Workspace::new(project_b.clone(), |_, _| unimplemented!(), cx));
+    let (_, workspace_b) = cx_b.add_window(|cx| {
+        Workspace::new(
+            Default::default(),
+            0,
+            project_b.clone(),
+            |_, _| unimplemented!(),
+            cx,
+        )
+    });
     let editor_b = workspace_b
         .update(cx_b, |workspace, cx| {
             workspace.open_path((worktree_id, "b.txt"), None, true, cx)
@@ -925,8 +953,9 @@ async fn test_host_disconnect(
     assert!(cx_b.is_window_edited(workspace_b.window_id()));
 
     // Drop client A's connection. Collaborators should disappear and the project should not be shown as shared.
+    server.forbid_connections();
     server.disconnect_client(client_a.peer_id().unwrap());
-    deterministic.advance_clock(rpc::RECEIVE_TIMEOUT);
+    deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
     project_a
         .condition(cx_a, |project, _| project.collaborators().is_empty())
         .await;
@@ -949,6 +978,11 @@ async fn test_host_disconnect(
         .unwrap();
     assert!(can_close);
 
+    // Allow client A to reconnect to the server.
+    server.allow_connections();
+    deterministic.advance_clock(RECEIVE_TIMEOUT);
+
+    // Client B calls client A again after they reconnected.
     let active_call_b = cx_b.read(ActiveCall::global);
     active_call_b
         .update(cx_b, |call, cx| {
@@ -969,7 +1003,7 @@ async fn test_host_disconnect(
 
     // Drop client A's connection again. We should still unshare it successfully.
     server.disconnect_client(client_a.peer_id().unwrap());
-    deterministic.advance_clock(rpc::RECEIVE_TIMEOUT);
+    deterministic.advance_clock(RECEIVE_TIMEOUT);
     project_a.read_with(cx_a, |project, _| assert!(!project.is_shared()));
 }
 
@@ -2284,7 +2318,6 @@ async fn test_leaving_project(
             project_id,
             client_b.client.clone(),
             client_b.user_store.clone(),
-            client_b.project_store.clone(),
             client_b.language_registry.clone(),
             FakeFs::new(cx.background()),
             cx,
@@ -2296,7 +2329,7 @@ async fn test_leaving_project(
     // Simulate connection loss for client C and ensure client A observes client C leaving the project.
     client_c.wait_for_current_user(cx_c).await;
     server.disconnect_client(client_c.peer_id().unwrap());
-    cx_a.foreground().advance_clock(rpc::RECEIVE_TIMEOUT);
+    cx_a.foreground().advance_clock(RECEIVE_TIMEOUT);
     deterministic.run_until_parked();
     project_a.read_with(cx_a, |project, _| {
         assert_eq!(project.collaborators().len(), 0);
@@ -2408,12 +2441,6 @@ async fn test_collaborating_with_diagnostics(
 
     // Wait for server to see the diagnostics update.
     deterministic.run_until_parked();
-    {
-        let store = server.store.lock().await;
-        let project = store.project(ProjectId::from_proto(project_id)).unwrap();
-        let worktree = project.worktrees.get(&worktree_id.to_proto()).unwrap();
-        assert!(!worktree.diagnostic_summaries.is_empty());
-    }
 
     // Ensure client B observes the new diagnostics.
     project_b.read_with(cx_b, |project, cx| {
@@ -2435,7 +2462,10 @@ async fn test_collaborating_with_diagnostics(
 
     // Join project as client C and observe the diagnostics.
     let project_c = client_c.build_remote_project(project_id, cx_c).await;
-    let project_c_diagnostic_summaries = Rc::new(RefCell::new(Vec::new()));
+    let project_c_diagnostic_summaries =
+        Rc::new(RefCell::new(project_c.read_with(cx_c, |project, cx| {
+            project.diagnostic_summaries(cx).collect::<Vec<_>>()
+        })));
     project_c.update(cx_c, |_, cx| {
         let summaries = project_c_diagnostic_summaries.clone();
         cx.subscribe(&project_c, {
@@ -3701,8 +3731,15 @@ async fn test_collaborating_with_code_actions(
 
     // Join the project as client B.
     let project_b = client_b.build_remote_project(project_id, cx_b).await;
-    let (_window_b, workspace_b) =
-        cx_b.add_window(|cx| Workspace::new(project_b.clone(), |_, _| unimplemented!(), cx));
+    let (_window_b, workspace_b) = cx_b.add_window(|cx| {
+        Workspace::new(
+            Default::default(),
+            0,
+            project_b.clone(),
+            |_, _| unimplemented!(),
+            cx,
+        )
+    });
     let editor_b = workspace_b
         .update(cx_b, |workspace, cx| {
             workspace.open_path((worktree_id, "main.rs"), None, true, cx)
@@ -3922,8 +3959,15 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
         .unwrap();
     let project_b = client_b.build_remote_project(project_id, cx_b).await;
 
-    let (_window_b, workspace_b) =
-        cx_b.add_window(|cx| Workspace::new(project_b.clone(), |_, _| unimplemented!(), cx));
+    let (_window_b, workspace_b) = cx_b.add_window(|cx| {
+        Workspace::new(
+            Default::default(),
+            0,
+            project_b.clone(),
+            |_, _| unimplemented!(),
+            cx,
+        )
+    });
     let editor_b = workspace_b
         .update(cx_b, |workspace, cx| {
             workspace.open_path((worktree_id, "one.rs"), None, true, cx)
@@ -4176,18 +4220,21 @@ async fn test_contacts(
     cx_a: &mut TestAppContext,
     cx_b: &mut TestAppContext,
     cx_c: &mut TestAppContext,
+    cx_d: &mut TestAppContext,
 ) {
     cx_a.foreground().forbid_parking();
     let mut server = TestServer::start(cx_a.background()).await;
     let client_a = server.create_client(cx_a, "user_a").await;
     let client_b = server.create_client(cx_b, "user_b").await;
     let client_c = server.create_client(cx_c, "user_c").await;
+    let client_d = server.create_client(cx_d, "user_d").await;
     server
         .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)])
         .await;
     let active_call_a = cx_a.read(ActiveCall::global);
     let active_call_b = cx_b.read(ActiveCall::global);
     let active_call_c = cx_c.read(ActiveCall::global);
+    let _active_call_d = cx_d.read(ActiveCall::global);
 
     deterministic.run_until_parked();
     assert_eq!(
@@ -4211,10 +4258,11 @@ async fn test_contacts(
             ("user_b".to_string(), "online", "free")
         ]
     );
+    assert_eq!(contacts(&client_d, cx_d), []);
 
     server.disconnect_client(client_c.peer_id().unwrap());
     server.forbid_connections();
-    deterministic.advance_clock(rpc::RECEIVE_TIMEOUT);
+    deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
     assert_eq!(
         contacts(&client_a, cx_a),
         [
@@ -4230,6 +4278,7 @@ async fn test_contacts(
         ]
     );
     assert_eq!(contacts(&client_c, cx_c), []);
+    assert_eq!(contacts(&client_d, cx_d), []);
 
     server.allow_connections();
     client_c
@@ -4259,6 +4308,7 @@ async fn test_contacts(
             ("user_b".to_string(), "online", "free")
         ]
     );
+    assert_eq!(contacts(&client_d, cx_d), []);
 
     active_call_a
         .update(cx_a, |call, cx| {
@@ -4288,6 +4338,39 @@ async fn test_contacts(
             ("user_b".to_string(), "online", "busy")
         ]
     );
+    assert_eq!(contacts(&client_d, cx_d), []);
+
+    // Client B and client D become contacts while client B is being called.
+    server
+        .make_contacts(&mut [(&client_b, cx_b), (&client_d, cx_d)])
+        .await;
+    deterministic.run_until_parked();
+    assert_eq!(
+        contacts(&client_a, cx_a),
+        [
+            ("user_b".to_string(), "online", "busy"),
+            ("user_c".to_string(), "online", "free")
+        ]
+    );
+    assert_eq!(
+        contacts(&client_b, cx_b),
+        [
+            ("user_a".to_string(), "online", "busy"),
+            ("user_c".to_string(), "online", "free"),
+            ("user_d".to_string(), "online", "free"),
+        ]
+    );
+    assert_eq!(
+        contacts(&client_c, cx_c),
+        [
+            ("user_a".to_string(), "online", "busy"),
+            ("user_b".to_string(), "online", "busy")
+        ]
+    );
+    assert_eq!(
+        contacts(&client_d, cx_d),
+        [("user_b".to_string(), "online", "busy")]
+    );
 
     active_call_b.update(cx_b, |call, _| call.decline_incoming().unwrap());
     deterministic.run_until_parked();
@@ -4302,7 +4385,8 @@ async fn test_contacts(
         contacts(&client_b, cx_b),
         [
             ("user_a".to_string(), "online", "free"),
-            ("user_c".to_string(), "online", "free")
+            ("user_c".to_string(), "online", "free"),
+            ("user_d".to_string(), "online", "free")
         ]
     );
     assert_eq!(
@@ -4312,6 +4396,10 @@ async fn test_contacts(
             ("user_b".to_string(), "online", "free")
         ]
     );
+    assert_eq!(
+        contacts(&client_d, cx_d),
+        [("user_b".to_string(), "online", "free")]
+    );
 
     active_call_c
         .update(cx_c, |call, cx| {
@@ -4331,7 +4419,8 @@ async fn test_contacts(
         contacts(&client_b, cx_b),
         [
             ("user_a".to_string(), "online", "busy"),
-            ("user_c".to_string(), "online", "busy")
+            ("user_c".to_string(), "online", "busy"),
+            ("user_d".to_string(), "online", "free")
         ]
     );
     assert_eq!(
@@ -4341,6 +4430,10 @@ async fn test_contacts(
             ("user_b".to_string(), "online", "free")
         ]
     );
+    assert_eq!(
+        contacts(&client_d, cx_d),
+        [("user_b".to_string(), "online", "free")]
+    );
 
     active_call_a
         .update(cx_a, |call, cx| call.accept_incoming(cx))
@@ -4358,7 +4451,8 @@ async fn test_contacts(
         contacts(&client_b, cx_b),
         [
             ("user_a".to_string(), "online", "busy"),
-            ("user_c".to_string(), "online", "busy")
+            ("user_c".to_string(), "online", "busy"),
+            ("user_d".to_string(), "online", "free")
         ]
     );
     assert_eq!(
@@ -4368,6 +4462,10 @@ async fn test_contacts(
             ("user_b".to_string(), "online", "free")
         ]
     );
+    assert_eq!(
+        contacts(&client_d, cx_d),
+        [("user_b".to_string(), "online", "free")]
+    );
 
     active_call_a
         .update(cx_a, |call, cx| {
@@ -4387,7 +4485,8 @@ async fn test_contacts(
         contacts(&client_b, cx_b),
         [
             ("user_a".to_string(), "online", "busy"),
-            ("user_c".to_string(), "online", "busy")
+            ("user_c".to_string(), "online", "busy"),
+            ("user_d".to_string(), "online", "free")
         ]
     );
     assert_eq!(
@@ -4397,6 +4496,10 @@ async fn test_contacts(
             ("user_b".to_string(), "online", "busy")
         ]
     );
+    assert_eq!(
+        contacts(&client_d, cx_d),
+        [("user_b".to_string(), "online", "busy")]
+    );
 
     active_call_a.update(cx_a, |call, cx| call.hang_up(cx).unwrap());
     deterministic.run_until_parked();
@@ -4411,7 +4514,8 @@ async fn test_contacts(
         contacts(&client_b, cx_b),
         [
             ("user_a".to_string(), "online", "free"),
-            ("user_c".to_string(), "online", "free")
+            ("user_c".to_string(), "online", "free"),
+            ("user_d".to_string(), "online", "free")
         ]
     );
     assert_eq!(
@@ -4421,6 +4525,10 @@ async fn test_contacts(
             ("user_b".to_string(), "online", "free")
         ]
     );
+    assert_eq!(
+        contacts(&client_d, cx_d),
+        [("user_b".to_string(), "online", "free")]
+    );
 
     active_call_a
         .update(cx_a, |call, cx| {
@@ -4440,7 +4548,8 @@ async fn test_contacts(
         contacts(&client_b, cx_b),
         [
             ("user_a".to_string(), "online", "busy"),
-            ("user_c".to_string(), "online", "free")
+            ("user_c".to_string(), "online", "free"),
+            ("user_d".to_string(), "online", "free")
         ]
     );
     assert_eq!(
@@ -4450,16 +4559,21 @@ async fn test_contacts(
             ("user_b".to_string(), "online", "busy")
         ]
     );
+    assert_eq!(
+        contacts(&client_d, cx_d),
+        [("user_b".to_string(), "online", "busy")]
+    );
 
     server.forbid_connections();
     server.disconnect_client(client_a.peer_id().unwrap());
-    deterministic.advance_clock(rpc::RECEIVE_TIMEOUT);
+    deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
     assert_eq!(contacts(&client_a, cx_a), []);
     assert_eq!(
         contacts(&client_b, cx_b),
         [
             ("user_a".to_string(), "offline", "free"),
-            ("user_c".to_string(), "online", "free")
+            ("user_c".to_string(), "online", "free"),
+            ("user_d".to_string(), "online", "free")
         ]
     );
     assert_eq!(
@@ -4469,8 +4583,11 @@ async fn test_contacts(
             ("user_b".to_string(), "online", "free")
         ]
     );
+    assert_eq!(
+        contacts(&client_d, cx_d),
+        [("user_b".to_string(), "online", "free")]
+    );
 
-    #[allow(clippy::type_complexity)]
     fn contacts(
         client: &TestClient,
         cx: &TestAppContext,
@@ -4953,6 +5070,129 @@ async fn test_following(
     );
 }
 
+#[gpui::test]
+async fn test_following_tab_order(
+    deterministic: Arc<Deterministic>,
+    cx_a: &mut TestAppContext,
+    cx_b: &mut TestAppContext,
+) {
+    cx_a.update(editor::init);
+    cx_b.update(editor::init);
+
+    let mut server = TestServer::start(cx_a.background()).await;
+    let client_a = server.create_client(cx_a, "user_a").await;
+    let client_b = server.create_client(cx_b, "user_b").await;
+    server
+        .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+        .await;
+    let active_call_a = cx_a.read(ActiveCall::global);
+    let active_call_b = cx_b.read(ActiveCall::global);
+
+    client_a
+        .fs
+        .insert_tree(
+            "/a",
+            json!({
+                "1.txt": "one",
+                "2.txt": "two",
+                "3.txt": "three",
+            }),
+        )
+        .await;
+    let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
+    active_call_a
+        .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
+        .await
+        .unwrap();
+
+    let project_id = active_call_a
+        .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+        .await
+        .unwrap();
+    let project_b = client_b.build_remote_project(project_id, cx_b).await;
+    active_call_b
+        .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
+        .await
+        .unwrap();
+
+    let workspace_a = client_a.build_workspace(&project_a, cx_a);
+    let pane_a = workspace_a.read_with(cx_a, |workspace, _| workspace.active_pane().clone());
+
+    let workspace_b = client_b.build_workspace(&project_b, cx_b);
+    let pane_b = workspace_b.read_with(cx_b, |workspace, _| workspace.active_pane().clone());
+
+    let client_b_id = project_a.read_with(cx_a, |project, _| {
+        project.collaborators().values().next().unwrap().peer_id
+    });
+
+    // Open 1, 3 in that order on client A
+    workspace_a
+        .update(cx_a, |workspace, cx| {
+            workspace.open_path((worktree_id, "1.txt"), None, true, cx)
+        })
+        .await
+        .unwrap();
+    workspace_a
+        .update(cx_a, |workspace, cx| {
+            workspace.open_path((worktree_id, "3.txt"), None, true, cx)
+        })
+        .await
+        .unwrap();
+
+    let pane_paths = |pane: &ViewHandle<workspace::Pane>, cx: &mut TestAppContext| {
+        pane.update(cx, |pane, cx| {
+            pane.items()
+                .map(|item| {
+                    item.project_path(cx)
+                        .unwrap()
+                        .path
+                        .to_str()
+                        .unwrap()
+                        .to_owned()
+                })
+                .collect::<Vec<_>>()
+        })
+    };
+
+    // Verify that the tabs opened in the order we expect
+    assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt"]);
+
+    // Follow client B as client A
+    workspace_a
+        .update(cx_a, |workspace, cx| {
+            workspace
+                .toggle_follow(&ToggleFollow(client_b_id), cx)
+                .unwrap()
+        })
+        .await
+        .unwrap();
+
+    // Open just 2 on client B
+    workspace_b
+        .update(cx_b, |workspace, cx| {
+            workspace.open_path((worktree_id, "2.txt"), None, true, cx)
+        })
+        .await
+        .unwrap();
+    deterministic.run_until_parked();
+
+    // Verify that the newly opened followed file is at the end
+    assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt", "2.txt"]);
+
+    // Open just 1 on client B
+    workspace_b
+        .update(cx_b, |workspace, cx| {
+            workspace.open_path((worktree_id, "1.txt"), None, true, cx)
+        })
+        .await
+        .unwrap();
+    assert_eq!(&pane_paths(&pane_b, cx_b), &["2.txt", "1.txt"]);
+    deterministic.run_until_parked();
+
+    // Verify that following into 1 did not reorder client A's tabs
+    assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt", "2.txt"]);
+}
+
 #[gpui::test(iterations = 10)]
 async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
     cx_a.foreground().forbid_parking();
@@ -5422,7 +5662,6 @@ async fn test_random_collaboration(
 
     let mut clients = Vec::new();
     let mut user_ids = Vec::new();
-    let mut peer_ids = Vec::new();
     let mut op_start_signals = Vec::new();
     let mut next_entity_id = 100000;
 
@@ -5449,7 +5688,6 @@ async fn test_random_collaboration(
                 let op_start_signal = futures::channel::mpsc::unbounded();
                 let guest = server.create_client(&mut guest_cx, &guest_username).await;
                 user_ids.push(guest.current_user_id(&guest_cx));
-                peer_ids.push(guest.peer_id().unwrap());
                 op_start_signals.push(op_start_signal.0);
                 clients.push(guest_cx.foreground().spawn(guest.simulate(
                     guest_username.clone(),
@@ -5461,16 +5699,26 @@ async fn test_random_collaboration(
                 log::info!("Added connection for {}", guest_username);
                 operations += 1;
             }
-            20..=29 if clients.len() > 1 => {
+            20..=24 if clients.len() > 1 => {
                 let guest_ix = rng.lock().gen_range(1..clients.len());
-                log::info!("Removing guest {}", user_ids[guest_ix]);
+                log::info!(
+                    "Simulating full disconnection of guest {}",
+                    user_ids[guest_ix]
+                );
                 let removed_guest_id = user_ids.remove(guest_ix);
-                let removed_peer_id = peer_ids.remove(guest_ix);
+                let user_connection_ids = server
+                    .connection_pool
+                    .lock()
+                    .await
+                    .user_connection_ids(removed_guest_id)
+                    .collect::<Vec<_>>();
+                assert_eq!(user_connection_ids.len(), 1);
+                let removed_peer_id = PeerId(user_connection_ids[0].0);
                 let guest = clients.remove(guest_ix);
                 op_start_signals.remove(guest_ix);
                 server.forbid_connections();
                 server.disconnect_client(removed_peer_id);
-                deterministic.advance_clock(RECEIVE_TIMEOUT);
+                deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
                 deterministic.start_waiting();
                 log::info!("Waiting for guest {} to exit...", removed_guest_id);
                 let (guest, mut guest_cx) = guest.await;
@@ -5482,18 +5730,15 @@ async fn test_random_collaboration(
                 }
                 for user_id in &user_ids {
                     let contacts = server.app_state.db.get_contacts(*user_id).await.unwrap();
-                    let contacts = server
-                        .store
-                        .lock()
-                        .await
-                        .build_initial_contacts_update(contacts)
-                        .contacts;
+                    let pool = server.connection_pool.lock().await;
                     for contact in contacts {
-                        if contact.online {
-                            assert_ne!(
-                                contact.user_id, removed_guest_id.0 as u64,
-                                "removed guest is still a contact of another peer"
-                            );
+                        if let db::Contact::Accepted { user_id, .. } = contact {
+                            if pool.is_user_online(user_id) {
+                                assert_ne!(
+                                    user_id, removed_guest_id,
+                                    "removed guest is still a contact of another peer"
+                                );
+                            }
                         }
                     }
                 }
@@ -5507,6 +5752,22 @@ async fn test_random_collaboration(
 
                 operations += 1;
             }
+            25..=29 if clients.len() > 1 => {
+                let guest_ix = rng.lock().gen_range(1..clients.len());
+                let user_id = user_ids[guest_ix];
+                log::info!("Simulating temporary disconnection of guest {}", user_id);
+                let user_connection_ids = server
+                    .connection_pool
+                    .lock()
+                    .await
+                    .user_connection_ids(user_id)
+                    .collect::<Vec<_>>();
+                assert_eq!(user_connection_ids.len(), 1);
+                let peer_id = PeerId(user_connection_ids[0].0);
+                server.disconnect_client(peer_id);
+                deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
+                operations += 1;
+            }
             _ if !op_start_signals.is_empty() => {
                 while operations < max_operations && rng.lock().gen_bool(0.7) {
                     op_start_signals
@@ -5685,7 +5946,13 @@ impl TestServer {
     async fn start(background: Arc<executor::Background>) -> Self {
         static NEXT_LIVE_KIT_SERVER_ID: AtomicUsize = AtomicUsize::new(0);
 
-        let test_db = TestDb::new(background.clone());
+        let use_postgres = env::var("USE_POSTGRES").ok();
+        let use_postgres = use_postgres.as_deref();
+        let test_db = if use_postgres == Some("true") || use_postgres == Some("1") {
+            TestDb::postgres(background.clone())
+        } else {
+            TestDb::sqlite(background.clone())
+        };
         let live_kit_server_id = NEXT_LIVE_KIT_SERVER_ID.fetch_add(1, SeqCst);
         let live_kit_server = live_kit_client::TestServer::create(
             format!("http://livekit.{}.test", live_kit_server_id),
@@ -5789,7 +6056,7 @@ impl TestServer {
                                 client_name,
                                 user,
                                 Some(connection_id_tx),
-                                cx.background(),
+                                Executor::Deterministic(cx.background()),
                             ))
                             .detach();
                         let connection_id = connection_id_rx.await.unwrap();
@@ -5803,17 +6070,15 @@ impl TestServer {
 
         let fs = FakeFs::new(cx.background());
         let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http, cx));
-        let project_store = cx.add_model(|_| ProjectStore::new());
         let app_state = Arc::new(workspace::AppState {
             client: client.clone(),
             user_store: user_store.clone(),
-            project_store: project_store.clone(),
             languages: Arc::new(LanguageRegistry::new(Task::ready(()))),
             themes: ThemeRegistry::new((), cx.font_cache()),
             fs: fs.clone(),
             build_window_options: Default::default,
             initialize_workspace: |_, _, _| unimplemented!(),
-            default_item_factory: |_, _| unimplemented!(),
+            dock_default_item_factory: |_, _| unimplemented!(),
         });
 
         Project::init(&client);
@@ -5834,7 +6099,6 @@ impl TestServer {
             remote_projects: Default::default(),
             next_root_dir_id: 0,
             user_store,
-            project_store,
             fs,
             language_registry: Arc::new(LanguageRegistry::test()),
             buffers: Default::default(),
@@ -5929,6 +6193,7 @@ impl Deref for TestServer {
 impl Drop for TestServer {
     fn drop(&mut self) {
         self.peer.reset();
+        self.server.teardown();
         self.test_live_kit_server.teardown().unwrap();
     }
 }
@@ -5940,7 +6205,6 @@ struct TestClient {
     remote_projects: Vec<ModelHandle<Project>>,
     next_root_dir_id: usize,
     pub user_store: ModelHandle<UserStore>,
-    pub project_store: ModelHandle<ProjectStore>,
     language_registry: Arc<LanguageRegistry>,
     fs: Arc<FakeFs>,
     buffers: HashMap<ModelHandle<Project>, HashSet<ModelHandle<language::Buffer>>>,
@@ -6010,7 +6274,6 @@ impl TestClient {
             Project::local(
                 self.client.clone(),
                 self.user_store.clone(),
-                self.project_store.clone(),
                 self.language_registry.clone(),
                 self.fs.clone(),
                 cx,
@@ -6038,7 +6301,6 @@ impl TestClient {
                 host_project_id,
                 self.client.clone(),
                 self.user_store.clone(),
-                self.project_store.clone(),
                 self.language_registry.clone(),
                 FakeFs::new(cx.background()),
                 cx,
@@ -6054,7 +6316,13 @@ impl TestClient {
     ) -> ViewHandle<Workspace> {
         let (_, root_view) = cx.add_window(|_| EmptyView);
         cx.add_view(&root_view, |cx| {
-            Workspace::new(project.clone(), |_, _| unimplemented!(), cx)
+            Workspace::new(
+                Default::default(),
+                0,
+                project.clone(),
+                |_, _| unimplemented!(),
+                cx,
+            )
         })
     }
 
@@ -6168,7 +6436,6 @@ impl TestClient {
                             remote_project_id,
                             client.client.clone(),
                             client.user_store.clone(),
-                            client.project_store.clone(),
                             client.language_registry.clone(),
                             FakeFs::new(cx.background()),
                             cx.to_async(),
@@ -6187,11 +6454,14 @@ impl TestClient {
                         .clone()
                 }
             };
-            if let Err(error) = active_call
-                .update(cx, |call, cx| call.share_project(project.clone(), cx))
-                .await
-            {
-                log::error!("{}: error sharing project, {:?}", username, error);
+
+            if active_call.read_with(cx, |call, _| call.room().is_some()) {
+                if let Err(error) = active_call
+                    .update(cx, |call, cx| call.share_project(project.clone(), cx))
+                    .await
+                {
+                    log::error!("{}: error sharing project, {:?}", username, error);
+                }
             }
 
             let buffers = client.buffers.entry(project.clone()).or_default();
@@ -6418,7 +6688,7 @@ impl TestClient {
                         buffers.extend(search.await?.into_keys());
                     }
                 }
-                60..=69 => {
+                60..=79 => {
                     let worktree = project
                         .read_with(cx, |project, cx| {
                             project
@@ -6619,18 +6889,6 @@ impl Drop for TestClient {
     }
 }
 
-impl Executor for Arc<gpui::executor::Background> {
-    type Sleep = gpui::executor::Timer;
-
-    fn spawn_detached<F: 'static + Send + Future<Output = ()>>(&self, future: F) {
-        self.spawn(future).detach();
-    }
-
-    fn sleep(&self, duration: Duration) -> Self::Sleep {
-        self.as_ref().timer(duration)
-    }
-}
-
 #[derive(Debug, Eq, PartialEq)]
 struct RoomParticipants {
     remote: Vec<String>,

crates/collab/src/lib.rs 🔗

@@ -1,9 +1,22 @@
+pub mod api;
+pub mod auth;
+pub mod db;
+pub mod env;
+mod executor;
+#[cfg(test)]
+mod integration_tests;
+pub mod rpc;
+
 use axum::{http::StatusCode, response::IntoResponse};
+use db::Database;
+use serde::Deserialize;
+use std::{path::PathBuf, sync::Arc};
 
 pub type Result<T, E = Error> = std::result::Result<T, E>;
 
 pub enum Error {
     Http(StatusCode, String),
+    Database(sea_orm::error::DbErr),
     Internal(anyhow::Error),
 }
 
@@ -13,9 +26,9 @@ impl From<anyhow::Error> for Error {
     }
 }
 
-impl From<sqlx::Error> for Error {
-    fn from(error: sqlx::Error) -> Self {
-        Self::Internal(error.into())
+impl From<sea_orm::error::DbErr> for Error {
+    fn from(error: sea_orm::error::DbErr) -> Self {
+        Self::Database(error)
     }
 }
 
@@ -41,6 +54,9 @@ impl IntoResponse for Error {
     fn into_response(self) -> axum::response::Response {
         match self {
             Error::Http(code, message) => (code, message).into_response(),
+            Error::Database(error) => {
+                (StatusCode::INTERNAL_SERVER_ERROR, format!("{}", &error)).into_response()
+            }
             Error::Internal(error) => {
                 (StatusCode::INTERNAL_SERVER_ERROR, format!("{}", &error)).into_response()
             }
@@ -52,6 +68,7 @@ impl std::fmt::Debug for Error {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
             Error::Http(code, message) => (code, message).fmt(f),
+            Error::Database(error) => error.fmt(f),
             Error::Internal(error) => error.fmt(f),
         }
     }
@@ -61,9 +78,64 @@ impl std::fmt::Display for Error {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
             Error::Http(code, message) => write!(f, "{code}: {message}"),
+            Error::Database(error) => error.fmt(f),
             Error::Internal(error) => error.fmt(f),
         }
     }
 }
 
 impl std::error::Error for Error {}
+
+#[derive(Default, Deserialize)]
+pub struct Config {
+    pub http_port: u16,
+    pub database_url: String,
+    pub api_token: String,
+    pub invite_link_prefix: String,
+    pub live_kit_server: Option<String>,
+    pub live_kit_key: Option<String>,
+    pub live_kit_secret: Option<String>,
+    pub rust_log: Option<String>,
+    pub log_json: Option<bool>,
+}
+
+#[derive(Default, Deserialize)]
+pub struct MigrateConfig {
+    pub database_url: String,
+    pub migrations_path: Option<PathBuf>,
+}
+
+pub struct AppState {
+    pub db: Arc<Database>,
+    pub live_kit_client: Option<Arc<dyn live_kit_server::api::Client>>,
+    pub config: Config,
+}
+
+impl AppState {
+    pub async fn new(config: Config) -> Result<Arc<Self>> {
+        let mut db_options = db::ConnectOptions::new(config.database_url.clone());
+        db_options.max_connections(5);
+        let db = Database::new(db_options).await?;
+        let live_kit_client = if let Some(((server, key), secret)) = config
+            .live_kit_server
+            .as_ref()
+            .zip(config.live_kit_key.as_ref())
+            .zip(config.live_kit_secret.as_ref())
+        {
+            Some(Arc::new(live_kit_server::api::LiveKitClient::new(
+                server.clone(),
+                key.clone(),
+                secret.clone(),
+            )) as Arc<dyn live_kit_server::api::Client>)
+        } else {
+            None
+        };
+
+        let this = Self {
+            db: Arc::new(db),
+            live_kit_client,
+            config,
+        };
+        Ok(Arc::new(this))
+    }
+}
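
The new `Database` variant lets any `sea_orm::error::DbErr` bubble up through `?` in code that returns the crate's `Result`, ending up as a 500 via the `IntoResponse` impl above. A minimal sketch of that conversion:

    // Illustrative only: DbErr flows into Error::Database via the From impl above.
    fn into_app_error(db_result: Result<u64, sea_orm::error::DbErr>) -> Result<u64> {
        Ok(db_result?)
    }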

crates/collab/src/main.rs 🔗

@@ -1,86 +1,18 @@
-mod api;
-mod auth;
-mod db;
-mod env;
-mod rpc;
-
-#[cfg(test)]
-mod db_tests;
-#[cfg(test)]
-mod integration_tests;
-
-use crate::rpc::ResultExt as _;
 use anyhow::anyhow;
 use axum::{routing::get, Router};
-use collab::{Error, Result};
-use db::DefaultDb as Db;
-use serde::Deserialize;
+use collab::{db, env, AppState, Config, MigrateConfig, Result};
+use db::Database;
 use std::{
     env::args,
     net::{SocketAddr, TcpListener},
-    path::{Path, PathBuf},
-    sync::Arc,
-    time::Duration,
+    path::Path,
 };
-use tokio::signal;
 use tracing_log::LogTracer;
 use tracing_subscriber::{filter::EnvFilter, fmt::format::JsonFields, Layer};
 use util::ResultExt;
 
 const VERSION: &'static str = env!("CARGO_PKG_VERSION");
 
-#[derive(Default, Deserialize)]
-pub struct Config {
-    pub http_port: u16,
-    pub database_url: String,
-    pub api_token: String,
-    pub invite_link_prefix: String,
-    pub live_kit_server: Option<String>,
-    pub live_kit_key: Option<String>,
-    pub live_kit_secret: Option<String>,
-    pub rust_log: Option<String>,
-    pub log_json: Option<bool>,
-}
-
-#[derive(Default, Deserialize)]
-pub struct MigrateConfig {
-    pub database_url: String,
-    pub migrations_path: Option<PathBuf>,
-}
-
-pub struct AppState {
-    db: Arc<Db>,
-    live_kit_client: Option<Arc<dyn live_kit_server::api::Client>>,
-    config: Config,
-}
-
-impl AppState {
-    async fn new(config: Config) -> Result<Arc<Self>> {
-        let db = Db::new(&config.database_url, 5).await?;
-        let live_kit_client = if let Some(((server, key), secret)) = config
-            .live_kit_server
-            .as_ref()
-            .zip(config.live_kit_key.as_ref())
-            .zip(config.live_kit_secret.as_ref())
-        {
-            Some(Arc::new(live_kit_server::api::LiveKitClient::new(
-                server.clone(),
-                key.clone(),
-                secret.clone(),
-            )) as Arc<dyn live_kit_server::api::Client>)
-        } else {
-            None
-        };
-
-        let this = Self {
-            db: Arc::new(db),
-            live_kit_client,
-            config,
-        };
-        Ok(Arc::new(this))
-    }
-}
-
 #[tokio::main]
 async fn main() -> Result<()> {
     if let Err(error) = env::load_dotenv() {
@@ -96,7 +28,9 @@ async fn main() -> Result<()> {
         }
         Some("migrate") => {
             let config = envy::from_env::<MigrateConfig>().expect("error loading config");
-            let db = Db::new(&config.database_url, 5).await?;
+            let mut db_options = db::ConnectOptions::new(config.database_url.clone());
+            db_options.max_connections(5);
+            let db = Database::new(db_options).await?;
 
             let migrations_path = config
                 .migrations_path
@@ -118,18 +52,19 @@ async fn main() -> Result<()> {
             init_tracing(&config);
 
             let state = AppState::new(config).await?;
+            state.db.clear_stale_data().await?;
+
             let listener = TcpListener::bind(&format!("0.0.0.0:{}", state.config.http_port))
                 .expect("failed to bind TCP listener");
 
-            let rpc_server = rpc::Server::new(state.clone());
+            let rpc_server = collab::rpc::Server::new(state.clone());
 
-            let app = api::routes(rpc_server.clone(), state.clone())
-                .merge(rpc::routes(rpc_server.clone()))
+            let app = collab::api::routes(rpc_server.clone(), state.clone())
+                .merge(collab::rpc::routes(rpc_server.clone()))
                 .merge(Router::new().route("/", get(handle_root)));
 
             axum::Server::from_tcp(listener)?
                 .serve(app.into_make_service_with_connect_info::<SocketAddr>())
-                .with_graceful_shutdown(graceful_shutdown(rpc_server, state))
                 .await?;
         }
         _ => {
@@ -174,52 +109,3 @@ pub fn init_tracing(config: &Config) -> Option<()> {
 
     None
 }
-
-async fn graceful_shutdown(rpc_server: Arc<rpc::Server>, state: Arc<AppState>) {
-    let ctrl_c = async {
-        signal::ctrl_c()
-            .await
-            .expect("failed to install Ctrl+C handler");
-    };
-
-    #[cfg(unix)]
-    let terminate = async {
-        signal::unix::signal(signal::unix::SignalKind::terminate())
-            .expect("failed to install signal handler")
-            .recv()
-            .await;
-    };
-
-    #[cfg(not(unix))]
-    let terminate = std::future::pending::<()>();
-
-    tokio::select! {
-        _ = ctrl_c => {},
-        _ = terminate => {},
-    }
-
-    if let Some(live_kit) = state.live_kit_client.as_ref() {
-        let deletions = rpc_server
-            .store()
-            .await
-            .rooms()
-            .values()
-            .map(|room| {
-                let name = room.live_kit_room.clone();
-                async {
-                    live_kit.delete_room(name).await.trace_err();
-                }
-            })
-            .collect::<Vec<_>>();
-
-        tracing::info!("deleting all live-kit rooms");
-        if let Err(_) = tokio::time::timeout(
-            Duration::from_secs(10),
-            futures::future::join_all(deletions),
-        )
-        .await
-        {
-            tracing::error!("timed out waiting for live-kit room deletion");
-        }
-    }
-}

crates/collab/src/rpc.rs 🔗

@@ -1,8 +1,9 @@
-mod store;
+mod connection_pool;
 
 use crate::{
     auth,
-    db::{self, ProjectId, User, UserId},
+    db::{self, Database, ProjectId, RoomId, User, UserId},
+    executor::Executor,
     AppState, Result,
 };
 use anyhow::anyhow;
@@ -23,6 +24,7 @@ use axum::{
     Extension, Router, TypedHeader,
 };
 use collections::{HashMap, HashSet};
+pub use connection_pool::ConnectionPool;
 use futures::{
     channel::oneshot,
     future::{self, BoxFuture},
@@ -38,8 +40,10 @@ use rpc::{
 use serde::{Serialize, Serializer};
 use std::{
     any::TypeId,
+    fmt,
     future::Future,
     marker::PhantomData,
+    mem,
     net::SocketAddr,
     ops::{Deref, DerefMut},
     rc::Rc,
@@ -49,14 +53,12 @@ use std::{
     },
     time::Duration,
 };
-pub use store::{Store, Worktree};
-use tokio::{
-    sync::{Mutex, MutexGuard},
-    time::Sleep,
-};
+use tokio::sync::{watch, Mutex, MutexGuard};
 use tower::ServiceBuilder;
 use tracing::{info_span, instrument, Instrument};
 
+pub const RECONNECT_TIMEOUT: Duration = rpc::RECEIVE_TIMEOUT;
+
 lazy_static! {
     static ref METRIC_CONNECTIONS: IntGauge =
         register_int_gauge!("connections", "number of connections").unwrap();
@@ -68,10 +70,10 @@ lazy_static! {
 }
 
 type MessageHandler =
-    Box<dyn Send + Sync + Fn(Arc<Server>, Box<dyn AnyTypedEnvelope>) -> BoxFuture<'static, ()>>;
+    Box<dyn Send + Sync + Fn(Box<dyn AnyTypedEnvelope>, Session) -> BoxFuture<'static, ()>>;
 
 struct Response<R> {
-    server: Arc<Server>,
+    peer: Arc<Peer>,
     receipt: Receipt<R>,
     responded: Arc<AtomicBool>,
 }
@@ -79,29 +81,73 @@ struct Response<R> {
 impl<R: RequestMessage> Response<R> {
     fn send(self, payload: R::Response) -> Result<()> {
         self.responded.store(true, SeqCst);
-        self.server.peer.respond(self.receipt, payload)?;
+        self.peer.respond(self.receipt, payload)?;
         Ok(())
     }
 }
 
-pub struct Server {
+#[derive(Clone)]
+struct Session {
+    user_id: UserId,
+    connection_id: ConnectionId,
+    db: Arc<Mutex<DbHandle>>,
     peer: Arc<Peer>,
-    pub(crate) store: Mutex<Store>,
-    app_state: Arc<AppState>,
-    handlers: HashMap<TypeId, MessageHandler>,
+    connection_pool: Arc<Mutex<ConnectionPool>>,
+    live_kit_client: Option<Arc<dyn live_kit_server::api::Client>>,
+}
+
+impl Session {
+    async fn db(&self) -> MutexGuard<DbHandle> {
+        #[cfg(test)]
+        tokio::task::yield_now().await;
+        let guard = self.db.lock().await;
+        #[cfg(test)]
+        tokio::task::yield_now().await;
+        guard
+    }
+
+    async fn connection_pool(&self) -> ConnectionPoolGuard<'_> {
+        #[cfg(test)]
+        tokio::task::yield_now().await;
+        let guard = self.connection_pool.lock().await;
+        #[cfg(test)]
+        tokio::task::yield_now().await;
+        ConnectionPoolGuard {
+            guard,
+            _not_send: PhantomData,
+        }
+    }
+}
+
+impl fmt::Debug for Session {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Session")
+            .field("user_id", &self.user_id)
+            .field("connection_id", &self.connection_id)
+            .finish()
+    }
 }
 
-pub trait Executor: Send + Clone {
-    type Sleep: Send + Future;
-    fn spawn_detached<F: 'static + Send + Future<Output = ()>>(&self, future: F);
-    fn sleep(&self, duration: Duration) -> Self::Sleep;
+struct DbHandle(Arc<Database>);
+
+impl Deref for DbHandle {
+    type Target = Database;
+
+    fn deref(&self) -> &Self::Target {
+        self.0.as_ref()
+    }
 }
 
-#[derive(Clone)]
-pub struct RealExecutor;
+pub struct Server {
+    peer: Arc<Peer>,
+    pub(crate) connection_pool: Arc<Mutex<ConnectionPool>>,
+    app_state: Arc<AppState>,
+    handlers: HashMap<TypeId, MessageHandler>,
+    teardown: watch::Sender<()>,
+}
 
-pub(crate) struct StoreGuard<'a> {
-    guard: MutexGuard<'a, Store>,
+pub(crate) struct ConnectionPoolGuard<'a> {
+    guard: MutexGuard<'a, ConnectionPool>,
     _not_send: PhantomData<Rc<()>>,
 }
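
A small sketch of the `PhantomData<Rc<()>>` trick used by `ConnectionPoolGuard` above: embedding a zero-sized `!Send` marker makes the whole guard `!Send`, so it cannot be held across an await point that might move to another thread. `PoolGuard` below is a stand-in, not the crate's type.

    use std::marker::PhantomData;
    use std::rc::Rc;

    struct PoolGuard<'a> {
        data: &'a str,
        // Rc<()> is !Send, so PoolGuard is !Send even though `data` alone would be Send.
        _not_send: PhantomData<Rc<()>>,
    }

    fn require_send<T: Send>(_: T) {}

    fn main() {
        let pool = String::from("connection pool");
        let guard = PoolGuard {
            data: &pool,
            _not_send: PhantomData,
        };
        println!("holding {}", guard.data);
        require_send(guard.data); // the &str field on its own is Send
        // require_send(guard);   // fails to compile: `Rc<()>` cannot be sent between threads
    }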
 
@@ -109,7 +155,7 @@ pub(crate) struct StoreGuard<'a> {
 pub struct ServerSnapshot<'a> {
     peer: &'a Peer,
     #[serde(serialize_with = "serialize_deref")]
-    store: StoreGuard<'a>,
+    connection_pool: ConnectionPoolGuard<'a>,
 }
 
 pub fn serialize_deref<S, T, U>(value: &T, serializer: S) -> Result<S::Ok, S::Error>
@@ -126,81 +172,84 @@ impl Server {
         let mut server = Self {
             peer: Peer::new(),
             app_state,
-            store: Default::default(),
+            connection_pool: Default::default(),
             handlers: Default::default(),
+            teardown: watch::channel(()).0,
         };
 
         server
-            .add_request_handler(Server::ping)
-            .add_request_handler(Server::create_room)
-            .add_request_handler(Server::join_room)
-            .add_message_handler(Server::leave_room)
-            .add_request_handler(Server::call)
-            .add_request_handler(Server::cancel_call)
-            .add_message_handler(Server::decline_call)
-            .add_request_handler(Server::update_participant_location)
-            .add_request_handler(Server::share_project)
-            .add_message_handler(Server::unshare_project)
-            .add_request_handler(Server::join_project)
-            .add_message_handler(Server::leave_project)
-            .add_message_handler(Server::update_project)
-            .add_request_handler(Server::update_worktree)
-            .add_message_handler(Server::start_language_server)
-            .add_message_handler(Server::update_language_server)
-            .add_message_handler(Server::update_diagnostic_summary)
-            .add_request_handler(Server::forward_project_request::<proto::GetHover>)
-            .add_request_handler(Server::forward_project_request::<proto::GetDefinition>)
-            .add_request_handler(Server::forward_project_request::<proto::GetTypeDefinition>)
-            .add_request_handler(Server::forward_project_request::<proto::GetReferences>)
-            .add_request_handler(Server::forward_project_request::<proto::SearchProject>)
-            .add_request_handler(Server::forward_project_request::<proto::GetDocumentHighlights>)
-            .add_request_handler(Server::forward_project_request::<proto::GetProjectSymbols>)
-            .add_request_handler(Server::forward_project_request::<proto::OpenBufferForSymbol>)
-            .add_request_handler(Server::forward_project_request::<proto::OpenBufferById>)
-            .add_request_handler(Server::forward_project_request::<proto::OpenBufferByPath>)
-            .add_request_handler(Server::forward_project_request::<proto::GetCompletions>)
-            .add_request_handler(
-                Server::forward_project_request::<proto::ApplyCompletionAdditionalEdits>,
-            )
-            .add_request_handler(Server::forward_project_request::<proto::GetCodeActions>)
-            .add_request_handler(Server::forward_project_request::<proto::ApplyCodeAction>)
-            .add_request_handler(Server::forward_project_request::<proto::PrepareRename>)
-            .add_request_handler(Server::forward_project_request::<proto::PerformRename>)
-            .add_request_handler(Server::forward_project_request::<proto::ReloadBuffers>)
-            .add_request_handler(Server::forward_project_request::<proto::FormatBuffers>)
-            .add_request_handler(Server::forward_project_request::<proto::CreateProjectEntry>)
-            .add_request_handler(Server::forward_project_request::<proto::RenameProjectEntry>)
-            .add_request_handler(Server::forward_project_request::<proto::CopyProjectEntry>)
-            .add_request_handler(Server::forward_project_request::<proto::DeleteProjectEntry>)
-            .add_message_handler(Server::create_buffer_for_peer)
-            .add_request_handler(Server::update_buffer)
-            .add_message_handler(Server::update_buffer_file)
-            .add_message_handler(Server::buffer_reloaded)
-            .add_message_handler(Server::buffer_saved)
-            .add_request_handler(Server::save_buffer)
-            .add_request_handler(Server::get_users)
-            .add_request_handler(Server::fuzzy_search_users)
-            .add_request_handler(Server::request_contact)
-            .add_request_handler(Server::remove_contact)
-            .add_request_handler(Server::respond_to_contact_request)
-            .add_request_handler(Server::follow)
-            .add_message_handler(Server::unfollow)
-            .add_message_handler(Server::update_followers)
-            .add_message_handler(Server::update_diff_base)
-            .add_request_handler(Server::get_private_user_info);
+            .add_request_handler(ping)
+            .add_request_handler(create_room)
+            .add_request_handler(join_room)
+            .add_message_handler(leave_room)
+            .add_request_handler(call)
+            .add_request_handler(cancel_call)
+            .add_message_handler(decline_call)
+            .add_request_handler(update_participant_location)
+            .add_request_handler(share_project)
+            .add_message_handler(unshare_project)
+            .add_request_handler(join_project)
+            .add_message_handler(leave_project)
+            .add_request_handler(update_project)
+            .add_request_handler(update_worktree)
+            .add_message_handler(start_language_server)
+            .add_message_handler(update_language_server)
+            .add_message_handler(update_diagnostic_summary)
+            .add_request_handler(forward_project_request::<proto::GetHover>)
+            .add_request_handler(forward_project_request::<proto::GetDefinition>)
+            .add_request_handler(forward_project_request::<proto::GetTypeDefinition>)
+            .add_request_handler(forward_project_request::<proto::GetReferences>)
+            .add_request_handler(forward_project_request::<proto::SearchProject>)
+            .add_request_handler(forward_project_request::<proto::GetDocumentHighlights>)
+            .add_request_handler(forward_project_request::<proto::GetProjectSymbols>)
+            .add_request_handler(forward_project_request::<proto::OpenBufferForSymbol>)
+            .add_request_handler(forward_project_request::<proto::OpenBufferById>)
+            .add_request_handler(forward_project_request::<proto::OpenBufferByPath>)
+            .add_request_handler(forward_project_request::<proto::GetCompletions>)
+            .add_request_handler(forward_project_request::<proto::ApplyCompletionAdditionalEdits>)
+            .add_request_handler(forward_project_request::<proto::GetCodeActions>)
+            .add_request_handler(forward_project_request::<proto::ApplyCodeAction>)
+            .add_request_handler(forward_project_request::<proto::PrepareRename>)
+            .add_request_handler(forward_project_request::<proto::PerformRename>)
+            .add_request_handler(forward_project_request::<proto::ReloadBuffers>)
+            .add_request_handler(forward_project_request::<proto::FormatBuffers>)
+            .add_request_handler(forward_project_request::<proto::CreateProjectEntry>)
+            .add_request_handler(forward_project_request::<proto::RenameProjectEntry>)
+            .add_request_handler(forward_project_request::<proto::CopyProjectEntry>)
+            .add_request_handler(forward_project_request::<proto::DeleteProjectEntry>)
+            .add_message_handler(create_buffer_for_peer)
+            .add_request_handler(update_buffer)
+            .add_message_handler(update_buffer_file)
+            .add_message_handler(buffer_reloaded)
+            .add_message_handler(buffer_saved)
+            .add_request_handler(save_buffer)
+            .add_request_handler(get_users)
+            .add_request_handler(fuzzy_search_users)
+            .add_request_handler(request_contact)
+            .add_request_handler(remove_contact)
+            .add_request_handler(respond_to_contact_request)
+            .add_request_handler(follow)
+            .add_message_handler(unfollow)
+            .add_message_handler(update_followers)
+            .add_message_handler(update_diff_base)
+            .add_request_handler(get_private_user_info);
 
         Arc::new(server)
     }
 
-    fn add_message_handler<F, Fut, M>(&mut self, handler: F) -> &mut Self
+    pub fn teardown(&self) {
+        let _ = self.teardown.send(());
+    }
+
+    fn add_handler<F, Fut, M>(&mut self, handler: F) -> &mut Self
     where
-        F: 'static + Send + Sync + Fn(Arc<Self>, TypedEnvelope<M>) -> Fut,
+        F: 'static + Send + Sync + Fn(TypedEnvelope<M>, Session) -> Fut,
         Fut: 'static + Send + Future<Output = Result<()>>,
         M: EnvelopedMessage,
     {
         let prev_handler = self.handlers.insert(
             TypeId::of::<M>(),
-            Box::new(move |server, envelope| {
+            Box::new(move |envelope, session| {
                 let envelope = envelope.into_any().downcast::<TypedEnvelope<M>>().unwrap();
                 let span = info_span!(
                     "handle message",
@@ -212,7 +261,7 @@ impl Server {
                         "message received"
                     );
                 });
-                let future = (handler)(server, *envelope);
+                let future = (handler)(*envelope, session);
                 async move {
                     if let Err(error) = future.await {
                         tracing::error!(%error, "error handling message");
@@ -228,26 +277,35 @@ impl Server {
         self
     }
 
-    /// Handle a request while holding a lock to the store. This is useful when we're registering
-    /// a connection but we want to respond on the connection before anybody else can send on it.
+    fn add_message_handler<F, Fut, M>(&mut self, handler: F) -> &mut Self
+    where
+        F: 'static + Send + Sync + Fn(M, Session) -> Fut,
+        Fut: 'static + Send + Future<Output = Result<()>>,
+        M: EnvelopedMessage,
+    {
+        self.add_handler(move |envelope, session| handler(envelope.payload, session));
+        self
+    }
+
     fn add_request_handler<F, Fut, M>(&mut self, handler: F) -> &mut Self
     where
-        F: 'static + Send + Sync + Fn(Arc<Self>, TypedEnvelope<M>, Response<M>) -> Fut,
+        F: 'static + Send + Sync + Fn(M, Response<M>, Session) -> Fut,
         Fut: Send + Future<Output = Result<()>>,
         M: RequestMessage,
     {
         let handler = Arc::new(handler);
-        self.add_message_handler(move |server, envelope| {
+        self.add_handler(move |envelope, session| {
             let receipt = envelope.receipt();
             let handler = handler.clone();
             async move {
+                let peer = session.peer.clone();
                 let responded = Arc::new(AtomicBool::default());
                 let response = Response {
-                    server: server.clone(),
+                    peer: peer.clone(),
                     responded: responded.clone(),
-                    receipt: envelope.receipt(),
+                    receipt,
                 };
-                match (handler)(server.clone(), envelope, response).await {
+                match (handler)(envelope.payload, response, session).await {
                     Ok(()) => {
                         if responded.load(std::sync::atomic::Ordering::SeqCst) {
                             Ok(())
@@ -256,7 +314,7 @@ impl Server {
                         }
                     }
                     Err(error) => {
-                        server.peer.respond_with_error(
+                        peer.respond_with_error(
                             receipt,
                             proto::Error {
                                 message: error.to_string(),
@@ -269,29 +327,25 @@ impl Server {
         })
     }
 
-    pub fn handle_connection<E: Executor>(
+    pub fn handle_connection(
         self: &Arc<Self>,
         connection: Connection,
         address: String,
         user: User,
         mut send_connection_id: Option<oneshot::Sender<ConnectionId>>,
-        executor: E,
+        executor: Executor,
     ) -> impl Future<Output = Result<()>> {
-        let mut this = self.clone();
+        let this = self.clone();
         let user_id = user.id;
         let login = user.github_login;
         let span = info_span!("handle connection", %user_id, %login, %address);
+        let teardown = self.teardown.subscribe();
         async move {
             let (connection_id, handle_io, mut incoming_rx) = this
                 .peer
                 .add_connection(connection, {
                     let executor = executor.clone();
-                    move |duration| {
-                        let timer = executor.sleep(duration);
-                        async move {
-                            timer.await;
-                        }
-                    }
+                    move |duration| executor.sleep(duration)
                 });
 
             tracing::info!(%user_id, %login, %connection_id, %address, "connection opened");
@@ -313,22 +367,31 @@ impl Server {
             ).await?;
 
             {
-                let mut store = this.store().await;
-                let incoming_call = store.add_connection(connection_id, user_id, user.admin);
-                if let Some(incoming_call) = incoming_call {
-                    this.peer.send(connection_id, incoming_call)?;
-                }
-
-                this.peer.send(connection_id, store.build_initial_contacts_update(contacts))?;
+                let mut pool = this.connection_pool.lock().await;
+                pool.add_connection(connection_id, user_id, user.admin);
+                this.peer.send(connection_id, build_initial_contacts_update(contacts, &pool))?;
 
                 if let Some((code, count)) = invite_code {
                     this.peer.send(connection_id, proto::UpdateInviteInfo {
                         url: format!("{}{}", this.app_state.config.invite_link_prefix, code),
-                        count,
+                        count: count as u32,
                     })?;
                 }
             }
-            this.update_user_contacts(user_id).await?;
+
+            if let Some(incoming_call) = this.app_state.db.incoming_call_for_user(user_id).await? {
+                this.peer.send(connection_id, incoming_call)?;
+            }
+
+            let session = Session {
+                user_id,
+                connection_id,
+                db: Arc::new(Mutex::new(DbHandle(this.app_state.db.clone()))),
+                peer: this.peer.clone(),
+                connection_pool: this.connection_pool.clone(),
+                live_kit_client: this.app_state.live_kit_client.clone()
+            };
+            update_user_contacts(user_id, &session).await?;
 
             let handle_io = handle_io.fuse();
             futures::pin_mut!(handle_io);
@@ -360,7 +423,7 @@ impl Server {
                             let span_enter = span.enter();
                             if let Some(handler) = this.handlers.get(&message.payload_type_id()) {
                                 let is_background = message.is_background();
-                                let handle_message = (handler)(this.clone(), message);
+                                let handle_message = (handler)(message, session.clone());
                                 drop(span_enter);
 
                                 let handle_message = handle_message.instrument(span);
@@ -382,7 +445,7 @@ impl Server {
 
             drop(foreground_message_handlers);
             tracing::info!(%user_id, %login, %connection_id, %address, "signing out");
-            if let Err(error) = this.sign_out(connection_id).await {
+            if let Err(error) = sign_out(session, teardown, executor).await {
                 tracing::error!(%user_id, %login, %connection_id, %address, ?error, "error signing out");
             }
 
@@ -390,78 +453,6 @@ impl Server {
         }.instrument(span)
     }
 
-    #[instrument(skip(self), err)]
-    async fn sign_out(self: &mut Arc<Self>, connection_id: ConnectionId) -> Result<()> {
-        self.peer.disconnect(connection_id);
-
-        let mut projects_to_unshare = Vec::new();
-        let mut contacts_to_update = HashSet::default();
-        let mut room_left = None;
-        {
-            let mut store = self.store().await;
-
-            #[cfg(test)]
-            let removed_connection = store.remove_connection(connection_id).unwrap();
-            #[cfg(not(test))]
-            let removed_connection = store.remove_connection(connection_id)?;
-
-            for project in removed_connection.hosted_projects {
-                projects_to_unshare.push(project.id);
-                broadcast(connection_id, project.guests.keys().copied(), |conn_id| {
-                    self.peer.send(
-                        conn_id,
-                        proto::UnshareProject {
-                            project_id: project.id.to_proto(),
-                        },
-                    )
-                });
-            }
-
-            for project in removed_connection.guest_projects {
-                broadcast(connection_id, project.connection_ids, |conn_id| {
-                    self.peer.send(
-                        conn_id,
-                        proto::RemoveProjectCollaborator {
-                            project_id: project.id.to_proto(),
-                            peer_id: connection_id.0,
-                        },
-                    )
-                });
-            }
-
-            if let Some(room) = removed_connection.room {
-                self.room_updated(&room);
-                room_left = Some(self.room_left(&room, connection_id));
-            }
-
-            contacts_to_update.insert(removed_connection.user_id);
-            for connection_id in removed_connection.canceled_call_connection_ids {
-                self.peer
-                    .send(connection_id, proto::CallCanceled {})
-                    .trace_err();
-                contacts_to_update.extend(store.user_id_for_connection(connection_id).ok());
-            }
-        };
-
-        if let Some(room_left) = room_left {
-            room_left.await.trace_err();
-        }
-
-        for user_id in contacts_to_update {
-            self.update_user_contacts(user_id).await.trace_err();
-        }
-
-        for project_id in projects_to_unshare {
-            self.app_state
-                .db
-                .unregister_project(project_id)
-                .await
-                .trace_err();
-        }
-
-        Ok(())
-    }
-
     pub async fn invite_code_redeemed(
         self: &Arc<Self>,
         inviter_id: UserId,
@@ -469,9 +460,9 @@ impl Server {
     ) -> Result<()> {
         if let Some(user) = self.app_state.db.get_user_by_id(inviter_id).await? {
             if let Some(code) = &user.invite_code {
-                let store = self.store().await;
-                let invitee_contact = store.contact_for_user(invitee_id, true);
-                for connection_id in store.connection_ids_for_user(inviter_id) {
+                let pool = self.connection_pool.lock().await;
+                let invitee_contact = contact_for_user(invitee_id, true, false, &pool);
+                for connection_id in pool.user_connection_ids(inviter_id) {
                     self.peer.send(
                         connection_id,
                         proto::UpdateContacts {
@@ -495,8 +486,8 @@ impl Server {
     pub async fn invite_count_updated(self: &Arc<Self>, user_id: UserId) -> Result<()> {
         if let Some(user) = self.app_state.db.get_user_by_id(user_id).await? {
             if let Some(invite_code) = &user.invite_code {
-                let store = self.store().await;
-                for connection_id in store.connection_ids_for_user(user_id) {
+                let pool = self.connection_pool.lock().await;
+                for connection_id in pool.user_connection_ids(user_id) {
                     self.peer.send(
                         connection_id,
                         proto::UpdateInviteInfo {
@@ -513,1263 +504,1214 @@ impl Server {
         Ok(())
     }
 
-    async fn ping(
-        self: Arc<Server>,
-        _: TypedEnvelope<proto::Ping>,
-        response: Response<proto::Ping>,
-    ) -> Result<()> {
-        response.send(proto::Ack {})?;
-        Ok(())
-    }
-
-    async fn create_room(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::CreateRoom>,
-        response: Response<proto::CreateRoom>,
-    ) -> Result<()> {
-        let user_id;
-        let room;
-        {
-            let mut store = self.store().await;
-            user_id = store.user_id_for_connection(request.sender_id)?;
-            room = store.create_room(request.sender_id)?.clone();
+    pub async fn snapshot<'a>(self: &'a Arc<Self>) -> ServerSnapshot<'a> {
+        ServerSnapshot {
+            connection_pool: ConnectionPoolGuard {
+                guard: self.connection_pool.lock().await,
+                _not_send: PhantomData,
+            },
+            peer: &self.peer,
         }
+    }
+}
 
-        let live_kit_connection_info =
-            if let Some(live_kit) = self.app_state.live_kit_client.as_ref() {
-                if let Some(_) = live_kit
-                    .create_room(room.live_kit_room.clone())
-                    .await
-                    .trace_err()
-                {
-                    if let Some(token) = live_kit
-                        .room_token(&room.live_kit_room, &request.sender_id.to_string())
-                        .trace_err()
-                    {
-                        Some(proto::LiveKitConnectionInfo {
-                            server_url: live_kit.url().into(),
-                            token,
-                        })
-                    } else {
-                        None
-                    }
-                } else {
-                    None
-                }
-            } else {
-                None
-            };
+impl<'a> Deref for ConnectionPoolGuard<'a> {
+    type Target = ConnectionPool;
 
-        response.send(proto::CreateRoomResponse {
-            room: Some(room),
-            live_kit_connection_info,
-        })?;
-        self.update_user_contacts(user_id).await?;
-        Ok(())
+    fn deref(&self) -> &Self::Target {
+        &*self.guard
     }
+}
 
-    async fn join_room(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::JoinRoom>,
-        response: Response<proto::JoinRoom>,
-    ) -> Result<()> {
-        let user_id;
-        {
-            let mut store = self.store().await;
-            user_id = store.user_id_for_connection(request.sender_id)?;
-            let (room, recipient_connection_ids) =
-                store.join_room(request.payload.id, request.sender_id)?;
-            for recipient_id in recipient_connection_ids {
-                self.peer
-                    .send(recipient_id, proto::CallCanceled {})
-                    .trace_err();
-            }
+impl<'a> DerefMut for ConnectionPoolGuard<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut *self.guard
+    }
+}
 
-            let live_kit_connection_info =
-                if let Some(live_kit) = self.app_state.live_kit_client.as_ref() {
-                    if let Some(token) = live_kit
-                        .room_token(&room.live_kit_room, &request.sender_id.to_string())
-                        .trace_err()
-                    {
-                        Some(proto::LiveKitConnectionInfo {
-                            server_url: live_kit.url().into(),
-                            token,
-                        })
-                    } else {
-                        None
-                    }
-                } else {
-                    None
-                };
+impl<'a> Drop for ConnectionPoolGuard<'a> {
+    fn drop(&mut self) {
+        #[cfg(test)]
+        self.check_invariants();
+    }
+}
 
-            response.send(proto::JoinRoomResponse {
-                room: Some(room.clone()),
-                live_kit_connection_info,
-            })?;
-            self.room_updated(room);
+fn broadcast<F>(
+    sender_id: ConnectionId,
+    receiver_ids: impl IntoIterator<Item = ConnectionId>,
+    mut f: F,
+) where
+    F: FnMut(ConnectionId) -> anyhow::Result<()>,
+{
+    for receiver_id in receiver_ids {
+        if receiver_id != sender_id {
+            f(receiver_id).trace_err();
         }
-        self.update_user_contacts(user_id).await?;
-        Ok(())
     }
+}
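
For illustration, a std-only sketch of how a fan-out helper like `broadcast` above is used: send to every receiver except the originating connection, and log failures without aborting the loop. `ConnectionId` and the `String` error are stand-ins for the crate's types (the real closure returns `anyhow::Result`).

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct ConnectionId(u32);

    fn broadcast(
        sender_id: ConnectionId,
        receiver_ids: impl IntoIterator<Item = ConnectionId>,
        mut send: impl FnMut(ConnectionId) -> Result<(), String>,
    ) {
        for receiver_id in receiver_ids {
            if receiver_id != sender_id {
                // Log and continue rather than aborting the fan-out on the first error.
                if let Err(error) = send(receiver_id) {
                    eprintln!("failed to send to {receiver_id:?}: {error}");
                }
            }
        }
    }

    fn main() {
        let sender = ConnectionId(1);
        let receivers = [ConnectionId(1), ConnectionId(2), ConnectionId(3)];
        broadcast(sender, receivers, |receiver| {
            println!("forwarding message to {receiver:?}");
            Ok(())
        });
    }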
 
-    async fn leave_room(self: Arc<Server>, message: TypedEnvelope<proto::LeaveRoom>) -> Result<()> {
-        let mut contacts_to_update = HashSet::default();
-        let room_left;
-        {
-            let mut store = self.store().await;
-            let user_id = store.user_id_for_connection(message.sender_id)?;
-            let left_room = store.leave_room(message.payload.id, message.sender_id)?;
-            contacts_to_update.insert(user_id);
+lazy_static! {
+    static ref ZED_PROTOCOL_VERSION: HeaderName = HeaderName::from_static("x-zed-protocol-version");
+}
 
-            for project in left_room.unshared_projects {
-                for connection_id in project.connection_ids() {
-                    self.peer.send(
-                        connection_id,
-                        proto::UnshareProject {
-                            project_id: project.id.to_proto(),
-                        },
-                    )?;
-                }
-            }
+pub struct ProtocolVersion(u32);
 
-            for project in left_room.left_projects {
-                if project.remove_collaborator {
-                    for connection_id in project.connection_ids {
-                        self.peer.send(
-                            connection_id,
-                            proto::RemoveProjectCollaborator {
-                                project_id: project.id.to_proto(),
-                                peer_id: message.sender_id.0,
-                            },
-                        )?;
-                    }
+impl Header for ProtocolVersion {
+    fn name() -> &'static HeaderName {
+        &ZED_PROTOCOL_VERSION
+    }
 
-                    self.peer.send(
-                        message.sender_id,
-                        proto::UnshareProject {
-                            project_id: project.id.to_proto(),
-                        },
-                    )?;
-                }
-            }
+    fn decode<'i, I>(values: &mut I) -> Result<Self, axum::headers::Error>
+    where
+        Self: Sized,
+        I: Iterator<Item = &'i axum::http::HeaderValue>,
+    {
+        let version = values
+            .next()
+            .ok_or_else(axum::headers::Error::invalid)?
+            .to_str()
+            .map_err(|_| axum::headers::Error::invalid())?
+            .parse()
+            .map_err(|_| axum::headers::Error::invalid())?;
+        Ok(Self(version))
+    }
 
-            self.room_updated(&left_room.room);
-            room_left = self.room_left(&left_room.room, message.sender_id);
+    fn encode<E: Extend<axum::http::HeaderValue>>(&self, values: &mut E) {
+        values.extend([self.0.to_string().parse().unwrap()]);
+    }
+}
 
-            for connection_id in left_room.canceled_call_connection_ids {
-                self.peer
-                    .send(connection_id, proto::CallCanceled {})
-                    .trace_err();
-                contacts_to_update.extend(store.user_id_for_connection(connection_id).ok());
-            }
-        }
+pub fn routes(server: Arc<Server>) -> Router<Body> {
+    Router::new()
+        .route("/rpc", get(handle_websocket_request))
+        .layer(
+            ServiceBuilder::new()
+                .layer(Extension(server.app_state.clone()))
+                .layer(middleware::from_fn(auth::validate_header)),
+        )
+        .route("/metrics", get(handle_metrics))
+        .layer(Extension(server))
+}
 
-        room_left.await.trace_err();
-        for user_id in contacts_to_update {
-            self.update_user_contacts(user_id).await?;
+pub async fn handle_websocket_request(
+    TypedHeader(ProtocolVersion(protocol_version)): TypedHeader<ProtocolVersion>,
+    ConnectInfo(socket_address): ConnectInfo<SocketAddr>,
+    Extension(server): Extension<Arc<Server>>,
+    Extension(user): Extension<User>,
+    ws: WebSocketUpgrade,
+) -> axum::response::Response {
+    if protocol_version != rpc::PROTOCOL_VERSION {
+        return (
+            StatusCode::UPGRADE_REQUIRED,
+            "client must be upgraded".to_string(),
+        )
+            .into_response();
+    }
+    let socket_address = socket_address.to_string();
+    ws.on_upgrade(move |socket| {
+        use util::ResultExt;
+        let socket = socket
+            .map_ok(to_tungstenite_message)
+            .err_into()
+            .with(|message| async move { Ok(to_axum_message(message)) });
+        let connection = Connection::new(Box::pin(socket));
+        async move {
+            server
+                .handle_connection(connection, socket_address, user, None, Executor::Production)
+                .await
+                .log_err();
         }
+    })
+}
 
-        Ok(())
-    }
+pub async fn handle_metrics(Extension(server): Extension<Arc<Server>>) -> Result<String> {
+    let connections = server
+        .connection_pool
+        .lock()
+        .await
+        .connections()
+        .filter(|connection| !connection.admin)
+        .count();
 
-    async fn call(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::Call>,
-        response: Response<proto::Call>,
-    ) -> Result<()> {
-        let caller_user_id = self
-            .store()
-            .await
-            .user_id_for_connection(request.sender_id)?;
-        let recipient_user_id = UserId::from_proto(request.payload.recipient_user_id);
-        let initial_project_id = request
-            .payload
-            .initial_project_id
-            .map(ProjectId::from_proto);
-        if !self
-            .app_state
-            .db
-            .has_contact(caller_user_id, recipient_user_id)
-            .await?
-        {
-            return Err(anyhow!("cannot call a user who isn't a contact"))?;
-        }
+    METRIC_CONNECTIONS.set(connections as _);
 
-        let room_id = request.payload.room_id;
-        let mut calls = {
-            let mut store = self.store().await;
-            let (room, recipient_connection_ids, incoming_call) = store.call(
-                room_id,
-                recipient_user_id,
-                initial_project_id,
-                request.sender_id,
-            )?;
-            self.room_updated(room);
-            recipient_connection_ids
-                .into_iter()
-                .map(|recipient_connection_id| {
-                    self.peer
-                        .request(recipient_connection_id, incoming_call.clone())
-                })
-                .collect::<FuturesUnordered<_>>()
-        };
-        self.update_user_contacts(recipient_user_id).await?;
+    let shared_projects = server.app_state.db.project_count_excluding_admins().await?;
+    METRIC_SHARED_PROJECTS.set(shared_projects as _);
 
-        while let Some(call_response) = calls.next().await {
-            match call_response.as_ref() {
-                Ok(_) => {
-                    response.send(proto::Ack {})?;
-                    return Ok(());
-                }
-                Err(_) => {
-                    call_response.trace_err();
-                }
-            }
-        }
+    let encoder = prometheus::TextEncoder::new();
+    let metric_families = prometheus::gather();
+    let encoded_metrics = encoder
+        .encode_to_string(&metric_families)
+        .map_err(|err| anyhow!("{}", err))?;
+    Ok(encoded_metrics)
+}
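
A standalone sketch of the Prometheus plumbing `handle_metrics` relies on, assuming the prometheus crate: register a gauge against the default registry, set it, then gather and encode every registered metric into the text exposition format.

    use prometheus::{register_int_gauge, Encoder, TextEncoder};

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        // Registers against the default global registry, like the lazy_static gauges above.
        let connections = register_int_gauge!("connections", "number of connections")?;
        connections.set(42);

        let metric_families = prometheus::gather();
        let mut buffer = Vec::new();
        TextEncoder::new().encode(&metric_families, &mut buffer)?;
        println!("{}", String::from_utf8(buffer)?);
        Ok(())
    }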
 
-        {
-            let mut store = self.store().await;
-            let room = store.call_failed(room_id, recipient_user_id)?;
-            self.room_updated(&room);
+#[instrument(err, skip(executor))]
+async fn sign_out(
+    session: Session,
+    mut teardown: watch::Receiver<()>,
+    executor: Executor,
+) -> Result<()> {
+    session.peer.disconnect(session.connection_id);
+    session
+        .connection_pool()
+        .await
+        .remove_connection(session.connection_id)?;
+
+    if let Some(mut left_projects) = session
+        .db()
+        .await
+        .connection_lost(session.connection_id)
+        .await
+        .trace_err()
+    {
+        for left_project in mem::take(&mut *left_projects) {
+            project_left(&left_project, &session);
         }
-        self.update_user_contacts(recipient_user_id).await?;
-
-        Err(anyhow!("failed to ring call recipient"))?
     }
 
-    async fn cancel_call(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::CancelCall>,
-        response: Response<proto::CancelCall>,
-    ) -> Result<()> {
-        let recipient_user_id = UserId::from_proto(request.payload.recipient_user_id);
-        {
-            let mut store = self.store().await;
-            let (room, recipient_connection_ids) = store.cancel_call(
-                request.payload.room_id,
-                recipient_user_id,
-                request.sender_id,
-            )?;
-            for recipient_id in recipient_connection_ids {
-                self.peer
-                    .send(recipient_id, proto::CallCanceled {})
-                    .trace_err();
+    futures::select_biased! {
+        _ = executor.sleep(RECONNECT_TIMEOUT).fuse() => {
+            leave_room_for_session(&session).await.trace_err();
+
+            if !session
+                .connection_pool()
+                .await
+                .is_user_online(session.user_id)
+            {
+                let db = session.db().await;
+                if let Some(room) = db.decline_call(None, session.user_id).await.trace_err() {
+                    room_updated(&room, &session);
+                }
             }
-            self.room_updated(room);
-            response.send(proto::Ack {})?;
+            update_user_contacts(session.user_id, &session).await?;
         }
-        self.update_user_contacts(recipient_user_id).await?;
-        Ok(())
+        _ = teardown.changed().fuse() => {}
     }
 
-    async fn decline_call(
-        self: Arc<Server>,
-        message: TypedEnvelope<proto::DeclineCall>,
-    ) -> Result<()> {
-        let recipient_user_id;
-        {
-            let mut store = self.store().await;
-            recipient_user_id = store.user_id_for_connection(message.sender_id)?;
-            let (room, recipient_connection_ids) =
-                store.decline_call(message.payload.room_id, message.sender_id)?;
-            for recipient_id in recipient_connection_ids {
-                self.peer
-                    .send(recipient_id, proto::CallCanceled {})
-                    .trace_err();
-            }
-            self.room_updated(room);
-        }
-        self.update_user_contacts(recipient_user_id).await?;
-        Ok(())
-    }
-
-    async fn update_participant_location(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::UpdateParticipantLocation>,
-        response: Response<proto::UpdateParticipantLocation>,
-    ) -> Result<()> {
-        let room_id = request.payload.room_id;
-        let location = request
-            .payload
-            .location
-            .ok_or_else(|| anyhow!("invalid location"))?;
-        let mut store = self.store().await;
-        let room = store.update_participant_location(room_id, location, request.sender_id)?;
-        self.room_updated(room);
-        response.send(proto::Ack {})?;
-        Ok(())
-    }
-
-    fn room_updated(&self, room: &proto::Room) {
-        for participant in &room.participants {
-            self.peer
-                .send(
-                    ConnectionId(participant.peer_id),
-                    proto::RoomUpdated {
-                        room: Some(room.clone()),
-                    },
-                )
-                .trace_err();
-        }
-    }
+    Ok(())
+}
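
A self-contained sketch of the shutdown race in `sign_out` above, assuming tokio and futures: a watch channel acts as the teardown broadcast, and `select_biased!` prefers it over the reconnect grace period. The durations and messages are illustrative only.

    use futures::FutureExt;
    use std::time::Duration;
    use tokio::sync::watch;

    #[tokio::main]
    async fn main() {
        let (teardown_tx, mut teardown_rx) = watch::channel(());

        // Pretend another task tears the whole server down shortly after startup.
        tokio::spawn(async move {
            tokio::time::sleep(Duration::from_millis(100)).await;
            let _ = teardown_tx.send(());
        });

        let mut reconnect_grace = Box::pin(tokio::time::sleep(Duration::from_secs(10)).fuse());
        let mut teardown_changed = Box::pin(teardown_rx.changed().fuse());

        futures::select_biased! {
            _ = reconnect_grace => println!("grace period elapsed: run the full cleanup path"),
            _ = teardown_changed => println!("server teardown: skip per-connection cleanup"),
        }
    }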
 
-    fn room_left(
-        &self,
-        room: &proto::Room,
-        connection_id: ConnectionId,
-    ) -> impl Future<Output = Result<()>> {
-        let client = self.app_state.live_kit_client.clone();
-        let room_name = room.live_kit_room.clone();
-        let participant_count = room.participants.len();
-        async move {
-            if let Some(client) = client {
-                client
-                    .remove_participant(room_name.clone(), connection_id.to_string())
-                    .await?;
+async fn ping(_: proto::Ping, response: Response<proto::Ping>, _session: Session) -> Result<()> {
+    response.send(proto::Ack {})?;
+    Ok(())
+}
 
-                if participant_count == 0 {
-                    client.delete_room(room_name).await?;
-                }
+async fn create_room(
+    _request: proto::CreateRoom,
+    response: Response<proto::CreateRoom>,
+    session: Session,
+) -> Result<()> {
+    let live_kit_room = nanoid::nanoid!(30);
+    let live_kit_connection_info = if let Some(live_kit) = session.live_kit_client.as_ref() {
+        if let Some(_) = live_kit
+            .create_room(live_kit_room.clone())
+            .await
+            .trace_err()
+        {
+            if let Some(token) = live_kit
+                .room_token(&live_kit_room, &session.connection_id.to_string())
+                .trace_err()
+            {
+                Some(proto::LiveKitConnectionInfo {
+                    server_url: live_kit.url().into(),
+                    token,
+                })
+            } else {
+                None
             }
-
-            Ok(())
+        } else {
+            None
         }
-    }
+    } else {
+        None
+    };
 
-    async fn share_project(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::ShareProject>,
-        response: Response<proto::ShareProject>,
-    ) -> Result<()> {
-        let user_id = self
-            .store()
+    {
+        let room = session
+            .db()
             .await
-            .user_id_for_connection(request.sender_id)?;
-        let project_id = self.app_state.db.register_project(user_id).await?;
-        let mut store = self.store().await;
-        let room = store.share_project(
-            request.payload.room_id,
-            project_id,
-            request.payload.worktrees,
-            request.sender_id,
-        )?;
-        response.send(proto::ShareProjectResponse {
-            project_id: project_id.to_proto(),
-        })?;
-        self.room_updated(room);
+            .create_room(session.user_id, session.connection_id, &live_kit_room)
+            .await?;
 
-        Ok(())
+        response.send(proto::CreateRoomResponse {
+            room: Some(room.clone()),
+            live_kit_connection_info,
+        })?;
     }
 
-    async fn unshare_project(
-        self: Arc<Server>,
-        message: TypedEnvelope<proto::UnshareProject>,
-    ) -> Result<()> {
-        let project_id = ProjectId::from_proto(message.payload.project_id);
-        let mut store = self.store().await;
-        let (room, project) = store.unshare_project(project_id, message.sender_id)?;
-        broadcast(
-            message.sender_id,
-            project.guest_connection_ids(),
-            |conn_id| self.peer.send(conn_id, message.payload.clone()),
-        );
-        self.room_updated(room);
-
-        Ok(())
-    }
+    update_user_contacts(session.user_id, &session).await?;
+    Ok(())
+}
 
-    async fn update_user_contacts(self: &Arc<Server>, user_id: UserId) -> Result<()> {
-        let contacts = self.app_state.db.get_contacts(user_id).await?;
-        let store = self.store().await;
-        let updated_contact = store.contact_for_user(user_id, false);
-        for contact in contacts {
-            if let db::Contact::Accepted {
-                user_id: contact_user_id,
-                ..
-            } = contact
-            {
-                for contact_conn_id in store.connection_ids_for_user(contact_user_id) {
-                    self.peer
-                        .send(
-                            contact_conn_id,
-                            proto::UpdateContacts {
-                                contacts: vec![updated_contact.clone()],
-                                remove_contacts: Default::default(),
-                                incoming_requests: Default::default(),
-                                remove_incoming_requests: Default::default(),
-                                outgoing_requests: Default::default(),
-                                remove_outgoing_requests: Default::default(),
-                            },
-                        )
-                        .trace_err();
-                }
-            }
-        }
-        Ok(())
+async fn join_room(
+    request: proto::JoinRoom,
+    response: Response<proto::JoinRoom>,
+    session: Session,
+) -> Result<()> {
+    let room = {
+        let room = session
+            .db()
+            .await
+            .join_room(
+                RoomId::from_proto(request.id),
+                session.user_id,
+                session.connection_id,
+            )
+            .await?;
+        room_updated(&room, &session);
+        room.clone()
+    };
+
+    for connection_id in session
+        .connection_pool()
+        .await
+        .user_connection_ids(session.user_id)
+    {
+        session
+            .peer
+            .send(connection_id, proto::CallCanceled {})
+            .trace_err();
     }
 
-    async fn join_project(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::JoinProject>,
-        response: Response<proto::JoinProject>,
-    ) -> Result<()> {
-        let project_id = ProjectId::from_proto(request.payload.project_id);
-
-        let host_user_id;
-        let guest_user_id;
-        let host_connection_id;
+    let live_kit_connection_info = if let Some(live_kit) = session.live_kit_client.as_ref() {
+        if let Some(token) = live_kit
+            .room_token(&room.live_kit_room, &session.connection_id.to_string())
+            .trace_err()
         {
-            let state = self.store().await;
-            let project = state.project(project_id)?;
-            host_user_id = project.host.user_id;
-            host_connection_id = project.host_connection_id;
-            guest_user_id = state.user_id_for_connection(request.sender_id)?;
-        };
-
-        tracing::info!(%project_id, %host_user_id, %host_connection_id, "join project");
-
-        let mut store = self.store().await;
-        let (project, replica_id) = store.join_project(request.sender_id, project_id)?;
-        let peer_count = project.guests.len();
-        let mut collaborators = Vec::with_capacity(peer_count);
-        collaborators.push(proto::Collaborator {
-            peer_id: project.host_connection_id.0,
-            replica_id: 0,
-            user_id: project.host.user_id.to_proto(),
-        });
-        let worktrees = project
-            .worktrees
-            .iter()
-            .map(|(id, worktree)| proto::WorktreeMetadata {
-                id: *id,
-                root_name: worktree.root_name.clone(),
-                visible: worktree.visible,
-                abs_path: worktree.abs_path.clone(),
+            Some(proto::LiveKitConnectionInfo {
+                server_url: live_kit.url().into(),
+                token,
             })
-            .collect::<Vec<_>>();
-
-        // Add all guests other than the requesting user's own connections as collaborators
-        for (guest_conn_id, guest) in &project.guests {
-            if request.sender_id != *guest_conn_id {
-                collaborators.push(proto::Collaborator {
-                    peer_id: guest_conn_id.0,
-                    replica_id: guest.replica_id as u32,
-                    user_id: guest.user_id.to_proto(),
-                });
-            }
-        }
-
-        for conn_id in project.connection_ids() {
-            if conn_id != request.sender_id {
-                self.peer
-                    .send(
-                        conn_id,
-                        proto::AddProjectCollaborator {
-                            project_id: project_id.to_proto(),
-                            collaborator: Some(proto::Collaborator {
-                                peer_id: request.sender_id.0,
-                                replica_id: replica_id as u32,
-                                user_id: guest_user_id.to_proto(),
-                            }),
-                        },
-                    )
-                    .trace_err();
-            }
+        } else {
+            None
         }
+    } else {
+        None
+    };
 
-        // First, we send the metadata associated with each worktree.
-        response.send(proto::JoinProjectResponse {
-            worktrees: worktrees.clone(),
-            replica_id: replica_id as u32,
-            collaborators: collaborators.clone(),
-            language_servers: project.language_servers.clone(),
-        })?;
-
-        for (worktree_id, worktree) in &project.worktrees {
-            #[cfg(any(test, feature = "test-support"))]
-            const MAX_CHUNK_SIZE: usize = 2;
-            #[cfg(not(any(test, feature = "test-support")))]
-            const MAX_CHUNK_SIZE: usize = 256;
-
-            // Stream this worktree's entries.
-            let message = proto::UpdateWorktree {
-                project_id: project_id.to_proto(),
-                worktree_id: *worktree_id,
-                abs_path: worktree.abs_path.clone(),
-                root_name: worktree.root_name.clone(),
-                updated_entries: worktree.entries.values().cloned().collect(),
-                removed_entries: Default::default(),
-                scan_id: worktree.scan_id,
-                is_last_update: worktree.is_complete,
-            };
-            for update in proto::split_worktree_update(message, MAX_CHUNK_SIZE) {
-                self.peer.send(request.sender_id, update.clone())?;
-            }
+    response.send(proto::JoinRoomResponse {
+        room: Some(room),
+        live_kit_connection_info,
+    })?;
 
-            // Stream this worktree's diagnostics.
-            for summary in worktree.diagnostic_summaries.values() {
-                self.peer.send(
-                    request.sender_id,
-                    proto::UpdateDiagnosticSummary {
-                        project_id: project_id.to_proto(),
-                        worktree_id: *worktree_id,
-                        summary: Some(summary.clone()),
-                    },
-                )?;
-            }
-        }
+    update_user_contacts(session.user_id, &session).await?;
+    Ok(())
+}
 
-        for language_server in &project.language_servers {
-            self.peer.send(
-                request.sender_id,
-                proto::UpdateLanguageServer {
-                    project_id: project_id.to_proto(),
-                    language_server_id: language_server.id,
-                    variant: Some(
-                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
-                            proto::LspDiskBasedDiagnosticsUpdated {},
-                        ),
-                    ),
-                },
-            )?;
-        }
+async fn leave_room(_message: proto::LeaveRoom, session: Session) -> Result<()> {
+    leave_room_for_session(&session).await
+}
 
-        Ok(())
+async fn call(
+    request: proto::Call,
+    response: Response<proto::Call>,
+    session: Session,
+) -> Result<()> {
+    let room_id = RoomId::from_proto(request.room_id);
+    let calling_user_id = session.user_id;
+    let calling_connection_id = session.connection_id;
+    let called_user_id = UserId::from_proto(request.called_user_id);
+    let initial_project_id = request.initial_project_id.map(ProjectId::from_proto);
+    if !session
+        .db()
+        .await
+        .has_contact(calling_user_id, called_user_id)
+        .await?
+    {
+        return Err(anyhow!("cannot call a user who isn't a contact"))?;
     }
 
-    async fn leave_project(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::LeaveProject>,
-    ) -> Result<()> {
-        let sender_id = request.sender_id;
-        let project_id = ProjectId::from_proto(request.payload.project_id);
-        let project;
-        {
-            let mut store = self.store().await;
-            project = store.leave_project(project_id, sender_id)?;
-            tracing::info!(
-                %project_id,
-                host_user_id = %project.host_user_id,
-                host_connection_id = %project.host_connection_id,
-                "leave project"
-            );
-
-            if project.remove_collaborator {
-                broadcast(sender_id, project.connection_ids, |conn_id| {
-                    self.peer.send(
-                        conn_id,
-                        proto::RemoveProjectCollaborator {
-                            project_id: project_id.to_proto(),
-                            peer_id: sender_id.0,
-                        },
-                    )
-                });
+    let incoming_call = {
+        let (room, incoming_call) = &mut *session
+            .db()
+            .await
+            .call(
+                room_id,
+                calling_user_id,
+                calling_connection_id,
+                called_user_id,
+                initial_project_id,
+            )
+            .await?;
+        room_updated(&room, &session);
+        mem::take(incoming_call)
+    };
+    update_user_contacts(called_user_id, &session).await?;
+
+    let mut calls = session
+        .connection_pool()
+        .await
+        .user_connection_ids(called_user_id)
+        .map(|connection_id| session.peer.request(connection_id, incoming_call.clone()))
+        .collect::<FuturesUnordered<_>>();
+
+    while let Some(call_response) = calls.next().await {
+        match call_response.as_ref() {
+            Ok(_) => {
+                response.send(proto::Ack {})?;
+                return Ok(());
+            }
+            Err(_) => {
+                call_response.trace_err();
             }
         }
-
-        Ok(())
-    }
-
-    async fn update_project(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::UpdateProject>,
-    ) -> Result<()> {
-        let project_id = ProjectId::from_proto(request.payload.project_id);
-        {
-            let mut state = self.store().await;
-            let guest_connection_ids = state
-                .read_project(project_id, request.sender_id)?
-                .guest_connection_ids();
-            let room =
-                state.update_project(project_id, &request.payload.worktrees, request.sender_id)?;
-            broadcast(request.sender_id, guest_connection_ids, |connection_id| {
-                self.peer
-                    .forward_send(request.sender_id, connection_id, request.payload.clone())
-            });
-            self.room_updated(room);
-        };
-
-        Ok(())
     }
 
-    async fn update_worktree(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::UpdateWorktree>,
-        response: Response<proto::UpdateWorktree>,
-    ) -> Result<()> {
-        let project_id = ProjectId::from_proto(request.payload.project_id);
-        let worktree_id = request.payload.worktree_id;
-        let connection_ids = self.store().await.update_worktree(
-            request.sender_id,
-            project_id,
-            worktree_id,
-            &request.payload.root_name,
-            &request.payload.abs_path,
-            &request.payload.removed_entries,
-            &request.payload.updated_entries,
-            request.payload.scan_id,
-            request.payload.is_last_update,
-        )?;
-
-        broadcast(request.sender_id, connection_ids, |connection_id| {
-            self.peer
-                .forward_send(request.sender_id, connection_id, request.payload.clone())
-        });
-        response.send(proto::Ack {})?;
-        Ok(())
+    {
+        let room = session
+            .db()
+            .await
+            .call_failed(room_id, called_user_id)
+            .await?;
+        room_updated(&room, &session);
     }
+    update_user_contacts(called_user_id, &session).await?;
 
-    async fn update_diagnostic_summary(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::UpdateDiagnosticSummary>,
-    ) -> Result<()> {
-        let summary = request
-            .payload
-            .summary
-            .clone()
-            .ok_or_else(|| anyhow!("invalid summary"))?;
-        let receiver_ids = self.store().await.update_diagnostic_summary(
-            ProjectId::from_proto(request.payload.project_id),
-            request.payload.worktree_id,
-            request.sender_id,
-            summary,
-        )?;
+    Err(anyhow!("failed to ring user"))?
+}
 
-        broadcast(request.sender_id, receiver_ids, |connection_id| {
-            self.peer
-                .forward_send(request.sender_id, connection_id, request.payload.clone())
-        });
-        Ok(())
+async fn cancel_call(
+    request: proto::CancelCall,
+    response: Response<proto::CancelCall>,
+    session: Session,
+) -> Result<()> {
+    let called_user_id = UserId::from_proto(request.called_user_id);
+    let room_id = RoomId::from_proto(request.room_id);
+    {
+        let room = session
+            .db()
+            .await
+            .cancel_call(Some(room_id), session.connection_id, called_user_id)
+            .await?;
+        room_updated(&room, &session);
     }
 
-    async fn start_language_server(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::StartLanguageServer>,
-    ) -> Result<()> {
-        let receiver_ids = self.store().await.start_language_server(
-            ProjectId::from_proto(request.payload.project_id),
-            request.sender_id,
-            request
-                .payload
-                .server
-                .clone()
-                .ok_or_else(|| anyhow!("invalid language server"))?,
-        )?;
-        broadcast(request.sender_id, receiver_ids, |connection_id| {
-            self.peer
-                .forward_send(request.sender_id, connection_id, request.payload.clone())
-        });
-        Ok(())
+    for connection_id in session
+        .connection_pool()
+        .await
+        .user_connection_ids(called_user_id)
+    {
+        session
+            .peer
+            .send(connection_id, proto::CallCanceled {})
+            .trace_err();
     }
+    response.send(proto::Ack {})?;
 
-    async fn update_language_server(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::UpdateLanguageServer>,
-    ) -> Result<()> {
-        let receiver_ids = self.store().await.project_connection_ids(
-            ProjectId::from_proto(request.payload.project_id),
-            request.sender_id,
-        )?;
-        broadcast(request.sender_id, receiver_ids, |connection_id| {
-            self.peer
-                .forward_send(request.sender_id, connection_id, request.payload.clone())
-        });
-        Ok(())
-    }
+    update_user_contacts(called_user_id, &session).await?;
+    Ok(())
+}
 
-    async fn forward_project_request<T>(
-        self: Arc<Server>,
-        request: TypedEnvelope<T>,
-        response: Response<T>,
-    ) -> Result<()>
-    where
-        T: EntityMessage + RequestMessage,
+async fn decline_call(message: proto::DeclineCall, session: Session) -> Result<()> {
+    let room_id = RoomId::from_proto(message.room_id);
     {
-        let project_id = ProjectId::from_proto(request.payload.remote_entity_id());
-        let host_connection_id = self
-            .store()
+        let room = session
+            .db()
             .await
-            .read_project(project_id, request.sender_id)?
-            .host_connection_id;
-        let payload = self
-            .peer
-            .forward_request(request.sender_id, host_connection_id, request.payload)
+            .decline_call(Some(room_id), session.user_id)
             .await?;
-
-        // Ensure project still exists by the time we get the response from the host.
-        self.store()
-            .await
-            .read_project(project_id, request.sender_id)?;
-
-        response.send(payload)?;
-        Ok(())
+        room_updated(&room, &session);
     }
 
-    async fn save_buffer(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::SaveBuffer>,
-        response: Response<proto::SaveBuffer>,
-    ) -> Result<()> {
-        let project_id = ProjectId::from_proto(request.payload.project_id);
-        let host = self
-            .store()
-            .await
-            .read_project(project_id, request.sender_id)?
-            .host_connection_id;
-        let response_payload = self
+    for connection_id in session
+        .connection_pool()
+        .await
+        .user_connection_ids(session.user_id)
+    {
+        session
             .peer
-            .forward_request(request.sender_id, host, request.payload.clone())
-            .await?;
-
-        let mut guests = self
-            .store()
-            .await
-            .read_project(project_id, request.sender_id)?
-            .connection_ids();
-        guests.retain(|guest_connection_id| *guest_connection_id != request.sender_id);
-        broadcast(host, guests, |conn_id| {
-            self.peer
-                .forward_send(host, conn_id, response_payload.clone())
-        });
-        response.send(response_payload)?;
-        Ok(())
+            .send(connection_id, proto::CallCanceled {})
+            .trace_err();
     }
+    update_user_contacts(session.user_id, &session).await?;
+    Ok(())
+}
 
-    async fn create_buffer_for_peer(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::CreateBufferForPeer>,
-    ) -> Result<()> {
-        self.peer.forward_send(
-            request.sender_id,
-            ConnectionId(request.payload.peer_id),
-            request.payload,
-        )?;
-        Ok(())
-    }
+async fn update_participant_location(
+    request: proto::UpdateParticipantLocation,
+    response: Response<proto::UpdateParticipantLocation>,
+    session: Session,
+) -> Result<()> {
+    let room_id = RoomId::from_proto(request.room_id);
+    let location = request
+        .location
+        .ok_or_else(|| anyhow!("invalid location"))?;
+    let room = session
+        .db()
+        .await
+        .update_room_participant_location(room_id, session.connection_id, location)
+        .await?;
+    room_updated(&room, &session);
+    response.send(proto::Ack {})?;
+    Ok(())
+}
 
-    async fn update_buffer(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::UpdateBuffer>,
-        response: Response<proto::UpdateBuffer>,
-    ) -> Result<()> {
-        let project_id = ProjectId::from_proto(request.payload.project_id);
-        let receiver_ids = {
-            let store = self.store().await;
-            store.project_connection_ids(project_id, request.sender_id)?
-        };
+async fn share_project(
+    request: proto::ShareProject,
+    response: Response<proto::ShareProject>,
+    session: Session,
+) -> Result<()> {
+    let (project_id, room) = &*session
+        .db()
+        .await
+        .share_project(
+            RoomId::from_proto(request.room_id),
+            session.connection_id,
+            &request.worktrees,
+        )
+        .await?;
+    response.send(proto::ShareProjectResponse {
+        project_id: project_id.to_proto(),
+    })?;
+    room_updated(&room, &session);
 
-        broadcast(request.sender_id, receiver_ids, |connection_id| {
-            self.peer
-                .forward_send(request.sender_id, connection_id, request.payload.clone())
-        });
-        response.send(proto::Ack {})?;
-        Ok(())
-    }
+    Ok(())
+}
 
-    async fn update_buffer_file(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::UpdateBufferFile>,
-    ) -> Result<()> {
-        let receiver_ids = self.store().await.project_connection_ids(
-            ProjectId::from_proto(request.payload.project_id),
-            request.sender_id,
-        )?;
-        broadcast(request.sender_id, receiver_ids, |connection_id| {
-            self.peer
-                .forward_send(request.sender_id, connection_id, request.payload.clone())
-        });
-        Ok(())
-    }
+async fn unshare_project(message: proto::UnshareProject, session: Session) -> Result<()> {
+    let project_id = ProjectId::from_proto(message.project_id);
 
-    async fn buffer_reloaded(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::BufferReloaded>,
-    ) -> Result<()> {
-        let receiver_ids = self.store().await.project_connection_ids(
-            ProjectId::from_proto(request.payload.project_id),
-            request.sender_id,
-        )?;
-        broadcast(request.sender_id, receiver_ids, |connection_id| {
-            self.peer
-                .forward_send(request.sender_id, connection_id, request.payload.clone())
-        });
-        Ok(())
-    }
+    let (room, guest_connection_ids) = &*session
+        .db()
+        .await
+        .unshare_project(project_id, session.connection_id)
+        .await?;
 
-    async fn buffer_saved(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::BufferSaved>,
-    ) -> Result<()> {
-        let receiver_ids = self.store().await.project_connection_ids(
-            ProjectId::from_proto(request.payload.project_id),
-            request.sender_id,
-        )?;
-        broadcast(request.sender_id, receiver_ids, |connection_id| {
-            self.peer
-                .forward_send(request.sender_id, connection_id, request.payload.clone())
-        });
-        Ok(())
-    }
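+    // Let each guest know the project is no longer shared, then update the room.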
+    broadcast(
+        session.connection_id,
+        guest_connection_ids.iter().copied(),
+        |conn_id| session.peer.send(conn_id, message.clone()),
+    );
+    room_updated(&room, &session);
 
-    async fn follow(
-        self: Arc<Self>,
-        request: TypedEnvelope<proto::Follow>,
-        response: Response<proto::Follow>,
-    ) -> Result<()> {
-        let project_id = ProjectId::from_proto(request.payload.project_id);
-        let leader_id = ConnectionId(request.payload.leader_id);
-        let follower_id = request.sender_id;
-        {
-            let store = self.store().await;
-            if !store
-                .project_connection_ids(project_id, follower_id)?
-                .contains(&leader_id)
-            {
-                Err(anyhow!("no such peer"))?;
-            }
-        }
+    Ok(())
+}
 
-        let mut response_payload = self
+async fn join_project(
+    request: proto::JoinProject,
+    response: Response<proto::JoinProject>,
+    session: Session,
+) -> Result<()> {
+    let project_id = ProjectId::from_proto(request.project_id);
+    let guest_user_id = session.user_id;
+
+    tracing::info!(%project_id, "join project");
+
+    let (project, replica_id) = &mut *session
+        .db()
+        .await
+        .join_project(project_id, session.connection_id)
+        .await?;
+
+    let collaborators = project
+        .collaborators
+        .iter()
+        .filter(|collaborator| collaborator.connection_id != session.connection_id.0 as i32)
+        .map(|collaborator| proto::Collaborator {
+            peer_id: collaborator.connection_id as u32,
+            replica_id: collaborator.replica_id.0 as u32,
+            user_id: collaborator.user_id.to_proto(),
+        })
+        .collect::<Vec<_>>();
+    let worktrees = project
+        .worktrees
+        .iter()
+        .map(|(id, worktree)| proto::WorktreeMetadata {
+            id: *id,
+            root_name: worktree.root_name.clone(),
+            visible: worktree.visible,
+            abs_path: worktree.abs_path.clone(),
+        })
+        .collect::<Vec<_>>();
+
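+    // Tell each existing collaborator that this guest has joined the project.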
+    for collaborator in &collaborators {
+        session
             .peer
-            .forward_request(request.sender_id, leader_id, request.payload)
-            .await?;
-        response_payload
-            .views
-            .retain(|view| view.leader_id != Some(follower_id.0));
-        response.send(response_payload)?;
-        Ok(())
+            .send(
+                ConnectionId(collaborator.peer_id),
+                proto::AddProjectCollaborator {
+                    project_id: project_id.to_proto(),
+                    collaborator: Some(proto::Collaborator {
+                        peer_id: session.connection_id.0,
+                        replica_id: replica_id.0 as u32,
+                        user_id: guest_user_id.to_proto(),
+                    }),
+                },
+            )
+            .trace_err();
     }
 
-    async fn unfollow(self: Arc<Self>, request: TypedEnvelope<proto::Unfollow>) -> Result<()> {
-        let project_id = ProjectId::from_proto(request.payload.project_id);
-        let leader_id = ConnectionId(request.payload.leader_id);
-        let store = self.store().await;
-        if !store
-            .project_connection_ids(project_id, request.sender_id)?
-            .contains(&leader_id)
-        {
-            Err(anyhow!("no such peer"))?;
+    // First, we send the metadata associated with each worktree.
+    response.send(proto::JoinProjectResponse {
+        worktrees: worktrees.clone(),
+        replica_id: replica_id.0 as u32,
+        collaborators: collaborators.clone(),
+        language_servers: project.language_servers.clone(),
+    })?;
+
+    for (worktree_id, worktree) in mem::take(&mut project.worktrees) {
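+        // Use a tiny chunk size in tests so the chunked streaming path is
+        // exercised; larger chunks outside of tests keep the message count down.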
+        #[cfg(any(test, feature = "test-support"))]
+        const MAX_CHUNK_SIZE: usize = 2;
+        #[cfg(not(any(test, feature = "test-support")))]
+        const MAX_CHUNK_SIZE: usize = 256;
+
+        // Stream this worktree's entries.
+        let message = proto::UpdateWorktree {
+            project_id: project_id.to_proto(),
+            worktree_id,
+            abs_path: worktree.abs_path.clone(),
+            root_name: worktree.root_name,
+            updated_entries: worktree.entries,
+            removed_entries: Default::default(),
+            scan_id: worktree.scan_id,
+            is_last_update: worktree.is_complete,
+        };
+        for update in proto::split_worktree_update(message, MAX_CHUNK_SIZE) {
+            session.peer.send(session.connection_id, update.clone())?;
         }
-        self.peer
-            .forward_send(request.sender_id, leader_id, request.payload)?;
-        Ok(())
-    }
 
-    async fn update_followers(
-        self: Arc<Self>,
-        request: TypedEnvelope<proto::UpdateFollowers>,
-    ) -> Result<()> {
-        let project_id = ProjectId::from_proto(request.payload.project_id);
-        let store = self.store().await;
-        let connection_ids = store.project_connection_ids(project_id, request.sender_id)?;
-        let leader_id = request
-            .payload
-            .variant
-            .as_ref()
-            .and_then(|variant| match variant {
-                proto::update_followers::Variant::CreateView(payload) => payload.leader_id,
-                proto::update_followers::Variant::UpdateView(payload) => payload.leader_id,
-                proto::update_followers::Variant::UpdateActiveView(payload) => payload.leader_id,
-            });
-        for follower_id in &request.payload.follower_ids {
-            let follower_id = ConnectionId(*follower_id);
-            if connection_ids.contains(&follower_id) && Some(follower_id.0) != leader_id {
-                self.peer
-                    .forward_send(request.sender_id, follower_id, request.payload.clone())?;
-            }
+        // Stream this worktree's diagnostics.
+        for summary in worktree.diagnostic_summaries {
+            session.peer.send(
+                session.connection_id,
+                proto::UpdateDiagnosticSummary {
+                    project_id: project_id.to_proto(),
+                    worktree_id: worktree.id,
+                    summary: Some(summary),
+                },
+            )?;
         }
-        Ok(())
     }
 
-    async fn get_users(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::GetUsers>,
-        response: Response<proto::GetUsers>,
-    ) -> Result<()> {
-        let user_ids = request
-            .payload
-            .user_ids
-            .into_iter()
-            .map(UserId::from_proto)
-            .collect();
-        let users = self
-            .app_state
-            .db
-            .get_users_by_ids(user_ids)
-            .await?
-            .into_iter()
-            .map(|user| proto::User {
-                id: user.id.to_proto(),
-                avatar_url: format!("https://github.com/{}.png?size=128", user.github_login),
-                github_login: user.github_login,
-            })
-            .collect();
-        response.send(proto::UsersResponse { users })?;
-        Ok(())
+    for language_server in &project.language_servers {
+        session.peer.send(
+            session.connection_id,
+            proto::UpdateLanguageServer {
+                project_id: project_id.to_proto(),
+                language_server_id: language_server.id,
+                variant: Some(
+                    proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
+                        proto::LspDiskBasedDiagnosticsUpdated {},
+                    ),
+                ),
+            },
+        )?;
     }
 
-    async fn fuzzy_search_users(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::FuzzySearchUsers>,
-        response: Response<proto::FuzzySearchUsers>,
-    ) -> Result<()> {
-        let user_id = self
-            .store()
-            .await
-            .user_id_for_connection(request.sender_id)?;
-        let query = request.payload.query;
-        let db = &self.app_state.db;
-        let users = match query.len() {
-            0 => vec![],
-            1 | 2 => db
-                .get_user_by_github_account(&query, None)
-                .await?
-                .into_iter()
-                .collect(),
-            _ => db.fuzzy_search_users(&query, 10).await?,
-        };
-        let users = users
-            .into_iter()
-            .filter(|user| user.id != user_id)
-            .map(|user| proto::User {
-                id: user.id.to_proto(),
-                avatar_url: format!("https://github.com/{}.png?size=128", user.github_login),
-                github_login: user.github_login,
-            })
-            .collect();
-        response.send(proto::UsersResponse { users })?;
-        Ok(())
-    }
+    Ok(())
+}
 
-    async fn request_contact(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::RequestContact>,
-        response: Response<proto::RequestContact>,
-    ) -> Result<()> {
-        let requester_id = self
-            .store()
-            .await
-            .user_id_for_connection(request.sender_id)?;
-        let responder_id = UserId::from_proto(request.payload.responder_id);
-        if requester_id == responder_id {
-            return Err(anyhow!("cannot add yourself as a contact"))?;
-        }
+async fn leave_project(request: proto::LeaveProject, session: Session) -> Result<()> {
+    let sender_id = session.connection_id;
+    let project_id = ProjectId::from_proto(request.project_id);
+
+    let project = session
+        .db()
+        .await
+        .leave_project(project_id, sender_id)
+        .await?;
+    tracing::info!(
+        %project_id,
+        host_user_id = %project.host_user_id,
+        host_connection_id = %project.host_connection_id,
+        "leave project"
+    );
+    project_left(&project, &session);
+
+    Ok(())
+}
 
-        self.app_state
-            .db
-            .send_contact_request(requester_id, responder_id)
-            .await?;
+async fn update_project(
+    request: proto::UpdateProject,
+    response: Response<proto::UpdateProject>,
+    session: Session,
+) -> Result<()> {
+    let project_id = ProjectId::from_proto(request.project_id);
+    let (room, guest_connection_ids) = &*session
+        .db()
+        .await
+        .update_project(project_id, session.connection_id, &request.worktrees)
+        .await?;
+    broadcast(
+        session.connection_id,
+        guest_connection_ids.iter().copied(),
+        |connection_id| {
+            session
+                .peer
+                .forward_send(session.connection_id, connection_id, request.clone())
+        },
+    );
+    room_updated(&room, &session);
+    response.send(proto::Ack {})?;
 
-        // Update outgoing contact requests of requester
-        let mut update = proto::UpdateContacts::default();
-        update.outgoing_requests.push(responder_id.to_proto());
-        for connection_id in self.store().await.connection_ids_for_user(requester_id) {
-            self.peer.send(connection_id, update.clone())?;
-        }
+    Ok(())
+}
 
-        // Update incoming contact requests of responder
-        let mut update = proto::UpdateContacts::default();
-        update
-            .incoming_requests
-            .push(proto::IncomingContactRequest {
-                requester_id: requester_id.to_proto(),
-                should_notify: true,
-            });
-        for connection_id in self.store().await.connection_ids_for_user(responder_id) {
-            self.peer.send(connection_id, update.clone())?;
-        }
+async fn update_worktree(
+    request: proto::UpdateWorktree,
+    response: Response<proto::UpdateWorktree>,
+    session: Session,
+) -> Result<()> {
+    let guest_connection_ids = session
+        .db()
+        .await
+        .update_worktree(&request, session.connection_id)
+        .await?;
+
+    broadcast(
+        session.connection_id,
+        guest_connection_ids.iter().copied(),
+        |connection_id| {
+            session
+                .peer
+                .forward_send(session.connection_id, connection_id, request.clone())
+        },
+    );
+    response.send(proto::Ack {})?;
+    Ok(())
+}
 
-        response.send(proto::Ack {})?;
-        Ok(())
-    }
+async fn update_diagnostic_summary(
+    message: proto::UpdateDiagnosticSummary,
+    session: Session,
+) -> Result<()> {
+    let guest_connection_ids = session
+        .db()
+        .await
+        .update_diagnostic_summary(&message, session.connection_id)
+        .await?;
+
+    broadcast(
+        session.connection_id,
+        guest_connection_ids.iter().copied(),
+        |connection_id| {
+            session
+                .peer
+                .forward_send(session.connection_id, connection_id, message.clone())
+        },
+    );
 
-    async fn respond_to_contact_request(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::RespondToContactRequest>,
-        response: Response<proto::RespondToContactRequest>,
-    ) -> Result<()> {
-        let responder_id = self
-            .store()
-            .await
-            .user_id_for_connection(request.sender_id)?;
-        let requester_id = UserId::from_proto(request.payload.requester_id);
-        if request.payload.response == proto::ContactRequestResponse::Dismiss as i32 {
-            self.app_state
-                .db
-                .dismiss_contact_notification(responder_id, requester_id)
-                .await?;
-        } else {
-            let accept = request.payload.response == proto::ContactRequestResponse::Accept as i32;
-            self.app_state
-                .db
-                .respond_to_contact_request(responder_id, requester_id, accept)
-                .await?;
-
-            let store = self.store().await;
-            // Update responder with new contact
-            let mut update = proto::UpdateContacts::default();
-            if accept {
-                update
-                    .contacts
-                    .push(store.contact_for_user(requester_id, false));
-            }
-            update
-                .remove_incoming_requests
-                .push(requester_id.to_proto());
-            for connection_id in store.connection_ids_for_user(responder_id) {
-                self.peer.send(connection_id, update.clone())?;
-            }
+    Ok(())
+}
 
-            // Update requester with new contact
-            let mut update = proto::UpdateContacts::default();
-            if accept {
-                update
-                    .contacts
-                    .push(store.contact_for_user(responder_id, true));
-            }
-            update
-                .remove_outgoing_requests
-                .push(responder_id.to_proto());
-            for connection_id in store.connection_ids_for_user(requester_id) {
-                self.peer.send(connection_id, update.clone())?;
-            }
-        }
+async fn start_language_server(
+    request: proto::StartLanguageServer,
+    session: Session,
+) -> Result<()> {
+    let guest_connection_ids = session
+        .db()
+        .await
+        .start_language_server(&request, session.connection_id)
+        .await?;
+
+    broadcast(
+        session.connection_id,
+        guest_connection_ids.iter().copied(),
+        |connection_id| {
+            session
+                .peer
+                .forward_send(session.connection_id, connection_id, request.clone())
+        },
+    );
+    Ok(())
+}
 
-        response.send(proto::Ack {})?;
-        Ok(())
-    }
+async fn update_language_server(
+    request: proto::UpdateLanguageServer,
+    session: Session,
+) -> Result<()> {
+    let project_id = ProjectId::from_proto(request.project_id);
+    let project_connection_ids = session
+        .db()
+        .await
+        .project_connection_ids(project_id, session.connection_id)
+        .await?;
+    broadcast(
+        session.connection_id,
+        project_connection_ids.iter().copied(),
+        |connection_id| {
+            session
+                .peer
+                .forward_send(session.connection_id, connection_id, request.clone())
+        },
+    );
+    Ok(())
+}
 
-    async fn remove_contact(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::RemoveContact>,
-        response: Response<proto::RemoveContact>,
-    ) -> Result<()> {
-        let requester_id = self
-            .store()
+async fn forward_project_request<T>(
+    request: T,
+    response: Response<T>,
+    session: Session,
+) -> Result<()>
+where
+    T: EntityMessage + RequestMessage,
+{
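+    // Project-scoped requests are proxied to the project's host: find the
+    // host's connection among the collaborators and forward the request there.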
+    let project_id = ProjectId::from_proto(request.remote_entity_id());
+    let host_connection_id = {
+        let collaborators = session
+            .db()
             .await
-            .user_id_for_connection(request.sender_id)?;
-        let responder_id = UserId::from_proto(request.payload.user_id);
-        self.app_state
-            .db
-            .remove_contact(requester_id, responder_id)
+            .project_collaborators(project_id, session.connection_id)
             .await?;
+        ConnectionId(
+            collaborators
+                .iter()
+                .find(|collaborator| collaborator.is_host)
+                .ok_or_else(|| anyhow!("host not found"))?
+                .connection_id as u32,
+        )
+    };
 
-        // Update outgoing contact requests of requester
-        let mut update = proto::UpdateContacts::default();
-        update
-            .remove_outgoing_requests
-            .push(responder_id.to_proto());
-        for connection_id in self.store().await.connection_ids_for_user(requester_id) {
-            self.peer.send(connection_id, update.clone())?;
-        }
-
-        // Update incoming contact requests of responder
-        let mut update = proto::UpdateContacts::default();
-        update
-            .remove_incoming_requests
-            .push(requester_id.to_proto());
-        for connection_id in self.store().await.connection_ids_for_user(responder_id) {
-            self.peer.send(connection_id, update.clone())?;
-        }
-
-        response.send(proto::Ack {})?;
-        Ok(())
-    }
+    let payload = session
+        .peer
+        .forward_request(session.connection_id, host_connection_id, request)
+        .await?;
 
-    async fn update_diff_base(
-        self: Arc<Server>,
-        request: TypedEnvelope<proto::UpdateDiffBase>,
-    ) -> Result<()> {
-        let receiver_ids = self.store().await.project_connection_ids(
-            ProjectId::from_proto(request.payload.project_id),
-            request.sender_id,
-        )?;
-        broadcast(request.sender_id, receiver_ids, |connection_id| {
-            self.peer
-                .forward_send(request.sender_id, connection_id, request.payload.clone())
-        });
-        Ok(())
-    }
+    response.send(payload)?;
+    Ok(())
+}
 
-    async fn get_private_user_info(
-        self: Arc<Self>,
-        request: TypedEnvelope<proto::GetPrivateUserInfo>,
-        response: Response<proto::GetPrivateUserInfo>,
-    ) -> Result<()> {
-        let user_id = self
-            .store()
+async fn save_buffer(
+    request: proto::SaveBuffer,
+    response: Response<proto::SaveBuffer>,
+    session: Session,
+) -> Result<()> {
+    let project_id = ProjectId::from_proto(request.project_id);
+    let host_connection_id = {
+        let collaborators = session
+            .db()
             .await
-            .user_id_for_connection(request.sender_id)?;
-        let metrics_id = self.app_state.db.get_user_metrics_id(user_id).await?;
-        let user = self
-            .app_state
-            .db
-            .get_user_by_id(user_id)
-            .await?
-            .ok_or_else(|| anyhow!("user not found"))?;
-        response.send(proto::GetPrivateUserInfoResponse {
-            metrics_id,
-            staff: user.admin,
-        })?;
-        Ok(())
-    }
-
-    pub(crate) async fn store(&self) -> StoreGuard<'_> {
-        #[cfg(test)]
-        tokio::task::yield_now().await;
-        let guard = self.store.lock().await;
-        #[cfg(test)]
-        tokio::task::yield_now().await;
-        StoreGuard {
-            guard,
-            _not_send: PhantomData,
-        }
-    }
+            .project_collaborators(project_id, session.connection_id)
+            .await?;
+        let host = collaborators
+            .iter()
+            .find(|collaborator| collaborator.is_host)
+            .ok_or_else(|| anyhow!("host not found"))?;
+        ConnectionId(host.connection_id as u32)
+    };
+    let response_payload = session
+        .peer
+        .forward_request(session.connection_id, host_connection_id, request.clone())
+        .await?;
+
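+    // Relay the host's response to the other guests so their buffers also
+    // observe the save.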
+    let mut collaborators = session
+        .db()
+        .await
+        .project_collaborators(project_id, session.connection_id)
+        .await?;
+    collaborators
+        .retain(|collaborator| collaborator.connection_id != session.connection_id.0 as i32);
+    let project_connection_ids = collaborators
+        .iter()
+        .map(|collaborator| ConnectionId(collaborator.connection_id as u32));
+    broadcast(host_connection_id, project_connection_ids, |conn_id| {
+        session
+            .peer
+            .forward_send(host_connection_id, conn_id, response_payload.clone())
+    });
+    response.send(response_payload)?;
+    Ok(())
+}
 
-    pub async fn snapshot<'a>(self: &'a Arc<Self>) -> ServerSnapshot<'a> {
-        ServerSnapshot {
-            store: self.store().await,
-            peer: &self.peer,
-        }
-    }
+async fn create_buffer_for_peer(
+    request: proto::CreateBufferForPeer,
+    session: Session,
+) -> Result<()> {
+    session.peer.forward_send(
+        session.connection_id,
+        ConnectionId(request.peer_id),
+        request,
+    )?;
+    Ok(())
 }
 
-impl<'a> Deref for StoreGuard<'a> {
-    type Target = Store;
+async fn update_buffer(
+    request: proto::UpdateBuffer,
+    response: Response<proto::UpdateBuffer>,
+    session: Session,
+) -> Result<()> {
+    let project_id = ProjectId::from_proto(request.project_id);
+    let project_connection_ids = session
+        .db()
+        .await
+        .project_connection_ids(project_id, session.connection_id)
+        .await?;
+
+    broadcast(
+        session.connection_id,
+        project_connection_ids.iter().copied(),
+        |connection_id| {
+            session
+                .peer
+                .forward_send(session.connection_id, connection_id, request.clone())
+        },
+    );
+    response.send(proto::Ack {})?;
+    Ok(())
+}
 
-    fn deref(&self) -> &Self::Target {
-        &*self.guard
-    }
+async fn update_buffer_file(request: proto::UpdateBufferFile, session: Session) -> Result<()> {
+    let project_id = ProjectId::from_proto(request.project_id);
+    let project_connection_ids = session
+        .db()
+        .await
+        .project_connection_ids(project_id, session.connection_id)
+        .await?;
+
+    broadcast(
+        session.connection_id,
+        project_connection_ids.iter().copied(),
+        |connection_id| {
+            session
+                .peer
+                .forward_send(session.connection_id, connection_id, request.clone())
+        },
+    );
+    Ok(())
 }
 
-impl<'a> DerefMut for StoreGuard<'a> {
-    fn deref_mut(&mut self) -> &mut Self::Target {
-        &mut *self.guard
-    }
+async fn buffer_reloaded(request: proto::BufferReloaded, session: Session) -> Result<()> {
+    let project_id = ProjectId::from_proto(request.project_id);
+    let project_connection_ids = session
+        .db()
+        .await
+        .project_connection_ids(project_id, session.connection_id)
+        .await?;
+    broadcast(
+        session.connection_id,
+        project_connection_ids.iter().copied(),
+        |connection_id| {
+            session
+                .peer
+                .forward_send(session.connection_id, connection_id, request.clone())
+        },
+    );
+    Ok(())
 }
 
-impl<'a> Drop for StoreGuard<'a> {
-    fn drop(&mut self) {
-        #[cfg(test)]
-        self.check_invariants();
-    }
+async fn buffer_saved(request: proto::BufferSaved, session: Session) -> Result<()> {
+    let project_id = ProjectId::from_proto(request.project_id);
+    let project_connection_ids = session
+        .db()
+        .await
+        .project_connection_ids(project_id, session.connection_id)
+        .await?;
+    broadcast(
+        session.connection_id,
+        project_connection_ids.iter().copied(),
+        |connection_id| {
+            session
+                .peer
+                .forward_send(session.connection_id, connection_id, request.clone())
+        },
+    );
+    Ok(())
 }
 
-impl Executor for RealExecutor {
-    type Sleep = Sleep;
+async fn follow(
+    request: proto::Follow,
+    response: Response<proto::Follow>,
+    session: Session,
+) -> Result<()> {
+    let project_id = ProjectId::from_proto(request.project_id);
+    let leader_id = ConnectionId(request.leader_id);
+    let follower_id = session.connection_id;
+    {
+        let project_connection_ids = session
+            .db()
+            .await
+            .project_connection_ids(project_id, session.connection_id)
+            .await?;
 
-    fn spawn_detached<F: 'static + Send + Future<Output = ()>>(&self, future: F) {
-        tokio::task::spawn(future);
+        if !project_connection_ids.contains(&leader_id) {
+            Err(anyhow!("no such peer"))?;
+        }
     }
 
-    fn sleep(&self, duration: Duration) -> Self::Sleep {
-        tokio::time::sleep(duration)
+    let mut response_payload = session
+        .peer
+        .forward_request(session.connection_id, leader_id, request)
+        .await?;
+    response_payload
+        .views
+        .retain(|view| view.leader_id != Some(follower_id.0));
+    response.send(response_payload)?;
+    Ok(())
+}
+
+async fn unfollow(request: proto::Unfollow, session: Session) -> Result<()> {
+    let project_id = ProjectId::from_proto(request.project_id);
+    let leader_id = ConnectionId(request.leader_id);
+    let project_connection_ids = session
+        .db()
+        .await
+        .project_connection_ids(project_id, session.connection_id)
+        .await?;
+    if !project_connection_ids.contains(&leader_id) {
+        Err(anyhow!("no such peer"))?;
     }
+    session
+        .peer
+        .forward_send(session.connection_id, leader_id, request)?;
+    Ok(())
 }
 
-fn broadcast<F>(
-    sender_id: ConnectionId,
-    receiver_ids: impl IntoIterator<Item = ConnectionId>,
-    mut f: F,
-) where
-    F: FnMut(ConnectionId) -> anyhow::Result<()>,
-{
-    for receiver_id in receiver_ids {
-        if receiver_id != sender_id {
-            f(receiver_id).trace_err();
+async fn update_followers(request: proto::UpdateFollowers, session: Session) -> Result<()> {
+    let project_id = ProjectId::from_proto(request.project_id);
+    let project_connection_ids = session
+        .db()
+        .await
+        .project_connection_ids(project_id, session.connection_id)
+        .await?;
+
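+    // Determine which connection is the leader so the update isn't echoed back to it.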
+    let leader_id = request.variant.as_ref().and_then(|variant| match variant {
+        proto::update_followers::Variant::CreateView(payload) => payload.leader_id,
+        proto::update_followers::Variant::UpdateView(payload) => payload.leader_id,
+        proto::update_followers::Variant::UpdateActiveView(payload) => payload.leader_id,
+    });
+    for follower_id in &request.follower_ids {
+        let follower_id = ConnectionId(*follower_id);
+        if project_connection_ids.contains(&follower_id) && Some(follower_id.0) != leader_id {
+            session
+                .peer
+                .forward_send(session.connection_id, follower_id, request.clone())?;
         }
     }
+    Ok(())
 }
 
-lazy_static! {
-    static ref ZED_PROTOCOL_VERSION: HeaderName = HeaderName::from_static("x-zed-protocol-version");
+async fn get_users(
+    request: proto::GetUsers,
+    response: Response<proto::GetUsers>,
+    session: Session,
+) -> Result<()> {
+    let user_ids = request
+        .user_ids
+        .into_iter()
+        .map(UserId::from_proto)
+        .collect();
+    let users = session
+        .db()
+        .await
+        .get_users_by_ids(user_ids)
+        .await?
+        .into_iter()
+        .map(|user| proto::User {
+            id: user.id.to_proto(),
+            avatar_url: format!("https://github.com/{}.png?size=128", user.github_login),
+            github_login: user.github_login,
+        })
+        .collect();
+    response.send(proto::UsersResponse { users })?;
+    Ok(())
 }
 
-pub struct ProtocolVersion(u32);
+async fn fuzzy_search_users(
+    request: proto::FuzzySearchUsers,
+    response: Response<proto::FuzzySearchUsers>,
+    session: Session,
+) -> Result<()> {
+    let query = request.query;
+    let users = match query.len() {
+        0 => vec![],
+        1 | 2 => session
+            .db()
+            .await
+            .get_user_by_github_account(&query, None)
+            .await?
+            .into_iter()
+            .collect(),
+        _ => session.db().await.fuzzy_search_users(&query, 10).await?,
+    };
+    let users = users
+        .into_iter()
+        .filter(|user| user.id != session.user_id)
+        .map(|user| proto::User {
+            id: user.id.to_proto(),
+            avatar_url: format!("https://github.com/{}.png?size=128", user.github_login),
+            github_login: user.github_login,
+        })
+        .collect();
+    response.send(proto::UsersResponse { users })?;
+    Ok(())
+}
 
-impl Header for ProtocolVersion {
-    fn name() -> &'static HeaderName {
-        &ZED_PROTOCOL_VERSION
+async fn request_contact(
+    request: proto::RequestContact,
+    response: Response<proto::RequestContact>,
+    session: Session,
+) -> Result<()> {
+    let requester_id = session.user_id;
+    let responder_id = UserId::from_proto(request.responder_id);
+    if requester_id == responder_id {
+        return Err(anyhow!("cannot add yourself as a contact"))?;
     }
 
-    fn decode<'i, I>(values: &mut I) -> Result<Self, axum::headers::Error>
-    where
-        Self: Sized,
-        I: Iterator<Item = &'i axum::http::HeaderValue>,
+    session
+        .db()
+        .await
+        .send_contact_request(requester_id, responder_id)
+        .await?;
+
+    // Update outgoing contact requests of requester
+    let mut update = proto::UpdateContacts::default();
+    update.outgoing_requests.push(responder_id.to_proto());
+    for connection_id in session
+        .connection_pool()
+        .await
+        .user_connection_ids(requester_id)
     {
-        let version = values
-            .next()
-            .ok_or_else(axum::headers::Error::invalid)?
-            .to_str()
-            .map_err(|_| axum::headers::Error::invalid())?
-            .parse()
-            .map_err(|_| axum::headers::Error::invalid())?;
-        Ok(Self(version))
+        session.peer.send(connection_id, update.clone())?;
     }
 
-    fn encode<E: Extend<axum::http::HeaderValue>>(&self, values: &mut E) {
-        values.extend([self.0.to_string().parse().unwrap()]);
+    // Update incoming contact requests of responder
+    let mut update = proto::UpdateContacts::default();
+    update
+        .incoming_requests
+        .push(proto::IncomingContactRequest {
+            requester_id: requester_id.to_proto(),
+            should_notify: true,
+        });
+    for connection_id in session
+        .connection_pool()
+        .await
+        .user_connection_ids(responder_id)
+    {
+        session.peer.send(connection_id, update.clone())?;
     }
-}
 
-pub fn routes(server: Arc<Server>) -> Router<Body> {
-    Router::new()
-        .route("/rpc", get(handle_websocket_request))
-        .layer(
-            ServiceBuilder::new()
-                .layer(Extension(server.app_state.clone()))
-                .layer(middleware::from_fn(auth::validate_header)),
-        )
-        .route("/metrics", get(handle_metrics))
-        .layer(Extension(server))
+    response.send(proto::Ack {})?;
+    Ok(())
 }
 
-pub async fn handle_websocket_request(
-    TypedHeader(ProtocolVersion(protocol_version)): TypedHeader<ProtocolVersion>,
-    ConnectInfo(socket_address): ConnectInfo<SocketAddr>,
-    Extension(server): Extension<Arc<Server>>,
-    Extension(user): Extension<User>,
-    ws: WebSocketUpgrade,
-) -> axum::response::Response {
-    if protocol_version != rpc::PROTOCOL_VERSION {
-        return (
-            StatusCode::UPGRADE_REQUIRED,
-            "client must be upgraded".to_string(),
-        )
-            .into_response();
-    }
-    let socket_address = socket_address.to_string();
-    ws.on_upgrade(move |socket| {
-        use util::ResultExt;
-        let socket = socket
-            .map_ok(to_tungstenite_message)
-            .err_into()
-            .with(|message| async move { Ok(to_axum_message(message)) });
-        let connection = Connection::new(Box::pin(socket));
-        async move {
-            server
-                .handle_connection(connection, socket_address, user, None, RealExecutor)
-                .await
-                .log_err();
+async fn respond_to_contact_request(
+    request: proto::RespondToContactRequest,
+    response: Response<proto::RespondToContactRequest>,
+    session: Session,
+) -> Result<()> {
+    let responder_id = session.user_id;
+    let requester_id = UserId::from_proto(request.requester_id);
+    let db = session.db().await;
+    if request.response == proto::ContactRequestResponse::Dismiss as i32 {
+        db.dismiss_contact_notification(responder_id, requester_id)
+            .await?;
+    } else {
+        let accept = request.response == proto::ContactRequestResponse::Accept as i32;
+
+        db.respond_to_contact_request(responder_id, requester_id, accept)
+            .await?;
+        let requester_busy = db.is_user_busy(requester_id).await?;
+        let responder_busy = db.is_user_busy(responder_id).await?;
+
+        let pool = session.connection_pool().await;
+        // Update responder with new contact
+        let mut update = proto::UpdateContacts::default();
+        if accept {
+            update
+                .contacts
+                .push(contact_for_user(requester_id, false, requester_busy, &pool));
         }
-    })
+        update
+            .remove_incoming_requests
+            .push(requester_id.to_proto());
+        for connection_id in pool.user_connection_ids(responder_id) {
+            session.peer.send(connection_id, update.clone())?;
+        }
+
+        // Update requester with new contact
+        let mut update = proto::UpdateContacts::default();
+        if accept {
+            update
+                .contacts
+                .push(contact_for_user(responder_id, true, responder_busy, &pool));
+        }
+        update
+            .remove_outgoing_requests
+            .push(responder_id.to_proto());
+        for connection_id in pool.user_connection_ids(requester_id) {
+            session.peer.send(connection_id, update.clone())?;
+        }
+    }
+
+    response.send(proto::Ack {})?;
+    Ok(())
 }
 
-pub async fn handle_metrics(Extension(server): Extension<Arc<Server>>) -> axum::response::Response {
-    let metrics = server.store().await.metrics();
-    METRIC_CONNECTIONS.set(metrics.connections as _);
-    METRIC_SHARED_PROJECTS.set(metrics.shared_projects as _);
+async fn remove_contact(
+    request: proto::RemoveContact,
+    response: Response<proto::RemoveContact>,
+    session: Session,
+) -> Result<()> {
+    let requester_id = session.user_id;
+    let responder_id = UserId::from_proto(request.user_id);
+    let db = session.db().await;
+    db.remove_contact(requester_id, responder_id).await?;
+
+    let pool = session.connection_pool().await;
+    // Update outgoing contact requests of requester
+    let mut update = proto::UpdateContacts::default();
+    update
+        .remove_outgoing_requests
+        .push(responder_id.to_proto());
+    for connection_id in pool.user_connection_ids(requester_id) {
+        session.peer.send(connection_id, update.clone())?;
+    }
 
-    let encoder = prometheus::TextEncoder::new();
-    let metric_families = prometheus::gather();
-    match encoder.encode_to_string(&metric_families) {
-        Ok(string) => (StatusCode::OK, string).into_response(),
-        Err(error) => (
-            StatusCode::INTERNAL_SERVER_ERROR,
-            format!("failed to encode metrics {:?}", error),
-        )
-            .into_response(),
+    // Update incoming contact requests of responder
+    let mut update = proto::UpdateContacts::default();
+    update
+        .remove_incoming_requests
+        .push(requester_id.to_proto());
+    for connection_id in pool.user_connection_ids(responder_id) {
+        session.peer.send(connection_id, update.clone())?;
     }
+
+    response.send(proto::Ack {})?;
+    Ok(())
+}
+
+async fn update_diff_base(request: proto::UpdateDiffBase, session: Session) -> Result<()> {
+    let project_id = ProjectId::from_proto(request.project_id);
+    let project_connection_ids = session
+        .db()
+        .await
+        .project_connection_ids(project_id, session.connection_id)
+        .await?;
+    broadcast(
+        session.connection_id,
+        project_connection_ids.iter().copied(),
+        |connection_id| {
+            session
+                .peer
+                .forward_send(session.connection_id, connection_id, request.clone())
+        },
+    );
+    Ok(())
+}
+
+async fn get_private_user_info(
+    _request: proto::GetPrivateUserInfo,
+    response: Response<proto::GetPrivateUserInfo>,
+    session: Session,
+) -> Result<()> {
+    let metrics_id = session
+        .db()
+        .await
+        .get_user_metrics_id(session.user_id)
+        .await?;
+    let user = session
+        .db()
+        .await
+        .get_user_by_id(session.user_id)
+        .await?
+        .ok_or_else(|| anyhow!("user not found"))?;
+    response.send(proto::GetPrivateUserInfoResponse {
+        metrics_id,
+        staff: user.admin,
+    })?;
+    Ok(())
 }
 
 fn to_axum_message(message: TungsteniteMessage) -> AxumMessage {

crates/collab/src/rpc/connection_pool.rs 🔗

@@ -0,0 +1,93 @@
+use crate::db::UserId;
+use anyhow::{anyhow, Result};
+use collections::{BTreeMap, HashSet};
+use rpc::ConnectionId;
+use serde::Serialize;
+use tracing::instrument;
+
+#[derive(Default, Serialize)]
+pub struct ConnectionPool {
+    connections: BTreeMap<ConnectionId, Connection>,
+    connected_users: BTreeMap<UserId, ConnectedUser>,
+}
+
+#[derive(Default, Serialize)]
+struct ConnectedUser {
+    connection_ids: HashSet<ConnectionId>,
+}
+
+#[derive(Serialize)]
+pub struct Connection {
+    pub user_id: UserId,
+    pub admin: bool,
+}
+
+impl ConnectionPool {
+    #[instrument(skip(self))]
+    pub fn add_connection(&mut self, connection_id: ConnectionId, user_id: UserId, admin: bool) {
+        self.connections
+            .insert(connection_id, Connection { user_id, admin });
+        let connected_user = self.connected_users.entry(user_id).or_default();
+        connected_user.connection_ids.insert(connection_id);
+    }
+
+    #[instrument(skip(self))]
+    pub fn remove_connection(&mut self, connection_id: ConnectionId) -> Result<()> {
+        let connection = self
+            .connections
+            .get_mut(&connection_id)
+            .ok_or_else(|| anyhow!("no such connection"))?;
+
+        let user_id = connection.user_id;
+        let connected_user = self.connected_users.get_mut(&user_id).unwrap();
+        connected_user.connection_ids.remove(&connection_id);
+        if connected_user.connection_ids.is_empty() {
+            self.connected_users.remove(&user_id);
+        }
+        self.connections.remove(&connection_id).unwrap();
+        Ok(())
+    }
+
+    pub fn connections(&self) -> impl Iterator<Item = &Connection> {
+        self.connections.values()
+    }
+
+    pub fn user_connection_ids(&self, user_id: UserId) -> impl Iterator<Item = ConnectionId> + '_ {
+        self.connected_users
+            .get(&user_id)
+            .into_iter()
+            .map(|state| &state.connection_ids)
+            .flatten()
+            .copied()
+    }
+
+    pub fn is_user_online(&self, user_id: UserId) -> bool {
+        !self
+            .connected_users
+            .get(&user_id)
+            .unwrap_or(&Default::default())
+            .connection_ids
+            .is_empty()
+    }
+
+    #[cfg(test)]
+    pub fn check_invariants(&self) {
+        for (connection_id, connection) in &self.connections {
+            assert!(self
+                .connected_users
+                .get(&connection.user_id)
+                .unwrap()
+                .connection_ids
+                .contains(connection_id));
+        }
+
+        for (user_id, state) in &self.connected_users {
+            for connection_id in &state.connection_ids {
+                assert_eq!(
+                    self.connections.get(connection_id).unwrap().user_id,
+                    *user_id
+                );
+            }
+        }
+    }
+}

crates/collab/src/rpc/store.rs 🔗

@@ -1,1182 +0,0 @@
-use crate::db::{self, ProjectId, UserId};
-use anyhow::{anyhow, Result};
-use collections::{btree_map, BTreeMap, BTreeSet, HashMap, HashSet};
-use nanoid::nanoid;
-use rpc::{proto, ConnectionId};
-use serde::Serialize;
-use std::{borrow::Cow, mem, path::PathBuf, str};
-use tracing::instrument;
-use util::post_inc;
-
-pub type RoomId = u64;
-
-#[derive(Default, Serialize)]
-pub struct Store {
-    connections: BTreeMap<ConnectionId, ConnectionState>,
-    connected_users: BTreeMap<UserId, ConnectedUser>,
-    next_room_id: RoomId,
-    rooms: BTreeMap<RoomId, proto::Room>,
-    projects: BTreeMap<ProjectId, Project>,
-}
-
-#[derive(Default, Serialize)]
-struct ConnectedUser {
-    connection_ids: HashSet<ConnectionId>,
-    active_call: Option<Call>,
-}
-
-#[derive(Serialize)]
-struct ConnectionState {
-    user_id: UserId,
-    admin: bool,
-    projects: BTreeSet<ProjectId>,
-}
-
-#[derive(Copy, Clone, Eq, PartialEq, Serialize)]
-pub struct Call {
-    pub caller_user_id: UserId,
-    pub room_id: RoomId,
-    pub connection_id: Option<ConnectionId>,
-    pub initial_project_id: Option<ProjectId>,
-}
-
-#[derive(Serialize)]
-pub struct Project {
-    pub id: ProjectId,
-    pub room_id: RoomId,
-    pub host_connection_id: ConnectionId,
-    pub host: Collaborator,
-    pub guests: HashMap<ConnectionId, Collaborator>,
-    pub active_replica_ids: HashSet<ReplicaId>,
-    pub worktrees: BTreeMap<u64, Worktree>,
-    pub language_servers: Vec<proto::LanguageServer>,
-}
-
-#[derive(Serialize)]
-pub struct Collaborator {
-    pub replica_id: ReplicaId,
-    pub user_id: UserId,
-    pub admin: bool,
-}
-
-#[derive(Default, Serialize)]
-pub struct Worktree {
-    pub abs_path: Vec<u8>,
-    pub root_name: String,
-    pub visible: bool,
-    #[serde(skip)]
-    pub entries: BTreeMap<u64, proto::Entry>,
-    #[serde(skip)]
-    pub diagnostic_summaries: BTreeMap<PathBuf, proto::DiagnosticSummary>,
-    pub scan_id: u64,
-    pub is_complete: bool,
-}
-
-pub type ReplicaId = u16;
-
-#[derive(Default)]
-pub struct RemovedConnectionState<'a> {
-    pub user_id: UserId,
-    pub hosted_projects: Vec<Project>,
-    pub guest_projects: Vec<LeftProject>,
-    pub contact_ids: HashSet<UserId>,
-    pub room: Option<Cow<'a, proto::Room>>,
-    pub canceled_call_connection_ids: Vec<ConnectionId>,
-}
-
-pub struct LeftProject {
-    pub id: ProjectId,
-    pub host_user_id: UserId,
-    pub host_connection_id: ConnectionId,
-    pub connection_ids: Vec<ConnectionId>,
-    pub remove_collaborator: bool,
-}
-
-pub struct LeftRoom<'a> {
-    pub room: Cow<'a, proto::Room>,
-    pub unshared_projects: Vec<Project>,
-    pub left_projects: Vec<LeftProject>,
-    pub canceled_call_connection_ids: Vec<ConnectionId>,
-}
-
-#[derive(Copy, Clone)]
-pub struct Metrics {
-    pub connections: usize,
-    pub shared_projects: usize,
-}
-
-impl Store {
-    pub fn metrics(&self) -> Metrics {
-        let connections = self.connections.values().filter(|c| !c.admin).count();
-        let mut shared_projects = 0;
-        for project in self.projects.values() {
-            if let Some(connection) = self.connections.get(&project.host_connection_id) {
-                if !connection.admin {
-                    shared_projects += 1;
-                }
-            }
-        }
-
-        Metrics {
-            connections,
-            shared_projects,
-        }
-    }
-
-    #[instrument(skip(self))]
-    pub fn add_connection(
-        &mut self,
-        connection_id: ConnectionId,
-        user_id: UserId,
-        admin: bool,
-    ) -> Option<proto::IncomingCall> {
-        self.connections.insert(
-            connection_id,
-            ConnectionState {
-                user_id,
-                admin,
-                projects: Default::default(),
-            },
-        );
-        let connected_user = self.connected_users.entry(user_id).or_default();
-        connected_user.connection_ids.insert(connection_id);
-        if let Some(active_call) = connected_user.active_call {
-            if active_call.connection_id.is_some() {
-                None
-            } else {
-                let room = self.room(active_call.room_id)?;
-                Some(proto::IncomingCall {
-                    room_id: active_call.room_id,
-                    caller_user_id: active_call.caller_user_id.to_proto(),
-                    participant_user_ids: room
-                        .participants
-                        .iter()
-                        .map(|participant| participant.user_id)
-                        .collect(),
-                    initial_project: active_call
-                        .initial_project_id
-                        .and_then(|id| Self::build_participant_project(id, &self.projects)),
-                })
-            }
-        } else {
-            None
-        }
-    }
-
-    #[instrument(skip(self))]
-    pub fn remove_connection(
-        &mut self,
-        connection_id: ConnectionId,
-    ) -> Result<RemovedConnectionState> {
-        let connection = self
-            .connections
-            .get_mut(&connection_id)
-            .ok_or_else(|| anyhow!("no such connection"))?;
-
-        let user_id = connection.user_id;
-
-        let mut result = RemovedConnectionState {
-            user_id,
-            ..Default::default()
-        };
-
-        let connected_user = self.connected_users.get(&user_id).unwrap();
-        if let Some(active_call) = connected_user.active_call.as_ref() {
-            let room_id = active_call.room_id;
-            if active_call.connection_id == Some(connection_id) {
-                let left_room = self.leave_room(room_id, connection_id)?;
-                result.hosted_projects = left_room.unshared_projects;
-                result.guest_projects = left_room.left_projects;
-                result.room = Some(Cow::Owned(left_room.room.into_owned()));
-                result.canceled_call_connection_ids = left_room.canceled_call_connection_ids;
-            } else if connected_user.connection_ids.len() == 1 {
-                let (room, _) = self.decline_call(room_id, connection_id)?;
-                result.room = Some(Cow::Owned(room.clone()));
-            }
-        }
-
-        let connected_user = self.connected_users.get_mut(&user_id).unwrap();
-        connected_user.connection_ids.remove(&connection_id);
-        if connected_user.connection_ids.is_empty() {
-            self.connected_users.remove(&user_id);
-        }
-        self.connections.remove(&connection_id).unwrap();
-
-        Ok(result)
-    }
-
-    pub fn user_id_for_connection(&self, connection_id: ConnectionId) -> Result<UserId> {
-        Ok(self
-            .connections
-            .get(&connection_id)
-            .ok_or_else(|| anyhow!("unknown connection"))?
-            .user_id)
-    }
-
-    pub fn connection_ids_for_user(
-        &self,
-        user_id: UserId,
-    ) -> impl Iterator<Item = ConnectionId> + '_ {
-        self.connected_users
-            .get(&user_id)
-            .into_iter()
-            .map(|state| &state.connection_ids)
-            .flatten()
-            .copied()
-    }
-
-    pub fn is_user_online(&self, user_id: UserId) -> bool {
-        !self
-            .connected_users
-            .get(&user_id)
-            .unwrap_or(&Default::default())
-            .connection_ids
-            .is_empty()
-    }
-
-    fn is_user_busy(&self, user_id: UserId) -> bool {
-        self.connected_users
-            .get(&user_id)
-            .unwrap_or(&Default::default())
-            .active_call
-            .is_some()
-    }
-
-    pub fn build_initial_contacts_update(
-        &self,
-        contacts: Vec<db::Contact>,
-    ) -> proto::UpdateContacts {
-        let mut update = proto::UpdateContacts::default();
-
-        for contact in contacts {
-            match contact {
-                db::Contact::Accepted {
-                    user_id,
-                    should_notify,
-                } => {
-                    update
-                        .contacts
-                        .push(self.contact_for_user(user_id, should_notify));
-                }
-                db::Contact::Outgoing { user_id } => {
-                    update.outgoing_requests.push(user_id.to_proto())
-                }
-                db::Contact::Incoming {
-                    user_id,
-                    should_notify,
-                } => update
-                    .incoming_requests
-                    .push(proto::IncomingContactRequest {
-                        requester_id: user_id.to_proto(),
-                        should_notify,
-                    }),
-            }
-        }
-
-        update
-    }
-
-    pub fn contact_for_user(&self, user_id: UserId, should_notify: bool) -> proto::Contact {
-        proto::Contact {
-            user_id: user_id.to_proto(),
-            online: self.is_user_online(user_id),
-            busy: self.is_user_busy(user_id),
-            should_notify,
-        }
-    }
-
-    pub fn create_room(&mut self, creator_connection_id: ConnectionId) -> Result<&proto::Room> {
-        let connection = self
-            .connections
-            .get_mut(&creator_connection_id)
-            .ok_or_else(|| anyhow!("no such connection"))?;
-        let connected_user = self
-            .connected_users
-            .get_mut(&connection.user_id)
-            .ok_or_else(|| anyhow!("no such connection"))?;
-        anyhow::ensure!(
-            connected_user.active_call.is_none(),
-            "can't create a room with an active call"
-        );
-
-        let room_id = post_inc(&mut self.next_room_id);
-        let room = proto::Room {
-            id: room_id,
-            participants: vec![proto::Participant {
-                user_id: connection.user_id.to_proto(),
-                peer_id: creator_connection_id.0,
-                projects: Default::default(),
-                location: Some(proto::ParticipantLocation {
-                    variant: Some(proto::participant_location::Variant::External(
-                        proto::participant_location::External {},
-                    )),
-                }),
-            }],
-            pending_participant_user_ids: Default::default(),
-            live_kit_room: nanoid!(30),
-        };
-
-        self.rooms.insert(room_id, room);
-        connected_user.active_call = Some(Call {
-            caller_user_id: connection.user_id,
-            room_id,
-            connection_id: Some(creator_connection_id),
-            initial_project_id: None,
-        });
-        Ok(self.rooms.get(&room_id).unwrap())
-    }
-
-    pub fn join_room(
-        &mut self,
-        room_id: RoomId,
-        connection_id: ConnectionId,
-    ) -> Result<(&proto::Room, Vec<ConnectionId>)> {
-        let connection = self
-            .connections
-            .get_mut(&connection_id)
-            .ok_or_else(|| anyhow!("no such connection"))?;
-        let user_id = connection.user_id;
-        let recipient_connection_ids = self.connection_ids_for_user(user_id).collect::<Vec<_>>();
-
-        let connected_user = self
-            .connected_users
-            .get_mut(&user_id)
-            .ok_or_else(|| anyhow!("no such connection"))?;
-        let active_call = connected_user
-            .active_call
-            .as_mut()
-            .ok_or_else(|| anyhow!("not being called"))?;
-        anyhow::ensure!(
-            active_call.room_id == room_id && active_call.connection_id.is_none(),
-            "not being called on this room"
-        );
-
-        let room = self
-            .rooms
-            .get_mut(&room_id)
-            .ok_or_else(|| anyhow!("no such room"))?;
-        anyhow::ensure!(
-            room.pending_participant_user_ids
-                .contains(&user_id.to_proto()),
-            anyhow!("no such room")
-        );
-        room.pending_participant_user_ids
-            .retain(|pending| *pending != user_id.to_proto());
-        room.participants.push(proto::Participant {
-            user_id: user_id.to_proto(),
-            peer_id: connection_id.0,
-            projects: Default::default(),
-            location: Some(proto::ParticipantLocation {
-                variant: Some(proto::participant_location::Variant::External(
-                    proto::participant_location::External {},
-                )),
-            }),
-        });
-        active_call.connection_id = Some(connection_id);
-
-        Ok((room, recipient_connection_ids))
-    }
-
-    pub fn leave_room(&mut self, room_id: RoomId, connection_id: ConnectionId) -> Result<LeftRoom> {
-        let connection = self
-            .connections
-            .get_mut(&connection_id)
-            .ok_or_else(|| anyhow!("no such connection"))?;
-        let user_id = connection.user_id;
-
-        let connected_user = self
-            .connected_users
-            .get(&user_id)
-            .ok_or_else(|| anyhow!("no such connection"))?;
-        anyhow::ensure!(
-            connected_user
-                .active_call
-                .map_or(false, |call| call.room_id == room_id
-                    && call.connection_id == Some(connection_id)),
-            "cannot leave a room before joining it"
-        );
-
-        // Given that users can only join one room at a time, we can safely unshare
-        // and leave all projects associated with the connection.
-        let mut unshared_projects = Vec::new();
-        let mut left_projects = Vec::new();
-        for project_id in connection.projects.clone() {
-            if let Ok((_, project)) = self.unshare_project(project_id, connection_id) {
-                unshared_projects.push(project);
-            } else if let Ok(project) = self.leave_project(project_id, connection_id) {
-                left_projects.push(project);
-            }
-        }
-        self.connected_users.get_mut(&user_id).unwrap().active_call = None;
-
-        let room = self
-            .rooms
-            .get_mut(&room_id)
-            .ok_or_else(|| anyhow!("no such room"))?;
-        room.participants
-            .retain(|participant| participant.peer_id != connection_id.0);
-
-        let mut canceled_call_connection_ids = Vec::new();
-        room.pending_participant_user_ids
-            .retain(|pending_participant_user_id| {
-                if let Some(connected_user) = self
-                    .connected_users
-                    .get_mut(&UserId::from_proto(*pending_participant_user_id))
-                {
-                    if let Some(call) = connected_user.active_call.as_ref() {
-                        if call.caller_user_id == user_id {
-                            connected_user.active_call.take();
-                            canceled_call_connection_ids
-                                .extend(connected_user.connection_ids.iter().copied());
-                            false
-                        } else {
-                            true
-                        }
-                    } else {
-                        true
-                    }
-                } else {
-                    true
-                }
-            });
-
-        let room = if room.participants.is_empty() {
-            Cow::Owned(self.rooms.remove(&room_id).unwrap())
-        } else {
-            Cow::Borrowed(self.rooms.get(&room_id).unwrap())
-        };
-
-        Ok(LeftRoom {
-            room,
-            unshared_projects,
-            left_projects,
-            canceled_call_connection_ids,
-        })
-    }
-
-    pub fn room(&self, room_id: RoomId) -> Option<&proto::Room> {
-        self.rooms.get(&room_id)
-    }
-
-    pub fn rooms(&self) -> &BTreeMap<RoomId, proto::Room> {
-        &self.rooms
-    }
-
-    pub fn call(
-        &mut self,
-        room_id: RoomId,
-        recipient_user_id: UserId,
-        initial_project_id: Option<ProjectId>,
-        from_connection_id: ConnectionId,
-    ) -> Result<(&proto::Room, Vec<ConnectionId>, proto::IncomingCall)> {
-        let caller_user_id = self.user_id_for_connection(from_connection_id)?;
-
-        let recipient_connection_ids = self
-            .connection_ids_for_user(recipient_user_id)
-            .collect::<Vec<_>>();
-        let mut recipient = self
-            .connected_users
-            .get_mut(&recipient_user_id)
-            .ok_or_else(|| anyhow!("no such connection"))?;
-        anyhow::ensure!(
-            recipient.active_call.is_none(),
-            "recipient is already on another call"
-        );
-
-        let room = self
-            .rooms
-            .get_mut(&room_id)
-            .ok_or_else(|| anyhow!("no such room"))?;
-        anyhow::ensure!(
-            room.participants
-                .iter()
-                .any(|participant| participant.peer_id == from_connection_id.0),
-            "no such room"
-        );
-        anyhow::ensure!(
-            room.pending_participant_user_ids
-                .iter()
-                .all(|user_id| UserId::from_proto(*user_id) != recipient_user_id),
-            "cannot call the same user more than once"
-        );
-        room.pending_participant_user_ids
-            .push(recipient_user_id.to_proto());
-
-        if let Some(initial_project_id) = initial_project_id {
-            let project = self
-                .projects
-                .get(&initial_project_id)
-                .ok_or_else(|| anyhow!("no such project"))?;
-            anyhow::ensure!(project.room_id == room_id, "no such project");
-        }
-
-        recipient.active_call = Some(Call {
-            caller_user_id,
-            room_id,
-            connection_id: None,
-            initial_project_id,
-        });
-
-        Ok((
-            room,
-            recipient_connection_ids,
-            proto::IncomingCall {
-                room_id,
-                caller_user_id: caller_user_id.to_proto(),
-                participant_user_ids: room
-                    .participants
-                    .iter()
-                    .map(|participant| participant.user_id)
-                    .collect(),
-                initial_project: initial_project_id
-                    .and_then(|id| Self::build_participant_project(id, &self.projects)),
-            },
-        ))
-    }
-
-    pub fn call_failed(&mut self, room_id: RoomId, to_user_id: UserId) -> Result<&proto::Room> {
-        let mut recipient = self
-            .connected_users
-            .get_mut(&to_user_id)
-            .ok_or_else(|| anyhow!("no such connection"))?;
-        anyhow::ensure!(recipient
-            .active_call
-            .map_or(false, |call| call.room_id == room_id
-                && call.connection_id.is_none()));
-        recipient.active_call = None;
-        let room = self
-            .rooms
-            .get_mut(&room_id)
-            .ok_or_else(|| anyhow!("no such room"))?;
-        room.pending_participant_user_ids
-            .retain(|user_id| UserId::from_proto(*user_id) != to_user_id);
-        Ok(room)
-    }
-
-    pub fn cancel_call(
-        &mut self,
-        room_id: RoomId,
-        recipient_user_id: UserId,
-        canceller_connection_id: ConnectionId,
-    ) -> Result<(&proto::Room, HashSet<ConnectionId>)> {
-        let canceller_user_id = self.user_id_for_connection(canceller_connection_id)?;
-        let canceller = self
-            .connected_users
-            .get(&canceller_user_id)
-            .ok_or_else(|| anyhow!("no such connection"))?;
-        let recipient = self
-            .connected_users
-            .get(&recipient_user_id)
-            .ok_or_else(|| anyhow!("no such connection"))?;
-        let canceller_active_call = canceller
-            .active_call
-            .as_ref()
-            .ok_or_else(|| anyhow!("no active call"))?;
-        let recipient_active_call = recipient
-            .active_call
-            .as_ref()
-            .ok_or_else(|| anyhow!("no active call for recipient"))?;
-
-        anyhow::ensure!(
-            canceller_active_call.room_id == room_id,
-            "users are on different calls"
-        );
-        anyhow::ensure!(
-            recipient_active_call.room_id == room_id,
-            "users are on different calls"
-        );
-        anyhow::ensure!(
-            recipient_active_call.connection_id.is_none(),
-            "recipient has already answered"
-        );
-        let room_id = recipient_active_call.room_id;
-        let room = self
-            .rooms
-            .get_mut(&room_id)
-            .ok_or_else(|| anyhow!("no such room"))?;
-        room.pending_participant_user_ids
-            .retain(|user_id| UserId::from_proto(*user_id) != recipient_user_id);
-
-        let recipient = self.connected_users.get_mut(&recipient_user_id).unwrap();
-        recipient.active_call.take();
-
-        Ok((room, recipient.connection_ids.clone()))
-    }
-
-    pub fn decline_call(
-        &mut self,
-        room_id: RoomId,
-        recipient_connection_id: ConnectionId,
-    ) -> Result<(&proto::Room, Vec<ConnectionId>)> {
-        let recipient_user_id = self.user_id_for_connection(recipient_connection_id)?;
-        let recipient = self
-            .connected_users
-            .get_mut(&recipient_user_id)
-            .ok_or_else(|| anyhow!("no such connection"))?;
-        if let Some(active_call) = recipient.active_call {
-            anyhow::ensure!(active_call.room_id == room_id, "no such room");
-            anyhow::ensure!(
-                active_call.connection_id.is_none(),
-                "cannot decline a call after joining room"
-            );
-            recipient.active_call.take();
-            let recipient_connection_ids = self
-                .connection_ids_for_user(recipient_user_id)
-                .collect::<Vec<_>>();
-            let room = self
-                .rooms
-                .get_mut(&active_call.room_id)
-                .ok_or_else(|| anyhow!("no such room"))?;
-            room.pending_participant_user_ids
-                .retain(|user_id| UserId::from_proto(*user_id) != recipient_user_id);
-            Ok((room, recipient_connection_ids))
-        } else {
-            Err(anyhow!("user is not being called"))
-        }
-    }
-
-    pub fn update_participant_location(
-        &mut self,
-        room_id: RoomId,
-        location: proto::ParticipantLocation,
-        connection_id: ConnectionId,
-    ) -> Result<&proto::Room> {
-        let room = self
-            .rooms
-            .get_mut(&room_id)
-            .ok_or_else(|| anyhow!("no such room"))?;
-        if let Some(proto::participant_location::Variant::SharedProject(project)) =
-            location.variant.as_ref()
-        {
-            anyhow::ensure!(
-                room.participants
-                    .iter()
-                    .flat_map(|participant| &participant.projects)
-                    .any(|participant_project| participant_project.id == project.id),
-                "no such project"
-            );
-        }
-
-        let participant = room
-            .participants
-            .iter_mut()
-            .find(|participant| participant.peer_id == connection_id.0)
-            .ok_or_else(|| anyhow!("no such room"))?;
-        participant.location = Some(location);
-
-        Ok(room)
-    }
-
-    pub fn share_project(
-        &mut self,
-        room_id: RoomId,
-        project_id: ProjectId,
-        worktrees: Vec<proto::WorktreeMetadata>,
-        host_connection_id: ConnectionId,
-    ) -> Result<&proto::Room> {
-        let connection = self
-            .connections
-            .get_mut(&host_connection_id)
-            .ok_or_else(|| anyhow!("no such connection"))?;
-
-        let room = self
-            .rooms
-            .get_mut(&room_id)
-            .ok_or_else(|| anyhow!("no such room"))?;
-        let participant = room
-            .participants
-            .iter_mut()
-            .find(|participant| participant.peer_id == host_connection_id.0)
-            .ok_or_else(|| anyhow!("no such room"))?;
-
-        connection.projects.insert(project_id);
-        self.projects.insert(
-            project_id,
-            Project {
-                id: project_id,
-                room_id,
-                host_connection_id,
-                host: Collaborator {
-                    user_id: connection.user_id,
-                    replica_id: 0,
-                    admin: connection.admin,
-                },
-                guests: Default::default(),
-                active_replica_ids: Default::default(),
-                worktrees: worktrees
-                    .into_iter()
-                    .map(|worktree| {
-                        (
-                            worktree.id,
-                            Worktree {
-                                root_name: worktree.root_name,
-                                visible: worktree.visible,
-                                abs_path: worktree.abs_path.clone(),
-                                entries: Default::default(),
-                                diagnostic_summaries: Default::default(),
-                                scan_id: Default::default(),
-                                is_complete: Default::default(),
-                            },
-                        )
-                    })
-                    .collect(),
-                language_servers: Default::default(),
-            },
-        );
-
-        participant
-            .projects
-            .extend(Self::build_participant_project(project_id, &self.projects));
-
-        Ok(room)
-    }
-
-    pub fn unshare_project(
-        &mut self,
-        project_id: ProjectId,
-        connection_id: ConnectionId,
-    ) -> Result<(&proto::Room, Project)> {
-        match self.projects.entry(project_id) {
-            btree_map::Entry::Occupied(e) => {
-                if e.get().host_connection_id == connection_id {
-                    let project = e.remove();
-
-                    if let Some(host_connection) = self.connections.get_mut(&connection_id) {
-                        host_connection.projects.remove(&project_id);
-                    }
-
-                    for guest_connection in project.guests.keys() {
-                        if let Some(connection) = self.connections.get_mut(guest_connection) {
-                            connection.projects.remove(&project_id);
-                        }
-                    }
-
-                    let room = self
-                        .rooms
-                        .get_mut(&project.room_id)
-                        .ok_or_else(|| anyhow!("no such room"))?;
-                    let participant = room
-                        .participants
-                        .iter_mut()
-                        .find(|participant| participant.peer_id == connection_id.0)
-                        .ok_or_else(|| anyhow!("no such room"))?;
-                    participant
-                        .projects
-                        .retain(|project| project.id != project_id.to_proto());
-
-                    Ok((room, project))
-                } else {
-                    Err(anyhow!("no such project"))?
-                }
-            }
-            btree_map::Entry::Vacant(_) => Err(anyhow!("no such project"))?,
-        }
-    }
-
-    pub fn update_project(
-        &mut self,
-        project_id: ProjectId,
-        worktrees: &[proto::WorktreeMetadata],
-        connection_id: ConnectionId,
-    ) -> Result<&proto::Room> {
-        let project = self
-            .projects
-            .get_mut(&project_id)
-            .ok_or_else(|| anyhow!("no such project"))?;
-        if project.host_connection_id == connection_id {
-            let mut old_worktrees = mem::take(&mut project.worktrees);
-            for worktree in worktrees {
-                if let Some(old_worktree) = old_worktrees.remove(&worktree.id) {
-                    project.worktrees.insert(worktree.id, old_worktree);
-                } else {
-                    project.worktrees.insert(
-                        worktree.id,
-                        Worktree {
-                            root_name: worktree.root_name.clone(),
-                            visible: worktree.visible,
-                            abs_path: worktree.abs_path.clone(),
-                            entries: Default::default(),
-                            diagnostic_summaries: Default::default(),
-                            scan_id: Default::default(),
-                            is_complete: false,
-                        },
-                    );
-                }
-            }
-
-            let room = self
-                .rooms
-                .get_mut(&project.room_id)
-                .ok_or_else(|| anyhow!("no such room"))?;
-            let participant_project = room
-                .participants
-                .iter_mut()
-                .flat_map(|participant| &mut participant.projects)
-                .find(|project| project.id == project_id.to_proto())
-                .ok_or_else(|| anyhow!("no such project"))?;
-            participant_project.worktree_root_names = worktrees
-                .iter()
-                .filter(|worktree| worktree.visible)
-                .map(|worktree| worktree.root_name.clone())
-                .collect();
-
-            Ok(room)
-        } else {
-            Err(anyhow!("no such project"))?
-        }
-    }
-
-    pub fn update_diagnostic_summary(
-        &mut self,
-        project_id: ProjectId,
-        worktree_id: u64,
-        connection_id: ConnectionId,
-        summary: proto::DiagnosticSummary,
-    ) -> Result<Vec<ConnectionId>> {
-        let project = self
-            .projects
-            .get_mut(&project_id)
-            .ok_or_else(|| anyhow!("no such project"))?;
-        if project.host_connection_id == connection_id {
-            let worktree = project
-                .worktrees
-                .get_mut(&worktree_id)
-                .ok_or_else(|| anyhow!("no such worktree"))?;
-            worktree
-                .diagnostic_summaries
-                .insert(summary.path.clone().into(), summary);
-            return Ok(project.connection_ids());
-        }
-
-        Err(anyhow!("no such worktree"))?
-    }
-
-    pub fn start_language_server(
-        &mut self,
-        project_id: ProjectId,
-        connection_id: ConnectionId,
-        language_server: proto::LanguageServer,
-    ) -> Result<Vec<ConnectionId>> {
-        let project = self
-            .projects
-            .get_mut(&project_id)
-            .ok_or_else(|| anyhow!("no such project"))?;
-        if project.host_connection_id == connection_id {
-            project.language_servers.push(language_server);
-            return Ok(project.connection_ids());
-        }
-
-        Err(anyhow!("no such project"))?
-    }
-
-    pub fn join_project(
-        &mut self,
-        requester_connection_id: ConnectionId,
-        project_id: ProjectId,
-    ) -> Result<(&Project, ReplicaId)> {
-        let connection = self
-            .connections
-            .get_mut(&requester_connection_id)
-            .ok_or_else(|| anyhow!("no such connection"))?;
-        let user = self
-            .connected_users
-            .get(&connection.user_id)
-            .ok_or_else(|| anyhow!("no such connection"))?;
-        let active_call = user.active_call.ok_or_else(|| anyhow!("no such project"))?;
-        anyhow::ensure!(
-            active_call.connection_id == Some(requester_connection_id),
-            "no such project"
-        );
-
-        let project = self
-            .projects
-            .get_mut(&project_id)
-            .ok_or_else(|| anyhow!("no such project"))?;
-        anyhow::ensure!(project.room_id == active_call.room_id, "no such project");
-
-        connection.projects.insert(project_id);
-        let mut replica_id = 1;
-        while project.active_replica_ids.contains(&replica_id) {
-            replica_id += 1;
-        }
-        project.active_replica_ids.insert(replica_id);
-        project.guests.insert(
-            requester_connection_id,
-            Collaborator {
-                replica_id,
-                user_id: connection.user_id,
-                admin: connection.admin,
-            },
-        );
-
-        Ok((project, replica_id))
-    }
-
-    pub fn leave_project(
-        &mut self,
-        project_id: ProjectId,
-        connection_id: ConnectionId,
-    ) -> Result<LeftProject> {
-        let project = self
-            .projects
-            .get_mut(&project_id)
-            .ok_or_else(|| anyhow!("no such project"))?;
-
-        // If the connection leaving the project is a collaborator, remove it.
-        let remove_collaborator = if let Some(guest) = project.guests.remove(&connection_id) {
-            project.active_replica_ids.remove(&guest.replica_id);
-            true
-        } else {
-            false
-        };
-
-        if let Some(connection) = self.connections.get_mut(&connection_id) {
-            connection.projects.remove(&project_id);
-        }
-
-        Ok(LeftProject {
-            id: project.id,
-            host_connection_id: project.host_connection_id,
-            host_user_id: project.host.user_id,
-            connection_ids: project.connection_ids(),
-            remove_collaborator,
-        })
-    }
-
-    #[allow(clippy::too_many_arguments)]
-    pub fn update_worktree(
-        &mut self,
-        connection_id: ConnectionId,
-        project_id: ProjectId,
-        worktree_id: u64,
-        worktree_root_name: &str,
-        worktree_abs_path: &[u8],
-        removed_entries: &[u64],
-        updated_entries: &[proto::Entry],
-        scan_id: u64,
-        is_last_update: bool,
-    ) -> Result<Vec<ConnectionId>> {
-        let project = self.write_project(project_id, connection_id)?;
-
-        let connection_ids = project.connection_ids();
-        let mut worktree = project.worktrees.entry(worktree_id).or_default();
-        worktree.root_name = worktree_root_name.to_string();
-        worktree.abs_path = worktree_abs_path.to_vec();
-
-        for entry_id in removed_entries {
-            worktree.entries.remove(entry_id);
-        }
-
-        for entry in updated_entries {
-            worktree.entries.insert(entry.id, entry.clone());
-        }
-
-        worktree.scan_id = scan_id;
-        worktree.is_complete = is_last_update;
-        Ok(connection_ids)
-    }
-
-    fn build_participant_project(
-        project_id: ProjectId,
-        projects: &BTreeMap<ProjectId, Project>,
-    ) -> Option<proto::ParticipantProject> {
-        Some(proto::ParticipantProject {
-            id: project_id.to_proto(),
-            worktree_root_names: projects
-                .get(&project_id)?
-                .worktrees
-                .values()
-                .filter(|worktree| worktree.visible)
-                .map(|worktree| worktree.root_name.clone())
-                .collect(),
-        })
-    }
-
-    pub fn project_connection_ids(
-        &self,
-        project_id: ProjectId,
-        acting_connection_id: ConnectionId,
-    ) -> Result<Vec<ConnectionId>> {
-        Ok(self
-            .read_project(project_id, acting_connection_id)?
-            .connection_ids())
-    }
-
-    pub fn project(&self, project_id: ProjectId) -> Result<&Project> {
-        self.projects
-            .get(&project_id)
-            .ok_or_else(|| anyhow!("no such project"))
-    }
-
-    pub fn read_project(
-        &self,
-        project_id: ProjectId,
-        connection_id: ConnectionId,
-    ) -> Result<&Project> {
-        let project = self
-            .projects
-            .get(&project_id)
-            .ok_or_else(|| anyhow!("no such project"))?;
-        if project.host_connection_id == connection_id
-            || project.guests.contains_key(&connection_id)
-        {
-            Ok(project)
-        } else {
-            Err(anyhow!("no such project"))?
-        }
-    }
-
-    fn write_project(
-        &mut self,
-        project_id: ProjectId,
-        connection_id: ConnectionId,
-    ) -> Result<&mut Project> {
-        let project = self
-            .projects
-            .get_mut(&project_id)
-            .ok_or_else(|| anyhow!("no such project"))?;
-        if project.host_connection_id == connection_id
-            || project.guests.contains_key(&connection_id)
-        {
-            Ok(project)
-        } else {
-            Err(anyhow!("no such project"))?
-        }
-    }
-
-    #[cfg(test)]
-    pub fn check_invariants(&self) {
-        for (connection_id, connection) in &self.connections {
-            for project_id in &connection.projects {
-                let project = &self.projects.get(project_id).unwrap();
-                if project.host_connection_id != *connection_id {
-                    assert!(project.guests.contains_key(connection_id));
-                }
-
-                for (worktree_id, worktree) in project.worktrees.iter() {
-                    let mut paths = HashMap::default();
-                    for entry in worktree.entries.values() {
-                        let prev_entry = paths.insert(&entry.path, entry);
-                        assert_eq!(
-                            prev_entry,
-                            None,
-                            "worktree {:?}, duplicate path for entries {:?} and {:?}",
-                            worktree_id,
-                            prev_entry.unwrap(),
-                            entry
-                        );
-                    }
-                }
-            }
-
-            assert!(self
-                .connected_users
-                .get(&connection.user_id)
-                .unwrap()
-                .connection_ids
-                .contains(connection_id));
-        }
-
-        for (user_id, state) in &self.connected_users {
-            for connection_id in &state.connection_ids {
-                assert_eq!(
-                    self.connections.get(connection_id).unwrap().user_id,
-                    *user_id
-                );
-            }
-
-            if let Some(active_call) = state.active_call.as_ref() {
-                if let Some(active_call_connection_id) = active_call.connection_id {
-                    assert!(
-                        state.connection_ids.contains(&active_call_connection_id),
-                        "call is active on a dead connection"
-                    );
-                }
-            }
-        }
-
-        for (room_id, room) in &self.rooms {
-            for pending_user_id in &room.pending_participant_user_ids {
-                assert!(
-                    self.connected_users
-                        .contains_key(&UserId::from_proto(*pending_user_id)),
-                    "call is active on a user that has disconnected"
-                );
-            }
-
-            for participant in &room.participants {
-                assert!(
-                    self.connections
-                        .contains_key(&ConnectionId(participant.peer_id)),
-                    "room {} contains participant {:?} that has disconnected",
-                    room_id,
-                    participant
-                );
-
-                for participant_project in &participant.projects {
-                    let project = &self.projects[&ProjectId::from_proto(participant_project.id)];
-                    assert_eq!(
-                        project.room_id, *room_id,
-                        "project was shared on a different room"
-                    );
-                }
-            }
-
-            assert!(
-                !room.pending_participant_user_ids.is_empty() || !room.participants.is_empty(),
-                "room can't be empty"
-            );
-        }
-
-        for (project_id, project) in &self.projects {
-            let host_connection = self.connections.get(&project.host_connection_id).unwrap();
-            assert!(host_connection.projects.contains(project_id));
-
-            for guest_connection_id in project.guests.keys() {
-                let guest_connection = self.connections.get(guest_connection_id).unwrap();
-                assert!(guest_connection.projects.contains(project_id));
-            }
-            assert_eq!(project.active_replica_ids.len(), project.guests.len());
-            assert_eq!(
-                project.active_replica_ids,
-                project
-                    .guests
-                    .values()
-                    .map(|guest| guest.replica_id)
-                    .collect::<HashSet<_>>(),
-            );
-
-            let room = &self.rooms[&project.room_id];
-            let room_participant = room
-                .participants
-                .iter()
-                .find(|participant| participant.peer_id == project.host_connection_id.0)
-                .unwrap();
-            assert!(
-                room_participant
-                    .projects
-                    .iter()
-                    .any(|project| project.id == project_id.to_proto()),
-                "project was not shared in room"
-            );
-        }
-    }
-}
-
-impl Project {
-    pub fn guest_connection_ids(&self) -> Vec<ConnectionId> {
-        self.guests.keys().copied().collect()
-    }
-
-    pub fn connection_ids(&self) -> Vec<ConnectionId> {
-        self.guests
-            .keys()
-            .copied()
-            .chain(Some(self.host_connection_id))
-            .collect()
-    }
-}
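
For reference while this logic moves out of the in-memory store: the removed `join_project` above assigns guest replica ids by scanning for the smallest unused id starting at 1, with the host always keeping replica 0. A minimal standalone sketch of that allocation pattern (the helper name is hypothetical, not part of this change):

    use std::collections::HashSet;

    // Mirrors the scan in the removed `join_project`: the host keeps replica id 0,
    // guests receive the smallest free id >= 1, so ids freed by departing guests are reused.
    fn next_replica_id(active_replica_ids: &HashSet<u32>) -> u32 {
        let mut replica_id = 1;
        while active_replica_ids.contains(&replica_id) {
            replica_id += 1;
        }
        replica_id
    }

    fn main() {
        let mut active: HashSet<u32> = [1, 2, 4].into_iter().collect();
        let id = next_replica_id(&active);
        assert_eq!(id, 3); // the gap left by a departed guest is filled first
        active.insert(id);
    }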

crates/collab_ui/src/collab_ui.rs 🔗

@@ -43,7 +43,6 @@ pub fn init(app_state: Arc<AppState>, cx: &mut MutableAppContext) {
                     project_id,
                     app_state.client.clone(),
                     app_state.user_store.clone(),
-                    app_state.project_store.clone(),
                     app_state.languages.clone(),
                     app_state.fs.clone(),
                     cx.clone(),
@@ -51,7 +50,13 @@ pub fn init(app_state: Arc<AppState>, cx: &mut MutableAppContext) {
                 .await?;
 
                 let (_, workspace) = cx.add_window((app_state.build_window_options)(), |cx| {
-                    let mut workspace = Workspace::new(project, app_state.default_item_factory, cx);
+                    let mut workspace = Workspace::new(
+                        Default::default(),
+                        0,
+                        project,
+                        app_state.dock_default_item_factory,
+                        cx,
+                    );
                     (app_state.initialize_workspace)(&mut workspace, &app_state, cx);
                     workspace
                 });

crates/collab_ui/src/contact_notification.rs 🔗

@@ -6,7 +6,7 @@ use gpui::{
     elements::*, impl_internal_actions, Entity, ModelHandle, MutableAppContext, RenderContext,
     View, ViewContext,
 };
-use workspace::Notification;
+use workspace::notifications::Notification;
 
 impl_internal_actions!(contact_notifications, [Dismiss, RespondToContactRequest]);
 

crates/collab_ui/src/incoming_call_notification.rs 🔗

@@ -74,7 +74,7 @@ impl IncomingCallNotification {
         let active_call = ActiveCall::global(cx);
         if action.accept {
             let join = active_call.update(cx, |active_call, cx| active_call.accept_incoming(cx));
-            let caller_user_id = self.call.caller.id;
+            let caller_user_id = self.call.calling_user.id;
             let initial_project_id = self.call.initial_project.as_ref().map(|project| project.id);
             cx.spawn_weak(|_, mut cx| async move {
                 join.await?;
@@ -105,7 +105,7 @@ impl IncomingCallNotification {
             .as_ref()
             .unwrap_or(&default_project);
         Flex::row()
-            .with_children(self.call.caller.avatar.clone().map(|avatar| {
+            .with_children(self.call.calling_user.avatar.clone().map(|avatar| {
                 Image::new(avatar)
                     .with_style(theme.caller_avatar)
                     .aligned()
@@ -115,7 +115,7 @@ impl IncomingCallNotification {
                 Flex::column()
                     .with_child(
                         Label::new(
-                            self.call.caller.github_login.clone(),
+                            self.call.calling_user.github_login.clone(),
                             theme.caller_username.text.clone(),
                         )
                         .contained()

crates/command_palette/src/command_palette.rs 🔗

@@ -350,8 +350,9 @@ mod tests {
         });
 
         let project = Project::test(app_state.fs.clone(), [], cx).await;
-        let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
         let editor = cx.add_view(&workspace, |cx| {
             let mut editor = Editor::single_line(None, cx);
             editor.set_text("abc", cx);

crates/db/Cargo.toml 🔗

@@ -12,16 +12,20 @@ test-support = []
 
 [dependencies]
 collections = { path = "../collections" }
+gpui = { path = "../gpui" }
+sqlez = { path = "../sqlez" }
+sqlez_macros = { path = "../sqlez_macros" }
+util = { path = "../util" }
 anyhow = "1.0.57"
+indoc = "1.0.4"
 async-trait = "0.1"
 lazy_static = "1.4.0"
 log = { version = "0.4.16", features = ["kv_unstable_serde"] }
 parking_lot = "0.11.1"
-rusqlite = { version = "0.28.0", features = ["bundled", "serde_json"] }
-rusqlite_migration = { git = "https://github.com/cljoly/rusqlite_migration", rev = "c433555d7c1b41b103426e35756eb3144d0ebbc6" }
-serde = { workspace = true }
-serde_rusqlite = "0.31.0"
+serde = { version = "1.0", features = ["derive"] }
+smol = "1.2"
 
 [dev-dependencies]
 gpui = { path = "../gpui", features = ["test-support"] }
+env_logger = "0.9.1"
 tempdir = { version = "0.3.7" }

crates/db/README.md 🔗

@@ -0,0 +1,5 @@
+# Building Queries
+
+First, craft your test data. The examples folder shows a template for building a test-db, which can be run with `cargo run --example [your-example]`.
+
+To actually use and test your queries, import the generated DB file into https://sqliteonline.com/
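
A rough sketch of that workflow, assuming only the APIs visible elsewhere in this change (open_db, the Domain trait, sql!, and select_row); the example name, table, and temp path are illustrative:

    // Hypothetical crates/db/examples/example_db.rs
    use db::{open_db, smol, sqlez::domain::Domain, sqlez_macros::sql};
    use util::channel::ReleaseChannel;

    enum ExampleDb {}

    impl Domain for ExampleDb {
        fn name() -> &'static str {
            "example_db"
        }

        fn migrations() -> &'static [&'static str] {
            &[sql!(CREATE TABLE examples(value TEXT);)]
        }
    }

    fn main() {
        // open_db writes db.sqlite under a 0-{channel} subdirectory of this path;
        // that file is what gets imported into sqliteonline.com.
        let dir = std::env::temp_dir().join("zed-example-db");
        let db = smol::block_on(open_db::<ExampleDb>(&dir, &ReleaseChannel::Dev));
        // The freshly migrated table starts out empty.
        assert!(db.select_row::<usize>("SELECT * FROM examples").unwrap()().unwrap().is_none());
    }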

crates/db/src/db.rs 🔗

@@ -1,119 +1,365 @@
-mod kvp;
-mod migrations;
+pub mod kvp;
+pub mod query;
 
-use std::fs;
+// Re-export
+pub use anyhow;
+use anyhow::Context;
+pub use indoc::indoc;
+pub use lazy_static;
+use parking_lot::{Mutex, RwLock};
+pub use smol;
+pub use sqlez;
+pub use sqlez_macros;
+pub use util::channel::{RELEASE_CHANNEL, RELEASE_CHANNEL_NAME};
+pub use util::paths::DB_DIR;
+
+use sqlez::domain::Migrator;
+use sqlez::thread_safe_connection::ThreadSafeConnection;
+use sqlez_macros::sql;
+use std::fs::create_dir_all;
 use std::path::{Path, PathBuf};
-use std::sync::Arc;
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::time::{SystemTime, UNIX_EPOCH};
+use util::{async_iife, ResultExt};
+use util::channel::ReleaseChannel;
 
-use anyhow::Result;
-use log::error;
-use parking_lot::Mutex;
-use rusqlite::Connection;
+const CONNECTION_INITIALIZE_QUERY: &'static str = sql!(
+    PRAGMA foreign_keys=TRUE;
+);
 
-use migrations::MIGRATIONS;
+const DB_INITIALIZE_QUERY: &'static str = sql!(
+    PRAGMA journal_mode=WAL;
+    PRAGMA busy_timeout=1;
+    PRAGMA case_sensitive_like=TRUE;
+    PRAGMA synchronous=NORMAL;
+);
 
-#[derive(Clone)]
-pub enum Db {
-    Real(Arc<RealDb>),
-    Null,
-}
+const FALLBACK_DB_NAME: &'static str = "FALLBACK_MEMORY_DB";
 
-pub struct RealDb {
-    connection: Mutex<Connection>,
-    path: Option<PathBuf>,
+const DB_FILE_NAME: &'static str = "db.sqlite";
+
+lazy_static::lazy_static! {
+    static ref DB_FILE_OPERATIONS: Mutex<()> = Mutex::new(());
+    pub static ref BACKUP_DB_PATH: RwLock<Option<PathBuf>> = RwLock::new(None);
+    pub static ref ALL_FILE_DB_FAILED: AtomicBool = AtomicBool::new(false);    
 }
 
-impl Db {
-    /// Open or create a database at the given directory path.
-    pub fn open(db_dir: &Path, channel: &'static str) -> Self {
-        // Use 0 for now. Will implement incrementing and clearing of old db files soon TM
-        let current_db_dir = db_dir.join(Path::new(&format!("0-{}", channel)));
-        fs::create_dir_all(&current_db_dir)
-            .expect("Should be able to create the database directory");
-        let db_path = current_db_dir.join(Path::new("db.sqlite"));
-
-        Connection::open(db_path)
-            .map_err(Into::into)
-            .and_then(|connection| Self::initialize(connection))
-            .map(|connection| {
-                Db::Real(Arc::new(RealDb {
-                    connection,
-                    path: Some(db_dir.to_path_buf()),
-                }))
-            })
-            .unwrap_or_else(|e| {
-                error!(
-                    "Connecting to file backed db failed. Reverting to null db. {}",
-                    e
-                );
-                Self::Null
-            })
-    }
+/// Open or create a database at the given directory path.
+/// This will retry a couple of times if there are failures. If opening fails once, the db directory
+/// is moved to a backup folder and a new one is created. If that fails, a shared in-memory db is created.
+/// In either case, static variables are set so that the user can be notified.
+pub async fn open_db<M: Migrator + 'static>(db_dir: &Path, release_channel: &ReleaseChannel) -> ThreadSafeConnection<M> {
+    let release_channel_name = release_channel.dev_name();
+    let main_db_dir = db_dir.join(Path::new(&format!("0-{}", release_channel_name)));
 
-    /// Open an in-memory database for testing and as a fallback.
-    #[cfg(any(test, feature = "test-support"))]
-    pub fn open_in_memory() -> Self {
-        Connection::open_in_memory()
-            .map_err(Into::into)
-            .and_then(|connection| Self::initialize(connection))
-            .map(|connection| {
-                Db::Real(Arc::new(RealDb {
-                    connection,
-                    path: None,
-                }))
-            })
-            .unwrap_or_else(|e| {
-                error!(
-                    "Connecting to in memory db failed. Reverting to null db. {}",
-                    e
-                );
-                Self::Null
-            })
-    }
+    let connection = async_iife!({
+        // Note: This still has a race condition where 1 set of migrations succeeds
+        // (e.g. (Workspace, Editor)) and another fails (e.g. (Workspace, Terminal))
+        // This will cause the first connection to have the database taken out 
+        // from under it. This *should* be fine though. The second database failure will
+        // cause errors in the log and so should be observed by developers while writing
+        // soon-to-be good migrations. If user databases are corrupted, we toss them out
+        // and try again from a blank. As long as running all migrations from start to end 
+        // on a blank database is ok, this race condition will never be triggered.
+        //
+        // Basically: Don't ever push invalid migrations to stable or everyone will have
+        // a bad time.
+        
+        // If no db folder, create one at 0-{channel}
+        create_dir_all(&main_db_dir).context("Could not create db directory")?;
+        let db_path = main_db_dir.join(Path::new(DB_FILE_NAME));
+        
+        // Optimistically open databases in parallel
+        if !DB_FILE_OPERATIONS.is_locked() {
+            // Try building a connection
+            if let Some(connection) = open_main_db(&db_path).await {
+                return Ok(connection)
+            };
+        }
+        
+        // Take a lock in the failure case so that we move the db once per process instead 
+        // of potentially multiple times from different threads. This shouldn't happen in the
+        // normal path
+        let _lock = DB_FILE_OPERATIONS.lock();
+        if let Some(connection) = open_main_db(&db_path).await {
+            return Ok(connection)
+        };
+        
+        let backup_timestamp = SystemTime::now()
+            .duration_since(UNIX_EPOCH)
+            .expect("System clock is set before the unix epoch, Zed does not support this region of spacetime")
+            .as_millis();
+        
+        // If failed, move 0-{channel} to {current unix timestamp}-{channel}
+        let backup_db_dir = db_dir.join(Path::new(&format!(
+            "{}-{}",
+            backup_timestamp,
+            release_channel_name,
+        )));
+
+        std::fs::rename(&main_db_dir, &backup_db_dir)
+            .context("Failed to clean up corrupted database")?;
 
-    fn initialize(mut conn: Connection) -> Result<Mutex<Connection>> {
-        MIGRATIONS.to_latest(&mut conn)?;
+        // Set a static ref with the failed timestamp and error so we can notify the user
+        {
+            let mut guard = BACKUP_DB_PATH.write();
+            *guard = Some(backup_db_dir);
+        }
+        
+        // Create a new 0-{channel}
+        create_dir_all(&main_db_dir).context("Should be able to create the database directory")?;
+        let db_path = main_db_dir.join(Path::new(DB_FILE_NAME));
 
-        conn.pragma_update(None, "journal_mode", "WAL")?;
-        conn.pragma_update(None, "synchronous", "NORMAL")?;
-        conn.pragma_update(None, "foreign_keys", true)?;
-        conn.pragma_update(None, "case_sensitive_like", true)?;
+        // Try again
+        open_main_db(&db_path).await.context("Could not open newly created db")
+    }).await.log_err();
 
-        Ok(Mutex::new(conn))
+    if let Some(connection) = connection {
+        return connection;
     }
+   
+    // Set another static ref so that we can escalate the notification
+    ALL_FILE_DB_FAILED.store(true, Ordering::Release);
+    
+    // If still failed, create an in memory db with a known name
+    open_fallback_db().await
+}
 
-    pub fn persisting(&self) -> bool {
-        self.real().and_then(|db| db.path.as_ref()).is_some()
-    }
+async fn open_main_db<M: Migrator>(db_path: &PathBuf) -> Option<ThreadSafeConnection<M>> {
+    log::info!("Opening main db");
+    ThreadSafeConnection::<M>::builder(db_path.to_string_lossy().as_ref(), true)
+        .with_db_initialization_query(DB_INITIALIZE_QUERY)
+        .with_connection_initialize_query(CONNECTION_INITIALIZE_QUERY)
+        .build()
+        .await
+        .log_err()
+}
 
-    pub fn real(&self) -> Option<&RealDb> {
-        match self {
-            Db::Real(db) => Some(&db),
-            _ => None,
-        }
-    }
+async fn open_fallback_db<M: Migrator>() -> ThreadSafeConnection<M> {
+    log::info!("Opening fallback db");
+    ThreadSafeConnection::<M>::builder(FALLBACK_DB_NAME, false)
+        .with_db_initialization_query(DB_INITIALIZE_QUERY)
+        .with_connection_initialize_query(CONNECTION_INITIALIZE_QUERY)
+        .build()
+        .await
+        .expect(
+            "Fallback in memory database failed. Likely initialization queries or migrations have fundamental errors",
+        )
+}
+
+#[cfg(any(test, feature = "test-support"))]
+pub async fn open_test_db<M: Migrator>(db_name: &str) -> ThreadSafeConnection<M> {
+    use sqlez::thread_safe_connection::locking_queue;
+
+    ThreadSafeConnection::<M>::builder(db_name, false)
+        .with_db_initialization_query(DB_INITIALIZE_QUERY)
+        .with_connection_initialize_query(CONNECTION_INITIALIZE_QUERY)
+        // Serialize queued writes via a mutex and run them synchronously
+        .with_write_queue_constructor(locking_queue())
+        .build()
+        .await
+        .unwrap()
 }
 
-impl Drop for Db {
-    fn drop(&mut self) {
-        match self {
-            Db::Real(real_db) => {
-                let lock = real_db.connection.lock();
+/// Implements a basic DB wrapper for a given domain
+#[macro_export]
+macro_rules! define_connection {
+    (pub static ref $id:ident: $t:ident<()> = $migrations:expr;) => {
+        pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection<$t>);
 
-                let _ = lock.pragma_update(None, "analysis_limit", "500");
-                let _ = lock.pragma_update(None, "optimize", "");
+        impl ::std::ops::Deref for $t {
+            type Target = $crate::sqlez::thread_safe_connection::ThreadSafeConnection<$t>;
+
+            fn deref(&self) -> &Self::Target {
+                &self.0
             }
-            Db::Null => {}
         }
-    }
+        
+        impl $crate::sqlez::domain::Domain for $t {
+            fn name() -> &'static str {
+                stringify!($t)
+            }
+            
+            fn migrations() -> &'static [&'static str] {
+                $migrations
+            } 
+        }
+
+        #[cfg(any(test, feature = "test-support"))]
+        $crate::lazy_static::lazy_static! {
+            pub static ref $id: $t = $t($crate::smol::block_on($crate::open_test_db(stringify!($id))));
+        }
+
+        #[cfg(not(any(test, feature = "test-support")))]
+        $crate::lazy_static::lazy_static! {
+            pub static ref $id: $t = $t($crate::smol::block_on($crate::open_db(&$crate::DB_DIR, &$crate::RELEASE_CHANNEL)));
+        }
+    };
+    (pub static ref $id:ident: $t:ident<$($d:ty),+> = $migrations:expr;) => {
+        pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection<( $($d),+, $t )>);
+
+        impl ::std::ops::Deref for $t {
+            type Target = $crate::sqlez::thread_safe_connection::ThreadSafeConnection<($($d),+, $t)>;
+
+            fn deref(&self) -> &Self::Target {
+                &self.0
+            }
+        }
+        
+        impl $crate::sqlez::domain::Domain for $t {
+            fn name() -> &'static str {
+                stringify!($t)
+            }
+            
+            fn migrations() -> &'static [&'static str] {
+                $migrations
+            } 
+        }
+
+        #[cfg(any(test, feature = "test-support"))]
+        $crate::lazy_static::lazy_static! {
+            pub static ref $id: $t = $t($crate::smol::block_on($crate::open_test_db(stringify!($id))));
+        }
+
+        #[cfg(not(any(test, feature = "test-support")))]
+        $crate::lazy_static::lazy_static! {
+            pub static ref $id: $t = $t($crate::smol::block_on($crate::open_db(&$crate::DB_DIR, &$crate::RELEASE_CHANNEL)));
+        }
+    };
 }
 
 #[cfg(test)]
 mod tests {
-    use crate::migrations::MIGRATIONS;
+    use std::{fs, thread};
+
+    use sqlez::{domain::Domain, connection::Connection};
+    use sqlez_macros::sql;
+    use tempdir::TempDir;
 
-    #[test]
-    fn test_migrations() {
-        assert!(MIGRATIONS.validate().is_ok());
+    use crate::{open_db, DB_FILE_NAME};
+        
+    // Test bad migration panics
+    #[gpui::test]
+    #[should_panic]
+    async fn test_bad_migration_panics() {
+        enum BadDB {}
+        
+        impl Domain for BadDB {
+            fn name() -> &'static str {
+                "db_tests"
+            }
+            
+            fn migrations() -> &'static [&'static str] {
+                &[sql!(CREATE TABLE test(value);),
+                    // failure because test already exists
+                  sql!(CREATE TABLE test(value);)]
+            }
+        }
+       
+        let tempdir = TempDir::new("DbTests").unwrap();
+        let _bad_db = open_db::<BadDB>(tempdir.path(), &util::channel::ReleaseChannel::Dev).await;
+    }
+    
+    /// Test that a DB that exists but is corrupted is backed up and recreated
+    #[gpui::test]
+    async fn test_db_corruption() {
+        enum CorruptedDB {}
+        
+        impl Domain for CorruptedDB {
+            fn name() -> &'static str {
+                "db_tests"
+            }
+            
+            fn migrations() -> &'static [&'static str] {
+                &[sql!(CREATE TABLE test(value);)]
+            }
+        }
+        
+        enum GoodDB {}
+        
+        impl Domain for GoodDB {
+            fn name() -> &'static str {
+                "db_tests" //Notice same name
+            }
+            
+            fn migrations() -> &'static [&'static str] {
+                &[sql!(CREATE TABLE test2(value);)] //But different migration
+            }
+        }
+       
+        let tempdir = TempDir::new("DbTests").unwrap();
+        {
+            let corrupt_db = open_db::<CorruptedDB>(tempdir.path(), &util::channel::ReleaseChannel::Dev).await;
+            assert!(corrupt_db.persistent());
+        }
+        
+        
+        let good_db = open_db::<GoodDB>(tempdir.path(), &util::channel::ReleaseChannel::Dev).await;
+        assert!(good_db.select_row::<usize>("SELECT * FROM test2").unwrap()().unwrap().is_none());
+        
+        let mut corrupted_backup_dir = fs::read_dir(
+            tempdir.path()
+        ).unwrap().find(|entry| {
+            !entry.as_ref().unwrap().file_name().to_str().unwrap().starts_with("0")
+        }
+        ).unwrap().unwrap().path();
+        corrupted_backup_dir.push(DB_FILE_NAME);
+        
+        dbg!(&corrupted_backup_dir);
+        
+        let backup = Connection::open_file(&corrupted_backup_dir.to_string_lossy());
+        assert!(backup.select_row::<usize>("SELECT * FROM test").unwrap()().unwrap().is_none());
+    }
+    
+    /// Test that simultaneous connections to a corrupted DB all recover after it is recreated
+    #[gpui::test]
+    async fn test_simultaneous_db_corruption() {
+        enum CorruptedDB {}
+        
+        impl Domain for CorruptedDB {
+            fn name() -> &'static str {
+                "db_tests"
+            }
+            
+            fn migrations() -> &'static [&'static str] {
+                &[sql!(CREATE TABLE test(value);)]
+            }
+        }
+        
+        enum GoodDB {}
+        
+        impl Domain for GoodDB {
+            fn name() -> &'static str {
+                "db_tests" //Notice same name
+            }
+            
+            fn migrations() -> &'static [&'static str] {
+                &[sql!(CREATE TABLE test2(value);)] //But different migration
+            }
+        }
+       
+        let tempdir = TempDir::new("DbTests").unwrap();
+        {
+            // Setup the bad database
+            let corrupt_db = open_db::<CorruptedDB>(tempdir.path(), &util::channel::ReleaseChannel::Dev).await;
+            assert!(corrupt_db.persistent());
+        }
+        
+        // Try to connect to it a bunch of times at once
+        let mut guards = vec![];
+        for _ in 0..10 {
+            let tmp_path = tempdir.path().to_path_buf();
+            let guard = thread::spawn(move || {
+                let good_db = smol::block_on(open_db::<GoodDB>(tmp_path.as_path(), &util::channel::ReleaseChannel::Dev));
+                assert!(good_db.select_row::<usize>("SELECT * FROM test2").unwrap()().unwrap().is_none());
+            });
+            
+            guards.push(guard);
+        
+        }
+        
+       for guard in guards.into_iter() {
+           assert!(guard.join().is_ok());
+       }
     }
 }
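
For reference, a minimal hypothetical call site for the define_connection! macro introduced above (the static name, domain type, and table are illustrative); the single-domain arm expands into a Domain impl plus a lazily initialized global connection:

    use db::{define_connection, sqlez_macros::sql};

    define_connection!(
        pub static ref EXAMPLE_STORE: ExampleStore<()> =
            &[sql!(
                CREATE TABLE examples(
                    id INTEGER PRIMARY KEY,
                    value TEXT
                ) STRICT;
            )];
    );

Because the macro implements Deref to ThreadSafeConnection, queries such as select_row can then be called on EXAMPLE_STORE directly.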

crates/db/src/items.rs 🔗

@@ -1,311 +0,0 @@
-use std::{ffi::OsStr, fmt::Display, hash::Hash, os::unix::prelude::OsStrExt, path::PathBuf};
-
-use anyhow::Result;
-use collections::HashSet;
-use rusqlite::{named_params, params};
-
-use super::Db;
-
-pub(crate) const ITEMS_M_1: &str = "
-CREATE TABLE items(
-    id INTEGER PRIMARY KEY,
-    kind TEXT
-) STRICT;
-CREATE TABLE item_path(
-    item_id INTEGER PRIMARY KEY,
-    path BLOB
-) STRICT;
-CREATE TABLE item_query(
-    item_id INTEGER PRIMARY KEY,
-    query TEXT
-) STRICT;
-";
-
-#[derive(PartialEq, Eq, Hash, Debug)]
-pub enum SerializedItemKind {
-    Editor,
-    Terminal,
-    ProjectSearch,
-    Diagnostics,
-}
-
-impl Display for SerializedItemKind {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        f.write_str(&format!("{:?}", self))
-    }
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Hash)]
-pub enum SerializedItem {
-    Editor(usize, PathBuf),
-    Terminal(usize),
-    ProjectSearch(usize, String),
-    Diagnostics(usize),
-}
-
-impl SerializedItem {
-    fn kind(&self) -> SerializedItemKind {
-        match self {
-            SerializedItem::Editor(_, _) => SerializedItemKind::Editor,
-            SerializedItem::Terminal(_) => SerializedItemKind::Terminal,
-            SerializedItem::ProjectSearch(_, _) => SerializedItemKind::ProjectSearch,
-            SerializedItem::Diagnostics(_) => SerializedItemKind::Diagnostics,
-        }
-    }
-
-    fn id(&self) -> usize {
-        match self {
-            SerializedItem::Editor(id, _)
-            | SerializedItem::Terminal(id)
-            | SerializedItem::ProjectSearch(id, _)
-            | SerializedItem::Diagnostics(id) => *id,
-        }
-    }
-}
-
-impl Db {
-    fn write_item(&self, serialized_item: SerializedItem) -> Result<()> {
-        self.real()
-            .map(|db| {
-                let mut lock = db.connection.lock();
-                let tx = lock.transaction()?;
-
-                // Serialize the item
-                let id = serialized_item.id();
-                {
-                    let mut stmt = tx.prepare_cached(
-                        "INSERT OR REPLACE INTO items(id, kind) VALUES ((?), (?))",
-                    )?;
-
-                    dbg!("inserting item");
-                    stmt.execute(params![id, serialized_item.kind().to_string()])?;
-                }
-
-                // Serialize item data
-                match &serialized_item {
-                    SerializedItem::Editor(_, path) => {
-                        dbg!("inserting path");
-                        let mut stmt = tx.prepare_cached(
-                            "INSERT OR REPLACE INTO item_path(item_id, path) VALUES ((?), (?))",
-                        )?;
-
-                        let path_bytes = path.as_os_str().as_bytes();
-                        stmt.execute(params![id, path_bytes])?;
-                    }
-                    SerializedItem::ProjectSearch(_, query) => {
-                        dbg!("inserting query");
-                        let mut stmt = tx.prepare_cached(
-                            "INSERT OR REPLACE INTO item_query(item_id, query) VALUES ((?), (?))",
-                        )?;
-
-                        stmt.execute(params![id, query])?;
-                    }
-                    _ => {}
-                }
-
-                tx.commit()?;
-
-                let mut stmt = lock.prepare_cached("SELECT id, kind FROM items")?;
-                let _ = stmt
-                    .query_map([], |row| {
-                        let zero: usize = row.get(0)?;
-                        let one: String = row.get(1)?;
-
-                        dbg!(zero, one);
-                        Ok(())
-                    })?
-                    .collect::<Vec<Result<(), _>>>();
-
-                Ok(())
-            })
-            .unwrap_or(Ok(()))
-    }
-
-    fn delete_item(&self, item_id: usize) -> Result<()> {
-        self.real()
-            .map(|db| {
-                let lock = db.connection.lock();
-
-                let mut stmt = lock.prepare_cached(
-                    r#"
-                    DELETE FROM items WHERE id = (:id);
-                    DELETE FROM item_path WHERE id = (:id);
-                    DELETE FROM item_query WHERE id = (:id);
-                    "#,
-                )?;
-
-                stmt.execute(named_params! {":id": item_id})?;
-
-                Ok(())
-            })
-            .unwrap_or(Ok(()))
-    }
-
-    fn take_items(&self) -> Result<HashSet<SerializedItem>> {
-        self.real()
-            .map(|db| {
-                let mut lock = db.connection.lock();
-
-                let tx = lock.transaction()?;
-
-                // When working with transactions in rusqlite, need to make this kind of scope
-                // To make the borrow stuff work correctly. Don't know why, rust is wild.
-                let result = {
-                    let mut editors_stmt = tx.prepare_cached(
-                        r#"
-                        SELECT items.id, item_path.path
-                        FROM items
-                        LEFT JOIN item_path
-                            ON items.id = item_path.item_id
-                        WHERE items.kind = ?;
-                        "#,
-                    )?;
-
-                    let editors_iter = editors_stmt.query_map(
-                        [SerializedItemKind::Editor.to_string()],
-                        |row| {
-                            let id: usize = row.get(0)?;
-
-                            let buf: Vec<u8> = row.get(1)?;
-                            let path: PathBuf = OsStr::from_bytes(&buf).into();
-
-                            Ok(SerializedItem::Editor(id, path))
-                        },
-                    )?;
-
-                    let mut terminals_stmt = tx.prepare_cached(
-                        r#"
-                        SELECT items.id
-                        FROM items
-                        WHERE items.kind = ?;
-                        "#,
-                    )?;
-                    let terminals_iter = terminals_stmt.query_map(
-                        [SerializedItemKind::Terminal.to_string()],
-                        |row| {
-                            let id: usize = row.get(0)?;
-
-                            Ok(SerializedItem::Terminal(id))
-                        },
-                    )?;
-
-                    let mut search_stmt = tx.prepare_cached(
-                        r#"
-                        SELECT items.id, item_query.query
-                        FROM items
-                        LEFT JOIN item_query
-                            ON items.id = item_query.item_id
-                        WHERE items.kind = ?;
-                        "#,
-                    )?;
-                    let searches_iter = search_stmt.query_map(
-                        [SerializedItemKind::ProjectSearch.to_string()],
-                        |row| {
-                            let id: usize = row.get(0)?;
-                            let query = row.get(1)?;
-
-                            Ok(SerializedItem::ProjectSearch(id, query))
-                        },
-                    )?;
-
-                    #[cfg(debug_assertions)]
-                    let tmp =
-                        searches_iter.collect::<Vec<Result<SerializedItem, rusqlite::Error>>>();
-                    #[cfg(debug_assertions)]
-                    debug_assert!(tmp.len() == 0 || tmp.len() == 1);
-                    #[cfg(debug_assertions)]
-                    let searches_iter = tmp.into_iter();
-
-                    let mut diagnostic_stmt = tx.prepare_cached(
-                        r#"
-                        SELECT items.id
-                        FROM items
-                        WHERE items.kind = ?;
-                        "#,
-                    )?;
-
-                    let diagnostics_iter = diagnostic_stmt.query_map(
-                        [SerializedItemKind::Diagnostics.to_string()],
-                        |row| {
-                            let id: usize = row.get(0)?;
-
-                            Ok(SerializedItem::Diagnostics(id))
-                        },
-                    )?;
-
-                    #[cfg(debug_assertions)]
-                    let tmp =
-                        diagnostics_iter.collect::<Vec<Result<SerializedItem, rusqlite::Error>>>();
-                    #[cfg(debug_assertions)]
-                    debug_assert!(tmp.len() == 0 || tmp.len() == 1);
-                    #[cfg(debug_assertions)]
-                    let diagnostics_iter = tmp.into_iter();
-
-                    let res = editors_iter
-                        .chain(terminals_iter)
-                        .chain(diagnostics_iter)
-                        .chain(searches_iter)
-                        .collect::<Result<HashSet<SerializedItem>, rusqlite::Error>>()?;
-
-                    let mut delete_stmt = tx.prepare_cached(
-                        r#"
-                        DELETE FROM items;
-                        DELETE FROM item_path;
-                        DELETE FROM item_query;
-                        "#,
-                    )?;
-
-                    delete_stmt.execute([])?;
-
-                    res
-                };
-
-                tx.commit()?;
-
-                Ok(result)
-            })
-            .unwrap_or(Ok(HashSet::default()))
-    }
-}
-
-#[cfg(test)]
-mod test {
-    use anyhow::Result;
-
-    use super::*;
-
-    #[test]
-    fn test_items_round_trip() -> Result<()> {
-        let db = Db::open_in_memory();
-
-        let mut items = vec![
-            SerializedItem::Editor(0, PathBuf::from("/tmp/test.txt")),
-            SerializedItem::Terminal(1),
-            SerializedItem::ProjectSearch(2, "Test query!".to_string()),
-            SerializedItem::Diagnostics(3),
-        ]
-        .into_iter()
-        .collect::<HashSet<_>>();
-
-        for item in items.iter() {
-            dbg!("Inserting... ");
-            db.write_item(item.clone())?;
-        }
-
-        assert_eq!(items, db.take_items()?);
-
-        // Check that it's empty, as expected
-        assert_eq!(HashSet::default(), db.take_items()?);
-
-        for item in items.iter() {
-            db.write_item(item.clone())?;
-        }
-
-        items.remove(&SerializedItem::ProjectSearch(2, "Test query!".to_string()));
-        db.delete_item(2)?;
-
-        assert_eq!(items, db.take_items()?);
-
-        Ok(())
-    }
-}

crates/db/src/kvp.rs 🔗

@@ -1,82 +1,62 @@
-use anyhow::Result;
-use rusqlite::OptionalExtension;
-
-use super::Db;
-
-pub(crate) const KVP_M_1_UP: &str = "
-CREATE TABLE kv_store(
-    key TEXT PRIMARY KEY,
-    value TEXT NOT NULL
-) STRICT;
-";
-
-impl Db {
-    pub fn read_kvp(&self, key: &str) -> Result<Option<String>> {
-        self.real()
-            .map(|db| {
-                let lock = db.connection.lock();
-                let mut stmt = lock.prepare_cached("SELECT value FROM kv_store WHERE key = (?)")?;
-
-                Ok(stmt.query_row([key], |row| row.get(0)).optional()?)
-            })
-            .unwrap_or(Ok(None))
+use sqlez_macros::sql;
+
+use crate::{define_connection, query};
+
+define_connection!(pub static ref KEY_VALUE_STORE: KeyValueStore<()> =
+    &[sql!(
+        CREATE TABLE IF NOT EXISTS kv_store(
+            key TEXT PRIMARY KEY,
+            value TEXT NOT NULL
+        ) STRICT;
+    )];
+);
+
+impl KeyValueStore {
+    query! {
+        pub fn read_kvp(key: &str) -> Result<Option<String>> {
+            SELECT value FROM kv_store WHERE key = (?)
+        }
     }
 
-    pub fn write_kvp(&self, key: &str, value: &str) -> Result<()> {
-        self.real()
-            .map(|db| {
-                let lock = db.connection.lock();
-
-                let mut stmt = lock.prepare_cached(
-                    "INSERT OR REPLACE INTO kv_store(key, value) VALUES ((?), (?))",
-                )?;
-
-                stmt.execute([key, value])?;
-
-                Ok(())
-            })
-            .unwrap_or(Ok(()))
+    query! {
+        pub async fn write_kvp(key: String, value: String) -> Result<()> {
+            INSERT OR REPLACE INTO kv_store(key, value) VALUES ((?), (?))
+        }
     }
 
-    pub fn delete_kvp(&self, key: &str) -> Result<()> {
-        self.real()
-            .map(|db| {
-                let lock = db.connection.lock();
-
-                let mut stmt = lock.prepare_cached("DELETE FROM kv_store WHERE key = (?)")?;
-
-                stmt.execute([key])?;
-
-                Ok(())
-            })
-            .unwrap_or(Ok(()))
+    query! {
+        pub async fn delete_kvp(key: String) -> Result<()> {
+            DELETE FROM kv_store WHERE key = (?)
+        }
     }
 }
 
 #[cfg(test)]
 mod tests {
-    use anyhow::Result;
-
-    use super::*;
-
-    #[test]
-    fn test_kvp() -> Result<()> {
-        let db = Db::open_in_memory();
+    use crate::kvp::KeyValueStore;
 
-        assert_eq!(db.read_kvp("key-1")?, None);
+    #[gpui::test]
+    async fn test_kvp() {
+        let db = KeyValueStore(crate::open_test_db("test_kvp").await);
 
-        db.write_kvp("key-1", "one")?;
-        assert_eq!(db.read_kvp("key-1")?, Some("one".to_string()));
+        assert_eq!(db.read_kvp("key-1").unwrap(), None);
 
-        db.write_kvp("key-1", "one-2")?;
-        assert_eq!(db.read_kvp("key-1")?, Some("one-2".to_string()));
+        db.write_kvp("key-1".to_string(), "one".to_string())
+            .await
+            .unwrap();
+        assert_eq!(db.read_kvp("key-1").unwrap(), Some("one".to_string()));
 
-        db.write_kvp("key-2", "two")?;
-        assert_eq!(db.read_kvp("key-2")?, Some("two".to_string()));
+        db.write_kvp("key-1".to_string(), "one-2".to_string())
+            .await
+            .unwrap();
+        assert_eq!(db.read_kvp("key-1").unwrap(), Some("one-2".to_string()));
 
-        db.delete_kvp("key-1")?;
-        assert_eq!(db.read_kvp("key-1")?, None);
+        db.write_kvp("key-2".to_string(), "two".to_string())
+            .await
+            .unwrap();
+        assert_eq!(db.read_kvp("key-2").unwrap(), Some("two".to_string()));
 
-        Ok(())
+        db.delete_kvp("key-1".to_string()).await.unwrap();
+        assert_eq!(db.read_kvp("key-1").unwrap(), None);
     }
 }
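
A minimal sketch of how callers use the new global store, based only on the signatures above (the "last-session" key is hypothetical): reads stay synchronous, while writes and deletes go through the async write queue.

    use db::kvp::KEY_VALUE_STORE;

    // Read back a previously stored value, if any.
    let previous = KEY_VALUE_STORE.read_kvp("last-session")?; // -> Option<String>
    // Writes are queued and awaited.
    KEY_VALUE_STORE
        .write_kvp("last-session".to_string(), "workspace-1".to_string())
        .await?;
    KEY_VALUE_STORE.delete_kvp("last-session".to_string()).await?;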

crates/db/src/migrations.rs 🔗

@@ -1,15 +0,0 @@
-use rusqlite_migration::{Migrations, M};
-
-// use crate::items::ITEMS_M_1;
-use crate::kvp::KVP_M_1_UP;
-
-// This must be ordered by development time! Only ever add new migrations to the end!!
-// Bad things will probably happen if you don't monotonically edit this vec!!!!
-// And no re-ordering ever!!!!!!!!!! The results of these migrations are on the user's
-// file system and so everything we do here is locked in _f_o_r_e_v_e_r_.
-lazy_static::lazy_static! {
-    pub static ref MIGRATIONS: Migrations<'static> = Migrations::new(vec![
-        M::up(KVP_M_1_UP),
-        // M::up(ITEMS_M_1),
-    ]);
-}

crates/db/src/query.rs 🔗

@@ -0,0 +1,314 @@
+#[macro_export]
+macro_rules! query {
+    ($vis:vis fn $id:ident() -> Result<()> { $($sql:tt)+ }) => {
+        $vis fn $id(&self) -> $crate::anyhow::Result<()> {
+            use $crate::anyhow::Context;
+
+            let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
+
+            self.exec(sql_stmt)?().context(::std::format!(
+                "Error in {}, exec failed to execute or parse for: {}",
+                ::std::stringify!($id),
+                sql_stmt,
+            ))
+        }
+    };
+    ($vis:vis async fn $id:ident() -> Result<()> { $($sql:tt)+ }) => {
+        $vis async fn $id(&self) -> $crate::anyhow::Result<()> {
+            use $crate::anyhow::Context;
+
+            self.write(|connection| {
+                let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
+
+                connection.exec(sql_stmt)?().context(::std::format!(
+                    "Error in {}, exec failed to execute or parse for: {}",
+                    ::std::stringify!($id),
+                    sql_stmt
+                ))
+            }).await
+        }
+    };
+    ($vis:vis fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<()> { $($sql:tt)+ }) => {
+        $vis fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<()> {
+            use $crate::anyhow::Context;
+
+            let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
+
+            self.exec_bound::<($($arg_type),+)>(sql_stmt)?(($($arg),+))
+                .context(::std::format!(
+                    "Error in {}, exec_bound failed to execute or parse for: {}",
+                    ::std::stringify!($id),
+                    sql_stmt
+                ))
+        }
+    };
+    ($vis:vis async fn $id:ident($arg:ident: $arg_type:ty) -> Result<()> { $($sql:tt)+ }) => {
+        $vis async fn $id(&self, $arg: $arg_type) -> $crate::anyhow::Result<()> {
+            use $crate::anyhow::Context;
+
+            self.write(move |connection| {
+                let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
+
+                connection.exec_bound::<$arg_type>(sql_stmt)?($arg)
+                    .context(::std::format!(
+                        "Error in {}, exec_bound failed to execute or parse for: {}",
+                        ::std::stringify!($id),
+                        sql_stmt
+                    ))
+            }).await
+        }
+    };
+    ($vis:vis async fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<()> { $($sql:tt)+ }) => {
+        $vis async fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<()> {
+            use $crate::anyhow::Context;
+
+            self.write(move |connection| {
+                let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
+
+                connection.exec_bound::<($($arg_type),+)>(sql_stmt)?(($($arg),+))
+                    .context(::std::format!(
+                        "Error in {}, exec_bound failed to execute or parse for: {}",
+                        ::std::stringify!($id),
+                        sql_stmt
+                    ))
+            }).await
+        }
+    };
+    ($vis:vis fn $id:ident() ->  Result<Vec<$return_type:ty>> { $($sql:tt)+ }) => {
+        $vis fn $id(&self) -> $crate::anyhow::Result<Vec<$return_type>> {
+            use $crate::anyhow::Context;
+
+            let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
+
+            self.select::<$return_type>(sql_stmt)?(())
+                .context(::std::format!(
+                    "Error in {}, select failed to execute or parse for: {}",
+                    ::std::stringify!($id),
+                    sql_stmt
+                ))
+        }
+    };
+    ($vis:vis async fn $id:ident() -> Result<Vec<$return_type:ty>> { $($sql:tt)+ }) => {
+        $vis async fn $id(&self) -> $crate::anyhow::Result<Vec<$return_type>> {
+            use $crate::anyhow::Context;
+
+            self.write(|connection| {
+                let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
+
+                connection.select::<$return_type>(sql_stmt)?(())
+                    .context(::std::format!(
+                        "Error in {}, select failed to execute or parse for: {}",
+                        ::std::stringify!($id),
+                        sql_stmt
+                    ))
+            }).await
+        }
+    };
+    ($vis:vis fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<Vec<$return_type:ty>> { $($sql:tt)+ }) => {
+        $vis fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<Vec<$return_type>> {
+            use $crate::anyhow::Context;
+
+            let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
+
+            self.select_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+))
+                .context(::std::format!(
+                    "Error in {}, select_bound failed to execute or parse for: {}",
+                    ::std::stringify!($id),
+                    sql_stmt
+                ))
+        }
+    };
+    ($vis:vis async fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<Vec<$return_type:ty>> { $($sql:tt)+ }) => {
+        $vis async fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<Vec<$return_type>> {
+            use $crate::anyhow::Context;
+
+            self.write(|connection| {
+                let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
+
+                connection.select_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+))
+                    .context(::std::format!(
+                        "Error in {}, select_bound failed to execute or parse for: {}",
+                        ::std::stringify!($id),
+                        sql_stmt
+                    ))
+            }).await
+        }
+    };
+    ($vis:vis fn $id:ident() ->  Result<Option<$return_type:ty>> { $($sql:tt)+ }) => {
+        $vis fn $id(&self) -> $crate::anyhow::Result<Option<$return_type>> {
+            use $crate::anyhow::Context;
+
+            let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
+
+            self.select_row::<$return_type>(sql_stmt)?()
+                .context(::std::format!(
+                    "Error in {}, select_row failed to execute or parse for: {}",
+                    ::std::stringify!($id),
+                    sql_stmt
+                ))
+        }
+    };
+    ($vis:vis async fn $id:ident() ->  Result<Option<$return_type:ty>> { $($sql:tt)+ }) => {
+        $vis async fn $id(&self) -> $crate::anyhow::Result<Option<$return_type>> {
+            use $crate::anyhow::Context;
+
+            self.write(|connection| {
+                let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
+
+                connection.select_row::<$return_type>(sql_stmt)?()
+                    .context(::std::format!(
+                        "Error in {}, select_row failed to execute or parse for: {}",
+                        ::std::stringify!($id),
+                        sql_stmt
+                    ))
+            }).await
+        }
+    };
+    ($vis:vis fn $id:ident($arg:ident: $arg_type:ty) ->  Result<Option<$return_type:ty>> { $($sql:tt)+ }) => {
+        $vis fn $id(&self, $arg: $arg_type) -> $crate::anyhow::Result<Option<$return_type>>  {
+            use $crate::anyhow::Context;
+
+            let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
+
+            self.select_row_bound::<$arg_type, $return_type>(sql_stmt)?($arg)
+                .context(::std::format!(
+                    "Error in {}, select_row_bound failed to execute or parse for: {}",
+                    ::std::stringify!($id),
+                    sql_stmt
+                ))
+
+        }
+    };
+    ($vis:vis fn $id:ident($($arg:ident: $arg_type:ty),+) ->  Result<Option<$return_type:ty>> { $($sql:tt)+ }) => {
+        $vis fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<Option<$return_type>>  {
+            use $crate::anyhow::Context;
+
+            let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
+
+            self.select_row_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+))
+                .context(::std::format!(
+                    "Error in {}, select_row_bound failed to execute or parse for: {}",
+                    ::std::stringify!($id),
+                    sql_stmt
+                ))
+
+        }
+    };
+    ($vis:vis async fn $id:ident($($arg:ident: $arg_type:ty),+) ->  Result<Option<$return_type:ty>> { $($sql:tt)+ }) => {
+        $vis async fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<Option<$return_type>>  {
+            use $crate::anyhow::Context;
+
+
+            self.write(move |connection| {
+                let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
+
+                connection.select_row_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+))
+                    .context(::std::format!(
+                        "Error in {}, select_row_bound failed to execute or parse for: {}",
+                        ::std::stringify!($id),
+                        sql_stmt
+                    ))
+            }).await
+        }
+    };
+    ($vis:vis fn $id:ident() ->  Result<$return_type:ty> { $($sql:tt)+ }) => {
+        $vis fn $id(&self) ->  $crate::anyhow::Result<$return_type>  {
+            use $crate::anyhow::Context;
+
+            let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
+
+            self.select_row::<$return_type>(sql_stmt)?()
+                .context(::std::format!(
+                    "Error in {}, select_row failed to execute or parse for: {}",
+                    ::std::stringify!($id),
+                    sql_stmt
+                ))?
+                .context(::std::format!(
+                    "Error in {}, select_row expected single row result but found none for: {}",
+                    ::std::stringify!($id),
+                    sql_stmt
+                ))
+        }
+    };
+    ($vis:vis async fn $id:ident() ->  Result<$return_type:ty> { $($sql:tt)+ }) => {
+        $vis async fn $id(&self) ->  $crate::anyhow::Result<$return_type>  {
+            use $crate::anyhow::Context;
+
+            self.write(|connection| {
+                let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
+
+                connection.select_row::<$return_type>(sql_stmt)?()
+                    .context(::std::format!(
+                        "Error in {}, select_row failed to execute or parse for: {}",
+                        ::std::stringify!($id),
+                        sql_stmt
+                    ))?
+                    .context(::std::format!(
+                        "Error in {}, select_row expected single row result but found none for: {}",
+                        ::std::stringify!($id),
+                        sql_stmt
+                    ))
+            }).await
+        }
+    };
+    ($vis:vis fn $id:ident($arg:ident: $arg_type:ty) ->  Result<$return_type:ty> { $($sql:tt)+ }) => {
+        $vis fn $id(&self, $arg: $arg_type) ->  $crate::anyhow::Result<$return_type>  {
+            use $crate::anyhow::Context;
+
+            let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
+
+            self.select_row_bound::<$arg_type, $return_type>(sql_stmt)?($arg)
+                .context(::std::format!(
+                    "Error in {}, select_row_bound failed to execute or parse for: {}",
+                    ::std::stringify!($id),
+                    sql_stmt
+                ))?
+                .context(::std::format!(
+                    "Error in {}, select_row_bound expected single row result but found none for: {}",
+                    ::std::stringify!($id),
+                    sql_stmt
+                ))
+        }
+    };
+    ($vis:vis fn $id:ident($($arg:ident: $arg_type:ty),+) ->  Result<$return_type:ty> { $($sql:tt)+ }) => {
+        $vis fn $id(&self, $($arg: $arg_type),+) ->  $crate::anyhow::Result<$return_type>  {
+            use $crate::anyhow::Context;
+
+            let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
+
+            self.select_row_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+))
+                .context(::std::format!(
+                    "Error in {}, select_row_bound failed to execute or parse for: {}",
+                    ::std::stringify!($id),
+                    sql_stmt
+                ))?
+                .context(::std::format!(
+                    "Error in {}, select_row_bound expected single row result but found none for: {}",
+                    ::std::stringify!($id),
+                    sql_stmt
+                ))
+        }
+    };
+    ($vis:vis async fn $id:ident($($arg:ident: $arg_type:ty),+) ->  Result<$return_type:ty> { $($sql:tt)+ }) => {
+        $vis async fn $id(&self, $($arg: $arg_type),+) ->  $crate::anyhow::Result<$return_type>  {
+            use $crate::anyhow::Context;
+
+
+            self.write(|connection| {
+                let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
+
+                connection.select_row_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+))
+                    .context(::std::format!(
+                        "Error in {}, select_row_bound failed to execute or parse for: {}",
+                        ::std::stringify!($id),
+                        sql_stmt
+                    ))?
+                    .context(::std::format!(
+                        "Error in {}, select_row_bound expected single row result but found none for: {}",
+                        ::std::stringify!($id),
+                        sql_stmt
+                    ))
+            }).await
+        }
+    };
+}
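
The arms above map a method signature plus inline SQL onto the connection's exec/exec_bound/select/select_bound/select_row/select_row_bound helpers: sync arms run on the calling thread, async arms go through self.write. A minimal sketch of an invocation, assuming a hypothetical ItemStore connection wrapper (kvp.rs above shows the real usage):

    impl ItemStore {
        query! {
            // Matches the single-argument Option arm; expands to a sync method over select_row_bound.
            pub fn path_for_item(item_id: usize) -> Result<Option<String>> {
                SELECT path FROM item_path WHERE item_id = (?)
            }
        }

        query! {
            // Matches the multi-argument async unit arm; expands to a write-queued exec_bound call.
            pub async fn save_path(item_id: usize, path: String) -> Result<()> {
                INSERT OR REPLACE INTO item_path(item_id, path) VALUES ((?), (?))
            }
        }
    }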

crates/diagnostics/src/diagnostics.rs 🔗

@@ -5,8 +5,9 @@ use collections::{BTreeMap, HashSet};
 use editor::{
     diagnostic_block_renderer,
     display_map::{BlockDisposition, BlockId, BlockProperties, BlockStyle, RenderBlock},
-    highlight_diagnostic_message, Autoscroll, Editor, ExcerptId, ExcerptRange, MultiBuffer,
-    ToOffset,
+    highlight_diagnostic_message,
+    scroll::autoscroll::Autoscroll,
+    Editor, ExcerptId, ExcerptRange, MultiBuffer, ToOffset,
 };
 use gpui::{
     actions, elements::*, fonts::TextStyle, impl_internal_actions, serde_json, AnyViewHandle,
@@ -29,7 +30,10 @@ use std::{
     sync::Arc,
 };
 use util::TryFutureExt;
-use workspace::{ItemHandle as _, ItemNavHistory, Workspace};
+use workspace::{
+    item::{Item, ItemEvent, ItemHandle},
+    ItemNavHistory, Pane, Workspace,
+};
 
 actions!(diagnostics, [Deploy]);
 
@@ -503,7 +507,7 @@ impl ProjectDiagnosticsEditor {
     }
 }
 
-impl workspace::Item for ProjectDiagnosticsEditor {
+impl Item for ProjectDiagnosticsEditor {
     fn tab_content(
         &self,
         _detail: Option<usize>,
@@ -571,7 +575,7 @@ impl workspace::Item for ProjectDiagnosticsEditor {
         unreachable!()
     }
 
-    fn to_item_events(event: &Self::Event) -> Vec<workspace::ItemEvent> {
+    fn to_item_events(event: &Self::Event) -> Vec<ItemEvent> {
         Editor::to_item_events(event)
     }
 
@@ -581,7 +585,11 @@ impl workspace::Item for ProjectDiagnosticsEditor {
         });
     }
 
-    fn clone_on_split(&self, cx: &mut ViewContext<Self>) -> Option<Self>
+    fn clone_on_split(
+        &self,
+        _workspace_id: workspace::WorkspaceId,
+        cx: &mut ViewContext<Self>,
+    ) -> Option<Self>
     where
         Self: Sized,
     {
@@ -610,6 +618,20 @@ impl workspace::Item for ProjectDiagnosticsEditor {
     fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
         self.editor.update(cx, |editor, cx| editor.deactivated(cx));
     }
+
+    fn serialized_item_kind() -> Option<&'static str> {
+        Some("diagnostics")
+    }
+
+    fn deserialize(
+        project: ModelHandle<Project>,
+        workspace: WeakViewHandle<Workspace>,
+        _workspace_id: workspace::WorkspaceId,
+        _item_id: workspace::ItemId,
+        cx: &mut ViewContext<Pane>,
+    ) -> Task<Result<ViewHandle<Self>>> {
+        Task::ready(Ok(cx.add_view(|cx| Self::new(project, workspace, cx))))
+    }
 }
 
 fn diagnostic_header_renderer(diagnostic: Diagnostic) -> RenderBlock {
@@ -781,8 +803,15 @@ mod tests {
             .await;
 
         let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], cx).await;
-        let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project.clone(), |_, _| unimplemented!(), cx));
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(
+                Default::default(),
+                0,
+                project.clone(),
+                |_, _| unimplemented!(),
+                cx,
+            )
+        });
 
         // Create some diagnostics
         project.update(cx, |project, cx| {

crates/diagnostics/src/items.rs 🔗

@@ -7,7 +7,7 @@ use gpui::{
 use language::Diagnostic;
 use project::Project;
 use settings::Settings;
-use workspace::StatusItemView;
+use workspace::{item::ItemHandle, StatusItemView};
 
 pub struct DiagnosticIndicator {
     summary: project::DiagnosticSummary,
@@ -219,7 +219,7 @@ impl View for DiagnosticIndicator {
 impl StatusItemView for DiagnosticIndicator {
     fn set_active_pane_item(
         &mut self,
-        active_pane_item: Option<&dyn workspace::ItemHandle>,
+        active_pane_item: Option<&dyn ItemHandle>,
         cx: &mut ViewContext<Self>,
     ) {
         if let Some(editor) = active_pane_item.and_then(|item| item.downcast::<Editor>()) {

crates/drag_and_drop/src/drag_and_drop.rs 🔗

@@ -9,11 +9,17 @@ use gpui::{
     View, WeakViewHandle,
 };
 
+const DEAD_ZONE: f32 = 4.;
+
 enum State<V: View> {
     Down {
         region_offset: Vector2F,
         region: RectF,
     },
+    DeadZone {
+        region_offset: Vector2F,
+        region: RectF,
+    },
     Dragging {
         window_id: usize,
         position: Vector2F,
@@ -35,6 +41,13 @@ impl<V: View> Clone for State<V> {
                 region_offset,
                 region,
             },
+            &State::DeadZone {
+                region_offset,
+                region,
+            } => State::DeadZone {
+                region_offset,
+                region,
+            },
             State::Dragging {
                 window_id,
                 position,
@@ -101,7 +114,7 @@ impl<V: View> DragAndDrop<V> {
     pub fn drag_started(event: MouseDown, cx: &mut EventContext) {
         cx.update_global(|this: &mut Self, _| {
             this.currently_dragged = Some(State::Down {
-                region_offset: event.region.origin() - event.position,
+                region_offset: event.position - event.region.origin(),
                 region: event.region,
             });
         })
@@ -122,7 +135,31 @@ impl<V: View> DragAndDrop<V> {
                     region_offset,
                     region,
                 })
-                | Some(&State::Dragging {
+                | Some(&State::DeadZone {
+                    region_offset,
+                    region,
+                }) => {
+                    if (event.position - (region.origin() + region_offset)).length()
+                        > DEAD_ZONE
+                    {
+                        this.currently_dragged = Some(State::Dragging {
+                            window_id,
+                            region_offset,
+                            region,
+                            position: event.position,
+                            payload,
+                            render: Rc::new(move |payload, cx| {
+                                render(payload.downcast_ref::<T>().unwrap(), cx)
+                            }),
+                        });
+                    } else {
+                        this.currently_dragged = Some(State::DeadZone {
+                            region_offset,
+                            region,
+                        })
+                    }
+                }
+                Some(&State::Dragging {
                     region_offset,
                     region,
                     ..
@@ -151,6 +188,7 @@ impl<V: View> DragAndDrop<V> {
             .and_then(|state| {
                 match state {
                     State::Down { .. } => None,
+                    State::DeadZone { .. } => None,
                     State::Dragging {
                         window_id,
                         region_offset,
@@ -163,7 +201,7 @@ impl<V: View> DragAndDrop<V> {
                             return None;
                         }
 
-                        let position = position + region_offset;
+                        let position = position - region_offset;
                         Some(
                             Overlay::new(
                                 MouseEventHandler::<DraggedElementHandler>::new(0, cx, |_, cx| {
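
The new DeadZone state defers promotion to Dragging until the pointer has moved more than DEAD_ZONE (4 px) from the original mouse-down position; because region_offset is now computed as position - region.origin(), origin + offset reconstructs that point. In isolation, the promotion test is (a sketch mirroring the check above):

    fn left_dead_zone(position: Vector2F, region: RectF, region_offset: Vector2F) -> bool {
        // True once the pointer has strayed far enough from where the drag began.
        (position - (region.origin() + region_offset)).length() > DEAD_ZONE
    }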

crates/editor/Cargo.toml 🔗

@@ -23,6 +23,7 @@ test-support = [
 drag_and_drop = { path = "../drag_and_drop" }
 text = { path = "../text" }
 clock = { path = "../clock" }
+db = { path = "../db" }
 collections = { path = "../collections" }
 context_menu = { path = "../context_menu" }
 fuzzy = { path = "../fuzzy" }
@@ -37,6 +38,7 @@ snippet = { path = "../snippet" }
 sum_tree = { path = "../sum_tree" }
 theme = { path = "../theme" }
 util = { path = "../util" }
+sqlez = { path = "../sqlez" }
 workspace = { path = "../workspace" }
 aho-corasick = "0.7"
 anyhow = "1.0"

crates/editor/src/editor.rs 🔗

@@ -9,6 +9,8 @@ mod link_go_to_definition;
 mod mouse_context_menu;
 pub mod movement;
 mod multi_buffer;
+mod persistence;
+pub mod scroll;
 pub mod selections_collection;
 
 #[cfg(test)]
@@ -32,13 +34,13 @@ use gpui::{
     elements::*,
     executor,
     fonts::{self, HighlightStyle, TextStyle},
-    geometry::vector::{vec2f, Vector2F},
+    geometry::vector::Vector2F,
     impl_actions, impl_internal_actions,
     platform::CursorStyle,
     serde_json::json,
-    text_layout, AnyViewHandle, AppContext, AsyncAppContext, Axis, ClipboardItem, Element,
-    ElementBox, Entity, ModelHandle, MouseButton, MutableAppContext, RenderContext, Subscription,
-    Task, View, ViewContext, ViewHandle, WeakViewHandle,
+    AnyViewHandle, AppContext, AsyncAppContext, ClipboardItem, Element, ElementBox, Entity,
+    ModelHandle, MouseButton, MutableAppContext, RenderContext, Subscription, Task, View,
+    ViewContext, ViewHandle, WeakViewHandle,
 };
 use highlight_matching_bracket::refresh_matching_bracket_highlights;
 use hover_popover::{hide_hover, HoverState};
@@ -60,11 +62,13 @@ pub use multi_buffer::{
 use multi_buffer::{MultiBufferChunks, ToOffsetUtf16};
 use ordered_float::OrderedFloat;
 use project::{FormatTrigger, LocationLink, Project, ProjectPath, ProjectTransaction};
+use scroll::{
+    autoscroll::Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager, ScrollbarAutoHide,
+};
 use selections_collection::{resolve_multiple, MutableSelectionsCollection, SelectionsCollection};
 use serde::{Deserialize, Serialize};
 use settings::Settings;
 use smallvec::SmallVec;
-use smol::Timer;
 use snippet::Snippet;
 use std::{
     any::TypeId,
@@ -80,16 +84,14 @@ use std::{
 pub use sum_tree::Bias;
 use theme::{DiagnosticStyle, Theme};
 use util::{post_inc, ResultExt, TryFutureExt};
-use workspace::{ItemNavHistory, Workspace};
+use workspace::{ItemNavHistory, Workspace, WorkspaceId};
 
 use crate::git::diff_hunk_to_display;
 
 const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500);
-const SCROLLBAR_SHOW_INTERVAL: Duration = Duration::from_secs(1);
 const MAX_LINE_LEN: usize = 1024;
 const MIN_NAVIGATION_HISTORY_ROW_DELTA: i64 = 10;
 const MAX_SELECTION_HISTORY_LEN: usize = 1024;
-pub const SCROLL_EVENT_SEPARATION: Duration = Duration::from_millis(28);
 
 pub const FORMAT_TIMEOUT: Duration = Duration::from_secs(2);
 
@@ -99,12 +101,6 @@ pub struct SelectNext {
     pub replace_newest: bool,
 }
 
-#[derive(Clone, PartialEq)]
-pub struct Scroll {
-    pub scroll_position: Vector2F,
-    pub axis: Option<Axis>,
-}
-
 #[derive(Clone, PartialEq)]
 pub struct Select(pub SelectPhase);
 
@@ -257,7 +253,7 @@ impl_actions!(
     ]
 );
 
-impl_internal_actions!(editor, [Scroll, Select, Jump]);
+impl_internal_actions!(editor, [Select, Jump]);
 
 enum DocumentHighlightRead {}
 enum DocumentHighlightWrite {}
@@ -269,12 +265,8 @@ pub enum Direction {
     Next,
 }
 
-#[derive(Default)]
-struct ScrollbarAutoHide(bool);
-
 pub fn init(cx: &mut MutableAppContext) {
     cx.add_action(Editor::new_file);
-    cx.add_action(Editor::scroll);
     cx.add_action(Editor::select);
     cx.add_action(Editor::cancel);
     cx.add_action(Editor::newline);
@@ -304,12 +296,9 @@ pub fn init(cx: &mut MutableAppContext) {
     cx.add_action(Editor::redo);
     cx.add_action(Editor::move_up);
     cx.add_action(Editor::move_page_up);
-    cx.add_action(Editor::page_up);
     cx.add_action(Editor::move_down);
     cx.add_action(Editor::move_page_down);
-    cx.add_action(Editor::page_down);
     cx.add_action(Editor::next_screen);
-
     cx.add_action(Editor::move_left);
     cx.add_action(Editor::move_right);
     cx.add_action(Editor::move_to_previous_word_start);
@@ -369,9 +358,11 @@ pub fn init(cx: &mut MutableAppContext) {
     hover_popover::init(cx);
     link_go_to_definition::init(cx);
     mouse_context_menu::init(cx);
+    scroll::actions::init(cx);
 
     workspace::register_project_item::<Editor>(cx);
     workspace::register_followable_item::<Editor>(cx);
+    workspace::register_deserializable_item::<Editor>(cx);
 }
 
 trait InvalidationRegion {
@@ -409,46 +400,6 @@ pub enum SelectMode {
     All,
 }
 
-#[derive(PartialEq, Eq)]
-pub enum Autoscroll {
-    Next,
-    Strategy(AutoscrollStrategy),
-}
-
-impl Autoscroll {
-    pub fn fit() -> Self {
-        Self::Strategy(AutoscrollStrategy::Fit)
-    }
-
-    pub fn newest() -> Self {
-        Self::Strategy(AutoscrollStrategy::Newest)
-    }
-
-    pub fn center() -> Self {
-        Self::Strategy(AutoscrollStrategy::Center)
-    }
-}
-
-#[derive(PartialEq, Eq, Default)]
-pub enum AutoscrollStrategy {
-    Fit,
-    Newest,
-    #[default]
-    Center,
-    Top,
-    Bottom,
-}
-
-impl AutoscrollStrategy {
-    fn next(&self) -> Self {
-        match self {
-            AutoscrollStrategy::Center => AutoscrollStrategy::Top,
-            AutoscrollStrategy::Top => AutoscrollStrategy::Bottom,
-            _ => AutoscrollStrategy::Center,
-        }
-    }
-}
-
 #[derive(Copy, Clone, PartialEq, Eq)]
 pub enum EditorMode {
     SingleLine,
@@ -475,74 +426,12 @@ type CompletionId = usize;
 type GetFieldEditorTheme = dyn Fn(&theme::Theme) -> theme::FieldEditor;
 type OverrideTextStyle = dyn Fn(&EditorStyle) -> Option<HighlightStyle>;
 
-#[derive(Clone, Copy)]
-pub struct OngoingScroll {
-    last_timestamp: Instant,
-    axis: Option<Axis>,
-}
-
-impl OngoingScroll {
-    fn initial() -> OngoingScroll {
-        OngoingScroll {
-            last_timestamp: Instant::now() - SCROLL_EVENT_SEPARATION,
-            axis: None,
-        }
-    }
-
-    fn update(&mut self, axis: Option<Axis>) {
-        self.last_timestamp = Instant::now();
-        self.axis = axis;
-    }
-
-    pub fn filter(&self, delta: &mut Vector2F) -> Option<Axis> {
-        const UNLOCK_PERCENT: f32 = 1.9;
-        const UNLOCK_LOWER_BOUND: f32 = 6.;
-        let mut axis = self.axis;
-
-        let x = delta.x().abs();
-        let y = delta.y().abs();
-        let duration = Instant::now().duration_since(self.last_timestamp);
-        if duration > SCROLL_EVENT_SEPARATION {
-            //New ongoing scroll will start, determine axis
-            axis = if x <= y {
-                Some(Axis::Vertical)
-            } else {
-                Some(Axis::Horizontal)
-            };
-        } else if x.max(y) >= UNLOCK_LOWER_BOUND {
-            //Check if the current ongoing will need to unlock
-            match axis {
-                Some(Axis::Vertical) => {
-                    if x > y && x >= y * UNLOCK_PERCENT {
-                        axis = None;
-                    }
-                }
-
-                Some(Axis::Horizontal) => {
-                    if y > x && y >= x * UNLOCK_PERCENT {
-                        axis = None;
-                    }
-                }
-
-                None => {}
-            }
-        }
-
-        match axis {
-            Some(Axis::Vertical) => *delta = vec2f(0., delta.y()),
-            Some(Axis::Horizontal) => *delta = vec2f(delta.x(), 0.),
-            None => {}
-        }
-
-        axis
-    }
-}
-
 pub struct Editor {
     handle: WeakViewHandle<Self>,
     buffer: ModelHandle<MultiBuffer>,
     display_map: ModelHandle<DisplayMap>,
     pub selections: SelectionsCollection,
+    pub scroll_manager: ScrollManager,
     columnar_selection_tail: Option<Anchor>,
     add_selections_state: Option<AddSelectionsState>,
     select_next_state: Option<SelectNextState>,
@@ -552,10 +441,6 @@ pub struct Editor {
     select_larger_syntax_node_stack: Vec<Box<[Selection<usize>]>>,
     ime_transaction: Option<TransactionId>,
     active_diagnostics: Option<ActiveDiagnosticGroup>,
-    ongoing_scroll: OngoingScroll,
-    scroll_position: Vector2F,
-    scroll_top_anchor: Anchor,
-    autoscroll_request: Option<(Autoscroll, bool)>,
     soft_wrap_mode_override: Option<settings::SoftWrap>,
     get_field_editor_theme: Option<Arc<GetFieldEditorTheme>>,
     override_text_style: Option<Box<OverrideTextStyle>>,
@@ -563,10 +448,7 @@ pub struct Editor {
     focused: bool,
     blink_manager: ModelHandle<BlinkManager>,
     show_local_selections: bool,
-    show_scrollbars: bool,
-    hide_scrollbar_task: Option<Task<()>>,
     mode: EditorMode,
-    vertical_scroll_margin: f32,
     placeholder_text: Option<Arc<str>>,
     highlighted_rows: Option<Range<u32>>,
     #[allow(clippy::type_complexity)]
@@ -582,13 +464,12 @@ pub struct Editor {
     pending_rename: Option<RenameState>,
     searchable: bool,
     cursor_shape: CursorShape,
+    workspace_id: Option<WorkspaceId>,
     keymap_context_layers: BTreeMap<TypeId, gpui::keymap::Context>,
     input_enabled: bool,
     leader_replica_id: Option<u16>,
     hover_state: HoverState,
     link_go_to_definition_state: LinkGoToDefinitionState,
-    visible_line_count: Option<f32>,
-    last_autoscroll: Option<(Vector2F, f32, f32, AutoscrollStrategy)>,
     _subscriptions: Vec<Subscription>,
 }
 
@@ -597,9 +478,8 @@ pub struct EditorSnapshot {
     pub display_snapshot: DisplaySnapshot,
     pub placeholder_text: Option<Arc<str>>,
     is_focused: bool,
+    scroll_anchor: ScrollAnchor,
     ongoing_scroll: OngoingScroll,
-    scroll_position: Vector2F,
-    scroll_top_anchor: Anchor,
 }
 
 #[derive(Clone, Debug)]
@@ -1087,12 +967,9 @@ pub struct ClipboardSelection {
 
 #[derive(Debug)]
 pub struct NavigationData {
-    // Matching offsets for anchor and scroll_top_anchor allows us to recreate the anchor if the buffer
-    // has since been closed
     cursor_anchor: Anchor,
     cursor_position: Point,
-    scroll_position: Vector2F,
-    scroll_top_anchor: Anchor,
+    scroll_anchor: ScrollAnchor,
     scroll_top_row: u32,
 }
 
@@ -1160,9 +1037,8 @@ impl Editor {
                 display_map.set_state(&snapshot, cx);
             });
         });
-        clone.selections.set_state(&self.selections);
-        clone.scroll_position = self.scroll_position;
-        clone.scroll_top_anchor = self.scroll_top_anchor;
+        clone.selections.clone_state(&self.selections);
+        clone.scroll_manager.clone_state(&self.scroll_manager);
         clone.searchable = self.searchable;
         clone
     }
@@ -1197,6 +1073,7 @@ impl Editor {
             buffer: buffer.clone(),
             display_map: display_map.clone(),
             selections,
+            scroll_manager: ScrollManager::new(),
             columnar_selection_tail: None,
             add_selections_state: None,
             select_next_state: None,
@@ -1209,17 +1086,10 @@ impl Editor {
             soft_wrap_mode_override: None,
             get_field_editor_theme,
             project,
-            ongoing_scroll: OngoingScroll::initial(),
-            scroll_position: Vector2F::zero(),
-            scroll_top_anchor: Anchor::min(),
-            autoscroll_request: None,
             focused: false,
             blink_manager: blink_manager.clone(),
             show_local_selections: true,
-            show_scrollbars: true,
-            hide_scrollbar_task: None,
             mode,
-            vertical_scroll_margin: 3.0,
             placeholder_text: None,
             highlighted_rows: None,
             background_highlights: Default::default(),
@@ -1235,13 +1105,12 @@ impl Editor {
             searchable: true,
             override_text_style: None,
             cursor_shape: Default::default(),
+            workspace_id: None,
             keymap_context_layers: Default::default(),
             input_enabled: true,
             leader_replica_id: None,
             hover_state: Default::default(),
             link_go_to_definition_state: Default::default(),
-            visible_line_count: None,
-            last_autoscroll: None,
             _subscriptions: vec![
                 cx.observe(&buffer, Self::on_buffer_changed),
                 cx.subscribe(&buffer, Self::on_buffer_event),
@@ -1250,7 +1119,7 @@ impl Editor {
             ],
         };
         this.end_selection(cx);
-        this.make_scrollbar_visible(cx);
+        this.scroll_manager.show_scrollbar(cx);
 
         let editor_created_event = EditorCreated(cx.handle());
         cx.emit_global(editor_created_event);
@@ -1303,9 +1172,8 @@ impl Editor {
         EditorSnapshot {
             mode: self.mode,
             display_snapshot: self.display_map.update(cx, |map, cx| map.snapshot(cx)),
-            ongoing_scroll: self.ongoing_scroll,
-            scroll_position: self.scroll_position,
-            scroll_top_anchor: self.scroll_top_anchor,
+            scroll_anchor: self.scroll_manager.anchor(),
+            ongoing_scroll: self.scroll_manager.ongoing_scroll(),
             placeholder_text: self.placeholder_text.clone(),
             is_focused: self
                 .handle
@@ -1344,64 +1212,6 @@ impl Editor {
         cx.notify();
     }
 
-    pub fn set_vertical_scroll_margin(&mut self, margin_rows: usize, cx: &mut ViewContext<Self>) {
-        self.vertical_scroll_margin = margin_rows as f32;
-        cx.notify();
-    }
-
-    pub fn set_scroll_position(&mut self, scroll_position: Vector2F, cx: &mut ViewContext<Self>) {
-        self.set_scroll_position_internal(scroll_position, true, cx);
-    }
-
-    fn set_scroll_position_internal(
-        &mut self,
-        scroll_position: Vector2F,
-        local: bool,
-        cx: &mut ViewContext<Self>,
-    ) {
-        let map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
-
-        if scroll_position.y() <= 0. {
-            self.scroll_top_anchor = Anchor::min();
-            self.scroll_position = scroll_position.max(vec2f(0., 0.));
-        } else {
-            let scroll_top_buffer_offset =
-                DisplayPoint::new(scroll_position.y() as u32, 0).to_offset(&map, Bias::Right);
-            let anchor = map
-                .buffer_snapshot
-                .anchor_at(scroll_top_buffer_offset, Bias::Right);
-            self.scroll_position = vec2f(
-                scroll_position.x(),
-                scroll_position.y() - anchor.to_display_point(&map).row() as f32,
-            );
-            self.scroll_top_anchor = anchor;
-        }
-
-        self.make_scrollbar_visible(cx);
-        self.autoscroll_request.take();
-        hide_hover(self, cx);
-
-        cx.emit(Event::ScrollPositionChanged { local });
-        cx.notify();
-    }
-
-    fn set_visible_line_count(&mut self, lines: f32) {
-        self.visible_line_count = Some(lines)
-    }
-
-    fn set_scroll_top_anchor(
-        &mut self,
-        anchor: Anchor,
-        position: Vector2F,
-        cx: &mut ViewContext<Self>,
-    ) {
-        self.scroll_top_anchor = anchor;
-        self.scroll_position = position;
-        self.make_scrollbar_visible(cx);
-        cx.emit(Event::ScrollPositionChanged { local: false });
-        cx.notify();
-    }
-
     pub fn set_cursor_shape(&mut self, cursor_shape: CursorShape, cx: &mut ViewContext<Self>) {
         self.cursor_shape = cursor_shape;
         cx.notify();
@@ -1427,199 +1237,6 @@ impl Editor {
         self.input_enabled = input_enabled;
     }
 
-    pub fn scroll_position(&self, cx: &mut ViewContext<Self>) -> Vector2F {
-        let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
-        compute_scroll_position(&display_map, self.scroll_position, &self.scroll_top_anchor)
-    }
-
-    pub fn clamp_scroll_left(&mut self, max: f32) -> bool {
-        if max < self.scroll_position.x() {
-            self.scroll_position.set_x(max);
-            true
-        } else {
-            false
-        }
-    }
-
-    pub fn autoscroll_vertically(
-        &mut self,
-        viewport_height: f32,
-        line_height: f32,
-        cx: &mut ViewContext<Self>,
-    ) -> bool {
-        let visible_lines = viewport_height / line_height;
-        let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
-        let mut scroll_position =
-            compute_scroll_position(&display_map, self.scroll_position, &self.scroll_top_anchor);
-        let max_scroll_top = if matches!(self.mode, EditorMode::AutoHeight { .. }) {
-            (display_map.max_point().row() as f32 - visible_lines + 1.).max(0.)
-        } else {
-            display_map.max_point().row() as f32
-        };
-        if scroll_position.y() > max_scroll_top {
-            scroll_position.set_y(max_scroll_top);
-            self.set_scroll_position(scroll_position, cx);
-        }
-
-        let (autoscroll, local) = if let Some(autoscroll) = self.autoscroll_request.take() {
-            autoscroll
-        } else {
-            return false;
-        };
-
-        let first_cursor_top;
-        let last_cursor_bottom;
-        if let Some(highlighted_rows) = &self.highlighted_rows {
-            first_cursor_top = highlighted_rows.start as f32;
-            last_cursor_bottom = first_cursor_top + 1.;
-        } else if autoscroll == Autoscroll::newest() {
-            let newest_selection = self.selections.newest::<Point>(cx);
-            first_cursor_top = newest_selection.head().to_display_point(&display_map).row() as f32;
-            last_cursor_bottom = first_cursor_top + 1.;
-        } else {
-            let selections = self.selections.all::<Point>(cx);
-            first_cursor_top = selections
-                .first()
-                .unwrap()
-                .head()
-                .to_display_point(&display_map)
-                .row() as f32;
-            last_cursor_bottom = selections
-                .last()
-                .unwrap()
-                .head()
-                .to_display_point(&display_map)
-                .row() as f32
-                + 1.0;
-        }
-
-        let margin = if matches!(self.mode, EditorMode::AutoHeight { .. }) {
-            0.
-        } else {
-            ((visible_lines - (last_cursor_bottom - first_cursor_top)) / 2.0).floor()
-        };
-        if margin < 0.0 {
-            return false;
-        }
-
-        let strategy = match autoscroll {
-            Autoscroll::Strategy(strategy) => strategy,
-            Autoscroll::Next => {
-                let last_autoscroll = &self.last_autoscroll;
-                if let Some(last_autoscroll) = last_autoscroll {
-                    if self.scroll_position == last_autoscroll.0
-                        && first_cursor_top == last_autoscroll.1
-                        && last_cursor_bottom == last_autoscroll.2
-                    {
-                        last_autoscroll.3.next()
-                    } else {
-                        AutoscrollStrategy::default()
-                    }
-                } else {
-                    AutoscrollStrategy::default()
-                }
-            }
-        };
-
-        match strategy {
-            AutoscrollStrategy::Fit | AutoscrollStrategy::Newest => {
-                let margin = margin.min(self.vertical_scroll_margin);
-                let target_top = (first_cursor_top - margin).max(0.0);
-                let target_bottom = last_cursor_bottom + margin;
-                let start_row = scroll_position.y();
-                let end_row = start_row + visible_lines;
-
-                if target_top < start_row {
-                    scroll_position.set_y(target_top);
-                    self.set_scroll_position_internal(scroll_position, local, cx);
-                } else if target_bottom >= end_row {
-                    scroll_position.set_y(target_bottom - visible_lines);
-                    self.set_scroll_position_internal(scroll_position, local, cx);
-                }
-            }
-            AutoscrollStrategy::Center => {
-                scroll_position.set_y((first_cursor_top - margin).max(0.0));
-                self.set_scroll_position_internal(scroll_position, local, cx);
-            }
-            AutoscrollStrategy::Top => {
-                scroll_position.set_y((first_cursor_top).max(0.0));
-                self.set_scroll_position_internal(scroll_position, local, cx);
-            }
-            AutoscrollStrategy::Bottom => {
-                scroll_position.set_y((last_cursor_bottom - visible_lines).max(0.0));
-                self.set_scroll_position_internal(scroll_position, local, cx);
-            }
-        }
-
-        self.last_autoscroll = Some((
-            self.scroll_position,
-            first_cursor_top,
-            last_cursor_bottom,
-            strategy,
-        ));
-
-        true
-    }
-
-    pub fn autoscroll_horizontally(
-        &mut self,
-        start_row: u32,
-        viewport_width: f32,
-        scroll_width: f32,
-        max_glyph_width: f32,
-        layouts: &[text_layout::Line],
-        cx: &mut ViewContext<Self>,
-    ) -> bool {
-        let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
-        let selections = self.selections.all::<Point>(cx);
-
-        let mut target_left;
-        let mut target_right;
-
-        if self.highlighted_rows.is_some() {
-            target_left = 0.0_f32;
-            target_right = 0.0_f32;
-        } else {
-            target_left = std::f32::INFINITY;
-            target_right = 0.0_f32;
-            for selection in selections {
-                let head = selection.head().to_display_point(&display_map);
-                if head.row() >= start_row && head.row() < start_row + layouts.len() as u32 {
-                    let start_column = head.column().saturating_sub(3);
-                    let end_column = cmp::min(display_map.line_len(head.row()), head.column() + 3);
-                    target_left = target_left.min(
-                        layouts[(head.row() - start_row) as usize]
-                            .x_for_index(start_column as usize),
-                    );
-                    target_right = target_right.max(
-                        layouts[(head.row() - start_row) as usize].x_for_index(end_column as usize)
-                            + max_glyph_width,
-                    );
-                }
-            }
-        }
-
-        target_right = target_right.min(scroll_width);
-
-        if target_right - target_left > viewport_width {
-            return false;
-        }
-
-        let scroll_left = self.scroll_position.x() * max_glyph_width;
-        let scroll_right = scroll_left + viewport_width;
-
-        if target_left < scroll_left {
-            self.scroll_position.set_x(target_left / max_glyph_width);
-            true
-        } else if target_right > scroll_right {
-            self.scroll_position
-                .set_x((target_right - viewport_width) / max_glyph_width);
-            true
-        } else {
-            false
-        }
-    }
-
     fn selections_did_change(
         &mut self,
         local: bool,
@@ -1742,11 +1359,6 @@ impl Editor {
         });
     }
 
-    fn scroll(&mut self, action: &Scroll, cx: &mut ViewContext<Self>) {
-        self.ongoing_scroll.update(action.axis);
-        self.set_scroll_position(action.scroll_position, cx);
-    }
-
     fn select(&mut self, Select(phase): &Select, cx: &mut ViewContext<Self>) {
         self.hide_context_menu(cx);
 
@@ -2810,7 +2422,7 @@ impl Editor {
                         let all_edits_within_excerpt = buffer.read_with(&cx, |buffer, _| {
                             let excerpt_range = excerpt_range.to_offset(buffer);
                             buffer
-                                .edited_ranges_for_transaction(transaction)
+                                .edited_ranges_for_transaction::<usize>(transaction)
                                 .all(|range| {
                                     excerpt_range.start <= range.start
                                         && excerpt_range.end >= range.end
@@ -4069,23 +3681,6 @@ impl Editor {
         })
     }
 
-    pub fn next_screen(&mut self, _: &NextScreen, cx: &mut ViewContext<Editor>) {
-        if self.take_rename(true, cx).is_some() {
-            return;
-        }
-
-        if let Some(_) = self.context_menu.as_mut() {
-            return;
-        }
-
-        if matches!(self.mode, EditorMode::SingleLine) {
-            cx.propagate_action();
-            return;
-        }
-
-        self.request_autoscroll(Autoscroll::Next, cx);
-    }
-
     pub fn move_up(&mut self, _: &MoveUp, cx: &mut ViewContext<Self>) {
         if self.take_rename(true, cx).is_some() {
             return;
@@ -4119,10 +3714,13 @@ impl Editor {
             return;
         }
 
-        if let Some(context_menu) = self.context_menu.as_mut() {
-            if context_menu.select_first(cx) {
-                return;
-            }
+        if self
+            .context_menu
+            .as_mut()
+            .map(|menu| menu.select_first(cx))
+            .unwrap_or(false)
+        {
+            return;
         }
 
         if matches!(self.mode, EditorMode::SingleLine) {
@@ -4130,9 +3728,10 @@ impl Editor {
             return;
         }
 
-        let row_count = match self.visible_line_count {
-            Some(row_count) => row_count as u32 - 1,
-            None => return,
+        let row_count = if let Some(row_count) = self.visible_line_count() {
+            row_count as u32 - 1
+        } else {
+            return;
         };
 
         let autoscroll = if action.center_cursor {
@@ -4154,32 +3753,6 @@ impl Editor {
         });
     }
 
-    pub fn page_up(&mut self, _: &PageUp, cx: &mut ViewContext<Self>) {
-        if self.take_rename(true, cx).is_some() {
-            return;
-        }
-
-        if let Some(context_menu) = self.context_menu.as_mut() {
-            if context_menu.select_first(cx) {
-                return;
-            }
-        }
-
-        if matches!(self.mode, EditorMode::SingleLine) {
-            cx.propagate_action();
-            return;
-        }
-
-        let lines = match self.visible_line_count {
-            Some(lines) => lines,
-            None => return,
-        };
-
-        let cur_position = self.scroll_position(cx);
-        let new_pos = cur_position - vec2f(0., lines + 1.);
-        self.set_scroll_position(new_pos, cx);
-    }
-
     pub fn select_up(&mut self, _: &SelectUp, cx: &mut ViewContext<Self>) {
         self.change_selections(Some(Autoscroll::fit()), cx, |s| {
             s.move_heads_with(|map, head, goal| movement::up(map, head, goal, false))
@@ -4217,10 +3790,13 @@ impl Editor {
             return;
         }
 
-        if let Some(context_menu) = self.context_menu.as_mut() {
-            if context_menu.select_last(cx) {
-                return;
-            }
+        if self
+            .context_menu
+            .as_mut()
+            .map(|menu| menu.select_last(cx))
+            .unwrap_or(false)
+        {
+            return;
         }
 
         if matches!(self.mode, EditorMode::SingleLine) {
@@ -4228,9 +3804,10 @@ impl Editor {
             return;
         }
 
-        let row_count = match self.visible_line_count {
-            Some(row_count) => row_count as u32 - 1,
-            None => return,
+        let row_count = if let Some(row_count) = self.visible_line_count() {
+            row_count as u32 - 1
+        } else {
+            return;
         };
 
         let autoscroll = if action.center_cursor {
@@ -4252,32 +3829,6 @@ impl Editor {
         });
     }
 
-    pub fn page_down(&mut self, _: &PageDown, cx: &mut ViewContext<Self>) {
-        if self.take_rename(true, cx).is_some() {
-            return;
-        }
-
-        if let Some(context_menu) = self.context_menu.as_mut() {
-            if context_menu.select_last(cx) {
-                return;
-            }
-        }
-
-        if matches!(self.mode, EditorMode::SingleLine) {
-            cx.propagate_action();
-            return;
-        }
-
-        let lines = match self.visible_line_count {
-            Some(lines) => lines,
-            None => return,
-        };
-
-        let cur_position = self.scroll_position(cx);
-        let new_pos = cur_position + vec2f(0., lines - 1.);
-        self.set_scroll_position(new_pos, cx);
-    }
-
     pub fn select_down(&mut self, _: &SelectDown, cx: &mut ViewContext<Self>) {
         self.change_selections(Some(Autoscroll::fit()), cx, |s| {
             s.move_heads_with(|map, head, goal| movement::down(map, head, goal, false))
@@ -4598,18 +4149,19 @@ impl Editor {
 
     fn push_to_nav_history(
         &self,
-        position: Anchor,
+        cursor_anchor: Anchor,
         new_position: Option<Point>,
         cx: &mut ViewContext<Self>,
     ) {
         if let Some(nav_history) = &self.nav_history {
             let buffer = self.buffer.read(cx).read(cx);
-            let point = position.to_point(&buffer);
-            let scroll_top_row = self.scroll_top_anchor.to_point(&buffer).row;
+            let cursor_position = cursor_anchor.to_point(&buffer);
+            let scroll_state = self.scroll_manager.anchor();
+            let scroll_top_row = scroll_state.top_row(&buffer);
             drop(buffer);
 
             if let Some(new_position) = new_position {
-                let row_delta = (new_position.row as i64 - point.row as i64).abs();
+                let row_delta = (new_position.row as i64 - cursor_position.row as i64).abs();
                 if row_delta < MIN_NAVIGATION_HISTORY_ROW_DELTA {
                     return;
                 }
@@ -4617,10 +4169,9 @@ impl Editor {
 
             nav_history.push(
                 Some(NavigationData {
-                    cursor_anchor: position,
-                    cursor_position: point,
-                    scroll_position: self.scroll_position,
-                    scroll_top_anchor: self.scroll_top_anchor,
+                    cursor_anchor,
+                    cursor_position,
+                    scroll_anchor: scroll_state,
                     scroll_top_row,
                 }),
                 cx,
@@ -5918,16 +5469,6 @@ impl Editor {
         });
     }
 
-    pub fn request_autoscroll(&mut self, autoscroll: Autoscroll, cx: &mut ViewContext<Self>) {
-        self.autoscroll_request = Some((autoscroll, true));
-        cx.notify();
-    }
-
-    fn request_autoscroll_remotely(&mut self, autoscroll: Autoscroll, cx: &mut ViewContext<Self>) {
-        self.autoscroll_request = Some((autoscroll, false));
-        cx.notify();
-    }
-
     pub fn transact(
         &mut self,
         cx: &mut ViewContext<Self>,
@@ -6336,31 +5877,6 @@ impl Editor {
         self.blink_manager.read(cx).visible() && self.focused
     }
 
-    pub fn show_scrollbars(&self) -> bool {
-        self.show_scrollbars
-    }
-
-    fn make_scrollbar_visible(&mut self, cx: &mut ViewContext<Self>) {
-        if !self.show_scrollbars {
-            self.show_scrollbars = true;
-            cx.notify();
-        }
-
-        if cx.default_global::<ScrollbarAutoHide>().0 {
-            self.hide_scrollbar_task = Some(cx.spawn_weak(|this, mut cx| async move {
-                Timer::after(SCROLLBAR_SHOW_INTERVAL).await;
-                if let Some(this) = this.upgrade(&cx) {
-                    this.update(&mut cx, |this, cx| {
-                        this.show_scrollbars = false;
-                        cx.notify();
-                    });
-                }
-            }));
-        } else {
-            self.hide_scrollbar_task = None;
-        }
-    }
-
     fn on_buffer_changed(&mut self, _: ModelHandle<MultiBuffer>, cx: &mut ViewContext<Self>) {
         cx.notify();
     }
@@ -6568,11 +6084,7 @@ impl EditorSnapshot {
     }
 
     pub fn scroll_position(&self) -> Vector2F {
-        compute_scroll_position(
-            &self.display_snapshot,
-            self.scroll_position,
-            &self.scroll_top_anchor,
-        )
+        self.scroll_anchor.scroll_position(&self.display_snapshot)
     }
 }
 
@@ -6584,20 +6096,6 @@ impl Deref for EditorSnapshot {
     }
 }
 
-fn compute_scroll_position(
-    snapshot: &DisplaySnapshot,
-    mut scroll_position: Vector2F,
-    scroll_top_anchor: &Anchor,
-) -> Vector2F {
-    if *scroll_top_anchor != Anchor::min() {
-        let scroll_top = scroll_top_anchor.to_display_point(snapshot).row() as f32;
-        scroll_position.set_y(scroll_top + scroll_position.y());
-    } else {
-        scroll_position.set_y(0.);
-    }
-    scroll_position
-}
-
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub enum Event {
     ExcerptsAdded {
@@ -6622,7 +6120,6 @@ pub enum Event {
         local: bool,
     },
     Closed,
-    IgnoredInput,
 }
 
 pub struct EditorFocused(pub ViewHandle<Editor>);
@@ -6808,7 +6305,6 @@ impl View for Editor {
         cx: &mut ViewContext<Self>,
     ) {
         if !self.input_enabled {
-            cx.emit(Event::IgnoredInput);
             return;
         }
 
@@ -6845,7 +6341,6 @@ impl View for Editor {
         cx: &mut ViewContext<Self>,
     ) {
         if !self.input_enabled {
-            cx.emit(Event::IgnoredInput);
             return;
         }
 

crates/editor/src/editor_tests.rs

@@ -12,7 +12,7 @@ use crate::test::{
 };
 use gpui::{
     executor::Deterministic,
-    geometry::rect::RectF,
+    geometry::{rect::RectF, vector::vec2f},
     platform::{WindowBounds, WindowOptions},
 };
 use language::{FakeLspAdapter, LanguageConfig, LanguageRegistry, Point};
@@ -22,7 +22,10 @@ use util::{
     assert_set_eq,
     test::{marked_text_ranges, marked_text_ranges_by, sample_text, TextRangeMarker},
 };
-use workspace::{FollowableItem, ItemHandle, NavigationEntry, Pane};
+use workspace::{
+    item::{FollowableItem, ItemHandle},
+    NavigationEntry, Pane,
+};
 
 #[gpui::test]
 fn test_edit_events(cx: &mut MutableAppContext) {
@@ -475,7 +478,7 @@ fn test_clone(cx: &mut gpui::MutableAppContext) {
 fn test_navigation_history(cx: &mut gpui::MutableAppContext) {
     cx.set_global(Settings::test(cx));
     cx.set_global(DragAndDrop::<Workspace>::default());
-    use workspace::Item;
+    use workspace::item::Item;
     let (_, pane) = cx.add_window(Default::default(), |cx| Pane::new(None, cx));
     let buffer = MultiBuffer::build_simple(&sample_text(300, 5, 'a'), cx);
 
@@ -541,31 +544,30 @@ fn test_navigation_history(cx: &mut gpui::MutableAppContext) {
 
         // Set scroll position to check later
         editor.set_scroll_position(Vector2F::new(5.5, 5.5), cx);
-        let original_scroll_position = editor.scroll_position;
-        let original_scroll_top_anchor = editor.scroll_top_anchor;
+        let original_scroll_position = editor.scroll_manager.anchor();
 
         // Jump to the end of the document and adjust scroll
         editor.move_to_end(&MoveToEnd, cx);
         editor.set_scroll_position(Vector2F::new(-2.5, -0.5), cx);
-        assert_ne!(editor.scroll_position, original_scroll_position);
-        assert_ne!(editor.scroll_top_anchor, original_scroll_top_anchor);
+        assert_ne!(editor.scroll_manager.anchor(), original_scroll_position);
 
         let nav_entry = pop_history(&mut editor, cx).unwrap();
         editor.navigate(nav_entry.data.unwrap(), cx);
-        assert_eq!(editor.scroll_position, original_scroll_position);
-        assert_eq!(editor.scroll_top_anchor, original_scroll_top_anchor);
+        assert_eq!(editor.scroll_manager.anchor(), original_scroll_position);
 
         // Ensure we don't panic when navigation data contains invalid anchors *and* points.
-        let mut invalid_anchor = editor.scroll_top_anchor;
+        let mut invalid_anchor = editor.scroll_manager.anchor().top_anchor;
         invalid_anchor.text_anchor.buffer_id = Some(999);
         let invalid_point = Point::new(9999, 0);
         editor.navigate(
             Box::new(NavigationData {
                 cursor_anchor: invalid_anchor,
                 cursor_position: invalid_point,
-                scroll_top_anchor: invalid_anchor,
+                scroll_anchor: ScrollAnchor {
+                    top_anchor: invalid_anchor,
+                    offset: Default::default(),
+                },
                 scroll_top_row: invalid_point.row,
-                scroll_position: Default::default(),
             }),
             cx,
         );

crates/editor/src/element.rs

@@ -1,7 +1,7 @@
 use super::{
     display_map::{BlockContext, ToDisplayPoint},
-    Anchor, DisplayPoint, Editor, EditorMode, EditorSnapshot, Scroll, Select, SelectPhase,
-    SoftWrap, ToPoint, MAX_LINE_LEN,
+    Anchor, DisplayPoint, Editor, EditorMode, EditorSnapshot, Select, SelectPhase, SoftWrap,
+    ToPoint, MAX_LINE_LEN,
 };
 use crate::{
     display_map::{BlockStyle, DisplaySnapshot, TransformBlock},
@@ -13,6 +13,7 @@ use crate::{
         GoToFetchedDefinition, GoToFetchedTypeDefinition, UpdateGoToDefinitionLink,
     },
     mouse_context_menu::DeployMouseContextMenu,
+    scroll::actions::Scroll,
     EditorStyle,
 };
 use clock::ReplicaId;
@@ -955,7 +956,7 @@ impl EditorElement {
                     move |_, cx| {
                         if let Some(view) = view.upgrade(cx.deref_mut()) {
                             view.update(cx.deref_mut(), |view, cx| {
-                                view.make_scrollbar_visible(cx);
+                                view.scroll_manager.show_scrollbar(cx);
                             });
                         }
                     }
@@ -977,7 +978,7 @@ impl EditorElement {
                                     position.set_y(top_row as f32);
                                     view.set_scroll_position(position, cx);
                                 } else {
-                                    view.make_scrollbar_visible(cx);
+                                    view.scroll_manager.show_scrollbar(cx);
                                 }
                             });
                         }
@@ -1298,7 +1299,7 @@ impl EditorElement {
         };
 
         let tooltip_style = cx.global::<Settings>().theme.tooltip.clone();
-        let scroll_x = snapshot.scroll_position.x();
+        let scroll_x = snapshot.scroll_anchor.offset.x();
         let (fixed_blocks, non_fixed_blocks) = snapshot
             .blocks_in_range(rows.clone())
             .partition::<Vec<_>, _>(|(_, block)| match block {
@@ -1670,7 +1671,7 @@ impl Element for EditorElement {
                 ));
             }
 
-            show_scrollbars = view.show_scrollbars();
+            show_scrollbars = view.scroll_manager.scrollbars_visible();
             include_root = view
                 .project
                 .as_ref()
@@ -1725,7 +1726,7 @@ impl Element for EditorElement {
         );
 
         self.update_view(cx.app, |view, cx| {
-            let clamped = view.clamp_scroll_left(scroll_max.x());
+            let clamped = view.scroll_manager.clamp_scroll_left(scroll_max.x());
 
             let autoscrolled = if autoscroll_horizontally {
                 view.autoscroll_horizontally(

crates/editor/src/items.rs

@@ -1,15 +1,16 @@
 use crate::{
     display_map::ToDisplayPoint, link_go_to_definition::hide_link_definition,
-    movement::surrounding_word, Anchor, Autoscroll, Editor, Event, ExcerptId, ExcerptRange,
-    MultiBuffer, MultiBufferSnapshot, NavigationData, ToPoint as _, FORMAT_TIMEOUT,
+    movement::surrounding_word, persistence::DB, scroll::ScrollAnchor, Anchor, Autoscroll, Editor,
+    Event, ExcerptId, ExcerptRange, MultiBuffer, MultiBufferSnapshot, NavigationData, ToPoint as _,
+    FORMAT_TIMEOUT,
 };
-use anyhow::{anyhow, Result};
+use anyhow::{anyhow, Context, Result};
 use collections::HashSet;
 use futures::future::try_join_all;
 use futures::FutureExt;
 use gpui::{
     elements::*, geometry::vector::vec2f, AppContext, Entity, ModelHandle, MutableAppContext,
-    RenderContext, Subscription, Task, View, ViewContext, ViewHandle,
+    RenderContext, Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle,
 };
 use language::proto::serialize_anchor as serialize_text_anchor;
 use language::{Bias, Buffer, File as _, OffsetRangeExt, Point, SelectionGoal};
@@ -26,11 +27,11 @@ use std::{
     path::{Path, PathBuf},
 };
 use text::Selection;
-use util::TryFutureExt;
+use util::{ResultExt, TryFutureExt};
 use workspace::{
+    item::{FollowableItem, Item, ItemEvent, ItemHandle, ProjectItem},
     searchable::{Direction, SearchEvent, SearchableItem, SearchableItemHandle},
-    FollowableItem, Item, ItemEvent, ItemHandle, ItemNavHistory, ProjectItem, StatusItemView,
-    ToolbarItemLocation,
+    ItemId, ItemNavHistory, Pane, StatusItemView, ToolbarItemLocation, Workspace, WorkspaceId,
 };
 
 pub const MAX_TAB_TITLE_LEN: usize = 24;
@@ -135,10 +136,13 @@ impl FollowableItem for Editor {
                 if !selections.is_empty() {
                     editor.set_selections_from_remote(selections, cx);
                 }
+
                 if let Some(scroll_top_anchor) = scroll_top_anchor {
-                    editor.set_scroll_top_anchor(
-                        scroll_top_anchor,
-                        vec2f(state.scroll_x, state.scroll_y),
+                    editor.set_scroll_anchor(
+                        ScrollAnchor {
+                            top_anchor: scroll_top_anchor,
+                            offset: vec2f(state.scroll_x, state.scroll_y),
+                        },
                         cx,
                     );
                 }
@@ -177,6 +181,7 @@ impl FollowableItem for Editor {
 
     fn to_state_proto(&self, cx: &AppContext) -> Option<proto::view::Variant> {
         let buffer = self.buffer.read(cx);
+        let scroll_anchor = self.scroll_manager.anchor();
         let excerpts = buffer
             .read(cx)
             .excerpts()
@@ -200,9 +205,9 @@ impl FollowableItem for Editor {
             singleton: buffer.is_singleton(),
             title: (!buffer.is_singleton()).then(|| buffer.title(cx).into()),
             excerpts,
-            scroll_top_anchor: Some(serialize_anchor(&self.scroll_top_anchor)),
-            scroll_x: self.scroll_position.x(),
-            scroll_y: self.scroll_position.y(),
+            scroll_top_anchor: Some(serialize_anchor(&scroll_anchor.top_anchor)),
+            scroll_x: scroll_anchor.offset.x(),
+            scroll_y: scroll_anchor.offset.y(),
             selections: self
                 .selections
                 .disjoint_anchors()
@@ -251,9 +256,10 @@ impl FollowableItem for Editor {
                     true
                 }
                 Event::ScrollPositionChanged { .. } => {
-                    update.scroll_top_anchor = Some(serialize_anchor(&self.scroll_top_anchor));
-                    update.scroll_x = self.scroll_position.x();
-                    update.scroll_y = self.scroll_position.y();
+                    let scroll_anchor = self.scroll_manager.anchor();
+                    update.scroll_top_anchor = Some(serialize_anchor(&scroll_anchor.top_anchor));
+                    update.scroll_x = scroll_anchor.offset.x();
+                    update.scroll_y = scroll_anchor.offset.y();
                     true
                 }
                 Event::SelectionsChanged { .. } => {
@@ -357,7 +363,7 @@ impl FollowableItem for Editor {
                     this.set_selections_from_remote(selections, cx);
                     this.request_autoscroll_remotely(Autoscroll::newest(), cx);
                 } else if let Some(anchor) = scroll_top_anchor {
-                    this.set_scroll_top_anchor(anchor, vec2f(message.scroll_x, message.scroll_y), cx);
+                    this.set_scroll_anchor(
+                        ScrollAnchor {
+                            top_anchor: anchor,
+                            offset: vec2f(message.scroll_x, message.scroll_y),
+                        },
+                        cx,
+                    );
                 }
             });
             Ok(())
@@ -461,13 +467,12 @@ impl Item for Editor {
                 buffer.clip_point(data.cursor_position, Bias::Left)
             };
 
-            let scroll_top_anchor = if buffer.can_resolve(&data.scroll_top_anchor) {
-                data.scroll_top_anchor
-            } else {
-                buffer.anchor_before(
+            let mut scroll_anchor = data.scroll_anchor;
+            if !buffer.can_resolve(&scroll_anchor.top_anchor) {
+                scroll_anchor.top_anchor = buffer.anchor_before(
                     buffer.clip_point(Point::new(data.scroll_top_row, 0), Bias::Left),
-                )
-            };
+                );
+            }
 
             drop(buffer);
 
@@ -475,8 +480,7 @@ impl Item for Editor {
                 false
             } else {
                 let nav_history = self.nav_history.take();
-                self.scroll_position = data.scroll_position;
-                self.scroll_top_anchor = scroll_top_anchor;
+                self.set_scroll_anchor(scroll_anchor, cx);
                 self.change_selections(Some(Autoscroll::fit()), cx, |s| {
                     s.select_ranges([offset..offset])
                 });
@@ -550,7 +554,7 @@ impl Item for Editor {
         self.buffer.read(cx).is_singleton()
     }
 
-    fn clone_on_split(&self, cx: &mut ViewContext<Self>) -> Option<Self>
+    fn clone_on_split(&self, _workspace_id: WorkspaceId, cx: &mut ViewContext<Self>) -> Option<Self>
     where
         Self: Sized,
     {
@@ -673,7 +677,7 @@ impl Item for Editor {
         Task::ready(Ok(()))
     }
 
-    fn to_item_events(event: &Self::Event) -> Vec<workspace::ItemEvent> {
+    fn to_item_events(event: &Self::Event) -> Vec<ItemEvent> {
         let mut result = Vec::new();
         match event {
             Event::Closed => result.push(ItemEvent::CloseItem),
@@ -735,6 +739,87 @@ impl Item for Editor {
         }));
         Some(breadcrumbs)
     }
+
+    fn added_to_workspace(&mut self, workspace: &mut Workspace, cx: &mut ViewContext<Self>) {
+        let workspace_id = workspace.database_id();
+        let item_id = cx.view_id();
+
+        fn serialize(
+            buffer: ModelHandle<Buffer>,
+            workspace_id: WorkspaceId,
+            item_id: ItemId,
+            cx: &mut MutableAppContext,
+        ) {
+            if let Some(file) = buffer.read(cx).file().and_then(|file| file.as_local()) {
+                let path = file.abs_path(cx);
+
+                cx.background()
+                    .spawn(async move {
+                        DB.save_path(item_id, workspace_id, path.clone())
+                            .await
+                            .log_err()
+                    })
+                    .detach();
+            }
+        }
+
+        if let Some(buffer) = self.buffer().read(cx).as_singleton() {
+            serialize(buffer.clone(), workspace_id, item_id, cx);
+
+            cx.subscribe(&buffer, |this, buffer, event, cx| {
+                if let Some(workspace_id) = this.workspace_id {
+                    if let language::Event::FileHandleChanged = event {
+                        serialize(buffer, workspace_id, cx.view_id(), cx);
+                    }
+                }
+            })
+            .detach();
+        }
+    }
+
+    fn serialized_item_kind() -> Option<&'static str> {
+        Some("Editor")
+    }
+
+    fn deserialize(
+        project: ModelHandle<Project>,
+        _workspace: WeakViewHandle<Workspace>,
+        workspace_id: workspace::WorkspaceId,
+        item_id: ItemId,
+        cx: &mut ViewContext<Pane>,
+    ) -> Task<Result<ViewHandle<Self>>> {
+        let project_item: Result<_> = project.update(cx, |project, cx| {
+            // Look up the path associated with this key and open an editor for it
+            let path = DB
+                .get_path(item_id, workspace_id)?
+                .context("No path stored for this editor")?;
+
+            let (worktree, path) = project
+                .find_local_worktree(&path, cx)
+                .with_context(|| format!("No worktree for path: {path:?}"))?;
+            let project_path = ProjectPath {
+                worktree_id: worktree.read(cx).id(),
+                path: path.into(),
+            };
+
+            Ok(project.open_path(project_path, cx))
+        });
+
+        project_item
+            .map(|project_item| {
+                cx.spawn(|pane, mut cx| async move {
+                    let (_, project_item) = project_item.await?;
+                    let buffer = project_item
+                        .downcast::<Buffer>()
+                        .context("Project item at stored path was not a buffer")?;
+
+                    Ok(cx.update(|cx| {
+                        cx.add_view(pane, |cx| Editor::for_buffer(buffer, Some(project), cx))
+                    }))
+                })
+            })
+            .unwrap_or_else(|error| Task::ready(Err(error)))
+    }
 }
 
 impl ProjectItem for Editor {

crates/editor/src/persistence.rs

@@ -0,0 +1,36 @@
+use std::path::PathBuf;
+
+use db::sqlez_macros::sql;
+use db::{define_connection, query};
+use workspace::{ItemId, WorkspaceDb, WorkspaceId};
+
+define_connection!(
+    pub static ref DB: EditorDb<WorkspaceDb> =
+        &[sql! (
+            CREATE TABLE editors(
+                item_id INTEGER NOT NULL,
+                workspace_id INTEGER NOT NULL,
+                path BLOB NOT NULL,
+                PRIMARY KEY(item_id, workspace_id),
+                FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
+                ON DELETE CASCADE
+                ON UPDATE CASCADE
+        ) STRICT;
+    )];
+);
+
+impl EditorDb {
+    query! {
+        pub fn get_path(item_id: ItemId, workspace_id: WorkspaceId) -> Result<Option<PathBuf>> {
+            SELECT path FROM editors
+            WHERE item_id = ? AND workspace_id = ?
+        }
+    }
+
+    query! {
+        pub async fn save_path(item_id: ItemId, workspace_id: WorkspaceId, path: PathBuf) -> Result<()> {
+            INSERT OR REPLACE INTO editors(item_id, workspace_id, path)
+            VALUES (?, ?, ?)
+        }
+    }
+}
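
For orientation, a minimal sketch (not part of this change) of the round trip these two queries support. `DB`, `save_path`, and `get_path` are the items defined above; the ids, the path, and the assumption that `ItemId`/`WorkspaceId` are cheap to copy are all illustrative.

    // Hypothetical helper: persist the path backing an editor item, then read it back.
    async fn save_and_restore(item_id: ItemId, workspace_id: WorkspaceId) -> anyhow::Result<()> {
        let path = std::path::PathBuf::from("/tmp/example.rs");
        DB.save_path(item_id, workspace_id, path.clone()).await?;
        let restored = DB.get_path(item_id, workspace_id)?; // Option<PathBuf>
        assert_eq!(restored, Some(path));
        Ok(())
    }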

crates/editor/src/scroll.rs

@@ -0,0 +1,348 @@
+pub mod actions;
+pub mod autoscroll;
+pub mod scroll_amount;
+
+use std::{
+    cmp::Ordering,
+    time::{Duration, Instant},
+};
+
+use gpui::{
+    geometry::vector::{vec2f, Vector2F},
+    Axis, MutableAppContext, Task, ViewContext,
+};
+use language::Bias;
+
+use crate::{
+    display_map::{DisplaySnapshot, ToDisplayPoint},
+    hover_popover::hide_hover,
+    Anchor, DisplayPoint, Editor, EditorMode, Event, MultiBufferSnapshot, ToPoint,
+};
+
+use self::{
+    autoscroll::{Autoscroll, AutoscrollStrategy},
+    scroll_amount::ScrollAmount,
+};
+
+pub const SCROLL_EVENT_SEPARATION: Duration = Duration::from_millis(28);
+const SCROLLBAR_SHOW_INTERVAL: Duration = Duration::from_secs(1);
+
+#[derive(Default)]
+pub struct ScrollbarAutoHide(pub bool);
+
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub struct ScrollAnchor {
+    pub offset: Vector2F,
+    pub top_anchor: Anchor,
+}
+
+impl ScrollAnchor {
+    fn new() -> Self {
+        Self {
+            offset: Vector2F::zero(),
+            top_anchor: Anchor::min(),
+        }
+    }
+
+    pub fn scroll_position(&self, snapshot: &DisplaySnapshot) -> Vector2F {
+        let mut scroll_position = self.offset;
+        if self.top_anchor != Anchor::min() {
+            let scroll_top = self.top_anchor.to_display_point(snapshot).row() as f32;
+            scroll_position.set_y(scroll_top + scroll_position.y());
+        } else {
+            scroll_position.set_y(0.);
+        }
+        scroll_position
+    }
+
+    pub fn top_row(&self, buffer: &MultiBufferSnapshot) -> u32 {
+        self.top_anchor.to_point(buffer).row
+    }
+}
+
+#[derive(Clone, Copy, Debug)]
+pub struct OngoingScroll {
+    last_event: Instant,
+    axis: Option<Axis>,
+}
+
+impl OngoingScroll {
+    fn new() -> Self {
+        Self {
+            last_event: Instant::now() - SCROLL_EVENT_SEPARATION,
+            axis: None,
+        }
+    }
+
+    pub fn filter(&self, delta: &mut Vector2F) -> Option<Axis> {
+        const UNLOCK_PERCENT: f32 = 1.9;
+        const UNLOCK_LOWER_BOUND: f32 = 6.;
+        let mut axis = self.axis;
+
+        let x = delta.x().abs();
+        let y = delta.y().abs();
+        let duration = Instant::now().duration_since(self.last_event);
+        if duration > SCROLL_EVENT_SEPARATION {
+            // A new ongoing scroll is starting; determine its axis
+            axis = if x <= y {
+                Some(Axis::Vertical)
+            } else {
+                Some(Axis::Horizontal)
+            };
+        } else if x.max(y) >= UNLOCK_LOWER_BOUND {
+            // Check whether the current ongoing scroll needs to unlock its axis
+            match axis {
+                Some(Axis::Vertical) => {
+                    if x > y && x >= y * UNLOCK_PERCENT {
+                        axis = None;
+                    }
+                }
+
+                Some(Axis::Horizontal) => {
+                    if y > x && y >= x * UNLOCK_PERCENT {
+                        axis = None;
+                    }
+                }
+
+                None => {}
+            }
+        }
+
+        match axis {
+            Some(Axis::Vertical) => *delta = vec2f(0., delta.y()),
+            Some(Axis::Horizontal) => *delta = vec2f(delta.x(), 0.),
+            None => {}
+        }
+
+        axis
+    }
+}
+
+pub struct ScrollManager {
+    vertical_scroll_margin: f32,
+    anchor: ScrollAnchor,
+    ongoing: OngoingScroll,
+    autoscroll_request: Option<(Autoscroll, bool)>,
+    last_autoscroll: Option<(Vector2F, f32, f32, AutoscrollStrategy)>,
+    show_scrollbars: bool,
+    hide_scrollbar_task: Option<Task<()>>,
+    visible_line_count: Option<f32>,
+}
+
+impl ScrollManager {
+    pub fn new() -> Self {
+        ScrollManager {
+            vertical_scroll_margin: 3.0,
+            anchor: ScrollAnchor::new(),
+            ongoing: OngoingScroll::new(),
+            autoscroll_request: None,
+            show_scrollbars: true,
+            hide_scrollbar_task: None,
+            last_autoscroll: None,
+            visible_line_count: None,
+        }
+    }
+
+    pub fn clone_state(&mut self, other: &Self) {
+        self.anchor = other.anchor;
+        self.ongoing = other.ongoing;
+    }
+
+    pub fn anchor(&self) -> ScrollAnchor {
+        self.anchor
+    }
+
+    pub fn ongoing_scroll(&self) -> OngoingScroll {
+        self.ongoing
+    }
+
+    pub fn update_ongoing_scroll(&mut self, axis: Option<Axis>) {
+        self.ongoing.last_event = Instant::now();
+        self.ongoing.axis = axis;
+    }
+
+    pub fn scroll_position(&self, snapshot: &DisplaySnapshot) -> Vector2F {
+        self.anchor.scroll_position(snapshot)
+    }
+
+    fn set_scroll_position(
+        &mut self,
+        scroll_position: Vector2F,
+        map: &DisplaySnapshot,
+        local: bool,
+        cx: &mut ViewContext<Editor>,
+    ) {
+        let new_anchor = if scroll_position.y() <= 0. {
+            ScrollAnchor {
+                top_anchor: Anchor::min(),
+                offset: scroll_position.max(vec2f(0., 0.)),
+            }
+        } else {
+            let scroll_top_buffer_offset =
+                DisplayPoint::new(scroll_position.y() as u32, 0).to_offset(&map, Bias::Right);
+            let top_anchor = map
+                .buffer_snapshot
+                .anchor_at(scroll_top_buffer_offset, Bias::Right);
+
+            ScrollAnchor {
+                top_anchor,
+                offset: vec2f(
+                    scroll_position.x(),
+                    scroll_position.y() - top_anchor.to_display_point(&map).row() as f32,
+                ),
+            }
+        };
+
+        self.set_anchor(new_anchor, local, cx);
+    }
+
+    fn set_anchor(&mut self, anchor: ScrollAnchor, local: bool, cx: &mut ViewContext<Editor>) {
+        self.anchor = anchor;
+        cx.emit(Event::ScrollPositionChanged { local });
+        self.show_scrollbar(cx);
+        self.autoscroll_request.take();
+        cx.notify();
+    }
+
+    pub fn show_scrollbar(&mut self, cx: &mut ViewContext<Editor>) {
+        if !self.show_scrollbars {
+            self.show_scrollbars = true;
+            cx.notify();
+        }
+
+        if cx.default_global::<ScrollbarAutoHide>().0 {
+            self.hide_scrollbar_task = Some(cx.spawn_weak(|editor, mut cx| async move {
+                cx.background().timer(SCROLLBAR_SHOW_INTERVAL).await;
+                if let Some(editor) = editor.upgrade(&cx) {
+                    editor.update(&mut cx, |editor, cx| {
+                        editor.scroll_manager.show_scrollbars = false;
+                        cx.notify();
+                    });
+                }
+            }));
+        } else {
+            self.hide_scrollbar_task = None;
+        }
+    }
+
+    pub fn scrollbars_visible(&self) -> bool {
+        self.show_scrollbars
+    }
+
+    pub fn has_autoscroll_request(&self) -> bool {
+        self.autoscroll_request.is_some()
+    }
+
+    pub fn clamp_scroll_left(&mut self, max: f32) -> bool {
+        if max < self.anchor.offset.x() {
+            self.anchor.offset.set_x(max);
+            true
+        } else {
+            false
+        }
+    }
+}
+
+impl Editor {
+    pub fn vertical_scroll_margin(&mut self) -> usize {
+        self.scroll_manager.vertical_scroll_margin as usize
+    }
+
+    pub fn set_vertical_scroll_margin(&mut self, margin_rows: usize, cx: &mut ViewContext<Self>) {
+        self.scroll_manager.vertical_scroll_margin = margin_rows as f32;
+        cx.notify();
+    }
+
+    pub fn visible_line_count(&self) -> Option<f32> {
+        self.scroll_manager.visible_line_count
+    }
+
+    pub(crate) fn set_visible_line_count(&mut self, lines: f32) {
+        self.scroll_manager.visible_line_count = Some(lines)
+    }
+
+    pub fn set_scroll_position(&mut self, scroll_position: Vector2F, cx: &mut ViewContext<Self>) {
+        self.set_scroll_position_internal(scroll_position, true, cx);
+    }
+
+    pub(crate) fn set_scroll_position_internal(
+        &mut self,
+        scroll_position: Vector2F,
+        local: bool,
+        cx: &mut ViewContext<Self>,
+    ) {
+        let map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
+
+        hide_hover(self, cx);
+        self.scroll_manager
+            .set_scroll_position(scroll_position, &map, local, cx);
+    }
+
+    pub fn scroll_position(&self, cx: &mut ViewContext<Self>) -> Vector2F {
+        let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
+        self.scroll_manager.anchor.scroll_position(&display_map)
+    }
+
+    pub fn set_scroll_anchor(&mut self, scroll_anchor: ScrollAnchor, cx: &mut ViewContext<Self>) {
+        self.set_scroll_anchor_internal(scroll_anchor, true, cx);
+    }
+
+    pub(crate) fn set_scroll_anchor_internal(
+        &mut self,
+        scroll_anchor: ScrollAnchor,
+        local: bool,
+        cx: &mut ViewContext<Self>,
+    ) {
+        hide_hover(self, cx);
+        self.scroll_manager.set_anchor(scroll_anchor, local, cx);
+    }
+
+    pub fn scroll_screen(&mut self, amount: &ScrollAmount, cx: &mut ViewContext<Self>) {
+        if matches!(self.mode, EditorMode::SingleLine) {
+            cx.propagate_action();
+            return;
+        }
+
+        if self.take_rename(true, cx).is_some() {
+            return;
+        }
+
+        if amount.move_context_menu_selection(self, cx) {
+            return;
+        }
+
+        let cur_position = self.scroll_position(cx);
+        let new_pos = cur_position + vec2f(0., amount.lines(self));
+        self.set_scroll_position(new_pos, cx);
+    }
+
+    /// Returns where the newest selection lies relative to the visible screen:
+    ///     Ordering::Less => above the screen
+    ///     Ordering::Equal => on screen
+    ///     Ordering::Greater => below the screen
+    pub fn newest_selection_on_screen(&self, cx: &mut MutableAppContext) -> Ordering {
+        let snapshot = self.display_map.update(cx, |map, cx| map.snapshot(cx));
+        let newest_head = self
+            .selections
+            .newest_anchor()
+            .head()
+            .to_display_point(&snapshot);
+        let screen_top = self
+            .scroll_manager
+            .anchor
+            .top_anchor
+            .to_display_point(&snapshot);
+
+        if screen_top > newest_head {
+            return Ordering::Less;
+        }
+
+        if let Some(visible_lines) = self.visible_line_count() {
+            if newest_head.row() < screen_top.row() + visible_lines as u32 {
+                return Ordering::Equal;
+            }
+        }
+
+        Ordering::Greater
+    }
+}
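
As a self-contained illustration (not code from this change) of the arithmetic in `ScrollAnchor::scroll_position`: the anchor pins a top display row, and `offset` carries the horizontal position plus the vertical remainder relative to that row, so the absolute position is reconstructed against whatever the current snapshot says.

    // `anchor_row` stands in for `top_anchor.to_display_point(snapshot).row()`;
    // `None` models the unset anchor (`Anchor::min()`), which pins y to 0.
    fn reconstruct_scroll_y(anchor_row: Option<u32>, y_offset: f32) -> f32 {
        match anchor_row {
            Some(row) => row as f32 + y_offset, // anchored row plus remainder
            None => 0.0,                        // top of the buffer
        }
    }
    // reconstruct_scroll_y(Some(40), 0.25) == 40.25
    // reconstruct_scroll_y(None, 0.25) == 0.0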

crates/editor/src/scroll/actions.rs

@@ -0,0 +1,159 @@
+use gpui::{
+    actions, geometry::vector::Vector2F, impl_internal_actions, Axis, MutableAppContext,
+    ViewContext,
+};
+use language::Bias;
+
+use crate::{Editor, EditorMode};
+
+use super::{autoscroll::Autoscroll, scroll_amount::ScrollAmount, ScrollAnchor};
+
+actions!(
+    editor,
+    [
+        LineDown,
+        LineUp,
+        HalfPageDown,
+        HalfPageUp,
+        PageDown,
+        PageUp,
+        NextScreen,
+        ScrollCursorTop,
+        ScrollCursorCenter,
+        ScrollCursorBottom,
+    ]
+);
+
+#[derive(Clone, PartialEq)]
+pub struct Scroll {
+    pub scroll_position: Vector2F,
+    pub axis: Option<Axis>,
+}
+
+impl_internal_actions!(editor, [Scroll]);
+
+pub fn init(cx: &mut MutableAppContext) {
+    cx.add_action(Editor::next_screen);
+    cx.add_action(Editor::scroll);
+    cx.add_action(Editor::scroll_cursor_top);
+    cx.add_action(Editor::scroll_cursor_center);
+    cx.add_action(Editor::scroll_cursor_bottom);
+    cx.add_action(|this: &mut Editor, _: &LineDown, cx| {
+        this.scroll_screen(&ScrollAmount::LineDown, cx)
+    });
+    cx.add_action(|this: &mut Editor, _: &LineUp, cx| {
+        this.scroll_screen(&ScrollAmount::LineUp, cx)
+    });
+    cx.add_action(|this: &mut Editor, _: &HalfPageDown, cx| {
+        this.scroll_screen(&ScrollAmount::HalfPageDown, cx)
+    });
+    cx.add_action(|this: &mut Editor, _: &HalfPageUp, cx| {
+        this.scroll_screen(&ScrollAmount::HalfPageUp, cx)
+    });
+    cx.add_action(|this: &mut Editor, _: &PageDown, cx| {
+        this.scroll_screen(&ScrollAmount::PageDown, cx)
+    });
+    cx.add_action(|this: &mut Editor, _: &PageUp, cx| {
+        this.scroll_screen(&ScrollAmount::PageUp, cx)
+    });
+}
+
+impl Editor {
+    pub fn next_screen(&mut self, _: &NextScreen, cx: &mut ViewContext<Editor>) -> Option<()> {
+        if self.take_rename(true, cx).is_some() {
+            return None;
+        }
+
+        if self.context_menu.is_some() {
+            return None;
+        }
+
+        if matches!(self.mode, EditorMode::SingleLine) {
+            cx.propagate_action();
+            return None;
+        }
+
+        self.request_autoscroll(Autoscroll::Next, cx);
+
+        Some(())
+    }
+
+    fn scroll(&mut self, action: &Scroll, cx: &mut ViewContext<Self>) {
+        self.scroll_manager.update_ongoing_scroll(action.axis);
+        self.set_scroll_position(action.scroll_position, cx);
+    }
+
+    fn scroll_cursor_top(editor: &mut Editor, _: &ScrollCursorTop, cx: &mut ViewContext<Editor>) {
+        let snapshot = editor.snapshot(cx).display_snapshot;
+        let scroll_margin_rows = editor.vertical_scroll_margin() as u32;
+
+        let mut new_screen_top = editor.selections.newest_display(cx).head();
+        *new_screen_top.row_mut() = new_screen_top.row().saturating_sub(scroll_margin_rows);
+        *new_screen_top.column_mut() = 0;
+        let new_screen_top = new_screen_top.to_offset(&snapshot, Bias::Left);
+        let new_anchor = snapshot.buffer_snapshot.anchor_before(new_screen_top);
+
+        editor.set_scroll_anchor(
+            ScrollAnchor {
+                top_anchor: new_anchor,
+                offset: Default::default(),
+            },
+            cx,
+        )
+    }
+
+    fn scroll_cursor_center(
+        editor: &mut Editor,
+        _: &ScrollCursorCenter,
+        cx: &mut ViewContext<Editor>,
+    ) {
+        let snapshot = editor.snapshot(cx).display_snapshot;
+        let visible_rows = if let Some(visible_rows) = editor.visible_line_count() {
+            visible_rows as u32
+        } else {
+            return;
+        };
+
+        let mut new_screen_top = editor.selections.newest_display(cx).head();
+        *new_screen_top.row_mut() = new_screen_top.row().saturating_sub(visible_rows / 2);
+        *new_screen_top.column_mut() = 0;
+        let new_screen_top = new_screen_top.to_offset(&snapshot, Bias::Left);
+        let new_anchor = snapshot.buffer_snapshot.anchor_before(new_screen_top);
+
+        editor.set_scroll_anchor(
+            ScrollAnchor {
+                top_anchor: new_anchor,
+                offset: Default::default(),
+            },
+            cx,
+        )
+    }
+
+    fn scroll_cursor_bottom(
+        editor: &mut Editor,
+        _: &ScrollCursorBottom,
+        cx: &mut ViewContext<Editor>,
+    ) {
+        let snapshot = editor.snapshot(cx).display_snapshot;
+        let scroll_margin_rows = editor.vertical_scroll_margin() as u32;
+        let visible_rows = if let Some(visible_rows) = editor.visible_line_count() {
+            visible_rows as u32
+        } else {
+            return;
+        };
+
+        let mut new_screen_top = editor.selections.newest_display(cx).head();
+        *new_screen_top.row_mut() = new_screen_top
+            .row()
+            .saturating_sub(visible_rows.saturating_sub(scroll_margin_rows));
+        *new_screen_top.column_mut() = 0;
+        let new_screen_top = new_screen_top.to_offset(&snapshot, Bias::Left);
+        let new_anchor = snapshot.buffer_snapshot.anchor_before(new_screen_top);
+
+        editor.set_scroll_anchor(
+            ScrollAnchor {
+                top_anchor: new_anchor,
+                offset: Default::default(),
+            },
+            cx,
+        )
+    }
+}
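
For context, callers outside this module can pin the viewport the same way these cursor actions do: resolve a display point to a buffer anchor and hand it to `set_scroll_anchor`. A rough sketch, assuming `editor`, `cx`, and `DisplayPoint` are in scope and using an illustrative row:

    // Mirrors the pattern in scroll_cursor_top above; row 100 is a placeholder.
    let snapshot = editor.snapshot(cx).display_snapshot;
    let top_offset = DisplayPoint::new(100, 0).to_offset(&snapshot, Bias::Left);
    let top_anchor = snapshot.buffer_snapshot.anchor_before(top_offset);
    editor.set_scroll_anchor(
        ScrollAnchor {
            top_anchor,
            offset: Default::default(),
        },
        cx,
    );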

crates/editor/src/scroll/autoscroll.rs

@@ -0,0 +1,246 @@
+use std::cmp;
+
+use gpui::{text_layout, ViewContext};
+use language::Point;
+
+use crate::{display_map::ToDisplayPoint, Editor, EditorMode};
+
+#[derive(PartialEq, Eq)]
+pub enum Autoscroll {
+    Next,
+    Strategy(AutoscrollStrategy),
+}
+
+impl Autoscroll {
+    pub fn fit() -> Self {
+        Self::Strategy(AutoscrollStrategy::Fit)
+    }
+
+    pub fn newest() -> Self {
+        Self::Strategy(AutoscrollStrategy::Newest)
+    }
+
+    pub fn center() -> Self {
+        Self::Strategy(AutoscrollStrategy::Center)
+    }
+}
+
+#[derive(PartialEq, Eq, Default)]
+pub enum AutoscrollStrategy {
+    Fit,
+    Newest,
+    #[default]
+    Center,
+    Top,
+    Bottom,
+}
+
+impl AutoscrollStrategy {
+    fn next(&self) -> Self {
+        match self {
+            AutoscrollStrategy::Center => AutoscrollStrategy::Top,
+            AutoscrollStrategy::Top => AutoscrollStrategy::Bottom,
+            _ => AutoscrollStrategy::Center,
+        }
+    }
+}
+
+impl Editor {
+    pub fn autoscroll_vertically(
+        &mut self,
+        viewport_height: f32,
+        line_height: f32,
+        cx: &mut ViewContext<Editor>,
+    ) -> bool {
+        let visible_lines = viewport_height / line_height;
+        let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
+        let mut scroll_position = self.scroll_manager.scroll_position(&display_map);
+        let max_scroll_top = if matches!(self.mode, EditorMode::AutoHeight { .. }) {
+            (display_map.max_point().row() as f32 - visible_lines + 1.).max(0.)
+        } else {
+            display_map.max_point().row() as f32
+        };
+        if scroll_position.y() > max_scroll_top {
+            scroll_position.set_y(max_scroll_top);
+            self.set_scroll_position(scroll_position, cx);
+        }
+
+        let (autoscroll, local) =
+            if let Some(autoscroll) = self.scroll_manager.autoscroll_request.take() {
+                autoscroll
+            } else {
+                return false;
+            };
+
+        let first_cursor_top;
+        let last_cursor_bottom;
+        if let Some(highlighted_rows) = &self.highlighted_rows {
+            first_cursor_top = highlighted_rows.start as f32;
+            last_cursor_bottom = first_cursor_top + 1.;
+        } else if autoscroll == Autoscroll::newest() {
+            let newest_selection = self.selections.newest::<Point>(cx);
+            first_cursor_top = newest_selection.head().to_display_point(&display_map).row() as f32;
+            last_cursor_bottom = first_cursor_top + 1.;
+        } else {
+            let selections = self.selections.all::<Point>(cx);
+            first_cursor_top = selections
+                .first()
+                .unwrap()
+                .head()
+                .to_display_point(&display_map)
+                .row() as f32;
+            last_cursor_bottom = selections
+                .last()
+                .unwrap()
+                .head()
+                .to_display_point(&display_map)
+                .row() as f32
+                + 1.0;
+        }
+
+        let margin = if matches!(self.mode, EditorMode::AutoHeight { .. }) {
+            0.
+        } else {
+            ((visible_lines - (last_cursor_bottom - first_cursor_top)) / 2.0).floor()
+        };
+        if margin < 0.0 {
+            return false;
+        }
+
+        let strategy = match autoscroll {
+            Autoscroll::Strategy(strategy) => strategy,
+            Autoscroll::Next => {
+                let last_autoscroll = &self.scroll_manager.last_autoscroll;
+                if let Some(last_autoscroll) = last_autoscroll {
+                    if self.scroll_manager.anchor.offset == last_autoscroll.0
+                        && first_cursor_top == last_autoscroll.1
+                        && last_cursor_bottom == last_autoscroll.2
+                    {
+                        last_autoscroll.3.next()
+                    } else {
+                        AutoscrollStrategy::default()
+                    }
+                } else {
+                    AutoscrollStrategy::default()
+                }
+            }
+        };
+
+        match strategy {
+            AutoscrollStrategy::Fit | AutoscrollStrategy::Newest => {
+                let margin = margin.min(self.scroll_manager.vertical_scroll_margin);
+                let target_top = (first_cursor_top - margin).max(0.0);
+                let target_bottom = last_cursor_bottom + margin;
+                let start_row = scroll_position.y();
+                let end_row = start_row + visible_lines;
+
+                if target_top < start_row {
+                    scroll_position.set_y(target_top);
+                    self.set_scroll_position_internal(scroll_position, local, cx);
+                } else if target_bottom >= end_row {
+                    scroll_position.set_y(target_bottom - visible_lines);
+                    self.set_scroll_position_internal(scroll_position, local, cx);
+                }
+            }
+            AutoscrollStrategy::Center => {
+                scroll_position.set_y((first_cursor_top - margin).max(0.0));
+                self.set_scroll_position_internal(scroll_position, local, cx);
+            }
+            AutoscrollStrategy::Top => {
+                scroll_position.set_y((first_cursor_top).max(0.0));
+                self.set_scroll_position_internal(scroll_position, local, cx);
+            }
+            AutoscrollStrategy::Bottom => {
+                scroll_position.set_y((last_cursor_bottom - visible_lines).max(0.0));
+                self.set_scroll_position_internal(scroll_position, local, cx);
+            }
+        }
+
+        self.scroll_manager.last_autoscroll = Some((
+            self.scroll_manager.anchor.offset,
+            first_cursor_top,
+            last_cursor_bottom,
+            strategy,
+        ));
+
+        true
+    }
+
+    pub fn autoscroll_horizontally(
+        &mut self,
+        start_row: u32,
+        viewport_width: f32,
+        scroll_width: f32,
+        max_glyph_width: f32,
+        layouts: &[text_layout::Line],
+        cx: &mut ViewContext<Self>,
+    ) -> bool {
+        let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
+        let selections = self.selections.all::<Point>(cx);
+
+        let mut target_left;
+        let mut target_right;
+
+        if self.highlighted_rows.is_some() {
+            target_left = 0.0_f32;
+            target_right = 0.0_f32;
+        } else {
+            target_left = std::f32::INFINITY;
+            target_right = 0.0_f32;
+            for selection in selections {
+                let head = selection.head().to_display_point(&display_map);
+                if head.row() >= start_row && head.row() < start_row + layouts.len() as u32 {
+                    let start_column = head.column().saturating_sub(3);
+                    let end_column = cmp::min(display_map.line_len(head.row()), head.column() + 3);
+                    target_left = target_left.min(
+                        layouts[(head.row() - start_row) as usize]
+                            .x_for_index(start_column as usize),
+                    );
+                    target_right = target_right.max(
+                        layouts[(head.row() - start_row) as usize].x_for_index(end_column as usize)
+                            + max_glyph_width,
+                    );
+                }
+            }
+        }
+
+        target_right = target_right.min(scroll_width);
+
+        if target_right - target_left > viewport_width {
+            return false;
+        }
+
+        let scroll_left = self.scroll_manager.anchor.offset.x() * max_glyph_width;
+        let scroll_right = scroll_left + viewport_width;
+
+        if target_left < scroll_left {
+            self.scroll_manager
+                .anchor
+                .offset
+                .set_x(target_left / max_glyph_width);
+            true
+        } else if target_right > scroll_right {
+            self.scroll_manager
+                .anchor
+                .offset
+                .set_x((target_right - viewport_width) / max_glyph_width);
+            true
+        } else {
+            false
+        }
+    }
+
+    pub fn request_autoscroll(&mut self, autoscroll: Autoscroll, cx: &mut ViewContext<Self>) {
+        self.scroll_manager.autoscroll_request = Some((autoscroll, true));
+        cx.notify();
+    }
+
+    pub(crate) fn request_autoscroll_remotely(
+        &mut self,
+        autoscroll: Autoscroll,
+        cx: &mut ViewContext<Self>,
+    ) {
+        self.scroll_manager.autoscroll_request = Some((autoscroll, false));
+        cx.notify();
+    }
+}
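
For orientation: `Autoscroll::Next` is what the `NextScreen` action requests. The request only records intent; it is applied on the next layout pass by `autoscroll_vertically`, which steps the strategy Center -> Top -> Bottom (then back to Center) as long as the cursor and scroll offset are unchanged between requests. A sketch, assuming `editor` and `cx` are in scope:

    // Each dispatch (with a layout pass in between) moves the newest cursor's
    // screen position one step through the center/top/bottom cycle.
    editor.request_autoscroll(Autoscroll::Next, cx);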

crates/editor/src/scroll/scroll_amount.rs

@@ -0,0 +1,48 @@
+use gpui::ViewContext;
+use serde::Deserialize;
+use util::iife;
+
+use crate::Editor;
+
+#[derive(Clone, PartialEq, Deserialize)]
+pub enum ScrollAmount {
+    LineUp,
+    LineDown,
+    HalfPageUp,
+    HalfPageDown,
+    PageUp,
+    PageDown,
+}
+
+impl ScrollAmount {
+    pub fn move_context_menu_selection(
+        &self,
+        editor: &mut Editor,
+        cx: &mut ViewContext<Editor>,
+    ) -> bool {
+        iife!({
+            let context_menu = editor.context_menu.as_mut()?;
+
+            match self {
+                Self::LineDown | Self::HalfPageDown => context_menu.select_next(cx),
+                Self::LineUp | Self::HalfPageUp => context_menu.select_prev(cx),
+                Self::PageDown => context_menu.select_last(cx),
+                Self::PageUp => context_menu.select_first(cx),
+            }
+            .then_some(())
+        })
+        .is_some()
+    }
+
+    pub fn lines(&self, editor: &mut Editor) -> f32 {
+        match self {
+            Self::LineDown => 1.,
+            Self::LineUp => -1.,
+            Self::HalfPageDown => editor.visible_line_count().map(|l| l / 2.).unwrap_or(1.),
+            Self::HalfPageUp => -editor.visible_line_count().map(|l| l / 2.).unwrap_or(1.),
+            // Subtract one line so that a pivot row stays on the screen when paging
+            Self::PageDown => editor.visible_line_count().unwrap_or(1.) - 1.,
+            Self::PageUp => -editor.visible_line_count().unwrap_or(1.) - 1.,
+        }
+    }
+}
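
A worked example (not in the change) of what `lines()` evaluates to for an editor reporting 40 visible lines; the Up variants use the corresponding negative expressions shown above.

    LineDown      ->  1.0
    HalfPageDown  ->  40.0 / 2.0 = 20.0
    PageDown      ->  40.0 - 1.0 = 39.0   (the pivot row kept on screen)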

crates/editor/src/selections_collection.rs

@@ -61,7 +61,7 @@ impl SelectionsCollection {
         self.buffer.read(cx).read(cx)
     }
 
-    pub fn set_state(&mut self, other: &SelectionsCollection) {
+    pub fn clone_state(&mut self, other: &SelectionsCollection) {
         self.next_selection_id = other.next_selection_id;
         self.line_mode = other.line_mode;
         self.disjoint = other.disjoint.clone();

crates/editor/src/test/editor_lsp_test_context.rs

@@ -63,8 +63,15 @@ impl<'a> EditorLspTestContext<'a> {
             .insert_tree("/root", json!({ "dir": { file_name: "" }}))
             .await;
 
-        let (window_id, workspace) =
-            cx.add_window(|cx| Workspace::new(project.clone(), |_, _| unimplemented!(), cx));
+        let (window_id, workspace) = cx.add_window(|cx| {
+            Workspace::new(
+                Default::default(),
+                0,
+                project.clone(),
+                |_, _| unimplemented!(),
+                cx,
+            )
+        });
         project
             .update(cx, |project, cx| {
                 project.find_or_create_local_worktree("/root", true, cx)

crates/file_finder/src/file_finder.rs

@@ -316,8 +316,9 @@ mod tests {
             .await;
 
         let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
-        let (window_id, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (window_id, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
         cx.dispatch_action(window_id, Toggle);
 
         let finder = cx.read(|cx| workspace.read(cx).modal::<FileFinder>().unwrap());
@@ -371,8 +372,9 @@ mod tests {
             .await;
 
         let project = Project::test(app_state.fs.clone(), ["/dir".as_ref()], cx).await;
-        let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
         let (_, finder) =
             cx.add_window(|cx| FileFinder::new(workspace.read(cx).project().clone(), cx));
 
@@ -446,8 +448,9 @@ mod tests {
             cx,
         )
         .await;
-        let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
         let (_, finder) =
             cx.add_window(|cx| FileFinder::new(workspace.read(cx).project().clone(), cx));
         finder
@@ -471,8 +474,9 @@ mod tests {
             cx,
         )
         .await;
-        let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
         let (_, finder) =
             cx.add_window(|cx| FileFinder::new(workspace.read(cx).project().clone(), cx));
 
@@ -524,8 +528,9 @@ mod tests {
             cx,
         )
         .await;
-        let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
         let (_, finder) =
             cx.add_window(|cx| FileFinder::new(workspace.read(cx).project().clone(), cx));
 
@@ -563,8 +568,9 @@ mod tests {
             .await;
 
         let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
-        let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
         let (_, finder) =
             cx.add_window(|cx| FileFinder::new(workspace.read(cx).project().clone(), cx));
         finder

crates/go_to_line/src/go_to_line.rs 🔗

@@ -1,6 +1,6 @@
 use std::sync::Arc;
 
-use editor::{display_map::ToDisplayPoint, Autoscroll, DisplayPoint, Editor};
+use editor::{display_map::ToDisplayPoint, scroll::autoscroll::Autoscroll, DisplayPoint, Editor};
 use gpui::{
     actions, elements::*, geometry::vector::Vector2F, AnyViewHandle, Axis, Entity,
     MutableAppContext, RenderContext, View, ViewContext, ViewHandle,

crates/gpui/Cargo.toml 🔗

@@ -17,6 +17,7 @@ collections = { path = "../collections" }
 gpui_macros = { path = "../gpui_macros" }
 util = { path = "../util" }
 sum_tree = { path = "../sum_tree" }
+sqlez = { path = "../sqlez" }
 async-task = "4.0.3"
 backtrace = { version = "0.3", optional = true }
 ctor = "0.1"

crates/gpui/grammars/context-predicate/bindings/node/binding.cc 🔗

@@ -1,10 +1,10 @@
+#include "nan.h"
 #include "tree_sitter/parser.h"
 #include <node.h>
-#include "nan.h"
 
 using namespace v8;
 
-extern "C" TSLanguage * tree_sitter_context_predicate();
+extern "C" TSLanguage *tree_sitter_context_predicate();
 
 namespace {
 
@@ -16,13 +16,15 @@ void Init(Local<Object> exports, Local<Object> module) {
   tpl->InstanceTemplate()->SetInternalFieldCount(1);
 
   Local<Function> constructor = Nan::GetFunction(tpl).ToLocalChecked();
-  Local<Object> instance = constructor->NewInstance(Nan::GetCurrentContext()).ToLocalChecked();
+  Local<Object> instance =
+      constructor->NewInstance(Nan::GetCurrentContext()).ToLocalChecked();
   Nan::SetInternalFieldPointer(instance, 0, tree_sitter_context_predicate());
 
-  Nan::Set(instance, Nan::New("name").ToLocalChecked(), Nan::New("context_predicate").ToLocalChecked());
+  Nan::Set(instance, Nan::New("name").ToLocalChecked(),
+           Nan::New("context_predicate").ToLocalChecked());
   Nan::Set(module, Nan::New("exports").ToLocalChecked(), instance);
 }
 
 NODE_MODULE(tree_sitter_context_predicate_binding, Init)
 
-}  // namespace
+} // namespace

crates/gpui/src/app.rs 🔗

@@ -594,6 +594,9 @@ type ReleaseObservationCallback = Box<dyn FnOnce(&dyn Any, &mut MutableAppContex
 type ActionObservationCallback = Box<dyn FnMut(TypeId, &mut MutableAppContext)>;
 type WindowActivationCallback = Box<dyn FnMut(bool, &mut MutableAppContext) -> bool>;
 type WindowFullscreenCallback = Box<dyn FnMut(bool, &mut MutableAppContext) -> bool>;
+type KeystrokeCallback = Box<
+    dyn FnMut(&Keystroke, &MatchResult, Option<&Box<dyn Action>>, &mut MutableAppContext) -> bool,
+>;
 type DeserializeActionCallback = fn(json: &str) -> anyhow::Result<Box<dyn Action>>;
 type WindowShouldCloseSubscriptionCallback = Box<dyn FnMut(&mut MutableAppContext) -> bool>;
 
@@ -619,6 +622,7 @@ pub struct MutableAppContext {
     observations: CallbackCollection<usize, ObservationCallback>,
     window_activation_observations: CallbackCollection<usize, WindowActivationCallback>,
     window_fullscreen_observations: CallbackCollection<usize, WindowFullscreenCallback>,
+    keystroke_observations: CallbackCollection<usize, KeystrokeCallback>,
 
     release_observations: Arc<Mutex<HashMap<usize, BTreeMap<usize, ReleaseObservationCallback>>>>,
     action_dispatch_observations: Arc<Mutex<BTreeMap<usize, ActionObservationCallback>>>,
@@ -678,6 +682,7 @@ impl MutableAppContext {
             global_observations: Default::default(),
             window_activation_observations: Default::default(),
             window_fullscreen_observations: Default::default(),
+            keystroke_observations: Default::default(),
             action_dispatch_observations: Default::default(),
             presenters_and_platform_windows: Default::default(),
             foreground,
@@ -763,11 +768,11 @@ impl MutableAppContext {
             .with_context(|| format!("invalid data for action {}", name))
     }
 
-    pub fn add_action<A, V, F>(&mut self, handler: F)
+    pub fn add_action<A, V, F, R>(&mut self, handler: F)
     where
         A: Action,
         V: View,
-        F: 'static + FnMut(&mut V, &A, &mut ViewContext<V>),
+        F: 'static + FnMut(&mut V, &A, &mut ViewContext<V>) -> R,
     {
         self.add_action_internal(handler, false)
     }
@@ -781,11 +786,11 @@ impl MutableAppContext {
         self.add_action_internal(handler, true)
     }
 
-    fn add_action_internal<A, V, F>(&mut self, mut handler: F, capture: bool)
+    fn add_action_internal<A, V, F, R>(&mut self, mut handler: F, capture: bool)
     where
         A: Action,
         V: View,
-        F: 'static + FnMut(&mut V, &A, &mut ViewContext<V>),
+        F: 'static + FnMut(&mut V, &A, &mut ViewContext<V>) -> R,
     {
         let handler = Box::new(
             move |view: &mut dyn AnyView,
@@ -1255,6 +1260,27 @@ impl MutableAppContext {
         }
     }
 
+    pub fn observe_keystrokes<F>(&mut self, window_id: usize, callback: F) -> Subscription
+    where
+        F: 'static
+            + FnMut(
+                &Keystroke,
+                &MatchResult,
+                Option<&Box<dyn Action>>,
+                &mut MutableAppContext,
+            ) -> bool,
+    {
+        let subscription_id = post_inc(&mut self.next_subscription_id);
+        self.keystroke_observations
+            .add_callback(window_id, subscription_id, Box::new(callback));
+
+        Subscription::KeystrokeObservation {
+            id: subscription_id,
+            window_id,
+            observations: Some(self.keystroke_observations.downgrade()),
+        }
+    }
+
     pub fn defer(&mut self, callback: impl 'static + FnOnce(&mut MutableAppContext)) {
         self.pending_effects.push_back(Effect::Deferred {
             callback: Box::new(callback),
@@ -1405,8 +1431,8 @@ impl MutableAppContext {
         true
     }
 
-    // Returns an iterator over all of the view ids from the passed view up to the root of the window
-    // Includes the passed view itself
+    /// Returns an iterator over all of the view ids from the passed view up to the root of the window
+    /// Includes the passed view itself
     fn ancestors(&self, window_id: usize, mut view_id: usize) -> impl Iterator<Item = usize> + '_ {
         std::iter::once(view_id)
             .into_iter()
@@ -1538,27 +1564,39 @@ impl MutableAppContext {
                 })
                 .collect();
 
-            match self
+            let match_result = self
                 .keystroke_matcher
-                .push_keystroke(keystroke.clone(), dispatch_path)
-            {
+                .push_keystroke(keystroke.clone(), dispatch_path);
+            let mut handled_by = None;
+
+            let keystroke_handled = match &match_result {
                 MatchResult::None => false,
                 MatchResult::Pending => true,
                 MatchResult::Matches(matches) => {
                     for (view_id, action) in matches {
                         if self.handle_dispatch_action_from_effect(
                             window_id,
-                            Some(view_id),
+                            Some(*view_id),
                             action.as_ref(),
                         ) {
                             self.keystroke_matcher.clear_pending();
-                            return true;
+                            handled_by = Some(action.boxed_clone());
+                            break;
                         }
                     }
-                    false
+                    handled_by.is_some()
                 }
-            }
+            };
+
+            self.keystroke(
+                window_id,
+                keystroke.clone(),
+                handled_by,
+                match_result.clone(),
+            );
+            keystroke_handled
         } else {
+            self.keystroke(window_id, keystroke.clone(), None, MatchResult::None);
             false
         }
     }
@@ -2110,6 +2148,12 @@ impl MutableAppContext {
                         } => {
                             self.handle_window_should_close_subscription_effect(window_id, callback)
                         }
+                        Effect::Keystroke {
+                            window_id,
+                            keystroke,
+                            handled_by,
+                            result,
+                        } => self.handle_keystroke_effect(window_id, keystroke, handled_by, result),
                     }
                     self.pending_notifications.clear();
                     self.remove_dropped_entities();
@@ -2188,6 +2232,21 @@ impl MutableAppContext {
         });
     }
 
+    fn keystroke(
+        &mut self,
+        window_id: usize,
+        keystroke: Keystroke,
+        handled_by: Option<Box<dyn Action>>,
+        result: MatchResult,
+    ) {
+        self.pending_effects.push_back(Effect::Keystroke {
+            window_id,
+            keystroke,
+            handled_by,
+            result,
+        });
+    }
+
     pub fn refresh_windows(&mut self) {
         self.pending_effects.push_back(Effect::RefreshWindows);
     }
@@ -2299,6 +2358,21 @@ impl MutableAppContext {
         });
     }
 
+    fn handle_keystroke_effect(
+        &mut self,
+        window_id: usize,
+        keystroke: Keystroke,
+        handled_by: Option<Box<dyn Action>>,
+        result: MatchResult,
+    ) {
+        self.update(|this| {
+            let mut observations = this.keystroke_observations.clone();
+            observations.emit_and_cleanup(window_id, this, {
+                move |callback, this| callback(&keystroke, &result, handled_by.as_ref(), this)
+            });
+        });
+    }
+
     fn handle_window_activation_effect(&mut self, window_id: usize, active: bool) {
         //Short circuit evaluation if we're already g2g
         if self
@@ -2852,6 +2926,12 @@ pub enum Effect {
         subscription_id: usize,
         callback: WindowFullscreenCallback,
     },
+    Keystroke {
+        window_id: usize,
+        keystroke: Keystroke,
+        handled_by: Option<Box<dyn Action>>,
+        result: MatchResult,
+    },
     RefreshWindows,
     DispatchActionFrom {
         window_id: usize,
@@ -2995,6 +3075,21 @@ impl Debug for Effect {
                 .debug_struct("Effect::WindowShouldCloseSubscription")
                 .field("window_id", window_id)
                 .finish(),
+            Effect::Keystroke {
+                window_id,
+                keystroke,
+                handled_by,
+                result,
+            } => f
+                .debug_struct("Effect::Keystroke")
+                .field("window_id", window_id)
+                .field("keystroke", keystroke)
+                .field(
+                    "handled_by",
+                    &handled_by.as_ref().map(|handled_by| handled_by.name()),
+                )
+                .field("result", result)
+                .finish(),
         }
     }
 }
@@ -3600,6 +3695,7 @@ impl<'a, T: View> ViewContext<'a, T> {
             return false;
         }
         self.ancestors(view.window_id, view.view_id)
+            .skip(1) // Skip self id
             .any(|parent| parent == self.view_id)
     }
 
@@ -3826,6 +3922,33 @@ impl<'a, T: View> ViewContext<'a, T> {
             })
     }
 
+    pub fn observe_keystroke<F>(&mut self, mut callback: F) -> Subscription
+    where
+        F: 'static
+            + FnMut(
+                &mut T,
+                &Keystroke,
+                Option<&Box<dyn Action>>,
+                &MatchResult,
+                &mut ViewContext<T>,
+            ) -> bool,
+    {
+        let observer = self.weak_handle();
+        self.app.observe_keystrokes(
+            self.window_id(),
+            move |keystroke, result, handled_by, cx| {
+                if let Some(observer) = observer.upgrade(cx) {
+                    observer.update(cx, |observer, cx| {
+                        callback(observer, keystroke, handled_by, result, cx);
+                    });
+                    true
+                } else {
+                    false
+                }
+            },
+        )
+    }
+
     pub fn emit(&mut self, payload: T::Event) {
         self.app.pending_effects.push_back(Effect::Event {
             entity_id: self.view_id,
@@ -5018,6 +5141,11 @@ pub enum Subscription {
         window_id: usize,
         observations: Option<Weak<Mapping<usize, WindowFullscreenCallback>>>,
     },
+    KeystrokeObservation {
+        id: usize,
+        window_id: usize,
+        observations: Option<Weak<Mapping<usize, KeystrokeCallback>>>,
+    },
 
     ReleaseObservation {
         id: usize,
@@ -5056,6 +5184,9 @@ impl Subscription {
             Subscription::ActionObservation { observations, .. } => {
                 observations.take();
             }
+            Subscription::KeystrokeObservation { observations, .. } => {
+                observations.take();
+            }
             Subscription::WindowActivationObservation { observations, .. } => {
                 observations.take();
             }
@@ -5175,6 +5306,27 @@ impl Drop for Subscription {
                     observations.lock().remove(id);
                 }
             }
+            Subscription::KeystrokeObservation {
+                id,
+                window_id,
+                observations,
+            } => {
+                if let Some(observations) = observations.as_ref().and_then(Weak::upgrade) {
+                    match observations
+                        .lock()
+                        .entry(*window_id)
+                        .or_default()
+                        .entry(*id)
+                    {
+                        btree_map::Entry::Vacant(entry) => {
+                            entry.insert(None);
+                        }
+                        btree_map::Entry::Occupied(entry) => {
+                            entry.remove();
+                        }
+                    }
+                }
+            }
             Subscription::WindowActivationObservation {
                 id,
                 window_id,

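A minimal usage sketch of the keystroke-observation API introduced above. It assumes we are inside a MutableAppContext update with a window_id in hand; the logging body is purely illustrative, and the "return false to stop observing" behavior follows the emit_and_cleanup pattern used by the other observations:

    // Keep the returned Subscription alive for as long as observation should last.
    let _subscription =
        cx.observe_keystrokes(window_id, |keystroke, _match_result, handled_by, _cx| {
            match handled_by {
                Some(action) => println!("{:?} handled by action {}", keystroke, action.name()),
                None => println!("{:?} was not handled", keystroke),
            }
            true // returning false would drop this observer on the next emit
        });
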
crates/gpui/src/executor.rs 🔗

@@ -66,21 +66,32 @@ struct DeterministicState {
     rng: rand::prelude::StdRng,
     seed: u64,
     scheduled_from_foreground: collections::HashMap<usize, Vec<ForegroundRunnable>>,
-    scheduled_from_background: Vec<Runnable>,
+    scheduled_from_background: Vec<BackgroundRunnable>,
     forbid_parking: bool,
     block_on_ticks: std::ops::RangeInclusive<usize>,
     now: std::time::Instant,
     next_timer_id: usize,
     pending_timers: Vec<(usize, std::time::Instant, postage::barrier::Sender)>,
     waiting_backtrace: Option<backtrace::Backtrace>,
+    next_runnable_id: usize,
+    poll_history: Vec<usize>,
+    enable_runnable_backtraces: bool,
+    runnable_backtraces: collections::HashMap<usize, backtrace::Backtrace>,
 }
 
 #[cfg(any(test, feature = "test-support"))]
 struct ForegroundRunnable {
+    id: usize,
     runnable: Runnable,
     main: bool,
 }
 
+#[cfg(any(test, feature = "test-support"))]
+struct BackgroundRunnable {
+    id: usize,
+    runnable: Runnable,
+}
+
 #[cfg(any(test, feature = "test-support"))]
 pub struct Deterministic {
     state: Arc<parking_lot::Mutex<DeterministicState>>,
@@ -117,11 +128,29 @@ impl Deterministic {
                 next_timer_id: Default::default(),
                 pending_timers: Default::default(),
                 waiting_backtrace: None,
+                next_runnable_id: 0,
+                poll_history: Default::default(),
+                enable_runnable_backtraces: false,
+                runnable_backtraces: Default::default(),
             })),
             parker: Default::default(),
         })
     }
 
+    pub fn runnable_history(&self) -> Vec<usize> {
+        self.state.lock().poll_history.clone()
+    }
+
+    pub fn enable_runnable_backtrace(&self) {
+        self.state.lock().enable_runnable_backtraces = true;
+    }
+
+    pub fn runnable_backtrace(&self, runnable_id: usize) -> backtrace::Backtrace {
+        let mut backtrace = self.state.lock().runnable_backtraces[&runnable_id].clone();
+        backtrace.resolve();
+        backtrace
+    }
+
     pub fn build_background(self: &Arc<Self>) -> Arc<Background> {
         Arc::new(Background::Deterministic {
             executor: self.clone(),
@@ -142,6 +171,17 @@ impl Deterministic {
         main: bool,
     ) -> AnyLocalTask {
         let state = self.state.clone();
+        let id;
+        {
+            let mut state = state.lock();
+            id = util::post_inc(&mut state.next_runnable_id);
+            if state.enable_runnable_backtraces {
+                state
+                    .runnable_backtraces
+                    .insert(id, backtrace::Backtrace::new_unresolved());
+            }
+        }
+
         let unparker = self.parker.lock().unparker();
         let (runnable, task) = async_task::spawn_local(future, move |runnable| {
             let mut state = state.lock();
@@ -149,7 +189,7 @@ impl Deterministic {
                 .scheduled_from_foreground
                 .entry(cx_id)
                 .or_default()
-                .push(ForegroundRunnable { runnable, main });
+                .push(ForegroundRunnable { id, runnable, main });
             unparker.unpark();
         });
         runnable.schedule();
@@ -158,10 +198,23 @@ impl Deterministic {
 
     fn spawn(&self, future: AnyFuture) -> AnyTask {
         let state = self.state.clone();
+        let id;
+        {
+            let mut state = state.lock();
+            id = util::post_inc(&mut state.next_runnable_id);
+            if state.enable_runnable_backtraces {
+                state
+                    .runnable_backtraces
+                    .insert(id, backtrace::Backtrace::new_unresolved());
+            }
+        }
+
         let unparker = self.parker.lock().unparker();
         let (runnable, task) = async_task::spawn(future, move |runnable| {
             let mut state = state.lock();
-            state.scheduled_from_background.push(runnable);
+            state
+                .scheduled_from_background
+                .push(BackgroundRunnable { id, runnable });
             unparker.unpark();
         });
         runnable.schedule();
@@ -178,15 +231,27 @@ impl Deterministic {
         let woken = Arc::new(AtomicBool::new(false));
 
         let state = self.state.clone();
+        let id;
+        {
+            let mut state = state.lock();
+            id = util::post_inc(&mut state.next_runnable_id);
+            if state.enable_runnable_backtraces {
+                state
+                    .runnable_backtraces
+                    .insert(id, backtrace::Backtrace::new_unresolved());
+            }
+        }
+
         let unparker = self.parker.lock().unparker();
         let (runnable, mut main_task) = unsafe {
             async_task::spawn_unchecked(main_future, move |runnable| {
-                let mut state = state.lock();
+                let state = &mut *state.lock();
                 state
                     .scheduled_from_foreground
                     .entry(cx_id)
                     .or_default()
                     .push(ForegroundRunnable {
+                        id: util::post_inc(&mut state.next_runnable_id),
                         runnable,
                         main: true,
                     });
@@ -248,9 +313,10 @@ impl Deterministic {
             if !state.scheduled_from_background.is_empty() && state.rng.gen() {
                 let background_len = state.scheduled_from_background.len();
                 let ix = state.rng.gen_range(0..background_len);
-                let runnable = state.scheduled_from_background.remove(ix);
+                let background_runnable = state.scheduled_from_background.remove(ix);
+                state.poll_history.push(background_runnable.id);
                 drop(state);
-                runnable.run();
+                background_runnable.runnable.run();
             } else if !state.scheduled_from_foreground.is_empty() {
                 let available_cx_ids = state
                     .scheduled_from_foreground
@@ -266,6 +332,7 @@ impl Deterministic {
                 if scheduled_from_cx.is_empty() {
                     state.scheduled_from_foreground.remove(&cx_id_to_run);
                 }
+                state.poll_history.push(foreground_runnable.id);
 
                 drop(state);
 
@@ -298,9 +365,10 @@ impl Deterministic {
             let runnable_count = state.scheduled_from_background.len();
             let ix = state.rng.gen_range(0..=runnable_count);
             if ix < state.scheduled_from_background.len() {
-                let runnable = state.scheduled_from_background.remove(ix);
+                let background_runnable = state.scheduled_from_background.remove(ix);
+                state.poll_history.push(background_runnable.id);
                 drop(state);
-                runnable.run();
+                background_runnable.runnable.run();
             } else {
                 drop(state);
                 if let Poll::Ready(result) = future.poll(&mut cx) {

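As a hedged sketch, the new runnable bookkeeping can also be queried directly; this is the same API the run_test harness below relies on, and the surrounding test setup is assumed:

    let deterministic = Deterministic::new(seed); // gpui::executor::Deterministic
    deterministic.enable_runnable_backtrace();
    // ... drive the test body on this executor ...
    let history = deterministic.runnable_history();
    if let Some(last_id) = history.last() {
        // Backtrace captured where the runnable was spawned, resolved lazily here.
        let backtrace = deterministic.runnable_backtrace(*last_id);
        eprintln!("last runnable polled: {}\n{:?}", last_id, backtrace);
    }
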
crates/gpui/src/keymap.rs 🔗

@@ -112,6 +112,21 @@ impl PartialEq for MatchResult {
 
 impl Eq for MatchResult {}
 
+impl Clone for MatchResult {
+    fn clone(&self) -> Self {
+        match self {
+            MatchResult::None => MatchResult::None,
+            MatchResult::Pending => MatchResult::Pending,
+            MatchResult::Matches(matches) => MatchResult::Matches(
+                matches
+                    .iter()
+                    .map(|(view_id, action)| (*view_id, Action::boxed_clone(action.as_ref())))
+                    .collect(),
+            ),
+        }
+    }
+}
+
 impl Matcher {
     pub fn new(keymap: Keymap) -> Self {
         Self {

crates/gpui/src/presenter.rs 🔗

@@ -17,10 +17,15 @@ use crate::{
     SceneBuilder, UpgradeModelHandle, UpgradeViewHandle, View, ViewHandle, WeakModelHandle,
     WeakViewHandle,
 };
+use anyhow::bail;
 use collections::{HashMap, HashSet};
 use pathfinder_geometry::vector::{vec2f, Vector2F};
 use serde_json::json;
 use smallvec::SmallVec;
+use sqlez::{
+    bindable::{Bind, Column},
+    statement::Statement,
+};
 use std::{
     marker::PhantomData,
     ops::{Deref, DerefMut, Range},
@@ -863,8 +868,9 @@ pub struct DebugContext<'a> {
     pub app: &'a AppContext,
 }
 
-#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
 pub enum Axis {
+    #[default]
     Horizontal,
     Vertical,
 }
@@ -894,6 +900,31 @@ impl ToJson for Axis {
     }
 }
 
+impl Bind for Axis {
+    fn bind(&self, statement: &Statement, start_index: i32) -> anyhow::Result<i32> {
+        match self {
+            Axis::Horizontal => "Horizontal",
+            Axis::Vertical => "Vertical",
+        }
+        .bind(statement, start_index)
+    }
+}
+
+impl Column for Axis {
+    fn column(statement: &mut Statement, start_index: i32) -> anyhow::Result<(Self, i32)> {
+        String::column(statement, start_index).and_then(|(axis_text, next_index)| {
+            Ok((
+                match axis_text.as_str() {
+                    "Horizontal" => Axis::Horizontal,
+                    "Vertical" => Axis::Vertical,
+                    _ => bail!("Stored serialized axis value is incorrect"),
+                },
+                next_index,
+            ))
+        })
+    }
+}
+
 pub trait Vector2FExt {
     fn along(self, axis: Axis) -> f32;
 }

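The same Bind/Column pattern generalizes to other enums that need to round-trip through sqlez. A hypothetical example (DockPosition is not part of this change) storing the variant name as text, which keeps the persisted value readable and stable if variants are reordered:

    use anyhow::bail;
    use sqlez::{
        bindable::{Bind, Column},
        statement::Statement,
    };

    #[derive(Clone, Copy, Debug)]
    pub enum DockPosition {
        Left,
        Right,
    }

    impl Bind for DockPosition {
        fn bind(&self, statement: &Statement, start_index: i32) -> anyhow::Result<i32> {
            match self {
                DockPosition::Left => "Left",
                DockPosition::Right => "Right",
            }
            .bind(statement, start_index)
        }
    }

    impl Column for DockPosition {
        fn column(statement: &mut Statement, start_index: i32) -> anyhow::Result<(Self, i32)> {
            let (text, next_index) = String::column(statement, start_index)?;
            Ok((
                match text.as_str() {
                    "Left" => DockPosition::Left,
                    "Right" => DockPosition::Right,
                    other => bail!("unrecognized dock position: {}", other),
                },
                next_index,
            ))
        }
    }
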
crates/gpui/src/test.rs 🔗

@@ -1,11 +1,13 @@
 use crate::{
-    elements::Empty, executor, platform, Element, ElementBox, Entity, FontCache, Handle,
-    LeakDetector, MutableAppContext, Platform, RenderContext, Subscription, TestAppContext, View,
+    elements::Empty, executor, platform, util::CwdBacktrace, Element, ElementBox, Entity,
+    FontCache, Handle, LeakDetector, MutableAppContext, Platform, RenderContext, Subscription,
+    TestAppContext, View,
 };
 use futures::StreamExt;
 use parking_lot::Mutex;
 use smol::channel;
 use std::{
+    fmt::Write,
     panic::{self, RefUnwindSafe},
     rc::Rc,
     sync::{
@@ -29,13 +31,13 @@ pub fn run_test(
     mut num_iterations: u64,
     mut starting_seed: u64,
     max_retries: usize,
+    detect_nondeterminism: bool,
     test_fn: &mut (dyn RefUnwindSafe
               + Fn(
         &mut MutableAppContext,
         Rc<platform::test::ForegroundPlatform>,
         Arc<executor::Deterministic>,
         u64,
-        bool,
     )),
     fn_name: String,
 ) {
@@ -60,16 +62,20 @@ pub fn run_test(
             let platform = Arc::new(platform::test::platform());
             let font_system = platform.fonts();
             let font_cache = Arc::new(FontCache::new(font_system));
+            let mut prev_runnable_history: Option<Vec<usize>> = None;
 
-            loop {
-                let seed = atomic_seed.fetch_add(1, SeqCst);
-                let is_last_iteration = seed + 1 >= starting_seed + num_iterations;
+            for _ in 0..num_iterations {
+                let seed = atomic_seed.load(SeqCst);
 
                 if is_randomized {
                     dbg!(seed);
                 }
 
                 let deterministic = executor::Deterministic::new(seed);
+                if detect_nondeterminism {
+                    deterministic.enable_runnable_backtrace();
+                }
+
                 let leak_detector = Arc::new(Mutex::new(LeakDetector::default()));
                 let mut cx = TestAppContext::new(
                     foreground_platform.clone(),
@@ -82,13 +88,7 @@ pub fn run_test(
                     fn_name.clone(),
                 );
                 cx.update(|cx| {
-                    test_fn(
-                        cx,
-                        foreground_platform.clone(),
-                        deterministic.clone(),
-                        seed,
-                        is_last_iteration,
-                    );
+                    test_fn(cx, foreground_platform.clone(), deterministic.clone(), seed);
                 });
 
                 cx.update(|cx| cx.remove_all_windows());
@@ -96,8 +96,64 @@ pub fn run_test(
                 cx.update(|cx| cx.clear_globals());
 
                 leak_detector.lock().detect();
-                if is_last_iteration {
-                    break;
+
+                if detect_nondeterminism {
+                    let curr_runnable_history = deterministic.runnable_history();
+                    if let Some(prev_runnable_history) = prev_runnable_history {
+                        let mut prev_entries = prev_runnable_history.iter().fuse();
+                        let mut curr_entries = curr_runnable_history.iter().fuse();
+
+                        let mut nondeterministic = false;
+                        let mut common_history_prefix = Vec::new();
+                        let mut prev_history_suffix = Vec::new();
+                        let mut curr_history_suffix = Vec::new();
+                        loop {
+                            match (prev_entries.next(), curr_entries.next()) {
+                                (None, None) => break,
+                                (None, Some(curr_id)) => curr_history_suffix.push(*curr_id),
+                                (Some(prev_id), None) => prev_history_suffix.push(*prev_id),
+                                (Some(prev_id), Some(curr_id)) => {
+                                    if nondeterministic {
+                                        prev_history_suffix.push(*prev_id);
+                                        curr_history_suffix.push(*curr_id);
+                                    } else if prev_id == curr_id {
+                                        common_history_prefix.push(*curr_id);
+                                    } else {
+                                        nondeterministic = true;
+                                        prev_history_suffix.push(*prev_id);
+                                        curr_history_suffix.push(*curr_id);
+                                    }
+                                }
+                            }
+                        }
+
+                        if nondeterministic {
+                            let mut error = String::new();
+                            writeln!(&mut error, "Common prefix: {:?}", common_history_prefix)
+                                .unwrap();
+                            writeln!(&mut error, "Previous suffix: {:?}", prev_history_suffix)
+                                .unwrap();
+                            writeln!(&mut error, "Current suffix: {:?}", curr_history_suffix)
+                                .unwrap();
+
+                            let last_common_backtrace = common_history_prefix
+                                .last()
+                                .map(|runnable_id| deterministic.runnable_backtrace(*runnable_id));
+
+                            writeln!(
+                                &mut error,
+                                "Last future that ran on both executions: {:?}",
+                                last_common_backtrace.as_ref().map(CwdBacktrace)
+                            )
+                            .unwrap();
+                            panic!("Detected non-determinism.\n{}", error);
+                        }
+                    }
+                    prev_runnable_history = Some(curr_runnable_history);
+                }
+
+                if !detect_nondeterminism {
+                    atomic_seed.fetch_add(1, SeqCst);
                 }
             }
         });
@@ -112,7 +168,7 @@ pub fn run_test(
                     println!("retrying: attempt {}", retries);
                 } else {
                     if is_randomized {
-                        eprintln!("failing seed: {}", atomic_seed.load(SeqCst) - 1);
+                        eprintln!("failing seed: {}", atomic_seed.load(SeqCst));
                     }
                     panic::resume_unwind(error);
                 }

crates/gpui_macros/src/gpui_macros.rs 🔗

@@ -14,6 +14,7 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
     let mut max_retries = 0;
     let mut num_iterations = 1;
     let mut starting_seed = 0;
+    let mut detect_nondeterminism = false;
 
     for arg in args {
         match arg {
@@ -26,6 +27,9 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
                 let key_name = meta.path.get_ident().map(|i| i.to_string());
                 let result = (|| {
                     match key_name.as_deref() {
+                        Some("detect_nondeterminism") => {
+                            detect_nondeterminism = parse_bool(&meta.lit)?
+                        }
                         Some("retries") => max_retries = parse_int(&meta.lit)?,
                         Some("iterations") => num_iterations = parse_int(&meta.lit)?,
                         Some("seed") => starting_seed = parse_int(&meta.lit)?,
@@ -77,10 +81,6 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
                             inner_fn_args.extend(quote!(rand::SeedableRng::seed_from_u64(seed),));
                             continue;
                         }
-                        Some("bool") => {
-                            inner_fn_args.extend(quote!(is_last_iteration,));
-                            continue;
-                        }
                         Some("Arc") => {
                             if let syn::PathArguments::AngleBracketed(args) =
                                 &last_segment.unwrap().arguments
@@ -146,7 +146,8 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
                     #num_iterations as u64,
                     #starting_seed as u64,
                     #max_retries,
-                    &mut |cx, foreground_platform, deterministic, seed, is_last_iteration| {
+                    #detect_nondeterminism,
+                    &mut |cx, foreground_platform, deterministic, seed| {
                         #cx_vars
                         cx.foreground().run(#inner_fn_name(#inner_fn_args));
                         #cx_teardowns
@@ -165,9 +166,6 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
                         Some("StdRng") => {
                             inner_fn_args.extend(quote!(rand::SeedableRng::seed_from_u64(seed),));
                         }
-                        Some("bool") => {
-                            inner_fn_args.extend(quote!(is_last_iteration,));
-                        }
                         _ => {}
                     }
                 } else {
@@ -189,7 +187,8 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
                     #num_iterations as u64,
                     #starting_seed as u64,
                     #max_retries,
-                    &mut |cx, _, _, seed, is_last_iteration| #inner_fn_name(#inner_fn_args),
+                    #detect_nondeterminism,
+                    &mut |cx, _, _, seed| #inner_fn_name(#inner_fn_args),
                     stringify!(#outer_fn_name).to_string(),
                 );
             }
@@ -209,3 +208,13 @@ fn parse_int(literal: &Lit) -> Result<usize, TokenStream> {
 
     result.map_err(|err| TokenStream::from(err.into_compile_error()))
 }
+
+fn parse_bool(literal: &Lit) -> Result<bool, TokenStream> {
+    let result = if let Lit::Bool(result) = &literal {
+        Ok(result.value)
+    } else {
+        Err(syn::Error::new(literal.span(), "must be a boolean"))
+    };
+
+    result.map_err(|err| TokenStream::from(err.into_compile_error()))
+}
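
A hypothetical test using the new attribute as parsed above (the test name and body are illustrative; iterations and seed are pre-existing keys):

    #[gpui::test(iterations = 20, detect_nondeterminism = true)]
    async fn test_scheduling_is_deterministic(cx: &mut gpui::TestAppContext) {
        // With detection enabled, run_test keeps the seed fixed, replays each iteration,
        // and panics if the order in which runnables are polled differs between runs.
        let _ = cx;
    }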

crates/journal/src/journal.rs 🔗

@@ -1,5 +1,5 @@
 use chrono::{Datelike, Local, NaiveTime, Timelike};
-use editor::{Autoscroll, Editor};
+use editor::{scroll::autoscroll::Autoscroll, Editor};
 use gpui::{actions, MutableAppContext};
 use settings::{HourFormat, Settings};
 use std::{
@@ -115,7 +115,7 @@ mod tests {
 
         #[test]
         fn test_heading_entry_defaults_to_hour_12() {
-            let naive_time = NaiveTime::from_hms_milli(15, 0, 0, 0);
+            let naive_time = NaiveTime::from_hms_milli_opt(15, 0, 0, 0).unwrap();
             let actual_heading_entry = heading_entry(naive_time, &None);
             let expected_heading_entry = "# 3:00 PM";
 
@@ -124,7 +124,7 @@ mod tests {
 
         #[test]
         fn test_heading_entry_is_hour_12() {
-            let naive_time = NaiveTime::from_hms_milli(15, 0, 0, 0);
+            let naive_time = NaiveTime::from_hms_milli_opt(15, 0, 0, 0).unwrap();
             let actual_heading_entry = heading_entry(naive_time, &Some(HourFormat::Hour12));
             let expected_heading_entry = "# 3:00 PM";
 
@@ -133,7 +133,7 @@ mod tests {
 
         #[test]
         fn test_heading_entry_is_hour_24() {
-            let naive_time = NaiveTime::from_hms_milli(15, 0, 0, 0);
+            let naive_time = NaiveTime::from_hms_milli_opt(15, 0, 0, 0).unwrap();
             let actual_heading_entry = heading_entry(naive_time, &Some(HourFormat::Hour24));
             let expected_heading_entry = "# 15:00";
 

crates/outline/src/outline.rs 🔗

@@ -1,6 +1,6 @@
 use editor::{
-    combine_syntax_and_fuzzy_match_highlights, display_map::ToDisplayPoint, Anchor, AnchorRangeExt,
-    Autoscroll, DisplayPoint, Editor, ToPoint,
+    combine_syntax_and_fuzzy_match_highlights, display_map::ToDisplayPoint,
+    scroll::autoscroll::Autoscroll, Anchor, AnchorRangeExt, DisplayPoint, Editor, ToPoint,
 };
 use fuzzy::StringMatch;
 use gpui::{

crates/project/Cargo.toml 🔗

@@ -32,6 +32,7 @@ lsp = { path = "../lsp" }
 rpc = { path = "../rpc" }
 settings = { path = "../settings" }
 sum_tree = { path = "../sum_tree" }
+terminal = { path = "../terminal" }
 util = { path = "../util" }
 aho-corasick = "0.7"
 anyhow = "1.0.57"

crates/project/src/project.rs 🔗

@@ -10,7 +10,11 @@ use anyhow::{anyhow, Context, Result};
 use client::{proto, Client, PeerId, TypedEnvelope, UserStore};
 use clock::ReplicaId;
 use collections::{hash_map, BTreeMap, HashMap, HashSet};
-use futures::{future::Shared, AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt};
+use futures::{
+    channel::{mpsc, oneshot},
+    future::Shared,
+    AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
+};
 
 use gpui::{
     AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
@@ -45,12 +49,10 @@ use std::{
     cell::RefCell,
     cmp::{self, Ordering},
     convert::TryInto,
-    ffi::OsString,
     hash::Hash,
     mem,
     num::NonZeroU32,
     ops::Range,
-    os::unix::{ffi::OsStrExt, prelude::OsStringExt},
     path::{Component, Path, PathBuf},
     rc::Rc,
     str,
@@ -60,10 +62,10 @@ use std::{
     },
     time::Instant,
 };
+use terminal::{Terminal, TerminalBuilder};
 use thiserror::Error;
 use util::{defer, post_inc, ResultExt, TryFutureExt as _};
 
-pub use db::Db;
 pub use fs::*;
 pub use worktree::*;
 
@@ -71,10 +73,6 @@ pub trait Item: Entity {
     fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
 }
 
-pub struct ProjectStore {
-    projects: Vec<WeakModelHandle<Project>>,
-}
-
 // Language server state is stored across 3 collections:
 //     language_servers =>
 //         a mapping from unique server id to LanguageServerState which can either be a task for a
@@ -103,7 +101,6 @@ pub struct Project {
     next_entry_id: Arc<AtomicUsize>,
     next_diagnostic_group_id: usize,
     user_store: ModelHandle<UserStore>,
-    project_store: ModelHandle<ProjectStore>,
     fs: Arc<dyn Fs>,
     client_state: Option<ProjectClientState>,
     collaborators: HashMap<PeerId, Collaborator>,
@@ -153,6 +150,8 @@ enum WorktreeHandle {
 enum ProjectClientState {
     Local {
         remote_id: u64,
+        metadata_changed: mpsc::UnboundedSender<oneshot::Sender<()>>,
+        _maintain_metadata: Task<()>,
         _detect_unshare: Task<Option<()>>,
     },
     Remote {
@@ -413,46 +412,39 @@ impl Project {
     pub fn local(
         client: Arc<Client>,
         user_store: ModelHandle<UserStore>,
-        project_store: ModelHandle<ProjectStore>,
         languages: Arc<LanguageRegistry>,
         fs: Arc<dyn Fs>,
         cx: &mut MutableAppContext,
     ) -> ModelHandle<Self> {
-        cx.add_model(|cx: &mut ModelContext<Self>| {
-            let handle = cx.weak_handle();
-            project_store.update(cx, |store, cx| store.add_project(handle, cx));
-
-            Self {
-                worktrees: Default::default(),
-                collaborators: Default::default(),
-                opened_buffers: Default::default(),
-                shared_buffers: Default::default(),
-                incomplete_buffers: Default::default(),
-                loading_buffers: Default::default(),
-                loading_local_worktrees: Default::default(),
-                buffer_snapshots: Default::default(),
-                client_state: None,
-                opened_buffer: watch::channel(),
-                client_subscriptions: Vec::new(),
-                _subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
-                _maintain_buffer_languages: Self::maintain_buffer_languages(&languages, cx),
-                active_entry: None,
-                languages,
-                client,
-                user_store,
-                project_store,
-                fs,
-                next_entry_id: Default::default(),
-                next_diagnostic_group_id: Default::default(),
-                language_servers: Default::default(),
-                language_server_ids: Default::default(),
-                language_server_statuses: Default::default(),
-                last_workspace_edits_by_language_server: Default::default(),
-                language_server_settings: Default::default(),
-                buffers_being_formatted: Default::default(),
-                next_language_server_id: 0,
-                nonce: StdRng::from_entropy().gen(),
-            }
+        cx.add_model(|cx: &mut ModelContext<Self>| Self {
+            worktrees: Default::default(),
+            collaborators: Default::default(),
+            opened_buffers: Default::default(),
+            shared_buffers: Default::default(),
+            incomplete_buffers: Default::default(),
+            loading_buffers: Default::default(),
+            loading_local_worktrees: Default::default(),
+            buffer_snapshots: Default::default(),
+            client_state: None,
+            opened_buffer: watch::channel(),
+            client_subscriptions: Vec::new(),
+            _subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
+            _maintain_buffer_languages: Self::maintain_buffer_languages(&languages, cx),
+            active_entry: None,
+            languages,
+            client,
+            user_store,
+            fs,
+            next_entry_id: Default::default(),
+            next_diagnostic_group_id: Default::default(),
+            language_servers: Default::default(),
+            language_server_ids: Default::default(),
+            language_server_statuses: Default::default(),
+            last_workspace_edits_by_language_server: Default::default(),
+            language_server_settings: Default::default(),
+            buffers_being_formatted: Default::default(),
+            next_language_server_id: 0,
+            nonce: StdRng::from_entropy().gen(),
         })
     }
 
@@ -460,31 +452,28 @@ impl Project {
         remote_id: u64,
         client: Arc<Client>,
         user_store: ModelHandle<UserStore>,
-        project_store: ModelHandle<ProjectStore>,
         languages: Arc<LanguageRegistry>,
         fs: Arc<dyn Fs>,
         mut cx: AsyncAppContext,
     ) -> Result<ModelHandle<Self>, JoinProjectError> {
         client.authenticate_and_connect(true, &cx).await?;
 
+        let subscription = client.subscribe_to_entity(remote_id);
         let response = client
             .request(proto::JoinProject {
                 project_id: remote_id,
             })
             .await?;
+        let this = cx.add_model(|cx| {
+            let replica_id = response.replica_id as ReplicaId;
 
-        let replica_id = response.replica_id as ReplicaId;
-
-        let mut worktrees = Vec::new();
-        for worktree in response.worktrees {
-            let worktree = cx
-                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
-            worktrees.push(worktree);
-        }
-
-        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
-            let handle = cx.weak_handle();
-            project_store.update(cx, |store, cx| store.add_project(handle, cx));
+            let mut worktrees = Vec::new();
+            for worktree in response.worktrees {
+                let worktree = cx.update(|cx| {
+                    Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx)
+                });
+                worktrees.push(worktree);
+            }
 
             let mut this = Self {
                 worktrees: Vec::new(),
@@ -498,11 +487,10 @@ impl Project {
                 _maintain_buffer_languages: Self::maintain_buffer_languages(&languages, cx),
                 languages,
                 user_store: user_store.clone(),
-                project_store,
                 fs,
                 next_entry_id: Default::default(),
                 next_diagnostic_group_id: Default::default(),
-                client_subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
+                client_subscriptions: Default::default(),
                 _subscriptions: Default::default(),
                 client: client.clone(),
                 client_state: Some(ProjectClientState::Remote {
@@ -551,10 +539,11 @@ impl Project {
                 nonce: StdRng::from_entropy().gen(),
             };
             for worktree in worktrees {
-                this.add_worktree(&worktree, cx);
+                let _ = this.add_worktree(&worktree, cx);
             }
             this
         });
+        let subscription = subscription.set_model(&this, &mut cx);
 
         let user_ids = response
             .collaborators
@@ -572,6 +561,7 @@ impl Project {
 
         this.update(&mut cx, |this, _| {
             this.collaborators = collaborators;
+            this.client_subscriptions.push(subscription);
         });
 
         Ok(this)
@@ -594,9 +584,7 @@ impl Project {
         let http_client = client::test::FakeHttpClient::with_404_response();
         let client = cx.update(|cx| client::Client::new(http_client.clone(), cx));
         let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
-        let project_store = cx.add_model(|_| ProjectStore::new());
-        let project =
-            cx.update(|cx| Project::local(client, user_store, project_store, languages, fs, cx));
+        let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
         for path in root_paths {
             let (tree, _) = project
                 .update(cx, |project, cx| {
@@ -677,10 +665,6 @@ impl Project {
         self.user_store.clone()
     }
 
-    pub fn project_store(&self) -> ModelHandle<ProjectStore> {
-        self.project_store.clone()
-    }
-
     #[cfg(any(test, feature = "test-support"))]
     pub fn check_invariants(&self, cx: &AppContext) {
         if self.is_local() {
@@ -752,59 +736,29 @@ impl Project {
         }
     }
 
-    fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
-        if let Some(ProjectClientState::Local { remote_id, .. }) = &self.client_state {
-            let project_id = *remote_id;
-            // Broadcast worktrees only if the project is online.
-            let worktrees = self
-                .worktrees
-                .iter()
-                .filter_map(|worktree| {
-                    worktree
-                        .upgrade(cx)
-                        .map(|worktree| worktree.read(cx).as_local().unwrap().metadata_proto())
-                })
-                .collect();
-            self.client
-                .send(proto::UpdateProject {
-                    project_id,
-                    worktrees,
-                })
-                .log_err();
-
-            let worktrees = self.visible_worktrees(cx).collect::<Vec<_>>();
-            let scans_complete = futures::future::join_all(
-                worktrees
-                    .iter()
-                    .filter_map(|worktree| Some(worktree.read(cx).as_local()?.scan_complete())),
-            );
-
-            let worktrees = worktrees.into_iter().map(|handle| handle.downgrade());
-
-            cx.spawn_weak(move |_, cx| async move {
-                scans_complete.await;
-                cx.read(|cx| {
-                    for worktree in worktrees {
-                        if let Some(worktree) = worktree
-                            .upgrade(cx)
-                            .and_then(|worktree| worktree.read(cx).as_local())
-                        {
-                            worktree.send_extension_counts(project_id);
-                        }
-                    }
-                })
-            })
-            .detach();
+    fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) -> impl Future<Output = ()> {
+        let (tx, rx) = oneshot::channel();
+        if let Some(ProjectClientState::Local {
+            metadata_changed, ..
+        }) = &mut self.client_state
+        {
+            let _ = metadata_changed.unbounded_send(tx);
         }
-
-        self.project_store.update(cx, |_, cx| cx.notify());
         cx.notify();
+
+        async move {
+            // If the project is shared, this will resolve when the `_maintain_metadata` task has
+            // a chance to update the metadata. Otherwise, it will resolve right away because `tx`
+            // will get dropped.
+            let _ = rx.await;
+        }
     }
 
     pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
         &self.collaborators
     }
 
+    /// Collect all worktrees, including ones that don't appear in the project panel
     pub fn worktrees<'a>(
         &'a self,
         cx: &'a AppContext,
@@ -814,6 +768,7 @@ impl Project {
             .filter_map(move |worktree| worktree.upgrade(cx))
     }
 
+    /// Collect all user-visible worktrees, the ones that appear in the project panel
     pub fn visible_worktrees<'a>(
         &'a self,
         cx: &'a AppContext,
@@ -898,7 +853,7 @@ impl Project {
                     .request(proto::CreateProjectEntry {
                         worktree_id: project_path.worktree_id.to_proto(),
                         project_id,
-                        path: project_path.path.as_os_str().as_bytes().to_vec(),
+                        path: project_path.path.to_string_lossy().into(),
                         is_directory,
                     })
                     .await?;
@@ -942,7 +897,7 @@ impl Project {
                     .request(proto::CopyProjectEntry {
                         project_id,
                         entry_id: entry_id.to_proto(),
-                        new_path: new_path.as_os_str().as_bytes().to_vec(),
+                        new_path: new_path.to_string_lossy().into(),
                     })
                     .await?;
                 let entry = response
@@ -985,7 +940,7 @@ impl Project {
                     .request(proto::RenameProjectEntry {
                         project_id,
                         entry_id: entry_id.to_proto(),
-                        new_path: new_path.as_os_str().as_bytes().to_vec(),
+                        new_path: new_path.to_string_lossy().into(),
                     })
                     .await?;
                 let entry = response
@@ -1086,15 +1041,51 @@ impl Project {
             });
         }
 
-        self.client_subscriptions
-            .push(self.client.add_model_for_remote_entity(project_id, cx));
-        self.metadata_changed(cx);
+        self.client_subscriptions.push(
+            self.client
+                .subscribe_to_entity(project_id)
+                .set_model(&cx.handle(), &mut cx.to_async()),
+        );
+        let _ = self.metadata_changed(cx);
         cx.emit(Event::RemoteIdChanged(Some(project_id)));
         cx.notify();
 
         let mut status = self.client.status();
+        let (metadata_changed_tx, mut metadata_changed_rx) = mpsc::unbounded();
         self.client_state = Some(ProjectClientState::Local {
             remote_id: project_id,
+            metadata_changed: metadata_changed_tx,
+            _maintain_metadata: cx.spawn_weak(move |this, cx| async move {
+                while let Some(tx) = metadata_changed_rx.next().await {
+                    let mut txs = vec![tx];
+                    while let Ok(Some(next_tx)) = metadata_changed_rx.try_next() {
+                        txs.push(next_tx);
+                    }
+
+                    let Some(this) = this.upgrade(&cx) else { break };
+                    this.read_with(&cx, |this, cx| {
+                        let worktrees = this
+                            .worktrees
+                            .iter()
+                            .filter_map(|worktree| {
+                                worktree.upgrade(cx).map(|worktree| {
+                                    worktree.read(cx).as_local().unwrap().metadata_proto()
+                                })
+                            })
+                            .collect();
+                        this.client.request(proto::UpdateProject {
+                            project_id,
+                            worktrees,
+                        })
+                    })
+                    .await
+                    .log_err();
+
+                    for tx in txs {
+                        let _ = tx.send(());
+                    }
+                }
+            }),
             _detect_unshare: cx.spawn_weak(move |this, mut cx| {
                 async move {
                     let is_connected = status.next().await.map_or(false, |s| s.is_connected());
@@ -1144,7 +1135,7 @@ impl Project {
                 }
             }
 
-            self.metadata_changed(cx);
+            let _ = self.metadata_changed(cx);
             cx.notify();
             self.client.send(proto::UnshareProject {
                 project_id: remote_id,
@@ -1203,6 +1194,34 @@ impl Project {
         !self.is_local()
     }
 
+    pub fn create_terminal(
+        &mut self,
+        working_directory: Option<PathBuf>,
+        window_id: usize,
+        cx: &mut ModelContext<Self>,
+    ) -> Result<ModelHandle<Terminal>> {
+        if self.is_remote() {
+            return Err(anyhow!(
+                "creating terminals as a guest is not supported yet"
+            ));
+        } else {
+            let settings = cx.global::<Settings>();
+            let shell = settings.terminal_shell();
+            let envs = settings.terminal_env();
+            let scroll = settings.terminal_scroll();
+
+            TerminalBuilder::new(
+                working_directory.clone(),
+                shell,
+                envs,
+                settings.terminal_overrides.blinking.clone(),
+                scroll,
+                window_id,
+            )
+            .map(|builder| cx.add_model(|cx| builder.subscribe(cx)))
+        }
+    }
+
     pub fn create_buffer(
         &mut self,
         text: &str,
@@ -1633,10 +1652,6 @@ impl Project {
                         operations: vec![language::proto::serialize_operation(operation)],
                     });
                     cx.background().spawn(request).detach_and_log_err(cx);
-                } else if let Some(project_id) = self.remote_id() {
-                    let _ = self
-                        .client
-                        .send(proto::RegisterProjectActivity { project_id });
                 }
             }
             BufferEvent::Edited { .. } => {
@@ -3428,19 +3443,29 @@ impl Project {
                 position: Some(language::proto::serialize_anchor(&anchor)),
                 version: serialize_version(&source_buffer.version()),
             };
-            cx.spawn_weak(|_, mut cx| async move {
+            cx.spawn_weak(|this, mut cx| async move {
                 let response = rpc.request(message).await?;
 
-                source_buffer_handle
-                    .update(&mut cx, |buffer, _| {
-                        buffer.wait_for_version(deserialize_version(response.version))
-                    })
-                    .await;
+                if this
+                    .upgrade(&cx)
+                    .ok_or_else(|| anyhow!("project was dropped"))?
+                    .read_with(&cx, |this, _| this.is_read_only())
+                {
+                    return Err(anyhow!(
+                        "failed to get completions: project was disconnected"
+                    ));
+                } else {
+                    source_buffer_handle
+                        .update(&mut cx, |buffer, _| {
+                            buffer.wait_for_version(deserialize_version(response.version))
+                        })
+                        .await;
 
-                let completions = response.completions.into_iter().map(|completion| {
-                    language::proto::deserialize_completion(completion, language.clone())
-                });
-                futures::future::try_join_all(completions).await
+                    let completions = response.completions.into_iter().map(|completion| {
+                        language::proto::deserialize_completion(completion, language.clone())
+                    });
+                    futures::future::try_join_all(completions).await
+                }
             })
         } else {
             Task::ready(Ok(Default::default()))
@@ -3617,7 +3642,7 @@ impl Project {
         } else if let Some(project_id) = self.remote_id() {
             let rpc = self.client.clone();
             let version = buffer.version();
-            cx.spawn_weak(|_, mut cx| async move {
+            cx.spawn_weak(|this, mut cx| async move {
                 let response = rpc
                     .request(proto::GetCodeActions {
                         project_id,
@@ -3628,17 +3653,27 @@ impl Project {
                     })
                     .await?;
 
-                buffer_handle
-                    .update(&mut cx, |buffer, _| {
-                        buffer.wait_for_version(deserialize_version(response.version))
-                    })
-                    .await;
+                if this
+                    .upgrade(&cx)
+                    .ok_or_else(|| anyhow!("project was dropped"))?
+                    .read_with(&cx, |this, _| this.is_read_only())
+                {
+                    return Err(anyhow!(
+                        "failed to get code actions: project was disconnected"
+                    ));
+                } else {
+                    buffer_handle
+                        .update(&mut cx, |buffer, _| {
+                            buffer.wait_for_version(deserialize_version(response.version))
+                        })
+                        .await;
 
-                response
-                    .actions
-                    .into_iter()
-                    .map(language::proto::deserialize_code_action)
-                    .collect()
+                    response
+                        .actions
+                        .into_iter()
+                        .map(language::proto::deserialize_code_action)
+                        .collect()
+                }
             })
         } else {
             Task::ready(Ok(Default::default()))
@@ -4147,9 +4182,13 @@ impl Project {
             let message = request.to_proto(project_id, buffer);
             return cx.spawn(|this, cx| async move {
                 let response = rpc.request(message).await?;
-                request
-                    .response_from_proto(response, this, buffer_handle, cx)
-                    .await
+                if this.read_with(&cx, |this, _| this.is_read_only()) {
+                    Err(anyhow!("disconnected before completing request"))
+                } else {
+                    request
+                        .response_from_proto(response, this, buffer_handle, cx)
+                        .await
+                }
             });
         }
         Task::ready(Ok(Default::default()))
@@ -4227,12 +4266,13 @@ impl Project {
                         });
                         let worktree = worktree?;
 
-                        let project_id = project.update(&mut cx, |project, cx| {
-                            project.add_worktree(&worktree, cx);
-                            project.remote_id()
-                        });
+                        project
+                            .update(&mut cx, |project, cx| project.add_worktree(&worktree, cx))
+                            .await;
 
-                        if let Some(project_id) = project_id {
+                        if let Some(project_id) =
+                            project.read_with(&cx, |project, _| project.remote_id())
+                        {
                             worktree
                                 .update(&mut cx, |worktree, cx| {
                                     worktree.as_local_mut().unwrap().share(project_id, cx)
@@ -4256,7 +4296,11 @@ impl Project {
         })
     }
 
-    pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
+    pub fn remove_worktree(
+        &mut self,
+        id_to_remove: WorktreeId,
+        cx: &mut ModelContext<Self>,
+    ) -> impl Future<Output = ()> {
         self.worktrees.retain(|worktree| {
             if let Some(worktree) = worktree.upgrade(cx) {
                 let id = worktree.read(cx).id();
@@ -4270,11 +4314,14 @@ impl Project {
                 false
             }
         });
-        self.metadata_changed(cx);
-        cx.notify();
+        self.metadata_changed(cx)
     }
 
-    fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
+    fn add_worktree(
+        &mut self,
+        worktree: &ModelHandle<Worktree>,
+        cx: &mut ModelContext<Self>,
+    ) -> impl Future<Output = ()> {
         cx.observe(worktree, |_, _, cx| cx.notify()).detach();
         if worktree.read(cx).is_local() {
             cx.subscribe(worktree, |this, worktree, event, cx| match event {
@@ -4298,15 +4345,13 @@ impl Project {
                 .push(WorktreeHandle::Weak(worktree.downgrade()));
         }
 
-        self.metadata_changed(cx);
         cx.observe_release(worktree, |this, worktree, cx| {
-            this.remove_worktree(worktree.id(), cx);
-            cx.notify();
+            let _ = this.remove_worktree(worktree.id(), cx);
         })
         .detach();
 
         cx.emit(Event::WorktreeAdded);
-        cx.notify();
+        self.metadata_changed(cx)
     }
 
     fn update_local_worktree_buffers(
@@ -4623,11 +4668,11 @@ impl Project {
                 } else {
                     let worktree =
                         Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
-                    this.add_worktree(&worktree, cx);
+                    let _ = this.add_worktree(&worktree, cx);
                 }
             }
 
-            this.metadata_changed(cx);
+            let _ = this.metadata_changed(cx);
             for (id, _) in old_worktrees_by_id {
                 cx.emit(Event::WorktreeRemoved(id));
             }
@@ -4669,7 +4714,7 @@ impl Project {
         let entry = worktree
             .update(&mut cx, |worktree, cx| {
                 let worktree = worktree.as_local_mut().unwrap();
-                let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
+                let path = PathBuf::from(envelope.payload.path);
                 worktree.create_entry(path, envelope.payload.is_directory, cx)
             })
             .await?;
@@ -4693,7 +4738,7 @@ impl Project {
         let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
         let entry = worktree
             .update(&mut cx, |worktree, cx| {
-                let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
+                let new_path = PathBuf::from(envelope.payload.new_path);
                 worktree
                     .as_local_mut()
                     .unwrap()
@@ -4721,7 +4766,7 @@ impl Project {
         let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
         let entry = worktree
             .update(&mut cx, |worktree, cx| {
-                let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
+                let new_path = PathBuf::from(envelope.payload.new_path);
                 worktree
                     .as_local_mut()
                     .unwrap()
@@ -5863,48 +5908,6 @@ impl Project {
     }
 }
 
-impl ProjectStore {
-    pub fn new() -> Self {
-        Self {
-            projects: Default::default(),
-        }
-    }
-
-    pub fn projects<'a>(
-        &'a self,
-        cx: &'a AppContext,
-    ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
-        self.projects
-            .iter()
-            .filter_map(|project| project.upgrade(cx))
-    }
-
-    fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
-        if let Err(ix) = self
-            .projects
-            .binary_search_by_key(&project.id(), WeakModelHandle::id)
-        {
-            self.projects.insert(ix, project);
-        }
-        cx.notify();
-    }
-
-    fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
-        let mut did_change = false;
-        self.projects.retain(|project| {
-            if project.is_upgradable(cx) {
-                true
-            } else {
-                did_change = true;
-                false
-            }
-        });
-        if did_change {
-            cx.notify();
-        }
-    }
-}
-
 impl WorktreeHandle {
     pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
         match self {
@@ -5983,16 +5986,10 @@ impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
     }
 }
 
-impl Entity for ProjectStore {
-    type Event = ();
-}
-
 impl Entity for Project {
     type Event = Event;
 
-    fn release(&mut self, cx: &mut gpui::MutableAppContext) {
-        self.project_store.update(cx, ProjectStore::prune_projects);
-
+    fn release(&mut self, _: &mut gpui::MutableAppContext) {
         match &self.client_state {
             Some(ProjectClientState::Local { remote_id, .. }) => {
                 self.client

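A minimal, self-contained sketch of the disconnect guard added to the GetCompletions, GetCodeActions, and LSP-command request paths earlier in this file's changes: after a remote response arrives, the weak project handle is upgraded and checked for read-only (disconnected) state before the response is applied. The types below are stand-ins for the real gpui handles, not part of this change.

```rust
use anyhow::{anyhow, bail, Result};

// Stand-in for the project model; the real code reads `is_read_only()` via gpui.
struct Project {
    read_only: bool,
}

fn apply_response<T>(project: Option<&Project>, response: T) -> Result<T> {
    // `None` models a failed weak-handle upgrade: the project was dropped mid-request.
    let project = project.ok_or_else(|| anyhow!("project was dropped"))?;
    // A read-only project means the host disconnected, so the response is stale.
    if project.read_only {
        bail!("project was disconnected before the response could be applied");
    }
    Ok(response)
}
```
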
crates/project/src/project_tests.rs 🔗

@@ -2166,7 +2166,11 @@ async fn test_rescan_and_remote_updates(
             proto::WorktreeMetadata {
                 id: initial_snapshot.id().to_proto(),
                 root_name: initial_snapshot.root_name().into(),
-                abs_path: initial_snapshot.abs_path().as_os_str().as_bytes().to_vec(),
+                abs_path: initial_snapshot
+                    .abs_path()
+                    .as_os_str()
+                    .to_string_lossy()
+                    .into(),
                 visible: true,
             },
             rpc.clone(),

crates/project/src/worktree.rs 🔗

@@ -41,7 +41,6 @@ use std::{
     future::Future,
     mem,
     ops::{Deref, DerefMut},
-    os::unix::prelude::{OsStrExt, OsStringExt},
     path::{Path, PathBuf},
     sync::{atomic::AtomicUsize, Arc},
     task::Poll,
@@ -83,6 +82,7 @@ pub struct RemoteWorktree {
     replica_id: ReplicaId,
     diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
     visible: bool,
+    disconnected: bool,
 }
 
 #[derive(Clone)]
@@ -168,7 +168,7 @@ enum ScanState {
 struct ShareState {
     project_id: u64,
     snapshots_tx: watch::Sender<LocalSnapshot>,
-    _maintain_remote_snapshot: Option<Task<Option<()>>>,
+    _maintain_remote_snapshot: Task<Option<()>>,
 }
 
 pub enum Event {
@@ -222,7 +222,7 @@ impl Worktree {
         let root_name = worktree.root_name.clone();
         let visible = worktree.visible;
 
-        let abs_path = PathBuf::from(OsString::from_vec(worktree.abs_path));
+        let abs_path = PathBuf::from(worktree.abs_path);
         let snapshot = Snapshot {
             id: WorktreeId(remote_id as usize),
             abs_path: Arc::from(abs_path.deref()),
@@ -248,6 +248,7 @@ impl Worktree {
                 client: client.clone(),
                 diagnostic_summaries: Default::default(),
                 visible,
+                disconnected: false,
             })
         });
 
@@ -660,7 +661,7 @@ impl LocalWorktree {
             id: self.id().to_proto(),
             root_name: self.root_name().to_string(),
             visible: self.visible,
-            abs_path: self.abs_path().as_os_str().as_bytes().to_vec(),
+            abs_path: self.abs_path().as_os_str().to_string_lossy().into(),
         }
     }
 
@@ -972,11 +973,10 @@ impl LocalWorktree {
             let _ = share_tx.send(Ok(()));
         } else {
             let (snapshots_tx, mut snapshots_rx) = watch::channel_with(self.snapshot());
-            let rpc = self.client.clone();
             let worktree_id = cx.model_id() as u64;
 
             for (path, summary) in self.diagnostic_summaries.iter() {
-                if let Err(e) = rpc.send(proto::UpdateDiagnosticSummary {
+                if let Err(e) = self.client.send(proto::UpdateDiagnosticSummary {
                     project_id,
                     worktree_id,
                     summary: Some(summary.to_proto(&path.0)),
@@ -986,15 +986,14 @@ impl LocalWorktree {
             }
 
             let maintain_remote_snapshot = cx.background().spawn({
-                let rpc = rpc;
-
+                let rpc = self.client.clone();
                 async move {
                     let mut prev_snapshot = match snapshots_rx.recv().await {
                         Some(snapshot) => {
                             let update = proto::UpdateWorktree {
                                 project_id,
                                 worktree_id,
-                                abs_path: snapshot.abs_path().as_os_str().as_bytes().to_vec(),
+                                abs_path: snapshot.abs_path().to_string_lossy().into(),
                                 root_name: snapshot.root_name().to_string(),
                                 updated_entries: snapshot
                                     .entries_by_path
@@ -1034,10 +1033,11 @@ impl LocalWorktree {
                 }
                 .log_err()
             });
+
             self.share = Some(ShareState {
                 project_id,
                 snapshots_tx,
-                _maintain_remote_snapshot: Some(maintain_remote_snapshot),
+                _maintain_remote_snapshot: maintain_remote_snapshot,
             });
         }
 
@@ -1055,25 +1055,6 @@ impl LocalWorktree {
     pub fn is_shared(&self) -> bool {
         self.share.is_some()
     }
-
-    pub fn send_extension_counts(&self, project_id: u64) {
-        let mut extensions = Vec::new();
-        let mut counts = Vec::new();
-
-        for (extension, count) in self.extension_counts() {
-            extensions.push(extension.to_string_lossy().to_string());
-            counts.push(*count as u32);
-        }
-
-        self.client
-            .send(proto::UpdateWorktreeExtensions {
-                project_id,
-                worktree_id: self.id().to_proto(),
-                extensions,
-                counts,
-            })
-            .log_err();
-    }
 }
 
 impl RemoteWorktree {
@@ -1090,6 +1071,7 @@ impl RemoteWorktree {
     pub fn disconnected_from_host(&mut self) {
         self.updates_tx.take();
         self.snapshot_subscriptions.clear();
+        self.disconnected = true;
     }
 
     pub fn update_from_remote(&mut self, update: proto::UpdateWorktree) {
@@ -1104,10 +1086,12 @@ impl RemoteWorktree {
         self.scan_id > scan_id || (self.scan_id == scan_id && self.is_complete)
     }
 
-    fn wait_for_snapshot(&mut self, scan_id: usize) -> impl Future<Output = ()> {
+    fn wait_for_snapshot(&mut self, scan_id: usize) -> impl Future<Output = Result<()>> {
         let (tx, rx) = oneshot::channel();
         if self.observed_snapshot(scan_id) {
             let _ = tx.send(());
+        } else if self.disconnected {
+            drop(tx);
         } else {
             match self
                 .snapshot_subscriptions
@@ -1118,7 +1102,8 @@ impl RemoteWorktree {
         }
 
         async move {
-            let _ = rx.await;
+            rx.await?;
+            Ok(())
         }
     }
 
@@ -1147,7 +1132,7 @@ impl RemoteWorktree {
     ) -> Task<Result<Entry>> {
         let wait_for_snapshot = self.wait_for_snapshot(scan_id);
         cx.spawn(|this, mut cx| async move {
-            wait_for_snapshot.await;
+            wait_for_snapshot.await?;
             this.update(&mut cx, |worktree, _| {
                 let worktree = worktree.as_remote_mut().unwrap();
                 let mut snapshot = worktree.background_snapshot.lock();
@@ -1166,7 +1151,7 @@ impl RemoteWorktree {
     ) -> Task<Result<()>> {
         let wait_for_snapshot = self.wait_for_snapshot(scan_id);
         cx.spawn(|this, mut cx| async move {
-            wait_for_snapshot.await;
+            wait_for_snapshot.await?;
             this.update(&mut cx, |worktree, _| {
                 let worktree = worktree.as_remote_mut().unwrap();
                 let mut snapshot = worktree.background_snapshot.lock();
@@ -1404,7 +1389,7 @@ impl LocalSnapshot {
         proto::UpdateWorktree {
             project_id,
             worktree_id: self.id().to_proto(),
-            abs_path: self.abs_path().as_os_str().as_bytes().to_vec(),
+            abs_path: self.abs_path().to_string_lossy().into(),
             root_name,
             updated_entries: self.entries_by_path.iter().map(Into::into).collect(),
             removed_entries: Default::default(),
@@ -1472,7 +1457,7 @@ impl LocalSnapshot {
         proto::UpdateWorktree {
             project_id,
             worktree_id,
-            abs_path: self.abs_path().as_os_str().as_bytes().to_vec(),
+            abs_path: self.abs_path().to_string_lossy().into(),
             root_name: self.root_name().to_string(),
             updated_entries,
             removed_entries,
@@ -2951,7 +2936,7 @@ impl<'a> From<&'a Entry> for proto::Entry {
         Self {
             id: entry.id.to_proto(),
             is_dir: entry.is_dir(),
-            path: entry.path.as_os_str().as_bytes().to_vec(),
+            path: entry.path.to_string_lossy().into(),
             inode: entry.inode,
             mtime: Some(entry.mtime.into()),
             is_symlink: entry.is_symlink,
@@ -2969,14 +2954,10 @@ impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
                 EntryKind::Dir
             } else {
                 let mut char_bag = *root_char_bag;
-                char_bag.extend(
-                    String::from_utf8_lossy(&entry.path)
-                        .chars()
-                        .map(|c| c.to_ascii_lowercase()),
-                );
+                char_bag.extend(entry.path.chars().map(|c| c.to_ascii_lowercase()));
                 EntryKind::File(char_bag)
             };
-            let path: Arc<Path> = PathBuf::from(OsString::from_vec(entry.path)).into();
+            let path: Arc<Path> = PathBuf::from(entry.path).into();
             Ok(Entry {
                 id: ProjectEntryId::from_proto(entry.id),
                 kind,

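The new `Result`-returning `wait_for_snapshot` above relies on oneshot-channel cancellation to signal disconnection: when the worktree is marked `disconnected`, the sender is dropped without sending and every pending wait resolves to an error. A small sketch of that mechanism, assuming the `futures` oneshot channel:

```rust
use anyhow::Result;
use futures::channel::oneshot;

async fn wait_for_snapshot(disconnected: bool) -> Result<()> {
    let (tx, rx) = oneshot::channel::<()>();
    if disconnected {
        // Dropping the sender without sending makes `rx.await` yield Err(Canceled),
        // which `?` converts into an anyhow error for the caller.
        drop(tx);
    } else {
        let _ = tx.send(());
    }
    rx.await?;
    Ok(())
}
```
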
crates/project_panel/src/project_panel.rs 🔗

@@ -1393,8 +1393,15 @@ mod tests {
         .await;
 
         let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await;
-        let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project.clone(), |_, _| unimplemented!(), cx));
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(
+                Default::default(),
+                0,
+                project.clone(),
+                |_, _| unimplemented!(),
+                cx,
+            )
+        });
         let panel = workspace.update(cx, |_, cx| ProjectPanel::new(project, cx));
         assert_eq!(
             visible_entries_as_strings(&panel, 0..50, cx),
@@ -1486,8 +1493,15 @@ mod tests {
         .await;
 
         let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await;
-        let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project.clone(), |_, _| unimplemented!(), cx));
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(
+                Default::default(),
+                0,
+                project.clone(),
+                |_, _| unimplemented!(),
+                cx,
+            )
+        });
         let panel = workspace.update(cx, |_, cx| ProjectPanel::new(project, cx));
 
         select_path(&panel, "root1", cx);

crates/project_symbols/src/project_symbols.rs 🔗

@@ -1,5 +1,6 @@
 use editor::{
-    combine_syntax_and_fuzzy_match_highlights, styled_runs_for_code_label, Autoscroll, Bias, Editor,
+    combine_syntax_and_fuzzy_match_highlights, scroll::autoscroll::Autoscroll,
+    styled_runs_for_code_label, Bias, Editor,
 };
 use fuzzy::{StringMatch, StringMatchCandidate};
 use gpui::{

crates/rope/Cargo.toml 🔗

@@ -12,7 +12,7 @@ smallvec = { version = "1.6", features = ["union"] }
 sum_tree = { path = "../sum_tree" }
 arrayvec = "0.7.1"
 log = { version = "0.4.16", features = ["kv_unstable_serde"] }
-
+util = { path = "../util" }
 
 [dev-dependencies]
 rand = "0.8.3"

crates/rope/src/rope.rs 🔗

@@ -12,6 +12,7 @@ use std::{
     str,
 };
 use sum_tree::{Bias, Dimension, SumTree};
+use util::debug_panic;
 
 pub use offset_utf16::OffsetUtf16;
 pub use point::Point;
@@ -679,28 +680,33 @@ impl Chunk {
     fn point_to_offset(&self, target: Point) -> usize {
         let mut offset = 0;
         let mut point = Point::new(0, 0);
+
         for ch in self.0.chars() {
             if point >= target {
                 if point > target {
-                    panic!("point {:?} is inside of character {:?}", target, ch);
+                    debug_panic!("point {target:?} is inside of character {ch:?}");
                 }
                 break;
             }
 
             if ch == '\n' {
                 point.row += 1;
+                point.column = 0;
+
                 if point.row > target.row {
-                    panic!(
-                        "point {:?} is beyond the end of a line with length {}",
-                        target, point.column
+                    debug_panic!(
+                        "point {target:?} is beyond the end of a line with length {}",
+                        point.column
                     );
+                    break;
                 }
-                point.column = 0;
             } else {
                 point.column += ch.len_utf8() as u32;
             }
+
             offset += ch.len_utf8();
         }
+
         offset
     }
 
@@ -737,26 +743,27 @@ impl Chunk {
             if ch == '\n' {
                 point.row += 1;
                 point.column = 0;
+
                 if point.row > target.row {
-                    if clip {
-                        // Return the offset of the newline
-                        return offset;
+                    if !clip {
+                        debug_panic!(
+                            "point {target:?} is beyond the end of a line with length {}",
+                            point.column
+                        );
                     }
-                    panic!(
-                        "point {:?} is beyond the end of a line with length {}",
-                        target, point.column
-                    );
+                    // Return the offset of the newline
+                    return offset;
                 }
             } else {
                 point.column += ch.len_utf16() as u32;
             }
 
             if point > target {
-                if clip {
-                    // Return the offset of the codepoint which we have landed within, bias left
-                    return offset;
+                if !clip {
+                    debug_panic!("point {target:?} is inside of codepoint {ch:?}");
                 }
-                panic!("point {:?} is inside of codepoint {:?}", target, ch);
+                // Return the offset of the codepoint which we have landed within, bias left
+                return offset;
             }
 
             offset += ch.len_utf8();

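The `panic!` calls in the point/offset conversions are replaced with `debug_panic!` from the new `util` dependency, so malformed points no longer abort release builds. A hedged sketch of how such a macro is typically defined; the actual `util::debug_panic` may differ:

```rust
// Assumed behavior only: panic in debug builds, log an error in release builds.
macro_rules! debug_panic {
    ($($arg:tt)*) => {
        if cfg!(debug_assertions) {
            panic!($($arg)*);
        } else {
            log::error!($($arg)*);
        }
    };
}
```
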
crates/rpc/proto/zed.proto 🔗

@@ -48,9 +48,7 @@ message Envelope {
         OpenBufferForSymbolResponse open_buffer_for_symbol_response = 40;
 
         UpdateProject update_project = 41;
-        RegisterProjectActivity register_project_activity = 42;
         UpdateWorktree update_worktree = 43;
-        UpdateWorktreeExtensions update_worktree_extensions = 44;
 
         CreateProjectEntry create_project_entry = 45;
         RenameProjectEntry rename_project_entry = 46;
@@ -158,14 +156,12 @@ message JoinRoomResponse {
     optional LiveKitConnectionInfo live_kit_connection_info = 2;
 }
 
-message LeaveRoom {
-    uint64 id = 1;
-}
+message LeaveRoom {}
 
 message Room {
     uint64 id = 1;
     repeated Participant participants = 2;
-    repeated uint64 pending_participant_user_ids = 3;
+    repeated PendingParticipant pending_participants = 3;
     string live_kit_room = 4;
 }
 
@@ -176,6 +172,12 @@ message Participant {
     ParticipantLocation location = 4;
 }
 
+message PendingParticipant {
+    uint64 user_id = 1;
+    uint64 calling_user_id = 2;
+    optional uint64 initial_project_id = 3;
+}
+
 message ParticipantProject {
     uint64 id = 1;
     repeated string worktree_root_names = 2;
@@ -199,13 +201,13 @@ message ParticipantLocation {
 
 message Call {
     uint64 room_id = 1;
-    uint64 recipient_user_id = 2;
+    uint64 called_user_id = 2;
     optional uint64 initial_project_id = 3;
 }
 
 message IncomingCall {
     uint64 room_id = 1;
-    uint64 caller_user_id = 2;
+    uint64 calling_user_id = 2;
     repeated uint64 participant_user_ids = 3;
     optional ParticipantProject initial_project = 4;
 }
@@ -214,7 +216,7 @@ message CallCanceled {}
 
 message CancelCall {
     uint64 room_id = 1;
-    uint64 recipient_user_id = 2;
+    uint64 called_user_id = 2;
 }
 
 message DeclineCall {
@@ -253,10 +255,6 @@ message UpdateProject {
     repeated WorktreeMetadata worktrees = 2;
 }
 
-message RegisterProjectActivity {
-    uint64 project_id = 1;
-}
-
 message JoinProject {
     uint64 project_id = 1;
 }
@@ -280,33 +278,26 @@ message UpdateWorktree {
     repeated uint64 removed_entries = 5;
     uint64 scan_id = 6;
     bool is_last_update = 7;
-    bytes abs_path = 8;
-}
-
-message UpdateWorktreeExtensions {
-    uint64 project_id = 1;
-    uint64 worktree_id = 2;
-    repeated string extensions = 3;
-    repeated uint32 counts = 4;
+    string abs_path = 8;
 }
 
 message CreateProjectEntry {
     uint64 project_id = 1;
     uint64 worktree_id = 2;
-    bytes path = 3;
+    string path = 3;
     bool is_directory = 4;
 }
 
 message RenameProjectEntry {
     uint64 project_id = 1;
     uint64 entry_id = 2;
-    bytes new_path = 3;
+    string new_path = 3;
 }
 
 message CopyProjectEntry {
     uint64 project_id = 1;
     uint64 entry_id = 2;
-    bytes new_path = 3;
+    string new_path = 3;
 }
 
 message DeleteProjectEntry {
@@ -898,7 +889,7 @@ message File {
 message Entry {
     uint64 id = 1;
     bool is_dir = 2;
-    bytes path = 3;
+    string path = 3;
     uint64 inode = 4;
     Timestamp mtime = 5;
     bool is_symlink = 6;
@@ -1093,7 +1084,7 @@ message WorktreeMetadata {
     uint64 id = 1;
     string root_name = 2;
     bool visible = 3;
-    bytes abs_path = 4;
+    string abs_path = 4;
 }
 
 message UpdateDiffBase {

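The `bytes` → `string` change for `path`, `new_path`, and `abs_path` means worktree paths now cross the wire as UTF-8 text instead of raw byte vectors, matching the `to_string_lossy()` / `PathBuf::from` conversions in the Rust diffs above. A small sketch of both directions:

```rust
use std::path::{Path, PathBuf};

// Sending side: non-UTF-8 byte sequences are replaced with U+FFFD by the host.
fn path_to_proto(path: &Path) -> String {
    path.to_string_lossy().into()
}

// Receiving side: the protobuf string converts directly back into a PathBuf.
fn path_from_proto(path: String) -> PathBuf {
    PathBuf::from(path)
}
```
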
crates/rpc/src/peer.rs 🔗

@@ -24,7 +24,7 @@ use std::{
 };
 use tracing::instrument;
 
-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize)]
+#[derive(Clone, Copy, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize)]
 pub struct ConnectionId(pub u32);
 
 impl fmt::Display for ConnectionId {

crates/rpc/src/proto.rs 🔗

@@ -140,12 +140,11 @@ messages!(
     (OpenBufferResponse, Background),
     (PerformRename, Background),
     (PerformRenameResponse, Background),
+    (Ping, Foreground),
     (PrepareRename, Background),
     (PrepareRenameResponse, Background),
     (ProjectEntryResponse, Foreground),
     (RemoveContact, Foreground),
-    (Ping, Foreground),
-    (RegisterProjectActivity, Foreground),
     (ReloadBuffers, Foreground),
     (ReloadBuffersResponse, Foreground),
     (RemoveProjectCollaborator, Foreground),
@@ -175,7 +174,6 @@ messages!(
     (UpdateParticipantLocation, Foreground),
     (UpdateProject, Foreground),
     (UpdateWorktree, Foreground),
-    (UpdateWorktreeExtensions, Background),
     (UpdateDiffBase, Background),
     (GetPrivateUserInfo, Foreground),
     (GetPrivateUserInfoResponse, Foreground),
@@ -231,6 +229,7 @@ request_messages!(
     (Test, Test),
     (UpdateBuffer, Ack),
     (UpdateParticipantLocation, Ack),
+    (UpdateProject, Ack),
     (UpdateWorktree, Ack),
 );
 
@@ -262,7 +261,6 @@ entity_messages!(
     OpenBufferForSymbol,
     PerformRename,
     PrepareRename,
-    RegisterProjectActivity,
     ReloadBuffers,
     RemoveProjectCollaborator,
     RenameProjectEntry,
@@ -278,7 +276,6 @@ entity_messages!(
     UpdateLanguageServer,
     UpdateProject,
     UpdateWorktree,
-    UpdateWorktreeExtensions,
     UpdateDiffBase
 );
 

crates/rpc/src/rpc.rs 🔗

@@ -6,4 +6,4 @@ pub use conn::Connection;
 pub use peer::*;
 mod macros;
 
-pub const PROTOCOL_VERSION: u32 = 39;
+pub const PROTOCOL_VERSION: u32 = 40;

crates/search/src/buffer_search.rs 🔗

@@ -14,8 +14,9 @@ use serde::Deserialize;
 use settings::Settings;
 use std::{any::Any, sync::Arc};
 use workspace::{
+    item::ItemHandle,
     searchable::{Direction, SearchEvent, SearchableItemHandle, WeakSearchableItemHandle},
-    ItemHandle, Pane, ToolbarItemLocation, ToolbarItemView,
+    Pane, ToolbarItemLocation, ToolbarItemView,
 };
 
 #[derive(Clone, Deserialize, PartialEq)]

crates/search/src/project_search.rs 🔗

@@ -4,8 +4,8 @@ use crate::{
 };
 use collections::HashMap;
 use editor::{
-    items::active_match_index, Anchor, Autoscroll, Editor, MultiBuffer, SelectAll,
-    MAX_TAB_TITLE_LEN,
+    items::active_match_index, scroll::autoscroll::Autoscroll, Anchor, Editor, MultiBuffer,
+    SelectAll, MAX_TAB_TITLE_LEN,
 };
 use gpui::{
     actions, elements::*, platform::CursorStyle, Action, AnyViewHandle, AppContext, ElementBox,
@@ -24,9 +24,9 @@ use std::{
 };
 use util::ResultExt as _;
 use workspace::{
+    item::{Item, ItemEvent, ItemHandle},
     searchable::{Direction, SearchableItem, SearchableItemHandle},
-    Item, ItemEvent, ItemHandle, ItemNavHistory, Pane, ToolbarItemLocation, ToolbarItemView,
-    Workspace,
+    ItemNavHistory, Pane, ToolbarItemLocation, ToolbarItemView, Workspace, WorkspaceId,
 };
 
 actions!(project_search, [SearchInNew, ToggleFocus]);
@@ -315,7 +315,7 @@ impl Item for ProjectSearchView {
             .update(cx, |editor, cx| editor.reload(project, cx))
     }
 
-    fn clone_on_split(&self, cx: &mut ViewContext<Self>) -> Option<Self>
+    fn clone_on_split(&self, _workspace_id: WorkspaceId, cx: &mut ViewContext<Self>) -> Option<Self>
     where
         Self: Sized,
     {
@@ -353,6 +353,20 @@ impl Item for ProjectSearchView {
     fn breadcrumbs(&self, theme: &theme::Theme, cx: &AppContext) -> Option<Vec<ElementBox>> {
         self.results_editor.breadcrumbs(theme, cx)
     }
+
+    fn serialized_item_kind() -> Option<&'static str> {
+        None
+    }
+
+    fn deserialize(
+        _project: ModelHandle<Project>,
+        _workspace: WeakViewHandle<Workspace>,
+        _workspace_id: workspace::WorkspaceId,
+        _item_id: workspace::ItemId,
+        _cx: &mut ViewContext<Pane>,
+    ) -> Task<anyhow::Result<ViewHandle<Self>>> {
+        unimplemented!()
+    }
 }
 
 impl ProjectSearchView {
@@ -893,7 +907,7 @@ impl View for ProjectSearchBar {
 impl ToolbarItemView for ProjectSearchBar {
     fn set_active_pane_item(
         &mut self,
-        active_pane_item: Option<&dyn workspace::ItemHandle>,
+        active_pane_item: Option<&dyn ItemHandle>,
         cx: &mut ViewContext<Self>,
     ) -> ToolbarItemLocation {
         cx.notify();

crates/settings/Cargo.toml 🔗

@@ -14,6 +14,7 @@ test-support = []
 assets = { path = "../assets" }
 collections = { path = "../collections" }
 gpui = { path = "../gpui" }
+sqlez = { path = "../sqlez" }
 fs = { path = "../fs" }
 anyhow = "1.0.38"
 futures = "0.3"

crates/settings/src/settings.rs 🔗

@@ -2,7 +2,7 @@ mod keymap_file;
 pub mod settings_file;
 pub mod watched_json;
 
-use anyhow::Result;
+use anyhow::{bail, Result};
 use gpui::{
     font_cache::{FamilyId, FontCache},
     AssetSource,
@@ -14,6 +14,10 @@ use schemars::{
 };
 use serde::{de::DeserializeOwned, Deserialize, Serialize};
 use serde_json::Value;
+use sqlez::{
+    bindable::{Bind, Column},
+    statement::Statement,
+};
 use std::{collections::HashMap, fmt::Write as _, num::NonZeroU32, str, sync::Arc};
 use theme::{Theme, ThemeRegistry};
 use tree_sitter::Query;
@@ -55,24 +59,6 @@ pub struct FeatureFlags {
     pub experimental_themes: bool,
 }
 
-#[derive(Copy, Clone, PartialEq, Eq, Default)]
-pub enum ReleaseChannel {
-    #[default]
-    Dev,
-    Preview,
-    Stable,
-}
-
-impl ReleaseChannel {
-    pub fn name(&self) -> &'static str {
-        match self {
-            ReleaseChannel::Dev => "Zed Dev",
-            ReleaseChannel::Preview => "Zed Preview",
-            ReleaseChannel::Stable => "Zed",
-        }
-    }
-}
-
 impl FeatureFlags {
     pub fn keymap_files(&self) -> Vec<&'static str> {
         vec![]
@@ -213,7 +199,7 @@ impl Default for Shell {
     }
 }
 
-#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
 #[serde(rename_all = "snake_case")]
 pub enum AlternateScroll {
     On,
@@ -235,6 +221,12 @@ pub enum WorkingDirectory {
     Always { directory: String },
 }
 
+impl Default for WorkingDirectory {
+    fn default() -> Self {
+        Self::CurrentProjectDirectory
+    }
+}
+
 #[derive(PartialEq, Eq, Debug, Default, Copy, Clone, Hash, Serialize, Deserialize, JsonSchema)]
 #[serde(rename_all = "snake_case")]
 pub enum DockAnchor {
@@ -244,6 +236,33 @@ pub enum DockAnchor {
     Expanded,
 }
 
+impl Bind for DockAnchor {
+    fn bind(&self, statement: &Statement, start_index: i32) -> anyhow::Result<i32> {
+        match self {
+            DockAnchor::Bottom => "Bottom",
+            DockAnchor::Right => "Right",
+            DockAnchor::Expanded => "Expanded",
+        }
+        .bind(statement, start_index)
+    }
+}
+
+impl Column for DockAnchor {
+    fn column(statement: &mut Statement, start_index: i32) -> anyhow::Result<(Self, i32)> {
+        String::column(statement, start_index).and_then(|(anchor_text, next_index)| {
+            Ok((
+                match anchor_text.as_ref() {
+                    "Bottom" => DockAnchor::Bottom,
+                    "Right" => DockAnchor::Right,
+                    "Expanded" => DockAnchor::Expanded,
+                    _ => bail!("Stored dock anchor is incorrect"),
+                },
+                next_index,
+            ))
+        })
+    }
+}
+
 #[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
 pub struct SettingsFileContent {
     pub experiments: Option<FeatureFlags>,
@@ -460,6 +479,32 @@ impl Settings {
         })
     }
 
+    fn terminal_setting<F, R: Default + Clone>(&self, f: F) -> R
+    where
+        F: Fn(&TerminalSettings) -> Option<&R>,
+    {
+        f(&self.terminal_overrides)
+            .or_else(|| f(&self.terminal_defaults))
+            .cloned()
+            .unwrap_or_else(|| R::default())
+    }
+
+    pub fn terminal_scroll(&self) -> AlternateScroll {
+        self.terminal_setting(|terminal_setting| terminal_setting.alternate_scroll.as_ref())
+    }
+
+    pub fn terminal_shell(&self) -> Shell {
+        self.terminal_setting(|terminal_setting| terminal_setting.shell.as_ref())
+    }
+
+    pub fn terminal_env(&self) -> HashMap<String, String> {
+        self.terminal_setting(|terminal_setting| terminal_setting.env.as_ref())
+    }
+
+    pub fn terminal_strategy(&self) -> WorkingDirectory {
+        self.terminal_setting(|terminal_setting| terminal_setting.working_directory.as_ref())
+    }
+
     #[cfg(any(test, feature = "test-support"))]
     pub fn test(cx: &gpui::AppContext) -> Settings {
         Settings {

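The new `Bind`/`Column` impls store a `DockAnchor` as its variant name in a TEXT column. A hedged usage sketch in the style of the sqlez tests elsewhere in this change; the table name and module paths are assumptions:

```rust
use settings::DockAnchor;
use sqlez::connection::Connection;

fn dock_anchor_round_trip() -> anyhow::Result<()> {
    let connection = Connection::open_memory(Some("dock_anchor_round_trip"));
    connection.exec("CREATE TABLE docks (anchor TEXT);")?()?;
    connection.exec_bound("INSERT INTO docks (anchor) VALUES (?);")?(DockAnchor::Expanded)?;

    // Reads back through `Column for DockAnchor`, mapping "Expanded" to the enum variant.
    let anchor = connection.select_row::<DockAnchor>("SELECT anchor FROM docks;")?()?;
    assert_eq!(anchor, Some(DockAnchor::Expanded));
    Ok(())
}
```
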
crates/sqlez/Cargo.lock 🔗

@@ -0,0 +1,150 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "addr2line"
+version = "0.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b"
+dependencies = [
+ "gimli",
+]
+
+[[package]]
+name = "adler"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+
+[[package]]
+name = "anyhow"
+version = "1.0.66"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "216261ddc8289130e551ddcd5ce8a064710c0d064a4d2895c67151c92b5443f6"
+dependencies = [
+ "backtrace",
+]
+
+[[package]]
+name = "backtrace"
+version = "0.3.66"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7"
+dependencies = [
+ "addr2line",
+ "cc",
+ "cfg-if",
+ "libc",
+ "miniz_oxide",
+ "object",
+ "rustc-demangle",
+]
+
+[[package]]
+name = "cc"
+version = "1.0.73"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11"
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "gimli"
+version = "0.26.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d"
+
+[[package]]
+name = "indoc"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "adab1eaa3408fb7f0c777a73e7465fd5656136fc93b670eb6df3c88c2c1344e3"
+
+[[package]]
+name = "libc"
+version = "0.2.137"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fc7fcc620a3bff7cdd7a365be3376c97191aeaccc2a603e600951e452615bf89"
+
+[[package]]
+name = "libsqlite3-sys"
+version = "0.25.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "29f835d03d717946d28b1d1ed632eb6f0e24a299388ee623d0c23118d3e8a7fa"
+dependencies = [
+ "cc",
+ "pkg-config",
+ "vcpkg",
+]
+
+[[package]]
+name = "memchr"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
+
+[[package]]
+name = "miniz_oxide"
+version = "0.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34"
+dependencies = [
+ "adler",
+]
+
+[[package]]
+name = "object"
+version = "0.29.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1"
+
+[[package]]
+name = "pkg-config"
+version = "0.3.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160"
+
+[[package]]
+name = "rustc-demangle"
+version = "0.1.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342"
+
+[[package]]
+name = "sqlez"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "indoc",
+ "libsqlite3-sys",
+ "thread_local",
+]
+
+[[package]]
+name = "thread_local"
+version = "1.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180"
+dependencies = [
+ "once_cell",
+]
+
+[[package]]
+name = "vcpkg"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"

crates/sqlez/Cargo.toml 🔗

@@ -0,0 +1,16 @@
+[package]
+name = "sqlez"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+anyhow = { version = "1.0.38", features = ["backtrace"] }
+indoc = "1.0.7"
+libsqlite3-sys = { version = "0.24", features = ["bundled"] }
+smol = "1.2"
+thread_local = "1.1.4"
+lazy_static = "1.4"
+parking_lot = "0.11.1"
+futures = "0.3"

crates/sqlez/src/bindable.rs 🔗

@@ -0,0 +1,352 @@
+use std::{
+    ffi::OsStr,
+    os::unix::prelude::OsStrExt,
+    path::{Path, PathBuf},
+    sync::Arc,
+};
+
+use anyhow::{Context, Result};
+
+use crate::statement::{SqlType, Statement};
+
+pub trait Bind {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32>;
+}
+
+pub trait Column: Sized {
+    fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)>;
+}
+
+impl Bind for bool {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        statement
+            .bind(self.then_some(1).unwrap_or(0), start_index)
+            .with_context(|| format!("Failed to bind bool at index {start_index}"))
+    }
+}
+
+impl Column for bool {
+    fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
+        i32::column(statement, start_index)
+            .map(|(i, next_index)| (i != 0, next_index))
+            .with_context(|| format!("Failed to read bool at index {start_index}"))
+    }
+}
+
+impl Bind for &[u8] {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        statement
+            .bind_blob(start_index, self)
+            .with_context(|| format!("Failed to bind &[u8] at index {start_index}"))?;
+        Ok(start_index + 1)
+    }
+}
+
+impl<const C: usize> Bind for &[u8; C] {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        statement
+            .bind_blob(start_index, self.as_slice())
+            .with_context(|| format!("Failed to bind &[u8; C] at index {start_index}"))?;
+        Ok(start_index + 1)
+    }
+}
+
+impl Bind for Vec<u8> {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        statement
+            .bind_blob(start_index, self)
+            .with_context(|| format!("Failed to bind Vec<u8> at index {start_index}"))?;
+        Ok(start_index + 1)
+    }
+}
+
+impl Column for Vec<u8> {
+    fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
+        let result = statement
+            .column_blob(start_index)
+            .with_context(|| format!("Failed to read Vec<u8> at index {start_index}"))?;
+
+        Ok((Vec::from(result), start_index + 1))
+    }
+}
+
+impl Bind for f64 {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        statement
+            .bind_double(start_index, *self)
+            .with_context(|| format!("Failed to bind f64 at index {start_index}"))?;
+        Ok(start_index + 1)
+    }
+}
+
+impl Column for f64 {
+    fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
+        let result = statement
+            .column_double(start_index)
+            .with_context(|| format!("Failed to parse f64 at index {start_index}"))?;
+
+        Ok((result, start_index + 1))
+    }
+}
+
+impl Bind for i32 {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        statement
+            .bind_int(start_index, *self)
+            .with_context(|| format!("Failed to bind i32 at index {start_index}"))?;
+
+        Ok(start_index + 1)
+    }
+}
+
+impl Column for i32 {
+    fn column<'a>(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
+        let result = statement.column_int(start_index)?;
+        Ok((result, start_index + 1))
+    }
+}
+
+impl Bind for i64 {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        statement
+            .bind_int64(start_index, *self)
+            .with_context(|| format!("Failed to bind i64 at index {start_index}"))?;
+        Ok(start_index + 1)
+    }
+}
+
+impl Column for i64 {
+    fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
+        let result = statement.column_int64(start_index)?;
+        Ok((result, start_index + 1))
+    }
+}
+
+impl Bind for usize {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        (*self as i64)
+            .bind(statement, start_index)
+            .with_context(|| format!("Failed to bind usize at index {start_index}"))
+    }
+}
+
+impl Column for usize {
+    fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
+        let result = statement.column_int64(start_index)?;
+        Ok((result as usize, start_index + 1))
+    }
+}
+
+impl Bind for &str {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        statement.bind_text(start_index, self)?;
+        Ok(start_index + 1)
+    }
+}
+
+impl Bind for Arc<str> {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        statement.bind_text(start_index, self.as_ref())?;
+        Ok(start_index + 1)
+    }
+}
+
+impl Bind for String {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        statement.bind_text(start_index, self)?;
+        Ok(start_index + 1)
+    }
+}
+
+impl Column for Arc<str> {
+    fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
+        let result = statement.column_text(start_index)?;
+        Ok((Arc::from(result), start_index + 1))
+    }
+}
+
+impl Column for String {
+    fn column<'a>(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
+        let result = statement.column_text(start_index)?;
+        Ok((result.to_owned(), start_index + 1))
+    }
+}
+
+impl<T: Bind> Bind for Option<T> {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        if let Some(this) = self {
+            this.bind(statement, start_index)
+        } else {
+            statement.bind_null(start_index)?;
+            Ok(start_index + 1)
+        }
+    }
+}
+
+impl<T: Column> Column for Option<T> {
+    fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
+        if let SqlType::Null = statement.column_type(start_index)? {
+            Ok((None, start_index + 1))
+        } else {
+            T::column(statement, start_index).map(|(result, next_index)| (Some(result), next_index))
+        }
+    }
+}
+
+impl<T: Bind, const COUNT: usize> Bind for [T; COUNT] {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        let mut current_index = start_index;
+        for binding in self {
+            current_index = binding.bind(statement, current_index)?
+        }
+
+        Ok(current_index)
+    }
+}
+
+impl<T: Column + Default + Copy, const COUNT: usize> Column for [T; COUNT] {
+    fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
+        let mut array = [Default::default(); COUNT];
+        let mut current_index = start_index;
+        for i in 0..COUNT {
+            (array[i], current_index) = T::column(statement, current_index)?;
+        }
+        Ok((array, current_index))
+    }
+}
+
+impl<T: Bind> Bind for Vec<T> {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        let mut current_index = start_index;
+        for binding in self.iter() {
+            current_index = binding.bind(statement, current_index)?
+        }
+
+        Ok(current_index)
+    }
+}
+
+impl<T: Bind> Bind for &[T] {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        let mut current_index = start_index;
+        for binding in *self {
+            current_index = binding.bind(statement, current_index)?
+        }
+
+        Ok(current_index)
+    }
+}
+
+impl Bind for &Path {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        self.as_os_str().as_bytes().bind(statement, start_index)
+    }
+}
+
+impl Bind for Arc<Path> {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        self.as_ref().bind(statement, start_index)
+    }
+}
+
+impl Bind for PathBuf {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        (self.as_ref() as &Path).bind(statement, start_index)
+    }
+}
+
+impl Column for PathBuf {
+    fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
+        let blob = statement.column_blob(start_index)?;
+
+        Ok((
+            PathBuf::from(OsStr::from_bytes(blob).to_owned()),
+            start_index + 1,
+        ))
+    }
+}
+
+/// Unit impls do nothing. This simplifies query macros.
+impl Bind for () {
+    fn bind(&self, _statement: &Statement, start_index: i32) -> Result<i32> {
+        Ok(start_index)
+    }
+}
+
+impl Column for () {
+    fn column(_statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
+        Ok(((), start_index))
+    }
+}
+
+impl<T1: Bind, T2: Bind> Bind for (T1, T2) {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        let next_index = self.0.bind(statement, start_index)?;
+        self.1.bind(statement, next_index)
+    }
+}
+
+impl<T1: Column, T2: Column> Column for (T1, T2) {
+    fn column<'a>(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
+        let (first, next_index) = T1::column(statement, start_index)?;
+        let (second, next_index) = T2::column(statement, next_index)?;
+        Ok(((first, second), next_index))
+    }
+}
+
+impl<T1: Bind, T2: Bind, T3: Bind> Bind for (T1, T2, T3) {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        let next_index = self.0.bind(statement, start_index)?;
+        let next_index = self.1.bind(statement, next_index)?;
+        self.2.bind(statement, next_index)
+    }
+}
+
+impl<T1: Column, T2: Column, T3: Column> Column for (T1, T2, T3) {
+    fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
+        let (first, next_index) = T1::column(statement, start_index)?;
+        let (second, next_index) = T2::column(statement, next_index)?;
+        let (third, next_index) = T3::column(statement, next_index)?;
+        Ok(((first, second, third), next_index))
+    }
+}
+
+impl<T1: Bind, T2: Bind, T3: Bind, T4: Bind> Bind for (T1, T2, T3, T4) {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        let next_index = self.0.bind(statement, start_index)?;
+        let next_index = self.1.bind(statement, next_index)?;
+        let next_index = self.2.bind(statement, next_index)?;
+        self.3.bind(statement, next_index)
+    }
+}
+
+impl<T1: Column, T2: Column, T3: Column, T4: Column> Column for (T1, T2, T3, T4) {
+    fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
+        let (first, next_index) = T1::column(statement, start_index)?;
+        let (second, next_index) = T2::column(statement, next_index)?;
+        let (third, next_index) = T3::column(statement, next_index)?;
+        let (fourth, next_index) = T4::column(statement, next_index)?;
+        Ok(((first, second, third, fourth), next_index))
+    }
+}
+
+impl<T1: Bind, T2: Bind, T3: Bind, T4: Bind, T5: Bind> Bind for (T1, T2, T3, T4, T5) {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        let next_index = self.0.bind(statement, start_index)?;
+        let next_index = self.1.bind(statement, next_index)?;
+        let next_index = self.2.bind(statement, next_index)?;
+        let next_index = self.3.bind(statement, next_index)?;
+        self.4.bind(statement, next_index)
+    }
+}
+
+impl<T1: Column, T2: Column, T3: Column, T4: Column, T5: Column> Column for (T1, T2, T3, T4, T5) {
+    fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
+        let (first, next_index) = T1::column(statement, start_index)?;
+        let (second, next_index) = T2::column(statement, next_index)?;
+        let (third, next_index) = T3::column(statement, next_index)?;
+        let (fourth, next_index) = T4::column(statement, next_index)?;
+        let (fifth, next_index) = T5::column(statement, next_index)?;
+        Ok(((first, second, third, fourth, fifth), next_index))
+    }
+}

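Because every `bind`/`column` impl returns the next free parameter index, application types can compose the primitives and tuples above without tracking indices by hand. A hedged sketch with a hypothetical struct (not part of this change); module paths assumed:

```rust
use anyhow::Result;
use sqlez::{
    bindable::{Bind, Column},
    statement::Statement,
};

struct WindowSize {
    width: i32,
    height: i32,
}

impl Bind for WindowSize {
    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
        // Delegate to the (i32, i32) tuple impl; it returns the next free index.
        (self.width, self.height).bind(statement, start_index)
    }
}

impl Column for WindowSize {
    fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
        let ((width, height), next_index) = <(i32, i32)>::column(statement, start_index)?;
        Ok((WindowSize { width, height }, next_index))
    }
}
```
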
crates/sqlez/src/connection.rs 🔗

@@ -0,0 +1,334 @@
+use std::{
+    cell::RefCell,
+    ffi::{CStr, CString},
+    marker::PhantomData,
+    path::Path,
+    ptr,
+};
+
+use anyhow::{anyhow, Result};
+use libsqlite3_sys::*;
+
+pub struct Connection {
+    pub(crate) sqlite3: *mut sqlite3,
+    persistent: bool,
+    pub(crate) write: RefCell<bool>,
+    _sqlite: PhantomData<sqlite3>,
+}
+unsafe impl Send for Connection {}
+
+impl Connection {
+    pub(crate) fn open(uri: &str, persistent: bool) -> Result<Self> {
+        let mut connection = Self {
+            sqlite3: 0 as *mut _,
+            persistent,
+            write: RefCell::new(true),
+            _sqlite: PhantomData,
+        };
+
+        let flags = SQLITE_OPEN_CREATE | SQLITE_OPEN_NOMUTEX | SQLITE_OPEN_READWRITE;
+        unsafe {
+            sqlite3_open_v2(
+                CString::new(uri)?.as_ptr(),
+                &mut connection.sqlite3,
+                flags,
+                0 as *const _,
+            );
+
+            // Turn on extended error codes
+            sqlite3_extended_result_codes(connection.sqlite3, 1);
+
+            connection.last_error()?;
+        }
+
+        Ok(connection)
+    }
+
+    /// Attempts to open the database at uri. If it fails, a shared memory db will be opened
+    /// instead.
+    pub fn open_file(uri: &str) -> Self {
+        Self::open(uri, true).unwrap_or_else(|_| Self::open_memory(Some(uri)))
+    }
+
+    pub fn open_memory(uri: Option<&str>) -> Self {
+        let in_memory_path = if let Some(uri) = uri {
+            format!("file:{}?mode=memory&cache=shared", uri)
+        } else {
+            ":memory:".to_string()
+        };
+
+        Self::open(&in_memory_path, false).expect("Could not create fallback in memory db")
+    }
+
+    pub fn persistent(&self) -> bool {
+        self.persistent
+    }
+
+    pub fn can_write(&self) -> bool {
+        *self.write.borrow()
+    }
+
+    pub fn backup_main(&self, destination: &Connection) -> Result<()> {
+        unsafe {
+            let backup = sqlite3_backup_init(
+                destination.sqlite3,
+                CString::new("main")?.as_ptr(),
+                self.sqlite3,
+                CString::new("main")?.as_ptr(),
+            );
+            sqlite3_backup_step(backup, -1);
+            sqlite3_backup_finish(backup);
+            destination.last_error()
+        }
+    }
+
+    pub fn backup_main_to(&self, destination: impl AsRef<Path>) -> Result<()> {
+        let destination = Self::open_file(destination.as_ref().to_string_lossy().as_ref());
+        self.backup_main(&destination)
+    }
+
+    pub fn sql_has_syntax_error(&self, sql: &str) -> Option<(String, usize)> {
+        let sql = CString::new(sql).unwrap();
+        let mut remaining_sql = sql.as_c_str();
+        let sql_start = remaining_sql.as_ptr();
+
+        unsafe {
+            while {
+                let remaining_sql_str = remaining_sql.to_str().unwrap().trim();
+                remaining_sql_str != ";" && !remaining_sql_str.is_empty()
+            } {
+                let mut raw_statement = 0 as *mut sqlite3_stmt;
+                let mut remaining_sql_ptr = ptr::null();
+                sqlite3_prepare_v2(
+                    self.sqlite3,
+                    remaining_sql.as_ptr(),
+                    -1,
+                    &mut raw_statement,
+                    &mut remaining_sql_ptr,
+                );
+
+                let res = sqlite3_errcode(self.sqlite3);
+                let offset = sqlite3_error_offset(self.sqlite3);
+                let message = sqlite3_errmsg(self.sqlite3);
+
+                sqlite3_finalize(raw_statement);
+
+                if res == 1 && offset >= 0 {
+                    let err_msg =
+                        String::from_utf8_lossy(CStr::from_ptr(message as *const _).to_bytes())
+                            .into_owned();
+                    let sub_statement_correction =
+                        remaining_sql.as_ptr() as usize - sql_start as usize;
+
+                    return Some((err_msg, offset as usize + sub_statement_correction));
+                }
+                remaining_sql = CStr::from_ptr(remaining_sql_ptr);
+            }
+        }
+        None
+    }
+
+    pub(crate) fn last_error(&self) -> Result<()> {
+        unsafe {
+            let code = sqlite3_errcode(self.sqlite3);
+            const NON_ERROR_CODES: &[i32] = &[SQLITE_OK, SQLITE_ROW];
+            if NON_ERROR_CODES.contains(&code) {
+                return Ok(());
+            }
+
+            let message = sqlite3_errmsg(self.sqlite3);
+            let message = if message.is_null() {
+                None
+            } else {
+                Some(
+                    String::from_utf8_lossy(CStr::from_ptr(message as *const _).to_bytes())
+                        .into_owned(),
+                )
+            };
+
+            Err(anyhow!(
+                "Sqlite call failed with code {} and message: {:?}",
+                code as isize,
+                message
+            ))
+        }
+    }
+
+    pub(crate) fn with_write<T>(&self, callback: impl FnOnce(&Connection) -> T) -> T {
+        *self.write.borrow_mut() = true;
+        let result = callback(self);
+        *self.write.borrow_mut() = false;
+        result
+    }
+}
+
+impl Drop for Connection {
+    fn drop(&mut self) {
+        unsafe { sqlite3_close(self.sqlite3) };
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use anyhow::Result;
+    use indoc::indoc;
+
+    use crate::connection::Connection;
+
+    #[test]
+    fn string_round_trips() -> Result<()> {
+        let connection = Connection::open_memory(Some("string_round_trips"));
+        connection
+            .exec(indoc! {"
+            CREATE TABLE text (
+                text TEXT
+            );"})
+            .unwrap()()
+        .unwrap();
+
+        let text = "Some test text";
+
+        connection
+            .exec_bound("INSERT INTO text (text) VALUES (?);")
+            .unwrap()(text)
+        .unwrap();
+
+        assert_eq!(
+            connection.select_row("SELECT text FROM text;").unwrap()().unwrap(),
+            Some(text.to_string())
+        );
+
+        Ok(())
+    }
+
+    #[test]
+    fn tuple_round_trips() {
+        let connection = Connection::open_memory(Some("tuple_round_trips"));
+        connection
+            .exec(indoc! {"
+                CREATE TABLE test (
+                    text TEXT,
+                    integer INTEGER,
+                    blob BLOB
+                );"})
+            .unwrap()()
+        .unwrap();
+
+        let tuple1 = ("test".to_string(), 64, vec![0, 1, 2, 4, 8, 16, 32, 64]);
+        let tuple2 = ("test2".to_string(), 32, vec![64, 32, 16, 8, 4, 2, 1, 0]);
+
+        let mut insert = connection
+            .exec_bound::<(String, usize, Vec<u8>)>(
+                "INSERT INTO test (text, integer, blob) VALUES (?, ?, ?)",
+            )
+            .unwrap();
+
+        insert(tuple1.clone()).unwrap();
+        insert(tuple2.clone()).unwrap();
+
+        assert_eq!(
+            connection
+                .select::<(String, usize, Vec<u8>)>("SELECT * FROM test")
+                .unwrap()()
+            .unwrap(),
+            vec![tuple1, tuple2]
+        );
+    }
+
+    #[test]
+    fn bool_round_trips() {
+        let connection = Connection::open_memory(Some("bool_round_trips"));
+        connection
+            .exec(indoc! {"
+                CREATE TABLE bools (
+                    t INTEGER,
+                    f INTEGER
+                );"})
+            .unwrap()()
+        .unwrap();
+
+        connection
+            .exec_bound("INSERT INTO bools(t, f) VALUES (?, ?)")
+            .unwrap()((true, false))
+        .unwrap();
+
+        assert_eq!(
+            connection
+                .select_row::<(bool, bool)>("SELECT * FROM bools;")
+                .unwrap()()
+            .unwrap(),
+            Some((true, false))
+        );
+    }
+
+    #[test]
+    fn backup_works() {
+        let connection1 = Connection::open_memory(Some("backup_works"));
+        connection1
+            .exec(indoc! {"
+                CREATE TABLE blobs (
+                    data BLOB
+                );"})
+            .unwrap()()
+        .unwrap();
+        let blob = vec![0, 1, 2, 4, 8, 16, 32, 64];
+        connection1
+            .exec_bound::<Vec<u8>>("INSERT INTO blobs (data) VALUES (?);")
+            .unwrap()(blob.clone())
+        .unwrap();
+
+        // Backup connection1 to connection2
+        let connection2 = Connection::open_memory(Some("backup_works_other"));
+        connection1.backup_main(&connection2).unwrap();
+
+        // Verify that the blob is present in the backed-up database
+        let read_blobs = connection2
+            .select::<Vec<u8>>("SELECT * FROM blobs;")
+            .unwrap()()
+        .unwrap();
+        assert_eq!(read_blobs, vec![blob]);
+    }
+
+    #[test]
+    fn multi_step_statement_works() {
+        let connection = Connection::open_memory(Some("multi_step_statement_works"));
+
+        connection
+            .exec(indoc! {"
+                CREATE TABLE test (
+                    col INTEGER
+                )"})
+            .unwrap()()
+        .unwrap();
+
+        connection
+            .exec(indoc! {"
+            INSERT INTO test(col) VALUES (2)"})
+            .unwrap()()
+        .unwrap();
+
+        assert_eq!(
+            connection
+                .select_row::<usize>("SELECT * FROM test")
+                .unwrap()()
+            .unwrap(),
+            Some(2)
+        );
+    }
+
+    #[test]
+    fn test_sql_has_syntax_errors() {
+        let connection = Connection::open_memory(Some("test_sql_has_syntax_errors"));
+        let first_stmt =
+            "CREATE TABLE kv_store(key TEXT PRIMARY KEY, value TEXT NOT NULL) STRICT ;";
+        let second_stmt = "SELECT FROM";
+
+        let second_offset = connection.sql_has_syntax_error(second_stmt).unwrap().1;
+
+        let res = connection
+            .sql_has_syntax_error(&format!("{}\n{}", first_stmt, second_stmt))
+            .map(|(_, offset)| offset);
+
+        assert_eq!(res, Some(first_stmt.len() + second_offset + 1));
+    }
+}
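
A minimal usage sketch for the `backup_main_to` and `sql_has_syntax_error` APIs defined in this file; the destination path and the malformed query are hypothetical, chosen only for illustration:

```rust
use sqlez::connection::Connection;

fn backup_and_check(connection: &Connection) -> anyhow::Result<()> {
    // Copy the "main" database into a file on disk (path is hypothetical).
    connection.backup_main_to("/tmp/backup-sketch.db")?;

    // Check a query for syntax errors without executing it.
    if let Some((message, offset)) = connection.sql_has_syntax_error("SELECT FROM") {
        eprintln!("syntax error at byte {offset}: {message}");
    }
    Ok(())
}
```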

crates/sqlez/src/domain.rs 🔗

@@ -0,0 +1,56 @@
+use crate::connection::Connection;
+
+pub trait Domain: 'static {
+    fn name() -> &'static str;
+    fn migrations() -> &'static [&'static str];
+}
+
+pub trait Migrator: 'static {
+    fn migrate(connection: &Connection) -> anyhow::Result<()>;
+}
+
+impl Migrator for () {
+    fn migrate(_connection: &Connection) -> anyhow::Result<()> {
+        Ok(()) // Do nothing
+    }
+}
+
+impl<D: Domain> Migrator for D {
+    fn migrate(connection: &Connection) -> anyhow::Result<()> {
+        connection.migrate(Self::name(), Self::migrations())
+    }
+}
+
+impl<D1: Domain, D2: Domain> Migrator for (D1, D2) {
+    fn migrate(connection: &Connection) -> anyhow::Result<()> {
+        D1::migrate(connection)?;
+        D2::migrate(connection)
+    }
+}
+
+impl<D1: Domain, D2: Domain, D3: Domain> Migrator for (D1, D2, D3) {
+    fn migrate(connection: &Connection) -> anyhow::Result<()> {
+        D1::migrate(connection)?;
+        D2::migrate(connection)?;
+        D3::migrate(connection)
+    }
+}
+
+impl<D1: Domain, D2: Domain, D3: Domain, D4: Domain> Migrator for (D1, D2, D3, D4) {
+    fn migrate(connection: &Connection) -> anyhow::Result<()> {
+        D1::migrate(connection)?;
+        D2::migrate(connection)?;
+        D3::migrate(connection)?;
+        D4::migrate(connection)
+    }
+}
+
+impl<D1: Domain, D2: Domain, D3: Domain, D4: Domain, D5: Domain> Migrator for (D1, D2, D3, D4, D5) {
+    fn migrate(connection: &Connection) -> anyhow::Result<()> {
+        D1::migrate(connection)?;
+        D2::migrate(connection)?;
+        D3::migrate(connection)?;
+        D4::migrate(connection)?;
+        D5::migrate(connection)
+    }
+}
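
A minimal sketch of implementing `Domain` for a hypothetical `Notes` domain; the blanket impls above then let tuples of domains compose into a single `Migrator`:

```rust
use sqlez::domain::Domain;

// Hypothetical domain used for illustration only.
enum Notes {}

impl Domain for Notes {
    fn name() -> &'static str {
        "notes"
    }

    fn migrations() -> &'static [&'static str] {
        &["CREATE TABLE notes(id INTEGER PRIMARY KEY, body TEXT) STRICT;"]
    }
}

// Several domains can share one connection by migrating as a tuple,
// e.g. ThreadSafeConnection<(Notes, OtherDomain)>.
```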

crates/sqlez/src/lib.rs 🔗

@@ -0,0 +1,11 @@
+pub mod bindable;
+pub mod connection;
+pub mod domain;
+pub mod migrations;
+pub mod savepoint;
+pub mod statement;
+pub mod thread_safe_connection;
+pub mod typed_statements;
+mod util;
+
+pub use anyhow;

crates/sqlez/src/migrations.rs 🔗

@@ -0,0 +1,260 @@
+// Migrations are constructed by domain and stored in a `migrations` table in the connection's db,
+// recording the domain name, the step order, and the exact migration text.
+// If migrations are re-run and any of the stored query texts don't match, the migration fails with
+// an error (callers may fall back to creating a new db).
+// Otherwise any missing migrations are run on the connection.
+
+use anyhow::{anyhow, Result};
+use indoc::{formatdoc, indoc};
+
+use crate::connection::Connection;
+
+impl Connection {
+    pub fn migrate(&self, domain: &'static str, migrations: &[&'static str]) -> Result<()> {
+        self.with_savepoint("migrating", || {
+            // Setup the migrations table unconditionally
+            self.exec(indoc! {"
+                CREATE TABLE IF NOT EXISTS migrations (
+                    domain TEXT,
+                    step INTEGER,
+                    migration TEXT
+                )"})?()?;
+
+            let completed_migrations =
+                self.select_bound::<&str, (String, usize, String)>(indoc! {"
+                    SELECT domain, step, migration FROM migrations
+                    WHERE domain = ?
+                    ORDER BY step
+                    "})?(domain)?;
+
+            let mut store_completed_migration = self
+                .exec_bound("INSERT INTO migrations (domain, step, migration) VALUES (?, ?, ?)")?;
+
+            for (index, migration) in migrations.iter().enumerate() {
+                if let Some((_, _, completed_migration)) = completed_migrations.get(index) {
+                    if completed_migration != migration {
+                        return Err(anyhow!(formatdoc! {"
+                            Migration changed for {} at step {}
+                            
+                            Stored migration:
+                            {}
+                            
+                            Proposed migration:
+                            {}", domain, index, completed_migration, migration}));
+                    } else {
+                        // Migration already run. Continue
+                        continue;
+                    }
+                }
+
+                self.exec(migration)?()?;
+                store_completed_migration((domain, index, *migration))?;
+            }
+
+            Ok(())
+        })
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use indoc::indoc;
+
+    use crate::connection::Connection;
+
+    #[test]
+    fn test_migrations_are_added_to_table() {
+        let connection = Connection::open_memory(Some("migrations_are_added_to_table"));
+
+        // Create first migration with a single step and run it
+        connection
+            .migrate(
+                "test",
+                &[indoc! {"
+                CREATE TABLE test1 (
+                    a TEXT,
+                    b TEXT
+                )"}],
+            )
+            .unwrap();
+
+        // Verify it got added to the migrations table
+        assert_eq!(
+            &connection
+                .select::<String>("SELECT (migration) FROM migrations")
+                .unwrap()()
+            .unwrap()[..],
+            &[indoc! {"
+                CREATE TABLE test1 (
+                    a TEXT,
+                    b TEXT
+                )"}],
+        );
+
+        // Add another step to the migration and run it again
+        connection
+            .migrate(
+                "test",
+                &[
+                    indoc! {"
+                    CREATE TABLE test1 (
+                        a TEXT,
+                        b TEXT
+                    )"},
+                    indoc! {"
+                    CREATE TABLE test2 (
+                        c TEXT,
+                        d TEXT
+                    )"},
+                ],
+            )
+            .unwrap();
+
+        // Verify it is also added to the migrations table
+        assert_eq!(
+            &connection
+                .select::<String>("SELECT (migration) FROM migrations")
+                .unwrap()()
+            .unwrap()[..],
+            &[
+                indoc! {"
+                    CREATE TABLE test1 (
+                        a TEXT,
+                        b TEXT
+                    )"},
+                indoc! {"
+                    CREATE TABLE test2 (
+                        c TEXT,
+                        d TEXT
+                    )"},
+            ],
+        );
+    }
+
+    #[test]
+    fn test_migration_setup_works() {
+        let connection = Connection::open_memory(Some("migration_setup_works"));
+
+        connection
+            .exec(indoc! {"
+                CREATE TABLE IF NOT EXISTS migrations (
+                    domain TEXT,
+                    step INTEGER,
+                    migration TEXT
+                );"})
+            .unwrap()()
+        .unwrap();
+
+        let mut store_completed_migration = connection
+            .exec_bound::<(&str, usize, String)>(indoc! {"
+                INSERT INTO migrations (domain, step, migration)
+                VALUES (?, ?, ?)"})
+            .unwrap();
+
+        let domain = "test_domain";
+        for i in 0..5 {
+            // Create a table forcing a schema change
+            connection
+                .exec(&format!("CREATE TABLE table{} ( test TEXT );", i))
+                .unwrap()()
+            .unwrap();
+
+            store_completed_migration((domain, i, i.to_string())).unwrap();
+        }
+    }
+
+    #[test]
+    fn migrations_dont_rerun() {
+        let connection = Connection::open_memory(Some("migrations_dont_rerun"));
+
+        // Create a migration which clears a table
+
+        // Manually create the table for that migration with a row
+        connection
+            .exec(indoc! {"
+                CREATE TABLE test_table (
+                    test_column INTEGER
+                );"})
+            .unwrap()()
+        .unwrap();
+        connection
+            .exec(indoc! {"
+            INSERT INTO test_table (test_column) VALUES (1);"})
+            .unwrap()()
+        .unwrap();
+
+        assert_eq!(
+            connection
+                .select_row::<usize>("SELECT * FROM test_table")
+                .unwrap()()
+            .unwrap(),
+            Some(1)
+        );
+
+        // Run the migration verifying that the row got dropped
+        connection
+            .migrate("test", &["DELETE FROM test_table"])
+            .unwrap();
+        assert_eq!(
+            connection
+                .select_row::<usize>("SELECT * FROM test_table")
+                .unwrap()()
+            .unwrap(),
+            None
+        );
+
+        // Recreate the dropped row
+        connection
+            .exec("INSERT INTO test_table (test_column) VALUES (2)")
+            .unwrap()()
+        .unwrap();
+
+        // Run the same migration again and verify that the table was left unchanged
+        connection
+            .migrate("test", &["DELETE FROM test_table"])
+            .unwrap();
+        assert_eq!(
+            connection
+                .select_row::<usize>("SELECT * FROM test_table")
+                .unwrap()()
+            .unwrap(),
+            Some(2)
+        );
+    }
+
+    #[test]
+    fn changed_migration_fails() {
+        let connection = Connection::open_memory(Some("changed_migration_fails"));
+
+        // Create a migration with two steps and run it
+        connection
+            .migrate(
+                "test migration",
+                &[
+                    indoc! {"
+                CREATE TABLE test (
+                    col INTEGER
+                )"},
+                    indoc! {"
+                    INSERT INTO test (col) VALUES (1)"},
+                ],
+            )
+            .unwrap();
+
+        // Create another migration with the same domain but different steps
+        let second_migration_result = connection.migrate(
+            "test migration",
+            &[
+                indoc! {"
+                CREATE TABLE test (
+                    color INTEGER
+                )"},
+                indoc! {"
+                INSERT INTO test (color) VALUES (1)"},
+            ],
+        );
+
+        // Verify new migration returns error when run
+        assert!(second_migration_result.is_err())
+    }
+}
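
A small sketch of the `migrate` API above, using a hypothetical domain and table; re-running with identical steps is a no-op, while changing an already-applied step returns an error, as the tests above exercise:

```rust
use sqlez::connection::Connection;

fn run_notes_migrations() -> anyhow::Result<()> {
    let connection = Connection::open_memory(Some("migrate_sketch"));
    connection.migrate(
        "notes", // hypothetical domain name
        &[
            "CREATE TABLE notes(id INTEGER PRIMARY KEY, body TEXT) STRICT;",
            "ALTER TABLE notes ADD COLUMN starred INTEGER;",
        ],
    )?;
    Ok(())
}
```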

crates/sqlez/src/savepoint.rs 🔗

@@ -0,0 +1,148 @@
+use anyhow::Result;
+use indoc::formatdoc;
+
+use crate::connection::Connection;
+
+impl Connection {
+    // Run a set of commands within the context of a `SAVEPOINT name`. If the callback
+    // returns Err(_), the savepoint will be rolled back. Otherwise, the save
+    // point is released.
+    pub fn with_savepoint<R, F>(&self, name: impl AsRef<str>, f: F) -> Result<R>
+    where
+        F: FnOnce() -> Result<R>,
+    {
+        let name = name.as_ref();
+        self.exec(&format!("SAVEPOINT {name}"))?()?;
+        let result = f();
+        match result {
+            Ok(_) => {
+                self.exec(&format!("RELEASE {name}"))?()?;
+            }
+            Err(_) => {
+                self.exec(&formatdoc! {"
+                    ROLLBACK TO {name};
+                    RELEASE {name}"})?()?;
+            }
+        }
+        result
+    }
+
+    // Run a set of commands within the context of a `SAVEPOINT name`. If the callback
+    // returns Ok(None) or Err(_), the savepoint will be rolled back. Otherwise, the save
+    // point is released.
+    pub fn with_savepoint_rollback<R, F>(&self, name: impl AsRef<str>, f: F) -> Result<Option<R>>
+    where
+        F: FnOnce() -> Result<Option<R>>,
+    {
+        let name = name.as_ref();
+        self.exec(&format!("SAVEPOINT {name}"))?()?;
+        let result = f();
+        match result {
+            Ok(Some(_)) => {
+                self.exec(&format!("RELEASE {name}"))?()?;
+            }
+            Ok(None) | Err(_) => {
+                self.exec(&formatdoc! {"
+                    ROLLBACK TO {name};
+                    RELEASE {name}"})?()?;
+            }
+        }
+        result
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::connection::Connection;
+    use anyhow::Result;
+    use indoc::indoc;
+
+    #[test]
+    fn test_nested_savepoints() -> Result<()> {
+        let connection = Connection::open_memory(Some("nested_savepoints"));
+
+        connection
+            .exec(indoc! {"
+            CREATE TABLE text (
+                text TEXT,
+                idx INTEGER
+            );"})
+            .unwrap()()
+        .unwrap();
+
+        let save1_text = "test save1";
+        let save2_text = "test save2";
+
+        connection.with_savepoint("first", || {
+            connection.exec_bound("INSERT INTO text(text, idx) VALUES (?, ?)")?((save1_text, 1))?;
+
+            assert!(connection
+                .with_savepoint("second", || -> Result<Option<()>, anyhow::Error> {
+                    connection.exec_bound("INSERT INTO text(text, idx) VALUES (?, ?)")?((
+                        save2_text, 2,
+                    ))?;
+
+                    assert_eq!(
+                        connection
+                            .select::<String>("SELECT text FROM text ORDER BY text.idx ASC")?(
+                        )?,
+                        vec![save1_text, save2_text],
+                    );
+
+                    anyhow::bail!("Failed second save point :(")
+                })
+                .err()
+                .is_some());
+
+            assert_eq!(
+                connection.select::<String>("SELECT text FROM text ORDER BY text.idx ASC")?()?,
+                vec![save1_text],
+            );
+
+            connection.with_savepoint_rollback::<(), _>("second", || {
+                connection.exec_bound("INSERT INTO text(text, idx) VALUES (?, ?)")?((
+                    save2_text, 2,
+                ))?;
+
+                assert_eq!(
+                    connection.select::<String>("SELECT text FROM text ORDER BY text.idx ASC")?()?,
+                    vec![save1_text, save2_text],
+                );
+
+                Ok(None)
+            })?;
+
+            assert_eq!(
+                connection.select::<String>("SELECT text FROM text ORDER BY text.idx ASC")?()?,
+                vec![save1_text],
+            );
+
+            connection.with_savepoint_rollback("second", || {
+                connection.exec_bound("INSERT INTO text(text, idx) VALUES (?, ?)")?((
+                    save2_text, 2,
+                ))?;
+
+                assert_eq!(
+                    connection.select::<String>("SELECT text FROM text ORDER BY text.idx ASC")?()?,
+                    vec![save1_text, save2_text],
+                );
+
+                Ok(Some(()))
+            })?;
+
+            assert_eq!(
+                connection.select::<String>("SELECT text FROM text ORDER BY text.idx ASC")?()?,
+                vec![save1_text, save2_text],
+            );
+
+            Ok(())
+        })?;
+
+        assert_eq!(
+            connection.select::<String>("SELECT text FROM text ORDER BY text.idx ASC")?()?,
+            vec![save1_text, save2_text],
+        );
+
+        Ok(())
+    }
+}
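
A sketch of the `with_savepoint` semantics described above, against a hypothetical table: an `Err` from the closure rolls the savepoint back, while `Ok` releases it.

```rust
use sqlez::connection::Connection;

fn insert_in_savepoint(connection: &Connection) -> anyhow::Result<()> {
    connection.with_savepoint("insert_note", || {
        connection.exec_bound("INSERT INTO notes(body) VALUES (?)")?("first note")?;
        // Returning an Err here would roll the INSERT back instead of releasing it.
        Ok(())
    })
}
```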

crates/sqlez/src/statement.rs 🔗

@@ -0,0 +1,491 @@
+use std::ffi::{c_int, CStr, CString};
+use std::marker::PhantomData;
+use std::{ptr, slice, str};
+
+use anyhow::{anyhow, bail, Context, Result};
+use libsqlite3_sys::*;
+
+use crate::bindable::{Bind, Column};
+use crate::connection::Connection;
+
+pub struct Statement<'a> {
+    raw_statements: Vec<*mut sqlite3_stmt>,
+    current_statement: usize,
+    connection: &'a Connection,
+    phantom: PhantomData<sqlite3_stmt>,
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Debug)]
+pub enum StepResult {
+    Row,
+    Done,
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Debug)]
+pub enum SqlType {
+    Text,
+    Integer,
+    Blob,
+    Float,
+    Null,
+}
+
+impl<'a> Statement<'a> {
+    pub fn prepare<T: AsRef<str>>(connection: &'a Connection, query: T) -> Result<Self> {
+        let mut statement = Self {
+            raw_statements: Default::default(),
+            current_statement: 0,
+            connection,
+            phantom: PhantomData,
+        };
+        unsafe {
+            let sql = CString::new(query.as_ref()).context("Error creating cstr")?;
+            let mut remaining_sql = sql.as_c_str();
+            while {
+                let remaining_sql_str = remaining_sql
+                    .to_str()
+                    .context("Parsing remaining sql")?
+                    .trim();
+                remaining_sql_str != ";" && !remaining_sql_str.is_empty()
+            } {
+                let mut raw_statement = 0 as *mut sqlite3_stmt;
+                let mut remaining_sql_ptr = ptr::null();
+                sqlite3_prepare_v2(
+                    connection.sqlite3,
+                    remaining_sql.as_ptr(),
+                    -1,
+                    &mut raw_statement,
+                    &mut remaining_sql_ptr,
+                );
+
+                remaining_sql = CStr::from_ptr(remaining_sql_ptr);
+                statement.raw_statements.push(raw_statement);
+
+                connection.last_error().with_context(|| {
+                    format!("Prepare call failed for query:\n{}", query.as_ref())
+                })?;
+
+                if !connection.can_write() && sqlite3_stmt_readonly(raw_statement) == 0 {
+                    let sql = CStr::from_ptr(sqlite3_sql(raw_statement));
+
+                    bail!(
+                        "Write statement prepared with connection that is not write capable. SQL:\n{} ",
+                        sql.to_str()?)
+                }
+            }
+        }
+
+        Ok(statement)
+    }
+
+    fn current_statement(&self) -> *mut sqlite3_stmt {
+        *self.raw_statements.get(self.current_statement).unwrap()
+    }
+
+    pub fn reset(&mut self) {
+        unsafe {
+            for raw_statement in self.raw_statements.iter() {
+                sqlite3_reset(*raw_statement);
+            }
+        }
+        self.current_statement = 0;
+    }
+
+    pub fn parameter_count(&self) -> i32 {
+        unsafe {
+            self.raw_statements
+                .iter()
+                .map(|raw_statement| sqlite3_bind_parameter_count(*raw_statement))
+                .max()
+                .unwrap_or(0)
+        }
+    }
+
+    fn bind_index_with(&self, index: i32, bind: impl Fn(&*mut sqlite3_stmt) -> ()) -> Result<()> {
+        let mut any_succeed = false;
+        unsafe {
+            for raw_statement in self.raw_statements.iter() {
+                if index <= sqlite3_bind_parameter_count(*raw_statement) {
+                    bind(raw_statement);
+                    self.connection
+                        .last_error()
+                        .with_context(|| format!("Failed to bind value at index {index}"))?;
+                    any_succeed = true;
+                } else {
+                    continue;
+                }
+            }
+        }
+        if any_succeed {
+            Ok(())
+        } else {
+            Err(anyhow!("Failed to bind parameters"))
+        }
+    }
+
+    pub fn bind_blob(&self, index: i32, blob: &[u8]) -> Result<()> {
+        let index = index as c_int;
+        let blob_pointer = blob.as_ptr() as *const _;
+        let len = blob.len() as c_int;
+
+        self.bind_index_with(index, |raw_statement| unsafe {
+            sqlite3_bind_blob(*raw_statement, index, blob_pointer, len, SQLITE_TRANSIENT());
+        })
+    }
+
+    pub fn column_blob<'b>(&'b mut self, index: i32) -> Result<&'b [u8]> {
+        let index = index as c_int;
+        let pointer = unsafe { sqlite3_column_blob(self.current_statement(), index) };
+
+        self.connection
+            .last_error()
+            .with_context(|| format!("Failed to read blob at index {index}"))?;
+        if pointer.is_null() {
+            return Ok(&[]);
+        }
+        let len = unsafe { sqlite3_column_bytes(self.current_statement(), index) as usize };
+        self.connection
+            .last_error()
+            .with_context(|| format!("Failed to read length of blob at index {index}"))?;
+
+        unsafe { Ok(slice::from_raw_parts(pointer as *const u8, len)) }
+    }
+
+    pub fn bind_double(&self, index: i32, double: f64) -> Result<()> {
+        let index = index as c_int;
+
+        self.bind_index_with(index, |raw_statement| unsafe {
+            sqlite3_bind_double(*raw_statement, index, double);
+        })
+    }
+
+    pub fn column_double(&self, index: i32) -> Result<f64> {
+        let index = index as c_int;
+        let result = unsafe { sqlite3_column_double(self.current_statement(), index) };
+        self.connection
+            .last_error()
+            .with_context(|| format!("Failed to read double at index {index}"))?;
+        Ok(result)
+    }
+
+    pub fn bind_int(&self, index: i32, int: i32) -> Result<()> {
+        let index = index as c_int;
+        self.bind_index_with(index, |raw_statement| unsafe {
+            sqlite3_bind_int(*raw_statement, index, int);
+        })
+    }
+
+    pub fn column_int(&self, index: i32) -> Result<i32> {
+        let index = index as c_int;
+        let result = unsafe { sqlite3_column_int(self.current_statement(), index) };
+        self.connection
+            .last_error()
+            .with_context(|| format!("Failed to read int at index {index}"))?;
+        Ok(result)
+    }
+
+    pub fn bind_int64(&self, index: i32, int: i64) -> Result<()> {
+        let index = index as c_int;
+        self.bind_index_with(index, |raw_statement| unsafe {
+            sqlite3_bind_int64(*raw_statement, index, int);
+        })
+    }
+
+    pub fn column_int64(&self, index: i32) -> Result<i64> {
+        let index = index as c_int;
+        let result = unsafe { sqlite3_column_int64(self.current_statement(), index) };
+        self.connection
+            .last_error()
+            .with_context(|| format!("Failed to read i64 at index {index}"))?;
+        Ok(result)
+    }
+
+    pub fn bind_null(&self, index: i32) -> Result<()> {
+        let index = index as c_int;
+        self.bind_index_with(index, |raw_statement| unsafe {
+            sqlite3_bind_null(*raw_statement, index);
+        })
+    }
+
+    pub fn bind_text(&self, index: i32, text: &str) -> Result<()> {
+        let index = index as c_int;
+        let text_pointer = text.as_ptr() as *const _;
+        let len = text.len() as c_int;
+
+        self.bind_index_with(index, |raw_statement| unsafe {
+            sqlite3_bind_text(*raw_statement, index, text_pointer, len, SQLITE_TRANSIENT());
+        })
+    }
+
+    pub fn column_text<'b>(&'b mut self, index: i32) -> Result<&'b str> {
+        let index = index as c_int;
+        let pointer = unsafe { sqlite3_column_text(self.current_statement(), index) };
+
+        self.connection
+            .last_error()
+            .with_context(|| format!("Failed to read text from column {index}"))?;
+        if pointer.is_null() {
+            return Ok("");
+        }
+        let len = unsafe { sqlite3_column_bytes(self.current_statement(), index) as usize };
+        self.connection
+            .last_error()
+            .with_context(|| format!("Failed to read text length at {index}"))?;
+
+        let slice = unsafe { slice::from_raw_parts(pointer as *const u8, len) };
+        Ok(str::from_utf8(slice)?)
+    }
+
+    pub fn bind<T: Bind>(&self, value: T, index: i32) -> Result<i32> {
+        debug_assert!(index > 0);
+        value.bind(self, index)
+    }
+
+    pub fn column<T: Column>(&mut self) -> Result<T> {
+        let (result, _) = T::column(self, 0)?;
+        Ok(result)
+    }
+
+    pub fn column_type(&mut self, index: i32) -> Result<SqlType> {
+        let result = unsafe { sqlite3_column_type(self.current_statement(), index) };
+        self.connection.last_error()?;
+        match result {
+            SQLITE_INTEGER => Ok(SqlType::Integer),
+            SQLITE_FLOAT => Ok(SqlType::Float),
+            SQLITE_TEXT => Ok(SqlType::Text),
+            SQLITE_BLOB => Ok(SqlType::Blob),
+            SQLITE_NULL => Ok(SqlType::Null),
+            _ => Err(anyhow!("Column type returned was incorrect ")),
+        }
+    }
+
+    pub fn with_bindings(&mut self, bindings: impl Bind) -> Result<&mut Self> {
+        self.bind(bindings, 1)?;
+        Ok(self)
+    }
+
+    fn step(&mut self) -> Result<StepResult> {
+        unsafe {
+            match sqlite3_step(self.current_statement()) {
+                SQLITE_ROW => Ok(StepResult::Row),
+                SQLITE_DONE => {
+                    if self.current_statement >= self.raw_statements.len() - 1 {
+                        Ok(StepResult::Done)
+                    } else {
+                        self.current_statement += 1;
+                        self.step()
+                    }
+                }
+                SQLITE_MISUSE => Err(anyhow!("Statement step returned SQLITE_MISUSE")),
+                _other_error => {
+                    self.connection.last_error()?;
+                    unreachable!("Step returned error code and last error failed to catch it");
+                }
+            }
+        }
+    }
+
+    pub fn exec(&mut self) -> Result<()> {
+        fn logic(this: &mut Statement) -> Result<()> {
+            while this.step()? == StepResult::Row {}
+            Ok(())
+        }
+        let result = logic(self);
+        self.reset();
+        result
+    }
+
+    pub fn map<R>(&mut self, callback: impl FnMut(&mut Statement) -> Result<R>) -> Result<Vec<R>> {
+        fn logic<R>(
+            this: &mut Statement,
+            mut callback: impl FnMut(&mut Statement) -> Result<R>,
+        ) -> Result<Vec<R>> {
+            let mut mapped_rows = Vec::new();
+            while this.step()? == StepResult::Row {
+                mapped_rows.push(callback(this)?);
+            }
+            Ok(mapped_rows)
+        }
+
+        let result = logic(self, callback);
+        self.reset();
+        result
+    }
+
+    pub fn rows<R: Column>(&mut self) -> Result<Vec<R>> {
+        self.map(|s| s.column::<R>())
+    }
+
+    pub fn single<R>(&mut self, callback: impl FnOnce(&mut Statement) -> Result<R>) -> Result<R> {
+        fn logic<R>(
+            this: &mut Statement,
+            callback: impl FnOnce(&mut Statement) -> Result<R>,
+        ) -> Result<R> {
+            if this.step()? != StepResult::Row {
+                return Err(anyhow!("single called with query that returns no rows."));
+            }
+            let result = callback(this)?;
+
+            if this.step()? != StepResult::Done {
+                return Err(anyhow!(
+                    "single called with a query that returns more than one row."
+                ));
+            }
+
+            Ok(result)
+        }
+        let result = logic(self, callback);
+        self.reset();
+        result
+    }
+
+    pub fn row<R: Column>(&mut self) -> Result<R> {
+        self.single(|this| this.column::<R>())
+    }
+
+    pub fn maybe<R>(
+        &mut self,
+        callback: impl FnOnce(&mut Statement) -> Result<R>,
+    ) -> Result<Option<R>> {
+        fn logic<R>(
+            this: &mut Statement,
+            callback: impl FnOnce(&mut Statement) -> Result<R>,
+        ) -> Result<Option<R>> {
+            if this.step().context("Failed on step call")? != StepResult::Row {
+                return Ok(None);
+            }
+
+            let result = callback(this)
+                .map(|r| Some(r))
+                .context("Failed to parse row result")?;
+
+            if this.step().context("Second step call")? != StepResult::Done {
+                return Err(anyhow!(
+                    "maybe called with a query that returns more than one row."
+                ));
+            }
+
+            Ok(result)
+        }
+        let result = logic(self, callback);
+        self.reset();
+        result
+    }
+
+    pub fn maybe_row<R: Column>(&mut self) -> Result<Option<R>> {
+        self.maybe(|this| this.column::<R>())
+    }
+}
+
+impl<'a> Drop for Statement<'a> {
+    fn drop(&mut self) {
+        unsafe {
+            for raw_statement in self.raw_statements.iter() {
+                sqlite3_finalize(*raw_statement);
+            }
+        }
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use indoc::indoc;
+
+    use crate::{
+        connection::Connection,
+        statement::{Statement, StepResult},
+    };
+
+    #[test]
+    fn binding_multiple_statements_with_parameter_gaps() {
+        let connection =
+            Connection::open_memory(Some("binding_multiple_statements_with_parameter_gaps"));
+
+        connection
+            .exec(indoc! {"
+            CREATE TABLE test (
+                col INTEGER
+            )"})
+            .unwrap()()
+        .unwrap();
+
+        let statement = Statement::prepare(
+            &connection,
+            indoc! {"
+                INSERT INTO test(col) VALUES (?3);
+                SELECT * FROM test WHERE col = ?1"},
+        )
+        .unwrap();
+
+        statement
+            .bind_int(1, 1)
+            .expect("Could not bind parameter to first index");
+        statement
+            .bind_int(2, 2)
+            .expect("Could not bind parameter to second index");
+        statement
+            .bind_int(3, 3)
+            .expect("Could not bind parameter to third index");
+    }
+
+    #[test]
+    fn blob_round_trips() {
+        let connection1 = Connection::open_memory(Some("blob_round_trips"));
+        connection1
+            .exec(indoc! {"
+                CREATE TABLE blobs (
+                    data BLOB
+                )"})
+            .unwrap()()
+        .unwrap();
+
+        let blob = &[0, 1, 2, 4, 8, 16, 32, 64];
+
+        let mut write =
+            Statement::prepare(&connection1, "INSERT INTO blobs (data) VALUES (?)").unwrap();
+        write.bind_blob(1, blob).unwrap();
+        assert_eq!(write.step().unwrap(), StepResult::Done);
+
+        // Read the blob back through a second connection to the same shared in-memory db
+        let connection2 = Connection::open_memory(Some("blob_round_trips"));
+        let mut read = Statement::prepare(&connection2, "SELECT * FROM blobs").unwrap();
+        assert_eq!(read.step().unwrap(), StepResult::Row);
+        assert_eq!(read.column_blob(0).unwrap(), blob);
+        assert_eq!(read.step().unwrap(), StepResult::Done);
+
+        // Delete the added blob and verify it's deleted on the other side
+        connection2.exec("DELETE FROM blobs").unwrap()().unwrap();
+        let mut read = Statement::prepare(&connection1, "SELECT * FROM blobs").unwrap();
+        assert_eq!(read.step().unwrap(), StepResult::Done);
+    }
+
+    #[test]
+    pub fn maybe_returns_options() {
+        let connection = Connection::open_memory(Some("maybe_returns_options"));
+        connection
+            .exec(indoc! {"
+                CREATE TABLE texts (
+                    text TEXT 
+                )"})
+            .unwrap()()
+        .unwrap();
+
+        assert!(connection
+            .select_row::<String>("SELECT text FROM texts")
+            .unwrap()()
+        .unwrap()
+        .is_none());
+
+        let text_to_insert = "This is a test";
+
+        connection
+            .exec_bound("INSERT INTO texts VALUES (?)")
+            .unwrap()(text_to_insert)
+        .unwrap();
+
+        assert_eq!(
+            connection.select_row("SELECT text FROM texts").unwrap()().unwrap(),
+            Some(text_to_insert.to_string())
+        );
+    }
+}
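
For illustration, a sketch of driving a `Statement` directly; most callers go through the typed helpers in `typed_statements.rs`, and the table here is hypothetical.

```rust
use sqlez::{connection::Connection, statement::Statement};

fn recent_note_bodies(connection: &Connection) -> anyhow::Result<Vec<String>> {
    let mut statement =
        Statement::prepare(connection, "SELECT body FROM notes WHERE id > ?")?;
    // Bind parameter 1, then step through the rows, decoding each as a String.
    statement.with_bindings(1usize)?.rows::<String>()
}
```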

crates/sqlez/src/thread_safe_connection.rs 🔗

@@ -0,0 +1,359 @@
+use anyhow::Context;
+use futures::{channel::oneshot, Future, FutureExt};
+use lazy_static::lazy_static;
+use parking_lot::{Mutex, RwLock};
+use std::{collections::HashMap, marker::PhantomData, ops::Deref, sync::Arc, thread};
+use thread_local::ThreadLocal;
+
+use crate::{connection::Connection, domain::Migrator, util::UnboundedSyncSender};
+
+const MIGRATION_RETRIES: usize = 10;
+
+type QueuedWrite = Box<dyn 'static + Send + FnOnce()>;
+type WriteQueueConstructor =
+    Box<dyn 'static + Send + FnMut() -> Box<dyn 'static + Send + Sync + Fn(QueuedWrite)>>;
+lazy_static! {
+    /// List of queues of tasks by database uri. This lets us serialize writes to the database
+    /// and have a single worker thread per db file. This means many thread safe connections
+    /// (possibly with different migrations) could all be communicating with the same background
+    /// thread.
+    static ref QUEUES: RwLock<HashMap<Arc<str>, Box<dyn 'static + Send + Sync + Fn(QueuedWrite)>>> =
+        Default::default();
+}
+
+/// Thread safe connection to a given database file or in-memory db. This can be cloned, shared, and
+/// stored in statics. It derefs to a per-thread, read-only synchronous connection. A write-capable
+/// connection may be accessed by passing a callback to the `write` function, which queues the callback.
+pub struct ThreadSafeConnection<M: Migrator + 'static = ()> {
+    uri: Arc<str>,
+    persistent: bool,
+    connection_initialize_query: Option<&'static str>,
+    connections: Arc<ThreadLocal<Connection>>,
+    _migrator: PhantomData<*mut M>,
+}
+
+unsafe impl<M: Migrator> Send for ThreadSafeConnection<M> {}
+unsafe impl<M: Migrator> Sync for ThreadSafeConnection<M> {}
+
+pub struct ThreadSafeConnectionBuilder<M: Migrator + 'static = ()> {
+    db_initialize_query: Option<&'static str>,
+    write_queue_constructor: Option<WriteQueueConstructor>,
+    connection: ThreadSafeConnection<M>,
+}
+
+impl<M: Migrator> ThreadSafeConnectionBuilder<M> {
+    /// Sets the query to run every time a connection is opened. This must
+    /// be infallible (e.g. only use pragma statements) and must not cause writes
+    /// to the db, or it will panic.
+    pub fn with_connection_initialize_query(mut self, initialize_query: &'static str) -> Self {
+        self.connection.connection_initialize_query = Some(initialize_query);
+        self
+    }
+
+    /// Queues an initialization query for the database file. This must be infallible
+    /// but may cause changes to the database file such as with `PRAGMA journal_mode`
+    pub fn with_db_initialization_query(mut self, initialize_query: &'static str) -> Self {
+        self.db_initialize_query = Some(initialize_query);
+        self
+    }
+
+    /// Specifies how the thread safe connection should serialize writes. If provided
+    /// the connection will call the write_queue_constructor for each database file in
+    /// this process. The constructor is responsible for setting up a background thread or
+    /// async task which handles queued writes with the provided connection.
+    pub fn with_write_queue_constructor(
+        mut self,
+        write_queue_constructor: WriteQueueConstructor,
+    ) -> Self {
+        self.write_queue_constructor = Some(write_queue_constructor);
+        self
+    }
+
+    pub async fn build(self) -> anyhow::Result<ThreadSafeConnection<M>> {
+        self.connection
+            .initialize_queues(self.write_queue_constructor);
+
+        let db_initialize_query = self.db_initialize_query;
+
+        self.connection
+            .write(move |connection| {
+                if let Some(db_initialize_query) = db_initialize_query {
+                    connection.exec(db_initialize_query).with_context(|| {
+                        format!(
+                            "Db initialize query failed to execute: {}",
+                            db_initialize_query
+                        )
+                    })?()?;
+                }
+
+                // Retry failed migrations in case they were run in parallel from different
+                // processes. This gives a best attempt at migrating before bailing
+                let mut migration_result =
+                    anyhow::Result::<()>::Err(anyhow::anyhow!("Migration never run"));
+
+                for _ in 0..MIGRATION_RETRIES {
+                    migration_result = connection
+                        .with_savepoint("thread_safe_multi_migration", || M::migrate(connection));
+
+                    if migration_result.is_ok() {
+                        break;
+                    }
+                }
+
+                migration_result
+            })
+            .await?;
+
+        Ok(self.connection)
+    }
+}
+
+impl<M: Migrator> ThreadSafeConnection<M> {
+    fn initialize_queues(&self, write_queue_constructor: Option<WriteQueueConstructor>) -> bool {
+        if !QUEUES.read().contains_key(&self.uri) {
+            let mut queues = QUEUES.write();
+            if !queues.contains_key(&self.uri) {
+                let mut write_queue_constructor =
+                    write_queue_constructor.unwrap_or(background_thread_queue());
+                queues.insert(self.uri.clone(), write_queue_constructor());
+                return true;
+            }
+        }
+        return false;
+    }
+
+    pub fn builder(uri: &str, persistent: bool) -> ThreadSafeConnectionBuilder<M> {
+        ThreadSafeConnectionBuilder::<M> {
+            db_initialize_query: None,
+            write_queue_constructor: None,
+            connection: Self {
+                uri: Arc::from(uri),
+                persistent,
+                connection_initialize_query: None,
+                connections: Default::default(),
+                _migrator: PhantomData,
+            },
+        }
+    }
+
+    /// Opens a new db connection with the initialized file path. This is internal and only
+    /// called from the deref function.
+    fn open_file(uri: &str) -> Connection {
+        Connection::open_file(uri)
+    }
+
+    /// Opens a shared memory connection using the file path as the identifier. This is internal
+    /// and only called from the deref function.
+    fn open_shared_memory(uri: &str) -> Connection {
+        Connection::open_memory(Some(uri))
+    }
+
+    pub fn write<T: 'static + Send + Sync>(
+        &self,
+        callback: impl 'static + Send + FnOnce(&Connection) -> T,
+    ) -> impl Future<Output = T> {
+        // Look up the write queue for this db; queues are created in initialize_queues during build
+        let queues = QUEUES.read();
+        let write_channel = queues
+            .get(&self.uri)
+            .expect("Queues are inserted when build is called. This should always succeed");
+
+        // Create a one shot channel for the result of the queued write
+        // so we can await on the result
+        let (sender, reciever) = oneshot::channel();
+
+        let thread_safe_connection = (*self).clone();
+        write_channel(Box::new(move || {
+            let connection = thread_safe_connection.deref();
+            let result = connection.with_write(|connection| callback(connection));
+            sender.send(result).ok();
+        }));
+        reciever.map(|response| response.expect("Write queue unexpectedly closed"))
+    }
+
+    pub(crate) fn create_connection(
+        persistent: bool,
+        uri: &str,
+        connection_initialize_query: Option<&'static str>,
+    ) -> Connection {
+        let mut connection = if persistent {
+            Self::open_file(uri)
+        } else {
+            Self::open_shared_memory(uri)
+        };
+
+        // Disallow writes on the connection. The only writes allowed for thread safe connections
+        // are from the background thread that can serialize them.
+        *connection.write.get_mut() = false;
+
+        if let Some(initialize_query) = connection_initialize_query {
+            connection.exec(initialize_query).expect(&format!(
+                "Initialize query failed to execute: {}",
+                initialize_query
+            ))()
+            .unwrap()
+        }
+
+        connection
+    }
+}
+
+impl ThreadSafeConnection<()> {
+    /// Special constructor for ThreadSafeConnection which disallows db initialization and migrations.
+    /// This allows construction to be infallible and not write to the db.
+    pub fn new(
+        uri: &str,
+        persistent: bool,
+        connection_initialize_query: Option<&'static str>,
+        write_queue_constructor: Option<WriteQueueConstructor>,
+    ) -> Self {
+        let connection = Self {
+            uri: Arc::from(uri),
+            persistent,
+            connection_initialize_query,
+            connections: Default::default(),
+            _migrator: PhantomData,
+        };
+
+        connection.initialize_queues(write_queue_constructor);
+        connection
+    }
+}
+
+impl<M: Migrator> Clone for ThreadSafeConnection<M> {
+    fn clone(&self) -> Self {
+        Self {
+            uri: self.uri.clone(),
+            persistent: self.persistent,
+            connection_initialize_query: self.connection_initialize_query.clone(),
+            connections: self.connections.clone(),
+            _migrator: PhantomData,
+        }
+    }
+}
+
+impl<M: Migrator> Deref for ThreadSafeConnection<M> {
+    type Target = Connection;
+
+    fn deref(&self) -> &Self::Target {
+        self.connections.get_or(|| {
+            Self::create_connection(self.persistent, &self.uri, self.connection_initialize_query)
+        })
+    }
+}
+
+pub fn background_thread_queue() -> WriteQueueConstructor {
+    use std::sync::mpsc::channel;
+
+    Box::new(|| {
+        let (sender, reciever) = channel::<QueuedWrite>();
+
+        thread::spawn(move || {
+            while let Ok(write) = reciever.recv() {
+                write()
+            }
+        });
+
+        let sender = UnboundedSyncSender::new(sender);
+        Box::new(move |queued_write| {
+            sender
+                .send(queued_write)
+                .expect("Could not send write action to background thread");
+        })
+    })
+}
+
+pub fn locking_queue() -> WriteQueueConstructor {
+    Box::new(|| {
+        let write_mutex = Mutex::new(());
+        Box::new(move |queued_write| {
+            let _lock = write_mutex.lock();
+            queued_write();
+        })
+    })
+}
+
+#[cfg(test)]
+mod test {
+    use indoc::indoc;
+    use lazy_static::__Deref;
+
+    use std::thread;
+
+    use crate::{domain::Domain, thread_safe_connection::ThreadSafeConnection};
+
+    #[test]
+    fn many_initialize_and_migrate_queries_at_once() {
+        let mut handles = vec![];
+
+        enum TestDomain {}
+        impl Domain for TestDomain {
+            fn name() -> &'static str {
+                "test"
+            }
+            fn migrations() -> &'static [&'static str] {
+                &["CREATE TABLE test(col1 TEXT, col2 TEXT) STRICT;"]
+            }
+        }
+
+        for _ in 0..100 {
+            handles.push(thread::spawn(|| {
+                let builder =
+                    ThreadSafeConnection::<TestDomain>::builder("annoying-test.db", false)
+                        .with_db_initialization_query("PRAGMA journal_mode=WAL")
+                        .with_connection_initialize_query(indoc! {"
+                                PRAGMA synchronous=NORMAL;
+                                PRAGMA busy_timeout=1;
+                                PRAGMA foreign_keys=TRUE;
+                                PRAGMA case_sensitive_like=TRUE;
+                            "});
+
+                let _ = smol::block_on(builder.build()).unwrap().deref();
+            }));
+        }
+
+        for handle in handles {
+            let _ = handle.join();
+        }
+    }
+
+    #[test]
+    #[should_panic]
+    fn wild_zed_lost_failure() {
+        enum TestWorkspace {}
+        impl Domain for TestWorkspace {
+            fn name() -> &'static str {
+                "workspace"
+            }
+
+            fn migrations() -> &'static [&'static str] {
+                &["
+                    CREATE TABLE workspaces(
+                        workspace_id INTEGER PRIMARY KEY,
+                        dock_visible INTEGER, -- Boolean
+                        dock_anchor TEXT, -- Enum: 'Bottom' / 'Right' / 'Expanded'
+                        dock_pane INTEGER, -- NULL indicates that we don't have a dock pane yet
+                        timestamp TEXT DEFAULT CURRENT_TIMESTAMP NOT NULL,
+                        FOREIGN KEY(dock_pane) REFERENCES panes(pane_id),
+                        FOREIGN KEY(active_pane) REFERENCES panes(pane_id)
+                    ) STRICT;
+                    
+                    CREATE TABLE panes(
+                        pane_id INTEGER PRIMARY KEY,
+                        workspace_id INTEGER NOT NULL,
+                        active INTEGER NOT NULL, -- Boolean
+                        FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) 
+                            ON DELETE CASCADE 
+                            ON UPDATE CASCADE
+                    ) STRICT;
+                "]
+            }
+        }
+
+        let builder =
+            ThreadSafeConnection::<TestWorkspace>::builder("wild_zed_lost_failure", false)
+                .with_connection_initialize_query("PRAGMA FOREIGN_KEYS=true");
+
+        smol::block_on(builder.build()).unwrap();
+    }
+}
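
A sketch of opening a `ThreadSafeConnection` for the hypothetical `Notes` domain sketched earlier and queueing a write; it assumes an executor such as `smol` for blocking on the returned futures, and a hypothetical db file name.

```rust
use sqlez::thread_safe_connection::ThreadSafeConnection;

fn open_notes_db() -> anyhow::Result<()> {
    let connection = smol::block_on(
        ThreadSafeConnection::<Notes>::builder("notes.db", true)
            .with_db_initialization_query("PRAGMA journal_mode=WAL")
            .with_connection_initialize_query("PRAGMA foreign_keys=TRUE")
            .build(),
    )?;

    // Reads deref to a per-thread, read-only connection; writes are queued
    // and run serially on the background thread for this db file.
    smol::block_on(connection.write(|connection| {
        connection.exec("INSERT INTO notes(body) VALUES ('hello')")?()
    }))?;

    Ok(())
}
```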

crates/sqlez/src/typed_statements.rs 🔗

@@ -0,0 +1,60 @@
+use anyhow::{Context, Result};
+
+use crate::{
+    bindable::{Bind, Column},
+    connection::Connection,
+    statement::Statement,
+};
+
+impl Connection {
+    pub fn exec<'a>(&'a self, query: &str) -> Result<impl 'a + FnMut() -> Result<()>> {
+        let mut statement = Statement::prepare(&self, query)?;
+        Ok(move || statement.exec())
+    }
+
+    pub fn exec_bound<'a, B: Bind>(
+        &'a self,
+        query: &str,
+    ) -> Result<impl 'a + FnMut(B) -> Result<()>> {
+        let mut statement = Statement::prepare(&self, query)?;
+        Ok(move |bindings| statement.with_bindings(bindings)?.exec())
+    }
+
+    pub fn select<'a, C: Column>(
+        &'a self,
+        query: &str,
+    ) -> Result<impl 'a + FnMut() -> Result<Vec<C>>> {
+        let mut statement = Statement::prepare(&self, query)?;
+        Ok(move || statement.rows::<C>())
+    }
+
+    pub fn select_bound<'a, B: Bind, C: Column>(
+        &'a self,
+        query: &str,
+    ) -> Result<impl 'a + FnMut(B) -> Result<Vec<C>>> {
+        let mut statement = Statement::prepare(&self, query)?;
+        Ok(move |bindings| statement.with_bindings(bindings)?.rows::<C>())
+    }
+
+    pub fn select_row<'a, C: Column>(
+        &'a self,
+        query: &str,
+    ) -> Result<impl 'a + FnMut() -> Result<Option<C>>> {
+        let mut statement = Statement::prepare(&self, query)?;
+        Ok(move || statement.maybe_row::<C>())
+    }
+
+    pub fn select_row_bound<'a, B: Bind, C: Column>(
+        &'a self,
+        query: &str,
+    ) -> Result<impl 'a + FnMut(B) -> Result<Option<C>>> {
+        let mut statement = Statement::prepare(&self, query)?;
+        Ok(move |bindings| {
+            statement
+                .with_bindings(bindings)
+                .context("Bindings failed")?
+                .maybe_row::<C>()
+                .context("Maybe row failed")
+        })
+    }
+}
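
An illustrative sketch of the typed helpers above; the table and values are hypothetical. Each helper prepares the statement once and returns a closure that can be called repeatedly with bindings.

```rust
use sqlez::connection::Connection;

fn typed_helpers_sketch(connection: &Connection) -> anyhow::Result<()> {
    let mut insert = connection
        .exec_bound::<(String, usize)>("INSERT INTO notes(body, stars) VALUES (?, ?)")?;
    insert(("hello".to_string(), 5))?;
    insert(("world".to_string(), 3))?;

    // Fetch at most one row, decoded as a tuple.
    let starred: Option<(String, usize)> = connection
        .select_row_bound::<usize, (String, usize)>(
            "SELECT body, stars FROM notes WHERE stars >= ?",
        )?(4)?;
    println!("{starred:?}");

    Ok(())
}
```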

crates/sqlez/src/util.rs 🔗

@@ -0,0 +1,32 @@
+use std::ops::Deref;
+use std::sync::mpsc::Sender;
+
+use parking_lot::Mutex;
+use thread_local::ThreadLocal;
+
+/// Unbounded standard library sender which is stored per thread to work around
+/// the lack of Sync on the standard library version while still being unbounded.
+/// Note: this locks on the cloneable sender, but it's done once per thread, so it
+/// shouldn't result in too much contention.
+pub struct UnboundedSyncSender<T: Send> {
+    clonable_sender: Mutex<Sender<T>>,
+    local_senders: ThreadLocal<Sender<T>>,
+}
+
+impl<T: Send> UnboundedSyncSender<T> {
+    pub fn new(sender: Sender<T>) -> Self {
+        Self {
+            clonable_sender: Mutex::new(sender),
+            local_senders: ThreadLocal::new(),
+        }
+    }
+}
+
+impl<T: Send> Deref for UnboundedSyncSender<T> {
+    type Target = Sender<T>;
+
+    fn deref(&self) -> &Self::Target {
+        self.local_senders
+            .get_or(|| self.clonable_sender.lock().clone())
+    }
+}

crates/sqlez_macros/Cargo.toml 🔗

@@ -0,0 +1,17 @@
+[package]
+name = "sqlez_macros"
+version = "0.1.0"
+edition = "2021"
+
+[lib]
+path = "src/sqlez_macros.rs"
+proc-macro = true
+doctest = false
+
+[dependencies]
+syn = "1.0"
+quote = "1.0"
+proc-macro2 = "1.0"
+lazy_static = "1.4"
+sqlez = { path = "../sqlez" }
+sqlformat = "0.2"

crates/sqlez_macros/src/sqlez_macros.rs 🔗

@@ -0,0 +1,93 @@
+use proc_macro::{Delimiter, Span, TokenStream, TokenTree};
+use sqlez::thread_safe_connection::{locking_queue, ThreadSafeConnection};
+use syn::Error;
+
+lazy_static::lazy_static! {
+    static ref SQLITE: ThreadSafeConnection =  {
+        ThreadSafeConnection::new(":memory:", false, None, Some(locking_queue()))
+    };
+}
+
+#[proc_macro]
+pub fn sql(tokens: TokenStream) -> TokenStream {
+    let (spans, sql) = make_sql(tokens);
+
+    let error = SQLITE.sql_has_syntax_error(sql.trim());
+    let formatted_sql = sqlformat::format(&sql, &sqlformat::QueryParams::None, Default::default());
+
+    if let Some((error, error_offset)) = error {
+        create_error(spans, error_offset, error, &formatted_sql)
+    } else {
+        format!("r#\"{}\"#", &formatted_sql).parse().unwrap()
+    }
+}
+
+fn create_error(
+    spans: Vec<(usize, Span)>,
+    error_offset: usize,
+    error: String,
+    formatted_sql: &String,
+) -> TokenStream {
+    let error_span = spans
+        .into_iter()
+        .skip_while(|(offset, _)| offset <= &error_offset)
+        .map(|(_, span)| span)
+        .next()
+        .unwrap_or(Span::call_site());
+    let error_text = format!("Sql Error: {}\nFor Query: {}", error, formatted_sql);
+    TokenStream::from(Error::new(error_span.into(), error_text).into_compile_error())
+}
+
+fn make_sql(tokens: TokenStream) -> (Vec<(usize, Span)>, String) {
+    let mut sql_tokens = vec![];
+    flatten_stream(tokens.clone(), &mut sql_tokens);
+    // Lookup of spans by offset at the end of the token
+    let mut spans: Vec<(usize, Span)> = Vec::new();
+    let mut sql = String::new();
+    for (token_text, span) in sql_tokens {
+        sql.push_str(&token_text);
+        spans.push((sql.len(), span));
+    }
+    (spans, sql)
+}
+
+/// This function exists to normalize the representation of groups
+/// so that they always include spaces between tokens (which is why we don't use the usual `.to_string()`).
+/// This lets the span lookup by byte offset in `create_error` resolve the
+/// ambiguity of '(tokens)' vs. '( token )', since sqlite reports errors as byte offsets.
+fn flatten_stream(tokens: TokenStream, result: &mut Vec<(String, Span)>) {
+    for token_tree in tokens.into_iter() {
+        match token_tree {
+            TokenTree::Group(group) => {
+                // push open delimiter
+                result.push((open_delimiter(group.delimiter()), group.span()));
+                // recurse
+                flatten_stream(group.stream(), result);
+                // push close delimiter
+                result.push((close_delimiter(group.delimiter()), group.span()));
+            }
+            TokenTree::Ident(ident) => {
+                result.push((format!("{} ", ident.to_string()), ident.span()));
+            }
+            leaf_tree => result.push((leaf_tree.to_string(), leaf_tree.span())),
+        }
+    }
+}
+
+fn open_delimiter(delimiter: Delimiter) -> String {
+    match delimiter {
+        Delimiter::Parenthesis => "( ".to_string(),
+        Delimiter::Brace => "{ ".to_string(),
+        Delimiter::Bracket => "[ ".to_string(),
+        Delimiter::None => "".to_string(),
+    }
+}
+
+fn close_delimiter(delimiter: Delimiter) -> String {
+    match delimiter {
+        Delimiter::Parenthesis => " ) ".to_string(),
+        Delimiter::Brace => " } ".to_string(),
+        Delimiter::Bracket => " ] ".to_string(),
+        Delimiter::None => "".to_string(),
+    }
+}

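The macro above does its SQL validation at compile time: make_sql flattens the input token stream into one query string while recording a span per byte offset, sql_has_syntax_error runs that string against a shared in-memory SQLite connection, and the macro either expands to a sqlformat-formatted raw string literal or emits a compile error spanned to the token nearest the reported offset. A minimal usage sketch, with a hypothetical table and column (not taken from this diff), assuming the macro is imported straight from the sqlez_macros crate:

    use sqlez_macros::sql;

    // Expands to a &'static str holding the formatted query; a typo such as SELEC
    // would fail the build with a "Sql Error: ..." diagnostic on that token.
    const LOAD_CWD: &str = sql!(SELECT working_directory FROM terminals WHERE item_id = ?);
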
crates/terminal/Cargo.toml 🔗

@@ -7,16 +7,13 @@ edition = "2021"
 path = "src/terminal.rs"
 doctest = false
 
+
 [dependencies]
-context_menu = { path = "../context_menu" }
-editor = { path = "../editor" }
-language = { path = "../language" }
 gpui = { path = "../gpui" }
-project = { path = "../project" }
 settings = { path = "../settings" }
+db = { path = "../db" }
 theme = { path = "../theme" }
 util = { path = "../util" }
-workspace = { path = "../workspace" }
 alacritty_terminal = { git = "https://github.com/zed-industries/alacritty", rev = "a51dbe25d67e84d6ed4261e640d3954fbdd9be45" }
 procinfo = { git = "https://github.com/zed-industries/wezterm", rev = "5cd757e5f2eb039ed0c6bb6512223e69d5efc64d", default-features = false }
 smallvec = { version = "1.6", features = ["union"] }
@@ -33,11 +30,5 @@ thiserror = "1.0"
 lazy_static = "1.4.0"
 serde = { version = "1.0", features = ["derive"] }
 
-
-
 [dev-dependencies]
-gpui = { path = "../gpui", features = ["test-support"] }
-client = { path = "../client", features = ["test-support"]}
-project = { path = "../project", features = ["test-support"]}
-workspace = { path = "../workspace", features = ["test-support"] }
 rand = "0.8.5"

crates/terminal/src/terminal.rs 🔗

@@ -1,7 +1,5 @@
 pub mod mappings;
-pub mod terminal_container_view;
-pub mod terminal_element;
-pub mod terminal_view;
+pub use alacritty_terminal;
 
 use alacritty_terminal::{
     ansi::{ClearMode, Handler},
@@ -54,7 +52,7 @@ use gpui::{
     geometry::vector::{vec2f, Vector2F},
     keymap::Keystroke,
     scene::{MouseDown, MouseDrag, MouseScrollWheel, MouseUp},
-    ClipboardItem, Entity, ModelContext, MouseButton, MouseMovedEvent, MutableAppContext, Task,
+    ClipboardItem, Entity, ModelContext, MouseButton, MouseMovedEvent, Task,
 };
 
 use crate::mappings::{
@@ -63,12 +61,6 @@ use crate::mappings::{
 };
 use lazy_static::lazy_static;
 
-///Initialize and register all of our action handlers
-pub fn init(cx: &mut MutableAppContext) {
-    terminal_view::init(cx);
-    terminal_container_view::init(cx);
-}
-
 ///Scrolling is unbearably sluggish by default. Alacritty supports a configurable
 ///Scroll multiplier that is set to 3 by default. This will be removed when I
 ///Implement scroll bars.
@@ -124,10 +116,10 @@ impl EventListener for ZedListener {
 
 #[derive(Clone, Copy, Debug)]
 pub struct TerminalSize {
-    cell_width: f32,
-    line_height: f32,
-    height: f32,
-    width: f32,
+    pub cell_width: f32,
+    pub line_height: f32,
+    pub height: f32,
+    pub width: f32,
 }
 
 impl TerminalSize {
@@ -206,7 +198,7 @@ impl Dimensions for TerminalSize {
 #[derive(Error, Debug)]
 pub struct TerminalError {
     pub directory: Option<PathBuf>,
-    pub shell: Option<Shell>,
+    pub shell: Shell,
     pub source: std::io::Error,
 }
 
@@ -234,24 +226,20 @@ impl TerminalError {
             })
     }
 
-    pub fn shell_to_string(&self) -> Option<String> {
-        self.shell.as_ref().map(|shell| match shell {
+    pub fn shell_to_string(&self) -> String {
+        match &self.shell {
             Shell::System => "<system shell>".to_string(),
             Shell::Program(p) => p.to_string(),
             Shell::WithArguments { program, args } => format!("{} {}", program, args.join(" ")),
-        })
+        }
     }
 
     pub fn fmt_shell(&self) -> String {
-        self.shell
-            .clone()
-            .map(|shell| match shell {
-                Shell::System => "<system defined shell>".to_string(),
-
-                Shell::Program(s) => s,
-                Shell::WithArguments { program, args } => format!("{} {}", program, args.join(" ")),
-            })
-            .unwrap_or_else(|| "<none specified, using system defined shell>".to_string())
+        match &self.shell {
+            Shell::System => "<system defined shell>".to_string(),
+            Shell::Program(s) => s.to_string(),
+            Shell::WithArguments { program, args } => format!("{} {}", program, args.join(" ")),
+        }
     }
 }
 
@@ -276,18 +264,18 @@ pub struct TerminalBuilder {
 impl TerminalBuilder {
     pub fn new(
         working_directory: Option<PathBuf>,
-        shell: Option<Shell>,
-        env: Option<HashMap<String, String>>,
+        shell: Shell,
+        mut env: HashMap<String, String>,
         blink_settings: Option<TerminalBlink>,
-        alternate_scroll: &AlternateScroll,
+        alternate_scroll: AlternateScroll,
         window_id: usize,
     ) -> Result<TerminalBuilder> {
         let pty_config = {
-            let alac_shell = shell.clone().and_then(|shell| match shell {
+            let alac_shell = match shell.clone() {
                 Shell::System => None,
                 Shell::Program(program) => Some(Program::Just(program)),
                 Shell::WithArguments { program, args } => Some(Program::WithArgs { program, args }),
-            });
+            };
 
             PtyConfig {
                 shell: alac_shell,
@@ -296,10 +284,9 @@ impl TerminalBuilder {
             }
         };
 
-        let mut env = env.unwrap_or_default();
-
         //TODO: Properly set the current locale,
         env.insert("LC_ALL".to_string(), "en_US.UTF-8".to_string());
+        env.insert("ZED_TERM".to_string(), true.to_string());
 
         let alac_scrolling = Scrolling::default();
         // alac_scrolling.set_history((BACK_BUFFER_SIZE * 2) as u32);
@@ -456,9 +443,9 @@ impl TerminalBuilder {
 }
 
 #[derive(Debug, Clone)]
-struct IndexedCell {
-    point: Point,
-    cell: Cell,
+pub struct IndexedCell {
+    pub point: Point,
+    pub cell: Cell,
 }
 
 impl Deref for IndexedCell {
@@ -470,17 +457,18 @@ impl Deref for IndexedCell {
     }
 }
 
+// TODO: Un-pub
 #[derive(Clone)]
 pub struct TerminalContent {
-    cells: Vec<IndexedCell>,
-    mode: TermMode,
-    display_offset: usize,
-    selection_text: Option<String>,
-    selection: Option<SelectionRange>,
-    cursor: RenderableCursor,
-    cursor_char: char,
-    size: TerminalSize,
-    last_hovered_hyperlink: Option<(String, RangeInclusive<Point>, usize)>,
+    pub cells: Vec<IndexedCell>,
+    pub mode: TermMode,
+    pub display_offset: usize,
+    pub selection_text: Option<String>,
+    pub selection: Option<SelectionRange>,
+    pub cursor: RenderableCursor,
+    pub cursor_char: char,
+    pub size: TerminalSize,
+    pub last_hovered_hyperlink: Option<(String, RangeInclusive<Point>, usize)>,
 }
 
 impl Default for TerminalContent {
@@ -517,14 +505,14 @@ pub struct Terminal {
     /// This is only used for terminal hyperlink checking
     last_mouse_position: Option<Vector2F>,
     pub matches: Vec<RangeInclusive<Point>>,
-    last_content: TerminalContent,
+    pub last_content: TerminalContent,
     last_synced: Instant,
     sync_task: Option<Task<()>>,
-    selection_head: Option<Point>,
-    breadcrumb_text: String,
+    pub selection_head: Option<Point>,
+    pub breadcrumb_text: String,
     shell_pid: u32,
     shell_fd: u32,
-    foreground_process_info: Option<LocalProcessInfo>,
+    pub foreground_process_info: Option<LocalProcessInfo>,
     scroll_px: f32,
     next_link_id: usize,
     selection_phase: SelectionPhase,
@@ -567,7 +555,7 @@ impl Terminal {
                 cx.emit(Event::Wakeup);
 
                 if self.update_process_info() {
-                    cx.emit(Event::TitleChanged)
+                    cx.emit(Event::TitleChanged);
                 }
             }
             AlacTermEvent::ColorRequest(idx, fun_ptr) => {
@@ -875,10 +863,6 @@ impl Terminal {
             return;
         };
 
-        if self.update_process_info() {
-            cx.emit(Event::TitleChanged);
-        }
-
         //Note that the ordering of events matters for event processing
         while let Some(e) = self.events.pop_front() {
             self.process_terminal_event(&e, &mut terminal, cx)
@@ -1176,25 +1160,11 @@ impl Terminal {
 
     pub fn find_matches(
         &mut self,
-        query: project::search::SearchQuery,
+        searcher: RegexSearch,
         cx: &mut ModelContext<Self>,
     ) -> Task<Vec<RangeInclusive<Point>>> {
         let term = self.term.clone();
         cx.background().spawn(async move {
-            let searcher = match query {
-                project::search::SearchQuery::Text { query, .. } => {
-                    RegexSearch::new(query.as_ref())
-                }
-                project::search::SearchQuery::Regex { query, .. } => {
-                    RegexSearch::new(query.as_ref())
-                }
-            };
-
-            if searcher.is_err() {
-                return Vec::new();
-            }
-            let searcher = searcher.unwrap();
-
             let term = term.lock();
 
             all_search_matches(&term, &searcher).collect()
@@ -1291,14 +1261,14 @@ fn open_uri(uri: &str) -> Result<(), std::io::Error> {
 
 #[cfg(test)]
 mod tests {
+    use alacritty_terminal::{
+        index::{Column, Line, Point},
+        term::cell::Cell,
+    };
     use gpui::geometry::vector::vec2f;
-    use rand::{thread_rng, Rng};
-
-    use crate::content_index_for_mouse;
+    use rand::{rngs::ThreadRng, thread_rng, Rng};
 
-    use self::terminal_test_context::TerminalTestContext;
-
-    pub mod terminal_test_context;
+    use crate::{content_index_for_mouse, IndexedCell, TerminalContent, TerminalSize};
 
     #[test]
     fn test_mouse_to_cell() {
@@ -1315,7 +1285,7 @@ mod tests {
                 width: cell_size * (viewport_cells as f32),
             };
 
-            let (content, cells) = TerminalTestContext::create_terminal_content(size, &mut rng);
+            let (content, cells) = create_terminal_content(size, &mut rng);
 
             for i in 0..(viewport_cells - 1) {
                 let i = i as usize;
@@ -1351,7 +1321,7 @@ mod tests {
             width: 100.,
         };
 
-        let (content, cells) = TerminalTestContext::create_terminal_content(size, &mut rng);
+        let (content, cells) = create_terminal_content(size, &mut rng);
 
         assert_eq!(
             content.cells[content_index_for_mouse(vec2f(-10., -10.), &content)].c,
@@ -1362,4 +1332,37 @@ mod tests {
             cells[9][9]
         );
     }
+
+    fn create_terminal_content(
+        size: TerminalSize,
+        rng: &mut ThreadRng,
+    ) -> (TerminalContent, Vec<Vec<char>>) {
+        let mut ic = Vec::new();
+        let mut cells = Vec::new();
+
+        for row in 0..((size.height() / size.line_height()) as usize) {
+            let mut row_vec = Vec::new();
+            for col in 0..((size.width() / size.cell_width()) as usize) {
+                let cell_char = rng.gen();
+                ic.push(IndexedCell {
+                    point: Point::new(Line(row as i32), Column(col)),
+                    cell: Cell {
+                        c: cell_char,
+                        ..Default::default()
+                    },
+                });
+                row_vec.push(cell_char)
+            }
+            cells.push(row_vec)
+        }
+
+        (
+            TerminalContent {
+                cells: ic,
+                size,
+                ..Default::default()
+            },
+            cells,
+        )
+    }
 }

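Two caller-facing changes in this file are worth noting: TerminalBuilder::new now takes the shell, environment map, and alternate-scroll setting by value (and always injects ZED_TERM=true into the child environment), and Terminal::find_matches accepts a prebuilt RegexSearch instead of a project SearchQuery, so regex compilation and invalid-pattern handling move to the caller. A hypothetical helper, not part of this diff, sketching the new calling convention:

    use std::ops::RangeInclusive;

    use gpui::{ModelHandle, MutableAppContext, Task};
    use terminal::alacritty_terminal::{index::Point, term::search::RegexSearch};
    use terminal::Terminal;

    // Hypothetical caller-side helper: compile the pattern up front so an invalid
    // regex never reaches the terminal model.
    fn find_in_terminal(
        terminal: &ModelHandle<Terminal>,
        query_text: &str,
        cx: &mut MutableAppContext,
    ) -> Task<Vec<RangeInclusive<Point>>> {
        match RegexSearch::new(query_text) {
            Ok(searcher) => terminal.update(cx, |term, cx| term.find_matches(searcher, cx)),
            Err(_) => Task::ready(Vec::new()),
        }
    }
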
crates/terminal/src/terminal_container_view.rs 🔗

@@ -1,676 +0,0 @@
-use crate::terminal_view::TerminalView;
-use crate::{Event, Terminal, TerminalBuilder, TerminalError};
-
-use alacritty_terminal::index::Point;
-use dirs::home_dir;
-use gpui::{
-    actions, elements::*, AnyViewHandle, AppContext, Entity, ModelHandle, MutableAppContext, Task,
-    View, ViewContext, ViewHandle,
-};
-use util::truncate_and_trailoff;
-use workspace::searchable::{SearchEvent, SearchOptions, SearchableItem, SearchableItemHandle};
-use workspace::{Item, ItemEvent, ToolbarItemLocation, Workspace};
-
-use project::{LocalWorktree, Project, ProjectPath};
-use settings::{AlternateScroll, Settings, WorkingDirectory};
-use smallvec::SmallVec;
-use std::ops::RangeInclusive;
-use std::path::{Path, PathBuf};
-
-use crate::terminal_element::TerminalElement;
-
-actions!(terminal, [DeployModal]);
-
-pub fn init(cx: &mut MutableAppContext) {
-    cx.add_action(TerminalContainer::deploy);
-}
-
-//Make terminal view an enum, that can give you views for the error and non-error states
-//Take away all the result unwrapping in the current TerminalView by making it 'infallible'
-//Bubble up to deploy(_modal)() calls
-
-pub enum TerminalContainerContent {
-    Connected(ViewHandle<TerminalView>),
-    Error(ViewHandle<ErrorView>),
-}
-
-impl TerminalContainerContent {
-    fn handle(&self) -> AnyViewHandle {
-        match self {
-            Self::Connected(handle) => handle.into(),
-            Self::Error(handle) => handle.into(),
-        }
-    }
-}
-
-pub struct TerminalContainer {
-    pub content: TerminalContainerContent,
-    associated_directory: Option<PathBuf>,
-}
-
-pub struct ErrorView {
-    error: TerminalError,
-}
-
-impl Entity for TerminalContainer {
-    type Event = Event;
-}
-
-impl Entity for ErrorView {
-    type Event = Event;
-}
-
-impl TerminalContainer {
-    ///Create a new Terminal in the current working directory or the user's home directory
-    pub fn deploy(
-        workspace: &mut Workspace,
-        _: &workspace::NewTerminal,
-        cx: &mut ViewContext<Workspace>,
-    ) {
-        let strategy = cx
-            .global::<Settings>()
-            .terminal_overrides
-            .working_directory
-            .clone()
-            .unwrap_or(WorkingDirectory::CurrentProjectDirectory);
-
-        let working_directory = get_working_directory(workspace, cx, strategy);
-        let view = cx.add_view(|cx| TerminalContainer::new(working_directory, false, cx));
-        workspace.add_item(Box::new(view), cx);
-    }
-
-    ///Create a new Terminal view. This spawns a task, a thread, and opens the TTY devices    
-    pub fn new(
-        working_directory: Option<PathBuf>,
-        modal: bool,
-        cx: &mut ViewContext<Self>,
-    ) -> Self {
-        let settings = cx.global::<Settings>();
-        let shell = settings.terminal_overrides.shell.clone();
-        let envs = settings.terminal_overrides.env.clone(); //Should be short and cheap.
-
-        //TODO: move this pattern to settings
-        let scroll = settings
-            .terminal_overrides
-            .alternate_scroll
-            .as_ref()
-            .unwrap_or(
-                settings
-                    .terminal_defaults
-                    .alternate_scroll
-                    .as_ref()
-                    .unwrap_or_else(|| &AlternateScroll::On),
-            );
-
-        let content = match TerminalBuilder::new(
-            working_directory.clone(),
-            shell,
-            envs,
-            settings.terminal_overrides.blinking.clone(),
-            scroll,
-            cx.window_id(),
-        ) {
-            Ok(terminal) => {
-                let terminal = cx.add_model(|cx| terminal.subscribe(cx));
-                let view = cx.add_view(|cx| TerminalView::from_terminal(terminal, modal, cx));
-                cx.subscribe(&view, |_this, _content, event, cx| cx.emit(*event))
-                    .detach();
-                TerminalContainerContent::Connected(view)
-            }
-            Err(error) => {
-                let view = cx.add_view(|_| ErrorView {
-                    error: error.downcast::<TerminalError>().unwrap(),
-                });
-                TerminalContainerContent::Error(view)
-            }
-        };
-        cx.focus(content.handle());
-
-        TerminalContainer {
-            content,
-            associated_directory: working_directory,
-        }
-    }
-
-    pub fn from_terminal(
-        terminal: ModelHandle<Terminal>,
-        modal: bool,
-        cx: &mut ViewContext<Self>,
-    ) -> Self {
-        let connected_view = cx.add_view(|cx| TerminalView::from_terminal(terminal, modal, cx));
-        TerminalContainer {
-            content: TerminalContainerContent::Connected(connected_view),
-            associated_directory: None,
-        }
-    }
-
-    fn connected(&self) -> Option<ViewHandle<TerminalView>> {
-        match &self.content {
-            TerminalContainerContent::Connected(vh) => Some(vh.clone()),
-            TerminalContainerContent::Error(_) => None,
-        }
-    }
-}
-
-impl View for TerminalContainer {
-    fn ui_name() -> &'static str {
-        "Terminal"
-    }
-
-    fn render(&mut self, cx: &mut gpui::RenderContext<'_, Self>) -> ElementBox {
-        match &self.content {
-            TerminalContainerContent::Connected(connected) => ChildView::new(connected, cx),
-            TerminalContainerContent::Error(error) => ChildView::new(error, cx),
-        }
-        .boxed()
-    }
-
-    fn focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
-        if cx.is_self_focused() {
-            cx.focus(self.content.handle());
-        }
-    }
-}
-
-impl View for ErrorView {
-    fn ui_name() -> &'static str {
-        "Terminal Error"
-    }
-
-    fn render(&mut self, cx: &mut gpui::RenderContext<'_, Self>) -> ElementBox {
-        let settings = cx.global::<Settings>();
-        let style = TerminalElement::make_text_style(cx.font_cache(), settings);
-
-        //TODO:
-        //We want markdown style highlighting so we can format the program and working directory with ``
-        //We want a max-width of 75% with word-wrap
-        //We want to be able to select the text
-        //Want to be able to scroll if the error message is massive somehow (resiliency)
-
-        let program_text = {
-            match self.error.shell_to_string() {
-                Some(shell_txt) => format!("Shell Program: `{}`", shell_txt),
-                None => "No program specified".to_string(),
-            }
-        };
-
-        let directory_text = {
-            match self.error.directory.as_ref() {
-                Some(path) => format!("Working directory: `{}`", path.to_string_lossy()),
-                None => "No working directory specified".to_string(),
-            }
-        };
-
-        let error_text = self.error.source.to_string();
-
-        Flex::column()
-            .with_child(
-                Text::new("Failed to open the terminal.".to_string(), style.clone())
-                    .contained()
-                    .boxed(),
-            )
-            .with_child(Text::new(program_text, style.clone()).contained().boxed())
-            .with_child(Text::new(directory_text, style.clone()).contained().boxed())
-            .with_child(Text::new(error_text, style).contained().boxed())
-            .aligned()
-            .boxed()
-    }
-}
-
-impl Item for TerminalContainer {
-    fn tab_content(
-        &self,
-        _detail: Option<usize>,
-        tab_theme: &theme::Tab,
-        cx: &gpui::AppContext,
-    ) -> ElementBox {
-        let title = match &self.content {
-            TerminalContainerContent::Connected(connected) => connected
-                .read(cx)
-                .handle()
-                .read(cx)
-                .foreground_process_info
-                .as_ref()
-                .map(|fpi| {
-                    format!(
-                        "{} — {}",
-                        truncate_and_trailoff(
-                            &fpi.cwd
-                                .file_name()
-                                .map(|name| name.to_string_lossy().to_string())
-                                .unwrap_or_default(),
-                            25
-                        ),
-                        truncate_and_trailoff(
-                            &{
-                                format!(
-                                    "{}{}",
-                                    fpi.name,
-                                    if fpi.argv.len() >= 1 {
-                                        format!(" {}", (&fpi.argv[1..]).join(" "))
-                                    } else {
-                                        "".to_string()
-                                    }
-                                )
-                            },
-                            25
-                        )
-                    )
-                })
-                .unwrap_or_else(|| "Terminal".to_string()),
-            TerminalContainerContent::Error(_) => "Terminal".to_string(),
-        };
-
-        Flex::row()
-            .with_child(
-                Label::new(title, tab_theme.label.clone())
-                    .aligned()
-                    .contained()
-                    .boxed(),
-            )
-            .boxed()
-    }
-
-    fn clone_on_split(&self, cx: &mut ViewContext<Self>) -> Option<Self> {
-        //From what I can tell, there's no  way to tell the current working
-        //Directory of the terminal from outside the shell. There might be
-        //solutions to this, but they are non-trivial and require more IPC
-        Some(TerminalContainer::new(
-            self.associated_directory.clone(),
-            false,
-            cx,
-        ))
-    }
-
-    fn project_path(&self, _cx: &gpui::AppContext) -> Option<ProjectPath> {
-        None
-    }
-
-    fn project_entry_ids(&self, _cx: &gpui::AppContext) -> SmallVec<[project::ProjectEntryId; 3]> {
-        SmallVec::new()
-    }
-
-    fn is_singleton(&self, _cx: &gpui::AppContext) -> bool {
-        false
-    }
-
-    fn set_nav_history(&mut self, _: workspace::ItemNavHistory, _: &mut ViewContext<Self>) {}
-
-    fn can_save(&self, _cx: &gpui::AppContext) -> bool {
-        false
-    }
-
-    fn save(
-        &mut self,
-        _project: gpui::ModelHandle<Project>,
-        _cx: &mut ViewContext<Self>,
-    ) -> gpui::Task<gpui::anyhow::Result<()>> {
-        unreachable!("save should not have been called");
-    }
-
-    fn save_as(
-        &mut self,
-        _project: gpui::ModelHandle<Project>,
-        _abs_path: std::path::PathBuf,
-        _cx: &mut ViewContext<Self>,
-    ) -> gpui::Task<gpui::anyhow::Result<()>> {
-        unreachable!("save_as should not have been called");
-    }
-
-    fn reload(
-        &mut self,
-        _project: gpui::ModelHandle<Project>,
-        _cx: &mut ViewContext<Self>,
-    ) -> gpui::Task<gpui::anyhow::Result<()>> {
-        gpui::Task::ready(Ok(()))
-    }
-
-    fn is_dirty(&self, cx: &gpui::AppContext) -> bool {
-        if let TerminalContainerContent::Connected(connected) = &self.content {
-            connected.read(cx).has_bell()
-        } else {
-            false
-        }
-    }
-
-    fn has_conflict(&self, _cx: &AppContext) -> bool {
-        false
-    }
-
-    fn as_searchable(&self, handle: &ViewHandle<Self>) -> Option<Box<dyn SearchableItemHandle>> {
-        Some(Box::new(handle.clone()))
-    }
-
-    fn to_item_events(event: &Self::Event) -> Vec<ItemEvent> {
-        match event {
-            Event::BreadcrumbsChanged => vec![ItemEvent::UpdateBreadcrumbs],
-            Event::TitleChanged | Event::Wakeup => vec![ItemEvent::UpdateTab],
-            Event::CloseTerminal => vec![ItemEvent::CloseItem],
-            _ => vec![],
-        }
-    }
-
-    fn breadcrumb_location(&self) -> ToolbarItemLocation {
-        if self.connected().is_some() {
-            ToolbarItemLocation::PrimaryLeft { flex: None }
-        } else {
-            ToolbarItemLocation::Hidden
-        }
-    }
-
-    fn breadcrumbs(&self, theme: &theme::Theme, cx: &AppContext) -> Option<Vec<ElementBox>> {
-        let connected = self.connected()?;
-
-        Some(vec![Text::new(
-            connected
-                .read(cx)
-                .terminal()
-                .read(cx)
-                .breadcrumb_text
-                .to_string(),
-            theme.breadcrumbs.text.clone(),
-        )
-        .boxed()])
-    }
-}
-
-impl SearchableItem for TerminalContainer {
-    type Match = RangeInclusive<Point>;
-
-    fn supported_options() -> SearchOptions {
-        SearchOptions {
-            case: false,
-            word: false,
-            regex: false,
-        }
-    }
-
-    /// Convert events raised by this item into search-relevant events (if applicable)
-    fn to_search_event(event: &Self::Event) -> Option<SearchEvent> {
-        match event {
-            Event::Wakeup => Some(SearchEvent::MatchesInvalidated),
-            Event::SelectionsChanged => Some(SearchEvent::ActiveMatchChanged),
-            _ => None,
-        }
-    }
-
-    /// Clear stored matches
-    fn clear_matches(&mut self, cx: &mut ViewContext<Self>) {
-        if let TerminalContainerContent::Connected(connected) = &self.content {
-            let terminal = connected.read(cx).terminal().clone();
-            terminal.update(cx, |term, _| term.matches.clear())
-        }
-    }
-
-    /// Store matches returned from find_matches somewhere for rendering
-    fn update_matches(&mut self, matches: Vec<Self::Match>, cx: &mut ViewContext<Self>) {
-        if let TerminalContainerContent::Connected(connected) = &self.content {
-            let terminal = connected.read(cx).terminal().clone();
-            terminal.update(cx, |term, _| term.matches = matches)
-        }
-    }
-
-    /// Return the selection content to pre-load into this search
-    fn query_suggestion(&mut self, cx: &mut ViewContext<Self>) -> String {
-        if let TerminalContainerContent::Connected(connected) = &self.content {
-            let terminal = connected.read(cx).terminal().clone();
-            terminal
-                .read(cx)
-                .last_content
-                .selection_text
-                .clone()
-                .unwrap_or_default()
-        } else {
-            Default::default()
-        }
-    }
-
-    /// Focus match at given index into the Vec of matches
-    fn activate_match(&mut self, index: usize, _: Vec<Self::Match>, cx: &mut ViewContext<Self>) {
-        if let TerminalContainerContent::Connected(connected) = &self.content {
-            let terminal = connected.read(cx).terminal().clone();
-            terminal.update(cx, |term, _| term.activate_match(index));
-            cx.notify();
-        }
-    }
-
-    /// Get all of the matches for this query, should be done on the background
-    fn find_matches(
-        &mut self,
-        query: project::search::SearchQuery,
-        cx: &mut ViewContext<Self>,
-    ) -> Task<Vec<Self::Match>> {
-        if let TerminalContainerContent::Connected(connected) = &self.content {
-            let terminal = connected.read(cx).terminal().clone();
-            terminal.update(cx, |term, cx| term.find_matches(query, cx))
-        } else {
-            Task::ready(Vec::new())
-        }
-    }
-
-    /// Reports back to the search toolbar what the active match should be (the selection)
-    fn active_match_index(
-        &mut self,
-        matches: Vec<Self::Match>,
-        cx: &mut ViewContext<Self>,
-    ) -> Option<usize> {
-        let connected = self.connected();
-        // Selection head might have a value if there's a selection that isn't
-        // associated with a match. Therefore, if there are no matches, we should
-        // report None, no matter the state of the terminal
-        let res = if matches.len() > 0 && connected.is_some() {
-            if let Some(selection_head) = connected
-                .unwrap()
-                .read(cx)
-                .terminal()
-                .read(cx)
-                .selection_head
-            {
-                // If selection head is contained in a match. Return that match
-                if let Some(ix) = matches
-                    .iter()
-                    .enumerate()
-                    .find(|(_, search_match)| {
-                        search_match.contains(&selection_head)
-                            || search_match.start() > &selection_head
-                    })
-                    .map(|(ix, _)| ix)
-                {
-                    Some(ix)
-                } else {
-                    // If no selection after selection head, return the last match
-                    Some(matches.len().saturating_sub(1))
-                }
-            } else {
-                // Matches found but no active selection, return the first last one (closest to cursor)
-                Some(matches.len().saturating_sub(1))
-            }
-        } else {
-            None
-        };
-
-        res
-    }
-}
-
-///Get's the working directory for the given workspace, respecting the user's settings.
-pub fn get_working_directory(
-    workspace: &Workspace,
-    cx: &AppContext,
-    strategy: WorkingDirectory,
-) -> Option<PathBuf> {
-    let res = match strategy {
-        WorkingDirectory::CurrentProjectDirectory => current_project_directory(workspace, cx)
-            .or_else(|| first_project_directory(workspace, cx)),
-        WorkingDirectory::FirstProjectDirectory => first_project_directory(workspace, cx),
-        WorkingDirectory::AlwaysHome => None,
-        WorkingDirectory::Always { directory } => {
-            shellexpand::full(&directory) //TODO handle this better
-                .ok()
-                .map(|dir| Path::new(&dir.to_string()).to_path_buf())
-                .filter(|dir| dir.is_dir())
-        }
-    };
-    res.or_else(home_dir)
-}
-
-///Get's the first project's home directory, or the home directory
-fn first_project_directory(workspace: &Workspace, cx: &AppContext) -> Option<PathBuf> {
-    workspace
-        .worktrees(cx)
-        .next()
-        .and_then(|worktree_handle| worktree_handle.read(cx).as_local())
-        .and_then(get_path_from_wt)
-}
-
-///Gets the intuitively correct working directory from the given workspace
-///If there is an active entry for this project, returns that entry's worktree root.
-///If there's no active entry but there is a worktree, returns that worktrees root.
-///If either of these roots are files, or if there are any other query failures,
-///  returns the user's home directory
-fn current_project_directory(workspace: &Workspace, cx: &AppContext) -> Option<PathBuf> {
-    let project = workspace.project().read(cx);
-
-    project
-        .active_entry()
-        .and_then(|entry_id| project.worktree_for_entry(entry_id, cx))
-        .or_else(|| workspace.worktrees(cx).next())
-        .and_then(|worktree_handle| worktree_handle.read(cx).as_local())
-        .and_then(get_path_from_wt)
-}
-
-fn get_path_from_wt(wt: &LocalWorktree) -> Option<PathBuf> {
-    wt.root_entry()
-        .filter(|re| re.is_dir())
-        .map(|_| wt.abs_path().to_path_buf())
-}
-
-#[cfg(test)]
-mod tests {
-
-    use super::*;
-    use gpui::TestAppContext;
-
-    use std::path::Path;
-
-    use crate::tests::terminal_test_context::TerminalTestContext;
-
-    ///Working directory calculation tests
-
-    ///No Worktrees in project -> home_dir()
-    #[gpui::test]
-    async fn no_worktree(cx: &mut TestAppContext) {
-        //Setup variables
-        let mut cx = TerminalTestContext::new(cx);
-        let (project, workspace) = cx.blank_workspace().await;
-        //Test
-        cx.cx.read(|cx| {
-            let workspace = workspace.read(cx);
-            let active_entry = project.read(cx).active_entry();
-
-            //Make sure enviroment is as expeted
-            assert!(active_entry.is_none());
-            assert!(workspace.worktrees(cx).next().is_none());
-
-            let res = current_project_directory(workspace, cx);
-            assert_eq!(res, None);
-            let res = first_project_directory(workspace, cx);
-            assert_eq!(res, None);
-        });
-    }
-
-    ///No active entry, but a worktree, worktree is a file -> home_dir()
-    #[gpui::test]
-    async fn no_active_entry_worktree_is_file(cx: &mut TestAppContext) {
-        //Setup variables
-
-        let mut cx = TerminalTestContext::new(cx);
-        let (project, workspace) = cx.blank_workspace().await;
-        cx.create_file_wt(project.clone(), "/root.txt").await;
-
-        cx.cx.read(|cx| {
-            let workspace = workspace.read(cx);
-            let active_entry = project.read(cx).active_entry();
-
-            //Make sure enviroment is as expeted
-            assert!(active_entry.is_none());
-            assert!(workspace.worktrees(cx).next().is_some());
-
-            let res = current_project_directory(workspace, cx);
-            assert_eq!(res, None);
-            let res = first_project_directory(workspace, cx);
-            assert_eq!(res, None);
-        });
-    }
-
-    //No active entry, but a worktree, worktree is a folder -> worktree_folder
-    #[gpui::test]
-    async fn no_active_entry_worktree_is_dir(cx: &mut TestAppContext) {
-        //Setup variables
-        let mut cx = TerminalTestContext::new(cx);
-        let (project, workspace) = cx.blank_workspace().await;
-        let (_wt, _entry) = cx.create_folder_wt(project.clone(), "/root/").await;
-
-        //Test
-        cx.cx.update(|cx| {
-            let workspace = workspace.read(cx);
-            let active_entry = project.read(cx).active_entry();
-
-            assert!(active_entry.is_none());
-            assert!(workspace.worktrees(cx).next().is_some());
-
-            let res = current_project_directory(workspace, cx);
-            assert_eq!(res, Some((Path::new("/root/")).to_path_buf()));
-            let res = first_project_directory(workspace, cx);
-            assert_eq!(res, Some((Path::new("/root/")).to_path_buf()));
-        });
-    }
-
-    //Active entry with a work tree, worktree is a file -> home_dir()
-    #[gpui::test]
-    async fn active_entry_worktree_is_file(cx: &mut TestAppContext) {
-        //Setup variables
-        let mut cx = TerminalTestContext::new(cx);
-        let (project, workspace) = cx.blank_workspace().await;
-        let (_wt, _entry) = cx.create_folder_wt(project.clone(), "/root1/").await;
-        let (wt2, entry2) = cx.create_file_wt(project.clone(), "/root2.txt").await;
-        cx.insert_active_entry_for(wt2, entry2, project.clone());
-
-        //Test
-        cx.cx.update(|cx| {
-            let workspace = workspace.read(cx);
-            let active_entry = project.read(cx).active_entry();
-
-            assert!(active_entry.is_some());
-
-            let res = current_project_directory(workspace, cx);
-            assert_eq!(res, None);
-            let res = first_project_directory(workspace, cx);
-            assert_eq!(res, Some((Path::new("/root1/")).to_path_buf()));
-        });
-    }
-
-    //Active entry, with a worktree, worktree is a folder -> worktree_folder
-    #[gpui::test]
-    async fn active_entry_worktree_is_dir(cx: &mut TestAppContext) {
-        //Setup variables
-        let mut cx = TerminalTestContext::new(cx);
-        let (project, workspace) = cx.blank_workspace().await;
-        let (_wt, _entry) = cx.create_folder_wt(project.clone(), "/root1/").await;
-        let (wt2, entry2) = cx.create_folder_wt(project.clone(), "/root2/").await;
-        cx.insert_active_entry_for(wt2, entry2, project.clone());
-
-        //Test
-        cx.cx.update(|cx| {
-            let workspace = workspace.read(cx);
-            let active_entry = project.read(cx).active_entry();
-
-            assert!(active_entry.is_some());
-
-            let res = current_project_directory(workspace, cx);
-            assert_eq!(res, Some((Path::new("/root2/")).to_path_buf()));
-            let res = first_project_directory(workspace, cx);
-            assert_eq!(res, Some((Path::new("/root1/")).to_path_buf()));
-        });
-    }
-}

crates/terminal/src/terminal_view.rs 🔗

@@ -1,471 +0,0 @@
-use std::{ops::RangeInclusive, time::Duration};
-
-use alacritty_terminal::{index::Point, term::TermMode};
-use context_menu::{ContextMenu, ContextMenuItem};
-use gpui::{
-    actions,
-    elements::{AnchorCorner, ChildView, ParentElement, Stack},
-    geometry::vector::Vector2F,
-    impl_actions, impl_internal_actions,
-    keymap::Keystroke,
-    AnyViewHandle, AppContext, Element, ElementBox, Entity, ModelHandle, MutableAppContext, Task,
-    View, ViewContext, ViewHandle,
-};
-use serde::Deserialize;
-use settings::{Settings, TerminalBlink};
-use smol::Timer;
-use util::ResultExt;
-use workspace::pane;
-
-use crate::{terminal_element::TerminalElement, Event, Terminal};
-
-const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500);
-
-///Event to transmit the scroll from the element to the view
-#[derive(Clone, Debug, PartialEq)]
-pub struct ScrollTerminal(pub i32);
-
-#[derive(Clone, PartialEq)]
-pub struct DeployContextMenu {
-    pub position: Vector2F,
-}
-
-#[derive(Clone, Default, Deserialize, PartialEq)]
-pub struct SendText(String);
-
-#[derive(Clone, Default, Deserialize, PartialEq)]
-pub struct SendKeystroke(String);
-
-actions!(
-    terminal,
-    [Clear, Copy, Paste, ShowCharacterPalette, SearchTest]
-);
-
-impl_actions!(terminal, [SendText, SendKeystroke]);
-
-impl_internal_actions!(project_panel, [DeployContextMenu]);
-
-pub fn init(cx: &mut MutableAppContext) {
-    //Useful terminal views
-    cx.add_action(TerminalView::send_text);
-    cx.add_action(TerminalView::send_keystroke);
-    cx.add_action(TerminalView::deploy_context_menu);
-    cx.add_action(TerminalView::copy);
-    cx.add_action(TerminalView::paste);
-    cx.add_action(TerminalView::clear);
-    cx.add_action(TerminalView::show_character_palette);
-}
-
-///A terminal view, maintains the PTY's file handles and communicates with the terminal
-pub struct TerminalView {
-    terminal: ModelHandle<Terminal>,
-    has_new_content: bool,
-    //Currently using iTerm bell, show bell emoji in tab until input is received
-    has_bell: bool,
-    // Only for styling purposes. Doesn't effect behavior
-    modal: bool,
-    context_menu: ViewHandle<ContextMenu>,
-    blink_state: bool,
-    blinking_on: bool,
-    blinking_paused: bool,
-    blink_epoch: usize,
-}
-
-impl Entity for TerminalView {
-    type Event = Event;
-}
-
-impl TerminalView {
-    pub fn from_terminal(
-        terminal: ModelHandle<Terminal>,
-        modal: bool,
-        cx: &mut ViewContext<Self>,
-    ) -> Self {
-        cx.observe(&terminal, |_, _, cx| cx.notify()).detach();
-        cx.subscribe(&terminal, |this, _, event, cx| match event {
-            Event::Wakeup => {
-                if !cx.is_self_focused() {
-                    this.has_new_content = true;
-                    cx.notify();
-                }
-                cx.emit(Event::Wakeup);
-            }
-            Event::Bell => {
-                this.has_bell = true;
-                cx.emit(Event::Wakeup);
-            }
-            Event::BlinkChanged => this.blinking_on = !this.blinking_on,
-            _ => cx.emit(*event),
-        })
-        .detach();
-
-        Self {
-            terminal,
-            has_new_content: true,
-            has_bell: false,
-            modal,
-            context_menu: cx.add_view(ContextMenu::new),
-            blink_state: true,
-            blinking_on: false,
-            blinking_paused: false,
-            blink_epoch: 0,
-        }
-    }
-
-    pub fn handle(&self) -> ModelHandle<Terminal> {
-        self.terminal.clone()
-    }
-
-    pub fn has_new_content(&self) -> bool {
-        self.has_new_content
-    }
-
-    pub fn has_bell(&self) -> bool {
-        self.has_bell
-    }
-
-    pub fn clear_bel(&mut self, cx: &mut ViewContext<TerminalView>) {
-        self.has_bell = false;
-        cx.emit(Event::Wakeup);
-    }
-
-    pub fn deploy_context_menu(&mut self, action: &DeployContextMenu, cx: &mut ViewContext<Self>) {
-        let menu_entries = vec![
-            ContextMenuItem::item("Clear", Clear),
-            ContextMenuItem::item("Close", pane::CloseActiveItem),
-        ];
-
-        self.context_menu.update(cx, |menu, cx| {
-            menu.show(action.position, AnchorCorner::TopLeft, menu_entries, cx)
-        });
-
-        cx.notify();
-    }
-
-    fn show_character_palette(&mut self, _: &ShowCharacterPalette, cx: &mut ViewContext<Self>) {
-        if !self
-            .terminal
-            .read(cx)
-            .last_content
-            .mode
-            .contains(TermMode::ALT_SCREEN)
-        {
-            cx.show_character_palette();
-        } else {
-            self.terminal.update(cx, |term, cx| {
-                term.try_keystroke(
-                    &Keystroke::parse("ctrl-cmd-space").unwrap(),
-                    cx.global::<Settings>()
-                        .terminal_overrides
-                        .option_as_meta
-                        .unwrap_or(false),
-                )
-            });
-        }
-    }
-
-    fn clear(&mut self, _: &Clear, cx: &mut ViewContext<Self>) {
-        self.terminal.update(cx, |term, _| term.clear());
-        cx.notify();
-    }
-
-    pub fn should_show_cursor(
-        &self,
-        focused: bool,
-        cx: &mut gpui::RenderContext<'_, Self>,
-    ) -> bool {
-        //Don't blink the cursor when not focused, blinking is disabled, or paused
-        if !focused
-            || !self.blinking_on
-            || self.blinking_paused
-            || self
-                .terminal
-                .read(cx)
-                .last_content
-                .mode
-                .contains(TermMode::ALT_SCREEN)
-        {
-            return true;
-        }
-
-        let setting = {
-            let settings = cx.global::<Settings>();
-            settings
-                .terminal_overrides
-                .blinking
-                .clone()
-                .unwrap_or(TerminalBlink::TerminalControlled)
-        };
-
-        match setting {
-            //If the user requested to never blink, don't blink it.
-            TerminalBlink::Off => true,
-            //If the terminal is controlling it, check terminal mode
-            TerminalBlink::TerminalControlled | TerminalBlink::On => self.blink_state,
-        }
-    }
-
-    fn blink_cursors(&mut self, epoch: usize, cx: &mut ViewContext<Self>) {
-        if epoch == self.blink_epoch && !self.blinking_paused {
-            self.blink_state = !self.blink_state;
-            cx.notify();
-
-            let epoch = self.next_blink_epoch();
-            cx.spawn(|this, mut cx| {
-                let this = this.downgrade();
-                async move {
-                    Timer::after(CURSOR_BLINK_INTERVAL).await;
-                    if let Some(this) = this.upgrade(&cx) {
-                        this.update(&mut cx, |this, cx| this.blink_cursors(epoch, cx));
-                    }
-                }
-            })
-            .detach();
-        }
-    }
-
-    pub fn pause_cursor_blinking(&mut self, cx: &mut ViewContext<Self>) {
-        self.blink_state = true;
-        cx.notify();
-
-        let epoch = self.next_blink_epoch();
-        cx.spawn(|this, mut cx| {
-            let this = this.downgrade();
-            async move {
-                Timer::after(CURSOR_BLINK_INTERVAL).await;
-                if let Some(this) = this.upgrade(&cx) {
-                    this.update(&mut cx, |this, cx| this.resume_cursor_blinking(epoch, cx))
-                }
-            }
-        })
-        .detach();
-    }
-
-    pub fn find_matches(
-        &mut self,
-        query: project::search::SearchQuery,
-        cx: &mut ViewContext<Self>,
-    ) -> Task<Vec<RangeInclusive<Point>>> {
-        self.terminal
-            .update(cx, |term, cx| term.find_matches(query, cx))
-    }
-
-    pub fn terminal(&self) -> &ModelHandle<Terminal> {
-        &self.terminal
-    }
-
-    fn next_blink_epoch(&mut self) -> usize {
-        self.blink_epoch += 1;
-        self.blink_epoch
-    }
-
-    fn resume_cursor_blinking(&mut self, epoch: usize, cx: &mut ViewContext<Self>) {
-        if epoch == self.blink_epoch {
-            self.blinking_paused = false;
-            self.blink_cursors(epoch, cx);
-        }
-    }
-
-    ///Attempt to paste the clipboard into the terminal
-    fn copy(&mut self, _: &Copy, cx: &mut ViewContext<Self>) {
-        self.terminal.update(cx, |term, _| term.copy())
-    }
-
-    ///Attempt to paste the clipboard into the terminal
-    fn paste(&mut self, _: &Paste, cx: &mut ViewContext<Self>) {
-        if let Some(item) = cx.read_from_clipboard() {
-            self.terminal
-                .update(cx, |terminal, _cx| terminal.paste(item.text()));
-        }
-    }
-
-    fn send_text(&mut self, text: &SendText, cx: &mut ViewContext<Self>) {
-        self.clear_bel(cx);
-        self.terminal.update(cx, |term, _| {
-            term.input(text.0.to_string());
-        });
-    }
-
-    fn send_keystroke(&mut self, text: &SendKeystroke, cx: &mut ViewContext<Self>) {
-        if let Some(keystroke) = Keystroke::parse(&text.0).log_err() {
-            self.clear_bel(cx);
-            self.terminal.update(cx, |term, cx| {
-                term.try_keystroke(
-                    &keystroke,
-                    cx.global::<Settings>()
-                        .terminal_overrides
-                        .option_as_meta
-                        .unwrap_or(false),
-                );
-            });
-        }
-    }
-}
-
-impl View for TerminalView {
-    fn ui_name() -> &'static str {
-        "Terminal"
-    }
-
-    fn render(&mut self, cx: &mut gpui::RenderContext<'_, Self>) -> ElementBox {
-        let terminal_handle = self.terminal.clone().downgrade();
-
-        let self_id = cx.view_id();
-        let focused = cx
-            .focused_view_id(cx.window_id())
-            .filter(|view_id| *view_id == self_id)
-            .is_some();
-
-        Stack::new()
-            .with_child(
-                TerminalElement::new(
-                    cx.handle(),
-                    terminal_handle,
-                    focused,
-                    self.should_show_cursor(focused, cx),
-                )
-                .contained()
-                .boxed(),
-            )
-            .with_child(ChildView::new(&self.context_menu, cx).boxed())
-            .boxed()
-    }
-
-    fn focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
-        self.has_new_content = false;
-        self.terminal.read(cx).focus_in();
-        self.blink_cursors(self.blink_epoch, cx);
-        cx.notify();
-    }
-
-    fn focus_out(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
-        self.terminal.update(cx, |terminal, _| {
-            terminal.focus_out();
-        });
-        cx.notify();
-    }
-
-    fn key_down(&mut self, event: &gpui::KeyDownEvent, cx: &mut ViewContext<Self>) -> bool {
-        self.clear_bel(cx);
-        self.pause_cursor_blinking(cx);
-
-        self.terminal.update(cx, |term, cx| {
-            term.try_keystroke(
-                &event.keystroke,
-                cx.global::<Settings>()
-                    .terminal_overrides
-                    .option_as_meta
-                    .unwrap_or(false),
-            )
-        })
-    }
-
-    //IME stuff
-    fn selected_text_range(&self, cx: &AppContext) -> Option<std::ops::Range<usize>> {
-        if self
-            .terminal
-            .read(cx)
-            .last_content
-            .mode
-            .contains(TermMode::ALT_SCREEN)
-        {
-            None
-        } else {
-            Some(0..0)
-        }
-    }
-
-    fn replace_text_in_range(
-        &mut self,
-        _: Option<std::ops::Range<usize>>,
-        text: &str,
-        cx: &mut ViewContext<Self>,
-    ) {
-        self.terminal.update(cx, |terminal, _| {
-            terminal.input(text.into());
-        });
-    }
-
-    fn keymap_context(&self, cx: &gpui::AppContext) -> gpui::keymap::Context {
-        let mut context = Self::default_keymap_context();
-        if self.modal {
-            context.set.insert("ModalTerminal".into());
-        }
-        let mode = self.terminal.read(cx).last_content.mode;
-        context.map.insert(
-            "screen".to_string(),
-            (if mode.contains(TermMode::ALT_SCREEN) {
-                "alt"
-            } else {
-                "normal"
-            })
-            .to_string(),
-        );
-
-        if mode.contains(TermMode::APP_CURSOR) {
-            context.set.insert("DECCKM".to_string());
-        }
-        if mode.contains(TermMode::APP_KEYPAD) {
-            context.set.insert("DECPAM".to_string());
-        }
-        //Note the ! here
-        if !mode.contains(TermMode::APP_KEYPAD) {
-            context.set.insert("DECPNM".to_string());
-        }
-        if mode.contains(TermMode::SHOW_CURSOR) {
-            context.set.insert("DECTCEM".to_string());
-        }
-        if mode.contains(TermMode::LINE_WRAP) {
-            context.set.insert("DECAWM".to_string());
-        }
-        if mode.contains(TermMode::ORIGIN) {
-            context.set.insert("DECOM".to_string());
-        }
-        if mode.contains(TermMode::INSERT) {
-            context.set.insert("IRM".to_string());
-        }
-        //LNM is apparently the name for this. https://vt100.net/docs/vt510-rm/LNM.html
-        if mode.contains(TermMode::LINE_FEED_NEW_LINE) {
-            context.set.insert("LNM".to_string());
-        }
-        if mode.contains(TermMode::FOCUS_IN_OUT) {
-            context.set.insert("report_focus".to_string());
-        }
-        if mode.contains(TermMode::ALTERNATE_SCROLL) {
-            context.set.insert("alternate_scroll".to_string());
-        }
-        if mode.contains(TermMode::BRACKETED_PASTE) {
-            context.set.insert("bracketed_paste".to_string());
-        }
-        if mode.intersects(TermMode::MOUSE_MODE) {
-            context.set.insert("any_mouse_reporting".to_string());
-        }
-        {
-            let mouse_reporting = if mode.contains(TermMode::MOUSE_REPORT_CLICK) {
-                "click"
-            } else if mode.contains(TermMode::MOUSE_DRAG) {
-                "drag"
-            } else if mode.contains(TermMode::MOUSE_MOTION) {
-                "motion"
-            } else {
-                "off"
-            };
-            context
-                .map
-                .insert("mouse_reporting".to_string(), mouse_reporting.to_string());
-        }
-        {
-            let format = if mode.contains(TermMode::SGR_MOUSE) {
-                "sgr"
-            } else if mode.contains(TermMode::UTF8_MOUSE) {
-                "utf8"
-            } else {
-                "normal"
-            };
-            context
-                .map
-                .insert("mouse_format".to_string(), format.to_string());
-        }
-        context
-    }
-}

crates/terminal/src/tests/terminal_test_context.rs 🔗

@@ -1,137 +0,0 @@
-use std::{path::Path, time::Duration};
-
-use alacritty_terminal::{
-    index::{Column, Line, Point},
-    term::cell::Cell,
-};
-use gpui::{ModelHandle, TestAppContext, ViewHandle};
-
-use project::{Entry, Project, ProjectPath, Worktree};
-use rand::{rngs::ThreadRng, Rng};
-use workspace::{AppState, Workspace};
-
-use crate::{IndexedCell, TerminalContent, TerminalSize};
-
-pub struct TerminalTestContext<'a> {
-    pub cx: &'a mut TestAppContext,
-}
-
-impl<'a> TerminalTestContext<'a> {
-    pub fn new(cx: &'a mut TestAppContext) -> Self {
-        cx.set_condition_duration(Some(Duration::from_secs(5)));
-
-        TerminalTestContext { cx }
-    }
-
-    ///Creates a worktree with 1 file: /root.txt
-    pub async fn blank_workspace(&mut self) -> (ModelHandle<Project>, ViewHandle<Workspace>) {
-        let params = self.cx.update(AppState::test);
-
-        let project = Project::test(params.fs.clone(), [], self.cx).await;
-        let (_, workspace) = self
-            .cx
-            .add_window(|cx| Workspace::new(project.clone(), |_, _| unimplemented!(), cx));
-
-        (project, workspace)
-    }
-
-    ///Creates a worktree with 1 folder: /root{suffix}/
-    pub async fn create_folder_wt(
-        &mut self,
-        project: ModelHandle<Project>,
-        path: impl AsRef<Path>,
-    ) -> (ModelHandle<Worktree>, Entry) {
-        self.create_wt(project, true, path).await
-    }
-
-    ///Creates a worktree with 1 file: /root{suffix}.txt
-    pub async fn create_file_wt(
-        &mut self,
-        project: ModelHandle<Project>,
-        path: impl AsRef<Path>,
-    ) -> (ModelHandle<Worktree>, Entry) {
-        self.create_wt(project, false, path).await
-    }
-
-    async fn create_wt(
-        &mut self,
-        project: ModelHandle<Project>,
-        is_dir: bool,
-        path: impl AsRef<Path>,
-    ) -> (ModelHandle<Worktree>, Entry) {
-        let (wt, _) = project
-            .update(self.cx, |project, cx| {
-                project.find_or_create_local_worktree(path, true, cx)
-            })
-            .await
-            .unwrap();
-
-        let entry = self
-            .cx
-            .update(|cx| {
-                wt.update(cx, |wt, cx| {
-                    wt.as_local()
-                        .unwrap()
-                        .create_entry(Path::new(""), is_dir, cx)
-                })
-            })
-            .await
-            .unwrap();
-
-        (wt, entry)
-    }
-
-    pub fn insert_active_entry_for(
-        &mut self,
-        wt: ModelHandle<Worktree>,
-        entry: Entry,
-        project: ModelHandle<Project>,
-    ) {
-        self.cx.update(|cx| {
-            let p = ProjectPath {
-                worktree_id: wt.read(cx).id(),
-                path: entry.path,
-            };
-            project.update(cx, |project, cx| project.set_active_path(Some(p), cx));
-        });
-    }
-
-    pub fn create_terminal_content(
-        size: TerminalSize,
-        rng: &mut ThreadRng,
-    ) -> (TerminalContent, Vec<Vec<char>>) {
-        let mut ic = Vec::new();
-        let mut cells = Vec::new();
-
-        for row in 0..((size.height() / size.line_height()) as usize) {
-            let mut row_vec = Vec::new();
-            for col in 0..((size.width() / size.cell_width()) as usize) {
-                let cell_char = rng.gen();
-                ic.push(IndexedCell {
-                    point: Point::new(Line(row as i32), Column(col)),
-                    cell: Cell {
-                        c: cell_char,
-                        ..Default::default()
-                    },
-                });
-                row_vec.push(cell_char)
-            }
-            cells.push(row_vec)
-        }
-
-        (
-            TerminalContent {
-                cells: ic,
-                size,
-                ..Default::default()
-            },
-            cells,
-        )
-    }
-}
-
-impl<'a> Drop for TerminalTestContext<'a> {
-    fn drop(&mut self) {
-        self.cx.set_condition_duration(None);
-    }
-}

crates/terminal_view/Cargo.toml 🔗

@@ -0,0 +1,44 @@
+[package]
+name = "terminal_view"
+version = "0.1.0"
+edition = "2021"
+
+[lib]
+path = "src/terminal_view.rs"
+doctest = false
+
+[dependencies]
+context_menu = { path = "../context_menu" }
+editor = { path = "../editor" }
+language = { path = "../language" }
+gpui = { path = "../gpui" }
+project = { path = "../project" }
+settings = { path = "../settings" }
+theme = { path = "../theme" }
+util = { path = "../util" }
+workspace = { path = "../workspace" }
+db = { path = "../db" }
+procinfo = { git = "https://github.com/zed-industries/wezterm", rev = "5cd757e5f2eb039ed0c6bb6512223e69d5efc64d", default-features = false }
+terminal = { path = "../terminal" }
+smallvec = { version = "1.6", features = ["union"] }
+smol = "1.2.5"
+mio-extras = "2.0.6"
+futures = "0.3"
+ordered-float = "2.1.1"
+itertools = "0.10"
+dirs = "4.0.0"
+shellexpand = "2.1.0"
+libc = "0.2"
+anyhow = "1"
+thiserror = "1.0"
+lazy_static = "1.4.0"
+serde = { version = "1.0", features = ["derive"] }
+
+
+
+[dev-dependencies]
+gpui = { path = "../gpui", features = ["test-support"] }
+client = { path = "../client", features = ["test-support"]}
+project = { path = "../project", features = ["test-support"]}
+workspace = { path = "../workspace", features = ["test-support"] }
+rand = "0.8.5"

crates/terminal_view/src/persistence.rs 🔗

@@ -0,0 +1,51 @@
+use std::path::PathBuf;
+
+use db::{define_connection, query, sqlez_macros::sql};
+use workspace::{ItemId, WorkspaceDb, WorkspaceId};
+
+define_connection! {
+    pub static ref TERMINAL_DB: TerminalDb<WorkspaceDb> =
+        &[sql!(
+            CREATE TABLE terminals (
+                workspace_id INTEGER,
+                item_id INTEGER UNIQUE,
+                working_directory BLOB,
+                PRIMARY KEY(workspace_id, item_id),
+                FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
+                ON DELETE CASCADE
+            ) STRICT;
+        )];
+}
+
+impl TerminalDb {
+    query! {
+       pub async fn update_workspace_id(
+            new_id: WorkspaceId,
+            old_id: WorkspaceId,
+            item_id: ItemId
+        ) -> Result<()> {
+            UPDATE terminals
+            SET workspace_id = ?
+            WHERE workspace_id = ? AND item_id = ?
+        }
+    }
+
+    query! {
+        pub async fn save_working_directory(
+            item_id: ItemId,
+            workspace_id: WorkspaceId,
+            working_directory: PathBuf
+        ) -> Result<()> {
+            INSERT OR REPLACE INTO terminals(item_id, workspace_id, working_directory)
+            VALUES (?, ?, ?)
+        }
+    }
+
+    query! {
+        pub async fn take_working_directory(item_id: ItemId, workspace_id: WorkspaceId) -> Result<Option<PathBuf>> {
+            DELETE FROM terminals
+            WHERE item_id = ? AND workspace_id = ?
+            RETURNING working_directory
+        }
+    }
+}
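
Note: each query! invocation above expands to an async method on TerminalDb. A minimal usage sketch, mirroring the call sites added in terminal_view.rs later in this diff (the function name is hypothetical, the Result type is assumed to be anyhow's, and error handling is elided):

    use std::path::PathBuf;
    use workspace::{ItemId, WorkspaceId};

    // Persist the cwd for a terminal item, then later reclaim it (deleting the row).
    async fn remember_and_restore(
        item_id: ItemId,
        workspace_id: WorkspaceId,
        cwd: PathBuf,
    ) -> anyhow::Result<Option<PathBuf>> {
        TERMINAL_DB
            .save_working_directory(item_id, workspace_id, cwd)
            .await?;
        TERMINAL_DB
            .take_working_directory(item_id, workspace_id)
            .await
    }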

crates/terminal/src/terminal_element.rs → crates/terminal_view/src/terminal_element.rs 🔗

@@ -1,9 +1,3 @@
-use alacritty_terminal::{
-    ansi::{Color as AnsiColor, Color::Named, CursorShape as AlacCursorShape, NamedColor},
-    grid::Dimensions,
-    index::Point,
-    term::{cell::Flags, TermMode},
-};
 use editor::{Cursor, HighlightedRange, HighlightedRangeLine};
 use gpui::{
     color::Color,
@@ -22,17 +16,23 @@ use itertools::Itertools;
 use language::CursorShape;
 use ordered_float::OrderedFloat;
 use settings::Settings;
+use terminal::{
+    alacritty_terminal::{
+        ansi::{Color as AnsiColor, Color::Named, CursorShape as AlacCursorShape, NamedColor},
+        grid::Dimensions,
+        index::Point,
+        term::{cell::Flags, TermMode},
+    },
+    mappings::colors::convert_color,
+    IndexedCell, Terminal, TerminalContent, TerminalSize,
+};
 use theme::TerminalStyle;
 use util::ResultExt;
 
 use std::{fmt::Debug, ops::RangeInclusive};
 use std::{mem, ops::Range};
 
-use crate::{
-    mappings::colors::convert_color,
-    terminal_view::{DeployContextMenu, TerminalView},
-    IndexedCell, Terminal, TerminalContent, TerminalSize,
-};
+use crate::{DeployContextMenu, TerminalView};
 
 ///The information generated during layout that is necessary for painting
 pub struct LayoutState {
@@ -299,7 +299,7 @@ impl TerminalElement {
     ///Convert the Alacritty cell styles to GPUI text styles and background color
     fn cell_style(
         indexed: &IndexedCell,
-        fg: AnsiColor,
+        fg: terminal::alacritty_terminal::ansi::Color,
         style: &TerminalStyle,
         text_style: &TextStyle,
         font_cache: &FontCache,

crates/terminal_view/src/terminal_view.rs 🔗

@@ -0,0 +1,1091 @@
+mod persistence;
+pub mod terminal_element;
+
+use std::{
+    ops::RangeInclusive,
+    path::{Path, PathBuf},
+    time::Duration,
+};
+
+use context_menu::{ContextMenu, ContextMenuItem};
+use dirs::home_dir;
+use gpui::{
+    actions,
+    elements::{AnchorCorner, ChildView, Flex, Label, ParentElement, Stack, Text},
+    geometry::vector::Vector2F,
+    impl_actions, impl_internal_actions,
+    keymap::Keystroke,
+    AnyViewHandle, AppContext, Element, ElementBox, Entity, ModelHandle, MutableAppContext, Task,
+    View, ViewContext, ViewHandle, WeakViewHandle,
+};
+use project::{LocalWorktree, Project, ProjectPath};
+use serde::Deserialize;
+use settings::{Settings, TerminalBlink, WorkingDirectory};
+use smallvec::SmallVec;
+use smol::Timer;
+use terminal::{
+    alacritty_terminal::{
+        index::Point,
+        term::{search::RegexSearch, TermMode},
+    },
+    Event, Terminal,
+};
+use util::{truncate_and_trailoff, ResultExt};
+use workspace::{
+    item::{Item, ItemEvent},
+    notifications::NotifyResultExt,
+    pane, register_deserializable_item,
+    searchable::{SearchEvent, SearchOptions, SearchableItem, SearchableItemHandle},
+    Pane, ToolbarItemLocation, Workspace, WorkspaceId,
+};
+
+use crate::{persistence::TERMINAL_DB, terminal_element::TerminalElement};
+
+const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500);
+
+///Event to transmit the scroll from the element to the view
+#[derive(Clone, Debug, PartialEq)]
+pub struct ScrollTerminal(pub i32);
+
+#[derive(Clone, PartialEq)]
+pub struct DeployContextMenu {
+    pub position: Vector2F,
+}
+
+#[derive(Clone, Default, Deserialize, PartialEq)]
+pub struct SendText(String);
+
+#[derive(Clone, Default, Deserialize, PartialEq)]
+pub struct SendKeystroke(String);
+
+actions!(
+    terminal,
+    [Clear, Copy, Paste, ShowCharacterPalette, SearchTest]
+);
+
+impl_actions!(terminal, [SendText, SendKeystroke]);
+
+impl_internal_actions!(project_panel, [DeployContextMenu]);
+
+pub fn init(cx: &mut MutableAppContext) {
+    cx.add_action(TerminalView::deploy);
+
+    register_deserializable_item::<TerminalView>(cx);
+
+    //Useful terminal actions
+    cx.add_action(TerminalView::send_text);
+    cx.add_action(TerminalView::send_keystroke);
+    cx.add_action(TerminalView::deploy_context_menu);
+    cx.add_action(TerminalView::copy);
+    cx.add_action(TerminalView::paste);
+    cx.add_action(TerminalView::clear);
+    cx.add_action(TerminalView::show_character_palette);
+}
+
+///A terminal view, maintains the PTY's file handles and communicates with the terminal
+pub struct TerminalView {
+    terminal: ModelHandle<Terminal>,
+    has_new_content: bool,
+    //Currently using iTerm bell, show bell emoji in tab until input is received
+    has_bell: bool,
+    context_menu: ViewHandle<ContextMenu>,
+    blink_state: bool,
+    blinking_on: bool,
+    blinking_paused: bool,
+    blink_epoch: usize,
+    workspace_id: WorkspaceId,
+}
+
+impl Entity for TerminalView {
+    type Event = Event;
+}
+
+impl TerminalView {
+    ///Create a new Terminal in the current working directory or the user's home directory
+    pub fn deploy(
+        workspace: &mut Workspace,
+        _: &workspace::NewTerminal,
+        cx: &mut ViewContext<Workspace>,
+    ) {
+        let strategy = cx.global::<Settings>().terminal_strategy();
+
+        let working_directory = get_working_directory(workspace, cx, strategy);
+
+        let window_id = cx.window_id();
+        let terminal = workspace
+            .project()
+            .update(cx, |project, cx| {
+                project.create_terminal(working_directory, window_id, cx)
+            })
+            .notify_err(workspace, cx);
+
+        if let Some(terminal) = terminal {
+            let view = cx.add_view(|cx| TerminalView::new(terminal, workspace.database_id(), cx));
+            workspace.add_item(Box::new(view), cx)
+        }
+    }
+
+    pub fn new(
+        terminal: ModelHandle<Terminal>,
+        workspace_id: WorkspaceId,
+        cx: &mut ViewContext<Self>,
+    ) -> Self {
+        cx.observe(&terminal, |_, _, cx| cx.notify()).detach();
+        cx.subscribe(&terminal, |this, _, event, cx| match event {
+            Event::Wakeup => {
+                if !cx.is_self_focused() {
+                    this.has_new_content = true;
+                    cx.notify();
+                }
+                cx.emit(Event::Wakeup);
+            }
+            Event::Bell => {
+                this.has_bell = true;
+                cx.emit(Event::Wakeup);
+            }
+            Event::BlinkChanged => this.blinking_on = !this.blinking_on,
+            Event::TitleChanged => {
+                if let Some(foreground_info) = &this.terminal().read(cx).foreground_process_info {
+                    let cwd = foreground_info.cwd.clone();
+
+                    let item_id = cx.view_id();
+                    let workspace_id = this.workspace_id;
+                    cx.background()
+                        .spawn(async move {
+                            TERMINAL_DB
+                                .save_working_directory(item_id, workspace_id, cwd)
+                                .await
+                                .log_err();
+                        })
+                        .detach();
+                }
+            }
+            _ => cx.emit(*event),
+        })
+        .detach();
+
+        Self {
+            terminal,
+            has_new_content: true,
+            has_bell: false,
+            context_menu: cx.add_view(ContextMenu::new),
+            blink_state: true,
+            blinking_on: false,
+            blinking_paused: false,
+            blink_epoch: 0,
+            workspace_id,
+        }
+    }
+
+    pub fn handle(&self) -> ModelHandle<Terminal> {
+        self.terminal.clone()
+    }
+
+    pub fn has_new_content(&self) -> bool {
+        self.has_new_content
+    }
+
+    pub fn has_bell(&self) -> bool {
+        self.has_bell
+    }
+
+    pub fn clear_bel(&mut self, cx: &mut ViewContext<TerminalView>) {
+        self.has_bell = false;
+        cx.emit(Event::Wakeup);
+    }
+
+    pub fn deploy_context_menu(&mut self, action: &DeployContextMenu, cx: &mut ViewContext<Self>) {
+        let menu_entries = vec![
+            ContextMenuItem::item("Clear", Clear),
+            ContextMenuItem::item("Close", pane::CloseActiveItem),
+        ];
+
+        self.context_menu.update(cx, |menu, cx| {
+            menu.show(action.position, AnchorCorner::TopLeft, menu_entries, cx)
+        });
+
+        cx.notify();
+    }
+
+    fn show_character_palette(&mut self, _: &ShowCharacterPalette, cx: &mut ViewContext<Self>) {
+        if !self
+            .terminal
+            .read(cx)
+            .last_content
+            .mode
+            .contains(TermMode::ALT_SCREEN)
+        {
+            cx.show_character_palette();
+        } else {
+            self.terminal.update(cx, |term, cx| {
+                term.try_keystroke(
+                    &Keystroke::parse("ctrl-cmd-space").unwrap(),
+                    cx.global::<Settings>()
+                        .terminal_overrides
+                        .option_as_meta
+                        .unwrap_or(false),
+                )
+            });
+        }
+    }
+
+    fn clear(&mut self, _: &Clear, cx: &mut ViewContext<Self>) {
+        self.terminal.update(cx, |term, _| term.clear());
+        cx.notify();
+    }
+
+    pub fn should_show_cursor(
+        &self,
+        focused: bool,
+        cx: &mut gpui::RenderContext<'_, Self>,
+    ) -> bool {
+        //Don't blink the cursor when not focused, blinking is disabled, or paused
+        if !focused
+            || !self.blinking_on
+            || self.blinking_paused
+            || self
+                .terminal
+                .read(cx)
+                .last_content
+                .mode
+                .contains(TermMode::ALT_SCREEN)
+        {
+            return true;
+        }
+
+        let setting = {
+            let settings = cx.global::<Settings>();
+            settings
+                .terminal_overrides
+                .blinking
+                .clone()
+                .unwrap_or(TerminalBlink::TerminalControlled)
+        };
+
+        match setting {
+            //If the user requested to never blink, don't blink it.
+            TerminalBlink::Off => true,
+            //If blinking is on or terminal-controlled, follow the current blink state
+            TerminalBlink::TerminalControlled | TerminalBlink::On => self.blink_state,
+        }
+    }
+
+    fn blink_cursors(&mut self, epoch: usize, cx: &mut ViewContext<Self>) {
+        if epoch == self.blink_epoch && !self.blinking_paused {
+            self.blink_state = !self.blink_state;
+            cx.notify();
+
+            let epoch = self.next_blink_epoch();
+            cx.spawn(|this, mut cx| {
+                let this = this.downgrade();
+                async move {
+                    Timer::after(CURSOR_BLINK_INTERVAL).await;
+                    if let Some(this) = this.upgrade(&cx) {
+                        this.update(&mut cx, |this, cx| this.blink_cursors(epoch, cx));
+                    }
+                }
+            })
+            .detach();
+        }
+    }
+
+    pub fn pause_cursor_blinking(&mut self, cx: &mut ViewContext<Self>) {
+        self.blink_state = true;
+        cx.notify();
+
+        let epoch = self.next_blink_epoch();
+        cx.spawn(|this, mut cx| {
+            let this = this.downgrade();
+            async move {
+                Timer::after(CURSOR_BLINK_INTERVAL).await;
+                if let Some(this) = this.upgrade(&cx) {
+                    this.update(&mut cx, |this, cx| this.resume_cursor_blinking(epoch, cx))
+                }
+            }
+        })
+        .detach();
+    }
+
+    pub fn find_matches(
+        &mut self,
+        query: project::search::SearchQuery,
+        cx: &mut ViewContext<Self>,
+    ) -> Task<Vec<RangeInclusive<Point>>> {
+        let searcher = regex_search_for_query(query);
+
+        if let Some(searcher) = searcher {
+            self.terminal
+                .update(cx, |term, cx| term.find_matches(searcher, cx))
+        } else {
+            cx.background().spawn(async { Vec::new() })
+        }
+    }
+
+    pub fn terminal(&self) -> &ModelHandle<Terminal> {
+        &self.terminal
+    }
+
+    fn next_blink_epoch(&mut self) -> usize {
+        self.blink_epoch += 1;
+        self.blink_epoch
+    }
+
+    fn resume_cursor_blinking(&mut self, epoch: usize, cx: &mut ViewContext<Self>) {
+        if epoch == self.blink_epoch {
+            self.blinking_paused = false;
+            self.blink_cursors(epoch, cx);
+        }
+    }
+
+    ///Attempt to copy the terminal's selection to the clipboard
+    fn copy(&mut self, _: &Copy, cx: &mut ViewContext<Self>) {
+        self.terminal.update(cx, |term, _| term.copy())
+    }
+
+    ///Attempt to paste the clipboard into the terminal
+    fn paste(&mut self, _: &Paste, cx: &mut ViewContext<Self>) {
+        if let Some(item) = cx.read_from_clipboard() {
+            self.terminal
+                .update(cx, |terminal, _cx| terminal.paste(item.text()));
+        }
+    }
+
+    fn send_text(&mut self, text: &SendText, cx: &mut ViewContext<Self>) {
+        self.clear_bel(cx);
+        self.terminal.update(cx, |term, _| {
+            term.input(text.0.to_string());
+        });
+    }
+
+    fn send_keystroke(&mut self, text: &SendKeystroke, cx: &mut ViewContext<Self>) {
+        if let Some(keystroke) = Keystroke::parse(&text.0).log_err() {
+            self.clear_bel(cx);
+            self.terminal.update(cx, |term, cx| {
+                term.try_keystroke(
+                    &keystroke,
+                    cx.global::<Settings>()
+                        .terminal_overrides
+                        .option_as_meta
+                        .unwrap_or(false),
+                );
+            });
+        }
+    }
+}
+
+pub fn regex_search_for_query(query: project::search::SearchQuery) -> Option<RegexSearch> {
+    let searcher = match query {
+        project::search::SearchQuery::Text { query, .. } => RegexSearch::new(&query),
+        project::search::SearchQuery::Regex { query, .. } => RegexSearch::new(&query),
+    };
+    searcher.ok()
+}
+
+impl View for TerminalView {
+    fn ui_name() -> &'static str {
+        "Terminal"
+    }
+
+    fn render(&mut self, cx: &mut gpui::RenderContext<'_, Self>) -> ElementBox {
+        let terminal_handle = self.terminal.clone().downgrade();
+
+        let self_id = cx.view_id();
+        let focused = cx
+            .focused_view_id(cx.window_id())
+            .filter(|view_id| *view_id == self_id)
+            .is_some();
+
+        Stack::new()
+            .with_child(
+                TerminalElement::new(
+                    cx.handle(),
+                    terminal_handle,
+                    focused,
+                    self.should_show_cursor(focused, cx),
+                )
+                .contained()
+                .boxed(),
+            )
+            .with_child(ChildView::new(&self.context_menu, cx).boxed())
+            .boxed()
+    }
+
+    fn focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
+        self.has_new_content = false;
+        self.terminal.read(cx).focus_in();
+        self.blink_cursors(self.blink_epoch, cx);
+        cx.notify();
+    }
+
+    fn focus_out(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
+        self.terminal.update(cx, |terminal, _| {
+            terminal.focus_out();
+        });
+        cx.notify();
+    }
+
+    fn key_down(&mut self, event: &gpui::KeyDownEvent, cx: &mut ViewContext<Self>) -> bool {
+        self.clear_bel(cx);
+        self.pause_cursor_blinking(cx);
+
+        self.terminal.update(cx, |term, cx| {
+            term.try_keystroke(
+                &event.keystroke,
+                cx.global::<Settings>()
+                    .terminal_overrides
+                    .option_as_meta
+                    .unwrap_or(false),
+            )
+        })
+    }
+
+    //IME stuff
+    fn selected_text_range(&self, cx: &AppContext) -> Option<std::ops::Range<usize>> {
+        if self
+            .terminal
+            .read(cx)
+            .last_content
+            .mode
+            .contains(TermMode::ALT_SCREEN)
+        {
+            None
+        } else {
+            Some(0..0)
+        }
+    }
+
+    fn replace_text_in_range(
+        &mut self,
+        _: Option<std::ops::Range<usize>>,
+        text: &str,
+        cx: &mut ViewContext<Self>,
+    ) {
+        self.terminal.update(cx, |terminal, _| {
+            terminal.input(text.into());
+        });
+    }
+
+    fn keymap_context(&self, cx: &gpui::AppContext) -> gpui::keymap::Context {
+        let mut context = Self::default_keymap_context();
+
+        let mode = self.terminal.read(cx).last_content.mode;
+        context.map.insert(
+            "screen".to_string(),
+            (if mode.contains(TermMode::ALT_SCREEN) {
+                "alt"
+            } else {
+                "normal"
+            })
+            .to_string(),
+        );
+
+        if mode.contains(TermMode::APP_CURSOR) {
+            context.set.insert("DECCKM".to_string());
+        }
+        if mode.contains(TermMode::APP_KEYPAD) {
+            context.set.insert("DECPAM".to_string());
+        }
+        //Note the ! here
+        if !mode.contains(TermMode::APP_KEYPAD) {
+            context.set.insert("DECPNM".to_string());
+        }
+        if mode.contains(TermMode::SHOW_CURSOR) {
+            context.set.insert("DECTCEM".to_string());
+        }
+        if mode.contains(TermMode::LINE_WRAP) {
+            context.set.insert("DECAWM".to_string());
+        }
+        if mode.contains(TermMode::ORIGIN) {
+            context.set.insert("DECOM".to_string());
+        }
+        if mode.contains(TermMode::INSERT) {
+            context.set.insert("IRM".to_string());
+        }
+        //LNM is apparently the name for this. https://vt100.net/docs/vt510-rm/LNM.html
+        if mode.contains(TermMode::LINE_FEED_NEW_LINE) {
+            context.set.insert("LNM".to_string());
+        }
+        if mode.contains(TermMode::FOCUS_IN_OUT) {
+            context.set.insert("report_focus".to_string());
+        }
+        if mode.contains(TermMode::ALTERNATE_SCROLL) {
+            context.set.insert("alternate_scroll".to_string());
+        }
+        if mode.contains(TermMode::BRACKETED_PASTE) {
+            context.set.insert("bracketed_paste".to_string());
+        }
+        if mode.intersects(TermMode::MOUSE_MODE) {
+            context.set.insert("any_mouse_reporting".to_string());
+        }
+        {
+            let mouse_reporting = if mode.contains(TermMode::MOUSE_REPORT_CLICK) {
+                "click"
+            } else if mode.contains(TermMode::MOUSE_DRAG) {
+                "drag"
+            } else if mode.contains(TermMode::MOUSE_MOTION) {
+                "motion"
+            } else {
+                "off"
+            };
+            context
+                .map
+                .insert("mouse_reporting".to_string(), mouse_reporting.to_string());
+        }
+        {
+            let format = if mode.contains(TermMode::SGR_MOUSE) {
+                "sgr"
+            } else if mode.contains(TermMode::UTF8_MOUSE) {
+                "utf8"
+            } else {
+                "normal"
+            };
+            context
+                .map
+                .insert("mouse_format".to_string(), format.to_string());
+        }
+        context
+    }
+}
+
+impl Item for TerminalView {
+    fn tab_content(
+        &self,
+        _detail: Option<usize>,
+        tab_theme: &theme::Tab,
+        cx: &gpui::AppContext,
+    ) -> ElementBox {
+        let title = self
+            .terminal()
+            .read(cx)
+            .foreground_process_info
+            .as_ref()
+            .map(|fpi| {
+                format!(
+                    "{} — {}",
+                    truncate_and_trailoff(
+                        &fpi.cwd
+                            .file_name()
+                            .map(|name| name.to_string_lossy().to_string())
+                            .unwrap_or_default(),
+                        25
+                    ),
+                    truncate_and_trailoff(
+                        &{
+                            format!(
+                                "{}{}",
+                                fpi.name,
+                                if fpi.argv.len() >= 1 {
+                                    format!(" {}", (&fpi.argv[1..]).join(" "))
+                                } else {
+                                    "".to_string()
+                                }
+                            )
+                        },
+                        25
+                    )
+                )
+            })
+            .unwrap_or_else(|| "Terminal".to_string());
+
+        Flex::row()
+            .with_child(
+                Label::new(title, tab_theme.label.clone())
+                    .aligned()
+                    .contained()
+                    .boxed(),
+            )
+            .boxed()
+    }
+
+    fn clone_on_split(
+        &self,
+        _workspace_id: WorkspaceId,
+        _cx: &mut ViewContext<Self>,
+    ) -> Option<Self> {
+        //From what I can tell, there's no way to tell the current working
+        //directory of the terminal from outside the shell. There might be
+        //solutions to this, but they are non-trivial and require more IPC.
+
+        // Some(TerminalContainer::new(
+        //     Err(anyhow::anyhow!("failed to instantiate terminal")),
+        //     workspace_id,
+        //     cx,
+        // ))
+
+        // TODO
+        None
+    }
+
+    fn project_path(&self, _cx: &gpui::AppContext) -> Option<ProjectPath> {
+        None
+    }
+
+    fn project_entry_ids(&self, _cx: &gpui::AppContext) -> SmallVec<[project::ProjectEntryId; 3]> {
+        SmallVec::new()
+    }
+
+    fn is_singleton(&self, _cx: &gpui::AppContext) -> bool {
+        false
+    }
+
+    fn set_nav_history(&mut self, _: workspace::ItemNavHistory, _: &mut ViewContext<Self>) {}
+
+    fn can_save(&self, _cx: &gpui::AppContext) -> bool {
+        false
+    }
+
+    fn save(
+        &mut self,
+        _project: gpui::ModelHandle<Project>,
+        _cx: &mut ViewContext<Self>,
+    ) -> gpui::Task<gpui::anyhow::Result<()>> {
+        unreachable!("save should not have been called");
+    }
+
+    fn save_as(
+        &mut self,
+        _project: gpui::ModelHandle<Project>,
+        _abs_path: std::path::PathBuf,
+        _cx: &mut ViewContext<Self>,
+    ) -> gpui::Task<gpui::anyhow::Result<()>> {
+        unreachable!("save_as should not have been called");
+    }
+
+    fn reload(
+        &mut self,
+        _project: gpui::ModelHandle<Project>,
+        _cx: &mut ViewContext<Self>,
+    ) -> gpui::Task<gpui::anyhow::Result<()>> {
+        gpui::Task::ready(Ok(()))
+    }
+
+    fn is_dirty(&self, _cx: &gpui::AppContext) -> bool {
+        self.has_bell()
+    }
+
+    fn has_conflict(&self, _cx: &AppContext) -> bool {
+        false
+    }
+
+    fn as_searchable(&self, handle: &ViewHandle<Self>) -> Option<Box<dyn SearchableItemHandle>> {
+        Some(Box::new(handle.clone()))
+    }
+
+    fn to_item_events(event: &Self::Event) -> Vec<ItemEvent> {
+        match event {
+            Event::BreadcrumbsChanged => vec![ItemEvent::UpdateBreadcrumbs],
+            Event::TitleChanged | Event::Wakeup => vec![ItemEvent::UpdateTab],
+            Event::CloseTerminal => vec![ItemEvent::CloseItem],
+            _ => vec![],
+        }
+    }
+
+    fn breadcrumb_location(&self) -> ToolbarItemLocation {
+        ToolbarItemLocation::PrimaryLeft { flex: None }
+    }
+
+    fn breadcrumbs(&self, theme: &theme::Theme, cx: &AppContext) -> Option<Vec<ElementBox>> {
+        Some(vec![Text::new(
+            self.terminal().read(cx).breadcrumb_text.to_string(),
+            theme.breadcrumbs.text.clone(),
+        )
+        .boxed()])
+    }
+
+    fn serialized_item_kind() -> Option<&'static str> {
+        Some("Terminal")
+    }
+
+    fn deserialize(
+        project: ModelHandle<Project>,
+        _workspace: WeakViewHandle<Workspace>,
+        workspace_id: workspace::WorkspaceId,
+        item_id: workspace::ItemId,
+        cx: &mut ViewContext<Pane>,
+    ) -> Task<anyhow::Result<ViewHandle<Self>>> {
+        let window_id = cx.window_id();
+        cx.spawn(|pane, mut cx| async move {
+            let cwd = TERMINAL_DB
+                .take_working_directory(item_id, workspace_id)
+                .await
+                .log_err()
+                .flatten();
+
+            cx.update(|cx| {
+                let terminal = project.update(cx, |project, cx| {
+                    project.create_terminal(cwd, window_id, cx)
+                })?;
+
+                Ok(cx.add_view(pane, |cx| TerminalView::new(terminal, workspace_id, cx)))
+            })
+        })
+    }
+
+    fn added_to_workspace(&mut self, workspace: &mut Workspace, cx: &mut ViewContext<Self>) {
+        cx.background()
+            .spawn(TERMINAL_DB.update_workspace_id(
+                workspace.database_id(),
+                self.workspace_id,
+                cx.view_id(),
+            ))
+            .detach();
+        self.workspace_id = workspace.database_id();
+    }
+}
+
+impl SearchableItem for TerminalView {
+    type Match = RangeInclusive<Point>;
+
+    fn supported_options() -> SearchOptions {
+        SearchOptions {
+            case: false,
+            word: false,
+            regex: false,
+        }
+    }
+
+    /// Convert events raised by this item into search-relevant events (if applicable)
+    fn to_search_event(event: &Self::Event) -> Option<SearchEvent> {
+        match event {
+            Event::Wakeup => Some(SearchEvent::MatchesInvalidated),
+            Event::SelectionsChanged => Some(SearchEvent::ActiveMatchChanged),
+            _ => None,
+        }
+    }
+
+    /// Clear stored matches
+    fn clear_matches(&mut self, cx: &mut ViewContext<Self>) {
+        self.terminal().update(cx, |term, _| term.matches.clear())
+    }
+
+    /// Store matches returned from find_matches somewhere for rendering
+    fn update_matches(&mut self, matches: Vec<Self::Match>, cx: &mut ViewContext<Self>) {
+        self.terminal().update(cx, |term, _| term.matches = matches)
+    }
+
+    /// Return the selection content to pre-load into this search
+    fn query_suggestion(&mut self, cx: &mut ViewContext<Self>) -> String {
+        self.terminal()
+            .read(cx)
+            .last_content
+            .selection_text
+            .clone()
+            .unwrap_or_default()
+    }
+
+    /// Focus match at given index into the Vec of matches
+    fn activate_match(&mut self, index: usize, _: Vec<Self::Match>, cx: &mut ViewContext<Self>) {
+        self.terminal()
+            .update(cx, |term, _| term.activate_match(index));
+        cx.notify();
+    }
+
+    /// Get all of the matches for this query; this should run in the background
+    fn find_matches(
+        &mut self,
+        query: project::search::SearchQuery,
+        cx: &mut ViewContext<Self>,
+    ) -> Task<Vec<Self::Match>> {
+        if let Some(searcher) = regex_search_for_query(query) {
+            self.terminal()
+                .update(cx, |term, cx| term.find_matches(searcher, cx))
+        } else {
+            Task::ready(vec![])
+        }
+    }
+
+    /// Reports back to the search toolbar what the active match should be (the selection)
+    fn active_match_index(
+        &mut self,
+        matches: Vec<Self::Match>,
+        cx: &mut ViewContext<Self>,
+    ) -> Option<usize> {
+        // Selection head might have a value if there's a selection that isn't
+        // associated with a match. Therefore, if there are no matches, we should
+        // report None, no matter the state of the terminal
+        let res = if matches.len() > 0 {
+            if let Some(selection_head) = self.terminal().read(cx).selection_head {
+                // If the selection head is contained in or precedes a match, return that match
+                if let Some(ix) = matches
+                    .iter()
+                    .enumerate()
+                    .find(|(_, search_match)| {
+                        search_match.contains(&selection_head)
+                            || search_match.start() > &selection_head
+                    })
+                    .map(|(ix, _)| ix)
+                {
+                    Some(ix)
+                } else {
+                    // If no match follows the selection head, return the last match
+                    Some(matches.len().saturating_sub(1))
+                }
+            } else {
+                // Matches found but no active selection; return the last match (closest to the cursor)
+                Some(matches.len().saturating_sub(1))
+            }
+        } else {
+            None
+        };
+
+        res
+    }
+}
+
+///Gets the working directory for the given workspace, respecting the user's settings.
+pub fn get_working_directory(
+    workspace: &Workspace,
+    cx: &AppContext,
+    strategy: WorkingDirectory,
+) -> Option<PathBuf> {
+    let res = match strategy {
+        WorkingDirectory::CurrentProjectDirectory => current_project_directory(workspace, cx)
+            .or_else(|| first_project_directory(workspace, cx)),
+        WorkingDirectory::FirstProjectDirectory => first_project_directory(workspace, cx),
+        WorkingDirectory::AlwaysHome => None,
+        WorkingDirectory::Always { directory } => {
+            shellexpand::full(&directory) //TODO handle this better
+                .ok()
+                .map(|dir| Path::new(&dir.to_string()).to_path_buf())
+                .filter(|dir| dir.is_dir())
+        }
+    };
+    res.or_else(home_dir)
+}
+
+///Gets the root directory of the first project worktree, if there is one and it is a directory
+fn first_project_directory(workspace: &Workspace, cx: &AppContext) -> Option<PathBuf> {
+    workspace
+        .worktrees(cx)
+        .next()
+        .and_then(|worktree_handle| worktree_handle.read(cx).as_local())
+        .and_then(get_path_from_wt)
+}
+
+///Gets the intuitively correct working directory from the given workspace
+///If there is an active entry for this project, returns that entry's worktree root.
+///If there's no active entry but there is a worktree, returns that worktree's root.
+///If either of these roots is a file, or if any other lookup fails, returns None
+///  so the caller can fall back to the user's home directory.
+fn current_project_directory(workspace: &Workspace, cx: &AppContext) -> Option<PathBuf> {
+    let project = workspace.project().read(cx);
+
+    project
+        .active_entry()
+        .and_then(|entry_id| project.worktree_for_entry(entry_id, cx))
+        .or_else(|| workspace.worktrees(cx).next())
+        .and_then(|worktree_handle| worktree_handle.read(cx).as_local())
+        .and_then(get_path_from_wt)
+}
+
+fn get_path_from_wt(wt: &LocalWorktree) -> Option<PathBuf> {
+    wt.root_entry()
+        .filter(|re| re.is_dir())
+        .map(|_| wt.abs_path().to_path_buf())
+}
+
+#[cfg(test)]
+mod tests {
+
+    use super::*;
+    use gpui::TestAppContext;
+    use project::{Entry, Project, ProjectPath, Worktree};
+    use workspace::AppState;
+
+    use std::path::Path;
+
+    ///Working directory calculation tests
+
+    ///No Worktrees in project -> home_dir()
+    #[gpui::test]
+    async fn no_worktree(cx: &mut TestAppContext) {
+        //Setup variables
+        let (project, workspace) = blank_workspace(cx).await;
+        //Test
+        cx.read(|cx| {
+            let workspace = workspace.read(cx);
+            let active_entry = project.read(cx).active_entry();
+
+            //Make sure the environment is as expected
+            assert!(active_entry.is_none());
+            assert!(workspace.worktrees(cx).next().is_none());
+
+            let res = current_project_directory(workspace, cx);
+            assert_eq!(res, None);
+            let res = first_project_directory(workspace, cx);
+            assert_eq!(res, None);
+        });
+    }
+
+    ///No active entry, but a worktree, worktree is a file -> home_dir()
+    #[gpui::test]
+    async fn no_active_entry_worktree_is_file(cx: &mut TestAppContext) {
+        //Setup variables
+
+        let (project, workspace) = blank_workspace(cx).await;
+        create_file_wt(project.clone(), "/root.txt", cx).await;
+
+        cx.read(|cx| {
+            let workspace = workspace.read(cx);
+            let active_entry = project.read(cx).active_entry();
+
+            //Make sure the environment is as expected
+            assert!(active_entry.is_none());
+            assert!(workspace.worktrees(cx).next().is_some());
+
+            let res = current_project_directory(workspace, cx);
+            assert_eq!(res, None);
+            let res = first_project_directory(workspace, cx);
+            assert_eq!(res, None);
+        });
+    }
+
+    //No active entry, but a worktree, worktree is a folder -> worktree_folder
+    #[gpui::test]
+    async fn no_active_entry_worktree_is_dir(cx: &mut TestAppContext) {
+        //Setup variables
+        let (project, workspace) = blank_workspace(cx).await;
+        let (_wt, _entry) = create_folder_wt(project.clone(), "/root/", cx).await;
+
+        //Test
+        cx.update(|cx| {
+            let workspace = workspace.read(cx);
+            let active_entry = project.read(cx).active_entry();
+
+            assert!(active_entry.is_none());
+            assert!(workspace.worktrees(cx).next().is_some());
+
+            let res = current_project_directory(workspace, cx);
+            assert_eq!(res, Some((Path::new("/root/")).to_path_buf()));
+            let res = first_project_directory(workspace, cx);
+            assert_eq!(res, Some((Path::new("/root/")).to_path_buf()));
+        });
+    }
+
+    //Active entry with a work tree, worktree is a file -> home_dir()
+    #[gpui::test]
+    async fn active_entry_worktree_is_file(cx: &mut TestAppContext) {
+        //Setup variables
+
+        let (project, workspace) = blank_workspace(cx).await;
+        let (_wt, _entry) = create_folder_wt(project.clone(), "/root1/", cx).await;
+        let (wt2, entry2) = create_file_wt(project.clone(), "/root2.txt", cx).await;
+        insert_active_entry_for(wt2, entry2, project.clone(), cx);
+
+        //Test
+        cx.update(|cx| {
+            let workspace = workspace.read(cx);
+            let active_entry = project.read(cx).active_entry();
+
+            assert!(active_entry.is_some());
+
+            let res = current_project_directory(workspace, cx);
+            assert_eq!(res, None);
+            let res = first_project_directory(workspace, cx);
+            assert_eq!(res, Some((Path::new("/root1/")).to_path_buf()));
+        });
+    }
+
+    //Active entry, with a worktree, worktree is a folder -> worktree_folder
+    #[gpui::test]
+    async fn active_entry_worktree_is_dir(cx: &mut TestAppContext) {
+        //Setup variables
+        let (project, workspace) = blank_workspace(cx).await;
+        let (_wt, _entry) = create_folder_wt(project.clone(), "/root1/", cx).await;
+        let (wt2, entry2) = create_folder_wt(project.clone(), "/root2/", cx).await;
+        insert_active_entry_for(wt2, entry2, project.clone(), cx);
+
+        //Test
+        cx.update(|cx| {
+            let workspace = workspace.read(cx);
+            let active_entry = project.read(cx).active_entry();
+
+            assert!(active_entry.is_some());
+
+            let res = current_project_directory(workspace, cx);
+            assert_eq!(res, Some((Path::new("/root2/")).to_path_buf()));
+            let res = first_project_directory(workspace, cx);
+            assert_eq!(res, Some((Path::new("/root1/")).to_path_buf()));
+        });
+    }
+
+    ///Creates an empty project and workspace with no worktrees
+    pub async fn blank_workspace(
+        cx: &mut TestAppContext,
+    ) -> (ModelHandle<Project>, ViewHandle<Workspace>) {
+        let params = cx.update(AppState::test);
+
+        let project = Project::test(params.fs.clone(), [], cx).await;
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(
+                Default::default(),
+                0,
+                project.clone(),
+                |_, _| unimplemented!(),
+                cx,
+            )
+        });
+
+        (project, workspace)
+    }
+
+    ///Creates a worktree containing a single folder at the given path
+    async fn create_folder_wt(
+        project: ModelHandle<Project>,
+        path: impl AsRef<Path>,
+        cx: &mut TestAppContext,
+    ) -> (ModelHandle<Worktree>, Entry) {
+        create_wt(project, true, path, cx).await
+    }
+
+    ///Creates a worktree containing a single file at the given path
+    async fn create_file_wt(
+        project: ModelHandle<Project>,
+        path: impl AsRef<Path>,
+        cx: &mut TestAppContext,
+    ) -> (ModelHandle<Worktree>, Entry) {
+        create_wt(project, false, path, cx).await
+    }
+
+    async fn create_wt(
+        project: ModelHandle<Project>,
+        is_dir: bool,
+        path: impl AsRef<Path>,
+        cx: &mut TestAppContext,
+    ) -> (ModelHandle<Worktree>, Entry) {
+        let (wt, _) = project
+            .update(cx, |project, cx| {
+                project.find_or_create_local_worktree(path, true, cx)
+            })
+            .await
+            .unwrap();
+
+        let entry = cx
+            .update(|cx| {
+                wt.update(cx, |wt, cx| {
+                    wt.as_local()
+                        .unwrap()
+                        .create_entry(Path::new(""), is_dir, cx)
+                })
+            })
+            .await
+            .unwrap();
+
+        (wt, entry)
+    }
+
+    pub fn insert_active_entry_for(
+        wt: ModelHandle<Worktree>,
+        entry: Entry,
+        project: ModelHandle<Project>,
+        cx: &mut TestAppContext,
+    ) {
+        cx.update(|cx| {
+            let p = ProjectPath {
+                worktree_id: wt.read(cx).id(),
+                path: entry.path,
+            };
+            project.update(cx, |project, cx| project.set_active_path(Some(p), cx));
+        });
+    }
+}
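
Note: blink_cursors and pause_cursor_blinking above rely on an epoch counter to invalidate timers that are already in flight; bumping the epoch turns any previously spawned callback into a no-op. A standalone sketch of that pattern (Blinker and its methods are hypothetical names, not part of this change):

    // Each scheduled timer captures the epoch that was current when it was spawned.
    struct Blinker {
        epoch: usize,
        visible: bool,
    }

    impl Blinker {
        fn next_epoch(&mut self) -> usize {
            self.epoch += 1;
            self.epoch
        }

        fn on_timer(&mut self, epoch: usize) {
            // Callbacks scheduled before the last next_epoch() call are ignored.
            if epoch == self.epoch {
                self.visible = !self.visible;
            }
        }
    }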

crates/theme/src/theme.rs 🔗

@@ -31,6 +31,7 @@ pub struct Theme {
     pub shared_screen: ContainerStyle,
     pub contact_notification: ContactNotification,
     pub update_notification: UpdateNotification,
+    pub simple_message_notification: MessageNotification,
     pub project_shared_notification: ProjectSharedNotification,
     pub incoming_call_notification: IncomingCallNotification,
     pub tooltip: TooltipStyle,
@@ -478,6 +479,13 @@ pub struct UpdateNotification {
     pub dismiss_button: Interactive<IconButton>,
 }
 
+#[derive(Deserialize, Default)]
+pub struct MessageNotification {
+    pub message: ContainedText,
+    pub action_message: Interactive<ContainedText>,
+    pub dismiss_button: Interactive<IconButton>,
+}
+
 #[derive(Deserialize, Default)]
 pub struct ProjectSharedNotification {
     pub window_height: f32,

crates/theme_testbench/src/theme_testbench.rs 🔗

@@ -6,18 +6,24 @@ use gpui::{
         Padding, ParentElement,
     },
     fonts::TextStyle,
-    Border, Element, Entity, MutableAppContext, Quad, RenderContext, View, ViewContext,
+    Border, Element, Entity, ModelHandle, MutableAppContext, Quad, RenderContext, Task, View,
+    ViewContext, ViewHandle, WeakViewHandle,
 };
 use project::{Project, ProjectEntryId, ProjectPath};
 use settings::Settings;
 use smallvec::SmallVec;
 use theme::{ColorScheme, Layer, Style, StyleSet};
-use workspace::{Item, Workspace};
+use workspace::{
+    item::{Item, ItemEvent},
+    register_deserializable_item, Pane, Workspace,
+};
 
 actions!(theme, [DeployThemeTestbench]);
 
 pub fn init(cx: &mut MutableAppContext) {
     cx.add_action(ThemeTestbench::deploy);
+
+    register_deserializable_item::<ThemeTestbench>(cx)
 }
 
 pub struct ThemeTestbench {}
@@ -351,7 +357,21 @@ impl Item for ThemeTestbench {
         gpui::Task::ready(Ok(()))
     }
 
-    fn to_item_events(_: &Self::Event) -> Vec<workspace::ItemEvent> {
+    fn to_item_events(_: &Self::Event) -> Vec<ItemEvent> {
         Vec::new()
     }
+
+    fn serialized_item_kind() -> Option<&'static str> {
+        Some("ThemeTestBench")
+    }
+
+    fn deserialize(
+        _project: ModelHandle<Project>,
+        _workspace: WeakViewHandle<Workspace>,
+        _workspace_id: workspace::WorkspaceId,
+        _item_id: workspace::ItemId,
+        cx: &mut ViewContext<Pane>,
+    ) -> Task<gpui::anyhow::Result<ViewHandle<Self>>> {
+        Task::ready(Ok(cx.add_view(|_| Self {})))
+    }
 }

crates/util/Cargo.toml 🔗

@@ -11,6 +11,7 @@ test-support = ["serde_json", "tempdir", "git2"]
 
 [dependencies]
 anyhow = "1.0.38"
+backtrace = "0.3"
 futures = "0.3"
 log = { version = "0.4.16", features = ["kv_unstable_serde"] }
 lazy_static = "1.4.0"
@@ -18,6 +19,7 @@ rand = { workspace = true }
 tempdir = { version = "0.3.7", optional = true }
 serde_json = { version = "1.0", features = ["preserve_order"], optional = true }
 git2 = { version = "0.15", default-features = false, optional = true }
+dirs = "3.0"
 
 
 [dev-dependencies]

crates/util/src/channel.rs 🔗

@@ -0,0 +1,40 @@
+use std::env;
+
+use lazy_static::lazy_static;
+
+lazy_static! {
+    pub static ref RELEASE_CHANNEL_NAME: String = env::var("ZED_RELEASE_CHANNEL")
+        .unwrap_or(include_str!("../../zed/RELEASE_CHANNEL").to_string());
+    pub static ref RELEASE_CHANNEL: ReleaseChannel = match RELEASE_CHANNEL_NAME.as_str() {
+        "dev" => ReleaseChannel::Dev,
+        "preview" => ReleaseChannel::Preview,
+        "stable" => ReleaseChannel::Stable,
+        _ => panic!("invalid release channel {}", *RELEASE_CHANNEL_NAME),
+    };
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Default)]
+pub enum ReleaseChannel {
+    #[default]
+    Dev,
+    Preview,
+    Stable,
+}
+
+impl ReleaseChannel {
+    pub fn display_name(&self) -> &'static str {
+        match self {
+            ReleaseChannel::Dev => "Zed Dev",
+            ReleaseChannel::Preview => "Zed Preview",
+            ReleaseChannel::Stable => "Zed",
+        }
+    }
+
+    pub fn dev_name(&self) -> &'static str {
+        match self {
+            ReleaseChannel::Dev => "dev",
+            ReleaseChannel::Preview => "preview",
+            ReleaseChannel::Stable => "stable",
+        }
+    }
+}
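
Note: a brief sketch of how downstream crates can consume the new channel globals (assumes the crate is depended upon as `util`; `experiments_enabled` is a hypothetical helper):

    use util::channel::{ReleaseChannel, RELEASE_CHANNEL};

    // Gate non-stable behavior on the release channel resolved at startup.
    fn experiments_enabled() -> bool {
        *RELEASE_CHANNEL != ReleaseChannel::Stable
    }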

crates/util/src/lib.rs 🔗

@@ -1,6 +1,9 @@
+pub mod channel;
+pub mod paths;
 #[cfg(any(test, feature = "test-support"))]
 pub mod test;
 
+pub use backtrace::Backtrace;
 use futures::Future;
 use rand::{seq::SliceRandom, Rng};
 use std::{
@@ -10,6 +13,18 @@ use std::{
     task::{Context, Poll},
 };
 
+#[macro_export]
+macro_rules! debug_panic {
+    ( $($fmt_arg:tt)* ) => {
+        if cfg!(debug_assertions) {
+            panic!( $($fmt_arg)* );
+        } else {
+            let backtrace = $crate::Backtrace::new();
+            log::error!("{}\n{:?}", format_args!($($fmt_arg)*), backtrace);
+        }
+    };
+}
+
 pub fn truncate(s: &str, max_chars: usize) -> &str {
     match s.char_indices().nth(max_chars) {
         None => s,
@@ -191,6 +206,34 @@ impl<T: Rng> Iterator for RandomCharIter<T> {
     }
 }
 
+// Copy of the unstable standard library feature Option::unzip
+// https://github.com/rust-lang/rust/issues/87800
+// Remove when this ships in Rust 1.66 or 1.67
+pub fn unzip_option<T, U>(option: Option<(T, U)>) -> (Option<T>, Option<U>) {
+    match option {
+        Some((a, b)) => (Some(a), Some(b)),
+        None => (None, None),
+    }
+}
+
+/// Immediately invoked function expression. Good for using the ? operator
+/// in functions which do not return an Option or Result
+#[macro_export]
+macro_rules! iife {
+    ($block:block) => {
+        (|| $block)()
+    };
+}
+
+/// Async immediately invoked function expression. Good for using the ? operator
+/// in functions which do not return an Option or Result. Async version of the above.
+#[macro_export]
+macro_rules! async_iife {
+    ($block:block) => {
+        (|| async move { $block })()
+    };
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -208,4 +251,18 @@ mod tests {
         extend_sorted(&mut vec, vec![1000, 19, 17, 9, 5], 8, |a, b| b.cmp(a));
         assert_eq!(vec, &[1000, 101, 21, 19, 17, 13, 9, 8]);
     }
+
+    #[test]
+    fn test_iife() {
+        fn option_returning_function() -> Option<()> {
+            None
+        }
+
+        let foo = iife!({
+            option_returning_function()?;
+            Some(())
+        });
+
+        assert_eq!(foo, None);
+    }
 }
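
Note: a brief sketch of the new macros in use (assumes the crate is linked as `util` and that `log` is available; `checked_halve` is a hypothetical helper):

    // `iife!` wraps the block in an immediately invoked closure so `?` can be used even
    // though the enclosing function returns a plain u32. `debug_panic!` panics in debug
    // builds but only logs the message plus a backtrace in release builds.
    fn checked_halve(n: Option<u32>) -> u32 {
        util::iife!({
            let n = n?;
            if n % 2 != 0 {
                util::debug_panic!("expected an even number, got {}", n);
            }
            Some(n / 2)
        })
        .unwrap_or(0)
    }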

crates/vim/src/editor_events.rs 🔗

@@ -22,20 +22,9 @@ fn editor_focused(EditorFocused(editor): &EditorFocused, cx: &mut MutableAppCont
         vim.active_editor = Some(editor.downgrade());
         vim.selection_subscription = Some(cx.subscribe(editor, |editor, event, cx| {
             if editor.read(cx).leader_replica_id().is_none() {
-                match event {
-                    editor::Event::SelectionsChanged { local: true } => {
-                        let newest_empty =
-                            editor.read(cx).selections.newest::<usize>(cx).is_empty();
-                        editor_local_selections_changed(newest_empty, cx);
-                    }
-                    editor::Event::IgnoredInput => {
-                        Vim::update(cx, |vim, cx| {
-                            if vim.active_operator().is_some() {
-                                vim.clear_operator(cx);
-                            }
-                        });
-                    }
-                    _ => (),
+                if let editor::Event::SelectionsChanged { local: true } = event {
+                    let newest_empty = editor.read(cx).selections.newest::<usize>(cx).is_empty();
+                    editor_local_selections_changed(newest_empty, cx);
                 }
             }
         }));

crates/vim/src/insert.rs 🔗

@@ -1,5 +1,5 @@
 use crate::{state::Mode, Vim};
-use editor::{Autoscroll, Bias};
+use editor::{scroll::autoscroll::Autoscroll, Bias};
 use gpui::{actions, MutableAppContext, ViewContext};
 use language::SelectionGoal;
 use workspace::Workspace;

crates/vim/src/normal.rs 🔗

@@ -2,7 +2,7 @@ mod change;
 mod delete;
 mod yank;
 
-use std::borrow::Cow;
+use std::{borrow::Cow, cmp::Ordering};
 
 use crate::{
     motion::Motion,
@@ -12,10 +12,13 @@ use crate::{
 };
 use collections::{HashMap, HashSet};
 use editor::{
-    display_map::ToDisplayPoint, Anchor, Autoscroll, Bias, ClipboardSelection, DisplayPoint,
+    display_map::ToDisplayPoint,
+    scroll::{autoscroll::Autoscroll, scroll_amount::ScrollAmount},
+    Anchor, Bias, ClipboardSelection, DisplayPoint, Editor,
 };
-use gpui::{actions, MutableAppContext, ViewContext};
+use gpui::{actions, impl_actions, MutableAppContext, ViewContext};
 use language::{AutoindentMode, Point, SelectionGoal};
+use serde::Deserialize;
 use workspace::Workspace;
 
 use self::{
@@ -24,6 +27,9 @@ use self::{
     yank::{yank_motion, yank_object},
 };
 
+#[derive(Clone, PartialEq, Deserialize)]
+struct Scroll(ScrollAmount);
+
 actions!(
     vim,
     [
@@ -41,6 +47,8 @@ actions!(
     ]
 );
 
+impl_actions!(vim, [Scroll]);
+
 pub fn init(cx: &mut MutableAppContext) {
     cx.add_action(insert_after);
     cx.add_action(insert_first_non_whitespace);
@@ -72,6 +80,13 @@ pub fn init(cx: &mut MutableAppContext) {
         })
     });
     cx.add_action(paste);
+    cx.add_action(|_: &mut Workspace, Scroll(amount): &Scroll, cx| {
+        Vim::update(cx, |vim, cx| {
+            vim.update_active_editor(cx, |editor, cx| {
+                scroll(editor, amount, cx);
+            })
+        })
+    });
 }
 
 pub fn normal_motion(
@@ -367,6 +382,46 @@ fn paste(_: &mut Workspace, _: &Paste, cx: &mut ViewContext<Workspace>) {
     });
 }
 
+fn scroll(editor: &mut Editor, amount: &ScrollAmount, cx: &mut ViewContext<Editor>) {
+    let should_move_cursor = editor.newest_selection_on_screen(cx).is_eq();
+    editor.scroll_screen(amount, cx);
+    if should_move_cursor {
+        let selection_ordering = editor.newest_selection_on_screen(cx);
+        if selection_ordering.is_eq() {
+            return;
+        }
+
+        let visible_rows = if let Some(visible_rows) = editor.visible_line_count() {
+            visible_rows as u32
+        } else {
+            return;
+        };
+
+        let scroll_margin_rows = editor.vertical_scroll_margin() as u32;
+        let top_anchor = editor.scroll_manager.anchor().top_anchor;
+
+        editor.change_selections(None, cx, |s| {
+            s.replace_cursors_with(|snapshot| {
+                let mut new_point = top_anchor.to_display_point(&snapshot);
+
+                match selection_ordering {
+                    Ordering::Less => {
+                        *new_point.row_mut() += scroll_margin_rows;
+                        new_point = snapshot.clip_point(new_point, Bias::Right);
+                    }
+                    Ordering::Greater => {
+                        *new_point.row_mut() += visible_rows - scroll_margin_rows as u32;
+                        new_point = snapshot.clip_point(new_point, Bias::Left);
+                    }
+                    Ordering::Equal => unreachable!(),
+                }
+
+                vec![new_point]
+            })
+        });
+    }
+}
+
 #[cfg(test)]
 mod test {
     use indoc::indoc;

crates/vim/src/normal/change.rs 🔗

@@ -1,6 +1,7 @@
 use crate::{motion::Motion, object::Object, state::Mode, utils::copy_selections_content, Vim};
 use editor::{
-    char_kind, display_map::DisplaySnapshot, movement, Autoscroll, CharKind, DisplayPoint,
+    char_kind, display_map::DisplaySnapshot, movement, scroll::autoscroll::Autoscroll, CharKind,
+    DisplayPoint,
 };
 use gpui::MutableAppContext;
 use language::Selection;
@@ -199,7 +200,6 @@ mod test {
                 Test test
                 ˇtest"})
             .await;
-        println!("Marker");
         cx.assert(indoc! {"
                 Test test
                 ˇ

crates/vim/src/normal/delete.rs 🔗

@@ -1,6 +1,6 @@
 use crate::{motion::Motion, object::Object, utils::copy_selections_content, Vim};
 use collections::{HashMap, HashSet};
-use editor::{display_map::ToDisplayPoint, Autoscroll, Bias};
+use editor::{display_map::ToDisplayPoint, scroll::autoscroll::Autoscroll, Bias};
 use gpui::MutableAppContext;
 
 pub fn delete_motion(vim: &mut Vim, motion: Motion, times: usize, cx: &mut MutableAppContext) {

crates/vim/src/state.rs 🔗

@@ -18,6 +18,7 @@ impl Default for Mode {
 #[derive(Copy, Clone, Debug, PartialEq, Eq, Deserialize)]
 pub enum Namespace {
     G,
+    Z,
 }
 
 #[derive(Copy, Clone, Debug, PartialEq, Eq, Deserialize)]
@@ -95,6 +96,7 @@ impl Operator {
         let operator_context = match operator {
             Some(Operator::Number(_)) => "n",
             Some(Operator::Namespace(Namespace::G)) => "g",
+            Some(Operator::Namespace(Namespace::Z)) => "z",
             Some(Operator::Object { around: false }) => "i",
             Some(Operator::Object { around: true }) => "a",
             Some(Operator::Change) => "c",

crates/vim/src/test/vim_test_context.rs 🔗

@@ -41,11 +41,19 @@ impl<'a> VimTestContext<'a> {
             .insert_tree("/root", json!({ "dir": { "test.txt": "" } }))
             .await;
 
-        let (window_id, workspace) =
-            cx.add_window(|cx| Workspace::new(project.clone(), |_, _| unimplemented!(), cx));
+        let (window_id, workspace) = cx.add_window(|cx| {
+            Workspace::new(
+                Default::default(),
+                0,
+                project.clone(),
+                |_, _| unimplemented!(),
+                cx,
+            )
+        });
 
-        // Setup search toolbars
+        // Setup search toolbars and keypress hook
         workspace.update(cx, |workspace, cx| {
+            observe_keypresses(window_id, cx);
             workspace.active_pane().update(cx, |pane, cx| {
                 pane.toolbar().update(cx, |toolbar, cx| {
                     let buffer_search_bar = cx.add_view(BufferSearchBar::new);

crates/vim/src/vim.rs 🔗

@@ -81,6 +81,25 @@ pub fn init(cx: &mut MutableAppContext) {
     .detach();
 }
 
+// Any keystrokes not mapped to vim should clear the active operator
+pub fn observe_keypresses(window_id: usize, cx: &mut MutableAppContext) {
+    cx.observe_keystrokes(window_id, |_keystroke, _result, handled_by, cx| {
+        if let Some(handled_by) = handled_by {
+            if handled_by.namespace() == "vim" {
+                return true;
+            }
+        }
+
+        Vim::update(cx, |vim, cx| {
+            if vim.active_operator().is_some() {
+                vim.clear_operator(cx);
+            }
+        });
+        true
+    })
+    .detach()
+}
+
 #[derive(Default)]
 pub struct Vim {
     editors: HashMap<usize, WeakViewHandle<Editor>>,

crates/vim/src/visual.rs 🔗

@@ -1,7 +1,9 @@
 use std::borrow::Cow;
 
 use collections::HashMap;
-use editor::{display_map::ToDisplayPoint, Autoscroll, Bias, ClipboardSelection};
+use editor::{
+    display_map::ToDisplayPoint, scroll::autoscroll::Autoscroll, Bias, ClipboardSelection,
+};
 use gpui::{actions, MutableAppContext, ViewContext};
 use language::{AutoindentMode, SelectionGoal};
 use workspace::Workspace;

crates/workspace/Cargo.toml 🔗

@@ -18,6 +18,7 @@ test-support = [
 ]
 
 [dependencies]
+db = { path = "../db" }
 call = { path = "../call" }
 client = { path = "../client" }
 collections = { path = "../collections" }
@@ -31,14 +32,19 @@ project = { path = "../project" }
 settings = { path = "../settings" }
 theme = { path = "../theme" }
 util = { path = "../util" }
+async-recursion = "1.0.0"
+bincode = "1.2.1"
 anyhow = "1.0.38"
 futures = "0.3"
+lazy_static = "1.4"
+env_logger = "0.9.1"
 log = { version = "0.4.16", features = ["kv_unstable_serde"] }
 parking_lot = "0.11.1"
 postage = { version = "0.4.1", features = ["futures-traits"] }
 serde = { version = "1.0", features = ["derive", "rc"] }
 serde_json = { version = "1.0", features = ["preserve_order"] }
 smallvec = { version = "1.6", features = ["union"] }
+indoc = "1.0.4"
 
 [dev-dependencies]
 call = { path = "../call", features = ["test-support"] }
@@ -47,3 +53,4 @@ gpui = { path = "../gpui", features = ["test-support"] }
 project = { path = "../project", features = ["test-support"] }
 settings = { path = "../settings", features = ["test-support"] }
 fs = { path = "../fs", features = ["test-support"] }
+db = { path = "../db", features = ["test-support"] }

crates/workspace/src/dock.rs 🔗

@@ -98,14 +98,14 @@ pub fn icon_for_dock_anchor(anchor: DockAnchor) -> &'static str {
 }
 
 impl DockPosition {
-    fn is_visible(&self) -> bool {
+    pub fn is_visible(&self) -> bool {
         match self {
             DockPosition::Shown(_) => true,
             DockPosition::Hidden(_) => false,
         }
     }
 
-    fn anchor(&self) -> DockAnchor {
+    pub fn anchor(&self) -> DockAnchor {
         match self {
             DockPosition::Shown(anchor) | DockPosition::Hidden(anchor) => *anchor,
         }
@@ -126,20 +126,24 @@ impl DockPosition {
     }
 }
 
-pub type DefaultItemFactory =
-    fn(&mut Workspace, &mut ViewContext<Workspace>) -> Box<dyn ItemHandle>;
+pub type DockDefaultItemFactory =
+    fn(workspace: &mut Workspace, cx: &mut ViewContext<Workspace>) -> Option<Box<dyn ItemHandle>>;
 
 pub struct Dock {
     position: DockPosition,
     panel_sizes: HashMap<DockAnchor, f32>,
     pane: ViewHandle<Pane>,
-    default_item_factory: DefaultItemFactory,
+    default_item_factory: DockDefaultItemFactory,
 }
 
 impl Dock {
-    pub fn new(cx: &mut ViewContext<Workspace>, default_item_factory: DefaultItemFactory) -> Self {
-        let anchor = cx.global::<Settings>().default_dock_anchor;
-        let pane = cx.add_view(|cx| Pane::new(Some(anchor), cx));
+    pub fn new(
+        default_item_factory: DockDefaultItemFactory,
+        cx: &mut ViewContext<Workspace>,
+    ) -> Self {
+        let position = DockPosition::Hidden(cx.global::<Settings>().default_dock_anchor);
+
+        let pane = cx.add_view(|cx| Pane::new(Some(position.anchor()), cx));
         pane.update(cx, |pane, cx| {
             pane.set_active(false, cx);
         });
@@ -152,7 +156,7 @@ impl Dock {
         Self {
             pane,
             panel_sizes: Default::default(),
-            position: DockPosition::Hidden(anchor),
+            position,
             default_item_factory,
         }
     }
@@ -169,7 +173,7 @@ impl Dock {
         self.position.is_visible() && self.position.anchor() == anchor
     }
 
-    fn set_dock_position(
+    pub(crate) fn set_dock_position(
         workspace: &mut Workspace,
         new_position: DockPosition,
         cx: &mut ViewContext<Workspace>,
@@ -191,9 +195,11 @@ impl Dock {
             // Ensure that the pane has at least one item or construct a default item to put in it
             let pane = workspace.dock.pane.clone();
             if pane.read(cx).items().next().is_none() {
-                let item_to_add = (workspace.dock.default_item_factory)(workspace, cx);
-                // Adding the item focuses the pane by default
-                Pane::add_item(workspace, &pane, item_to_add, true, true, None, cx);
+                if let Some(item_to_add) = (workspace.dock.default_item_factory)(workspace, cx) {
+                    Pane::add_item(workspace, &pane, item_to_add, true, true, None, cx);
+                } else {
+                    workspace.dock.position = workspace.dock.position.hide();
+                }
             } else {
                 cx.focus(pane);
             }
@@ -205,6 +211,7 @@ impl Dock {
             cx.focus(last_active_center_pane);
         }
         cx.emit(crate::Event::DockAnchorChanged);
+        workspace.serialize_workspace(cx);
         cx.notify();
     }
 
@@ -341,6 +348,10 @@ impl Dock {
                 }
             })
     }
+
+    pub fn position(&self) -> DockPosition {
+        self.position
+    }
 }
 
 pub struct ToggleDockButton {
@@ -447,20 +458,77 @@ impl StatusItemView for ToggleDockButton {
 
 #[cfg(test)]
 mod tests {
-    use std::ops::{Deref, DerefMut};
+    use std::{
+        ops::{Deref, DerefMut},
+        path::PathBuf,
+    };
 
     use gpui::{AppContext, TestAppContext, UpdateView, ViewContext};
     use project::{FakeFs, Project};
     use settings::Settings;
 
     use super::*;
-    use crate::{sidebar::Sidebar, tests::TestItem, ItemHandle, Workspace};
+    use crate::{
+        dock,
+        item::test::TestItem,
+        persistence::model::{
+            SerializedItem, SerializedPane, SerializedPaneGroup, SerializedWorkspace,
+        },
+        register_deserializable_item,
+        sidebar::Sidebar,
+        ItemHandle, Workspace,
+    };
 
     pub fn default_item_factory(
         _workspace: &mut Workspace,
         cx: &mut ViewContext<Workspace>,
-    ) -> Box<dyn ItemHandle> {
-        Box::new(cx.add_view(|_| TestItem::new()))
+    ) -> Option<Box<dyn ItemHandle>> {
+        Some(Box::new(cx.add_view(|_| TestItem::new())))
+    }
+
+    #[gpui::test]
+    async fn test_dock_workspace_infinite_loop(cx: &mut TestAppContext) {
+        cx.foreground().forbid_parking();
+        Settings::test_async(cx);
+
+        cx.update(|cx| {
+            register_deserializable_item::<TestItem>(cx);
+        });
+
+        let serialized_workspace = SerializedWorkspace {
+            id: 0,
+            location: Vec::<PathBuf>::new().into(),
+            dock_position: dock::DockPosition::Shown(DockAnchor::Expanded),
+            center_group: SerializedPaneGroup::Pane(SerializedPane {
+                active: false,
+                children: vec![],
+            }),
+            dock_pane: SerializedPane {
+                active: true,
+                children: vec![SerializedItem {
+                    active: true,
+                    item_id: 0,
+                    kind: "test".into(),
+                }],
+            },
+            left_sidebar_open: false,
+        };
+
+        let fs = FakeFs::new(cx.background());
+        let project = Project::test(fs, [], cx).await;
+
+        let (_, _workspace) = cx.add_window(|cx| {
+            Workspace::new(
+                Some(serialized_workspace),
+                0,
+                project.clone(),
+                default_item_factory,
+                cx,
+            )
+        });
+
+        cx.foreground().run_until_parked();
+        // Should terminate instead of looping forever
     }
 
     #[gpui::test]
@@ -568,8 +636,9 @@ mod tests {
 
             cx.update(|cx| init(cx));
             let project = Project::test(fs, [], cx).await;
-            let (window_id, workspace) =
-                cx.add_window(|cx| Workspace::new(project, default_item_factory, cx));
+            let (window_id, workspace) = cx.add_window(|cx| {
+                Workspace::new(Default::default(), 0, project, default_item_factory, cx)
+            });
 
             workspace.update(cx, |workspace, cx| {
                 let left_panel = cx.add_view(|_| TestItem::new());

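The `DockDefaultItemFactory` change above lets a factory decline to produce an item: when it returns `None`, `set_dock_position` hides the dock again instead of focusing an empty pane. A minimal sketch of a conforming factory, assuming a hypothetical `MyPanelView` view type (mirroring the test factory in this hunk):

    fn my_default_item_factory(
        _workspace: &mut Workspace,
        cx: &mut ViewContext<Workspace>,
    ) -> Option<Box<dyn ItemHandle>> {
        // `MyPanelView` is illustrative; any `View` that implements `Item` works here.
        // Return `None` when the dock has nothing sensible to show.
        Some(Box::new(cx.add_view(|_| MyPanelView::new())))
    }
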
crates/workspace/src/item.rs 🔗

@@ -0,0 +1,918 @@
+use std::{
+    any::{Any, TypeId},
+    borrow::Cow,
+    cell::RefCell,
+    fmt,
+    path::PathBuf,
+    rc::Rc,
+    sync::atomic::{AtomicBool, Ordering},
+    time::Duration,
+};
+
+use anyhow::Result;
+use client::proto;
+use gpui::{
+    AnyViewHandle, AppContext, ElementBox, ModelHandle, MutableAppContext, Task, View, ViewContext,
+    ViewHandle, WeakViewHandle,
+};
+use project::{Project, ProjectEntryId, ProjectPath};
+use settings::{Autosave, Settings};
+use smallvec::SmallVec;
+use theme::Theme;
+use util::ResultExt;
+
+use crate::{
+    pane, persistence::model::ItemId, searchable::SearchableItemHandle, DelayedDebouncedEditAction,
+    FollowableItemBuilders, ItemNavHistory, Pane, ToolbarItemLocation, Workspace, WorkspaceId,
+};
+
+#[derive(Eq, PartialEq, Hash)]
+pub enum ItemEvent {
+    CloseItem,
+    UpdateTab,
+    UpdateBreadcrumbs,
+    Edit,
+}
+
+pub trait Item: View {
+    fn deactivated(&mut self, _: &mut ViewContext<Self>) {}
+    fn workspace_deactivated(&mut self, _: &mut ViewContext<Self>) {}
+    fn navigate(&mut self, _: Box<dyn Any>, _: &mut ViewContext<Self>) -> bool {
+        false
+    }
+    fn tab_description<'a>(&'a self, _: usize, _: &'a AppContext) -> Option<Cow<'a, str>> {
+        None
+    }
+    fn tab_content(&self, detail: Option<usize>, style: &theme::Tab, cx: &AppContext)
+        -> ElementBox;
+    fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
+    fn project_entry_ids(&self, cx: &AppContext) -> SmallVec<[ProjectEntryId; 3]>;
+    fn is_singleton(&self, cx: &AppContext) -> bool;
+    fn set_nav_history(&mut self, _: ItemNavHistory, _: &mut ViewContext<Self>);
+    fn clone_on_split(&self, _workspace_id: WorkspaceId, _: &mut ViewContext<Self>) -> Option<Self>
+    where
+        Self: Sized,
+    {
+        None
+    }
+    fn is_dirty(&self, _: &AppContext) -> bool {
+        false
+    }
+    fn has_conflict(&self, _: &AppContext) -> bool {
+        false
+    }
+    fn can_save(&self, cx: &AppContext) -> bool;
+    fn save(
+        &mut self,
+        project: ModelHandle<Project>,
+        cx: &mut ViewContext<Self>,
+    ) -> Task<Result<()>>;
+    fn save_as(
+        &mut self,
+        project: ModelHandle<Project>,
+        abs_path: PathBuf,
+        cx: &mut ViewContext<Self>,
+    ) -> Task<Result<()>>;
+    fn reload(
+        &mut self,
+        project: ModelHandle<Project>,
+        cx: &mut ViewContext<Self>,
+    ) -> Task<Result<()>>;
+    fn git_diff_recalc(
+        &mut self,
+        _project: ModelHandle<Project>,
+        _cx: &mut ViewContext<Self>,
+    ) -> Task<Result<()>> {
+        Task::ready(Ok(()))
+    }
+    fn to_item_events(event: &Self::Event) -> Vec<ItemEvent>;
+    fn should_close_item_on_event(_: &Self::Event) -> bool {
+        false
+    }
+    fn should_update_tab_on_event(_: &Self::Event) -> bool {
+        false
+    }
+    fn is_edit_event(_: &Self::Event) -> bool {
+        false
+    }
+    fn act_as_type(
+        &self,
+        type_id: TypeId,
+        self_handle: &ViewHandle<Self>,
+        _: &AppContext,
+    ) -> Option<AnyViewHandle> {
+        if TypeId::of::<Self>() == type_id {
+            Some(self_handle.into())
+        } else {
+            None
+        }
+    }
+    fn as_searchable(&self, _: &ViewHandle<Self>) -> Option<Box<dyn SearchableItemHandle>> {
+        None
+    }
+
+    fn breadcrumb_location(&self) -> ToolbarItemLocation {
+        ToolbarItemLocation::Hidden
+    }
+
+    fn breadcrumbs(&self, _theme: &Theme, _cx: &AppContext) -> Option<Vec<ElementBox>> {
+        None
+    }
+
+    fn added_to_workspace(&mut self, _workspace: &mut Workspace, _cx: &mut ViewContext<Self>) {}
+
+    fn serialized_item_kind() -> Option<&'static str>;
+
+    fn deserialize(
+        project: ModelHandle<Project>,
+        workspace: WeakViewHandle<Workspace>,
+        workspace_id: WorkspaceId,
+        item_id: ItemId,
+        cx: &mut ViewContext<Pane>,
+    ) -> Task<Result<ViewHandle<Self>>>;
+}
+
+pub trait ItemHandle: 'static + fmt::Debug {
+    fn subscribe_to_item_events(
+        &self,
+        cx: &mut MutableAppContext,
+        handler: Box<dyn Fn(ItemEvent, &mut MutableAppContext)>,
+    ) -> gpui::Subscription;
+    fn tab_description<'a>(&self, detail: usize, cx: &'a AppContext) -> Option<Cow<'a, str>>;
+    fn tab_content(&self, detail: Option<usize>, style: &theme::Tab, cx: &AppContext)
+        -> ElementBox;
+    fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
+    fn project_entry_ids(&self, cx: &AppContext) -> SmallVec<[ProjectEntryId; 3]>;
+    fn is_singleton(&self, cx: &AppContext) -> bool;
+    fn boxed_clone(&self) -> Box<dyn ItemHandle>;
+    fn clone_on_split(
+        &self,
+        workspace_id: WorkspaceId,
+        cx: &mut MutableAppContext,
+    ) -> Option<Box<dyn ItemHandle>>;
+    fn added_to_pane(
+        &self,
+        workspace: &mut Workspace,
+        pane: ViewHandle<Pane>,
+        cx: &mut ViewContext<Workspace>,
+    );
+    fn deactivated(&self, cx: &mut MutableAppContext);
+    fn workspace_deactivated(&self, cx: &mut MutableAppContext);
+    fn navigate(&self, data: Box<dyn Any>, cx: &mut MutableAppContext) -> bool;
+    fn id(&self) -> usize;
+    fn window_id(&self) -> usize;
+    fn to_any(&self) -> AnyViewHandle;
+    fn is_dirty(&self, cx: &AppContext) -> bool;
+    fn has_conflict(&self, cx: &AppContext) -> bool;
+    fn can_save(&self, cx: &AppContext) -> bool;
+    fn save(&self, project: ModelHandle<Project>, cx: &mut MutableAppContext) -> Task<Result<()>>;
+    fn save_as(
+        &self,
+        project: ModelHandle<Project>,
+        abs_path: PathBuf,
+        cx: &mut MutableAppContext,
+    ) -> Task<Result<()>>;
+    fn reload(&self, project: ModelHandle<Project>, cx: &mut MutableAppContext)
+        -> Task<Result<()>>;
+    fn git_diff_recalc(
+        &self,
+        project: ModelHandle<Project>,
+        cx: &mut MutableAppContext,
+    ) -> Task<Result<()>>;
+    fn act_as_type(&self, type_id: TypeId, cx: &AppContext) -> Option<AnyViewHandle>;
+    fn to_followable_item_handle(&self, cx: &AppContext) -> Option<Box<dyn FollowableItemHandle>>;
+    fn on_release(
+        &self,
+        cx: &mut MutableAppContext,
+        callback: Box<dyn FnOnce(&mut MutableAppContext)>,
+    ) -> gpui::Subscription;
+    fn to_searchable_item_handle(&self, cx: &AppContext) -> Option<Box<dyn SearchableItemHandle>>;
+    fn breadcrumb_location(&self, cx: &AppContext) -> ToolbarItemLocation;
+    fn breadcrumbs(&self, theme: &Theme, cx: &AppContext) -> Option<Vec<ElementBox>>;
+    fn serialized_item_kind(&self) -> Option<&'static str>;
+}
+
+pub trait WeakItemHandle {
+    fn id(&self) -> usize;
+    fn window_id(&self) -> usize;
+    fn upgrade(&self, cx: &AppContext) -> Option<Box<dyn ItemHandle>>;
+}
+
+impl dyn ItemHandle {
+    pub fn downcast<T: View>(&self) -> Option<ViewHandle<T>> {
+        self.to_any().downcast()
+    }
+
+    pub fn act_as<T: View>(&self, cx: &AppContext) -> Option<ViewHandle<T>> {
+        self.act_as_type(TypeId::of::<T>(), cx)
+            .and_then(|t| t.downcast())
+    }
+}
+
+impl<T: Item> ItemHandle for ViewHandle<T> {
+    fn subscribe_to_item_events(
+        &self,
+        cx: &mut MutableAppContext,
+        handler: Box<dyn Fn(ItemEvent, &mut MutableAppContext)>,
+    ) -> gpui::Subscription {
+        cx.subscribe(self, move |_, event, cx| {
+            for item_event in T::to_item_events(event) {
+                handler(item_event, cx)
+            }
+        })
+    }
+
+    fn tab_description<'a>(&self, detail: usize, cx: &'a AppContext) -> Option<Cow<'a, str>> {
+        self.read(cx).tab_description(detail, cx)
+    }
+
+    fn tab_content(
+        &self,
+        detail: Option<usize>,
+        style: &theme::Tab,
+        cx: &AppContext,
+    ) -> ElementBox {
+        self.read(cx).tab_content(detail, style, cx)
+    }
+
+    fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
+        self.read(cx).project_path(cx)
+    }
+
+    fn project_entry_ids(&self, cx: &AppContext) -> SmallVec<[ProjectEntryId; 3]> {
+        self.read(cx).project_entry_ids(cx)
+    }
+
+    fn is_singleton(&self, cx: &AppContext) -> bool {
+        self.read(cx).is_singleton(cx)
+    }
+
+    fn boxed_clone(&self) -> Box<dyn ItemHandle> {
+        Box::new(self.clone())
+    }
+
+    fn clone_on_split(
+        &self,
+        workspace_id: WorkspaceId,
+        cx: &mut MutableAppContext,
+    ) -> Option<Box<dyn ItemHandle>> {
+        self.update(cx, |item, cx| {
+            cx.add_option_view(|cx| item.clone_on_split(workspace_id, cx))
+        })
+        .map(|handle| Box::new(handle) as Box<dyn ItemHandle>)
+    }
+
+    fn added_to_pane(
+        &self,
+        workspace: &mut Workspace,
+        pane: ViewHandle<Pane>,
+        cx: &mut ViewContext<Workspace>,
+    ) {
+        let history = pane.read(cx).nav_history_for_item(self);
+        self.update(cx, |this, cx| {
+            this.set_nav_history(history, cx);
+            this.added_to_workspace(workspace, cx);
+        });
+
+        if let Some(followed_item) = self.to_followable_item_handle(cx) {
+            if let Some(message) = followed_item.to_state_proto(cx) {
+                workspace.update_followers(
+                    proto::update_followers::Variant::CreateView(proto::View {
+                        id: followed_item.id() as u64,
+                        variant: Some(message),
+                        leader_id: workspace.leader_for_pane(&pane).map(|id| id.0),
+                    }),
+                    cx,
+                );
+            }
+        }
+
+        if workspace
+            .panes_by_item
+            .insert(self.id(), pane.downgrade())
+            .is_none()
+        {
+            let mut pending_autosave = DelayedDebouncedEditAction::new();
+            let mut pending_git_update = DelayedDebouncedEditAction::new();
+            let pending_update = Rc::new(RefCell::new(None));
+            let pending_update_scheduled = Rc::new(AtomicBool::new(false));
+
+            let mut event_subscription =
+                Some(cx.subscribe(self, move |workspace, item, event, cx| {
+                    let pane = if let Some(pane) = workspace
+                        .panes_by_item
+                        .get(&item.id())
+                        .and_then(|pane| pane.upgrade(cx))
+                    {
+                        pane
+                    } else {
+                        log::error!("unexpected item event after pane was dropped");
+                        return;
+                    };
+
+                    if let Some(item) = item.to_followable_item_handle(cx) {
+                        let leader_id = workspace.leader_for_pane(&pane);
+
+                        if leader_id.is_some() && item.should_unfollow_on_event(event, cx) {
+                            workspace.unfollow(&pane, cx);
+                        }
+
+                        if item.add_event_to_update_proto(
+                            event,
+                            &mut *pending_update.borrow_mut(),
+                            cx,
+                        ) && !pending_update_scheduled.load(Ordering::SeqCst)
+                        {
+                            pending_update_scheduled.store(true, Ordering::SeqCst);
+                            cx.after_window_update({
+                                let pending_update = pending_update.clone();
+                                let pending_update_scheduled = pending_update_scheduled.clone();
+                                move |this, cx| {
+                                    pending_update_scheduled.store(false, Ordering::SeqCst);
+                                    this.update_followers(
+                                        proto::update_followers::Variant::UpdateView(
+                                            proto::UpdateView {
+                                                id: item.id() as u64,
+                                                variant: pending_update.borrow_mut().take(),
+                                                leader_id: leader_id.map(|id| id.0),
+                                            },
+                                        ),
+                                        cx,
+                                    );
+                                }
+                            });
+                        }
+                    }
+
+                    for item_event in T::to_item_events(event).into_iter() {
+                        match item_event {
+                            ItemEvent::CloseItem => {
+                                Pane::close_item(workspace, pane, item.id(), cx)
+                                    .detach_and_log_err(cx);
+                                return;
+                            }
+
+                            ItemEvent::UpdateTab => {
+                                pane.update(cx, |_, cx| {
+                                    cx.emit(pane::Event::ChangeItemTitle);
+                                    cx.notify();
+                                });
+                            }
+
+                            ItemEvent::Edit => {
+                                if let Autosave::AfterDelay { milliseconds } =
+                                    cx.global::<Settings>().autosave
+                                {
+                                    let delay = Duration::from_millis(milliseconds);
+                                    let item = item.clone();
+                                    pending_autosave.fire_new(
+                                        delay,
+                                        workspace,
+                                        cx,
+                                        |project, mut cx| async move {
+                                            cx.update(|cx| Pane::autosave_item(&item, project, cx))
+                                                .await
+                                                .log_err();
+                                        },
+                                    );
+                                }
+
+                                let settings = cx.global::<Settings>();
+                                let debounce_delay = settings.git_overrides.gutter_debounce;
+
+                                let item = item.clone();
+
+                                if let Some(delay) = debounce_delay {
+                                    const MIN_GIT_DELAY: u64 = 50;
+
+                                    let delay = delay.max(MIN_GIT_DELAY);
+                                    let duration = Duration::from_millis(delay);
+
+                                    pending_git_update.fire_new(
+                                        duration,
+                                        workspace,
+                                        cx,
+                                        |project, mut cx| async move {
+                                            cx.update(|cx| item.git_diff_recalc(project, cx))
+                                                .await
+                                                .log_err();
+                                        },
+                                    );
+                                } else {
+                                    let project = workspace.project().downgrade();
+                                    cx.spawn_weak(|_, mut cx| async move {
+                                        if let Some(project) = project.upgrade(&cx) {
+                                            cx.update(|cx| item.git_diff_recalc(project, cx))
+                                                .await
+                                                .log_err();
+                                        }
+                                    })
+                                    .detach();
+                                }
+                            }
+
+                            _ => {}
+                        }
+                    }
+                }));
+
+            cx.observe_focus(self, move |workspace, item, focused, cx| {
+                if !focused && cx.global::<Settings>().autosave == Autosave::OnFocusChange {
+                    Pane::autosave_item(&item, workspace.project.clone(), cx)
+                        .detach_and_log_err(cx);
+                }
+            })
+            .detach();
+
+            let item_id = self.id();
+            cx.observe_release(self, move |workspace, _, _| {
+                workspace.panes_by_item.remove(&item_id);
+                event_subscription.take();
+            })
+            .detach();
+        }
+
+        cx.defer(|workspace, cx| {
+            workspace.serialize_workspace(cx);
+        });
+    }
+
+    fn deactivated(&self, cx: &mut MutableAppContext) {
+        self.update(cx, |this, cx| this.deactivated(cx));
+    }
+
+    fn workspace_deactivated(&self, cx: &mut MutableAppContext) {
+        self.update(cx, |this, cx| this.workspace_deactivated(cx));
+    }
+
+    fn navigate(&self, data: Box<dyn Any>, cx: &mut MutableAppContext) -> bool {
+        self.update(cx, |this, cx| this.navigate(data, cx))
+    }
+
+    fn id(&self) -> usize {
+        self.id()
+    }
+
+    fn window_id(&self) -> usize {
+        self.window_id()
+    }
+
+    fn to_any(&self) -> AnyViewHandle {
+        self.into()
+    }
+
+    fn is_dirty(&self, cx: &AppContext) -> bool {
+        self.read(cx).is_dirty(cx)
+    }
+
+    fn has_conflict(&self, cx: &AppContext) -> bool {
+        self.read(cx).has_conflict(cx)
+    }
+
+    fn can_save(&self, cx: &AppContext) -> bool {
+        self.read(cx).can_save(cx)
+    }
+
+    fn save(&self, project: ModelHandle<Project>, cx: &mut MutableAppContext) -> Task<Result<()>> {
+        self.update(cx, |item, cx| item.save(project, cx))
+    }
+
+    fn save_as(
+        &self,
+        project: ModelHandle<Project>,
+        abs_path: PathBuf,
+        cx: &mut MutableAppContext,
+    ) -> Task<anyhow::Result<()>> {
+        self.update(cx, |item, cx| item.save_as(project, abs_path, cx))
+    }
+
+    fn reload(
+        &self,
+        project: ModelHandle<Project>,
+        cx: &mut MutableAppContext,
+    ) -> Task<Result<()>> {
+        self.update(cx, |item, cx| item.reload(project, cx))
+    }
+
+    fn git_diff_recalc(
+        &self,
+        project: ModelHandle<Project>,
+        cx: &mut MutableAppContext,
+    ) -> Task<Result<()>> {
+        self.update(cx, |item, cx| item.git_diff_recalc(project, cx))
+    }
+
+    fn act_as_type(&self, type_id: TypeId, cx: &AppContext) -> Option<AnyViewHandle> {
+        self.read(cx).act_as_type(type_id, self, cx)
+    }
+
+    fn to_followable_item_handle(&self, cx: &AppContext) -> Option<Box<dyn FollowableItemHandle>> {
+        if cx.has_global::<FollowableItemBuilders>() {
+            let builders = cx.global::<FollowableItemBuilders>();
+            let item = self.to_any();
+            Some(builders.get(&item.view_type())?.1(item))
+        } else {
+            None
+        }
+    }
+
+    fn on_release(
+        &self,
+        cx: &mut MutableAppContext,
+        callback: Box<dyn FnOnce(&mut MutableAppContext)>,
+    ) -> gpui::Subscription {
+        cx.observe_release(self, move |_, cx| callback(cx))
+    }
+
+    fn to_searchable_item_handle(&self, cx: &AppContext) -> Option<Box<dyn SearchableItemHandle>> {
+        self.read(cx).as_searchable(self)
+    }
+
+    fn breadcrumb_location(&self, cx: &AppContext) -> ToolbarItemLocation {
+        self.read(cx).breadcrumb_location()
+    }
+
+    fn breadcrumbs(&self, theme: &Theme, cx: &AppContext) -> Option<Vec<ElementBox>> {
+        self.read(cx).breadcrumbs(theme, cx)
+    }
+
+    fn serialized_item_kind(&self) -> Option<&'static str> {
+        T::serialized_item_kind()
+    }
+}
+
+impl From<Box<dyn ItemHandle>> for AnyViewHandle {
+    fn from(val: Box<dyn ItemHandle>) -> Self {
+        val.to_any()
+    }
+}
+
+impl From<&Box<dyn ItemHandle>> for AnyViewHandle {
+    fn from(val: &Box<dyn ItemHandle>) -> Self {
+        val.to_any()
+    }
+}
+
+impl Clone for Box<dyn ItemHandle> {
+    fn clone(&self) -> Box<dyn ItemHandle> {
+        self.boxed_clone()
+    }
+}
+
+impl<T: Item> WeakItemHandle for WeakViewHandle<T> {
+    fn id(&self) -> usize {
+        self.id()
+    }
+
+    fn window_id(&self) -> usize {
+        self.window_id()
+    }
+
+    fn upgrade(&self, cx: &AppContext) -> Option<Box<dyn ItemHandle>> {
+        self.upgrade(cx).map(|v| Box::new(v) as Box<dyn ItemHandle>)
+    }
+}
+
+pub trait ProjectItem: Item {
+    type Item: project::Item;
+
+    fn for_project_item(
+        project: ModelHandle<Project>,
+        item: ModelHandle<Self::Item>,
+        cx: &mut ViewContext<Self>,
+    ) -> Self;
+}
+
+pub trait FollowableItem: Item {
+    fn to_state_proto(&self, cx: &AppContext) -> Option<proto::view::Variant>;
+    fn from_state_proto(
+        pane: ViewHandle<Pane>,
+        project: ModelHandle<Project>,
+        state: &mut Option<proto::view::Variant>,
+        cx: &mut MutableAppContext,
+    ) -> Option<Task<Result<ViewHandle<Self>>>>;
+    fn add_event_to_update_proto(
+        &self,
+        event: &Self::Event,
+        update: &mut Option<proto::update_view::Variant>,
+        cx: &AppContext,
+    ) -> bool;
+    fn apply_update_proto(
+        &mut self,
+        project: &ModelHandle<Project>,
+        message: proto::update_view::Variant,
+        cx: &mut ViewContext<Self>,
+    ) -> Task<Result<()>>;
+
+    fn set_leader_replica_id(&mut self, leader_replica_id: Option<u16>, cx: &mut ViewContext<Self>);
+    fn should_unfollow_on_event(event: &Self::Event, cx: &AppContext) -> bool;
+}
+
+pub trait FollowableItemHandle: ItemHandle {
+    fn set_leader_replica_id(&self, leader_replica_id: Option<u16>, cx: &mut MutableAppContext);
+    fn to_state_proto(&self, cx: &AppContext) -> Option<proto::view::Variant>;
+    fn add_event_to_update_proto(
+        &self,
+        event: &dyn Any,
+        update: &mut Option<proto::update_view::Variant>,
+        cx: &AppContext,
+    ) -> bool;
+    fn apply_update_proto(
+        &self,
+        project: &ModelHandle<Project>,
+        message: proto::update_view::Variant,
+        cx: &mut MutableAppContext,
+    ) -> Task<Result<()>>;
+    fn should_unfollow_on_event(&self, event: &dyn Any, cx: &AppContext) -> bool;
+}
+
+impl<T: FollowableItem> FollowableItemHandle for ViewHandle<T> {
+    fn set_leader_replica_id(&self, leader_replica_id: Option<u16>, cx: &mut MutableAppContext) {
+        self.update(cx, |this, cx| {
+            this.set_leader_replica_id(leader_replica_id, cx)
+        })
+    }
+
+    fn to_state_proto(&self, cx: &AppContext) -> Option<proto::view::Variant> {
+        self.read(cx).to_state_proto(cx)
+    }
+
+    fn add_event_to_update_proto(
+        &self,
+        event: &dyn Any,
+        update: &mut Option<proto::update_view::Variant>,
+        cx: &AppContext,
+    ) -> bool {
+        if let Some(event) = event.downcast_ref() {
+            self.read(cx).add_event_to_update_proto(event, update, cx)
+        } else {
+            false
+        }
+    }
+
+    fn apply_update_proto(
+        &self,
+        project: &ModelHandle<Project>,
+        message: proto::update_view::Variant,
+        cx: &mut MutableAppContext,
+    ) -> Task<Result<()>> {
+        self.update(cx, |this, cx| this.apply_update_proto(project, message, cx))
+    }
+
+    fn should_unfollow_on_event(&self, event: &dyn Any, cx: &AppContext) -> bool {
+        if let Some(event) = event.downcast_ref() {
+            T::should_unfollow_on_event(event, cx)
+        } else {
+            false
+        }
+    }
+}
+
+#[cfg(test)]
+pub(crate) mod test {
+    use std::{any::Any, borrow::Cow, cell::Cell};
+
+    use gpui::{
+        elements::Empty, AppContext, Element, ElementBox, Entity, ModelHandle, RenderContext, Task,
+        View, ViewContext, ViewHandle, WeakViewHandle,
+    };
+    use project::{Project, ProjectEntryId, ProjectPath};
+    use smallvec::SmallVec;
+
+    use crate::{sidebar::SidebarItem, ItemId, ItemNavHistory, Pane, Workspace, WorkspaceId};
+
+    use super::{Item, ItemEvent};
+
+    pub struct TestItem {
+        pub workspace_id: WorkspaceId,
+        pub state: String,
+        pub label: String,
+        pub save_count: usize,
+        pub save_as_count: usize,
+        pub reload_count: usize,
+        pub is_dirty: bool,
+        pub is_singleton: bool,
+        pub has_conflict: bool,
+        pub project_entry_ids: Vec<ProjectEntryId>,
+        pub project_path: Option<ProjectPath>,
+        pub nav_history: Option<ItemNavHistory>,
+        pub tab_descriptions: Option<Vec<&'static str>>,
+        pub tab_detail: Cell<Option<usize>>,
+    }
+
+    pub enum TestItemEvent {
+        Edit,
+    }
+
+    impl Clone for TestItem {
+        fn clone(&self) -> Self {
+            Self {
+                state: self.state.clone(),
+                label: self.label.clone(),
+                save_count: self.save_count,
+                save_as_count: self.save_as_count,
+                reload_count: self.reload_count,
+                is_dirty: self.is_dirty,
+                is_singleton: self.is_singleton,
+                has_conflict: self.has_conflict,
+                project_entry_ids: self.project_entry_ids.clone(),
+                project_path: self.project_path.clone(),
+                nav_history: None,
+                tab_descriptions: None,
+                tab_detail: Default::default(),
+                workspace_id: self.workspace_id,
+            }
+        }
+    }
+
+    impl TestItem {
+        pub fn new() -> Self {
+            Self {
+                state: String::new(),
+                label: String::new(),
+                save_count: 0,
+                save_as_count: 0,
+                reload_count: 0,
+                is_dirty: false,
+                has_conflict: false,
+                project_entry_ids: Vec::new(),
+                project_path: None,
+                is_singleton: true,
+                nav_history: None,
+                tab_descriptions: None,
+                tab_detail: Default::default(),
+                workspace_id: 0,
+            }
+        }
+
+        pub fn new_deserialized(id: WorkspaceId) -> Self {
+            let mut this = Self::new();
+            this.workspace_id = id;
+            this
+        }
+
+        pub fn with_label(mut self, state: &str) -> Self {
+            self.label = state.to_string();
+            self
+        }
+
+        pub fn with_singleton(mut self, singleton: bool) -> Self {
+            self.is_singleton = singleton;
+            self
+        }
+
+        pub fn with_project_entry_ids(mut self, project_entry_ids: &[u64]) -> Self {
+            self.project_entry_ids.extend(
+                project_entry_ids
+                    .iter()
+                    .copied()
+                    .map(ProjectEntryId::from_proto),
+            );
+            self
+        }
+
+        pub fn set_state(&mut self, state: String, cx: &mut ViewContext<Self>) {
+            self.push_to_nav_history(cx);
+            self.state = state;
+        }
+
+        fn push_to_nav_history(&mut self, cx: &mut ViewContext<Self>) {
+            if let Some(history) = &mut self.nav_history {
+                history.push(Some(Box::new(self.state.clone())), cx);
+            }
+        }
+    }
+
+    impl Entity for TestItem {
+        type Event = TestItemEvent;
+    }
+
+    impl View for TestItem {
+        fn ui_name() -> &'static str {
+            "TestItem"
+        }
+
+        fn render(&mut self, _: &mut RenderContext<Self>) -> ElementBox {
+            Empty::new().boxed()
+        }
+    }
+
+    impl Item for TestItem {
+        fn tab_description<'a>(&'a self, detail: usize, _: &'a AppContext) -> Option<Cow<'a, str>> {
+            self.tab_descriptions.as_ref().and_then(|descriptions| {
+                let description = *descriptions.get(detail).or_else(|| descriptions.last())?;
+                Some(description.into())
+            })
+        }
+
+        fn tab_content(&self, detail: Option<usize>, _: &theme::Tab, _: &AppContext) -> ElementBox {
+            self.tab_detail.set(detail);
+            Empty::new().boxed()
+        }
+
+        fn project_path(&self, _: &AppContext) -> Option<ProjectPath> {
+            self.project_path.clone()
+        }
+
+        fn project_entry_ids(&self, _: &AppContext) -> SmallVec<[ProjectEntryId; 3]> {
+            self.project_entry_ids.iter().copied().collect()
+        }
+
+        fn is_singleton(&self, _: &AppContext) -> bool {
+            self.is_singleton
+        }
+
+        fn set_nav_history(&mut self, history: ItemNavHistory, _: &mut ViewContext<Self>) {
+            self.nav_history = Some(history);
+        }
+
+        fn navigate(&mut self, state: Box<dyn Any>, _: &mut ViewContext<Self>) -> bool {
+            let state = *state.downcast::<String>().unwrap_or_default();
+            if state != self.state {
+                self.state = state;
+                true
+            } else {
+                false
+            }
+        }
+
+        fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
+            self.push_to_nav_history(cx);
+        }
+
+        fn clone_on_split(
+            &self,
+            _workspace_id: WorkspaceId,
+            _: &mut ViewContext<Self>,
+        ) -> Option<Self>
+        where
+            Self: Sized,
+        {
+            Some(self.clone())
+        }
+
+        fn is_dirty(&self, _: &AppContext) -> bool {
+            self.is_dirty
+        }
+
+        fn has_conflict(&self, _: &AppContext) -> bool {
+            self.has_conflict
+        }
+
+        fn can_save(&self, _: &AppContext) -> bool {
+            !self.project_entry_ids.is_empty()
+        }
+
+        fn save(
+            &mut self,
+            _: ModelHandle<Project>,
+            _: &mut ViewContext<Self>,
+        ) -> Task<anyhow::Result<()>> {
+            self.save_count += 1;
+            self.is_dirty = false;
+            Task::ready(Ok(()))
+        }
+
+        fn save_as(
+            &mut self,
+            _: ModelHandle<Project>,
+            _: std::path::PathBuf,
+            _: &mut ViewContext<Self>,
+        ) -> Task<anyhow::Result<()>> {
+            self.save_as_count += 1;
+            self.is_dirty = false;
+            Task::ready(Ok(()))
+        }
+
+        fn reload(
+            &mut self,
+            _: ModelHandle<Project>,
+            _: &mut ViewContext<Self>,
+        ) -> Task<anyhow::Result<()>> {
+            self.reload_count += 1;
+            self.is_dirty = false;
+            Task::ready(Ok(()))
+        }
+
+        fn to_item_events(_: &Self::Event) -> Vec<ItemEvent> {
+            vec![ItemEvent::UpdateTab, ItemEvent::Edit]
+        }
+
+        fn serialized_item_kind() -> Option<&'static str> {
+            None
+        }
+
+        fn deserialize(
+            _project: ModelHandle<Project>,
+            _workspace: WeakViewHandle<Workspace>,
+            workspace_id: WorkspaceId,
+            _item_id: ItemId,
+            cx: &mut ViewContext<Pane>,
+        ) -> Task<anyhow::Result<ViewHandle<Self>>> {
+            let view = cx.add_view(|_cx| Self::new_deserialized(workspace_id));
+            Task::Ready(Some(anyhow::Ok(view)))
+        }
+    }
+
+    impl SidebarItem for TestItem {}
+}
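
The `serialized_item_kind` and `deserialize` hooks are how an item type opts into workspace persistence; they pair with a `register_deserializable_item::<T>(cx)` call at startup, as in the dock test earlier. A hedged sketch, assuming a hypothetical `MyView` type with a `for_workspace` constructor (real item types will differ):

    impl Item for MyView {
        // ...the other required Item methods are omitted here...

        fn serialized_item_kind() -> Option<&'static str> {
            // Stable string written to the `kind` column of the `items` table.
            Some("MyView")
        }

        fn deserialize(
            _project: ModelHandle<Project>,
            _workspace: WeakViewHandle<Workspace>,
            workspace_id: WorkspaceId,
            _item_id: ItemId,
            cx: &mut ViewContext<Pane>,
        ) -> Task<Result<ViewHandle<Self>>> {
            // Rebuild the view from whatever state was persisted for this workspace.
            let view = cx.add_view(|_| MyView::for_workspace(workspace_id));
            Task::ready(Ok(view))
        }
    }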

crates/workspace/src/notifications.rs 🔗

@@ -0,0 +1,334 @@
+use std::{any::TypeId, ops::DerefMut};
+
+use collections::HashSet;
+use gpui::{AnyViewHandle, Entity, MutableAppContext, View, ViewContext, ViewHandle};
+
+use crate::Workspace;
+
+pub fn init(cx: &mut MutableAppContext) {
+    cx.set_global(NotificationTracker::new());
+    simple_message_notification::init(cx);
+}
+
+pub trait Notification: View {
+    fn should_dismiss_notification_on_event(&self, event: &<Self as Entity>::Event) -> bool;
+}
+
+pub trait NotificationHandle {
+    fn id(&self) -> usize;
+    fn to_any(&self) -> AnyViewHandle;
+}
+
+impl<T: Notification> NotificationHandle for ViewHandle<T> {
+    fn id(&self) -> usize {
+        self.id()
+    }
+
+    fn to_any(&self) -> AnyViewHandle {
+        self.into()
+    }
+}
+
+impl From<&dyn NotificationHandle> for AnyViewHandle {
+    fn from(val: &dyn NotificationHandle) -> Self {
+        val.to_any()
+    }
+}
+
+struct NotificationTracker {
+    notifications_sent: HashSet<TypeId>,
+}
+
+impl std::ops::Deref for NotificationTracker {
+    type Target = HashSet<TypeId>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.notifications_sent
+    }
+}
+
+impl DerefMut for NotificationTracker {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.notifications_sent
+    }
+}
+
+impl NotificationTracker {
+    fn new() -> Self {
+        Self {
+            notifications_sent: HashSet::default(),
+        }
+    }
+}
+
+impl Workspace {
+    pub fn show_notification_once<V: Notification>(
+        &mut self,
+        id: usize,
+        cx: &mut ViewContext<Self>,
+        build_notification: impl FnOnce(&mut ViewContext<Self>) -> ViewHandle<V>,
+    ) {
+        if !cx
+            .global::<NotificationTracker>()
+            .contains(&TypeId::of::<V>())
+        {
+            cx.update_global::<NotificationTracker, _, _>(|tracker, _| {
+                tracker.insert(TypeId::of::<V>())
+            });
+
+            self.show_notification::<V>(id, cx, build_notification)
+        }
+    }
+
+    pub fn show_notification<V: Notification>(
+        &mut self,
+        id: usize,
+        cx: &mut ViewContext<Self>,
+        build_notification: impl FnOnce(&mut ViewContext<Self>) -> ViewHandle<V>,
+    ) {
+        let type_id = TypeId::of::<V>();
+        if self
+            .notifications
+            .iter()
+            .all(|(existing_type_id, existing_id, _)| {
+                (*existing_type_id, *existing_id) != (type_id, id)
+            })
+        {
+            let notification = build_notification(cx);
+            cx.subscribe(&notification, move |this, handle, event, cx| {
+                if handle.read(cx).should_dismiss_notification_on_event(event) {
+                    this.dismiss_notification(type_id, id, cx);
+                }
+            })
+            .detach();
+            self.notifications
+                .push((type_id, id, Box::new(notification)));
+            cx.notify();
+        }
+    }
+
+    fn dismiss_notification(&mut self, type_id: TypeId, id: usize, cx: &mut ViewContext<Self>) {
+        self.notifications
+            .retain(|(existing_type_id, existing_id, _)| {
+                if (*existing_type_id, *existing_id) == (type_id, id) {
+                    cx.notify();
+                    false
+                } else {
+                    true
+                }
+            });
+    }
+}
+
+pub mod simple_message_notification {
+    use std::process::Command;
+
+    use gpui::{
+        actions,
+        elements::{Flex, MouseEventHandler, Padding, ParentElement, Svg, Text},
+        impl_actions, Action, CursorStyle, Element, Entity, MouseButton, MutableAppContext, View,
+        ViewContext,
+    };
+    use menu::Cancel;
+    use serde::Deserialize;
+    use settings::Settings;
+
+    use crate::Workspace;
+
+    use super::Notification;
+
+    actions!(message_notifications, [CancelMessageNotification]);
+
+    #[derive(Clone, Default, Deserialize, PartialEq)]
+    pub struct OsOpen(pub String);
+
+    impl_actions!(message_notifications, [OsOpen]);
+
+    pub fn init(cx: &mut MutableAppContext) {
+        cx.add_action(MessageNotification::dismiss);
+        cx.add_action(
+            |_workspace: &mut Workspace, open_action: &OsOpen, _cx: &mut ViewContext<Workspace>| {
+                #[cfg(target_os = "macos")]
+                {
+                    let mut command = Command::new("open");
+                    command.arg(open_action.0.clone());
+
+                    command.spawn().ok();
+                }
+            },
+        )
+    }
+
+    pub struct MessageNotification {
+        message: String,
+        click_action: Option<Box<dyn Action>>,
+        click_message: Option<String>,
+    }
+
+    pub enum MessageNotificationEvent {
+        Dismiss,
+    }
+
+    impl Entity for MessageNotification {
+        type Event = MessageNotificationEvent;
+    }
+
+    impl MessageNotification {
+        pub fn new_messsage<S: AsRef<str>>(message: S) -> MessageNotification {
+            Self {
+                message: message.as_ref().to_string(),
+                click_action: None,
+                click_message: None,
+            }
+        }
+
+        pub fn new<S1: AsRef<str>, A: Action, S2: AsRef<str>>(
+            message: S1,
+            click_action: A,
+            click_message: S2,
+        ) -> Self {
+            Self {
+                message: message.as_ref().to_string(),
+                click_action: Some(Box::new(click_action) as Box<dyn Action>),
+                click_message: Some(click_message.as_ref().to_string()),
+            }
+        }
+
+        pub fn dismiss(&mut self, _: &CancelMessageNotification, cx: &mut ViewContext<Self>) {
+            cx.emit(MessageNotificationEvent::Dismiss);
+        }
+    }
+
+    impl View for MessageNotification {
+        fn ui_name() -> &'static str {
+            "MessageNotification"
+        }
+
+        fn render(&mut self, cx: &mut gpui::RenderContext<'_, Self>) -> gpui::ElementBox {
+            let theme = cx.global::<Settings>().theme.clone();
+            let theme = &theme.update_notification;
+
+            enum MessageNotificationTag {}
+
+            let click_action = self
+                .click_action
+                .as_ref()
+                .map(|action| action.boxed_clone());
+            let click_message = self.click_message.as_ref().map(|message| message.clone());
+            let message = self.message.clone();
+
+            MouseEventHandler::<MessageNotificationTag>::new(0, cx, |state, cx| {
+                Flex::column()
+                    .with_child(
+                        Flex::row()
+                            .with_child(
+                                Text::new(message, theme.message.text.clone())
+                                    .contained()
+                                    .with_style(theme.message.container)
+                                    .aligned()
+                                    .top()
+                                    .left()
+                                    .flex(1., true)
+                                    .boxed(),
+                            )
+                            .with_child(
+                                MouseEventHandler::<Cancel>::new(0, cx, |state, _| {
+                                    let style = theme.dismiss_button.style_for(state, false);
+                                    Svg::new("icons/x_mark_8.svg")
+                                        .with_color(style.color)
+                                        .constrained()
+                                        .with_width(style.icon_width)
+                                        .aligned()
+                                        .contained()
+                                        .with_style(style.container)
+                                        .constrained()
+                                        .with_width(style.button_width)
+                                        .with_height(style.button_width)
+                                        .boxed()
+                                })
+                                .with_padding(Padding::uniform(5.))
+                                .on_click(MouseButton::Left, move |_, cx| {
+                                    cx.dispatch_action(CancelMessageNotification)
+                                })
+                                .aligned()
+                                .constrained()
+                                .with_height(
+                                    cx.font_cache().line_height(theme.message.text.font_size),
+                                )
+                                .aligned()
+                                .top()
+                                .flex_float()
+                                .boxed(),
+                            )
+                            .boxed(),
+                    )
+                    .with_children({
+                        let style = theme.action_message.style_for(state, false);
+                        if let Some(click_message) = click_message {
+                            Some(
+                                Text::new(click_message, style.text.clone())
+                                    .contained()
+                                    .with_style(style.container)
+                                    .boxed(),
+                            )
+                        } else {
+                            None
+                        }
+                        .into_iter()
+                    })
+                    .contained()
+                    .boxed()
+            })
+            .with_cursor_style(CursorStyle::PointingHand)
+            .on_click(MouseButton::Left, move |_, cx| {
+                if let Some(click_action) = click_action.as_ref() {
+                    cx.dispatch_any_action(click_action.boxed_clone())
+                }
+            })
+            .boxed()
+        }
+    }
+
+    impl Notification for MessageNotification {
+        fn should_dismiss_notification_on_event(&self, event: &<Self as Entity>::Event) -> bool {
+            match event {
+                MessageNotificationEvent::Dismiss => true,
+            }
+        }
+    }
+}
+
+pub trait NotifyResultExt {
+    type Ok;
+
+    fn notify_err(
+        self,
+        workspace: &mut Workspace,
+        cx: &mut ViewContext<Workspace>,
+    ) -> Option<Self::Ok>;
+}
+
+impl<T, E> NotifyResultExt for Result<T, E>
+where
+    E: std::fmt::Debug,
+{
+    type Ok = T;
+
+    fn notify_err(self, workspace: &mut Workspace, cx: &mut ViewContext<Workspace>) -> Option<T> {
+        match self {
+            Ok(value) => Some(value),
+            Err(err) => {
+                workspace.show_notification(0, cx, |cx| {
+                    cx.add_view(|_cx| {
+                        simple_message_notification::MessageNotification::new_messsage(format!(
+                            "Error: {:?}",
+                            err,
+                        ))
+                    })
+                });
+
+                None
+            }
+        }
+    }
+}
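
A hedged usage sketch of the notification API above, given a `&mut Workspace` and a `ViewContext<Workspace>` (the message text and the fallible call are illustrative only):

    // Show a dismissible message at most once per run for this notification type.
    workspace.show_notification_once(0, cx, |cx| {
        cx.add_view(|_| {
            simple_message_notification::MessageNotification::new_messsage(
                "Illustrative message text",
            )
        })
    });

    // Or surface an error from any `Result` as a notification via `NotifyResultExt`.
    let _contents = std::fs::read_to_string("illustrative-path").notify_err(workspace, cx);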

crates/workspace/src/pane.rs 🔗

@@ -3,8 +3,9 @@ mod dragged_item_receiver;
 use super::{ItemHandle, SplitDirection};
 use crate::{
     dock::{icon_for_dock_anchor, AnchorDockBottom, AnchorDockRight, ExpandDock, HideDock},
+    item::WeakItemHandle,
     toolbar::Toolbar,
-    Item, NewFile, NewSearch, NewTerminal, WeakItemHandle, Workspace,
+    Item, NewFile, NewSearch, NewTerminal, Workspace,
 };
 use anyhow::Result;
 use collections::{HashMap, HashSet, VecDeque};
@@ -1634,7 +1635,7 @@ mod tests {
     use std::sync::Arc;
 
     use super::*;
-    use crate::tests::TestItem;
+    use crate::item::test::TestItem;
     use gpui::{executor::Deterministic, TestAppContext};
     use project::FakeFs;
 
@@ -1645,8 +1646,9 @@ mod tests {
         let fs = FakeFs::new(cx.background());
 
         let project = Project::test(fs, None, cx).await;
-        let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
         let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
 
         // 1. Add with a destination index
@@ -1734,8 +1736,9 @@ mod tests {
         let fs = FakeFs::new(cx.background());
 
         let project = Project::test(fs, None, cx).await;
-        let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
         let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
 
         // 1. Add with a destination index
@@ -1811,8 +1814,9 @@ mod tests {
         let fs = FakeFs::new(cx.background());
 
         let project = Project::test(fs, None, cx).await;
-        let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
         let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
 
         // singleton view
@@ -1922,7 +1926,7 @@ mod tests {
 
         let project = Project::test(fs, None, cx).await;
         let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+            cx.add_window(|cx| Workspace::new(None, 0, project, |_, _| unimplemented!(), cx));
         let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
 
         add_labled_item(&workspace, &pane, "A", cx);

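The test updates above follow the new `Workspace::new` signature: the constructor now takes the optional serialized state to restore and a `WorkspaceId` ahead of the project, dock item factory, and context. A minimal sketch as the tests use it:

    let (_, workspace) = cx.add_window(|cx| {
        // `None` (or `Default::default()`) means there is no serialized workspace to restore;
        // `0` is a placeholder WorkspaceId used by the tests.
        Workspace::new(None, 0, project, |_, _| unimplemented!(), cx)
    });
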
crates/workspace/src/pane_group.rs 🔗

@@ -13,10 +13,14 @@ use theme::Theme;
 
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub struct PaneGroup {
-    root: Member,
+    pub(crate) root: Member,
 }
 
 impl PaneGroup {
+    pub(crate) fn with_root(root: Member) -> Self {
+        Self { root }
+    }
+
     pub fn new(pane: ViewHandle<Pane>) -> Self {
         Self {
             root: Member::Pane(pane),
@@ -85,7 +89,7 @@ impl PaneGroup {
 }
 
 #[derive(Clone, Debug, Eq, PartialEq)]
-enum Member {
+pub(crate) enum Member {
     Axis(PaneAxis),
     Pane(ViewHandle<Pane>),
 }
@@ -276,9 +280,9 @@ impl Member {
 }
 
 #[derive(Clone, Debug, Eq, PartialEq)]
-struct PaneAxis {
-    axis: Axis,
-    members: Vec<Member>,
+pub(crate) struct PaneAxis {
+    pub axis: Axis,
+    pub members: Vec<Member>,
 }
 
 impl PaneAxis {

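`Member` and `PaneAxis` become crate-visible so that workspace deserialization can rebuild the center group directly via `PaneGroup::with_root`. A sketch, assuming `left_pane` and `right_pane` are existing `ViewHandle<Pane>` values:

    use gpui::Axis;

    let group = PaneGroup::with_root(Member::Axis(PaneAxis {
        axis: Axis::Horizontal,
        members: vec![Member::Pane(left_pane), Member::Pane(right_pane)],
    }));
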
crates/workspace/src/persistence.rs 🔗

@@ -0,0 +1,836 @@
+#![allow(dead_code)]
+
+pub mod model;
+
+use std::path::Path;
+
+use anyhow::{anyhow, bail, Context, Result};
+use db::{define_connection, query, sqlez::connection::Connection, sqlez_macros::sql};
+use gpui::Axis;
+
+use util::{iife, unzip_option, ResultExt};
+
+use crate::dock::DockPosition;
+use crate::WorkspaceId;
+
+use model::{
+    GroupId, PaneId, SerializedItem, SerializedPane, SerializedPaneGroup, SerializedWorkspace,
+    WorkspaceLocation,
+};
+
+define_connection! {
+    pub static ref DB: WorkspaceDb<()> =
+        &[sql!(
+            CREATE TABLE workspaces(
+                workspace_id INTEGER PRIMARY KEY,
+                workspace_location BLOB UNIQUE,
+                dock_visible INTEGER, // Boolean
+                dock_anchor TEXT, // Enum: 'Bottom' / 'Right' / 'Expanded'
+                dock_pane INTEGER, // NULL indicates that we don't have a dock pane yet
+                left_sidebar_open INTEGER, // Boolean
+                timestamp TEXT DEFAULT CURRENT_TIMESTAMP NOT NULL,
+                FOREIGN KEY(dock_pane) REFERENCES panes(pane_id)
+            ) STRICT;
+            
+            CREATE TABLE pane_groups(
+                group_id INTEGER PRIMARY KEY,
+                workspace_id INTEGER NOT NULL,
+                parent_group_id INTEGER, // NULL indicates that this is a root node
+                position INTEGER, // NULL indicates that this is a root node
+                axis TEXT NOT NULL, // Enum: 'Vertical' / 'Horizontal'
+                FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
+                ON DELETE CASCADE
+                ON UPDATE CASCADE,
+                FOREIGN KEY(parent_group_id) REFERENCES pane_groups(group_id) ON DELETE CASCADE
+            ) STRICT;
+            
+            CREATE TABLE panes(
+                pane_id INTEGER PRIMARY KEY,
+                workspace_id INTEGER NOT NULL,
+                active INTEGER NOT NULL, // Boolean
+                FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
+                ON DELETE CASCADE
+                ON UPDATE CASCADE
+            ) STRICT;
+            
+            CREATE TABLE center_panes(
+                pane_id INTEGER PRIMARY KEY,
+                parent_group_id INTEGER, // NULL means that this is a root pane
+                position INTEGER, // NULL means that this is a root pane
+                FOREIGN KEY(pane_id) REFERENCES panes(pane_id)
+                ON DELETE CASCADE,
+                FOREIGN KEY(parent_group_id) REFERENCES pane_groups(group_id) ON DELETE CASCADE
+            ) STRICT;
+            
+            CREATE TABLE items(
+                item_id INTEGER NOT NULL, // This is the item's view id, so this is not unique
+                workspace_id INTEGER NOT NULL,
+                pane_id INTEGER NOT NULL,
+                kind TEXT NOT NULL,
+                position INTEGER NOT NULL,
+                active INTEGER NOT NULL,
+                FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
+                ON DELETE CASCADE
+                ON UPDATE CASCADE,
+                FOREIGN KEY(pane_id) REFERENCES panes(pane_id)
+                ON DELETE CASCADE,
+                PRIMARY KEY(item_id, workspace_id)
+            ) STRICT;
+        )];
+}
+
+impl WorkspaceDb {
+    /// Returns a serialized workspace for the given worktree_roots. If the passed array
+    /// is empty, the most recent workspace is returned instead. If no workspace for the
+    /// passed roots is stored, returns `None`.
+    pub fn workspace_for_roots<P: AsRef<Path>>(
+        &self,
+        worktree_roots: &[P],
+    ) -> Option<SerializedWorkspace> {
+        let workspace_location: WorkspaceLocation = worktree_roots.into();
+
+        // Note that we re-assign the workspace_id here in case the passed roots were
+        // empty and we grabbed the most recent workspace instead
+        let (workspace_id, workspace_location, left_sidebar_open, dock_position): (
+            WorkspaceId,
+            WorkspaceLocation,
+            bool,
+            DockPosition,
+        ) = iife!({
+            if worktree_roots.len() == 0 {
+                self.select_row(sql!(
+                    SELECT workspace_id, workspace_location, left_sidebar_open, dock_visible, dock_anchor
+                    FROM workspaces
+                    ORDER BY timestamp DESC LIMIT 1))?()?
+            } else {
+                self.select_row_bound(sql!(
+                    SELECT workspace_id, workspace_location, left_sidebar_open, dock_visible, dock_anchor
+                    FROM workspaces 
+                    WHERE workspace_location = ?))?(&workspace_location)?
+            }
+            .context("No workspaces found")
+        })
+        .warn_on_err()
+        .flatten()?;
+
+        Some(SerializedWorkspace {
+            id: workspace_id,
+            location: workspace_location.clone(),
+            dock_pane: self
+                .get_dock_pane(workspace_id)
+                .context("Getting dock pane")
+                .log_err()?,
+            center_group: self
+                .get_center_pane_group(workspace_id)
+                .context("Getting center group")
+                .log_err()?,
+            dock_position,
+            left_sidebar_open
+        })
+    }
+
+    /// Saves a workspace using the worktree roots. Garbage collects any other
+    /// workspaces that previously pointed at the same location.
+    pub async fn save_workspace(&self, workspace: SerializedWorkspace) {
+        self.write(move |conn| {
+            conn.with_savepoint("update_worktrees", || {
+                // Clear out panes and pane_groups
+                conn.exec_bound(sql!(
+                    UPDATE workspaces SET dock_pane = NULL WHERE workspace_id = ?1;
+                    DELETE FROM pane_groups WHERE workspace_id = ?1;
+                    DELETE FROM panes WHERE workspace_id = ?1;))?(workspace.id)
+                .expect("Clearing old panes");
+
+                conn.exec_bound(sql!(
+                    DELETE FROM workspaces WHERE workspace_location = ? AND workspace_id != ?
+                ))?((&workspace.location, workspace.id.clone()))
+                .context("clearing out old locations")?;
+
+                // Upsert
+                conn.exec_bound(sql!(
+                        INSERT INTO workspaces(
+                            workspace_id,
+                            workspace_location,
+                            left_sidebar_open,
+                            dock_visible,
+                            dock_anchor,
+                            timestamp
+                        )
+                        VALUES (?1, ?2, ?3, ?4, ?5, CURRENT_TIMESTAMP)
+                        ON CONFLICT DO
+                            UPDATE SET
+                            workspace_location = ?2,
+                            left_sidebar_open = ?3,
+                            dock_visible = ?4,
+                            dock_anchor = ?5,
+                            timestamp = CURRENT_TIMESTAMP
+                ))?((workspace.id, &workspace.location, workspace.left_sidebar_open, workspace.dock_position))
+                .context("Updating workspace")?;
+
+                // Save center pane group and dock pane
+                Self::save_pane_group(conn, workspace.id, &workspace.center_group, None)
+                    .context("save pane group in save workspace")?;
+
+                let dock_id = Self::save_pane(conn, workspace.id, &workspace.dock_pane, None, true)
+                    .context("save pane in save workspace")?;
+
+                // Complete workspace initialization
+                conn.exec_bound(sql!(
+                    UPDATE workspaces
+                    SET dock_pane = ?
+                    WHERE workspace_id = ?
+                ))?((dock_id, workspace.id))
+                .context("Finishing initialization with dock pane")?;
+
+                Ok(())
+            })
+            .log_err();
+        })
+        .await;
+    }
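
A round-trip sketch of the save path above, in the spirit of the tests further down; the id, paths, and use of Default for the pane fields mirror those tests (the Default impls are only available under cfg(test)):

    let workspace = SerializedWorkspace {
        id: DB.next_id().await.unwrap(),
        location: (["/tmp/a", "/tmp/b"]).into(),
        dock_position: DockPosition::Hidden(DockAnchor::Bottom),
        center_group: Default::default(),
        dock_pane: Default::default(),
        left_sidebar_open: false,
    };
    DB.save_workspace(workspace.clone()).await;
    // Saving a different workspace for the same roots later would delete this row,
    // which is the garbage collection the doc comment above refers to.
    assert_eq!(DB.workspace_for_roots(&["/tmp/b", "/tmp/a"]), Some(workspace));
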
+
+    query! {
+        pub async fn next_id() -> Result<WorkspaceId> {
+            INSERT INTO workspaces DEFAULT VALUES RETURNING workspace_id
+        }
+    }
+
+    query! {
+        pub fn recent_workspaces(limit: usize) -> Result<Vec<(WorkspaceId, WorkspaceLocation)>> {
+            SELECT workspace_id, workspace_location 
+            FROM workspaces
+            WHERE workspace_location IS NOT NULL
+            ORDER BY timestamp DESC 
+            LIMIT ?
+        }
+    }
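
These query! macros expand to methods on WorkspaceDb. A hedged sketch of how they might be called from an async fn that returns a Result (the limit value is illustrative):

    // Reserve a stable WorkspaceId before the first save, then list the most
    // recently used locations, e.g. to drive a recent-projects style picker.
    let id: WorkspaceId = DB.next_id().await?;
    let recent: Vec<(WorkspaceId, WorkspaceLocation)> = DB.recent_workspaces(10)?;
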
+
+    fn get_center_pane_group(&self, workspace_id: WorkspaceId) -> Result<SerializedPaneGroup> {
+        self.get_pane_group(workspace_id, None)?
+            .into_iter()
+            .next()
+            .context("No center pane group")
+    }
+
+    fn get_pane_group(
+        &self,
+        workspace_id: WorkspaceId,
+        group_id: Option<GroupId>,
+    ) -> Result<Vec<SerializedPaneGroup>> {
+        type GroupKey = (Option<GroupId>, WorkspaceId);
+        type GroupOrPane = (Option<GroupId>, Option<Axis>, Option<PaneId>, Option<bool>);
+        self.select_bound::<GroupKey, GroupOrPane>(sql!(
+            SELECT group_id, axis, pane_id, active
+                FROM (SELECT 
+                        group_id,
+                        axis,
+                        NULL as pane_id,
+                        NULL as active,
+                        position,
+                        parent_group_id,
+                        workspace_id
+                      FROM pane_groups 
+                     UNION
+                      SELECT 
+                        NULL,
+                        NULL,  
+                        center_panes.pane_id,
+                        panes.active as active,
+                        position,
+                        parent_group_id,
+                        panes.workspace_id as workspace_id
+                      FROM center_panes
+                      JOIN panes ON center_panes.pane_id = panes.pane_id) 
+            WHERE parent_group_id IS ? AND workspace_id = ?
+            ORDER BY position
+        ))?((group_id, workspace_id))?
+        .into_iter()
+        .map(|(group_id, axis, pane_id, active)| {
+            if let Some((group_id, axis)) = group_id.zip(axis) {
+                Ok(SerializedPaneGroup::Group {
+                    axis,
+                    children: self.get_pane_group(workspace_id, Some(group_id))?,
+                })
+            } else if let Some((pane_id, active)) = pane_id.zip(active) {
+                Ok(SerializedPaneGroup::Pane(SerializedPane::new(
+                    self.get_items(pane_id)?,
+                    active,
+                )))
+            } else {
+                bail!("Pane Group Child was neither a pane group or a pane");
+            }
+        })
+        // Filter out panes and pane groups which don't have any children or items
+        .filter(|pane_group| match pane_group {
+            Ok(SerializedPaneGroup::Group { children, .. }) => !children.is_empty(),
+            Ok(SerializedPaneGroup::Pane(pane)) => !pane.children.is_empty(),
+            _ => true,
+        })
+        .collect::<Result<_>>()
+    }
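
The UNION above flattens both the child groups and the child panes of a given parent into one list ordered by position; the map then recurses into groups and prunes anything that deserialized empty. For the simple layouts used in the tests below, the reconstruction yields a tree shaped like this sketch (item ids illustrative):

    SerializedPaneGroup::Group {
        axis: gpui::Axis::Horizontal,
        children: vec![
            SerializedPaneGroup::Pane(SerializedPane::new(
                vec![SerializedItem::new("Terminal", 1, true)],
                true,
            )),
            SerializedPaneGroup::Pane(SerializedPane::new(
                vec![SerializedItem::new("Terminal", 2, false)],
                false,
            )),
        ],
    }
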
+
+    fn save_pane_group(
+        conn: &Connection,
+        workspace_id: WorkspaceId,
+        pane_group: &SerializedPaneGroup,
+        parent: Option<(GroupId, usize)>,
+    ) -> Result<()> {
+        match pane_group {
+            SerializedPaneGroup::Group { axis, children } => {
+                let (parent_id, position) = unzip_option(parent);
+
+                let group_id = conn.select_row_bound::<_, i64>(sql!(
+                        INSERT INTO pane_groups(workspace_id, parent_group_id, position, axis) 
+                        VALUES (?, ?, ?, ?) 
+                        RETURNING group_id
+                ))?((
+                    workspace_id,
+                    parent_id,
+                    position,
+                    *axis,
+                ))?
+                .ok_or_else(|| anyhow!("Couldn't retrieve group_id from inserted pane_group"))?;
+
+                for (position, group) in children.iter().enumerate() {
+                    Self::save_pane_group(conn, workspace_id, group, Some((group_id, position)))?
+                }
+
+                Ok(())
+            }
+            SerializedPaneGroup::Pane(pane) => {
+                Self::save_pane(conn, workspace_id, &pane, parent, false)?;
+                Ok(())
+            }
+        }
+    }
+
+    fn get_dock_pane(&self, workspace_id: WorkspaceId) -> Result<SerializedPane> {
+        let (pane_id, active) = self.select_row_bound(sql!(
+            SELECT pane_id, active
+            FROM panes
+            WHERE pane_id = (SELECT dock_pane FROM workspaces WHERE workspace_id = ?)
+        ))?(
+            workspace_id,
+        )?
+        .context("No dock pane for workspace")?;
+
+        Ok(SerializedPane::new(
+            self.get_items(pane_id).context("Reading items")?,
+            active,
+        ))
+    }
+
+    fn save_pane(
+        conn: &Connection,
+        workspace_id: WorkspaceId,
+        pane: &SerializedPane,
+        parent: Option<(GroupId, usize)>, // None means no parent group: the dock pane, or a root center pane
+        dock: bool,
+    ) -> Result<PaneId> {
+        let pane_id = conn.select_row_bound::<_, i64>(sql!(
+            INSERT INTO panes(workspace_id, active) 
+            VALUES (?, ?) 
+            RETURNING pane_id
+        ))?((workspace_id, pane.active))?
+        .ok_or_else(|| anyhow!("Could not retrieve inserted pane_id"))?;
+
+        if !dock {
+            let (parent_id, order) = unzip_option(parent);
+            conn.exec_bound(sql!(
+                INSERT INTO center_panes(pane_id, parent_group_id, position)
+                VALUES (?, ?, ?)
+            ))?((pane_id, parent_id, order))?;
+        }
+
+        Self::save_items(conn, workspace_id, pane_id, &pane.children).context("Saving items")?;
+
+        Ok(pane_id)
+    }
+
+    fn get_items(&self, pane_id: PaneId) -> Result<Vec<SerializedItem>> {
+        Ok(self.select_bound(sql!(
+            SELECT kind, item_id, active FROM items
+            WHERE pane_id = ?
+            ORDER BY position
+        ))?(pane_id)?)
+    }
+
+    fn save_items(
+        conn: &Connection,
+        workspace_id: WorkspaceId,
+        pane_id: PaneId,
+        items: &[SerializedItem],
+    ) -> Result<()> {
+        let mut insert = conn.exec_bound(sql!(
+            INSERT INTO items(workspace_id, pane_id, position, kind, item_id, active) VALUES (?, ?, ?, ?, ?, ?)
+        )).context("Preparing insertion")?;
+        for (position, item) in items.iter().enumerate() {
+            insert((workspace_id, pane_id, position, item))?;
+        }
+
+        Ok(())
+    }
+}
+
+#[cfg(test)]
+mod tests {
+
+    use std::sync::Arc;
+
+    use db::open_test_db;
+    use settings::DockAnchor;
+
+    use super::*;
+
+    #[gpui::test]
+    async fn test_next_id_stability() {
+        env_logger::try_init().ok();
+
+        let db = WorkspaceDb(open_test_db("test_next_id_stability").await);
+
+        db.write(|conn| {
+            conn.migrate(
+                "test_table",
+                &[sql!(
+                    CREATE TABLE test_table(
+                        text TEXT,
+                        workspace_id INTEGER,
+                        FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
+                            ON DELETE CASCADE
+                    ) STRICT;
+                )],
+            )
+            .unwrap();
+        })
+        .await;
+
+        let id = db.next_id().await.unwrap();
+        // Assert the empty row got inserted
+        assert_eq!(
+            Some(id),
+            db.select_row_bound::<WorkspaceId, WorkspaceId>(sql!(
+                SELECT workspace_id FROM workspaces WHERE workspace_id = ?
+            ))
+            .unwrap()(id)
+            .unwrap()
+        );
+
+        db.write(move |conn| {
+            conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?)))
+                .unwrap()(("test-text-1", id))
+            .unwrap()
+        })
+        .await;
+
+        let test_text_1 = db
+            .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?))
+            .unwrap()(1)
+        .unwrap()
+        .unwrap();
+        assert_eq!(test_text_1, "test-text-1");
+    }
+
+    #[gpui::test]
+    async fn test_workspace_id_stability() {
+        env_logger::try_init().ok();
+
+        let db = WorkspaceDb(open_test_db("test_workspace_id_stability").await);
+
+        db.write(|conn| {
+            conn.migrate(
+                "test_table",
+                &[sql!(
+                    CREATE TABLE test_table(
+                        text TEXT,
+                        workspace_id INTEGER,
+                        FOREIGN KEY(workspace_id) 
+                            REFERENCES workspaces(workspace_id)
+                            ON DELETE CASCADE
+                    ) STRICT;)],
+            )
+        })
+        .await
+        .unwrap();
+
+        let mut workspace_1 = SerializedWorkspace {
+            id: 1,
+            location: (["/tmp", "/tmp2"]).into(),
+            dock_position: crate::dock::DockPosition::Shown(DockAnchor::Bottom),
+            center_group: Default::default(),
+            dock_pane: Default::default(),
+            left_sidebar_open: true
+        };
+
+        let mut workspace_2 = SerializedWorkspace {
+            id: 2,
+            location: (["/tmp"]).into(),
+            dock_position: crate::dock::DockPosition::Hidden(DockAnchor::Expanded),
+            center_group: Default::default(),
+            dock_pane: Default::default(),
+            left_sidebar_open: false
+        };
+
+        db.save_workspace(workspace_1.clone()).await;
+
+        db.write(|conn| {
+            conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?)))
+                .unwrap()(("test-text-1", 1))
+            .unwrap();
+        })
+        .await;
+
+        db.save_workspace(workspace_2.clone()).await;
+
+        db.write(|conn| {
+            conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?)))
+                .unwrap()(("test-text-2", 2))
+            .unwrap();
+        })
+        .await;
+
+        workspace_1.location = (["/tmp", "/tmp3"]).into();
+        db.save_workspace(workspace_1.clone()).await;
+        db.save_workspace(workspace_1).await;
+
+        workspace_2.dock_pane.children.push(SerializedItem {
+            kind: Arc::from("Test"),
+            item_id: 10,
+            active: true,
+        });
+        db.save_workspace(workspace_2).await;
+
+        let test_text_2 = db
+            .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?))
+            .unwrap()(2)
+        .unwrap()
+        .unwrap();
+        assert_eq!(test_text_2, "test-text-2");
+
+        let test_text_1 = db
+            .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?))
+            .unwrap()(1)
+        .unwrap()
+        .unwrap();
+        assert_eq!(test_text_1, "test-text-1");
+    }
+
+    #[gpui::test]
+    async fn test_full_workspace_serialization() {
+        env_logger::try_init().ok();
+
+        let db = WorkspaceDb(open_test_db("test_full_workspace_serialization").await);
+
+        let dock_pane = crate::persistence::model::SerializedPane {
+            children: vec![
+                SerializedItem::new("Terminal", 1, false),
+                SerializedItem::new("Terminal", 2, false),
+                SerializedItem::new("Terminal", 3, true),
+                SerializedItem::new("Terminal", 4, false),
+            ],
+            active: false,
+        };
+
+        //  -----------------
+        //  | 5,6   | 9,10  |
+        //  | - - - |       |
+        //  | 7,8   |       |
+        //  -----------------
+        let center_group = SerializedPaneGroup::Group {
+            axis: gpui::Axis::Horizontal,
+            children: vec![
+                SerializedPaneGroup::Group {
+                    axis: gpui::Axis::Vertical,
+                    children: vec![
+                        SerializedPaneGroup::Pane(SerializedPane::new(
+                            vec![
+                                SerializedItem::new("Terminal", 5, false),
+                                SerializedItem::new("Terminal", 6, true),
+                            ],
+                            false,
+                        )),
+                        SerializedPaneGroup::Pane(SerializedPane::new(
+                            vec![
+                                SerializedItem::new("Terminal", 7, true),
+                                SerializedItem::new("Terminal", 8, false),
+                            ],
+                            false,
+                        )),
+                    ],
+                },
+                SerializedPaneGroup::Pane(SerializedPane::new(
+                    vec![
+                        SerializedItem::new("Terminal", 9, false),
+                        SerializedItem::new("Terminal", 10, true),
+                    ],
+                    false,
+                )),
+            ],
+        };
+
+        let workspace = SerializedWorkspace {
+            id: 5,
+            location: (["/tmp", "/tmp2"]).into(),
+            dock_position: DockPosition::Shown(DockAnchor::Bottom),
+            center_group,
+            dock_pane,
+            left_sidebar_open: true
+        };
+
+        db.save_workspace(workspace.clone()).await;
+        let round_trip_workspace = db.workspace_for_roots(&["/tmp2", "/tmp"]);
+
+        assert_eq!(workspace, round_trip_workspace.unwrap());
+
+        // Test guaranteed duplicate IDs
+        db.save_workspace(workspace.clone()).await;
+        db.save_workspace(workspace.clone()).await;
+
+        let round_trip_workspace = db.workspace_for_roots(&["/tmp", "/tmp2"]);
+        assert_eq!(workspace, round_trip_workspace.unwrap());
+    }
+
+    #[gpui::test]
+    async fn test_workspace_assignment() {
+        env_logger::try_init().ok();
+
+        let db = WorkspaceDb(open_test_db("test_basic_functionality").await);
+
+        let workspace_1 = SerializedWorkspace {
+            id: 1,
+            location: (["/tmp", "/tmp2"]).into(),
+            dock_position: crate::dock::DockPosition::Shown(DockAnchor::Bottom),
+            center_group: Default::default(),
+            dock_pane: Default::default(),
+            left_sidebar_open: true,
+        };
+
+        let mut workspace_2 = SerializedWorkspace {
+            id: 2,
+            location: (["/tmp"]).into(),
+            dock_position: crate::dock::DockPosition::Hidden(DockAnchor::Expanded),
+            center_group: Default::default(),
+            dock_pane: Default::default(),
+            left_sidebar_open: false,
+        };
+
+        db.save_workspace(workspace_1.clone()).await;
+        db.save_workspace(workspace_2.clone()).await;
+
+        // Test that paths are treated as a set
+        assert_eq!(
+            db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
+            workspace_1
+        );
+        assert_eq!(
+            db.workspace_for_roots(&["/tmp2", "/tmp"]).unwrap(),
+            workspace_1
+        );
+
+        // Make sure that other keys work
+        assert_eq!(db.workspace_for_roots(&["/tmp"]).unwrap(), workspace_2);
+        assert_eq!(db.workspace_for_roots(&["/tmp3", "/tmp2", "/tmp4"]), None);
+
+        // Test 'mutate' case of updating a pre-existing id
+        workspace_2.location = (["/tmp", "/tmp2"]).into();
+
+        db.save_workspace(workspace_2.clone()).await;
+        assert_eq!(
+            db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
+            workspace_2
+        );
+
+        // Test other mechanism for mutating
+        let mut workspace_3 = SerializedWorkspace {
+            id: 3,
+            location: (&["/tmp", "/tmp2"]).into(),
+            dock_position: DockPosition::Shown(DockAnchor::Right),
+            center_group: Default::default(),
+            dock_pane: Default::default(),
+            left_sidebar_open: false
+        };
+
+        db.save_workspace(workspace_3.clone()).await;
+        assert_eq!(
+            db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
+            workspace_3
+        );
+
+        // Make sure that updating paths differently also works
+        workspace_3.location = (["/tmp3", "/tmp4", "/tmp2"]).into();
+        db.save_workspace(workspace_3.clone()).await;
+        assert_eq!(db.workspace_for_roots(&["/tmp2", "/tmp"]), None);
+        assert_eq!(
+            db.workspace_for_roots(&["/tmp2", "/tmp3", "/tmp4"])
+                .unwrap(),
+            workspace_3
+        );
+    }
+
+    use crate::dock::DockPosition;
+    use crate::persistence::model::SerializedWorkspace;
+    use crate::persistence::model::{SerializedItem, SerializedPane, SerializedPaneGroup};
+
+    fn default_workspace<P: AsRef<Path>>(
+        workspace_id: &[P],
+        dock_pane: SerializedPane,
+        center_group: &SerializedPaneGroup,
+    ) -> SerializedWorkspace {
+        SerializedWorkspace {
+            id: 4,
+            location: workspace_id.into(),
+            dock_position: crate::dock::DockPosition::Hidden(DockAnchor::Right),
+            center_group: center_group.clone(),
+            dock_pane,
+            left_sidebar_open: true
+        }
+    }
+
+    #[gpui::test]
+    async fn test_basic_dock_pane() {
+        env_logger::try_init().ok();
+
+        let db = WorkspaceDb(open_test_db("basic_dock_pane").await);
+
+        let dock_pane = crate::persistence::model::SerializedPane::new(
+            vec![
+                SerializedItem::new("Terminal", 1, false),
+                SerializedItem::new("Terminal", 4, false),
+                SerializedItem::new("Terminal", 2, false),
+                SerializedItem::new("Terminal", 3, true),
+            ],
+            false,
+        );
+
+        let workspace = default_workspace(&["/tmp"], dock_pane, &Default::default());
+
+        db.save_workspace(workspace.clone()).await;
+
+        let new_workspace = db.workspace_for_roots(&["/tmp"]).unwrap();
+
+        assert_eq!(workspace.dock_pane, new_workspace.dock_pane);
+    }
+
+    #[gpui::test]
+    async fn test_simple_split() {
+        env_logger::try_init().ok();
+
+        let db = WorkspaceDb(open_test_db("simple_split").await);
+
+        //  -----------------
+        //  | 1,2   | 5,6   |
+        //  | - - - |       |
+        //  | 3,4   |       |
+        //  -----------------
+        let center_pane = SerializedPaneGroup::Group {
+            axis: gpui::Axis::Horizontal,
+            children: vec![
+                SerializedPaneGroup::Group {
+                    axis: gpui::Axis::Vertical,
+                    children: vec![
+                        SerializedPaneGroup::Pane(SerializedPane::new(
+                            vec![
+                                SerializedItem::new("Terminal", 1, false),
+                                SerializedItem::new("Terminal", 2, true),
+                            ],
+                            false,
+                        )),
+                        SerializedPaneGroup::Pane(SerializedPane::new(
+                            vec![
+                                SerializedItem::new("Terminal", 4, false),
+                                SerializedItem::new("Terminal", 3, true),
+                            ],
+                            true,
+                        )),
+                    ],
+                },
+                SerializedPaneGroup::Pane(SerializedPane::new(
+                    vec![
+                        SerializedItem::new("Terminal", 5, true),
+                        SerializedItem::new("Terminal", 6, false),
+                    ],
+                    false,
+                )),
+            ],
+        };
+
+        let workspace = default_workspace(&["/tmp"], Default::default(), &center_pane);
+
+        db.save_workspace(workspace.clone()).await;
+
+        let new_workspace = db.workspace_for_roots(&["/tmp"]).unwrap();
+
+        assert_eq!(workspace.center_group, new_workspace.center_group);
+    }
+
+    #[gpui::test]
+    async fn test_cleanup_panes() {
+        env_logger::try_init().ok();
+
+        let db = WorkspaceDb(open_test_db("test_cleanup_panes").await);
+
+        let center_pane = SerializedPaneGroup::Group {
+            axis: gpui::Axis::Horizontal,
+            children: vec![
+                SerializedPaneGroup::Group {
+                    axis: gpui::Axis::Vertical,
+                    children: vec![
+                        SerializedPaneGroup::Pane(SerializedPane::new(
+                            vec![
+                                SerializedItem::new("Terminal", 1, false),
+                                SerializedItem::new("Terminal", 2, true),
+                            ],
+                            false,
+                        )),
+                        SerializedPaneGroup::Pane(SerializedPane::new(
+                            vec![
+                                SerializedItem::new("Terminal", 4, false),
+                                SerializedItem::new("Terminal", 3, true),
+                            ],
+                            true,
+                        )),
+                    ],
+                },
+                SerializedPaneGroup::Pane(SerializedPane::new(
+                    vec![
+                        SerializedItem::new("Terminal", 5, false),
+                        SerializedItem::new("Terminal", 6, true),
+                    ],
+                    false,
+                )),
+            ],
+        };
+
+        let id = &["/tmp"];
+
+        let mut workspace = default_workspace(id, Default::default(), &center_pane);
+
+        db.save_workspace(workspace.clone()).await;
+
+        workspace.center_group = SerializedPaneGroup::Group {
+            axis: gpui::Axis::Vertical,
+            children: vec![
+                SerializedPaneGroup::Pane(SerializedPane::new(
+                    vec![
+                        SerializedItem::new("Terminal", 1, false),
+                        SerializedItem::new("Terminal", 2, true),
+                    ],
+                    false,
+                )),
+                SerializedPaneGroup::Pane(SerializedPane::new(
+                    vec![
+                        SerializedItem::new("Terminal", 4, true),
+                        SerializedItem::new("Terminal", 3, false),
+                    ],
+                    true,
+                )),
+            ],
+        };
+
+        db.save_workspace(workspace.clone()).await;
+
+        let new_workspace = db.workspace_for_roots(id).unwrap();
+
+        assert_eq!(workspace.center_group, new_workspace.center_group);
+    }
+}

crates/workspace/src/persistence/model.rs 🔗

@@ -0,0 +1,315 @@
+use std::{
+    path::{Path, PathBuf},
+    sync::Arc,
+};
+
+use anyhow::{Context, Result};
+
+use async_recursion::async_recursion;
+use gpui::{AsyncAppContext, Axis, ModelHandle, Task, ViewHandle};
+
+use db::sqlez::{
+    bindable::{Bind, Column},
+    statement::Statement,
+};
+use project::Project;
+use settings::DockAnchor;
+use util::ResultExt;
+
+use crate::{
+    dock::DockPosition, ItemDeserializers, Member, Pane, PaneAxis, Workspace, WorkspaceId,
+};
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct WorkspaceLocation(Arc<Vec<PathBuf>>);
+
+impl WorkspaceLocation {
+    pub fn paths(&self) -> Arc<Vec<PathBuf>> {
+        self.0.clone()
+    }
+}
+
+impl<P: AsRef<Path>, T: IntoIterator<Item = P>> From<T> for WorkspaceLocation {
+    fn from(iterator: T) -> Self {
+        let mut roots = iterator
+            .into_iter()
+            .map(|p| p.as_ref().to_path_buf())
+            .collect::<Vec<_>>();
+        roots.sort();
+        Self(Arc::new(roots))
+    }
+}
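
Because the roots are sorted on construction, two locations built from the same paths in different orders compare equal, which is what makes workspace_for_roots order-insensitive. A small sketch:

    let a: WorkspaceLocation = ["/tmp/b", "/tmp/a"].into();
    let b: WorkspaceLocation = ["/tmp/a", "/tmp/b"].into();
    assert_eq!(a, b);
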
+
+impl Bind for &WorkspaceLocation {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        bincode::serialize(&self.0)
+            .expect("Bincode serialization of paths should not fail")
+            .bind(statement, start_index)
+    }
+}
+
+impl Column for WorkspaceLocation {
+    fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
+        let blob = statement.column_blob(start_index)?;
+        Ok((
+            WorkspaceLocation(bincode::deserialize(blob).context("Bincode failed")?),
+            start_index + 1,
+        ))
+    }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub struct SerializedWorkspace {
+    pub id: WorkspaceId,
+    pub location: WorkspaceLocation,
+    pub dock_position: DockPosition,
+    pub center_group: SerializedPaneGroup,
+    pub dock_pane: SerializedPane,
+    pub left_sidebar_open: bool,
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum SerializedPaneGroup {
+    Group {
+        axis: Axis,
+        children: Vec<SerializedPaneGroup>,
+    },
+    Pane(SerializedPane),
+}
+
+#[cfg(test)]
+impl Default for SerializedPaneGroup {
+    fn default() -> Self {
+        Self::Pane(SerializedPane {
+            children: vec![SerializedItem::default()],
+            active: false,
+        })
+    }
+}
+
+impl SerializedPaneGroup {
+    #[async_recursion(?Send)]
+    pub(crate) async fn deserialize(
+        &self,
+        project: &ModelHandle<Project>,
+        workspace_id: WorkspaceId,
+        workspace: &ViewHandle<Workspace>,
+        cx: &mut AsyncAppContext,
+    ) -> Option<(Member, Option<ViewHandle<Pane>>)> {
+        match self {
+            SerializedPaneGroup::Group { axis, children } => {
+                let mut current_active_pane = None;
+                let mut members = Vec::new();
+                for child in children {
+                    if let Some((new_member, active_pane)) = child
+                        .deserialize(project, workspace_id, workspace, cx)
+                        .await
+                    {
+                        members.push(new_member);
+
+                        current_active_pane = current_active_pane.or(active_pane);
+                    }
+                }
+
+                if members.is_empty() {
+                    return None;
+                }
+
+                Some((
+                    Member::Axis(PaneAxis {
+                        axis: *axis,
+                        members,
+                    }),
+                    current_active_pane,
+                ))
+            }
+            SerializedPaneGroup::Pane(serialized_pane) => {
+                let pane = workspace.update(cx, |workspace, cx| workspace.add_pane(cx));
+                let active = serialized_pane.active;
+                serialized_pane
+                    .deserialize_to(project, &pane, workspace_id, workspace, cx)
+                    .await;
+
+                if pane.read_with(cx, |pane, _| pane.items().next().is_some()) {
+                    Some((Member::Pane(pane.clone()), active.then(|| pane)))
+                } else {
+                    None
+                }
+            }
+        }
+    }
+}
+
+#[derive(Debug, PartialEq, Eq, Default, Clone)]
+pub struct SerializedPane {
+    pub(crate) active: bool,
+    pub(crate) children: Vec<SerializedItem>,
+}
+
+impl SerializedPane {
+    pub fn new(children: Vec<SerializedItem>, active: bool) -> Self {
+        SerializedPane { children, active }
+    }
+
+    pub async fn deserialize_to(
+        &self,
+        project: &ModelHandle<Project>,
+        pane_handle: &ViewHandle<Pane>,
+        workspace_id: WorkspaceId,
+        workspace: &ViewHandle<Workspace>,
+        cx: &mut AsyncAppContext,
+    ) {
+        let mut active_item_index = None;
+        for (index, item) in self.children.iter().enumerate() {
+            let project = project.clone();
+            let item_handle = pane_handle
+                .update(cx, |_, cx| {
+                    if let Some(deserializer) = cx.global::<ItemDeserializers>().get(&item.kind) {
+                        deserializer(
+                            project,
+                            workspace.downgrade(),
+                            workspace_id,
+                            item.item_id,
+                            cx,
+                        )
+                    } else {
+                        Task::ready(Err(anyhow::anyhow!(
+                            "Deserializer does not exist for item kind: {}",
+                            item.kind
+                        )))
+                    }
+                })
+                .await
+                .log_err();
+
+            if let Some(item_handle) = item_handle {
+                workspace.update(cx, |workspace, cx| {
+                    Pane::add_item(workspace, &pane_handle, item_handle, false, false, None, cx);
+                })
+            }
+
+            if item.active {
+                active_item_index = Some(index);
+            }
+        }
+
+        if let Some(active_item_index) = active_item_index {
+            pane_handle.update(cx, |pane, cx| {
+                pane.activate_item(active_item_index, false, false, cx);
+            })
+        }
+    }
+}
+
+pub type GroupId = i64;
+pub type PaneId = i64;
+pub type ItemId = usize;
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub struct SerializedItem {
+    pub kind: Arc<str>,
+    pub item_id: ItemId,
+    pub active: bool,
+}
+
+impl SerializedItem {
+    pub fn new(kind: impl AsRef<str>, item_id: ItemId, active: bool) -> Self {
+        Self {
+            kind: Arc::from(kind.as_ref()),
+            item_id,
+            active,
+        }
+    }
+}
+
+#[cfg(test)]
+impl Default for SerializedItem {
+    fn default() -> Self {
+        SerializedItem {
+            kind: Arc::from("Terminal"),
+            item_id: 100000,
+            active: false,
+        }
+    }
+}
+
+impl Bind for &SerializedItem {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        let next_index = statement.bind(self.kind.clone(), start_index)?;
+        let next_index = statement.bind(self.item_id, next_index)?;
+        statement.bind(self.active, next_index)
+    }
+}
+
+impl Column for SerializedItem {
+    fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
+        let (kind, next_index) = Arc::<str>::column(statement, start_index)?;
+        let (item_id, next_index) = ItemId::column(statement, next_index)?;
+        let (active, next_index) = bool::column(statement, next_index)?;
+        Ok((
+            SerializedItem {
+                kind,
+                item_id,
+                active,
+            },
+            next_index,
+        ))
+    }
+}
+
+impl Bind for DockPosition {
+    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+        let next_index = statement.bind(self.is_visible(), start_index)?;
+        statement.bind(self.anchor(), next_index)
+    }
+}
+
+impl Column for DockPosition {
+    fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
+        let (visible, next_index) = bool::column(statement, start_index)?;
+        let (dock_anchor, next_index) = DockAnchor::column(statement, next_index)?;
+        let position = if visible {
+            DockPosition::Shown(dock_anchor)
+        } else {
+            DockPosition::Hidden(dock_anchor)
+        };
+        Ok((position, next_index))
+    }
+}
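
These two impls split a DockPosition across the dock_visible and dock_anchor columns used by the workspaces table. A round-trip sketch modeled on the WorkspaceLocation test below; the table name is illustrative:

    let db = Connection::open_memory(Some("dock_position_round_trip"));
    db.exec("CREATE TABLE dock_test(visible INTEGER, anchor TEXT);").unwrap()().unwrap();
    db.exec_bound("INSERT INTO dock_test(visible, anchor) VALUES (?, ?)")
        .unwrap()(DockPosition::Shown(DockAnchor::Right))
        .unwrap();
    assert_eq!(
        db.select_row("SELECT visible, anchor FROM dock_test LIMIT 1")
            .unwrap()()
            .unwrap(),
        Some(DockPosition::Shown(DockAnchor::Right))
    );
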
+
+#[cfg(test)]
+mod tests {
+    use db::sqlez::connection::Connection;
+    use settings::DockAnchor;
+
+    use super::WorkspaceLocation;
+
+    #[test]
+    fn test_workspace_round_trips() {
+        let db = Connection::open_memory(Some("workspace_id_round_trips"));
+
+        db.exec(indoc::indoc! {"
+                CREATE TABLE workspace_id_test(
+                    workspace_id INTEGER,
+                    dock_anchor TEXT
+                );"})
+            .unwrap()()
+        .unwrap();
+
+        let workspace_id: WorkspaceLocation = WorkspaceLocation::from(&["/test2", "/test1"]);
+
+        db.exec_bound("INSERT INTO workspace_id_test(workspace_id, dock_anchor) VALUES (?,?)")
+            .unwrap()((&workspace_id, DockAnchor::Bottom))
+        .unwrap();
+
+        assert_eq!(
+            db.select_row("SELECT workspace_id, dock_anchor FROM workspace_id_test LIMIT 1")
+                .unwrap()()
+            .unwrap(),
+            Some((
+                WorkspaceLocation::from(&["/test1", "/test2"]),
+                DockAnchor::Bottom
+            ))
+        );
+    }
+}

crates/workspace/src/searchable.rs 🔗

@@ -6,7 +6,7 @@ use gpui::{
 };
 use project::search::SearchQuery;
 
-use crate::{Item, ItemHandle, WeakItemHandle};
+use crate::{item::WeakItemHandle, Item, ItemHandle};
 
 #[derive(Debug)]
 pub enum SearchEvent {

crates/workspace/src/shared_screen.rs 🔗

@@ -1,4 +1,6 @@
-use crate::{Item, ItemNavHistory};
+use crate::{
+    item::ItemEvent, persistence::model::ItemId, Item, ItemNavHistory, Pane, Workspace, WorkspaceId,
+};
 use anyhow::{anyhow, Result};
 use call::participant::{Frame, RemoteVideoTrack};
 use client::{PeerId, User};
@@ -6,8 +8,10 @@ use futures::StreamExt;
 use gpui::{
     elements::*,
     geometry::{rect::RectF, vector::vec2f},
-    Entity, ModelHandle, MouseButton, RenderContext, Task, View, ViewContext,
+    Entity, ModelHandle, MouseButton, RenderContext, Task, View, ViewContext, ViewHandle,
+    WeakViewHandle,
 };
+use project::Project;
 use settings::Settings;
 use smallvec::SmallVec;
 use std::{
@@ -142,7 +146,11 @@ impl Item for SharedScreen {
         self.nav_history = Some(history);
     }
 
-    fn clone_on_split(&self, cx: &mut ViewContext<Self>) -> Option<Self> {
+    fn clone_on_split(
+        &self,
+        _workspace_id: WorkspaceId,
+        cx: &mut ViewContext<Self>,
+    ) -> Option<Self> {
         let track = self.track.upgrade()?;
         Some(Self::new(&track, self.peer_id, self.user.clone(), cx))
     }
@@ -176,9 +184,23 @@ impl Item for SharedScreen {
         Task::ready(Err(anyhow!("Item::reload called on SharedScreen")))
     }
 
-    fn to_item_events(event: &Self::Event) -> Vec<crate::ItemEvent> {
+    fn to_item_events(event: &Self::Event) -> Vec<ItemEvent> {
         match event {
-            Event::Close => vec![crate::ItemEvent::CloseItem],
+            Event::Close => vec![ItemEvent::CloseItem],
         }
     }
+
+    fn serialized_item_kind() -> Option<&'static str> {
+        None
+    }
+
+    fn deserialize(
+        _project: ModelHandle<Project>,
+        _workspace: WeakViewHandle<Workspace>,
+        _workspace_id: WorkspaceId,
+        _item_id: ItemId,
+        _cx: &mut ViewContext<Pane>,
+    ) -> Task<Result<ViewHandle<Self>>> {
+        unreachable!("Shared screen can not be deserialized")
+    }
 }

crates/workspace/src/workspace.rs 🔗

@@ -3,8 +3,11 @@
 /// This may cause issues when you're trying to write tests that use workspace focus to add items at
 /// specific locations.
 pub mod dock;
+pub mod item;
+pub mod notifications;
 pub mod pane;
 pub mod pane_group;
+mod persistence;
 pub mod searchable;
 pub mod shared_screen;
 pub mod sidebar;
@@ -15,7 +18,7 @@ use anyhow::{anyhow, Result};
 use call::ActiveCall;
 use client::{proto, Client, PeerId, TypedEnvelope, UserStore};
 use collections::{hash_map, HashMap, HashSet};
-use dock::{DefaultItemFactory, Dock, ToggleDockButton};
+use dock::{Dock, DockDefaultItemFactory, ToggleDockButton};
 use drag_and_drop::DragAndDrop;
 use fs::{self, Fs};
 use futures::{
@@ -32,57 +35,42 @@ use gpui::{
     MouseButton, MutableAppContext, PathPromptOptions, PromptLevel, RenderContext, Task, View,
     ViewContext, ViewHandle, WeakViewHandle,
 };
+use item::{FollowableItem, FollowableItemHandle, Item, ItemHandle, ProjectItem};
 use language::LanguageRegistry;
+use std::{
+    any::TypeId,
+    borrow::Cow,
+    future::Future,
+    path::{Path, PathBuf},
+    sync::Arc,
+    time::Duration,
+};
+
+use crate::{
+    notifications::simple_message_notification::{MessageNotification, OsOpen},
+    persistence::model::{SerializedPane, SerializedPaneGroup, SerializedWorkspace},
+};
 use log::{error, warn};
+use notifications::NotificationHandle;
 pub use pane::*;
 pub use pane_group::*;
+use persistence::{model::SerializedItem, DB};
+pub use persistence::{
+    model::{ItemId, WorkspaceLocation},
+    WorkspaceDb,
+};
 use postage::prelude::Stream;
-use project::{Project, ProjectEntryId, ProjectPath, ProjectStore, Worktree, WorktreeId};
-use searchable::SearchableItemHandle;
+use project::{Project, ProjectEntryId, ProjectPath, Worktree, WorktreeId};
 use serde::Deserialize;
 use settings::{Autosave, DockAnchor, Settings};
 use shared_screen::SharedScreen;
 use sidebar::{Sidebar, SidebarButtons, SidebarSide, ToggleSidebarItem};
-use smallvec::SmallVec;
 use status_bar::StatusBar;
 pub use status_bar::StatusItemView;
-use std::{
-    any::{Any, TypeId},
-    borrow::Cow,
-    cell::RefCell,
-    fmt,
-    future::Future,
-    path::{Path, PathBuf},
-    rc::Rc,
-    sync::{
-        atomic::{AtomicBool, Ordering::SeqCst},
-        Arc,
-    },
-    time::Duration,
-};
 use theme::{Theme, ThemeRegistry};
 pub use toolbar::{ToolbarItemLocation, ToolbarItemView};
 use util::ResultExt;
 
-type ProjectItemBuilders = HashMap<
-    TypeId,
-    fn(ModelHandle<Project>, AnyModelHandle, &mut ViewContext<Pane>) -> Box<dyn ItemHandle>,
->;
-
-type FollowableItemBuilder = fn(
-    ViewHandle<Pane>,
-    ModelHandle<Project>,
-    &mut Option<proto::view::Variant>,
-    &mut MutableAppContext,
-) -> Option<Task<Result<Box<dyn FollowableItemHandle>>>>;
-type FollowableItemBuilders = HashMap<
-    TypeId,
-    (
-        FollowableItemBuilder,
-        fn(AnyViewHandle) -> Box<dyn FollowableItemHandle>,
-    ),
->;
-
 #[derive(Clone, PartialEq)]
 pub struct RemoveWorktreeFromProject(pub WorktreeId);
 
@@ -151,6 +139,8 @@ pub struct OpenProjectEntryInPane {
     project_entry: ProjectEntryId,
 }
 
+pub type WorkspaceId = i64;
+
 impl_internal_actions!(
     workspace,
     [
@@ -169,6 +159,7 @@ impl_actions!(workspace, [ActivatePane]);
 pub fn init(app_state: Arc<AppState>, cx: &mut MutableAppContext) {
     pane::init(cx);
     dock::init(cx);
+    notifications::init(cx);
 
     cx.add_global_action(open);
     cx.add_global_action({
@@ -183,7 +174,7 @@ pub fn init(app_state: Arc<AppState>, cx: &mut MutableAppContext) {
         let app_state = Arc::downgrade(&app_state);
         move |_: &NewFile, cx: &mut MutableAppContext| {
             if let Some(app_state) = app_state.upgrade() {
-                open_new(&app_state, cx)
+                open_new(&app_state, cx).detach();
             }
         }
     });
@@ -191,7 +182,7 @@ pub fn init(app_state: Arc<AppState>, cx: &mut MutableAppContext) {
         let app_state = Arc::downgrade(&app_state);
         move |_: &NewWindow, cx: &mut MutableAppContext| {
             if let Some(app_state) = app_state.upgrade() {
-                open_new(&app_state, cx)
+                open_new(&app_state, cx).detach();
             }
         }
     });
@@ -310,6 +301,10 @@ pub fn init(app_state: Arc<AppState>, cx: &mut MutableAppContext) {
     client.add_view_message_handler(Workspace::handle_update_followers);
 }
 
+type ProjectItemBuilders = HashMap<
+    TypeId,
+    fn(ModelHandle<Project>, AnyModelHandle, &mut ViewContext<Pane>) -> Box<dyn ItemHandle>,
+>;
 pub fn register_project_item<I: ProjectItem>(cx: &mut MutableAppContext) {
     cx.update_default_global(|builders: &mut ProjectItemBuilders, _| {
         builders.insert(TypeId::of::<I::Item>(), |project, model, cx| {
@@ -319,6 +314,19 @@ pub fn register_project_item<I: ProjectItem>(cx: &mut MutableAppContext) {
     });
 }
 
+type FollowableItemBuilder = fn(
+    ViewHandle<Pane>,
+    ModelHandle<Project>,
+    &mut Option<proto::view::Variant>,
+    &mut MutableAppContext,
+) -> Option<Task<Result<Box<dyn FollowableItemHandle>>>>;
+type FollowableItemBuilders = HashMap<
+    TypeId,
+    (
+        FollowableItemBuilder,
+        fn(AnyViewHandle) -> Box<dyn FollowableItemHandle>,
+    ),
+>;
 pub fn register_followable_item<I: FollowableItem>(cx: &mut MutableAppContext) {
     cx.update_default_global(|builders: &mut FollowableItemBuilders, _| {
         builders.insert(
@@ -336,204 +344,68 @@ pub fn register_followable_item<I: FollowableItem>(cx: &mut MutableAppContext) {
     });
 }
 
+type ItemDeserializers = HashMap<
+    Arc<str>,
+    fn(
+        ModelHandle<Project>,
+        WeakViewHandle<Workspace>,
+        WorkspaceId,
+        ItemId,
+        &mut ViewContext<Pane>,
+    ) -> Task<Result<Box<dyn ItemHandle>>>,
+>;
+pub fn register_deserializable_item<I: Item>(cx: &mut MutableAppContext) {
+    cx.update_default_global(|deserializers: &mut ItemDeserializers, _cx| {
+        if let Some(serialized_item_kind) = I::serialized_item_kind() {
+            deserializers.insert(
+                Arc::from(serialized_item_kind),
+                |project, workspace, workspace_id, item_id, cx| {
+                    let task = I::deserialize(project, workspace, workspace_id, item_id, cx);
+                    cx.foreground()
+                        .spawn(async { Ok(Box::new(task.await?) as Box<_>) })
+                },
+            );
+        }
+    });
+}
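
This registry is the new extension point for restoring items when a workspace is deserialized: an item type advertises a kind string and a deserializer, then registers itself during its crate's init. A sketch under stated assumptions follows; TerminalView and its from_saved_state helper are hypothetical names, and the rest of the Item trait is elided, so this is illustrative rather than compilable as-is:

    impl Item for TerminalView {
        // ...all other Item methods unchanged...

        fn serialized_item_kind() -> Option<&'static str> {
            Some("Terminal")
        }

        fn deserialize(
            project: ModelHandle<Project>,
            workspace: WeakViewHandle<Workspace>,
            workspace_id: WorkspaceId,
            item_id: ItemId,
            cx: &mut ViewContext<Pane>,
        ) -> Task<Result<ViewHandle<Self>>> {
            // Rebuild the view from whatever state was persisted for (workspace_id, item_id).
            Task::ready(TerminalView::from_saved_state(project, workspace, workspace_id, item_id, cx))
        }
    }

    // In the item's crate init:
    workspace::register_deserializable_item::<TerminalView>(cx);
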
+
 pub struct AppState {
     pub languages: Arc<LanguageRegistry>,
     pub themes: Arc<ThemeRegistry>,
     pub client: Arc<client::Client>,
     pub user_store: ModelHandle<client::UserStore>,
-    pub project_store: ModelHandle<ProjectStore>,
     pub fs: Arc<dyn fs::Fs>,
     pub build_window_options: fn() -> WindowOptions<'static>,
     pub initialize_workspace: fn(&mut Workspace, &Arc<AppState>, &mut ViewContext<Workspace>),
-    pub default_item_factory: DefaultItemFactory,
+    pub dock_default_item_factory: DockDefaultItemFactory,
 }
 
-#[derive(Eq, PartialEq, Hash)]
-pub enum ItemEvent {
-    CloseItem,
-    UpdateTab,
-    UpdateBreadcrumbs,
-    Edit,
-}
-
-pub trait Item: View {
-    fn deactivated(&mut self, _: &mut ViewContext<Self>) {}
-    fn workspace_deactivated(&mut self, _: &mut ViewContext<Self>) {}
-    fn navigate(&mut self, _: Box<dyn Any>, _: &mut ViewContext<Self>) -> bool {
-        false
-    }
-    fn tab_description<'a>(&'a self, _: usize, _: &'a AppContext) -> Option<Cow<'a, str>> {
-        None
-    }
-    fn tab_content(&self, detail: Option<usize>, style: &theme::Tab, cx: &AppContext)
-        -> ElementBox;
-    fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
-    fn project_entry_ids(&self, cx: &AppContext) -> SmallVec<[ProjectEntryId; 3]>;
-    fn is_singleton(&self, cx: &AppContext) -> bool;
-    fn set_nav_history(&mut self, _: ItemNavHistory, _: &mut ViewContext<Self>);
-    fn clone_on_split(&self, _: &mut ViewContext<Self>) -> Option<Self>
-    where
-        Self: Sized,
-    {
-        None
-    }
-    fn is_dirty(&self, _: &AppContext) -> bool {
-        false
-    }
-    fn has_conflict(&self, _: &AppContext) -> bool {
-        false
-    }
-    fn can_save(&self, cx: &AppContext) -> bool;
-    fn save(
-        &mut self,
-        project: ModelHandle<Project>,
-        cx: &mut ViewContext<Self>,
-    ) -> Task<Result<()>>;
-    fn save_as(
-        &mut self,
-        project: ModelHandle<Project>,
-        abs_path: PathBuf,
-        cx: &mut ViewContext<Self>,
-    ) -> Task<Result<()>>;
-    fn reload(
-        &mut self,
-        project: ModelHandle<Project>,
-        cx: &mut ViewContext<Self>,
-    ) -> Task<Result<()>>;
-    fn git_diff_recalc(
-        &mut self,
-        _project: ModelHandle<Project>,
-        _cx: &mut ViewContext<Self>,
-    ) -> Task<Result<()>> {
-        Task::ready(Ok(()))
-    }
-    fn to_item_events(event: &Self::Event) -> Vec<ItemEvent>;
-    fn should_close_item_on_event(_: &Self::Event) -> bool {
-        false
-    }
-    fn should_update_tab_on_event(_: &Self::Event) -> bool {
-        false
-    }
-    fn is_edit_event(_: &Self::Event) -> bool {
-        false
-    }
-    fn act_as_type(
-        &self,
-        type_id: TypeId,
-        self_handle: &ViewHandle<Self>,
-        _: &AppContext,
-    ) -> Option<AnyViewHandle> {
-        if TypeId::of::<Self>() == type_id {
-            Some(self_handle.into())
-        } else {
-            None
-        }
-    }
-    fn as_searchable(&self, _: &ViewHandle<Self>) -> Option<Box<dyn SearchableItemHandle>> {
-        None
-    }
-
-    fn breadcrumb_location(&self) -> ToolbarItemLocation {
-        ToolbarItemLocation::Hidden
-    }
-    fn breadcrumbs(&self, _theme: &Theme, _cx: &AppContext) -> Option<Vec<ElementBox>> {
-        None
-    }
-}
-
-pub trait ProjectItem: Item {
-    type Item: project::Item;
-
-    fn for_project_item(
-        project: ModelHandle<Project>,
-        item: ModelHandle<Self::Item>,
-        cx: &mut ViewContext<Self>,
-    ) -> Self;
-}
-
-pub trait FollowableItem: Item {
-    fn to_state_proto(&self, cx: &AppContext) -> Option<proto::view::Variant>;
-    fn from_state_proto(
-        pane: ViewHandle<Pane>,
-        project: ModelHandle<Project>,
-        state: &mut Option<proto::view::Variant>,
-        cx: &mut MutableAppContext,
-    ) -> Option<Task<Result<ViewHandle<Self>>>>;
-    fn add_event_to_update_proto(
-        &self,
-        event: &Self::Event,
-        update: &mut Option<proto::update_view::Variant>,
-        cx: &AppContext,
-    ) -> bool;
-    fn apply_update_proto(
-        &mut self,
-        project: &ModelHandle<Project>,
-        message: proto::update_view::Variant,
-        cx: &mut ViewContext<Self>,
-    ) -> Task<Result<()>>;
-
-    fn set_leader_replica_id(&mut self, leader_replica_id: Option<u16>, cx: &mut ViewContext<Self>);
-    fn should_unfollow_on_event(event: &Self::Event, cx: &AppContext) -> bool;
-}
+impl AppState {
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn test(cx: &mut MutableAppContext) -> Arc<Self> {
+        use fs::HomeDir;
 
-pub trait FollowableItemHandle: ItemHandle {
-    fn set_leader_replica_id(&self, leader_replica_id: Option<u16>, cx: &mut MutableAppContext);
-    fn to_state_proto(&self, cx: &AppContext) -> Option<proto::view::Variant>;
-    fn add_event_to_update_proto(
-        &self,
-        event: &dyn Any,
-        update: &mut Option<proto::update_view::Variant>,
-        cx: &AppContext,
-    ) -> bool;
-    fn apply_update_proto(
-        &self,
-        project: &ModelHandle<Project>,
-        message: proto::update_view::Variant,
-        cx: &mut MutableAppContext,
-    ) -> Task<Result<()>>;
-    fn should_unfollow_on_event(&self, event: &dyn Any, cx: &AppContext) -> bool;
-}
+        cx.set_global(HomeDir(Path::new("/tmp/").to_path_buf()));
+        let settings = Settings::test(cx);
+        cx.set_global(settings);
 
-impl<T: FollowableItem> FollowableItemHandle for ViewHandle<T> {
-    fn set_leader_replica_id(&self, leader_replica_id: Option<u16>, cx: &mut MutableAppContext) {
-        self.update(cx, |this, cx| {
-            this.set_leader_replica_id(leader_replica_id, cx)
+        let fs = fs::FakeFs::new(cx.background().clone());
+        let languages = Arc::new(LanguageRegistry::test());
+        let http_client = client::test::FakeHttpClient::with_404_response();
+        let client = Client::new(http_client.clone(), cx);
+        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
+        let themes = ThemeRegistry::new((), cx.font_cache().clone());
+        Arc::new(Self {
+            client,
+            themes,
+            fs,
+            languages,
+            user_store,
+            initialize_workspace: |_, _, _| {},
+            build_window_options: Default::default,
+            dock_default_item_factory: |_, _| unimplemented!(),
         })
     }
-
-    fn to_state_proto(&self, cx: &AppContext) -> Option<proto::view::Variant> {
-        self.read(cx).to_state_proto(cx)
-    }
-
-    fn add_event_to_update_proto(
-        &self,
-        event: &dyn Any,
-        update: &mut Option<proto::update_view::Variant>,
-        cx: &AppContext,
-    ) -> bool {
-        if let Some(event) = event.downcast_ref() {
-            self.read(cx).add_event_to_update_proto(event, update, cx)
-        } else {
-            false
-        }
-    }
-
-    fn apply_update_proto(
-        &self,
-        project: &ModelHandle<Project>,
-        message: proto::update_view::Variant,
-        cx: &mut MutableAppContext,
-    ) -> Task<Result<()>> {
-        self.update(cx, |this, cx| this.apply_update_proto(project, message, cx))
-    }
-
-    fn should_unfollow_on_event(&self, event: &dyn Any, cx: &AppContext) -> bool {
-        if let Some(event) = event.downcast_ref() {
-            T::should_unfollow_on_event(event, cx)
-        } else {
-            false
-        }
-    }
 }
 
 struct DelayedDebouncedEditAction {
@@ -577,7 +449,7 @@ impl DelayedDebouncedEditAction {
 
             futures::select_biased! {
                 _ = receiver => return,
-                _ = timer => {}
+                    _ = timer => {}
             }
 
             if let Some(project) = project.upgrade(&cx) {
@@ -587,482 +459,6 @@ impl DelayedDebouncedEditAction {
     }
 }
 
-pub trait ItemHandle: 'static + fmt::Debug {
-    fn subscribe_to_item_events(
-        &self,
-        cx: &mut MutableAppContext,
-        handler: Box<dyn Fn(ItemEvent, &mut MutableAppContext)>,
-    ) -> gpui::Subscription;
-    fn tab_description<'a>(&self, detail: usize, cx: &'a AppContext) -> Option<Cow<'a, str>>;
-    fn tab_content(&self, detail: Option<usize>, style: &theme::Tab, cx: &AppContext)
-        -> ElementBox;
-    fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
-    fn project_entry_ids(&self, cx: &AppContext) -> SmallVec<[ProjectEntryId; 3]>;
-    fn is_singleton(&self, cx: &AppContext) -> bool;
-    fn boxed_clone(&self) -> Box<dyn ItemHandle>;
-    fn clone_on_split(&self, cx: &mut MutableAppContext) -> Option<Box<dyn ItemHandle>>;
-    fn added_to_pane(
-        &self,
-        workspace: &mut Workspace,
-        pane: ViewHandle<Pane>,
-        cx: &mut ViewContext<Workspace>,
-    );
-    fn deactivated(&self, cx: &mut MutableAppContext);
-    fn workspace_deactivated(&self, cx: &mut MutableAppContext);
-    fn navigate(&self, data: Box<dyn Any>, cx: &mut MutableAppContext) -> bool;
-    fn id(&self) -> usize;
-    fn window_id(&self) -> usize;
-    fn to_any(&self) -> AnyViewHandle;
-    fn is_dirty(&self, cx: &AppContext) -> bool;
-    fn has_conflict(&self, cx: &AppContext) -> bool;
-    fn can_save(&self, cx: &AppContext) -> bool;
-    fn save(&self, project: ModelHandle<Project>, cx: &mut MutableAppContext) -> Task<Result<()>>;
-    fn save_as(
-        &self,
-        project: ModelHandle<Project>,
-        abs_path: PathBuf,
-        cx: &mut MutableAppContext,
-    ) -> Task<Result<()>>;
-    fn reload(&self, project: ModelHandle<Project>, cx: &mut MutableAppContext)
-        -> Task<Result<()>>;
-    fn git_diff_recalc(
-        &self,
-        project: ModelHandle<Project>,
-        cx: &mut MutableAppContext,
-    ) -> Task<Result<()>>;
-    fn act_as_type(&self, type_id: TypeId, cx: &AppContext) -> Option<AnyViewHandle>;
-    fn to_followable_item_handle(&self, cx: &AppContext) -> Option<Box<dyn FollowableItemHandle>>;
-    fn on_release(
-        &self,
-        cx: &mut MutableAppContext,
-        callback: Box<dyn FnOnce(&mut MutableAppContext)>,
-    ) -> gpui::Subscription;
-    fn to_searchable_item_handle(&self, cx: &AppContext) -> Option<Box<dyn SearchableItemHandle>>;
-    fn breadcrumb_location(&self, cx: &AppContext) -> ToolbarItemLocation;
-    fn breadcrumbs(&self, theme: &Theme, cx: &AppContext) -> Option<Vec<ElementBox>>;
-}
-
-pub trait WeakItemHandle {
-    fn id(&self) -> usize;
-    fn window_id(&self) -> usize;
-    fn upgrade(&self, cx: &AppContext) -> Option<Box<dyn ItemHandle>>;
-}
-
-impl dyn ItemHandle {
-    pub fn downcast<T: View>(&self) -> Option<ViewHandle<T>> {
-        self.to_any().downcast()
-    }
-
-    pub fn act_as<T: View>(&self, cx: &AppContext) -> Option<ViewHandle<T>> {
-        self.act_as_type(TypeId::of::<T>(), cx)
-            .and_then(|t| t.downcast())
-    }
-}
-
-impl<T: Item> ItemHandle for ViewHandle<T> {
-    fn subscribe_to_item_events(
-        &self,
-        cx: &mut MutableAppContext,
-        handler: Box<dyn Fn(ItemEvent, &mut MutableAppContext)>,
-    ) -> gpui::Subscription {
-        cx.subscribe(self, move |_, event, cx| {
-            for item_event in T::to_item_events(event) {
-                handler(item_event, cx)
-            }
-        })
-    }
-
-    fn tab_description<'a>(&self, detail: usize, cx: &'a AppContext) -> Option<Cow<'a, str>> {
-        self.read(cx).tab_description(detail, cx)
-    }
-
-    fn tab_content(
-        &self,
-        detail: Option<usize>,
-        style: &theme::Tab,
-        cx: &AppContext,
-    ) -> ElementBox {
-        self.read(cx).tab_content(detail, style, cx)
-    }
-
-    fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
-        self.read(cx).project_path(cx)
-    }
-
-    fn project_entry_ids(&self, cx: &AppContext) -> SmallVec<[ProjectEntryId; 3]> {
-        self.read(cx).project_entry_ids(cx)
-    }
-
-    fn is_singleton(&self, cx: &AppContext) -> bool {
-        self.read(cx).is_singleton(cx)
-    }
-
-    fn boxed_clone(&self) -> Box<dyn ItemHandle> {
-        Box::new(self.clone())
-    }
-
-    fn clone_on_split(&self, cx: &mut MutableAppContext) -> Option<Box<dyn ItemHandle>> {
-        self.update(cx, |item, cx| {
-            cx.add_option_view(|cx| item.clone_on_split(cx))
-        })
-        .map(|handle| Box::new(handle) as Box<dyn ItemHandle>)
-    }
-
-    fn added_to_pane(
-        &self,
-        workspace: &mut Workspace,
-        pane: ViewHandle<Pane>,
-        cx: &mut ViewContext<Workspace>,
-    ) {
-        let history = pane.read(cx).nav_history_for_item(self);
-        self.update(cx, |this, cx| this.set_nav_history(history, cx));
-
-        if let Some(followed_item) = self.to_followable_item_handle(cx) {
-            if let Some(message) = followed_item.to_state_proto(cx) {
-                workspace.update_followers(
-                    proto::update_followers::Variant::CreateView(proto::View {
-                        id: followed_item.id() as u64,
-                        variant: Some(message),
-                        leader_id: workspace.leader_for_pane(&pane).map(|id| id.0),
-                    }),
-                    cx,
-                );
-            }
-        }
-
-        if workspace
-            .panes_by_item
-            .insert(self.id(), pane.downgrade())
-            .is_none()
-        {
-            let mut pending_autosave = DelayedDebouncedEditAction::new();
-            let mut pending_git_update = DelayedDebouncedEditAction::new();
-            let pending_update = Rc::new(RefCell::new(None));
-            let pending_update_scheduled = Rc::new(AtomicBool::new(false));
-
-            let mut event_subscription =
-                Some(cx.subscribe(self, move |workspace, item, event, cx| {
-                    let pane = if let Some(pane) = workspace
-                        .panes_by_item
-                        .get(&item.id())
-                        .and_then(|pane| pane.upgrade(cx))
-                    {
-                        pane
-                    } else {
-                        log::error!("unexpected item event after pane was dropped");
-                        return;
-                    };
-
-                    if let Some(item) = item.to_followable_item_handle(cx) {
-                        let leader_id = workspace.leader_for_pane(&pane);
-
-                        if leader_id.is_some() && item.should_unfollow_on_event(event, cx) {
-                            workspace.unfollow(&pane, cx);
-                        }
-
-                        if item.add_event_to_update_proto(
-                            event,
-                            &mut *pending_update.borrow_mut(),
-                            cx,
-                        ) && !pending_update_scheduled.load(SeqCst)
-                        {
-                            pending_update_scheduled.store(true, SeqCst);
-                            cx.after_window_update({
-                                let pending_update = pending_update.clone();
-                                let pending_update_scheduled = pending_update_scheduled.clone();
-                                move |this, cx| {
-                                    pending_update_scheduled.store(false, SeqCst);
-                                    this.update_followers(
-                                        proto::update_followers::Variant::UpdateView(
-                                            proto::UpdateView {
-                                                id: item.id() as u64,
-                                                variant: pending_update.borrow_mut().take(),
-                                                leader_id: leader_id.map(|id| id.0),
-                                            },
-                                        ),
-                                        cx,
-                                    );
-                                }
-                            });
-                        }
-                    }
-
-                    for item_event in T::to_item_events(event).into_iter() {
-                        match item_event {
-                            ItemEvent::CloseItem => {
-                                Pane::close_item(workspace, pane, item.id(), cx)
-                                    .detach_and_log_err(cx);
-                                return;
-                            }
-
-                            ItemEvent::UpdateTab => {
-                                pane.update(cx, |_, cx| {
-                                    cx.emit(pane::Event::ChangeItemTitle);
-                                    cx.notify();
-                                });
-                            }
-
-                            ItemEvent::Edit => {
-                                if let Autosave::AfterDelay { milliseconds } =
-                                    cx.global::<Settings>().autosave
-                                {
-                                    let delay = Duration::from_millis(milliseconds);
-                                    let item = item.clone();
-                                    pending_autosave.fire_new(
-                                        delay,
-                                        workspace,
-                                        cx,
-                                        |project, mut cx| async move {
-                                            cx.update(|cx| Pane::autosave_item(&item, project, cx))
-                                                .await
-                                                .log_err();
-                                        },
-                                    );
-                                }
-
-                                let settings = cx.global::<Settings>();
-                                let debounce_delay = settings.git_overrides.gutter_debounce;
-
-                                let item = item.clone();
-
-                                if let Some(delay) = debounce_delay {
-                                    const MIN_GIT_DELAY: u64 = 50;
-
-                                    let delay = delay.max(MIN_GIT_DELAY);
-                                    let duration = Duration::from_millis(delay);
-
-                                    pending_git_update.fire_new(
-                                        duration,
-                                        workspace,
-                                        cx,
-                                        |project, mut cx| async move {
-                                            cx.update(|cx| item.git_diff_recalc(project, cx))
-                                                .await
-                                                .log_err();
-                                        },
-                                    );
-                                } else {
-                                    let project = workspace.project().downgrade();
-                                    cx.spawn_weak(|_, mut cx| async move {
-                                        if let Some(project) = project.upgrade(&cx) {
-                                            cx.update(|cx| item.git_diff_recalc(project, cx))
-                                                .await
-                                                .log_err();
-                                        }
-                                    })
-                                    .detach();
-                                }
-                            }
-
-                            _ => {}
-                        }
-                    }
-                }));
-
-            cx.observe_focus(self, move |workspace, item, focused, cx| {
-                if !focused && cx.global::<Settings>().autosave == Autosave::OnFocusChange {
-                    Pane::autosave_item(&item, workspace.project.clone(), cx)
-                        .detach_and_log_err(cx);
-                }
-            })
-            .detach();
-
-            let item_id = self.id();
-            cx.observe_release(self, move |workspace, _, _| {
-                workspace.panes_by_item.remove(&item_id);
-                event_subscription.take();
-            })
-            .detach();
-        }
-    }
-
-    fn deactivated(&self, cx: &mut MutableAppContext) {
-        self.update(cx, |this, cx| this.deactivated(cx));
-    }
-
-    fn workspace_deactivated(&self, cx: &mut MutableAppContext) {
-        self.update(cx, |this, cx| this.workspace_deactivated(cx));
-    }
-
-    fn navigate(&self, data: Box<dyn Any>, cx: &mut MutableAppContext) -> bool {
-        self.update(cx, |this, cx| this.navigate(data, cx))
-    }
-
-    fn id(&self) -> usize {
-        self.id()
-    }
-
-    fn window_id(&self) -> usize {
-        self.window_id()
-    }
-
-    fn to_any(&self) -> AnyViewHandle {
-        self.into()
-    }
-
-    fn is_dirty(&self, cx: &AppContext) -> bool {
-        self.read(cx).is_dirty(cx)
-    }
-
-    fn has_conflict(&self, cx: &AppContext) -> bool {
-        self.read(cx).has_conflict(cx)
-    }
-
-    fn can_save(&self, cx: &AppContext) -> bool {
-        self.read(cx).can_save(cx)
-    }
-
-    fn save(&self, project: ModelHandle<Project>, cx: &mut MutableAppContext) -> Task<Result<()>> {
-        self.update(cx, |item, cx| item.save(project, cx))
-    }
-
-    fn save_as(
-        &self,
-        project: ModelHandle<Project>,
-        abs_path: PathBuf,
-        cx: &mut MutableAppContext,
-    ) -> Task<anyhow::Result<()>> {
-        self.update(cx, |item, cx| item.save_as(project, abs_path, cx))
-    }
-
-    fn reload(
-        &self,
-        project: ModelHandle<Project>,
-        cx: &mut MutableAppContext,
-    ) -> Task<Result<()>> {
-        self.update(cx, |item, cx| item.reload(project, cx))
-    }
-
-    fn git_diff_recalc(
-        &self,
-        project: ModelHandle<Project>,
-        cx: &mut MutableAppContext,
-    ) -> Task<Result<()>> {
-        self.update(cx, |item, cx| item.git_diff_recalc(project, cx))
-    }
-
-    fn act_as_type(&self, type_id: TypeId, cx: &AppContext) -> Option<AnyViewHandle> {
-        self.read(cx).act_as_type(type_id, self, cx)
-    }
-
-    fn to_followable_item_handle(&self, cx: &AppContext) -> Option<Box<dyn FollowableItemHandle>> {
-        if cx.has_global::<FollowableItemBuilders>() {
-            let builders = cx.global::<FollowableItemBuilders>();
-            let item = self.to_any();
-            Some(builders.get(&item.view_type())?.1(item))
-        } else {
-            None
-        }
-    }
-
-    fn on_release(
-        &self,
-        cx: &mut MutableAppContext,
-        callback: Box<dyn FnOnce(&mut MutableAppContext)>,
-    ) -> gpui::Subscription {
-        cx.observe_release(self, move |_, cx| callback(cx))
-    }
-
-    fn to_searchable_item_handle(&self, cx: &AppContext) -> Option<Box<dyn SearchableItemHandle>> {
-        self.read(cx).as_searchable(self)
-    }
-
-    fn breadcrumb_location(&self, cx: &AppContext) -> ToolbarItemLocation {
-        self.read(cx).breadcrumb_location()
-    }
-
-    fn breadcrumbs(&self, theme: &Theme, cx: &AppContext) -> Option<Vec<ElementBox>> {
-        self.read(cx).breadcrumbs(theme, cx)
-    }
-}
-
-impl From<Box<dyn ItemHandle>> for AnyViewHandle {
-    fn from(val: Box<dyn ItemHandle>) -> Self {
-        val.to_any()
-    }
-}
-
-impl From<&Box<dyn ItemHandle>> for AnyViewHandle {
-    fn from(val: &Box<dyn ItemHandle>) -> Self {
-        val.to_any()
-    }
-}
-
-impl Clone for Box<dyn ItemHandle> {
-    fn clone(&self) -> Box<dyn ItemHandle> {
-        self.boxed_clone()
-    }
-}
-
-impl<T: Item> WeakItemHandle for WeakViewHandle<T> {
-    fn id(&self) -> usize {
-        self.id()
-    }
-
-    fn window_id(&self) -> usize {
-        self.window_id()
-    }
-
-    fn upgrade(&self, cx: &AppContext) -> Option<Box<dyn ItemHandle>> {
-        self.upgrade(cx).map(|v| Box::new(v) as Box<dyn ItemHandle>)
-    }
-}
-
-pub trait Notification: View {
-    fn should_dismiss_notification_on_event(&self, event: &<Self as Entity>::Event) -> bool;
-}
-
-pub trait NotificationHandle {
-    fn id(&self) -> usize;
-    fn to_any(&self) -> AnyViewHandle;
-}
-
-impl<T: Notification> NotificationHandle for ViewHandle<T> {
-    fn id(&self) -> usize {
-        self.id()
-    }
-
-    fn to_any(&self) -> AnyViewHandle {
-        self.into()
-    }
-}
-
-impl From<&dyn NotificationHandle> for AnyViewHandle {
-    fn from(val: &dyn NotificationHandle) -> Self {
-        val.to_any()
-    }
-}
-
-impl AppState {
-    #[cfg(any(test, feature = "test-support"))]
-    pub fn test(cx: &mut MutableAppContext) -> Arc<Self> {
-        use fs::HomeDir;
-
-        cx.set_global(HomeDir(Path::new("/tmp/").to_path_buf()));
-        let settings = Settings::test(cx);
-        cx.set_global(settings);
-
-        let fs = fs::FakeFs::new(cx.background().clone());
-        let languages = Arc::new(LanguageRegistry::test());
-        let http_client = client::test::FakeHttpClient::with_404_response();
-        let client = Client::new(http_client.clone(), cx);
-        let project_store = cx.add_model(|_| ProjectStore::new());
-        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
-        let themes = ThemeRegistry::new((), cx.font_cache().clone());
-        Arc::new(Self {
-            client,
-            themes,
-            fs,
-            languages,
-            user_store,
-            project_store,
-            initialize_workspace: |_, _, _| {},
-            build_window_options: Default::default,
-            default_item_factory: |_, _| unimplemented!(),
-        })
-    }
-}
-
 pub enum Event {
     DockAnchorChanged,
     PaneAdded(ViewHandle<Pane>),
@@ -1094,6 +490,7 @@ pub struct Workspace {
     window_edited: bool,
     active_call: Option<(ModelHandle<ActiveCall>, Vec<gpui::Subscription>)>,
     leader_updates_tx: mpsc::UnboundedSender<(PeerId, proto::UpdateFollowers)>,
+    database_id: WorkspaceId,
     _apply_leader_updates: Task<Result<()>>,
     _observe_current_user: Task<()>,
 }
@@ -1113,8 +510,10 @@ struct FollowerState {
 
 impl Workspace {
     pub fn new(
+        serialized_workspace: Option<SerializedWorkspace>,
+        workspace_id: WorkspaceId,
         project: ModelHandle<Project>,
-        dock_default_factory: DefaultItemFactory,
+        dock_default_factory: DockDefaultItemFactory,
         cx: &mut ViewContext<Self>,
     ) -> Self {
         cx.observe_fullscreen(|_, _, cx| cx.notify()).detach();
@@ -1132,6 +531,7 @@ impl Workspace {
                 }
                 project::Event::WorktreeRemoved(_) | project::Event::WorktreeAdded => {
                     this.update_window_title(cx);
+                    this.serialize_workspace(cx);
                 }
                 project::Event::DisconnectedFromHost => {
                     this.update_window_edited(cx);
@@ -1151,6 +551,8 @@ impl Workspace {
         .detach();
         cx.focus(&center_pane);
         cx.emit(Event::PaneAdded(center_pane.clone()));
+        let dock = Dock::new(dock_default_factory, cx);
+        let dock_pane = dock.pane().clone();
 
         let fs = project.read(cx).fs().clone();
         let user_store = project.read(cx).user_store();
@@ -1191,9 +593,6 @@ impl Workspace {
 
         cx.emit_global(WorkspaceCreated(weak_handle.clone()));
 
-        let dock = Dock::new(cx, dock_default_factory);
-        let dock_pane = dock.pane().clone();
-
         let left_sidebar = cx.add_view(|_| Sidebar::new(SidebarSide::Left));
         let right_sidebar = cx.add_view(|_| Sidebar::new(SidebarSide::Right));
         let left_sidebar_buttons = cx.add_view(|cx| SidebarButtons::new(left_sidebar.clone(), cx));
@@ -1220,41 +619,145 @@ impl Workspace {
             active_call = Some((call, subscriptions));
         }
 
-        let mut this = Workspace {
-            modal: None,
-            weak_self: weak_handle,
-            center: PaneGroup::new(center_pane.clone()),
-            dock,
-            // When removing an item, the last element remaining in this array
-            // is used to find where focus should fall back to. As such, the order
-            // of these two variables is important.
-            panes: vec![dock_pane, center_pane.clone()],
-            panes_by_item: Default::default(),
-            active_pane: center_pane.clone(),
-            last_active_center_pane: Some(center_pane.downgrade()),
-            status_bar,
-            titlebar_item: None,
-            notifications: Default::default(),
-            client,
-            remote_entity_subscription: None,
-            user_store,
-            fs,
-            left_sidebar,
-            right_sidebar,
-            project,
-            leader_state: Default::default(),
-            follower_states_by_leader: Default::default(),
-            last_leaders_by_pane: Default::default(),
-            window_edited: false,
-            active_call,
-            _observe_current_user,
-            _apply_leader_updates,
-            leader_updates_tx,
-        };
-        this.project_remote_id_changed(this.project.read(cx).remote_id(), cx);
-        cx.defer(|this, cx| this.update_window_title(cx));
+        let mut this = Workspace {
+            modal: None,
+            weak_self: weak_handle.clone(),
+            center: PaneGroup::new(center_pane.clone()),
+            dock,
+            // When removing an item, the last element remaining in this array
+            // is used to find where focus should fall back to. As such, the order
+            // of these two variables is important.
+            panes: vec![dock_pane.clone(), center_pane.clone()],
+            panes_by_item: Default::default(),
+            active_pane: center_pane.clone(),
+            last_active_center_pane: Some(center_pane.downgrade()),
+            status_bar,
+            titlebar_item: None,
+            notifications: Default::default(),
+            client,
+            remote_entity_subscription: None,
+            user_store,
+            fs,
+            left_sidebar,
+            right_sidebar,
+            project: project.clone(),
+            leader_state: Default::default(),
+            follower_states_by_leader: Default::default(),
+            last_leaders_by_pane: Default::default(),
+            window_edited: false,
+            active_call,
+            database_id: workspace_id,
+            _observe_current_user,
+            _apply_leader_updates,
+            leader_updates_tx,
+        };
+        this.project_remote_id_changed(project.read(cx).remote_id(), cx);
+        cx.defer(|this, cx| this.update_window_title(cx));
+
+        if let Some(serialized_workspace) = serialized_workspace {
+            cx.defer(move |_, cx| {
+                Self::load_from_serialized_workspace(weak_handle, serialized_workspace, cx)
+            });
+        }
+
+        this
+    }
+
+    fn new_local(
+        abs_paths: Vec<PathBuf>,
+        app_state: Arc<AppState>,
+        cx: &mut MutableAppContext,
+    ) -> Task<(
+        ViewHandle<Workspace>,
+        Vec<Option<Result<Box<dyn ItemHandle>, anyhow::Error>>>,
+    )> {
+        let project_handle = Project::local(
+            app_state.client.clone(),
+            app_state.user_store.clone(),
+            app_state.languages.clone(),
+            app_state.fs.clone(),
+            cx,
+        );
+
+        cx.spawn(|mut cx| async move {
+            let serialized_workspace = persistence::DB.workspace_for_roots(&abs_paths.as_slice());
+
+            let paths_to_open = serialized_workspace
+                .as_ref()
+                .map(|workspace| workspace.location.paths())
+                .unwrap_or(Arc::new(abs_paths));
+
+            // Get project paths for all of the abs_paths
+            let mut worktree_roots: HashSet<Arc<Path>> = Default::default();
+            let mut project_paths = Vec::new();
+            for path in paths_to_open.iter() {
+                if let Some((worktree, project_entry)) = cx
+                    .update(|cx| {
+                        Workspace::project_path_for_path(project_handle.clone(), &path, true, cx)
+                    })
+                    .await
+                    .log_err()
+                {
+                    worktree_roots.insert(worktree.read_with(&mut cx, |tree, _| tree.abs_path()));
+                    project_paths.push(Some(project_entry));
+                } else {
+                    project_paths.push(None);
+                }
+            }
+
+            let workspace_id = if let Some(serialized_workspace) = serialized_workspace.as_ref() {
+                serialized_workspace.id
+            } else {
+                DB.next_id().await.unwrap_or(0)
+            };
+
+            // Use the serialized workspace to construct the new window
+            let (_, workspace) = cx.add_window((app_state.build_window_options)(), |cx| {
+                let mut workspace = Workspace::new(
+                    serialized_workspace,
+                    workspace_id,
+                    project_handle,
+                    app_state.dock_default_item_factory,
+                    cx,
+                );
+                (app_state.initialize_workspace)(&mut workspace, &app_state, cx);
+                workspace
+            });
+
+            notify_if_database_failed(&workspace, &mut cx);
+
+            // Call open path for each of the project paths
+            // (this will bring them to the front if they were in the serialized workspace)
+            debug_assert!(paths_to_open.len() == project_paths.len());
+            let tasks = paths_to_open
+                .iter()
+                .cloned()
+                .zip(project_paths.into_iter())
+                .map(|(abs_path, project_path)| {
+                    let workspace = workspace.clone();
+                    cx.spawn(|mut cx| {
+                        let fs = app_state.fs.clone();
+                        async move {
+                            let project_path = project_path?;
+                            if fs.is_file(&abs_path).await {
+                                Some(
+                                    workspace
+                                        .update(&mut cx, |workspace, cx| {
+                                            workspace.open_path(project_path, None, true, cx)
+                                        })
+                                        .await,
+                                )
+                            } else {
+                                None
+                            }
+                        }
+                    })
+                });
+
+            let opened_items = futures::future::join_all(tasks.into_iter()).await;
 
-        this
+            (workspace, opened_items)
+        })
     }
 
     pub fn weak_handle(&self) -> WeakViewHandle<Self> {

crates/zed/Cargo.toml 🔗

@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathansobo@gmail.com>"]
 description = "The fast, collaborative code editor."
 edition = "2021"
 name = "zed"
-version = "0.67.0"
+version = "0.68.0"
 
 [lib]
 name = "zed"
@@ -48,7 +48,7 @@ rpc = { path = "../rpc" }
 settings = { path = "../settings" }
 sum_tree = { path = "../sum_tree" }
 text = { path = "../text" }
-terminal = { path = "../terminal" }
+terminal_view = { path = "../terminal_view" }
 theme = { path = "../theme" }
 theme_selector = { path = "../theme_selector" }
 theme_testbench = { path = "../theme_testbench" }
@@ -62,7 +62,6 @@ async-trait = "0.1"
 backtrace = "0.3"
 chrono = "0.4"
 ctor = "0.1.20"
-dirs = "3.0"
 easy-parallel = "3.1.0"
 env_logger = "0.9"
 futures = "0.3"
@@ -105,6 +104,8 @@ tree-sitter-toml = { git = "https://github.com/tree-sitter/tree-sitter-toml", re
 tree-sitter-typescript = "0.20.1"
 tree-sitter-ruby = "0.20.0"
 tree-sitter-html = "0.19.0"
+tree-sitter-scheme = { git = "https://github.com/6cdh/tree-sitter-scheme", rev = "af0fd1fa452cb2562dc7b5c8a8c55551c39273b9"}
+tree-sitter-racket = { git = "https://github.com/zed-industries/tree-sitter-racket", rev = "eb010cf2c674c6fd9a6316a84e28ef90190fe51a"}
 url = "2.2"
 
 [dev-dependencies]

crates/zed/src/feedback.rs 🔗

@@ -5,7 +5,7 @@ use gpui::{
     Element, Entity, MouseButton, RenderContext, View,
 };
 use settings::Settings;
-use workspace::StatusItemView;
+use workspace::{item::ItemHandle, StatusItemView};
 
 pub const NEW_ISSUE_URL: &str = "https://github.com/zed-industries/feedback/issues/new/choose";
 
@@ -43,7 +43,7 @@ impl View for FeedbackLink {
 impl StatusItemView for FeedbackLink {
     fn set_active_pane_item(
         &mut self,
-        _: Option<&dyn workspace::ItemHandle>,
+        _: Option<&dyn ItemHandle>,
         _: &mut gpui::ViewContext<Self>,
     ) {
     }

crates/zed/src/languages.rs 🔗

@@ -14,6 +14,7 @@ mod language_plugin;
 mod python;
 mod ruby;
 mod rust;
+
 mod typescript;
 
 // 1. Add tree-sitter-{language} parser to zed crate
@@ -127,6 +128,8 @@ pub async fn init(languages: Arc<LanguageRegistry>, _executor: Arc<Background>)
             tree_sitter_embedded_template::language(),
             Some(CachedLspAdapter::new(ruby::RubyLanguageServer).await),
         ),
+        ("scheme", tree_sitter_scheme::language(), None),
+        ("racket", tree_sitter_racket::language(), None),
     ] {
         languages.add(language(name, grammar, lsp_adapter));
     }

crates/zed/src/languages/c/outline.scm 🔗

@@ -14,17 +14,57 @@
     declarator: (_) @name) @item
 
 (declaration
-    type: (_) @context
-    declarator: (function_declarator
-        declarator: (_) @name
-        parameters: (parameter_list
-            "(" @context
-            ")" @context))) @item
+    (type_qualifier)? @context
+    type: (_)? @context
+    declarator: [
+        (function_declarator
+            declarator: (_) @name
+            parameters: (parameter_list
+                "(" @context
+                ")" @context))
+        (pointer_declarator
+            "*" @context
+            declarator: (function_declarator
+                declarator: (_) @name
+                parameters: (parameter_list
+                    "(" @context
+                    ")" @context)))
+        (pointer_declarator
+            "*" @context
+            declarator: (pointer_declarator
+                "*" @context
+                declarator: (function_declarator
+                    declarator: (_) @name
+                    parameters: (parameter_list
+                        "(" @context
+                        ")" @context))))
+    ]
+) @item
 
 (function_definition
-    type: (_) @context
-    declarator: (function_declarator
-        declarator: (_) @name
-        parameters: (parameter_list
-            "(" @context
-            ")" @context))) @item
+    (type_qualifier)? @context
+    type: (_)? @context
+    declarator: [
+        (function_declarator
+            declarator: (_) @name
+            parameters: (parameter_list
+                "(" @context
+                ")" @context))
+        (pointer_declarator
+            "*" @context
+            declarator: (function_declarator
+                declarator: (_) @name
+                parameters: (parameter_list
+                    "(" @context
+                    ")" @context)))
+        (pointer_declarator
+            "*" @context
+            declarator: (pointer_declarator
+                "*" @context
+                declarator: (function_declarator
+                    declarator: (_) @name
+                    parameters: (parameter_list
+                        "(" @context
+                        ")" @context))))
+    ]
+) @item

crates/zed/src/languages/cpp/outline.scm 🔗

@@ -51,6 +51,22 @@
                 parameters: (parameter_list
                     "(" @context
                     ")" @context)))
+        (pointer_declarator
+            "*" @context
+            declarator: (pointer_declarator
+                "*" @context
+                declarator: (function_declarator
+                    declarator: (_) @name
+                    parameters: (parameter_list
+                        "(" @context
+                        ")" @context))))
+        (reference_declarator
+            ["&" "&&"] @context
+            (function_declarator
+                declarator: (_) @name
+                parameters: (parameter_list
+                    "(" @context
+                    ")" @context)))
     ]
     (type_qualifier)? @context) @item
 
@@ -74,6 +90,22 @@
                 parameters: (parameter_list
                     "(" @context
                     ")" @context)))
+        (pointer_declarator
+            "*" @context
+            declarator: (pointer_declarator
+                "*" @context
+                declarator: (function_declarator
+                    declarator: (_) @name
+                    parameters: (parameter_list
+                        "(" @context
+                        ")" @context))))
+        (reference_declarator
+            ["&" "&&"] @context
+            (function_declarator
+                declarator: (_) @name
+                parameters: (parameter_list
+                    "(" @context
+                    ")" @context)))
     ]
     (type_qualifier)? @context) @item
 
@@ -97,5 +129,21 @@
                 parameters: (parameter_list
                     "(" @context
                     ")" @context)))
+        (pointer_declarator
+            "*" @context
+            declarator: (pointer_declarator
+                "*" @context
+                declarator: (function_declarator
+                    declarator: (_) @name
+                    parameters: (parameter_list
+                        "(" @context
+                        ")" @context))))
+        (reference_declarator
+            ["&" "&&"] @context
+            (function_declarator
+                declarator: (_) @name
+                parameters: (parameter_list
+                    "(" @context
+                    ")" @context)))
     ]
     (type_qualifier)? @context) @item

crates/zed/src/languages/racket/config.toml 🔗

@@ -0,0 +1,9 @@
+name = "Racket"
+path_suffixes = ["rkt"]
+line_comment = "; "
+autoclose_before = "])"
+brackets = [
+    { start = "[", end = "]", close = true, newline = false },
+    { start = "(", end = ")", close = true, newline = false },
+    { start = "\"", end = "\"", close = true, newline = false },
+]

crates/zed/src/languages/racket/highlights.scm 🔗

@@ -0,0 +1,40 @@
+["(" ")" "[" "]" "{" "}"] @punctuation.bracket
+
+[(string)
+ (here_string)
+ (byte_string)] @string
+(regex) @string.special
+(escape_sequence) @escape
+
+[(comment)
+ (block_comment)
+ (sexp_comment)] @comment
+
+(symbol) @variable
+
+(number) @number
+(character) @constant.builtin
+(boolean) @constant.builtin
+(keyword) @constant
+(quote . (symbol)) @constant
+
+(extension) @keyword
+(lang_name) @variable.builtin
+
+((symbol) @operator
+ (#match? @operator "^(\\+|-|\\*|/|=|>|<|>=|<=)$"))
+
+(list
+  .
+  (symbol) @function)
+
+(list
+  .
+  (symbol) @keyword
+  (#match? @keyword

crates/zed/src/languages/ruby/brackets.scm 🔗

@@ -11,4 +11,4 @@
 (begin "begin" @open "end" @close)
 (module "module" @open "end" @close)
 (_ . "def" @open "end" @close)
-(_ . "class" @open "end" @close)
+(_ . "class" @open "end" @close)

crates/zed/src/languages/scheme/config.toml 🔗

@@ -0,0 +1,9 @@
+name = "Scheme"
+path_suffixes = ["scm", "ss"]
+line_comment = "; "
+autoclose_before = "])"
+brackets = [
+    { start = "[", end = "]", close = true, newline = false },
+    { start = "(", end = ")", close = true, newline = false },
+    { start = "\"", end = "\"", close = true, newline = false },
+]

crates/zed/src/languages/scheme/highlights.scm 🔗

@@ -0,0 +1,28 @@
+["(" ")" "[" "]" "{" "}"] @punctuation.bracket
+
+(number) @number
+(character) @constant.builtin
+(boolean) @constant.builtin
+
+(symbol) @variable
+(string) @string
+
+(escape_sequence) @escape
+
+[(comment)
+ (block_comment)
+ (directive)] @comment
+
+((symbol) @operator
+ (#match? @operator "^(\\+|-|\\*|/|=|>|<|>=|<=)$"))
+
+(list
+  .
+  (symbol) @function)
+
+(list
+  .
+  (symbol) @keyword
+  (#match? @keyword
+   "^(define-syntax|let\\*|lambda|λ|case|=>|quote-splicing|unquote-splicing|set!|let|letrec|letrec-syntax|let-values|let\\*-values|do|else|define|cond|syntax-rules|unquote|begin|quote|let-syntax|and|if|quasiquote|letrec|delay|or|when|unless|identifier-syntax|assert|library|export|import|rename|only|except|prefix)$"
+   ))

crates/zed/src/main.rs 🔗

@@ -23,7 +23,7 @@ use isahc::{config::Configurable, Request};
 use language::LanguageRegistry;
 use log::LevelFilter;
 use parking_lot::Mutex;
-use project::{Fs, HomeDir, ProjectStore};
+use project::{Fs, HomeDir};
 use serde_json::json;
 use settings::{
     self, settings_file::SettingsFile, KeymapFileContent, Settings, SettingsFileContent,
@@ -32,17 +32,16 @@ use settings::{
 use smol::process::Command;
 use std::fs::OpenOptions;
 use std::{env, ffi::OsStr, panic, path::PathBuf, sync::Arc, thread, time::Duration};
-use terminal::terminal_container_view::{get_working_directory, TerminalContainer};
+use terminal_view::{get_working_directory, TerminalView};
 
 use fs::RealFs;
 use settings::watched_json::{watch_keymap_file, watch_settings_file, WatchedJsonFile};
 use theme::ThemeRegistry;
-use util::{ResultExt, TryFutureExt};
-use workspace::{self, AppState, ItemHandle, NewFile, OpenPaths, Workspace};
-use zed::{
-    self, build_window_options, initialize_workspace, languages, menus, RELEASE_CHANNEL,
-    RELEASE_CHANNEL_NAME,
+use util::{channel::RELEASE_CHANNEL, paths, ResultExt, TryFutureExt};
+use workspace::{
+    self, item::ItemHandle, notifications::NotifyResultExt, AppState, NewFile, OpenPaths, Workspace,
 };
+use zed::{self, build_window_options, initialize_workspace, languages, menus};
 
 fn main() {
     let http = http::client();
@@ -56,10 +55,6 @@ fn main() {
         .map_or("dev".to_string(), |v| v.to_string());
     init_panic_hook(app_version, http.clone(), app.background());
 
-    let db = app.background().spawn(async move {
-        project::Db::open(&*zed::paths::DB_DIR, RELEASE_CHANNEL_NAME.as_str())
-    });
-
     load_embedded_fonts(&app);
 
     let fs = Arc::new(RealFs);
@@ -91,11 +86,11 @@ fn main() {
 
     app.run(move |cx| {
         cx.set_global(*RELEASE_CHANNEL);
-        cx.set_global(HomeDir(zed::paths::HOME.to_path_buf()));
+        cx.set_global(HomeDir(paths::HOME.to_path_buf()));
 
         let client = client::Client::new(http.clone(), cx);
         let mut languages = LanguageRegistry::new(login_shell_env_loaded);
-        languages.set_language_server_download_dir(zed::paths::LANGUAGES_DIR.clone());
+        languages.set_language_server_download_dir(paths::LANGUAGES_DIR.clone());
         let languages = Arc::new(languages);
         let init_languages = cx
             .background()
@@ -106,7 +101,7 @@ fn main() {
 
         //Setup settings global before binding actions
         cx.set_global(SettingsFile::new(
-            &*zed::paths::SETTINGS,
+            &*paths::SETTINGS,
             settings_file_content.clone(),
             fs.clone(),
         ));
@@ -126,7 +121,7 @@ fn main() {
         diagnostics::init(cx);
         search::init(cx);
         vim::init(cx);
-        terminal::init(cx);
+        terminal_view::init(cx);
         theme_testbench::init(cx);
 
         cx.spawn(|cx| watch_themes(fs.clone(), themes.clone(), cx))
@@ -146,9 +141,7 @@ fn main() {
         })
         .detach();
 
-        let project_store = cx.add_model(|_| ProjectStore::new());
-        let db = cx.background().block(db);
-        client.start_telemetry(db.clone());
+        client.start_telemetry();
         client.report_event("start app", Default::default());
 
         let app_state = Arc::new(AppState {
@@ -156,14 +149,15 @@ fn main() {
             themes,
             client: client.clone(),
             user_store,
-            project_store,
             fs,
             build_window_options,
             initialize_workspace,
-            default_item_factory,
+            dock_default_item_factory,
         });
-        auto_update::init(db, http, cx);
+        auto_update::init(http, client::ZED_SERVER_URL.clone(), cx);
+
         workspace::init(app_state.clone(), cx);
+
         journal::init(app_state.clone(), cx);
         theme_selector::init(app_state.clone(), cx);
         zed::init(&app_state, cx);
@@ -209,25 +203,10 @@ fn main() {
 }
 
 fn init_paths() {
-    std::fs::create_dir_all(&*zed::paths::CONFIG_DIR).expect("could not create config path");
-    std::fs::create_dir_all(&*zed::paths::LANGUAGES_DIR).expect("could not create languages path");
-    std::fs::create_dir_all(&*zed::paths::DB_DIR).expect("could not create database path");
-    std::fs::create_dir_all(&*zed::paths::LOGS_DIR).expect("could not create logs path");
-
-    // Copy setting files from legacy locations. TODO: remove this after a few releases.
-    thread::spawn(|| {
-        if std::fs::metadata(&*zed::paths::legacy::SETTINGS).is_ok()
-            && std::fs::metadata(&*zed::paths::SETTINGS).is_err()
-        {
-            std::fs::copy(&*zed::paths::legacy::SETTINGS, &*zed::paths::SETTINGS).log_err();
-        }
-
-        if std::fs::metadata(&*zed::paths::legacy::KEYMAP).is_ok()
-            && std::fs::metadata(&*zed::paths::KEYMAP).is_err()
-        {
-            std::fs::copy(&*zed::paths::legacy::KEYMAP, &*zed::paths::KEYMAP).log_err();
-        }
-    });
+    std::fs::create_dir_all(&*util::paths::CONFIG_DIR).expect("could not create config path");
+    std::fs::create_dir_all(&*util::paths::LANGUAGES_DIR).expect("could not create languages path");
+    std::fs::create_dir_all(&*util::paths::DB_DIR).expect("could not create database path");
+    std::fs::create_dir_all(&*util::paths::LOGS_DIR).expect("could not create logs path");
 }
 
 fn init_logger() {
@@ -240,16 +219,15 @@ fn init_logger() {
         const KIB: u64 = 1024;
         const MIB: u64 = 1024 * KIB;
         const MAX_LOG_BYTES: u64 = MIB;
-        if std::fs::metadata(&*zed::paths::LOG)
-            .map_or(false, |metadata| metadata.len() > MAX_LOG_BYTES)
+        if std::fs::metadata(&*paths::LOG).map_or(false, |metadata| metadata.len() > MAX_LOG_BYTES)
         {
-            let _ = std::fs::rename(&*zed::paths::LOG, &*zed::paths::OLD_LOG);
+            let _ = std::fs::rename(&*paths::LOG, &*paths::OLD_LOG);
         }
 
         let log_file = OpenOptions::new()
             .create(true)
             .append(true)
-            .open(&*zed::paths::LOG)
+            .open(&*paths::LOG)
             .expect("could not open logfile");
         simplelog::WriteLogger::init(level, simplelog::Config::default(), log_file)
             .expect("could not initialize logger");
@@ -261,7 +239,7 @@ fn init_panic_hook(app_version: String, http: Arc<dyn HttpClient>, background: A
         .spawn({
             async move {
                 let panic_report_url = format!("{}/api/panic", &*client::ZED_SERVER_URL);
-                let mut children = smol::fs::read_dir(&*zed::paths::LOGS_DIR).await?;
+                let mut children = smol::fs::read_dir(&*paths::LOGS_DIR).await?;
                 while let Some(child) = children.next().await {
                     let child = child?;
                     let child_path = child.path();
@@ -349,7 +327,7 @@ fn init_panic_hook(app_version: String, http: Arc<dyn HttpClient>, background: A
 
         let panic_filename = chrono::Utc::now().format("%Y_%m_%d %H_%M_%S").to_string();
         std::fs::write(
-            zed::paths::LOGS_DIR.join(format!("zed-{}-{}.panic", app_version, panic_filename)),
+            paths::LOGS_DIR.join(format!("zed-{}-{}.panic", app_version, panic_filename)),
             &message,
         )
         .context("error writing panic to disk")
@@ -483,8 +461,8 @@ fn load_config_files(
         .clone()
         .spawn(async move {
             let settings_file =
-                WatchedJsonFile::new(fs.clone(), &executor, zed::paths::SETTINGS.clone()).await;
-            let keymap_file = WatchedJsonFile::new(fs, &executor, zed::paths::KEYMAP.clone()).await;
+                WatchedJsonFile::new(fs.clone(), &executor, paths::SETTINGS.clone()).await;
+            let keymap_file = WatchedJsonFile::new(fs, &executor, paths::KEYMAP.clone()).await;
             tx.send((settings_file, keymap_file)).ok()
         })
         .detach();
@@ -605,10 +583,10 @@ async fn handle_cli_connection(
     }
 }
 
-pub fn default_item_factory(
+pub fn dock_default_item_factory(
     workspace: &mut Workspace,
     cx: &mut ViewContext<Workspace>,
-) -> Box<dyn ItemHandle> {
+) -> Option<Box<dyn ItemHandle>> {
     let strategy = cx
         .global::<Settings>()
         .terminal_overrides
@@ -618,6 +596,15 @@ pub fn default_item_factory(
 
     let working_directory = get_working_directory(workspace, cx, strategy);
 
-    let terminal_handle = cx.add_view(|cx| TerminalContainer::new(working_directory, false, cx));
-    Box::new(terminal_handle)
+    let window_id = cx.window_id();
+    let terminal = workspace
+        .project()
+        .update(cx, |project, cx| {
+            project.create_terminal(working_directory, window_id, cx)
+        })
+        .notify_err(workspace, cx)?;
+
+    let terminal_view = cx.add_view(|cx| TerminalView::new(terminal, workspace.database_id(), cx));
+
+    Some(Box::new(terminal_view))
 }

crates/zed/src/zed.rs 🔗

@@ -1,7 +1,6 @@
 mod feedback;
 pub mod languages;
 pub mod menus;
-pub mod paths;
 #[cfg(any(test, feature = "test-support"))]
 pub mod test;
 
@@ -13,7 +12,6 @@ use collab_ui::{CollabTitlebarItem, ToggleCollaborationMenu};
 use collections::VecDeque;
 pub use editor;
 use editor::{Editor, MultiBuffer};
-use lazy_static::lazy_static;
 
 use gpui::{
     actions,
@@ -26,15 +24,16 @@ use gpui::{
     AssetSource, AsyncAppContext, TitlebarOptions, ViewContext, WindowKind,
 };
 use language::Rope;
+use lazy_static::lazy_static;
 pub use lsp;
 pub use project;
 use project_panel::ProjectPanel;
 use search::{BufferSearchBar, ProjectSearchBar};
 use serde::Deserialize;
 use serde_json::to_string_pretty;
-use settings::{keymap_file_json_schema, settings_file_json_schema, ReleaseChannel, Settings};
+use settings::{keymap_file_json_schema, settings_file_json_schema, Settings};
 use std::{env, path::Path, str, sync::Arc};
-use util::ResultExt;
+use util::{channel::ReleaseChannel, paths, ResultExt};
 pub use workspace;
 use workspace::{sidebar::SidebarSide, AppState, Workspace};
 
@@ -82,14 +81,6 @@ lazy_static! {
         .ok()
         .as_deref()
         .and_then(parse_pixel_position_env_var);
-    pub static ref RELEASE_CHANNEL_NAME: String =
-        env::var("ZED_RELEASE_CHANNEL").unwrap_or(include_str!("../RELEASE_CHANNEL").to_string());
-    pub static ref RELEASE_CHANNEL: ReleaseChannel = match RELEASE_CHANNEL_NAME.as_str() {
-        "dev" => ReleaseChannel::Dev,
-        "preview" => ReleaseChannel::Preview,
-        "stable" => ReleaseChannel::Stable,
-        _ => panic!("invalid release channel {}", *RELEASE_CHANNEL_NAME),
-    };
 }
 
 pub fn init(app_state: &Arc<AppState>, cx: &mut gpui::MutableAppContext) {
@@ -348,6 +339,9 @@ pub fn initialize_workspace(
 
     auto_update::notify_of_any_new_update(cx.weak_handle(), cx);
 
+    let window_id = cx.window_id();
+    vim::observe_keypresses(window_id, cx);
+
     cx.on_window_should_close(|workspace, cx| {
         if let Some(task) = workspace.close(&Default::default(), cx) {
             task.detach_and_log_err(cx);
@@ -406,7 +400,7 @@ fn quit(_: &Quit, cx: &mut gpui::MutableAppContext) {
 }
 
 fn about(_: &mut Workspace, _: &About, cx: &mut gpui::ViewContext<Workspace>) {
-    let app_name = cx.global::<ReleaseChannel>().name();
+    let app_name = cx.global::<ReleaseChannel>().display_name();
     let version = env!("CARGO_PKG_VERSION");
     cx.prompt(
         gpui::PromptLevel::Info,
@@ -479,10 +473,11 @@ fn open_config_file(
 
         workspace
             .update(&mut cx, |workspace, cx| {
-                workspace.with_local_workspace(cx, app_state, |workspace, cx| {
+                workspace.with_local_workspace(&app_state, cx, |workspace, cx| {
                     workspace.open_paths(vec![path.to_path_buf()], false, cx)
                 })
             })
+            .await
             .await;
         Ok::<_, anyhow::Error>(())
     })
@@ -496,51 +491,55 @@ fn open_log_file(
 ) {
     const MAX_LINES: usize = 1000;
 
-    workspace.with_local_workspace(cx, app_state.clone(), |_, cx| {
-        cx.spawn_weak(|workspace, mut cx| async move {
-            let (old_log, new_log) = futures::join!(
-                app_state.fs.load(&paths::OLD_LOG),
-                app_state.fs.load(&paths::LOG)
-            );
+    workspace
+        .with_local_workspace(&app_state.clone(), cx, move |_, cx| {
+            cx.spawn_weak(|workspace, mut cx| async move {
+                let (old_log, new_log) = futures::join!(
+                    app_state.fs.load(&paths::OLD_LOG),
+                    app_state.fs.load(&paths::LOG)
+                );
 
-            if let Some(workspace) = workspace.upgrade(&cx) {
-                let mut lines = VecDeque::with_capacity(MAX_LINES);
-                for line in old_log
-                    .iter()
-                    .flat_map(|log| log.lines())
-                    .chain(new_log.iter().flat_map(|log| log.lines()))
-                {
-                    if lines.len() == MAX_LINES {
-                        lines.pop_front();
+                if let Some(workspace) = workspace.upgrade(&cx) {
+                    let mut lines = VecDeque::with_capacity(MAX_LINES);
+                    for line in old_log
+                        .iter()
+                        .flat_map(|log| log.lines())
+                        .chain(new_log.iter().flat_map(|log| log.lines()))
+                    {
+                        if lines.len() == MAX_LINES {
+                            lines.pop_front();
+                        }
+                        lines.push_back(line);
                     }
-                    lines.push_back(line);
-                }
-                let log = lines
-                    .into_iter()
-                    .flat_map(|line| [line, "\n"])
-                    .collect::<String>();
-
-                workspace.update(&mut cx, |workspace, cx| {
-                    let project = workspace.project().clone();
-                    let buffer = project
-                        .update(cx, |project, cx| project.create_buffer("", None, cx))
-                        .expect("creating buffers on a local workspace always succeeds");
-                    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, log)], None, cx));
-
-                    let buffer = cx.add_model(|cx| {
-                        MultiBuffer::singleton(buffer, cx).with_title("Log".into())
+                    let log = lines
+                        .into_iter()
+                        .flat_map(|line| [line, "\n"])
+                        .collect::<String>();
+
+                    workspace.update(&mut cx, |workspace, cx| {
+                        let project = workspace.project().clone();
+                        let buffer = project
+                            .update(cx, |project, cx| project.create_buffer("", None, cx))
+                            .expect("creating buffers on a local workspace always succeeds");
+                        buffer.update(cx, |buffer, cx| buffer.edit([(0..0, log)], None, cx));
+
+                        let buffer = cx.add_model(|cx| {
+                            MultiBuffer::singleton(buffer, cx).with_title("Log".into())
+                        });
+                        workspace.add_item(
+                            Box::new(
+                                cx.add_view(|cx| {
+                                    Editor::for_multibuffer(buffer, Some(project), cx)
+                                }),
+                            ),
+                            cx,
+                        );
                     });
-                    workspace.add_item(
-                        Box::new(
-                            cx.add_view(|cx| Editor::for_multibuffer(buffer, Some(project), cx)),
-                        ),
-                        cx,
-                    );
-                });
-            }
+                }
+            })
+            .detach();
         })
         .detach();
-    });
 }
 
 fn open_telemetry_log_file(
@@ -548,7 +547,7 @@ fn open_telemetry_log_file(
     app_state: Arc<AppState>,
     cx: &mut ViewContext<Workspace>,
 ) {
-    workspace.with_local_workspace(cx, app_state.clone(), |_, cx| {
+    workspace.with_local_workspace(&app_state.clone(), cx, move |_, cx| {
         cx.spawn_weak(|workspace, mut cx| async move {
             let workspace = workspace.upgrade(&cx)?;
             let path = app_state.client.telemetry_log_file_path()?;
@@ -596,31 +595,36 @@ fn open_telemetry_log_file(
             Some(())
         })
         .detach();
-    });
+    }).detach();
 }
 
 fn open_bundled_config_file(
     workspace: &mut Workspace,
     app_state: Arc<AppState>,
     asset_path: &'static str,
-    title: &str,
+    title: &'static str,
     cx: &mut ViewContext<Workspace>,
 ) {
-    workspace.with_local_workspace(cx, app_state, |workspace, cx| {
-        let project = workspace.project().clone();
-        let buffer = project.update(cx, |project, cx| {
-            let text = Assets::get(asset_path).unwrap().data;
-            let text = str::from_utf8(text.as_ref()).unwrap();
-            project
-                .create_buffer(text, project.languages().get_language("JSON"), cx)
-                .expect("creating buffers on a local workspace always succeeds")
-        });
-        let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx).with_title(title.into()));
-        workspace.add_item(
-            Box::new(cx.add_view(|cx| Editor::for_multibuffer(buffer, Some(project.clone()), cx))),
-            cx,
-        );
-    });
+    workspace
+        .with_local_workspace(&app_state.clone(), cx, |workspace, cx| {
+            let project = workspace.project().clone();
+            let buffer = project.update(cx, |project, cx| {
+                let text = Assets::get(asset_path).unwrap().data;
+                let text = str::from_utf8(text.as_ref()).unwrap();
+                project
+                    .create_buffer(text, project.languages().get_language("JSON"), cx)
+                    .expect("creating buffers on a local workspace always succeeds")
+            });
+            let buffer =
+                cx.add_model(|cx| MultiBuffer::singleton(buffer, cx).with_title(title.into()));
+            workspace.add_item(
+                Box::new(
+                    cx.add_view(|cx| Editor::for_multibuffer(buffer, Some(project.clone()), cx)),
+                ),
+                cx,
+            );
+        })
+        .detach();
 }
 
 fn schema_file_match(path: &Path) -> &Path {
@@ -639,7 +643,7 @@ fn parse_pixel_position_env_var(value: &str) -> Option<Vector2F> {
 mod tests {
     use super::*;
     use assets::Assets;
-    use editor::{Autoscroll, DisplayPoint, Editor};
+    use editor::{scroll::autoscroll::Autoscroll, DisplayPoint, Editor};
     use gpui::{
         executor::Deterministic, AssetSource, MutableAppContext, TestAppContext, ViewHandle,
     };
@@ -651,7 +655,8 @@ mod tests {
     };
     use theme::ThemeRegistry;
     use workspace::{
-        open_paths, pane, Item, ItemHandle, NewFile, Pane, SplitDirection, WorkspaceHandle,
+        item::{Item, ItemHandle},
+        open_new, open_paths, pane, NewFile, Pane, SplitDirection, WorkspaceHandle,
     };
 
     #[gpui::test]
@@ -787,7 +792,8 @@ mod tests {
     #[gpui::test]
     async fn test_new_empty_workspace(cx: &mut TestAppContext) {
         let app_state = init(cx);
-        cx.dispatch_global_action(workspace::NewFile);
+        cx.update(|cx| open_new(&app_state, cx)).await;
+
         let window_id = *cx.window_ids().first().unwrap();
         let workspace = cx.root_view::<Workspace>(window_id).unwrap();
         let editor = workspace.update(cx, |workspace, cx| {
@@ -831,8 +837,9 @@ mod tests {
             .await;
 
         let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
-        let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
 
         let entries = cx.read(|cx| workspace.file_project_paths(cx));
         let file1 = entries[0].clone();
@@ -951,8 +958,9 @@ mod tests {
             .await;
 
         let project = Project::test(app_state.fs.clone(), ["/dir1".as_ref()], cx).await;
-        let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
 
         // Open a file within an existing worktree.
         cx.update(|cx| {
@@ -1111,8 +1119,9 @@ mod tests {
             .await;
 
         let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
-        let (window_id, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (window_id, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
 
         // Open a file within an existing worktree.
         cx.update(|cx| {
@@ -1154,8 +1163,9 @@ mod tests {
 
         let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
         project.update(cx, |project, _| project.languages().add(rust_lang()));
-        let (window_id, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (window_id, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
         let worktree = cx.read(|cx| workspace.read(cx).worktrees(cx).next().unwrap());
 
         // Create a new untitled buffer
@@ -1244,8 +1254,9 @@ mod tests {
 
         let project = Project::test(app_state.fs.clone(), [], cx).await;
         project.update(cx, |project, _| project.languages().add(rust_lang()));
-        let (window_id, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (window_id, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
 
         // Create a new untitled buffer
         cx.dispatch_action(window_id, NewFile);
@@ -1298,8 +1309,9 @@ mod tests {
             .await;
 
         let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
-        let (window_id, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (window_id, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
 
         let entries = cx.read(|cx| workspace.file_project_paths(cx));
         let file1 = entries[0].clone();
@@ -1373,8 +1385,15 @@ mod tests {
             .await;
 
         let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
-        let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project.clone(), |_, _| unimplemented!(), cx));
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(
+                Default::default(),
+                0,
+                project.clone(),
+                |_, _| unimplemented!(),
+                cx,
+            )
+        });
 
         let entries = cx.read(|cx| workspace.file_project_paths(cx));
         let file1 = entries[0].clone();
@@ -1638,8 +1657,15 @@ mod tests {
             .await;
 
         let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
-        let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project.clone(), |_, _| unimplemented!(), cx));
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(
+                Default::default(),
+                0,
+                project.clone(),
+                |_, _| unimplemented!(),
+                cx,
+            )
+        });
         let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
 
         let entries = cx.read(|cx| workspace.file_project_paths(cx));

styles/src/styleTree/app.ts 🔗

@@ -12,6 +12,7 @@ import sharedScreen from "./sharedScreen";
 import projectDiagnostics from "./projectDiagnostics";
 import contactNotification from "./contactNotification";
 import updateNotification from "./updateNotification";
+import simpleMessageNotification from "./simpleMessageNotification";
 import projectSharedNotification from "./projectSharedNotification";
 import tooltip from "./tooltip";
 import terminal from "./terminal";
@@ -47,6 +48,7 @@ export default function app(colorScheme: ColorScheme): Object {
       },
     },
     updateNotification: updateNotification(colorScheme),
+    simpleMessageNotification: simpleMessageNotification(colorScheme),
     tooltip: tooltip(colorScheme),
     terminal: terminal(colorScheme),
     colorScheme: {

styles/src/styleTree/simpleMessageNotification.ts 🔗

@@ -0,0 +1,31 @@
+import { ColorScheme } from "../themes/common/colorScheme";
+import { foreground, text } from "./components";
+
+const headerPadding = 8;
+
+export default function simpleMessageNotification(colorScheme: ColorScheme): Object {
+  let layer = colorScheme.middle;
+  return {
+    message: {
+      ...text(layer, "sans", { size: "md" }),
+      margin: { left: headerPadding, right: headerPadding },
+    },
+    actionMessage: {
+      ...text(layer, "sans", { size: "md" }),
+      margin: { left: headerPadding, top: 6, bottom: 6 },
+      hover: {
+        color: foreground(layer, "hovered"),
+      },
+    },
+    dismissButton: {
+      color: foreground(layer),
+      iconWidth: 8,
+      iconHeight: 8,
+      buttonWidth: 8,
+      buttonHeight: 8,
+      hover: {
+        color: foreground(layer, "hovered"),
+      },
+    },
+  };
+}