Fix shared database test isolation (#51809)

Created by Mikayla Maki, Anthony Eid, and Max Brunsfeld.

All of the important changes are in
[`db.rs`](https://github.com/zed-industries/zed/pull/51809/changes#diff-2f644eab943bfa58feec29256281a3d9e8d4d7784cd34783e845af8beb15b16d).
Consider reading the commit log in order to review this work.

The DB crate's macro and API was changed to fix flakiness observed in
the MultiWorkspace tests when run locally. This flakiness was caused by
a shared `static LazyLock` that caused concurrent test runs to interact
with the same underlying in-memory database. This flakiness wasn't
possible on CI due to its usage of `cargo nextest`, whose
process-per-test approach masked this problem.

Essentially, I've changed the `static_connection` macro to remove the
static database variable and redo the internal model. Now, all
database types are thin wrappers around a generic `AppDatabase`. The
`AppDatabase` collects all of the individual table's migrations via the
`inventory` crate, and so only runs the migrations once on startup,
rather than a dozen times.

The new API requires a `cx` so that we can replace the database returned
at runtime, rather than relying exclusively on a process-global
thread-local. However, we are still using a `static LazyLock` so that we
only need to take an `&App`, instead of an `&mut App`. These database
types are `Clone + Send + Sync`, so you can easily capture-and-move the
database into background tasks and other places that don't have a `cx`.

Tests that require database isolation can now set their own database
in `init`. See
[`workspace::init_test`](https://github.com/zed-industries/zed/pull/51809/changes#diff-041673bbd1947a35d45945636c0055429dfc8b5985faf93f8a8a960c9ad31e28R13610),
for the flakiness fix.

The best part is that this change should be entirely compiler-driven, so the Zed
agent was able to make the app-wide refactor easily.

Before you mark this PR as ready for review, make sure that you have:
- [x] Added a solid test coverage and/or screenshots from doing manual
testing
- [x] Done a self-review taking into account security and performance
aspects
- [x] Aligned any UI changes with the [UI
checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist)

Release Notes:

- N/A

---------

Co-authored-by: Anthony Eid <hello@anthonyeid.me>
Co-authored-by: Max Brunsfeld <maxbrunsfeld@gmail.com>

Change summary

Cargo.lock                                                 |   2 
crates/agent_ui/src/agent_panel.rs                         |  57 +-
crates/agent_ui/src/thread_metadata_store.rs               |   5 
crates/auto_update/src/auto_update.rs                      |  22 
crates/collab_ui/src/collab_panel.rs                       |  45 +-
crates/collab_ui/src/notification_panel.rs                 |  31 
crates/command_palette/src/command_palette.rs              |  66 +-
crates/command_palette/src/persistence.rs                  |   2 
crates/component_preview/examples/component_preview.rs     |   5 
crates/component_preview/src/component_preview.rs          |  10 
crates/component_preview/src/persistence.rs                |   2 
crates/db/Cargo.toml                                       |   2 
crates/db/src/db.rs                                        | 142 ++++++-
crates/db/src/kvp.rs                                       |  38 +
crates/debugger_ui/src/debugger_panel.rs                   |   7 
crates/debugger_ui/src/persistence.rs                      |  20 
crates/debugger_ui/src/session/running.rs                  |  11 
crates/debugger_ui/src/session/running/breakpoint_list.rs  |   7 
crates/debugger_ui/src/session/running/stack_frame_list.rs |  10 
crates/debugger_ui/src/tests/stack_frame_list.rs           |   7 
crates/edit_prediction/src/edit_prediction.rs              |  26 
crates/edit_prediction_ui/src/edit_prediction_button.rs    |   2 
crates/editor/src/editor.rs                                |  27 
crates/editor/src/items.rs                                 |  64 ++-
crates/editor/src/persistence.rs                           |  63 ++-
crates/editor/src/scroll.rs                                |   7 
crates/extensions_ui/src/extension_suggest.rs              |  14 
crates/git_graph/src/git_graph.rs                          |  18 
crates/git_ui/src/git_panel.rs                             |  30 
crates/git_ui/src/project_diff.rs                          |   9 
crates/image_viewer/src/image_viewer.rs                    |  21 
crates/keymap_editor/src/keymap_editor.rs                  |  28 -
crates/language_onboarding/src/python.rs                   |   2 
crates/onboarding/src/multibuffer_hint.rs                  |  22 
crates/onboarding/src/onboarding.rs                        |  28 
crates/outline_panel/src/outline_panel.rs                  |  36 +
crates/project_panel/src/project_panel.rs                  |  36 +
crates/recent_projects/src/dev_container_suggest.rs        |  15 
crates/recent_projects/src/recent_projects.rs              |  20 
crates/session/src/session.rs                              |  58 +-
crates/terminal_view/src/persistence.rs                    |   2 
crates/terminal_view/src/terminal_panel.rs                 |  31 
crates/terminal_view/src/terminal_view.rs                  |  27 
crates/title_bar/src/onboarding_banner.rs                  |  14 
crates/toolchain_selector/src/active_toolchain.rs          |  18 
crates/toolchain_selector/src/toolchain_selector.rs        |  18 
crates/vim/src/state.rs                                    |  19 
crates/workspace/src/history_manager.rs                    |   9 
crates/workspace/src/multi_workspace.rs                    |  14 
crates/workspace/src/persistence.rs                        | 121 +++--
crates/workspace/src/welcome.rs                            |  21 
crates/workspace/src/workspace.rs                          | 107 +++--
crates/zed/src/main.rs                                     |  56 +-
crates/zed/src/zed.rs                                      |  15 
crates/zed/src/zed/open_listener.rs                        |   5 
55 files changed, 887 insertions(+), 607 deletions(-)

Detailed changes

Cargo.lock 🔗

@@ -4618,6 +4618,7 @@ dependencies = [
  "anyhow",
  "gpui",
  "indoc",
+ "inventory",
  "log",
  "paths",
  "release_channel",
@@ -4626,6 +4627,7 @@ dependencies = [
  "sqlez_macros",
  "tempfile",
  "util",
+ "uuid",
  "zed_env_vars",
 ]
 

crates/agent_ui/src/agent_panel.rs 🔗

@@ -14,7 +14,7 @@ use agent::{ContextServerRegistry, SharedThread, ThreadStore};
 use agent_client_protocol as acp;
 use agent_servers::AgentServer;
 use collections::HashSet;
-use db::kvp::{Dismissable, KEY_VALUE_STORE};
+use db::kvp::{Dismissable, KeyValueStore};
 use itertools::Itertools;
 use project::AgentId;
 use serde::{Deserialize, Serialize};
@@ -95,8 +95,11 @@ const AGENT_PANEL_KEY: &str = "agent_panel";
 const RECENTLY_UPDATED_MENU_LIMIT: usize = 6;
 const DEFAULT_THREAD_TITLE: &str = "New Thread";
 
-fn read_serialized_panel(workspace_id: workspace::WorkspaceId) -> Option<SerializedAgentPanel> {
-    let scope = KEY_VALUE_STORE.scoped(AGENT_PANEL_KEY);
+fn read_serialized_panel(
+    workspace_id: workspace::WorkspaceId,
+    kvp: &KeyValueStore,
+) -> Option<SerializedAgentPanel> {
+    let scope = kvp.scoped(AGENT_PANEL_KEY);
     let key = i64::from(workspace_id).to_string();
     scope
         .read(&key)
@@ -108,8 +111,9 @@ fn read_serialized_panel(workspace_id: workspace::WorkspaceId) -> Option<Seriali
 async fn save_serialized_panel(
     workspace_id: workspace::WorkspaceId,
     panel: SerializedAgentPanel,
+    kvp: KeyValueStore,
 ) -> Result<()> {
-    let scope = KEY_VALUE_STORE.scoped(AGENT_PANEL_KEY);
+    let scope = kvp.scoped(AGENT_PANEL_KEY);
     let key = i64::from(workspace_id).to_string();
     scope.write(key, serde_json::to_string(&panel)?).await?;
     Ok(())
@@ -117,9 +121,8 @@ async fn save_serialized_panel(
 
 /// Migration: reads the original single-panel format stored under the
 /// `"agent_panel"` KVP key before per-workspace keying was introduced.
-fn read_legacy_serialized_panel() -> Option<SerializedAgentPanel> {
-    KEY_VALUE_STORE
-        .read_kvp(AGENT_PANEL_KEY)
+fn read_legacy_serialized_panel(kvp: &KeyValueStore) -> Option<SerializedAgentPanel> {
+    kvp.read_kvp(AGENT_PANEL_KEY)
         .log_err()
         .flatten()
         .and_then(|json| serde_json::from_str::<SerializedAgentPanel>(&json).log_err())
@@ -782,6 +785,7 @@ impl AgentPanel {
             }
         });
 
+        let kvp = KeyValueStore::global(cx);
         self.pending_serialization = Some(cx.background_spawn(async move {
             save_serialized_panel(
                 workspace_id,
@@ -791,6 +795,7 @@ impl AgentPanel {
                     last_active_thread,
                     start_thread_in,
                 },
+                kvp,
             )
             .await?;
             anyhow::Ok(())
@@ -803,6 +808,7 @@ impl AgentPanel {
         mut cx: AsyncWindowContext,
     ) -> Task<Result<Entity<Self>>> {
         let prompt_store = cx.update(|_window, cx| PromptStore::global(cx));
+        let kvp = cx.update(|_window, cx| KeyValueStore::global(cx)).ok();
         cx.spawn(async move |cx| {
             let prompt_store = match prompt_store {
                 Ok(prompt_store) => prompt_store.await.ok(),
@@ -815,9 +821,11 @@ impl AgentPanel {
 
             let serialized_panel = cx
                 .background_spawn(async move {
-                    workspace_id
-                        .and_then(read_serialized_panel)
-                        .or_else(read_legacy_serialized_panel)
+                    kvp.and_then(|kvp| {
+                        workspace_id
+                            .and_then(|id| read_serialized_panel(id, &kvp))
+                            .or_else(|| read_legacy_serialized_panel(&kvp))
+                    })
                 })
                 .await;
 
@@ -1089,7 +1097,7 @@ impl AgentPanel {
             _worktree_creation_task: None,
             show_trust_workspace_message: false,
             last_configuration_error_telemetry: None,
-            on_boarding_upsell_dismissed: AtomicBool::new(OnboardingUpsell::dismissed()),
+            on_boarding_upsell_dismissed: AtomicBool::new(OnboardingUpsell::dismissed(cx)),
             _active_view_observation: None,
         };
 
@@ -1308,16 +1316,17 @@ impl AgentPanel {
         }
 
         let thread_store = self.thread_store.clone();
+        let kvp = KeyValueStore::global(cx);
 
         if let Some(agent) = agent_choice {
             cx.background_spawn({
                 let agent = agent.clone();
+                let kvp = kvp;
                 async move {
                     if let Some(serialized) =
                         serde_json::to_string(&LastUsedExternalAgent { agent }).log_err()
                     {
-                        KEY_VALUE_STORE
-                            .write_kvp(LAST_USED_EXTERNAL_AGENT_KEY.to_string(), serialized)
+                        kvp.write_kvp(LAST_USED_EXTERNAL_AGENT_KEY.to_string(), serialized)
                             .await
                             .log_err();
                     }
@@ -1344,17 +1353,15 @@ impl AgentPanel {
                 let ext_agent = if is_via_collab {
                     Agent::NativeAgent
                 } else {
-                    cx.background_spawn(async move {
-                        KEY_VALUE_STORE.read_kvp(LAST_USED_EXTERNAL_AGENT_KEY)
-                    })
-                    .await
-                    .log_err()
-                    .flatten()
-                    .and_then(|value| {
-                        serde_json::from_str::<LastUsedExternalAgent>(&value).log_err()
-                    })
-                    .map(|agent| agent.agent)
-                    .unwrap_or(Agent::NativeAgent)
+                    cx.background_spawn(async move { kvp.read_kvp(LAST_USED_EXTERNAL_AGENT_KEY) })
+                        .await
+                        .log_err()
+                        .flatten()
+                        .and_then(|value| {
+                            serde_json::from_str::<LastUsedExternalAgent>(&value).log_err()
+                        })
+                        .map(|agent| agent.agent)
+                        .unwrap_or(Agent::NativeAgent)
                 };
 
                 let server = ext_agent.server(fs, thread_store);
@@ -4139,7 +4146,7 @@ impl AgentPanel {
     }
 
     fn should_render_trial_end_upsell(&self, cx: &mut Context<Self>) -> bool {
-        if TrialEndUpsell::dismissed() {
+        if TrialEndUpsell::dismissed(cx) {
             return false;
         }
 

crates/agent_ui/src/thread_metadata_store.rs 🔗

@@ -95,7 +95,7 @@ impl ThreadMetadataStore {
             return;
         }
 
-        let db = THREAD_METADATA_DB.clone();
+        let db = ThreadMetadataDb::global(cx);
         let thread_store = cx.new(|cx| Self::new(db, cx));
         cx.set_global(GlobalThreadMetadataStore(thread_store));
     }
@@ -251,7 +251,6 @@ impl ThreadMetadataStore {
 
 impl Global for ThreadMetadataStore {}
 
-#[derive(Clone)]
 struct ThreadMetadataDb(ThreadSafeConnection);
 
 impl Domain for ThreadMetadataDb {
@@ -270,7 +269,7 @@ impl Domain for ThreadMetadataDb {
     )];
 }
 
-db::static_connection!(THREAD_METADATA_DB, ThreadMetadataDb, []);
+db::static_connection!(ThreadMetadataDb, []);
 
 impl ThreadMetadataDb {
     /// List all sidebar thread metadata, ordered by updated_at descending.

crates/auto_update/src/auto_update.rs 🔗

@@ -1,6 +1,6 @@
 use anyhow::{Context as _, Result};
 use client::Client;
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
 use futures_lite::StreamExt;
 use gpui::{
     App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, Global, Task, Window,
@@ -770,17 +770,16 @@ impl AutoUpdater {
         should_show: bool,
         cx: &App,
     ) -> Task<Result<()>> {
+        let kvp = KeyValueStore::global(cx);
         cx.background_spawn(async move {
             if should_show {
-                KEY_VALUE_STORE
-                    .write_kvp(
-                        SHOULD_SHOW_UPDATE_NOTIFICATION_KEY.to_string(),
-                        "".to_string(),
-                    )
-                    .await?;
+                kvp.write_kvp(
+                    SHOULD_SHOW_UPDATE_NOTIFICATION_KEY.to_string(),
+                    "".to_string(),
+                )
+                .await?;
             } else {
-                KEY_VALUE_STORE
-                    .delete_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY.to_string())
+                kvp.delete_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY.to_string())
                     .await?;
             }
             Ok(())
@@ -788,10 +787,9 @@ impl AutoUpdater {
     }
 
     pub fn should_show_update_notification(&self, cx: &App) -> Task<Result<bool>> {
+        let kvp = KeyValueStore::global(cx);
         cx.background_spawn(async move {
-            Ok(KEY_VALUE_STORE
-                .read_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY)?
-                .is_some())
+            Ok(kvp.read_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY)?.is_some())
         })
     }
 }

crates/collab_ui/src/collab_panel.rs 🔗

@@ -9,7 +9,7 @@ use channel::{Channel, ChannelEvent, ChannelStore};
 use client::{ChannelId, Client, Contact, User, UserStore};
 use collections::{HashMap, HashSet};
 use contact_finder::ContactFinder;
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
 use editor::{Editor, EditorElement, EditorStyle};
 use fuzzy::{StringMatch, StringMatchCandidate, match_strings};
 use gpui::{
@@ -429,16 +429,17 @@ impl CollabPanel {
             .ok()
             .flatten()
         {
-            Some(serialization_key) => cx
-                .background_spawn(async move { KEY_VALUE_STORE.read_kvp(&serialization_key) })
-                .await
-                .context("reading collaboration panel from key value store")
-                .log_err()
-                .flatten()
-                .map(|panel| serde_json::from_str::<SerializedCollabPanel>(&panel))
-                .transpose()
-                .log_err()
-                .flatten(),
+            Some(serialization_key) => {
+                let kvp = cx.update(|_, cx| KeyValueStore::global(cx))?;
+                kvp.read_kvp(&serialization_key)
+                    .context("reading collaboration panel from key value store")
+                    .log_err()
+                    .flatten()
+                    .map(|panel| serde_json::from_str::<SerializedCollabPanel>(&panel))
+                    .transpose()
+                    .log_err()
+                    .flatten()
+            }
             None => None,
         };
 
@@ -479,19 +480,19 @@ impl CollabPanel {
         };
         let width = self.width;
         let collapsed_channels = self.collapsed_channels.clone();
+        let kvp = KeyValueStore::global(cx);
         self.pending_serialization = cx.background_spawn(
             async move {
-                KEY_VALUE_STORE
-                    .write_kvp(
-                        serialization_key,
-                        serde_json::to_string(&SerializedCollabPanel {
-                            width,
-                            collapsed_channels: Some(
-                                collapsed_channels.iter().map(|cid| cid.0).collect(),
-                            ),
-                        })?,
-                    )
-                    .await?;
+                kvp.write_kvp(
+                    serialization_key,
+                    serde_json::to_string(&SerializedCollabPanel {
+                        width,
+                        collapsed_channels: Some(
+                            collapsed_channels.iter().map(|cid| cid.0).collect(),
+                        ),
+                    })?,
+                )
+                .await?;
                 anyhow::Ok(())
             }
             .log_err(),

crates/collab_ui/src/notification_panel.rs 🔗

@@ -3,7 +3,7 @@ use anyhow::Result;
 use channel::ChannelStore;
 use client::{ChannelId, Client, Notification, User, UserStore};
 use collections::HashMap;
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
 use futures::StreamExt;
 use gpui::{
     AnyElement, App, AsyncWindowContext, ClickEvent, Context, DismissEvent, Element, Entity,
@@ -186,16 +186,13 @@ impl NotificationPanel {
         cx: AsyncWindowContext,
     ) -> Task<Result<Entity<Self>>> {
         cx.spawn(async move |cx| {
-            let serialized_panel = if let Some(panel) = cx
-                .background_spawn(async move { KEY_VALUE_STORE.read_kvp(NOTIFICATION_PANEL_KEY) })
-                .await
-                .log_err()
-                .flatten()
-            {
-                Some(serde_json::from_str::<SerializedNotificationPanel>(&panel)?)
-            } else {
-                None
-            };
+            let kvp = cx.update(|_, cx| KeyValueStore::global(cx))?;
+            let serialized_panel =
+                if let Some(panel) = kvp.read_kvp(NOTIFICATION_PANEL_KEY).log_err().flatten() {
+                    Some(serde_json::from_str::<SerializedNotificationPanel>(&panel)?)
+                } else {
+                    None
+                };
 
             workspace.update_in(cx, |workspace, window, cx| {
                 let panel = Self::new(workspace, window, cx);
@@ -212,14 +209,14 @@ impl NotificationPanel {
 
     fn serialize(&mut self, cx: &mut Context<Self>) {
         let width = self.width;
+        let kvp = KeyValueStore::global(cx);
         self.pending_serialization = cx.background_spawn(
             async move {
-                KEY_VALUE_STORE
-                    .write_kvp(
-                        NOTIFICATION_PANEL_KEY.into(),
-                        serde_json::to_string(&SerializedNotificationPanel { width })?,
-                    )
-                    .await?;
+                kvp.write_kvp(
+                    NOTIFICATION_PANEL_KEY.into(),
+                    serde_json::to_string(&SerializedNotificationPanel { width })?,
+                )
+                .await?;
                 anyhow::Ok(())
             }
             .log_err(),

crates/command_palette/src/command_palette.rs 🔗

@@ -18,7 +18,7 @@ use gpui::{
     Action, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable,
     ParentElement, Render, Styled, Task, WeakEntity, Window,
 };
-use persistence::COMMAND_PALETTE_HISTORY;
+use persistence::CommandPaletteDB;
 use picker::Direction;
 use picker::{Picker, PickerDelegate};
 use postage::{sink::Sink, stream::Stream};
@@ -180,9 +180,9 @@ struct QueryHistory {
 }
 
 impl QueryHistory {
-    fn history(&mut self) -> &mut VecDeque<String> {
+    fn history(&mut self, cx: &App) -> &mut VecDeque<String> {
         self.history.get_or_insert_with(|| {
-            COMMAND_PALETTE_HISTORY
+            CommandPaletteDB::global(cx)
                 .list_recent_queries()
                 .unwrap_or_default()
                 .into_iter()
@@ -190,18 +190,18 @@ impl QueryHistory {
         })
     }
 
-    fn add(&mut self, query: String) {
-        if let Some(pos) = self.history().iter().position(|h| h == &query) {
-            self.history().remove(pos);
+    fn add(&mut self, query: String, cx: &App) {
+        if let Some(pos) = self.history(cx).iter().position(|h| h == &query) {
+            self.history(cx).remove(pos);
         }
-        self.history().push_back(query);
+        self.history(cx).push_back(query);
         self.cursor = None;
         self.prefix = None;
     }
 
-    fn validate_cursor(&mut self, current_query: &str) -> Option<usize> {
+    fn validate_cursor(&mut self, current_query: &str, cx: &App) -> Option<usize> {
         if let Some(pos) = self.cursor {
-            if self.history().get(pos).map(|s| s.as_str()) != Some(current_query) {
+            if self.history(cx).get(pos).map(|s| s.as_str()) != Some(current_query) {
                 self.cursor = None;
                 self.prefix = None;
             }
@@ -209,39 +209,39 @@ impl QueryHistory {
         self.cursor
     }
 
-    fn previous(&mut self, current_query: &str) -> Option<&str> {
-        if self.validate_cursor(current_query).is_none() {
+    fn previous(&mut self, current_query: &str, cx: &App) -> Option<&str> {
+        if self.validate_cursor(current_query, cx).is_none() {
             self.prefix = Some(current_query.to_string());
         }
 
         let prefix = self.prefix.clone().unwrap_or_default();
-        let start_index = self.cursor.unwrap_or(self.history().len());
+        let start_index = self.cursor.unwrap_or(self.history(cx).len());
 
         for i in (0..start_index).rev() {
             if self
-                .history()
+                .history(cx)
                 .get(i)
                 .is_some_and(|e| e.starts_with(&prefix))
             {
                 self.cursor = Some(i);
-                return self.history().get(i).map(|s| s.as_str());
+                return self.history(cx).get(i).map(|s| s.as_str());
             }
         }
         None
     }
 
-    fn next(&mut self, current_query: &str) -> Option<&str> {
-        let selected = self.validate_cursor(current_query)?;
+    fn next(&mut self, current_query: &str, cx: &App) -> Option<&str> {
+        let selected = self.validate_cursor(current_query, cx)?;
         let prefix = self.prefix.clone().unwrap_or_default();
 
-        for i in (selected + 1)..self.history().len() {
+        for i in (selected + 1)..self.history(cx).len() {
             if self
-                .history()
+                .history(cx)
                 .get(i)
                 .is_some_and(|e| e.starts_with(&prefix))
             {
                 self.cursor = Some(i);
-                return self.history().get(i).map(|s| s.as_str());
+                return self.history(cx).get(i).map(|s| s.as_str());
             }
         }
         None
@@ -338,8 +338,8 @@ impl CommandPaletteDelegate {
     /// Hit count for each command in the palette.
     /// We only account for commands triggered directly via command palette and not by e.g. keystrokes because
     /// if a user already knows a keystroke for a command, they are unlikely to use a command palette to look for it.
-    fn hit_counts(&self) -> HashMap<String, u16> {
-        if let Ok(commands) = COMMAND_PALETTE_HISTORY.list_commands_used() {
+    fn hit_counts(&self, cx: &App) -> HashMap<String, u16> {
+        if let Ok(commands) = CommandPaletteDB::global(cx).list_commands_used() {
             commands
                 .into_iter()
                 .map(|command| (command.command_name, command.invocations))
@@ -378,21 +378,25 @@ impl PickerDelegate for CommandPaletteDelegate {
         direction: Direction,
         query: &str,
         _window: &mut Window,
-        _cx: &mut App,
+        cx: &mut App,
     ) -> Option<String> {
         match direction {
             Direction::Up => {
                 let should_use_history =
                     self.selected_ix == 0 || self.query_history.is_navigating();
                 if should_use_history {
-                    if let Some(query) = self.query_history.previous(query).map(|s| s.to_string()) {
+                    if let Some(query) = self
+                        .query_history
+                        .previous(query, cx)
+                        .map(|s| s.to_string())
+                    {
                         return Some(query);
                     }
                 }
             }
             Direction::Down => {
                 if self.query_history.is_navigating() {
-                    if let Some(query) = self.query_history.next(query).map(|s| s.to_string()) {
+                    if let Some(query) = self.query_history.next(query, cx).map(|s| s.to_string()) {
                         return Some(query);
                     } else {
                         let prefix = self.query_history.prefix.take().unwrap_or_default();
@@ -444,7 +448,7 @@ impl PickerDelegate for CommandPaletteDelegate {
 
         let task = cx.background_spawn({
             let mut commands = self.all_commands.clone();
-            let hit_counts = self.hit_counts();
+            let hit_counts = self.hit_counts(cx);
             let executor = cx.background_executor().clone();
             let query = normalize_action_query(query_str);
             let query_for_link = query_str.to_string();
@@ -566,7 +570,7 @@ impl PickerDelegate for CommandPaletteDelegate {
         }
 
         if !self.latest_query.is_empty() {
-            self.query_history.add(self.latest_query.clone());
+            self.query_history.add(self.latest_query.clone(), cx);
             self.query_history.reset_cursor();
         }
 
@@ -581,9 +585,9 @@ impl PickerDelegate for CommandPaletteDelegate {
         self.commands.clear();
         let command_name = command.name.clone();
         let latest_query = self.latest_query.clone();
+        let db = CommandPaletteDB::global(cx);
         cx.background_spawn(async move {
-            COMMAND_PALETTE_HISTORY
-                .write_command_invocation(command_name, latest_query)
+            db.write_command_invocation(command_name, latest_query)
                 .await
         })
         .detach_and_log_err(cx);
@@ -771,11 +775,9 @@ mod tests {
 
     #[gpui::test]
     async fn test_command_palette(cx: &mut TestAppContext) {
-        persistence::COMMAND_PALETTE_HISTORY
-            .clear_all()
-            .await
-            .unwrap();
         let app_state = init_test(cx);
+        let db = cx.update(|cx| persistence::CommandPaletteDB::global(cx));
+        db.clear_all().await.unwrap();
         let project = Project::test(app_state.fs.clone(), [], cx).await;
         let (multi_workspace, cx) =
             cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));

crates/command_palette/src/persistence.rs 🔗

@@ -69,7 +69,7 @@ impl Domain for CommandPaletteDB {
     )];
 }
 
-db::static_connection!(COMMAND_PALETTE_HISTORY, CommandPaletteDB, []);
+db::static_connection!(CommandPaletteDB, []);
 
 impl CommandPaletteDB {
     pub async fn write_command_invocation(

crates/component_preview/examples/component_preview.rs 🔗

@@ -48,7 +48,10 @@ fn main() {
         let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
         let workspace_store = cx.new(|cx| WorkspaceStore::new(client.clone(), cx));
         let session_id = uuid::Uuid::new_v4().to_string();
-        let session = cx.foreground_executor().block_on(Session::new(session_id));
+        let kvp = db::kvp::KeyValueStore::global(cx);
+        let session = cx
+            .foreground_executor()
+            .block_on(Session::new(session_id, kvp));
         let session = cx.new(|cx| AppSession::new(session, cx));
         let node_runtime = NodeRuntime::unavailable();
 

crates/component_preview/src/component_preview.rs 🔗

@@ -9,7 +9,7 @@ use gpui::{
 use gpui::{ListState, ScrollHandle, ScrollStrategy, UniformListScrollHandle};
 use language::LanguageRegistry;
 use notifications::status_toast::{StatusToast, ToastIcon};
-use persistence::COMPONENT_PREVIEW_DB;
+use persistence::ComponentPreviewDb;
 use project::Project;
 use std::{iter::Iterator, ops::Range, sync::Arc};
 use ui::{ButtonLike, Divider, HighlightedLabel, ListItem, ListSubHeader, Tooltip, prelude::*};
@@ -784,7 +784,7 @@ impl SerializableItem for ComponentPreview {
         cx: &mut App,
     ) -> Task<anyhow::Result<Entity<Self>>> {
         let deserialized_active_page =
-            match COMPONENT_PREVIEW_DB.get_active_page(item_id, workspace_id) {
+            match ComponentPreviewDb::global(cx).get_active_page(item_id, workspace_id) {
                 Ok(page) => {
                     if let Some(page) = page {
                         ActivePageId(page)
@@ -845,7 +845,7 @@ impl SerializableItem for ComponentPreview {
             alive_items,
             workspace_id,
             "component_previews",
-            &COMPONENT_PREVIEW_DB,
+            &ComponentPreviewDb::global(cx),
             cx,
         )
     }
@@ -860,9 +860,9 @@ impl SerializableItem for ComponentPreview {
     ) -> Option<Task<anyhow::Result<()>>> {
         let active_page = self.active_page_id(cx);
         let workspace_id = self.workspace_id?;
+        let db = ComponentPreviewDb::global(cx);
         Some(cx.background_spawn(async move {
-            COMPONENT_PREVIEW_DB
-                .save_active_page(item_id, workspace_id, active_page.0)
+            db.save_active_page(item_id, workspace_id, active_page.0)
                 .await
         }))
     }

crates/component_preview/src/persistence.rs 🔗

@@ -23,7 +23,7 @@ impl Domain for ComponentPreviewDb {
     )];
 }
 
-db::static_connection!(COMPONENT_PREVIEW_DB, ComponentPreviewDb, [WorkspaceDb]);
+db::static_connection!(ComponentPreviewDb, [WorkspaceDb]);
 
 impl ComponentPreviewDb {
     pub async fn save_active_page(

crates/db/Cargo.toml 🔗

@@ -19,6 +19,7 @@ test-support = []
 anyhow.workspace = true
 gpui.workspace = true
 indoc.workspace = true
+inventory.workspace = true
 log.workspace = true
 paths.workspace = true
 release_channel.workspace = true
@@ -26,6 +27,7 @@ smol.workspace = true
 sqlez.workspace = true
 sqlez_macros.workspace = true
 util.workspace = true
+uuid.workspace = true
 zed_env_vars.workspace = true
 
 [dev-dependencies]

crates/db/src/db.rs 🔗

@@ -4,12 +4,15 @@ pub mod query;
 // Re-export
 pub use anyhow;
 use anyhow::Context as _;
-use gpui::{App, AppContext};
+pub use gpui;
+use gpui::{App, AppContext, Global};
 pub use indoc::indoc;
+pub use inventory;
 pub use paths::database_dir;
 pub use smol;
 pub use sqlez;
 pub use sqlez_macros;
+pub use uuid;
 
 pub use release_channel::RELEASE_CHANNEL;
 use sqlez::domain::Migrator;
@@ -22,6 +25,103 @@ use std::sync::{LazyLock, atomic::Ordering};
 use util::{ResultExt, maybe};
 use zed_env_vars::ZED_STATELESS;
 
+/// A migration registered via `static_connection!` and collected at link time.
+pub struct DomainMigration {
+    pub name: &'static str,
+    pub migrations: &'static [&'static str],
+    pub dependencies: &'static [&'static str],
+    pub should_allow_migration_change: fn(usize, &str, &str) -> bool,
+}
+
+inventory::collect!(DomainMigration);
+
+/// The shared database connection backing all domain-specific DB wrappers.
+/// Set as a GPUI global per-App; in test builds only, falls back to a shared LazyLock.
+pub struct AppDatabase(pub ThreadSafeConnection);
+
+impl Global for AppDatabase {}
+
+/// Migrator that runs all inventory-registered domain migrations in dependency order.
+pub struct AppMigrator;
+
+impl Migrator for AppMigrator {
+    fn migrate(connection: &sqlez::connection::Connection) -> anyhow::Result<()> {
+        let registrations: Vec<&DomainMigration> = inventory::iter::<DomainMigration>().collect();
+        let sorted = topological_sort(&registrations);
+        for reg in &sorted {
+            let mut should_allow = reg.should_allow_migration_change;
+            connection.migrate(reg.name, reg.migrations, &mut should_allow)?;
+        }
+        Ok(())
+    }
+}
+
+impl AppDatabase {
+    /// Opens the production database and runs all inventory-registered
+    /// migrations in dependency order.
+    pub fn new() -> Self {
+        let db_dir = database_dir();
+        let scope = RELEASE_CHANNEL.dev_name();
+        let connection = smol::block_on(open_db::<AppMigrator>(db_dir, scope));
+        Self(connection)
+    }
+
+    /// Creates a new in-memory database with a unique name and runs all
+    /// inventory-registered migrations in dependency order.
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn test_new() -> Self {
+        let name = format!("test-db-{}", uuid::Uuid::new_v4());
+        let connection = smol::block_on(open_test_db::<AppMigrator>(&name));
+        Self(connection)
+    }
+
+    /// Returns the per-App connection if set. In test builds this falls back
+    /// to the shared LazyLock; in non-test builds with no global set, it panics.
+    pub fn global(cx: &App) -> &ThreadSafeConnection {
+        #[allow(unreachable_code)]
+        if let Some(db) = cx.try_global::<Self>() {
+            return &db.0;
+        } else {
+            #[cfg(any(feature = "test-support", test))]
+            return &TEST_APP_DATABASE.0;
+
+            panic!("database not initialized")
+        }
+    }
+}
+
+fn topological_sort<'a>(registrations: &[&'a DomainMigration]) -> Vec<&'a DomainMigration> {
+    let mut sorted: Vec<&DomainMigration> = Vec::new();
+    let mut visited: std::collections::HashSet<&str> = std::collections::HashSet::new();
+
+    fn visit<'a>(
+        name: &str,
+        registrations: &[&'a DomainMigration],
+        sorted: &mut Vec<&'a DomainMigration>,
+        visited: &mut std::collections::HashSet<&'a str>,
+    ) {
+        if visited.contains(name) {
+            return;
+        }
+        if let Some(reg) = registrations.iter().find(|r| r.name == name) {
+            for dep in reg.dependencies {
+                visit(dep, registrations, sorted, visited);
+            }
+            visited.insert(reg.name);
+            sorted.push(reg);
+        }
+    }
+
+    for reg in registrations {
+        visit(reg.name, registrations, &mut sorted, &mut visited);
+    }
+    sorted
+}
+
+/// Shared fallback `AppDatabase` used when no per-App global is set.
+#[cfg(any(test, feature = "test-support"))]
+static TEST_APP_DATABASE: LazyLock<AppDatabase> = LazyLock::new(AppDatabase::test_new);
+
 const CONNECTION_INITIALIZE_QUERY: &str = sql!(
     PRAGMA foreign_keys=TRUE;
 );
@@ -110,12 +210,11 @@ pub async fn open_test_db<M: Migrator>(db_name: &str) -> ThreadSafeConnection {
 /// Implements a basic DB wrapper for a given domain
 ///
 /// Arguments:
-/// - static variable name for connection
 /// - type of connection wrapper
 /// - dependencies, whose migrations should be run prior to this domain's migrations
 #[macro_export]
 macro_rules! static_connection {
-    ($id:ident, $t:ident, [ $($d:ty),* ] $(, $global:ident)?) => {
+    ($t:ident, [ $($d:ty),* ]) => {
         impl ::std::ops::Deref for $t {
             type Target = $crate::sqlez::thread_safe_connection::ThreadSafeConnection;
 
@@ -124,30 +223,33 @@ macro_rules! static_connection {
             }
         }
 
+        impl ::std::clone::Clone for $t {
+            fn clone(&self) -> Self {
+                $t(self.0.clone())
+            }
+        }
+
         impl $t {
+            /// Returns an instance backed by the per-App database if set,
+            /// or the shared fallback connection otherwise.
+            pub fn global(cx: &$crate::gpui::App) -> Self {
+                $t($crate::AppDatabase::global(cx).clone())
+            }
+
             #[cfg(any(test, feature = "test-support"))]
             pub async fn open_test_db(name: &'static str) -> Self {
                 $t($crate::open_test_db::<$t>(name).await)
             }
         }
 
-        #[cfg(any(test, feature = "test-support"))]
-        pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| {
-            #[allow(unused_parens)]
-            $t($crate::smol::block_on($crate::open_test_db::<($($d,)* $t)>(stringify!($id))))
-        });
-
-        #[cfg(not(any(test, feature = "test-support")))]
-        pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| {
-            let db_dir = $crate::database_dir();
-            let scope = if false $(|| stringify!($global) == "global")? {
-                "global"
-            } else {
-                $crate::RELEASE_CHANNEL.dev_name()
-            };
-            #[allow(unused_parens)]
-            $t($crate::smol::block_on($crate::open_db::<($($d,)* $t)>(db_dir, scope)))
-        });
+        $crate::inventory::submit! {
+            $crate::DomainMigration {
+                name: <$t as $crate::sqlez::domain::Domain>::NAME,
+                migrations: <$t as $crate::sqlez::domain::Domain>::MIGRATIONS,
+                dependencies: &[$(<$d as $crate::sqlez::domain::Domain>::NAME),*],
+                should_allow_migration_change: <$t as $crate::sqlez::domain::Domain>::should_allow_migration_change,
+            }
+        }
     }
 }
 

crates/db/src/kvp.rs 🔗

@@ -11,6 +11,12 @@ use crate::{
 
 pub struct KeyValueStore(crate::sqlez::thread_safe_connection::ThreadSafeConnection);
 
+impl KeyValueStore {
+    pub fn from_app_db(db: &crate::AppDatabase) -> Self {
+        Self(db.0.clone())
+    }
+}
+
 impl Domain for KeyValueStore {
     const NAME: &str = stringify!(KeyValueStore);
 
@@ -32,26 +38,25 @@ impl Domain for KeyValueStore {
     ];
 }
 
-crate::static_connection!(KEY_VALUE_STORE, KeyValueStore, []);
+crate::static_connection!(KeyValueStore, []);
 
 pub trait Dismissable {
     const KEY: &'static str;
 
-    fn dismissed() -> bool {
-        KEY_VALUE_STORE
+    fn dismissed(cx: &App) -> bool {
+        KeyValueStore::global(cx)
             .read_kvp(Self::KEY)
             .log_err()
             .is_some_and(|s| s.is_some())
     }
 
     fn set_dismissed(is_dismissed: bool, cx: &mut App) {
+        let db = KeyValueStore::global(cx);
         write_and_log(cx, move || async move {
             if is_dismissed {
-                KEY_VALUE_STORE
-                    .write_kvp(Self::KEY.into(), "1".into())
-                    .await
+                db.write_kvp(Self::KEY.into(), "1".into()).await
             } else {
-                KEY_VALUE_STORE.delete_kvp(Self::KEY.into()).await
+                db.delete_kvp(Self::KEY.into()).await
             }
         })
     }
@@ -228,9 +233,26 @@ impl Domain for GlobalKeyValueStore {
     )];
 }
 
-crate::static_connection!(GLOBAL_KEY_VALUE_STORE, GlobalKeyValueStore, [], global);
+impl std::ops::Deref for GlobalKeyValueStore {
+    type Target = ThreadSafeConnection;
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+static GLOBAL_KEY_VALUE_STORE: std::sync::LazyLock<GlobalKeyValueStore> =
+    std::sync::LazyLock::new(|| {
+        let db_dir = crate::database_dir();
+        GlobalKeyValueStore(smol::block_on(crate::open_db::<GlobalKeyValueStore>(
+            db_dir, "global",
+        )))
+    });
 
 impl GlobalKeyValueStore {
+    pub fn global() -> &'static Self {
+        &GLOBAL_KEY_VALUE_STORE
+    }
+
     query! {
         pub fn read_kvp(key: &str) -> Result<Option<String>> {
             SELECT value FROM kv_store WHERE key = (?)

crates/debugger_ui/src/debugger_panel.rs 🔗

@@ -1461,7 +1461,12 @@ async fn register_session_inner(
         .detach();
     })
     .ok();
-    let serialized_layout = persistence::get_serialized_layout(adapter_name).await;
+    let serialized_layout = this
+        .update(cx, |_, cx| {
+            persistence::get_serialized_layout(&adapter_name, &db::kvp::KeyValueStore::global(cx))
+        })
+        .ok()
+        .flatten();
     let debug_session = this.update_in(cx, |this, window, cx| {
         let parent_session = this
             .sessions_with_children

crates/debugger_ui/src/persistence.rs 🔗

@@ -1,7 +1,7 @@
 use anyhow::Context as _;
 use collections::HashMap;
 use dap::{Capabilities, adapters::DebugAdapterName};
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
 use gpui::{Axis, Context, Entity, EntityId, Focusable, Subscription, WeakEntity, Window};
 use project::Project;
 use serde::{Deserialize, Serialize};
@@ -125,15 +125,15 @@ const DEBUGGER_PANEL_PREFIX: &str = "debugger_panel_";
 pub(crate) async fn serialize_pane_layout(
     adapter_name: DebugAdapterName,
     pane_group: SerializedLayout,
+    kvp: KeyValueStore,
 ) -> anyhow::Result<()> {
     let serialized_pane_group = serde_json::to_string(&pane_group)
         .context("Serializing pane group with serde_json as a string")?;
-    KEY_VALUE_STORE
-        .write_kvp(
-            format!("{DEBUGGER_PANEL_PREFIX}-{adapter_name}"),
-            serialized_pane_group,
-        )
-        .await
+    kvp.write_kvp(
+        format!("{DEBUGGER_PANEL_PREFIX}-{adapter_name}"),
+        serialized_pane_group,
+    )
+    .await
 }
 
 pub(crate) fn build_serialized_layout(
@@ -187,13 +187,13 @@ fn serialize_pane(pane: &Entity<Pane>, cx: &App) -> SerializedPane {
     }
 }
 
-pub(crate) async fn get_serialized_layout(
+pub(crate) fn get_serialized_layout(
     adapter_name: impl AsRef<str>,
+    kvp: &KeyValueStore,
 ) -> Option<SerializedLayout> {
     let key = format!("{DEBUGGER_PANEL_PREFIX}-{}", adapter_name.as_ref());
 
-    KEY_VALUE_STORE
-        .read_kvp(&key)
+    kvp.read_kvp(&key)
         .log_err()
         .flatten()
         .and_then(|value| serde_json::from_str::<SerializedLayout>(&value).ok())

crates/debugger_ui/src/session/running.rs 🔗

@@ -1501,9 +1501,14 @@ impl RunningState {
                     return;
                 };
 
-                persistence::serialize_pane_layout(adapter_name, pane_layout)
-                    .await
-                    .log_err();
+                let kvp = this
+                    .read_with(cx, |_, cx| db::kvp::KeyValueStore::global(cx))
+                    .ok();
+                if let Some(kvp) = kvp {
+                    persistence::serialize_pane_layout(adapter_name, pane_layout, kvp)
+                        .await
+                        .log_err();
+                }
 
                 this.update(cx, |this, _| {
                     this._schedule_serialize.take();

crates/debugger_ui/src/session/running/breakpoint_list.rs 🔗

@@ -6,7 +6,7 @@ use std::{
 };
 
 use dap::{Capabilities, ExceptionBreakpointsFilter, adapters::DebugAdapterName};
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
 use editor::Editor;
 use gpui::{
     Action, AppContext, ClickEvent, Entity, FocusHandle, Focusable, MouseButton, ScrollStrategy,
@@ -520,8 +520,9 @@ impl BreakpointList {
             });
             let value = serde_json::to_string(&settings);
 
+            let kvp = KeyValueStore::global(cx);
             cx.background_executor()
-                .spawn(async move { KEY_VALUE_STORE.write_kvp(key, value?).await })
+                .spawn(async move { kvp.write_kvp(key, value?).await })
         } else {
             Task::ready(Result::Ok(()))
         }
@@ -532,7 +533,7 @@ impl BreakpointList {
         adapter_name: DebugAdapterName,
         cx: &mut Context<Self>,
     ) -> anyhow::Result<()> {
-        let Some(val) = KEY_VALUE_STORE.read_kvp(&Self::kvp_key(&adapter_name))? else {
+        let Some(val) = KeyValueStore::global(cx).read_kvp(&Self::kvp_key(&adapter_name))? else {
             return Ok(());
         };
         let value: PersistedAdapterOptions = serde_json::from_str(&val)?;

crates/debugger_ui/src/session/running/stack_frame_list.rs 🔗

@@ -5,7 +5,7 @@ use std::time::Duration;
 use anyhow::{Context as _, Result, anyhow};
 use dap::StackFrameId;
 use dap::adapters::DebugAdapterName;
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
 use gpui::{
     Action, AnyElement, Entity, EventEmitter, FocusHandle, Focusable, FontWeight, ListState,
     Subscription, Task, WeakEntity, list,
@@ -122,7 +122,7 @@ impl StackFrameList {
             .flatten()
             .and_then(|database_id| {
                 let key = stack_frame_filter_key(&session.read(cx).adapter(), database_id);
-                KEY_VALUE_STORE
+                KeyValueStore::global(cx)
                     .read_kvp(&key)
                     .ok()
                     .flatten()
@@ -852,8 +852,10 @@ impl StackFrameList {
             .flatten()
         {
             let key = stack_frame_filter_key(&self.session.read(cx).adapter(), database_id);
-            let save_task = KEY_VALUE_STORE.write_kvp(key, self.list_filter.into());
-            cx.background_spawn(save_task).detach();
+            let kvp = KeyValueStore::global(cx);
+            let filter: String = self.list_filter.into();
+            cx.background_spawn(async move { kvp.write_kvp(key, filter).await })
+                .detach();
         }
 
         if let Some(ThreadStatus::Stopped) = thread_status {

crates/debugger_ui/src/tests/stack_frame_list.rs 🔗

@@ -9,7 +9,7 @@ use dap::{
     StackFrame,
     requests::{Scopes, StackTrace, Threads},
 };
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
 use editor::{Editor, ToPoint as _};
 use gpui::{BackgroundExecutor, TestAppContext, VisualTestContext};
 use project::{FakeFs, Project};
@@ -1217,7 +1217,10 @@ async fn test_stack_frame_filter_persistence(
         .expect("workspace id has to be some for this test to work properly");
 
     let key = stack_frame_filter_key(&adapter_name, workspace_id);
-    let stored_value = KEY_VALUE_STORE.read_kvp(&key).unwrap();
+    let stored_value = cx
+        .update(|_, cx| KeyValueStore::global(cx))
+        .read_kvp(&key)
+        .unwrap();
     assert_eq!(
         stored_value,
         Some(StackFrameFilter::OnlyUserFrames.into()),

crates/edit_prediction/src/edit_prediction.rs 🔗

@@ -12,7 +12,7 @@ use cloud_llm_client::{
 };
 use collections::{HashMap, HashSet};
 use copilot::{Copilot, Reinstall, SignIn, SignOut};
-use db::kvp::{Dismissable, KEY_VALUE_STORE};
+use db::kvp::{Dismissable, KeyValueStore};
 use edit_prediction_context::{RelatedExcerptStore, RelatedExcerptStoreEvent, RelatedFile};
 use feature_flags::{FeatureFlag, FeatureFlagAppExt as _};
 use futures::{
@@ -770,7 +770,7 @@ impl EditPredictionStore {
     }
 
     pub fn new(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut Context<Self>) -> Self {
-        let data_collection_choice = Self::load_data_collection_choice();
+        let data_collection_choice = Self::load_data_collection_choice(cx);
 
         let llm_token = LlmApiToken::global(cx);
 
@@ -2745,8 +2745,8 @@ impl EditPredictionStore {
         self.data_collection_choice.is_enabled(cx)
     }
 
-    fn load_data_collection_choice() -> DataCollectionChoice {
-        let choice = KEY_VALUE_STORE
+    fn load_data_collection_choice(cx: &App) -> DataCollectionChoice {
+        let choice = KeyValueStore::global(cx)
             .read_kvp(ZED_PREDICT_DATA_COLLECTION_CHOICE)
             .log_err()
             .flatten();
@@ -2766,11 +2766,13 @@ impl EditPredictionStore {
         self.data_collection_choice = self.data_collection_choice.toggle();
         let new_choice = self.data_collection_choice;
         let is_enabled = new_choice.is_enabled(cx);
-        db::write_and_log(cx, move || {
-            KEY_VALUE_STORE.write_kvp(
+        let kvp = KeyValueStore::global(cx);
+        db::write_and_log(cx, move || async move {
+            kvp.write_kvp(
                 ZED_PREDICT_DATA_COLLECTION_CHOICE.into(),
                 is_enabled.to_string(),
             )
+            .await
         });
     }
 
@@ -3006,12 +3008,13 @@ struct ZedPredictUpsell;
 impl Dismissable for ZedPredictUpsell {
     const KEY: &'static str = "dismissed-edit-predict-upsell";
 
-    fn dismissed() -> bool {
+    fn dismissed(cx: &App) -> bool {
         // To make this backwards compatible with older versions of Zed, we
         // check if the user has seen the previous Edit Prediction Onboarding
         // before, by checking the data collection choice which was written to
         // the database once the user clicked on "Accept and Enable"
-        if KEY_VALUE_STORE
+        let kvp = KeyValueStore::global(cx);
+        if kvp
             .read_kvp(ZED_PREDICT_DATA_COLLECTION_CHOICE)
             .log_err()
             .is_some_and(|s| s.is_some())
@@ -3019,15 +3022,14 @@ impl Dismissable for ZedPredictUpsell {
             return true;
         }
 
-        KEY_VALUE_STORE
-            .read_kvp(Self::KEY)
+        kvp.read_kvp(Self::KEY)
             .log_err()
             .is_some_and(|s| s.is_some())
     }
 }
 
-pub fn should_show_upsell_modal() -> bool {
-    !ZedPredictUpsell::dismissed()
+pub fn should_show_upsell_modal(cx: &App) -> bool {
+    !ZedPredictUpsell::dismissed(cx)
 }
 
 pub fn init(cx: &mut App) {

crates/edit_prediction_ui/src/edit_prediction_button.rs 🔗

@@ -379,7 +379,7 @@ impl Render for EditPredictionButton {
                     }
                 };
 
-                if edit_prediction::should_show_upsell_modal() {
+                if edit_prediction::should_show_upsell_modal(cx) {
                     let tooltip_meta = if self.user_store.read(cx).current_user().is_some() {
                         "Choose a Plan"
                     } else {

crates/editor/src/editor.rs 🔗

@@ -153,7 +153,7 @@ use multi_buffer::{
     ExcerptInfo, ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferRow,
 };
 use parking_lot::Mutex;
-use persistence::DB;
+use persistence::EditorDb;
 use project::{
     BreakpointWithPosition, CodeAction, Completion, CompletionDisplayOptions, CompletionIntent,
     CompletionResponse, CompletionSource, DisableAiSettings, DocumentHighlight, InlayHint, InlayId,
@@ -3757,6 +3757,7 @@ impl Editor {
                 let selections = selections.clone();
                 let background_executor = cx.background_executor().clone();
                 let editor_id = cx.entity().entity_id().as_u64() as ItemId;
+                let db = EditorDb::global(cx);
                 self.serialize_selections = cx.background_spawn(async move {
                     background_executor.timer(SERIALIZATION_THROTTLE_TIME).await;
                     let db_selections = selections
@@ -3769,7 +3770,7 @@ impl Editor {
                         })
                         .collect();
 
-                    DB.save_editor_selections(editor_id, workspace_id, db_selections)
+                    db.save_editor_selections(editor_id, workspace_id, db_selections)
                         .await
                         .with_context(|| {
                             format!(
@@ -3854,16 +3855,17 @@ impl Editor {
                 (start, end, start_fp, end_fp)
             })
             .collect::<Vec<_>>();
+        let db = EditorDb::global(cx);
         self.serialize_folds = cx.background_spawn(async move {
             background_executor.timer(SERIALIZATION_THROTTLE_TIME).await;
             if db_folds.is_empty() {
                 // No folds - delete any persisted folds for this file
-                DB.delete_file_folds(workspace_id, file_path)
+                db.delete_file_folds(workspace_id, file_path)
                     .await
                     .with_context(|| format!("deleting file folds for workspace {workspace_id:?}"))
                     .log_err();
             } else {
-                DB.save_file_folds(workspace_id, file_path, db_folds)
+                db.save_file_folds(workspace_id, file_path, db_folds)
                     .await
                     .with_context(|| {
                         format!("persisting file folds for workspace {workspace_id:?}")
@@ -25357,12 +25359,13 @@ impl Editor {
                 });
 
             // Try file_folds (path-based) first, fallback to editor_folds (migration)
+            let db = EditorDb::global(cx);
             let (folds, needs_migration) = if let Some(ref path) = file_path {
-                if let Some(folds) = DB.get_file_folds(workspace_id, path).log_err()
+                if let Some(folds) = db.get_file_folds(workspace_id, path).log_err()
                     && !folds.is_empty()
                 {
                     (Some(folds), false)
-                } else if let Some(folds) = DB.get_editor_folds(item_id, workspace_id).log_err()
+                } else if let Some(folds) = db.get_editor_folds(item_id, workspace_id).log_err()
                     && !folds.is_empty()
                 {
                     // Found old editor_folds data, will migrate to file_folds
@@ -25372,7 +25375,7 @@ impl Editor {
                 }
             } else {
                 // No file path, try editor_folds as fallback
-                let folds = DB.get_editor_folds(item_id, workspace_id).log_err();
+                let folds = db.get_editor_folds(item_id, workspace_id).log_err();
                 (folds.filter(|f| !f.is_empty()), false)
             };
 
@@ -25471,8 +25474,9 @@ impl Editor {
                     if needs_migration {
                         if let Some(ref path) = file_path {
                             let path = path.clone();
+                            let db = EditorDb::global(cx);
                             cx.spawn(async move |_, _| {
-                                DB.save_file_folds(workspace_id, path, db_folds_for_migration)
+                                db.save_file_folds(workspace_id, path, db_folds_for_migration)
                                     .await
                                     .log_err();
                             })
@@ -25482,7 +25486,7 @@ impl Editor {
                 }
             }
 
-            if let Some(selections) = DB.get_editor_selections(item_id, workspace_id).log_err()
+            if let Some(selections) = db.get_editor_selections(item_id, workspace_id).log_err()
                 && !selections.is_empty()
             {
                 let snapshot = buffer_snapshot.get_or_init(|| self.buffer.read(cx).snapshot(cx));
@@ -25517,7 +25521,10 @@ impl Editor {
             return;
         }
 
-        let Some(folds) = DB.get_file_folds(workspace_id, &file_path).log_err() else {
+        let Some(folds) = EditorDb::global(cx)
+            .get_file_folds(workspace_id, &file_path)
+            .log_err()
+        else {
             return;
         };
         if folds.is_empty() {

crates/editor/src/items.rs 🔗

@@ -4,7 +4,7 @@ use crate::{
     NavigationData, ReportEditorEvent, SelectionEffects, ToPoint as _,
     display_map::HighlightKey,
     editor_settings::SeedQuerySetting,
-    persistence::{DB, SerializedEditor},
+    persistence::{EditorDb, SerializedEditor},
     scroll::{ScrollAnchor, ScrollOffset},
 };
 use anyhow::{Context as _, Result, anyhow};
@@ -1135,7 +1135,13 @@ impl SerializableItem for Editor {
         _window: &mut Window,
         cx: &mut App,
     ) -> Task<Result<()>> {
-        workspace::delete_unloaded_items(alive_items, workspace_id, "editors", &DB, cx)
+        workspace::delete_unloaded_items(
+            alive_items,
+            workspace_id,
+            "editors",
+            &EditorDb::global(cx),
+            cx,
+        )
     }
 
     fn deserialize(
@@ -1146,7 +1152,7 @@ impl SerializableItem for Editor {
         window: &mut Window,
         cx: &mut App,
     ) -> Task<Result<Entity<Self>>> {
-        let serialized_editor = match DB
+        let serialized_editor = match EditorDb::global(cx)
             .get_serialized_editor(item_id, workspace_id)
             .context("Failed to query editor state")
         {
@@ -1361,6 +1367,7 @@ impl SerializableItem for Editor {
 
         let snapshot = buffer.read(cx).snapshot();
 
+        let db = EditorDb::global(cx);
         Some(cx.spawn_in(window, async move |_this, cx| {
             cx.background_spawn(async move {
                 let (contents, language) = if serialize_dirty_buffers && is_dirty {
@@ -1378,7 +1385,7 @@ impl SerializableItem for Editor {
                     mtime,
                 };
                 log::debug!("Serializing editor {item_id:?} in workspace {workspace_id:?}");
-                DB.save_serialized_editor(item_id, workspace_id, editor)
+                db.save_serialized_editor(item_id, workspace_id, editor)
                     .await
                     .context("failed to save serialized editor")
             })
@@ -2110,7 +2117,9 @@ mod tests {
                 MultiWorkspace::test_new(project.clone(), window, cx)
             });
             let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
-            let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap();
+            let db = cx.update(|_, cx| workspace::WorkspaceDb::global(cx));
+            let workspace_id = db.next_id().await.unwrap();
+            let editor_db = cx.update(|_, cx| EditorDb::global(cx));
             let item_id = 1234 as ItemId;
             let mtime = fs
                 .metadata(Path::new(path!("/file.rs")))
@@ -2126,7 +2135,8 @@ mod tests {
                 mtime: Some(mtime),
             };
 
-            DB.save_serialized_editor(item_id, workspace_id, serialized_editor.clone())
+            editor_db
+                .save_serialized_editor(item_id, workspace_id, serialized_editor.clone())
                 .await
                 .unwrap();
 
@@ -2149,8 +2159,10 @@ mod tests {
                 MultiWorkspace::test_new(project.clone(), window, cx)
             });
             let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+            let db = cx.update(|_, cx| workspace::WorkspaceDb::global(cx));
+            let editor_db = cx.update(|_, cx| EditorDb::global(cx));
 
-            let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap();
+            let workspace_id = db.next_id().await.unwrap();
 
             let item_id = 5678 as ItemId;
             let serialized_editor = SerializedEditor {
@@ -2160,7 +2172,8 @@ mod tests {
                 mtime: None,
             };
 
-            DB.save_serialized_editor(item_id, workspace_id, serialized_editor)
+            editor_db
+                .save_serialized_editor(item_id, workspace_id, serialized_editor)
                 .await
                 .unwrap();
 
@@ -2189,8 +2202,10 @@ mod tests {
                 MultiWorkspace::test_new(project.clone(), window, cx)
             });
             let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+            let db = cx.update(|_, cx| workspace::WorkspaceDb::global(cx));
+            let editor_db = cx.update(|_, cx| EditorDb::global(cx));
 
-            let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap();
+            let workspace_id = db.next_id().await.unwrap();
 
             let item_id = 9012 as ItemId;
             let serialized_editor = SerializedEditor {
@@ -2200,7 +2215,8 @@ mod tests {
                 mtime: None,
             };
 
-            DB.save_serialized_editor(item_id, workspace_id, serialized_editor)
+            editor_db
+                .save_serialized_editor(item_id, workspace_id, serialized_editor)
                 .await
                 .unwrap();
 
@@ -2227,8 +2243,10 @@ mod tests {
                 MultiWorkspace::test_new(project.clone(), window, cx)
             });
             let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+            let db = cx.update(|_, cx| workspace::WorkspaceDb::global(cx));
+            let editor_db = cx.update(|_, cx| EditorDb::global(cx));
 
-            let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap();
+            let workspace_id = db.next_id().await.unwrap();
 
             let item_id = 9345 as ItemId;
             let old_mtime = MTime::from_seconds_and_nanos(0, 50);
@@ -2239,7 +2257,8 @@ mod tests {
                 mtime: Some(old_mtime),
             };
 
-            DB.save_serialized_editor(item_id, workspace_id, serialized_editor)
+            editor_db
+                .save_serialized_editor(item_id, workspace_id, serialized_editor)
                 .await
                 .unwrap();
 
@@ -2259,8 +2278,10 @@ mod tests {
                 MultiWorkspace::test_new(project.clone(), window, cx)
             });
             let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+            let db = cx.update(|_, cx| workspace::WorkspaceDb::global(cx));
+            let editor_db = cx.update(|_, cx| EditorDb::global(cx));
 
-            let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap();
+            let workspace_id = db.next_id().await.unwrap();
 
             let item_id = 10000 as ItemId;
             let serialized_editor = SerializedEditor {
@@ -2270,7 +2291,8 @@ mod tests {
                 mtime: None,
             };
 
-            DB.save_serialized_editor(item_id, workspace_id, serialized_editor)
+            editor_db
+                .save_serialized_editor(item_id, workspace_id, serialized_editor)
                 .await
                 .unwrap();
 
@@ -2301,8 +2323,10 @@ mod tests {
                 MultiWorkspace::test_new(project.clone(), window, cx)
             });
             let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+            let db = cx.update(|_, cx| workspace::WorkspaceDb::global(cx));
+            let editor_db = cx.update(|_, cx| EditorDb::global(cx));
 
-            let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap();
+            let workspace_id = db.next_id().await.unwrap();
             let item_id = 11000 as ItemId;
 
             let mtime = fs
@@ -2320,7 +2344,8 @@ mod tests {
                 mtime: Some(mtime),
             };
 
-            DB.save_serialized_editor(item_id, workspace_id, serialized_editor)
+            editor_db
+                .save_serialized_editor(item_id, workspace_id, serialized_editor)
                 .await
                 .unwrap();
 
@@ -2357,8 +2382,10 @@ mod tests {
         let (multi_workspace, cx) =
             cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
         let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+        let db = cx.update(|_, cx| workspace::WorkspaceDb::global(cx));
+        let editor_db = cx.update(|_, cx| EditorDb::global(cx));
 
-        let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap();
+        let workspace_id = db.next_id().await.unwrap();
         let item_id = 99999 as ItemId;
 
         let serialized_editor = SerializedEditor {
@@ -2368,7 +2395,8 @@ mod tests {
             mtime: None,
         };
 
-        DB.save_serialized_editor(item_id, workspace_id, serialized_editor)
+        editor_db
+            .save_serialized_editor(item_id, workspace_id, serialized_editor)
             .await
             .unwrap();
 

crates/editor/src/persistence.rs 🔗

@@ -226,7 +226,7 @@ impl Domain for EditorDb {
     ];
 }
 
-db::static_connection!(DB, EditorDb, [WorkspaceDb]);
+db::static_connection!(EditorDb, [WorkspaceDb]);
 
 // https://www.sqlite.org/limits.html
 // > <..> the maximum value of a host parameter number is SQLITE_MAX_VARIABLE_NUMBER,
@@ -415,8 +415,10 @@ mod tests {
     use super::*;
 
     #[gpui::test]
-    async fn test_save_and_get_serialized_editor() {
-        let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap();
+    async fn test_save_and_get_serialized_editor(cx: &mut gpui::TestAppContext) {
+        let db = cx.update(|cx| workspace::WorkspaceDb::global(cx));
+        let workspace_id = db.next_id().await.unwrap();
+        let editor_db = cx.update(|cx| EditorDb::global(cx));
 
         let serialized_editor = SerializedEditor {
             abs_path: Some(PathBuf::from("testing.txt")),
@@ -425,11 +427,12 @@ mod tests {
             mtime: None,
         };
 
-        DB.save_serialized_editor(1234, workspace_id, serialized_editor.clone())
+        editor_db
+            .save_serialized_editor(1234, workspace_id, serialized_editor.clone())
             .await
             .unwrap();
 
-        let have = DB
+        let have = editor_db
             .get_serialized_editor(1234, workspace_id)
             .unwrap()
             .unwrap();
@@ -443,11 +446,12 @@ mod tests {
             mtime: None,
         };
 
-        DB.save_serialized_editor(1234, workspace_id, serialized_editor.clone())
+        editor_db
+            .save_serialized_editor(1234, workspace_id, serialized_editor.clone())
             .await
             .unwrap();
 
-        let have = DB
+        let have = editor_db
             .get_serialized_editor(1234, workspace_id)
             .unwrap()
             .unwrap();
@@ -461,11 +465,12 @@ mod tests {
             mtime: None,
         };
 
-        DB.save_serialized_editor(1234, workspace_id, serialized_editor.clone())
+        editor_db
+            .save_serialized_editor(1234, workspace_id, serialized_editor.clone())
             .await
             .unwrap();
 
-        let have = DB
+        let have = editor_db
             .get_serialized_editor(1234, workspace_id)
             .unwrap()
             .unwrap();
@@ -479,11 +484,12 @@ mod tests {
             mtime: Some(MTime::from_seconds_and_nanos(100, 42)),
         };
 
-        DB.save_serialized_editor(1234, workspace_id, serialized_editor.clone())
+        editor_db
+            .save_serialized_editor(1234, workspace_id, serialized_editor.clone())
             .await
             .unwrap();
 
-        let have = DB
+        let have = editor_db
             .get_serialized_editor(1234, workspace_id)
             .unwrap()
             .unwrap();
@@ -499,8 +505,10 @@ mod tests {
     // The search uses contains_str_at() to find fingerprints in the buffer.
 
     #[gpui::test]
-    async fn test_save_and_get_file_folds() {
-        let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap();
+    async fn test_save_and_get_file_folds(cx: &mut gpui::TestAppContext) {
+        let db = cx.update(|cx| workspace::WorkspaceDb::global(cx));
+        let workspace_id = db.next_id().await.unwrap();
+        let editor_db = cx.update(|cx| EditorDb::global(cx));
 
         // file_folds table uses path as key (no FK to editors table)
         let file_path: Arc<Path> = Arc::from(Path::new("/tmp/test_file_folds.rs"));
@@ -520,12 +528,13 @@ mod tests {
                 "} // end Foo".to_string(),
             ),
         ];
-        DB.save_file_folds(workspace_id, file_path.clone(), folds.clone())
+        editor_db
+            .save_file_folds(workspace_id, file_path.clone(), folds.clone())
             .await
             .unwrap();
 
         // Retrieve and verify fingerprints are preserved
-        let retrieved = DB.get_file_folds(workspace_id, &file_path).unwrap();
+        let retrieved = editor_db.get_file_folds(workspace_id, &file_path).unwrap();
         assert_eq!(retrieved.len(), 2);
         assert_eq!(
             retrieved[0],
@@ -553,11 +562,12 @@ mod tests {
             "impl Bar {".to_string(),
             "} // end impl".to_string(),
         )];
-        DB.save_file_folds(workspace_id, file_path.clone(), new_folds)
+        editor_db
+            .save_file_folds(workspace_id, file_path.clone(), new_folds)
             .await
             .unwrap();
 
-        let retrieved = DB.get_file_folds(workspace_id, &file_path).unwrap();
+        let retrieved = editor_db.get_file_folds(workspace_id, &file_path).unwrap();
         assert_eq!(retrieved.len(), 1);
         assert_eq!(
             retrieved[0],
@@ -570,10 +580,11 @@ mod tests {
         );
 
         // Test delete
-        DB.delete_file_folds(workspace_id, file_path.clone())
+        editor_db
+            .delete_file_folds(workspace_id, file_path.clone())
             .await
             .unwrap();
-        let retrieved = DB.get_file_folds(workspace_id, &file_path).unwrap();
+        let retrieved = editor_db.get_file_folds(workspace_id, &file_path).unwrap();
         assert!(retrieved.is_empty());
 
         // Test multiple files don't interfere
@@ -582,15 +593,21 @@ mod tests {
         let folds_a = vec![(10, 20, "a_start".to_string(), "a_end".to_string())];
         let folds_b = vec![(30, 40, "b_start".to_string(), "b_end".to_string())];
 
-        DB.save_file_folds(workspace_id, file_path_a.clone(), folds_a)
+        editor_db
+            .save_file_folds(workspace_id, file_path_a.clone(), folds_a)
             .await
             .unwrap();
-        DB.save_file_folds(workspace_id, file_path_b.clone(), folds_b)
+        editor_db
+            .save_file_folds(workspace_id, file_path_b.clone(), folds_b)
             .await
             .unwrap();
 
-        let retrieved_a = DB.get_file_folds(workspace_id, &file_path_a).unwrap();
-        let retrieved_b = DB.get_file_folds(workspace_id, &file_path_b).unwrap();
+        let retrieved_a = editor_db
+            .get_file_folds(workspace_id, &file_path_a)
+            .unwrap();
+        let retrieved_b = editor_db
+            .get_file_folds(workspace_id, &file_path_b)
+            .unwrap();
 
         assert_eq!(retrieved_a.len(), 1);
         assert_eq!(retrieved_b.len(), 1);

crates/editor/src/scroll.rs 🔗

@@ -8,7 +8,7 @@ use crate::{
     InlayHintRefreshReason, MultiBufferSnapshot, RowExt, ToPoint,
     display_map::{DisplaySnapshot, ToDisplayPoint},
     hover_popover::hide_hover,
-    persistence::DB,
+    persistence::EditorDb,
 };
 pub use autoscroll::{Autoscroll, AutoscrollStrategy};
 use core::fmt::Debug;
@@ -467,12 +467,13 @@ impl ScrollManager {
             let item_id = cx.entity().entity_id().as_u64() as ItemId;
             let executor = cx.background_executor().clone();
 
+            let db = EditorDb::global(cx);
             self._save_scroll_position_task = cx.background_executor().spawn(async move {
                 executor.timer(Duration::from_millis(10)).await;
                 log::debug!(
                     "Saving scroll position for item {item_id:?} in workspace {workspace_id:?}"
                 );
-                DB.save_scroll_position(
+                db.save_scroll_position(
                     item_id,
                     workspace_id,
                     top_row,
@@ -937,7 +938,7 @@ impl Editor {
         window: &mut Window,
         cx: &mut Context<Editor>,
     ) {
-        let scroll_position = DB.get_scroll_position(item_id, workspace_id);
+        let scroll_position = EditorDb::global(cx).get_scroll_position(item_id, workspace_id);
         if let Ok(Some((top_row, x, y))) = scroll_position {
             let top_anchor = self
                 .buffer()

crates/extensions_ui/src/extension_suggest.rs 🔗

@@ -1,12 +1,13 @@
 use std::collections::HashMap;
 use std::sync::{Arc, OnceLock};
 
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
 use editor::Editor;
 use extension_host::ExtensionStore;
 use gpui::{AppContext as _, Context, Entity, SharedString, Window};
 use language::Buffer;
 use ui::prelude::*;
+use util::ResultExt;
 use util::rel_path::RelPath;
 use workspace::notifications::simple_message_notification::MessageNotification;
 use workspace::{Workspace, notifications::NotificationId};
@@ -147,7 +148,8 @@ pub(crate) fn suggest(buffer: Entity<Buffer>, window: &mut Window, cx: &mut Cont
     };
 
     let key = language_extension_key(&extension_id);
-    let Ok(None) = KEY_VALUE_STORE.read_kvp(&key) else {
+    let kvp = KeyValueStore::global(cx);
+    let Ok(None) = kvp.read_kvp(&key) else {
         return;
     };
 
@@ -193,9 +195,11 @@ pub(crate) fn suggest(buffer: Entity<Buffer>, window: &mut Window, cx: &mut Cont
                 .secondary_icon_color(Color::Error)
                 .secondary_on_click(move |_window, cx| {
                     let key = language_extension_key(&extension_id);
-                    db::write_and_log(cx, move || {
-                        KEY_VALUE_STORE.write_kvp(key, "dismissed".to_string())
-                    });
+                    let kvp = KeyValueStore::global(cx);
+                    cx.background_spawn(async move {
+                        kvp.write_kvp(key, "dismissed".to_string()).await.log_err()
+                    })
+                    .detach();
                 })
             })
         });

crates/git_graph/src/git_graph.rs 🔗

@@ -2358,7 +2358,7 @@ impl SerializableItem for GitGraph {
             alive_items,
             workspace_id,
             "git_graphs",
-            &persistence::GIT_GRAPHS,
+            &persistence::GitGraphsDb::global(cx),
             cx,
         )
     }
@@ -2371,7 +2371,8 @@ impl SerializableItem for GitGraph {
         window: &mut Window,
         cx: &mut App,
     ) -> Task<gpui::Result<Entity<Self>>> {
-        if persistence::GIT_GRAPHS
+        let db = persistence::GitGraphsDb::global(cx);
+        if db
             .get_git_graph(item_id, workspace_id)
             .ok()
             .is_some_and(|is_open| is_open)
@@ -2392,11 +2393,12 @@ impl SerializableItem for GitGraph {
         cx: &mut Context<Self>,
     ) -> Option<Task<gpui::Result<()>>> {
         let workspace_id = workspace.database_id()?;
-        Some(cx.background_spawn(async move {
-            persistence::GIT_GRAPHS
-                .save_git_graph(item_id, workspace_id, true)
-                .await
-        }))
+        let db = persistence::GitGraphsDb::global(cx);
+        Some(
+            cx.background_spawn(
+                async move { db.save_git_graph(item_id, workspace_id, true).await },
+            ),
+        )
     }
 
     fn should_serialize(&self, event: &Self::Event) -> bool {
@@ -2430,7 +2432,7 @@ mod persistence {
         )]);
     }
 
-    db::static_connection!(GIT_GRAPHS, GitGraphsDb, [WorkspaceDb]);
+    db::static_connection!(GitGraphsDb, [WorkspaceDb]);
 
     impl GitGraphsDb {
         query! {

crates/git_ui/src/git_panel.rs 🔗

@@ -14,7 +14,7 @@ use anyhow::Context as _;
 use askpass::AskPassDelegate;
 use cloud_llm_client::CompletionIntent;
 use collections::{BTreeMap, HashMap, HashSet};
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
 use editor::{
     Direction, Editor, EditorElement, EditorMode, MultiBuffer, MultiBufferOffset,
     actions::ExpandAllDiffHunks,
@@ -928,6 +928,7 @@ impl GitPanel {
         let width = self.width;
         let amend_pending = self.amend_pending;
         let signoff_enabled = self.signoff_enabled;
+        let kvp = KeyValueStore::global(cx);
 
         self.pending_serialization = cx.spawn(async move |git_panel, cx| {
             cx.background_executor()
@@ -948,16 +949,15 @@ impl GitPanel {
             };
             cx.background_spawn(
                 async move {
-                    KEY_VALUE_STORE
-                        .write_kvp(
-                            serialization_key,
-                            serde_json::to_string(&SerializedGitPanel {
-                                width,
-                                amend_pending,
-                                signoff_enabled,
-                            })?,
-                        )
-                        .await?;
+                    kvp.write_kvp(
+                        serialization_key,
+                        serde_json::to_string(&SerializedGitPanel {
+                            width,
+                            amend_pending,
+                            signoff_enabled,
+                        })?,
+                    )
+                    .await?;
                     anyhow::Ok(())
                 }
                 .log_err(),
@@ -5542,12 +5542,14 @@ impl GitPanel {
         mut cx: AsyncWindowContext,
     ) -> anyhow::Result<Entity<Self>> {
         let serialized_panel = match workspace
-            .read_with(&cx, |workspace, _| Self::serialization_key(workspace))
+            .read_with(&cx, |workspace, cx| {
+                Self::serialization_key(workspace).map(|key| (key, KeyValueStore::global(cx)))
+            })
             .ok()
             .flatten()
         {
-            Some(serialization_key) => cx
-                .background_spawn(async move { KEY_VALUE_STORE.read_kvp(&serialization_key) })
+            Some((serialization_key, kvp)) => cx
+                .background_spawn(async move { kvp.read_kvp(&serialization_key) })
                 .await
                 .context("loading git panel")
                 .log_err()

crates/git_ui/src/project_diff.rs 🔗

@@ -1219,8 +1219,9 @@ impl SerializableItem for ProjectDiff {
         window: &mut Window,
         cx: &mut App,
     ) -> Task<Result<Entity<Self>>> {
+        let db = persistence::ProjectDiffDb::global(cx);
         window.spawn(cx, async move |cx| {
-            let diff_base = persistence::PROJECT_DIFF_DB.get_diff_base(item_id, workspace_id)?;
+            let diff_base = db.get_diff_base(item_id, workspace_id)?;
 
             let diff = cx.update(|window, cx| {
                 let branch_diff = cx
@@ -1246,10 +1247,10 @@ impl SerializableItem for ProjectDiff {
         let workspace_id = workspace.database_id()?;
         let diff_base = self.diff_base(cx).clone();
 
+        let db = persistence::ProjectDiffDb::global(cx);
         Some(cx.background_spawn({
             async move {
-                persistence::PROJECT_DIFF_DB
-                    .save_diff_base(item_id, workspace_id, diff_base.clone())
+                db.save_diff_base(item_id, workspace_id, diff_base.clone())
                     .await
             }
         }))
@@ -1289,7 +1290,7 @@ mod persistence {
         )];
     }
 
-    db::static_connection!(PROJECT_DIFF_DB, ProjectDiffDb, [WorkspaceDb]);
+    db::static_connection!(ProjectDiffDb, [WorkspaceDb]);
 
     impl ProjectDiffDb {
         pub async fn save_diff_base(

crates/image_viewer/src/image_viewer.rs 🔗

@@ -16,7 +16,7 @@ use gpui::{
     WeakEntity, Window, actions, checkerboard, div, img, point, px, size,
 };
 use language::File as _;
-use persistence::IMAGE_VIEWER;
+use persistence::ImageViewerDb;
 use project::{ImageItem, Project, ProjectPath, image_store::ImageItemEvent};
 use settings::Settings;
 use theme::ThemeSettings;
@@ -600,8 +600,9 @@ impl SerializableItem for ImageView {
         window: &mut Window,
         cx: &mut App,
     ) -> Task<anyhow::Result<Entity<Self>>> {
+        let db = ImageViewerDb::global(cx);
         window.spawn(cx, async move |cx| {
-            let image_path = IMAGE_VIEWER
+            let image_path = db
                 .get_image_path(item_id, workspace_id)?
                 .context("No image path found")?;
 
@@ -634,13 +635,8 @@ impl SerializableItem for ImageView {
         _window: &mut Window,
         cx: &mut App,
     ) -> Task<anyhow::Result<()>> {
-        delete_unloaded_items(
-            alive_items,
-            workspace_id,
-            "image_viewers",
-            &IMAGE_VIEWER,
-            cx,
-        )
+        let db = ImageViewerDb::global(cx);
+        delete_unloaded_items(alive_items, workspace_id, "image_viewers", &db, cx)
     }
 
     fn serialize(
@@ -654,12 +650,11 @@ impl SerializableItem for ImageView {
         let workspace_id = workspace.database_id()?;
         let image_path = self.image_item.read(cx).abs_path(cx)?;
 
+        let db = ImageViewerDb::global(cx);
         Some(cx.background_spawn({
             async move {
                 log::debug!("Saving image at path {image_path:?}");
-                IMAGE_VIEWER
-                    .save_image_path(item_id, workspace_id, image_path)
-                    .await
+                db.save_image_path(item_id, workspace_id, image_path).await
             }
         }))
     }
@@ -910,7 +905,7 @@ mod persistence {
         )];
     }
 
-    db::static_connection!(IMAGE_VIEWER, ImageViewerDb, [WorkspaceDb]);
+    db::static_connection!(ImageViewerDb, [WorkspaceDb]);
 
     impl ImageViewerDb {
         query! {

crates/keymap_editor/src/keymap_editor.rs 🔗

@@ -47,7 +47,7 @@ use zed_actions::{ChangeKeybinding, OpenKeymap};
 
 use crate::{
     action_completion_provider::ActionCompletionProvider,
-    persistence::KEYBINDING_EDITORS,
+    persistence::KeybindingEditorDb,
     ui_components::keystroke_input::{
         ClearKeystrokes, KeystrokeInput, StartRecording, StopRecording,
     },
@@ -3818,13 +3818,8 @@ impl SerializableItem for KeymapEditor {
         _window: &mut Window,
         cx: &mut App,
     ) -> gpui::Task<gpui::Result<()>> {
-        workspace::delete_unloaded_items(
-            alive_items,
-            workspace_id,
-            "keybinding_editors",
-            &KEYBINDING_EDITORS,
-            cx,
-        )
+        let db = KeybindingEditorDb::global(cx);
+        workspace::delete_unloaded_items(alive_items, workspace_id, "keybinding_editors", &db, cx)
     }
 
     fn deserialize(
@@ -3835,11 +3830,9 @@ impl SerializableItem for KeymapEditor {
         window: &mut Window,
         cx: &mut App,
     ) -> gpui::Task<gpui::Result<Entity<Self>>> {
+        let db = KeybindingEditorDb::global(cx);
         window.spawn(cx, async move |cx| {
-            if KEYBINDING_EDITORS
-                .get_keybinding_editor(item_id, workspace_id)?
-                .is_some()
-            {
+            if db.get_keybinding_editor(item_id, workspace_id)?.is_some() {
                 cx.update(|window, cx| cx.new(|cx| KeymapEditor::new(workspace, window, cx)))
             } else {
                 Err(anyhow!("No keybinding editor to deserialize"))
@@ -3856,11 +3849,10 @@ impl SerializableItem for KeymapEditor {
         cx: &mut ui::Context<Self>,
     ) -> Option<gpui::Task<gpui::Result<()>>> {
         let workspace_id = workspace.database_id()?;
-        Some(cx.background_spawn(async move {
-            KEYBINDING_EDITORS
-                .save_keybinding_editor(item_id, workspace_id)
-                .await
-        }))
+        let db = KeybindingEditorDb::global(cx);
+        Some(cx.background_spawn(
+            async move { db.save_keybinding_editor(item_id, workspace_id).await },
+        ))
     }
 
     fn should_serialize(&self, _event: &Self::Event) -> bool {
@@ -3889,7 +3881,7 @@ mod persistence {
         )];
     }
 
-    db::static_connection!(KEYBINDING_EDITORS, KeybindingEditorDb, [WorkspaceDb]);
+    db::static_connection!(KeybindingEditorDb, [WorkspaceDb]);
 
     impl KeybindingEditorDb {
         query! {

crates/language_onboarding/src/python.rs 🔗

@@ -23,7 +23,7 @@ impl BasedPyrightBanner {
                 this.have_basedpyright = true;
             }
         });
-        let dismissed = Self::dismissed();
+        let dismissed = Self::dismissed(cx);
         Self {
             dismissed,
             have_basedpyright: false,

crates/onboarding/src/multibuffer_hint.rs 🔗

@@ -2,7 +2,7 @@ use std::collections::HashSet;
 use std::sync::OnceLock;
 use std::sync::atomic::{AtomicUsize, Ordering};
 
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
 use gpui::{App, EntityId, EventEmitter, Subscription};
 use ui::{IconButtonShape, Tooltip, prelude::*};
 use workspace::item::{ItemBufferKind, ItemEvent, ItemHandle};
@@ -35,10 +35,10 @@ impl MultibufferHint {
 }
 
 impl MultibufferHint {
-    fn counter() -> &'static AtomicUsize {
+    fn counter(cx: &App) -> &'static AtomicUsize {
         static SHOWN_COUNT: OnceLock<AtomicUsize> = OnceLock::new();
         SHOWN_COUNT.get_or_init(|| {
-            let value: usize = KEY_VALUE_STORE
+            let value: usize = KeyValueStore::global(cx)
                 .read_kvp(SHOWN_COUNT_KEY)
                 .ok()
                 .flatten()
@@ -49,19 +49,21 @@ impl MultibufferHint {
         })
     }
 
-    fn shown_count() -> usize {
-        Self::counter().load(Ordering::Relaxed)
+    fn shown_count(cx: &App) -> usize {
+        Self::counter(cx).load(Ordering::Relaxed)
     }
 
     fn increment_count(cx: &mut App) {
-        Self::set_count(Self::shown_count() + 1, cx)
+        Self::set_count(Self::shown_count(cx) + 1, cx)
     }
 
     pub(crate) fn set_count(count: usize, cx: &mut App) {
-        Self::counter().store(count, Ordering::Relaxed);
+        Self::counter(cx).store(count, Ordering::Relaxed);
 
-        db::write_and_log(cx, move || {
-            KEY_VALUE_STORE.write_kvp(SHOWN_COUNT_KEY.to_string(), format!("{}", count))
+        let kvp = KeyValueStore::global(cx);
+        db::write_and_log(cx, move || async move {
+            kvp.write_kvp(SHOWN_COUNT_KEY.to_string(), format!("{}", count))
+                .await
         });
     }
 
@@ -71,7 +73,7 @@ impl MultibufferHint {
 
     /// Determines the toolbar location for this [`MultibufferHint`].
     fn determine_toolbar_location(&mut self, cx: &mut Context<Self>) -> ToolbarItemLocation {
-        if Self::shown_count() >= NUMBER_OF_HINTS {
+        if Self::shown_count(cx) >= NUMBER_OF_HINTS {
             return ToolbarItemLocation::Hidden;
         }
 

crates/onboarding/src/onboarding.rs 🔗

@@ -1,6 +1,6 @@
 use crate::multibuffer_hint::MultibufferHint;
 use client::{Client, UserStore, zed_urls};
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
 use fs::Fs;
 use gpui::{
     Action, AnyElement, App, AppContext, AsyncWindowContext, Context, Entity, EventEmitter,
@@ -194,8 +194,10 @@ pub fn show_onboarding_view(app_state: Arc<AppState>, cx: &mut App) -> Task<anyh
 
                 cx.notify();
             };
-            db::write_and_log(cx, || {
-                KEY_VALUE_STORE.write_kvp(FIRST_OPEN.to_string(), "false".to_string())
+            let kvp = KeyValueStore::global(cx);
+            db::write_and_log(cx, move || async move {
+                kvp.write_kvp(FIRST_OPEN.to_string(), "false".to_string())
+                    .await
             });
         },
     )
@@ -559,7 +561,7 @@ impl workspace::SerializableItem for Onboarding {
             alive_items,
             workspace_id,
             "onboarding_pages",
-            &persistence::ONBOARDING_PAGES,
+            &persistence::OnboardingPagesDb::global(cx),
             cx,
         )
     }
@@ -572,10 +574,9 @@ impl workspace::SerializableItem for Onboarding {
         window: &mut Window,
         cx: &mut App,
     ) -> gpui::Task<gpui::Result<Entity<Self>>> {
+        let db = persistence::OnboardingPagesDb::global(cx);
         window.spawn(cx, async move |cx| {
-            if let Some(_) =
-                persistence::ONBOARDING_PAGES.get_onboarding_page(item_id, workspace_id)?
-            {
+            if let Some(_) = db.get_onboarding_page(item_id, workspace_id)? {
                 workspace.update(cx, |workspace, cx| Onboarding::new(workspace, cx))
             } else {
                 Err(anyhow::anyhow!("No onboarding page to deserialize"))
@@ -593,11 +594,12 @@ impl workspace::SerializableItem for Onboarding {
     ) -> Option<gpui::Task<gpui::Result<()>>> {
         let workspace_id = workspace.database_id()?;
 
-        Some(cx.background_spawn(async move {
-            persistence::ONBOARDING_PAGES
-                .save_onboarding_page(item_id, workspace_id)
-                .await
-        }))
+        let db = persistence::OnboardingPagesDb::global(cx);
+        Some(
+            cx.background_spawn(
+                async move { db.save_onboarding_page(item_id, workspace_id).await },
+            ),
+        )
     }
 
     fn should_serialize(&self, event: &Self::Event) -> bool {
@@ -646,7 +648,7 @@ mod persistence {
         ];
     }
 
-    db::static_connection!(ONBOARDING_PAGES, OnboardingPagesDb, [WorkspaceDb]);
+    db::static_connection!(OnboardingPagesDb, [WorkspaceDb]);
 
     impl OnboardingPagesDb {
         query! {

crates/outline_panel/src/outline_panel.rs 🔗

@@ -2,7 +2,7 @@ mod outline_panel_settings;
 
 use anyhow::Context as _;
 use collections::{BTreeSet, HashMap, HashSet, hash_map};
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
 use editor::{
     AnchorRangeExt, Bias, DisplayPoint, Editor, EditorEvent, ExcerptId, ExcerptRange,
     MultiBufferSnapshot, RangeToAnchorExt, SelectionEffects,
@@ -693,16 +693,18 @@ impl OutlinePanel {
             .ok()
             .flatten()
         {
-            Some(serialization_key) => cx
-                .background_spawn(async move { KEY_VALUE_STORE.read_kvp(&serialization_key) })
-                .await
-                .context("loading outline panel")
-                .log_err()
-                .flatten()
-                .map(|panel| serde_json::from_str::<SerializedOutlinePanel>(&panel))
-                .transpose()
-                .log_err()
-                .flatten(),
+            Some(serialization_key) => {
+                let kvp = cx.update(|_, cx| KeyValueStore::global(cx))?;
+                cx.background_spawn(async move { kvp.read_kvp(&serialization_key) })
+                    .await
+                    .context("loading outline panel")
+                    .log_err()
+                    .flatten()
+                    .map(|panel| serde_json::from_str::<SerializedOutlinePanel>(&panel))
+                    .transpose()
+                    .log_err()
+                    .flatten()
+            }
             None => None,
         };
 
@@ -958,14 +960,14 @@ impl OutlinePanel {
         };
         let width = self.width;
         let active = Some(self.active);
+        let kvp = KeyValueStore::global(cx);
         self.pending_serialization = cx.background_spawn(
             async move {
-                KEY_VALUE_STORE
-                    .write_kvp(
-                        serialization_key,
-                        serde_json::to_string(&SerializedOutlinePanel { width, active })?,
-                    )
-                    .await?;
+                kvp.write_kvp(
+                    serialization_key,
+                    serde_json::to_string(&SerializedOutlinePanel { width, active })?,
+                )
+                .await?;
                 anyhow::Ok(())
             }
             .log_err(),

crates/project_panel/src/project_panel.rs 🔗

@@ -5,7 +5,7 @@ use anyhow::{Context as _, Result};
 use client::{ErrorCode, ErrorExt};
 use collections::{BTreeSet, HashMap, hash_map};
 use command_palette_hooks::CommandPaletteFilter;
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
 use editor::{
     Editor, EditorEvent, MultiBufferOffset,
     items::{
@@ -999,16 +999,18 @@ impl ProjectPanel {
             .ok()
             .flatten()
         {
-            Some(serialization_key) => cx
-                .background_spawn(async move { KEY_VALUE_STORE.read_kvp(&serialization_key) })
-                .await
-                .context("loading project panel")
-                .log_err()
-                .flatten()
-                .map(|panel| serde_json::from_str::<SerializedProjectPanel>(&panel))
-                .transpose()
-                .log_err()
-                .flatten(),
+            Some(serialization_key) => {
+                let kvp = cx.update(|_, cx| KeyValueStore::global(cx))?;
+                cx.background_spawn(async move { kvp.read_kvp(&serialization_key) })
+                    .await
+                    .context("loading project panel")
+                    .log_err()
+                    .flatten()
+                    .map(|panel| serde_json::from_str::<SerializedProjectPanel>(&panel))
+                    .transpose()
+                    .log_err()
+                    .flatten()
+            }
             None => None,
         };
 
@@ -1114,14 +1116,14 @@ impl ProjectPanel {
             return;
         };
         let width = self.width;
+        let kvp = KeyValueStore::global(cx);
         self.pending_serialization = cx.background_spawn(
             async move {
-                KEY_VALUE_STORE
-                    .write_kvp(
-                        serialization_key,
-                        serde_json::to_string(&SerializedProjectPanel { width })?,
-                    )
-                    .await?;
+                kvp.write_kvp(
+                    serialization_key,
+                    serde_json::to_string(&SerializedProjectPanel { width })?,
+                )
+                .await?;
                 anyhow::Ok(())
             }
             .log_err(),

crates/recent_projects/src/dev_container_suggest.rs 🔗

@@ -1,9 +1,10 @@
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
 use dev_container::find_configs_in_snapshot;
 use gpui::{SharedString, Window};
 use project::{Project, WorktreeId};
 use std::sync::LazyLock;
 use ui::prelude::*;
+use util::ResultExt;
 use util::rel_path::RelPath;
 use workspace::Workspace;
 use workspace::notifications::NotificationId;
@@ -61,7 +62,7 @@ pub fn suggest_on_worktree_updated(
     let project_path = abs_path.to_string_lossy().to_string();
     let key_for_dismiss = project_devcontainer_key(&project_path);
 
-    let already_dismissed = KEY_VALUE_STORE
+    let already_dismissed = KeyValueStore::global(cx)
         .read_kvp(&key_for_dismiss)
         .ok()
         .flatten()
@@ -98,9 +99,13 @@ pub fn suggest_on_worktree_updated(
                 .secondary_on_click({
                     move |_window, cx| {
                         let key = key_for_dismiss.clone();
-                        db::write_and_log(cx, move || {
-                            KEY_VALUE_STORE.write_kvp(key, "dismissed".to_string())
-                        });
+                        let kvp = KeyValueStore::global(cx);
+                        cx.background_spawn(async move {
+                            kvp.write_kvp(key, "dismissed".to_string())
+                                .await
+                                .log_err();
+                        })
+                        .detach();
                     }
                 })
             })

crates/recent_projects/src/recent_projects.rs 🔗

@@ -46,7 +46,7 @@ use ui::{
 use util::{ResultExt, paths::PathExt};
 use workspace::{
     HistoryManager, ModalView, MultiWorkspace, OpenOptions, OpenVisible, PathList,
-    SerializedWorkspaceLocation, WORKSPACE_DB, Workspace, WorkspaceId,
+    SerializedWorkspaceLocation, Workspace, WorkspaceDb, WorkspaceId,
     notifications::DetachAndPromptErr, with_active_or_new_workspace,
 };
 use zed_actions::{OpenDevContainer, OpenRecent, OpenRemote};
@@ -88,8 +88,9 @@ pub async fn get_recent_projects(
     current_workspace_id: Option<WorkspaceId>,
     limit: Option<usize>,
     fs: Arc<dyn fs::Fs>,
+    db: &WorkspaceDb,
 ) -> Vec<RecentProjectEntry> {
-    let workspaces = WORKSPACE_DB
+    let workspaces = db
         .recent_workspaces_on_disk(fs.as_ref())
         .await
         .unwrap_or_default();
@@ -138,8 +139,8 @@ pub async fn get_recent_projects(
     }
 }
 
-pub async fn delete_recent_project(workspace_id: WorkspaceId) {
-    let _ = WORKSPACE_DB.delete_workspace_by_id(workspace_id).await;
+pub async fn delete_recent_project(workspace_id: WorkspaceId, db: &WorkspaceDb) {
+    let _ = db.delete_workspace_by_id(workspace_id).await;
 }
 
 fn get_open_folders(workspace: &Workspace, cx: &App) -> Vec<OpenFolderEntry> {
@@ -508,9 +509,10 @@ impl RecentProjects {
         let _subscription = cx.subscribe(&picker, |_, _, _, cx| cx.emit(DismissEvent));
         // We do not want to block the UI on a potentially lengthy call to DB, so we're gonna swap
         // out workspace locations once the future runs to completion.
+        let db = WorkspaceDb::global(cx);
         cx.spawn_in(window, async move |this, cx| {
             let Some(fs) = fs else { return };
-            let workspaces = WORKSPACE_DB
+            let workspaces = db
                 .recent_workspaces_on_disk(fs.as_ref())
                 .await
                 .log_err()
@@ -1500,13 +1502,11 @@ impl RecentProjectsDelegate {
                 .workspace
                 .upgrade()
                 .map(|ws| ws.read(cx).app_state().fs.clone());
+            let db = WorkspaceDb::global(cx);
             cx.spawn_in(window, async move |this, cx| {
-                WORKSPACE_DB
-                    .delete_workspace_by_id(workspace_id)
-                    .await
-                    .log_err();
+                db.delete_workspace_by_id(workspace_id).await.log_err();
                 let Some(fs) = fs else { return };
-                let workspaces = WORKSPACE_DB
+                let workspaces = db
                     .recent_workspaces_on_disk(fs.as_ref())
                     .await
                     .unwrap_or_default();

crates/session/src/session.rs 🔗

@@ -1,4 +1,4 @@
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
 use gpui::{App, AppContext as _, Context, Subscription, Task, WindowId};
 use util::ResultExt;
 
@@ -12,20 +12,19 @@ const SESSION_ID_KEY: &str = "session_id";
 const SESSION_WINDOW_STACK_KEY: &str = "session_window_stack";
 
 impl Session {
-    pub async fn new(session_id: String) -> Self {
-        let old_session_id = KEY_VALUE_STORE.read_kvp(SESSION_ID_KEY).ok().flatten();
+    pub async fn new(session_id: String, db: KeyValueStore) -> Self {
+        let old_session_id = db.read_kvp(SESSION_ID_KEY).ok().flatten();
 
-        KEY_VALUE_STORE
-            .write_kvp(SESSION_ID_KEY.to_string(), session_id.clone())
+        db.write_kvp(SESSION_ID_KEY.to_string(), session_id.clone())
             .await
             .log_err();
 
-        let old_window_ids = KEY_VALUE_STORE
+        let old_window_ids = db
             .read_kvp(SESSION_WINDOW_STACK_KEY)
             .ok()
             .flatten()
             .and_then(|json| serde_json::from_str::<Vec<u64>>(&json).ok())
-            .map(|vec| {
+            .map(|vec: Vec<u64>| {
                 vec.into_iter()
                     .map(WindowId::from)
                     .collect::<Vec<WindowId>>()
@@ -72,25 +71,28 @@ impl AppSession {
         let _subscriptions = vec![cx.on_app_quit(Self::app_will_quit)];
 
         #[cfg(not(any(test, feature = "test-support")))]
-        let _serialization_task = cx.spawn(async move |_, cx| {
-            // Disabled in tests: the infinite loop bypasses "parking forbidden" checks,
-            // causing tests to hang instead of panicking.
-            {
-                let mut current_window_stack = Vec::new();
-                loop {
-                    if let Some(windows) = cx.update(|cx| window_stack(cx))
-                        && windows != current_window_stack
-                    {
-                        store_window_stack(&windows).await;
-                        current_window_stack = windows;
+        let _serialization_task = {
+            let db = KeyValueStore::global(cx);
+            cx.spawn(async move |_, cx| {
+                // Disabled in tests: the infinite loop bypasses "parking forbidden" checks,
+                // causing tests to hang instead of panicking.
+                {
+                    let mut current_window_stack = Vec::new();
+                    loop {
+                        if let Some(windows) = cx.update(|cx| window_stack(cx))
+                            && windows != current_window_stack
+                        {
+                            store_window_stack(db.clone(), &windows).await;
+                            current_window_stack = windows;
+                        }
+
+                        cx.background_executor()
+                            .timer(std::time::Duration::from_millis(500))
+                            .await;
                     }
-
-                    cx.background_executor()
-                        .timer(std::time::Duration::from_millis(500))
-                        .await;
                 }
-            }
-        });
+            })
+        };
 
         #[cfg(any(test, feature = "test-support"))]
         let _serialization_task = Task::ready(());
@@ -104,7 +106,8 @@ impl AppSession {
 
     fn app_will_quit(&mut self, cx: &mut Context<Self>) -> Task<()> {
         if let Some(window_stack) = window_stack(cx) {
-            cx.background_spawn(async move { store_window_stack(&window_stack).await })
+            let db = KeyValueStore::global(cx);
+            cx.background_spawn(async move { store_window_stack(db, &window_stack).await })
         } else {
             Task::ready(())
         }
@@ -137,10 +140,9 @@ fn window_stack(cx: &App) -> Option<Vec<u64>> {
     )
 }
 
-async fn store_window_stack(windows: &[u64]) {
+async fn store_window_stack(db: KeyValueStore, windows: &[u64]) {
     if let Ok(window_ids_json) = serde_json::to_string(windows) {
-        KEY_VALUE_STORE
-            .write_kvp(SESSION_WINDOW_STACK_KEY.to_string(), window_ids_json)
+        db.write_kvp(SESSION_WINDOW_STACK_KEY.to_string(), window_ids_json)
             .await
             .log_err();
     }

crates/terminal_view/src/persistence.rs 🔗

@@ -425,7 +425,7 @@ impl Domain for TerminalDb {
     ];
 }
 
-db::static_connection!(TERMINAL_DB, TerminalDb, [WorkspaceDb]);
+db::static_connection!(TerminalDb, [WorkspaceDb]);
 
 impl TerminalDb {
     query! {

crates/terminal_view/src/terminal_panel.rs 🔗

@@ -8,7 +8,7 @@ use crate::{
 };
 use breadcrumbs::Breadcrumbs;
 use collections::HashMap;
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
 use futures::{channel::oneshot, future::join_all};
 use gpui::{
     Action, AnyView, App, AsyncApp, AsyncWindowContext, Context, Corner, Entity, EventEmitter,
@@ -250,16 +250,17 @@ impl TerminalPanel {
     ) -> Result<Entity<Self>> {
         let mut terminal_panel = None;
 
-        if let Some((database_id, serialization_key)) = workspace
-            .read_with(&cx, |workspace, _| {
+        if let Some((database_id, serialization_key, kvp)) = workspace
+            .read_with(&cx, |workspace, cx| {
                 workspace
                     .database_id()
                     .zip(TerminalPanel::serialization_key(workspace))
+                    .map(|(id, key)| (id, key, KeyValueStore::global(cx)))
             })
             .ok()
             .flatten()
             && let Some(serialized_panel) = cx
-                .background_spawn(async move { KEY_VALUE_STORE.read_kvp(&serialization_key) })
+                .background_spawn(async move { kvp.read_kvp(&serialization_key) })
                 .await
                 .log_err()
                 .flatten()
@@ -939,6 +940,7 @@ impl TerminalPanel {
         else {
             return;
         };
+        let kvp = KeyValueStore::global(cx);
         self.pending_serialization = cx.spawn(async move |terminal_panel, cx| {
             cx.background_executor()
                 .timer(Duration::from_millis(50))
@@ -953,17 +955,16 @@ impl TerminalPanel {
             });
             cx.background_spawn(
                 async move {
-                    KEY_VALUE_STORE
-                        .write_kvp(
-                            serialization_key,
-                            serde_json::to_string(&SerializedTerminalPanel {
-                                items,
-                                active_item_id: None,
-                                height,
-                                width,
-                            })?,
-                        )
-                        .await?;
+                    kvp.write_kvp(
+                        serialization_key,
+                        serde_json::to_string(&SerializedTerminalPanel {
+                            items,
+                            active_item_id: None,
+                            height,
+                            width,
+                        })?,
+                    )
+                    .await?;
                     anyhow::Ok(())
                 }
                 .log_err(),

crates/terminal_view/src/terminal_view.rs 🔗

@@ -15,7 +15,7 @@ use gpui::{
 };
 use itertools::Itertools;
 use menu;
-use persistence::TERMINAL_DB;
+use persistence::TerminalDb;
 use project::{Project, ProjectEntryId, search::SearchQuery};
 use schemars::JsonSchema;
 use serde::Deserialize;
@@ -1676,11 +1676,11 @@ impl Item for TerminalView {
                 log::debug!(
                     "Updating workspace id for the terminal, old: {old_id:?}, new: {new_id:?}",
                 );
-                cx.background_spawn(TERMINAL_DB.update_workspace_id(
-                    new_id,
-                    old_id,
-                    cx.entity_id().as_u64(),
-                ))
+                let db = TerminalDb::global(cx);
+                let entity_id = cx.entity_id().as_u64();
+                cx.background_spawn(async move {
+                    db.update_workspace_id(new_id, old_id, entity_id).await
+                })
                 .detach();
             }
             self.workspace_id = workspace.database_id();
@@ -1703,7 +1703,8 @@ impl SerializableItem for TerminalView {
         _window: &mut Window,
         cx: &mut App,
     ) -> Task<anyhow::Result<()>> {
-        delete_unloaded_items(alive_items, workspace_id, "terminals", &TERMINAL_DB, cx)
+        let db = TerminalDb::global(cx);
+        delete_unloaded_items(alive_items, workspace_id, "terminals", &db, cx)
     }
 
     fn serialize(
@@ -1728,14 +1729,13 @@ impl SerializableItem for TerminalView {
         let custom_title = self.custom_title.clone();
         self.needs_serialize = false;
 
+        let db = TerminalDb::global(cx);
         Some(cx.background_spawn(async move {
             if let Some(cwd) = cwd {
-                TERMINAL_DB
-                    .save_working_directory(item_id, workspace_id, cwd)
+                db.save_working_directory(item_id, workspace_id, cwd)
                     .await?;
             }
-            TERMINAL_DB
-                .save_custom_title(item_id, workspace_id, custom_title)
+            db.save_custom_title(item_id, workspace_id, custom_title)
                 .await?;
             Ok(())
         }))
@@ -1756,7 +1756,8 @@ impl SerializableItem for TerminalView {
         window.spawn(cx, async move |cx| {
             let (cwd, custom_title) = cx
                 .update(|_window, cx| {
-                    let from_db = TERMINAL_DB
+                    let db = TerminalDb::global(cx);
+                    let from_db = db
                         .get_working_directory(item_id, workspace_id)
                         .log_err()
                         .flatten();
@@ -1770,7 +1771,7 @@ impl SerializableItem for TerminalView {
                             .upgrade()
                             .and_then(|workspace| default_working_directory(workspace.read(cx), cx))
                     };
-                    let custom_title = TERMINAL_DB
+                    let custom_title = db
                         .get_custom_title(item_id, workspace_id)
                         .log_err()
                         .flatten()

crates/title_bar/src/onboarding_banner.rs 🔗

@@ -44,7 +44,7 @@ impl OnboardingBanner {
                 subtitle: subtitle.or(Some(SharedString::from("Introducing:"))),
             },
             visible_when: None,
-            dismissed: get_dismissed(source),
+            dismissed: get_dismissed(source, cx),
         }
     }
 
@@ -75,9 +75,9 @@ fn dismissed_at_key(source: &str) -> String {
     }
 }
 
-fn get_dismissed(source: &str) -> bool {
+fn get_dismissed(source: &str, cx: &App) -> bool {
     let dismissed_at = dismissed_at_key(source);
-    db::kvp::KEY_VALUE_STORE
+    db::kvp::KeyValueStore::global(cx)
         .read_kvp(&dismissed_at)
         .log_err()
         .is_some_and(|dismissed| dismissed.is_some())
@@ -85,9 +85,10 @@ fn get_dismissed(source: &str) -> bool {
 
 fn persist_dismissed(source: &str, cx: &mut App) {
     let dismissed_at = dismissed_at_key(source);
-    cx.spawn(async |_| {
+    let kvp = db::kvp::KeyValueStore::global(cx);
+    cx.spawn(async move |_| {
         let time = chrono::Utc::now().to_rfc3339();
-        db::kvp::KEY_VALUE_STORE.write_kvp(dismissed_at, time).await
+        kvp.write_kvp(dismissed_at, time).await
     })
     .detach_and_log_err(cx);
 }
@@ -105,7 +106,8 @@ pub fn restore_banner(cx: &mut App) {
 
     let source = &cx.global::<BannerGlobal>().entity.read(cx).source;
     let dismissed_at = dismissed_at_key(source);
-    cx.spawn(async |_| db::kvp::KEY_VALUE_STORE.delete_kvp(dismissed_at).await)
+    let kvp = db::kvp::KeyValueStore::global(cx);
+    cx.spawn(async move |_| kvp.delete_kvp(dismissed_at).await)
         .detach_and_log_err(cx);
 }
 

crates/toolchain_selector/src/active_toolchain.rs 🔗

@@ -202,15 +202,15 @@ impl ActiveToolchain {
                         this.worktree_for_id(worktree_id, cx)
                             .map(|worktree| worktree.read(cx).abs_path())
                     })?;
-                    workspace::WORKSPACE_DB
-                        .set_toolchain(
-                            workspace_id,
-                            worktree_root_path,
-                            relative_path.clone(),
-                            toolchain.clone(),
-                        )
-                        .await
-                        .ok()?;
+                    let db = cx.update(|_, cx| workspace::WorkspaceDb::global(cx)).ok()?;
+                    db.set_toolchain(
+                        workspace_id,
+                        worktree_root_path,
+                        relative_path.clone(),
+                        toolchain.clone(),
+                    )
+                    .await
+                    .ok()?;
                     project
                         .update(cx, |this, cx| {
                             this.activate_toolchain(

crates/toolchain_selector/src/toolchain_selector.rs 🔗

@@ -920,16 +920,16 @@ impl PickerDelegate for ToolchainSelectorDelegate {
                 let worktree_abs_path_root = self.worktree_abs_path_root.clone();
                 let path = self.relative_path.clone();
                 let relative_path = self.relative_path.clone();
+                let db = workspace::WorkspaceDb::global(cx);
                 cx.spawn_in(window, async move |_, cx| {
-                    workspace::WORKSPACE_DB
-                        .set_toolchain(
-                            workspace_id,
-                            worktree_abs_path_root,
-                            relative_path,
-                            toolchain.clone(),
-                        )
-                        .await
-                        .log_err();
+                    db.set_toolchain(
+                        workspace_id,
+                        worktree_abs_path_root,
+                        relative_path,
+                        toolchain.clone(),
+                    )
+                    .await
+                    .log_err();
                     workspace
                         .update(cx, |this, cx| {
                             this.project().update(cx, |this, cx| {

crates/vim/src/state.rs 🔗

@@ -322,10 +322,11 @@ impl MarksState {
             let Some(workspace_id) = this.update(cx, |this, cx| this.workspace_id(cx)).ok()? else {
                 return None;
             };
+            let db = cx.update(|cx| VimDb::global(cx)).ok()?;
             let (marks, paths) = cx
                 .background_spawn(async move {
-                    let marks = DB.get_marks(workspace_id)?;
-                    let paths = DB.get_global_marks_paths(workspace_id)?;
+                    let marks = db.get_marks(workspace_id)?;
+                    let paths = db.get_global_marks_paths(workspace_id)?;
                     anyhow::Ok((marks, paths))
                 })
                 .await
@@ -444,8 +445,9 @@ impl MarksState {
                 if let Some(workspace_id) = self.workspace_id(cx) {
                     let path = path.clone();
                     let key = key.clone();
+                    let db = VimDb::global(cx);
                     cx.background_spawn(async move {
-                        DB.set_global_mark_path(workspace_id, key, path).await
+                        db.set_global_mark_path(workspace_id, key, path).await
                     })
                     .detach_and_log_err(cx);
                 }
@@ -461,8 +463,9 @@ impl MarksState {
         self.serialized_marks.insert(path.clone(), new_points);
 
         if let Some(workspace_id) = self.workspace_id(cx) {
+            let db = VimDb::global(cx);
             cx.background_spawn(async move {
-                DB.set_marks(workspace_id, path.clone(), to_write).await?;
+                db.set_marks(workspace_id, path.clone(), to_write).await?;
                 anyhow::Ok(())
             })
             .detach_and_log_err(cx);
@@ -655,8 +658,9 @@ impl MarksState {
         let path = if let Some(target) = self.global_marks.get(&mark_name.clone()) {
             let name = mark_name.clone();
             if let Some(workspace_id) = self.workspace_id(cx) {
+                let db = VimDb::global(cx);
                 cx.background_spawn(async move {
-                    DB.delete_global_marks_path(workspace_id, name).await
+                    db.delete_global_marks_path(workspace_id, name).await
                 })
                 .detach_and_log_err(cx);
             }
@@ -696,7 +700,8 @@ impl MarksState {
             .get_mut(&path)
             .map(|m| m.remove(&mark_name.clone()));
         if let Some(workspace_id) = self.workspace_id(cx) {
-            cx.background_spawn(async move { DB.delete_mark(workspace_id, path, mark_name).await })
+            let db = VimDb::global(cx);
+            cx.background_spawn(async move { db.delete_mark(workspace_id, path, mark_name).await })
                 .detach_and_log_err(cx);
         }
     }
@@ -1764,7 +1769,7 @@ impl Domain for VimDb {
     ];
 }
 
-db::static_connection!(DB, VimDb, [WorkspaceDb]);
+db::static_connection!(VimDb, [WorkspaceDb]);
 
 struct SerializedMark {
     path: Arc<Path>,

crates/workspace/src/history_manager.rs 🔗

@@ -7,7 +7,8 @@ use ui::{App, Context};
 use util::{ResultExt, paths::PathExt};
 
 use crate::{
-    NewWindow, SerializedWorkspaceLocation, WORKSPACE_DB, WorkspaceId, path_list::PathList,
+    NewWindow, SerializedWorkspaceLocation, WorkspaceId, path_list::PathList,
+    persistence::WorkspaceDb,
 };
 
 pub fn init(fs: Arc<dyn Fs>, cx: &mut App) {
@@ -40,8 +41,9 @@ impl HistoryManager {
     }
 
     fn init(this: Entity<HistoryManager>, fs: Arc<dyn Fs>, cx: &App) {
+        let db = WorkspaceDb::global(cx);
         cx.spawn(async move |cx| {
-            let recent_folders = WORKSPACE_DB
+            let recent_folders = db
                 .recent_workspaces_on_disk(fs.as_ref())
                 .await
                 .unwrap_or_default()
@@ -102,6 +104,7 @@ impl HistoryManager {
             .map(|entry| entry.path.clone())
             .collect::<Vec<_>>();
         let user_removed = cx.update_jump_list(menus, entries);
+        let db = WorkspaceDb::global(cx);
         cx.spawn(async move |this, cx| {
             let user_removed = user_removed.await;
             if user_removed.is_empty() {
@@ -119,7 +122,7 @@ impl HistoryManager {
                 }
             }) {
                 for id in deleted_ids.iter() {
-                    WORKSPACE_DB.delete_workspace_by_id(*id).await.log_err();
+                    db.delete_workspace_by_id(*id).await.log_err();
                 }
             }
         })

crates/workspace/src/multi_workspace.rs 🔗

@@ -387,8 +387,9 @@ impl MultiWorkspace {
             active_workspace_id: self.workspace().read(cx).database_id(),
             sidebar_open: self.sidebar_open,
         };
+        let kvp = db::kvp::KeyValueStore::global(cx);
         self._serialize_task = Some(cx.background_spawn(async move {
-            crate::persistence::write_multi_workspace_state(window_id, state).await;
+            crate::persistence::write_multi_workspace_state(&kvp, window_id, state).await;
         }));
     }
 
@@ -560,8 +561,9 @@ impl MultiWorkspace {
         self.focus_active_workspace(window, cx);
 
         let weak_workspace = new_workspace.downgrade();
+        let db = crate::persistence::WorkspaceDb::global(cx);
         cx.spawn_in(window, async move |this, cx| {
-            let workspace_id = crate::persistence::DB.next_id().await.unwrap();
+            let workspace_id = db.next_id().await.unwrap();
             let workspace = weak_workspace.upgrade().unwrap();
             let task: Task<()> = this
                 .update_in(cx, |this, window, cx| {
@@ -571,9 +573,9 @@ impl MultiWorkspace {
                         workspace.set_database_id(workspace_id);
                     });
                     this.serialize(cx);
+                    let db = db.clone();
                     cx.background_spawn(async move {
-                        crate::persistence::DB
-                            .set_session_binding(workspace_id, session_id, Some(window_id))
+                        db.set_session_binding(workspace_id, session_id, Some(window_id))
                             .await
                             .log_err();
                     })
@@ -597,13 +599,13 @@ impl MultiWorkspace {
         }
 
         if let Some(workspace_id) = removed_workspace.read(cx).database_id() {
+            let db = crate::persistence::WorkspaceDb::global(cx);
             self.pending_removal_tasks.retain(|task| !task.is_ready());
             self.pending_removal_tasks
                 .push(cx.background_spawn(async move {
                     // Clear the session binding instead of deleting the row so
                     // the workspace still appears in the recent-projects list.
-                    crate::persistence::DB
-                        .set_session_binding(workspace_id, None, None)
+                    db.set_session_binding(workspace_id, None, None)
                         .await
                         .log_err();
                 }));

crates/workspace/src/persistence.rs 🔗

@@ -14,7 +14,7 @@ use fs::Fs;
 use anyhow::{Context as _, Result, bail};
 use collections::{HashMap, HashSet, IndexSet};
 use db::{
-    kvp::KEY_VALUE_STORE,
+    kvp::KeyValueStore,
     query,
     sqlez::{connection::Connection, domain::Domain},
     sqlez_macros::sql,
@@ -174,8 +174,8 @@ impl Column for SerializedWindowBounds {
 
 const DEFAULT_WINDOW_BOUNDS_KEY: &str = "default_window_bounds";
 
-pub fn read_default_window_bounds() -> Option<(Uuid, WindowBounds)> {
-    let json_str = KEY_VALUE_STORE
+pub fn read_default_window_bounds(kvp: &KeyValueStore) -> Option<(Uuid, WindowBounds)> {
+    let json_str = kvp
         .read_kvp(DEFAULT_WINDOW_BOUNDS_KEY)
         .log_err()
         .flatten()?;
@@ -186,13 +186,13 @@ pub fn read_default_window_bounds() -> Option<(Uuid, WindowBounds)> {
 }
 
 pub async fn write_default_window_bounds(
+    kvp: &KeyValueStore,
     bounds: WindowBounds,
     display_uuid: Uuid,
 ) -> anyhow::Result<()> {
     let persisted = WindowBoundsJson::from(bounds);
     let json_str = serde_json::to_string(&(display_uuid, persisted))?;
-    KEY_VALUE_STORE
-        .write_kvp(DEFAULT_WINDOW_BOUNDS_KEY.to_string(), json_str)
+    kvp.write_kvp(DEFAULT_WINDOW_BOUNDS_KEY.to_string(), json_str)
         .await?;
     Ok(())
 }
@@ -290,12 +290,9 @@ impl From<WindowBoundsJson> for WindowBounds {
     }
 }
 
-fn multi_workspace_states() -> db::kvp::ScopedKeyValueStore<'static> {
-    KEY_VALUE_STORE.scoped("multi_workspace_state")
-}
-
-fn read_multi_workspace_state(window_id: WindowId) -> model::MultiWorkspaceState {
-    multi_workspace_states()
+fn read_multi_workspace_state(window_id: WindowId, cx: &App) -> model::MultiWorkspaceState {
+    let kvp = KeyValueStore::global(cx);
+    kvp.scoped("multi_workspace_state")
         .read(&window_id.as_u64().to_string())
         .log_err()
         .flatten()
@@ -303,9 +300,13 @@ fn read_multi_workspace_state(window_id: WindowId) -> model::MultiWorkspaceState
         .unwrap_or_default()
 }
 
-pub async fn write_multi_workspace_state(window_id: WindowId, state: model::MultiWorkspaceState) {
+pub async fn write_multi_workspace_state(
+    kvp: &KeyValueStore,
+    window_id: WindowId,
+    state: model::MultiWorkspaceState,
+) {
     if let Ok(json_str) = serde_json::to_string(&state) {
-        multi_workspace_states()
+        kvp.scoped("multi_workspace_state")
             .write(window_id.as_u64().to_string(), json_str)
             .await
             .log_err();
@@ -314,6 +315,7 @@ pub async fn write_multi_workspace_state(window_id: WindowId, state: model::Mult
 
 pub fn read_serialized_multi_workspaces(
     session_workspaces: Vec<model::SessionWorkspace>,
+    cx: &App,
 ) -> Vec<model::SerializedMultiWorkspace> {
     let mut window_groups: Vec<Vec<model::SessionWorkspace>> = Vec::new();
     let mut window_id_to_group: HashMap<WindowId, usize> = HashMap::default();
@@ -338,7 +340,7 @@ pub fn read_serialized_multi_workspaces(
         .map(|group| {
             let window_id = group.first().and_then(|sw| sw.window_id);
             let state = window_id
-                .map(read_multi_workspace_state)
+                .map(|wid| read_multi_workspace_state(wid, cx))
                 .unwrap_or_default();
             model::SerializedMultiWorkspace {
                 workspaces: group,
@@ -350,19 +352,18 @@ pub fn read_serialized_multi_workspaces(
 
 const DEFAULT_DOCK_STATE_KEY: &str = "default_dock_state";
 
-pub fn read_default_dock_state() -> Option<DockStructure> {
-    let json_str = KEY_VALUE_STORE
-        .read_kvp(DEFAULT_DOCK_STATE_KEY)
-        .log_err()
-        .flatten()?;
+pub fn read_default_dock_state(kvp: &KeyValueStore) -> Option<DockStructure> {
+    let json_str = kvp.read_kvp(DEFAULT_DOCK_STATE_KEY).log_err().flatten()?;
 
     serde_json::from_str::<DockStructure>(&json_str).ok()
 }
 
-pub async fn write_default_dock_state(docks: DockStructure) -> anyhow::Result<()> {
+pub async fn write_default_dock_state(
+    kvp: &KeyValueStore,
+    docks: DockStructure,
+) -> anyhow::Result<()> {
     let json_str = serde_json::to_string(&docks)?;
-    KEY_VALUE_STORE
-        .write_kvp(DEFAULT_DOCK_STATE_KEY.to_string(), json_str)
+    kvp.write_kvp(DEFAULT_DOCK_STATE_KEY.to_string(), json_str)
         .await?;
     Ok(())
 }
@@ -980,7 +981,7 @@ impl Domain for WorkspaceDb {
     }
 }
 
-db::static_connection!(DB, WorkspaceDb, []);
+db::static_connection!(WorkspaceDb, []);
 
 impl WorkspaceDb {
     /// Returns a serialized workspace for the given worktree_roots. If the passed array
@@ -2252,7 +2253,7 @@ impl WorkspaceDb {
         use db::sqlez::statement::Statement;
         use itertools::Itertools as _;
 
-        DB.clear_trusted_worktrees()
+        self.clear_trusted_worktrees()
             .await
             .context("clearing previous trust state")?;
 
@@ -2319,7 +2320,7 @@ VALUES {placeholders};"#
     }
 
     pub fn fetch_trusted_worktrees(&self) -> Result<DbTrustedPaths> {
-        let trusted_worktrees = DB.trusted_worktrees()?;
+        let trusted_worktrees = self.trusted_worktrees()?;
         Ok(trusted_worktrees
             .into_iter()
             .filter_map(|(abs_path, user_name, host_name)| {
@@ -2450,7 +2451,7 @@ mod tests {
         cx.run_until_parked();
 
         // Read back the persisted state and check that the active workspace ID was written.
-        let state_after_add = read_multi_workspace_state(window_id);
+        let state_after_add = cx.update(|_, cx| read_multi_workspace_state(window_id, cx));
         let active_workspace2_db_id = workspace2.read_with(cx, |ws, _| ws.database_id());
         assert_eq!(
             state_after_add.active_workspace_id, active_workspace2_db_id,
@@ -2465,7 +2466,7 @@ mod tests {
 
         cx.run_until_parked();
 
-        let state_after_remove = read_multi_workspace_state(window_id);
+        let state_after_remove = cx.update(|_, cx| read_multi_workspace_state(window_id, cx));
         let remaining_db_id =
             multi_workspace.read_with(cx, |mw, cx| mw.workspace().read(cx).database_id());
         assert_eq!(
@@ -3882,14 +3883,17 @@ mod tests {
     }
 
     #[gpui::test]
-    async fn test_read_serialized_multi_workspaces_with_state() {
+    async fn test_read_serialized_multi_workspaces_with_state(cx: &mut gpui::TestAppContext) {
         use crate::persistence::model::MultiWorkspaceState;
 
         // Write multi-workspace state for two windows via the scoped KVP.
         let window_10 = WindowId::from(10u64);
         let window_20 = WindowId::from(20u64);
 
+        let kvp = cx.update(|cx| KeyValueStore::global(cx));
+
         write_multi_workspace_state(
+            &kvp,
             window_10,
             MultiWorkspaceState {
                 active_workspace_id: Some(WorkspaceId(2)),
@@ -3899,6 +3903,7 @@ mod tests {
         .await;
 
         write_multi_workspace_state(
+            &kvp,
             window_20,
             MultiWorkspaceState {
                 active_workspace_id: Some(WorkspaceId(3)),
@@ -3935,7 +3940,7 @@ mod tests {
             },
         ];
 
-        let results = read_serialized_multi_workspaces(session_workspaces);
+        let results = cx.update(|cx| read_serialized_multi_workspaces(session_workspaces, cx));
 
         // Should produce 3 groups: window 10, window 20, and the orphan.
         assert_eq!(results.len(), 3);
@@ -3981,14 +3986,16 @@ mod tests {
 
         let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
 
+        let db = cx.update(|_, cx| WorkspaceDb::global(cx));
+
         // Assign a database_id so serialization will actually persist.
-        let workspace_id = DB.next_id().await.unwrap();
+        let workspace_id = db.next_id().await.unwrap();
         workspace.update(cx, |ws, _cx| {
             ws.set_database_id(workspace_id);
         });
 
         // Mutate some workspace state.
-        DB.set_centered_layout(workspace_id, true).await.unwrap();
+        db.set_centered_layout(workspace_id, true).await.unwrap();
 
         // Call flush_serialization and await the returned task directly
         // (without run_until_parked — the point is that awaiting the task
@@ -4000,7 +4007,7 @@ mod tests {
         task.await;
 
         // Read the workspace back from the DB and verify serialization happened.
-        let serialized = DB.workspace_for_id(workspace_id);
+        let serialized = db.workspace_for_id(workspace_id);
         assert!(
             serialized.is_some(),
             "flush_serialization should have persisted the workspace to DB"
@@ -4053,7 +4060,7 @@ mod tests {
         );
 
         // The multi-workspace state should record it as the active workspace.
-        let state = read_multi_workspace_state(window_id);
+        let state = cx.update(|_, cx| read_multi_workspace_state(window_id, cx));
         assert_eq!(
             state.active_workspace_id, new_workspace_db_id,
             "Serialized active_workspace_id should match the new workspace's database_id"
@@ -4062,7 +4069,8 @@ mod tests {
         // The individual workspace row should exist with real data
         // (not just the bare DEFAULT VALUES row from next_id).
         let workspace_id = new_workspace_db_id.unwrap();
-        let serialized = DB.workspace_for_id(workspace_id);
+        let db = cx.update(|_, cx| WorkspaceDb::global(cx));
+        let serialized = db.workspace_for_id(workspace_id);
         assert!(
             serialized.is_some(),
             "Newly created workspace should be fully serialized in the DB after database_id assignment"
@@ -4095,8 +4103,10 @@ mod tests {
             mw.set_random_database_id(cx);
         });
 
+        let db = cx.update(|_, cx| WorkspaceDb::global(cx));
+
         // Get a real DB id for workspace2 so the row actually exists.
-        let workspace2_db_id = DB.next_id().await.unwrap();
+        let workspace2_db_id = db.next_id().await.unwrap();
 
         multi_workspace.update_in(cx, |mw, window, cx| {
             let workspace = cx.new(|cx| crate::Workspace::test_new(project2.clone(), window, cx));
@@ -4108,7 +4118,7 @@ mod tests {
 
         // Save a full workspace row to the DB directly.
         let session_id = format!("remove-test-session-{}", Uuid::new_v4());
-        DB.save_workspace(SerializedWorkspace {
+        db.save_workspace(SerializedWorkspace {
             id: workspace2_db_id,
             paths: PathList::new(&[&dir]),
             location: SerializedWorkspaceLocation::Local,
@@ -4125,7 +4135,7 @@ mod tests {
         .await;
 
         assert!(
-            DB.workspace_for_id(workspace2_db_id).is_some(),
+            db.workspace_for_id(workspace2_db_id).is_some(),
             "Workspace2 should exist in DB before removal"
         );
 
@@ -4140,11 +4150,11 @@ mod tests {
         // projects, but the session binding should be cleared so it is not
         // restored as part of any future session.
         assert!(
-            DB.workspace_for_id(workspace2_db_id).is_some(),
+            db.workspace_for_id(workspace2_db_id).is_some(),
             "Removed workspace's DB row should be preserved for recent projects"
         );
 
-        let session_workspaces = DB
+        let session_workspaces = db
             .last_session_workspace_locations("remove-test-session", None, fs.as_ref())
             .await
             .unwrap();
@@ -4181,9 +4191,11 @@ mod tests {
         let project1 = Project::test(fs.clone(), [], cx).await;
         let project2 = Project::test(fs.clone(), [], cx).await;
 
+        let db = cx.update(|cx| WorkspaceDb::global(cx));
+
         // Get real DB ids so the rows actually exist.
-        let ws1_id = DB.next_id().await.unwrap();
-        let ws2_id = DB.next_id().await.unwrap();
+        let ws1_id = db.next_id().await.unwrap();
+        let ws2_id = db.next_id().await.unwrap();
 
         let (multi_workspace, cx) =
             cx.add_window_view(|window, cx| MultiWorkspace::test_new(project1.clone(), window, cx));
@@ -4205,7 +4217,7 @@ mod tests {
         let session_id = "test-zombie-session";
         let window_id_val: u64 = 42;
 
-        DB.save_workspace(SerializedWorkspace {
+        db.save_workspace(SerializedWorkspace {
             id: ws1_id,
             paths: PathList::new(&[dir1.path()]),
             location: SerializedWorkspaceLocation::Local,
@@ -4221,7 +4233,7 @@ mod tests {
         })
         .await;
 
-        DB.save_workspace(SerializedWorkspace {
+        db.save_workspace(SerializedWorkspace {
             id: ws2_id,
             paths: PathList::new(&[dir2.path()]),
             location: SerializedWorkspaceLocation::Local,
@@ -4245,7 +4257,7 @@ mod tests {
         cx.run_until_parked();
 
         // The removed workspace should NOT appear in session restoration.
-        let locations = DB
+        let locations = db
             .last_session_workspace_locations(session_id, None, fs.as_ref())
             .await
             .unwrap();
@@ -4281,8 +4293,10 @@ mod tests {
         let project1 = Project::test(fs.clone(), [], cx).await;
         let project2 = Project::test(fs.clone(), [], cx).await;
 
+        let db = cx.update(|cx| WorkspaceDb::global(cx));
+
         // Get a real DB id for workspace2 so the row actually exists.
-        let workspace2_db_id = DB.next_id().await.unwrap();
+        let workspace2_db_id = db.next_id().await.unwrap();
 
         let (multi_workspace, cx) =
             cx.add_window_view(|window, cx| MultiWorkspace::test_new(project1.clone(), window, cx));
@@ -4301,7 +4315,7 @@ mod tests {
 
         // Save a full workspace row to the DB directly and let it settle.
         let session_id = format!("pending-removal-session-{}", Uuid::new_v4());
-        DB.save_workspace(SerializedWorkspace {
+        db.save_workspace(SerializedWorkspace {
             id: workspace2_db_id,
             paths: PathList::new(&[&dir]),
             location: SerializedWorkspaceLocation::Local,
@@ -4347,11 +4361,11 @@ mod tests {
         // The row should still exist (for recent projects), but the session
         // binding should have been cleared by the pending removal task.
         assert!(
-            DB.workspace_for_id(workspace2_db_id).is_some(),
+            db.workspace_for_id(workspace2_db_id).is_some(),
             "Workspace row should be preserved for recent projects"
         );
 
-        let session_workspaces = DB
+        let session_workspaces = db
             .last_session_workspace_locations("pending-removal-session", None, fs.as_ref())
             .await
             .unwrap();
@@ -4401,8 +4415,10 @@ mod tests {
 
         let workspace_id = new_workspace_db_id.unwrap();
 
+        let db = cx.update(|_, cx| WorkspaceDb::global(cx));
+
         assert!(
-            DB.workspace_for_id(workspace_id).is_some(),
+            db.workspace_for_id(workspace_id).is_some(),
             "The workspace row should exist in the DB"
         );
 
@@ -4413,7 +4429,7 @@ mod tests {
         cx.executor().advance_clock(Duration::from_millis(200));
         cx.run_until_parked();
 
-        let serialized = DB
+        let serialized = db
             .workspace_for_id(workspace_id)
             .expect("workspace row should still exist");
         assert!(
@@ -4446,7 +4462,8 @@ mod tests {
         let (multi_workspace, cx) =
             cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
 
-        let workspace_id = DB.next_id().await.unwrap();
+        let db = cx.update(|_, cx| WorkspaceDb::global(cx));
+        let workspace_id = db.next_id().await.unwrap();
         multi_workspace.update_in(cx, |mw, _, cx| {
             mw.workspace().update(cx, |ws, _cx| {
                 ws.set_database_id(workspace_id);
@@ -4459,7 +4476,7 @@ mod tests {
         });
         task.await;
 
-        let after = DB
+        let after = db
             .workspace_for_id(workspace_id)
             .expect("workspace row should exist after flush_serialization");
         assert!(

crates/workspace/src/welcome.rs 🔗

@@ -1,6 +1,7 @@
 use crate::{
-    NewFile, Open, PathList, SerializedWorkspaceLocation, WORKSPACE_DB, Workspace, WorkspaceId,
+    NewFile, Open, PathList, SerializedWorkspaceLocation, Workspace, WorkspaceId,
     item::{Item, ItemEvent},
+    persistence::WorkspaceDb,
 };
 use chrono::{DateTime, Utc};
 use git::Clone as GitClone;
@@ -271,9 +272,10 @@ impl WelcomePage {
             let fs = workspace
                 .upgrade()
                 .map(|ws| ws.read(cx).app_state().fs.clone());
+            let db = WorkspaceDb::global(cx);
             cx.spawn_in(window, async move |this: WeakEntity<Self>, cx| {
                 let Some(fs) = fs else { return };
-                let workspaces = WORKSPACE_DB
+                let workspaces = db
                     .recent_workspaces_on_disk(fs.as_ref())
                     .await
                     .log_err()
@@ -518,7 +520,7 @@ impl crate::SerializableItem for WelcomePage {
             alive_items,
             workspace_id,
             "welcome_pages",
-            &persistence::WELCOME_PAGES,
+            &persistence::WelcomePagesDb::global(cx),
             cx,
         )
     }
@@ -531,7 +533,7 @@ impl crate::SerializableItem for WelcomePage {
         window: &mut Window,
         cx: &mut App,
     ) -> Task<gpui::Result<Entity<Self>>> {
-        if persistence::WELCOME_PAGES
+        if persistence::WelcomePagesDb::global(cx)
             .get_welcome_page(item_id, workspace_id)
             .ok()
             .is_some_and(|is_open| is_open)
@@ -553,11 +555,10 @@ impl crate::SerializableItem for WelcomePage {
         cx: &mut Context<Self>,
     ) -> Option<Task<gpui::Result<()>>> {
         let workspace_id = workspace.database_id()?;
-        Some(cx.background_spawn(async move {
-            persistence::WELCOME_PAGES
-                .save_welcome_page(item_id, workspace_id, true)
-                .await
-        }))
+        let db = persistence::WelcomePagesDb::global(cx);
+        Some(cx.background_spawn(
+            async move { db.save_welcome_page(item_id, workspace_id, true).await },
+        ))
     }
 
     fn should_serialize(&self, event: &Self::Event) -> bool {
@@ -591,7 +592,7 @@ mod persistence {
         )]);
     }
 
-    db::static_connection!(WELCOME_PAGES, WelcomePagesDb, [WorkspaceDb]);
+    db::static_connection!(WelcomePagesDb, [WorkspaceDb]);
 
     impl WelcomePagesDb {
         query! {

crates/workspace/src/workspace.rs 🔗

@@ -75,9 +75,9 @@ pub use pane_group::{
     ActivePaneDecorator, HANDLE_HITBOX_SIZE, Member, PaneAxis, PaneGroup, PaneRenderContext,
     SplitDirection,
 };
-use persistence::{DB, SerializedWindowBounds, model::SerializedWorkspace};
+use persistence::{SerializedWindowBounds, model::SerializedWorkspace};
 pub use persistence::{
-    DB as WORKSPACE_DB, WorkspaceDb, delete_unloaded_items,
+    WorkspaceDb, delete_unloaded_items,
     model::{
         DockStructure, ItemId, SerializedMultiWorkspace, SerializedWorkspaceLocation,
         SessionWorkspace,
@@ -1382,10 +1382,10 @@ impl Workspace {
                                 |new_trusted_worktrees, cx| {
                                     let timeout =
                                         cx.background_executor().timer(SERIALIZATION_THROTTLE_TIME);
+                                    let db = WorkspaceDb::global(cx);
                                     cx.background_spawn(async move {
                                         timeout.await;
-                                        persistence::DB
-                                            .save_trusted_worktrees(new_trusted_worktrees)
+                                        db.save_trusted_worktrees(new_trusted_worktrees)
                                             .await
                                             .log_err();
                                     })
@@ -1770,6 +1770,8 @@ impl Workspace {
             cx,
         );
 
+        let db = WorkspaceDb::global(cx);
+        let kvp = db::kvp::KeyValueStore::global(cx);
         cx.spawn(async move |cx| {
             let mut paths_to_open = Vec::with_capacity(abs_paths.len());
             for path in abs_paths.into_iter() {
@@ -1780,8 +1782,7 @@ impl Workspace {
                 }
             }
 
-            let serialized_workspace =
-                persistence::DB.workspace_for_roots(paths_to_open.as_slice());
+            let serialized_workspace = db.workspace_for_roots(paths_to_open.as_slice());
 
             if let Some(paths) = serialized_workspace.as_ref().map(|ws| &ws.paths) {
                 paths_to_open = paths.ordered_paths().cloned().collect();
@@ -1813,10 +1814,10 @@ impl Workspace {
             let workspace_id = if let Some(serialized_workspace) = serialized_workspace.as_ref() {
                 serialized_workspace.id
             } else {
-                DB.next_id().await.unwrap_or_else(|_| Default::default())
+                db.next_id().await.unwrap_or_else(|_| Default::default())
             };
 
-            let toolchains = DB.toolchains(workspace_id).await?;
+            let toolchains = db.toolchains(workspace_id).await?;
 
             for (toolchain, worktree_path, path) in toolchains {
                 let toolchain_path = PathBuf::from(toolchain.path.clone().to_string());
@@ -1899,7 +1900,7 @@ impl Workspace {
                         // Reopening an existing workspace - restore its saved bounds
                         (Some(bounds.0), Some(display))
                     } else if let Some((display, bounds)) =
-                        persistence::read_default_window_bounds()
+                        persistence::read_default_window_bounds(&kvp)
                     {
                         // New or empty workspace - use the last known window bounds
                         (Some(bounds), Some(display))
@@ -1970,7 +1971,7 @@ impl Workspace {
             // 1. This is an empty workspace (no paths), AND
             // 2. The serialized workspace either doesn't exist or has no paths
             if is_empty_workspace && !serialized_workspace_has_paths {
-                if let Some(default_docks) = persistence::read_default_dock_state() {
+                if let Some(default_docks) = persistence::read_default_dock_state(&kvp) {
                     window
                         .update(cx, |_, window, cx| {
                             workspace.update(cx, |workspace, cx| {
@@ -5979,7 +5980,8 @@ impl Workspace {
             self.update_active_view_for_followers(window, cx);
 
             if let Some(database_id) = self.database_id {
-                cx.background_spawn(persistence::DB.update_timestamp(database_id))
+                let db = WorkspaceDb::global(cx);
+                cx.background_spawn(async move { db.update_timestamp(database_id).await })
                     .detach();
             }
         } else {
@@ -6048,15 +6050,17 @@ impl Workspace {
         let window_bounds = window.inner_window_bounds();
         let database_id = self.database_id;
         let has_paths = !self.root_paths(cx).is_empty();
+        let db = WorkspaceDb::global(cx);
+        let kvp = db::kvp::KeyValueStore::global(cx);
 
         cx.background_executor().spawn(async move {
             if !has_paths {
-                persistence::write_default_window_bounds(window_bounds, display_uuid)
+                persistence::write_default_window_bounds(&kvp, window_bounds, display_uuid)
                     .await
                     .log_err();
             }
             if let Some(database_id) = database_id {
-                DB.set_window_open_status(
+                db.set_window_open_status(
                     database_id,
                     SerializedWindowBounds(window_bounds),
                     display_uuid,
@@ -6064,7 +6068,7 @@ impl Workspace {
                 .await
                 .log_err();
             } else {
-                persistence::write_default_window_bounds(window_bounds, display_uuid)
+                persistence::write_default_window_bounds(&kvp, window_bounds, display_uuid)
                     .await
                     .log_err();
             }
@@ -6253,8 +6257,9 @@ impl Workspace {
                     user_toolchains,
                 };
 
+                let db = WorkspaceDb::global(cx);
                 window.spawn(cx, async move |_| {
-                    persistence::DB.save_workspace(serialized_workspace).await;
+                    db.save_workspace(serialized_workspace).await;
                 })
             }
             WorkspaceLocation::DetachFromSession => {
@@ -6262,27 +6267,30 @@ impl Workspace {
                 let display = window.display(cx).and_then(|d| d.uuid().ok());
                 // Save dock state for empty local workspaces
                 let docks = build_serialized_docks(self, window, cx);
+                let db = WorkspaceDb::global(cx);
+                let kvp = db::kvp::KeyValueStore::global(cx);
                 window.spawn(cx, async move |_| {
-                    persistence::DB
-                        .set_window_open_status(
-                            database_id,
-                            window_bounds,
-                            display.unwrap_or_default(),
-                        )
-                        .await
-                        .log_err();
-                    persistence::DB
-                        .set_session_id(database_id, None)
+                    db.set_window_open_status(
+                        database_id,
+                        window_bounds,
+                        display.unwrap_or_default(),
+                    )
+                    .await
+                    .log_err();
+                    db.set_session_id(database_id, None).await.log_err();
+                    persistence::write_default_dock_state(&kvp, docks)
                         .await
                         .log_err();
-                    persistence::write_default_dock_state(docks).await.log_err();
                 })
             }
             WorkspaceLocation::None => {
                 // Save dock state for empty non-local workspaces
                 let docks = build_serialized_docks(self, window, cx);
+                let kvp = db::kvp::KeyValueStore::global(cx);
                 window.spawn(cx, async move |_| {
-                    persistence::write_default_dock_state(docks).await.log_err();
+                    persistence::write_default_dock_state(&kvp, docks)
+                        .await
+                        .log_err();
                 })
             }
         }
@@ -6712,9 +6720,9 @@ impl Workspace {
                         trusted_worktrees.update(cx, |trusted_worktrees, _| {
                             trusted_worktrees.clear_trusted_paths()
                         });
-                        let clear_task = persistence::DB.clear_trusted_worktrees();
+                        let db = WorkspaceDb::global(cx);
                         cx.spawn(async move |_, cx| {
-                            if clear_task.await.log_err().is_some() {
+                            if db.clear_trusted_worktrees().await.log_err().is_some() {
                                 cx.update(|cx| reload(cx));
                             }
                         })
@@ -7020,8 +7028,12 @@ impl Workspace {
     ) {
         self.centered_layout = !self.centered_layout;
         if let Some(database_id) = self.database_id() {
-            cx.background_spawn(DB.set_centered_layout(database_id, self.centered_layout))
-                .detach_and_log_err(cx);
+            let db = WorkspaceDb::global(cx);
+            let centered_layout = self.centered_layout;
+            cx.background_spawn(async move {
+                db.set_centered_layout(database_id, centered_layout).await
+            })
+            .detach_and_log_err(cx);
         }
         cx.notify();
     }
@@ -8238,9 +8250,10 @@ impl WorkspaceHandle for Entity<Workspace> {
 }
 
 pub async fn last_opened_workspace_location(
+    db: &WorkspaceDb,
     fs: &dyn fs::Fs,
 ) -> Option<(WorkspaceId, SerializedWorkspaceLocation, PathList)> {
-    DB.last_workspace(fs)
+    db.last_workspace(fs)
         .await
         .log_err()
         .flatten()
@@ -8248,11 +8261,12 @@ pub async fn last_opened_workspace_location(
 }
 
 pub async fn last_session_workspace_locations(
+    db: &WorkspaceDb,
     last_session_id: &str,
     last_session_window_stack: Option<Vec<WindowId>>,
     fs: &dyn fs::Fs,
 ) -> Option<Vec<SessionWorkspace>> {
-    DB.last_session_workspace_locations(last_session_id, last_session_window_stack, fs)
+    db.last_session_workspace_locations(last_session_id, last_session_window_stack, fs)
         .await
         .log_err()
 }
@@ -8874,8 +8888,10 @@ pub fn open_workspace_by_id(
         cx,
     );
 
+    let db = WorkspaceDb::global(cx);
+    let kvp = db::kvp::KeyValueStore::global(cx);
     cx.spawn(async move |cx| {
-        let serialized_workspace = persistence::DB
+        let serialized_workspace = db
             .workspace_for_id(workspace_id)
             .with_context(|| format!("Workspace {workspace_id:?} not found"))?;
 
@@ -8907,7 +8923,7 @@ pub fn open_workspace_by_id(
                 && let Some(bounds) = serialized_workspace.window_bounds.as_ref()
             {
                 (Some(bounds.0), Some(display))
-            } else if let Some((display, bounds)) = persistence::read_default_window_bounds() {
+            } else if let Some((display, bounds)) = persistence::read_default_window_bounds(&kvp) {
                 (Some(bounds), Some(display))
             } else {
                 (None, None)
@@ -9275,7 +9291,8 @@ async fn open_remote_project_inner(
     window: WindowHandle<MultiWorkspace>,
     cx: &mut AsyncApp,
 ) -> Result<Vec<Option<Box<dyn ItemHandle>>>> {
-    let toolchains = DB.toolchains(workspace_id).await?;
+    let db = cx.update(|cx| WorkspaceDb::global(cx));
+    let toolchains = db.toolchains(workspace_id).await?;
     for (toolchain, worktree_path, path) in toolchains {
         project
             .update(cx, |this, cx| {
@@ -9365,20 +9382,20 @@ fn deserialize_remote_project(
     paths: Vec<PathBuf>,
     cx: &AsyncApp,
 ) -> Task<Result<(WorkspaceId, Option<SerializedWorkspace>)>> {
+    let db = cx.update(|cx| WorkspaceDb::global(cx));
     cx.background_spawn(async move {
-        let remote_connection_id = persistence::DB
+        let remote_connection_id = db
             .get_or_create_remote_connection(connection_options)
             .await?;
 
-        let serialized_workspace =
-            persistence::DB.remote_workspace_for_roots(&paths, remote_connection_id);
+        let serialized_workspace = db.remote_workspace_for_roots(&paths, remote_connection_id);
 
         let workspace_id = if let Some(workspace_id) =
             serialized_workspace.as_ref().map(|workspace| workspace.id)
         {
             workspace_id
         } else {
-            persistence::DB.next_id().await?
+            db.next_id().await?
         };
 
         Ok((workspace_id, serialized_workspace))
@@ -9997,14 +10014,15 @@ pub fn remote_workspace_position_from_db(
     cx: &App,
 ) -> Task<Result<WorkspacePosition>> {
     let paths = paths_to_open.to_vec();
+    let db = WorkspaceDb::global(cx);
+    let kvp = db::kvp::KeyValueStore::global(cx);
 
     cx.background_spawn(async move {
-        let remote_connection_id = persistence::DB
+        let remote_connection_id = db
             .get_or_create_remote_connection(connection_options)
             .await
             .context("fetching serialized ssh project")?;
-        let serialized_workspace =
-            persistence::DB.remote_workspace_for_roots(&paths, remote_connection_id);
+        let serialized_workspace = db.remote_workspace_for_roots(&paths, remote_connection_id);
 
         let (window_bounds, display) = if let Some(bounds) = window_bounds_env_override() {
             (Some(WindowBounds::Windowed(bounds)), None)
@@ -10014,7 +10032,7 @@ pub fn remote_workspace_position_from_db(
                 .and_then(|workspace| {
                     Some((workspace.display?, workspace.window_bounds.map(|b| b.0)?))
                 })
-                .or_else(|| persistence::read_default_window_bounds());
+                .or_else(|| persistence::read_default_window_bounds(&kvp));
 
             if let Some((serialized_display, serialized_bounds)) = restorable_bounds {
                 (Some(serialized_bounds), Some(serialized_display))
@@ -13644,6 +13662,7 @@ mod tests {
         cx.update(|cx| {
             let settings_store = SettingsStore::test(cx);
             cx.set_global(settings_store);
+            cx.set_global(db::AppDatabase::test_new());
             theme::init(theme::LoadThemes::JustBase, cx);
         });
     }

crates/zed/src/main.rs 🔗

@@ -14,7 +14,7 @@ use client::{Client, ProxySettings, UserStore, parse_zed_link};
 use collab_ui::channel_view::ChannelView;
 use collections::HashMap;
 use crashes::InitCrashHandler;
-use db::kvp::{GLOBAL_KEY_VALUE_STORE, KEY_VALUE_STORE};
+use db::kvp::{GlobalKeyValueStore, KeyValueStore};
 use editor::Editor;
 use extension::ExtensionHostProxy;
 use fs::{Fs, RealFs};
@@ -325,12 +325,16 @@ fn main() {
     let app =
         Application::with_platform(gpui_platform::current_platform(false)).with_assets(Assets);
 
+    let app_db = db::AppDatabase::new();
     let system_id = app.background_executor().spawn(system_id());
-    let installation_id = app.background_executor().spawn(installation_id());
-    let session_id = Uuid::new_v4().to_string();
-    let session = app
+    let installation_id = app
         .background_executor()
-        .spawn(Session::new(session_id.clone()));
+        .spawn(installation_id(KeyValueStore::from_app_db(&app_db)));
+    let session_id = Uuid::new_v4().to_string();
+    let session = app.background_executor().spawn(Session::new(
+        session_id.clone(),
+        KeyValueStore::from_app_db(&app_db),
+    ));
 
     crashes::init(
         InitCrashHandler {
@@ -451,7 +455,8 @@ fn main() {
     });
 
     app.run(move |cx| {
-        let db_trusted_paths = match workspace::WORKSPACE_DB.fetch_trusted_worktrees() {
+        cx.set_global(app_db);
+        let db_trusted_paths = match workspace::WorkspaceDb::global(cx).fetch_trusted_worktrees() {
             Ok(trusted_paths) => trusted_paths,
             Err(e) => {
                 log::error!("Failed to do initial trusted worktrees fetch: {e:#}");
@@ -1300,42 +1305,37 @@ async fn authenticate(client: Arc<Client>, cx: &AsyncApp) -> Result<()> {
 
 async fn system_id() -> Result<IdType> {
     let key_name = "system_id".to_string();
+    let db = GlobalKeyValueStore::global();
 
-    if let Ok(Some(system_id)) = GLOBAL_KEY_VALUE_STORE.read_kvp(&key_name) {
+    if let Ok(Some(system_id)) = db.read_kvp(&key_name) {
         return Ok(IdType::Existing(system_id));
     }
 
     let system_id = Uuid::new_v4().to_string();
 
-    GLOBAL_KEY_VALUE_STORE
-        .write_kvp(key_name, system_id.clone())
-        .await?;
+    db.write_kvp(key_name, system_id.clone()).await?;
 
     Ok(IdType::New(system_id))
 }
 
-async fn installation_id() -> Result<IdType> {
+async fn installation_id(db: KeyValueStore) -> Result<IdType> {
     let legacy_key_name = "device_id".to_string();
     let key_name = "installation_id".to_string();
 
     // Migrate legacy key to new key
-    if let Ok(Some(installation_id)) = KEY_VALUE_STORE.read_kvp(&legacy_key_name) {
-        KEY_VALUE_STORE
-            .write_kvp(key_name, installation_id.clone())
-            .await?;
-        KEY_VALUE_STORE.delete_kvp(legacy_key_name).await?;
+    if let Ok(Some(installation_id)) = db.read_kvp(&legacy_key_name) {
+        db.write_kvp(key_name, installation_id.clone()).await?;
+        db.delete_kvp(legacy_key_name).await?;
         return Ok(IdType::Existing(installation_id));
     }
 
-    if let Ok(Some(installation_id)) = KEY_VALUE_STORE.read_kvp(&key_name) {
+    if let Ok(Some(installation_id)) = db.read_kvp(&key_name) {
         return Ok(IdType::Existing(installation_id));
     }
 
     let installation_id = Uuid::new_v4().to_string();
 
-    KEY_VALUE_STORE
-        .write_kvp(key_name, installation_id.clone())
-        .await?;
+    db.write_kvp(key_name, installation_id.clone()).await?;
 
     Ok(IdType::New(installation_id))
 }
@@ -1344,6 +1344,7 @@ pub(crate) async fn restore_or_create_workspace(
     app_state: Arc<AppState>,
     cx: &mut AsyncApp,
 ) -> Result<()> {
+    let kvp = cx.update(|cx| KeyValueStore::global(cx));
     if let Some((multi_workspaces, remote_workspaces)) = restorable_workspaces(cx, &app_state).await
     {
         let mut results: Vec<Result<(), Error>> = Vec::new();
@@ -1452,7 +1453,7 @@ pub(crate) async fn restore_or_create_workspace(
                 .await?;
             }
         }
-    } else if matches!(KEY_VALUE_STORE.read_kvp(FIRST_OPEN), Ok(None)) {
+    } else if matches!(kvp.read_kvp(FIRST_OPEN), Ok(None)) {
         cx.update(|cx| show_onboarding_view(app_state, cx)).await?;
     } else {
         cx.update(|cx| {
@@ -1488,7 +1489,8 @@ async fn restorable_workspaces(
     let (remote_workspaces, local_workspaces) = locations
         .into_iter()
         .partition(|sw| matches!(sw.location, SerializedWorkspaceLocation::Remote(_)));
-    let multi_workspaces = workspace::read_serialized_multi_workspaces(local_workspaces);
+    let multi_workspaces =
+        cx.update(|cx| workspace::read_serialized_multi_workspaces(local_workspaces, cx));
     Some((multi_workspaces, remote_workspaces))
 }
 
@@ -1496,7 +1498,12 @@ pub(crate) async fn restorable_workspace_locations(
     cx: &mut AsyncApp,
     app_state: &Arc<AppState>,
 ) -> Option<Vec<SessionWorkspace>> {
-    let mut restore_behavior = cx.update(|cx| WorkspaceSettings::get(None, cx).restore_on_startup);
+    let (mut restore_behavior, db) = cx.update(|cx| {
+        (
+            WorkspaceSettings::get(None, cx).restore_on_startup,
+            workspace::WorkspaceDb::global(cx),
+        )
+    });
 
     let session_handle = app_state.session.clone();
     let (last_session_id, last_session_window_stack) = cx.update(|cx| {
@@ -1519,7 +1526,7 @@ pub(crate) async fn restorable_workspace_locations(
 
     match restore_behavior {
         workspace::RestoreOnStartupBehavior::LastWorkspace => {
-            workspace::last_opened_workspace_location(app_state.fs.as_ref())
+            workspace::last_opened_workspace_location(&db, app_state.fs.as_ref())
                 .await
                 .map(|(workspace_id, location, paths)| {
                     vec![SessionWorkspace {
@@ -1535,6 +1542,7 @@ pub(crate) async fn restorable_workspace_locations(
                 let ordered = last_session_window_stack.is_some();
 
                 let mut locations = workspace::last_session_workspace_locations(
+                    &db,
                     &last_session_id,
                     last_session_window_stack,
                     app_state.fs.as_ref(),

crates/zed/src/zed.rs 🔗

@@ -5960,9 +5960,11 @@ mod tests {
         cx.run_until_parked();
 
         // Verify all workspaces retained their session_ids.
-        let locations = workspace::last_session_workspace_locations(&session_id, None, fs.as_ref())
-            .await
-            .expect("expected session workspace locations");
+        let db = cx.update(|cx| workspace::WorkspaceDb::global(cx));
+        let locations =
+            workspace::last_session_workspace_locations(&db, &session_id, None, fs.as_ref())
+                .await
+                .expect("expected session workspace locations");
         assert_eq!(
             locations.len(),
             3,
@@ -5989,9 +5991,10 @@ mod tests {
         });
 
         // --- Read back from DB and verify grouping ---
-        let locations = workspace::last_session_workspace_locations(&session_id, None, fs.as_ref())
-            .await
-            .expect("expected session workspace locations");
+        let locations =
+            workspace::last_session_workspace_locations(&db, &session_id, None, fs.as_ref())
+                .await
+                .expect("expected session workspace locations");
 
         assert_eq!(locations.len(), 3, "expected 3 session workspaces");
 

crates/zed/src/zed/open_listener.rs 🔗

@@ -5,7 +5,7 @@ use anyhow::{Context as _, Result, anyhow};
 use cli::{CliRequest, CliResponse, ipc::IpcSender};
 use cli::{IpcHandshake, ipc};
 use client::{ZedLink, parse_zed_link};
-use db::kvp::KEY_VALUE_STORE;
+use db::kvp::KeyValueStore;
 use editor::Editor;
 use fs::Fs;
 use futures::channel::mpsc::{UnboundedReceiver, UnboundedSender};
@@ -491,7 +491,8 @@ async fn open_workspaces(
 
     if grouped_locations.is_empty() {
         // If we have no paths to open, show the welcome screen if this is the first launch
-        if matches!(KEY_VALUE_STORE.read_kvp(FIRST_OPEN), Ok(None)) {
+        let kvp = cx.update(|cx| KeyValueStore::global(cx));
+        if matches!(kvp.read_kvp(FIRST_OPEN), Ok(None)) {
             cx.update(|cx| show_onboarding_view(app_state, cx).detach());
         }
         // If not the first launch, show an empty window with empty editor