Detailed changes
@@ -122,7 +122,7 @@ mod tests {
#[gpui::test]
async fn test_saves_and_retrieves_command_invocation() {
let db =
- CommandPaletteDB(db::open_test_db("test_saves_and_retrieves_command_invocation").await);
+ CommandPaletteDB::open_test_db("test_saves_and_retrieves_command_invocation").await;
let retrieved_cmd = db.get_last_invoked("editor: backspace").unwrap();
@@ -142,7 +142,7 @@ mod tests {
#[gpui::test]
async fn test_gets_usage_history() {
- let db = CommandPaletteDB(db::open_test_db("test_gets_usage_history").await);
+ let db = CommandPaletteDB::open_test_db("test_gets_usage_history").await;
db.write_command_invocation("go to line: toggle", "200")
.await
.unwrap();
@@ -167,7 +167,7 @@ mod tests {
#[gpui::test]
async fn test_lists_ordered_by_usage() {
- let db = CommandPaletteDB(db::open_test_db("test_lists_ordered_by_usage").await);
+ let db = CommandPaletteDB::open_test_db("test_lists_ordered_by_usage").await;
let empty_commands = db.list_commands_used();
match &empty_commands {
@@ -200,7 +200,7 @@ mod tests {
#[gpui::test]
async fn test_handles_max_invocation_entries() {
- let db = CommandPaletteDB(db::open_test_db("test_handles_max_invocation_entries").await);
+ let db = CommandPaletteDB::open_test_db("test_handles_max_invocation_entries").await;
for i in 1..=1001 {
db.write_command_invocation("some-command", &i.to_string())
@@ -23,7 +23,7 @@ use project::Project;
use ui::{Divider, HighlightedLabel, ListItem, ListSubHeader, prelude::*};
use ui_input::SingleLineInput;
-use workspace::{AppState, ItemId, SerializableItem};
+use workspace::{AppState, ItemId, SerializableItem, delete_unloaded_items};
use workspace::{Item, Workspace, WorkspaceId, item::ItemEvent};
pub fn init(app_state: Arc<AppState>, cx: &mut App) {
@@ -860,11 +860,13 @@ impl SerializableItem for ComponentPreview {
_window: &mut Window,
cx: &mut App,
) -> Task<gpui::Result<()>> {
- cx.background_spawn(async move {
- COMPONENT_PREVIEW_DB
- .delete_unloaded_items(workspace_id, alive_items)
- .await
- })
+ delete_unloaded_items(
+ alive_items,
+ workspace_id,
+ "component_previews",
+ &COMPONENT_PREVIEW_DB,
+ cx,
+ )
}
fn serialize(
@@ -47,30 +47,4 @@ impl ComponentPreviewDb {
WHERE item_id = ? AND workspace_id = ?
}
}
-
- pub async fn delete_unloaded_items(
- &self,
- workspace: WorkspaceId,
- alive_items: Vec<ItemId>,
- ) -> Result<()> {
- let placeholders = alive_items
- .iter()
- .map(|_| "?")
- .collect::<Vec<&str>>()
- .join(", ");
-
- let query = format!(
- "DELETE FROM component_previews WHERE workspace_id = ? AND item_id NOT IN ({placeholders})"
- );
-
- self.write(move |conn| {
- let mut statement = Statement::prepare(conn, query)?;
- let mut next_index = statement.bind(&workspace, 1)?;
- for id in alive_items {
- next_index = statement.bind(&id, next_index)?;
- }
- statement.exec()
- })
- .await
- }
}
@@ -45,9 +45,9 @@ pub static ALL_FILE_DB_FAILED: LazyLock<AtomicBool> = LazyLock::new(|| AtomicBoo
/// This will retry a couple times if there are failures. If opening fails once, the db directory
/// is moved to a backup folder and a new one is created. If that fails, a shared in memory db is created.
/// In either case, static variables are set so that the user can be notified.
-pub async fn open_db<M: Migrator + 'static>(db_dir: &Path, scope: &str) -> ThreadSafeConnection<M> {
+pub async fn open_db<M: Migrator + 'static>(db_dir: &Path, scope: &str) -> ThreadSafeConnection {
if *ZED_STATELESS {
- return open_fallback_db().await;
+ return open_fallback_db::<M>().await;
}
let main_db_dir = db_dir.join(format!("0-{}", scope));
@@ -58,7 +58,7 @@ pub async fn open_db<M: Migrator + 'static>(db_dir: &Path, scope: &str) -> Threa
.context("Could not create db directory")
.log_err()?;
let db_path = main_db_dir.join(Path::new(DB_FILE_NAME));
- open_main_db(&db_path).await
+ open_main_db::<M>(&db_path).await
})
.await;
@@ -70,12 +70,12 @@ pub async fn open_db<M: Migrator + 'static>(db_dir: &Path, scope: &str) -> Threa
ALL_FILE_DB_FAILED.store(true, Ordering::Release);
// If still failed, create an in memory db with a known name
- open_fallback_db().await
+ open_fallback_db::<M>().await
}
-async fn open_main_db<M: Migrator>(db_path: &Path) -> Option<ThreadSafeConnection<M>> {
+async fn open_main_db<M: Migrator>(db_path: &Path) -> Option<ThreadSafeConnection> {
log::info!("Opening main db");
- ThreadSafeConnection::<M>::builder(db_path.to_string_lossy().as_ref(), true)
+ ThreadSafeConnection::builder::<M>(db_path.to_string_lossy().as_ref(), true)
.with_db_initialization_query(DB_INITIALIZE_QUERY)
.with_connection_initialize_query(CONNECTION_INITIALIZE_QUERY)
.build()
@@ -83,9 +83,9 @@ async fn open_main_db<M: Migrator>(db_path: &Path) -> Option<ThreadSafeConnectio
.log_err()
}
-async fn open_fallback_db<M: Migrator>() -> ThreadSafeConnection<M> {
+async fn open_fallback_db<M: Migrator>() -> ThreadSafeConnection {
log::info!("Opening fallback db");
- ThreadSafeConnection::<M>::builder(FALLBACK_DB_NAME, false)
+ ThreadSafeConnection::builder::<M>(FALLBACK_DB_NAME, false)
.with_db_initialization_query(DB_INITIALIZE_QUERY)
.with_connection_initialize_query(CONNECTION_INITIALIZE_QUERY)
.build()
@@ -96,10 +96,10 @@ async fn open_fallback_db<M: Migrator>() -> ThreadSafeConnection<M> {
}
#[cfg(any(test, feature = "test-support"))]
-pub async fn open_test_db<M: Migrator>(db_name: &str) -> ThreadSafeConnection<M> {
+pub async fn open_test_db<M: Migrator>(db_name: &str) -> ThreadSafeConnection {
use sqlez::thread_safe_connection::locking_queue;
- ThreadSafeConnection::<M>::builder(db_name, false)
+ ThreadSafeConnection::builder::<M>(db_name, false)
.with_db_initialization_query(DB_INITIALIZE_QUERY)
.with_connection_initialize_query(CONNECTION_INITIALIZE_QUERY)
// Serialize queued writes via a mutex and run them synchronously
@@ -113,10 +113,10 @@ pub async fn open_test_db<M: Migrator>(db_name: &str) -> ThreadSafeConnection<M>
#[macro_export]
macro_rules! define_connection {
(pub static ref $id:ident: $t:ident<()> = $migrations:expr; $($global:ident)?) => {
- pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection<$t>);
+ pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection);
impl ::std::ops::Deref for $t {
- type Target = $crate::sqlez::thread_safe_connection::ThreadSafeConnection<$t>;
+ type Target = $crate::sqlez::thread_safe_connection::ThreadSafeConnection;
fn deref(&self) -> &Self::Target {
&self.0
@@ -133,9 +133,16 @@ macro_rules! define_connection {
}
}
+ impl $t {
+ #[cfg(any(test, feature = "test-support"))]
+ pub async fn open_test_db(name: &'static str) -> Self {
+ $t($crate::open_test_db::<$t>(name).await)
+ }
+ }
+
#[cfg(any(test, feature = "test-support"))]
pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| {
- $t($crate::smol::block_on($crate::open_test_db(stringify!($id))))
+ $t($crate::smol::block_on($crate::open_test_db::<$t>(stringify!($id))))
});
#[cfg(not(any(test, feature = "test-support")))]
@@ -146,14 +153,14 @@ macro_rules! define_connection {
} else {
$crate::RELEASE_CHANNEL.dev_name()
};
- $t($crate::smol::block_on($crate::open_db(db_dir, scope)))
+ $t($crate::smol::block_on($crate::open_db::<$t>(db_dir, scope)))
});
};
(pub static ref $id:ident: $t:ident<$($d:ty),+> = $migrations:expr; $($global:ident)?) => {
- pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection<( $($d),+, $t )>);
+ pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection);
impl ::std::ops::Deref for $t {
- type Target = $crate::sqlez::thread_safe_connection::ThreadSafeConnection<($($d),+, $t)>;
+ type Target = $crate::sqlez::thread_safe_connection::ThreadSafeConnection;
fn deref(&self) -> &Self::Target {
&self.0
@@ -172,7 +179,7 @@ macro_rules! define_connection {
#[cfg(any(test, feature = "test-support"))]
pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| {
- $t($crate::smol::block_on($crate::open_test_db(stringify!($id))))
+ $t($crate::smol::block_on($crate::open_test_db::<($($d),+, $t)>(stringify!($id))))
});
#[cfg(not(any(test, feature = "test-support")))]
@@ -183,7 +190,7 @@ macro_rules! define_connection {
} else {
$crate::RELEASE_CHANNEL.dev_name()
};
- $t($crate::smol::block_on($crate::open_db(db_dir, scope)))
+ $t($crate::smol::block_on($crate::open_db::<($($d),+, $t)>(db_dir, scope)))
});
};
}
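
For reference, the `<()>` arm of `define_connection!` now expands to roughly the following for a connection type (a sketch using a hypothetical `MyDb`; the Deref impl, statics, and migration plumbing the macro also emits are omitted):

pub struct MyDb(db::sqlez::thread_safe_connection::ThreadSafeConnection);

impl MyDb {
    // Generated test helper: the migrator is selected via the turbofish on the
    // shared `open_test_db`, since the connection type no longer carries it.
    #[cfg(any(test, feature = "test-support"))]
    pub async fn open_test_db(name: &'static str) -> Self {
        MyDb(db::open_test_db::<MyDb>(name).await)
    }
}

This is what lets the test call sites in this diff use `SomeDb::open_test_db(...)` instead of wrapping `db::open_test_db` by hand.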
@@ -42,7 +42,7 @@ mod tests {
#[gpui::test]
async fn test_kvp() {
- let db = KeyValueStore(crate::open_test_db("test_kvp").await);
+ let db = KeyValueStore::open_test_db("test_kvp").await;
assert_eq!(db.read_kvp("key-1").unwrap(), None);
@@ -1014,12 +1014,10 @@ impl SerializableItem for Editor {
fn cleanup(
workspace_id: WorkspaceId,
alive_items: Vec<ItemId>,
- window: &mut Window,
+ _window: &mut Window,
cx: &mut App,
) -> Task<Result<()>> {
- window.spawn(cx, async move |_| {
- DB.delete_unloaded_items(workspace_id, alive_items).await
- })
+ workspace::delete_unloaded_items(alive_items, workspace_id, "editors", &DB, cx)
}
fn deserialize(
@@ -373,32 +373,6 @@ VALUES {placeholders};
}
Ok(())
}
-
- pub async fn delete_unloaded_items(
- &self,
- workspace: WorkspaceId,
- alive_items: Vec<ItemId>,
- ) -> Result<()> {
- let placeholders = alive_items
- .iter()
- .map(|_| "?")
- .collect::<Vec<&str>>()
- .join(", ");
-
- let query = format!(
- "DELETE FROM editors WHERE workspace_id = ? AND item_id NOT IN ({placeholders})"
- );
-
- self.write(move |conn| {
- let mut statement = Statement::prepare(conn, query)?;
- let mut next_index = statement.bind(&workspace, 1)?;
- for id in alive_items {
- next_index = statement.bind(&id, next_index)?;
- }
- statement.exec()
- })
- .await
- }
}
#[cfg(test)]
@@ -18,7 +18,7 @@ use theme::Theme;
use ui::prelude::*;
use util::paths::PathExt;
use workspace::{
- ItemId, ItemSettings, Pane, ToolbarItemLocation, Workspace, WorkspaceId,
+ ItemId, ItemSettings, Pane, ToolbarItemLocation, Workspace, WorkspaceId, delete_unloaded_items,
item::{BreadcrumbText, Item, ProjectItem, SerializableItem, TabContentParams},
};
@@ -241,14 +241,16 @@ impl SerializableItem for ImageView {
fn cleanup(
workspace_id: WorkspaceId,
alive_items: Vec<ItemId>,
- window: &mut Window,
+ _window: &mut Window,
cx: &mut App,
) -> Task<gpui::Result<()>> {
- window.spawn(cx, async move |_| {
- IMAGE_VIEWER
- .delete_unloaded_items(workspace_id, alive_items)
- .await
- })
+ delete_unloaded_items(
+ alive_items,
+ workspace_id,
+ "image_viewers",
+ &IMAGE_VIEWER,
+ cx,
+ )
}
fn serialize(
@@ -380,10 +382,9 @@ pub fn init(cx: &mut App) {
}
mod persistence {
- use anyhow::Result;
use std::path::PathBuf;
- use db::{define_connection, query, sqlez::statement::Statement, sqlez_macros::sql};
+ use db::{define_connection, query, sqlez_macros::sql};
use workspace::{ItemId, WorkspaceDb, WorkspaceId};
define_connection! {
@@ -421,31 +422,5 @@ mod persistence {
WHERE item_id = ? AND workspace_id = ?
}
}
-
- pub async fn delete_unloaded_items(
- &self,
- workspace: WorkspaceId,
- alive_items: Vec<ItemId>,
- ) -> Result<()> {
- let placeholders = alive_items
- .iter()
- .map(|_| "?")
- .collect::<Vec<&str>>()
- .join(", ");
-
- let query = format!(
- "DELETE FROM image_viewers WHERE workspace_id = ? AND item_id NOT IN ({placeholders})"
- );
-
- self.write(move |conn| {
- let mut statement = Statement::prepare(conn, query)?;
- let mut next_index = statement.bind(&workspace, 1)?;
- for id in alive_items {
- next_index = statement.bind(&id, next_index)?;
- }
- statement.exec()
- })
- .await
- }
}
}
@@ -27,21 +27,22 @@ static QUEUES: LazyLock<RwLock<HashMap<Arc<str>, WriteQueue>>> = LazyLock::new(D
/// Thread safe connection to a given database file or in memory db. This can be cloned, shared, static,
/// whatever. It derefs to a synchronous connection by thread that is read only. A write capable connection
/// may be accessed by passing a callback to the `write` function which will queue the callback
-pub struct ThreadSafeConnection<M: Migrator + 'static = ()> {
+#[derive(Clone)]
+pub struct ThreadSafeConnection {
uri: Arc<str>,
persistent: bool,
connection_initialize_query: Option<&'static str>,
connections: Arc<ThreadLocal<Connection>>,
- _migrator: PhantomData<*mut M>,
}
-unsafe impl<M: Migrator> Send for ThreadSafeConnection<M> {}
-unsafe impl<M: Migrator> Sync for ThreadSafeConnection<M> {}
+unsafe impl Send for ThreadSafeConnection {}
+unsafe impl Sync for ThreadSafeConnection {}
pub struct ThreadSafeConnectionBuilder<M: Migrator + 'static = ()> {
db_initialize_query: Option<&'static str>,
write_queue_constructor: Option<WriteQueueConstructor>,
- connection: ThreadSafeConnection<M>,
+ connection: ThreadSafeConnection,
+ _migrator: PhantomData<*mut M>,
}
impl<M: Migrator> ThreadSafeConnectionBuilder<M> {
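
Since `ThreadSafeConnection` is no longer generic over its migrator, the domain is now picked at the builder instead. A minimal usage sketch, assuming some `MyDomain: Migrator` (mirroring the updated tests further down):

let connection = ThreadSafeConnection::builder::<MyDomain>("my-db", false)
    .with_db_initialization_query("PRAGMA journal_mode=WAL")
    .build()
    .await?;

Because every connection now shares the single concrete `ThreadSafeConnection` type, helpers such as `workspace::delete_unloaded_items` can take `&ThreadSafeConnection` and work against any of the item databases.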
@@ -72,7 +73,7 @@ impl<M: Migrator> ThreadSafeConnectionBuilder<M> {
self
}
- pub async fn build(self) -> anyhow::Result<ThreadSafeConnection<M>> {
+ pub async fn build(self) -> anyhow::Result<ThreadSafeConnection> {
self.connection
.initialize_queues(self.write_queue_constructor);
@@ -111,7 +112,7 @@ impl<M: Migrator> ThreadSafeConnectionBuilder<M> {
}
}
-impl<M: Migrator> ThreadSafeConnection<M> {
+impl ThreadSafeConnection {
fn initialize_queues(&self, write_queue_constructor: Option<WriteQueueConstructor>) -> bool {
if !QUEUES.read().contains_key(&self.uri) {
let mut queues = QUEUES.write();
@@ -125,7 +126,7 @@ impl<M: Migrator> ThreadSafeConnection<M> {
false
}
- pub fn builder(uri: &str, persistent: bool) -> ThreadSafeConnectionBuilder<M> {
+ pub fn builder<M: Migrator>(uri: &str, persistent: bool) -> ThreadSafeConnectionBuilder<M> {
ThreadSafeConnectionBuilder::<M> {
db_initialize_query: None,
write_queue_constructor: None,
@@ -134,8 +135,8 @@ impl<M: Migrator> ThreadSafeConnection<M> {
persistent,
connection_initialize_query: None,
connections: Default::default(),
- _migrator: PhantomData,
},
+ _migrator: PhantomData,
}
}
@@ -200,7 +201,7 @@ impl<M: Migrator> ThreadSafeConnection<M> {
}
}
-impl ThreadSafeConnection<()> {
+impl ThreadSafeConnection {
/// Special constructor for ThreadSafeConnection which disallows db initialization and migrations.
/// This allows construction to be infallible and not write to the db.
pub fn new(
@@ -214,7 +215,6 @@ impl ThreadSafeConnection<()> {
persistent,
connection_initialize_query,
connections: Default::default(),
- _migrator: PhantomData,
};
connection.initialize_queues(write_queue_constructor);
@@ -222,19 +222,7 @@ impl ThreadSafeConnection<()> {
}
}
-impl<M: Migrator> Clone for ThreadSafeConnection<M> {
- fn clone(&self) -> Self {
- Self {
- uri: self.uri.clone(),
- persistent: self.persistent,
- connection_initialize_query: self.connection_initialize_query,
- connections: self.connections.clone(),
- _migrator: PhantomData,
- }
- }
-}
-
-impl<M: Migrator> Deref for ThreadSafeConnection<M> {
+impl Deref for ThreadSafeConnection {
type Target = Connection;
fn deref(&self) -> &Self::Target {
@@ -301,7 +289,7 @@ mod test {
for _ in 0..100 {
handles.push(thread::spawn(|| {
let builder =
- ThreadSafeConnection::<TestDomain>::builder("annoying-test.db", false)
+ ThreadSafeConnection::builder::<TestDomain>("annoying-test.db", false)
.with_db_initialization_query("PRAGMA journal_mode=WAL")
.with_connection_initialize_query(indoc! {"
PRAGMA synchronous=NORMAL;
@@ -353,7 +341,7 @@ mod test {
}
let builder =
- ThreadSafeConnection::<TestWorkspace>::builder("wild_zed_lost_failure", false)
+ ThreadSafeConnection::builder::<TestWorkspace>("wild_zed_lost_failure", false)
.with_connection_initialize_query("PRAGMA FOREIGN_KEYS=true");
smol::block_on(builder.build()).unwrap();
@@ -459,30 +459,4 @@ impl TerminalDb {
WHERE item_id = ? AND workspace_id = ?
}
}
-
- pub async fn delete_unloaded_items(
- &self,
- workspace: WorkspaceId,
- alive_items: Vec<ItemId>,
- ) -> Result<()> {
- let placeholders = alive_items
- .iter()
- .map(|_| "?")
- .collect::<Vec<&str>>()
- .join(", ");
-
- let query = format!(
- "DELETE FROM terminals WHERE workspace_id = ? AND item_id NOT IN ({placeholders})"
- );
-
- self.write(move |conn| {
- let mut statement = Statement::prepare(conn, query)?;
- let mut next_index = statement.bind(&workspace, 1)?;
- for id in alive_items {
- next_index = statement.bind(&id, next_index)?;
- }
- statement.exec()
- })
- .await
- }
}
@@ -34,7 +34,7 @@ use ui::{
use util::{ResultExt, debug_panic, paths::PathWithPosition};
use workspace::{
CloseActiveItem, NewCenterTerminal, NewTerminal, OpenOptions, OpenVisible, ToolbarItemLocation,
- Workspace, WorkspaceId,
+ Workspace, WorkspaceId, delete_unloaded_items,
item::{
BreadcrumbText, Item, ItemEvent, SerializableItem, TabContentParams, TabTooltipContent,
},
@@ -1582,14 +1582,10 @@ impl SerializableItem for TerminalView {
fn cleanup(
workspace_id: WorkspaceId,
alive_items: Vec<workspace::ItemId>,
- window: &mut Window,
+ _window: &mut Window,
cx: &mut App,
) -> Task<gpui::Result<()>> {
- window.spawn(cx, async move |_| {
- TERMINAL_DB
- .delete_unloaded_items(workspace_id, alive_items)
- .await
- })
+ delete_unloaded_items(alive_items, workspace_id, "terminals", &TERMINAL_DB, cx)
}
fn serialize(
@@ -11,7 +11,7 @@ use std::{
use anyhow::{Context, Result, anyhow, bail};
use client::DevServerProjectId;
use db::{define_connection, query, sqlez::connection::Connection, sqlez_macros::sql};
-use gpui::{Axis, Bounds, WindowBounds, WindowId, point, size};
+use gpui::{Axis, Bounds, Task, WindowBounds, WindowId, point, size};
use itertools::Itertools;
use project::debugger::breakpoint_store::{BreakpointState, SourceBreakpoint};
@@ -21,16 +21,17 @@ use remote::ssh_session::SshProjectId;
use sqlez::{
bindable::{Bind, Column, StaticColumnCount},
statement::{SqlType, Statement},
+ thread_safe_connection::ThreadSafeConnection,
};
-use ui::px;
+use ui::{App, px};
use util::{ResultExt, maybe};
use uuid::Uuid;
use crate::WorkspaceId;
use model::{
- GroupId, LocalPaths, PaneId, SerializedItem, SerializedPane, SerializedPaneGroup,
+ GroupId, ItemId, LocalPaths, PaneId, SerializedItem, SerializedPane, SerializedPaneGroup,
SerializedSshProject, SerializedWorkspace,
};
@@ -1422,6 +1423,37 @@ impl WorkspaceDb {
}
}
+pub fn delete_unloaded_items(
+ alive_items: Vec<ItemId>,
+ workspace_id: WorkspaceId,
+ table: &'static str,
+ db: &ThreadSafeConnection,
+ cx: &mut App,
+) -> Task<Result<()>> {
+ let db = db.clone();
+ cx.spawn(async move |_| {
+ let placeholders = alive_items
+ .iter()
+ .map(|_| "?")
+ .collect::<Vec<&str>>()
+ .join(", ");
+
+ let query = format!(
+ "DELETE FROM {table} WHERE workspace_id = ? AND item_id NOT IN ({placeholders})"
+ );
+
+ db.write(move |conn| {
+ let mut statement = Statement::prepare(conn, query)?;
+ let mut next_index = statement.bind(&workspace_id, 1)?;
+ for id in alive_items {
+ next_index = statement.bind(&id, next_index)?;
+ }
+ statement.exec()
+ })
+ .await
+ })
+}
+
#[cfg(test)]
mod tests {
use std::thread;
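
As a rough illustration of the statement the new shared helper builds (table name and item ids below are only examples):

// With alive_items = [4, 7, 9] and table = "editors":
let alive_items = vec![4_u64, 7, 9];
let placeholders = alive_items.iter().map(|_| "?").collect::<Vec<&str>>().join(", ");
assert_eq!(placeholders, "?, ?, ?");
// Executed statement, with the workspace id bound at index 1 and each alive
// item id bound after it:
//     DELETE FROM editors WHERE workspace_id = ? AND item_id NOT IN (?, ?, ?)

Rows whose `item_id` is not in the alive set are removed for that workspace, which is what the per-table copies in the editor, image viewer, terminal, and component preview databases used to do.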
@@ -1430,14 +1462,13 @@ mod tests {
use super::*;
use crate::persistence::model::SerializedWorkspace;
use crate::persistence::model::{SerializedItem, SerializedPane, SerializedPaneGroup};
- use db::open_test_db;
use gpui;
#[gpui::test]
async fn test_breakpoints() {
env_logger::try_init().ok();
- let db = WorkspaceDb(open_test_db("test_breakpoints").await);
+ let db = WorkspaceDb::open_test_db("test_breakpoints").await;
let id = db.next_id().await.unwrap();
let path = Path::new("/tmp/test.rs");
@@ -1622,7 +1653,7 @@ mod tests {
async fn test_remove_last_breakpoint() {
env_logger::try_init().ok();
- let db = WorkspaceDb(open_test_db("test_remove_last_breakpoint").await);
+ let db = WorkspaceDb::open_test_db("test_remove_last_breakpoint").await;
let id = db.next_id().await.unwrap();
let singular_path = Path::new("/tmp/test_remove_last_breakpoint.rs");
@@ -1709,7 +1740,7 @@ mod tests {
async fn test_next_id_stability() {
env_logger::try_init().ok();
- let db = WorkspaceDb(open_test_db("test_next_id_stability").await);
+ let db = WorkspaceDb::open_test_db("test_next_id_stability").await;
db.write(|conn| {
conn.migrate(
@@ -1757,7 +1788,7 @@ mod tests {
async fn test_workspace_id_stability() {
env_logger::try_init().ok();
- let db = WorkspaceDb(open_test_db("test_workspace_id_stability").await);
+ let db = WorkspaceDb::open_test_db("test_workspace_id_stability").await;
db.write(|conn| {
conn.migrate(
@@ -1851,7 +1882,7 @@ mod tests {
async fn test_full_workspace_serialization() {
env_logger::try_init().ok();
- let db = WorkspaceDb(open_test_db("test_full_workspace_serialization").await);
+ let db = WorkspaceDb::open_test_db("test_full_workspace_serialization").await;
// -----------------
// | 1,2 | 5,6 |
@@ -1926,7 +1957,7 @@ mod tests {
async fn test_workspace_assignment() {
env_logger::try_init().ok();
- let db = WorkspaceDb(open_test_db("test_basic_functionality").await);
+ let db = WorkspaceDb::open_test_db("test_basic_functionality").await;
let workspace_1 = SerializedWorkspace {
id: WorkspaceId(1),
@@ -2022,7 +2053,7 @@ mod tests {
async fn test_session_workspaces() {
env_logger::try_init().ok();
- let db = WorkspaceDb(open_test_db("test_serializing_workspaces_session_id").await);
+ let db = WorkspaceDb::open_test_db("test_serializing_workspaces_session_id").await;
let workspace_1 = SerializedWorkspace {
id: WorkspaceId(1),
@@ -2175,7 +2206,7 @@ mod tests {
let dir4 = tempfile::TempDir::with_prefix("dir4").unwrap();
let db =
- WorkspaceDb(open_test_db("test_serializing_workspaces_last_session_workspaces").await);
+ WorkspaceDb::open_test_db("test_serializing_workspaces_last_session_workspaces").await;
let workspaces = [
(1, vec![dir1.path()], vec![0], 9),
@@ -2264,9 +2295,10 @@ mod tests {
#[gpui::test]
async fn test_last_session_workspace_locations_ssh_projects() {
- let db = WorkspaceDb(
- open_test_db("test_serializing_workspaces_last_session_workspaces_ssh_projects").await,
- );
+ let db = WorkspaceDb::open_test_db(
+ "test_serializing_workspaces_last_session_workspaces_ssh_projects",
+ )
+ .await;
let ssh_projects = [
("host-1", "my-user-1"),
@@ -2340,7 +2372,7 @@ mod tests {
#[gpui::test]
async fn test_get_or_create_ssh_project() {
- let db = WorkspaceDb(open_test_db("test_get_or_create_ssh_project").await);
+ let db = WorkspaceDb::open_test_db("test_get_or_create_ssh_project").await;
let (host, port, paths, user) = (
"example.com".to_string(),
@@ -2386,7 +2418,7 @@ mod tests {
#[gpui::test]
async fn test_get_or_create_ssh_project_with_null_user() {
- let db = WorkspaceDb(open_test_db("test_get_or_create_ssh_project_with_null_user").await);
+ let db = WorkspaceDb::open_test_db("test_get_or_create_ssh_project_with_null_user").await;
let (host, port, paths, user) = (
"example.com".to_string(),
@@ -2415,7 +2447,7 @@ mod tests {
#[gpui::test]
async fn test_get_ssh_projects() {
- let db = WorkspaceDb(open_test_db("test_get_ssh_projects").await);
+ let db = WorkspaceDb::open_test_db("test_get_ssh_projects").await;
let projects = vec![
(
@@ -2458,7 +2490,7 @@ mod tests {
async fn test_simple_split() {
env_logger::try_init().ok();
- let db = WorkspaceDb(open_test_db("simple_split").await);
+ let db = WorkspaceDb::open_test_db("simple_split").await;
// -----------------
// | 1,2 | 5,6 |
@@ -2513,7 +2545,7 @@ mod tests {
async fn test_cleanup_panes() {
env_logger::try_init().ok();
- let db = WorkspaceDb(open_test_db("test_cleanup_panes").await);
+ let db = WorkspaceDb::open_test_db("test_cleanup_panes").await;
let center_pane = group(
Axis::Horizontal,
@@ -62,7 +62,7 @@ use persistence::{
model::{SerializedSshProject, SerializedWorkspace},
};
pub use persistence::{
- DB as WORKSPACE_DB, WorkspaceDb,
+ DB as WORKSPACE_DB, WorkspaceDb, delete_unloaded_items,
model::{ItemId, LocalPaths, SerializedWorkspaceLocation},
};
use postage::stream::Stream;