From 140addc63d84953eb24c87a114031bd0e11e0b9d Mon Sep 17 00:00:00 2001 From: Amolith Date: Sun, 1 Mar 2026 17:11:41 -0700 Subject: [PATCH] fix compact cleanup and import merge behavior --- src/cmd/compact.rs | 2 ++ src/cmd/import.rs | 9 +++--- src/db.rs | 13 +++++++++ tests/cli_io.rs | 62 ++++++++++++++++++++++++++++++++++++++++++ tests/cli_query.rs | 21 +++++++++++++- tests/repro_compact.rs | 44 ++++++++++++++++++++++++++++++ 6 files changed, 145 insertions(+), 6 deletions(-) create mode 100644 tests/repro_compact.rs diff --git a/src/cmd/compact.rs b/src/cmd/compact.rs index df7e5f643f33af6a9a264f8d179d6e478ec31532..7131ab013b4ded3b3a0022a4ac539e17814d9744 100644 --- a/src/cmd/compact.rs +++ b/src/cmd/compact.rs @@ -8,6 +8,8 @@ pub fn run(root: &Path) -> Result<()> { let c = crate::color::stderr_theme(); eprintln!("{}info:{} writing compacted snapshot...", c.blue, c.reset); let out = store.write_snapshot()?; + let removed = store.purge_deltas()?; eprintln!("{}info:{} wrote {}", c.blue, c.reset, out.display()); + eprintln!("{}info:{} removed {removed} delta file(s)", c.blue, c.reset); Ok(()) } diff --git a/src/cmd/import.rs b/src/cmd/import.rs index b8ebcac638ec4aefbb396c8acbf0d1f843ee7231..c9aaaa54c21c0db90b66bfa7b77dd59d2a79a44e 100644 --- a/src/cmd/import.rs +++ b/src/cmd/import.rs @@ -1,5 +1,4 @@ use anyhow::{anyhow, Result}; -use loro::LoroMap; use serde::Deserialize; use std::io::BufRead; use std::path::Path; @@ -93,21 +92,21 @@ pub fn run(root: &Path, file: &str) -> Result<()> { task.insert("updated_at", t.updated_at.clone())?; task.insert("deleted_at", t.deleted_at.as_deref().unwrap_or(""))?; - let labels = task.insert_container("labels", LoroMap::new())?; + let labels = db::get_or_create_child_map(&task, "labels")?; for lbl in &t.labels { labels.insert(lbl, true)?; } - let blockers = task.insert_container("blockers", LoroMap::new())?; + let blockers = db::get_or_create_child_map(&task, "blockers")?; for blk in &t.blockers { let parsed = 
db::TaskId::parse(blk).map_err(|_| anyhow!("invalid blocker id '{blk}'"))?; blockers.insert(parsed.as_str(), true)?; } - let logs = task.insert_container("logs", LoroMap::new())?; + let logs = db::get_or_create_child_map(&task, "logs")?; for entry in &t.logs { let log_id = db::TaskId::parse(&entry.id) .map_err(|_| anyhow!("invalid log id '{}'", entry.id))?; - let record = logs.insert_container(log_id.as_str(), LoroMap::new())?; + let record = logs.get_or_create_container(log_id.as_str(), loro::LoroMap::new())?; record.insert("timestamp", entry.timestamp.clone())?; record.insert("message", entry.message.clone())?; } diff --git a/src/db.rs b/src/db.rs index 786aaceef627e445e34b7d218feeed2781ff81b2..03f05d3875a36756458f824bec023149162e8753 100644 --- a/src/db.rs +++ b/src/db.rs @@ -296,6 +296,19 @@ impl Store { Ok(out) } + /// Delete persisted delta files after a fresh snapshot has been written. + pub fn purge_deltas(&self) -> Result<usize> { + let project_dir = project_dir(&self.root, &self.project); + let paths = collect_delta_paths(&project_dir)?; + let mut removed = 0usize; + for path in paths { + fs::remove_file(&path) + .with_context(|| format!("failed removing delta '{}'", path.display()))?; + removed += 1; + } + Ok(removed) + } + /// Apply a local mutation and persist only the resulting delta. 
pub fn apply_and_persist(&self, mutator: F) -> Result where diff --git a/tests/cli_io.rs b/tests/cli_io.rs index 05cf8d43ae9b9b3e52f2dca50971199f8622b923..d27eb99220308aac55dff13d6e169beff7ff99a3 100644 --- a/tests/cli_io.rs +++ b/tests/cli_io.rs @@ -163,3 +163,65 @@ fn export_import_preserves_effort() { let v2: serde_json::Value = serde_json::from_slice(&out2.stdout).unwrap(); assert_eq!(v2[0]["effort"].as_str().unwrap(), "high"); } + +#[test] +fn import_merges_labels_and_logs_for_existing_task() { + let tmp = init_tmp(); + + let out = td(&tmp) + .args(["--json", "create", "Merge me", "-l", "local"]) + .current_dir(&tmp) + .output() + .unwrap(); + let created: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap(); + let id = created["id"].as_str().unwrap().to_string(); + + td(&tmp) + .args(["log", &id, "local note"]) + .current_dir(&tmp) + .assert() + .success(); + + let out = td(&tmp) + .args(["--json", "show", &id]) + .current_dir(&tmp) + .output() + .unwrap(); + let mut imported: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap(); + imported["labels"] = serde_json::json!(["remote"]); + imported["logs"] = serde_json::json!([ + { + "id": "01ARZ3NDEKTSV4RRFFQ69G5FAV", + "timestamp": "2026-03-01T00:00:00Z", + "message": "remote note" + } + ]); + + let import_file = tmp.path().join("merge.jsonl"); + std::fs::write(&import_file, format!("{}\n", imported)).unwrap(); + + td(&tmp) + .args(["import", import_file.to_str().unwrap()]) + .current_dir(&tmp) + .assert() + .success(); + + let out = td(&tmp) + .args(["--json", "show", &id]) + .current_dir(&tmp) + .output() + .unwrap(); + let merged: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap(); + + let labels = merged["labels"].as_array().unwrap(); + assert!(labels.contains(&serde_json::Value::String("local".into()))); + assert!(labels.contains(&serde_json::Value::String("remote".into()))); + + let logs = merged["logs"].as_array().unwrap(); + let messages: Vec<&str> = logs + .iter() + 
.filter_map(|entry| entry["message"].as_str()) + .collect(); + assert!(messages.contains(&"local note")); + assert!(messages.contains(&"remote note")); +} diff --git a/tests/cli_query.rs b/tests/cli_query.rs index 1f39d196d3ecc4764028c15f59f927a253a7e60d..a213940be83fa3e0c4537690ffc88bf9b863599d 100644 --- a/tests/cli_query.rs +++ b/tests/cli_query.rs @@ -159,11 +159,30 @@ fn stats_counts_tasks() { fn compact_succeeds() { let tmp = init_tmp(); create_task(&tmp, "Anything"); + create_task(&tmp, "Anything else"); + + let changes = tmp.path().join(".local/share/td/projects/main/changes"); + let count_before = std::fs::read_dir(&changes) + .unwrap() + .filter_map(Result::ok) + .map(|entry| entry.path()) + .filter(|path| path.extension().and_then(|ext| ext.to_str()) == Some("loro")) + .count(); + assert!(count_before > 0); td(&tmp) .arg("compact") .current_dir(&tmp) .assert() .success() - .stderr(predicate::str::contains("writing compacted snapshot")); + .stderr(predicate::str::contains("writing compacted snapshot")) + .stderr(predicate::str::contains("removed")); + + let count_after = std::fs::read_dir(&changes) + .unwrap() + .filter_map(Result::ok) + .map(|entry| entry.path()) + .filter(|path| path.extension().and_then(|ext| ext.to_str()) == Some("loro")) + .count(); + assert_eq!(count_after, 0); } diff --git a/tests/repro_compact.rs b/tests/repro_compact.rs new file mode 100644 index 0000000000000000000000000000000000000000..ad4eb3d179bfe87af9f1c64c3cce2578573f992d --- /dev/null +++ b/tests/repro_compact.rs @@ -0,0 +1,44 @@ +use assert_cmd::Command; +use tempfile::TempDir; + +fn td(home: &TempDir) -> Command { + let mut cmd = Command::cargo_bin("td").unwrap(); + cmd.env("HOME", home.path()); + cmd +} + +#[test] +fn compact_cleans_delta_files() { + let tmp = TempDir::new().unwrap(); + td(&tmp) + .args(["init", "main"]) + .current_dir(&tmp) + .assert() + .success(); + + // Generate some deltas + td(&tmp) + .args(["create", "Task 1"]) + .current_dir(&tmp) + .assert() + 
.success(); + td(&tmp) + .args(["create", "Task 2"]) + .current_dir(&tmp) + .assert() + .success(); + + let project_dir = tmp.path().join(".local/share/td/projects/main"); + let changes_dir = project_dir.join("changes"); + + // Check deltas exist + let deltas = std::fs::read_dir(&changes_dir).unwrap().count(); + assert!(deltas > 0, "Deltas should exist before compaction"); + + // Compact + td(&tmp).arg("compact").current_dir(&tmp).assert().success(); + + // Deltas are folded into the snapshot and removed. + let deltas_after = std::fs::read_dir(&changes_dir).unwrap().count(); + assert_eq!(deltas_after, 0, "Compaction should clean up delta files"); +}