fix compact cleanup and import merge behavior

Author: Amolith

Change summary

src/cmd/compact.rs     |  2 +
src/cmd/import.rs      |  9 ++---
src/db.rs              | 13 +++++++++
tests/cli_io.rs        | 62 ++++++++++++++++++++++++++++++++++++++++++++
tests/cli_query.rs     | 21 ++++++++++++++
tests/repro_compact.rs | 44 +++++++++++++++++++++++++++++++
6 files changed, 145 insertions(+), 6 deletions(-)

Detailed changes

src/cmd/compact.rs

@@ -8,6 +8,8 @@ pub fn run(root: &Path) -> Result<()> {
     let c = crate::color::stderr_theme();
     eprintln!("{}info:{} writing compacted snapshot...", c.blue, c.reset);
     let out = store.write_snapshot()?;
+    let removed = store.purge_deltas()?;
     eprintln!("{}info:{} wrote {}", c.blue, c.reset, out.display());
+    eprintln!("{}info:{} removed {removed} delta file(s)", c.blue, c.reset);
     Ok(())
 }

src/cmd/import.rs

@@ -1,5 +1,4 @@
 use anyhow::{anyhow, Result};
-use loro::LoroMap;
 use serde::Deserialize;
 use std::io::BufRead;
 use std::path::Path;
@@ -93,21 +92,21 @@ pub fn run(root: &Path, file: &str) -> Result<()> {
             task.insert("updated_at", t.updated_at.clone())?;
             task.insert("deleted_at", t.deleted_at.as_deref().unwrap_or(""))?;
 
-            let labels = task.insert_container("labels", LoroMap::new())?;
+            let labels = db::get_or_create_child_map(&task, "labels")?;
             for lbl in &t.labels {
                 labels.insert(lbl, true)?;
             }
-            let blockers = task.insert_container("blockers", LoroMap::new())?;
+            let blockers = db::get_or_create_child_map(&task, "blockers")?;
             for blk in &t.blockers {
                 let parsed =
                     db::TaskId::parse(blk).map_err(|_| anyhow!("invalid blocker id '{blk}'"))?;
                 blockers.insert(parsed.as_str(), true)?;
             }
-            let logs = task.insert_container("logs", LoroMap::new())?;
+            let logs = db::get_or_create_child_map(&task, "logs")?;
             for entry in &t.logs {
                 let log_id = db::TaskId::parse(&entry.id)
                     .map_err(|_| anyhow!("invalid log id '{}'", entry.id))?;
-                let record = logs.insert_container(log_id.as_str(), LoroMap::new())?;
+                let record = logs.get_or_create_container(log_id.as_str(), loro::LoroMap::new())?;
                 record.insert("timestamp", entry.timestamp.clone())?;
                 record.insert("message", entry.message.clone())?;
             }

src/db.rs

@@ -296,6 +296,19 @@ impl Store {
         Ok(out)
     }
 
+    /// Delete persisted delta files after a fresh snapshot has been written.
+    pub fn purge_deltas(&self) -> Result<usize> {
+        let project_dir = project_dir(&self.root, &self.project);
+        let paths = collect_delta_paths(&project_dir)?;
+        let mut removed = 0usize;
+        for path in paths {
+            fs::remove_file(&path)
+                .with_context(|| format!("failed removing delta '{}'", path.display()))?;
+            removed += 1;
+        }
+        Ok(removed)
+    }
+
     /// Apply a local mutation and persist only the resulting delta.
     pub fn apply_and_persist<F>(&self, mutator: F) -> Result<PathBuf>
     where

tests/cli_io.rs

@@ -163,3 +163,65 @@ fn export_import_preserves_effort() {
     let v2: serde_json::Value = serde_json::from_slice(&out2.stdout).unwrap();
     assert_eq!(v2[0]["effort"].as_str().unwrap(), "high");
 }
+
+#[test]
+fn import_merges_labels_and_logs_for_existing_task() {
+    let tmp = init_tmp();
+
+    let out = td(&tmp)
+        .args(["--json", "create", "Merge me", "-l", "local"])
+        .current_dir(&tmp)
+        .output()
+        .unwrap();
+    let created: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
+    let id = created["id"].as_str().unwrap().to_string();
+
+    td(&tmp)
+        .args(["log", &id, "local note"])
+        .current_dir(&tmp)
+        .assert()
+        .success();
+
+    let out = td(&tmp)
+        .args(["--json", "show", &id])
+        .current_dir(&tmp)
+        .output()
+        .unwrap();
+    let mut imported: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
+    imported["labels"] = serde_json::json!(["remote"]);
+    imported["logs"] = serde_json::json!([
+        {
+            "id": "01ARZ3NDEKTSV4RRFFQ69G5FAV",
+            "timestamp": "2026-03-01T00:00:00Z",
+            "message": "remote note"
+        }
+    ]);
+
+    let import_file = tmp.path().join("merge.jsonl");
+    std::fs::write(&import_file, format!("{}\n", imported)).unwrap();
+
+    td(&tmp)
+        .args(["import", import_file.to_str().unwrap()])
+        .current_dir(&tmp)
+        .assert()
+        .success();
+
+    let out = td(&tmp)
+        .args(["--json", "show", &id])
+        .current_dir(&tmp)
+        .output()
+        .unwrap();
+    let merged: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
+
+    let labels = merged["labels"].as_array().unwrap();
+    assert!(labels.contains(&serde_json::Value::String("local".into())));
+    assert!(labels.contains(&serde_json::Value::String("remote".into())));
+
+    let logs = merged["logs"].as_array().unwrap();
+    let messages: Vec<&str> = logs
+        .iter()
+        .filter_map(|entry| entry["message"].as_str())
+        .collect();
+    assert!(messages.contains(&"local note"));
+    assert!(messages.contains(&"remote note"));
+}

tests/cli_query.rs

@@ -159,11 +159,30 @@ fn stats_counts_tasks() {
 fn compact_succeeds() {
     let tmp = init_tmp();
     create_task(&tmp, "Anything");
+    create_task(&tmp, "Anything else");
+
+    let changes = tmp.path().join(".local/share/td/projects/main/changes");
+    let count_before = std::fs::read_dir(&changes)
+        .unwrap()
+        .filter_map(Result::ok)
+        .map(|entry| entry.path())
+        .filter(|path| path.extension().and_then(|ext| ext.to_str()) == Some("loro"))
+        .count();
+    assert!(count_before > 0);
 
     td(&tmp)
         .arg("compact")
         .current_dir(&tmp)
         .assert()
         .success()
-        .stderr(predicate::str::contains("writing compacted snapshot"));
+        .stderr(predicate::str::contains("writing compacted snapshot"))
+        .stderr(predicate::str::contains("removed"));
+
+    let count_after = std::fs::read_dir(&changes)
+        .unwrap()
+        .filter_map(Result::ok)
+        .map(|entry| entry.path())
+        .filter(|path| path.extension().and_then(|ext| ext.to_str()) == Some("loro"))
+        .count();
+    assert_eq!(count_after, 0);
 }

tests/repro_compact.rs

@@ -0,0 +1,44 @@
+use assert_cmd::Command;
+use tempfile::TempDir;
+
+fn td(home: &TempDir) -> Command {
+    let mut cmd = Command::cargo_bin("td").unwrap();
+    cmd.env("HOME", home.path());
+    cmd
+}
+
+#[test]
+fn compact_cleans_delta_files() {
+    let tmp = TempDir::new().unwrap();
+    td(&tmp)
+        .args(["init", "main"])
+        .current_dir(&tmp)
+        .assert()
+        .success();
+
+    // Generate some deltas
+    td(&tmp)
+        .args(["create", "Task 1"])
+        .current_dir(&tmp)
+        .assert()
+        .success();
+    td(&tmp)
+        .args(["create", "Task 2"])
+        .current_dir(&tmp)
+        .assert()
+        .success();
+
+    let project_dir = tmp.path().join(".local/share/td/projects/main");
+    let changes_dir = project_dir.join("changes");
+
+    // Check deltas exist
+    let deltas = std::fs::read_dir(&changes_dir).unwrap().count();
+    assert!(deltas > 0, "Deltas should exist before compaction");
+
+    // Compact
+    td(&tmp).arg("compact").current_dir(&tmp).assert().success();
+
+    // Deltas are folded into the snapshot and removed.
+    let deltas_after = std::fs::read_dir(&changes_dir).unwrap().count();
+    assert_eq!(deltas_after, 0, "Compaction should clean up delta files");
+}