Detailed changes
@@ -5,8 +5,11 @@ description: Manages tasks with the td CLI. Use when tracking work items, creati
```bash
# New work — title should stand on its own a year from now
+# -p priority: low, medium (default), high — shown numerically in JSON (high=1, medium=2, low=3)
+# -e effort: low, medium (default), high — shown numerically in JSON (low=1, medium=2, high=3)
+# -t type -d desc -l labels (csv)
td create "panic in token_refresh when OAuth provider returns HTTP 429" \
- -p 1 -t bug -d "$(cat <<'DESC'
+ -p high -e medium -t bug -d "$(cat <<'DESC'
Reproduction:
1. Point OAuth at a rate-limiting provider (or stub with httpbin/status/429)
2. Let the access token expire
@@ -23,7 +26,7 @@ DESC
)"
td create "Add STARTTLS for outbound SMTP per RFC 3207" \
- -t feature -d "$(cat <<'DESC'
+ -e high -t feature -d "$(cat <<'DESC'
smtp::send() opens a plaintext socket and never upgrades. Per RFC 3207,
send EHLO, check for STARTTLS capability, then upgrade before AUTH.
@@ -36,7 +39,7 @@ DESC
)"
td create "Flaky: test_concurrent_writes times out ~1/5 CI runs" \
- -p 3 -t bug -l ci,flaky -d "$(cat <<'DESC'
+ -p low -e low -t bug -l ci,flaky -d "$(cat <<'DESC'
Passes locally, times out on CI. Likely a race on the shared tempdir —
each spawn should use its own database file.
@@ -46,12 +49,12 @@ DESC
)"
td create "Child task" --parent td-a1b2c3 # ID becomes <parent>.N
-# -p priority: 1=high 2=medium 3=low -t type -d desc -l labels (csv)
# What's on the board?
td list # all tasks
td list -s open # by status: open, in_progress, closed
-td list -p 1 # high-priority only
+td list -p high # high-priority only
+td list -e low # low-effort tasks
td list -l frontend # by label
# Full context on a task
@@ -59,7 +62,7 @@ td show td-a1b2c3
# Task status or details changed
td update td-a1b2c3 -s in_progress
-td update td-a1b2c3 -p 1 -t "Revised title" -d "Added context"
+td update td-a1b2c3 -p high -e low -t "Revised title" -d "Added context"
# Finished or needs reopening
td done td-a1b2c3 td-d4e5f6 # one or many
@@ -123,3 +123,55 @@ fn create_subtask_under_parent() {
);
assert_eq!(child["parent"].as_str().unwrap(), parent_id);
}
+
+#[test]
+fn create_with_effort() {
+ let tmp = init_tmp();
+
+ let out = td()
+ .args(["--json", "create", "Hard task", "-e", "high"])
+ .current_dir(&tmp)
+ .output()
+ .unwrap();
+ let v: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
+ assert_eq!(v["effort"].as_i64().unwrap(), 3);
+}
+
+#[test]
+fn create_with_priority_label() {
+ let tmp = init_tmp();
+
+ let out = td()
+ .args(["--json", "create", "Low prio", "-p", "low"])
+ .current_dir(&tmp)
+ .output()
+ .unwrap();
+ let v: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
+ assert_eq!(v["priority"].as_i64().unwrap(), 3);
+}
+
+#[test]
+fn create_rejects_invalid_priority() {
+ let tmp = init_tmp();
+
+ td().args(["create", "Bad", "-p", "urgent"])
+ .current_dir(&tmp)
+ .assert()
+ .failure()
+ .stderr(predicates::prelude::predicate::str::contains(
+ "invalid priority",
+ ));
+}
+
+#[test]
+fn create_rejects_invalid_effort() {
+ let tmp = init_tmp();
+
+ td().args(["create", "Bad", "-e", "huge"])
+ .current_dir(&tmp)
+ .assert()
+ .failure()
+ .stderr(predicates::prelude::predicate::str::contains(
+ "invalid effort",
+ ));
+}
@@ -109,3 +109,40 @@ fn import_round_trips_with_export() {
let labels = bravo["labels"].as_array().unwrap();
assert!(labels.contains(&serde_json::Value::String("important".into())));
}
+
+#[test]
+fn export_import_preserves_effort() {
+ let tmp = init_tmp();
+
+ td().args(["create", "High effort", "-e", "high"])
+ .current_dir(&tmp)
+ .assert()
+ .success();
+
+ // Export.
+ let out = td().arg("export").current_dir(&tmp).output().unwrap();
+ let exported = String::from_utf8(out.stdout).unwrap();
+
+ // Verify effort is in the JSONL.
+ let v: serde_json::Value = serde_json::from_str(exported.trim()).unwrap();
+ assert_eq!(v["effort"].as_i64().unwrap(), 3);
+
+ // Round-trip into a fresh database.
+ let export_file = tmp.path().join("effort.jsonl");
+ std::fs::write(&export_file, &exported).unwrap();
+
+ let tmp2 = TempDir::new().unwrap();
+ td().arg("init").current_dir(&tmp2).assert().success();
+ td().args(["import", export_file.to_str().unwrap()])
+ .current_dir(&tmp2)
+ .assert()
+ .success();
+
+ let out2 = td()
+ .args(["--json", "list"])
+ .current_dir(&tmp2)
+ .output()
+ .unwrap();
+ let v2: serde_json::Value = serde_json::from_slice(&out2.stdout).unwrap();
+ assert_eq!(v2[0]["effort"].as_i64().unwrap(), 3);
+}
@@ -118,6 +118,30 @@ fn list_filter_by_label() {
assert_eq!(tasks[0]["title"].as_str().unwrap(), "Tagged");
}
+#[test]
+fn list_filter_by_effort() {
+ let tmp = init_tmp();
+
+ td().args(["create", "Easy", "-e", "low"])
+ .current_dir(&tmp)
+ .assert()
+ .success();
+ td().args(["create", "Hard", "-e", "high"])
+ .current_dir(&tmp)
+ .assert()
+ .success();
+
+ let out = td()
+ .args(["--json", "list", "-e", "low"])
+ .current_dir(&tmp)
+ .output()
+ .unwrap();
+ let v: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
+ let tasks = v.as_array().unwrap();
+ assert_eq!(tasks.len(), 1);
+ assert_eq!(tasks[0]["title"].as_str().unwrap(), "Easy");
+}
+
// ── show ─────────────────────────────────────────────────────────────
#[test]
@@ -0,0 +1,105 @@
+//! Integration tests for the migration system.
+
+use assert_cmd::Command;
+use tempfile::TempDir;
+
+fn td() -> Command {
+ Command::cargo_bin("td").unwrap()
+}
+
+fn init_tmp() -> TempDir {
+ let tmp = TempDir::new().unwrap();
+ td().arg("init").current_dir(&tmp).assert().success();
+ tmp
+}
+
+#[test]
+fn fresh_init_sets_latest_version() {
+ let tmp = init_tmp();
+ let conn = rusqlite::Connection::open(tmp.path().join(".td/tasks.db")).unwrap();
+ let version: u32 = conn
+ .pragma_query_value(None, "user_version", |row| row.get(0))
+ .unwrap();
+ // Version should be 2 (migration 0001 + 0002).
+ assert_eq!(version, 2);
+}
+
+#[test]
+fn legacy_db_is_migrated_on_open() {
+ let tmp = TempDir::new().unwrap();
+ let td_dir = tmp.path().join(".td");
+ std::fs::create_dir_all(&td_dir).unwrap();
+
+ // Create a v0 database with the old schema (no effort column).
+ let conn = rusqlite::Connection::open(td_dir.join("tasks.db")).unwrap();
+ conn.execute_batch(
+ "CREATE TABLE tasks (
+ id TEXT PRIMARY KEY,
+ title TEXT NOT NULL,
+ description TEXT DEFAULT '',
+ type TEXT DEFAULT 'task',
+ priority INTEGER DEFAULT 2,
+ status TEXT DEFAULT 'open',
+ parent TEXT DEFAULT '',
+ created TEXT NOT NULL,
+ updated TEXT NOT NULL
+ );
+ CREATE TABLE labels (
+ task_id TEXT, label TEXT,
+ PRIMARY KEY (task_id, label),
+ FOREIGN KEY (task_id) REFERENCES tasks(id)
+ );
+ CREATE TABLE blockers (
+ task_id TEXT, blocker_id TEXT,
+ PRIMARY KEY (task_id, blocker_id),
+ FOREIGN KEY (task_id) REFERENCES tasks(id)
+ );
+ INSERT INTO tasks (id, title, created, updated)
+ VALUES ('td-legacy', 'Old task', '2024-01-01T00:00:00Z', '2024-01-01T00:00:00Z');",
+ )
+ .unwrap();
+ drop(conn);
+
+ // Opening via td (list) should migrate and succeed.
+ td().args(["--json", "list"])
+ .current_dir(&tmp)
+ .assert()
+ .success();
+
+ // Verify the task survived migration and got default effort.
+ let out = td()
+ .args(["--json", "show", "td-legacy"])
+ .current_dir(&tmp)
+ .output()
+ .unwrap();
+ let v: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
+ assert_eq!(v["title"].as_str().unwrap(), "Old task");
+ assert_eq!(v["effort"].as_i64().unwrap(), 2); // default medium
+
+ // Verify version is now latest.
+ let conn = rusqlite::Connection::open(td_dir.join("tasks.db")).unwrap();
+ let version: u32 = conn
+ .pragma_query_value(None, "user_version", |row| row.get(0))
+ .unwrap();
+ assert_eq!(version, 2);
+}
+
+#[test]
+fn effort_column_exists_after_init() {
+ let tmp = init_tmp();
+ let conn = rusqlite::Connection::open(tmp.path().join(".td/tasks.db")).unwrap();
+
+ // Verify the effort column is present by inserting a row that sets it.
+ conn.execute(
+ "INSERT INTO tasks (id, title, effort, created, updated) VALUES ('td-test', 'Test', 3, '2024-01-01T00:00:00Z', '2024-01-01T00:00:00Z')",
+ [],
+ )
+ .unwrap();
+
+ let effort: i32 = conn
+ .query_row("SELECT effort FROM tasks WHERE id = 'td-test'", [], |r| {
+ r.get(0)
+ })
+ .unwrap();
+ assert_eq!(effort, 3);
+}
@@ -104,6 +104,20 @@ fn update_json_returns_task() {
assert_eq!(v["priority"].as_i64().unwrap(), 1);
}
+#[test]
+fn update_changes_effort() {
+ let tmp = init_tmp();
+ let id = create_task(&tmp, "Re-estimate");
+
+ td().args(["update", &id, "-e", "high"])
+ .current_dir(&tmp)
+ .assert()
+ .success();
+
+ let t = get_task_json(&tmp, &id);
+ assert_eq!(t["effort"].as_i64().unwrap(), 3);
+}
+
// ── done ─────────────────────────────────────────────────────────────
#[test]