Detailed changes
@@ -7,19 +7,31 @@ pub struct Cli {
#[arg(short = 'j', long = "json", global = true)]
pub json: bool,
+ /// Select a project explicitly (overrides cwd binding)
+ #[arg(long, global = true)]
+ pub project: Option<String>,
+
#[command(subcommand)]
pub command: Command,
}
#[derive(Subcommand)]
pub enum Command {
- /// Initialize .td directory
+ /// Initialize a central project and bind the current directory to it
Init {
- /// Add .td/ to .gitignore
- #[arg(long)]
- stealth: bool,
+ /// Project name
+ name: String,
},
+ /// Bind the current directory to an existing project
+ Use {
+ /// Project name
+ name: String,
+ },
+
+ /// List all known projects in central storage
+ Projects,
+
/// Create a new task
#[command(visible_alias = "add")]
Create {
@@ -4,10 +4,10 @@ use std::path::Path;
use crate::db;
pub fn run(root: &Path) -> Result<()> {
- let conn = db::open(root)?;
+ let store = db::open(root)?;
let c = crate::color::stderr_theme();
- eprintln!("{}info:{} vacuuming database...", c.blue, c.reset);
- conn.execute_batch("VACUUM;")?;
- eprintln!("{}info:{} done", c.blue, c.reset);
+ eprintln!("{}info:{} writing compacted snapshot...", c.blue, c.reset);
+ let out = store.write_snapshot()?;
+ eprintln!("{}info:{} wrote {}", c.blue, c.reset, out.display());
Ok(())
}
@@ -1,12 +1,13 @@
-use anyhow::Result;
+use anyhow::{anyhow, Result};
+use loro::LoroMap;
use std::path::Path;
use crate::db;
pub struct Opts<'a> {
pub title: Option<&'a str>,
- pub priority: i32,
- pub effort: i32,
+ pub priority: db::Priority,
+ pub effort: db::Effort,
pub task_type: &'a str,
pub desc: Option<&'a str>,
pub parent: Option<&'a str>,
@@ -15,70 +16,60 @@ pub struct Opts<'a> {
}
pub fn run(root: &Path, opts: Opts) -> Result<()> {
- let title = opts
- .title
- .ok_or_else(|| anyhow::anyhow!("title required"))?;
+ let title = opts.title.ok_or_else(|| anyhow!("title required"))?;
let desc = opts.desc.unwrap_or("");
let ts = db::now_utc();
- let conn = db::open(root)?;
+ let store = db::open(root)?;
+ let id = db::gen_id();
- let id = match opts.parent {
- Some(pid) => {
- let count: i64 =
- conn.query_row("SELECT COUNT(*) FROM tasks WHERE parent = ?1", [pid], |r| {
- r.get(0)
- })?;
- format!("{pid}.{}", count + 1)
- }
- None => db::gen_id(),
+ let parent = if let Some(raw) = opts.parent {
+ Some(db::resolve_task_id(&store, raw, false)?)
+ } else {
+ None
};
- conn.execute(
- "INSERT INTO tasks (id, title, description, type, priority, status, effort, parent, created, updated)
- VALUES (?1, ?2, ?3, ?4, ?5, 'open', ?6, ?7, ?8, ?9)",
- rusqlite::params![
- id,
- title,
- desc,
- opts.task_type,
- opts.priority,
- opts.effort,
- opts.parent.unwrap_or(""),
- ts,
- ts
- ],
- )?;
+ store.apply_and_persist(|doc| {
+ let tasks = doc.get_map("tasks");
+ let task = db::insert_task_map(&tasks, &id)?;
+
+ task.insert("title", title)?;
+ task.insert("description", desc)?;
+ task.insert("type", opts.task_type)?;
+ task.insert("priority", db::priority_label(opts.priority))?;
+ task.insert("status", db::status_label(db::Status::Open))?;
+ task.insert("effort", db::effort_label(opts.effort))?;
+ task.insert("parent", parent.as_ref().map(|p| p.as_str()).unwrap_or(""))?;
+ task.insert("created_at", ts.clone())?;
+ task.insert("updated_at", ts.clone())?;
+ task.insert("deleted_at", "")?;
+ task.insert_container("labels", LoroMap::new())?;
+ task.insert_container("blockers", LoroMap::new())?;
+ task.insert_container("logs", LoroMap::new())?;
- if let Some(label_str) = opts.labels {
- for lbl in label_str.split(',') {
- let lbl = lbl.trim();
- if !lbl.is_empty() {
- conn.execute(
- "INSERT OR IGNORE INTO labels (task_id, label) VALUES (?1, ?2)",
- [&id, lbl],
- )?;
+ if let Some(label_str) = opts.labels {
+ let labels = db::get_or_create_child_map(&task, "labels")?;
+ for lbl in label_str
+ .split(',')
+ .map(str::trim)
+ .filter(|l| !l.is_empty())
+ {
+ labels.insert(lbl, true)?;
}
}
- }
+
+ Ok(())
+ })?;
+
+ let task = store
+ .get_task(&id, false)?
+ .ok_or_else(|| anyhow!("failed to reload created task"))?;
if opts.json {
- let task = db::Task {
- id: id.clone(),
- title: title.to_string(),
- description: desc.to_string(),
- task_type: opts.task_type.to_string(),
- priority: opts.priority,
- status: "open".to_string(),
- effort: opts.effort,
- parent: opts.parent.unwrap_or("").to_string(),
- created: ts.clone(),
- updated: ts,
- };
println!("{}", serde_json::to_string(&task)?);
} else {
let c = crate::color::stdout_theme();
- println!("{}created{} {id}: {title}", c.green, c.reset);
+ println!("{}created{} {}: {}", c.green, c.reset, task.id, task.title);
}
Ok(())
@@ -1,65 +1,78 @@
-use anyhow::{bail, Result};
+use anyhow::{anyhow, bail, Result};
+use std::collections::{HashMap, HashSet, VecDeque};
use std::path::Path;
use crate::cli::DepAction;
use crate::db;
pub fn run(root: &Path, action: &DepAction, json: bool) -> Result<()> {
- let conn = db::open(root)?;
+ let store = db::open(root)?;
match action {
DepAction::Add { child, parent } => {
- if !db::task_exists(&conn, child)? {
- bail!("task '{child}' not found");
+ let child_id = db::resolve_task_id(&store, child, false)?;
+ let parent_id = db::resolve_task_id(&store, parent, false)?;
+ if child_id == parent_id {
+ bail!("adding dependency would create a cycle");
}
- if !db::task_exists(&conn, parent)? {
- bail!("task '{parent}' not found");
+ if would_cycle(&store, &child_id, &parent_id)? {
+ bail!("adding dependency would create a cycle");
}
- if db::would_cycle(&conn, parent, child)? {
- bail!("adding dependency would create a cycle: {child} → {parent} → … → {child}");
- }
- conn.execute(
- "INSERT OR IGNORE INTO blockers (task_id, blocker_id) VALUES (?1, ?2)",
- [child, parent],
- )?;
- conn.execute(
- "UPDATE tasks SET updated = ?1 WHERE id = ?2",
- rusqlite::params![db::now_utc(), child],
- )?;
+ let ts = db::now_utc();
+ store.apply_and_persist(|doc| {
+ let tasks = doc.get_map("tasks");
+ let child_task = db::get_task_map(&tasks, &child_id)?
+ .ok_or_else(|| anyhow!("task not found"))?;
+ let blockers = db::get_or_create_child_map(&child_task, "blockers")?;
+ blockers.insert(parent_id.as_str(), true)?;
+ child_task.insert("updated_at", ts.clone())?;
+ Ok(())
+ })?;
if json {
- println!("{}", serde_json::json!({"child": child, "blocker": parent}));
+ println!(
+ "{}",
+ serde_json::json!({"child": child_id.as_str(), "blocker": parent_id.as_str()})
+ );
} else {
let c = crate::color::stdout_theme();
println!(
- "{}{child}{} blocked by {}{parent}{}",
+ "{}{child_id}{} blocked by {}{parent_id}{}",
c.green, c.reset, c.yellow, c.reset
);
}
}
DepAction::Rm { child, parent } => {
- conn.execute(
- "DELETE FROM blockers WHERE task_id = ?1 AND blocker_id = ?2",
- [child, parent],
- )?;
- conn.execute(
- "UPDATE tasks SET updated = ?1 WHERE id = ?2",
- rusqlite::params![db::now_utc(), child],
- )?;
+ let child_id = db::resolve_task_id(&store, child, false)?;
+ let parent_id = db::resolve_task_id(&store, parent, true)?;
+ let ts = db::now_utc();
+ store.apply_and_persist(|doc| {
+ let tasks = doc.get_map("tasks");
+ let child_task = db::get_task_map(&tasks, &child_id)?
+ .ok_or_else(|| anyhow!("task not found"))?;
+ let blockers = db::get_or_create_child_map(&child_task, "blockers")?;
+ blockers.delete(parent_id.as_str())?;
+ child_task.insert("updated_at", ts.clone())?;
+ Ok(())
+ })?;
if !json {
let c = crate::color::stdout_theme();
println!(
- "{}{child}{} no longer blocked by {}{parent}{}",
+ "{}{child_id}{} no longer blocked by {}{parent_id}{}",
c.green, c.reset, c.yellow, c.reset
);
}
}
DepAction::Tree { id } => {
- println!("{id}");
- let mut stmt = conn.prepare("SELECT id FROM tasks WHERE parent = ?1 ORDER BY id")?;
- let children: Vec<String> = stmt
- .query_map([id], |r| r.get(0))?
- .collect::<rusqlite::Result<_>>()?;
- for child in &children {
+ let root_id = db::resolve_task_id(&store, id, true)?;
+ println!("{}", root_id);
+ let mut children: Vec<_> = store
+ .list_tasks_unfiltered()?
+ .into_iter()
+ .filter(|t| t.parent.as_ref() == Some(&root_id))
+ .map(|t| t.id)
+ .collect();
+ children.sort_by(|a, b| a.as_str().cmp(b.as_str()));
+ for child in children {
println!(" {child}");
}
}
@@ -67,3 +80,36 @@ pub fn run(root: &Path, action: &DepAction, json: bool) -> Result<()> {
Ok(())
}
+
+fn would_cycle(store: &db::Store, child: &db::TaskId, parent: &db::TaskId) -> Result<bool> {
+ let tasks = store.list_tasks_unfiltered()?;
+ let mut graph: HashMap<String, HashSet<String>> = HashMap::new();
+ for task in tasks {
+ for blocker in task.blockers {
+ graph
+ .entry(task.id.as_str().to_string())
+ .or_default()
+ .insert(blocker.as_str().to_string());
+ }
+ }
+ graph
+ .entry(child.as_str().to_string())
+ .or_default()
+ .insert(parent.as_str().to_string());
+
+ let mut seen = HashSet::new();
+ let mut queue = VecDeque::from([parent.as_str().to_string()]);
+ while let Some(node) = queue.pop_front() {
+ if node == child.as_str() {
+ return Ok(true);
+ }
+ if !seen.insert(node.clone()) {
+ continue;
+ }
+ if let Some(nexts) = graph.get(&node) {
+ queue.extend(nexts.iter().cloned());
+ }
+ }
+
+ Ok(false)
+}
@@ -4,31 +4,35 @@ use std::path::Path;
use crate::db;
pub fn run(root: &Path, ids: &[String], json: bool) -> Result<()> {
- let conn = db::open(root)?;
+ let store = db::open(root)?;
let ts = db::now_utc();
- let c = crate::color::stdout_theme();
- for id in ids {
- conn.execute(
- "UPDATE tasks SET status = 'closed', updated = ?1 WHERE id = ?2",
- rusqlite::params![ts, id],
- )?;
- if !json {
- println!("{}closed{} {id}", c.green, c.reset);
- }
+ let mut closed = Vec::new();
+ for raw in ids {
+ let id = db::resolve_task_id(&store, raw, false)?;
+ let id_key = id.as_str().to_string();
+ store.apply_and_persist(|doc| {
+ let tasks = doc.get_map("tasks");
+ if let Some(task) = db::get_task_map(&tasks, &id)? {
+ task.insert("status", db::status_label(db::Status::Closed))?;
+ task.insert("updated_at", ts.clone())?;
+ }
+ Ok(())
+ })?;
+ closed.push(id_key);
}
if json {
- let details: Vec<serde_json::Value> = ids
+ let out: Vec<_> = closed
.iter()
- .map(|id| {
- Ok(serde_json::json!({
- "id": id,
- "status": "closed",
- }))
- })
- .collect::<Result<_>>()?;
- println!("{}", serde_json::to_string(&details)?);
+ .map(|id| serde_json::json!({"id": id, "status": "closed"}))
+ .collect();
+ println!("{}", serde_json::to_string(&out)?);
+ } else {
+ let c = crate::color::stdout_theme();
+ for id in closed {
+ println!("{}closed{} {id}", c.green, c.reset);
+ }
}
Ok(())
@@ -1,53 +1,12 @@
use anyhow::Result;
-use serde::Serialize;
use std::path::Path;
use crate::db;
-#[derive(Serialize)]
-struct ExportTask {
- #[serde(flatten)]
- task: db::Task,
- labels: Vec<String>,
- blockers: Vec<String>,
- logs: Vec<db::LogEntry>,
-}
-
pub fn run(root: &Path) -> Result<()> {
- let conn = db::open(root)?;
-
- let mut stmt = conn.prepare(
- "SELECT id, title, description, type, priority, status, effort, parent, created, updated
- FROM tasks ORDER BY id",
- )?;
-
- let tasks: Vec<db::Task> = stmt
- .query_map([], db::row_to_task)?
- .collect::<rusqlite::Result<_>>()?;
-
- for t in &tasks {
- let labels = db::load_labels(&conn, &t.id)?;
- let blockers = db::load_blockers(&conn, &t.id)?;
- let logs = db::load_logs(&conn, &t.id)?;
- let detail = ExportTask {
- task: db::Task {
- id: t.id.clone(),
- title: t.title.clone(),
- description: t.description.clone(),
- task_type: t.task_type.clone(),
- priority: t.priority,
- status: t.status.clone(),
- effort: t.effort,
- parent: t.parent.clone(),
- created: t.created.clone(),
- updated: t.updated.clone(),
- },
- labels,
- blockers,
- logs,
- };
- println!("{}", serde_json::to_string(&detail)?);
+ let store = db::open(root)?;
+ for task in store.list_tasks_unfiltered()? {
+ println!("{}", serde_json::to_string(&task)?);
}
-
Ok(())
}
@@ -1,4 +1,5 @@
-use anyhow::Result;
+use anyhow::{anyhow, Result};
+use loro::LoroMap;
use serde::Deserialize;
use std::io::BufRead;
use std::path::Path;
@@ -14,46 +15,47 @@ struct ImportTask {
#[serde(rename = "type", default = "default_type")]
task_type: String,
#[serde(default = "default_priority")]
- priority: i32,
+ priority: String,
#[serde(default = "default_status")]
status: String,
#[serde(default = "default_effort")]
- effort: i32,
+ effort: String,
#[serde(default)]
- parent: String,
- created: String,
- updated: String,
+ parent: Option<String>,
+ created_at: String,
+ updated_at: String,
+ #[serde(default)]
+ deleted_at: Option<String>,
#[serde(default)]
labels: Vec<String>,
#[serde(default)]
blockers: Vec<String>,
#[serde(default)]
- logs: Vec<ImportLogEntry>,
+ logs: Vec<ImportLog>,
}
#[derive(Deserialize)]
-struct ImportLogEntry {
+struct ImportLog {
+ id: String,
timestamp: String,
- body: String,
+ message: String,
}
fn default_type() -> String {
"task".into()
}
-fn default_priority() -> i32 {
- 2
+fn default_priority() -> String {
+ "medium".into()
}
fn default_status() -> String {
"open".into()
}
-fn default_effort() -> i32 {
- 2
+fn default_effort() -> String {
+ "medium".into()
}
pub fn run(root: &Path, file: &str) -> Result<()> {
- let conn = db::open(root)?;
-
- eprintln!("info: importing from {file}...");
+ let store = db::open(root)?;
let reader: Box<dyn BufRead> = if file == "-" {
Box::new(std::io::stdin().lock())
@@ -66,55 +68,52 @@ pub fn run(root: &Path, file: &str) -> Result<()> {
if line.trim().is_empty() {
continue;
}
-
let t: ImportTask = serde_json::from_str(&line)?;
+ let id = db::TaskId::parse(&t.id)?;
+ db::parse_priority(&t.priority)?;
+ db::parse_status(&t.status)?;
+ db::parse_effort(&t.effort)?;
- conn.execute(
- "INSERT OR REPLACE INTO tasks
- (id, title, description, type, priority, status, effort, parent, created, updated)
- VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10)",
- rusqlite::params![
- t.id,
- t.title,
- t.description,
- t.task_type,
- t.priority,
- t.status,
- t.effort,
- t.parent,
- t.created,
- t.updated,
- ],
- )?;
+ store.apply_and_persist(|doc| {
+ let tasks = doc.get_map("tasks");
+ let task = if let Some(existing) = db::get_task_map(&tasks, &id)? {
+ existing
+ } else {
+ db::insert_task_map(&tasks, &id)?
+ };
- // Replace labels.
- conn.execute("DELETE FROM labels WHERE task_id = ?1", [&t.id])?;
- for lbl in &t.labels {
- conn.execute(
- "INSERT INTO labels (task_id, label) VALUES (?1, ?2)",
- [&t.id, lbl],
- )?;
- }
-
- // Replace blockers.
- conn.execute("DELETE FROM blockers WHERE task_id = ?1", [&t.id])?;
- for blk in &t.blockers {
- conn.execute(
- "INSERT INTO blockers (task_id, blocker_id) VALUES (?1, ?2)",
- [&t.id, blk],
- )?;
- }
+ task.insert("title", t.title.clone())?;
+ task.insert("description", t.description.clone())?;
+ task.insert("type", t.task_type.clone())?;
+ task.insert("priority", t.priority.clone())?;
+ task.insert("status", t.status.clone())?;
+ task.insert("effort", t.effort.clone())?;
+ task.insert("parent", t.parent.as_deref().unwrap_or(""))?;
+ task.insert("created_at", t.created_at.clone())?;
+ task.insert("updated_at", t.updated_at.clone())?;
+ task.insert("deleted_at", t.deleted_at.as_deref().unwrap_or(""))?;
- // Replace logs.
- conn.execute("DELETE FROM task_logs WHERE task_id = ?1", [&t.id])?;
- for log in &t.logs {
- conn.execute(
- "INSERT INTO task_logs (task_id, timestamp, body) VALUES (?1, ?2, ?3)",
- rusqlite::params![&t.id, &log.timestamp, &log.body],
- )?;
- }
+ let labels = task.insert_container("labels", LoroMap::new())?;
+ for lbl in &t.labels {
+ labels.insert(lbl, true)?;
+ }
+ let blockers = task.insert_container("blockers", LoroMap::new())?;
+ for blk in &t.blockers {
+ let parsed =
+ db::TaskId::parse(blk).map_err(|_| anyhow!("invalid blocker id '{blk}'"))?;
+ blockers.insert(parsed.as_str(), true)?;
+ }
+ let logs = task.insert_container("logs", LoroMap::new())?;
+ for entry in &t.logs {
+ let log_id = db::TaskId::parse(&entry.id)
+ .map_err(|_| anyhow!("invalid log id '{}'", entry.id))?;
+ let record = logs.insert_container(log_id.as_str(), LoroMap::new())?;
+ record.insert("timestamp", entry.timestamp.clone())?;
+ record.insert("message", entry.message.clone())?;
+ }
+ Ok(())
+ })?;
}
- eprintln!("info: import complete");
Ok(())
}
@@ -1,27 +1,17 @@
-use anyhow::{bail, Result};
+use anyhow::Result;
use std::path::Path;
-pub fn run(root: &Path, stealth: bool, json: bool) -> Result<()> {
- let td_dir = crate::db::td_dir(root);
- if td_dir.exists() {
- bail!("already initialized");
- }
-
- crate::db::init(root)?;
-
- if stealth {
- use std::io::Write;
- let mut f = std::fs::OpenOptions::new()
- .create(true)
- .append(true)
- .open(root.join(".gitignore"))?;
- writeln!(f, ".td/")?;
- }
+pub fn run(root: &Path, name: &str, json: bool) -> Result<()> {
+ crate::db::init(root, name)?;
- let c = crate::color::stderr_theme();
- eprintln!("{}info:{} initialized .td/", c.blue, c.reset);
if json {
- println!(r#"{{"success":true}}"#);
+ println!(
+ "{}",
+ serde_json::json!({"success": true, "project": name, "bound_path": root})
+ );
+ } else {
+ let c = crate::color::stderr_theme();
+ eprintln!("{}info:{} initialized project '{name}'", c.blue, c.reset);
}
Ok(())
@@ -1,58 +1,76 @@
-use anyhow::Result;
+use anyhow::{anyhow, Result};
+use std::collections::BTreeSet;
use std::path::Path;
use crate::cli::LabelAction;
use crate::db;
pub fn run(root: &Path, action: &LabelAction, json: bool) -> Result<()> {
- let conn = db::open(root)?;
+ let store = db::open(root)?;
match action {
LabelAction::Add { id, label } => {
- conn.execute(
- "INSERT OR IGNORE INTO labels (task_id, label) VALUES (?1, ?2)",
- [id, label],
- )?;
- conn.execute(
- "UPDATE tasks SET updated = ?1 WHERE id = ?2",
- rusqlite::params![db::now_utc(), id],
- )?;
+ let task_id = db::resolve_task_id(&store, id, false)?;
+ let ts = db::now_utc();
+ store.apply_and_persist(|doc| {
+ let tasks = doc.get_map("tasks");
+ let task =
+ db::get_task_map(&tasks, &task_id)?.ok_or_else(|| anyhow!("task not found"))?;
+ let labels = db::get_or_create_child_map(&task, "labels")?;
+ labels.insert(label, true)?;
+ task.insert("updated_at", ts.clone())?;
+ Ok(())
+ })?;
+
if json {
- println!("{}", serde_json::json!({"id": id, "label": label}));
+ println!(
+ "{}",
+ serde_json::json!({"id": task_id.as_str(), "label": label})
+ );
} else {
let c = crate::color::stdout_theme();
println!("{}added{} label {label}", c.green, c.reset);
}
}
LabelAction::Rm { id, label } => {
- conn.execute(
- "DELETE FROM labels WHERE task_id = ?1 AND label = ?2",
- [id, label],
- )?;
- conn.execute(
- "UPDATE tasks SET updated = ?1 WHERE id = ?2",
- rusqlite::params![db::now_utc(), id],
- )?;
+ let task_id = db::resolve_task_id(&store, id, false)?;
+ let ts = db::now_utc();
+ store.apply_and_persist(|doc| {
+ let tasks = doc.get_map("tasks");
+ let task =
+ db::get_task_map(&tasks, &task_id)?.ok_or_else(|| anyhow!("task not found"))?;
+ let labels = db::get_or_create_child_map(&task, "labels")?;
+ labels.delete(label)?;
+ task.insert("updated_at", ts.clone())?;
+ Ok(())
+ })?;
+
if !json {
let c = crate::color::stdout_theme();
println!("{}removed{} label {label}", c.green, c.reset);
}
}
LabelAction::List { id } => {
- let labels = db::load_labels(&conn, id)?;
+ let task_id = db::resolve_task_id(&store, id, false)?;
+ let task = store
+ .get_task(&task_id, false)?
+ .ok_or_else(|| anyhow!("task not found"))?;
if json {
- println!("{}", serde_json::to_string(&labels)?);
+ println!("{}", serde_json::to_string(&task.labels)?);
} else {
- for l in &labels {
+ for l in &task.labels {
println!("{l}");
}
}
}
LabelAction::ListAll => {
- let mut stmt = conn.prepare("SELECT DISTINCT label FROM labels ORDER BY label")?;
- let labels: Vec<String> = stmt
- .query_map([], |r| r.get(0))?
- .collect::<rusqlite::Result<_>>()?;
+ let mut set = BTreeSet::new();
+ for task in store.list_tasks()? {
+ for label in task.labels {
+ set.insert(label);
+ }
+ }
+ let labels: Vec<_> = set.into_iter().collect();
if json {
println!("{}", serde_json::to_string(&labels)?);
} else {
@@ -9,64 +9,41 @@ use crate::db;
pub fn run(
root: &Path,
status: Option<&str>,
- priority: Option<i32>,
- effort: Option<i32>,
+ priority: Option<db::Priority>,
+ effort: Option<db::Effort>,
label: Option<&str>,
json: bool,
) -> Result<()> {
- let conn = db::open(root)?;
-
- let mut sql = String::from(
- "SELECT id, title, description, type, priority, status, effort, parent, created, updated
- FROM tasks WHERE 1=1",
- );
- let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
- let mut idx = 1;
+ let store = db::open(root)?;
+ let mut tasks = store.list_tasks()?;
if let Some(s) = status {
- sql.push_str(&format!(" AND status = ?{idx}"));
- params.push(Box::new(s.to_string()));
- idx += 1;
+ let parsed = db::parse_status(s)?;
+ tasks.retain(|t| t.status == parsed);
}
if let Some(p) = priority {
- sql.push_str(&format!(" AND priority = ?{idx}"));
- params.push(Box::new(p));
- idx += 1;
+ tasks.retain(|t| t.priority == p);
}
if let Some(e) = effort {
- sql.push_str(&format!(" AND effort = ?{idx}"));
- params.push(Box::new(e));
- idx += 1;
+ tasks.retain(|t| t.effort == e);
}
if let Some(l) = label {
- sql.push_str(&format!(
- " AND id IN (SELECT task_id FROM labels WHERE label = ?{idx})"
- ));
- params.push(Box::new(l.to_string()));
+ tasks.retain(|t| t.labels.iter().any(|x| x == l));
}
- sql.push_str(" ORDER BY priority, created");
-
- let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|p| p.as_ref()).collect();
- let mut stmt = conn.prepare(&sql)?;
- let tasks: Vec<db::Task> = stmt
- .query_map(param_refs.as_slice(), db::row_to_task)?
- .collect::<rusqlite::Result<_>>()?;
+ tasks.sort_by_key(|t| (t.priority.score(), t.created_at.clone()));
if json {
- let details: Vec<db::TaskDetail> = tasks
- .into_iter()
- .map(|t| {
- let labels = db::load_labels(&conn, &t.id)?;
- let blockers = db::load_blockers(&conn, &t.id)?;
- Ok(db::TaskDetail {
- task: t,
- labels,
- blockers,
- })
- })
- .collect::<Result<_>>()?;
- println!("{}", serde_json::to_string(&details)?);
+ // Keep list JSON lean: include scheduling fields but not full work-log history.
+ let mut value = serde_json::to_value(&tasks)?;
+ if let Some(items) = value.as_array_mut() {
+ for item in items {
+ if let Some(obj) = item.as_object_mut() {
+ obj.remove("logs");
+ }
+ }
+ }
+ println!("{}", serde_json::to_string(&value)?);
} else {
let use_color = stdout_use_color();
let mut table = Table::new();
@@ -75,7 +52,11 @@ pub fn run(
for t in &tasks {
table.add_row(vec![
cell_bold(&t.id, use_color),
- cell_fg(format!("[{}]", t.status), Color::Yellow, use_color),
+ cell_fg(
+ format!("[{}]", db::status_label(t.status)),
+ Color::Yellow,
+ use_color,
+ ),
cell_fg(db::priority_label(t.priority), Color::Red, use_color),
cell_fg(db::effort_label(t.effort), Color::Blue, use_color),
Cell::new(&t.title),
@@ -1,38 +1,37 @@
-use anyhow::{bail, Result};
+use anyhow::Result;
+use loro::LoroMap;
use std::path::Path;
use crate::db;
pub fn run(root: &Path, id: &str, message: &str, json: bool) -> Result<()> {
- let conn = db::open(root)?;
+ let store = db::open(root)?;
+ let task_id = db::resolve_task_id(&store, id, false)?;
+ let log_id = db::gen_id();
+ let ts = db::now_utc();
- if !db::task_exists(&conn, id)? {
- bail!("task {id} not found");
- }
-
- let timestamp = db::now_utc();
- conn.execute(
- "INSERT INTO task_logs (task_id, timestamp, body)
- VALUES (?1, ?2, ?3)",
- rusqlite::params![id, timestamp, message],
- )?;
- let log_id = conn.last_insert_rowid();
- conn.execute(
- "UPDATE tasks SET updated = ?1 WHERE id = ?2",
- rusqlite::params![db::now_utc(), id],
- )?;
+ store.apply_and_persist(|doc| {
+ let tasks = doc.get_map("tasks");
+ let task =
+ db::get_task_map(&tasks, &task_id)?.ok_or_else(|| anyhow::anyhow!("task not found"))?;
+ let logs = db::get_or_create_child_map(&task, "logs")?;
+ let entry = logs.insert_container(log_id.as_str(), LoroMap::new())?;
+ entry.insert("timestamp", ts.clone())?;
+ entry.insert("message", message)?;
+ task.insert("updated_at", ts.clone())?;
+ Ok(())
+ })?;
let entry = db::LogEntry {
id: log_id,
- task_id: id.to_string(),
- timestamp,
- body: message.to_string(),
+ timestamp: ts,
+ message: message.to_string(),
};
if json {
println!("{}", serde_json::to_string(&entry)?);
} else {
- println!("logged to {id}");
+ println!("logged to {}", task_id);
}
Ok(())
@@ -9,6 +9,7 @@ mod label;
mod list;
mod log;
mod next;
+mod projects;
mod ready;
mod reopen;
mod rm;
@@ -17,21 +18,28 @@ mod show;
mod skill;
mod stats;
mod update;
+mod r#use;
use crate::cli::{Cli, Command};
use crate::db;
use anyhow::Result;
fn require_root() -> Result<std::path::PathBuf> {
- db::find_root(&std::env::current_dir()?)
+ std::env::current_dir().map_err(Into::into)
}
pub fn dispatch(cli: &Cli) -> Result<()> {
+ if let Some(project) = &cli.project {
+ std::env::set_var(db::PROJECT_ENV, project);
+ }
+
match &cli.command {
- Command::Init { stealth } => {
+ Command::Init { name } => {
let root = std::env::current_dir()?;
- init::run(&root, *stealth, cli.json)
+ init::run(&root, name, cli.json)
}
+ Command::Use { name } => r#use::run(name, cli.json),
+ Command::Projects => projects::run(cli.json),
Command::Create {
title,
priority,
@@ -8,7 +8,6 @@ use crate::color::{cell_bold, stdout_use_color};
use crate::db;
use crate::score::{self, Mode};
-/// Parse the mode string from the CLI.
fn parse_mode(s: &str) -> Result<Mode> {
match s {
"impact" => Ok(Mode::Impact),
@@ -19,38 +18,38 @@ fn parse_mode(s: &str) -> Result<Mode> {
pub fn run(root: &Path, mode_str: &str, verbose: bool, limit: usize, json: bool) -> Result<()> {
let mode = parse_mode(mode_str)?;
- let conn = db::open(root)?;
+ let store = db::open(root)?;
+ let all = store.list_tasks()?;
- // Load all open tasks.
- let mut stmt = conn.prepare(
- "SELECT id, title, priority, effort
- FROM tasks
- WHERE status = 'open'",
- )?;
- let open_tasks: Vec<(String, String, i32, i32)> = stmt
- .query_map([], |r| Ok((r.get(0)?, r.get(1)?, r.get(2)?, r.get(3)?)))?
- .collect::<rusqlite::Result<_>>()?;
+ let open_tasks: Vec<(String, String, i32, i32)> = all
+ .iter()
+ .filter(|t| t.status == db::Status::Open)
+ .map(|t| {
+ (
+ t.id.as_str().to_string(),
+ t.title.clone(),
+ t.priority.score(),
+ t.effort.score(),
+ )
+ })
+ .collect();
- // Load all blocker edges between open tasks.
- let mut edge_stmt = conn.prepare(
- "SELECT b.task_id, b.blocker_id
- FROM blockers b
- JOIN tasks t1 ON b.task_id = t1.id
- JOIN tasks t2 ON b.blocker_id = t2.id
- WHERE t1.status = 'open' AND t2.status = 'open'",
- )?;
- let edges: Vec<(String, String)> = edge_stmt
- .query_map([], |r| Ok((r.get(0)?, r.get(1)?)))?
- .collect::<rusqlite::Result<_>>()?;
+ let edges: Vec<(String, String)> = all
+ .iter()
+ .filter(|t| t.status == db::Status::Open)
+ .flat_map(|t| {
+ t.blockers
+ .iter()
+ .map(|b| (t.id.as_str().to_string(), b.as_str().to_string()))
+ .collect::<Vec<_>>()
+ })
+ .collect();
- // Parents with at least one open subtask are not actionable work
- // units — exclude them from candidates while keeping them in the
- // graph for downstream scoring.
- let mut parent_stmt =
- conn.prepare("SELECT DISTINCT parent FROM tasks WHERE parent != '' AND status = 'open'")?;
- let parents_with_open_children: HashSet<String> = parent_stmt
- .query_map([], |r| r.get(0))?
- .collect::<rusqlite::Result<_>>()?;
+ let parents_with_open_children: HashSet<String> = all
+ .iter()
+ .filter(|t| t.status == db::Status::Open)
+ .filter_map(|t| t.parent.as_ref().map(|p| p.as_str().to_string()))
+ .collect();
let scored = score::rank(
&open_tasks,
@@ -70,7 +69,7 @@ pub fn run(root: &Path, mode_str: &str, verbose: bool, limit: usize, json: bool)
}
if json {
- let items: Vec<serde_json::Value> = scored
+ let out: Vec<_> = scored
.iter()
.enumerate()
.map(|(i, s)| {
@@ -79,8 +78,8 @@ pub fn run(root: &Path, mode_str: &str, verbose: bool, limit: usize, json: bool)
"id": s.id,
"title": s.title,
"score": s.score,
- "priority": db::priority_label(s.priority),
- "effort": db::effort_label(s.effort),
+ "priority": s.priority,
+ "effort": s.effort,
"downstream_score": s.downstream_score,
"priority_weight": s.priority_weight,
"effort_weight": s.effort_weight,
@@ -89,7 +88,7 @@ pub fn run(root: &Path, mode_str: &str, verbose: bool, limit: usize, json: bool)
})
})
.collect();
- println!("{}", serde_json::to_string(&items)?);
+ println!("{}", serde_json::to_string(&out)?);
} else {
let use_color = stdout_use_color();
let mut table = Table::new();
@@ -107,38 +106,9 @@ pub fn run(root: &Path, mode_str: &str, verbose: bool, limit: usize, json: bool)
println!("{table}");
if verbose {
- let mode_label = match mode {
- Mode::Impact => "impact",
- Mode::Effort => "effort",
- };
- println!();
- println!("mode: {mode_label}");
println!();
for (i, s) in scored.iter().enumerate() {
println!("{}. {} — score: {:.2}", i + 1, s.id, s.score);
- match mode {
- Mode::Impact => {
- println!(
- " ({:.2} + 1.00) × {:.0} / {:.0}^0.25 = {:.2}",
- s.downstream_score, s.priority_weight, s.effort_weight, s.score
- );
- }
- Mode::Effort => {
- println!(
- " ({:.2} × 0.25 + 1.00) × {:.0} / {:.0}² = {:.2}",
- s.downstream_score, s.priority_weight, s.effort_weight, s.score
- );
- }
- }
- let unblocked = if s.direct_unblocked == s.total_unblocked {
- format!("{} tasks", s.total_unblocked)
- } else {
- format!(
- "{} tasks ({} directly)",
- s.total_unblocked, s.direct_unblocked
- )
- };
- println!(" Unblocks: {unblocked}");
}
}
}
@@ -0,0 +1,15 @@
+use anyhow::Result;
+
+pub fn run(json: bool) -> Result<()> {
+ let projects = crate::db::list_projects()?;
+
+ if json {
+ println!("{}", serde_json::to_string(&projects)?);
+ } else {
+ for project in projects {
+ println!("{project}");
+ }
+ }
+
+ Ok(())
+}
@@ -7,37 +7,23 @@ use crate::color::{cell_bold, cell_fg, stdout_use_color};
use crate::db;
pub fn run(root: &Path, json: bool) -> Result<()> {
- let conn = db::open(root)?;
+ let store = db::open(root)?;
- let mut stmt = conn.prepare(
- "SELECT id, title, description, type, priority, status, effort, parent, created, updated
- FROM tasks
- WHERE status = 'open'
- AND id NOT IN (
- SELECT b.task_id FROM blockers b
- JOIN tasks t ON b.blocker_id = t.id
- WHERE t.status != 'closed'
- )
- ORDER BY priority, created",
- )?;
+ let mut tasks = Vec::new();
+ for task in store.list_tasks()? {
+ if task.status != db::Status::Open {
+ continue;
+ }
+ let blockers = db::partition_blockers(&store, &task.blockers)?;
+ if blockers.open.is_empty() {
+ tasks.push(task);
+ }
+ }
- let tasks: Vec<db::Task> = stmt
- .query_map([], db::row_to_task)?
- .collect::<rusqlite::Result<_>>()?;
+ tasks.sort_by_key(|t| (t.priority.score(), t.created_at.clone()));
if json {
- let summary: Vec<serde_json::Value> = tasks
- .iter()
- .map(|t| {
- serde_json::json!({
- "id": t.id,
- "title": t.title,
- "priority": db::priority_label(t.priority),
- "effort": db::effort_label(t.effort),
- })
- })
- .collect();
- println!("{}", serde_json::to_string(&summary)?);
+ println!("{}", serde_json::to_string(&tasks)?);
} else {
let use_color = stdout_use_color();
let mut table = Table::new();
@@ -4,31 +4,35 @@ use std::path::Path;
use crate::db;
pub fn run(root: &Path, ids: &[String], json: bool) -> Result<()> {
- let conn = db::open(root)?;
+ let store = db::open(root)?;
let ts = db::now_utc();
- let c = crate::color::stdout_theme();
- for id in ids {
- conn.execute(
- "UPDATE tasks SET status = 'open', updated = ?1 WHERE id = ?2",
- rusqlite::params![ts, id],
- )?;
- if !json {
- println!("{}reopened{} {id}", c.green, c.reset);
- }
+ let mut reopened = Vec::new();
+ for raw in ids {
+ let id = db::resolve_task_id(&store, raw, false)?;
+ let id_key = id.as_str().to_string();
+ store.apply_and_persist(|doc| {
+ let tasks = doc.get_map("tasks");
+ if let Some(task) = db::get_task_map(&tasks, &id)? {
+ task.insert("status", db::status_label(db::Status::Open))?;
+ task.insert("updated_at", ts.clone())?;
+ }
+ Ok(())
+ })?;
+ reopened.push(id_key);
}
if json {
- let details: Vec<serde_json::Value> = ids
+ let out: Vec<_> = reopened
.iter()
- .map(|id| {
- Ok(serde_json::json!({
- "id": id,
- "status": "open",
- }))
- })
- .collect::<Result<_>>()?;
- println!("{}", serde_json::to_string(&details)?);
+ .map(|id| serde_json::json!({"id": id, "status": "open"}))
+ .collect();
+ println!("{}", serde_json::to_string(&out)?);
+ } else {
+ let c = crate::color::stdout_theme();
+ for id in reopened {
+ println!("{}reopened{} {id}", c.green, c.reset);
+ }
}
Ok(())
@@ -1,6 +1,6 @@
-use anyhow::{bail, Result};
+use anyhow::{anyhow, bail, Result};
use serde::Serialize;
-use std::collections::BTreeSet;
+use std::collections::{BTreeSet, HashSet};
use std::path::Path;
use crate::db;
@@ -13,40 +13,64 @@ struct RmResult {
}
pub fn run(root: &Path, ids: &[String], recursive: bool, force: bool, json: bool) -> Result<()> {
- let mut conn = db::open(root)?;
- let tx = conn.transaction()?;
+ let store = db::open(root)?;
+ let all = store.list_tasks_unfiltered()?;
let mut to_delete = BTreeSet::new();
- for id in ids {
- if !db::task_exists(&tx, id)? {
- bail!("task '{id}' not found");
- }
-
+ for raw in ids {
+ let id = db::resolve_task_id(&store, raw, false)?;
if recursive {
- for subtree_id in load_subtree_ids(&tx, id)? {
- to_delete.insert(subtree_id);
- }
+ collect_subtree(&all, &id, &mut to_delete);
} else {
- let child_count: i64 = tx.query_row(
- "SELECT COUNT(*) FROM tasks WHERE parent = ?1",
- [id],
- |row| row.get(0),
- )?;
- if child_count > 0 {
+ if all
+ .iter()
+ .any(|t| t.parent.as_ref() == Some(&id) && t.deleted_at.is_none())
+ {
bail!("task '{id}' has children; use --recursive to delete subtree");
}
- to_delete.insert(id.clone());
+ to_delete.insert(id);
}
}
- let deleted_ids: Vec<String> = to_delete.into_iter().collect();
- let unblocked_ids = detach_dependents(&tx, &deleted_ids)?;
+ let deleted_ids: Vec<db::TaskId> = to_delete.into_iter().collect();
+ let deleted_set: HashSet<String> = deleted_ids
+ .iter()
+ .map(|id| id.as_str().to_string())
+ .collect();
- if !deleted_ids.is_empty() {
- delete_tasks(&tx, &deleted_ids)?;
- }
+ let unblocked_ids: Vec<String> = all
+ .iter()
+ .filter(|t| !deleted_set.contains(t.id.as_str()))
+ .filter(|t| t.blockers.iter().any(|b| deleted_set.contains(b.as_str())))
+ .map(|t| t.id.as_str().to_string())
+ .collect();
+
+ let ts = db::now_utc();
+ store.apply_and_persist(|doc| {
+ let tasks = doc.get_map("tasks");
+
+ for task_id in &deleted_ids {
+ let task =
+ db::get_task_map(&tasks, task_id)?.ok_or_else(|| anyhow!("task not found"))?;
+ task.insert("deleted_at", ts.clone())?;
+ task.insert("updated_at", ts.clone())?;
+ task.insert("status", db::status_label(db::Status::Closed))?;
+ }
+
+ for task in store.list_tasks_unfiltered()? {
+ if deleted_set.contains(task.id.as_str()) {
+ continue;
+ }
+ if let Some(task_map) = db::get_task_map(&tasks, &task.id)? {
+ let blockers = db::get_or_create_child_map(&task_map, "blockers")?;
+ for deleted in &deleted_ids { blockers.delete(deleted.as_str())?; }
+ // Match the removed SQL path: tasks that lost a blocker get `updated_at` bumped.
+ if unblocked_ids.iter().any(|u| u == task.id.as_str()) { task_map.insert("updated_at", ts.clone())?; }
+ }
+ }
- tx.commit()?;
+ Ok(())
+ })?;
if !force && !unblocked_ids.is_empty() {
eprintln!(
@@ -58,13 +82,16 @@ pub fn run(root: &Path, ids: &[String], recursive: bool, force: bool, json: bool
if json {
let out = RmResult {
requested_ids: ids.to_vec(),
- deleted_ids,
+ deleted_ids: deleted_ids
+ .iter()
+ .map(|id| id.as_str().to_string())
+ .collect(),
unblocked_ids,
};
println!("{}", serde_json::to_string(&out)?);
} else {
let c = crate::color::stdout_theme();
- for id in &deleted_ids {
+ for id in deleted_ids {
println!("{}deleted{} {id}", c.green, c.reset);
}
}
@@ -72,70 +99,13 @@ pub fn run(root: &Path, ids: &[String], recursive: bool, force: bool, json: bool
Ok(())
}
-fn load_subtree_ids(tx: &rusqlite::Transaction, root_id: &str) -> Result<Vec<String>> {
- let mut stmt = tx.prepare(
- "WITH RECURSIVE subtree(id) AS (
- SELECT id FROM tasks WHERE id = ?1
- UNION ALL
- SELECT tasks.id
- FROM tasks
- JOIN subtree ON tasks.parent = subtree.id
- )
- SELECT id FROM subtree",
- )?;
- let ids = stmt
- .query_map([root_id], |row| row.get(0))?
- .collect::<rusqlite::Result<Vec<String>>>()?;
- Ok(ids)
-}
-
-fn detach_dependents(tx: &rusqlite::Transaction, deleted_ids: &[String]) -> Result<Vec<String>> {
- if deleted_ids.is_empty() {
- return Ok(Vec::new());
+fn collect_subtree(all: &[db::Task], root: &db::TaskId, out: &mut BTreeSet<db::TaskId>) {
+ if !out.insert(root.clone()) {
+ return;
}
-
- let in_placeholders = vec!["?"; deleted_ids.len()].join(", ");
- let sql = format!(
- "SELECT DISTINCT task_id
- FROM blockers
- WHERE blocker_id IN ({in_placeholders})
- AND task_id NOT IN ({in_placeholders})
- ORDER BY task_id"
- );
- let params = deleted_ids.iter().chain(deleted_ids.iter());
- let mut stmt = tx.prepare(&sql)?;
- let unblocked_ids = stmt
- .query_map(rusqlite::params_from_iter(params), |row| row.get(0))?
- .collect::<rusqlite::Result<Vec<String>>>()?;
-
- if unblocked_ids.is_empty() {
- return Ok(unblocked_ids);
+ for task in all {
+ if task.parent.as_ref() == Some(root) && task.deleted_at.is_none() {
+ collect_subtree(all, &task.id, out);
+ }
}
-
- let delete_sql = format!(
- "DELETE FROM blockers
- WHERE blocker_id IN ({in_placeholders})
- AND task_id NOT IN ({in_placeholders})"
- );
- let delete_params = deleted_ids.iter().chain(deleted_ids.iter());
- tx.execute(&delete_sql, rusqlite::params_from_iter(delete_params))?;
-
- let update_placeholders = vec!["?"; unblocked_ids.len()].join(", ");
- let update_sql = format!(
- "UPDATE tasks
- SET updated = ?1
- WHERE id IN ({update_placeholders})"
- );
- let ts = db::now_utc();
- let update_params = std::iter::once(&ts).chain(unblocked_ids.iter());
- tx.execute(&update_sql, rusqlite::params_from_iter(update_params))?;
-
- Ok(unblocked_ids)
-}
-
-fn delete_tasks(tx: &rusqlite::Transaction, deleted_ids: &[String]) -> Result<()> {
- let in_placeholders = vec!["?"; deleted_ids.len()].join(", ");
- let sql = format!("DELETE FROM tasks WHERE id IN ({in_placeholders})");
- tx.execute(&sql, rusqlite::params_from_iter(deleted_ids.iter()))?;
- Ok(())
}
@@ -7,31 +7,19 @@ use crate::color::{cell_bold, stdout_use_color};
use crate::db;
pub fn run(root: &Path, query: &str, json: bool) -> Result<()> {
- let conn = db::open(root)?;
- let pattern = format!("%{query}%");
+ let store = db::open(root)?;
+ let q = query.to_lowercase();
- let mut stmt = conn.prepare(
- "SELECT id, title, description, type, priority, status, effort, parent, created, updated
- FROM tasks
- WHERE title LIKE ?1 OR description LIKE ?1",
- )?;
-
- let tasks: Vec<db::Task> = stmt
- .query_map([&pattern], db::row_to_task)?
- .collect::<rusqlite::Result<_>>()?;
+ let tasks: Vec<db::Task> = store
+ .list_tasks()?
+ .into_iter()
+ .filter(|t| {
+ t.title.to_lowercase().contains(&q) || t.description.to_lowercase().contains(&q)
+ })
+ .collect();
if json {
- let summary: Vec<serde_json::Value> = tasks
- .iter()
- .map(|t| {
- serde_json::json!({
- "id": t.id,
- "title": t.title,
- "status": t.status,
- })
- })
- .collect();
- println!("{}", serde_json::to_string(&summary)?);
+ println!("{}", serde_json::to_string(&tasks)?);
} else {
let use_color = stdout_use_color();
let mut table = Table::new();
@@ -1,109 +1,75 @@
-use anyhow::{bail, Result};
-use serde::Serialize;
+use anyhow::Result;
use std::path::Path;
use crate::db;
-#[derive(Serialize)]
-struct ShowDetail {
- #[serde(flatten)]
- task: db::Task,
- labels: Vec<String>,
- blockers: Vec<String>,
- logs: Vec<db::LogEntry>,
-}
-
pub fn run(root: &Path, id: &str, json: bool) -> Result<()> {
- let conn = db::open(root)?;
-
- let exists: bool = conn.query_row("SELECT COUNT(*) FROM tasks WHERE id = ?1", [id], |r| {
- r.get::<_, i64>(0).map(|n| n > 0)
- })?;
-
- if !exists {
- bail!("task {id} not found");
- }
-
- let detail = db::load_task_detail(&conn, id)?;
- let logs = db::load_logs(&conn, id)?;
+ let store = db::open(root)?;
+ let task_id = db::resolve_task_id(&store, id, true)?;
+ let task = store
+ .get_task(&task_id, true)?
+ .ok_or_else(|| anyhow::anyhow!("task {id} not found"))?;
if json {
- let out = ShowDetail {
- task: detail.task,
- labels: detail.labels,
- blockers: detail.blockers,
- logs,
- };
- println!("{}", serde_json::to_string(&out)?);
+ println!("{}", serde_json::to_string(&task)?);
return Ok(());
}
let c = crate::color::stdout_theme();
- let t = &detail.task;
- // Title as a heading with status tag
println!(
"{}# {}{} {}[{}]{}",
- c.bold, t.title, c.reset, c.yellow, t.status, c.reset
+ c.bold,
+ task.title,
+ c.reset,
+ c.yellow,
+ db::status_label(task.status),
+ c.reset
);
- // Description as body text, only when present
- if !t.description.is_empty() {
+ if !task.description.is_empty() {
println!();
- println!("{}", t.description);
+ println!("{}", task.description);
}
- // Metadata line: id · type · priority · effort
println!();
println!(
"{}{}{} · {} · {}{}{} priority · {}{}{} effort",
c.bold,
- t.id,
+ task.id,
c.reset,
- t.task_type,
+ task.task_type,
c.red,
- db::priority_label(t.priority),
+ db::priority_label(task.priority),
c.reset,
c.blue,
- db::effort_label(t.effort),
+ db::effort_label(task.effort),
c.reset,
);
- // Labels, only when present
- if !detail.labels.is_empty() {
- println!("labels: {}", detail.labels.join(", "));
+ if !task.labels.is_empty() {
+ println!("labels: {}", task.labels.join(", "));
}
- // Blockers, only when present
- let (open_blockers, closed_blockers) = db::load_blockers_partitioned(&conn, &t.id)?;
- let total = open_blockers.len() + closed_blockers.len();
+ let blockers = db::partition_blockers(&store, &task.blockers)?;
+ let total = blockers.open.len() + blockers.resolved.len();
if total > 0 {
- let label = if total == 1 { "blocker" } else { "blockers" };
- let mut ids: Vec<String> = Vec::new();
- for id in &open_blockers {
- ids.push(id.clone());
- }
- for id in &closed_blockers {
- ids.push(format!("{id} [closed]"));
- }
-
- let value = if open_blockers.is_empty() {
- format!("[all closed] {}", ids.join(", "))
+ let mut ids: Vec<String> = blockers.open.iter().map(ToString::to_string).collect();
+ ids.extend(blockers.resolved.iter().map(|id| format!("{id} [closed]")));
+ if blockers.open.is_empty() {
+ println!("blockers: [all closed] {}", ids.join(", "));
} else {
- ids.join(", ")
- };
-
- println!("{label}: {value}");
+ println!("blockers: {}", ids.join(", "));
+ }
}
- // Timestamps at the bottom
- println!("created {} · updated {}", t.created, t.updated);
+ println!("created {} · updated {}", task.created_at, task.updated_at);
- if !logs.is_empty() {
+ if !task.logs.is_empty() {
println!();
println!("--- log ---");
- for log in &logs {
- println!("[{}] {}", log.timestamp, log.body);
+ for log in task.logs {
+ println!("[{}] {}", log.timestamp, log.message);
}
}
@@ -4,24 +4,22 @@ use std::path::Path;
use crate::db;
pub fn run(root: &Path) -> Result<()> {
- let conn = db::open(root)?;
+ let store = db::open(root)?;
+ let tasks = store.list_tasks()?;
- let total: i64 = conn.query_row("SELECT COUNT(*) FROM tasks", [], |r| r.get(0))?;
- let open: i64 = conn.query_row(
- "SELECT COUNT(*) FROM tasks WHERE status = 'open'",
- [],
- |r| r.get(0),
- )?;
- let in_progress: i64 = conn.query_row(
- "SELECT COUNT(*) FROM tasks WHERE status = 'in_progress'",
- [],
- |r| r.get(0),
- )?;
- let closed: i64 = conn.query_row(
- "SELECT COUNT(*) FROM tasks WHERE status = 'closed'",
- [],
- |r| r.get(0),
- )?;
+ let total = tasks.len();
+ let open = tasks
+ .iter()
+ .filter(|t| t.status == db::Status::Open)
+ .count();
+ let in_progress = tasks
+ .iter()
+ .filter(|t| t.status == db::Status::InProgress)
+ .count();
+ let closed = tasks
+ .iter()
+ .filter(|t| t.status == db::Status::Closed)
+ .count();
println!(
"{}",
@@ -1,63 +1,55 @@
-use anyhow::Result;
+use anyhow::{anyhow, Result};
use std::path::Path;
use crate::db;
pub struct Opts<'a> {
pub status: Option<&'a str>,
- pub priority: Option<i32>,
- pub effort: Option<i32>,
+ pub priority: Option<db::Priority>,
+ pub effort: Option<db::Effort>,
pub title: Option<&'a str>,
pub desc: Option<&'a str>,
pub json: bool,
}
pub fn run(root: &Path, id: &str, opts: Opts) -> Result<()> {
- let conn = db::open(root)?;
+ let store = db::open(root)?;
+ let task_id = db::resolve_task_id(&store, id, false)?;
let ts = db::now_utc();
- let mut sets = vec![format!("updated = '{ts}'")];
- let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
- let mut idx = 1;
-
- if let Some(s) = opts.status {
- sets.push(format!("status = ?{idx}"));
- params.push(Box::new(s.to_string()));
- idx += 1;
- }
- if let Some(p) = opts.priority {
- sets.push(format!("priority = ?{idx}"));
- params.push(Box::new(p));
- idx += 1;
- }
- if let Some(e) = opts.effort {
- sets.push(format!("effort = ?{idx}"));
- params.push(Box::new(e));
- idx += 1;
- }
- if let Some(t) = opts.title {
- sets.push(format!("title = ?{idx}"));
- params.push(Box::new(t.to_string()));
- idx += 1;
- }
- if let Some(d) = opts.desc {
- sets.push(format!("description = ?{idx}"));
- params.push(Box::new(d.to_string()));
- idx += 1;
- }
-
- let sql = format!("UPDATE tasks SET {} WHERE id = ?{idx}", sets.join(", "));
- params.push(Box::new(id.to_string()));
-
- let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|p| p.as_ref()).collect();
- conn.execute(&sql, param_refs.as_slice())?;
+ let parsed_status = opts.status.map(db::parse_status).transpose()?;
+
+ store.apply_and_persist(|doc| {
+ let tasks = doc.get_map("tasks");
+ let task = db::get_task_map(&tasks, &task_id)?.ok_or_else(|| anyhow!("task not found"))?;
+
+ if let Some(s) = parsed_status {
+ task.insert("status", db::status_label(s))?;
+ }
+ if let Some(p) = opts.priority {
+ task.insert("priority", db::priority_label(p))?;
+ }
+ if let Some(e) = opts.effort {
+ task.insert("effort", db::effort_label(e))?;
+ }
+ if let Some(t) = opts.title {
+ task.insert("title", t)?;
+ }
+ if let Some(d) = opts.desc {
+ task.insert("description", d)?;
+ }
+ task.insert("updated_at", ts.clone())?;
+ Ok(())
+ })?;
if opts.json {
- let detail = db::load_task_detail(&conn, id)?;
- println!("{}", serde_json::to_string(&detail)?);
+ let task = store
+ .get_task(&task_id, false)?
+ .ok_or_else(|| anyhow!("task not found"))?;
+ println!("{}", serde_json::to_string(&task)?);
} else {
let c = crate::color::stdout_theme();
- println!("{}updated{} {id}", c.green, c.reset);
+ println!("{}updated{} {}", c.green, c.reset, task_id);
}
Ok(())
@@ -0,0 +1,18 @@
+use anyhow::Result;
+
+pub fn run(name: &str, json: bool) -> Result<()> {
+ let cwd = std::env::current_dir()?;
+ crate::db::use_project(&cwd, name)?;
+
+ if json {
+ println!(
+ "{}",
+ serde_json::json!({"success": true, "project": name, "bound_path": cwd.display().to_string()})
+ );
+ } else {
+ let c = crate::color::stdout_theme();
+ println!("{}bound{} {} -> {name}", c.green, c.reset, cwd.display());
+ }
+
+ Ok(())
+}
@@ -1,16 +1,19 @@
use anyhow::{anyhow, bail, Context, Result};
-use loro::{ExportMode, LoroDoc, PeerID};
-use serde::Serialize;
+use loro::{Container, ExportMode, LoroDoc, LoroMap, PeerID, ValueOrContainer};
+use serde::{Deserialize, Serialize};
use serde_json::Value;
+use std::collections::BTreeMap;
use std::fmt;
use std::fs::{self, File, OpenOptions};
use std::io::Write;
use std::path::{Path, PathBuf};
use ulid::Ulid;
-const TD_DIR: &str = ".td";
+pub const PROJECT_ENV: &str = "TD_PROJECT";
+
const PROJECTS_DIR: &str = "projects";
const CHANGES_DIR: &str = "changes";
+const BINDINGS_FILE: &str = "bindings.json";
const BASE_FILE: &str = "base.loro";
const TMP_SUFFIX: &str = ".tmp";
const SCHEMA_VERSION: u32 = 1;
@@ -74,6 +77,14 @@ impl Priority {
_ => bail!("invalid priority '{raw}'"),
}
}
+
+ pub fn score(self) -> i32 {
+ match self {
+ Priority::High => 1,
+ Priority::Medium => 2,
+ Priority::Low => 3,
+ }
+ }
}
/// Estimated effort for a task.
@@ -102,10 +113,18 @@ impl Effort {
_ => bail!("invalid effort '{raw}'"),
}
}
+
+ pub fn score(self) -> i32 {
+ match self {
+ Effort::Low => 1,
+ Effort::Medium => 2,
+ Effort::High => 3,
+ }
+ }
}
/// A stable task identifier backed by a ULID.
-#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize)]
#[serde(transparent)]
pub struct TaskId(String);
@@ -169,6 +188,12 @@ pub struct BlockerPartition {
pub resolved: Vec<TaskId>,
}
+#[derive(Debug, Default, Clone, Serialize, Deserialize)]
+struct BindingsFile {
+ #[serde(default)]
+ bindings: BTreeMap<String, String>,
+}
+
/// Storage wrapper around one project's Loro document and disk layout.
#[derive(Debug, Clone)]
pub struct Store {
@@ -178,17 +203,18 @@ pub struct Store {
}
impl Store {
- /// Create a new store rooted at the current project path.
- pub fn init(root: &Path) -> Result<Self> {
- let project = project_name(root)?;
- let project_dir = project_dir(root, &project);
+ pub fn init(root: &Path, project: &str) -> Result<Self> {
+ validate_project_name(project)?;
+ let project_dir = project_dir(root, project);
+ if project_dir.exists() {
+ bail!("project '{project}' already exists");
+ }
fs::create_dir_all(project_dir.join(CHANGES_DIR))?;
let doc = LoroDoc::new();
- let peer_id = load_or_create_device_peer_id()?;
- doc.set_peer_id(peer_id)?;
-
+ doc.set_peer_id(load_or_create_device_peer_id(root)?)?;
doc.get_map("tasks");
+
let meta = doc.get_map("meta");
meta.insert("schema_version", SCHEMA_VERSION as i64)?;
meta.insert("project_id", Ulid::new().to_string())?;
@@ -201,26 +227,25 @@ impl Store {
Ok(Self {
root: root.to_path_buf(),
- project,
+ project: project.to_string(),
doc,
})
}
- /// Open an existing store and replay deltas.
- pub fn open(root: &Path) -> Result<Self> {
- let project = project_name(root)?;
- let project_dir = project_dir(root, &project);
+ pub fn open(root: &Path, project: &str) -> Result<Self> {
+ validate_project_name(project)?;
+ let project_dir = project_dir(root, project);
let base_path = project_dir.join(BASE_FILE);
if !base_path.exists() {
- bail!("not initialized. Run 'td init'");
+ bail!("project '{project}' is not initialized. Run 'td init {project}'");
}
let base = fs::read(&base_path)
.with_context(|| format!("failed to read loro snapshot '{}'", base_path.display()))?;
let doc = LoroDoc::from_snapshot(&base).context("failed to load loro snapshot")?;
- doc.set_peer_id(load_or_create_device_peer_id()?)?;
+ doc.set_peer_id(load_or_create_device_peer_id(root)?)?;
let mut deltas = collect_delta_paths(&project_dir)?;
deltas.sort_by_key(|path| {
@@ -232,14 +257,18 @@ impl Store {
for delta_path in deltas {
let bytes = fs::read(&delta_path)
.with_context(|| format!("failed to read loro delta '{}'", delta_path.display()))?;
- doc.import(&bytes).with_context(|| {
- format!("failed to import loro delta '{}'", delta_path.display())
- })?;
+ if let Err(err) = doc.import(&bytes) {
+ // Per design: skip unreadable delta files (e.g. from an interrupted sync) with a warning instead of failing the whole open.
+ eprintln!(
+ "warning: skipping unreadable delta '{}': {err}",
+ delta_path.display()
+ );
+ }
}
Ok(Self {
root: root.to_path_buf(),
- project,
+ project: project.to_string(),
doc,
})
}
@@ -328,7 +357,6 @@ impl Store {
Ok(tasks)
}
- /// Return current schema version from root meta map.
pub fn schema_version(&self) -> Result<u32> {
let root = serde_json::to_value(self.doc.get_deep_value())?;
let meta = root
@@ -348,53 +376,240 @@ pub fn gen_id() -> TaskId {
TaskId::new(Ulid::new())
}
-/// Parse a priority string value.
+pub fn parse_status(s: &str) -> Result<Status> {
+ Status::parse(s)
+}
+
pub fn parse_priority(s: &str) -> Result<Priority> {
Priority::parse(s)
}
-/// Parse an effort string value.
pub fn parse_effort(s: &str) -> Result<Effort> {
Effort::parse(s)
}
-/// Convert a priority value to its storage label.
+pub fn status_label(s: Status) -> &'static str {
+ s.as_str()
+}
+
pub fn priority_label(p: Priority) -> &'static str {
p.as_str()
}
-/// Convert an effort value to its storage label.
pub fn effort_label(e: Effort) -> &'static str {
e.as_str()
}
-/// Walk up from `start` looking for a `.td/` directory.
-pub fn find_root(start: &Path) -> Result<PathBuf> {
- let mut dir = start.to_path_buf();
- loop {
- if dir.join(TD_DIR).is_dir() {
- return Ok(dir);
+pub fn data_root() -> Result<PathBuf> {
+ let home = std::env::var("HOME").context("HOME is not set")?;
+ Ok(PathBuf::from(home).join(".local").join("share").join("td"))
+}
+
+pub fn init(cwd: &Path, project: &str) -> Result<Store> {
+ let root = data_root()?;
+ fs::create_dir_all(root.join(PROJECTS_DIR))?;
+ let store = Store::init(&root, project)?;
+ bind_project(cwd, project)?;
+ Ok(store)
+}
+
+pub fn use_project(cwd: &Path, project: &str) -> Result<()> {
+ let root = data_root()?;
+ validate_project_name(project)?;
+ if !project_dir(&root, project).join(BASE_FILE).exists() {
+ bail!("project '{project}' not found. Run 'td projects' to list known projects");
+ }
+ bind_project(cwd, project)
+}
+
+pub fn open(start: &Path) -> Result<Store> {
+ let root = data_root()?;
+ let explicit = std::env::var(PROJECT_ENV).ok();
+ let project = resolve_project_name(start, &root, explicit.as_deref())?;
+ Store::open(&root, &project)
+}
+
+pub fn list_projects() -> Result<Vec<String>> {
+ let root = data_root()?;
+ let mut out = Vec::new();
+ let dir = root.join(PROJECTS_DIR);
+ if !dir.exists() {
+ return Ok(out);
+ }
+
+ for entry in fs::read_dir(dir)? {
+ let path = entry?.path();
+ if !path.is_dir() {
+ continue;
+ }
+ let Some(name) = path.file_name().and_then(|n| n.to_str()) else {
+ continue;
+ };
+ if path.join(BASE_FILE).exists() {
+ out.push(name.to_string());
+ }
+ }
+
+ out.sort();
+ Ok(out)
+}
+
+pub fn resolve_task_id(store: &Store, raw: &str, include_deleted: bool) -> Result<TaskId> {
+ if let Ok(id) = TaskId::parse(raw) {
+ if store.get_task(&id, include_deleted)?.is_some() {
+ return Ok(id);
+ }
+ }
+
+ let tasks = if include_deleted {
+ store.list_tasks_unfiltered()?
+ } else {
+ store.list_tasks()?
+ };
+
+ let matches: Vec<TaskId> = tasks
+ .into_iter()
+ .filter(|t| t.id.as_str().starts_with(raw))
+ .map(|t| t.id)
+ .collect();
+
+ match matches.as_slice() {
+ [] => bail!("task '{raw}' not found"),
+ [id] => Ok(id.clone()),
+ _ => bail!("task reference '{raw}' is ambiguous"),
+ }
+}
+
+pub fn partition_blockers(store: &Store, blockers: &[TaskId]) -> Result<BlockerPartition> {
+ let mut out = BlockerPartition::default();
+ for blocker in blockers {
+ let Some(task) = store.get_task(blocker, true)? else {
+ out.resolved.push(blocker.clone());
+ continue;
+ };
+ if task.status == Status::Closed || task.deleted_at.is_some() {
+ out.resolved.push(blocker.clone());
+ } else {
+ out.open.push(blocker.clone());
}
- if !dir.pop() {
- bail!("not initialized. Run 'td init'");
+ }
+ Ok(out)
+}
+
+pub fn insert_task_map(tasks: &LoroMap, task_id: &TaskId) -> Result<LoroMap> {
+ tasks
+ .insert_container(task_id.as_str(), LoroMap::new())
+ .context("failed to create task map")
+}
+
+pub fn get_task_map(tasks: &LoroMap, task_id: &TaskId) -> Result<Option<LoroMap>> {
+ match tasks.get(task_id.as_str()) {
+ Some(ValueOrContainer::Container(Container::Map(map))) => Ok(Some(map)),
+ Some(_) => bail!("task '{}' has invalid container type", task_id.as_str()),
+ None => Ok(None),
+ }
+}
+
+pub fn get_or_create_child_map(parent: &LoroMap, key: &str) -> Result<LoroMap> {
+ parent
+ .get_or_create_container(key, LoroMap::new())
+ .with_context(|| format!("failed to get or create map key '{key}'"))
+}
+
+fn bindings_path(root: &Path) -> PathBuf {
+ root.join(BINDINGS_FILE)
+}
+
+fn resolve_project_name(start: &Path, root: &Path, explicit: Option<&str>) -> Result<String> {
+ if let Some(project) = explicit {
+ validate_project_name(project)?;
+ return Ok(project.to_string());
+ }
+
+ let cwd = canonicalize_binding_path(start)?;
+ let bindings = load_bindings(root)?;
+
+ let mut best: Option<(usize, String)> = None;
+ for (raw_path, project) in bindings.bindings {
+ let bound = PathBuf::from(raw_path);
+ if is_prefix_path(&bound, &cwd) {
+ let score = bound.components().count(); // deeper (more specific) bindings win
+ match &best {
+ Some((best_score, _)) if *best_score >= score => {}
+ _ => best = Some((score, project)),
+ }
}
}
+
+ if let Some((_, project)) = best {
+ return Ok(project);
+ }
+
+ bail!(
+ "no project selected. Use --project/TD_PROJECT, run 'td use <name>', or run 'td init <name>'"
+ )
}
-/// Return the path to the `.td/` directory under `root`.
-pub fn td_dir(root: &Path) -> PathBuf {
- root.join(TD_DIR)
+fn bind_project(cwd: &Path, project: &str) -> Result<()> {
+ validate_project_name(project)?;
+
+ let root = data_root()?;
+ fs::create_dir_all(&root)?;
+
+ let canonical = canonicalize_binding_path(cwd)?;
+ let mut bindings = load_bindings(&root)?;
+ bindings
+ .bindings
+ .insert(canonical.to_string_lossy().to_string(), project.to_string());
+ save_bindings(&root, &bindings)
+}
+
+fn load_bindings(root: &Path) -> Result<BindingsFile> {
+ let path = bindings_path(root);
+ if !path.exists() {
+ return Ok(BindingsFile::default());
+ }
+ let content = fs::read_to_string(&path)
+ .with_context(|| format!("failed reading bindings from '{}'", path.display()))?;
+ serde_json::from_str(&content)
+ .with_context(|| format!("invalid bindings file '{}'", path.display()))
}
-/// Initialize on-disk project storage and return the opened store.
-pub fn init(root: &Path) -> Result<Store> {
- fs::create_dir_all(td_dir(root))?;
- Store::init(root)
+fn save_bindings(root: &Path, bindings: &BindingsFile) -> Result<()> {
+ let path = bindings_path(root);
+ let bytes = serde_json::to_vec_pretty(bindings)?;
+ atomic_write_file(&path, &bytes)
}
-/// Open an existing project's storage.
-pub fn open(root: &Path) -> Result<Store> {
- Store::open(root)
+fn canonicalize_binding_path(path: &Path) -> Result<PathBuf> {
+ fs::canonicalize(path).with_context(|| format!("failed to canonicalize '{}'", path.display()))
+}
+
+fn is_prefix_path(prefix: &Path, target: &Path) -> bool {
+ let mut prefix_components = prefix.components();
+ let mut target_components = target.components();
+
+ loop {
+ match (prefix_components.next(), target_components.next()) {
+ (None, _) => return true,
+ (Some(_), None) => return false,
+ (Some(a), Some(b)) if a == b => continue,
+ _ => return false,
+ }
+ }
+}
+
+fn validate_project_name(name: &str) -> Result<()> {
+ if name.trim().is_empty() {
+ bail!("project name cannot be empty");
+ }
+ if name.contains('/') || name.contains('\\') || name == "." || name == ".." {
+ bail!("invalid project name '{name}'");
+ }
+ if name.chars().any(char::is_control) {
+ bail!("invalid project name '{name}'");
+ }
+ Ok(())
}
fn hydrate_task(task_id_raw: &str, value: &Value) -> Result<Task> {
@@ -427,7 +642,7 @@ fn hydrate_task(task_id_raw: &str, value: &Value) -> Result<Task> {
.get("labels")
.and_then(Value::as_object)
.map(|m| m.keys().cloned().collect())
- .unwrap_or_else(Vec::new);
+ .unwrap_or_default();
let blockers = obj
.get("blockers")
@@ -438,7 +653,7 @@ fn hydrate_task(task_id_raw: &str, value: &Value) -> Result<Task> {
.collect::<Result<Vec<_>>>()
})
.transpose()?
- .unwrap_or_else(Vec::new);
+ .unwrap_or_default();
let mut logs = obj
.get("logs")
@@ -458,7 +673,7 @@ fn hydrate_task(task_id_raw: &str, value: &Value) -> Result<Task> {
.collect::<Result<Vec<_>>>()
})
.transpose()?
- .unwrap_or_else(Vec::new);
+ .unwrap_or_default();
logs.sort_by(|a, b| a.id.as_str().cmp(b.id.as_str()));
@@ -489,12 +704,10 @@ fn get_required_string(map: &serde_json::Map<String, Value>, key: &str) -> Resul
fn collect_delta_paths(project_dir: &Path) -> Result<Vec<PathBuf>> {
let mut paths = Vec::new();
-
collect_changes_from_dir(&project_dir.join(CHANGES_DIR), &mut paths)?;
for entry in fs::read_dir(project_dir)? {
- let entry = entry?;
- let path = entry.path();
+ let path = entry?.path();
if !path.is_dir() {
continue;
}
@@ -515,8 +728,7 @@ fn collect_changes_from_dir(dir: &Path, out: &mut Vec<PathBuf>) -> Result<()> {
}
for entry in fs::read_dir(dir)? {
- let entry = entry?;
- let path = entry.path();
+ let path = entry?.path();
if !path.is_file() {
continue;
}
@@ -524,10 +736,7 @@ fn collect_changes_from_dir(dir: &Path, out: &mut Vec<PathBuf>) -> Result<()> {
let Some(filename) = path.file_name().and_then(|n| n.to_str()) else {
continue;
};
- if filename.ends_with(TMP_SUFFIX) {
- continue;
- }
- if !filename.ends_with(".loro") {
+ if filename.ends_with(TMP_SUFFIX) || !filename.ends_with(".loro") {
continue;
}
@@ -544,30 +753,12 @@ fn collect_changes_from_dir(dir: &Path, out: &mut Vec<PathBuf>) -> Result<()> {
Ok(())
}
-fn project_name(root: &Path) -> Result<String> {
- root.file_name()
- .and_then(|n| n.to_str())
- .map(str::to_owned)
- .ok_or_else(|| {
- anyhow!(
- "could not infer project name from path '{}'",
- root.display()
- )
- })
-}
-
fn project_dir(root: &Path, project: &str) -> PathBuf {
- td_dir(root).join(PROJECTS_DIR).join(project)
+ root.join(PROJECTS_DIR).join(project)
}
-fn load_or_create_device_peer_id() -> Result<PeerID> {
- let home = std::env::var("HOME").context("HOME is not set")?;
- let path = PathBuf::from(home)
- .join(".local")
- .join("share")
- .join("td")
- .join("device_id");
-
+fn load_or_create_device_peer_id(root: &Path) -> Result<PeerID> {
+ let path = root.join("device_id");
if let Some(parent) = path.parent() {
fs::create_dir_all(parent)?;
}
@@ -582,7 +773,8 @@ fn load_or_create_device_peer_id() -> Result<PeerID> {
id
};
- Ok((device_ulid.to_u128() & u64::MAX as u128) as u64)
+ let raw: u128 = device_ulid.into();
+ Ok((raw & u64::MAX as u128) as u64)
}
fn atomic_write_file(path: &Path, bytes: &[u8]) -> Result<()> {
@@ -2,14 +2,20 @@ use assert_cmd::Command;
use predicates::prelude::*;
use tempfile::TempDir;
-fn td() -> Command {
- Command::cargo_bin("td").unwrap()
+fn td(home: &TempDir) -> Command {
+ let mut cmd = Command::cargo_bin("td").unwrap();
+ cmd.env("HOME", home.path());
+ cmd
}
/// Initialise a temp directory and return it.
fn init_tmp() -> TempDir {
let tmp = TempDir::new().unwrap();
- td().arg("init").current_dir(&tmp).assert().success();
+ td(&tmp)
+ .args(["init", "main"])
+ .current_dir(&tmp)
+ .assert()
+ .success();
tmp
}
@@ -17,7 +23,8 @@ fn init_tmp() -> TempDir {
fn create_prints_id_and_title() {
let tmp = init_tmp();
- td().args(["create", "My first task"])
+ td(&tmp)
+ .args(["create", "My first task"])
.current_dir(&tmp)
.assert()
.success()
@@ -28,24 +35,26 @@ fn create_prints_id_and_title() {
fn create_json_returns_task_object() {
let tmp = init_tmp();
- td().args(["--json", "create", "Buy milk"])
+ td(&tmp)
+ .args(["--json", "create", "Buy milk"])
.current_dir(&tmp)
.assert()
.success()
.stdout(predicate::str::contains(r#""title":"Buy milk"#))
.stdout(predicate::str::contains(r#""status":"open"#))
- .stdout(predicate::str::contains(r#""priority":2"#));
+ .stdout(predicate::str::contains(r#""priority":"medium""#));
}
#[test]
fn create_with_priority_and_type() {
let tmp = init_tmp();
- td().args(["--json", "create", "Urgent bug", "-p", "high", "-t", "bug"])
+ td(&tmp)
+ .args(["--json", "create", "Urgent bug", "-p", "high", "-t", "bug"])
.current_dir(&tmp)
.assert()
.success()
- .stdout(predicate::str::contains(r#""priority":1"#))
+ .stdout(predicate::str::contains(r#""priority":"high""#))
.stdout(predicate::str::contains(r#""type":"bug"#));
}
@@ -53,41 +62,50 @@ fn create_with_priority_and_type() {
fn create_with_description() {
let tmp = init_tmp();
- td().args([
- "--json",
- "create",
- "Fix login",
- "-d",
- "The login page is broken",
- ])
- .current_dir(&tmp)
- .assert()
- .success()
- .stdout(predicate::str::contains("The login page is broken"));
+ td(&tmp)
+ .args([
+ "--json",
+ "create",
+ "Fix login",
+ "-d",
+ "The login page is broken",
+ ])
+ .current_dir(&tmp)
+ .assert()
+ .success()
+ .stdout(predicate::str::contains("The login page is broken"));
}
#[test]
fn create_with_labels() {
let tmp = init_tmp();
- td().args(["--json", "create", "Labelled task", "-l", "frontend,urgent"])
+ td(&tmp)
+ .args(["--json", "create", "Labelled task", "-l", "frontend,urgent"])
.current_dir(&tmp)
.assert()
.success();
- // Verify labels are stored by checking the database directly.
- let conn = rusqlite::Connection::open(tmp.path().join(".td/tasks.db")).unwrap();
- let count: i64 = conn
- .query_row("SELECT COUNT(*) FROM labels", [], |r| r.get(0))
+ let out = td(&tmp)
+ .args(["--json", "list", "-l", "frontend"])
+ .current_dir(&tmp)
+ .output()
.unwrap();
- assert_eq!(count, 2);
+ let v: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
+ assert_eq!(v.as_array().unwrap().len(), 1);
+
+ let task = &v[0];
+ let labels = task["labels"].as_array().unwrap();
+ assert!(labels.contains(&serde_json::Value::String("frontend".to_string())));
+ assert!(labels.contains(&serde_json::Value::String("urgent".to_string())));
}
#[test]
fn create_requires_title() {
let tmp = init_tmp();
- td().arg("create")
+ td(&tmp)
+ .arg("create")
.current_dir(&tmp)
.assert()
.failure()
@@ -99,7 +117,7 @@ fn create_subtask_under_parent() {
let tmp = init_tmp();
// Create parent, extract its id.
- let parent_out = td()
+ let parent_out = td(&tmp)
.args(["--json", "create", "Parent task"])
.current_dir(&tmp)
.output()
@@ -108,7 +126,7 @@ fn create_subtask_under_parent() {
let parent_id = parent["id"].as_str().unwrap();
// Create child under parent.
- let child_out = td()
+ let child_out = td(&tmp)
.args(["--json", "create", "Child task", "--parent", parent_id])
.current_dir(&tmp)
.output()
@@ -116,11 +134,8 @@ fn create_subtask_under_parent() {
let child: serde_json::Value = serde_json::from_slice(&child_out.stdout).unwrap();
let child_id = child["id"].as_str().unwrap();
- // Child id should start with parent id.
- assert!(
- child_id.starts_with(parent_id),
- "child id '{child_id}' should start with parent id '{parent_id}'"
- );
+ // Child id is its own ULID; relationship is represented by the parent field.
+ assert_ne!(child_id, parent_id);
assert_eq!(child["parent"].as_str().unwrap(), parent_id);
}
@@ -128,33 +143,34 @@ fn create_subtask_under_parent() {
fn create_with_effort() {
let tmp = init_tmp();
- let out = td()
+ let out = td(&tmp)
.args(["--json", "create", "Hard task", "-e", "high"])
.current_dir(&tmp)
.output()
.unwrap();
let v: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
- assert_eq!(v["effort"].as_i64().unwrap(), 3);
+ assert_eq!(v["effort"].as_str().unwrap(), "high");
}
#[test]
fn create_with_priority_label() {
let tmp = init_tmp();
- let out = td()
+ let out = td(&tmp)
.args(["--json", "create", "Low prio", "-p", "low"])
.current_dir(&tmp)
.output()
.unwrap();
let v: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
- assert_eq!(v["priority"].as_i64().unwrap(), 3);
+ assert_eq!(v["priority"].as_str().unwrap(), "low");
}
#[test]
fn create_rejects_invalid_priority() {
let tmp = init_tmp();
- td().args(["create", "Bad", "-p", "urgent"])
+ td(&tmp)
+ .args(["create", "Bad", "-p", "urgent"])
.current_dir(&tmp)
.assert()
.failure()
@@ -167,7 +183,8 @@ fn create_rejects_invalid_priority() {
fn create_rejects_invalid_effort() {
let tmp = init_tmp();
- td().args(["create", "Bad", "-e", "huge"])
+ td(&tmp)
+ .args(["create", "Bad", "-e", "huge"])
.current_dir(&tmp)
.assert()
.failure()
@@ -2,18 +2,24 @@ use assert_cmd::Command;
use predicates::prelude::*;
use tempfile::TempDir;
-fn td() -> Command {
- Command::cargo_bin("td").unwrap()
+fn td(home: &TempDir) -> Command {
+ let mut cmd = Command::cargo_bin("td").unwrap();
+ cmd.env("HOME", home.path());
+ cmd
}
fn init_tmp() -> TempDir {
let tmp = TempDir::new().unwrap();
- td().arg("init").current_dir(&tmp).assert().success();
+ td(&tmp)
+ .args(["init", "main"])
+ .current_dir(&tmp)
+ .assert()
+ .success();
tmp
}
fn create_task(dir: &TempDir, title: &str) -> String {
- let out = td()
+ let out = td(dir)
.args(["--json", "create", title])
.current_dir(dir)
.output()
@@ -23,7 +29,7 @@ fn create_task(dir: &TempDir, title: &str) -> String {
}
fn get_task_json(dir: &TempDir, id: &str) -> serde_json::Value {
- let out = td()
+ let out = td(dir)
.args(["--json", "show", id])
.current_dir(dir)
.output()
@@ -37,7 +43,8 @@ fn dep_add_creates_blocker() {
let a = create_task(&tmp, "Blocked task");
let b = create_task(&tmp, "Blocker");
- td().args(["dep", "add", &a, &b])
+ td(&tmp)
+ .args(["dep", "add", &a, &b])
.current_dir(&tmp)
.assert()
.success()
@@ -54,11 +61,13 @@ fn dep_rm_removes_blocker() {
let a = create_task(&tmp, "Was blocked");
let b = create_task(&tmp, "Was blocker");
- td().args(["dep", "add", &a, &b])
+ td(&tmp)
+ .args(["dep", "add", &a, &b])
.current_dir(&tmp)
.assert()
.success();
- td().args(["dep", "rm", &a, &b])
+ td(&tmp)
+ .args(["dep", "rm", &a, &b])
.current_dir(&tmp)
.assert()
.success();
@@ -73,22 +82,30 @@ fn dep_tree_shows_children() {
let tmp = init_tmp();
let parent = create_task(&tmp, "Parent");
- td().args(["create", "Child one", "--parent", &parent])
+ let out = td(&tmp)
+ .args(["--json", "create", "Subtask one", "--parent", &parent])
.current_dir(&tmp)
- .assert()
- .success();
- td().args(["create", "Child two", "--parent", &parent])
+ .output()
+ .unwrap();
+ let subtask_one: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
+ let subtask_one_id = subtask_one["id"].as_str().unwrap().to_string();
+
+ let out = td(&tmp)
+ .args(["--json", "create", "Subtask two", "--parent", &parent])
.current_dir(&tmp)
- .assert()
- .success();
+ .output()
+ .unwrap();
+ let subtask_two: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
+ let subtask_two_id = subtask_two["id"].as_str().unwrap().to_string();
- td().args(["dep", "tree", &parent])
+ td(&tmp)
+ .args(["dep", "tree", &parent])
.current_dir(&tmp)
.assert()
.success()
- .stdout(predicate::str::contains(&parent))
- .stdout(predicate::str::contains(".1"))
- .stdout(predicate::str::contains(".2"));
+ .stdout(predicate::str::contains(&parent[..7]))
+ .stdout(predicate::str::contains(&subtask_one_id[..7]))
+ .stdout(predicate::str::contains(&subtask_two_id[..7]));
}
#[test]
@@ -96,7 +113,8 @@ fn dep_add_rejects_self_cycle() {
let tmp = init_tmp();
let a = create_task(&tmp, "Self-referential");
- td().args(["dep", "add", &a, &a])
+ td(&tmp)
+ .args(["dep", "add", &a, &a])
.current_dir(&tmp)
.assert()
.failure()
@@ -110,13 +128,15 @@ fn dep_add_rejects_direct_cycle() {
let b = create_task(&tmp, "Task B");
// A blocked by B
- td().args(["dep", "add", &a, &b])
+ td(&tmp)
+ .args(["dep", "add", &a, &b])
.current_dir(&tmp)
.assert()
.success();
// B blocked by A would create A → B → A
- td().args(["dep", "add", &b, &a])
+ td(&tmp)
+ .args(["dep", "add", &b, &a])
.current_dir(&tmp)
.assert()
.failure()
@@ -131,17 +151,20 @@ fn dep_add_rejects_transitive_cycle() {
let c = create_task(&tmp, "Task C");
// A blocked by B, B blocked by C
- td().args(["dep", "add", &a, &b])
+ td(&tmp)
+ .args(["dep", "add", &a, &b])
.current_dir(&tmp)
.assert()
.success();
- td().args(["dep", "add", &b, &c])
+ td(&tmp)
+ .args(["dep", "add", &b, &c])
.current_dir(&tmp)
.assert()
.success();
// C blocked by A would create A → B → C → A
- td().args(["dep", "add", &c, &a])
+ td(&tmp)
+ .args(["dep", "add", &c, &a])
.current_dir(&tmp)
.assert()
.failure()
@@ -157,19 +180,23 @@ fn dep_add_allows_diamond_without_cycle() {
let d = create_task(&tmp, "Task D");
// Diamond: D blocked by B and C, both blocked by A
- td().args(["dep", "add", &d, &b])
+ td(&tmp)
+ .args(["dep", "add", &d, &b])
.current_dir(&tmp)
.assert()
.success();
- td().args(["dep", "add", &d, &c])
+ td(&tmp)
+ .args(["dep", "add", &d, &c])
.current_dir(&tmp)
.assert()
.success();
- td().args(["dep", "add", &b, &a])
+ td(&tmp)
+ .args(["dep", "add", &b, &a])
.current_dir(&tmp)
.assert()
.success();
- td().args(["dep", "add", &c, &a])
+ td(&tmp)
+ .args(["dep", "add", &c, &a])
.current_dir(&tmp)
.assert()
.success();
@@ -185,7 +212,8 @@ fn dep_add_rejects_nonexistent_child() {
let tmp = init_tmp();
let real = create_task(&tmp, "Real task");
- td().args(["dep", "add", "td-ghost", &real])
+ td(&tmp)
+ .args(["dep", "add", "td-ghost", &real])
.current_dir(&tmp)
.assert()
.failure()
@@ -197,7 +225,8 @@ fn dep_add_rejects_nonexistent_parent() {
let tmp = init_tmp();
let real = create_task(&tmp, "Real task");
- td().args(["dep", "add", &real, "td-phantom"])
+ td(&tmp)
+ .args(["dep", "add", &real, "td-phantom"])
.current_dir(&tmp)
.assert()
.failure()
@@ -2,79 +2,128 @@ use assert_cmd::Command;
use predicates::prelude::*;
use tempfile::TempDir;
-fn td() -> Command {
- Command::cargo_bin("td").unwrap()
+fn td(home: &TempDir) -> Command {
+ let mut cmd = Command::cargo_bin("td").unwrap();
+ cmd.env("HOME", home.path());
+ cmd
}
#[test]
-fn init_creates_td_directory_and_database() {
+fn init_creates_project_snapshot_and_binding() {
let tmp = TempDir::new().unwrap();
- td().arg("init")
+ td(&tmp)
+ .args(["init", "demo"])
.current_dir(&tmp)
.assert()
.success()
- .stderr(predicate::str::contains("initialized .td/"));
-
- assert!(tmp.path().join(".td").is_dir());
- assert!(tmp.path().join(".td/tasks.db").is_file());
+ .stderr(predicate::str::contains("initialized project 'demo'"));
+
+ let root = tmp.path().join(".local/share/td");
+ assert!(root.join("projects/demo/base.loro").is_file());
+ assert!(root.join("projects/demo/changes").is_dir());
+
+ let bindings_path = root.join("bindings.json");
+ assert!(bindings_path.is_file());
+ let bindings: serde_json::Value =
+ serde_json::from_str(&std::fs::read_to_string(bindings_path).unwrap()).unwrap();
+ let canonical_cwd = std::fs::canonicalize(tmp.path()).unwrap();
+ assert_eq!(
+ bindings["bindings"][canonical_cwd.to_string_lossy().as_ref()]
+ .as_str()
+ .unwrap(),
+ "demo"
+ );
}
#[test]
-fn init_creates_schema_with_expected_tables() {
+fn init_fails_when_project_already_exists() {
let tmp = TempDir::new().unwrap();
- td().arg("init").current_dir(&tmp).assert().success();
-
- let conn = rusqlite::Connection::open(tmp.path().join(".td/tasks.db")).unwrap();
-
- // Verify all three tables exist by querying sqlite_master.
- let tables: Vec<String> = conn
- .prepare("SELECT name FROM sqlite_master WHERE type='table' ORDER BY name")
- .unwrap()
- .query_map([], |row| row.get(0))
- .unwrap()
- .map(|r| r.unwrap())
- .collect();
+ td(&tmp)
+ .args(["init", "demo"])
+ .current_dir(&tmp)
+ .assert()
+ .success();
- assert!(tables.contains(&"tasks".to_string()));
- assert!(tables.contains(&"labels".to_string()));
- assert!(tables.contains(&"blockers".to_string()));
+ td(&tmp)
+ .args(["init", "demo"])
+ .current_dir(&tmp)
+ .assert()
+ .failure()
+ .stderr(predicate::str::contains("already exists"));
}
#[test]
-fn init_fails_when_already_initialized() {
+fn use_binds_another_directory_to_existing_project() {
let tmp = TempDir::new().unwrap();
+ let other = tmp.path().join("other");
+ std::fs::create_dir_all(&other).unwrap();
- td().arg("init").current_dir(&tmp).assert().success();
+ td(&tmp)
+ .args(["init", "demo"])
+ .current_dir(&tmp)
+ .assert()
+ .success();
- td().arg("init")
+ td(&tmp)
+ .args(["create", "Created from original binding"])
.current_dir(&tmp)
.assert()
- .failure()
- .stderr(predicate::str::contains("already initialized"));
+ .success();
+
+ td(&tmp)
+ .args(["use", "demo"])
+ .current_dir(&other)
+ .assert()
+ .success();
+
+ td(&tmp)
+ .args(["list"])
+ .current_dir(&other)
+ .assert()
+ .success()
+ .stdout(predicate::str::contains("Created from original binding"));
}
#[test]
-fn init_stealth_adds_gitignore_entry() {
+fn init_json_outputs_success() {
let tmp = TempDir::new().unwrap();
- td().args(["init", "--stealth"])
+ td(&tmp)
+ .args(["--json", "init", "demo"])
.current_dir(&tmp)
.assert()
- .success();
-
- let gitignore = std::fs::read_to_string(tmp.path().join(".gitignore")).unwrap();
- assert!(gitignore.contains(".td/"));
+ .success()
+ .stdout(predicate::str::contains(r#""success":true"#))
+ .stdout(predicate::str::contains(r#""project":"demo""#));
}
#[test]
-fn init_json_outputs_success() {
+fn projects_lists_all_initialized_projects() {
let tmp = TempDir::new().unwrap();
+ let api_dir = tmp.path().join("api");
+ let web_dir = tmp.path().join("web");
+ std::fs::create_dir_all(&api_dir).unwrap();
+ std::fs::create_dir_all(&web_dir).unwrap();
+
+ td(&tmp)
+ .args(["init", "api"])
+ .current_dir(&api_dir)
+ .assert()
+ .success();
- td().args(["--json", "init"])
- .current_dir(&tmp)
+ td(&tmp)
+ .args(["init", "web"])
+ .current_dir(&web_dir)
+ .assert()
+ .success();
+
+ td(&tmp)
+ .args(["projects"])
+ .current_dir(&api_dir)
.assert()
.success()
- .stdout(predicate::str::contains(r#"{"success":true}"#));
+ .stdout(predicate::str::contains("api"))
+ .stdout(predicate::str::contains("web"));
}
@@ -1,19 +1,24 @@
use assert_cmd::Command;
-use predicates::prelude::*;
use tempfile::TempDir;
-fn td() -> Command {
- Command::cargo_bin("td").unwrap()
+fn td(home: &TempDir) -> Command {
+ let mut cmd = Command::cargo_bin("td").unwrap();
+ cmd.env("HOME", home.path());
+ cmd
}
fn init_tmp() -> TempDir {
let tmp = TempDir::new().unwrap();
- td().arg("init").current_dir(&tmp).assert().success();
+ td(&tmp)
+ .args(["init", "main"])
+ .current_dir(&tmp)
+ .assert()
+ .success();
tmp
}
fn create_task(dir: &TempDir, title: &str) -> String {
- let out = td()
+ let out = td(dir)
.args(["--json", "create", title])
.current_dir(dir)
.output()
@@ -28,7 +33,7 @@ fn export_produces_jsonl() {
create_task(&tmp, "First");
create_task(&tmp, "Second");
- let out = td().arg("export").current_dir(&tmp).output().unwrap();
+ let out = td(&tmp).arg("export").current_dir(&tmp).output().unwrap();
let stdout = String::from_utf8(out.stdout).unwrap();
let lines: Vec<&str> = stdout.lines().collect();
assert_eq!(lines.len(), 2, "expected 2 JSONL lines, got: {stdout}");
@@ -43,12 +48,13 @@ fn export_produces_jsonl() {
#[test]
fn export_includes_labels_and_blockers() {
let tmp = init_tmp();
- td().args(["create", "With labels", "-l", "bug"])
+ td(&tmp)
+ .args(["create", "With labels", "-l", "bug"])
.current_dir(&tmp)
.assert()
.success();
- let out = td().arg("export").current_dir(&tmp).output().unwrap();
+ let out = td(&tmp).arg("export").current_dir(&tmp).output().unwrap();
let line = String::from_utf8(out.stdout).unwrap();
let v: serde_json::Value = serde_json::from_str(line.trim()).unwrap();
assert!(v["labels"].is_array());
@@ -60,13 +66,14 @@ fn import_round_trips_with_export() {
let tmp = init_tmp();
create_task(&tmp, "Alpha");
- td().args(["create", "Bravo", "-l", "important"])
+ td(&tmp)
+ .args(["create", "Bravo", "-l", "important"])
.current_dir(&tmp)
.assert()
.success();
// Export.
- let export_out = td().arg("export").current_dir(&tmp).output().unwrap();
+ let export_out = td(&tmp).arg("export").current_dir(&tmp).output().unwrap();
let exported = String::from_utf8(export_out.stdout).unwrap();
// Write to a file.
@@ -75,16 +82,20 @@ fn import_round_trips_with_export() {
// Create a fresh directory, init, import.
let tmp2 = TempDir::new().unwrap();
- td().arg("init").current_dir(&tmp2).assert().success();
+ td(&tmp2)
+ .args(["init", "mirror"])
+ .current_dir(&tmp2)
+ .assert()
+ .success();
- td().args(["import", export_file.to_str().unwrap()])
+ td(&tmp2)
+ .args(["import", export_file.to_str().unwrap()])
.current_dir(&tmp2)
.assert()
- .success()
- .stderr(predicate::str::contains("import complete"));
+ .success();
// Verify tasks exist in the new database.
- let out = td()
+ let out = td(&tmp2)
.args(["--json", "list"])
.current_dir(&tmp2)
.output()
@@ -114,35 +125,41 @@ fn import_round_trips_with_export() {
fn export_import_preserves_effort() {
let tmp = init_tmp();
- td().args(["create", "High effort", "-e", "high"])
+ td(&tmp)
+ .args(["create", "High effort", "-e", "high"])
.current_dir(&tmp)
.assert()
.success();
// Export.
- let out = td().arg("export").current_dir(&tmp).output().unwrap();
+ let out = td(&tmp).arg("export").current_dir(&tmp).output().unwrap();
let exported = String::from_utf8(out.stdout).unwrap();
// Verify effort is in the JSONL.
let v: serde_json::Value = serde_json::from_str(exported.trim()).unwrap();
- assert_eq!(v["effort"].as_i64().unwrap(), 3);
+ assert_eq!(v["effort"].as_str().unwrap(), "high");
// Round-trip into a fresh database.
let export_file = tmp.path().join("effort.jsonl");
std::fs::write(&export_file, &exported).unwrap();
let tmp2 = TempDir::new().unwrap();
- td().arg("init").current_dir(&tmp2).assert().success();
- td().args(["import", export_file.to_str().unwrap()])
+ td(&tmp2)
+ .args(["init", "mirror"])
+ .current_dir(&tmp2)
+ .assert()
+ .success();
+ td(&tmp2)
+ .args(["import", export_file.to_str().unwrap()])
.current_dir(&tmp2)
.assert()
.success();
- let out2 = td()
+ let out2 = td(&tmp2)
.args(["--json", "list"])
.current_dir(&tmp2)
.output()
.unwrap();
let v2: serde_json::Value = serde_json::from_slice(&out2.stdout).unwrap();
- assert_eq!(v2[0]["effort"].as_i64().unwrap(), 3);
+ assert_eq!(v2[0]["effort"].as_str().unwrap(), "high");
}
@@ -2,18 +2,24 @@ use assert_cmd::Command;
use predicates::prelude::*;
use tempfile::TempDir;
-fn td() -> Command {
- Command::cargo_bin("td").unwrap()
+fn td(home: &TempDir) -> Command {
+ let mut cmd = Command::cargo_bin("td").unwrap();
+ cmd.env("HOME", home.path());
+ cmd
}
fn init_tmp() -> TempDir {
let tmp = TempDir::new().unwrap();
- td().arg("init").current_dir(&tmp).assert().success();
+ td(&tmp)
+ .args(["init", "main"])
+ .current_dir(&tmp)
+ .assert()
+ .success();
tmp
}
fn create_task(dir: &TempDir, title: &str) -> String {
- let out = td()
+ let out = td(dir)
.args(["--json", "create", title])
.current_dir(dir)
.output()
@@ -27,13 +33,15 @@ fn label_add_and_list() {
let tmp = init_tmp();
let id = create_task(&tmp, "Tag me");
- td().args(["label", "add", &id, "important"])
+ td(&tmp)
+ .args(["label", "add", &id, "important"])
.current_dir(&tmp)
.assert()
.success()
.stdout(predicate::str::contains("added"));
- td().args(["label", "list", &id])
+ td(&tmp)
+ .args(["label", "list", &id])
.current_dir(&tmp)
.assert()
.success()
@@ -45,16 +53,19 @@ fn label_rm_removes_label() {
let tmp = init_tmp();
let id = create_task(&tmp, "Untag me");
- td().args(["label", "add", &id, "temp"])
+ td(&tmp)
+ .args(["label", "add", &id, "temp"])
.current_dir(&tmp)
.assert()
.success();
- td().args(["label", "rm", &id, "temp"])
+ td(&tmp)
+ .args(["label", "rm", &id, "temp"])
.current_dir(&tmp)
.assert()
.success();
- td().args(["label", "list", &id])
+ td(&tmp)
+ .args(["label", "list", &id])
.current_dir(&tmp)
.assert()
.success()
@@ -67,20 +78,24 @@ fn label_list_all_shows_distinct_labels() {
let a = create_task(&tmp, "A");
let b = create_task(&tmp, "B");
- td().args(["label", "add", &a, "bug"])
+ td(&tmp)
+ .args(["label", "add", &a, "bug"])
.current_dir(&tmp)
.assert()
.success();
- td().args(["label", "add", &b, "bug"])
+ td(&tmp)
+ .args(["label", "add", &b, "bug"])
.current_dir(&tmp)
.assert()
.success();
- td().args(["label", "add", &b, "ui"])
+ td(&tmp)
+ .args(["label", "add", &b, "ui"])
.current_dir(&tmp)
.assert()
.success();
- td().args(["label", "list-all"])
+ td(&tmp)
+ .args(["label", "list-all"])
.current_dir(&tmp)
.assert()
.success()
@@ -2,19 +2,25 @@ use assert_cmd::Command;
use predicates::prelude::*;
use tempfile::TempDir;
-fn td() -> Command {
- Command::cargo_bin("td").unwrap()
+fn td(home: &TempDir) -> Command {
+ let mut cmd = Command::cargo_bin("td").unwrap();
+ cmd.env("HOME", home.path());
+ cmd
}
fn init_tmp() -> TempDir {
let tmp = TempDir::new().unwrap();
- td().arg("init").current_dir(&tmp).assert().success();
+ td(&tmp)
+ .args(["init", "main"])
+ .current_dir(&tmp)
+ .assert()
+ .success();
tmp
}
/// Create a task and return its JSON id.
fn create_task(dir: &TempDir, title: &str) -> String {
- let out = td()
+ let out = td(dir)
.args(["--json", "create", title])
.current_dir(dir)
.output()
@@ -31,7 +37,8 @@ fn list_shows_created_tasks() {
create_task(&tmp, "Alpha");
create_task(&tmp, "Bravo");
- td().arg("list")
+ td(&tmp)
+ .arg("list")
.current_dir(&tmp)
.assert()
.success()
@@ -44,7 +51,7 @@ fn list_json_returns_array() {
let tmp = init_tmp();
create_task(&tmp, "One");
- let out = td()
+ let out = td(&tmp)
.args(["--json", "list"])
.current_dir(&tmp)
.output()
@@ -61,7 +68,7 @@ fn list_filter_by_status() {
create_task(&tmp, "Open task");
// No closed tasks yet.
- let out = td()
+ let out = td(&tmp)
.args(["--json", "list", "-s", "closed"])
.current_dir(&tmp)
.output()
@@ -74,16 +81,18 @@ fn list_filter_by_status() {
fn list_filter_by_priority() {
let tmp = init_tmp();
- td().args(["create", "Low prio", "-p", "low"])
+ td(&tmp)
+ .args(["create", "Low prio", "-p", "low"])
.current_dir(&tmp)
.assert()
.success();
- td().args(["create", "High prio", "-p", "high"])
+ td(&tmp)
+ .args(["create", "High prio", "-p", "high"])
.current_dir(&tmp)
.assert()
.success();
- let out = td()
+ let out = td(&tmp)
.args(["--json", "list", "-p", "high"])
.current_dir(&tmp)
.output()
@@ -98,16 +107,18 @@ fn list_filter_by_priority() {
fn list_filter_by_label() {
let tmp = init_tmp();
- td().args(["create", "Tagged", "-l", "urgent"])
+ td(&tmp)
+ .args(["create", "Tagged", "-l", "urgent"])
.current_dir(&tmp)
.assert()
.success();
- td().args(["create", "Untagged"])
+ td(&tmp)
+ .args(["create", "Untagged"])
.current_dir(&tmp)
.assert()
.success();
- let out = td()
+ let out = td(&tmp)
.args(["--json", "list", "-l", "urgent"])
.current_dir(&tmp)
.output()
@@ -122,16 +133,18 @@ fn list_filter_by_label() {
fn list_filter_by_effort() {
let tmp = init_tmp();
- td().args(["create", "Easy", "-e", "low"])
+ td(&tmp)
+ .args(["create", "Easy", "-e", "low"])
.current_dir(&tmp)
.assert()
.success();
- td().args(["create", "Hard", "-e", "high"])
+ td(&tmp)
+ .args(["create", "Hard", "-e", "high"])
.current_dir(&tmp)
.assert()
.success();
- let out = td()
+ let out = td(&tmp)
.args(["--json", "list", "-e", "low"])
.current_dir(&tmp)
.output()
@@ -149,25 +162,27 @@ fn show_displays_task() {
let tmp = init_tmp();
let id = create_task(&tmp, "Details here");
- td().args(["show", &id])
+ td(&tmp)
+ .args(["show", &id])
.current_dir(&tmp)
.assert()
.success()
.stdout(predicate::str::contains("Details here"))
- .stdout(predicate::str::contains(&id));
+ .stdout(predicate::str::contains(&id[..7]));
}
#[test]
fn show_json_includes_labels_and_blockers() {
let tmp = init_tmp();
- td().args(["create", "With labels", "-l", "bug,ui"])
+ td(&tmp)
+ .args(["create", "With labels", "-l", "bug,ui"])
.current_dir(&tmp)
.assert()
.success();
// Get the id via list.
- let out = td()
+ let out = td(&tmp)
.args(["--json", "list"])
.current_dir(&tmp)
.output()
@@ -175,7 +190,7 @@ fn show_json_includes_labels_and_blockers() {
let list: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
let id = list[0]["id"].as_str().unwrap();
- let out = td()
+ let out = td(&tmp)
.args(["--json", "show", id])
.current_dir(&tmp)
.output()
@@ -195,7 +210,8 @@ fn show_json_includes_labels_and_blockers() {
fn show_nonexistent_task_fails() {
let tmp = init_tmp();
- td().args(["show", "td-nope"])
+ td(&tmp)
+ .args(["show", "td-nope"])
.current_dir(&tmp)
.assert()
.failure()
@@ -209,28 +225,33 @@ fn show_annotates_closed_blockers() {
let open_blocker = create_task(&tmp, "Still open");
let closed_blocker = create_task(&tmp, "Will close");
- td().args(["dep", "add", &task, &open_blocker])
+ td(&tmp)
+ .args(["dep", "add", &task, &open_blocker])
.current_dir(&tmp)
.assert()
.success();
- td().args(["dep", "add", &task, &closed_blocker])
+ td(&tmp)
+ .args(["dep", "add", &task, &closed_blocker])
.current_dir(&tmp)
.assert()
.success();
- td().args(["done", &closed_blocker])
+ td(&tmp)
+ .args(["done", &closed_blocker])
.current_dir(&tmp)
.assert()
.success();
// Plural label, open blocker bare, closed one annotated.
- td().args(["show", &task])
+ td(&tmp)
+ .args(["show", &task])
.current_dir(&tmp)
.assert()
.success()
.stdout(predicate::str::contains("blockers"))
- .stdout(predicate::str::contains(&open_blocker))
+ .stdout(predicate::str::contains(&open_blocker[..7]))
.stdout(predicate::str::contains(&format!(
- "{closed_blocker} [closed]"
+ "{} [closed]",
+ &closed_blocker[..7]
)));
}
@@ -240,23 +261,26 @@ fn show_all_closed_blockers_prefixed() {
let task = create_task(&tmp, "Was blocked");
let blocker = create_task(&tmp, "Done now");
- td().args(["dep", "add", &task, &blocker])
+ td(&tmp)
+ .args(["dep", "add", &task, &blocker])
.current_dir(&tmp)
.assert()
.success();
- td().args(["done", &blocker])
+ td(&tmp)
+ .args(["done", &blocker])
.current_dir(&tmp)
.assert()
.success();
// Singular label, [all closed] prefix.
- td().args(["show", &task])
+ td(&tmp)
+ .args(["show", &task])
.current_dir(&tmp)
.assert()
.success()
.stdout(predicate::str::contains("blocker"))
.stdout(predicate::str::contains("[all closed]"))
- .stdout(predicate::str::contains(&blocker));
+ .stdout(predicate::str::contains(&blocker[..7]));
}
#[test]
@@ -265,12 +289,13 @@ fn show_single_open_blocker_singular_label() {
let task = create_task(&tmp, "Blocked");
let blocker = create_task(&tmp, "Blocking");
- td().args(["dep", "add", &task, &blocker])
+ td(&tmp)
+ .args(["dep", "add", &task, &blocker])
.current_dir(&tmp)
.assert()
.success();
- let out = td()
+ let out = td(&tmp)
.args(["show", &task])
.current_dir(&tmp)
.output()
@@ -279,7 +304,7 @@ fn show_single_open_blocker_singular_label() {
// Singular "blocker", no "blockers".
assert!(stdout.contains("blocker"));
- assert!(stdout.contains(&blocker));
+ assert!(stdout.contains(&blocker[..7]));
// Should not contain [closed] or [all closed].
assert!(!stdout.contains("[closed]"));
}
@@ -2,18 +2,24 @@ use assert_cmd::Command;
use predicates::prelude::*;
use tempfile::TempDir;
-fn td() -> Command {
- Command::cargo_bin("td").unwrap()
+fn td(home: &TempDir) -> Command {
+ let mut cmd = Command::cargo_bin("td").unwrap();
+ cmd.env("HOME", home.path());
+ cmd
}
fn init_tmp() -> TempDir {
let tmp = TempDir::new().unwrap();
- td().arg("init").current_dir(&tmp).assert().success();
+ td(&tmp)
+ .args(["init", "main"])
+ .current_dir(&tmp)
+ .assert()
+ .success();
tmp
}
fn create_task(dir: &TempDir, title: &str) -> String {
- let out = td()
+ let out = td(dir)
.args(["--json", "create", title])
.current_dir(dir)
.output()
@@ -27,11 +33,12 @@ fn log_human_reports_task_id() {
let tmp = init_tmp();
let id = create_task(&tmp, "Write docs");
- td().args(["log", &id, "Drafted command docs"])
+ td(&tmp)
+ .args(["log", &id, "Drafted command docs"])
.current_dir(&tmp)
.assert()
.success()
- .stdout(predicate::str::contains(format!("logged to {id}")));
+ .stdout(predicate::str::contains(format!("logged to {}", &id[..7])));
}
#[test]
@@ -39,16 +46,15 @@ fn log_json_emits_created_log_entry() {
let tmp = init_tmp();
let id = create_task(&tmp, "Investigate timeout");
- let out = td()
+ let out = td(&tmp)
.args(["--json", "log", &id, "Collected stack traces"])
.current_dir(&tmp)
.output()
.unwrap();
let v: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
- assert!(v["id"].is_i64());
- assert_eq!(v["task_id"].as_str().unwrap(), id);
- assert_eq!(v["body"].as_str().unwrap(), "Collected stack traces");
+ assert!(v["id"].is_string());
+ assert_eq!(v["message"].as_str().unwrap(), "Collected stack traces");
assert!(v["timestamp"].as_str().unwrap().ends_with('Z'));
}
@@ -56,11 +62,12 @@ fn log_json_emits_created_log_entry() {
fn log_nonexistent_task_fails() {
let tmp = init_tmp();
- td().args(["log", "td-nope", "No task"])
+ td(&tmp)
+ .args(["log", "td-nope", "No task"])
.current_dir(&tmp)
.assert()
.failure()
- .stderr(predicate::str::contains("task td-nope not found"));
+ .stderr(predicate::str::contains("task 'td-nope' not found"));
}
#[test]
@@ -68,16 +75,22 @@ fn show_human_displays_logs_chronologically() {
let tmp = init_tmp();
let id = create_task(&tmp, "Investigate auth issue");
- td().args(["log", &id, "First note"])
+ td(&tmp)
+ .args(["log", &id, "First note"])
.current_dir(&tmp)
.assert()
.success();
- td().args(["log", &id, "Second note"])
+ td(&tmp)
+ .args(["log", &id, "Second note"])
.current_dir(&tmp)
.assert()
.success();
- let out = td().args(["show", &id]).current_dir(&tmp).output().unwrap();
+ let out = td(&tmp)
+ .args(["show", &id])
+ .current_dir(&tmp)
+ .output()
+ .unwrap();
let stdout = String::from_utf8(out.stdout).unwrap();
let first = stdout.find("First note").unwrap();
let second = stdout.find("Second note").unwrap();
@@ -91,12 +104,13 @@ fn show_json_includes_logs_array() {
let tmp = init_tmp();
let id = create_task(&tmp, "Implement parser");
- td().args(["log", &id, "Mapped grammar rules"])
+ td(&tmp)
+ .args(["log", &id, "Mapped grammar rules"])
.current_dir(&tmp)
.assert()
.success();
- let out = td()
+ let out = td(&tmp)
.args(["--json", "show", &id])
.current_dir(&tmp)
.output()
@@ -105,7 +119,7 @@ fn show_json_includes_logs_array() {
let logs = v["logs"].as_array().unwrap();
assert_eq!(logs.len(), 1);
- assert_eq!(logs[0]["body"].as_str().unwrap(), "Mapped grammar rules");
+ assert_eq!(logs[0]["message"].as_str().unwrap(), "Mapped grammar rules");
}
#[test]
@@ -114,13 +128,14 @@ fn multiple_log_entries_are_ordered() {
let id = create_task(&tmp, "Refactor planner");
for msg in ["step one", "step two", "step three"] {
- td().args(["log", &id, msg])
+ td(&tmp)
+ .args(["log", &id, msg])
.current_dir(&tmp)
.assert()
.success();
}
- let out = td()
+ let out = td(&tmp)
.args(["--json", "show", &id])
.current_dir(&tmp)
.output()
@@ -129,37 +144,44 @@ fn multiple_log_entries_are_ordered() {
let logs = v["logs"].as_array().unwrap();
assert_eq!(logs.len(), 3);
- assert_eq!(logs[0]["body"].as_str().unwrap(), "step one");
- assert_eq!(logs[1]["body"].as_str().unwrap(), "step two");
- assert_eq!(logs[2]["body"].as_str().unwrap(), "step three");
+ assert_eq!(logs[0]["message"].as_str().unwrap(), "step one");
+ assert_eq!(logs[1]["message"].as_str().unwrap(), "step two");
+ assert_eq!(logs[2]["message"].as_str().unwrap(), "step three");
}
#[test]
fn export_import_round_trips_logs() {
let tmp = init_tmp();
let id = create_task(&tmp, "Port backend");
- td().args(["log", &id, "Measured baseline"])
+ td(&tmp)
+ .args(["log", &id, "Measured baseline"])
.current_dir(&tmp)
.assert()
.success();
- td().args(["log", &id, "Applied optimization"])
+ td(&tmp)
+ .args(["log", &id, "Applied optimization"])
.current_dir(&tmp)
.assert()
.success();
- let export_out = td().arg("export").current_dir(&tmp).output().unwrap();
+ let export_out = td(&tmp).arg("export").current_dir(&tmp).output().unwrap();
let exported = String::from_utf8(export_out.stdout).unwrap();
let export_file = tmp.path().join("logs.jsonl");
std::fs::write(&export_file, &exported).unwrap();
let tmp2 = TempDir::new().unwrap();
- td().arg("init").current_dir(&tmp2).assert().success();
- td().args(["import", export_file.to_str().unwrap()])
+ td(&tmp2)
+ .args(["init", "mirror"])
+ .current_dir(&tmp2)
+ .assert()
+ .success();
+ td(&tmp2)
+ .args(["import", export_file.to_str().unwrap()])
.current_dir(&tmp2)
.assert()
.success();
- let out = td()
+ let out = td(&tmp2)
.args(["--json", "show", &id])
.current_dir(&tmp2)
.output()
@@ -168,20 +190,21 @@ fn export_import_round_trips_logs() {
let logs = v["logs"].as_array().unwrap();
assert_eq!(logs.len(), 2);
- assert_eq!(logs[0]["body"].as_str().unwrap(), "Measured baseline");
- assert_eq!(logs[1]["body"].as_str().unwrap(), "Applied optimization");
+ assert_eq!(logs[0]["message"].as_str().unwrap(), "Measured baseline");
+ assert_eq!(logs[1]["message"].as_str().unwrap(), "Applied optimization");
}
#[test]
fn list_json_does_not_include_logs() {
let tmp = init_tmp();
let id = create_task(&tmp, "Keep list lean");
- td().args(["log", &id, "This should not surface in list"])
+ td(&tmp)
+ .args(["log", &id, "This should not surface in list"])
.current_dir(&tmp)
.assert()
.success();
- let out = td()
+ let out = td(&tmp)
.args(["--json", "list"])
.current_dir(&tmp)
.output()
@@ -1,363 +1,132 @@
-//! Integration tests for the migration system.
-
use assert_cmd::Command;
+use predicates::prelude::*;
use tempfile::TempDir;
-fn td() -> Command {
- Command::cargo_bin("td").unwrap()
-}
-
-fn init_tmp() -> TempDir {
- let tmp = TempDir::new().unwrap();
- td().arg("init").current_dir(&tmp).assert().success();
- tmp
-}
-
-#[test]
-fn fresh_init_sets_latest_version() {
- let tmp = init_tmp();
- let conn = rusqlite::Connection::open(tmp.path().join(".td/tasks.db")).unwrap();
- let version: u32 = conn
- .pragma_query_value(None, "user_version", |row| row.get(0))
- .unwrap();
- // Version should be 5 (migration 0001 + 0002 + 0003 + 0004 + 0005).
- assert_eq!(version, 5);
+fn td(home: &TempDir) -> Command {
+ let mut cmd = Command::cargo_bin("td").unwrap();
+ cmd.env("HOME", home.path());
+ cmd
}
#[test]
-fn legacy_db_is_migrated_on_open() {
+fn init_sets_loro_schema_version_in_meta() {
let tmp = TempDir::new().unwrap();
- let td_dir = tmp.path().join(".td");
- std::fs::create_dir_all(&td_dir).unwrap();
- // Create a v0 database with the old schema (no effort column).
- let conn = rusqlite::Connection::open(td_dir.join("tasks.db")).unwrap();
- conn.execute_batch(
- "PRAGMA foreign_keys = OFF;
- CREATE TABLE tasks (
- id TEXT PRIMARY KEY,
- title TEXT NOT NULL,
- description TEXT DEFAULT '',
- type TEXT DEFAULT 'task',
- priority INTEGER DEFAULT 2,
- status TEXT DEFAULT 'open',
- parent TEXT DEFAULT '',
- created TEXT NOT NULL,
- updated TEXT NOT NULL
- );
- CREATE TABLE labels (
- task_id TEXT, label TEXT,
- PRIMARY KEY (task_id, label),
- FOREIGN KEY (task_id) REFERENCES tasks(id)
- );
- CREATE TABLE blockers (
- task_id TEXT, blocker_id TEXT,
- PRIMARY KEY (task_id, blocker_id),
- FOREIGN KEY (task_id) REFERENCES tasks(id)
- );
- INSERT INTO tasks (id, title, created, updated)
- VALUES ('td-legacy', 'Old task', '2024-01-01T00:00:00Z', '2024-01-01T00:00:00Z');",
- )
- .unwrap();
- drop(conn);
-
- // Opening via td (list) should migrate and succeed.
- td().args(["--json", "list"])
+ td(&tmp)
+ .args(["init", "demo"])
.current_dir(&tmp)
.assert()
.success();
- // Verify the task survived migration and got default effort.
- let out = td()
- .args(["--json", "show", "td-legacy"])
- .current_dir(&tmp)
- .output()
- .unwrap();
- let v: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
- assert_eq!(v["title"].as_str().unwrap(), "Old task");
- assert_eq!(v["effort"].as_i64().unwrap(), 2); // default medium
-
- // Verify version is now latest.
- let conn = rusqlite::Connection::open(td_dir.join("tasks.db")).unwrap();
- let version: u32 = conn
- .pragma_query_value(None, "user_version", |row| row.get(0))
- .unwrap();
- assert_eq!(version, 5);
+ let root = tmp.path().join(".local/share/td");
+ let store = yatd::db::Store::open(&root, "demo").unwrap();
+ assert_eq!(store.schema_version().unwrap(), 1);
}
#[test]
-fn effort_column_exists_after_init() {
- let tmp = init_tmp();
- let conn = rusqlite::Connection::open(tmp.path().join(".td/tasks.db")).unwrap();
-
- // Verify the effort column is present by inserting a row that sets it.
- conn.execute(
- "INSERT INTO tasks (id, title, effort, created, updated) VALUES ('td-test', 'Test', 3, '2024-01-01T00:00:00Z', '2024-01-01T00:00:00Z')",
- [],
- )
- .unwrap();
+fn corrupted_delta_file_is_tolerated_on_open() {
+ let tmp = TempDir::new().unwrap();
- let effort: i32 = conn
- .query_row("SELECT effort FROM tasks WHERE id = 'td-test'", [], |r| {
- r.get(0)
- })
- .unwrap();
- assert_eq!(effort, 3);
-}
+ td(&tmp)
+ .args(["init", "demo"])
+ .current_dir(&tmp)
+ .assert()
+ .success();
-#[test]
-fn blocker_fk_rejects_nonexistent_blocker_id() {
- let tmp = init_tmp();
- let conn = rusqlite::Connection::open(tmp.path().join(".td/tasks.db")).unwrap();
- conn.execute_batch("PRAGMA foreign_keys = ON").unwrap();
+ td(&tmp)
+ .args(["create", "kept task"])
+ .current_dir(&tmp)
+ .assert()
+ .success();
- conn.execute(
- "INSERT INTO tasks (id, title, created, updated) \
- VALUES ('td-real', 'Real task', '2024-01-01T00:00:00Z', '2024-01-01T00:00:00Z')",
- [],
- )
- .unwrap();
+ let corrupted = tmp
+ .path()
+ .join(".local/share/td/projects/demo/changes")
+ .join("01ARZ3NDEKTSV4RRFFQ69G5FAV.loro");
+ std::fs::write(corrupted, b"not-a-valid-loro-delta").unwrap();
- // Inserting a blocker that references a nonexistent task should fail.
- let result = conn.execute(
- "INSERT INTO blockers (task_id, blocker_id) VALUES ('td-real', 'td-ghost')",
- [],
- );
- assert!(
- result.is_err(),
- "expected FK violation for nonexistent blocker_id"
- );
+ td(&tmp)
+ .args(["list"])
+ .current_dir(&tmp)
+ .assert()
+ .success()
+ .stdout(predicate::str::contains("kept task"))
+ .stderr(predicate::str::contains("skipping unreadable delta"));
}
#[test]
-fn labels_fk_cascades_on_task_delete() {
- let tmp = init_tmp();
- let conn = rusqlite::Connection::open(tmp.path().join(".td/tasks.db")).unwrap();
- conn.execute_batch("PRAGMA foreign_keys = ON").unwrap();
-
- conn.execute(
- "INSERT INTO tasks (id, title, created, updated) \
- VALUES ('td-labeled', 'Labeled task', '2024-01-01T00:00:00Z', '2024-01-01T00:00:00Z')",
- [],
- )
- .unwrap();
- conn.execute(
- "INSERT INTO labels (task_id, label) VALUES ('td-labeled', 'urgent')",
- [],
- )
- .unwrap();
+fn project_env_overrides_directory_binding() {
+ let tmp = TempDir::new().unwrap();
+ let alpha_dir = tmp.path().join("alpha");
+ let beta_dir = tmp.path().join("beta");
+ std::fs::create_dir_all(&alpha_dir).unwrap();
+ std::fs::create_dir_all(&beta_dir).unwrap();
+
+ td(&tmp)
+ .args(["init", "alpha"])
+ .current_dir(&alpha_dir)
+ .assert()
+ .success();
+ td(&tmp)
+ .args(["create", "alpha task"])
+ .current_dir(&alpha_dir)
+ .assert()
+ .success();
- conn.execute("DELETE FROM tasks WHERE id = 'td-labeled'", [])
- .unwrap();
+ td(&tmp)
+ .args(["init", "beta"])
+ .current_dir(&beta_dir)
+ .assert()
+ .success();
+ td(&tmp)
+ .args(["create", "beta task"])
+ .current_dir(&beta_dir)
+ .assert()
+ .success();
- let label_count: i32 = conn
- .query_row(
- "SELECT COUNT(*) FROM labels WHERE task_id = 'td-labeled'",
- [],
- |r| r.get(0),
- )
+ let out = td(&tmp)
+ .args(["--json", "list"])
+ .env("TD_PROJECT", "beta")
+ .current_dir(&alpha_dir)
+ .output()
.unwrap();
- assert_eq!(
- label_count, 0,
- "labels should be deleted via ON DELETE CASCADE"
- );
-}
-
-#[test]
-fn blockers_fk_cascades_on_task_delete() {
- let tmp = init_tmp();
- let conn = rusqlite::Connection::open(tmp.path().join(".td/tasks.db")).unwrap();
- conn.execute_batch("PRAGMA foreign_keys = ON").unwrap();
-
- conn.execute(
- "INSERT INTO tasks (id, title, created, updated) \
- VALUES ('td-a', 'Task A', '2024-01-01T00:00:00Z', '2024-01-01T00:00:00Z')",
- [],
- )
- .unwrap();
- conn.execute(
- "INSERT INTO tasks (id, title, created, updated) \
- VALUES ('td-b', 'Task B', '2024-01-01T00:00:00Z', '2024-01-01T00:00:00Z')",
- [],
- )
- .unwrap();
- conn.execute(
- "INSERT INTO tasks (id, title, created, updated) \
- VALUES ('td-c', 'Task C', '2024-01-01T00:00:00Z', '2024-01-01T00:00:00Z')",
- [],
- )
- .unwrap();
-
- // td-b appears as both task_id and blocker_id across these rows.
- conn.execute(
- "INSERT INTO blockers (task_id, blocker_id) VALUES ('td-b', 'td-a')",
- [],
- )
- .unwrap();
- conn.execute(
- "INSERT INTO blockers (task_id, blocker_id) VALUES ('td-c', 'td-b')",
- [],
- )
- .unwrap();
+ let v: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
- conn.execute("DELETE FROM tasks WHERE id = 'td-b'", [])
- .unwrap();
+ let titles: Vec<&str> = v
+ .as_array()
+ .unwrap()
+ .iter()
+ .map(|task| task["title"].as_str().unwrap())
+ .collect();
- let blocker_count: i32 = conn
- .query_row("SELECT COUNT(*) FROM blockers", [], |r| r.get(0))
- .unwrap();
- assert_eq!(
- blocker_count, 0,
- "rows referencing a deleted task should be deleted via ON DELETE CASCADE"
- );
+ assert!(titles.contains(&"beta task"));
+ assert!(!titles.contains(&"alpha task"));
}
#[test]
-fn migration_cleans_dangling_blocker_ids() {
+fn legacy_local_sqlite_artifacts_do_not_affect_commands() {
let tmp = TempDir::new().unwrap();
- let td_dir = tmp.path().join(".td");
- std::fs::create_dir_all(&td_dir).unwrap();
-
- // Create a v2 database (pre-0003) with a dangling blocker_id.
- let conn = rusqlite::Connection::open(td_dir.join("tasks.db")).unwrap();
- conn.execute_batch(
- "PRAGMA foreign_keys = OFF;
- CREATE TABLE tasks (
- id TEXT PRIMARY KEY,
- title TEXT NOT NULL,
- description TEXT DEFAULT '',
- type TEXT DEFAULT 'task',
- priority INTEGER DEFAULT 2,
- status TEXT DEFAULT 'open',
- parent TEXT DEFAULT '',
- created TEXT NOT NULL,
- updated TEXT NOT NULL,
- effort INTEGER NOT NULL DEFAULT 2
- );
- CREATE TABLE labels (
- task_id TEXT, label TEXT,
- PRIMARY KEY (task_id, label),
- FOREIGN KEY (task_id) REFERENCES tasks(id)
- );
- CREATE TABLE blockers (
- task_id TEXT, blocker_id TEXT,
- PRIMARY KEY (task_id, blocker_id),
- FOREIGN KEY (task_id) REFERENCES tasks(id)
- );
- INSERT INTO tasks (id, title, created, updated)
- VALUES ('td-a', 'Task A', '2024-01-01T00:00:00Z', '2024-01-01T00:00:00Z');
- INSERT INTO tasks (id, title, created, updated)
- VALUES ('td-b', 'Task B', '2024-01-01T00:00:00Z', '2024-01-01T00:00:00Z');
- -- Valid blocker
- INSERT INTO blockers (task_id, blocker_id) VALUES ('td-a', 'td-b');
- -- Dangling blocker referencing a task that doesn't exist
- INSERT INTO blockers (task_id, blocker_id) VALUES ('td-a', 'td-gone');
- PRAGMA user_version = 2;",
- )
- .unwrap();
- drop(conn);
- // Running any command triggers migration.
- td().args(["--json", "list"])
+ td(&tmp)
+ .args(["init", "demo"])
.current_dir(&tmp)
.assert()
.success();
- // The valid blocker should survive; the dangling one should be gone.
- let conn = rusqlite::Connection::open(td_dir.join("tasks.db")).unwrap();
- let count: i32 = conn
- .query_row(
- "SELECT COUNT(*) FROM blockers WHERE task_id = 'td-a'",
- [],
- |r| r.get(0),
- )
- .unwrap();
- assert_eq!(count, 1, "only the valid blocker should remain");
-
- let blocker: String = conn
- .query_row(
- "SELECT blocker_id FROM blockers WHERE task_id = 'td-a'",
- [],
- |r| r.get(0),
- )
- .unwrap();
- assert_eq!(blocker, "td-b");
-}
+ let legacy_dir = tmp.path().join(".td");
+ std::fs::create_dir_all(&legacy_dir).unwrap();
+ std::fs::write(legacy_dir.join("tasks.db"), b"legacy-sqlite-placeholder").unwrap();
-#[test]
-fn migration_cleans_dangling_labels() {
- let tmp = TempDir::new().unwrap();
- let td_dir = tmp.path().join(".td");
- std::fs::create_dir_all(&td_dir).unwrap();
-
- // Create a v4 database (pre-0005) with a dangling label row.
- let conn = rusqlite::Connection::open(td_dir.join("tasks.db")).unwrap();
- conn.execute_batch(
- "PRAGMA foreign_keys = OFF;
- CREATE TABLE tasks (
- id TEXT PRIMARY KEY,
- title TEXT NOT NULL,
- description TEXT DEFAULT '',
- type TEXT DEFAULT 'task',
- priority INTEGER DEFAULT 2,
- status TEXT DEFAULT 'open',
- parent TEXT DEFAULT '',
- created TEXT NOT NULL,
- updated TEXT NOT NULL,
- effort INTEGER NOT NULL DEFAULT 2
- );
- CREATE TABLE labels (
- task_id TEXT, label TEXT,
- PRIMARY KEY (task_id, label),
- FOREIGN KEY (task_id) REFERENCES tasks(id)
- );
- CREATE TABLE blockers (
- task_id TEXT, blocker_id TEXT,
- PRIMARY KEY (task_id, blocker_id),
- FOREIGN KEY (task_id) REFERENCES tasks(id),
- FOREIGN KEY (blocker_id) REFERENCES tasks(id)
- );
- CREATE TABLE task_logs (
- id INTEGER PRIMARY KEY AUTOINCREMENT,
- task_id TEXT NOT NULL,
- timestamp TEXT NOT NULL,
- body TEXT NOT NULL,
- FOREIGN KEY (task_id) REFERENCES tasks(id) ON DELETE CASCADE
- );
- INSERT INTO tasks (id, title, created, updated)
- VALUES ('td-real', 'Real task', '2024-01-01T00:00:00Z', '2024-01-01T00:00:00Z');
- INSERT INTO labels (task_id, label) VALUES ('td-real', 'kept');
- INSERT INTO labels (task_id, label) VALUES ('td-gone', 'orphan');
- PRAGMA user_version = 4;",
- )
- .unwrap();
- drop(conn);
-
- // Running any command triggers migration to v5.
- td().args(["--json", "list"])
+ td(&tmp)
+ .args(["create", "new storage path works"])
.current_dir(&tmp)
.assert()
.success();
- let conn = rusqlite::Connection::open(td_dir.join("tasks.db")).unwrap();
- let kept_count: i32 = conn
- .query_row(
- "SELECT COUNT(*) FROM labels WHERE task_id = 'td-real' AND label = 'kept'",
- [],
- |r| r.get(0),
- )
- .unwrap();
- assert_eq!(kept_count, 1, "valid label should survive migration");
-
- let orphan_count: i32 = conn
- .query_row(
- "SELECT COUNT(*) FROM labels WHERE task_id = 'td-gone'",
- [],
- |r| r.get(0),
- )
- .unwrap();
- assert_eq!(
- orphan_count, 0,
- "dangling label should be removed during migration"
- );
+ td(&tmp)
+ .args(["list"])
+ .current_dir(&tmp)
+ .assert()
+ .success()
+ .stdout(predicate::str::contains("new storage path works"));
}
@@ -2,18 +2,24 @@ use assert_cmd::Command;
use predicates::prelude::*;
use tempfile::TempDir;
-fn td() -> Command {
- Command::cargo_bin("td").unwrap()
+fn td(home: &TempDir) -> Command {
+ let mut cmd = Command::cargo_bin("td").unwrap();
+ cmd.env("HOME", home.path());
+ cmd
}
fn init_tmp() -> TempDir {
let tmp = TempDir::new().unwrap();
- td().arg("init").current_dir(&tmp).assert().success();
+ td(&tmp)
+ .args(["init", "main"])
+ .current_dir(&tmp)
+ .assert()
+ .success();
tmp
}
fn create_task(dir: &TempDir, title: &str, pri: &str, eff: &str) -> String {
- let out = td()
+ let out = td(dir)
.args(["--json", "create", title, "-p", pri, "-e", eff])
.current_dir(dir)
.output()
@@ -26,7 +32,8 @@ fn create_task(dir: &TempDir, title: &str, pri: &str, eff: &str) -> String {
fn next_no_open_tasks() {
let tmp = init_tmp();
- td().arg("next")
+ td(&tmp)
+ .arg("next")
.current_dir(&tmp)
.assert()
.success()
@@ -38,7 +45,8 @@ fn next_single_task() {
let tmp = init_tmp();
let id = create_task(&tmp, "Only task", "high", "low");
- td().arg("next")
+ td(&tmp)
+ .arg("next")
.current_dir(&tmp)
.assert()
.success()
@@ -54,12 +62,13 @@ fn next_impact_ranks_by_downstream() {
let a = create_task(&tmp, "Blocker", "medium", "medium");
let b = create_task(&tmp, "Blocked", "medium", "medium");
- td().args(["dep", "add", &b, &a])
+ td(&tmp)
+ .args(["dep", "add", &b, &a])
.current_dir(&tmp)
.assert()
.success();
- let out = td()
+ let out = td(&tmp)
.args(["--json", "next"])
.current_dir(&tmp)
.output()
@@ -80,7 +89,7 @@ fn next_effort_mode_prefers_low_effort() {
let a = create_task(&tmp, "Heavy", "medium", "high");
let b = create_task(&tmp, "Light", "medium", "low");
- let out = td()
+ let out = td(&tmp)
.args(["--json", "next", "--mode", "effort"])
.current_dir(&tmp)
.output()
@@ -99,12 +108,13 @@ fn next_verbose_shows_equation() {
let tmp = init_tmp();
create_task(&tmp, "Task A", "high", "low");
- td().args(["next", "--verbose"])
+ td(&tmp)
+ .args(["next", "--verbose"])
.current_dir(&tmp)
.assert()
.success()
- .stdout(predicate::str::contains("mode: impact"))
- .stdout(predicate::str::contains("Unblocks:"));
+ .stdout(predicate::str::contains("SCORE"))
+ .stdout(predicate::str::contains("score:"));
}
#[test]
@@ -112,12 +122,13 @@ fn next_verbose_effort_mode_shows_squared() {
let tmp = init_tmp();
create_task(&tmp, "Task A", "high", "medium");
- td().args(["next", "--verbose", "--mode", "effort"])
+ td(&tmp)
+ .args(["next", "--verbose", "--mode", "effort"])
.current_dir(&tmp)
.assert()
.success()
- .stdout(predicate::str::contains("mode: effort"))
- .stdout(predicate::str::contains("\u{00b2}"));
+ .stdout(predicate::str::contains("SCORE"))
+ .stdout(predicate::str::contains("score:"));
}
#[test]
@@ -127,7 +138,7 @@ fn next_limit_truncates() {
create_task(&tmp, "B", "medium", "medium");
create_task(&tmp, "C", "low", "high");
- let out = td()
+ let out = td(&tmp)
.args(["--json", "next", "-n", "2"])
.current_dir(&tmp)
.output()
@@ -140,7 +151,7 @@ fn next_limit_truncates() {
fn next_json_empty() {
let tmp = init_tmp();
- let out = td()
+ let out = td(&tmp)
.args(["--json", "next"])
.current_dir(&tmp)
.output()
@@ -154,7 +165,8 @@ fn next_invalid_mode_fails() {
let tmp = init_tmp();
create_task(&tmp, "X", "medium", "medium");
- td().args(["next", "--mode", "bogus"])
+ td(&tmp)
+ .args(["next", "--mode", "bogus"])
.current_dir(&tmp)
.assert()
.failure()
@@ -169,16 +181,18 @@ fn next_transitive_chain_scores_correctly() {
let b = create_task(&tmp, "Mid", "high", "medium");
let c = create_task(&tmp, "Leaf", "low", "high");
- td().args(["dep", "add", &b, &a])
+ td(&tmp)
+ .args(["dep", "add", &b, &a])
.current_dir(&tmp)
.assert()
.success();
- td().args(["dep", "add", &c, &b])
+ td(&tmp)
+ .args(["dep", "add", &c, &b])
.current_dir(&tmp)
.assert()
.success();
- let out = td()
+ let out = td(&tmp)
.args(["--json", "next"])
.current_dir(&tmp)
.output()
@@ -200,9 +214,13 @@ fn next_ignores_closed_tasks() {
let a = create_task(&tmp, "Open", "high", "low");
let b = create_task(&tmp, "Closed", "high", "low");
- td().args(["done", &b]).current_dir(&tmp).assert().success();
+ td(&tmp)
+ .args(["done", &b])
+ .current_dir(&tmp)
+ .assert()
+ .success();
- let out = td()
+ let out = td(&tmp)
.args(["--json", "next"])
.current_dir(&tmp)
.output()
@@ -219,7 +237,7 @@ fn next_excludes_parent_with_open_subtasks() {
let tmp = init_tmp();
let parent = create_task(&tmp, "Parent task", "high", "low");
// Create a subtask under the parent.
- let out = td()
+ let out = td(&tmp)
.args([
"--json",
"create",
@@ -237,7 +255,7 @@ fn next_excludes_parent_with_open_subtasks() {
let child: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
let child_id = child["id"].as_str().unwrap().to_string();
- let out = td()
+ let out = td(&tmp)
.args(["--json", "next"])
.current_dir(&tmp)
.output()
@@ -258,7 +276,7 @@ fn next_excludes_parent_with_open_subtasks() {
fn next_includes_parent_when_all_subtasks_closed() {
let tmp = init_tmp();
let parent = create_task(&tmp, "Parent task", "high", "low");
- let out = td()
+ let out = td(&tmp)
.args([
"--json",
"create",
@@ -277,12 +295,13 @@ fn next_includes_parent_when_all_subtasks_closed() {
let child_id = child["id"].as_str().unwrap().to_string();
// Close the subtask.
- td().args(["done", &child_id])
+ td(&tmp)
+ .args(["done", &child_id])
.current_dir(&tmp)
.assert()
.success();
- let out = td()
+ let out = td(&tmp)
.args(["--json", "next"])
.current_dir(&tmp)
.output()
@@ -303,7 +322,7 @@ fn next_nested_parents_excluded_at_each_level() {
let tmp = init_tmp();
// grandparent → parent → child (nested subtasks)
let gp = create_task(&tmp, "Grandparent", "high", "low");
- let out = td()
+ let out = td(&tmp)
.args([
"--json", "create", "Parent", "-p", "medium", "-e", "medium", "--parent", &gp,
])
@@ -313,7 +332,7 @@ fn next_nested_parents_excluded_at_each_level() {
let p: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
let p_id = p["id"].as_str().unwrap().to_string();
- let out = td()
+ let out = td(&tmp)
.args([
"--json", "create", "Child", "-p", "low", "-e", "low", "--parent", &p_id,
])
@@ -323,7 +342,7 @@ fn next_nested_parents_excluded_at_each_level() {
let c: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
let c_id = c["id"].as_str().unwrap().to_string();
- let out = td()
+ let out = td(&tmp)
.args(["--json", "next"])
.current_dir(&tmp)
.output()
@@ -2,18 +2,24 @@ use assert_cmd::Command;
use predicates::prelude::*;
use tempfile::TempDir;
-fn td() -> Command {
- Command::cargo_bin("td").unwrap()
+fn td(home: &TempDir) -> Command {
+ let mut cmd = Command::cargo_bin("td").unwrap();
+ cmd.env("HOME", home.path());
+ cmd
}
fn init_tmp() -> TempDir {
let tmp = TempDir::new().unwrap();
- td().arg("init").current_dir(&tmp).assert().success();
+ td(&tmp)
+ .args(["init", "main"])
+ .current_dir(&tmp)
+ .assert()
+ .success();
tmp
}
fn create_task(dir: &TempDir, title: &str) -> String {
- let out = td()
+ let out = td(dir)
.args(["--json", "create", title])
.current_dir(dir)
.output()
@@ -30,7 +36,8 @@ fn search_matches_title() {
create_task(&tmp, "Fix login page");
create_task(&tmp, "Update docs");
- td().args(["search", "login"])
+ td(&tmp)
+ .args(["search", "login"])
.current_dir(&tmp)
.assert()
.success()
@@ -41,12 +48,14 @@ fn search_matches_title() {
fn search_matches_description() {
let tmp = init_tmp();
- td().args(["create", "Vague title", "-d", "The frobnicator is broken"])
+ td(&tmp)
+ .args(["create", "Vague title", "-d", "The frobnicator is broken"])
.current_dir(&tmp)
.assert()
.success();
- td().args(["search", "frobnicator"])
+ td(&tmp)
+ .args(["search", "frobnicator"])
.current_dir(&tmp)
.assert()
.success()
@@ -58,7 +67,7 @@ fn search_json_returns_array() {
let tmp = init_tmp();
create_task(&tmp, "Needle in haystack");
- let out = td()
+ let out = td(&tmp)
.args(["--json", "search", "Needle"])
.current_dir(&tmp)
.output()
@@ -73,16 +82,17 @@ fn search_json_returns_array() {
#[test]
fn ready_excludes_blocked_tasks() {
let tmp = init_tmp();
- let a = create_task(&tmp, "Ready task");
+ let _a = create_task(&tmp, "Ready task");
let b = create_task(&tmp, "Blocked task");
let c = create_task(&tmp, "Blocker task");
- td().args(["dep", "add", &b, &c])
+ td(&tmp)
+ .args(["dep", "add", &b, &c])
.current_dir(&tmp)
.assert()
.success();
- let out = td()
+ let out = td(&tmp)
.args(["--json", "ready"])
.current_dir(&tmp)
.output()
@@ -100,9 +110,13 @@ fn ready_excludes_blocked_tasks() {
assert!(!titles.contains(&"Blocked task"));
// Close the blocker — now the blocked task should become ready.
- td().args(["done", &c]).current_dir(&tmp).assert().success();
+ td(&tmp)
+ .args(["done", &c])
+ .current_dir(&tmp)
+ .assert()
+ .success();
- let out = td()
+ let out = td(&tmp)
.args(["--json", "ready"])
.current_dir(&tmp)
.output()
@@ -126,12 +140,13 @@ fn stats_counts_tasks() {
let tmp = init_tmp();
let id = create_task(&tmp, "Open one");
create_task(&tmp, "Open two");
- td().args(["done", &id])
+ td(&tmp)
+ .args(["done", &id])
.current_dir(&tmp)
.assert()
.success();
- let out = td().args(["stats"]).current_dir(&tmp).output().unwrap();
+ let out = td(&tmp).args(["stats"]).current_dir(&tmp).output().unwrap();
let v: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
assert_eq!(v["total"].as_i64().unwrap(), 2);
assert_eq!(v["open"].as_i64().unwrap(), 1);
@@ -145,9 +160,10 @@ fn compact_succeeds() {
let tmp = init_tmp();
create_task(&tmp, "Anything");
- td().arg("compact")
+ td(&tmp)
+ .arg("compact")
.current_dir(&tmp)
.assert()
.success()
- .stderr(predicate::str::contains("done"));
+ .stderr(predicate::str::contains("writing compacted snapshot"));
}
@@ -2,18 +2,24 @@ use assert_cmd::Command;
use predicates::prelude::*;
use tempfile::TempDir;
-fn td() -> Command {
- Command::cargo_bin("td").unwrap()
+fn td(home: &TempDir) -> Command {
+ let mut cmd = Command::cargo_bin("td").unwrap();
+ cmd.env("HOME", home.path());
+ cmd
}
fn init_tmp() -> TempDir {
let tmp = TempDir::new().unwrap();
- td().arg("init").current_dir(&tmp).assert().success();
+ td(&tmp)
+ .args(["init", "main"])
+ .current_dir(&tmp)
+ .assert()
+ .success();
tmp
}
fn create_task(dir: &TempDir, title: &str) -> String {
- let out = td()
+ let out = td(dir)
.args(["--json", "create", title])
.current_dir(dir)
.output()
@@ -23,7 +29,7 @@ fn create_task(dir: &TempDir, title: &str) -> String {
}
fn get_task_json(dir: &TempDir, id: &str) -> serde_json::Value {
- let out = td()
+ let out = td(dir)
.args(["--json", "show", id])
.current_dir(dir)
.output()
@@ -36,17 +42,15 @@ fn rm_deletes_task() {
let tmp = init_tmp();
let id = create_task(&tmp, "Delete me");
- td().args(["rm", &id])
+ td(&tmp)
+ .args(["rm", &id])
.current_dir(&tmp)
.assert()
.success()
.stdout(predicate::str::contains("deleted"));
- td().args(["show", &id])
- .current_dir(&tmp)
- .assert()
- .failure()
- .stderr(predicate::str::contains("not found"));
+ let task = get_task_json(&tmp, &id);
+ assert_eq!(task["status"].as_str().unwrap(), "closed");
}
#[test]
@@ -55,31 +59,28 @@ fn rm_deletes_multiple_ids() {
let id1 = create_task(&tmp, "First");
let id2 = create_task(&tmp, "Second");
- td().args(["rm", &id1, &id2])
+ td(&tmp)
+ .args(["rm", &id1, &id2])
.current_dir(&tmp)
.assert()
.success();
- td().args(["show", &id1])
- .current_dir(&tmp)
- .assert()
- .failure();
- td().args(["show", &id2])
- .current_dir(&tmp)
- .assert()
- .failure();
+ assert_eq!(get_task_json(&tmp, &id1)["status"], "closed");
+ assert_eq!(get_task_json(&tmp, &id2)["status"], "closed");
}
#[test]
fn rm_requires_recursive_for_parent_task() {
let tmp = init_tmp();
let parent = create_task(&tmp, "Parent");
- td().args(["create", "Child", "--parent", &parent])
+ td(&tmp)
+ .args(["create", "Child", "--parent", &parent])
.current_dir(&tmp)
.assert()
.success();
- td().args(["rm", &parent])
+ td(&tmp)
+ .args(["rm", &parent])
.current_dir(&tmp)
.assert()
.failure()
@@ -91,35 +92,31 @@ fn rm_recursive_deletes_subtree() {
let tmp = init_tmp();
let parent = create_task(&tmp, "Parent");
- td().args(["create", "Child", "--parent", &parent])
+ let out = td(&tmp)
+ .args(["--json", "create", "Child", "--parent", &parent])
.current_dir(&tmp)
- .assert()
- .success();
- let child_id = format!("{parent}.1");
+ .output()
+ .unwrap();
+ let child: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
+ let child_id = child["id"].as_str().unwrap().to_string();
- td().args(["create", "Grandchild", "--parent", &child_id])
+ let out = td(&tmp)
+ .args(["--json", "create", "Grandchild", "--parent", &child_id])
.current_dir(&tmp)
- .assert()
- .success();
- let grandchild_id = format!("{child_id}.1");
+ .output()
+ .unwrap();
+ let grandchild: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
+ let grandchild_id = grandchild["id"].as_str().unwrap().to_string();
- td().args(["rm", "--recursive", &parent])
+ td(&tmp)
+ .args(["rm", "--recursive", &parent])
.current_dir(&tmp)
.assert()
.success();
- td().args(["show", &parent])
- .current_dir(&tmp)
- .assert()
- .failure();
- td().args(["show", &child_id])
- .current_dir(&tmp)
- .assert()
- .failure();
- td().args(["show", &grandchild_id])
- .current_dir(&tmp)
- .assert()
- .failure();
+ assert_eq!(get_task_json(&tmp, &parent)["status"], "closed");
+ assert_eq!(get_task_json(&tmp, &child_id)["status"], "closed");
+ assert_eq!(get_task_json(&tmp, &grandchild_id)["status"], "closed");
}
#[test]
@@ -128,12 +125,14 @@ fn rm_detaches_dependents_and_warns() {
let dependent = create_task(&tmp, "Dependent");
let blocker = create_task(&tmp, "Blocker");
- td().args(["dep", "add", &dependent, &blocker])
+ td(&tmp)
+ .args(["dep", "add", &dependent, &blocker])
.current_dir(&tmp)
.assert()
.success();
- td().args(["rm", &blocker])
+ td(&tmp)
+ .args(["rm", &blocker])
.current_dir(&tmp)
.assert()
.success()
@@ -151,12 +150,14 @@ fn rm_force_suppresses_unblocked_warning() {
let dependent = create_task(&tmp, "Dependent");
let blocker = create_task(&tmp, "Blocker");
- td().args(["dep", "add", &dependent, &blocker])
+ td(&tmp)
+ .args(["dep", "add", &dependent, &blocker])
.current_dir(&tmp)
.assert()
.success();
- td().args(["rm", "--force", &blocker])
+ td(&tmp)
+ .args(["rm", "--force", &blocker])
.current_dir(&tmp)
.assert()
.success()
@@ -169,12 +170,13 @@ fn rm_json_includes_deleted_and_unblocked_ids() {
let dependent = create_task(&tmp, "Dependent");
let blocker = create_task(&tmp, "Blocker");
- td().args(["dep", "add", &dependent, &blocker])
+ td(&tmp)
+ .args(["dep", "add", &dependent, &blocker])
.current_dir(&tmp)
.assert()
.success();
- let out = td()
+ let out = td(&tmp)
.args(["--json", "rm", &blocker])
.current_dir(&tmp)
.output()
@@ -2,18 +2,24 @@ use assert_cmd::Command;
use predicates::prelude::*;
use tempfile::TempDir;
-fn td() -> Command {
- Command::cargo_bin("td").unwrap()
+fn td(home: &TempDir) -> Command {
+ let mut cmd = Command::cargo_bin("td").unwrap();
+ cmd.env("HOME", home.path());
+ cmd
}
fn init_tmp() -> TempDir {
let tmp = TempDir::new().unwrap();
- td().arg("init").current_dir(&tmp).assert().success();
+ td(&tmp)
+ .args(["init", "main"])
+ .current_dir(&tmp)
+ .assert()
+ .success();
tmp
}
fn create_task(dir: &TempDir, title: &str) -> String {
- let out = td()
+ let out = td(dir)
.args(["--json", "create", title])
.current_dir(dir)
.output()
@@ -23,7 +29,7 @@ fn create_task(dir: &TempDir, title: &str) -> String {
}
fn get_task_json(dir: &TempDir, id: &str) -> serde_json::Value {
- let out = td()
+ let out = td(dir)
.args(["--json", "show", id])
.current_dir(dir)
.output()
@@ -38,7 +44,8 @@ fn update_changes_status() {
let tmp = init_tmp();
let id = create_task(&tmp, "In progress");
- td().args(["update", &id, "-s", "in_progress"])
+ td(&tmp)
+ .args(["update", &id, "-s", "in_progress"])
.current_dir(&tmp)
.assert()
.success()
@@ -53,13 +60,14 @@ fn update_changes_priority() {
let tmp = init_tmp();
let id = create_task(&tmp, "Reprioritise");
- td().args(["update", &id, "-p", "high"])
+ td(&tmp)
+ .args(["update", &id, "-p", "high"])
.current_dir(&tmp)
.assert()
.success();
let t = get_task_json(&tmp, &id);
- assert_eq!(t["priority"].as_i64().unwrap(), 1);
+ assert_eq!(t["priority"].as_str().unwrap(), "high");
}
#[test]
@@ -67,7 +75,8 @@ fn update_changes_title() {
let tmp = init_tmp();
let id = create_task(&tmp, "Old title");
- td().args(["update", &id, "-t", "New title"])
+ td(&tmp)
+ .args(["update", &id, "-t", "New title"])
.current_dir(&tmp)
.assert()
.success();
@@ -81,7 +90,8 @@ fn update_changes_description() {
let tmp = init_tmp();
let id = create_task(&tmp, "Describe me");
- td().args(["update", &id, "-d", "Now with details"])
+ td(&tmp)
+ .args(["update", &id, "-d", "Now with details"])
.current_dir(&tmp)
.assert()
.success();
@@ -95,13 +105,13 @@ fn update_json_returns_task() {
let tmp = init_tmp();
let id = create_task(&tmp, "JSON update");
- let out = td()
+ let out = td(&tmp)
.args(["--json", "update", &id, "-p", "high"])
.current_dir(&tmp)
.output()
.unwrap();
let v: serde_json::Value = serde_json::from_slice(&out.stdout).unwrap();
- assert_eq!(v["priority"].as_i64().unwrap(), 1);
+ assert_eq!(v["priority"].as_str().unwrap(), "high");
}
#[test]
@@ -109,13 +119,14 @@ fn update_changes_effort() {
let tmp = init_tmp();
let id = create_task(&tmp, "Re-estimate");
- td().args(["update", &id, "-e", "high"])
+ td(&tmp)
+ .args(["update", &id, "-e", "high"])
.current_dir(&tmp)
.assert()
.success();
let t = get_task_json(&tmp, &id);
- assert_eq!(t["effort"].as_i64().unwrap(), 3);
+ assert_eq!(t["effort"].as_str().unwrap(), "high");
}
// ── done ─────────────────────────────────────────────────────────────
@@ -125,7 +136,8 @@ fn done_closes_task() {
let tmp = init_tmp();
let id = create_task(&tmp, "Close me");
- td().args(["done", &id])
+ td(&tmp)
+ .args(["done", &id])
.current_dir(&tmp)
.assert()
.success()
@@ -141,7 +153,8 @@ fn done_closes_multiple_tasks() {
let id1 = create_task(&tmp, "First");
let id2 = create_task(&tmp, "Second");
- td().args(["done", &id1, &id2])
+ td(&tmp)
+ .args(["done", &id1, &id2])
.current_dir(&tmp)
.assert()
.success();
@@ -157,13 +170,15 @@ fn reopen_reopens_closed_task() {
let tmp = init_tmp();
let id = create_task(&tmp, "Reopen me");
- td().args(["done", &id])
+ td(&tmp)
+ .args(["done", &id])
.current_dir(&tmp)
.assert()
.success();
assert_eq!(get_task_json(&tmp, &id)["status"], "closed");
- td().args(["reopen", &id])
+ td(&tmp)
+ .args(["reopen", &id])
.current_dir(&tmp)
.assert()
.success()