// db.rs

  1use anyhow::{anyhow, bail, Context, Result};
  2use loro::{Container, ExportMode, LoroDoc, LoroMap, PeerID, ValueOrContainer};
  3use serde::{Deserialize, Serialize};
  4use serde_json::Value;
  5use std::collections::BTreeMap;
  6use std::fmt;
  7use std::fs::{self, File, OpenOptions};
  8use std::io::Write;
  9use std::path::{Path, PathBuf};
 10use ulid::Ulid;
 11
/// Environment variable that overrides directory-based project resolution.
pub const PROJECT_ENV: &str = "TD_PROJECT";

// On-disk layout names under the data root.
const PROJECTS_DIR: &str = "projects";
const CHANGES_DIR: &str = "changes";
const BINDINGS_FILE: &str = "bindings.json";
const BASE_FILE: &str = "base.loro";
// Suffix for in-progress atomic writes; such files are skipped on load.
const TMP_SUFFIX: &str = ".tmp";
// Bumped when the persisted document layout changes incompatibly.
const SCHEMA_VERSION: u32 = 1;
 20
 21/// Current UTC time in ISO 8601 format.
 22pub fn now_utc() -> String {
 23    chrono::Utc::now().format("%Y-%m-%dT%H:%M:%SZ").to_string()
 24}
 25
 26/// Lifecycle state for a task.
 27#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
 28#[serde(rename_all = "snake_case")]
 29pub enum Status {
 30    Open,
 31    InProgress,
 32    Closed,
 33}
 34
 35impl Status {
 36    fn as_str(self) -> &'static str {
 37        match self {
 38            Status::Open => "open",
 39            Status::InProgress => "in_progress",
 40            Status::Closed => "closed",
 41        }
 42    }
 43
 44    fn parse(raw: &str) -> Result<Self> {
 45        match raw {
 46            "open" => Ok(Self::Open),
 47            "in_progress" => Ok(Self::InProgress),
 48            "closed" => Ok(Self::Closed),
 49            _ => bail!("invalid status '{raw}'"),
 50        }
 51    }
 52}
 53
 54/// Priority for task ordering.
 55#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
 56#[serde(rename_all = "snake_case")]
 57pub enum Priority {
 58    High,
 59    Medium,
 60    Low,
 61}
 62
 63impl Priority {
 64    fn as_str(self) -> &'static str {
 65        match self {
 66            Priority::High => "high",
 67            Priority::Medium => "medium",
 68            Priority::Low => "low",
 69        }
 70    }
 71
 72    fn parse(raw: &str) -> Result<Self> {
 73        match raw {
 74            "high" => Ok(Self::High),
 75            "medium" => Ok(Self::Medium),
 76            "low" => Ok(Self::Low),
 77            _ => bail!("invalid priority '{raw}'"),
 78        }
 79    }
 80
 81    pub fn score(self) -> i32 {
 82        match self {
 83            Priority::High => 1,
 84            Priority::Medium => 2,
 85            Priority::Low => 3,
 86        }
 87    }
 88}
 89
 90/// Estimated effort for a task.
 91#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
 92#[serde(rename_all = "snake_case")]
 93pub enum Effort {
 94    Low,
 95    Medium,
 96    High,
 97}
 98
 99impl Effort {
100    fn as_str(self) -> &'static str {
101        match self {
102            Effort::Low => "low",
103            Effort::Medium => "medium",
104            Effort::High => "high",
105        }
106    }
107
108    fn parse(raw: &str) -> Result<Self> {
109        match raw {
110            "low" => Ok(Self::Low),
111            "medium" => Ok(Self::Medium),
112            "high" => Ok(Self::High),
113            _ => bail!("invalid effort '{raw}'"),
114        }
115    }
116
117    pub fn score(self) -> i32 {
118        match self {
119            Effort::Low => 1,
120            Effort::Medium => 2,
121            Effort::High => 3,
122        }
123    }
124}
125
126/// A stable task identifier backed by a ULID.
127#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize)]
128#[serde(transparent)]
129pub struct TaskId(String);
130
131impl TaskId {
132    pub fn new(id: Ulid) -> Self {
133        Self(id.to_string())
134    }
135
136    pub fn parse(raw: &str) -> Result<Self> {
137        let id = Ulid::from_string(raw).with_context(|| format!("invalid task id '{raw}'"))?;
138        Ok(Self::new(id))
139    }
140
141    pub fn as_str(&self) -> &str {
142        &self.0
143    }
144
145    pub fn short(&self) -> String {
146        self.0.chars().take(7).collect()
147    }
148}
149
150impl fmt::Display for TaskId {
151    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
152        write!(f, "{}", self.short())
153    }
154}
155
/// A task log entry embedded in a task record.
#[derive(Debug, Clone, Serialize)]
pub struct LogEntry {
    // ULID of the log entry itself; doubles as its chronological sort key.
    pub id: TaskId,
    // ISO 8601 UTC timestamp (see `now_utc`).
    pub timestamp: String,
    // Free-form log text.
    pub message: String,
}
163
/// Hydrated task data from the CRDT document.
#[derive(Debug, Clone, Serialize)]
pub struct Task {
    pub id: TaskId,
    pub title: String,
    pub description: String,
    // Free-form category string; serialized under the key "type".
    #[serde(rename = "type")]
    pub task_type: String,
    pub priority: Priority,
    pub status: Status,
    pub effort: Effort,
    // Optional parent task for hierarchy (stored as empty string when absent).
    pub parent: Option<TaskId>,
    // ISO 8601 UTC timestamps (see `now_utc`).
    pub created_at: String,
    pub updated_at: String,
    // Tombstone marker: Some(..) means the task is soft-deleted.
    pub deleted_at: Option<String>,
    pub labels: Vec<String>,
    // Ids of tasks that block this one.
    pub blockers: Vec<TaskId>,
    // Log entries sorted by entry ULID (i.e. chronologically).
    pub logs: Vec<LogEntry>,
}
183
/// Result type for partitioning blockers by task state.
#[derive(Debug, Default, Clone, Serialize)]
pub struct BlockerPartition {
    // Blockers that still block: task exists, not closed, not deleted.
    pub open: Vec<TaskId>,
    // Blockers that no longer block: closed, deleted, or missing.
    pub resolved: Vec<TaskId>,
}
190
// On-disk format of `bindings.json`: canonical directory path -> project name.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
struct BindingsFile {
    #[serde(default)]
    bindings: BTreeMap<String, String>,
}
196
/// Storage wrapper around one project's Loro document and disk layout.
#[derive(Debug, Clone)]
pub struct Store {
    // Data root (see `data_root`), not the project directory itself.
    root: PathBuf,
    // Validated project name; used to derive the project directory.
    project: String,
    // In-memory CRDT document; mutations are persisted as delta files.
    doc: LoroDoc,
}
204
impl Store {
    /// Create a new project on disk and return an open handle to it.
    ///
    /// Fails if the project directory already exists. Seeds the document with
    /// a root `tasks` map and a `meta` map (schema version, project ULID,
    /// creation time), then writes the base snapshot atomically.
    pub fn init(root: &Path, project: &str) -> Result<Self> {
        validate_project_name(project)?;
        let project_dir = project_dir(root, project);
        if project_dir.exists() {
            bail!("project '{project}' already exists");
        }
        fs::create_dir_all(project_dir.join(CHANGES_DIR))?;

        let doc = LoroDoc::new();
        // Stable per-device peer id keeps CRDT op ids consistent across runs.
        doc.set_peer_id(load_or_create_device_peer_id(root)?)?;
        // Materialize the root "tasks" container (return value not needed).
        doc.get_map("tasks");

        let meta = doc.get_map("meta");
        meta.insert("schema_version", SCHEMA_VERSION as i64)?;
        meta.insert("project_id", Ulid::new().to_string())?;
        meta.insert("created_at", now_utc())?;

        let snapshot = doc
            .export(ExportMode::Snapshot)
            .context("failed to export initial loro snapshot")?;
        atomic_write_file(&project_dir.join(BASE_FILE), &snapshot)?;

        Ok(Self {
            root: root.to_path_buf(),
            project: project.to_string(),
            doc,
        })
    }

    /// Open an existing project: load the base snapshot, then replay all
    /// pending delta files in ULID (creation-time) order.
    pub fn open(root: &Path, project: &str) -> Result<Self> {
        validate_project_name(project)?;
        let project_dir = project_dir(root, project);
        let base_path = project_dir.join(BASE_FILE);

        if !base_path.exists() {
            bail!("project '{project}' is not initialized. Run 'td init {project}'");
        }

        let base = fs::read(&base_path)
            .with_context(|| format!("failed to read loro snapshot '{}'", base_path.display()))?;

        let doc = LoroDoc::from_snapshot(&base).context("failed to load loro snapshot")?;
        doc.set_peer_id(load_or_create_device_peer_id(root)?)?;

        // Replay deltas in ULID order; collect_delta_paths only yields
        // ULID-named files, so every key parses to Some(..).
        let mut deltas = collect_delta_paths(&project_dir)?;
        deltas.sort_by_key(|path| {
            path.file_stem()
                .and_then(|s| s.to_str())
                .and_then(|s| Ulid::from_string(s).ok())
        });

        for delta_path in deltas {
            let bytes = fs::read(&delta_path)
                .with_context(|| format!("failed to read loro delta '{}'", delta_path.display()))?;
            if let Err(err) = doc.import(&bytes) {
                // Tolerate malformed or stale delta files as requested by design.
                eprintln!(
                    "warning: skipping unreadable delta '{}': {err}",
                    delta_path.display()
                );
            }
        }

        Ok(Self {
            root: root.to_path_buf(),
            project: project.to_string(),
            doc,
        })
    }

    /// Data root this store was opened against.
    pub fn root(&self) -> &Path {
        &self.root
    }

    /// Name of the open project.
    pub fn project_name(&self) -> &str {
        &self.project
    }

    /// Underlying CRDT document, for direct container access.
    pub fn doc(&self) -> &LoroDoc {
        &self.doc
    }

    /// Export all current state to a fresh base snapshot.
    pub fn write_snapshot(&self) -> Result<PathBuf> {
        let out = project_dir(&self.root, &self.project).join(BASE_FILE);
        let bytes = self
            .doc
            .export(ExportMode::Snapshot)
            .context("failed to export loro snapshot")?;
        atomic_write_file(&out, &bytes)?;
        Ok(out)
    }

    /// Delete persisted delta files after a fresh snapshot has been written.
    ///
    /// Returns the number of files removed. Only call after `write_snapshot`
    /// has succeeded, or the dropped deltas are lost.
    pub fn purge_deltas(&self) -> Result<usize> {
        let project_dir = project_dir(&self.root, &self.project);
        let paths = collect_delta_paths(&project_dir)?;
        let mut removed = 0usize;
        for path in paths {
            fs::remove_file(&path)
                .with_context(|| format!("failed removing delta '{}'", path.display()))?;
            removed += 1;
        }
        Ok(removed)
    }

    /// Apply a local mutation and persist only the resulting delta.
    ///
    /// Captures the oplog version vector *before* mutating so the export
    /// contains exactly the ops produced by `mutator`. The delta file is
    /// named with a fresh ULID so replay in `open` follows creation order.
    pub fn apply_and_persist<F>(&self, mutator: F) -> Result<PathBuf>
    where
        F: FnOnce(&LoroDoc) -> Result<()>,
    {
        let before = self.doc.oplog_vv();
        mutator(&self.doc)?;
        self.doc.commit();

        let delta = self
            .doc
            .export(ExportMode::updates(&before))
            .context("failed to export loro update delta")?;

        let filename = format!("{}.loro", Ulid::new());
        let path = project_dir(&self.root, &self.project)
            .join(CHANGES_DIR)
            .join(filename);
        atomic_write_file(&path, &delta)?;
        Ok(path)
    }

    /// Return hydrated tasks, excluding tombstones.
    pub fn list_tasks(&self) -> Result<Vec<Task>> {
        self.list_tasks_inner(false)
    }

    /// Return hydrated tasks, including tombstoned rows.
    pub fn list_tasks_unfiltered(&self) -> Result<Vec<Task>> {
        self.list_tasks_inner(true)
    }

    /// Find a task by exact ULID string.
    pub fn get_task(&self, id: &TaskId, include_deleted: bool) -> Result<Option<Task>> {
        let tasks = if include_deleted {
            self.list_tasks_unfiltered()?
        } else {
            self.list_tasks()?
        };
        Ok(tasks.into_iter().find(|task| task.id == *id))
    }

    /// Hydrate every task from the document's JSON projection, optionally
    /// filtering out tombstoned rows; result is sorted by ULID.
    fn list_tasks_inner(&self, include_deleted: bool) -> Result<Vec<Task>> {
        let root = serde_json::to_value(self.doc.get_deep_value())?;
        let tasks_obj = root
            .get("tasks")
            .and_then(Value::as_object)
            .ok_or_else(|| anyhow!("missing root tasks map"))?;

        let mut tasks = Vec::with_capacity(tasks_obj.len());
        for (task_id_raw, task_json) in tasks_obj {
            let task = hydrate_task(task_id_raw, task_json)?;
            if include_deleted || task.deleted_at.is_none() {
                tasks.push(task);
            }
        }

        // ULID order doubles as creation order.
        tasks.sort_by(|a, b| a.id.as_str().cmp(b.id.as_str()));
        Ok(tasks)
    }

    /// Read `meta.schema_version` from the document.
    pub fn schema_version(&self) -> Result<u32> {
        let root = serde_json::to_value(self.doc.get_deep_value())?;
        let meta = root
            .get("meta")
            .and_then(Value::as_object)
            .ok_or_else(|| anyhow!("missing root meta map"))?;
        let n = meta
            .get("schema_version")
            .and_then(Value::as_u64)
            .ok_or_else(|| anyhow!("invalid or missing meta.schema_version"))?;
        Ok(n as u32)
    }
}
386
/// Generate a new task ULID.
///
/// ULIDs sort lexicographically by creation time, which gives tasks a stable
/// chronological ordering for free.
pub fn gen_id() -> TaskId {
    TaskId::new(Ulid::new())
}
391
/// Parse a status label (e.g. from CLI input) into a [`Status`].
pub fn parse_status(s: &str) -> Result<Status> {
    Status::parse(s)
}
395
/// Parse a priority label (e.g. from CLI input) into a [`Priority`].
pub fn parse_priority(s: &str) -> Result<Priority> {
    Priority::parse(s)
}
399
/// Parse an effort label (e.g. from CLI input) into an [`Effort`].
pub fn parse_effort(s: &str) -> Result<Effort> {
    Effort::parse(s)
}
403
/// Canonical label for a [`Status`] (inverse of [`parse_status`]).
pub fn status_label(s: Status) -> &'static str {
    s.as_str()
}
407
/// Canonical label for a [`Priority`] (inverse of [`parse_priority`]).
pub fn priority_label(p: Priority) -> &'static str {
    p.as_str()
}
411
/// Canonical label for an [`Effort`] (inverse of [`parse_effort`]).
pub fn effort_label(e: Effort) -> &'static str {
    e.as_str()
}
415
416pub fn data_root() -> Result<PathBuf> {
417    let home = std::env::var("HOME").context("HOME is not set")?;
418    Ok(PathBuf::from(home).join(".local").join("share").join("td"))
419}
420
/// Create a new project and bind `cwd` to it.
pub fn init(cwd: &Path, project: &str) -> Result<Store> {
    let root = data_root()?;
    fs::create_dir_all(root.join(PROJECTS_DIR))?;
    let store = Store::init(&root, project)?;
    // Bind only after init succeeds so a failed init leaves no dangling binding.
    bind_project(cwd, project)?;
    Ok(store)
}
428
429pub fn use_project(cwd: &Path, project: &str) -> Result<()> {
430    let root = data_root()?;
431    validate_project_name(project)?;
432    if !project_dir(&root, project).join(BASE_FILE).exists() {
433        bail!("project '{project}' not found. Run 'td projects' to list known projects");
434    }
435    bind_project(cwd, project)
436}
437
/// Open the project for `start`: an explicit TD_PROJECT wins, otherwise the
/// deepest directory binding covering `start` is used.
pub fn open(start: &Path) -> Result<Store> {
    let root = data_root()?;
    let explicit = std::env::var(PROJECT_ENV).ok();
    let project = resolve_project_name(start, &root, explicit.as_deref())?;
    Store::open(&root, &project)
}
444
445pub fn list_projects() -> Result<Vec<String>> {
446    let root = data_root()?;
447    let mut out = Vec::new();
448    let dir = root.join(PROJECTS_DIR);
449    if !dir.exists() {
450        return Ok(out);
451    }
452
453    for entry in fs::read_dir(dir)? {
454        let path = entry?.path();
455        if !path.is_dir() {
456            continue;
457        }
458        let Some(name) = path.file_name().and_then(|n| n.to_str()) else {
459            continue;
460        };
461        if path.join(BASE_FILE).exists() {
462            out.push(name.to_string());
463        }
464    }
465
466    out.sort();
467    Ok(out)
468}
469
470pub fn resolve_task_id(store: &Store, raw: &str, include_deleted: bool) -> Result<TaskId> {
471    if let Ok(id) = TaskId::parse(raw) {
472        if store.get_task(&id, include_deleted)?.is_some() {
473            return Ok(id);
474        }
475    }
476
477    let tasks = if include_deleted {
478        store.list_tasks_unfiltered()?
479    } else {
480        store.list_tasks()?
481    };
482
483    let matches: Vec<TaskId> = tasks
484        .into_iter()
485        .filter(|t| t.id.as_str().starts_with(raw))
486        .map(|t| t.id)
487        .collect();
488
489    match matches.as_slice() {
490        [] => bail!("task '{raw}' not found"),
491        [id] => Ok(id.clone()),
492        _ => bail!("task reference '{raw}' is ambiguous"),
493    }
494}
495
496pub fn partition_blockers(store: &Store, blockers: &[TaskId]) -> Result<BlockerPartition> {
497    let mut out = BlockerPartition::default();
498    for blocker in blockers {
499        let Some(task) = store.get_task(blocker, true)? else {
500            out.resolved.push(blocker.clone());
501            continue;
502        };
503        if task.status == Status::Closed || task.deleted_at.is_some() {
504            out.resolved.push(blocker.clone());
505        } else {
506            out.open.push(blocker.clone());
507        }
508    }
509    Ok(out)
510}
511
512pub fn insert_task_map(tasks: &LoroMap, task_id: &TaskId) -> Result<LoroMap> {
513    tasks
514        .insert_container(task_id.as_str(), LoroMap::new())
515        .context("failed to create task map")
516}
517
518pub fn get_task_map(tasks: &LoroMap, task_id: &TaskId) -> Result<Option<LoroMap>> {
519    match tasks.get(task_id.as_str()) {
520        Some(ValueOrContainer::Container(Container::Map(map))) => Ok(Some(map)),
521        Some(_) => bail!("task '{}' has invalid container type", task_id.as_str()),
522        None => Ok(None),
523    }
524}
525
526pub fn get_or_create_child_map(parent: &LoroMap, key: &str) -> Result<LoroMap> {
527    parent
528        .get_or_create_container(key, LoroMap::new())
529        .with_context(|| format!("failed to get or create map key '{key}'"))
530}
531
/// Path of the bindings file, directly under the data root.
fn bindings_path(root: &Path) -> PathBuf {
    root.join(BINDINGS_FILE)
}
535
536fn resolve_project_name(start: &Path, root: &Path, explicit: Option<&str>) -> Result<String> {
537    if let Some(project) = explicit {
538        validate_project_name(project)?;
539        return Ok(project.to_string());
540    }
541
542    let cwd = canonicalize_binding_path(start)?;
543    let bindings = load_bindings(root)?;
544
545    let mut best: Option<(usize, String)> = None;
546    for (raw_path, project) in bindings.bindings {
547        let bound = PathBuf::from(raw_path);
548        if is_prefix_path(&bound, &cwd) {
549            let score = bound.components().count();
550            match &best {
551                Some((best_score, _)) if *best_score >= score => {}
552                _ => best = Some((score, project)),
553            }
554        }
555    }
556
557    if let Some((_, project)) = best {
558        return Ok(project);
559    }
560
561    bail!(
562        "no project selected. Use --project/TD_PROJECT, run 'td use <name>', or run 'td init <name>'"
563    )
564}
565
/// Record `cwd` (canonicalized) -> `project` in the bindings file.
fn bind_project(cwd: &Path, project: &str) -> Result<()> {
    validate_project_name(project)?;

    let root = data_root()?;
    fs::create_dir_all(&root)?;

    // Canonical keys make the prefix match in resolve_project_name reliable.
    let canonical = canonicalize_binding_path(cwd)?;
    let mut bindings = load_bindings(&root)?;
    bindings
        .bindings
        .insert(canonical.to_string_lossy().to_string(), project.to_string());
    save_bindings(&root, &bindings)
}
579
580fn load_bindings(root: &Path) -> Result<BindingsFile> {
581    let path = bindings_path(root);
582    if !path.exists() {
583        return Ok(BindingsFile::default());
584    }
585    let content = fs::read_to_string(&path)
586        .with_context(|| format!("failed reading bindings from '{}'", path.display()))?;
587    serde_json::from_str(&content)
588        .with_context(|| format!("invalid bindings file '{}'", path.display()))
589}
590
591fn save_bindings(root: &Path, bindings: &BindingsFile) -> Result<()> {
592    let path = bindings_path(root);
593    let bytes = serde_json::to_vec_pretty(bindings)?;
594    atomic_write_file(&path, &bytes)
595}
596
/// Canonicalize a path for use as a binding key.
///
/// `fs::canonicalize` resolves symlinks and errors if the path does not exist.
fn canonicalize_binding_path(path: &Path) -> Result<PathBuf> {
    fs::canonicalize(path).with_context(|| format!("failed to canonicalize '{}'", path.display()))
}
600
/// True when `prefix` is `target` or one of its ancestors, compared
/// component-wise.
///
/// The previous hand-rolled component loop reimplemented exactly what
/// [`Path::starts_with`] does (both iterate normalized `components()`),
/// so defer to the standard library.
fn is_prefix_path(prefix: &Path, target: &Path) -> bool {
    target.starts_with(prefix)
}
614
615fn validate_project_name(name: &str) -> Result<()> {
616    if name.is_empty() {
617        bail!("project name cannot be empty");
618    }
619    if name.contains('/') || name.contains('\\') || name == "." || name == ".." {
620        bail!("invalid project name '{name}'");
621    }
622    if name.chars().any(char::is_control) {
623        bail!("invalid project name '{name}'");
624    }
625    Ok(())
626}
627
/// Rebuild a [`Task`] from its JSON projection of the CRDT state.
///
/// `labels`, `blockers`, and `logs` are stored as maps keyed by value/ULID
/// (set semantics under CRDT merging); only the keys — plus the log payloads —
/// matter here.
fn hydrate_task(task_id_raw: &str, value: &Value) -> Result<Task> {
    let obj = value
        .as_object()
        .ok_or_else(|| anyhow!("task '{task_id_raw}' is not an object"))?;

    let id = TaskId::parse(task_id_raw)?;

    let title = get_required_string(obj, "title")?;
    let description = get_required_string(obj, "description")?;
    let task_type = get_required_string(obj, "type")?;
    let status = Status::parse(&get_required_string(obj, "status")?)?;
    let priority = Priority::parse(&get_required_string(obj, "priority")?)?;
    let effort = Effort::parse(&get_required_string(obj, "effort")?)?;
    // Empty string doubles as "no parent" so the field can be cleared in place.
    let parent = match obj.get("parent").and_then(Value::as_str) {
        Some("") | None => None,
        Some(raw) => Some(TaskId::parse(raw)?),
    };

    let created_at = get_required_string(obj, "created_at")?;
    let updated_at = get_required_string(obj, "updated_at")?;
    // Same convention: empty string means "not deleted".
    let deleted_at = obj
        .get("deleted_at")
        .and_then(Value::as_str)
        .map(str::to_owned)
        .filter(|s| !s.is_empty());

    // Label set: map keys are the labels themselves.
    let labels = obj
        .get("labels")
        .and_then(Value::as_object)
        .map(|m| m.keys().cloned().collect())
        .unwrap_or_default();

    // Blocker set: map keys are blocker ULIDs; any malformed key is an error.
    let blockers = obj
        .get("blockers")
        .and_then(Value::as_object)
        .map(|m| {
            m.keys()
                .map(|raw| TaskId::parse(raw))
                .collect::<Result<Vec<_>>>()
        })
        .transpose()?
        .unwrap_or_default();

    // Logs: keyed by entry ULID with {timestamp, message} payloads.
    let mut logs = obj
        .get("logs")
        .and_then(Value::as_object)
        .map(|logs| {
            logs.iter()
                .map(|(log_id_raw, payload)| {
                    let payload_obj = payload.as_object().ok_or_else(|| {
                        anyhow!("log '{log_id_raw}' on task '{task_id_raw}' is not an object")
                    })?;
                    Ok(LogEntry {
                        id: TaskId::parse(log_id_raw)?,
                        timestamp: get_required_string(payload_obj, "timestamp")?,
                        message: get_required_string(payload_obj, "message")?,
                    })
                })
                .collect::<Result<Vec<_>>>()
        })
        .transpose()?
        .unwrap_or_default();

    // ULID order == chronological order for log entries.
    logs.sort_by(|a, b| a.id.as_str().cmp(b.id.as_str()));

    Ok(Task {
        id,
        title,
        description,
        task_type,
        priority,
        status,
        effort,
        parent,
        created_at,
        updated_at,
        deleted_at,
        labels,
        blockers,
        logs,
    })
}
710
711fn get_required_string(map: &serde_json::Map<String, Value>, key: &str) -> Result<String> {
712    map.get(key)
713        .and_then(Value::as_str)
714        .map(str::to_owned)
715        .ok_or_else(|| anyhow!("missing or non-string key '{key}'"))
716}
717
718fn collect_delta_paths(project_dir: &Path) -> Result<Vec<PathBuf>> {
719    let mut paths = Vec::new();
720    collect_changes_from_dir(&project_dir.join(CHANGES_DIR), &mut paths)?;
721
722    for entry in fs::read_dir(project_dir)? {
723        let path = entry?.path();
724        if !path.is_dir() {
725            continue;
726        }
727        let Some(name) = path.file_name().and_then(|n| n.to_str()) else {
728            continue;
729        };
730        if name.starts_with("changes.compacting.") {
731            collect_changes_from_dir(&path, &mut paths)?;
732        }
733    }
734
735    Ok(paths)
736}
737
738fn collect_changes_from_dir(dir: &Path, out: &mut Vec<PathBuf>) -> Result<()> {
739    if !dir.exists() {
740        return Ok(());
741    }
742
743    for entry in fs::read_dir(dir)? {
744        let path = entry?.path();
745        if !path.is_file() {
746            continue;
747        }
748
749        let Some(filename) = path.file_name().and_then(|n| n.to_str()) else {
750            continue;
751        };
752        if filename.ends_with(TMP_SUFFIX) || !filename.ends_with(".loro") {
753            continue;
754        }
755
756        let Some(stem) = path.file_stem().and_then(|s| s.to_str()) else {
757            continue;
758        };
759        if Ulid::from_string(stem).is_err() {
760            continue;
761        }
762
763        out.push(path);
764    }
765
766    Ok(())
767}
768
/// Directory holding one project's base snapshot and delta files.
fn project_dir(root: &Path, project: &str) -> PathBuf {
    root.join(PROJECTS_DIR).join(project)
}
772
/// Load (or create on first run) this device's stable Loro peer id.
///
/// The id is persisted as a ULID in `<root>/device_id`; the low 64 bits of
/// the 128-bit ULID become the `PeerID`. NOTE(review): the truncation means
/// two devices could in principle collide on the low 64 bits — presumably
/// acceptable odds, but worth confirming against the sync design.
fn load_or_create_device_peer_id(root: &Path) -> Result<PeerID> {
    let path = root.join("device_id");
    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent)?;
    }

    let device_ulid = if path.exists() {
        let content = fs::read_to_string(&path)
            .with_context(|| format!("failed reading device id from '{}'", path.display()))?;
        Ulid::from_string(content.trim()).context("invalid persisted device id ULID")?
    } else {
        // First run: mint an id and persist it atomically for future runs.
        let id = Ulid::new();
        atomic_write_file(&path, id.to_string().as_bytes())?;
        id
    };

    let raw: u128 = device_ulid.into();
    // Keep only the low 64 bits; the mask makes the truncation explicit.
    Ok((raw & u64::MAX as u128) as u64)
}
792
793fn atomic_write_file(path: &Path, bytes: &[u8]) -> Result<()> {
794    let parent = path
795        .parent()
796        .ok_or_else(|| anyhow!("cannot atomically write root path '{}'", path.display()))?;
797    fs::create_dir_all(parent)?;
798
799    let tmp_name = format!(
800        "{}.{}{}",
801        path.file_name().and_then(|n| n.to_str()).unwrap_or("write"),
802        Ulid::new(),
803        TMP_SUFFIX
804    );
805    let tmp_path = parent.join(tmp_name);
806
807    {
808        let mut file = OpenOptions::new()
809            .create_new(true)
810            .write(true)
811            .open(&tmp_path)
812            .with_context(|| format!("failed to open temp file '{}'", tmp_path.display()))?;
813        file.write_all(bytes)?;
814        file.sync_all()?;
815    }
816
817    fs::rename(&tmp_path, path).with_context(|| {
818        format!(
819            "failed to atomically rename '{}' to '{}'",
820            tmp_path.display(),
821            path.display()
822        )
823    })?;
824
825    sync_dir(parent)?;
826    Ok(())
827}
828
/// fsync a directory so a just-renamed entry survives a crash.
// NOTE(review): opening a directory with File::open for sync works on Unix;
// on Windows this call would fail — confirm target platforms before relying
// on it there.
fn sync_dir(path: &Path) -> Result<()> {
    let dir =
        File::open(path).with_context(|| format!("failed opening dir '{}'", path.display()))?;
    dir.sync_all()
        .with_context(|| format!("failed fsync on dir '{}'", path.display()))?;
    Ok(())
}