// Package bug contains the bug data model and low-level related functions
package bug

import (
	"encoding/json"
	"fmt"
	"strings"

	"github.com/pkg/errors"

	"github.com/MichaelMure/git-bug/entity"
	"github.com/MichaelMure/git-bug/identity"
	"github.com/MichaelMure/git-bug/repository"
	"github.com/MichaelMure/git-bug/util/git"
	"github.com/MichaelMure/git-bug/util/lamport"
)

const bugsRefPattern = "refs/bugs/"
const bugsRemoteRefPattern = "refs/remotes/%s/bugs/"

const opsEntryName = "ops"
const rootEntryName = "root"
const mediaEntryName = "media"

const createClockEntryPrefix = "create-clock-"
const createClockEntryPattern = "create-clock-%d"
const editClockEntryPrefix = "edit-clock-"
const editClockEntryPattern = "edit-clock-%d"

var ErrBugNotExist = errors.New("bug doesn't exist")

func NewErrMultipleMatchBug(matching []entity.Id) *entity.ErrMultipleMatch {
	return entity.NewErrMultipleMatch("bug", matching)
}

func NewErrMultipleMatchOp(matching []entity.Id) *entity.ErrMultipleMatch {
	return entity.NewErrMultipleMatch("operation", matching)
}

var _ Interface = &Bug{}
var _ entity.Interface = &Bug{}

// Bug holds the data of a bug thread, organized in a way close to
// how it will be persisted inside Git. This is the data structure
// used to merge two different versions of the same Bug.
type Bug struct {

	// A Lamport clock is a logical clock that allows ordering events
	// inside a distributed system.
	// It must be the first field in this struct due to https://github.com/golang/go/issues/599
	createTime lamport.Time
	editTime   lamport.Time

	// Id used as unique identifier
	id entity.Id

	lastCommit git.Hash
	rootPack   git.Hash

	// all the committed operations
	packs []OperationPack

	// a temporary pack of operations used for convenience to pile up new operations
	// before a commit
	staging OperationPack
}

// NewBug creates a new Bug
func NewBug() *Bug {
	// No id yet
	// No logical clock yet
	return &Bug{}
}
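
// A minimal lifecycle sketch, assuming "repo" is a repository.ClockedRepo and
// "createOp" is an Operation built elsewhere (for a valid Bug the first
// operation must be a create operation):
//
//	b := NewBug()
//	b.Append(createOp)
//	if err := b.Commit(repo); err != nil {
//		// handle error
//	}
//	fmt.Println(b.Id()) // the id only exists after the first commit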

// FindLocalBug finds an existing Bug matching a prefix
func FindLocalBug(repo repository.ClockedRepo, prefix string) (*Bug, error) {
	ids, err := ListLocalIds(repo)

	if err != nil {
		return nil, err
	}

	// preallocate but empty
	matching := make([]entity.Id, 0, 5)

	for _, id := range ids {
		if id.HasPrefix(prefix) {
			matching = append(matching, id)
		}
	}

	if len(matching) == 0 {
		return nil, errors.New("no matching bug found")
	}

	if len(matching) > 1 {
		return nil, NewErrMultipleMatchBug(matching)
	}

	return ReadLocalBug(repo, matching[0])
}
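
// A prefix-lookup sketch, assuming "repo" is a repository.ClockedRepo; the
// prefix "1b4e" is hypothetical, typically a shortened id typed by a user:
//
//	b, err := FindLocalBug(repo, "1b4e")
//	if _, ok := err.(*entity.ErrMultipleMatch); ok {
//		// several bugs share this prefix, ask the user to disambiguate
//	} else if err != nil {
//		// no match, or the underlying read failed
//	}
//	_ = b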

// ReadLocalBug will read a local bug from its id
func ReadLocalBug(repo repository.ClockedRepo, id entity.Id) (*Bug, error) {
	ref := bugsRefPattern + id.String()
	return readBug(repo, ref)
}

// ReadRemoteBug will read a remote bug from its id, for a given remote
func ReadRemoteBug(repo repository.ClockedRepo, remote string, id string) (*Bug, error) {
	ref := fmt.Sprintf(bugsRemoteRefPattern, remote) + id
	return readBug(repo, ref)
}
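
// A read sketch, assuming "repo" is a repository.ClockedRepo and "id" is a full
// entity.Id obtained for example from ListLocalIds:
//
//	b, err := ReadLocalBug(repo, id)
//	if err == ErrBugNotExist {
//		// the ref refs/bugs/<id> does not exist (or could not be read)
//	}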

// readBug will read and parse a Bug from git
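//
// Each commit's tree is expected to contain the following entries:
//   - "ops": the serialized OperationPack for that commit
//   - "root": the very first OperationPack of the bug (possibly the same blob)
//   - "media" (optional): a tree referencing the files used by the operations
//   - "create-clock-<n>" (first commit only) and "edit-clock-<n>": empty blobs
//     whose names encode the Lamport clock values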
func readBug(repo repository.ClockedRepo, ref string) (*Bug, error) {
	refSplit := strings.Split(ref, "/")
	id := entity.Id(refSplit[len(refSplit)-1])

	if err := id.Validate(); err != nil {
		return nil, errors.Wrap(err, "invalid ref")
	}

	hashes, err := repo.ListCommits(ref)

	// TODO: this is not perfect, it might be a command invoke error
	if err != nil {
		return nil, ErrBugNotExist
	}

	bug := Bug{
		id:       id,
		editTime: 0,
	}

	// Load each OperationPack
	for _, hash := range hashes {
		entries, err := repo.ListEntries(hash)
		if err != nil {
			return nil, errors.Wrap(err, "can't list git tree entries")
		}

		bug.lastCommit = hash

		var opsEntry repository.TreeEntry
		opsFound := false
		var rootEntry repository.TreeEntry
		rootFound := false
		var createTime uint64
		var editTime uint64

		for _, entry := range entries {
			if entry.Name == opsEntryName {
				opsEntry = entry
				opsFound = true
				continue
			}
			if entry.Name == rootEntryName {
				rootEntry = entry
				rootFound = true
			}
			if strings.HasPrefix(entry.Name, createClockEntryPrefix) {
				n, err := fmt.Sscanf(entry.Name, createClockEntryPattern, &createTime)
				if err != nil {
					return nil, errors.Wrap(err, "can't read create lamport time")
				}
				if n != 1 {
					return nil, fmt.Errorf("could not parse create time lamport value")
				}
			}
			if strings.HasPrefix(entry.Name, editClockEntryPrefix) {
				n, err := fmt.Sscanf(entry.Name, editClockEntryPattern, &editTime)
				if err != nil {
					return nil, errors.Wrap(err, "can't read edit lamport time")
				}
				if n != 1 {
					return nil, fmt.Errorf("could not parse edit time lamport value")
				}
			}
		}

		if !opsFound {
			return nil, errors.New("invalid tree, missing the ops entry")
		}
		if !rootFound {
			return nil, errors.New("invalid tree, missing the root entry")
		}

		if bug.rootPack == "" {
			bug.rootPack = rootEntry.Hash
			bug.createTime = lamport.Time(createTime)
		}

		// Due to rebase, edit Lamport times are not necessarily ordered
		if editTime > uint64(bug.editTime) {
			bug.editTime = lamport.Time(editTime)
		}

		// Update the clocks
		if err := repo.WitnessCreate(bug.createTime); err != nil {
			return nil, errors.Wrap(err, "failed to update create lamport clock")
		}
		if err := repo.WitnessEdit(bug.editTime); err != nil {
			return nil, errors.Wrap(err, "failed to update edit lamport clock")
		}

		data, err := repo.ReadData(opsEntry.Hash)
		if err != nil {
			return nil, errors.Wrap(err, "failed to read git blob data")
		}

		opp := &OperationPack{}
		err = json.Unmarshal(data, &opp)

		if err != nil {
			return nil, errors.Wrap(err, "failed to decode OperationPack json")
		}

		// tag the pack with the commit hash
		opp.commitHash = hash

		bug.packs = append(bug.packs, *opp)
	}

	// Make sure that the identities are properly loaded
	resolver := identity.NewSimpleResolver(repo)
	err = bug.EnsureIdentities(resolver)
	if err != nil {
		return nil, err
	}

	return &bug, nil
}

type StreamedBug struct {
	Bug *Bug
	Err error
}

// ReadAllLocalBugs reads and parses all local bugs
func ReadAllLocalBugs(repo repository.ClockedRepo) <-chan StreamedBug {
	return readAllBugs(repo, bugsRefPattern)
}

// ReadAllRemoteBugs reads and parses all remote bugs for a given remote
func ReadAllRemoteBugs(repo repository.ClockedRepo, remote string) <-chan StreamedBug {
	refPrefix := fmt.Sprintf(bugsRemoteRefPattern, remote)
	return readAllBugs(repo, refPrefix)
}

// readAllBugs reads and parses all available bugs with a given ref prefix
func readAllBugs(repo repository.ClockedRepo, refPrefix string) <-chan StreamedBug {
	out := make(chan StreamedBug)

	go func() {
		defer close(out)

		refs, err := repo.ListRefs(refPrefix)
		if err != nil {
			out <- StreamedBug{Err: err}
			return
		}

		for _, ref := range refs {
			b, err := readBug(repo, ref)

			if err != nil {
				out <- StreamedBug{Err: err}
				return
			}

			out <- StreamedBug{Bug: b}
		}
	}()

	return out
}
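
// A consumption sketch, assuming "repo" is a repository.ClockedRepo; the channel
// is closed once every bug has been streamed, so a plain range loop is enough:
//
//	for streamed := range ReadAllLocalBugs(repo) {
//		if streamed.Err != nil {
//			return streamed.Err
//		}
//		fmt.Println(streamed.Bug.Id())
//	}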

// ListLocalIds lists all the available local bug ids
func ListLocalIds(repo repository.Repo) ([]entity.Id, error) {
	refs, err := repo.ListRefs(bugsRefPattern)
	if err != nil {
		return nil, err
	}

	return refsToIds(refs), nil
}

func refsToIds(refs []string) []entity.Id {
	ids := make([]entity.Id, len(refs))

	for i, ref := range refs {
		split := strings.Split(ref, "/")
		ids[i] = entity.Id(split[len(split)-1])
	}

	return ids
}

// Validate checks if the Bug data is valid
func (bug *Bug) Validate() error {
	// non-empty
	if len(bug.packs) == 0 && bug.staging.IsEmpty() {
		return fmt.Errorf("bug has no operations")
	}

	// check that each pack and its operations are valid
	for _, pack := range bug.packs {
		if err := pack.Validate(); err != nil {
			return err
		}
	}

	// check that the staging area is valid, if needed
	if !bug.staging.IsEmpty() {
		if err := bug.staging.Validate(); err != nil {
			return errors.Wrap(err, "staging")
		}
	}

	// The very first Op should be a CreateOp
	firstOp := bug.FirstOp()
	if firstOp == nil || firstOp.base().OperationType != CreateOp {
		return fmt.Errorf("first operation should be a Create op")
	}

	// The bug Id should be the hash of the first commit
	if len(bug.packs) > 0 && string(bug.packs[0].commitHash) != bug.id.String() {
		return fmt.Errorf("bug id should be the first commit hash")
	}

	// Check that there is no more than one CreateOp
	// Check that there are no colliding operation IDs
	it := NewOperationIterator(bug)
	createCount := 0
	ids := make(map[entity.Id]struct{})
	for it.Next() {
		if it.Value().base().OperationType == CreateOp {
			createCount++
		}
		if _, ok := ids[it.Value().Id()]; ok {
			return fmt.Errorf("id collision: %s", it.Value().Id())
		}
		ids[it.Value().Id()] = struct{}{}
	}

	if createCount != 1 {
		return fmt.Errorf("only one Create op allowed")
	}

	return nil
}

// Append an operation into the staging area, to be committed later
func (bug *Bug) Append(op Operation) {
	bug.staging.Append(op)
}

// Commit writes the staging area in Git and moves the operations to the packs
func (bug *Bug) Commit(repo repository.ClockedRepo) error {

	if !bug.NeedCommit() {
		return fmt.Errorf("can't commit a bug with no pending operation")
	}

	if err := bug.Validate(); err != nil {
		return errors.Wrap(err, "can't commit a bug with invalid data")
	}

	// Write the Ops as a Git blob containing the serialized array
	hash, err := bug.staging.Write(repo)
	if err != nil {
		return err
	}

	if bug.rootPack == "" {
		bug.rootPack = hash
	}

	// Make a Git tree referencing this blob
	tree := []repository.TreeEntry{
		// the last pack of ops
		{ObjectType: repository.Blob, Hash: hash, Name: opsEntryName},
		// always the first pack of ops (might be the same)
		{ObjectType: repository.Blob, Hash: bug.rootPack, Name: rootEntryName},
	}

	// Reference, if any, all the files required by the ops
	// Git will check that they actually exist in the storage and will make sure
	// to push/pull them as needed.
	mediaTree := makeMediaTree(bug.staging)
	if len(mediaTree) > 0 {
		mediaTreeHash, err := repo.StoreTree(mediaTree)
		if err != nil {
			return err
		}
		tree = append(tree, repository.TreeEntry{
			ObjectType: repository.Tree,
			Hash:       mediaTreeHash,
			Name:       mediaEntryName,
		})
	}

	// Store the logical clocks as well
	// --> edit clock for each OperationPack/commit
	// --> create clock only for the first OperationPack/commit
	//
	// To avoid having one blob for each clock value, clocks are serialized
	// directly into the entry name
	emptyBlobHash, err := repo.StoreData([]byte{})
	if err != nil {
		return err
	}

	bug.editTime, err = repo.EditTimeIncrement()
	if err != nil {
		return err
	}

	tree = append(tree, repository.TreeEntry{
		ObjectType: repository.Blob,
		Hash:       emptyBlobHash,
		Name:       fmt.Sprintf(editClockEntryPattern, bug.editTime),
	})
	if bug.lastCommit == "" {
		bug.createTime, err = repo.CreateTimeIncrement()
		if err != nil {
			return err
		}

		tree = append(tree, repository.TreeEntry{
			ObjectType: repository.Blob,
			Hash:       emptyBlobHash,
			Name:       fmt.Sprintf(createClockEntryPattern, bug.createTime),
		})
	}

	// Store the tree
	hash, err = repo.StoreTree(tree)
	if err != nil {
		return err
	}

	// Write a Git commit referencing the tree, with the previous commit as parent
	if bug.lastCommit != "" {
		hash, err = repo.StoreCommitWithParent(hash, bug.lastCommit)
	} else {
		hash, err = repo.StoreCommit(hash)
	}

	if err != nil {
		return err
	}

	bug.lastCommit = hash

	// if it was the first commit, use the commit hash as bug id
	if bug.id == "" {
		bug.id = entity.Id(hash)
	}

	// Create or update the Git reference for this bug
	// When pushing later, the remote will ensure that this ref update
	// is fast-forward, that is, no data has been overwritten
	ref := fmt.Sprintf("%s%s", bugsRefPattern, bug.id)
	err = repo.UpdateRef(ref, hash)

	if err != nil {
		return err
	}

	bug.staging.commitHash = hash
	bug.packs = append(bug.packs, bug.staging)
	bug.staging = OperationPack{}

	return nil
}

func (bug *Bug) CommitAsNeeded(repo repository.ClockedRepo) error {
	if !bug.NeedCommit() {
		return nil
	}
	return bug.Commit(repo)
}

func (bug *Bug) NeedCommit() bool {
	return !bug.staging.IsEmpty()
}
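
// A small sketch of the append/commit cycle, assuming "repo" is a
// repository.ClockedRepo and op1, op2 are Operations built elsewhere;
// CommitAsNeeded is a no-op when the staging area is empty:
//
//	b.Append(op1)
//	b.Append(op2)
//	if err := b.CommitAsNeeded(repo); err != nil {
//		// handle error
//	}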

func makeMediaTree(pack OperationPack) []repository.TreeEntry {
	var tree []repository.TreeEntry
	counter := 0
	added := make(map[git.Hash]interface{})

	for _, ops := range pack.Operations {
		for _, file := range ops.GetFiles() {
			if _, has := added[file]; !has {
				tree = append(tree, repository.TreeEntry{
					ObjectType: repository.Blob,
					Hash:       file,
					// The name is not important here, we only need to
					// reference the blob.
					Name: fmt.Sprintf("file%d", counter),
				})
				counter++
				added[file] = struct{}{}
			}
		}
	}

	return tree
}

// Merge a different version of the same bug by rebasing operations of this bug
// that are not present in the other on top of the chain of operations of the
// other version.
func (bug *Bug) Merge(repo repository.Repo, other Interface) (bool, error) {
	var otherBug = bugFromInterface(other)

	// Note: a faster merge should be possible without actually reading and parsing
	// all the operation packs on our side.
	// Reading the other side is still necessary to validate remote data, at least
	// for new operations

	if bug.id != otherBug.id {
		return false, errors.New("merging unrelated bugs is not supported")
	}

	if len(otherBug.staging.Operations) > 0 {
		return false, errors.New("merging a bug with a non-empty staging is not supported")
	}

	if bug.lastCommit == "" || otherBug.lastCommit == "" {
		return false, errors.New("can't merge a bug that has never been stored")
	}

	ancestor, err := repo.FindCommonAncestor(bug.lastCommit, otherBug.lastCommit)
	if err != nil {
		return false, errors.Wrap(err, "can't find common ancestor")
	}

	ancestorIndex := 0
	newPacks := make([]OperationPack, 0, len(bug.packs))

	// Find the root of the rebase
	for i, pack := range bug.packs {
		newPacks = append(newPacks, pack)

		if pack.commitHash == ancestor {
			ancestorIndex = i
			break
		}
	}

	if len(otherBug.packs) == ancestorIndex+1 {
		// Nothing to rebase, return early
		return false, nil
	}

	// get the other bug's extra packs
	for i := ancestorIndex + 1; i < len(otherBug.packs); i++ {
		// clone is probably not necessary
		newPack := otherBug.packs[i].Clone()

		newPacks = append(newPacks, newPack)
		bug.lastCommit = newPack.commitHash
	}

	// rebase our extra packs
	for i := ancestorIndex + 1; i < len(bug.packs); i++ {
		pack := bug.packs[i]

		// get the referenced git tree
		treeHash, err := repo.GetTreeHash(pack.commitHash)

		if err != nil {
			return false, err
		}

		// create a new commit with the correct ancestor
		hash, err := repo.StoreCommitWithParent(treeHash, bug.lastCommit)

		if err != nil {
			return false, err
		}

		// replace the pack
		newPack := pack.Clone()
		newPack.commitHash = hash
		newPacks = append(newPacks, newPack)

		// update the bug
		bug.lastCommit = hash
	}

	bug.packs = newPacks

	// Update the git ref
	err = repo.UpdateRef(bugsRefPattern+bug.id.String(), bug.lastCommit)
	if err != nil {
		return false, err
	}

	return true, nil
}
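
// A merge sketch, as it would typically run after fetching a remote, assuming
// "repo" is a repository.ClockedRepo, "remoteName" names a configured remote
// and "id" identifies a bug present on both sides:
//
//	local, err := ReadLocalBug(repo, id)
//	if err != nil {
//		return err
//	}
//	remote, err := ReadRemoteBug(repo, remoteName, id.String())
//	if err != nil {
//		return err
//	}
//	updated, err := local.Merge(repo, remote)
//	if err != nil {
//		return err
//	}
//	// updated is true when the local ref has been rewritten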

// Id returns the Bug identifier
func (bug *Bug) Id() entity.Id {
	if bug.id == "" {
		// simply panic as it would be a coding error
		// (using an id of a bug not stored yet)
		panic("no id yet")
	}
	return bug.id
}

// CreateLamportTime returns the Lamport time of creation
func (bug *Bug) CreateLamportTime() lamport.Time {
	return bug.createTime
}

// EditLamportTime returns the Lamport time of the last edit
func (bug *Bug) EditLamportTime() lamport.Time {
	return bug.editTime
}

// FirstOp returns the very first operation of the bug.
// For a valid Bug, this operation should be a CreateOp
func (bug *Bug) FirstOp() Operation {
	for _, pack := range bug.packs {
		for _, op := range pack.Operations {
			return op
		}
	}

	if !bug.staging.IsEmpty() {
		return bug.staging.Operations[0]
	}

	return nil
}

// LastOp returns the very last operation of the bug.
// For a valid Bug, it should never be nil
func (bug *Bug) LastOp() Operation {
	if !bug.staging.IsEmpty() {
		return bug.staging.Operations[len(bug.staging.Operations)-1]
	}

	if len(bug.packs) == 0 {
		return nil
	}

	lastPack := bug.packs[len(bug.packs)-1]

	if len(lastPack.Operations) == 0 {
		return nil
	}

	return lastPack.Operations[len(lastPack.Operations)-1]
}

// Compile a bug into an easily usable snapshot
func (bug *Bug) Compile() Snapshot {
	snap := Snapshot{
		id:     bug.id,
		Status: OpenStatus,
	}

	it := NewOperationIterator(bug)

	for it.Next() {
		op := it.Value()
		op.Apply(&snap)
		snap.Operations = append(snap.Operations, op)
	}

	return snap
}
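
// A compile sketch: operations are replayed in order to build a read-only view,
// assuming "b" is a *Bug holding at least its create operation:
//
//	snap := b.Compile()
//	fmt.Println(snap.Status)
//	for _, op := range snap.Operations {
//		// inspect the operations that built this snapshot
//		_ = op
//	}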

// IsAuthored is a sign post method for gqlgen
func (bug *Bug) IsAuthored() {}