// Package bug contains the bug data model and low-level related functions
package bug

import (
	"encoding/json"
	"fmt"
	"strings"

	"github.com/pkg/errors"

	"github.com/MichaelMure/git-bug/entity"
	"github.com/MichaelMure/git-bug/identity"
	"github.com/MichaelMure/git-bug/repository"
	"github.com/MichaelMure/git-bug/util/git"
	"github.com/MichaelMure/git-bug/util/lamport"
)

const bugsRefPattern = "refs/bugs/"
const bugsRemoteRefPattern = "refs/remotes/%s/bugs/"

const opsEntryName = "ops"
const rootEntryName = "root"
const mediaEntryName = "media"

const createClockEntryPrefix = "create-clock-"
const createClockEntryPattern = "create-clock-%d"
const editClockEntryPrefix = "edit-clock-"
const editClockEntryPattern = "edit-clock-%d"
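
// The clock entry names above encode a Lamport time directly in a tree entry
// name, so that no extra blob is needed per clock value. A minimal sketch of
// how such a name is produced and parsed with the patterns above (the value 4
// is made up for illustration):
//
//	name := fmt.Sprintf(createClockEntryPattern, 4) // "create-clock-4"
//	var t uint64
//	n, err := fmt.Sscanf(name, createClockEntryPattern, &t) // n == 1, t == 4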

var ErrBugNotExist = errors.New("bug doesn't exist")

type ErrMultipleMatch struct {
	Matching []entity.Id
}

func (e ErrMultipleMatch) Error() string {
	matching := make([]string, len(e.Matching))

	for i, match := range e.Matching {
		matching[i] = match.String()
	}

	return fmt.Sprintf("Multiple matching bugs found:\n%s", strings.Join(matching, "\n"))
}

var _ Interface = &Bug{}
var _ entity.Interface = &Bug{}

// Bug holds the data of a bug thread, organized in a way close to
// how it will be persisted inside Git. This is the data structure
// used to merge two different versions of the same Bug.
type Bug struct {

	// A Lamport clock is a logical clock that allows ordering events
	// inside a distributed system.
	// It must be the first field in this struct due to https://github.com/golang/go/issues/599
	createTime lamport.Time
	editTime   lamport.Time

	// Id used as unique identifier
	id entity.Id

	lastCommit git.Hash
	rootPack   git.Hash

	// all the committed operations
	packs []OperationPack

	// a temporary pack of operations used for convenience to pile up new operations
	// before a commit
	staging OperationPack
}

// NewBug creates a new Bug
func NewBug() *Bug {
	// No id yet
	// No logical clock yet
	return &Bug{}
}
// FindLocalBug finds an existing Bug matching a prefix
func FindLocalBug(repo repository.ClockedRepo, prefix string) (*Bug, error) {
	ids, err := ListLocalIds(repo)

	if err != nil {
		return nil, err
	}

	// preallocate but empty
	matching := make([]entity.Id, 0, 5)

	for _, id := range ids {
		if id.HasPrefix(prefix) {
			matching = append(matching, id)
		}
	}

	if len(matching) == 0 {
		return nil, errors.New("no matching bug found")
	}

	if len(matching) > 1 {
		return nil, ErrMultipleMatch{Matching: matching}
	}

	return ReadLocalBug(repo, matching[0])
}
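
// A minimal usage sketch for FindLocalBug, assuming `repo` is an already
// opened repository.ClockedRepo and "1234" is a hypothetical id prefix typed
// by a user:
//
//	b, err := FindLocalBug(repo, "1234")
//	if err != nil {
//		if multi, ok := err.(ErrMultipleMatch); ok {
//			// the prefix is ambiguous; multi.Matching lists the candidate ids
//			fmt.Println(multi.Error())
//		}
//		return err
//	}
//	fmt.Println(b.Id())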

// ReadLocalBug will read a local bug from its id
func ReadLocalBug(repo repository.ClockedRepo, id entity.Id) (*Bug, error) {
	ref := bugsRefPattern + id.String()
	return readBug(repo, ref)
}

// ReadRemoteBug will read a remote bug from its id
func ReadRemoteBug(repo repository.ClockedRepo, remote string, id string) (*Bug, error) {
	ref := fmt.Sprintf(bugsRemoteRefPattern, remote) + id
	return readBug(repo, ref)
}

// readBug will read and parse a Bug from git
func readBug(repo repository.ClockedRepo, ref string) (*Bug, error) {
	refSplit := strings.Split(ref, "/")
	id := entity.Id(refSplit[len(refSplit)-1])

	if err := id.Validate(); err != nil {
		return nil, errors.Wrap(err, "invalid ref")
	}

	hashes, err := repo.ListCommits(ref)

	// TODO: this is not perfect, it might be a command invoke error
	if err != nil {
		return nil, ErrBugNotExist
	}

	bug := Bug{
		id:       id,
		editTime: 0,
	}

	// Load each OperationPack
	for _, hash := range hashes {
		entries, err := repo.ListEntries(hash)
		if err != nil {
			return nil, errors.Wrap(err, "can't list git tree entries")
		}

		bug.lastCommit = hash

		var opsEntry repository.TreeEntry
		opsFound := false
		var rootEntry repository.TreeEntry
		rootFound := false
		var createTime uint64
		var editTime uint64

		for _, entry := range entries {
			if entry.Name == opsEntryName {
				opsEntry = entry
				opsFound = true
				continue
			}
			if entry.Name == rootEntryName {
				rootEntry = entry
				rootFound = true
			}
			if strings.HasPrefix(entry.Name, createClockEntryPrefix) {
				n, err := fmt.Sscanf(entry.Name, createClockEntryPattern, &createTime)
				if err != nil {
					return nil, errors.Wrap(err, "can't read create lamport time")
				}
				if n != 1 {
					return nil, fmt.Errorf("could not parse create time lamport value")
				}
			}
			if strings.HasPrefix(entry.Name, editClockEntryPrefix) {
				n, err := fmt.Sscanf(entry.Name, editClockEntryPattern, &editTime)
				if err != nil {
					return nil, errors.Wrap(err, "can't read edit lamport time")
				}
				if n != 1 {
					return nil, fmt.Errorf("could not parse edit time lamport value")
				}
			}
		}

		if !opsFound {
			return nil, errors.New("invalid tree, missing the ops entry")
		}
		if !rootFound {
			return nil, errors.New("invalid tree, missing the root entry")
		}

		if bug.rootPack == "" {
			bug.rootPack = rootEntry.Hash
			bug.createTime = lamport.Time(createTime)
		}

		// Due to rebase, edit Lamport times are not necessarily ordered
		if editTime > uint64(bug.editTime) {
			bug.editTime = lamport.Time(editTime)
		}

		// Update the clocks
		if err := repo.CreateWitness(bug.createTime); err != nil {
			return nil, errors.Wrap(err, "failed to update create lamport clock")
		}
		if err := repo.EditWitness(bug.editTime); err != nil {
			return nil, errors.Wrap(err, "failed to update edit lamport clock")
		}

		data, err := repo.ReadData(opsEntry.Hash)
		if err != nil {
			return nil, errors.Wrap(err, "failed to read git blob data")
		}

		opp := &OperationPack{}
		err = json.Unmarshal(data, &opp)

		if err != nil {
			return nil, errors.Wrap(err, "failed to decode OperationPack json")
		}

		// tag the pack with the commit hash
		opp.commitHash = hash

		bug.packs = append(bug.packs, *opp)
	}

	// Make sure that the identities are properly loaded
	resolver := identity.NewSimpleResolver(repo)
	err = bug.EnsureIdentities(resolver)
	if err != nil {
		return nil, err
	}

	return &bug, nil
}
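
// For reference, each commit of a bug is expected to point to a git tree
// shaped roughly as follows (entry names come from the constants above; the
// clock values are made up for illustration, and the create-clock entry is
// only written on the first commit):
//
//	ops              blob   the OperationPack serialized for this commit
//	root             blob   the very first OperationPack of the bug
//	media            tree   files referenced by the operations, if any
//	create-clock-4   blob   empty; the Lamport time is encoded in the name
//	edit-clock-9     blob   empty; the Lamport time is encoded in the name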

type StreamedBug struct {
	Bug *Bug
	Err error
}

// ReadAllLocalBugs reads and parses all local bugs
func ReadAllLocalBugs(repo repository.ClockedRepo) <-chan StreamedBug {
	return readAllBugs(repo, bugsRefPattern)
}

// ReadAllRemoteBugs reads and parses all remote bugs for a given remote
func ReadAllRemoteBugs(repo repository.ClockedRepo, remote string) <-chan StreamedBug {
	refPrefix := fmt.Sprintf(bugsRemoteRefPattern, remote)
	return readAllBugs(repo, refPrefix)
}

// Read and parse all available bugs with a given ref prefix
func readAllBugs(repo repository.ClockedRepo, refPrefix string) <-chan StreamedBug {
	out := make(chan StreamedBug)

	go func() {
		defer close(out)

		refs, err := repo.ListRefs(refPrefix)
		if err != nil {
			out <- StreamedBug{Err: err}
			return
		}

		for _, ref := range refs {
			b, err := readBug(repo, ref)

			if err != nil {
				out <- StreamedBug{Err: err}
				return
			}

			out <- StreamedBug{Bug: b}
		}
	}()

	return out
}
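
// A minimal sketch of consuming this stream, assuming `repo` is an opened
// repository.ClockedRepo:
//
//	for streamed := range ReadAllLocalBugs(repo) {
//		if streamed.Err != nil {
//			return streamed.Err
//		}
//		fmt.Println(streamed.Bug.Id())
//	}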

// ListLocalIds lists all the available local bug ids
func ListLocalIds(repo repository.Repo) ([]entity.Id, error) {
	refs, err := repo.ListRefs(bugsRefPattern)
	if err != nil {
		return nil, err
	}

	return refsToIds(refs), nil
}

func refsToIds(refs []string) []entity.Id {
	ids := make([]entity.Id, len(refs))

	for i, ref := range refs {
		split := strings.Split(ref, "/")
		ids[i] = entity.Id(split[len(split)-1])
	}

	return ids
}

// Validate checks if the Bug data is valid
func (bug *Bug) Validate() error {
	// non-empty
	if len(bug.packs) == 0 && bug.staging.IsEmpty() {
		return fmt.Errorf("bug has no operations")
	}

	// check that each pack and its operations are valid
	for _, pack := range bug.packs {
		if err := pack.Validate(); err != nil {
			return err
		}
	}

	// check that staging is valid, if needed
	if !bug.staging.IsEmpty() {
		if err := bug.staging.Validate(); err != nil {
			return errors.Wrap(err, "staging")
		}
	}

	// The very first Op should be a CreateOp
	firstOp := bug.FirstOp()
	if firstOp == nil || firstOp.base().OperationType != CreateOp {
		return fmt.Errorf("first operation should be a Create op")
	}

	// The bug Id should be the hash of the first commit
	if len(bug.packs) > 0 && string(bug.packs[0].commitHash) != bug.id.String() {
		return fmt.Errorf("bug id should be the first commit hash")
	}

	// Check that there is no more than one CreateOp
	it := NewOperationIterator(bug)
	createCount := 0
	for it.Next() {
		if it.Value().base().OperationType == CreateOp {
			createCount++
		}
	}

	if createCount != 1 {
		return fmt.Errorf("only one Create op allowed")
	}

	return nil
}

// Append an operation into the staging area, to be committed later
func (bug *Bug) Append(op Operation) {
	bug.staging.Append(op)
}

// HasPendingOp tells if the bug needs to be committed
func (bug *Bug) HasPendingOp() bool {
	return !bug.staging.IsEmpty()
}

// Commit writes the staging area in Git and moves the operations to the packs
func (bug *Bug) Commit(repo repository.ClockedRepo) error {

	if !bug.NeedCommit() {
		return fmt.Errorf("can't commit a bug with no pending operation")
	}

	if err := bug.Validate(); err != nil {
		return errors.Wrap(err, "can't commit a bug with invalid data")
	}

	// Write the Ops as a Git blob containing the serialized array
	hash, err := bug.staging.Write(repo)
	if err != nil {
		return err
	}

	if bug.rootPack == "" {
		bug.rootPack = hash
	}

	// Make a Git tree referencing this blob
	tree := []repository.TreeEntry{
		// the last pack of ops
		{ObjectType: repository.Blob, Hash: hash, Name: opsEntryName},
		// always the first pack of ops (might be the same)
		{ObjectType: repository.Blob, Hash: bug.rootPack, Name: rootEntryName},
	}

	// Reference, if any, all the files required by the ops
	// Git will check that they actually exist in the storage and will make sure
	// to push/pull them as needed.
	mediaTree := makeMediaTree(bug.staging)
	if len(mediaTree) > 0 {
		mediaTreeHash, err := repo.StoreTree(mediaTree)
		if err != nil {
			return err
		}
		tree = append(tree, repository.TreeEntry{
			ObjectType: repository.Tree,
			Hash:       mediaTreeHash,
			Name:       mediaEntryName,
		})
	}

	// Store the logical clocks as well
	// --> edit clock for each OperationPack/commit
	// --> create clock only for the first OperationPack/commit
	//
	// To avoid having one blob for each clock value, clocks are serialized
	// directly into the entry name
	emptyBlobHash, err := repo.StoreData([]byte{})
	if err != nil {
		return err
	}

	bug.editTime, err = repo.EditTimeIncrement()
	if err != nil {
		return err
	}

	tree = append(tree, repository.TreeEntry{
		ObjectType: repository.Blob,
		Hash:       emptyBlobHash,
		Name:       fmt.Sprintf(editClockEntryPattern, bug.editTime),
	})
	if bug.lastCommit == "" {
		bug.createTime, err = repo.CreateTimeIncrement()
		if err != nil {
			return err
		}

		tree = append(tree, repository.TreeEntry{
			ObjectType: repository.Blob,
			Hash:       emptyBlobHash,
			Name:       fmt.Sprintf(createClockEntryPattern, bug.createTime),
		})
	}

	// Store the tree
	hash, err = repo.StoreTree(tree)
	if err != nil {
		return err
	}

	// Write a Git commit referencing the tree, with the previous commit as parent
	if bug.lastCommit != "" {
		hash, err = repo.StoreCommitWithParent(hash, bug.lastCommit)
	} else {
		hash, err = repo.StoreCommit(hash)
	}

	if err != nil {
		return err
	}

	bug.lastCommit = hash

	// if it was the first commit, use the commit hash as bug id
	if bug.id == "" {
		bug.id = entity.Id(hash)
	}

	// Create or update the Git reference for this bug
	// When pushing later, the remote will ensure that this ref update
	// is fast-forward, that is no data has been overwritten
	ref := fmt.Sprintf("%s%s", bugsRefPattern, bug.id)
	err = repo.UpdateRef(ref, hash)

	if err != nil {
		return err
	}

	bug.staging.commitHash = hash
	bug.packs = append(bug.packs, bug.staging)
	bug.staging = OperationPack{}

	return nil
}
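
// A minimal sketch of the write path, assuming `repo` is an opened
// repository.ClockedRepo and `op` is some Operation built elsewhere in this
// package (both are placeholders for illustration):
//
//	b := NewBug()
//	b.Append(op) // for a new bug, op must be a CreateOp-typed operation
//	if err := b.Commit(repo); err != nil {
//		return err
//	}
//	fmt.Println(b.Id()) // the id is the hash of the first commit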

func (bug *Bug) CommitAsNeeded(repo repository.ClockedRepo) error {
	if !bug.NeedCommit() {
		return nil
	}
	return bug.Commit(repo)
}

func (bug *Bug) NeedCommit() bool {
	return !bug.staging.IsEmpty()
}

func makeMediaTree(pack OperationPack) []repository.TreeEntry {
	var tree []repository.TreeEntry
	counter := 0
	added := make(map[git.Hash]interface{})

	for _, op := range pack.Operations {
		for _, file := range op.GetFiles() {
			if _, has := added[file]; !has {
				tree = append(tree, repository.TreeEntry{
					ObjectType: repository.Blob,
					Hash:       file,
					// The name is not important here, we only need to
					// reference the blob.
					Name: fmt.Sprintf("file%d", counter),
				})
				counter++
				added[file] = struct{}{}
			}
		}
	}

	return tree
}

// Merge a different version of the same bug by rebasing operations of this bug
// that are not present in the other on top of the chain of operations of the
// other version.
func (bug *Bug) Merge(repo repository.Repo, other Interface) (bool, error) {
	var otherBug = bugFromInterface(other)

	// Note: a faster merge should be possible without actually reading and parsing
	// all operation packs on our side.
	// Reading the other side is still necessary to validate remote data, at least
	// for new operations

	if bug.id != otherBug.id {
		return false, errors.New("merging unrelated bugs is not supported")
	}

	if len(otherBug.staging.Operations) > 0 {
		return false, errors.New("merging a bug with a non-empty staging is not supported")
	}

	if bug.lastCommit == "" || otherBug.lastCommit == "" {
		return false, errors.New("can't merge a bug that has never been stored")
	}

	ancestor, err := repo.FindCommonAncestor(bug.lastCommit, otherBug.lastCommit)
	if err != nil {
		return false, errors.Wrap(err, "can't find common ancestor")
	}

	ancestorIndex := 0
	newPacks := make([]OperationPack, 0, len(bug.packs))

	// Find the root of the rebase
	for i, pack := range bug.packs {
		newPacks = append(newPacks, pack)

		if pack.commitHash == ancestor {
			ancestorIndex = i
			break
		}
	}

	if len(otherBug.packs) == ancestorIndex+1 {
		// Nothing to rebase, return early
		return false, nil
	}

	// get other bug's extra packs
	for i := ancestorIndex + 1; i < len(otherBug.packs); i++ {
		// clone is probably not necessary
		newPack := otherBug.packs[i].Clone()

		newPacks = append(newPacks, newPack)
		bug.lastCommit = newPack.commitHash
	}

	// rebase our extra packs
	for i := ancestorIndex + 1; i < len(bug.packs); i++ {
		pack := bug.packs[i]

		// get the referenced git tree
		treeHash, err := repo.GetTreeHash(pack.commitHash)

		if err != nil {
			return false, err
		}

		// create a new commit with the correct ancestor
		hash, err := repo.StoreCommitWithParent(treeHash, bug.lastCommit)

		if err != nil {
			return false, err
		}

		// replace the pack
		newPack := pack.Clone()
		newPack.commitHash = hash
		newPacks = append(newPacks, newPack)

		// update the bug
		bug.lastCommit = hash
	}

	// adopt the merged chain of packs
	bug.packs = newPacks

	// Update the git ref
	err = repo.UpdateRef(bugsRefPattern+bug.id.String(), bug.lastCommit)
	if err != nil {
		return false, err
	}

	return true, nil
}
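
// A minimal sketch of merging a remote version of a bug into the local one,
// assuming `repo` is an opened repository.ClockedRepo, `id` is the bug's
// entity.Id and "origin" is the remote name (hypothetical values for
// illustration):
//
//	local, err := ReadLocalBug(repo, id)
//	if err != nil {
//		return err
//	}
//	remote, err := ReadRemoteBug(repo, "origin", id.String())
//	if err != nil {
//		return err
//	}
//	updated, err := local.Merge(repo, remote)
//	if err != nil {
//		return err
//	}
//	// updated is true if the local bug gained new operations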

// Id returns the Bug identifier
func (bug *Bug) Id() entity.Id {
	if bug.id == "" {
		// simply panic as it would be a coding error
		// (using an id of a bug not stored yet)
		panic("no id yet")
	}
	return bug.id
}

// CreateLamportTime returns the Lamport time of creation
func (bug *Bug) CreateLamportTime() lamport.Time {
	return bug.createTime
}

// EditLamportTime returns the Lamport time of the last edit
func (bug *Bug) EditLamportTime() lamport.Time {
	return bug.editTime
}

// FirstOp looks up the very first operation of the bug.
// For a valid Bug, this operation should be a CreateOp
func (bug *Bug) FirstOp() Operation {
	for _, pack := range bug.packs {
		for _, op := range pack.Operations {
			return op
		}
	}

	if !bug.staging.IsEmpty() {
		return bug.staging.Operations[0]
	}

	return nil
}

// LastOp looks up the very last operation of the bug.
// For a valid Bug, should never be nil
func (bug *Bug) LastOp() Operation {
	if !bug.staging.IsEmpty() {
		return bug.staging.Operations[len(bug.staging.Operations)-1]
	}

	if len(bug.packs) == 0 {
		return nil
	}

	lastPack := bug.packs[len(bug.packs)-1]

	if len(lastPack.Operations) == 0 {
		return nil
	}

	return lastPack.Operations[len(lastPack.Operations)-1]
}

// Compile a bug into an easily usable snapshot
func (bug *Bug) Compile() Snapshot {
	snap := Snapshot{
		id:     bug.id,
		Status: OpenStatus,
	}

	it := NewOperationIterator(bug)

	for it.Next() {
		op := it.Value()
		op.Apply(&snap)
		snap.Operations = append(snap.Operations, op)
	}

	return snap
}
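
// A minimal sketch of reading a bug and compiling it into a snapshot, assuming
// `repo` is an opened repository.ClockedRepo and `id` is a known bug id:
//
//	b, err := ReadLocalBug(repo, id)
//	if err != nil {
//		return err
//	}
//	snap := b.Compile()
//	fmt.Println(snap.Status, len(snap.Operations))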

// Sign post method for gqlgen
func (bug *Bug) IsAuthored() {}