// Package bug contains the bug data model and low-level related functions
package bug

import (
	"encoding/json"
	"fmt"

	"github.com/pkg/errors"

	"github.com/MichaelMure/git-bug/entity"
	"github.com/MichaelMure/git-bug/identity"
	"github.com/MichaelMure/git-bug/repository"
	"github.com/MichaelMure/git-bug/util/lamport"
)

const bugsRefPattern = "refs/bugs/"
const bugsRemoteRefPattern = "refs/remotes/%s/bugs/"

const opsEntryName = "ops"
const mediaEntryName = "media"

const createClockEntryPrefix = "create-clock-"
const createClockEntryPattern = "create-clock-%d"
const editClockEntryPrefix = "edit-clock-"
const editClockEntryPattern = "edit-clock-%d"

const creationClockName = "bug-create"
const editClockName = "bug-edit"
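
// Storage layout: each bug lives under its own Git ref, refs/bugs/<id> locally
// and refs/remotes/<remote>/bugs/<id> as a remote-tracking ref. In every
// commit's tree, the serialized operations are stored in a blob named "ops",
// attached files (if any) under a "media" subtree, and the Lamport clock
// values are encoded directly into the names of empty blob entries such as
// "edit-clock-4" (see Commit below).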

var ErrBugNotExist = errors.New("bug doesn't exist")

func NewErrMultipleMatchBug(matching []entity.Id) *entity.ErrMultipleMatch {
	return entity.NewErrMultipleMatch("bug", matching)
}

func NewErrMultipleMatchOp(matching []entity.Id) *entity.ErrMultipleMatch {
	return entity.NewErrMultipleMatch("operation", matching)
}

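// Compile-time checks that *Bug satisfies both interfaces.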
var _ Interface = &Bug{}
var _ entity.Interface = &Bug{}

// Bug holds the data of a bug thread, organized in a way close to
// how it will be persisted inside Git. This is the data structure
// used to merge two different versions of the same Bug.
type Bug struct {

	// A Lamport clock is a logical clock that allows ordering events
	// in a distributed system.
	// It must be the first field in this struct due to https://github.com/golang/go/issues/36606
	createTime lamport.Time
	editTime   lamport.Time

	// Id used as unique identifier
	id entity.Id

	lastCommit repository.Hash

	// all the committed operations
	packs []OperationPack

	// a temporary pack of operations used for convenience to pile up new operations
	// before a commit
	staging OperationPack
}

// NewBug creates a new Bug
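//
// A minimal usage sketch (assuming createOp is an already-built create
// operation and repo is a repository.ClockedRepo):
//
//	b := NewBug()
//	b.Append(createOp) // the first operation must be a CreateOp
//	err := b.Commit(repo)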
func NewBug() *Bug {
	// No id yet
	// No logical clock yet
	return &Bug{id: entity.UnsetId}
}

// ReadLocal will read a local bug from its entity.Id
func ReadLocal(repo repository.ClockedRepo, id entity.Id) (*Bug, error) {
	ref := bugsRefPattern + id.String()
	return read(repo, identity.NewSimpleResolver(repo), ref)
}

// ReadLocalWithResolver will read a local bug from its entity.Id, using the given identity.Resolver
func ReadLocalWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver, id entity.Id) (*Bug, error) {
	ref := bugsRefPattern + id.String()
	return read(repo, identityResolver, ref)
}

// ReadRemote will read a remote bug from its entity.Id
func ReadRemote(repo repository.ClockedRepo, remote string, id entity.Id) (*Bug, error) {
	ref := fmt.Sprintf(bugsRemoteRefPattern, remote) + id.String()
	return read(repo, identity.NewSimpleResolver(repo), ref)
}

// ReadRemoteWithResolver will read a remote bug from its entity.Id, using the given identity.Resolver
func ReadRemoteWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver, remote string, id entity.Id) (*Bug, error) {
	ref := fmt.Sprintf(bugsRemoteRefPattern, remote) + id.String()
	return read(repo, identityResolver, ref)
}

// read will read and parse a Bug from git
func read(repo repository.ClockedRepo, identityResolver identity.Resolver, ref string) (*Bug, error) {
	id := entity.RefToId(ref)

	if err := id.Validate(); err != nil {
		return nil, errors.Wrap(err, "invalid ref")
	}

	hashes, err := repo.ListCommits(ref)
	if err != nil {
		return nil, ErrBugNotExist
	}
	if len(hashes) == 0 {
		return nil, fmt.Errorf("empty bug")
	}

	bug := Bug{
		id: id,
	}

	// Load each OperationPack
	for _, hash := range hashes {
		tree, err := readTree(repo, hash)
		if err != nil {
			return nil, err
		}

		// Due to rebase, edit Lamport times are not necessarily ordered
		if tree.editTime > bug.editTime {
			bug.editTime = tree.editTime
		}

		// Update the clocks
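		// (witnessing makes sure the local clocks are at least as large as the
		// values recorded in this commit, so that operations created locally
		// afterwards get strictly greater timestamps)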
		err = repo.Witness(creationClockName, bug.createTime)
		if err != nil {
			return nil, errors.Wrap(err, "failed to update create lamport clock")
		}
		err = repo.Witness(editClockName, bug.editTime)
		if err != nil {
			return nil, errors.Wrap(err, "failed to update edit lamport clock")
		}

		data, err := repo.ReadData(tree.opsEntry.Hash)
		if err != nil {
			return nil, errors.Wrap(err, "failed to read git blob data")
		}

		opp := &OperationPack{}
		err = json.Unmarshal(data, &opp)
		if err != nil {
			return nil, errors.Wrap(err, "failed to decode OperationPack json")
		}

		// tag the pack with the commit hash
		opp.commitHash = hash
		bug.lastCommit = hash

		// if it's the first OperationPack read
		if len(bug.packs) == 0 {
			bug.createTime = tree.createTime
		}

		bug.packs = append(bug.packs, *opp)
	}

	// Bug Id is the Id of the first operation
	if len(bug.packs[0].Operations) == 0 {
		return nil, fmt.Errorf("first OperationPack is empty")
	}
	if bug.id != bug.packs[0].Operations[0].Id() {
		return nil, fmt.Errorf("bug ID doesn't match the first operation ID")
	}

	// Make sure that the identities are properly loaded
	err = bug.EnsureIdentities(identityResolver)
	if err != nil {
		return nil, err
	}

	return &bug, nil
}

// RemoveBug will remove a local bug given its entity.Id
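// Both the local ref and any matching remote-tracking refs are removed.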
func RemoveBug(repo repository.ClockedRepo, id entity.Id) error {
	var fullMatches []string

	refs, err := repo.ListRefs(bugsRefPattern + id.String())
	if err != nil {
		return err
	}
	if len(refs) > 1 {
		return NewErrMultipleMatchBug(entity.RefsToIds(refs))
	}
	if len(refs) == 1 {
		// we have the bug locally
		fullMatches = append(fullMatches, refs[0])
	}

	remotes, err := repo.GetRemotes()
	if err != nil {
		return err
	}

	for remote := range remotes {
		remotePrefix := fmt.Sprintf(bugsRemoteRefPattern+id.String(), remote)
		remoteRefs, err := repo.ListRefs(remotePrefix)
		if err != nil {
			return err
		}
		if len(remoteRefs) > 1 {
			return NewErrMultipleMatchBug(entity.RefsToIds(remoteRefs))
		}
		if len(remoteRefs) == 1 {
			// found the bug in a remote
			fullMatches = append(fullMatches, remoteRefs[0])
		}
	}

	if len(fullMatches) == 0 {
		return ErrBugNotExist
	}

	for _, ref := range fullMatches {
		err = repo.RemoveRef(ref)
		if err != nil {
			return err
		}
	}

	return nil
}

type StreamedBug struct {
	Bug *Bug
	Err error
}

// ReadAllLocal reads and parses all local bugs
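//
// A minimal consumption sketch (assuming a repository.ClockedRepo named repo):
//
//	for streamed := range ReadAllLocal(repo) {
//		if streamed.Err != nil {
//			// handle streamed.Err
//			continue
//		}
//		// use streamed.Bug
//	}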
func ReadAllLocal(repo repository.ClockedRepo) <-chan StreamedBug {
	return readAll(repo, identity.NewSimpleResolver(repo), bugsRefPattern)
}

// ReadAllLocalWithResolver reads and parses all local bugs
func ReadAllLocalWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver) <-chan StreamedBug {
	return readAll(repo, identityResolver, bugsRefPattern)
}

// ReadAllRemote reads and parses all remote bugs for a given remote
func ReadAllRemote(repo repository.ClockedRepo, remote string) <-chan StreamedBug {
	refPrefix := fmt.Sprintf(bugsRemoteRefPattern, remote)
	return readAll(repo, identity.NewSimpleResolver(repo), refPrefix)
}

// ReadAllRemoteWithResolver reads and parses all remote bugs for a given remote
func ReadAllRemoteWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver, remote string) <-chan StreamedBug {
	refPrefix := fmt.Sprintf(bugsRemoteRefPattern, remote)
	return readAll(repo, identityResolver, refPrefix)
}

// readAll reads and parses all available bugs with a given ref prefix
func readAll(repo repository.ClockedRepo, identityResolver identity.Resolver, refPrefix string) <-chan StreamedBug {
	out := make(chan StreamedBug)

	go func() {
		defer close(out)

		refs, err := repo.ListRefs(refPrefix)
		if err != nil {
			out <- StreamedBug{Err: err}
			return
		}

		for _, ref := range refs {
			b, err := read(repo, identityResolver, ref)

			if err != nil {
				out <- StreamedBug{Err: err}
				return
			}

			out <- StreamedBug{Bug: b}
		}
	}()

	return out
}

// ListLocalIds lists all the available local bug ids
func ListLocalIds(repo repository.Repo) ([]entity.Id, error) {
	refs, err := repo.ListRefs(bugsRefPattern)
	if err != nil {
		return nil, err
	}

	return entity.RefsToIds(refs), nil
}

// Validate checks that the Bug data is valid
func (bug *Bug) Validate() error {
	// non-empty
	if len(bug.packs) == 0 && bug.staging.IsEmpty() {
		return fmt.Errorf("bug has no operations")
	}

	// check that each pack and its operations are valid
	for _, pack := range bug.packs {
		if err := pack.Validate(); err != nil {
			return err
		}
	}

	// check that the staging area is valid, if not empty
	if !bug.staging.IsEmpty() {
		if err := bug.staging.Validate(); err != nil {
			return errors.Wrap(err, "staging")
		}
	}

	// The very first Op should be a CreateOp
	firstOp := bug.FirstOp()
	if firstOp == nil || firstOp.base().OperationType != CreateOp {
		return fmt.Errorf("first operation should be a Create op")
	}

	// The bug Id should be the id of the first operation
	if bug.FirstOp().Id() != bug.id {
		return fmt.Errorf("bug id should be the id of the first operation")
	}

	// Check that there is no other CreateOp
	// Check that there are no colliding operation IDs
	it := NewOperationIterator(bug)
	createCount := 0
	ids := make(map[entity.Id]struct{})
	for it.Next() {
		if it.Value().base().OperationType == CreateOp {
			createCount++
		}
		if _, ok := ids[it.Value().Id()]; ok {
			return fmt.Errorf("id collision: %s", it.Value().Id())
		}
		ids[it.Value().Id()] = struct{}{}
	}

	if createCount != 1 {
		return fmt.Errorf("only one Create op allowed")
	}

	return nil
}

// Append adds an operation to the staging area, to be committed later
func (bug *Bug) Append(op Operation) {
	if len(bug.packs) == 0 && len(bug.staging.Operations) == 0 {
		if op.base().OperationType != CreateOp {
			panic("first operation should be a Create")
		}
		bug.id = op.Id()
	}
	bug.staging.Append(op)
}

// Commit writes the staging area in Git and moves the operations to the packs
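//
// Each commit stores the staged OperationPack as a blob named "ops", plus
// empty blobs whose names encode the edit clock (and, for the first commit,
// the create clock), and a "media" subtree when operations reference files.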
func (bug *Bug) Commit(repo repository.ClockedRepo) error {
	if !bug.NeedCommit() {
		return fmt.Errorf("can't commit a bug with no pending operation")
	}

	if err := bug.Validate(); err != nil {
		return errors.Wrap(err, "can't commit a bug with invalid data")
	}

	// update clocks
	var err error
	bug.editTime, err = repo.Increment(editClockName)
	if err != nil {
		return err
	}
	if bug.lastCommit == "" {
		bug.createTime, err = repo.Increment(creationClockName)
		if err != nil {
			return err
		}
	}

	// Write the Ops as a Git blob containing the serialized array
	hash, err := bug.staging.Write(repo)
	if err != nil {
		return err
	}

	// Make a Git tree referencing this blob
	tree := []repository.TreeEntry{
		// the last pack of ops
		{ObjectType: repository.Blob, Hash: hash, Name: opsEntryName},
	}

	// Store the logical clocks as well
	// --> edit clock for each OperationPack/commit
	// --> create clock only for the first OperationPack/commit
	//
	// To avoid having one blob for each clock value, clocks are serialized
	// directly into the entry name
	emptyBlobHash, err := repo.StoreData([]byte{})
	if err != nil {
		return err
	}
	tree = append(tree, repository.TreeEntry{
		ObjectType: repository.Blob,
		Hash:       emptyBlobHash,
		Name:       fmt.Sprintf(editClockEntryPattern, bug.editTime),
	})
	if bug.lastCommit == "" {
		tree = append(tree, repository.TreeEntry{
			ObjectType: repository.Blob,
			Hash:       emptyBlobHash,
			Name:       fmt.Sprintf(createClockEntryPattern, bug.createTime),
		})
	}

	// Reference the files required by the ops, if any.
	// Git will check that they actually exist in the storage and will make sure
	// to push/pull them as needed.
	mediaTree := makeMediaTree(bug.staging)
	if len(mediaTree) > 0 {
		mediaTreeHash, err := repo.StoreTree(mediaTree)
		if err != nil {
			return err
		}
		tree = append(tree, repository.TreeEntry{
			ObjectType: repository.Tree,
			Hash:       mediaTreeHash,
			Name:       mediaEntryName,
		})
	}

	// Store the tree
	hash, err = repo.StoreTree(tree)
	if err != nil {
		return err
	}

	// Write a Git commit referencing the tree, with the previous commit as parent
	if bug.lastCommit != "" {
		hash, err = repo.StoreCommitWithParent(hash, bug.lastCommit)
	} else {
		hash, err = repo.StoreCommit(hash)
	}
	if err != nil {
		return err
	}

	bug.lastCommit = hash
	bug.staging.commitHash = hash
	bug.packs = append(bug.packs, bug.staging)
	bug.staging = OperationPack{}

	// if it was the first commit, use the Id of the first op (create)
	if bug.id == "" || bug.id == entity.UnsetId {
		bug.id = bug.packs[0].Operations[0].Id()
	}

	// Create or update the Git reference for this bug.
	// When pushing later, the remote will ensure that this ref update
	// is a fast-forward, that is, that no data has been overwritten.
	ref := fmt.Sprintf("%s%s", bugsRefPattern, bug.id)
	return repo.UpdateRef(ref, hash)
}

func (bug *Bug) CommitAsNeeded(repo repository.ClockedRepo) error {
	if !bug.NeedCommit() {
		return nil
	}
	return bug.Commit(repo)
}

func (bug *Bug) NeedCommit() bool {
	return !bug.staging.IsEmpty()
}

// Merge a different version of the same bug by rebasing operations of this bug
// that are not present in the other on top of the chain of operations of the
// other version.
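// The returned boolean reports whether the local bug was modified.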
func (bug *Bug) Merge(repo repository.Repo, other Interface) (bool, error) {
	var otherBug = bugFromInterface(other)

	// Note: a faster merge should be possible without actually reading and parsing
	// all the operation packs on our side.
	// Reading the other side is still necessary to validate remote data, at least
	// for new operations

	if bug.id != otherBug.id {
		return false, errors.New("merging unrelated bugs is not supported")
	}

	if len(otherBug.staging.Operations) > 0 {
		return false, errors.New("merging a bug with a non-empty staging is not supported")
	}

	if bug.lastCommit == "" || otherBug.lastCommit == "" {
		return false, errors.New("can't merge a bug that has never been stored")
	}

	ancestor, err := repo.FindCommonAncestor(bug.lastCommit, otherBug.lastCommit)
	if err != nil {
		return false, errors.Wrap(err, "can't find common ancestor")
	}

	ancestorIndex := 0
	newPacks := make([]OperationPack, 0, len(bug.packs))

	// Find the root of the rebase
	for i, pack := range bug.packs {
		newPacks = append(newPacks, pack)

		if pack.commitHash == ancestor {
			ancestorIndex = i
			break
		}
	}

	if len(otherBug.packs) == ancestorIndex+1 {
		// Nothing to rebase: the other bug has nothing new past the common ancestor
		return false, nil
	}

	// get the other bug's extra packs
	for i := ancestorIndex + 1; i < len(otherBug.packs); i++ {
		// clone is probably not necessary
		newPack := otherBug.packs[i].Clone()

		newPacks = append(newPacks, newPack)
		bug.lastCommit = newPack.commitHash
	}

	// rebase our extra packs
	for i := ancestorIndex + 1; i < len(bug.packs); i++ {
		pack := bug.packs[i]

		// get the referenced git tree
		treeHash, err := repo.GetTreeHash(pack.commitHash)

		if err != nil {
			return false, err
		}

		// create a new commit with the correct ancestor
		hash, err := repo.StoreCommitWithParent(treeHash, bug.lastCommit)

		if err != nil {
			return false, err
		}

		// replace the pack
		newPack := pack.Clone()
		newPack.commitHash = hash
		newPacks = append(newPacks, newPack)

		// update the bug
		bug.lastCommit = hash
	}

	bug.packs = newPacks

	// Update the git ref
	err = repo.UpdateRef(bugsRefPattern+bug.id.String(), bug.lastCommit)
	if err != nil {
		return false, err
	}

	return true, nil
}

// Id returns the Bug identifier
func (bug *Bug) Id() entity.Id {
	if bug.id == "" || bug.id == entity.UnsetId {
		// simply panic as it would be a coding error
		// (using the id of a bug without any operation yet)
		panic("no id yet")
	}
	return bug.id
}

// CreateLamportTime returns the Lamport time of creation
func (bug *Bug) CreateLamportTime() lamport.Time {
	return bug.createTime
}

// EditLamportTime returns the Lamport time of the last edit
func (bug *Bug) EditLamportTime() lamport.Time {
	return bug.editTime
}

// FirstOp looks up the very first operation of the bug.
// For a valid Bug, this operation should be a CreateOp
func (bug *Bug) FirstOp() Operation {
	for _, pack := range bug.packs {
		for _, op := range pack.Operations {
			return op
		}
	}

	if !bug.staging.IsEmpty() {
		return bug.staging.Operations[0]
	}

	return nil
}

// LastOp looks up the very last operation of the bug.
// For a valid Bug, it should never be nil
func (bug *Bug) LastOp() Operation {
	if !bug.staging.IsEmpty() {
		return bug.staging.Operations[len(bug.staging.Operations)-1]
	}

	if len(bug.packs) == 0 {
		return nil
	}

	lastPack := bug.packs[len(bug.packs)-1]

	if len(lastPack.Operations) == 0 {
		return nil
	}

	return lastPack.Operations[len(lastPack.Operations)-1]
}

// Compile a bug into an easily usable snapshot
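// The snapshot is built by replaying every operation, in order, on top of an
// initially open Snapshot.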
func (bug *Bug) Compile() Snapshot {
	snap := Snapshot{
		id:     bug.id,
		Status: OpenStatus,
	}

	it := NewOperationIterator(bug)

	for it.Next() {
		op := it.Value()
		op.Apply(&snap)
		snap.Operations = append(snap.Operations, op)
	}

	return snap
}