package bug

import (
	"errors"
	"fmt"
	"strings"

	"github.com/MichaelMure/git-bug/repository"
	"github.com/MichaelMure/git-bug/util"
)

const bugsRefPattern = "refs/bugs/"
const bugsRemoteRefPattern = "refs/remotes/%s/bugs/"
const opsEntryName = "ops"
const rootEntryName = "root"
const mediaEntryName = "media"

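// A bug id is the full hex SHA-1 hash of its first git commit (40 characters);
// the human-readable form is truncated to 7 characters, like git's short hashes.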
const idLength = 40
const humanIdLength = 7

// Bug holds the data of a bug thread, organized in a way close to
// how it will be persisted inside Git. This is the data structure
// used to merge two different versions of the same bug.
type Bug struct {
	// Id used as unique identifier
	id string

	lastCommit util.Hash
	rootPack   util.Hash

	// TODO: need a way to order bugs, probably a Lamport clock

	// all the committed operations
	packs []OperationPack

	// a temporary pack of operations used for convenience to pile up new operations
	// before a commit
	staging OperationPack
}

// Create a new Bug
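//
// Typical lifecycle (a sketch; op stands for any Operation value built elsewhere
// in this package):
//
//	b := NewBug()
//	b.Append(op)          // pile up operations in the staging area
//	err := b.Commit(repo) // write them to git and assign the bug id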
func NewBug() *Bug {
	// No id yet
	return &Bug{}
}

// Find an existing Bug matching a prefix
func FindLocalBug(repo repository.Repo, prefix string) (*Bug, error) {
	ids, err := repo.ListIds(bugsRefPattern)

	if err != nil {
		return nil, err
	}

	// preallocate but empty
	matching := make([]string, 0, 5)

	for _, id := range ids {
		if strings.HasPrefix(id, prefix) {
			matching = append(matching, id)
		}
	}

	if len(matching) == 0 {
		return nil, errors.New("No matching bug found.")
	}

	if len(matching) > 1 {
		return nil, fmt.Errorf("Multiple matching bugs found:\n%s", strings.Join(matching, "\n"))
	}

	return ReadLocalBug(repo, matching[0])
}

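// Read and parse a Bug from the local refs (refs/bugs/<id>), given its full id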
func ReadLocalBug(repo repository.Repo, id string) (*Bug, error) {
	ref := bugsRefPattern + id
	return readBug(repo, ref)
}

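// Read and parse a Bug from a given remote's refs (refs/remotes/<remote>/bugs/<id>)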
func ReadRemoteBug(repo repository.Repo, remote string, id string) (*Bug, error) {
	ref := fmt.Sprintf(bugsRemoteRefPattern, remote) + id
	return readBug(repo, ref)
}

// Read and parse a Bug from git
func readBug(repo repository.Repo, ref string) (*Bug, error) {
	hashes, err := repo.ListCommits(ref)

	if err != nil {
		return nil, err
	}

	refSplitted := strings.Split(ref, "/")
	id := refSplitted[len(refSplitted)-1]

	if len(id) != idLength {
		return nil, fmt.Errorf("Invalid ref length")
	}

	bug := Bug{
		id: id,
	}

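	// The bug history is a chain of git commits; each commit's tree holds one
	// serialized OperationPack under the "ops" entry.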
	// Load each OperationPack
	for _, hash := range hashes {
		entries, err := repo.ListEntries(hash)

		bug.lastCommit = hash

		if err != nil {
			return nil, err
		}

		var opsEntry repository.TreeEntry
		opsFound := false
		var rootEntry repository.TreeEntry
		rootFound := false

		for _, entry := range entries {
			if entry.Name == opsEntryName {
				opsEntry = entry
				opsFound = true
				continue
			}
			if entry.Name == rootEntryName {
				rootEntry = entry
				rootFound = true
			}
		}

		if !opsFound {
			return nil, errors.New("Invalid tree, missing the ops entry")
		}

		if !rootFound {
			return nil, errors.New("Invalid tree, missing the root entry")
		}

		if bug.rootPack == "" {
			bug.rootPack = rootEntry.Hash
		}

		data, err := repo.ReadData(opsEntry.Hash)

		if err != nil {
			return nil, err
		}

		op, err := ParseOperationPack(data)

		if err != nil {
			return nil, err
		}

		// tag the pack with the commit hash
		op.commitHash = hash

		bug.packs = append(bug.packs, *op)
	}

	return &bug, nil
}

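// StreamedBug is the result of reading bugs asynchronously: it carries either a
// Bug or the error that interrupted the stream.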
type StreamedBug struct {
	Bug *Bug
	Err error
}

// Read and parse all local bugs
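//
// Consuming the returned channel (a sketch):
//
//	for streamed := range ReadAllLocalBugs(repo) {
//		if streamed.Err != nil {
//			// handle the error; the channel will be closed
//			break
//		}
//		// use streamed.Bug
//	}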
func ReadAllLocalBugs(repo repository.Repo) <-chan StreamedBug {
	return readAllBugs(repo, bugsRefPattern)
}

// Read and parse all remote bugs for a given remote
func ReadAllRemoteBugs(repo repository.Repo, remote string) <-chan StreamedBug {
	refPrefix := fmt.Sprintf(bugsRemoteRefPattern, remote)
	return readAllBugs(repo, refPrefix)
}

// Read and parse all available bugs with a given ref prefix
func readAllBugs(repo repository.Repo, refPrefix string) <-chan StreamedBug {
	out := make(chan StreamedBug)

	go func() {
		defer close(out)

		refs, err := repo.ListRefs(refPrefix)
		if err != nil {
			out <- StreamedBug{Err: err}
			return
		}

		for _, ref := range refs {
			b, err := readBug(repo, ref)

			if err != nil {
				out <- StreamedBug{Err: err}
				return
			}

			out <- StreamedBug{Bug: b}
		}
	}()

	return out
}

// List all the available local bug ids
func ListLocalIds(repo repository.Repo) ([]string, error) {
	return repo.ListIds(bugsRefPattern)
}

// IsValid checks if the Bug data is valid
func (bug *Bug) IsValid() bool {
	// non-empty
	if len(bug.packs) == 0 && bug.staging.IsEmpty() {
		return false
	}

	// check if each pack is valid
	for _, pack := range bug.packs {
		if !pack.IsValid() {
			return false
		}
	}

	// check if staging is valid if needed
	if !bug.staging.IsEmpty() {
		if !bug.staging.IsValid() {
			return false
		}
	}

	// The very first Op should be a CreateOp
	firstOp := bug.firstOp()
	if firstOp == nil || firstOp.OpType() != CreateOp {
		return false
	}

	// Check that there is no other CreateOp in the whole history
	it := NewOperationIterator(bug)
	createCount := 0
	for it.Next() {
		if it.Value().OpType() == CreateOp {
			createCount++
		}
	}

	if createCount != 1 {
		return false
	}

	return true
}

// Append an operation into the staging area, to be committed later
func (bug *Bug) Append(op Operation) {
	bug.staging.Append(op)
}

// Write the staging area in Git and move the operations to the packs
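//
// Each commit tree written here contains:
//   - "ops": a blob with the serialized pack of the newly committed operations
//   - "root": a blob with the very first pack (the same blob on the first commit)
//   - "media": a tree referencing the files needed by the operations (only if any)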
func (bug *Bug) Commit(repo repository.Repo) error {
	if bug.staging.IsEmpty() {
		return fmt.Errorf("can't commit a bug with no pending operation")
	}

	// Write the Ops as a Git blob containing the serialized array
	hash, err := bug.staging.Write(repo)
	if err != nil {
		return err
	}

	if bug.rootPack == "" {
		bug.rootPack = hash
	}

	// Make a Git tree referencing this blob
	tree := []repository.TreeEntry{
		// the last pack of ops
		{ObjectType: repository.Blob, Hash: hash, Name: opsEntryName},
		// always the first pack of ops (might be the same)
		{ObjectType: repository.Blob, Hash: bug.rootPack, Name: rootEntryName},
	}

	// Also reference, if any, all the files required by the ops.
	// Git will check that they actually exist in the storage and will make sure
	// to push/pull them as needed.
	mediaTree := makeMediaTree(bug.staging)
	if len(mediaTree) > 0 {
		mediaTreeHash, err := repo.StoreTree(mediaTree)
		if err != nil {
			return err
		}
		tree = append(tree, repository.TreeEntry{
			ObjectType: repository.Tree,
			Hash:       mediaTreeHash,
			Name:       mediaEntryName,
		})
	}

	// Store the tree
	hash, err = repo.StoreTree(tree)
	if err != nil {
		return err
	}

	// Write a Git commit referencing the tree, with the previous commit as parent
	if bug.lastCommit != "" {
		hash, err = repo.StoreCommitWithParent(hash, bug.lastCommit)
	} else {
		hash, err = repo.StoreCommit(hash)
	}

	if err != nil {
		return err
	}

	bug.lastCommit = hash

	// if it was the first commit, use the commit hash as bug id
	if bug.id == "" {
		bug.id = string(hash)
	}

	// Create or update the Git reference for this bug.
	// When pushing later, the remote will ensure that this ref update
	// is a fast-forward, that is, no data has been overwritten.
	ref := fmt.Sprintf("%s%s", bugsRefPattern, bug.id)
	err = repo.UpdateRef(ref, hash)

	if err != nil {
		return err
	}

	bug.packs = append(bug.packs, bug.staging)
	bug.staging = OperationPack{}

	return nil
}

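// Build a tree referencing every file used by the operations of the pack,
// deduplicated by hash. The entry names are irrelevant; the tree only exists so
// that git keeps and transfers the referenced blobs.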
func makeMediaTree(pack OperationPack) []repository.TreeEntry {
	var tree []repository.TreeEntry
	counter := 0
	added := make(map[util.Hash]interface{})

	for _, ops := range pack.Operations {
		for _, file := range ops.Files() {
			if _, has := added[file]; !has {
				tree = append(tree, repository.TreeEntry{
					ObjectType: repository.Blob,
					Hash:       file,
					// The name is not important here, we only need to
					// reference the blob.
					Name: fmt.Sprintf("file%d", counter),
				})
				counter++
				added[file] = struct{}{}
			}
		}
	}

	return tree
}

// Merge a different version of the same bug by rebasing operations of this bug
// that are not present in the other on top of the chain of operations of the
// other version.
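//
// Returns true if the merge changed this bug. Typical use when pulling
// (a sketch; remoteBug would come from ReadRemoteBug):
//
//	updated, err := localBug.Merge(repo, remoteBug)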
func (bug *Bug) Merge(repo repository.Repo, other *Bug) (bool, error) {
	// Note: a faster merge should be possible without actually reading and parsing
	// all the operation packs on our side.
	// Reading the other side is still necessary to validate remote data, at least
	// for new operations

	if bug.id != other.id {
		return false, errors.New("merging unrelated bugs is not supported")
	}

	if len(other.staging.Operations) > 0 {
		return false, errors.New("merging a bug with a non-empty staging is not supported")
	}

	if bug.lastCommit == "" || other.lastCommit == "" {
		return false, errors.New("can't merge a bug that has never been stored")
	}

	ancestor, err := repo.FindCommonAncestor(bug.lastCommit, other.lastCommit)

	if err != nil {
		return false, err
	}

	ancestorIndex := 0
	newPacks := make([]OperationPack, 0, len(bug.packs))

	// Find the root of the rebase
	for i, pack := range bug.packs {
		newPacks = append(newPacks, pack)

		if pack.commitHash == ancestor {
			ancestorIndex = i
			break
		}
	}

	if len(other.packs) == ancestorIndex+1 {
		// Nothing to rebase, return early
		return false, nil
	}

	// get the other bug's extra packs
	for i := ancestorIndex + 1; i < len(other.packs); i++ {
		// clone is probably not necessary
		newPack := other.packs[i].Clone()

		newPacks = append(newPacks, newPack)
		bug.lastCommit = newPack.commitHash
	}

	// rebase our extra packs
	for i := ancestorIndex + 1; i < len(bug.packs); i++ {
		pack := bug.packs[i]

		// get the referenced git tree
		treeHash, err := repo.GetTreeHash(pack.commitHash)

		if err != nil {
			return false, err
		}

		// create a new commit with the correct ancestor
		hash, err := repo.StoreCommitWithParent(treeHash, bug.lastCommit)

		if err != nil {
			return false, err
		}

		// replace the pack
		newPack := pack.Clone()
		newPack.commitHash = hash
		newPacks = append(newPacks, newPack)

		// update the bug
		bug.lastCommit = hash
	}

	// store the merged and rebased packs
	bug.packs = newPacks

	// Update the git ref
	err = repo.UpdateRef(bugsRefPattern+bug.id, bug.lastCommit)
	if err != nil {
		return false, err
	}

	return true, nil
}

// Return the Bug identifier
func (bug *Bug) Id() string {
	if bug.id == "" {
		// simply panic as it would be a coding error
		// (using the id of a bug not stored yet)
		panic("no id yet")
	}
	return bug.id
}

// Return the Bug identifier truncated for human consumption
func (bug *Bug) HumanId() string {
	return formatHumanId(bug.Id())
}

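// Truncate an id to its first humanIdLength characters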
func formatHumanId(id string) string {
	format := fmt.Sprintf("%%.%ds", humanIdLength)
	return fmt.Sprintf(format, id)
}

// Look up the very first operation of the bug.
// For a valid Bug, this operation should be a CreateOp
func (bug *Bug) firstOp() Operation {
	for _, pack := range bug.packs {
		for _, op := range pack.Operations {
			return op
		}
	}

	if !bug.staging.IsEmpty() {
		return bug.staging.Operations[0]
	}

	return nil
}

// Compile a bug into an easily usable snapshot
func (bug *Bug) Compile() Snapshot {
	snap := Snapshot{
		id:     bug.id,
		Status: OpenStatus,
	}

	it := NewOperationIterator(bug)

	for it.Next() {
		op := it.Value()
		snap = op.Apply(snap)
		snap.Operations = append(snap.Operations, op)
	}

	return snap
}