Merge remote-tracking branch 'origin/master' into dev-gh-bridge

Created by Michael Muré

Change summary

.github/dependabot.yml                                   |   8 
.github/workflows/go.yml                                 |   2 
.gitignore                                               |   1 
api/graphql/graph/gen_graph.go                           | 340 ++++
api/graphql/models/gen_models.go                         |  24 
api/graphql/models/lazy_identity.go                      |  49 
api/graphql/resolvers/mutation.go                        |  29 
api/graphql/resolvers/operations.go                      |  12 
api/graphql/resolvers/query.go                           |  13 
api/graphql/schema/mutations.graphql                     |  24 
api/graphql/schema/root.graphql                          |   2 
bridge/core/auth/credential.go                           |   6 
bridge/core/auth/credential_test.go                      |   4 
bridge/github/export.go                                  |   2 
bridge/github/export_test.go                             |   2 
bridge/github/import.go                                  |   2 
bridge/github/import_test.go                             |  63 
bridge/gitlab/export.go                                  |   2 
bridge/gitlab/export_test.go                             |   2 
bridge/gitlab/import.go                                  |   1 
bridge/gitlab/import_test.go                             |  71 
bridge/jira/export.go                                    |   2 
bridge/jira/import.go                                    |   1 
bridge/launchpad/import.go                               |   1 
bug/bug.go                                               | 703 +--------
bug/bug_actions.go                                       | 116 -
bug/bug_actions_test.go                                  | 390 -----
bug/bug_test.go                                          | 186 --
bug/clocks.go                                            |  40 
bug/err.go                                               |  17 
bug/identity.go                                          |  27 
bug/interface.go                                         |   8 
bug/op_add_comment.go                                    |  24 
bug/op_add_comment_test.go                               |  10 
bug/op_create.go                                         |  59 
bug/op_create_test.go                                    |  38 
bug/op_edit_comment.go                                   |  17 
bug/op_edit_comment_test.go                              |  71 
bug/op_label_change.go                                   |  17 
bug/op_label_change_test.go                              |  18 
bug/op_noop.go                                           |  13 
bug/op_noop_test.go                                      |  10 
bug/op_set_metadata.go                                   |  26 
bug/op_set_metadata_test.go                              |  57 
bug/op_set_status.go                                     |  17 
bug/op_set_status_test.go                                |  18 
bug/op_set_title.go                                      |  29 
bug/op_set_title_test.go                                 |  18 
bug/operation.go                                         | 202 ++
bug/operation_iterator.go                                |  72 -
bug/operation_iterator_test.go                           |  78 -
bug/operation_pack.go                                    | 188 --
bug/operation_pack_test.go                               |  79 -
bug/operation_test.go                                    |  38 
bug/snapshot.go                                          |   5 
bug/sorting.go                                           |   8 
bug/with_snapshot.go                                     |   8 
cache/bug_cache.go                                       |   4 
cache/bug_excerpt.go                                     |   2 
cache/filter.go                                          |   3 
cache/identity_cache.go                                  |   8 
cache/repo_cache.go                                      |   5 
cache/repo_cache_bug.go                                  |  75 
cache/repo_cache_common.go                               |  15 
cache/repo_cache_identity.go                             |  13 
cache/repo_cache_test.go                                 |  33 
cache/resolvers.go                                       |  29 
commands/comment.go                                      |   1 
commands/comment_edit.go                                 |  71 +
commands/show.go                                         |   3 
commands/user.go                                         |  14 
commands/user_create.go                                  |   2 
commands/webui.go                                        |  19 
doc/man/git-bug-comment-edit.1                           |  35 
doc/man/git-bug-comment.1                                |   2 
doc/man/git-bug-user.1                                   |   2 
doc/man/git-bug-webui.1                                  |  10 
doc/md/git-bug_comment.md                                |   1 
doc/md/git-bug_comment_edit.md                           |  20 
doc/md/git-bug_user.md                                   |   2 
doc/md/git-bug_webui.md                                  |  12 
entity/dag/clock.go                                      |  38 
entity/dag/common_test.go                                | 173 ++
entity/dag/entity.go                                     | 439 ++++++
entity/dag/entity_actions.go                             | 260 +++
entity/dag/entity_actions_test.go                        | 412 +++++
entity/dag/entity_test.go                                |  68 
entity/dag/operation.go                                  |  48 
entity/dag/operation_pack.go                             | 358 +++++
entity/dag/operation_pack_test.go                        | 159 ++
entity/doc.go                                            |   8 
entity/err.go                                            |  39 
entity/id.go                                             |  20 
entity/id_interleaved.go                                 |  68 
entity/id_interleaved_test.go                            |  36 
entity/interface.go                                      |   6 
entity/merge.go                                          |  53 
entity/refs.go                                           |   6 
go.mod                                                   |   9 
go.sum                                                   |  30 
identity/identity.go                                     | 290 +--
identity/identity_actions.go                             |  17 
identity/identity_actions_test.go                        |  40 
identity/identity_stub.go                                |  22 
identity/identity_test.go                                | 241 +-
identity/interface.go                                    |  28 
identity/key.go                                          | 218 +++
identity/key_test.go                                     |  60 
identity/resolver.go                                     |  35 
identity/version.go                                      | 173 +
identity/version_test.go                                 |  67 
misc/bash_completion/git-bug                             |  43 
misc/random_bugs/create_random_bugs.go                   |  57 
repository/common.go                                     |  67 
repository/config_mem.go                                 |  19 
repository/config_testing.go                             |  39 
repository/git.go                                        | 500 -------
repository/git_cli.go                                    |  56 
repository/git_config.go                                 | 221 ---
repository/git_test.go                                   |  10 
repository/git_testing.go                                |  72 -
repository/gogit.go                                      | 193 ++
repository/gogit_config.go                               |   2 
repository/gogit_testing.go                              |   8 
repository/keyring.go                                    |  12 
repository/mock_repo.go                                  | 257 ++-
repository/mock_repo_test.go                             |   6 
repository/repo.go                                       |  60 
repository/repo_testing.go                               |  84 +
repository/tree_entry.go                                 |  10 
tests/read_bugs_test.go                                  |   4 
util/lamport/clock_testing.go                            |   6 
util/lamport/mem_clock.go                                |  14 
webui/.eslintrc.js                                       |   1 
webui/package-lock.json                                  |  31 
webui/public/logo-alpha-flat-outline.svg                 |   2 
webui/src/App.tsx                                        |   2 
webui/src/components/BackToListButton.tsx                |  38 
webui/src/components/BugTitleForm/BugTitleForm.tsx       |  46 
webui/src/components/BugTitleForm/BugTitleInput.tsx      |  40 
webui/src/components/CloseBugButton/CloseBugButton.tsx   |  13 
webui/src/components/CommentInput/CommentInput.tsx       |   5 
webui/src/components/Content/PreTag.tsx                  |   2 
webui/src/components/Header/Header.tsx                   |  78 +
webui/src/components/ReopenBugButton/ReopenBugButton.tsx |   2 
webui/src/components/Themer.tsx                          |  65 
webui/src/index.tsx                                      |   9 
webui/src/pages/bug/Bug.tsx                              |  22 
webui/src/pages/bug/BugQuery.tsx                         |   4 
webui/src/pages/bug/CommentForm.tsx                      |   9 
webui/src/pages/bug/EditCommentForm.graphql              |  16 
webui/src/pages/bug/EditCommentForm.tsx                  | 123 +
webui/src/pages/bug/Message.tsx                          | 141 +
webui/src/pages/bug/MessageCommentFragment.graphql       |   5 
webui/src/pages/bug/MessageCreateFragment.graphql        |   5 
webui/src/pages/bug/MessageHistory.graphql               |  15 
webui/src/pages/bug/MessageHistoryDialog.tsx             | 235 +++
webui/src/pages/bug/Timeline.tsx                         |   8 
webui/src/pages/bug/TimelineQuery.tsx                    |   9 
webui/src/pages/list/BugRow.graphql                      |   3 
webui/src/pages/list/BugRow.tsx                          |  16 
webui/src/pages/list/Filter.tsx                          |  78 
webui/src/pages/list/FilterToolbar.tsx                   |  75 
webui/src/pages/list/ListIdentities.graphql              |  13 
webui/src/pages/list/ListLabels.graphql                  |   9 
webui/src/pages/list/ListQuery.tsx                       | 147 +
webui/src/pages/new/NewBugPage.tsx                       |  40 
webui/src/pages/notfound/NotFoundPage.tsx                |  52 
webui/src/theme.ts                                       |  11 
webui/src/themes/DefaultDark.ts                          |  26 
webui/src/themes/DefaultLight.ts                         |  26 
webui/src/themes/index.ts                                |   4 
172 files changed, 6,185 insertions(+), 4,053 deletions(-)

Detailed changes

.github/dependabot.yml 🔗

@@ -0,0 +1,8 @@
+version: 2
+updates:
+- package-ecosystem: gomod
+  directory: "/"
+  schedule:
+    interval: daily
+    time: "04:00"
+  open-pull-requests-limit: 10

.github/workflows/go.yml 🔗

@@ -12,7 +12,7 @@ jobs:
 
     strategy:
       matrix:
-        go-version: [1.13.x, 1.14.x, 1.15.x]
+        go-version: [1.15.x]
         platform: [ubuntu-latest, macos-latest, windows-latest]
 
     runs-on: ${{ matrix.platform }}

.gitignore 🔗

@@ -7,3 +7,4 @@ git-bug
 dist
 coverage.txt
 .idea/
+.git_bak*

api/graphql/graph/gen_graph.go 🔗

@@ -193,6 +193,12 @@ type ComplexityRoot struct {
 		Target  func(childComplexity int) int
 	}
 
+	EditCommentPayload struct {
+		Bug              func(childComplexity int) int
+		ClientMutationID func(childComplexity int) int
+		Operation        func(childComplexity int) int
+	}
+
 	Identity struct {
 		AvatarUrl   func(childComplexity int) int
 		DisplayName func(childComplexity int) int
@@ -258,6 +264,7 @@ type ComplexityRoot struct {
 		AddComment   func(childComplexity int, input models.AddCommentInput) int
 		ChangeLabels func(childComplexity int, input *models.ChangeLabelInput) int
 		CloseBug     func(childComplexity int, input models.CloseBugInput) int
+		EditComment  func(childComplexity int, input models.EditCommentInput) int
 		NewBug       func(childComplexity int, input models.NewBugInput) int
 		OpenBug      func(childComplexity int, input models.OpenBugInput) int
 		SetTitle     func(childComplexity int, input models.SetTitleInput) int
@@ -433,6 +440,7 @@ type LabelChangeTimelineItemResolver interface {
 type MutationResolver interface {
 	NewBug(ctx context.Context, input models.NewBugInput) (*models.NewBugPayload, error)
 	AddComment(ctx context.Context, input models.AddCommentInput) (*models.AddCommentPayload, error)
+	EditComment(ctx context.Context, input models.EditCommentInput) (*models.EditCommentPayload, error)
 	ChangeLabels(ctx context.Context, input *models.ChangeLabelInput) (*models.ChangeLabelPayload, error)
 	OpenBug(ctx context.Context, input models.OpenBugInput) (*models.OpenBugPayload, error)
 	CloseBug(ctx context.Context, input models.CloseBugInput) (*models.CloseBugPayload, error)
@@ -1059,6 +1067,27 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 
 		return e.complexity.EditCommentOperation.Target(childComplexity), true
 
+	case "EditCommentPayload.bug":
+		if e.complexity.EditCommentPayload.Bug == nil {
+			break
+		}
+
+		return e.complexity.EditCommentPayload.Bug(childComplexity), true
+
+	case "EditCommentPayload.clientMutationId":
+		if e.complexity.EditCommentPayload.ClientMutationID == nil {
+			break
+		}
+
+		return e.complexity.EditCommentPayload.ClientMutationID(childComplexity), true
+
+	case "EditCommentPayload.operation":
+		if e.complexity.EditCommentPayload.Operation == nil {
+			break
+		}
+
+		return e.complexity.EditCommentPayload.Operation(childComplexity), true
+
 	case "Identity.avatarUrl":
 		if e.complexity.Identity.AvatarUrl == nil {
 			break
@@ -1333,6 +1362,18 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 
 		return e.complexity.Mutation.CloseBug(childComplexity, args["input"].(models.CloseBugInput)), true
 
+	case "Mutation.editComment":
+		if e.complexity.Mutation.EditComment == nil {
+			break
+		}
+
+		args, err := ec.field_Mutation_editComment_args(context.TODO(), rawArgs)
+		if err != nil {
+			return 0, false
+		}
+
+		return e.complexity.Mutation.EditComment(childComplexity, args["input"].(models.EditCommentInput)), true
+
 	case "Mutation.newBug":
 		if e.complexity.Mutation.NewBug == nil {
 			break
@@ -2034,6 +2075,30 @@ type AddCommentPayload {
     operation: AddCommentOperation!
 }
 
+input EditCommentInput {
+    """A unique identifier for the client performing the mutation."""
+    clientMutationId: String
+    """"The name of the repository. If not set, the default repository is used."""
+    repoRef: String
+    """The bug ID's prefix."""
+    prefix: String!
+    """The target."""
+    target: String!
+    """The new message to be set."""
+    message: String!
+    """The collection of file's hash required for the first message."""
+    files: [Hash!]
+}
+
+type EditCommentPayload {
+    """A unique identifier for the client performing the mutation."""
+    clientMutationId: String
+    """The affected bug."""
+    bug: Bug!
+    """The resulting operation."""
+    operation: EditCommentOperation!
+}
+
 input ChangeLabelInput {
     """A unique identifier for the client performing the mutation."""
     clientMutationId: String
@@ -2290,6 +2355,8 @@ type Mutation {
     newBug(input: NewBugInput!): NewBugPayload!
     """Add a new comment to a bug"""
     addComment(input: AddCommentInput!): AddCommentPayload!
+    """Change a comment of a bug"""
+    editComment(input: EditCommentInput!): EditCommentPayload!
     """Add or remove a set of label on a bug"""
     changeLabels(input: ChangeLabelInput): ChangeLabelPayload!
     """Change a bug's status to open"""
@@ -2657,6 +2724,20 @@ func (ec *executionContext) field_Mutation_closeBug_args(ctx context.Context, ra
 	return args, nil
 }
 
+func (ec *executionContext) field_Mutation_editComment_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+	var err error
+	args := map[string]interface{}{}
+	var arg0 models.EditCommentInput
+	if tmp, ok := rawArgs["input"]; ok {
+		arg0, err = ec.unmarshalNEditCommentInput2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋapiᚋgraphqlᚋmodelsᚐEditCommentInput(ctx, tmp)
+		if err != nil {
+			return nil, err
+		}
+	}
+	args["input"] = arg0
+	return args, nil
+}
+
 func (ec *executionContext) field_Mutation_newBug_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
 	var err error
 	args := map[string]interface{}{}
@@ -5591,6 +5672,105 @@ func (ec *executionContext) _EditCommentOperation_files(ctx context.Context, fie
 	return ec.marshalNHash2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋrepositoryᚐHashᚄ(ctx, field.Selections, res)
 }
 
+func (ec *executionContext) _EditCommentPayload_clientMutationId(ctx context.Context, field graphql.CollectedField, obj *models.EditCommentPayload) (ret graphql.Marshaler) {
+	defer func() {
+		if r := recover(); r != nil {
+			ec.Error(ctx, ec.Recover(ctx, r))
+			ret = graphql.Null
+		}
+	}()
+	fc := &graphql.FieldContext{
+		Object:   "EditCommentPayload",
+		Field:    field,
+		Args:     nil,
+		IsMethod: false,
+	}
+
+	ctx = graphql.WithFieldContext(ctx, fc)
+	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
+		ctx = rctx // use context from middleware stack in children
+		return obj.ClientMutationID, nil
+	})
+	if err != nil {
+		ec.Error(ctx, err)
+		return graphql.Null
+	}
+	if resTmp == nil {
+		return graphql.Null
+	}
+	res := resTmp.(*string)
+	fc.Result = res
+	return ec.marshalOString2ᚖstring(ctx, field.Selections, res)
+}
+
+func (ec *executionContext) _EditCommentPayload_bug(ctx context.Context, field graphql.CollectedField, obj *models.EditCommentPayload) (ret graphql.Marshaler) {
+	defer func() {
+		if r := recover(); r != nil {
+			ec.Error(ctx, ec.Recover(ctx, r))
+			ret = graphql.Null
+		}
+	}()
+	fc := &graphql.FieldContext{
+		Object:   "EditCommentPayload",
+		Field:    field,
+		Args:     nil,
+		IsMethod: false,
+	}
+
+	ctx = graphql.WithFieldContext(ctx, fc)
+	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
+		ctx = rctx // use context from middleware stack in children
+		return obj.Bug, nil
+	})
+	if err != nil {
+		ec.Error(ctx, err)
+		return graphql.Null
+	}
+	if resTmp == nil {
+		if !graphql.HasFieldError(ctx, fc) {
+			ec.Errorf(ctx, "must not be null")
+		}
+		return graphql.Null
+	}
+	res := resTmp.(models.BugWrapper)
+	fc.Result = res
+	return ec.marshalNBug2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋapiᚋgraphqlᚋmodelsᚐBugWrapper(ctx, field.Selections, res)
+}
+
+func (ec *executionContext) _EditCommentPayload_operation(ctx context.Context, field graphql.CollectedField, obj *models.EditCommentPayload) (ret graphql.Marshaler) {
+	defer func() {
+		if r := recover(); r != nil {
+			ec.Error(ctx, ec.Recover(ctx, r))
+			ret = graphql.Null
+		}
+	}()
+	fc := &graphql.FieldContext{
+		Object:   "EditCommentPayload",
+		Field:    field,
+		Args:     nil,
+		IsMethod: false,
+	}
+
+	ctx = graphql.WithFieldContext(ctx, fc)
+	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
+		ctx = rctx // use context from middleware stack in children
+		return obj.Operation, nil
+	})
+	if err != nil {
+		ec.Error(ctx, err)
+		return graphql.Null
+	}
+	if resTmp == nil {
+		if !graphql.HasFieldError(ctx, fc) {
+			ec.Errorf(ctx, "must not be null")
+		}
+		return graphql.Null
+	}
+	res := resTmp.(*bug.EditCommentOperation)
+	fc.Result = res
+	return ec.marshalNEditCommentOperation2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐEditCommentOperation(ctx, field.Selections, res)
+}
+
 func (ec *executionContext) _Identity_id(ctx context.Context, field graphql.CollectedField, obj models.IdentityWrapper) (ret graphql.Marshaler) {
 	defer func() {
 		if r := recover(); r != nil {
@@ -6817,6 +6997,47 @@ func (ec *executionContext) _Mutation_addComment(ctx context.Context, field grap
 	return ec.marshalNAddCommentPayload2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋapiᚋgraphqlᚋmodelsᚐAddCommentPayload(ctx, field.Selections, res)
 }
 
+func (ec *executionContext) _Mutation_editComment(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) {
+	defer func() {
+		if r := recover(); r != nil {
+			ec.Error(ctx, ec.Recover(ctx, r))
+			ret = graphql.Null
+		}
+	}()
+	fc := &graphql.FieldContext{
+		Object:   "Mutation",
+		Field:    field,
+		Args:     nil,
+		IsMethod: true,
+	}
+
+	ctx = graphql.WithFieldContext(ctx, fc)
+	rawArgs := field.ArgumentMap(ec.Variables)
+	args, err := ec.field_Mutation_editComment_args(ctx, rawArgs)
+	if err != nil {
+		ec.Error(ctx, err)
+		return graphql.Null
+	}
+	fc.Args = args
+	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
+		ctx = rctx // use context from middleware stack in children
+		return ec.resolvers.Mutation().EditComment(rctx, args["input"].(models.EditCommentInput))
+	})
+	if err != nil {
+		ec.Error(ctx, err)
+		return graphql.Null
+	}
+	if resTmp == nil {
+		if !graphql.HasFieldError(ctx, fc) {
+			ec.Errorf(ctx, "must not be null")
+		}
+		return graphql.Null
+	}
+	res := resTmp.(*models.EditCommentPayload)
+	fc.Result = res
+	return ec.marshalNEditCommentPayload2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋapiᚋgraphqlᚋmodelsᚐEditCommentPayload(ctx, field.Selections, res)
+}
+
 func (ec *executionContext) _Mutation_changeLabels(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) {
 	defer func() {
 		if r := recover(); r != nil {
@@ -9971,6 +10192,54 @@ func (ec *executionContext) unmarshalInputCloseBugInput(ctx context.Context, obj
 	return it, nil
 }
 
+func (ec *executionContext) unmarshalInputEditCommentInput(ctx context.Context, obj interface{}) (models.EditCommentInput, error) {
+	var it models.EditCommentInput
+	var asMap = obj.(map[string]interface{})
+
+	for k, v := range asMap {
+		switch k {
+		case "clientMutationId":
+			var err error
+			it.ClientMutationID, err = ec.unmarshalOString2ᚖstring(ctx, v)
+			if err != nil {
+				return it, err
+			}
+		case "repoRef":
+			var err error
+			it.RepoRef, err = ec.unmarshalOString2ᚖstring(ctx, v)
+			if err != nil {
+				return it, err
+			}
+		case "prefix":
+			var err error
+			it.Prefix, err = ec.unmarshalNString2string(ctx, v)
+			if err != nil {
+				return it, err
+			}
+		case "target":
+			var err error
+			it.Target, err = ec.unmarshalNString2string(ctx, v)
+			if err != nil {
+				return it, err
+			}
+		case "message":
+			var err error
+			it.Message, err = ec.unmarshalNString2string(ctx, v)
+			if err != nil {
+				return it, err
+			}
+		case "files":
+			var err error
+			it.Files, err = ec.unmarshalOHash2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋrepositoryᚐHashᚄ(ctx, v)
+			if err != nil {
+				return it, err
+			}
+		}
+	}
+
+	return it, nil
+}
+
 func (ec *executionContext) unmarshalInputNewBugInput(ctx context.Context, obj interface{}) (models.NewBugInput, error) {
 	var it models.NewBugInput
 	var asMap = obj.(map[string]interface{})
@@ -11254,6 +11523,40 @@ func (ec *executionContext) _EditCommentOperation(ctx context.Context, sel ast.S
 	return out
 }
 
+var editCommentPayloadImplementors = []string{"EditCommentPayload"}
+
+func (ec *executionContext) _EditCommentPayload(ctx context.Context, sel ast.SelectionSet, obj *models.EditCommentPayload) graphql.Marshaler {
+	fields := graphql.CollectFields(ec.OperationContext, sel, editCommentPayloadImplementors)
+
+	out := graphql.NewFieldSet(fields)
+	var invalids uint32
+	for i, field := range fields {
+		switch field.Name {
+		case "__typename":
+			out.Values[i] = graphql.MarshalString("EditCommentPayload")
+		case "clientMutationId":
+			out.Values[i] = ec._EditCommentPayload_clientMutationId(ctx, field, obj)
+		case "bug":
+			out.Values[i] = ec._EditCommentPayload_bug(ctx, field, obj)
+			if out.Values[i] == graphql.Null {
+				invalids++
+			}
+		case "operation":
+			out.Values[i] = ec._EditCommentPayload_operation(ctx, field, obj)
+			if out.Values[i] == graphql.Null {
+				invalids++
+			}
+		default:
+			panic("unknown field " + strconv.Quote(field.Name))
+		}
+	}
+	out.Dispatch()
+	if invalids > 0 {
+		return graphql.Null
+	}
+	return out
+}
+
 var identityImplementors = []string{"Identity"}
 
 func (ec *executionContext) _Identity(ctx context.Context, sel ast.SelectionSet, obj models.IdentityWrapper) graphql.Marshaler {
@@ -11734,6 +12037,11 @@ func (ec *executionContext) _Mutation(ctx context.Context, sel ast.SelectionSet)
 			if out.Values[i] == graphql.Null {
 				invalids++
 			}
+		case "editComment":
+			out.Values[i] = ec._Mutation_editComment(ctx, field)
+			if out.Values[i] == graphql.Null {
+				invalids++
+			}
 		case "changeLabels":
 			out.Values[i] = ec._Mutation_changeLabels(ctx, field)
 			if out.Values[i] == graphql.Null {
@@ -13130,6 +13438,38 @@ func (ec *executionContext) marshalNCreateOperation2ᚖgithubᚗcomᚋMichaelMur
 	return ec._CreateOperation(ctx, sel, v)
 }
 
+func (ec *executionContext) unmarshalNEditCommentInput2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋapiᚋgraphqlᚋmodelsᚐEditCommentInput(ctx context.Context, v interface{}) (models.EditCommentInput, error) {
+	return ec.unmarshalInputEditCommentInput(ctx, v)
+}
+
+func (ec *executionContext) marshalNEditCommentOperation2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐEditCommentOperation(ctx context.Context, sel ast.SelectionSet, v bug.EditCommentOperation) graphql.Marshaler {
+	return ec._EditCommentOperation(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalNEditCommentOperation2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐEditCommentOperation(ctx context.Context, sel ast.SelectionSet, v *bug.EditCommentOperation) graphql.Marshaler {
+	if v == nil {
+		if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
+			ec.Errorf(ctx, "must not be null")
+		}
+		return graphql.Null
+	}
+	return ec._EditCommentOperation(ctx, sel, v)
+}
+
+func (ec *executionContext) marshalNEditCommentPayload2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋapiᚋgraphqlᚋmodelsᚐEditCommentPayload(ctx context.Context, sel ast.SelectionSet, v models.EditCommentPayload) graphql.Marshaler {
+	return ec._EditCommentPayload(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalNEditCommentPayload2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋapiᚋgraphqlᚋmodelsᚐEditCommentPayload(ctx context.Context, sel ast.SelectionSet, v *models.EditCommentPayload) graphql.Marshaler {
+	if v == nil {
+		if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
+			ec.Errorf(ctx, "must not be null")
+		}
+		return graphql.Null
+	}
+	return ec._EditCommentPayload(ctx, sel, v)
+}
+
 func (ec *executionContext) unmarshalNHash2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋrepositoryᚐHash(ctx context.Context, v interface{}) (repository.Hash, error) {
 	var res repository.Hash
 	return res, res.UnmarshalGQL(v)

api/graphql/models/gen_models.go 🔗

@@ -111,6 +111,30 @@ type CommentEdge struct {
 	Node   *bug.Comment `json:"node"`
 }
 
+type EditCommentInput struct {
+	// A unique identifier for the client performing the mutation.
+	ClientMutationID *string `json:"clientMutationId"`
+	// "The name of the repository. If not set, the default repository is used.
+	RepoRef *string `json:"repoRef"`
+	// The bug ID's prefix.
+	Prefix string `json:"prefix"`
+	// The target.
+	Target string `json:"target"`
+	// The new message to be set.
+	Message string `json:"message"`
+	// The collection of file's hash required for the first message.
+	Files []repository.Hash `json:"files"`
+}
+
+type EditCommentPayload struct {
+	// A unique identifier for the client performing the mutation.
+	ClientMutationID *string `json:"clientMutationId"`
+	// The affected bug.
+	Bug BugWrapper `json:"bug"`
+	// The resulting operation.
+	Operation *bug.EditCommentOperation `json:"operation"`
+}
+
 type IdentityConnection struct {
 	Edges      []*IdentityEdge   `json:"edges"`
 	Nodes      []IdentityWrapper `json:"nodes"`

api/graphql/models/lazy_identity.go 🔗

@@ -7,8 +7,6 @@ import (
 	"github.com/MichaelMure/git-bug/cache"
 	"github.com/MichaelMure/git-bug/entity"
 	"github.com/MichaelMure/git-bug/identity"
-	"github.com/MichaelMure/git-bug/util/lamport"
-	"github.com/MichaelMure/git-bug/util/timestamp"
 )
 
 // IdentityWrapper is an interface used by the GraphQL resolvers to handle an identity.
@@ -21,11 +19,8 @@ type IdentityWrapper interface {
 	Login() (string, error)
 	AvatarUrl() (string, error)
 	Keys() ([]*identity.Key, error)
-	ValidKeysAtTime(time lamport.Time) ([]*identity.Key, error)
 	DisplayName() string
 	IsProtected() (bool, error)
-	LastModificationLamport() (lamport.Time, error)
-	LastModification() (timestamp.Timestamp, error)
 }
 
 var _ IdentityWrapper = &lazyIdentity{}
@@ -69,6 +64,10 @@ func (li *lazyIdentity) Name() string {
 	return li.excerpt.Name
 }
 
+func (li *lazyIdentity) DisplayName() string {
+	return li.excerpt.DisplayName()
+}
+
 func (li *lazyIdentity) Email() (string, error) {
 	id, err := li.load()
 	if err != nil {
@@ -101,18 +100,6 @@ func (li *lazyIdentity) Keys() ([]*identity.Key, error) {
 	return id.Keys(), nil
 }
 
-func (li *lazyIdentity) ValidKeysAtTime(time lamport.Time) ([]*identity.Key, error) {
-	id, err := li.load()
-	if err != nil {
-		return nil, err
-	}
-	return id.ValidKeysAtTime(time), nil
-}
-
-func (li *lazyIdentity) DisplayName() string {
-	return li.excerpt.DisplayName()
-}
-
 func (li *lazyIdentity) IsProtected() (bool, error) {
 	id, err := li.load()
 	if err != nil {
@@ -121,22 +108,6 @@ func (li *lazyIdentity) IsProtected() (bool, error) {
 	return id.IsProtected(), nil
 }
 
-func (li *lazyIdentity) LastModificationLamport() (lamport.Time, error) {
-	id, err := li.load()
-	if err != nil {
-		return 0, err
-	}
-	return id.LastModificationLamport(), nil
-}
-
-func (li *lazyIdentity) LastModification() (timestamp.Timestamp, error) {
-	id, err := li.load()
-	if err != nil {
-		return 0, err
-	}
-	return id.LastModification(), nil
-}
-
 var _ IdentityWrapper = &loadedIdentity{}
 
 type loadedIdentity struct {
@@ -163,18 +134,6 @@ func (l loadedIdentity) Keys() ([]*identity.Key, error) {
 	return l.Interface.Keys(), nil
 }
 
-func (l loadedIdentity) ValidKeysAtTime(time lamport.Time) ([]*identity.Key, error) {
-	return l.Interface.ValidKeysAtTime(time), nil
-}
-
 func (l loadedIdentity) IsProtected() (bool, error) {
 	return l.Interface.IsProtected(), nil
 }
-
-func (l loadedIdentity) LastModificationLamport() (lamport.Time, error) {
-	return l.Interface.LastModificationLamport(), nil
-}
-
-func (l loadedIdentity) LastModification() (timestamp.Timestamp, error) {
-	return l.Interface.LastModification(), nil
-}

api/graphql/resolvers/mutation.go 🔗

@@ -5,6 +5,7 @@ import (
 	"time"
 
 	"github.com/MichaelMure/git-bug/api/auth"
+	"github.com/MichaelMure/git-bug/entity"
 	"github.com/MichaelMure/git-bug/api/graphql/graph"
 	"github.com/MichaelMure/git-bug/api/graphql/models"
 	"github.com/MichaelMure/git-bug/bug"
@@ -89,6 +90,34 @@ func (r mutationResolver) AddComment(ctx context.Context, input models.AddCommen
 	}, nil
 }
 
+func (r mutationResolver) EditComment(ctx context.Context, input models.EditCommentInput) (*models.EditCommentPayload, error) {
+	repo, b, err := r.getBug(input.RepoRef, input.Prefix)
+	if err != nil {
+		return nil, err
+	}
+
+	author, err := auth.UserFromCtx(ctx, repo)
+	if err != nil {
+		return nil, err
+	}
+
+	op, err := b.EditCommentRaw(author, time.Now().Unix(), entity.Id(input.Target), input.Message, nil)
+	if err != nil {
+		return nil, err
+	}
+
+	err = b.Commit()
+	if err != nil {
+		return nil, err
+	}
+
+	return &models.EditCommentPayload{
+		ClientMutationID: input.ClientMutationID,
+		Bug:              models.NewLoadedBug(b.Snapshot()),
+		Operation:        op,
+	}, nil
+}
+
 func (r mutationResolver) ChangeLabels(ctx context.Context, input *models.ChangeLabelInput) (*models.ChangeLabelPayload, error) {
 	repo, b, err := r.getBug(input.RepoRef, input.Prefix)
 	if err != nil {

api/graphql/resolvers/operations.go 🔗

@@ -19,7 +19,7 @@ func (createOperationResolver) ID(_ context.Context, obj *bug.CreateOperation) (
 }
 
 func (createOperationResolver) Author(_ context.Context, obj *bug.CreateOperation) (models.IdentityWrapper, error) {
-	return models.NewLoadedIdentity(obj.Author), nil
+	return models.NewLoadedIdentity(obj.Author()), nil
 }
 
 func (createOperationResolver) Date(_ context.Context, obj *bug.CreateOperation) (*time.Time, error) {
@@ -36,7 +36,7 @@ func (addCommentOperationResolver) ID(_ context.Context, obj *bug.AddCommentOper
 }
 
 func (addCommentOperationResolver) Author(_ context.Context, obj *bug.AddCommentOperation) (models.IdentityWrapper, error) {
-	return models.NewLoadedIdentity(obj.Author), nil
+	return models.NewLoadedIdentity(obj.Author()), nil
 }
 
 func (addCommentOperationResolver) Date(_ context.Context, obj *bug.AddCommentOperation) (*time.Time, error) {
@@ -57,7 +57,7 @@ func (editCommentOperationResolver) Target(_ context.Context, obj *bug.EditComme
 }
 
 func (editCommentOperationResolver) Author(_ context.Context, obj *bug.EditCommentOperation) (models.IdentityWrapper, error) {
-	return models.NewLoadedIdentity(obj.Author), nil
+	return models.NewLoadedIdentity(obj.Author()), nil
 }
 
 func (editCommentOperationResolver) Date(_ context.Context, obj *bug.EditCommentOperation) (*time.Time, error) {
@@ -74,7 +74,7 @@ func (labelChangeOperationResolver) ID(_ context.Context, obj *bug.LabelChangeOp
 }
 
 func (labelChangeOperationResolver) Author(_ context.Context, obj *bug.LabelChangeOperation) (models.IdentityWrapper, error) {
-	return models.NewLoadedIdentity(obj.Author), nil
+	return models.NewLoadedIdentity(obj.Author()), nil
 }
 
 func (labelChangeOperationResolver) Date(_ context.Context, obj *bug.LabelChangeOperation) (*time.Time, error) {
@@ -91,7 +91,7 @@ func (setStatusOperationResolver) ID(_ context.Context, obj *bug.SetStatusOperat
 }
 
 func (setStatusOperationResolver) Author(_ context.Context, obj *bug.SetStatusOperation) (models.IdentityWrapper, error) {
-	return models.NewLoadedIdentity(obj.Author), nil
+	return models.NewLoadedIdentity(obj.Author()), nil
 }
 
 func (setStatusOperationResolver) Date(_ context.Context, obj *bug.SetStatusOperation) (*time.Time, error) {
@@ -112,7 +112,7 @@ func (setTitleOperationResolver) ID(_ context.Context, obj *bug.SetTitleOperatio
 }
 
 func (setTitleOperationResolver) Author(_ context.Context, obj *bug.SetTitleOperation) (models.IdentityWrapper, error) {
-	return models.NewLoadedIdentity(obj.Author), nil
+	return models.NewLoadedIdentity(obj.Author()), nil
 }
 
 func (setTitleOperationResolver) Date(_ context.Context, obj *bug.SetTitleOperation) (*time.Time, error) {

api/graphql/resolvers/query.go 🔗

@@ -14,19 +14,6 @@ type rootQueryResolver struct {
 	cache *cache.MultiRepoCache
 }
 
-func (r rootQueryResolver) DefaultRepository(_ context.Context) (*models.Repository, error) {
-	repo, err := r.cache.DefaultRepo()
-
-	if err != nil {
-		return nil, err
-	}
-
-	return &models.Repository{
-		Cache: r.cache,
-		Repo:  repo,
-	}, nil
-}
-
 func (r rootQueryResolver) Repository(_ context.Context, ref *string) (*models.Repository, error) {
 	var repo *cache.RepoCache
 	var err error

api/graphql/schema/mutations.graphql 🔗

@@ -42,6 +42,30 @@ type AddCommentPayload {
     operation: AddCommentOperation!
 }
 
+input EditCommentInput {
+    """A unique identifier for the client performing the mutation."""
+    clientMutationId: String
+    """The name of the repository. If not set, the default repository is used."""
+    repoRef: String
+    """The bug ID's prefix."""
+    prefix: String!
+    """The ID of the comment to be changed."""
+    target: String!
+    """The new message to be set."""
+    message: String!
+    """The collection of files' hashes required for the message."""
+    files: [Hash!]
+}
+
+type EditCommentPayload {
+    """A unique identifier for the client performing the mutation."""
+    clientMutationId: String
+    """The affected bug."""
+    bug: Bug!
+    """The resulting operation."""
+    operation: EditCommentOperation!
+}
+
 input ChangeLabelInput {
     """A unique identifier for the client performing the mutation."""
     clientMutationId: String

api/graphql/schema/root.graphql 🔗

@@ -8,6 +8,8 @@ type Mutation {
     newBug(input: NewBugInput!): NewBugPayload!
     """Add a new comment to a bug"""
     addComment(input: AddCommentInput!): AddCommentPayload!
+    """Change a comment of a bug"""
+    editComment(input: EditCommentInput!): EditCommentPayload!
     """Add or remove a set of label on a bug"""
     changeLabels(input: ChangeLabelInput): ChangeLabelPayload!
     """Change a bug's status to open"""

bridge/core/auth/credential.go 🔗

@@ -3,12 +3,13 @@ package auth
 import (
 	"encoding/base64"
 	"encoding/json"
-	"errors"
 	"fmt"
 	"strconv"
 	"strings"
 	"time"
 
+	"github.com/pkg/errors"
+
 	"github.com/MichaelMure/git-bug/entity"
 	"github.com/MichaelMure/git-bug/repository"
 )
@@ -159,7 +160,8 @@ func List(repo repository.RepoKeyring, opts ...ListOption) ([]Credential, error)
 
 		item, err := repo.Keyring().Get(key)
 		if err != nil {
-			return nil, err
+			// skip unreadable items, nothing much we can do for them anyway
+			continue
 		}
 
 		cred, err := decode(item)

bridge/core/auth/credential_test.go 🔗

@@ -11,7 +11,7 @@ import (
 )
 
 func TestCredential(t *testing.T) {
-	repo := repository.NewMockRepoForTest()
+	repo := repository.NewMockRepo()
 
 	storeToken := func(val string, target string) *Token {
 		token := NewToken(target, val)
@@ -102,7 +102,7 @@ func sameIds(t *testing.T, a []Credential, b []Credential) {
 }
 
 func testCredentialSerial(t *testing.T, original Credential) Credential {
-	repo := repository.NewMockRepoForTest()
+	repo := repository.NewMockRepo()
 
 	original.SetMetadata("test", "value")
 

bridge/github/export.go 🔗

@@ -294,7 +294,7 @@ func (ge *githubExporter) exportBug(ctx context.Context, b *cache.BugCache, out
 			continue
 		}
 
-		opAuthor := op.GetAuthor()
+		opAuthor := op.Author()
 		client, err := ge.getClientForIdentity(opAuthor.Id())
 		if err != nil {
 			continue

bridge/github/export_test.go 🔗

@@ -126,7 +126,7 @@ func testCases(t *testing.T, repo *cache.RepoCache) []*testCase {
 	}
 }
 
-func TestPushPull(t *testing.T) {
+func TestGithubPushPull(t *testing.T) {
 	// repo owner
 	envUser := os.Getenv("GITHUB_TEST_USER")
 

bridge/github/import.go 🔗

@@ -543,6 +543,7 @@ func (gi *githubImporter) ensurePerson(ctx context.Context, repo *cache.RepoCach
 		email,
 		string(actor.Login),
 		string(actor.AvatarUrl),
+		nil,
 		map[string]string{
 			metaKeyGithubLogin: string(actor.Login),
 		},
@@ -576,6 +577,7 @@ func (gi *githubImporter) getGhost(ctx context.Context, repo *cache.RepoCache) (
 		"",
 		string(user.Login),
 		string(user.AvatarUrl),
+		nil,
 		map[string]string{
 			metaKeyGithubLogin: string(user.Login),
 		},

bridge/github/import_test.go 🔗

@@ -7,7 +7,6 @@ import (
 	"testing"
 	"time"
 
-	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 
 	"github.com/MichaelMure/git-bug/bridge/core"
@@ -19,8 +18,23 @@ import (
 	"github.com/MichaelMure/git-bug/util/interrupt"
 )
 
-func Test_Importer(t *testing.T) {
-	author := identity.NewIdentity("Michael Muré", "batolettre@gmail.com")
+func TestGithubImporter(t *testing.T) {
+	envToken := os.Getenv("GITHUB_TOKEN_PRIVATE")
+	if envToken == "" {
+		t.Skip("Env var GITHUB_TOKEN_PRIVATE missing")
+	}
+
+	repo := repository.CreateGoGitTestRepo(false)
+	defer repository.CleanupTestRepos(repo)
+
+	backend, err := cache.NewRepoCache(repo)
+	require.NoError(t, err)
+
+	defer backend.Close()
+	interrupt.RegisterCleaner(backend.Close)
+
+	author, err := identity.NewIdentity(repo, "Michael Muré", "batolettre@gmail.com")
+	require.NoError(t, err)
 
 	tests := []struct {
 		name string
@@ -127,20 +141,6 @@ func Test_Importer(t *testing.T) {
 		},
 	}
 
-	repo := repository.CreateGoGitTestRepo(false)
-	defer repository.CleanupTestRepos(repo)
-
-	backend, err := cache.NewRepoCache(repo)
-	require.NoError(t, err)
-
-	defer backend.Close()
-	interrupt.RegisterCleaner(backend.Close)
-
-	envToken := os.Getenv("GITHUB_TOKEN_PRIVATE")
-	if envToken == "" {
-		t.Skip("Env var GITHUB_TOKEN_PRIVATE missing")
-	}
-
 	login := "test-identity"
 	author.SetMetadata(metaKeyGithubLogin, login)
 
@@ -178,33 +178,28 @@ func Test_Importer(t *testing.T) {
 			require.NoError(t, err)
 
 			ops := b.Snapshot().Operations
-			assert.Len(t, tt.bug.Operations, len(b.Snapshot().Operations))
+			require.Len(t, tt.bug.Operations, len(b.Snapshot().Operations))
 
 			for i, op := range tt.bug.Operations {
 				require.IsType(t, ops[i], op)
+				require.Equal(t, op.Author().Name(), ops[i].Author().Name())
 
-				switch op.(type) {
+				switch op := op.(type) {
 				case *bug.CreateOperation:
-					assert.Equal(t, op.(*bug.CreateOperation).Title, ops[i].(*bug.CreateOperation).Title)
-					assert.Equal(t, op.(*bug.CreateOperation).Message, ops[i].(*bug.CreateOperation).Message)
-					assert.Equal(t, op.(*bug.CreateOperation).Author.Name(), ops[i].(*bug.CreateOperation).Author.Name())
+					require.Equal(t, op.Title, ops[i].(*bug.CreateOperation).Title)
+					require.Equal(t, op.Message, ops[i].(*bug.CreateOperation).Message)
 				case *bug.SetStatusOperation:
-					assert.Equal(t, op.(*bug.SetStatusOperation).Status, ops[i].(*bug.SetStatusOperation).Status)
-					assert.Equal(t, op.(*bug.SetStatusOperation).Author.Name(), ops[i].(*bug.SetStatusOperation).Author.Name())
+					require.Equal(t, op.Status, ops[i].(*bug.SetStatusOperation).Status)
 				case *bug.SetTitleOperation:
-					assert.Equal(t, op.(*bug.SetTitleOperation).Was, ops[i].(*bug.SetTitleOperation).Was)
-					assert.Equal(t, op.(*bug.SetTitleOperation).Title, ops[i].(*bug.SetTitleOperation).Title)
-					assert.Equal(t, op.(*bug.SetTitleOperation).Author.Name(), ops[i].(*bug.SetTitleOperation).Author.Name())
+					require.Equal(t, op.Was, ops[i].(*bug.SetTitleOperation).Was)
+					require.Equal(t, op.Title, ops[i].(*bug.SetTitleOperation).Title)
 				case *bug.LabelChangeOperation:
-					assert.ElementsMatch(t, op.(*bug.LabelChangeOperation).Added, ops[i].(*bug.LabelChangeOperation).Added)
-					assert.ElementsMatch(t, op.(*bug.LabelChangeOperation).Removed, ops[i].(*bug.LabelChangeOperation).Removed)
-					assert.Equal(t, op.(*bug.LabelChangeOperation).Author.Name(), ops[i].(*bug.LabelChangeOperation).Author.Name())
+					require.ElementsMatch(t, op.Added, ops[i].(*bug.LabelChangeOperation).Added)
+					require.ElementsMatch(t, op.Removed, ops[i].(*bug.LabelChangeOperation).Removed)
 				case *bug.AddCommentOperation:
-					assert.Equal(t, op.(*bug.AddCommentOperation).Message, ops[i].(*bug.AddCommentOperation).Message)
-					assert.Equal(t, op.(*bug.AddCommentOperation).Author.Name(), ops[i].(*bug.AddCommentOperation).Author.Name())
+					require.Equal(t, op.Message, ops[i].(*bug.AddCommentOperation).Message)
 				case *bug.EditCommentOperation:
-					assert.Equal(t, op.(*bug.EditCommentOperation).Message, ops[i].(*bug.EditCommentOperation).Message)
-					assert.Equal(t, op.(*bug.EditCommentOperation).Author.Name(), ops[i].(*bug.EditCommentOperation).Author.Name())
+					require.Equal(t, op.Message, ops[i].(*bug.EditCommentOperation).Message)
 
 				default:
 					panic("unknown operation type")

bridge/gitlab/export.go 🔗

@@ -267,7 +267,7 @@ func (ge *gitlabExporter) exportBug(ctx context.Context, b *cache.BugCache, out
 			continue
 		}
 
-		opAuthor := op.GetAuthor()
+		opAuthor := op.Author()
 		client, err := ge.getIdentityClient(opAuthor.Id())
 		if err != nil {
 			continue

bridge/gitlab/export_test.go 🔗

@@ -134,7 +134,7 @@ func testCases(t *testing.T, repo *cache.RepoCache) []*testCase {
 	}
 }
 
-func TestPushPull(t *testing.T) {
+func TestGitlabPushPull(t *testing.T) {
 	// token must have 'repo' and 'delete_repo' scopes
 	envToken := os.Getenv("GITLAB_API_TOKEN")
 	if envToken == "" {

bridge/gitlab/import.go 🔗

@@ -403,6 +403,7 @@ func (gi *gitlabImporter) ensurePerson(repo *cache.RepoCache, id int) (*cache.Id
 		user.PublicEmail,
 		user.Username,
 		user.AvatarURL,
+		nil,
 		map[string]string{
 			// because Gitlab
 			metaKeyGitlabId:    strconv.Itoa(id),

bridge/gitlab/import_test.go 🔗

@@ -7,7 +7,6 @@ import (
 	"testing"
 	"time"
 
-	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 
 	"github.com/MichaelMure/git-bug/bridge/core"
@@ -19,8 +18,28 @@ import (
 	"github.com/MichaelMure/git-bug/util/interrupt"
 )
 
-func TestImport(t *testing.T) {
-	author := identity.NewIdentity("Amine Hilaly", "hilalyamine@gmail.com")
+func TestGitlabImport(t *testing.T) {
+	envToken := os.Getenv("GITLAB_API_TOKEN")
+	if envToken == "" {
+		t.Skip("Env var GITLAB_API_TOKEN missing")
+	}
+
+	projectID := os.Getenv("GITLAB_PROJECT_ID")
+	if projectID == "" {
+		t.Skip("Env var GITLAB_PROJECT_ID missing")
+	}
+
+	repo := repository.CreateGoGitTestRepo(false)
+	defer repository.CleanupTestRepos(repo)
+
+	backend, err := cache.NewRepoCache(repo)
+	require.NoError(t, err)
+
+	defer backend.Close()
+	interrupt.RegisterCleaner(backend.Close)
+
+	author, err := identity.NewIdentity(repo, "Amine Hilaly", "hilalyamine@gmail.com")
+	require.NoError(t, err)
 
 	tests := []struct {
 		name string
@@ -76,25 +95,6 @@ func TestImport(t *testing.T) {
 		},
 	}
 
-	repo := repository.CreateGoGitTestRepo(false)
-	defer repository.CleanupTestRepos(repo)
-
-	backend, err := cache.NewRepoCache(repo)
-	require.NoError(t, err)
-
-	defer backend.Close()
-	interrupt.RegisterCleaner(backend.Close)
-
-	envToken := os.Getenv("GITLAB_API_TOKEN")
-	if envToken == "" {
-		t.Skip("Env var GITLAB_API_TOKEN missing")
-	}
-
-	projectID := os.Getenv("GITLAB_PROJECT_ID")
-	if projectID == "" {
-		t.Skip("Env var GITLAB_PROJECT_ID missing")
-	}
-
 	login := "test-identity"
 	author.SetMetadata(metaKeyGitlabLogin, login)
 
@@ -138,29 +138,24 @@ func TestImport(t *testing.T) {
 			for i, op := range tt.bug.Operations {
 
 				require.IsType(t, ops[i], op)
+				require.Equal(t, op.Author().Name(), ops[i].Author().Name())
 
-				switch op.(type) {
+				switch op := op.(type) {
 				case *bug.CreateOperation:
-					assert.Equal(t, op.(*bug.CreateOperation).Title, ops[i].(*bug.CreateOperation).Title)
-					assert.Equal(t, op.(*bug.CreateOperation).Message, ops[i].(*bug.CreateOperation).Message)
-					assert.Equal(t, op.(*bug.CreateOperation).Author.Name(), ops[i].(*bug.CreateOperation).Author.Name())
+					require.Equal(t, op.Title, ops[i].(*bug.CreateOperation).Title)
+					require.Equal(t, op.Message, ops[i].(*bug.CreateOperation).Message)
 				case *bug.SetStatusOperation:
-					assert.Equal(t, op.(*bug.SetStatusOperation).Status, ops[i].(*bug.SetStatusOperation).Status)
-					assert.Equal(t, op.(*bug.SetStatusOperation).Author.Name(), ops[i].(*bug.SetStatusOperation).Author.Name())
+					require.Equal(t, op.Status, ops[i].(*bug.SetStatusOperation).Status)
 				case *bug.SetTitleOperation:
-					assert.Equal(t, op.(*bug.SetTitleOperation).Was, ops[i].(*bug.SetTitleOperation).Was)
-					assert.Equal(t, op.(*bug.SetTitleOperation).Title, ops[i].(*bug.SetTitleOperation).Title)
-					assert.Equal(t, op.(*bug.SetTitleOperation).Author.Name(), ops[i].(*bug.SetTitleOperation).Author.Name())
+					require.Equal(t, op.Was, ops[i].(*bug.SetTitleOperation).Was)
+					require.Equal(t, op.Title, ops[i].(*bug.SetTitleOperation).Title)
 				case *bug.LabelChangeOperation:
-					assert.ElementsMatch(t, op.(*bug.LabelChangeOperation).Added, ops[i].(*bug.LabelChangeOperation).Added)
-					assert.ElementsMatch(t, op.(*bug.LabelChangeOperation).Removed, ops[i].(*bug.LabelChangeOperation).Removed)
-					assert.Equal(t, op.(*bug.LabelChangeOperation).Author.Name(), ops[i].(*bug.LabelChangeOperation).Author.Name())
+					require.ElementsMatch(t, op.Added, ops[i].(*bug.LabelChangeOperation).Added)
+					require.ElementsMatch(t, op.Removed, ops[i].(*bug.LabelChangeOperation).Removed)
 				case *bug.AddCommentOperation:
-					assert.Equal(t, op.(*bug.AddCommentOperation).Message, ops[i].(*bug.AddCommentOperation).Message)
-					assert.Equal(t, op.(*bug.AddCommentOperation).Author.Name(), ops[i].(*bug.AddCommentOperation).Author.Name())
+					require.Equal(t, op.Message, ops[i].(*bug.AddCommentOperation).Message)
 				case *bug.EditCommentOperation:
-					assert.Equal(t, op.(*bug.EditCommentOperation).Message, ops[i].(*bug.EditCommentOperation).Message)
-					assert.Equal(t, op.(*bug.EditCommentOperation).Author.Name(), ops[i].(*bug.EditCommentOperation).Author.Name())
+					require.Equal(t, op.Message, ops[i].(*bug.EditCommentOperation).Message)
 
 				default:
 					panic("unknown operation type")

bridge/jira/export.go 🔗

@@ -309,7 +309,7 @@ func (je *jiraExporter) exportBug(ctx context.Context, b *cache.BugCache, out ch
 			continue
 		}
 
-		opAuthor := op.GetAuthor()
+		opAuthor := op.Author()
 		client, err := je.getClientForIdentity(opAuthor.Id())
 		if err != nil {
 			out <- core.NewExportError(

bridge/jira/import.go 🔗

@@ -198,6 +198,7 @@ func (ji *jiraImporter) ensurePerson(repo *cache.RepoCache, user User) (*cache.I
 		user.EmailAddress,
 		user.Key,
 		"",
+		nil,
 		map[string]string{
 			metaKeyJiraUser: user.Key,
 		},

bridge/launchpad/import.go 🔗

@@ -35,6 +35,7 @@ func (li *launchpadImporter) ensurePerson(repo *cache.RepoCache, owner LPPerson)
 		"",
 		owner.Login,
 		"",
+		nil,
 		map[string]string{
 			metaKeyLaunchpadLogin: owner.Login,
 		},

bug/bug.go 🔗

@@ -2,277 +2,62 @@
 package bug
 
 import (
-	"encoding/json"
 	"fmt"
-	"strings"
-
-	"github.com/pkg/errors"
 
 	"github.com/MichaelMure/git-bug/entity"
+	"github.com/MichaelMure/git-bug/entity/dag"
 	"github.com/MichaelMure/git-bug/identity"
 	"github.com/MichaelMure/git-bug/repository"
-	"github.com/MichaelMure/git-bug/util/lamport"
 )
 
-const bugsRefPattern = "refs/bugs/"
-const bugsRemoteRefPattern = "refs/remotes/%s/bugs/"
-
-const opsEntryName = "ops"
-const rootEntryName = "root"
-const mediaEntryName = "media"
-
-const createClockEntryPrefix = "create-clock-"
-const createClockEntryPattern = "create-clock-%d"
-const editClockEntryPrefix = "edit-clock-"
-const editClockEntryPattern = "edit-clock-%d"
-
-const creationClockName = "bug-create"
-const editClockName = "bug-edit"
-
-var ErrBugNotExist = errors.New("bug doesn't exist")
+var _ Interface = &Bug{}
+var _ entity.Interface = &Bug{}
 
-func NewErrMultipleMatchBug(matching []entity.Id) *entity.ErrMultipleMatch {
-	return entity.NewErrMultipleMatch("bug", matching)
-}
+// 1: original format
+// 2: no more legacy identities
+// 3: Ids are generated from the create operation serialized data instead of from the first git commit
+// 4: with DAG entity framework
+const formatVersion = 4
 
-func NewErrMultipleMatchOp(matching []entity.Id) *entity.ErrMultipleMatch {
-	return entity.NewErrMultipleMatch("operation", matching)
+var def = dag.Definition{
+	Typename:             "bug",
+	Namespace:            "bugs",
+	OperationUnmarshaler: operationUnmarshaller,
+	FormatVersion:        formatVersion,
 }
 
-var _ Interface = &Bug{}
-var _ entity.Interface = &Bug{}
+var ClockLoader = dag.ClockLoader(def)
 
 // Bug hold the data of a bug thread, organized in a way close to
 // how it will be persisted inside Git. This is the data structure
 // used to merge two different version of the same Bug.
 type Bug struct {
-
-	// A Lamport clock is a logical clock that allow to order event
-	// inside a distributed system.
-	// It must be the first field in this struct due to https://github.com/golang/go/issues/599
-	createTime lamport.Time
-	editTime   lamport.Time
-
-	// Id used as unique identifier
-	id entity.Id
-
-	lastCommit repository.Hash
-	rootPack   repository.Hash
-
-	// all the committed operations
-	packs []OperationPack
-
-	// a temporary pack of operations used for convenience to pile up new operations
-	// before a commit
-	staging OperationPack
+	*dag.Entity
 }
 
 // NewBug create a new Bug
 func NewBug() *Bug {
-	// No id yet
-	// No logical clock yet
-	return &Bug{}
-}
-
-// ReadLocal will read a local bug from its hash
-func ReadLocal(repo repository.ClockedRepo, id entity.Id) (*Bug, error) {
-	ref := bugsRefPattern + id.String()
-	return read(repo, identity.NewSimpleResolver(repo), ref)
-}
-
-// ReadLocalWithResolver will read a local bug from its hash
-func ReadLocalWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver, id entity.Id) (*Bug, error) {
-	ref := bugsRefPattern + id.String()
-	return read(repo, identityResolver, ref)
-}
-
-// ReadRemote will read a remote bug from its hash
-func ReadRemote(repo repository.ClockedRepo, remote string, id entity.Id) (*Bug, error) {
-	ref := fmt.Sprintf(bugsRemoteRefPattern, remote) + id.String()
-	return read(repo, identity.NewSimpleResolver(repo), ref)
-}
-
-// ReadRemoteWithResolver will read a remote bug from its hash
-func ReadRemoteWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver, remote string, id entity.Id) (*Bug, error) {
-	ref := fmt.Sprintf(bugsRemoteRefPattern, remote) + id.String()
-	return read(repo, identityResolver, ref)
-}
-
-// read will read and parse a Bug from git
-func read(repo repository.ClockedRepo, identityResolver identity.Resolver, ref string) (*Bug, error) {
-	refSplit := strings.Split(ref, "/")
-	id := entity.Id(refSplit[len(refSplit)-1])
-
-	if err := id.Validate(); err != nil {
-		return nil, errors.Wrap(err, "invalid ref ")
-	}
-
-	hashes, err := repo.ListCommits(ref)
-
-	// TODO: this is not perfect, it might be a command invoke error
-	if err != nil {
-		return nil, ErrBugNotExist
-	}
-
-	bug := Bug{
-		id:       id,
-		editTime: 0,
-	}
-
-	// Load each OperationPack
-	for _, hash := range hashes {
-		entries, err := repo.ReadTree(hash)
-		if err != nil {
-			return nil, errors.Wrap(err, "can't list git tree entries")
-		}
-
-		bug.lastCommit = hash
-
-		var opsEntry repository.TreeEntry
-		opsFound := false
-		var rootEntry repository.TreeEntry
-		rootFound := false
-		var createTime uint64
-		var editTime uint64
-
-		for _, entry := range entries {
-			if entry.Name == opsEntryName {
-				opsEntry = entry
-				opsFound = true
-				continue
-			}
-			if entry.Name == rootEntryName {
-				rootEntry = entry
-				rootFound = true
-			}
-			if strings.HasPrefix(entry.Name, createClockEntryPrefix) {
-				n, err := fmt.Sscanf(entry.Name, createClockEntryPattern, &createTime)
-				if err != nil {
-					return nil, errors.Wrap(err, "can't read create lamport time")
-				}
-				if n != 1 {
-					return nil, fmt.Errorf("could not parse create time lamport value")
-				}
-			}
-			if strings.HasPrefix(entry.Name, editClockEntryPrefix) {
-				n, err := fmt.Sscanf(entry.Name, editClockEntryPattern, &editTime)
-				if err != nil {
-					return nil, errors.Wrap(err, "can't read edit lamport time")
-				}
-				if n != 1 {
-					return nil, fmt.Errorf("could not parse edit time lamport value")
-				}
-			}
-		}
-
-		if !opsFound {
-			return nil, errors.New("invalid tree, missing the ops entry")
-		}
-		if !rootFound {
-			return nil, errors.New("invalid tree, missing the root entry")
-		}
-
-		if bug.rootPack == "" {
-			bug.rootPack = rootEntry.Hash
-			bug.createTime = lamport.Time(createTime)
-		}
-
-		// Due to rebase, edit Lamport time are not necessarily ordered
-		if editTime > uint64(bug.editTime) {
-			bug.editTime = lamport.Time(editTime)
-		}
-
-		// Update the clocks
-		createClock, err := repo.GetOrCreateClock(creationClockName)
-		if err != nil {
-			return nil, err
-		}
-		if err := createClock.Witness(bug.createTime); err != nil {
-			return nil, errors.Wrap(err, "failed to update create lamport clock")
-		}
-		editClock, err := repo.GetOrCreateClock(editClockName)
-		if err != nil {
-			return nil, err
-		}
-		if err := editClock.Witness(bug.editTime); err != nil {
-			return nil, errors.Wrap(err, "failed to update edit lamport clock")
-		}
-
-		data, err := repo.ReadData(opsEntry.Hash)
-		if err != nil {
-			return nil, errors.Wrap(err, "failed to read git blob data")
-		}
-
-		opp := &OperationPack{}
-		err = json.Unmarshal(data, &opp)
-
-		if err != nil {
-			return nil, errors.Wrap(err, "failed to decode OperationPack json")
-		}
-
-		// tag the pack with the commit hash
-		opp.commitHash = hash
-
-		bug.packs = append(bug.packs, *opp)
+	return &Bug{
+		Entity: dag.New(def),
 	}
+}
 
-	// Make sure that the identities are properly loaded
-	err = bug.EnsureIdentities(identityResolver)
+// Read will read a bug from a repository
+func Read(repo repository.ClockedRepo, id entity.Id) (*Bug, error) {
+	e, err := dag.Read(def, repo, identity.NewSimpleResolver(repo), id)
 	if err != nil {
 		return nil, err
 	}
-
-	return &bug, nil
+	return &Bug{Entity: e}, nil
 }
 
-// RemoveBug will remove a local bug from its entity.Id
-func RemoveBug(repo repository.ClockedRepo, id entity.Id) error {
-	var fullMatches []string
-
-	refs, err := repo.ListRefs(bugsRefPattern + id.String())
-	if err != nil {
-		return err
-	}
-	if len(refs) > 1 {
-		return NewErrMultipleMatchBug(entity.RefsToIds(refs))
-	}
-	if len(refs) == 1 {
-		// we have the bug locally
-		fullMatches = append(fullMatches, refs[0])
-	}
-
-	remotes, err := repo.GetRemotes()
+// ReadWithResolver will read a bug from its Id, with a custom identity.Resolver
+func ReadWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver, id entity.Id) (*Bug, error) {
+	e, err := dag.Read(def, repo, identityResolver, id)
 	if err != nil {
-		return err
-	}
-
-	for remote := range remotes {
-		remotePrefix := fmt.Sprintf(bugsRemoteRefPattern+id.String(), remote)
-		remoteRefs, err := repo.ListRefs(remotePrefix)
-		if err != nil {
-			return err
-		}
-		if len(remoteRefs) > 1 {
-			return NewErrMultipleMatchBug(entity.RefsToIds(refs))
-		}
-		if len(remoteRefs) == 1 {
-			// found the bug in a remote
-			fullMatches = append(fullMatches, remoteRefs[0])
-		}
-	}
-
-	if len(fullMatches) == 0 {
-		return ErrBugNotExist
-	}
-
-	for _, ref := range fullMatches {
-		err = repo.RemoveRef(ref)
-		if err != nil {
-			return err
-		}
+		return nil, err
 	}
-
-	return nil
+	return &Bug{Entity: e}, nil
 }
 
 type StreamedBug struct {
@@ -280,50 +65,33 @@ type StreamedBug struct {
 	Err error
 }
 
-// ReadAllLocal read and parse all local bugs
-func ReadAllLocal(repo repository.ClockedRepo) <-chan StreamedBug {
-	return readAll(repo, identity.NewSimpleResolver(repo), bugsRefPattern)
-}
-
-// ReadAllLocalWithResolver read and parse all local bugs
-func ReadAllLocalWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver) <-chan StreamedBug {
-	return readAll(repo, identityResolver, bugsRefPattern)
-}
-
-// ReadAllRemote read and parse all remote bugs for a given remote
-func ReadAllRemote(repo repository.ClockedRepo, remote string) <-chan StreamedBug {
-	refPrefix := fmt.Sprintf(bugsRemoteRefPattern, remote)
-	return readAll(repo, identity.NewSimpleResolver(repo), refPrefix)
+// ReadAll read and parse all local bugs
+func ReadAll(repo repository.ClockedRepo) <-chan StreamedBug {
+	return readAll(repo, identity.NewSimpleResolver(repo))
 }
 
-// ReadAllRemoteWithResolver read and parse all remote bugs for a given remote
-func ReadAllRemoteWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver, remote string) <-chan StreamedBug {
-	refPrefix := fmt.Sprintf(bugsRemoteRefPattern, remote)
-	return readAll(repo, identityResolver, refPrefix)
+// ReadAllWithResolver read and parse all local bugs
+func ReadAllWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver) <-chan StreamedBug {
+	return readAll(repo, identityResolver)
 }
 
 // Read and parse all available bug with a given ref prefix
-func readAll(repo repository.ClockedRepo, identityResolver identity.Resolver, refPrefix string) <-chan StreamedBug {
+func readAll(repo repository.ClockedRepo, identityResolver identity.Resolver) <-chan StreamedBug {
 	out := make(chan StreamedBug)
 
 	go func() {
 		defer close(out)
 
-		refs, err := repo.ListRefs(refPrefix)
-		if err != nil {
-			out <- StreamedBug{Err: err}
-			return
-		}
-
-		for _, ref := range refs {
-			b, err := read(repo, identityResolver, ref)
-
-			if err != nil {
-				out <- StreamedBug{Err: err}
-				return
+		for streamedEntity := range dag.ReadAll(def, repo, identityResolver) {
+			if streamedEntity.Err != nil {
+				out <- StreamedBug{
+					Err: streamedEntity.Err,
+				}
+			} else {
+				out <- StreamedBug{
+					Bug: &Bug{Entity: streamedEntity.Entity},
+				}
 			}
-
-			out <- StreamedBug{Bug: b}
 		}
 	}()
 
@@ -332,399 +100,78 @@ func readAll(repo repository.ClockedRepo, identityResolver identity.Resolver, re
 
 // ListLocalIds list all the available local bug ids
 func ListLocalIds(repo repository.Repo) ([]entity.Id, error) {
-	refs, err := repo.ListRefs(bugsRefPattern)
-	if err != nil {
-		return nil, err
-	}
-
-	return entity.RefsToIds(refs), nil
+	return dag.ListLocalIds(def, repo)
 }
 
 // Validate check if the Bug data is valid
 func (bug *Bug) Validate() error {
-	// non-empty
-	if len(bug.packs) == 0 && bug.staging.IsEmpty() {
-		return fmt.Errorf("bug has no operations")
-	}
-
-	// check if each pack and operations are valid
-	for _, pack := range bug.packs {
-		if err := pack.Validate(); err != nil {
-			return err
-		}
-	}
-
-	// check if staging is valid if needed
-	if !bug.staging.IsEmpty() {
-		if err := bug.staging.Validate(); err != nil {
-			return errors.Wrap(err, "staging")
-		}
+	if err := bug.Entity.Validate(); err != nil {
+		return err
 	}
 
 	// The very first Op should be a CreateOp
 	firstOp := bug.FirstOp()
-	if firstOp == nil || firstOp.base().OperationType != CreateOp {
+	if firstOp == nil || firstOp.Type() != CreateOp {
 		return fmt.Errorf("first operation should be a Create op")
 	}
 
-	// The bug Id should be the hash of the first commit
-	if len(bug.packs) > 0 && string(bug.packs[0].commitHash) != bug.id.String() {
-		return fmt.Errorf("bug id should be the first commit hash")
-	}
-
 	// Check that there is no more CreateOp op
-	// Check that there is no colliding operation's ID
-	it := NewOperationIterator(bug)
-	createCount := 0
-	ids := make(map[entity.Id]struct{})
-	for it.Next() {
-		if it.Value().base().OperationType == CreateOp {
-			createCount++
+	for i, op := range bug.Operations() {
+		if i == 0 {
+			continue
 		}
-		if _, ok := ids[it.Value().Id()]; ok {
-			return fmt.Errorf("id collision: %s", it.Value().Id())
+		if op.Type() == CreateOp {
+			return fmt.Errorf("only one Create op allowed")
 		}
-		ids[it.Value().Id()] = struct{}{}
-	}
-
-	if createCount != 1 {
-		return fmt.Errorf("only one Create op allowed")
 	}
 
 	return nil
 }
 
-// Append an operation into the staging area, to be committed later
+// Append add a new Operation to the Bug
 func (bug *Bug) Append(op Operation) {
-	bug.staging.Append(op)
+	bug.Entity.Append(op)
 }
 
-// Commit write the staging area in Git and move the operations to the packs
-func (bug *Bug) Commit(repo repository.ClockedRepo) error {
-
-	if !bug.NeedCommit() {
-		return fmt.Errorf("can't commit a bug with no pending operation")
-	}
-
-	if err := bug.Validate(); err != nil {
-		return errors.Wrap(err, "can't commit a bug with invalid data")
+// Operations return the ordered operations
+func (bug *Bug) Operations() []Operation {
+	source := bug.Entity.Operations()
+	result := make([]Operation, len(source))
+	for i, op := range source {
+		result[i] = op.(Operation)
 	}
-
-	// Write the Ops as a Git blob containing the serialized array
-	hash, err := bug.staging.Write(repo)
-	if err != nil {
-		return err
-	}
-
-	if bug.rootPack == "" {
-		bug.rootPack = hash
-	}
-
-	// Make a Git tree referencing this blob
-	tree := []repository.TreeEntry{
-		// the last pack of ops
-		{ObjectType: repository.Blob, Hash: hash, Name: opsEntryName},
-		// always the first pack of ops (might be the same)
-		{ObjectType: repository.Blob, Hash: bug.rootPack, Name: rootEntryName},
-	}
-
-	// Reference, if any, all the files required by the ops
-	// Git will check that they actually exist in the storage and will make sure
-	// to push/pull them as needed.
-	mediaTree := makeMediaTree(bug.staging)
-	if len(mediaTree) > 0 {
-		mediaTreeHash, err := repo.StoreTree(mediaTree)
-		if err != nil {
-			return err
-		}
-		tree = append(tree, repository.TreeEntry{
-			ObjectType: repository.Tree,
-			Hash:       mediaTreeHash,
-			Name:       mediaEntryName,
-		})
-	}
-
-	// Store the logical clocks as well
-	// --> edit clock for each OperationPack/commits
-	// --> create clock only for the first OperationPack/commits
-	//
-	// To avoid having one blob for each clock value, clocks are serialized
-	// directly into the entry name
-	emptyBlobHash, err := repo.StoreData([]byte{})
-	if err != nil {
-		return err
-	}
-
-	editClock, err := repo.GetOrCreateClock(editClockName)
-	if err != nil {
-		return err
-	}
-	bug.editTime, err = editClock.Increment()
-	if err != nil {
-		return err
-	}
-
-	tree = append(tree, repository.TreeEntry{
-		ObjectType: repository.Blob,
-		Hash:       emptyBlobHash,
-		Name:       fmt.Sprintf(editClockEntryPattern, bug.editTime),
-	})
-	if bug.lastCommit == "" {
-		createClock, err := repo.GetOrCreateClock(creationClockName)
-		if err != nil {
-			return err
-		}
-		bug.createTime, err = createClock.Increment()
-		if err != nil {
-			return err
-		}
-
-		tree = append(tree, repository.TreeEntry{
-			ObjectType: repository.Blob,
-			Hash:       emptyBlobHash,
-			Name:       fmt.Sprintf(createClockEntryPattern, bug.createTime),
-		})
-	}
-
-	// Store the tree
-	hash, err = repo.StoreTree(tree)
-	if err != nil {
-		return err
-	}
-
-	// Write a Git commit referencing the tree, with the previous commit as parent
-	if bug.lastCommit != "" {
-		hash, err = repo.StoreCommitWithParent(hash, bug.lastCommit)
-	} else {
-		hash, err = repo.StoreCommit(hash)
-	}
-
-	if err != nil {
-		return err
-	}
-
-	bug.lastCommit = hash
-
-	// if it was the first commit, use the commit hash as bug id
-	if bug.id == "" {
-		bug.id = entity.Id(hash)
-	}
-
-	// Create or update the Git reference for this bug
-	// When pushing later, the remote will ensure that this ref update
-	// is fast-forward, that is no data has been overwritten
-	ref := fmt.Sprintf("%s%s", bugsRefPattern, bug.id)
-	err = repo.UpdateRef(ref, hash)
-
-	if err != nil {
-		return err
-	}
-
-	bug.staging.commitHash = hash
-	bug.packs = append(bug.packs, bug.staging)
-	bug.staging = OperationPack{}
-
-	return nil
-}
-
-func (bug *Bug) CommitAsNeeded(repo repository.ClockedRepo) error {
-	if !bug.NeedCommit() {
-		return nil
-	}
-	return bug.Commit(repo)
-}
-
-func (bug *Bug) NeedCommit() bool {
-	return !bug.staging.IsEmpty()
+	return result
 }
 
-func makeMediaTree(pack OperationPack) []repository.TreeEntry {
-	var tree []repository.TreeEntry
-	counter := 0
-	added := make(map[repository.Hash]interface{})
-
-	for _, ops := range pack.Operations {
-		for _, file := range ops.GetFiles() {
-			if _, has := added[file]; !has {
-				tree = append(tree, repository.TreeEntry{
-					ObjectType: repository.Blob,
-					Hash:       file,
-					// The name is not important here, we only need to
-					// reference the blob.
-					Name: fmt.Sprintf("file%d", counter),
-				})
-				counter++
-				added[file] = struct{}{}
-			}
-		}
-	}
-
-	return tree
-}
-
-// Merge a different version of the same bug by rebasing operations of this bug
-// that are not present in the other on top of the chain of operations of the
-// other version.
-func (bug *Bug) Merge(repo repository.Repo, other Interface) (bool, error) {
-	var otherBug = bugFromInterface(other)
-
-	// Note: a faster merge should be possible without actually reading and parsing
-	// all operations pack of our side.
-	// Reading the other side is still necessary to validate remote data, at least
-	// for new operations
-
-	if bug.id != otherBug.id {
-		return false, errors.New("merging unrelated bugs is not supported")
-	}
-
-	if len(otherBug.staging.Operations) > 0 {
-		return false, errors.New("merging a bug with a non-empty staging is not supported")
-	}
-
-	if bug.lastCommit == "" || otherBug.lastCommit == "" {
-		return false, errors.New("can't merge a bug that has never been stored")
-	}
-
-	ancestor, err := repo.FindCommonAncestor(bug.lastCommit, otherBug.lastCommit)
-	if err != nil {
-		return false, errors.Wrap(err, "can't find common ancestor")
-	}
-
-	ancestorIndex := 0
-	newPacks := make([]OperationPack, 0, len(bug.packs))
-
-	// Find the root of the rebase
-	for i, pack := range bug.packs {
-		newPacks = append(newPacks, pack)
-
-		if pack.commitHash == ancestor {
-			ancestorIndex = i
-			break
-		}
-	}
-
-	if len(otherBug.packs) == ancestorIndex+1 {
-		// Nothing to rebase, return early
-		return false, nil
-	}
-
-	// get other bug's extra packs
-	for i := ancestorIndex + 1; i < len(otherBug.packs); i++ {
-		// clone is probably not necessary
-		newPack := otherBug.packs[i].Clone()
-
-		newPacks = append(newPacks, newPack)
-		bug.lastCommit = newPack.commitHash
-	}
-
-	// rebase our extra packs
-	for i := ancestorIndex + 1; i < len(bug.packs); i++ {
-		pack := bug.packs[i]
-
-		// get the referenced git tree
-		treeHash, err := repo.GetTreeHash(pack.commitHash)
-
-		if err != nil {
-			return false, err
-		}
-
-		// create a new commit with the correct ancestor
-		hash, err := repo.StoreCommitWithParent(treeHash, bug.lastCommit)
-
-		if err != nil {
-			return false, err
-		}
-
-		// replace the pack
-		newPack := pack.Clone()
-		newPack.commitHash = hash
-		newPacks = append(newPacks, newPack)
-
-		// update the bug
-		bug.lastCommit = hash
-	}
-
-	bug.packs = newPacks
-
-	// Update the git ref
-	err = repo.UpdateRef(bugsRefPattern+bug.id.String(), bug.lastCommit)
-	if err != nil {
-		return false, err
+// Compile a bug into an easily usable snapshot
+func (bug *Bug) Compile() Snapshot {
+	snap := Snapshot{
+		id:     bug.Id(),
+		Status: OpenStatus,
 	}
 
-	return true, nil
-}
-
-// Id return the Bug identifier
-func (bug *Bug) Id() entity.Id {
-	if bug.id == "" {
-		// simply panic as it would be a coding error
-		// (using an id of a bug not stored yet)
-		panic("no id yet")
+	for _, op := range bug.Operations() {
+		op.Apply(&snap)
+		snap.Operations = append(snap.Operations, op)
 	}
-	return bug.id
-}
-
-// CreateLamportTime return the Lamport time of creation
-func (bug *Bug) CreateLamportTime() lamport.Time {
-	return bug.createTime
-}
 
-// EditLamportTime return the Lamport time of the last edit
-func (bug *Bug) EditLamportTime() lamport.Time {
-	return bug.editTime
+	return snap
 }
 
 // Lookup for the very first operation of the bug.
 // For a valid Bug, this operation should be a CreateOp
 func (bug *Bug) FirstOp() Operation {
-	for _, pack := range bug.packs {
-		for _, op := range pack.Operations {
-			return op
-		}
-	}
-
-	if !bug.staging.IsEmpty() {
-		return bug.staging.Operations[0]
+	if fo := bug.Entity.FirstOp(); fo != nil {
+		return fo.(Operation)
 	}
-
 	return nil
 }
 
 // Lookup for the very last operation of the bug.
 // For a valid Bug, should never be nil
 func (bug *Bug) LastOp() Operation {
-	if !bug.staging.IsEmpty() {
-		return bug.staging.Operations[len(bug.staging.Operations)-1]
-	}
-
-	if len(bug.packs) == 0 {
-		return nil
-	}
-
-	lastPack := bug.packs[len(bug.packs)-1]
-
-	if len(lastPack.Operations) == 0 {
-		return nil
-	}
-
-	return lastPack.Operations[len(lastPack.Operations)-1]
-}
-
-// Compile a bug in a easily usable snapshot
-func (bug *Bug) Compile() Snapshot {
-	snap := Snapshot{
-		id:     bug.id,
-		Status: OpenStatus,
+	if lo := bug.Entity.LastOp(); lo != nil {
+		return lo.(Operation)
 	}
-
-	it := NewOperationIterator(bug)
-
-	for it.Next() {
-		op := it.Value()
-		op.Apply(&snap)
-		snap.Operations = append(snap.Operations, op)
-	}
-
-	return snap
+	return nil
 }

bug/bug_actions.go 🔗

@@ -1,42 +1,34 @@
 package bug
 
 import (
-	"fmt"
-	"strings"
+	"github.com/pkg/errors"
 
 	"github.com/MichaelMure/git-bug/entity"
+	"github.com/MichaelMure/git-bug/entity/dag"
 	"github.com/MichaelMure/git-bug/identity"
 	"github.com/MichaelMure/git-bug/repository"
-	"github.com/pkg/errors"
 )
 
 // Fetch retrieve updates from a remote
 // This does not change the local bugs state
 func Fetch(repo repository.Repo, remote string) (string, error) {
-	// "refs/bugs/*:refs/remotes/<remote>>/bugs/*"
-	remoteRefSpec := fmt.Sprintf(bugsRemoteRefPattern, remote)
-	fetchRefSpec := fmt.Sprintf("%s*:%s*", bugsRefPattern, remoteRefSpec)
-
-	return repo.FetchRefs(remote, fetchRefSpec)
+	return dag.Fetch(def, repo, remote)
 }
 
 // Push update a remote with the local changes
 func Push(repo repository.Repo, remote string) (string, error) {
-	// "refs/bugs/*:refs/bugs/*"
-	refspec := fmt.Sprintf("%s*:%s*", bugsRefPattern, bugsRefPattern)
-
-	return repo.PushRefs(remote, refspec)
+	return dag.Push(def, repo, remote)
 }
 
 // Pull will do a Fetch + MergeAll
 // This function will return an error if a merge fail
-func Pull(repo repository.ClockedRepo, remote string) error {
+func Pull(repo repository.ClockedRepo, remote string, author identity.Interface) error {
 	_, err := Fetch(repo, remote)
 	if err != nil {
 		return err
 	}
 
-	for merge := range MergeAll(repo, remote) {
+	for merge := range MergeAll(repo, remote, author) {
 		if merge.Err != nil {
 			return merge.Err
 		}
@@ -48,96 +40,38 @@ func Pull(repo repository.ClockedRepo, remote string) error {
 	return nil
 }
 
-// MergeAll will merge all the available remote bug:
-//
-// - If the remote has new commit, the local bug is updated to match the same history
-//   (fast-forward update)
-// - if the local bug has new commits but the remote don't, nothing is changed
-// - if both local and remote bug have new commits (that is, we have a concurrent edition),
-//   new local commits are rewritten at the head of the remote history (that is, a rebase)
-func MergeAll(repo repository.ClockedRepo, remote string) <-chan entity.MergeResult {
-	out := make(chan entity.MergeResult)
-
+// MergeAll will merge all the available remote bugs
+// Note: an author is necessary for the case where a merge commit is created, as this commit will
+// have an author and may be signed if a signing key is available.
+func MergeAll(repo repository.ClockedRepo, remote string, author identity.Interface) <-chan entity.MergeResult {
 	// no caching for the merge, we load everything from git even if that means multiple
 	// copy of the same entity in memory. The cache layer will intercept the results to
 	// invalidate entities if necessary.
 	identityResolver := identity.NewSimpleResolver(repo)
 
+	out := make(chan entity.MergeResult)
+
 	go func() {
 		defer close(out)
 
-		remoteRefSpec := fmt.Sprintf(bugsRemoteRefPattern, remote)
-		remoteRefs, err := repo.ListRefs(remoteRefSpec)
+		results := dag.MergeAll(def, repo, identityResolver, remote, author)
 
-		if err != nil {
-			out <- entity.MergeResult{Err: err}
-			return
-		}
-
-		for _, remoteRef := range remoteRefs {
-			refSplit := strings.Split(remoteRef, "/")
-			id := entity.Id(refSplit[len(refSplit)-1])
-
-			if err := id.Validate(); err != nil {
-				out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "invalid ref").Error())
-				continue
-			}
-
-			remoteBug, err := read(repo, identityResolver, remoteRef)
-
-			if err != nil {
-				out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "remote bug is not readable").Error())
-				continue
-			}
-
-			// Check for error in remote data
-			if err := remoteBug.Validate(); err != nil {
-				out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "remote bug is invalid").Error())
-				continue
-			}
-
-			localRef := bugsRefPattern + remoteBug.Id().String()
-			localExist, err := repo.RefExist(localRef)
-
-			if err != nil {
-				out <- entity.NewMergeError(err, id)
-				continue
-			}
-
-			// the bug is not local yet, simply create the reference
-			if !localExist {
-				err := repo.CopyRef(remoteRef, localRef)
-
-				if err != nil {
-					out <- entity.NewMergeError(err, id)
-					return
+		// wrap the dag.Entity into a complete Bug
+		for result := range results {
+			result := result
+			if result.Entity != nil {
+				result.Entity = &Bug{
+					Entity: result.Entity.(*dag.Entity),
 				}
-
-				out <- entity.NewMergeStatus(entity.MergeStatusNew, id, remoteBug)
-				continue
-			}
-
-			localBug, err := read(repo, identityResolver, localRef)
-
-			if err != nil {
-				out <- entity.NewMergeError(errors.Wrap(err, "local bug is not readable"), id)
-				return
-			}
-
-			updated, err := localBug.Merge(repo, remoteBug)
-
-			if err != nil {
-				out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "merge failed").Error())
-				return
-			}
-
-			if updated {
-				out <- entity.NewMergeStatus(entity.MergeStatusUpdated, id, localBug)
-			} else {
-				out <- entity.NewMergeStatus(entity.MergeStatusNothing, id, localBug)
 			}
+			out <- result
 		}
 	}()
 
 	return out
 }
+
+// RemoveBug will remove a local bug given its entity.Id
+func RemoveBug(repo repository.ClockedRepo, id entity.Id) error {
+	return dag.Remove(def, repo, id)
+}

bug/bug_actions_test.go 🔗

@@ -1,390 +0,0 @@
-package bug
-
-import (
-	"testing"
-	"time"
-
-	"github.com/stretchr/testify/assert"
-	"github.com/stretchr/testify/require"
-
-	"github.com/MichaelMure/git-bug/identity"
-	"github.com/MichaelMure/git-bug/repository"
-)
-
-func TestPushPull(t *testing.T) {
-	repoA, repoB, remote := repository.SetupReposAndRemote()
-	defer repository.CleanupTestRepos(repoA, repoB, remote)
-
-	reneA := identity.NewIdentity("René Descartes", "rene@descartes.fr")
-	err := reneA.Commit(repoA)
-	require.NoError(t, err)
-
-	bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message")
-	require.NoError(t, err)
-	assert.True(t, bug1.NeedCommit())
-	err = bug1.Commit(repoA)
-	require.NoError(t, err)
-	assert.False(t, bug1.NeedCommit())
-
-	// distribute the identity
-	_, err = identity.Push(repoA, "origin")
-	require.NoError(t, err)
-	err = identity.Pull(repoB, "origin")
-	require.NoError(t, err)
-
-	// A --> remote --> B
-	_, err = Push(repoA, "origin")
-	require.NoError(t, err)
-
-	err = Pull(repoB, "origin")
-	require.NoError(t, err)
-
-	bugs := allBugs(t, ReadAllLocal(repoB))
-
-	if len(bugs) != 1 {
-		t.Fatal("Unexpected number of bugs")
-	}
-
-	// B --> remote --> A
-	reneB, err := identity.ReadLocal(repoA, reneA.Id())
-	require.NoError(t, err)
-
-	bug2, _, err := Create(reneB, time.Now().Unix(), "bug2", "message")
-	require.NoError(t, err)
-	err = bug2.Commit(repoB)
-	require.NoError(t, err)
-
-	_, err = Push(repoB, "origin")
-	require.NoError(t, err)
-
-	err = Pull(repoA, "origin")
-	require.NoError(t, err)
-
-	bugs = allBugs(t, ReadAllLocal(repoA))
-
-	if len(bugs) != 2 {
-		t.Fatal("Unexpected number of bugs")
-	}
-}
-
-func allBugs(t testing.TB, bugs <-chan StreamedBug) []*Bug {
-	var result []*Bug
-	for streamed := range bugs {
-		if streamed.Err != nil {
-			t.Fatal(streamed.Err)
-		}
-		result = append(result, streamed.Bug)
-	}
-	return result
-}
-
-func TestRebaseTheirs(t *testing.T) {
-	_RebaseTheirs(t)
-}
-
-func BenchmarkRebaseTheirs(b *testing.B) {
-	for n := 0; n < b.N; n++ {
-		_RebaseTheirs(b)
-	}
-}
-
-func _RebaseTheirs(t testing.TB) {
-	repoA, repoB, remote := repository.SetupReposAndRemote()
-	defer repository.CleanupTestRepos(repoA, repoB, remote)
-
-	reneA := identity.NewIdentity("René Descartes", "rene@descartes.fr")
-	err := reneA.Commit(repoA)
-	require.NoError(t, err)
-
-	bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message")
-	require.NoError(t, err)
-	assert.True(t, bug1.NeedCommit())
-	err = bug1.Commit(repoA)
-	require.NoError(t, err)
-	assert.False(t, bug1.NeedCommit())
-
-	// distribute the identity
-	_, err = identity.Push(repoA, "origin")
-	require.NoError(t, err)
-	err = identity.Pull(repoB, "origin")
-	require.NoError(t, err)
-
-	// A --> remote
-
-	_, err = Push(repoA, "origin")
-	require.NoError(t, err)
-
-	// remote --> B
-	err = Pull(repoB, "origin")
-	require.NoError(t, err)
-
-	bug2, err := ReadLocal(repoB, bug1.Id())
-	require.NoError(t, err)
-	assert.False(t, bug2.NeedCommit())
-
-	reneB, err := identity.ReadLocal(repoA, reneA.Id())
-	require.NoError(t, err)
-
-	_, err = AddComment(bug2, reneB, time.Now().Unix(), "message2")
-	require.NoError(t, err)
-	assert.True(t, bug2.NeedCommit())
-	_, err = AddComment(bug2, reneB, time.Now().Unix(), "message3")
-	require.NoError(t, err)
-	_, err = AddComment(bug2, reneB, time.Now().Unix(), "message4")
-	require.NoError(t, err)
-	err = bug2.Commit(repoB)
-	require.NoError(t, err)
-	assert.False(t, bug2.NeedCommit())
-
-	// B --> remote
-	_, err = Push(repoB, "origin")
-	require.NoError(t, err)
-
-	// remote --> A
-	err = Pull(repoA, "origin")
-	require.NoError(t, err)
-
-	bugs := allBugs(t, ReadAllLocal(repoB))
-
-	if len(bugs) != 1 {
-		t.Fatal("Unexpected number of bugs")
-	}
-
-	bug3, err := ReadLocal(repoA, bug1.Id())
-	require.NoError(t, err)
-
-	if nbOps(bug3) != 4 {
-		t.Fatal("Unexpected number of operations")
-	}
-}
-
-func TestRebaseOurs(t *testing.T) {
-	_RebaseOurs(t)
-}
-
-func BenchmarkRebaseOurs(b *testing.B) {
-	for n := 0; n < b.N; n++ {
-		_RebaseOurs(b)
-	}
-}
-
-func _RebaseOurs(t testing.TB) {
-	repoA, repoB, remote := repository.SetupReposAndRemote()
-	defer repository.CleanupTestRepos(repoA, repoB, remote)
-
-	reneA := identity.NewIdentity("René Descartes", "rene@descartes.fr")
-	err := reneA.Commit(repoA)
-	require.NoError(t, err)
-
-	bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message")
-	require.NoError(t, err)
-	err = bug1.Commit(repoA)
-	require.NoError(t, err)
-
-	// distribute the identity
-	_, err = identity.Push(repoA, "origin")
-	require.NoError(t, err)
-	err = identity.Pull(repoB, "origin")
-	require.NoError(t, err)
-
-	// A --> remote
-	_, err = Push(repoA, "origin")
-	require.NoError(t, err)
-
-	// remote --> B
-	err = Pull(repoB, "origin")
-	require.NoError(t, err)
-
-	_, err = AddComment(bug1, reneA, time.Now().Unix(), "message2")
-	require.NoError(t, err)
-	_, err = AddComment(bug1, reneA, time.Now().Unix(), "message3")
-	require.NoError(t, err)
-	_, err = AddComment(bug1, reneA, time.Now().Unix(), "message4")
-	require.NoError(t, err)
-	err = bug1.Commit(repoA)
-	require.NoError(t, err)
-
-	_, err = AddComment(bug1, reneA, time.Now().Unix(), "message5")
-	require.NoError(t, err)
-	_, err = AddComment(bug1, reneA, time.Now().Unix(), "message6")
-	require.NoError(t, err)
-	_, err = AddComment(bug1, reneA, time.Now().Unix(), "message7")
-	require.NoError(t, err)
-	err = bug1.Commit(repoA)
-	require.NoError(t, err)
-
-	_, err = AddComment(bug1, reneA, time.Now().Unix(), "message8")
-	require.NoError(t, err)
-	_, err = AddComment(bug1, reneA, time.Now().Unix(), "message9")
-	require.NoError(t, err)
-	_, err = AddComment(bug1, reneA, time.Now().Unix(), "message10")
-	require.NoError(t, err)
-	err = bug1.Commit(repoA)
-	require.NoError(t, err)
-
-	// remote --> A
-	err = Pull(repoA, "origin")
-	require.NoError(t, err)
-
-	bugs := allBugs(t, ReadAllLocal(repoA))
-
-	if len(bugs) != 1 {
-		t.Fatal("Unexpected number of bugs")
-	}
-
-	bug2, err := ReadLocal(repoA, bug1.Id())
-	require.NoError(t, err)
-
-	if nbOps(bug2) != 10 {
-		t.Fatal("Unexpected number of operations")
-	}
-}
-
-func nbOps(b *Bug) int {
-	it := NewOperationIterator(b)
-	counter := 0
-	for it.Next() {
-		counter++
-	}
-	return counter
-}
-
-func TestRebaseConflict(t *testing.T) {
-	_RebaseConflict(t)
-}
-
-func BenchmarkRebaseConflict(b *testing.B) {
-	for n := 0; n < b.N; n++ {
-		_RebaseConflict(b)
-	}
-}
-
-func _RebaseConflict(t testing.TB) {
-	repoA, repoB, remote := repository.SetupReposAndRemote()
-	defer repository.CleanupTestRepos(repoA, repoB, remote)
-
-	reneA := identity.NewIdentity("René Descartes", "rene@descartes.fr")
-	err := reneA.Commit(repoA)
-	require.NoError(t, err)
-
-	bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message")
-	require.NoError(t, err)
-	err = bug1.Commit(repoA)
-	require.NoError(t, err)
-
-	// distribute the identity
-	_, err = identity.Push(repoA, "origin")
-	require.NoError(t, err)
-	err = identity.Pull(repoB, "origin")
-	require.NoError(t, err)
-
-	// A --> remote
-	_, err = Push(repoA, "origin")
-	require.NoError(t, err)
-
-	// remote --> B
-	err = Pull(repoB, "origin")
-	require.NoError(t, err)
-
-	_, err = AddComment(bug1, reneA, time.Now().Unix(), "message2")
-	require.NoError(t, err)
-	_, err = AddComment(bug1, reneA, time.Now().Unix(), "message3")
-	require.NoError(t, err)
-	_, err = AddComment(bug1, reneA, time.Now().Unix(), "message4")
-	require.NoError(t, err)
-	err = bug1.Commit(repoA)
-	require.NoError(t, err)
-
-	_, err = AddComment(bug1, reneA, time.Now().Unix(), "message5")
-	require.NoError(t, err)
-	_, err = AddComment(bug1, reneA, time.Now().Unix(), "message6")
-	require.NoError(t, err)
-	_, err = AddComment(bug1, reneA, time.Now().Unix(), "message7")
-	require.NoError(t, err)
-	err = bug1.Commit(repoA)
-	require.NoError(t, err)
-
-	_, err = AddComment(bug1, reneA, time.Now().Unix(), "message8")
-	require.NoError(t, err)
-	_, err = AddComment(bug1, reneA, time.Now().Unix(), "message9")
-	require.NoError(t, err)
-	_, err = AddComment(bug1, reneA, time.Now().Unix(), "message10")
-	require.NoError(t, err)
-	err = bug1.Commit(repoA)
-	require.NoError(t, err)
-
-	bug2, err := ReadLocal(repoB, bug1.Id())
-	require.NoError(t, err)
-
-	reneB, err := identity.ReadLocal(repoA, reneA.Id())
-	require.NoError(t, err)
-
-	_, err = AddComment(bug2, reneB, time.Now().Unix(), "message11")
-	require.NoError(t, err)
-	_, err = AddComment(bug2, reneB, time.Now().Unix(), "message12")
-	require.NoError(t, err)
-	_, err = AddComment(bug2, reneB, time.Now().Unix(), "message13")
-	require.NoError(t, err)
-	err = bug2.Commit(repoB)
-	require.NoError(t, err)
-
-	_, err = AddComment(bug2, reneB, time.Now().Unix(), "message14")
-	require.NoError(t, err)
-	_, err = AddComment(bug2, reneB, time.Now().Unix(), "message15")
-	require.NoError(t, err)
-	_, err = AddComment(bug2, reneB, time.Now().Unix(), "message16")
-	require.NoError(t, err)
-	err = bug2.Commit(repoB)
-	require.NoError(t, err)
-
-	_, err = AddComment(bug2, reneB, time.Now().Unix(), "message17")
-	require.NoError(t, err)
-	_, err = AddComment(bug2, reneB, time.Now().Unix(), "message18")
-	require.NoError(t, err)
-	_, err = AddComment(bug2, reneB, time.Now().Unix(), "message19")
-	require.NoError(t, err)
-	err = bug2.Commit(repoB)
-	require.NoError(t, err)
-
-	// A --> remote
-	_, err = Push(repoA, "origin")
-	require.NoError(t, err)
-
-	// remote --> B
-	err = Pull(repoB, "origin")
-	require.NoError(t, err)
-
-	bugs := allBugs(t, ReadAllLocal(repoB))
-
-	if len(bugs) != 1 {
-		t.Fatal("Unexpected number of bugs")
-	}
-
-	bug3, err := ReadLocal(repoB, bug1.Id())
-	require.NoError(t, err)
-
-	if nbOps(bug3) != 19 {
-		t.Fatal("Unexpected number of operations")
-	}
-
-	// B --> remote
-	_, err = Push(repoB, "origin")
-	require.NoError(t, err)
-
-	// remote --> A
-	err = Pull(repoA, "origin")
-	require.NoError(t, err)
-
-	bugs = allBugs(t, ReadAllLocal(repoA))
-
-	if len(bugs) != 1 {
-		t.Fatal("Unexpected number of bugs")
-	}
-
-	bug4, err := ReadLocal(repoA, bug1.Id())
-	require.NoError(t, err)
-
-	if nbOps(bug4) != 19 {
-		t.Fatal("Unexpected number of operations")
-	}
-}

bug/bug_test.go 🔗

@@ -1,186 +0,0 @@
-package bug
-
-import (
-	"fmt"
-	"testing"
-	"time"
-
-	"github.com/stretchr/testify/require"
-
-	"github.com/MichaelMure/git-bug/identity"
-	"github.com/MichaelMure/git-bug/repository"
-)
-
-func TestBugId(t *testing.T) {
-	mockRepo := repository.NewMockRepoForTest()
-
-	bug1 := NewBug()
-
-	rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
-	err := rene.Commit(mockRepo)
-	require.NoError(t, err)
-
-	createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil)
-
-	bug1.Append(createOp)
-
-	err = bug1.Commit(mockRepo)
-
-	if err != nil {
-		t.Fatal(err)
-	}
-
-	bug1.Id()
-}
-
-func TestBugValidity(t *testing.T) {
-	mockRepo := repository.NewMockRepoForTest()
-
-	bug1 := NewBug()
-
-	rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
-	err := rene.Commit(mockRepo)
-	require.NoError(t, err)
-
-	createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil)
-
-	if bug1.Validate() == nil {
-		t.Fatal("Empty bug should be invalid")
-	}
-
-	bug1.Append(createOp)
-
-	if bug1.Validate() != nil {
-		t.Fatal("Bug with just a CreateOp should be valid")
-	}
-
-	err = bug1.Commit(mockRepo)
-	if err != nil {
-		t.Fatal(err)
-	}
-
-	bug1.Append(createOp)
-
-	if bug1.Validate() == nil {
-		t.Fatal("Bug with multiple CreateOp should be invalid")
-	}
-
-	err = bug1.Commit(mockRepo)
-	if err == nil {
-		t.Fatal("Invalid bug should not commit")
-	}
-}
-
-func TestBugCommitLoad(t *testing.T) {
-	repo := repository.NewMockRepoForTest()
-
-	bug1 := NewBug()
-
-	rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
-	err := rene.Commit(repo)
-	require.NoError(t, err)
-
-	createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil)
-	setTitleOp := NewSetTitleOp(rene, time.Now().Unix(), "title2", "title1")
-	addCommentOp := NewAddCommentOp(rene, time.Now().Unix(), "message2", nil)
-
-	bug1.Append(createOp)
-	bug1.Append(setTitleOp)
-
-	require.True(t, bug1.NeedCommit())
-
-	err = bug1.Commit(repo)
-	require.Nil(t, err)
-	require.False(t, bug1.NeedCommit())
-
-	bug2, err := ReadLocal(repo, bug1.Id())
-	require.NoError(t, err)
-	equivalentBug(t, bug1, bug2)
-
-	// add more op
-
-	bug1.Append(addCommentOp)
-
-	require.True(t, bug1.NeedCommit())
-
-	err = bug1.Commit(repo)
-	require.Nil(t, err)
-	require.False(t, bug1.NeedCommit())
-
-	bug3, err := ReadLocal(repo, bug1.Id())
-	require.NoError(t, err)
-	equivalentBug(t, bug1, bug3)
-}
-
-func equivalentBug(t *testing.T, expected, actual *Bug) {
-	require.Equal(t, len(expected.packs), len(actual.packs))
-
-	for i := range expected.packs {
-		for j := range expected.packs[i].Operations {
-			actual.packs[i].Operations[j].base().id = expected.packs[i].Operations[j].base().id
-		}
-	}
-
-	require.Equal(t, expected, actual)
-}
-
-func TestBugRemove(t *testing.T) {
-	repo := repository.CreateGoGitTestRepo(false)
-	remoteA := repository.CreateGoGitTestRepo(true)
-	remoteB := repository.CreateGoGitTestRepo(true)
-	defer repository.CleanupTestRepos(repo, remoteA, remoteB)
-
-	err := repo.AddRemote("remoteA", remoteA.GetLocalRemote())
-	require.NoError(t, err)
-
-	err = repo.AddRemote("remoteB", remoteB.GetLocalRemote())
-	require.NoError(t, err)
-
-	// generate a bunch of bugs
-	rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
-	err = rene.Commit(repo)
-	require.NoError(t, err)
-
-	for i := 0; i < 100; i++ {
-		b := NewBug()
-		createOp := NewCreateOp(rene, time.Now().Unix(), "title", fmt.Sprintf("message%v", i), nil)
-		b.Append(createOp)
-		err = b.Commit(repo)
-		require.NoError(t, err)
-	}
-
-	// and one more for testing
-	b := NewBug()
-	createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil)
-	b.Append(createOp)
-	err = b.Commit(repo)
-	require.NoError(t, err)
-
-	_, err = Push(repo, "remoteA")
-	require.NoError(t, err)
-
-	_, err = Push(repo, "remoteB")
-	require.NoError(t, err)
-
-	_, err = Fetch(repo, "remoteA")
-	require.NoError(t, err)
-
-	_, err = Fetch(repo, "remoteB")
-	require.NoError(t, err)
-
-	err = RemoveBug(repo, b.Id())
-	require.NoError(t, err)
-
-	_, err = ReadLocal(repo, b.Id())
-	require.Error(t, ErrBugNotExist, err)
-
-	_, err = ReadRemote(repo, "remoteA", b.Id())
-	require.Error(t, ErrBugNotExist, err)
-
-	_, err = ReadRemote(repo, "remoteB", b.Id())
-	require.Error(t, ErrBugNotExist, err)
-
-	ids, err := ListLocalIds(repo)
-	require.NoError(t, err)
-	require.Len(t, ids, 100)
-}

bug/clocks.go 🔗

@@ -1,40 +0,0 @@
-package bug
-
-import (
-	"github.com/MichaelMure/git-bug/identity"
-	"github.com/MichaelMure/git-bug/repository"
-)
-
-// ClockLoader is the repository.ClockLoader for the Bug entity
-var ClockLoader = repository.ClockLoader{
-	Clocks: []string{creationClockName, editClockName},
-	Witnesser: func(repo repository.ClockedRepo) error {
-		// We don't care about the actual identity so an IdentityStub will do
-		resolver := identity.NewStubResolver()
-		for b := range ReadAllLocalWithResolver(repo, resolver) {
-			if b.Err != nil {
-				return b.Err
-			}
-
-			createClock, err := repo.GetOrCreateClock(creationClockName)
-			if err != nil {
-				return err
-			}
-			err = createClock.Witness(b.Bug.createTime)
-			if err != nil {
-				return err
-			}
-
-			editClock, err := repo.GetOrCreateClock(editClockName)
-			if err != nil {
-				return err
-			}
-			err = editClock.Witness(b.Bug.editTime)
-			if err != nil {
-				return err
-			}
-		}
-
-		return nil
-	},
-}

bug/err.go 🔗

@@ -0,0 +1,17 @@
+package bug
+
+import (
+	"errors"
+
+	"github.com/MichaelMure/git-bug/entity"
+)
+
+var ErrBugNotExist = errors.New("bug doesn't exist")
+
+func NewErrMultipleMatchBug(matching []entity.Id) *entity.ErrMultipleMatch {
+	return entity.NewErrMultipleMatch("bug", matching)
+}
+
+func NewErrMultipleMatchOp(matching []entity.Id) *entity.ErrMultipleMatch {
+	return entity.NewErrMultipleMatch("operation", matching)
+}

bug/identity.go 🔗

@@ -1,27 +0,0 @@
-package bug
-
-import (
-	"github.com/MichaelMure/git-bug/identity"
-)
-
-// EnsureIdentities walk the graph of operations and make sure that all Identity
-// are properly loaded. That is, it replace all the IdentityStub with the full
-// Identity, loaded through a Resolver.
-func (bug *Bug) EnsureIdentities(resolver identity.Resolver) error {
-	it := NewOperationIterator(bug)
-
-	for it.Next() {
-		op := it.Value()
-		base := op.base()
-
-		if stub, ok := base.Author.(*identity.IdentityStub); ok {
-			i, err := resolver.ResolveIdentity(stub.Id())
-			if err != nil {
-				return err
-			}
-
-			base.Author = i
-		}
-	}
-	return nil
-}

bug/interface.go 🔗

@@ -16,17 +16,15 @@ type Interface interface {
 	// Append an operation into the staging area, to be committed later
 	Append(op Operation)
 
+	// Operations return the ordered operations
+	Operations() []Operation
+
 	// Indicate that the in-memory state changed and need to be commit in the repository
 	NeedCommit() bool
 
 	// Commit write the staging area in Git and move the operations to the packs
 	Commit(repo repository.ClockedRepo) error
 
-	// Merge a different version of the same bug by rebasing operations of this bug
-	// that are not present in the other on top of the chain of operations of the
-	// other version.
-	Merge(repo repository.Repo, other Interface) (bool, error)
-
 	// Lookup for the very first operation of the bug.
 	// For a valid Bug, this operation should be a CreateOp
 	FirstOp() Operation

bug/op_add_comment.go 🔗

@@ -5,6 +5,7 @@ import (
 	"fmt"
 
 	"github.com/MichaelMure/git-bug/entity"
+	"github.com/MichaelMure/git-bug/entity/dag"
 	"github.com/MichaelMure/git-bug/identity"
 	"github.com/MichaelMure/git-bug/repository"
 	"github.com/MichaelMure/git-bug/util/text"
@@ -12,6 +13,7 @@ import (
 )
 
 var _ Operation = &AddCommentOperation{}
+var _ dag.OperationWithFiles = &AddCommentOperation{}
 
 // AddCommentOperation will add a new comment in the bug
 type AddCommentOperation struct {
@@ -21,25 +23,19 @@ type AddCommentOperation struct {
 	Files []repository.Hash `json:"files"`
 }
 
-// Sign-post method for gqlgen
-func (op *AddCommentOperation) IsOperation() {}
-
-func (op *AddCommentOperation) base() *OpBase {
-	return &op.OpBase
-}
-
 func (op *AddCommentOperation) Id() entity.Id {
-	return idOperation(op)
+	return idOperation(op, &op.OpBase)
 }
 
 func (op *AddCommentOperation) Apply(snapshot *Snapshot) {
-	snapshot.addActor(op.Author)
-	snapshot.addParticipant(op.Author)
+	snapshot.addActor(op.Author_)
+	snapshot.addParticipant(op.Author_)
 
+	commentId := entity.CombineIds(snapshot.Id(), op.Id())
 	comment := Comment{
-		id:       op.Id(),
+		id:       commentId,
 		Message:  op.Message,
-		Author:   op.Author,
+		Author:   op.Author_,
 		Files:    op.Files,
 		UnixTime: timestamp.Timestamp(op.UnixTime),
 	}
@@ -47,7 +43,7 @@ func (op *AddCommentOperation) Apply(snapshot *Snapshot) {
 	snapshot.Comments = append(snapshot.Comments, comment)
 
 	item := &AddCommentTimelineItem{
-		CommentTimelineItem: NewCommentTimelineItem(op.Id(), comment),
+		CommentTimelineItem: NewCommentTimelineItem(commentId, comment),
 	}
 
 	snapshot.Timeline = append(snapshot.Timeline, item)
@@ -58,7 +54,7 @@ func (op *AddCommentOperation) GetFiles() []repository.Hash {
 }
 
 func (op *AddCommentOperation) Validate() error {
-	if err := opBaseValidate(op, AddCommentOp); err != nil {
+	if err := op.OpBase.Validate(op, AddCommentOp); err != nil {
 		return err
 	}
 

bug/op_add_comment_test.go 🔗

@@ -13,9 +13,9 @@ import (
 )
 
 func TestAddCommentSerialize(t *testing.T) {
-	repo := repository.NewMockRepoForTest()
-	rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
-	err := rene.Commit(repo)
+	repo := repository.NewMockRepo()
+
+	rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
 	require.NoError(t, err)
 
 	unix := time.Now().Unix()
@@ -32,8 +32,8 @@ func TestAddCommentSerialize(t *testing.T) {
 	before.Id()
 
 	// Replace the identity stub with the real thing
-	assert.Equal(t, rene.Id(), after.base().Author.Id())
-	after.Author = rene
+	assert.Equal(t, rene.Id(), after.Author().Id())
+	after.Author_ = rene
 
 	assert.Equal(t, before, &after)
 }

bug/op_create.go 🔗

@@ -6,6 +6,7 @@ import (
 	"strings"
 
 	"github.com/MichaelMure/git-bug/entity"
+	"github.com/MichaelMure/git-bug/entity/dag"
 	"github.com/MichaelMure/git-bug/identity"
 	"github.com/MichaelMure/git-bug/repository"
 	"github.com/MichaelMure/git-bug/util/text"
@@ -13,6 +14,7 @@ import (
 )
 
 var _ Operation = &CreateOperation{}
+var _ dag.OperationWithFiles = &CreateOperation{}
 
 // CreateOperation define the initial creation of a bug
 type CreateOperation struct {
@@ -22,37 +24,53 @@ type CreateOperation struct {
 	Files   []repository.Hash `json:"files"`
 }
 
-// Sign-post method for gqlgen
-func (op *CreateOperation) IsOperation() {}
-
-func (op *CreateOperation) base() *OpBase {
-	return &op.OpBase
+func (op *CreateOperation) Id() entity.Id {
+	return idOperation(op, &op.OpBase)
 }
 
-func (op *CreateOperation) Id() entity.Id {
-	return idOperation(op)
+// OVERRIDE
+func (op *CreateOperation) SetMetadata(key string, value string) {
+	// sanity check: we make sure we are not in the following scenario:
+	// - the bug is created with a first operation
+	// - Id() is used
+	// - metadata are added, which will change the Id
+	// - Id() is used again
+
+	if op.id != entity.UnsetId {
+		panic("usage of Id() after changing the first operation")
+	}
+
+	op.OpBase.SetMetadata(key, value)
 }
 
 func (op *CreateOperation) Apply(snapshot *Snapshot) {
-	snapshot.addActor(op.Author)
-	snapshot.addParticipant(op.Author)
+	// sanity check: will fail when adding a second Create
+	if snapshot.id != "" && snapshot.id != entity.UnsetId && snapshot.id != op.Id() {
+		panic("adding a second Create operation")
+	}
+
+	snapshot.id = op.Id()
+
+	snapshot.addActor(op.Author_)
+	snapshot.addParticipant(op.Author_)
 
 	snapshot.Title = op.Title
 
+	commentId := entity.CombineIds(snapshot.Id(), op.Id())
 	comment := Comment{
-		id:       op.Id(),
+		id:       commentId,
 		Message:  op.Message,
-		Author:   op.Author,
+		Author:   op.Author_,
 		UnixTime: timestamp.Timestamp(op.UnixTime),
 	}
 
 	snapshot.Comments = []Comment{comment}
-	snapshot.Author = op.Author
+	snapshot.Author = op.Author_
 	snapshot.CreateTime = op.Time()
 
 	snapshot.Timeline = []TimelineItem{
 		&CreateTimelineItem{
-			CommentTimelineItem: NewCommentTimelineItem(op.Id(), comment),
+			CommentTimelineItem: NewCommentTimelineItem(commentId, comment),
 		},
 	}
 }
@@ -62,18 +80,23 @@ func (op *CreateOperation) GetFiles() []repository.Hash {
 }
 
 func (op *CreateOperation) Validate() error {
-	if err := opBaseValidate(op, CreateOp); err != nil {
+	if err := op.OpBase.Validate(op, CreateOp); err != nil {
 		return err
 	}
 
+	if len(op.Nonce) > 64 {
+		return fmt.Errorf("create nonce is too big")
+	}
+	if len(op.Nonce) < 20 {
+		return fmt.Errorf("create nonce is too small")
+	}
+
 	if text.Empty(op.Title) {
 		return fmt.Errorf("title is empty")
 	}
-
 	if strings.Contains(op.Title, "\n") {
 		return fmt.Errorf("title should be a single line")
 	}
-
 	if !text.Safe(op.Title) {
 		return fmt.Errorf("title is not fully printable")
 	}
@@ -85,7 +108,7 @@ func (op *CreateOperation) Validate() error {
 	return nil
 }
 
-// UnmarshalJSON is a two step JSON unmarshaling
+// UnmarshalJSON is a two step JSON unmarshalling
 // This workaround is necessary to avoid the inner OpBase.MarshalJSON
 // overriding the outer op's MarshalJSON
 func (op *CreateOperation) UnmarshalJSON(data []byte) error {
@@ -98,6 +121,7 @@ func (op *CreateOperation) UnmarshalJSON(data []byte) error {
 	}
 
 	aux := struct {
+		Nonce   []byte            `json:"nonce"`
 		Title   string            `json:"title"`
 		Message string            `json:"message"`
 		Files   []repository.Hash `json:"files"`
@@ -109,6 +133,7 @@ func (op *CreateOperation) UnmarshalJSON(data []byte) error {
 	}
 
 	op.OpBase = base
+	op.Nonce = aux.Nonce
 	op.Title = aux.Title
 	op.Message = aux.Message
 	op.Files = aux.Files

bug/op_create_test.go 🔗

@@ -5,17 +5,22 @@ import (
 	"testing"
 	"time"
 
+	"github.com/stretchr/testify/require"
+
+	"github.com/MichaelMure/git-bug/entity"
 	"github.com/MichaelMure/git-bug/identity"
 	"github.com/MichaelMure/git-bug/repository"
 	"github.com/MichaelMure/git-bug/util/timestamp"
-	"github.com/stretchr/testify/assert"
-	"github.com/stretchr/testify/require"
 )
 
 func TestCreate(t *testing.T) {
 	snapshot := Snapshot{}
 
-	rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
+	repo := repository.NewMockRepoClock()
+
+	rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
+	require.NoError(t, err)
+
 	unix := time.Now().Unix()
 
 	create := NewCreateOp(rene, unix, "title", "message", nil)
@@ -23,16 +28,19 @@ func TestCreate(t *testing.T) {
 	create.Apply(&snapshot)
 
 	id := create.Id()
-	assert.NoError(t, id.Validate())
+	require.NoError(t, id.Validate())
+
+	commentId := entity.CombineIds(create.Id(), create.Id())
 
 	comment := Comment{
-		id:       id,
+		id:       commentId,
 		Author:   rene,
 		Message:  "message",
 		UnixTime: timestamp.Timestamp(create.UnixTime),
 	}
 
 	expected := Snapshot{
+		id:    create.Id(),
 		Title: "title",
 		Comments: []Comment{
 			comment,
@@ -43,36 +51,36 @@ func TestCreate(t *testing.T) {
 		CreateTime:   create.Time(),
 		Timeline: []TimelineItem{
 			&CreateTimelineItem{
-				CommentTimelineItem: NewCommentTimelineItem(id, comment),
+				CommentTimelineItem: NewCommentTimelineItem(commentId, comment),
 			},
 		},
 	}
 
-	assert.Equal(t, expected, snapshot)
+	require.Equal(t, expected, snapshot)
 }
 
 func TestCreateSerialize(t *testing.T) {
-	repo := repository.NewMockRepoForTest()
-	rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
-	err := rene.Commit(repo)
+	repo := repository.NewMockRepo()
+
+	rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
 	require.NoError(t, err)
 
 	unix := time.Now().Unix()
 	before := NewCreateOp(rene, unix, "title", "message", nil)
 
 	data, err := json.Marshal(before)
-	assert.NoError(t, err)
+	require.NoError(t, err)
 
 	var after CreateOperation
 	err = json.Unmarshal(data, &after)
-	assert.NoError(t, err)
+	require.NoError(t, err)
 
 	// enforce creating the ID
 	before.Id()
 
 	// Replace the identity stub with the real thing
-	assert.Equal(t, rene.Id(), after.base().Author.Id())
-	after.Author = rene
+	require.Equal(t, rene.Id(), after.Author().Id())
+	after.Author_ = rene
 
-	assert.Equal(t, before, &after)
+	require.Equal(t, before, &after)
 }

bug/op_edit_comment.go 🔗

@@ -7,6 +7,7 @@ import (
 	"github.com/pkg/errors"
 
 	"github.com/MichaelMure/git-bug/entity"
+	"github.com/MichaelMure/git-bug/entity/dag"
 	"github.com/MichaelMure/git-bug/identity"
 	"github.com/MichaelMure/git-bug/repository"
 	"github.com/MichaelMure/git-bug/util/timestamp"
@@ -15,6 +16,7 @@ import (
 )
 
 var _ Operation = &EditCommentOperation{}
+var _ dag.OperationWithFiles = &EditCommentOperation{}
 
 // EditCommentOperation will change a comment in the bug
 type EditCommentOperation struct {
@@ -24,22 +26,15 @@ type EditCommentOperation struct {
 	Files   []repository.Hash `json:"files"`
 }
 
-// Sign-post method for gqlgen
-func (op *EditCommentOperation) IsOperation() {}
-
-func (op *EditCommentOperation) base() *OpBase {
-	return &op.OpBase
-}
-
 func (op *EditCommentOperation) Id() entity.Id {
-	return idOperation(op)
+	return idOperation(op, &op.OpBase)
 }
 
 func (op *EditCommentOperation) Apply(snapshot *Snapshot) {
 	// Todo: currently any message can be edited, even by a different author
 	// crypto signature are needed.
 
-	snapshot.addActor(op.Author)
+	snapshot.addActor(op.Author_)
 
 	var target TimelineItem
 
@@ -85,7 +80,7 @@ func (op *EditCommentOperation) GetFiles() []repository.Hash {
 }
 
 func (op *EditCommentOperation) Validate() error {
-	if err := opBaseValidate(op, EditCommentOp); err != nil {
+	if err := op.OpBase.Validate(op, EditCommentOp); err != nil {
 		return err
 	}
 
@@ -100,7 +95,7 @@ func (op *EditCommentOperation) Validate() error {
 	return nil
 }
 
-// UnmarshalJSON is a two step JSON unmarshaling
+// UnmarshalJSON is a two step JSON unmarshalling
 // This workaround is necessary to avoid the inner OpBase.MarshalJSON
 // overriding the outer op's MarshalJSON
 func (op *EditCommentOperation) UnmarshalJSON(data []byte) error {

bug/op_edit_comment_test.go 🔗

@@ -5,7 +5,6 @@ import (
 	"testing"
 	"time"
 
-	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 
 	"github.com/MichaelMure/git-bug/identity"
@@ -15,9 +14,9 @@ import (
 func TestEdit(t *testing.T) {
 	snapshot := Snapshot{}
 
-	repo := repository.NewMockRepoForTest()
-	rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
-	err := rene.Commit(repo)
+	repo := repository.NewMockRepo()
+
+	rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
 	require.NoError(t, err)
 
 	unix := time.Now().Unix()
@@ -44,62 +43,62 @@ func TestEdit(t *testing.T) {
 	id3 := comment2.Id()
 	require.NoError(t, id3.Validate())
 
-	edit := NewEditCommentOp(rene, unix, id1, "create edited", nil)
+	edit := NewEditCommentOp(rene, unix, snapshot.Comments[0].Id(), "create edited", nil)
 	edit.Apply(&snapshot)
 
-	assert.Equal(t, len(snapshot.Timeline), 4)
-	assert.Equal(t, len(snapshot.Timeline[0].(*CreateTimelineItem).History), 2)
-	assert.Equal(t, len(snapshot.Timeline[1].(*AddCommentTimelineItem).History), 1)
-	assert.Equal(t, len(snapshot.Timeline[3].(*AddCommentTimelineItem).History), 1)
-	assert.Equal(t, snapshot.Comments[0].Message, "create edited")
-	assert.Equal(t, snapshot.Comments[1].Message, "comment 1")
-	assert.Equal(t, snapshot.Comments[2].Message, "comment 2")
+	require.Len(t, snapshot.Timeline, 4)
+	require.Len(t, snapshot.Timeline[0].(*CreateTimelineItem).History, 2)
+	require.Len(t, snapshot.Timeline[1].(*AddCommentTimelineItem).History, 1)
+	require.Len(t, snapshot.Timeline[3].(*AddCommentTimelineItem).History, 1)
+	require.Equal(t, snapshot.Comments[0].Message, "create edited")
+	require.Equal(t, snapshot.Comments[1].Message, "comment 1")
+	require.Equal(t, snapshot.Comments[2].Message, "comment 2")
 
-	edit2 := NewEditCommentOp(rene, unix, id2, "comment 1 edited", nil)
+	edit2 := NewEditCommentOp(rene, unix, snapshot.Comments[1].Id(), "comment 1 edited", nil)
 	edit2.Apply(&snapshot)
 
-	assert.Equal(t, len(snapshot.Timeline), 4)
-	assert.Equal(t, len(snapshot.Timeline[0].(*CreateTimelineItem).History), 2)
-	assert.Equal(t, len(snapshot.Timeline[1].(*AddCommentTimelineItem).History), 2)
-	assert.Equal(t, len(snapshot.Timeline[3].(*AddCommentTimelineItem).History), 1)
-	assert.Equal(t, snapshot.Comments[0].Message, "create edited")
-	assert.Equal(t, snapshot.Comments[1].Message, "comment 1 edited")
-	assert.Equal(t, snapshot.Comments[2].Message, "comment 2")
+	require.Len(t, snapshot.Timeline, 4)
+	require.Len(t, snapshot.Timeline[0].(*CreateTimelineItem).History, 2)
+	require.Len(t, snapshot.Timeline[1].(*AddCommentTimelineItem).History, 2)
+	require.Len(t, snapshot.Timeline[3].(*AddCommentTimelineItem).History, 1)
+	require.Equal(t, snapshot.Comments[0].Message, "create edited")
+	require.Equal(t, snapshot.Comments[1].Message, "comment 1 edited")
+	require.Equal(t, snapshot.Comments[2].Message, "comment 2")
 
-	edit3 := NewEditCommentOp(rene, unix, id3, "comment 2 edited", nil)
+	edit3 := NewEditCommentOp(rene, unix, snapshot.Comments[2].Id(), "comment 2 edited", nil)
 	edit3.Apply(&snapshot)
 
-	assert.Equal(t, len(snapshot.Timeline), 4)
-	assert.Equal(t, len(snapshot.Timeline[0].(*CreateTimelineItem).History), 2)
-	assert.Equal(t, len(snapshot.Timeline[1].(*AddCommentTimelineItem).History), 2)
-	assert.Equal(t, len(snapshot.Timeline[3].(*AddCommentTimelineItem).History), 2)
-	assert.Equal(t, snapshot.Comments[0].Message, "create edited")
-	assert.Equal(t, snapshot.Comments[1].Message, "comment 1 edited")
-	assert.Equal(t, snapshot.Comments[2].Message, "comment 2 edited")
+	require.Len(t, snapshot.Timeline, 4)
+	require.Len(t, snapshot.Timeline[0].(*CreateTimelineItem).History, 2)
+	require.Len(t, snapshot.Timeline[1].(*AddCommentTimelineItem).History, 2)
+	require.Len(t, snapshot.Timeline[3].(*AddCommentTimelineItem).History, 2)
+	require.Equal(t, snapshot.Comments[0].Message, "create edited")
+	require.Equal(t, snapshot.Comments[1].Message, "comment 1 edited")
+	require.Equal(t, snapshot.Comments[2].Message, "comment 2 edited")
 }
 
 func TestEditCommentSerialize(t *testing.T) {
-	repo := repository.NewMockRepoForTest()
-	rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
-	err := rene.Commit(repo)
+	repo := repository.NewMockRepo()
+
+	rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
 	require.NoError(t, err)
 
 	unix := time.Now().Unix()
 	before := NewEditCommentOp(rene, unix, "target", "message", nil)
 
 	data, err := json.Marshal(before)
-	assert.NoError(t, err)
+	require.NoError(t, err)
 
 	var after EditCommentOperation
 	err = json.Unmarshal(data, &after)
-	assert.NoError(t, err)
+	require.NoError(t, err)
 
 	// enforce creating the ID
 	before.Id()
 
 	// Replace the identity stub with the real thing
-	assert.Equal(t, rene.Id(), after.base().Author.Id())
-	after.Author = rene
+	require.Equal(t, rene.Id(), after.Author().Id())
+	after.Author_ = rene
 
-	assert.Equal(t, before, &after)
+	require.Equal(t, before, &after)
 }

bug/op_label_change.go 🔗

@@ -21,20 +21,13 @@ type LabelChangeOperation struct {
 	Removed []Label `json:"removed"`
 }
 
-// Sign-post method for gqlgen
-func (op *LabelChangeOperation) IsOperation() {}
-
-func (op *LabelChangeOperation) base() *OpBase {
-	return &op.OpBase
-}
-
 func (op *LabelChangeOperation) Id() entity.Id {
-	return idOperation(op)
+	return idOperation(op, &op.OpBase)
 }
 
 // Apply apply the operation
 func (op *LabelChangeOperation) Apply(snapshot *Snapshot) {
-	snapshot.addActor(op.Author)
+	snapshot.addActor(op.Author_)
 
 	// Add in the set
 AddLoop:
@@ -66,7 +59,7 @@ AddLoop:
 
 	item := &LabelChangeTimelineItem{
 		id:       op.Id(),
-		Author:   op.Author,
+		Author:   op.Author_,
 		UnixTime: timestamp.Timestamp(op.UnixTime),
 		Added:    op.Added,
 		Removed:  op.Removed,
@@ -76,7 +69,7 @@ AddLoop:
 }
 
 func (op *LabelChangeOperation) Validate() error {
-	if err := opBaseValidate(op, LabelChangeOp); err != nil {
+	if err := op.OpBase.Validate(op, LabelChangeOp); err != nil {
 		return err
 	}
 
@@ -99,7 +92,7 @@ func (op *LabelChangeOperation) Validate() error {
 	return nil
 }
 
-// UnmarshalJSON is a two step JSON unmarshaling
+// UnmarshalJSON is a two step JSON unmarshalling
 // This workaround is necessary to avoid the inner OpBase.MarshalJSON
 // overriding the outer op's MarshalJSON
 func (op *LabelChangeOperation) UnmarshalJSON(data []byte) error {

bug/op_label_change_test.go 🔗

@@ -9,32 +9,30 @@ import (
 
 	"github.com/MichaelMure/git-bug/identity"
 	"github.com/MichaelMure/git-bug/repository"
-
-	"github.com/stretchr/testify/assert"
 )
 
 func TestLabelChangeSerialize(t *testing.T) {
-	repo := repository.NewMockRepoForTest()
-	rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
-	err := rene.Commit(repo)
+	repo := repository.NewMockRepo()
+
+	rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
 	require.NoError(t, err)
 
 	unix := time.Now().Unix()
 	before := NewLabelChangeOperation(rene, unix, []Label{"added"}, []Label{"removed"})
 
 	data, err := json.Marshal(before)
-	assert.NoError(t, err)
+	require.NoError(t, err)
 
 	var after LabelChangeOperation
 	err = json.Unmarshal(data, &after)
-	assert.NoError(t, err)
+	require.NoError(t, err)
 
 	// enforce creating the ID
 	before.Id()
 
 	// Replace the identity stub with the real thing
-	assert.Equal(t, rene.Id(), after.base().Author.Id())
-	after.Author = rene
+	require.Equal(t, rene.Id(), after.Author().Id())
+	after.Author_ = rene
 
-	assert.Equal(t, before, &after)
+	require.Equal(t, before, &after)
 }

bug/op_noop.go 🔗

@@ -16,15 +16,8 @@ type NoOpOperation struct {
 	OpBase
 }
 
-// Sign-post method for gqlgen
-func (op *NoOpOperation) IsOperation() {}
-
-func (op *NoOpOperation) base() *OpBase {
-	return &op.OpBase
-}
-
 func (op *NoOpOperation) Id() entity.Id {
-	return idOperation(op)
+	return idOperation(op, &op.OpBase)
 }
 
 func (op *NoOpOperation) Apply(snapshot *Snapshot) {
@@ -32,10 +25,10 @@ func (op *NoOpOperation) Apply(snapshot *Snapshot) {
 }
 
 func (op *NoOpOperation) Validate() error {
-	return opBaseValidate(op, NoOpOp)
+	return op.OpBase.Validate(op, NoOpOp)
 }
 
-// UnmarshalJSON is a two step JSON unmarshaling
+// UnmarshalJSON is a two step JSON unmarshalling
 // This workaround is necessary to avoid the inner OpBase.MarshalJSON
 // overriding the outer op's MarshalJSON
 func (op *NoOpOperation) UnmarshalJSON(data []byte) error {

bug/op_noop_test.go 🔗

@@ -14,9 +14,9 @@ import (
 )
 
 func TestNoopSerialize(t *testing.T) {
-	repo := repository.NewMockRepoForTest()
-	rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
-	err := rene.Commit(repo)
+	repo := repository.NewMockRepo()
+
+	rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
 	require.NoError(t, err)
 
 	unix := time.Now().Unix()
@@ -33,8 +33,8 @@ func TestNoopSerialize(t *testing.T) {
 	before.Id()
 
 	// Replace the identity stub with the real thing
-	assert.Equal(t, rene.Id(), after.base().Author.Id())
-	after.Author = rene
+	assert.Equal(t, rene.Id(), after.Author().Id())
+	after.Author_ = rene
 
 	assert.Equal(t, before, &after)
 }

bug/op_set_metadata.go 🔗

@@ -17,41 +17,25 @@ type SetMetadataOperation struct {
 	NewMetadata map[string]string `json:"new_metadata"`
 }
 
-// Sign-post method for gqlgen
-func (op *SetMetadataOperation) IsOperation() {}
-
-func (op *SetMetadataOperation) base() *OpBase {
-	return &op.OpBase
-}
-
 func (op *SetMetadataOperation) Id() entity.Id {
-	return idOperation(op)
+	return idOperation(op, &op.OpBase)
 }
 
 func (op *SetMetadataOperation) Apply(snapshot *Snapshot) {
 	for _, target := range snapshot.Operations {
 		if target.Id() == op.Target {
-			base := target.base()
-
-			if base.extraMetadata == nil {
-				base.extraMetadata = make(map[string]string)
-			}
-
 			// Apply the metadata in an immutable way: if a metadata already
 			// exist, it's not possible to override it.
-			for key, val := range op.NewMetadata {
-				if _, exist := base.extraMetadata[key]; !exist {
-					base.extraMetadata[key] = val
-				}
+			for key, value := range op.NewMetadata {
+				target.setExtraMetadataImmutable(key, value)
 			}
-
 			return
 		}
 	}
 }
 
 func (op *SetMetadataOperation) Validate() error {
-	if err := opBaseValidate(op, SetMetadataOp); err != nil {
+	if err := op.OpBase.Validate(op, SetMetadataOp); err != nil {
 		return err
 	}
 
@@ -62,7 +46,7 @@ func (op *SetMetadataOperation) Validate() error {
 	return nil
 }
 
-// UnmarshalJSON is a two step JSON unmarshaling
+// UnmarshalJSON is a two step JSON unmarshalling
 // This workaround is necessary to avoid the inner OpBase.MarshalJSON
 // overriding the outer op's MarshalJSON
 func (op *SetMetadataOperation) UnmarshalJSON(data []byte) error {

bug/op_set_metadata_test.go 🔗

@@ -8,16 +8,15 @@ import (
 	"github.com/MichaelMure/git-bug/identity"
 	"github.com/MichaelMure/git-bug/repository"
 
-	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 )
 
 func TestSetMetadata(t *testing.T) {
 	snapshot := Snapshot{}
 
-	repo := repository.NewMockRepoForTest()
-	rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
-	err := rene.Commit(repo)
+	repo := repository.NewMockRepo()
+
+	rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
 	require.NoError(t, err)
 
 	unix := time.Now().Unix()
@@ -47,15 +46,15 @@ func TestSetMetadata(t *testing.T) {
 	snapshot.Operations = append(snapshot.Operations, op1)
 
 	createMetadata := snapshot.Operations[0].AllMetadata()
-	assert.Equal(t, len(createMetadata), 2)
+	require.Len(t, createMetadata, 2)
 	// original key is not overrided
-	assert.Equal(t, createMetadata["key"], "value")
+	require.Equal(t, createMetadata["key"], "value")
 	// new key is set
-	assert.Equal(t, createMetadata["key2"], "value")
+	require.Equal(t, createMetadata["key2"], "value")
 
 	commentMetadata := snapshot.Operations[1].AllMetadata()
-	assert.Equal(t, len(commentMetadata), 1)
-	assert.Equal(t, commentMetadata["key2"], "value2")
+	require.Len(t, commentMetadata, 1)
+	require.Equal(t, commentMetadata["key2"], "value2")
 
 	op2 := NewSetMetadataOp(rene, unix, id2, map[string]string{
 		"key2": "value",
@@ -66,16 +65,16 @@ func TestSetMetadata(t *testing.T) {
 	snapshot.Operations = append(snapshot.Operations, op2)
 
 	createMetadata = snapshot.Operations[0].AllMetadata()
-	assert.Equal(t, len(createMetadata), 2)
-	assert.Equal(t, createMetadata["key"], "value")
-	assert.Equal(t, createMetadata["key2"], "value")
+	require.Len(t, createMetadata, 2)
+	require.Equal(t, createMetadata["key"], "value")
+	require.Equal(t, createMetadata["key2"], "value")
 
 	commentMetadata = snapshot.Operations[1].AllMetadata()
-	assert.Equal(t, len(commentMetadata), 2)
+	require.Len(t, commentMetadata, 2)
 	// original key is not overrided
-	assert.Equal(t, commentMetadata["key2"], "value2")
+	require.Equal(t, commentMetadata["key2"], "value2")
 	// new key is set
-	assert.Equal(t, commentMetadata["key3"], "value3")
+	require.Equal(t, commentMetadata["key3"], "value3")
 
 	op3 := NewSetMetadataOp(rene, unix, id1, map[string]string{
 		"key":  "override",
@@ -86,22 +85,22 @@ func TestSetMetadata(t *testing.T) {
 	snapshot.Operations = append(snapshot.Operations, op3)
 
 	createMetadata = snapshot.Operations[0].AllMetadata()
-	assert.Equal(t, len(createMetadata), 2)
+	require.Len(t, createMetadata, 2)
 	// original key is not overrided
-	assert.Equal(t, createMetadata["key"], "value")
+	require.Equal(t, createMetadata["key"], "value")
 	// previously set key is not overrided
-	assert.Equal(t, createMetadata["key2"], "value")
+	require.Equal(t, createMetadata["key2"], "value")
 
 	commentMetadata = snapshot.Operations[1].AllMetadata()
-	assert.Equal(t, len(commentMetadata), 2)
-	assert.Equal(t, commentMetadata["key2"], "value2")
-	assert.Equal(t, commentMetadata["key3"], "value3")
+	require.Len(t, commentMetadata, 2)
+	require.Equal(t, commentMetadata["key2"], "value2")
+	require.Equal(t, commentMetadata["key3"], "value3")
 }
 
 func TestSetMetadataSerialize(t *testing.T) {
-	repo := repository.NewMockRepoForTest()
-	rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
-	err := rene.Commit(repo)
+	repo := repository.NewMockRepo()
+
+	rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
 	require.NoError(t, err)
 
 	unix := time.Now().Unix()
@@ -111,18 +110,18 @@ func TestSetMetadataSerialize(t *testing.T) {
 	})
 
 	data, err := json.Marshal(before)
-	assert.NoError(t, err)
+	require.NoError(t, err)
 
 	var after SetMetadataOperation
 	err = json.Unmarshal(data, &after)
-	assert.NoError(t, err)
+	require.NoError(t, err)
 
 	// enforce creating the ID
 	before.Id()
 
 	// Replace the identity stub with the real thing
-	assert.Equal(t, rene.Id(), after.base().Author.Id())
-	after.Author = rene
+	require.Equal(t, rene.Id(), after.Author().Id())
+	after.Author_ = rene
 
-	assert.Equal(t, before, &after)
+	require.Equal(t, before, &after)
 }

bug/op_set_status.go 🔗

@@ -18,24 +18,17 @@ type SetStatusOperation struct {
 	Status Status `json:"status"`
 }
 
-// Sign-post method for gqlgen
-func (op *SetStatusOperation) IsOperation() {}
-
-func (op *SetStatusOperation) base() *OpBase {
-	return &op.OpBase
-}
-
 func (op *SetStatusOperation) Id() entity.Id {
-	return idOperation(op)
+	return idOperation(op, &op.OpBase)
 }
 
 func (op *SetStatusOperation) Apply(snapshot *Snapshot) {
 	snapshot.Status = op.Status
-	snapshot.addActor(op.Author)
+	snapshot.addActor(op.Author_)
 
 	item := &SetStatusTimelineItem{
 		id:       op.Id(),
-		Author:   op.Author,
+		Author:   op.Author_,
 		UnixTime: timestamp.Timestamp(op.UnixTime),
 		Status:   op.Status,
 	}
@@ -44,7 +37,7 @@ func (op *SetStatusOperation) Apply(snapshot *Snapshot) {
 }
 
 func (op *SetStatusOperation) Validate() error {
-	if err := opBaseValidate(op, SetStatusOp); err != nil {
+	if err := op.OpBase.Validate(op, SetStatusOp); err != nil {
 		return err
 	}
 
@@ -55,7 +48,7 @@ func (op *SetStatusOperation) Validate() error {
 	return nil
 }
 
-// UnmarshalJSON is a two step JSON unmarshaling
+// UnmarshalJSON is a two step JSON unmarshalling
 // This workaround is necessary to avoid the inner OpBase.MarshalJSON
 // overriding the outer op's MarshalJSON
 func (op *SetStatusOperation) UnmarshalJSON(data []byte) error {

bug/op_set_status_test.go 🔗

@@ -9,32 +9,30 @@ import (
 
 	"github.com/MichaelMure/git-bug/identity"
 	"github.com/MichaelMure/git-bug/repository"
-
-	"github.com/stretchr/testify/assert"
 )
 
 func TestSetStatusSerialize(t *testing.T) {
-	repo := repository.NewMockRepoForTest()
-	rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
-	err := rene.Commit(repo)
+	repo := repository.NewMockRepo()
+
+	rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
 	require.NoError(t, err)
 
 	unix := time.Now().Unix()
 	before := NewSetStatusOp(rene, unix, ClosedStatus)
 
 	data, err := json.Marshal(before)
-	assert.NoError(t, err)
+	require.NoError(t, err)
 
 	var after SetStatusOperation
 	err = json.Unmarshal(data, &after)
-	assert.NoError(t, err)
+	require.NoError(t, err)
 
 	// enforce creating the ID
 	before.Id()
 
 	// Replace the identity stub with the real thing
-	assert.Equal(t, rene.Id(), after.base().Author.Id())
-	after.Author = rene
+	require.Equal(t, rene.Id(), after.Author().Id())
+	after.Author_ = rene
 
-	assert.Equal(t, before, &after)
+	require.Equal(t, before, &after)
 }

bug/op_set_title.go 🔗

@@ -21,24 +21,17 @@ type SetTitleOperation struct {
 	Was   string `json:"was"`
 }
 
-// Sign-post method for gqlgen
-func (op *SetTitleOperation) IsOperation() {}
-
-func (op *SetTitleOperation) base() *OpBase {
-	return &op.OpBase
-}
-
 func (op *SetTitleOperation) Id() entity.Id {
-	return idOperation(op)
+	return idOperation(op, &op.OpBase)
 }
 
 func (op *SetTitleOperation) Apply(snapshot *Snapshot) {
 	snapshot.Title = op.Title
-	snapshot.addActor(op.Author)
+	snapshot.addActor(op.Author_)
 
 	item := &SetTitleTimelineItem{
 		id:       op.Id(),
-		Author:   op.Author,
+		Author:   op.Author_,
 		UnixTime: timestamp.Timestamp(op.UnixTime),
 		Title:    op.Title,
 		Was:      op.Was,
@@ -48,7 +41,7 @@ func (op *SetTitleOperation) Apply(snapshot *Snapshot) {
 }
 
 func (op *SetTitleOperation) Validate() error {
-	if err := opBaseValidate(op, SetTitleOp); err != nil {
+	if err := op.OpBase.Validate(op, SetTitleOp); err != nil {
 		return err
 	}
 
@@ -75,7 +68,7 @@ func (op *SetTitleOperation) Validate() error {
 	return nil
 }
 
-// UnmarshalJSON is a two step JSON unmarshaling
+// UnmarshalJSON is a two step JSON unmarshalling
 // This workaround is necessary to avoid the inner OpBase.MarshalJSON
 // overriding the outer op's MarshalJSON
 func (op *SetTitleOperation) UnmarshalJSON(data []byte) error {
@@ -132,19 +125,17 @@ func (s *SetTitleTimelineItem) IsAuthored() {}
 
 // Convenience function to apply the operation
 func SetTitle(b Interface, author identity.Interface, unixTime int64, title string) (*SetTitleOperation, error) {
-	it := NewOperationIterator(b)
-
-	var lastTitleOp Operation
-	for it.Next() {
-		op := it.Value()
-		if op.base().OperationType == SetTitleOp {
+	var lastTitleOp *SetTitleOperation
+	for _, op := range b.Operations() {
+		switch op := op.(type) {
+		case *SetTitleOperation:
 			lastTitleOp = op
 		}
 	}
 
 	var was string
 	if lastTitleOp != nil {
-		was = lastTitleOp.(*SetTitleOperation).Title
+		was = lastTitleOp.Title
 	} else {
 		was = b.FirstOp().(*CreateOperation).Title
 	}

bug/op_set_title_test.go 🔗

@@ -9,32 +9,30 @@ import (
 
 	"github.com/MichaelMure/git-bug/identity"
 	"github.com/MichaelMure/git-bug/repository"
-
-	"github.com/stretchr/testify/assert"
 )
 
 func TestSetTitleSerialize(t *testing.T) {
-	repo := repository.NewMockRepoForTest()
-	rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
-	err := rene.Commit(repo)
+	repo := repository.NewMockRepo()
+
+	rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
 	require.NoError(t, err)
 
 	unix := time.Now().Unix()
 	before := NewSetTitleOp(rene, unix, "title", "was")
 
 	data, err := json.Marshal(before)
-	assert.NoError(t, err)
+	require.NoError(t, err)
 
 	var after SetTitleOperation
 	err = json.Unmarshal(data, &after)
-	assert.NoError(t, err)
+	require.NoError(t, err)
 
 	// enforce creating the ID
 	before.Id()
 
 	// Replace the identity stub with the real thing
-	assert.Equal(t, rene.Id(), after.base().Author.Id())
-	after.Author = rene
+	require.Equal(t, rene.Id(), after.Author().Id())
+	after.Author_ = rene
 
-	assert.Equal(t, before, &after)
+	require.Equal(t, before, &after)
 }

bug/operation.go 🔗

@@ -1,7 +1,7 @@
 package bug
 
 import (
-	"crypto/sha256"
+	"crypto/rand"
 	"encoding/json"
 	"fmt"
 	"time"
@@ -9,8 +9,8 @@ import (
 	"github.com/pkg/errors"
 
 	"github.com/MichaelMure/git-bug/entity"
+	"github.com/MichaelMure/git-bug/entity/dag"
 	"github.com/MichaelMure/git-bug/identity"
-	"github.com/MichaelMure/git-bug/repository"
 )
 
 // OperationType is an operation type identifier
@@ -30,39 +30,27 @@ const (
 
 // Operation define the interface to fulfill for an edit operation of a Bug
 type Operation interface {
-	// base return the OpBase of the Operation, for package internal use
-	base() *OpBase
-	// Id return the identifier of the operation, to be used for back references
-	Id() entity.Id
+	dag.Operation
+
+	// Type return the type of the operation
+	Type() OperationType
+
 	// Time return the time when the operation was added
 	Time() time.Time
-	// GetFiles return the files needed by this operation
-	GetFiles() []repository.Hash
 	// Apply the operation to a Snapshot to create the final state
 	Apply(snapshot *Snapshot)
-	// Validate check if the operation is valid (ex: a title is a single line)
-	Validate() error
+
 	// SetMetadata store arbitrary metadata about the operation
 	SetMetadata(key string, value string)
 	// GetMetadata retrieve arbitrary metadata about the operation
 	GetMetadata(key string) (string, bool)
 	// AllMetadata return all metadata for this operation
 	AllMetadata() map[string]string
-	// GetAuthor return the author identity
-	GetAuthor() identity.Interface
-
-	// sign-post method for gqlgen
-	IsOperation()
-}
 
-func deriveId(data []byte) entity.Id {
-	sum := sha256.Sum256(data)
-	return entity.Id(fmt.Sprintf("%x", sum))
+	setExtraMetadataImmutable(key string, value string)
 }
 
-func idOperation(op Operation) entity.Id {
-	base := op.base()
-
+func idOperation(op Operation, base *OpBase) entity.Id {
 	if base.id == "" {
 		// something went really wrong
 		panic("op's id not set")
@@ -78,18 +66,85 @@ func idOperation(op Operation) entity.Id {
 			panic(err)
 		}
 
-		base.id = deriveId(data)
+		base.id = entity.DeriveId(data)
 	}
 	return base.id
 }
 
+func operationUnmarshaller(author identity.Interface, raw json.RawMessage) (dag.Operation, error) {
+	var t struct {
+		OperationType OperationType `json:"type"`
+	}
+
+	if err := json.Unmarshal(raw, &t); err != nil {
+		return nil, err
+	}
+
+	var op Operation
+
+	switch t.OperationType {
+	case AddCommentOp:
+		op = &AddCommentOperation{}
+	case CreateOp:
+		op = &CreateOperation{}
+	case EditCommentOp:
+		op = &EditCommentOperation{}
+	case LabelChangeOp:
+		op = &LabelChangeOperation{}
+	case NoOpOp:
+		op = &NoOpOperation{}
+	case SetMetadataOp:
+		op = &SetMetadataOperation{}
+	case SetStatusOp:
+		op = &SetStatusOperation{}
+	case SetTitleOp:
+		op = &SetTitleOperation{}
+	default:
+		panic(fmt.Sprintf("unknown operation type %v", t.OperationType))
+	}
+
+	err := json.Unmarshal(raw, &op)
+	if err != nil {
+		return nil, err
+	}
+
+	switch op := op.(type) {
+	case *AddCommentOperation:
+		op.Author_ = author
+	case *CreateOperation:
+		op.Author_ = author
+	case *EditCommentOperation:
+		op.Author_ = author
+	case *LabelChangeOperation:
+		op.Author_ = author
+	case *NoOpOperation:
+		op.Author_ = author
+	case *SetMetadataOperation:
+		op.Author_ = author
+	case *SetStatusOperation:
+		op.Author_ = author
+	case *SetTitleOperation:
+		op.Author_ = author
+	default:
+		panic(fmt.Sprintf("unknown operation type %T", op))
+	}
+
+	return op, nil
+}
+
 // OpBase implement the common code for all operations
 type OpBase struct {
 	OperationType OperationType      `json:"type"`
-	Author        identity.Interface `json:"author"`
+	Author_       identity.Interface `json:"author"`
 	// TODO: part of the data model upgrade, this should eventually be a timestamp + lamport
 	UnixTime int64             `json:"timestamp"`
 	Metadata map[string]string `json:"metadata,omitempty"`
+
+	// mandatory random bytes to ensure a better randomness of the data used to later generate the ID
+	// len(Nonce) should be > 20 and < 64 bytes
+	// It has no functional purpose and should be ignored.
+	Nonce []byte `json:"nonce"`
+
 	// Not serialized. Store the op's id in memory.
 	id entity.Id
 	// Not serialized. Store the extra metadata in memory,
@@ -101,21 +156,32 @@ type OpBase struct {
 func newOpBase(opType OperationType, author identity.Interface, unixTime int64) OpBase {
 	return OpBase{
 		OperationType: opType,
-		Author:        author,
+		Author_:       author,
 		UnixTime:      unixTime,
+		Nonce:         makeNonce(20),
 		id:            entity.UnsetId,
 	}
 }
 
-func (op *OpBase) UnmarshalJSON(data []byte) error {
+func makeNonce(len int) []byte {
+	result := make([]byte, len)
+	_, err := rand.Read(result)
+	if err != nil {
+		panic(err)
+	}
+	return result
+}
+
+func (base *OpBase) UnmarshalJSON(data []byte) error {
 	// Compute the Id when loading the op from disk.
-	op.id = deriveId(data)
+	base.id = entity.DeriveId(data)
 
 	aux := struct {
 		OperationType OperationType     `json:"type"`
 		Author        json.RawMessage   `json:"author"`
 		UnixTime      int64             `json:"timestamp"`
 		Metadata      map[string]string `json:"metadata,omitempty"`
+		Nonce         []byte            `json:"nonce"`
 	}{}
 
 	if err := json.Unmarshal(data, &aux); err != nil {
@@ -128,92 +194,110 @@ func (op *OpBase) UnmarshalJSON(data []byte) error {
 		return err
 	}
 
-	op.OperationType = aux.OperationType
-	op.Author = author
-	op.UnixTime = aux.UnixTime
-	op.Metadata = aux.Metadata
+	base.OperationType = aux.OperationType
+	base.Author_ = author
+	base.UnixTime = aux.UnixTime
+	base.Metadata = aux.Metadata
+	base.Nonce = aux.Nonce
 
 	return nil
 }
 
-// Time return the time when the operation was added
-func (op *OpBase) Time() time.Time {
-	return time.Unix(op.UnixTime, 0)
+func (base *OpBase) Type() OperationType {
+	return base.OperationType
 }
 
-// GetFiles return the files needed by this operation
-func (op *OpBase) GetFiles() []repository.Hash {
-	return nil
+// Time return the time when the operation was added
+func (base *OpBase) Time() time.Time {
+	return time.Unix(base.UnixTime, 0)
 }
 
 // Validate check the OpBase for errors
-func opBaseValidate(op Operation, opType OperationType) error {
-	if op.base().OperationType != opType {
-		return fmt.Errorf("incorrect operation type (expected: %v, actual: %v)", opType, op.base().OperationType)
+func (base *OpBase) Validate(op Operation, opType OperationType) error {
+	if base.OperationType != opType {
+		return fmt.Errorf("incorrect operation type (expected: %v, actual: %v)", opType, base.OperationType)
 	}
 
 	if op.Time().Unix() == 0 {
 		return fmt.Errorf("time not set")
 	}
 
-	if op.base().Author == nil {
+	if base.Author_ == nil {
 		return fmt.Errorf("author not set")
 	}
 
-	if err := op.base().Author.Validate(); err != nil {
+	if err := op.Author().Validate(); err != nil {
 		return errors.Wrap(err, "author")
 	}
 
-	for _, hash := range op.GetFiles() {
-		if !hash.IsValid() {
-			return fmt.Errorf("file with invalid hash %v", hash)
+	if op, ok := op.(dag.OperationWithFiles); ok {
+		for _, hash := range op.GetFiles() {
+			if !hash.IsValid() {
+				return fmt.Errorf("file with invalid hash %v", hash)
+			}
 		}
 	}
 
+	if len(base.Nonce) > 64 {
+		return fmt.Errorf("nonce is too big")
+	}
+	if len(base.Nonce) < 20 {
+		return fmt.Errorf("nonce is too small")
+	}
+
 	return nil
 }
 
 // SetMetadata store arbitrary metadata about the operation
-func (op *OpBase) SetMetadata(key string, value string) {
-	if op.Metadata == nil {
-		op.Metadata = make(map[string]string)
+func (base *OpBase) SetMetadata(key string, value string) {
+	if base.Metadata == nil {
+		base.Metadata = make(map[string]string)
 	}
 
-	op.Metadata[key] = value
-	op.id = entity.UnsetId
+	base.Metadata[key] = value
+	base.id = entity.UnsetId
 }
 
 // GetMetadata retrieve arbitrary metadata about the operation
-func (op *OpBase) GetMetadata(key string) (string, bool) {
-	val, ok := op.Metadata[key]
+func (base *OpBase) GetMetadata(key string) (string, bool) {
+	val, ok := base.Metadata[key]
 
 	if ok {
 		return val, true
 	}
 
 	// extraMetadata can't replace the original operations value if any
-	val, ok = op.extraMetadata[key]
+	val, ok = base.extraMetadata[key]
 
 	return val, ok
 }
 
 // AllMetadata return all metadata for this operation
-func (op *OpBase) AllMetadata() map[string]string {
+func (base *OpBase) AllMetadata() map[string]string {
 	result := make(map[string]string)
 
-	for key, val := range op.extraMetadata {
+	for key, val := range base.extraMetadata {
 		result[key] = val
 	}
 
 	// Original metadata take precedence
-	for key, val := range op.Metadata {
+	for key, val := range base.Metadata {
 		result[key] = val
 	}
 
 	return result
 }
 
-// GetAuthor return author identity
-func (op *OpBase) GetAuthor() identity.Interface {
-	return op.Author
+func (base *OpBase) setExtraMetadataImmutable(key string, value string) {
+	if base.extraMetadata == nil {
+		base.extraMetadata = make(map[string]string)
+	}
+	if _, exist := base.extraMetadata[key]; !exist {
+		base.extraMetadata[key] = value
+	}
+}
+
+// Author return author identity
+func (base *OpBase) Author() identity.Interface {
+	return base.Author_
 }

bug/operation_iterator.go 🔗

@@ -1,72 +0,0 @@
-package bug
-
-type OperationIterator struct {
-	bug       *Bug
-	packIndex int
-	opIndex   int
-}
-
-func NewOperationIterator(bug Interface) *OperationIterator {
-	return &OperationIterator{
-		bug:       bugFromInterface(bug),
-		packIndex: 0,
-		opIndex:   -1,
-	}
-}
-
-func (it *OperationIterator) Next() bool {
-	// Special case of the staging area
-	if it.packIndex == len(it.bug.packs) {
-		pack := it.bug.staging
-		it.opIndex++
-		return it.opIndex < len(pack.Operations)
-	}
-
-	if it.packIndex >= len(it.bug.packs) {
-		return false
-	}
-
-	pack := it.bug.packs[it.packIndex]
-
-	it.opIndex++
-
-	if it.opIndex < len(pack.Operations) {
-		return true
-	}
-
-	// Note: this iterator doesn't handle the empty pack case
-	it.opIndex = 0
-	it.packIndex++
-
-	// Special case of the non-empty staging area
-	if it.packIndex == len(it.bug.packs) && len(it.bug.staging.Operations) > 0 {
-		return true
-	}
-
-	return it.packIndex < len(it.bug.packs)
-}
-
-func (it *OperationIterator) Value() Operation {
-	// Special case of the staging area
-	if it.packIndex == len(it.bug.packs) {
-		pack := it.bug.staging
-
-		if it.opIndex >= len(pack.Operations) {
-			panic("Iterator is not valid anymore")
-		}
-
-		return pack.Operations[it.opIndex]
-	}
-
-	if it.packIndex >= len(it.bug.packs) {
-		panic("Iterator is not valid anymore")
-	}
-
-	pack := it.bug.packs[it.packIndex]
-
-	if it.opIndex >= len(pack.Operations) {
-		panic("Iterator is not valid anymore")
-	}
-
-	return pack.Operations[it.opIndex]
-}

bug/operation_iterator_test.go 🔗

@@ -1,78 +0,0 @@
-package bug
-
-import (
-	"fmt"
-	"testing"
-	"time"
-
-	"github.com/stretchr/testify/require"
-
-	"github.com/MichaelMure/git-bug/identity"
-	"github.com/MichaelMure/git-bug/repository"
-)
-
-func ExampleOperationIterator() {
-	b := NewBug()
-
-	// add operations
-
-	it := NewOperationIterator(b)
-
-	for it.Next() {
-		// do something with each operations
-		_ = it.Value()
-	}
-}
-
-func TestOpIterator(t *testing.T) {
-	mockRepo := repository.NewMockRepoForTest()
-
-	rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
-	err := rene.Commit(mockRepo)
-	require.NoError(t, err)
-
-	unix := time.Now().Unix()
-
-	createOp := NewCreateOp(rene, unix, "title", "message", nil)
-	addCommentOp := NewAddCommentOp(rene, unix, "message2", nil)
-	setStatusOp := NewSetStatusOp(rene, unix, ClosedStatus)
-	labelChangeOp := NewLabelChangeOperation(rene, unix, []Label{"added"}, []Label{"removed"})
-
-	var i int
-	genTitleOp := func() Operation {
-		i++
-		return NewSetTitleOp(rene, unix, fmt.Sprintf("title%d", i), "")
-	}
-
-	bug1 := NewBug()
-
-	// first pack
-	bug1.Append(createOp)
-	bug1.Append(addCommentOp)
-	bug1.Append(setStatusOp)
-	bug1.Append(labelChangeOp)
-	err = bug1.Commit(mockRepo)
-	require.NoError(t, err)
-
-	// second pack
-	bug1.Append(genTitleOp())
-	bug1.Append(genTitleOp())
-	bug1.Append(genTitleOp())
-	err = bug1.Commit(mockRepo)
-	require.NoError(t, err)
-
-	// staging
-	bug1.Append(genTitleOp())
-	bug1.Append(genTitleOp())
-	bug1.Append(genTitleOp())
-
-	it := NewOperationIterator(bug1)
-
-	counter := 0
-	for it.Next() {
-		_ = it.Value()
-		counter++
-	}
-
-	require.Equal(t, 10, counter)
-}

bug/operation_pack.go 🔗

@@ -1,188 +0,0 @@
-package bug
-
-import (
-	"encoding/json"
-	"fmt"
-
-	"github.com/pkg/errors"
-
-	"github.com/MichaelMure/git-bug/entity"
-	"github.com/MichaelMure/git-bug/repository"
-)
-
-// 1: original format
-// 2: no more legacy identities
-const formatVersion = 2
-
-// OperationPack represent an ordered set of operation to apply
-// to a Bug. These operations are stored in a single Git commit.
-//
-// These commits will be linked together in a linear chain of commits
-// inside Git to form the complete ordered chain of operation to
-// apply to get the final state of the Bug
-type OperationPack struct {
-	Operations []Operation
-
-	// Private field so not serialized
-	commitHash repository.Hash
-}
-
-func (opp *OperationPack) MarshalJSON() ([]byte, error) {
-	return json.Marshal(struct {
-		Version    uint        `json:"version"`
-		Operations []Operation `json:"ops"`
-	}{
-		Version:    formatVersion,
-		Operations: opp.Operations,
-	})
-}
-
-func (opp *OperationPack) UnmarshalJSON(data []byte) error {
-	aux := struct {
-		Version    uint              `json:"version"`
-		Operations []json.RawMessage `json:"ops"`
-	}{}
-
-	if err := json.Unmarshal(data, &aux); err != nil {
-		return err
-	}
-
-	if aux.Version < formatVersion {
-		return entity.NewErrOldFormatVersion(aux.Version)
-	}
-	if aux.Version > formatVersion {
-		return entity.NewErrNewFormatVersion(aux.Version)
-	}
-
-	for _, raw := range aux.Operations {
-		var t struct {
-			OperationType OperationType `json:"type"`
-		}
-
-		if err := json.Unmarshal(raw, &t); err != nil {
-			return err
-		}
-
-		// delegate to specialized unmarshal function
-		op, err := opp.unmarshalOp(raw, t.OperationType)
-		if err != nil {
-			return err
-		}
-
-		opp.Operations = append(opp.Operations, op)
-	}
-
-	return nil
-}
-
-func (opp *OperationPack) unmarshalOp(raw []byte, _type OperationType) (Operation, error) {
-	switch _type {
-	case AddCommentOp:
-		op := &AddCommentOperation{}
-		err := json.Unmarshal(raw, &op)
-		return op, err
-	case CreateOp:
-		op := &CreateOperation{}
-		err := json.Unmarshal(raw, &op)
-		return op, err
-	case EditCommentOp:
-		op := &EditCommentOperation{}
-		err := json.Unmarshal(raw, &op)
-		return op, err
-	case LabelChangeOp:
-		op := &LabelChangeOperation{}
-		err := json.Unmarshal(raw, &op)
-		return op, err
-	case NoOpOp:
-		op := &NoOpOperation{}
-		err := json.Unmarshal(raw, &op)
-		return op, err
-	case SetMetadataOp:
-		op := &SetMetadataOperation{}
-		err := json.Unmarshal(raw, &op)
-		return op, err
-	case SetStatusOp:
-		op := &SetStatusOperation{}
-		err := json.Unmarshal(raw, &op)
-		return op, err
-	case SetTitleOp:
-		op := &SetTitleOperation{}
-		err := json.Unmarshal(raw, &op)
-		return op, err
-	default:
-		return nil, fmt.Errorf("unknown operation type %v", _type)
-	}
-}
-
-// Append a new operation to the pack
-func (opp *OperationPack) Append(op Operation) {
-	opp.Operations = append(opp.Operations, op)
-}
-
-// IsEmpty tell if the OperationPack is empty
-func (opp *OperationPack) IsEmpty() bool {
-	return len(opp.Operations) == 0
-}
-
-// IsValid tell if the OperationPack is considered valid
-func (opp *OperationPack) Validate() error {
-	if opp.IsEmpty() {
-		return fmt.Errorf("empty")
-	}
-
-	for _, op := range opp.Operations {
-		if err := op.Validate(); err != nil {
-			return errors.Wrap(err, "op")
-		}
-	}
-
-	return nil
-}
-
-// Write will serialize and store the OperationPack as a git blob and return
-// its hash
-func (opp *OperationPack) Write(repo repository.ClockedRepo) (repository.Hash, error) {
-	// make sure we don't write invalid data
-	err := opp.Validate()
-	if err != nil {
-		return "", errors.Wrap(err, "validation error")
-	}
-
-	// First, make sure that all the identities are properly Commit as well
-	// TODO: this might be downgraded to "make sure it exist in git" but then, what make
-	// sure no data is lost on identities ?
-	for _, op := range opp.Operations {
-		if op.base().Author.NeedCommit() {
-			return "", fmt.Errorf("identity need commmit")
-		}
-	}
-
-	data, err := json.Marshal(opp)
-
-	if err != nil {
-		return "", err
-	}
-
-	hash, err := repo.StoreData(data)
-
-	if err != nil {
-		return "", err
-	}
-
-	return hash, nil
-}
-
-// Make a deep copy
-func (opp *OperationPack) Clone() OperationPack {
-
-	clone := OperationPack{
-		Operations: make([]Operation, len(opp.Operations)),
-		commitHash: opp.commitHash,
-	}
-
-	for i, op := range opp.Operations {
-		clone.Operations[i] = op
-	}
-
-	return clone
-}

bug/operation_pack_test.go 🔗

@@ -1,79 +0,0 @@
-package bug
-
-import (
-	"encoding/json"
-	"testing"
-	"time"
-
-	"github.com/stretchr/testify/assert"
-	"github.com/stretchr/testify/require"
-
-	"github.com/MichaelMure/git-bug/identity"
-	"github.com/MichaelMure/git-bug/repository"
-)
-
-func TestOperationPackSerialize(t *testing.T) {
-	opp := &OperationPack{}
-
-	repo := repository.NewMockRepoForTest()
-	rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
-	err := rene.Commit(repo)
-	require.NoError(t, err)
-
-	createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil)
-	setTitleOp := NewSetTitleOp(rene, time.Now().Unix(), "title2", "title1")
-	addCommentOp := NewAddCommentOp(rene, time.Now().Unix(), "message2", nil)
-	setStatusOp := NewSetStatusOp(rene, time.Now().Unix(), ClosedStatus)
-	labelChangeOp := NewLabelChangeOperation(rene, time.Now().Unix(), []Label{"added"}, []Label{"removed"})
-
-	opp.Append(createOp)
-	opp.Append(setTitleOp)
-	opp.Append(addCommentOp)
-	opp.Append(setStatusOp)
-	opp.Append(labelChangeOp)
-
-	opMeta := NewSetTitleOp(rene, time.Now().Unix(), "title3", "title2")
-	opMeta.SetMetadata("key", "value")
-	opp.Append(opMeta)
-
-	assert.Equal(t, 1, len(opMeta.Metadata))
-
-	opFile := NewAddCommentOp(rene, time.Now().Unix(), "message", []repository.Hash{
-		"abcdef",
-		"ghijkl",
-	})
-	opp.Append(opFile)
-
-	assert.Equal(t, 2, len(opFile.Files))
-
-	data, err := json.Marshal(opp)
-	assert.NoError(t, err)
-
-	var opp2 *OperationPack
-	err = json.Unmarshal(data, &opp2)
-	assert.NoError(t, err)
-
-	ensureIds(opp)
-	ensureAuthors(t, opp, opp2)
-
-	assert.Equal(t, opp, opp2)
-}
-
-func ensureIds(opp *OperationPack) {
-	for _, op := range opp.Operations {
-		op.Id()
-	}
-}
-
-func ensureAuthors(t *testing.T, opp1 *OperationPack, opp2 *OperationPack) {
-	require.Equal(t, len(opp1.Operations), len(opp2.Operations))
-	for i := 0; i < len(opp1.Operations); i++ {
-		op1 := opp1.Operations[i]
-		op2 := opp2.Operations[i]
-
-		// ensure we have equivalent authors (IdentityStub vs Identity) then
-		// enforce equality
-		require.Equal(t, op1.base().Author.Id(), op2.base().Author.Id())
-		op1.base().Author = op2.base().Author
-	}
-}

bug/operation_test.go 🔗

@@ -11,7 +11,16 @@ import (
 )
 
 func TestValidate(t *testing.T) {
-	rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
+	repo := repository.NewMockRepoClock()
+
+	makeIdentity := func(t *testing.T, name, email string) *identity.Identity {
+		i, err := identity.NewIdentity(repo, name, email)
+		require.NoError(t, err)
+		return i
+	}
+
+	rene := makeIdentity(t, "René Descartes", "rene@descartes.fr")
+
 	unix := time.Now().Unix()
 
 	good := []Operation{
@@ -30,13 +39,13 @@ func TestValidate(t *testing.T) {
 
 	bad := []Operation{
 		// opbase
-		NewSetStatusOp(identity.NewIdentity("", "rene@descartes.fr"), unix, ClosedStatus),
-		NewSetStatusOp(identity.NewIdentity("René Descartes\u001b", "rene@descartes.fr"), unix, ClosedStatus),
-		NewSetStatusOp(identity.NewIdentity("René Descartes", "rene@descartes.fr\u001b"), unix, ClosedStatus),
-		NewSetStatusOp(identity.NewIdentity("René \nDescartes", "rene@descartes.fr"), unix, ClosedStatus),
-		NewSetStatusOp(identity.NewIdentity("René Descartes", "rene@\ndescartes.fr"), unix, ClosedStatus),
+		NewSetStatusOp(makeIdentity(t, "", "rene@descartes.fr"), unix, ClosedStatus),
+		NewSetStatusOp(makeIdentity(t, "René Descartes\u001b", "rene@descartes.fr"), unix, ClosedStatus),
+		NewSetStatusOp(makeIdentity(t, "René Descartes", "rene@descartes.fr\u001b"), unix, ClosedStatus),
+		NewSetStatusOp(makeIdentity(t, "René \nDescartes", "rene@descartes.fr"), unix, ClosedStatus),
+		NewSetStatusOp(makeIdentity(t, "René Descartes", "rene@\ndescartes.fr"), unix, ClosedStatus),
 		&CreateOperation{OpBase: OpBase{
-			Author:        rene,
+			Author_:       rene,
 			UnixTime:      0,
 			OperationType: CreateOp,
 		},
@@ -68,7 +77,11 @@ func TestValidate(t *testing.T) {
 }
 
 func TestMetadata(t *testing.T) {
-	rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
+	repo := repository.NewMockRepoClock()
+
+	rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
+	require.NoError(t, err)
+
 	op := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil)
 
 	op.SetMetadata("key", "value")
@@ -83,13 +96,14 @@ func TestID(t *testing.T) {
 	defer repository.CleanupTestRepos(repo)
 
 	repos := []repository.ClockedRepo{
-		repository.NewMockRepoForTest(),
+		repository.NewMockRepo(),
 		repo,
 	}
 
 	for _, repo := range repos {
-		rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
-		err := rene.Commit(repo)
+		rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
+		require.NoError(t, err)
+		err = rene.Commit(repo)
 		require.NoError(t, err)
 
 		b, op, err := Create(rene, time.Now().Unix(), "title", "message")
@@ -107,7 +121,7 @@ func TestID(t *testing.T) {
 		require.NoError(t, id2.Validate())
 		require.Equal(t, id1, id2)
 
-		b2, err := ReadLocal(repo, b.Id())
+		b2, err := Read(repo, b.Id())
 		require.NoError(t, err)
 
 		op3 := b2.FirstOp()

bug/snapshot.go 🔗

@@ -28,6 +28,11 @@ type Snapshot struct {
 
 // Return the Bug identifier
 func (snap *Snapshot) Id() entity.Id {
+	if snap.id == "" {
+		// simply panic as it would be a coding error
+		// (using an id of a bug not stored yet)
+		panic("no id yet")
+	}
 	return snap.id
 }
 

bug/sorting.go 🔗

@@ -7,11 +7,11 @@ func (b BugsByCreationTime) Len() int {
 }
 
 func (b BugsByCreationTime) Less(i, j int) bool {
-	if b[i].createTime < b[j].createTime {
+	if b[i].CreateLamportTime() < b[j].CreateLamportTime() {
 		return true
 	}
 
-	if b[i].createTime > b[j].createTime {
+	if b[i].CreateLamportTime() > b[j].CreateLamportTime() {
 		return false
 	}
 
@@ -35,11 +35,11 @@ func (b BugsByEditTime) Len() int {
 }
 
 func (b BugsByEditTime) Less(i, j int) bool {
-	if b[i].editTime < b[j].editTime {
+	if b[i].EditLamportTime() < b[j].EditLamportTime() {
 		return true
 	}
 
-	if b[i].editTime > b[j].editTime {
+	if b[i].EditLamportTime() > b[j].EditLamportTime() {
 		return false
 	}
 

bug/with_snapshot.go 🔗

@@ -47,12 +47,6 @@ func (b *WithSnapshot) Commit(repo repository.ClockedRepo) error {
 		return nil
 	}
 
-	b.snap.id = b.Bug.id
+	b.snap.id = b.Bug.Id()
 	return nil
 }
-
-// Merge intercept Bug.Merge() and clear the snapshot
-func (b *WithSnapshot) Merge(repo repository.Repo, other Interface) (bool, error) {
-	b.snap = nil
-	return b.Bug.Merge(repo, other)
-}

cache/bug_cache.go 🔗

@@ -51,9 +51,7 @@ func (c *BugCache) ResolveOperationWithMetadata(key string, value string) (entit
 	// preallocate but empty
 	matching := make([]entity.Id, 0, 5)
 
-	it := bug.NewOperationIterator(c.bug)
-	for it.Next() {
-		op := it.Value()
+	for _, op := range c.bug.Operations() {
 		opValue, ok := op.GetMetadata(key)
 		if ok && value == opValue {
 			matching = append(matching, op.Id())

cache/bug_excerpt.go 🔗

@@ -87,7 +87,7 @@ func NewBugExcerpt(b bug.Interface, snap *bug.Snapshot) *BugExcerpt {
 	}
 
 	switch snap.Author.(type) {
-	case *identity.Identity, *IdentityCache:
+	case *identity.Identity, *identity.IdentityStub, *IdentityCache:
 		e.AuthorId = snap.Author.Id()
 	default:
 		panic("unhandled identity type")

cache/filter.go 🔗

@@ -153,6 +153,9 @@ func compileMatcher(filters query.Filters) *Matcher {
 	for _, value := range filters.Title {
 		result.Title = append(result.Title, TitleFilter(value))
 	}
+	if filters.NoLabel {
+		result.NoFilters = append(result.NoFilters, NoLabelFilter())
+	}
 
 	return result
 }

cache/identity_cache.go 🔗

@@ -2,6 +2,7 @@ package cache
 
 import (
 	"github.com/MichaelMure/git-bug/identity"
+	"github.com/MichaelMure/git-bug/repository"
 )
 
 var _ identity.Interface = &IdentityCache{}
@@ -23,8 +24,11 @@ func (i *IdentityCache) notifyUpdated() error {
 	return i.repoCache.identityUpdated(i.Identity.Id())
 }
 
-func (i *IdentityCache) Mutate(f func(identity.Mutator) identity.Mutator) error {
-	i.Identity.Mutate(f)
+func (i *IdentityCache) Mutate(repo repository.RepoClock, f func(*identity.Mutator)) error {
+	err := i.Identity.Mutate(repo, f)
+	if err != nil {
+		return err
+	}
 	return i.notifyUpdated()
 }
 

cache/repo_cache.go 🔗

@@ -18,7 +18,8 @@ import (
 // 1: original format
 // 2: added cache for identities with a reference in the bug cache
 // 3: no more legacy identity
-const formatVersion = 3
+// 4: entities make their IDs from data, not git commit
+const formatVersion = 4
 
 // The maximum number of bugs loaded in memory. After that, eviction will be done.
 const defaultMaxLoadedBugs = 1000
@@ -194,7 +195,7 @@ func (c *RepoCache) buildCache() error {
 
 	c.bugExcerpts = make(map[entity.Id]*BugExcerpt)
 
-	allBugs := bug.ReadAllLocal(c.repo)
+	allBugs := bug.ReadAllWithResolver(c.repo, newIdentityCacheResolverNoLock(c))
 
 	// wipe the index just to be sure
 	err := c.repo.ClearBleveIndex("bug")

cache/repo_cache_bug.go 🔗

@@ -8,18 +8,17 @@ import (
 	"sort"
 	"strings"
 	"time"
+	"unicode/utf8"
+
+	"github.com/blevesearch/bleve"
 
 	"github.com/MichaelMure/git-bug/bug"
 	"github.com/MichaelMure/git-bug/entity"
 	"github.com/MichaelMure/git-bug/query"
 	"github.com/MichaelMure/git-bug/repository"
-	"github.com/blevesearch/bleve"
 )
 
-const (
-	bugCacheFile   = "bug-cache"
-	searchCacheDir = "search-cache"
-)
+const bugCacheFile = "bug-cache"
 
 var errBugNotInCache = errors.New("bug missing from cache")
 
@@ -154,7 +153,7 @@ func (c *RepoCache) ResolveBug(id entity.Id) (*BugCache, error) {
 	}
 	c.muBug.RUnlock()
 
-	b, err := bug.ReadLocalWithResolver(c.repo, newIdentityCacheResolver(c), id)
+	b, err := bug.ReadWithResolver(c.repo, newIdentityCacheResolver(c), id)
 	if err != nil {
 		return nil, err
 	}
@@ -261,6 +260,53 @@ func (c *RepoCache) resolveBugMatcher(f func(*BugExcerpt) bool) (entity.Id, erro
 	return matching[0], nil
 }
 
+// ResolveComment search for a Bug/Comment combination matching the merged
+// bug/comment Id prefix. Returns the Bug containing the Comment and the Comment's
+// Id.
+func (c *RepoCache) ResolveComment(prefix string) (*BugCache, entity.Id, error) {
+	bugPrefix, _ := entity.SeparateIds(prefix)
+	bugCandidate := make([]entity.Id, 0, 5)
+
+	// build a list of possible matching bugs
+	c.muBug.RLock()
+	for _, excerpt := range c.bugExcerpts {
+		if excerpt.Id.HasPrefix(bugPrefix) {
+			bugCandidate = append(bugCandidate, excerpt.Id)
+		}
+	}
+	c.muBug.RUnlock()
+
+	matchingBugIds := make([]entity.Id, 0, 5)
+	matchingCommentId := entity.UnsetId
+	var matchingBug *BugCache
+
+	// search for matching comments
+	// searching every bug candidate allow for some collision with the bug prefix only,
+	// before being refined with the full comment prefix
+	for _, bugId := range bugCandidate {
+		b, err := c.ResolveBug(bugId)
+		if err != nil {
+			return nil, entity.UnsetId, err
+		}
+
+		for _, comment := range b.Snapshot().Comments {
+			if comment.Id().HasPrefix(prefix) {
+				matchingBugIds = append(matchingBugIds, bugId)
+				matchingBug = b
+				matchingCommentId = comment.Id()
+			}
+		}
+	}
+
+	if len(matchingBugIds) > 1 {
+		return nil, entity.UnsetId, entity.NewErrMultipleMatch("bug/comment", matchingBugIds)
+	} else if len(matchingBugIds) == 0 {
+		return nil, entity.UnsetId, errors.New("comment doesn't exist")
+	}
+
+	return matchingBug, matchingCommentId, nil
+}
+
 // QueryBugs return the id of all Bug matching the given Query
 func (c *RepoCache) QueryBugs(q *query.Query) ([]entity.Id, error) {
 	c.muBug.RLock()
@@ -479,11 +525,24 @@ func (c *RepoCache) addBugToSearchIndex(snap *bug.Snapshot) error {
 		Text []string
 	}{}
 
+	// See https://github.com/blevesearch/bleve/issues/1576
+	var sb strings.Builder
+	normalize := func(text string) string {
+		sb.Reset()
+		for _, field := range strings.Fields(text) {
+			if utf8.RuneCountInString(field) < 100 {
+				sb.WriteString(field)
+				sb.WriteRune(' ')
+			}
+		}
+		return sb.String()
+	}
+
 	for _, comment := range snap.Comments {
-		searchableBug.Text = append(searchableBug.Text, comment.Message)
+		searchableBug.Text = append(searchableBug.Text, normalize(comment.Message))
 	}
 
-	searchableBug.Text = append(searchableBug.Text, snap.Title)
+	searchableBug.Text = append(searchableBug.Text, normalize(snap.Title))
 
 	index, err := c.repo.GetBleveIndex("bug")
 	if err != nil {

cache/repo_cache_common.go 🔗

@@ -95,6 +95,12 @@ func (c *RepoCache) MergeAll(remote string) <-chan entity.MergeResult {
 	go func() {
 		defer close(out)
 
+		author, err := c.GetUserIdentity()
+		if err != nil {
+			out <- entity.NewMergeError(err, "")
+			return
+		}
+
 		results := identity.MergeAll(c.repo, remote)
 		for result := range results {
 			out <- result
@@ -112,7 +118,7 @@ func (c *RepoCache) MergeAll(remote string) <-chan entity.MergeResult {
 			}
 		}
 
-		results = bug.MergeAll(c.repo, remote)
+		results = bug.MergeAll(c.repo, remote, author)
 		for result := range results {
 			out <- result
 
@@ -130,11 +136,10 @@ func (c *RepoCache) MergeAll(remote string) <-chan entity.MergeResult {
 			}
 		}
 
-		err := c.write()
-
-		// No easy way out here ..
+		err = c.write()
 		if err != nil {
-			panic(err)
+			out <- entity.NewMergeError(err, "")
+			return
 		}
 	}()
 

cache/repo_cache_identity.go 🔗

@@ -225,17 +225,20 @@ func (c *RepoCache) NewIdentityFromGitUserRaw(metadata map[string]string) (*Iden
 // NewIdentity create a new identity
 // The new identity is written in the repository (commit)
 func (c *RepoCache) NewIdentity(name string, email string) (*IdentityCache, error) {
-	return c.NewIdentityRaw(name, email, "", "", nil)
+	return c.NewIdentityRaw(name, email, "", "", nil, nil)
 }
 
 // NewIdentityFull create a new identity
 // The new identity is written in the repository (commit)
-func (c *RepoCache) NewIdentityFull(name string, email string, login string, avatarUrl string) (*IdentityCache, error) {
-	return c.NewIdentityRaw(name, email, login, avatarUrl, nil)
+func (c *RepoCache) NewIdentityFull(name string, email string, login string, avatarUrl string, keys []*identity.Key) (*IdentityCache, error) {
+	return c.NewIdentityRaw(name, email, login, avatarUrl, keys, nil)
 }
 
-func (c *RepoCache) NewIdentityRaw(name string, email string, login string, avatarUrl string, metadata map[string]string) (*IdentityCache, error) {
-	i := identity.NewIdentityFull(name, email, login, avatarUrl)
+func (c *RepoCache) NewIdentityRaw(name string, email string, login string, avatarUrl string, keys []*identity.Key, metadata map[string]string) (*IdentityCache, error) {
+	i, err := identity.NewIdentityFull(c.repo, name, email, login, avatarUrl, keys)
+	if err != nil {
+		return nil, err
+	}
 	return c.finishIdentity(i, metadata)
 }
 

cache/repo_cache_test.go 🔗

@@ -1,7 +1,9 @@
 package cache
 
 import (
+	"strings"
 	"testing"
+	"time"
 
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
@@ -84,11 +86,12 @@ func TestCache(t *testing.T) {
 	require.Empty(t, cache.identities)
 	require.Empty(t, cache.identitiesExcerpts)
 
-	// Reload, only excerpt are loaded
+	// Reload, only excerpts are loaded, but as we need to load the identities used in the bugs
+	// to check the signatures, we also load the identity used above
 	cache, err = NewRepoCache(repo)
 	require.NoError(t, err)
 	require.Empty(t, cache.bugs)
-	require.Empty(t, cache.identities)
+	require.Len(t, cache.identities, 1)
 	require.Len(t, cache.bugExcerpts, 2)
 	require.Len(t, cache.identitiesExcerpts, 2)
 
@@ -108,8 +111,8 @@ func TestCache(t *testing.T) {
 	require.NoError(t, err)
 }
 
-func TestPushPull(t *testing.T) {
-	repoA, repoB, remote := repository.SetupReposAndRemote()
+func TestCachePushPull(t *testing.T) {
+	repoA, repoB, remote := repository.SetupGoGitReposAndRemote()
 	defer repository.CleanupTestRepos(repoA, repoB, remote)
 
 	cacheA, err := NewRepoCache(repoA)
@@ -123,6 +126,10 @@ func TestPushPull(t *testing.T) {
 	require.NoError(t, err)
 	err = cacheA.SetUserIdentity(reneA)
 	require.NoError(t, err)
+	isaacB, err := cacheB.NewIdentity("Isaac Newton", "isaac@newton.uk")
+	require.NoError(t, err)
+	err = cacheB.SetUserIdentity(isaacB)
+	require.NoError(t, err)
 
 	// distribute the identity
 	_, err = cacheA.Push("origin")
@@ -274,3 +281,21 @@ func checkBugPresence(t *testing.T, cache *RepoCache, bug *BugCache, presence bo
 		require.Equal(t, bug, b)
 	}
 }
+
+func TestLongDescription(t *testing.T) {
+	// See https://github.com/MichaelMure/git-bug/issues/606
+
+	text := strings.Repeat("x", 65536)
+
+	repo := repository.CreateGoGitTestRepo(false)
+	defer repository.CleanupTestRepos(repo)
+
+	backend, err := NewRepoCache(repo)
+	require.NoError(t, err)
+
+	i, err := backend.NewIdentity("René Descartes", "rene@descartes.fr")
+	require.NoError(t, err)
+
+	_, _, err = backend.NewBugRaw(i, time.Now().Unix(), text, text, nil, nil)
+	require.NoError(t, err)
+}

cache/resolvers.go 🔗

@@ -20,3 +20,32 @@ func newIdentityCacheResolver(cache *RepoCache) *identityCacheResolver {
 func (i *identityCacheResolver) ResolveIdentity(id entity.Id) (identity.Interface, error) {
 	return i.cache.ResolveIdentity(id)
 }
+
+var _ identity.Resolver = &identityCacheResolverNoLock{}
+
+// identityCacheResolverNoLock is an identity Resolver that retrieves identities from
+// the cache, without locking it.
+type identityCacheResolverNoLock struct {
+	cache *RepoCache
+}
+
+func newIdentityCacheResolverNoLock(cache *RepoCache) *identityCacheResolverNoLock {
+	return &identityCacheResolverNoLock{cache: cache}
+}
+
+func (ir *identityCacheResolverNoLock) ResolveIdentity(id entity.Id) (identity.Interface, error) {
+	cached, ok := ir.cache.identities[id]
+	if ok {
+		return cached, nil
+	}
+
+	i, err := identity.ReadLocal(ir.cache.repo, id)
+	if err != nil {
+		return nil, err
+	}
+
+	cached = NewIdentityCache(ir.cache, i)
+	ir.cache.identities[id] = cached
+
+	return cached, nil
+}

commands/comment.go 🔗

@@ -22,6 +22,7 @@ func newCommentCommand() *cobra.Command {
 	}
 
 	cmd.AddCommand(newCommentAddCommand())
+	cmd.AddCommand(newCommentEditCommand())
 
 	return cmd
 }

commands/comment_edit.go 🔗

@@ -0,0 +1,71 @@
+package commands
+
+import (
+	"github.com/spf13/cobra"
+
+	"github.com/MichaelMure/git-bug/input"
+)
+
+type commentEditOptions struct {
+	messageFile string
+	message     string
+}
+
+func newCommentEditCommand() *cobra.Command {
+	env := newEnv()
+	options := commentEditOptions{}
+
+	cmd := &cobra.Command{
+		Use:      "edit [COMMENT_ID]",
+		Short:    "Edit an existing comment on a bug.",
+		Args:     cobra.ExactArgs(1),
+		PreRunE:  loadBackendEnsureUser(env),
+		PostRunE: closeBackend(env),
+		RunE: func(cmd *cobra.Command, args []string) error {
+			return runCommentEdit(env, options, args)
+		},
+	}
+
+	flags := cmd.Flags()
+	flags.SortFlags = false
+
+	flags.StringVarP(&options.messageFile, "file", "F", "",
+		"Take the message from the given file. Use - to read the message from the standard input")
+
+	flags.StringVarP(&options.message, "message", "m", "",
+		"Provide the new message from the command line")
+
+	return cmd
+}
+
+func runCommentEdit(env *Env, opts commentEditOptions, args []string) error {
+	b, commentId, err := env.backend.ResolveComment(args[0])
+	if err != nil {
+		return err
+	}
+
+	if opts.messageFile != "" && opts.message == "" {
+		opts.message, err = input.BugCommentFileInput(opts.messageFile)
+		if err != nil {
+			return err
+		}
+	}
+
+	if opts.messageFile == "" && opts.message == "" {
+		opts.message, err = input.BugCommentEditorInput(env.backend, "")
+		if err == input.ErrEmptyMessage {
+			env.err.Println("Empty message, aborting.")
+			return nil
+		}
+		if err != nil {
+			return err
+		}
+	}
+
+	_, err = b.EditComment(commentId, opts.message)
+	if err != nil {
+		return err
+	}
+
+	return b.Commit()
+}

commands/show.go 🔗

@@ -158,8 +158,9 @@ func showDefaultFormatter(env *Env, snapshot *bug.Snapshot) error {
 
 	for i, comment := range snapshot.Comments {
 		var message string
-		env.out.Printf("%s#%d %s <%s>\n\n",
+		env.out.Printf("%s%s #%d %s <%s>\n\n",
 			indent,
+			comment.Id().Human(),
 			i,
 			comment.Author.DisplayName(),
 			comment.Author.Email(),

commands/user.go 🔗

@@ -35,7 +35,7 @@ func newUserCommand() *cobra.Command {
 	flags.SortFlags = false
 
 	flags.StringVarP(&options.fields, "field", "f", "",
-		"Select field to display. Valid values are [email,humanId,id,lastModification,lastModificationLamport,login,metadata,name]")
+		"Select field to display. Valid values are [email,humanId,id,lastModification,lastModificationLamports,login,metadata,name]")
 
 	return cmd
 }
@@ -71,7 +71,9 @@ func runUser(env *Env, opts userOptions, args []string) error {
 			env.out.Printf("%s\n", id.LastModification().
 				Time().Format("Mon Jan 2 15:04:05 2006 +0200"))
 		case "lastModificationLamport":
-			env.out.Printf("%d\n", id.LastModificationLamport())
+			for name, t := range id.LastModificationLamports() {
+				env.out.Printf("%s\n%d\n", name, t)
+			}
 		case "metadata":
 			for key, value := range id.ImmutableMetadata() {
 				env.out.Printf("%s\n%s\n", key, value)
@@ -90,9 +92,11 @@ func runUser(env *Env, opts userOptions, args []string) error {
 	env.out.Printf("Name: %s\n", id.Name())
 	env.out.Printf("Email: %s\n", id.Email())
 	env.out.Printf("Login: %s\n", id.Login())
-	env.out.Printf("Last modification: %s (lamport %d)\n",
-		id.LastModification().Time().Format("Mon Jan 2 15:04:05 2006 +0200"),
-		id.LastModificationLamport())
+	env.out.Printf("Last modification: %s\n", id.LastModification().Time().Format("Mon Jan 2 15:04:05 2006 +0200"))
+	env.out.Printf("Last modification (lamport):\n")
+	for name, t := range id.LastModificationLamports() {
+		env.out.Printf("\t%s: %d", name, t)
+	}
 	env.out.Println("Metadata:")
 	for key, value := range id.ImmutableMetadata() {
 		env.out.Printf("    %s --> %s\n", key, value)

commands/user_create.go 🔗

@@ -48,7 +48,7 @@ func runUserCreate(env *Env) error {
 		return err
 	}
 
-	id, err := env.backend.NewIdentityRaw(name, email, "", avatarURL, nil)
+	id, err := env.backend.NewIdentityRaw(name, email, "", avatarURL, nil, nil)
 	if err != nil {
 		return err
 	}

commands/webui.go 🔗

@@ -4,9 +4,12 @@ import (
 	"context"
 	"fmt"
 	"log"
+	"net"
 	"net/http"
+	"net/url"
 	"os"
 	"os/signal"
+	"strconv"
 	"time"
 
 	"github.com/99designs/gqlgen/graphql/playground"
@@ -27,10 +30,12 @@ import (
 const webUIOpenConfigKey = "git-bug.webui.open"
 
 type webUIOptions struct {
+	host     string
 	port     int
 	open     bool
 	noOpen   bool
 	readOnly bool
+	query    string
 }
 
 func newWebUICommand() *cobra.Command {
@@ -54,10 +59,12 @@ Available git config:
 	flags := cmd.Flags()
 	flags.SortFlags = false
 
+	flags.StringVar(&options.host, "host", "127.0.0.1", "Network address or hostname to listen to (default to 127.0.0.1)")
 	flags.BoolVar(&options.open, "open", false, "Automatically open the web UI in the default browser")
 	flags.BoolVar(&options.noOpen, "no-open", false, "Prevent the automatic opening of the web UI in the default browser")
-	flags.IntVarP(&options.port, "port", "p", 0, "Port to listen to (default is random)")
+	flags.IntVarP(&options.port, "port", "p", 0, "Port to listen to (default to random available port)")
 	flags.BoolVar(&options.readOnly, "read-only", false, "Whether to run the web UI in read-only mode")
+	flags.StringVarP(&options.query, "query", "q", "", "The query to open in the web UI bug list")
 
 	return cmd
 }
@@ -71,8 +78,14 @@ func runWebUI(env *Env, opts webUIOptions, args []string) error {
 		}
 	}
 
-	addr := fmt.Sprintf("127.0.0.1:%d", opts.port)
+	addr := net.JoinHostPort(opts.host, strconv.Itoa(opts.port))
 	webUiAddr := fmt.Sprintf("http://%s", addr)
+	toOpen := webUiAddr
+
+	if len(opts.query) > 0 {
+		// Explicitly set the query parameter instead of going with a default one.
+		toOpen = fmt.Sprintf("%s/?q=%s", webUiAddr, url.QueryEscape(opts.query))
+	}
 
 	router := mux.NewRouter()
 
@@ -150,7 +163,7 @@ func runWebUI(env *Env, opts webUIOptions, args []string) error {
 	shouldOpen := (configOpen && !opts.noOpen) || opts.open
 
 	if shouldOpen {
-		err = open.Run(webUiAddr)
+		err = open.Run(toOpen)
 		if err != nil {
 			env.out.Println(err)
 		}

doc/man/git-bug-comment-edit.1 🔗

@@ -0,0 +1,35 @@
+.nh
+.TH "GIT\-BUG" "1" "Apr 2019" "Generated from git\-bug's source code" ""
+
+.SH NAME
+.PP
+git\-bug\-comment\-edit \- Edit an existing comment on a bug.
+
+
+.SH SYNOPSIS
+.PP
+\fBgit\-bug comment edit [COMMENT\_ID] [flags]\fP
+
+
+.SH DESCRIPTION
+.PP
+Edit an existing comment on a bug.
+
+
+.SH OPTIONS
+.PP
+\fB\-F\fP, \fB\-\-file\fP=""
+	Take the message from the given file. Use \- to read the message from the standard input
+
+.PP
+\fB\-m\fP, \fB\-\-message\fP=""
+	Provide the new message from the command line
+
+.PP
+\fB\-h\fP, \fB\-\-help\fP[=false]
+	help for edit
+
+
+.SH SEE ALSO
+.PP
+\fBgit\-bug\-comment(1)\fP

doc/man/git-bug-comment.1 🔗

@@ -24,4 +24,4 @@ Display or add comments to a bug.
 
 .SH SEE ALSO
 .PP
-\fBgit\-bug(1)\fP, \fBgit\-bug\-comment\-add(1)\fP
+\fBgit\-bug(1)\fP, \fBgit\-bug\-comment\-add(1)\fP, \fBgit\-bug\-comment\-edit(1)\fP

doc/man/git-bug-user.1 🔗

@@ -19,7 +19,7 @@ Display or change the user identity.
 .SH OPTIONS
 .PP
 \fB\-f\fP, \fB\-\-field\fP=""
-	Select field to display. Valid values are [email,humanId,id,lastModification,lastModificationLamport,login,metadata,name]
+	Select field to display. Valid values are [email,humanId,id,lastModification,lastModificationLamports,login,metadata,name]
 
 .PP
 \fB\-h\fP, \fB\-\-help\fP[=false]

doc/man/git-bug-webui.1 🔗

@@ -21,6 +21,10 @@ Available git config:
 
 
 .SH OPTIONS
+.PP
+\fB\-\-host\fP="127.0.0.1"
+	Network address or hostname to listen to (default to 127.0.0.1)
+
 .PP
 \fB\-\-open\fP[=false]
 	Automatically open the web UI in the default browser
@@ -31,12 +35,16 @@ Available git config:
 
 .PP
 \fB\-p\fP, \fB\-\-port\fP=0
-	Port to listen to (default is random)
+	Port to listen to (default to random available port)
 
 .PP
 \fB\-\-read\-only\fP[=false]
 	Whether to run the web UI in read\-only mode
 
+.PP
+\fB\-q\fP, \fB\-\-query\fP=""
+	The query to open in the web UI bug list
+
 .PP
 \fB\-h\fP, \fB\-\-help\fP[=false]
 	help for webui

doc/md/git-bug_comment.md 🔗

@@ -16,4 +16,5 @@ git-bug comment [ID] [flags]
 
 * [git-bug](git-bug.md)	 - A bug tracker embedded in Git.
 * [git-bug comment add](git-bug_comment_add.md)	 - Add a new comment to a bug.
+* [git-bug comment edit](git-bug_comment_edit.md)	 - Edit an existing comment on a bug.
 

doc/md/git-bug_comment_edit.md 🔗

@@ -0,0 +1,20 @@
+## git-bug comment edit
+
+Edit an existing comment on a bug.
+
+```
+git-bug comment edit [COMMENT_ID] [flags]
+```
+
+### Options
+
+```
+  -F, --file string      Take the message from the given file. Use - to read the message from the standard input
+  -m, --message string   Provide the new message from the command line
+  -h, --help             help for edit
+```
+
+### SEE ALSO
+
+* [git-bug comment](git-bug_comment.md)	 - Display or add comments to a bug.
+

doc/md/git-bug_user.md 🔗

@@ -9,7 +9,7 @@ git-bug user [USER-ID] [flags]
 ### Options
 
 ```
-  -f, --field string   Select field to display. Valid values are [email,humanId,id,lastModification,lastModificationLamport,login,metadata,name]
+  -f, --field string   Select field to display. Valid values are [email,humanId,id,lastModification,lastModificationLamports,login,metadata,name]
   -h, --help           help for user
 ```
 

doc/md/git-bug_webui.md 🔗

@@ -17,11 +17,13 @@ git-bug webui [flags]
 ### Options
 
 ```
-      --open        Automatically open the web UI in the default browser
-      --no-open     Prevent the automatic opening of the web UI in the default browser
-  -p, --port int    Port to listen to (default is random)
-      --read-only   Whether to run the web UI in read-only mode
-  -h, --help        help for webui
+      --host string    Network address or hostname to listen to (default to 127.0.0.1) (default "127.0.0.1")
+      --open           Automatically open the web UI in the default browser
+      --no-open        Prevent the automatic opening of the web UI in the default browser
+  -p, --port int       Port to listen to (default to random available port)
+      --read-only      Whether to run the web UI in read-only mode
+  -q, --query string   The query to open in the web UI bug list
+  -h, --help           help for webui
 ```
 
 ### SEE ALSO

entity/dag/clock.go 🔗

@@ -0,0 +1,38 @@
+package dag
+
+import (
+	"fmt"
+
+	"github.com/MichaelMure/git-bug/identity"
+	"github.com/MichaelMure/git-bug/repository"
+)
+
+// ClockLoader is the repository.ClockLoader for Entity
+func ClockLoader(defs ...Definition) repository.ClockLoader {
+	clocks := make([]string, 0, len(defs)*2)
+	for _, def := range defs {
+		clocks = append(clocks, fmt.Sprintf(creationClockPattern, def.Namespace))
+		clocks = append(clocks, fmt.Sprintf(editClockPattern, def.Namespace))
+	}
+
+	return repository.ClockLoader{
+		Clocks: clocks,
+		Witnesser: func(repo repository.ClockedRepo) error {
+			// we need to actually load the identities because of the commit signature check when reading,
+			// which requires the full identities with crypto keys
+			resolver := identity.NewCachedResolver(identity.NewSimpleResolver(repo))
+
+			for _, def := range defs {
+				// we actually just need to read all entities,
+				// as that will create and update the clocks
+				// TODO: concurrent loading to be faster?
+				for b := range ReadAll(def, repo, resolver) {
+					if b.Err != nil {
+						return b.Err
+					}
+				}
+			}
+			return nil
+		},
+	}
+}

entity/dag/common_test.go 🔗

@@ -0,0 +1,173 @@
+package dag
+
+import (
+	"encoding/json"
+	"fmt"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+
+	"github.com/MichaelMure/git-bug/entity"
+	"github.com/MichaelMure/git-bug/identity"
+	"github.com/MichaelMure/git-bug/repository"
+)
+
+// This file contains an example dummy entity to be used in the tests
+
+/*
+ Operations
+*/
+
+type op1 struct {
+	author identity.Interface
+
+	OperationType int               `json:"type"`
+	Field1        string            `json:"field_1"`
+	Files         []repository.Hash `json:"files"`
+}
+
+func newOp1(author identity.Interface, field1 string, files ...repository.Hash) *op1 {
+	return &op1{author: author, OperationType: 1, Field1: field1, Files: files}
+}
+
+func (o *op1) Id() entity.Id {
+	data, _ := json.Marshal(o)
+	return entity.DeriveId(data)
+}
+
+func (o *op1) Validate() error { return nil }
+
+func (o *op1) Author() identity.Interface {
+	return o.author
+}
+
+func (o *op1) GetFiles() []repository.Hash {
+	return o.Files
+}
+
+type op2 struct {
+	author identity.Interface
+
+	OperationType int    `json:"type"`
+	Field2        string `json:"field_2"`
+}
+
+func newOp2(author identity.Interface, field2 string) *op2 {
+	return &op2{author: author, OperationType: 2, Field2: field2}
+}
+
+func (o *op2) Id() entity.Id {
+	data, _ := json.Marshal(o)
+	return entity.DeriveId(data)
+}
+
+func (o *op2) Validate() error { return nil }
+
+func (o *op2) Author() identity.Interface {
+	return o.author
+}
+
+func unmarshaler(author identity.Interface, raw json.RawMessage) (Operation, error) {
+	var t struct {
+		OperationType int `json:"type"`
+	}
+
+	if err := json.Unmarshal(raw, &t); err != nil {
+		return nil, err
+	}
+
+	switch t.OperationType {
+	case 1:
+		op := &op1{}
+		err := json.Unmarshal(raw, &op)
+		op.author = author
+		return op, err
+	case 2:
+		op := &op2{}
+		err := json.Unmarshal(raw, &op)
+		op.author = author
+		return op, err
+	default:
+		return nil, fmt.Errorf("unknown operation type %v", t.OperationType)
+	}
+}
+
+/*
+  Identities + repo + definition
+*/
+
+func makeTestContext() (repository.ClockedRepo, identity.Interface, identity.Interface, identity.Resolver, Definition) {
+	repo := repository.NewMockRepo()
+	id1, id2, resolver, def := makeTestContextInternal(repo)
+	return repo, id1, id2, resolver, def
+}
+
+func makeTestContextRemote(t *testing.T) (repository.ClockedRepo, repository.ClockedRepo, repository.ClockedRepo, identity.Interface, identity.Interface, identity.Resolver, Definition) {
+	repoA := repository.CreateGoGitTestRepo(false)
+	repoB := repository.CreateGoGitTestRepo(false)
+	remote := repository.CreateGoGitTestRepo(true)
+
+	err := repoA.AddRemote("remote", remote.GetLocalRemote())
+	require.NoError(t, err)
+	err = repoA.AddRemote("repoB", repoB.GetLocalRemote())
+	require.NoError(t, err)
+	err = repoB.AddRemote("remote", remote.GetLocalRemote())
+	require.NoError(t, err)
+	err = repoB.AddRemote("repoA", repoA.GetLocalRemote())
+	require.NoError(t, err)
+
+	id1, id2, resolver, def := makeTestContextInternal(repoA)
+
+	// distribute the identities
+	_, err = identity.Push(repoA, "remote")
+	require.NoError(t, err)
+	err = identity.Pull(repoB, "remote")
+	require.NoError(t, err)
+
+	return repoA, repoB, remote, id1, id2, resolver, def
+}
+
+func makeTestContextInternal(repo repository.ClockedRepo) (identity.Interface, identity.Interface, identity.Resolver, Definition) {
+	id1, err := identity.NewIdentity(repo, "name1", "email1")
+	if err != nil {
+		panic(err)
+	}
+	err = id1.Commit(repo)
+	if err != nil {
+		panic(err)
+	}
+	id2, err := identity.NewIdentity(repo, "name2", "email2")
+	if err != nil {
+		panic(err)
+	}
+	err = id2.Commit(repo)
+	if err != nil {
+		panic(err)
+	}
+
+	resolver := identityResolverFunc(func(id entity.Id) (identity.Interface, error) {
+		switch id {
+		case id1.Id():
+			return id1, nil
+		case id2.Id():
+			return id2, nil
+		default:
+			return nil, identity.ErrIdentityNotExist
+		}
+	})
+
+	def := Definition{
+		Typename:             "foo",
+		Namespace:            "foos",
+		OperationUnmarshaler: unmarshaler,
+		FormatVersion:        1,
+	}
+
+	return id1, id2, resolver, def
+}
+
+type identityResolverFunc func(id entity.Id) (identity.Interface, error)
+
+func (fn identityResolverFunc) ResolveIdentity(id entity.Id) (identity.Interface, error) {
+	return fn(id)
+}

entity/dag/entity.go 🔗

@@ -0,0 +1,439 @@
+// Package dag contains the base common code to define an entity stored
+// in a chain of git objects, supporting actions like Push, Pull and Merge.
+package dag
+
+import (
+	"encoding/json"
+	"fmt"
+	"sort"
+
+	"github.com/pkg/errors"
+
+	"github.com/MichaelMure/git-bug/entity"
+	"github.com/MichaelMure/git-bug/identity"
+	"github.com/MichaelMure/git-bug/repository"
+	"github.com/MichaelMure/git-bug/util/lamport"
+)
+
+const refsPattern = "refs/%s/%s"
+const creationClockPattern = "%s-create"
+const editClockPattern = "%s-edit"
+
+// Definition hold the details defining one specialization of an Entity.
+type Definition struct {
+	// the name of the entity (bug, pull-request, ...)
+	Typename string
+	// the Namespace in git (bugs, prs, ...)
+	Namespace string
+	// a function decoding a JSON message into an Operation
+	OperationUnmarshaler func(author identity.Interface, raw json.RawMessage) (Operation, error)
+	// the expected format version number, that can be used for data migration/upgrade
+	FormatVersion uint
+}
+
+// Entity is a data structure stored in a chain of git objects, supporting actions like Push, Pull and Merge.
+type Entity struct {
+	// A Lamport clock is a logical clock that allows ordering events
+	// inside a distributed system.
+	// It must be the first field in this struct due to https://github.com/golang/go/issues/36606
+	createTime lamport.Time
+	editTime   lamport.Time
+
+	Definition
+
+	// operations that are already stored in the repository
+	ops []Operation
+	// operations not yet stored in the repository
+	staging []Operation
+
+	lastCommit repository.Hash
+}
+
+// New create an empty Entity
+func New(definition Definition) *Entity {
+	return &Entity{
+		Definition: definition,
+	}
+}
+
+// Read will read and decode a stored local Entity from a repository
+func Read(def Definition, repo repository.ClockedRepo, resolver identity.Resolver, id entity.Id) (*Entity, error) {
+	if err := id.Validate(); err != nil {
+		return nil, errors.Wrap(err, "invalid id")
+	}
+
+	ref := fmt.Sprintf("refs/%s/%s", def.Namespace, id.String())
+
+	return read(def, repo, resolver, ref)
+}
+
+// readRemote will read and decode a stored remote Entity from a repository
+func readRemote(def Definition, repo repository.ClockedRepo, resolver identity.Resolver, remote string, id entity.Id) (*Entity, error) {
+	if err := id.Validate(); err != nil {
+		return nil, errors.Wrap(err, "invalid id")
+	}
+
+	ref := fmt.Sprintf("refs/remotes/%s/%s/%s", def.Namespace, remote, id.String())
+
+	return read(def, repo, resolver, ref)
+}
+
+// read fetch from git and decode an Entity at an arbitrary git reference.
+func read(def Definition, repo repository.ClockedRepo, resolver identity.Resolver, ref string) (*Entity, error) {
+	rootHash, err := repo.ResolveRef(ref)
+	if err != nil {
+		return nil, err
+	}
+
+	// Perform a breadth-first search to get a topological order of the DAG where we discover the
+	// parents commit and go back in time up to the chronological root
+
+	queue := make([]repository.Hash, 0, 32)
+	visited := make(map[repository.Hash]struct{})
+	BFSOrder := make([]repository.Commit, 0, 32)
+
+	queue = append(queue, rootHash)
+	visited[rootHash] = struct{}{}
+
+	for len(queue) > 0 {
+		// pop
+		hash := queue[0]
+		queue = queue[1:]
+
+		commit, err := repo.ReadCommit(hash)
+		if err != nil {
+			return nil, err
+		}
+
+		BFSOrder = append(BFSOrder, commit)
+
+		for _, parent := range commit.Parents {
+			if _, ok := visited[parent]; !ok {
+				queue = append(queue, parent)
+				// mark as visited
+				visited[parent] = struct{}{}
+			}
+		}
+	}
+
+	// Now, we can reverse this topological order and read the commits in an order where
+	// we are sure to have read all the chronological ancestors when we read a commit.
+
+	// Next step is to:
+	// 1) read the operationPacks
+	// 2) make sure that the clocks causality respect the DAG topology.
+
+	oppMap := make(map[repository.Hash]*operationPack)
+	var opsCount int
+
+	for i := len(BFSOrder) - 1; i >= 0; i-- {
+		commit := BFSOrder[i]
+		isFirstCommit := i == len(BFSOrder)-1
+		isMerge := len(commit.Parents) > 1
+
+		// Verify DAG structure: single chronological root, so only the root
+		// can have no parents. Said otherwise, the DAG needs to have exactly
+		// one leaf.
+		if !isFirstCommit && len(commit.Parents) == 0 {
+			return nil, fmt.Errorf("multiple leafs in the entity DAG")
+		}
+
+		opp, err := readOperationPack(def, repo, resolver, commit)
+		if err != nil {
+			return nil, err
+		}
+
+		err = opp.Validate()
+		if err != nil {
+			return nil, err
+		}
+
+		if isMerge && len(opp.Operations) > 0 {
+			return nil, fmt.Errorf("merge commit cannot have operations")
+		}
+
+		// Check that the create lamport clock is set (not checked in Validate() as it's optional)
+		if isFirstCommit && opp.CreateTime <= 0 {
+			return nil, fmt.Errorf("creation lamport time not set")
+		}
+
+		// make sure that the lamport clocks causality match the DAG topology
+		for _, parentHash := range commit.Parents {
+			parentPack, ok := oppMap[parentHash]
+			if !ok {
+				panic("DFS failed")
+			}
+
+			if parentPack.EditTime >= opp.EditTime {
+				return nil, fmt.Errorf("lamport clock ordering doesn't match the DAG")
+			}
+
+			// to avoid an attack where clocks are pushed toward the uint64 rollover, make sure
+			// that the clocks don't jump too far in the future
+			// we ignore merge commits here to allow merging after a loooong time without breaking anything,
+			// as long as there is one valid chain of small hops, it's fine.
+			if !isMerge && opp.EditTime-parentPack.EditTime > 1_000_000 {
+				return nil, fmt.Errorf("lamport clock jumping too far in the future, likely an attack")
+			}
+		}
+
+		oppMap[commit.Hash] = opp
+		opsCount += len(opp.Operations)
+	}
+
+	// The clocks are fine, we witness them
+	for _, opp := range oppMap {
+		err = repo.Witness(fmt.Sprintf(creationClockPattern, def.Namespace), opp.CreateTime)
+		if err != nil {
+			return nil, err
+		}
+		err = repo.Witness(fmt.Sprintf(editClockPattern, def.Namespace), opp.EditTime)
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	// Now that we know that the topological order and clocks are fine, we order the operationPacks
+	// based on the logical clocks, entirely ignoring the DAG topology
+
+	oppSlice := make([]*operationPack, 0, len(oppMap))
+	for _, pack := range oppMap {
+		oppSlice = append(oppSlice, pack)
+	}
+	sort.Slice(oppSlice, func(i, j int) bool {
+		// Primary ordering with the EditTime.
+		if oppSlice[i].EditTime != oppSlice[j].EditTime {
+			return oppSlice[i].EditTime < oppSlice[j].EditTime
+		}
+		// We have equal EditTime, which means we have concurrent editing across different machines and we
+		// can't tell which one came first. So, what now? We still need a total ordering and the most stable possible.
+		// As a secondary ordering, we can order based on a hash of the serialized Operations in the
+		// operationPack. It doesn't carry much meaning but it's unbiased and hard to abuse.
+		// This is a lexicographic ordering on the stringified ID.
+		return oppSlice[i].Id() < oppSlice[j].Id()
+	})
+
+	// Now that we ordered the operationPacks, we have the order of the Operations
+
+	ops := make([]Operation, 0, opsCount)
+	var createTime lamport.Time
+	var editTime lamport.Time
+	for _, pack := range oppSlice {
+		for _, operation := range pack.Operations {
+			ops = append(ops, operation)
+		}
+		if pack.CreateTime > createTime {
+			createTime = pack.CreateTime
+		}
+		if pack.EditTime > editTime {
+			editTime = pack.EditTime
+		}
+	}
+
+	return &Entity{
+		Definition: def,
+		ops:        ops,
+		lastCommit: rootHash,
+		createTime: createTime,
+		editTime:   editTime,
+	}, nil
+}
+
+type StreamedEntity struct {
+	Entity *Entity
+	Err    error
+}
+
+// ReadAll read and parse all local Entity
+func ReadAll(def Definition, repo repository.ClockedRepo, resolver identity.Resolver) <-chan StreamedEntity {
+	out := make(chan StreamedEntity)
+
+	go func() {
+		defer close(out)
+
+		refPrefix := fmt.Sprintf("refs/%s/", def.Namespace)
+
+		refs, err := repo.ListRefs(refPrefix)
+		if err != nil {
+			out <- StreamedEntity{Err: err}
+			return
+		}
+
+		for _, ref := range refs {
+			e, err := read(def, repo, resolver, ref)
+
+			if err != nil {
+				out <- StreamedEntity{Err: err}
+				return
+			}
+
+			out <- StreamedEntity{Entity: e}
+		}
+	}()
+
+	return out
+}
+
+// Id return the Entity identifier
+func (e *Entity) Id() entity.Id {
+	// id is the id of the first operation
+	return e.FirstOp().Id()
+}
+
+// Validate check if the Entity data is valid
+func (e *Entity) Validate() error {
+	// non-empty
+	if len(e.ops) == 0 && len(e.staging) == 0 {
+		return fmt.Errorf("entity has no operations")
+	}
+
+	// check if each operations are valid
+	for _, op := range e.ops {
+		if err := op.Validate(); err != nil {
+			return err
+		}
+	}
+
+	// check if staging is valid if needed
+	for _, op := range e.staging {
+		if err := op.Validate(); err != nil {
+			return err
+		}
+	}
+
+	// Check that there is no colliding operation's ID
+	ids := make(map[entity.Id]struct{})
+	for _, op := range e.Operations() {
+		if _, ok := ids[op.Id()]; ok {
+			return fmt.Errorf("id collision: %s", op.Id())
+		}
+		ids[op.Id()] = struct{}{}
+	}
+
+	return nil
+}
+
+// Operations return the ordered operations
+func (e *Entity) Operations() []Operation {
+	return append(e.ops, e.staging...)
+}
+
+// FirstOp lookup for the very first operation of the Entity
+func (e *Entity) FirstOp() Operation {
+	for _, op := range e.ops {
+		return op
+	}
+	for _, op := range e.staging {
+		return op
+	}
+	return nil
+}
+
+// LastOp lookup for the very last operation of the Entity
+func (e *Entity) LastOp() Operation {
+	if len(e.staging) > 0 {
+		return e.staging[len(e.staging)-1]
+	}
+	if len(e.ops) > 0 {
+		return e.ops[len(e.ops)-1]
+	}
+	return nil
+}
+
+// Append add a new Operation to the Entity
+func (e *Entity) Append(op Operation) {
+	e.staging = append(e.staging, op)
+}
+
+// NeedCommit indicate if the in-memory state changed and need to be commit in the repository
+func (e *Entity) NeedCommit() bool {
+	return len(e.staging) > 0
+}
+
+// CommitAsNeeded execute a Commit only if necessary. This function is useful to avoid getting an error if the Entity
+// is already in sync with the repository.
+func (e *Entity) CommitAsNeeded(repo repository.ClockedRepo) error {
+	if e.NeedCommit() {
+		return e.Commit(repo)
+	}
+	return nil
+}
+
+// Commit write the appended operations in the repository
+func (e *Entity) Commit(repo repository.ClockedRepo) error {
+	if !e.NeedCommit() {
+		return fmt.Errorf("can't commit an entity with no pending operation")
+	}
+
+	err := e.Validate()
+	if err != nil {
+		return errors.Wrapf(err, "can't commit a %s with invalid data", e.Definition.Typename)
+	}
+
+	for len(e.staging) > 0 {
+		var author identity.Interface
+		var toCommit []Operation
+
+		// Split into chunks with the same author
+		for len(e.staging) > 0 {
+			op := e.staging[0]
+			if author != nil && op.Author().Id() != author.Id() {
+				break
+			}
+			author = e.staging[0].Author()
+			toCommit = append(toCommit, op)
+			e.staging = e.staging[1:]
+		}
+
+		e.editTime, err = repo.Increment(fmt.Sprintf(editClockPattern, e.Namespace))
+		if err != nil {
+			return err
+		}
+
+		opp := &operationPack{
+			Author:     author,
+			Operations: toCommit,
+			EditTime:   e.editTime,
+		}
+
+		if e.lastCommit == "" {
+			e.createTime, err = repo.Increment(fmt.Sprintf(creationClockPattern, e.Namespace))
+			if err != nil {
+				return err
+			}
+			opp.CreateTime = e.createTime
+		}
+
+		var parentCommit []repository.Hash
+		if e.lastCommit != "" {
+			parentCommit = []repository.Hash{e.lastCommit}
+		}
+
+		commitHash, err := opp.Write(e.Definition, repo, parentCommit...)
+		if err != nil {
+			return err
+		}
+
+		e.lastCommit = commitHash
+		e.ops = append(e.ops, toCommit...)
+	}
+
+	// not strictly necessary but make equality testing easier in tests
+	e.staging = nil
+
+	// Create or update the Git reference for this entity
+	// When pushing later, the remote will ensure that this ref update
+	// is fast-forward, that is no data has been overwritten.
+	ref := fmt.Sprintf(refsPattern, e.Namespace, e.Id().String())
+	return repo.UpdateRef(ref, e.lastCommit)
+}
+
+// CreateLamportTime return the Lamport time of creation
+func (e *Entity) CreateLamportTime() lamport.Time {
+	return e.createTime
+}
+
+// EditLamportTime return the Lamport time of the last edition
+func (e *Entity) EditLamportTime() lamport.Time {
+	return e.editTime
+}

entity/dag/entity_actions.go 🔗

@@ -0,0 +1,260 @@
+package dag
+
+import (
+	"fmt"
+
+	"github.com/pkg/errors"
+
+	"github.com/MichaelMure/git-bug/entity"
+	"github.com/MichaelMure/git-bug/identity"
+	"github.com/MichaelMure/git-bug/repository"
+)
+
+// ListLocalIds lists all the available local Entity Ids
+func ListLocalIds(def Definition, repo repository.RepoData) ([]entity.Id, error) {
+	refs, err := repo.ListRefs(fmt.Sprintf("refs/%s/", def.Namespace))
+	if err != nil {
+		return nil, err
+	}
+	return entity.RefsToIds(refs), nil
+}
+
+// Fetch retrieves updates from a remote
+// This does not change the local entity state
+func Fetch(def Definition, repo repository.Repo, remote string) (string, error) {
+	return repo.FetchRefs(remote, def.Namespace)
+}
+
+// Push updates a remote with the local changes
+func Push(def Definition, repo repository.Repo, remote string) (string, error) {
+	return repo.PushRefs(remote, def.Namespace)
+}
+
+// Pull will do a Fetch + MergeAll
+// Contrary to MergeAll, this function will return an error if a merge fails.
+func Pull(def Definition, repo repository.ClockedRepo, resolver identity.Resolver, remote string, author identity.Interface) error {
+	_, err := Fetch(def, repo, remote)
+	if err != nil {
+		return err
+	}
+
+	for merge := range MergeAll(def, repo, resolver, remote, author) {
+		if merge.Err != nil {
+			return merge.Err
+		}
+		if merge.Status == entity.MergeStatusInvalid {
+			return errors.Errorf("merge failure: %s", merge.Reason)
+		}
+	}
+
+	return nil
+}
+
+// MergeAll will merge all the available remote Entity:
+//
+// Multiple scenarios exist:
+// 1. if the remote Entity doesn't exist locally, it's created
+//    --> emit entity.MergeStatusNew
+// 2. if the remote and local Entity have the same state, nothing is changed
+//    --> emit entity.MergeStatusNothing
+// 3. if the local Entity has new commits but the remote doesn't, nothing is changed
+//    --> emit entity.MergeStatusNothing
+// 4. if the remote has new commit, the local bug is updated to match the same history
+//    (fast-forward update)
+//    --> emit entity.MergeStatusUpdated
+// 5. if both local and remote Entity have new commits (that is, we have a concurrent edition),
+//    a merge commit with an empty operationPack is created to join both branch and form a DAG.
+//    --> emit entity.MergeStatusUpdated
+//
+// Note: an author is necessary for the case where a merge commit is created, as this commit will
+// have an author and may be signed if a signing key is available.
+func MergeAll(def Definition, repo repository.ClockedRepo, resolver identity.Resolver, remote string, author identity.Interface) <-chan entity.MergeResult {
+	out := make(chan entity.MergeResult)
+
+	go func() {
+		defer close(out)
+
+		remoteRefSpec := fmt.Sprintf("refs/remotes/%s/%s/", remote, def.Namespace)
+		remoteRefs, err := repo.ListRefs(remoteRefSpec)
+		if err != nil {
+			out <- entity.MergeResult{Err: err}
+			return
+		}
+
+		for _, remoteRef := range remoteRefs {
+			out <- merge(def, repo, resolver, remoteRef, author)
+		}
+	}()
+
+	return out
+}
+
+// merge performs a merge to make sure a local Entity is up to date.
+// See MergeAll for more details.
+func merge(def Definition, repo repository.ClockedRepo, resolver identity.Resolver, remoteRef string, author identity.Interface) entity.MergeResult {
+	id := entity.RefToId(remoteRef)
+
+	if err := id.Validate(); err != nil {
+		return entity.NewMergeInvalidStatus(id, errors.Wrap(err, "invalid ref").Error())
+	}
+
+	remoteEntity, err := read(def, repo, resolver, remoteRef)
+	if err != nil {
+		return entity.NewMergeInvalidStatus(id,
+			errors.Wrapf(err, "remote %s is not readable", def.Typename).Error())
+	}
+
+	// Check for error in remote data
+	if err := remoteEntity.Validate(); err != nil {
+		return entity.NewMergeInvalidStatus(id,
+			errors.Wrapf(err, "remote %s data is invalid", def.Typename).Error())
+	}
+
+	localRef := fmt.Sprintf("refs/%s/%s", def.Namespace, id.String())
+
+	// SCENARIO 1
+	// if the remote Entity doesn't exist locally, it's created
+
+	localExist, err := repo.RefExist(localRef)
+	if err != nil {
+		return entity.NewMergeError(err, id)
+	}
+
+	if !localExist {
+		// the bug is not local yet, simply create the reference
+		err := repo.CopyRef(remoteRef, localRef)
+		if err != nil {
+			return entity.NewMergeError(err, id)
+		}
+
+		return entity.NewMergeNewStatus(id, remoteEntity)
+	}
+
+	localCommit, err := repo.ResolveRef(localRef)
+	if err != nil {
+		return entity.NewMergeError(err, id)
+	}
+
+	remoteCommit, err := repo.ResolveRef(remoteRef)
+	if err != nil {
+		return entity.NewMergeError(err, id)
+	}
+
+	// SCENARIO 2
+	// if the remote and local Entity have the same state, nothing is changed
+
+	if localCommit == remoteCommit {
+		// nothing to merge
+		return entity.NewMergeNothingStatus(id)
+	}
+
+	// SCENARIO 3
+	// if the local Entity has new commits but the remote don't, nothing is changed
+
+	localCommits, err := repo.ListCommits(localRef)
+	if err != nil {
+		return entity.NewMergeError(err, id)
+	}
+
+	for _, hash := range localCommits {
+		if hash == remoteCommit {
+			return entity.NewMergeNothingStatus(id)
+		}
+	}
+
+	// SCENARIO 4
+	// if the remote has new commit, the local bug is updated to match the same history
+	// (fast-forward update)
+
+	remoteCommits, err := repo.ListCommits(remoteRef)
+	if err != nil {
+		return entity.NewMergeError(err, id)
+	}
+
+	// fast-forward is possible if otherRef includes ref
+	fastForwardPossible := false
+	for _, hash := range remoteCommits {
+		if hash == localCommit {
+			fastForwardPossible = true
+			break
+		}
+	}
+
+	if fastForwardPossible {
+		err = repo.UpdateRef(localRef, remoteCommit)
+		if err != nil {
+			return entity.NewMergeError(err, id)
+		}
+		return entity.NewMergeUpdatedStatus(id, remoteEntity)
+	}
+
+	// SCENARIO 5
+	// if both local and remote Entity have new commits (that is, we have a concurrent edition),
+	// a merge commit with an empty operationPack is created to join both branch and form a DAG.
+
+	// fast-forward is not possible, we need to create a merge commit
+	// For simplicity when reading and to have clocks that record this change, we store
+	// an empty operationPack.
+	// First step is to collect those clocks.
+
+	localEntity, err := read(def, repo, resolver, localRef)
+	if err != nil {
+		return entity.NewMergeError(err, id)
+	}
+
+	editTime, err := repo.Increment(fmt.Sprintf(editClockPattern, def.Namespace))
+	if err != nil {
+		return entity.NewMergeError(err, id)
+	}
+
+	opp := &operationPack{
+		Author:     author,
+		Operations: nil,
+		CreateTime: 0,
+		EditTime:   editTime,
+	}
+
+	commitHash, err := opp.Write(def, repo, localCommit, remoteCommit)
+	if err != nil {
+		return entity.NewMergeError(err, id)
+	}
+
+	// finally update the ref
+	err = repo.UpdateRef(localRef, commitHash)
+	if err != nil {
+		return entity.NewMergeError(err, id)
+	}
+
+	// Note: we don't need to update localEntity state (lastCommit, operations...) as we
+	// discard it entirely anyway.
+
+	return entity.NewMergeUpdatedStatus(id, localEntity)
+}
+
+// Remove deletes an Entity.
+// Remove is idempotent.
+func Remove(def Definition, repo repository.ClockedRepo, id entity.Id) error {
+	var matches []string
+
+	ref := fmt.Sprintf("refs/%s/%s", def.Namespace, id.String())
+	matches = append(matches, ref)
+
+	remotes, err := repo.GetRemotes()
+	if err != nil {
+		return err
+	}
+
+	for remote := range remotes {
+		ref = fmt.Sprintf("refs/remotes/%s/%s/%s", remote, def.Namespace, id.String())
+		matches = append(matches, ref)
+	}
+
+	for _, ref = range matches {
+		err = repo.RemoveRef(ref)
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
+}

entity/dag/entity_actions_test.go 🔗

@@ -0,0 +1,412 @@
+package dag
+
+import (
+	"sort"
+	"strings"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+
+	"github.com/MichaelMure/git-bug/entity"
+	"github.com/MichaelMure/git-bug/repository"
+)
+
+func allEntities(t testing.TB, bugs <-chan StreamedEntity) []*Entity {
+	t.Helper()
+
+	var result []*Entity
+	for streamed := range bugs {
+		require.NoError(t, streamed.Err)
+
+		result = append(result, streamed.Entity)
+	}
+	return result
+}
+
+func TestEntityPushPull(t *testing.T) {
+	repoA, repoB, remote, id1, id2, resolver, def := makeTestContextRemote(t)
+	defer repository.CleanupTestRepos(repoA, repoB, remote)
+
+	// A --> remote --> B
+	e := New(def)
+	e.Append(newOp1(id1, "foo"))
+
+	err := e.Commit(repoA)
+	require.NoError(t, err)
+
+	_, err = Push(def, repoA, "remote")
+	require.NoError(t, err)
+
+	err = Pull(def, repoB, resolver, "remote", id1)
+	require.NoError(t, err)
+
+	entities := allEntities(t, ReadAll(def, repoB, resolver))
+	require.Len(t, entities, 1)
+
+	// B --> remote --> A
+	e = New(def)
+	e.Append(newOp2(id2, "bar"))
+
+	err = e.Commit(repoB)
+	require.NoError(t, err)
+
+	_, err = Push(def, repoB, "remote")
+	require.NoError(t, err)
+
+	err = Pull(def, repoA, resolver, "remote", id1)
+	require.NoError(t, err)
+
+	entities = allEntities(t, ReadAll(def, repoB, resolver))
+	require.Len(t, entities, 2)
+}
+
+func TestListLocalIds(t *testing.T) {
+	repoA, repoB, remote, id1, id2, resolver, def := makeTestContextRemote(t)
+	defer repository.CleanupTestRepos(repoA, repoB, remote)
+
+	// A --> remote --> B
+	e := New(def)
+	e.Append(newOp1(id1, "foo"))
+	err := e.Commit(repoA)
+	require.NoError(t, err)
+
+	e = New(def)
+	e.Append(newOp2(id2, "bar"))
+	err = e.Commit(repoA)
+	require.NoError(t, err)
+
+	listLocalIds(t, def, repoA, 2)
+	listLocalIds(t, def, repoB, 0)
+
+	_, err = Push(def, repoA, "remote")
+	require.NoError(t, err)
+
+	_, err = Fetch(def, repoB, "remote")
+	require.NoError(t, err)
+
+	listLocalIds(t, def, repoA, 2)
+	listLocalIds(t, def, repoB, 0)
+
+	err = Pull(def, repoB, resolver, "remote", id1)
+	require.NoError(t, err)
+
+	listLocalIds(t, def, repoA, 2)
+	listLocalIds(t, def, repoB, 2)
+}
+
+func listLocalIds(t *testing.T, def Definition, repo repository.RepoData, expectedCount int) {
+	ids, err := ListLocalIds(def, repo)
+	require.NoError(t, err)
+	require.Len(t, ids, expectedCount)
+}
+
+func assertMergeResults(t *testing.T, expected []entity.MergeResult, results <-chan entity.MergeResult) {
+	t.Helper()
+
+	var allResults []entity.MergeResult
+	for result := range results {
+		allResults = append(allResults, result)
+	}
+
+	require.Equal(t, len(expected), len(allResults))
+
+	sort.Slice(allResults, func(i, j int) bool {
+		return allResults[i].Id < allResults[j].Id
+	})
+	sort.Slice(expected, func(i, j int) bool {
+		return expected[i].Id < expected[j].Id
+	})
+
+	for i, result := range allResults {
+		require.NoError(t, result.Err)
+
+		require.Equal(t, expected[i].Id, result.Id)
+		require.Equal(t, expected[i].Status, result.Status)
+
+		switch result.Status {
+		case entity.MergeStatusNew, entity.MergeStatusUpdated:
+			require.NotNil(t, result.Entity)
+			require.Equal(t, expected[i].Id, result.Entity.Id())
+		}
+
+		i++
+	}
+}
+
+func assertEqualRefs(t *testing.T, repoA, repoB repository.RepoData, prefix string) {
+	t.Helper()
+
+	refsA, err := repoA.ListRefs("")
+	require.NoError(t, err)
+
+	var refsAFiltered []string
+	for _, ref := range refsA {
+		if strings.HasPrefix(ref, prefix) {
+			refsAFiltered = append(refsAFiltered, ref)
+		}
+	}
+
+	refsB, err := repoB.ListRefs("")
+	require.NoError(t, err)
+
+	var refsBFiltered []string
+	for _, ref := range refsB {
+		if strings.HasPrefix(ref, prefix) {
+			refsBFiltered = append(refsBFiltered, ref)
+		}
+	}
+
+	require.NotEmpty(t, refsAFiltered)
+	require.Equal(t, refsAFiltered, refsBFiltered)
+
+	for _, ref := range refsAFiltered {
+		commitA, err := repoA.ResolveRef(ref)
+		require.NoError(t, err)
+		commitB, err := repoB.ResolveRef(ref)
+		require.NoError(t, err)
+
+		require.Equal(t, commitA, commitB)
+	}
+}
+
+func assertNotEqualRefs(t *testing.T, repoA, repoB repository.RepoData, prefix string) {
+	t.Helper()
+
+	refsA, err := repoA.ListRefs("")
+	require.NoError(t, err)
+
+	var refsAFiltered []string
+	for _, ref := range refsA {
+		if strings.HasPrefix(ref, prefix) {
+			refsAFiltered = append(refsAFiltered, ref)
+		}
+	}
+
+	refsB, err := repoB.ListRefs("")
+	require.NoError(t, err)
+
+	var refsBFiltered []string
+	for _, ref := range refsB {
+		if strings.HasPrefix(ref, prefix) {
+			refsBFiltered = append(refsBFiltered, ref)
+		}
+	}
+
+	require.NotEmpty(t, refsAFiltered)
+	require.Equal(t, refsAFiltered, refsBFiltered)
+
+	for _, ref := range refsAFiltered {
+		commitA, err := repoA.ResolveRef(ref)
+		require.NoError(t, err)
+		commitB, err := repoB.ResolveRef(ref)
+		require.NoError(t, err)
+
+		require.NotEqual(t, commitA, commitB)
+	}
+}
+
+func TestMerge(t *testing.T) {
+	repoA, repoB, remote, id1, id2, resolver, def := makeTestContextRemote(t)
+	defer repository.CleanupTestRepos(repoA, repoB, remote)
+
+	// SCENARIO 1
+	// if the remote Entity doesn't exist locally, it's created
+
+	// 2 entities in repoA + push to remote
+	e1A := New(def)
+	e1A.Append(newOp1(id1, "foo"))
+	err := e1A.Commit(repoA)
+	require.NoError(t, err)
+
+	e2A := New(def)
+	e2A.Append(newOp2(id2, "bar"))
+	err = e2A.Commit(repoA)
+	require.NoError(t, err)
+
+	_, err = Push(def, repoA, "remote")
+	require.NoError(t, err)
+
+	// repoB: fetch + merge from remote
+
+	_, err = Fetch(def, repoB, "remote")
+	require.NoError(t, err)
+
+	results := MergeAll(def, repoB, resolver, "remote", id1)
+
+	assertMergeResults(t, []entity.MergeResult{
+		{
+			Id:     e1A.Id(),
+			Status: entity.MergeStatusNew,
+		},
+		{
+			Id:     e2A.Id(),
+			Status: entity.MergeStatusNew,
+		},
+	}, results)
+
+	assertEqualRefs(t, repoA, repoB, "refs/"+def.Namespace)
+
+	// SCENARIO 2
+	// if the remote and local Entity have the same state, nothing is changed
+
+	results = MergeAll(def, repoB, resolver, "remote", id1)
+
+	assertMergeResults(t, []entity.MergeResult{
+		{
+			Id:     e1A.Id(),
+			Status: entity.MergeStatusNothing,
+		},
+		{
+			Id:     e2A.Id(),
+			Status: entity.MergeStatusNothing,
+		},
+	}, results)
+
+	assertEqualRefs(t, repoA, repoB, "refs/"+def.Namespace)
+
+	// SCENARIO 3
+	// if the local Entity has new commits but the remote don't, nothing is changed
+
+	e1A.Append(newOp1(id1, "barbar"))
+	err = e1A.Commit(repoA)
+	require.NoError(t, err)
+
+	e2A.Append(newOp2(id2, "barbarbar"))
+	err = e2A.Commit(repoA)
+	require.NoError(t, err)
+
+	results = MergeAll(def, repoA, resolver, "remote", id1)
+
+	assertMergeResults(t, []entity.MergeResult{
+		{
+			Id:     e1A.Id(),
+			Status: entity.MergeStatusNothing,
+		},
+		{
+			Id:     e2A.Id(),
+			Status: entity.MergeStatusNothing,
+		},
+	}, results)
+
+	assertNotEqualRefs(t, repoA, repoB, "refs/"+def.Namespace)
+
+	// SCENARIO 4
+	// if the remote has new commit, the local bug is updated to match the same history
+	// (fast-forward update)
+
+	_, err = Push(def, repoA, "remote")
+	require.NoError(t, err)
+
+	_, err = Fetch(def, repoB, "remote")
+	require.NoError(t, err)
+
+	results = MergeAll(def, repoB, resolver, "remote", id1)
+
+	assertMergeResults(t, []entity.MergeResult{
+		{
+			Id:     e1A.Id(),
+			Status: entity.MergeStatusUpdated,
+		},
+		{
+			Id:     e2A.Id(),
+			Status: entity.MergeStatusUpdated,
+		},
+	}, results)
+
+	assertEqualRefs(t, repoA, repoB, "refs/"+def.Namespace)
+
+	// SCENARIO 5
+	// if both local and remote Entity have new commits (that is, we have a concurrent edition),
+	// a merge commit with an empty operationPack is created to join both branch and form a DAG.
+
+	e1A.Append(newOp1(id1, "barbarfoo"))
+	err = e1A.Commit(repoA)
+	require.NoError(t, err)
+
+	e2A.Append(newOp2(id2, "barbarbarfoo"))
+	err = e2A.Commit(repoA)
+	require.NoError(t, err)
+
+	e1B, err := Read(def, repoB, resolver, e1A.Id())
+	require.NoError(t, err)
+
+	e2B, err := Read(def, repoB, resolver, e2A.Id())
+	require.NoError(t, err)
+
+	e1B.Append(newOp1(id1, "barbarfoofoo"))
+	err = e1B.Commit(repoB)
+	require.NoError(t, err)
+
+	e2B.Append(newOp2(id2, "barbarbarfoofoo"))
+	err = e2B.Commit(repoB)
+	require.NoError(t, err)
+
+	_, err = Push(def, repoA, "remote")
+	require.NoError(t, err)
+
+	_, err = Fetch(def, repoB, "remote")
+	require.NoError(t, err)
+
+	results = MergeAll(def, repoB, resolver, "remote", id1)
+
+	assertMergeResults(t, []entity.MergeResult{
+		{
+			Id:     e1A.Id(),
+			Status: entity.MergeStatusUpdated,
+		},
+		{
+			Id:     e2A.Id(),
+			Status: entity.MergeStatusUpdated,
+		},
+	}, results)
+
+	assertNotEqualRefs(t, repoA, repoB, "refs/"+def.Namespace)
+
+	_, err = Push(def, repoB, "remote")
+	require.NoError(t, err)
+
+	_, err = Fetch(def, repoA, "remote")
+	require.NoError(t, err)
+
+	results = MergeAll(def, repoA, resolver, "remote", id1)
+
+	assertMergeResults(t, []entity.MergeResult{
+		{
+			Id:     e1A.Id(),
+			Status: entity.MergeStatusUpdated,
+		},
+		{
+			Id:     e2A.Id(),
+			Status: entity.MergeStatusUpdated,
+		},
+	}, results)
+
+	// make sure that the graphs become stable over multiple repo, due to the
+	// fast-forward
+	assertEqualRefs(t, repoA, repoB, "refs/"+def.Namespace)
+}
+
+func TestRemove(t *testing.T) {
+	repoA, repoB, remote, id1, _, resolver, def := makeTestContextRemote(t)
+	defer repository.CleanupTestRepos(repoA, repoB, remote)
+
+	e := New(def)
+	e.Append(newOp1(id1, "foo"))
+	require.NoError(t, e.Commit(repoA))
+
+	_, err := Push(def, repoA, "remote")
+	require.NoError(t, err)
+
+	err = Remove(def, repoA, e.Id())
+	require.NoError(t, err)
+
+	_, err = Read(def, repoA, resolver, e.Id())
+	require.Error(t, err)
+
+	_, err = readRemote(def, repoA, resolver, "remote", e.Id())
+	require.Error(t, err)
+
+	// Remove is idempotent
+	err = Remove(def, repoA, e.Id())
+	require.NoError(t, err)
+}

entity/dag/entity_test.go 🔗

@@ -0,0 +1,68 @@
+package dag
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestWriteRead(t *testing.T) {
+	repo, id1, id2, resolver, def := makeTestContext()
+
+	entity := New(def)
+	require.False(t, entity.NeedCommit())
+
+	entity.Append(newOp1(id1, "foo"))
+	entity.Append(newOp2(id1, "bar"))
+
+	require.True(t, entity.NeedCommit())
+	require.NoError(t, entity.CommitAsNeeded(repo))
+	require.False(t, entity.NeedCommit())
+
+	entity.Append(newOp2(id2, "foobar"))
+	require.True(t, entity.NeedCommit())
+	require.NoError(t, entity.CommitAsNeeded(repo))
+	require.False(t, entity.NeedCommit())
+
+	read, err := Read(def, repo, resolver, entity.Id())
+	require.NoError(t, err)
+
+	assertEqualEntities(t, entity, read)
+}
+
+func TestWriteReadMultipleAuthor(t *testing.T) {
+	repo, id1, id2, resolver, def := makeTestContext()
+
+	entity := New(def)
+
+	entity.Append(newOp1(id1, "foo"))
+	entity.Append(newOp2(id2, "bar"))
+
+	require.NoError(t, entity.CommitAsNeeded(repo))
+
+	entity.Append(newOp2(id1, "foobar"))
+	require.NoError(t, entity.CommitAsNeeded(repo))
+
+	read, err := Read(def, repo, resolver, entity.Id())
+	require.NoError(t, err)
+
+	assertEqualEntities(t, entity, read)
+}
+
+func assertEqualEntities(t *testing.T, a, b *Entity) {
+	// testify doesn't support comparing functions and systematically fails if they are not nil
+	// so we have to set them to nil temporarily
+
+	backOpUnA := a.Definition.OperationUnmarshaler
+	backOpUnB := b.Definition.OperationUnmarshaler
+
+	a.Definition.OperationUnmarshaler = nil
+	b.Definition.OperationUnmarshaler = nil
+
+	defer func() {
+		a.Definition.OperationUnmarshaler = backOpUnA
+		b.Definition.OperationUnmarshaler = backOpUnB
+	}()
+
+	require.Equal(t, a, b)
+}

entity/dag/operation.go 🔗

@@ -0,0 +1,48 @@
+package dag
+
+import (
+	"github.com/MichaelMure/git-bug/entity"
+	"github.com/MichaelMure/git-bug/identity"
+	"github.com/MichaelMure/git-bug/repository"
+)
+
+// Operation is a piece of data defining a change to reflect on the state of an Entity.
+// What this Operation or Entity's state looks like is not the concern of this package, as it only deals with the
+// data structure and storage.
+type Operation interface {
+	// Id return the Operation identifier
+	//
+	// Some care needs to be taken to define a correct Id derivation and enough entropy in the data used to avoid
+	// collisions. Notably:
+	// - the Id of the first Operation will be used as the Id of the Entity. Collision need to be avoided across entities
+	//   of the same type (example: no collision within the "bug" namespace).
+	// - collisions can also happen within the set of Operations of an Entity. Simple Operation might not have enough
+	//   entropy to yield unique Ids (example: two "close" operation within the same second, same author).
+	//   If this is a concern, it is recommended to include a piece of random data in the operation's data, to guarantee
+	//   a minimal amount of entropy and avoid collision.
+	//
+	//   Author's note: I tried to find a clever way around that inelegance (stuffing random useless data into the stored
+	//   structure is not exactly elegant) but I failed to find a proper way. Essentially, anything that would reuse some
+	//   other data (parent operation's Id, lamport clock) or the graph structure (depth) impose that the Id would only
+	//   make sense in the context of the graph and yield some deep coupling between Entity and Operation. This in turn
+	//   make the whole thing even less elegant.
+	//
+	// A common way to derive an Id will be to use the entity.DeriveId() function on the serialized operation data.
+	Id() entity.Id
+	// Validate check if the Operation data is valid
+	Validate() error
+	// Author returns the author of this operation
+	Author() identity.Interface
+}
+
+// OperationWithFiles is an extended Operation that has files dependency, stored in git.
+type OperationWithFiles interface {
+	Operation
+
+	// GetFiles return the files needed by this operation
+	// This implies that the Operation maintain and store internally the references to those files. This is how
+	// this information is read later, when loading from storage.
+	// For example, an operation that has a text value referencing some files would maintain a mapping (text ref -->
+	// hash).
+	GetFiles() []repository.Hash
+}

entity/dag/operation_pack.go 🔗

@@ -0,0 +1,358 @@
+package dag
+
+import (
+	"encoding/json"
+	"fmt"
+	"strconv"
+	"strings"
+
+	"github.com/pkg/errors"
+	"golang.org/x/crypto/openpgp"
+
+	"github.com/MichaelMure/git-bug/entity"
+	"github.com/MichaelMure/git-bug/identity"
+	"github.com/MichaelMure/git-bug/repository"
+	"github.com/MichaelMure/git-bug/util/lamport"
+)
+
+const opsEntryName = "ops"
+const extraEntryName = "extra"
+const versionEntryPrefix = "version-"
+const createClockEntryPrefix = "create-clock-"
+const editClockEntryPrefix = "edit-clock-"
+
+// operationPack is a wrapper structure to store multiple operations in a single git blob.
+// Additionally, it holds and store the metadata for those operations.
+type operationPack struct {
+	// An identifier, taken from a hash of the serialized Operations.
+	id entity.Id
+
+	// The author of the Operations. Must be the same author for all the Operations.
+	Author identity.Interface
+	// The list of Operation stored in the operationPack
+	Operations []Operation
+	// Encode the entity's logical time of creation across all entities of the same type.
+	// Only exists on the root operationPack
+	CreateTime lamport.Time
+	// Encode the entity's logical time of last edition across all entities of the same type.
+	// Exists on every operationPack
+	EditTime lamport.Time
+}
+
+func (opp *operationPack) Id() entity.Id {
+	if opp.id == "" || opp.id == entity.UnsetId {
+		// This means we are trying to get the opp's Id *before* it has been stored.
+		// As the Id is computed based on the actual bytes written on the disk, we are going to predict
+		// those and then get the Id. This is safe as it will be the exact same code writing on disk later.
+
+		data, err := json.Marshal(opp)
+		if err != nil {
+			panic(err)
+		}
+		opp.id = entity.DeriveId(data)
+	}
+
+	return opp.id
+}
+
+func (opp *operationPack) MarshalJSON() ([]byte, error) {
+	return json.Marshal(struct {
+		Author     identity.Interface `json:"author"`
+		Operations []Operation        `json:"ops"`
+	}{
+		Author:     opp.Author,
+		Operations: opp.Operations,
+	})
+}
+
+func (opp *operationPack) Validate() error {
+	if opp.Author == nil {
+		return fmt.Errorf("missing author")
+	}
+	for _, op := range opp.Operations {
+		if op.Author().Id() != opp.Author.Id() {
+			return fmt.Errorf("operation has different author than the operationPack's")
+		}
+	}
+	if opp.EditTime == 0 {
+		return fmt.Errorf("lamport edit time is zero")
+	}
+	return nil
+}
+
+// Write writes the OperationPack in git, with zero, one or more parent commits.
+// If the repository has a keypair able to sign (that is, with a private key), the resulting commit is signed with that key.
+// Return the hash of the created commit.
+func (opp *operationPack) Write(def Definition, repo repository.Repo, parentCommit ...repository.Hash) (repository.Hash, error) {
+	if err := opp.Validate(); err != nil {
+		return "", err
+	}
+
+	// For different reason, we store the clocks and format version directly in the git tree.
+	// Version has to be accessible before any attempt to decode to return early with a unique error.
+	// Clocks could possibly be stored in the git blob but it's nice to separate data and metadata, and
+	// we are storing something directly in the tree already so why not.
+	//
+	// To have a valid Tree, we point the "fake" entries to always the same value, the empty blob.
+	emptyBlobHash, err := repo.StoreData([]byte{})
+	if err != nil {
+		return "", err
+	}
+
+	// Write the Ops as a Git blob containing the serialized array of operations
+	data, err := json.Marshal(opp)
+	if err != nil {
+		return "", err
+	}
+
+	// compute the Id while we have the serialized data
+	opp.id = entity.DeriveId(data)
+
+	hash, err := repo.StoreData(data)
+	if err != nil {
+		return "", err
+	}
+
+	// Make a Git tree referencing this blob and encoding the other values:
+	// - format version
+	// - clocks
+	// - extra data
+	tree := []repository.TreeEntry{
+		{ObjectType: repository.Blob, Hash: emptyBlobHash,
+			Name: fmt.Sprintf(versionEntryPrefix+"%d", def.FormatVersion)},
+		{ObjectType: repository.Blob, Hash: hash,
+			Name: opsEntryName},
+		{ObjectType: repository.Blob, Hash: emptyBlobHash,
+			Name: fmt.Sprintf(editClockEntryPrefix+"%d", opp.EditTime)},
+	}
+	if opp.CreateTime > 0 {
+		tree = append(tree, repository.TreeEntry{
+			ObjectType: repository.Blob,
+			Hash:       emptyBlobHash,
+			Name:       fmt.Sprintf(createClockEntryPrefix+"%d", opp.CreateTime),
+		})
+	}
+	if extraTree := opp.makeExtraTree(); len(extraTree) > 0 {
+		extraTreeHash, err := repo.StoreTree(extraTree)
+		if err != nil {
+			return "", err
+		}
+		tree = append(tree, repository.TreeEntry{
+			ObjectType: repository.Tree,
+			Hash:       extraTreeHash,
+			Name:       extraEntryName,
+		})
+	}
+
+	// Store the tree
+	treeHash, err := repo.StoreTree(tree)
+	if err != nil {
+		return "", err
+	}
+
+	// Write a Git commit referencing the tree, with the previous commit as parent
+	// If we have keys, sign.
+	var commitHash repository.Hash
+
+	// Sign the commit if we have a key
+	signingKey, err := opp.Author.SigningKey(repo)
+	if err != nil {
+		return "", err
+	}
+
+	if signingKey != nil {
+		commitHash, err = repo.StoreSignedCommit(treeHash, signingKey.PGPEntity(), parentCommit...)
+	} else {
+		commitHash, err = repo.StoreCommit(treeHash, parentCommit...)
+	}
+
+	if err != nil {
+		return "", err
+	}
+
+	return commitHash, nil
+}
+
+func (opp *operationPack) makeExtraTree() []repository.TreeEntry {
+	var tree []repository.TreeEntry
+	counter := 0
+	added := make(map[repository.Hash]interface{})
+
+	for _, ops := range opp.Operations {
+		ops, ok := ops.(OperationWithFiles)
+		if !ok {
+			continue
+		}
+
+		for _, file := range ops.GetFiles() {
+			if _, has := added[file]; !has {
+				tree = append(tree, repository.TreeEntry{
+					ObjectType: repository.Blob,
+					Hash:       file,
+					// The name is not important here, we only need to
+					// reference the blob.
+					Name: fmt.Sprintf("file%d", counter),
+				})
+				counter++
+				added[file] = struct{}{}
+			}
+		}
+	}
+
+	return tree
+}
+
+// readOperationPack read the operationPack encoded in git at the given Tree hash.
+//
+// Validity of the Lamport clocks is left for the caller to decide.
+func readOperationPack(def Definition, repo repository.RepoData, resolver identity.Resolver, commit repository.Commit) (*operationPack, error) {
+	entries, err := repo.ReadTree(commit.TreeHash)
+	if err != nil {
+		return nil, err
+	}
+
+	// check the format version first, fail early instead of trying to read something
+	var version uint
+	for _, entry := range entries {
+		if strings.HasPrefix(entry.Name, versionEntryPrefix) {
+			v, err := strconv.ParseUint(strings.TrimPrefix(entry.Name, versionEntryPrefix), 10, 64)
+			if err != nil {
+				return nil, errors.Wrap(err, "can't read format version")
+			}
+			if v > 1<<12 {
+				return nil, fmt.Errorf("format version too big")
+			}
+			version = uint(v)
+			break
+		}
+	}
+	if version == 0 {
+		return nil, entity.NewErrUnknownFormat(def.FormatVersion)
+	}
+	if version != def.FormatVersion {
+		return nil, entity.NewErrInvalidFormat(version, def.FormatVersion)
+	}
+
+	var id entity.Id
+	var author identity.Interface
+	var ops []Operation
+	var createTime lamport.Time
+	var editTime lamport.Time
+
+	for _, entry := range entries {
+		switch {
+		case entry.Name == opsEntryName:
+			data, err := repo.ReadData(entry.Hash)
+			if err != nil {
+				return nil, errors.Wrap(err, "failed to read git blob data")
+			}
+			ops, author, err = unmarshallPack(def, resolver, data)
+			if err != nil {
+				return nil, err
+			}
+			id = entity.DeriveId(data)
+
+		case strings.HasPrefix(entry.Name, createClockEntryPrefix):
+			v, err := strconv.ParseUint(strings.TrimPrefix(entry.Name, createClockEntryPrefix), 10, 64)
+			if err != nil {
+				return nil, errors.Wrap(err, "can't read creation lamport time")
+			}
+			createTime = lamport.Time(v)
+
+		case strings.HasPrefix(entry.Name, editClockEntryPrefix):
+			v, err := strconv.ParseUint(strings.TrimPrefix(entry.Name, editClockEntryPrefix), 10, 64)
+			if err != nil {
+				return nil, errors.Wrap(err, "can't read edit lamport time")
+			}
+			editTime = lamport.Time(v)
+		}
+	}
+
+	// Verify signature if we expect one
+	keys := author.ValidKeysAtTime(fmt.Sprintf(editClockPattern, def.Namespace), editTime)
+	if len(keys) > 0 {
+		keyring := PGPKeyring(keys)
+		_, err = openpgp.CheckDetachedSignature(keyring, commit.SignedData, commit.Signature)
+		if err != nil {
+			return nil, fmt.Errorf("signature failure: %v", err)
+		}
+	}
+
+	return &operationPack{
+		id:         id,
+		Author:     author,
+		Operations: ops,
+		CreateTime: createTime,
+		EditTime:   editTime,
+	}, nil
+}
+
+// unmarshallPack delegates the unmarshalling of the Operation's JSON to the decoding
+// function provided by the concrete entity. This gives access to the concrete type of each
+// Operation.
+func unmarshallPack(def Definition, resolver identity.Resolver, data []byte) ([]Operation, identity.Interface, error) {
+	aux := struct {
+		Author     identity.IdentityStub `json:"author"`
+		Operations []json.RawMessage     `json:"ops"`
+	}{}
+
+	if err := json.Unmarshal(data, &aux); err != nil {
+		return nil, nil, err
+	}
+
+	if aux.Author.Id() == "" || aux.Author.Id() == entity.UnsetId {
+		return nil, nil, fmt.Errorf("missing author")
+	}
+
+	author, err := resolver.ResolveIdentity(aux.Author.Id())
+	if err != nil {
+		return nil, nil, err
+	}
+
+	ops := make([]Operation, 0, len(aux.Operations))
+
+	for _, raw := range aux.Operations {
+		// delegate to specialized unmarshal function
+		op, err := def.OperationUnmarshaler(author, raw)
+		if err != nil {
+			return nil, nil, err
+		}
+		ops = append(ops, op)
+	}
+
+	return ops, author, nil
+}
+
+var _ openpgp.KeyRing = &PGPKeyring{}
+
+// PGPKeyring implements an openpgp.KeyRing from a slice of Key
+type PGPKeyring []*identity.Key
+
+func (pk PGPKeyring) KeysById(id uint64) []openpgp.Key {
+	var result []openpgp.Key
+	for _, key := range pk {
+		if key.Public().KeyId == id {
+			result = append(result, openpgp.Key{
+				PublicKey:  key.Public(),
+				PrivateKey: key.Private(),
+			})
+		}
+	}
+	return result
+}
+
+func (pk PGPKeyring) KeysByIdUsage(id uint64, requiredUsage byte) []openpgp.Key {
+	// the only usage we care about is the ability to sign, which all keys should already be capable of
+	return pk.KeysById(id)
+}
+
+func (pk PGPKeyring) DecryptionKeys() []openpgp.Key {
+	result := make([]openpgp.Key, len(pk))
+	for i, key := range pk {
+		result[i] = openpgp.Key{
+			PublicKey:  key.Public(),
+			PrivateKey: key.Private(),
+		}
+	}
+	return result
+}

entity/dag/operation_pack_test.go 🔗

@@ -0,0 +1,159 @@
+package dag
+
+import (
+	"math/rand"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+
+	"github.com/MichaelMure/git-bug/identity"
+	"github.com/MichaelMure/git-bug/repository"
+)
+
+func TestOperationPackReadWrite(t *testing.T) {
+	repo, id1, _, resolver, def := makeTestContext()
+
+	opp := &operationPack{
+		Author: id1,
+		Operations: []Operation{
+			newOp1(id1, "foo"),
+			newOp2(id1, "bar"),
+		},
+		CreateTime: 123,
+		EditTime:   456,
+	}
+
+	commitHash, err := opp.Write(def, repo)
+	require.NoError(t, err)
+
+	commit, err := repo.ReadCommit(commitHash)
+	require.NoError(t, err)
+
+	opp2, err := readOperationPack(def, repo, resolver, commit)
+	require.NoError(t, err)
+
+	require.Equal(t, opp, opp2)
+
+	// make sure we get the same Id with the same data
+	opp3 := &operationPack{
+		Author: id1,
+		Operations: []Operation{
+			newOp1(id1, "foo"),
+			newOp2(id1, "bar"),
+		},
+		CreateTime: 123,
+		EditTime:   456,
+	}
+	require.Equal(t, opp.Id(), opp3.Id())
+}
+
+func TestOperationPackSignedReadWrite(t *testing.T) {
+	repo, id1, _, resolver, def := makeTestContext()
+
+	err := id1.(*identity.Identity).Mutate(repo, func(orig *identity.Mutator) {
+		orig.Keys = append(orig.Keys, identity.GenerateKey())
+	})
+	require.NoError(t, err)
+
+	opp := &operationPack{
+		Author: id1,
+		Operations: []Operation{
+			newOp1(id1, "foo"),
+			newOp2(id1, "bar"),
+		},
+		CreateTime: 123,
+		EditTime:   456,
+	}
+
+	commitHash, err := opp.Write(def, repo)
+	require.NoError(t, err)
+
+	commit, err := repo.ReadCommit(commitHash)
+	require.NoError(t, err)
+
+	opp2, err := readOperationPack(def, repo, resolver, commit)
+	require.NoError(t, err)
+
+	require.Equal(t, opp, opp2)
+
+	// make sure we get the same Id with the same data
+	opp3 := &operationPack{
+		Author: id1,
+		Operations: []Operation{
+			newOp1(id1, "foo"),
+			newOp2(id1, "bar"),
+		},
+		CreateTime: 123,
+		EditTime:   456,
+	}
+	require.Equal(t, opp.Id(), opp3.Id())
+}
+
+func TestOperationPackFiles(t *testing.T) {
+	repo, id1, _, resolver, def := makeTestContext()
+
+	blobHash1, err := repo.StoreData(randomData())
+	require.NoError(t, err)
+
+	blobHash2, err := repo.StoreData(randomData())
+	require.NoError(t, err)
+
+	opp := &operationPack{
+		Author: id1,
+		Operations: []Operation{
+			newOp1(id1, "foo", blobHash1, blobHash2),
+			newOp1(id1, "foo", blobHash2),
+		},
+		CreateTime: 123,
+		EditTime:   456,
+	}
+
+	commitHash, err := opp.Write(def, repo)
+	require.NoError(t, err)
+
+	commit, err := repo.ReadCommit(commitHash)
+	require.NoError(t, err)
+
+	opp2, err := readOperationPack(def, repo, resolver, commit)
+	require.NoError(t, err)
+
+	require.Equal(t, opp, opp2)
+
+	require.ElementsMatch(t, opp2.Operations[0].(OperationWithFiles).GetFiles(), []repository.Hash{
+		blobHash1,
+		blobHash2,
+	})
+	require.ElementsMatch(t, opp2.Operations[1].(OperationWithFiles).GetFiles(), []repository.Hash{
+		blobHash2,
+	})
+
+	tree, err := repo.ReadTree(commit.TreeHash)
+	require.NoError(t, err)
+
+	extraTreeHash, ok := repository.SearchTreeEntry(tree, extraEntryName)
+	require.True(t, ok)
+
+	extraTree, err := repo.ReadTree(extraTreeHash.Hash)
+	require.NoError(t, err)
+	require.ElementsMatch(t, extraTree, []repository.TreeEntry{
+		{
+			ObjectType: repository.Blob,
+			Hash:       blobHash1,
+			Name:       "file0",
+		},
+		{
+			ObjectType: repository.Blob,
+			Hash:       blobHash2,
+			Name:       "file1",
+		},
+	})
+}
+
+func randomData() []byte {
+	var letterRunes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
+	b := make([]byte, 32)
+	for i := range b {
+		b[i] = letterRunes[rand.Intn(len(letterRunes))]
+	}
+	return b
+}

entity/doc.go 🔗

@@ -1,8 +0,0 @@
-// Package entity contains the base common code to define an entity stored
-// in a chain of git objects, supporting actions like Push, Pull and Merge.
-package entity
-
-// TODO: Bug and Identity are very similar, right ? I expect that this package
-// will eventually hold the common code to define an entity and the related
-// helpers, errors and so on. When this work is done, it will become easier
-// to add new entities, for example to support pull requests.

entity/err.go 🔗

@@ -31,28 +31,31 @@ func IsErrMultipleMatch(err error) bool {
 	return ok
 }
 
-// ErrOldFormatVersion indicate that the read data has a too old format.
-type ErrOldFormatVersion struct {
-	formatVersion uint
+type ErrInvalidFormat struct {
+	version  uint
+	expected uint
 }
 
-func NewErrOldFormatVersion(formatVersion uint) *ErrOldFormatVersion {
-	return &ErrOldFormatVersion{formatVersion: formatVersion}
-}
-
-func (e ErrOldFormatVersion) Error() string {
-	return fmt.Sprintf("outdated repository format %v, please use https://github.com/MichaelMure/git-bug-migration to upgrade", e.formatVersion)
-}
-
-// ErrNewFormatVersion indicate that the read data is too new for this software.
-type ErrNewFormatVersion struct {
-	formatVersion uint
+func NewErrInvalidFormat(version uint, expected uint) *ErrInvalidFormat {
+	return &ErrInvalidFormat{
+		version:  version,
+		expected: expected,
+	}
 }
 
-func NewErrNewFormatVersion(formatVersion uint) *ErrNewFormatVersion {
-	return &ErrNewFormatVersion{formatVersion: formatVersion}
+func NewErrUnknownFormat(expected uint) *ErrInvalidFormat {
+	return &ErrInvalidFormat{
+		version:  0,
+		expected: expected,
+	}
 }
 
-func (e ErrNewFormatVersion) Error() string {
-	return fmt.Sprintf("your version of git-bug is too old for this repository (version %v), please upgrade to the latest version", e.formatVersion)
+func (e ErrInvalidFormat) Error() string {
+	if e.version == 0 {
+		return fmt.Sprintf("unreadable data, you likely have an outdated repository format, please use https://github.com/MichaelMure/git-bug-migration to upgrade to format version %v", e.expected)
+	}
+	if e.version < e.expected {
+		return fmt.Sprintf("outdated repository format %v, please use https://github.com/MichaelMure/git-bug-migration to upgrade to format version %v", e.version, e.expected)
+	}
+	return fmt.Sprintf("your version of git-bug is too old for this repository (format version %v, expected %v), please upgrade to the latest version", e.version, e.expected)
 }

entity/id.go 🔗

@@ -1,6 +1,7 @@
 package entity
 
 import (
+	"crypto/sha256"
 	"fmt"
 	"io"
 	"strings"
@@ -8,8 +9,8 @@ import (
 	"github.com/pkg/errors"
 )
 
-const IdLengthSHA1 = 40
-const IdLengthSHA256 = 64
+// sha-256
+const idLength = 64
 const humanIdLength = 7
 
 const UnsetId = Id("unset")
@@ -17,6 +18,15 @@ const UnsetId = Id("unset")
 // Id is an identifier for an entity or part of an entity
 type Id string
 
+// DeriveId generates an Id from the serialization of the object or part of the object.
+func DeriveId(data []byte) Id {
+	// My understanding is that sha256 is enough to prevent collision (git use that, so ...?)
+	// If you read this code, I'd be happy to be schooled.
+
+	sum := sha256.Sum256(data)
+	return Id(fmt.Sprintf("%x", sum))
+}
+
 // String return the identifier as a string
 func (i Id) String() string {
 	return string(i)
@@ -55,7 +65,11 @@ func (i Id) MarshalGQL(w io.Writer) {
 
 // IsValid tell if the Id is valid
 func (i Id) Validate() error {
-	if len(i) != IdLengthSHA1 && len(i) != IdLengthSHA256 {
+	// Special case to detect outdated repo
+	if len(i) == 40 {
+		return fmt.Errorf("outdated repository format, please use https://github.com/MichaelMure/git-bug-migration to upgrade")
+	}
+	if len(i) != idLength {
 		return fmt.Errorf("invalid length")
 	}
 	for _, r := range i {

entity/id_interleaved.go 🔗

@@ -0,0 +1,68 @@
+package entity
+
+import (
+	"strings"
+)
+
+// CombineIds compute a merged Id holding information from both the primary Id
+// and the secondary Id.
+//
+// This allow to later find efficiently a secondary element because we can access
+// the primary one directly instead of searching for a primary that has a
+// secondary matching the Id.
+//
+// An example usage is Comment in a Bug. The interleaved Id will hold part of the
+// Bug Id and part of the Comment Id.
+//
+// To allow the use of an arbitrary length prefix of this Id, Ids from primary
+// and secondary are interleaved with this irregular pattern to give the
+// best chance to find the secondary even with a 7 character prefix.
+//
+// Format is: PSPSPSPPPSPPPPSPPPPSPPPPSPPPPSPPPPSPPPPSPPPPSPPPPSPPPPSPPPPSPPPP
+//
+// A complete interleaved Id hold 50 characters for the primary and 14 for the
+// secondary, which give a key space of 36^50 for the primary (~6 * 10^77) and
+// 36^14 for the secondary (~6 * 10^21). This asymmetry assume a reasonable number
+// of secondary within a primary Entity, while still allowing for a vast key space
+// for the primary (that is, a globally merged database) with a low risk of collision.
+//
+// Here is the breakdown of several common prefix length:
+//
+// 5:    3P, 2S
+// 7:    4P, 3S
+// 10:   6P, 4S
+// 16:  11P, 5S
+func CombineIds(primary Id, secondary Id) Id {
+	var id strings.Builder
+
+	for i := 0; i < idLength; i++ {
+		switch {
+		default:
+			id.WriteByte(primary[0])
+			primary = primary[1:]
+		case i == 1, i == 3, i == 5, i == 9, i >= 10 && i%5 == 4:
+			id.WriteByte(secondary[0])
+			secondary = secondary[1:]
+		}
+	}
+
+	return Id(id.String())
+}
+
+// SeparateIds extract primary and secondary prefix from an arbitrary length prefix
+// of an Id created with CombineIds.
+func SeparateIds(prefix string) (primaryPrefix string, secondaryPrefix string) {
+	var primary strings.Builder
+	var secondary strings.Builder
+
+	for i, r := range prefix {
+		switch {
+		default:
+			primary.WriteRune(r)
+		case i == 1, i == 3, i == 5, i == 9, i >= 10 && i%5 == 4:
+			secondary.WriteRune(r)
+		}
+	}
+
+	return primary.String(), secondary.String()
+}

entity/id_interleaved_test.go 🔗

@@ -0,0 +1,36 @@
+package entity
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestInterleaved(t *testing.T) {
+	primary := Id("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWX______________")
+	secondary := Id("YZ0123456789+/________________________________________________")
+	expectedId := Id("aYbZc0def1ghij2klmn3opqr4stuv5wxyz6ABCD7EFGH8IJKL9MNOP+QRST/UVWX")
+
+	interleaved := CombineIds(primary, secondary)
+	require.Equal(t, expectedId, interleaved)
+
+	// full length
+	splitPrimary, splitSecondary := SeparateIds(interleaved.String())
+	require.Equal(t, string(primary[:50]), splitPrimary)
+	require.Equal(t, string(secondary[:14]), splitSecondary)
+
+	// partial
+	splitPrimary, splitSecondary = SeparateIds(string(expectedId[:7]))
+	require.Equal(t, string(primary[:4]), splitPrimary)
+	require.Equal(t, string(secondary[:3]), splitSecondary)
+
+	// partial
+	splitPrimary, splitSecondary = SeparateIds(string(expectedId[:10]))
+	require.Equal(t, string(primary[:6]), splitPrimary)
+	require.Equal(t, string(secondary[:4]), splitSecondary)
+
+	// partial
+	splitPrimary, splitSecondary = SeparateIds(string(expectedId[:16]))
+	require.Equal(t, string(primary[:11]), splitPrimary)
+	require.Equal(t, string(secondary[:5]), splitSecondary)
+}

entity/interface.go 🔗

@@ -2,5 +2,11 @@ package entity
 
 type Interface interface {
 	// Id return the Entity identifier
+	//
+	// This Id need to be immutable without having to store the entity somewhere (ie, an entity only in memory
+	// should have a valid Id, and it should not change if further edit are done on this entity).
+	// How to achieve that is up to the entity itself. A common way would be to take a hash of an immutable data at
+	// the root of the entity.
+	// It is acceptable to use such a hash and keep mutating that data as long as Id() is not called.
 	Id() Id
 }

entity/merge.go 🔗

@@ -8,14 +8,15 @@ import (
 type MergeStatus int
 
 const (
-	_ MergeStatus = iota
-	MergeStatusNew
-	MergeStatusInvalid
-	MergeStatusUpdated
-	MergeStatusNothing
-	MergeStatusError
+	_                  MergeStatus = iota
+	MergeStatusNew                 // a new Entity was created locally
+	MergeStatusInvalid             // the remote data is invalid
+	MergeStatusUpdated             // a local Entity has been updated
+	MergeStatusNothing             // no changes were made to a local Entity (already up to date)
+	MergeStatusError               // a terminal error happened
 )
 
+// MergeResult hold the result of a merge operation on an Entity.
 type MergeResult struct {
 	// Err is set when a terminal error occur in the process
 	Err error
@@ -23,10 +24,10 @@ type MergeResult struct {
 	Id     Id
 	Status MergeStatus
 
-	// Only set for invalid status
+	// Only set for Invalid status
 	Reason string
 
-	// Not set for invalid status
+	// Only set for New or Updated status
 	Entity Interface
 }
 
@@ -41,34 +42,50 @@ func (mr MergeResult) String() string {
 	case MergeStatusNothing:
 		return "nothing to do"
 	case MergeStatusError:
-		return fmt.Sprintf("merge error on %s: %s", mr.Id, mr.Err.Error())
+		if mr.Id != "" {
+			return fmt.Sprintf("merge error on %s: %s", mr.Id, mr.Err.Error())
+		}
+		return fmt.Sprintf("merge error: %s", mr.Err.Error())
 	default:
 		panic("unknown merge status")
 	}
 }
 
-func NewMergeError(err error, id Id) MergeResult {
+func NewMergeNewStatus(id Id, entity Interface) MergeResult {
 	return MergeResult{
-		Err:    err,
 		Id:     id,
-		Status: MergeStatusError,
+		Status: MergeStatusNew,
+		Entity: entity,
 	}
 }
 
-func NewMergeStatus(status MergeStatus, id Id, entity Interface) MergeResult {
+func NewMergeInvalidStatus(id Id, reason string) MergeResult {
 	return MergeResult{
 		Id:     id,
-		Status: status,
+		Status: MergeStatusInvalid,
+		Reason: reason,
+	}
+}
 
-		// Entity is not set for an invalid merge result
+func NewMergeUpdatedStatus(id Id, entity Interface) MergeResult {
+	return MergeResult{
+		Id:     id,
+		Status: MergeStatusUpdated,
 		Entity: entity,
 	}
 }
 
-func NewMergeInvalidStatus(id Id, reason string) MergeResult {
+func NewMergeNothingStatus(id Id) MergeResult {
 	return MergeResult{
 		Id:     id,
-		Status: MergeStatusInvalid,
-		Reason: reason,
+		Status: MergeStatusNothing,
+	}
+}
+
+func NewMergeError(err error, id Id) MergeResult {
+	return MergeResult{
+		Id:     id,
+		Status: MergeStatusError,
+		Err:    err,
 	}
 }

entity/refs.go 🔗

@@ -2,17 +2,19 @@ package entity
 
 import "strings"
 
+// RefsToIds parses a slice of git references and returns the corresponding Entity's Ids.
 func RefsToIds(refs []string) []Id {
 	ids := make([]Id, len(refs))
 
 	for i, ref := range refs {
-		ids[i] = refToId(ref)
+		ids[i] = RefToId(ref)
 	}
 
 	return ids
 }
 
-func refToId(ref string) Id {
+// RefToId parses a git reference and returns the corresponding Entity's Id.
+func RefToId(ref string) Id {
 	split := strings.Split(ref, "/")
 	return Id(split[len(split)-1])
 }

go.mod 🔗

@@ -1,6 +1,6 @@
 module github.com/MichaelMure/git-bug
 
-go 1.13
+go 1.15
 
 require (
 	github.com/99designs/gqlgen v0.10.3-0.20200209012558-b7a58a1c0e4b
@@ -8,13 +8,12 @@ require (
 	github.com/MichaelMure/go-term-text v0.2.10
 	github.com/araddon/dateparse v0.0.0-20190622164848-0fb0a474d195
 	github.com/awesome-gocui/gocui v0.6.1-0.20191115151952-a34ffb055986
-	github.com/blang/semver v3.5.1+incompatible
 	github.com/blevesearch/bleve v1.0.14
 	github.com/cheekybits/genny v0.0.0-20170328200008-9127e812e1e9
 	github.com/corpix/uarand v0.1.1 // indirect
 	github.com/dustin/go-humanize v1.0.0
 	github.com/fatih/color v1.10.0
-	github.com/go-git/go-billy/v5 v5.0.0
+	github.com/go-git/go-billy/v5 v5.1.0
 	github.com/go-git/go-git/v5 v5.2.0
 	github.com/golang/protobuf v1.4.3 // indirect
 	github.com/gorilla/mux v1.8.0
@@ -37,8 +36,8 @@ require (
 	golang.org/x/net v0.0.0-20201024042810-be3efd7ff127 // indirect
 	golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43
 	golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208
-	golang.org/x/sys v0.0.0-20201020230747-6e5568b54d1a // indirect
-	golang.org/x/text v0.3.5
+	golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4
+	golang.org/x/text v0.3.6
 	golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e // indirect
 	google.golang.org/appengine v1.6.7 // indirect
 )

go.sum 🔗

@@ -74,8 +74,6 @@ github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24
 github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
 github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
 github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84=
-github.com/blang/semver v3.5.1+incompatible h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ=
-github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk=
 github.com/blevesearch/bleve v1.0.14 h1:Q8r+fHTt35jtGXJUM0ULwM3Tzg+MRfyai4ZkWDy2xO4=
 github.com/blevesearch/bleve v1.0.14/go.mod h1:e/LJTr+E7EaoVdkQZTfoz7dt4KoDNvDbLb8MSKuNTLQ=
 github.com/blevesearch/blevex v1.0.0 h1:pnilj2Qi3YSEGdWgLj1Pn9Io7ukfXPoQcpAI1Bv8n/o=
@@ -90,12 +88,20 @@ github.com/blevesearch/segment v0.9.0/go.mod h1:9PfHYUdQCgHktBgvtUOF4x+pc4/l8rdH
 github.com/blevesearch/snowballstem v0.9.0 h1:lMQ189YspGP6sXvZQ4WZ+MLawfV8wOmPoD/iWeNXm8s=
 github.com/blevesearch/snowballstem v0.9.0/go.mod h1:PivSj3JMc8WuaFkTSRDW2SlrulNWPl4ABg1tC/hlgLs=
 github.com/blevesearch/zap/v11 v11.0.14 h1:IrDAvtlzDylh6H2QCmS0OGcN9Hpf6mISJlfKjcwJs7k=
+github.com/blevesearch/zap/v11 v11.0.14 h1:IrDAvtlzDylh6H2QCmS0OGcN9Hpf6mISJlfKjcwJs7k=
+github.com/blevesearch/zap/v11 v11.0.14/go.mod h1:MUEZh6VHGXv1PKx3WnCbdP404LGG2IZVa/L66pyFwnY=
 github.com/blevesearch/zap/v11 v11.0.14/go.mod h1:MUEZh6VHGXv1PKx3WnCbdP404LGG2IZVa/L66pyFwnY=
 github.com/blevesearch/zap/v12 v12.0.14 h1:2o9iRtl1xaRjsJ1xcqTyLX414qPAwykHNV7wNVmbp3w=
+github.com/blevesearch/zap/v12 v12.0.14 h1:2o9iRtl1xaRjsJ1xcqTyLX414qPAwykHNV7wNVmbp3w=
+github.com/blevesearch/zap/v12 v12.0.14/go.mod h1:rOnuZOiMKPQj18AEKEHJxuI14236tTQ1ZJz4PAnWlUg=
 github.com/blevesearch/zap/v12 v12.0.14/go.mod h1:rOnuZOiMKPQj18AEKEHJxuI14236tTQ1ZJz4PAnWlUg=
 github.com/blevesearch/zap/v13 v13.0.6 h1:r+VNSVImi9cBhTNNR+Kfl5uiGy8kIbb0JMz/h8r6+O4=
+github.com/blevesearch/zap/v13 v13.0.6 h1:r+VNSVImi9cBhTNNR+Kfl5uiGy8kIbb0JMz/h8r6+O4=
+github.com/blevesearch/zap/v13 v13.0.6/go.mod h1:L89gsjdRKGyGrRN6nCpIScCvvkyxvmeDCwZRcjjPCrw=
 github.com/blevesearch/zap/v13 v13.0.6/go.mod h1:L89gsjdRKGyGrRN6nCpIScCvvkyxvmeDCwZRcjjPCrw=
 github.com/blevesearch/zap/v14 v14.0.5 h1:NdcT+81Nvmp2zL+NhwSvGSLh7xNgGL8QRVZ67njR0NU=
+github.com/blevesearch/zap/v14 v14.0.5 h1:NdcT+81Nvmp2zL+NhwSvGSLh7xNgGL8QRVZ67njR0NU=
+github.com/blevesearch/zap/v14 v14.0.5/go.mod h1:bWe8S7tRrSBTIaZ6cLRbgNH4TUDaC9LZSpRGs85AsGY=
 github.com/blevesearch/zap/v14 v14.0.5/go.mod h1:bWe8S7tRrSBTIaZ6cLRbgNH4TUDaC9LZSpRGs85AsGY=
 github.com/blevesearch/zap/v15 v15.0.3 h1:Ylj8Oe+mo0P25tr9iLPp33lN6d4qcztGjaIsP51UxaY=
 github.com/blevesearch/zap/v15 v15.0.3/go.mod h1:iuwQrImsh1WjWJ0Ue2kBqY83a0rFtJTqfa9fp1rbVVU=
@@ -176,8 +182,9 @@ github.com/go-errors/errors v1.0.1 h1:LUHzmkK3GUKUrL/1gfBUxAHzcev3apQlezX/+O7ma6
 github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q=
 github.com/go-git/gcfg v1.5.0 h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4=
 github.com/go-git/gcfg v1.5.0/go.mod h1:5m20vg6GwYabIxaOonVkTdrILxQMpEShl1xiMF4ua+E=
-github.com/go-git/go-billy/v5 v5.0.0 h1:7NQHvd9FVid8VL4qVUMm8XifBK+2xCoZ2lSk0agRrHM=
 github.com/go-git/go-billy/v5 v5.0.0/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0=
+github.com/go-git/go-billy/v5 v5.1.0 h1:4pl5BV4o7ZG/lterP4S6WzJ6xr49Ba5ET9ygheTYahk=
+github.com/go-git/go-billy/v5 v5.1.0/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0=
 github.com/go-git/go-git-fixtures/v4 v4.0.2-0.20200613231340-f56387b50c12 h1:PbKy9zOy4aAKrJ5pibIRpVO2BXnK1Tlcg+caKI7Ox5M=
 github.com/go-git/go-git-fixtures/v4 v4.0.2-0.20200613231340-f56387b50c12/go.mod h1:m+ICp2rF3jDhFgEZ/8yziagdT1C+ZpZcrJjappBCDSw=
 github.com/go-git/go-git/v5 v5.2.0 h1:YPBLG/3UK1we1ohRkncLjaXWLW+HKp5QNM/jTli2JgI=
@@ -419,11 +426,9 @@ github.com/shurcooL/githubv4 v0.0.0-20190601194912-068505affed7 h1:Vk3RiBQpF0Ja+
 github.com/shurcooL/githubv4 v0.0.0-20190601194912-068505affed7/go.mod h1:hAF0iLZy4td2EX+/8Tw+4nodhlMrwN3HupfaXj3zkGo=
 github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f h1:tygelZueB1EtXkPI6mQ4o9DQ0+FKW41hTbunoXZCTqk=
 github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f/go.mod h1:AuYgA5Kyo4c7HfUmvRGs/6rGlMMV/6B1bVnB9JxJEEg=
-github.com/shurcooL/httpfs v0.0.0-20171119174359-809beceb2371 h1:SWV2fHctRpRrp49VXJ6UZja7gU9QLHwRpIPBN89SKEo=
 github.com/shurcooL/httpfs v0.0.0-20171119174359-809beceb2371/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg=
 github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=
 github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
-github.com/shurcooL/vfsgen v0.0.0-20180121065927-ffb13db8def0 h1:JJV9CsgM9EC9w2iVkwuz+sMx8yRFe89PJRUrv6hPCIA=
 github.com/shurcooL/vfsgen v0.0.0-20180121065927-ffb13db8def0/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw=
 github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
 github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
@@ -436,8 +441,6 @@ github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasO
 github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=
 github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
 github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU=
-github.com/spf13/cobra v1.1.1 h1:KfztREH0tPxJJ+geloSLaAkaPkr4ki2Er5quFV1TDo4=
-github.com/spf13/cobra v1.1.1/go.mod h1:WnodtKOvamDL/PwE2M4iKs8aMDBZ5Q5klgD3qfVJQMI=
 github.com/spf13/cobra v1.1.3 h1:xghbfqPkxzxP3C/f3n5DdpAbdKLj4ZE4BWQI362l53M=
 github.com/spf13/cobra v1.1.3/go.mod h1:pGADOWyqRD/YMrPZigI/zbliZ2wVD/23d+is3pSWzOo=
 github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
@@ -474,8 +477,7 @@ github.com/vektah/gqlparser v1.3.1 h1:8b0IcD3qZKWJQHSzynbDlrtP3IxVydZ2DZepCGofqf
 github.com/vektah/gqlparser v1.3.1/go.mod h1:bkVf0FX+Stjg/MHnm8mEyubuaArhNEqfQhF+OTiAL74=
 github.com/willf/bitset v1.1.10 h1:NotGKqX0KwQ72NUzqrjZq5ipPNDQex9lo3WpaS8L2sc=
 github.com/willf/bitset v1.1.10/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4=
-github.com/xanzy/go-gitlab v0.40.1 h1:jHueLh5Inzv20TL5Yki+CaLmyvtw3Yq7blbWx7GmglQ=
-github.com/xanzy/go-gitlab v0.40.1/go.mod h1:sPLojNBn68fMUWSxIJtdVVIP8uSBYqesTfDUseX11Ug=
+github.com/xanzy/go-gitlab v0.44.0 h1:cEiGhqu7EpFGuei2a2etAwB+x6403E5CvpLn35y+GPs=
 github.com/xanzy/go-gitlab v0.44.0/go.mod h1:sPLojNBn68fMUWSxIJtdVVIP8uSBYqesTfDUseX11Ug=
 github.com/xanzy/ssh-agent v0.2.1/go.mod h1:mLlQY/MoOhWBj+gOGMQkOeiEvkx+8pJSI+0Bx9h2kr4=
 github.com/xanzy/ssh-agent v0.3.0 h1:wUMzuKtKilRgBAD1sUb8gOwwRr2FGoBVumcjoOACClI=
@@ -629,15 +631,15 @@ golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7w
 golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20201020230747-6e5568b54d1a h1:e3IU37lwO4aq3uoRKINC7JikojFmE5gO7xhfxs8VC34=
-golang.org/x/sys v0.0.0-20201020230747-6e5568b54d1a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4 h1:EZ2mChiOa8udjfp6rRmswTbtZN/QzUQp4ptM4rnjHvc=
+golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
 golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.3.5 h1:i6eZZ+zk0SOf0xgBpEpPD18qWcJda6q1sxt3S0kzyUQ=
-golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=
+golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -790,8 +792,6 @@ gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bl
 gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
 gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
 gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=

identity/identity.go 🔗

@@ -5,8 +5,6 @@ import (
 	"encoding/json"
 	"fmt"
 	"reflect"
-	"strings"
-	"time"
 
 	"github.com/pkg/errors"
 
@@ -35,47 +33,27 @@ var _ Interface = &Identity{}
 var _ entity.Interface = &Identity{}
 
 type Identity struct {
-	// Id used as unique identifier
-	id entity.Id
-
 	// all the successive version of the identity
-	versions []*Version
-
-	// not serialized
-	lastCommit repository.Hash
+	versions []*version
 }
 
-func NewIdentity(name string, email string) *Identity {
-	return &Identity{
-		id: entity.UnsetId,
-		versions: []*Version{
-			{
-				name:  name,
-				email: email,
-				nonce: makeNonce(20),
-			},
-		},
-	}
+func NewIdentity(repo repository.RepoClock, name string, email string) (*Identity, error) {
+	return NewIdentityFull(repo, name, email, "", "", nil)
 }
 
-func NewIdentityFull(name string, email string, login string, avatarUrl string) *Identity {
-	return &Identity{
-		id: entity.UnsetId,
-		versions: []*Version{
-			{
-				name:      name,
-				email:     email,
-				login:     login,
-				avatarURL: avatarUrl,
-				nonce:     makeNonce(20),
-			},
-		},
+func NewIdentityFull(repo repository.RepoClock, name string, email string, login string, avatarUrl string, keys []*Key) (*Identity, error) {
+	v, err := newVersion(repo, name, email, login, avatarUrl, keys)
+	if err != nil {
+		return nil, err
 	}
+	return &Identity{
+		versions: []*version{v},
+	}, nil
 }
 
 // NewFromGitUser will query the repository for user detail and
 // build the corresponding Identity
-func NewFromGitUser(repo repository.Repo) (*Identity, error) {
+func NewFromGitUser(repo repository.ClockedRepo) (*Identity, error) {
 	name, err := repo.GetUserName()
 	if err != nil {
 		return nil, err
@@ -92,13 +70,13 @@ func NewFromGitUser(repo repository.Repo) (*Identity, error) {
 		return nil, errors.New("user name is not configured in git yet. Please use `git config --global user.email johndoe@example.com`")
 	}
 
-	return NewIdentity(name, email), nil
+	return NewIdentity(repo, name, email)
 }
 
 // MarshalJSON will only serialize the id
 func (i *Identity) MarshalJSON() ([]byte, error) {
 	return json.Marshal(&IdentityStub{
-		id: i.id,
+		id: i.Id(),
 	})
 }
 
@@ -123,36 +101,32 @@ func ReadRemote(repo repository.Repo, remote string, id string) (*Identity, erro
 
 // read will load and parse an identity from git
 func read(repo repository.Repo, ref string) (*Identity, error) {
-	refSplit := strings.Split(ref, "/")
-	id := entity.Id(refSplit[len(refSplit)-1])
+	id := entity.RefToId(ref)
 
 	if err := id.Validate(); err != nil {
 		return nil, errors.Wrap(err, "invalid ref")
 	}
 
 	hashes, err := repo.ListCommits(ref)
-
-	// TODO: this is not perfect, it might be a command invoke error
 	if err != nil {
 		return nil, ErrIdentityNotExist
 	}
-
-	i := &Identity{
-		id: id,
+	if len(hashes) == 0 {
+		return nil, fmt.Errorf("empty identity")
 	}
 
+	i := &Identity{}
+
 	for _, hash := range hashes {
 		entries, err := repo.ReadTree(hash)
 		if err != nil {
 			return nil, errors.Wrap(err, "can't list git tree entries")
 		}
-
 		if len(entries) != 1 {
 			return nil, fmt.Errorf("invalid identity data at hash %s", hash)
 		}
 
 		entry := entries[0]
-
 		if entry.Name != versionEntryName {
 			return nil, fmt.Errorf("invalid identity data at hash %s", hash)
 		}
@@ -162,20 +136,22 @@ func read(repo repository.Repo, ref string) (*Identity, error) {
 			return nil, errors.Wrap(err, "failed to read git blob data")
 		}
 
-		var version Version
+		var version version
 		err = json.Unmarshal(data, &version)
-
 		if err != nil {
 			return nil, errors.Wrapf(err, "failed to decode Identity version json %s", hash)
 		}
 
 		// tag the version with the commit hash
 		version.commitHash = hash
-		i.lastCommit = hash
 
 		i.versions = append(i.versions, &version)
 	}
 
+	if id != i.versions[0].Id() {
+		return nil, fmt.Errorf("identity ID doesn't match the first version ID")
+	}
+
 	return i, nil
 }
 
@@ -292,32 +268,49 @@ type Mutator struct {
 }
 
 // Mutate allow to create a new version of the Identity in one go
-func (i *Identity) Mutate(f func(orig Mutator) Mutator) {
+func (i *Identity) Mutate(repo repository.RepoClock, f func(orig *Mutator)) error {
+	copyKeys := func(keys []*Key) []*Key {
+		result := make([]*Key, len(keys))
+		for i, key := range keys {
+			result[i] = key.Clone()
+		}
+		return result
+	}
+
 	orig := Mutator{
 		Name:      i.Name(),
 		Email:     i.Email(),
 		Login:     i.Login(),
 		AvatarUrl: i.AvatarUrl(),
-		Keys:      i.Keys(),
+		Keys:      copyKeys(i.Keys()),
 	}
-	mutated := f(orig)
+	mutated := orig
+	mutated.Keys = copyKeys(orig.Keys)
+
+	f(&mutated)
+
 	if reflect.DeepEqual(orig, mutated) {
-		return
-	}
-	i.versions = append(i.versions, &Version{
-		name:      mutated.Name,
-		email:     mutated.Email,
-		login:     mutated.Login,
-		avatarURL: mutated.AvatarUrl,
-		keys:      mutated.Keys,
-	})
+		return nil
+	}
+
+	v, err := newVersion(repo,
+		mutated.Name,
+		mutated.Email,
+		mutated.Login,
+		mutated.AvatarUrl,
+		mutated.Keys,
+	)
+	if err != nil {
+		return err
+	}
+
+	i.versions = append(i.versions, v)
+	return nil
 }
 
 // Write the identity into the Repository. In particular, this ensure that
 // the Id is properly set.
 func (i *Identity) Commit(repo repository.ClockedRepo) error {
-	// Todo: check for mismatch between memory and commit data
-
 	if !i.NeedCommit() {
 		return fmt.Errorf("can't commit an identity with no pending version")
 	}
@@ -326,24 +319,14 @@ func (i *Identity) Commit(repo repository.ClockedRepo) error {
 		return errors.Wrap(err, "can't commit an identity with invalid data")
 	}
 
+	var lastCommit repository.Hash
 	for _, v := range i.versions {
 		if v.commitHash != "" {
-			i.lastCommit = v.commitHash
+			lastCommit = v.commitHash
 			// ignore already commit versions
 			continue
 		}
 
-		// get the times where new versions starts to be valid
-		// TODO: instead of this hardcoded clock for bugs only, this need to be
-		// a vector of edit clock, one for each entity (bug, PR, config ..)
-		bugEditClock, err := repo.GetOrCreateClock("bug-edit")
-		if err != nil {
-			return err
-		}
-
-		v.time = bugEditClock.Time()
-		v.unixTime = time.Now().Unix()
-
 		blobHash, err := v.Write(repo)
 		if err != nil {
 			return err
@@ -360,37 +343,21 @@ func (i *Identity) Commit(repo repository.ClockedRepo) error {
 		}
 
 		var commitHash repository.Hash
-		if i.lastCommit != "" {
-			commitHash, err = repo.StoreCommitWithParent(treeHash, i.lastCommit)
+		if lastCommit != "" {
+			commitHash, err = repo.StoreCommit(treeHash, lastCommit)
 		} else {
 			commitHash, err = repo.StoreCommit(treeHash)
 		}
-
 		if err != nil {
 			return err
 		}
 
-		i.lastCommit = commitHash
+		lastCommit = commitHash
 		v.commitHash = commitHash
-
-		// if it was the first commit, use the commit hash as the Identity id
-		if i.id == "" || i.id == entity.UnsetId {
-			i.id = entity.Id(commitHash)
-		}
-	}
-
-	if i.id == "" {
-		panic("identity with no id")
 	}
 
-	ref := fmt.Sprintf("%s%s", identityRefPattern, i.id)
-	err := repo.UpdateRef(ref, i.lastCommit)
-
-	if err != nil {
-		return err
-	}
-
-	return nil
+	ref := fmt.Sprintf("%s%s", identityRefPattern, i.Id().String())
+	return repo.UpdateRef(ref, lastCommit)
 }
 
 func (i *Identity) CommitAsNeeded(repo repository.ClockedRepo) error {
@@ -433,20 +400,17 @@ func (i *Identity) NeedCommit() bool {
 // confident enough to implement that. I choose the strict fast-forward only approach,
 // despite it's potential problem with two different version as mentioned above.
 func (i *Identity) Merge(repo repository.Repo, other *Identity) (bool, error) {
-	if i.id != other.id {
+	if i.Id() != other.Id() {
 		return false, errors.New("merging unrelated identities is not supported")
 	}
 
-	if i.lastCommit == "" || other.lastCommit == "" {
-		return false, errors.New("can't merge identities that has never been stored")
-	}
-
 	modified := false
+	var lastCommit repository.Hash
 	for j, otherVersion := range other.versions {
 		// if there is more version in other, take them
 		if len(i.versions) == j {
 			i.versions = append(i.versions, otherVersion)
-			i.lastCommit = otherVersion.commitHash
+			lastCommit = otherVersion.commitHash
 			modified = true
 		}
 
@@ -458,7 +422,7 @@ func (i *Identity) Merge(repo repository.Repo, other *Identity) (bool, error) {
 	}
 
 	if modified {
-		err := repo.UpdateRef(identityRefPattern+i.id.String(), i.lastCommit)
+		err := repo.UpdateRef(identityRefPattern+i.Id().String(), lastCommit)
 		if err != nil {
 			return false, err
 		}
@@ -469,7 +433,7 @@ func (i *Identity) Merge(repo repository.Repo, other *Identity) (bool, error) {
 
 // Validate check if the Identity data is valid
 func (i *Identity) Validate() error {
-	lastTime := lamport.Time(0)
+	lastTimes := make(map[string]lamport.Time)
 
 	if len(i.versions) == 0 {
 		return fmt.Errorf("no version")
@@ -480,22 +444,27 @@ func (i *Identity) Validate() error {
 			return err
 		}
 
-		if v.commitHash != "" && v.time < lastTime {
-			return fmt.Errorf("non-chronological version (%d --> %d)", lastTime, v.time)
+		// check for always increasing lamport time
+		// check that a new version didn't drop a clock
+		for name, previous := range lastTimes {
+			if now, ok := v.times[name]; ok {
+				if now < previous {
+					return fmt.Errorf("non-chronological lamport clock %s (%d --> %d)", name, previous, now)
+				}
+			} else {
+				return fmt.Errorf("version has less lamport clocks than before (missing %s)", name)
+			}
 		}
 
-		lastTime = v.time
-	}
-
-	// The identity Id should be the hash of the first commit
-	if i.versions[0].commitHash != "" && string(i.versions[0].commitHash) != i.id.String() {
-		return fmt.Errorf("identity id should be the first commit hash")
+		for name, now := range v.times {
+			lastTimes[name] = now
+		}
 	}
 
 	return nil
 }
 
-func (i *Identity) lastVersion() *Version {
+func (i *Identity) lastVersion() *version {
 	if len(i.versions) <= 0 {
 		panic("no version at all")
 	}
@@ -505,12 +474,8 @@ func (i *Identity) lastVersion() *Version {
 
 // Id return the Identity identifier
 func (i *Identity) Id() entity.Id {
-	if i.id == "" || i.id == entity.UnsetId {
-		// simply panic as it would be a coding error
-		// (using an id of an identity not stored yet)
-		panic("no id yet")
-	}
-	return i.id
+	// id is the id of the first version
+	return i.versions[0].Id()
 }
 
 // Name return the last version of the name
@@ -518,6 +483,21 @@ func (i *Identity) Name() string {
 	return i.lastVersion().name
 }
 
+// DisplayName return a non-empty string to display, representing the
+// identity, based on the non-empty values.
+func (i *Identity) DisplayName() string {
+	switch {
+	case i.Name() == "" && i.Login() != "":
+		return i.Login()
+	case i.Name() != "" && i.Login() == "":
+		return i.Name()
+	case i.Name() != "" && i.Login() != "":
+		return fmt.Sprintf("%s (%s)", i.Name(), i.Login())
+	}
+
+	panic("invalid person data")
+}
+
 // Email return the last version of the email
 func (i *Identity) Email() string {
 	return i.lastVersion().email
@@ -538,12 +518,35 @@ func (i *Identity) Keys() []*Key {
 	return i.lastVersion().keys
 }
 
+// SigningKey return the key that should be used to sign new messages. If no key is available, return nil.
+func (i *Identity) SigningKey(repo repository.RepoKeyring) (*Key, error) {
+	keys := i.Keys()
+	for _, key := range keys {
+		err := key.ensurePrivateKey(repo)
+		if err == errNoPrivateKey {
+			continue
+		}
+		if err != nil {
+			return nil, err
+		}
+		return key, nil
+	}
+	return nil, nil
+}
+
 // ValidKeysAtTime return the set of keys valid at a given lamport time
-func (i *Identity) ValidKeysAtTime(time lamport.Time) []*Key {
+func (i *Identity) ValidKeysAtTime(clockName string, time lamport.Time) []*Key {
 	var result []*Key
 
+	var lastTime lamport.Time
 	for _, v := range i.versions {
-		if v.time > time {
+		refTime, ok := v.times[clockName]
+		if !ok {
+			refTime = lastTime
+		}
+		lastTime = refTime
+
+		if refTime > time {
 			return result
 		}
 
@@ -553,19 +556,14 @@ func (i *Identity) ValidKeysAtTime(time lamport.Time) []*Key {
 	return result
 }
 
-// DisplayName return a non-empty string to display, representing the
-// identity, based on the non-empty values.
-func (i *Identity) DisplayName() string {
-	switch {
-	case i.Name() == "" && i.Login() != "":
-		return i.Login()
-	case i.Name() != "" && i.Login() == "":
-		return i.Name()
-	case i.Name() != "" && i.Login() != "":
-		return fmt.Sprintf("%s (%s)", i.Name(), i.Login())
-	}
+// LastModification return the timestamp at which the last version of the identity became valid.
+func (i *Identity) LastModification() timestamp.Timestamp {
+	return timestamp.Timestamp(i.lastVersion().unixTime)
+}
 
-	panic("invalid person data")
+// LastModificationLamports return the lamport times at which the last version of the identity became valid.
+func (i *Identity) LastModificationLamports() map[string]lamport.Time {
+	return i.lastVersion().times
 }
 
 // IsProtected return true if the chain of git commits started to be signed.
@@ -575,27 +573,23 @@ func (i *Identity) IsProtected() bool {
 	return false
 }
 
-// LastModificationLamportTime return the Lamport time at which the last version of the identity became valid.
-func (i *Identity) LastModificationLamport() lamport.Time {
-	return i.lastVersion().time
-}
-
-// LastModification return the timestamp at which the last version of the identity became valid.
-func (i *Identity) LastModification() timestamp.Timestamp {
-	return timestamp.Timestamp(i.lastVersion().unixTime)
-}
-
-// SetMetadata store arbitrary metadata along the last not-commit Version.
-// If the Version has been commit to git already, a new identical version is added and will need to be
+// SetMetadata store arbitrary metadata along the last non-committed version.
+// If the version has already been committed to git, a new identical version is added and will need a new
 // commit.
 func (i *Identity) SetMetadata(key string, value string) {
+	// once committed, data is immutable so we create a new version
 	if i.lastVersion().commitHash != "" {
 		i.versions = append(i.versions, i.lastVersion().Clone())
 	}
+	// if Id() has been called, we can't change the first version anymore, so we create a new version
+	if len(i.versions) == 1 && i.versions[0].id != entity.UnsetId && i.versions[0].id != "" {
+		i.versions = append(i.versions, i.lastVersion().Clone())
+	}
+
 	i.lastVersion().SetMetadata(key, value)
 }
 
-// ImmutableMetadata return all metadata for this Identity, accumulated from each Version.
+// ImmutableMetadata return all metadata for this Identity, accumulated from each version.
 // If multiple value are found, the first defined takes precedence.
 func (i *Identity) ImmutableMetadata() map[string]string {
 	metadata := make(map[string]string)
@@ -611,7 +605,7 @@ func (i *Identity) ImmutableMetadata() map[string]string {
 	return metadata
 }
 
-// MutableMetadata return all metadata for this Identity, accumulated from each Version.
+// MutableMetadata return all metadata for this Identity, accumulated from each version.
 // If multiple value are found, the last defined takes precedence.
 func (i *Identity) MutableMetadata() map[string]string {
 	metadata := make(map[string]string)
@@ -624,9 +618,3 @@ func (i *Identity) MutableMetadata() map[string]string {
 
 	return metadata
 }
-
-// addVersionForTest add a new version to the identity
-// Only for testing !
-func (i *Identity) addVersionForTest(version *Version) {
-	i.versions = append(i.versions, version)
-}

identity/identity_actions.go 🔗

@@ -13,19 +13,12 @@ import (
 // Fetch retrieve updates from a remote
 // This does not change the local identities state
 func Fetch(repo repository.Repo, remote string) (string, error) {
-	// "refs/identities/*:refs/remotes/<remote>/identities/*"
-	remoteRefSpec := fmt.Sprintf(identityRemoteRefPattern, remote)
-	fetchRefSpec := fmt.Sprintf("%s*:%s*", identityRefPattern, remoteRefSpec)
-
-	return repo.FetchRefs(remote, fetchRefSpec)
+	return repo.FetchRefs(remote, "identities")
 }
 
 // Push update a remote with the local changes
 func Push(repo repository.Repo, remote string) (string, error) {
-	// "refs/identities/*:refs/identities/*"
-	refspec := fmt.Sprintf("%s*:%s*", identityRefPattern, identityRefPattern)
-
-	return repo.PushRefs(remote, refspec)
+	return repo.PushRefs(remote, "identities")
 }
 
 // Pull will do a Fetch + MergeAll
@@ -102,7 +95,7 @@ func MergeAll(repo repository.ClockedRepo, remote string) <-chan entity.MergeRes
 					return
 				}
 
-				out <- entity.NewMergeStatus(entity.MergeStatusNew, id, remoteIdentity)
+				out <- entity.NewMergeNewStatus(id, remoteIdentity)
 				continue
 			}
 
@@ -121,9 +114,9 @@ func MergeAll(repo repository.ClockedRepo, remote string) <-chan entity.MergeRes
 			}
 
 			if updated {
-				out <- entity.NewMergeStatus(entity.MergeStatusUpdated, id, localIdentity)
+				out <- entity.NewMergeUpdatedStatus(id, localIdentity)
 			} else {
-				out <- entity.NewMergeStatus(entity.MergeStatusNothing, id, localIdentity)
+				out <- entity.NewMergeNothingStatus(id)
 			}
 		}
 	}()

identity/identity_actions_test.go 🔗

@@ -8,12 +8,13 @@ import (
 	"github.com/MichaelMure/git-bug/repository"
 )
 
-func TestPushPull(t *testing.T) {
-	repoA, repoB, remote := repository.SetupReposAndRemote()
+func TestIdentityPushPull(t *testing.T) {
+	repoA, repoB, remote := repository.SetupGoGitReposAndRemote()
 	defer repository.CleanupTestRepos(repoA, repoB, remote)
 
-	identity1 := NewIdentity("name1", "email1")
-	err := identity1.Commit(repoA)
+	identity1, err := NewIdentity(repoA, "name1", "email1")
+	require.NoError(t, err)
+	err = identity1.Commit(repoA)
 	require.NoError(t, err)
 
 	// A --> remote --> B
@@ -30,7 +31,8 @@ func TestPushPull(t *testing.T) {
 	}
 
 	// B --> remote --> A
-	identity2 := NewIdentity("name2", "email2")
+	identity2, err := NewIdentity(repoB, "name2", "email2")
+	require.NoError(t, err)
 	err = identity2.Commit(repoB)
 	require.NoError(t, err)
 
@@ -48,17 +50,19 @@ func TestPushPull(t *testing.T) {
 
 	// Update both
 
-	identity1.addVersionForTest(&Version{
-		name:  "name1b",
-		email: "email1b",
+	err = identity1.Mutate(repoA, func(orig *Mutator) {
+		orig.Name = "name1b"
+		orig.Email = "email1b"
 	})
+	require.NoError(t, err)
 	err = identity1.Commit(repoA)
 	require.NoError(t, err)
 
-	identity2.addVersionForTest(&Version{
-		name:  "name2b",
-		email: "email2b",
+	err = identity2.Mutate(repoB, func(orig *Mutator) {
+		orig.Name = "name2b"
+		orig.Email = "email2b"
 	})
+	require.NoError(t, err)
 	err = identity2.Commit(repoB)
 	require.NoError(t, err)
 
@@ -92,20 +96,22 @@ func TestPushPull(t *testing.T) {
 
 	// Concurrent update
 
-	identity1.addVersionForTest(&Version{
-		name:  "name1c",
-		email: "email1c",
+	err = identity1.Mutate(repoA, func(orig *Mutator) {
+		orig.Name = "name1c"
+		orig.Email = "email1c"
 	})
+	require.NoError(t, err)
 	err = identity1.Commit(repoA)
 	require.NoError(t, err)
 
 	identity1B, err := ReadLocal(repoB, identity1.Id())
 	require.NoError(t, err)
 
-	identity1B.addVersionForTest(&Version{
-		name:  "name1concurrent",
-		email: "email1concurrent",
+	err = identity1B.Mutate(repoB, func(orig *Mutator) {
+		orig.Name = "name1concurrent"
+		orig.Email = "email1concurrent"
 	})
+	require.NoError(t, err)
 	err = identity1B.Commit(repoB)
 	require.NoError(t, err)
 

identity/identity_stub.go 🔗

@@ -52,6 +52,10 @@ func (IdentityStub) Name() string {
 	panic("identities needs to be properly loaded with identity.ReadLocal()")
 }
 
+func (IdentityStub) DisplayName() string {
+	panic("identities needs to be properly loaded with identity.ReadLocal()")
+}
+
 func (IdentityStub) Email() string {
 	panic("identities needs to be properly loaded with identity.ReadLocal()")
 }
@@ -68,23 +72,19 @@ func (IdentityStub) Keys() []*Key {
 	panic("identities needs to be properly loaded with identity.ReadLocal()")
 }
 
-func (IdentityStub) ValidKeysAtTime(_ lamport.Time) []*Key {
+func (i *IdentityStub) SigningKey(repo repository.RepoKeyring) (*Key, error) {
 	panic("identities needs to be properly loaded with identity.ReadLocal()")
 }
 
-func (IdentityStub) DisplayName() string {
+func (IdentityStub) ValidKeysAtTime(_ string, _ lamport.Time) []*Key {
 	panic("identities needs to be properly loaded with identity.ReadLocal()")
 }
 
-func (IdentityStub) Validate() error {
-	panic("identities needs to be properly loaded with identity.ReadLocal()")
-}
-
-func (IdentityStub) CommitWithRepo(repo repository.ClockedRepo) error {
+func (i *IdentityStub) LastModification() timestamp.Timestamp {
 	panic("identities needs to be properly loaded with identity.ReadLocal()")
 }
 
-func (i *IdentityStub) CommitAsNeededWithRepo(repo repository.ClockedRepo) error {
+func (i *IdentityStub) LastModificationLamports() map[string]lamport.Time {
 	panic("identities needs to be properly loaded with identity.ReadLocal()")
 }
 
@@ -92,11 +92,7 @@ func (IdentityStub) IsProtected() bool {
 	panic("identities needs to be properly loaded with identity.ReadLocal()")
 }
 
-func (i *IdentityStub) LastModificationLamport() lamport.Time {
-	panic("identities needs to be properly loaded with identity.ReadLocal()")
-}
-
-func (i *IdentityStub) LastModification() timestamp.Timestamp {
+func (IdentityStub) Validate() error {
 	panic("identities needs to be properly loaded with identity.ReadLocal()")
 }
 

identity/identity_test.go 🔗

@@ -6,120 +6,108 @@ import (
 
 	"github.com/stretchr/testify/require"
 
-	"github.com/MichaelMure/git-bug/entity"
 	"github.com/MichaelMure/git-bug/repository"
+	"github.com/MichaelMure/git-bug/util/lamport"
 )
 
 // Test the commit and load of an Identity with multiple versions
 func TestIdentityCommitLoad(t *testing.T) {
-	mockRepo := repository.NewMockRepoForTest()
+	repo := makeIdentityTestRepo(t)
 
 	// single version
 
-	identity := &Identity{
-		id: entity.UnsetId,
-		versions: []*Version{
-			{
-				name:  "René Descartes",
-				email: "rene.descartes@example.com",
-			},
-		},
-	}
+	identity, err := NewIdentity(repo, "René Descartes", "rene.descartes@example.com")
+	require.NoError(t, err)
 
-	err := identity.Commit(mockRepo)
+	idBeforeCommit := identity.Id()
 
+	err = identity.Commit(repo)
 	require.NoError(t, err)
-	require.NotEmpty(t, identity.id)
 
-	loaded, err := ReadLocal(mockRepo, identity.id)
+	commitsAreSet(t, identity)
+	require.NotEmpty(t, identity.Id())
+	require.Equal(t, idBeforeCommit, identity.Id())
+	require.Equal(t, idBeforeCommit, identity.versions[0].Id())
+
+	loaded, err := ReadLocal(repo, identity.Id())
 	require.NoError(t, err)
 	commitsAreSet(t, loaded)
 	require.Equal(t, identity, loaded)
 
-	// multiple version
+	// multiple versions
 
-	identity = &Identity{
-		id: entity.UnsetId,
-		versions: []*Version{
-			{
-				time:  100,
-				name:  "René Descartes",
-				email: "rene.descartes@example.com",
-				keys: []*Key{
-					{PubKey: "pubkeyA"},
-				},
-			},
-			{
-				time:  200,
-				name:  "René Descartes",
-				email: "rene.descartes@example.com",
-				keys: []*Key{
-					{PubKey: "pubkeyB"},
-				},
-			},
-			{
-				time:  201,
-				name:  "René Descartes",
-				email: "rene.descartes@example.com",
-				keys: []*Key{
-					{PubKey: "pubkeyC"},
-				},
-			},
-		},
-	}
+	identity, err = NewIdentityFull(repo, "René Descartes", "rene.descartes@example.com", "", "", []*Key{generatePublicKey()})
+	require.NoError(t, err)
 
-	err = identity.Commit(mockRepo)
+	idBeforeCommit = identity.Id()
 
+	err = identity.Mutate(repo, func(orig *Mutator) {
+		orig.Keys = []*Key{generatePublicKey()}
+	})
 	require.NoError(t, err)
-	require.NotEmpty(t, identity.id)
 
-	loaded, err = ReadLocal(mockRepo, identity.id)
+	err = identity.Mutate(repo, func(orig *Mutator) {
+		orig.Keys = []*Key{generatePublicKey()}
+	})
+	require.NoError(t, err)
+
+	require.Equal(t, idBeforeCommit, identity.Id())
+
+	err = identity.Commit(repo)
+	require.NoError(t, err)
+
+	commitsAreSet(t, identity)
+	require.NotEmpty(t, identity.Id())
+	require.Equal(t, idBeforeCommit, identity.Id())
+	require.Equal(t, idBeforeCommit, identity.versions[0].Id())
+
+	loaded, err = ReadLocal(repo, identity.Id())
 	require.NoError(t, err)
 	commitsAreSet(t, loaded)
 	require.Equal(t, identity, loaded)
 
 	// add more version
 
-	identity.addVersionForTest(&Version{
-		time:  201,
-		name:  "René Descartes",
-		email: "rene.descartes@example.com",
-		keys: []*Key{
-			{PubKey: "pubkeyD"},
-		},
+	err = identity.Mutate(repo, func(orig *Mutator) {
+		orig.Email = "rene@descartes.com"
+		orig.Keys = []*Key{generatePublicKey()}
 	})
+	require.NoError(t, err)
 
-	identity.addVersionForTest(&Version{
-		time:  300,
-		name:  "René Descartes",
-		email: "rene.descartes@example.com",
-		keys: []*Key{
-			{PubKey: "pubkeyE"},
-		},
+	err = identity.Mutate(repo, func(orig *Mutator) {
+		orig.Email = "rene@descartes.com"
+		orig.Keys = []*Key{generatePublicKey(), generatePublicKey()}
 	})
+	require.NoError(t, err)
 
-	err = identity.Commit(mockRepo)
-
+	err = identity.Commit(repo)
 	require.NoError(t, err)
-	require.NotEmpty(t, identity.id)
 
-	loaded, err = ReadLocal(mockRepo, identity.id)
+	commitsAreSet(t, identity)
+	require.NotEmpty(t, identity.Id())
+	require.Equal(t, idBeforeCommit, identity.Id())
+	require.Equal(t, idBeforeCommit, identity.versions[0].Id())
+
+	loaded, err = ReadLocal(repo, identity.Id())
 	require.NoError(t, err)
 	commitsAreSet(t, loaded)
 	require.Equal(t, identity, loaded)
 }
 
 func TestIdentityMutate(t *testing.T) {
-	identity := NewIdentity("René Descartes", "rene.descartes@example.com")
+	repo := makeIdentityTestRepo(t)
+
+	identity, err := NewIdentity(repo, "René Descartes", "rene.descartes@example.com")
+	require.NoError(t, err)
 
 	require.Len(t, identity.versions, 1)
 
-	identity.Mutate(func(orig Mutator) Mutator {
+	err = identity.Mutate(repo, func(orig *Mutator) {
 		orig.Email = "rene@descartes.fr"
 		orig.Name = "René"
 		orig.Login = "rene"
-		return orig
 	})
+	require.NoError(t, err)
 
 	require.Len(t, identity.versions, 2)
 	require.Equal(t, identity.Email(), "rene@descartes.fr")
@@ -135,97 +123,93 @@ func commitsAreSet(t *testing.T, identity *Identity) {
 
 // Test that the correct crypto keys are returned for a given lamport time
 func TestIdentity_ValidKeysAtTime(t *testing.T) {
+	pubKeyA := generatePublicKey()
+	pubKeyB := generatePublicKey()
+	pubKeyC := generatePublicKey()
+	pubKeyD := generatePublicKey()
+	pubKeyE := generatePublicKey()
+
 	identity := Identity{
-		id: entity.UnsetId,
-		versions: []*Version{
+		versions: []*version{
 			{
-				time:  100,
-				name:  "René Descartes",
-				email: "rene.descartes@example.com",
-				keys: []*Key{
-					{PubKey: "pubkeyA"},
-				},
+				times: map[string]lamport.Time{"foo": 100},
+				keys:  []*Key{pubKeyA},
 			},
 			{
-				time:  200,
-				name:  "René Descartes",
-				email: "rene.descartes@example.com",
-				keys: []*Key{
-					{PubKey: "pubkeyB"},
-				},
+				times: map[string]lamport.Time{"foo": 200},
+				keys:  []*Key{pubKeyB},
 			},
 			{
-				time:  201,
-				name:  "René Descartes",
-				email: "rene.descartes@example.com",
-				keys: []*Key{
-					{PubKey: "pubkeyC"},
-				},
+				times: map[string]lamport.Time{"foo": 201},
+				keys:  []*Key{pubKeyC},
 			},
 			{
-				time:  201,
-				name:  "René Descartes",
-				email: "rene.descartes@example.com",
-				keys: []*Key{
-					{PubKey: "pubkeyD"},
-				},
+				times: map[string]lamport.Time{"foo": 201},
+				keys:  []*Key{pubKeyD},
 			},
 			{
-				time:  300,
-				name:  "René Descartes",
-				email: "rene.descartes@example.com",
-				keys: []*Key{
-					{PubKey: "pubkeyE"},
-				},
+				times: map[string]lamport.Time{"foo": 300},
+				keys:  []*Key{pubKeyE},
 			},
 		},
 	}
 
-	require.Nil(t, identity.ValidKeysAtTime(10))
-	require.Equal(t, identity.ValidKeysAtTime(100), []*Key{{PubKey: "pubkeyA"}})
-	require.Equal(t, identity.ValidKeysAtTime(140), []*Key{{PubKey: "pubkeyA"}})
-	require.Equal(t, identity.ValidKeysAtTime(200), []*Key{{PubKey: "pubkeyB"}})
-	require.Equal(t, identity.ValidKeysAtTime(201), []*Key{{PubKey: "pubkeyD"}})
-	require.Equal(t, identity.ValidKeysAtTime(202), []*Key{{PubKey: "pubkeyD"}})
-	require.Equal(t, identity.ValidKeysAtTime(300), []*Key{{PubKey: "pubkeyE"}})
-	require.Equal(t, identity.ValidKeysAtTime(3000), []*Key{{PubKey: "pubkeyE"}})
+	require.Nil(t, identity.ValidKeysAtTime("foo", 10))
+	require.Equal(t, identity.ValidKeysAtTime("foo", 100), []*Key{pubKeyA})
+	require.Equal(t, identity.ValidKeysAtTime("foo", 140), []*Key{pubKeyA})
+	require.Equal(t, identity.ValidKeysAtTime("foo", 200), []*Key{pubKeyB})
+	require.Equal(t, identity.ValidKeysAtTime("foo", 201), []*Key{pubKeyD})
+	require.Equal(t, identity.ValidKeysAtTime("foo", 202), []*Key{pubKeyD})
+	require.Equal(t, identity.ValidKeysAtTime("foo", 300), []*Key{pubKeyE})
+	require.Equal(t, identity.ValidKeysAtTime("foo", 3000), []*Key{pubKeyE})
 }
 
 // Test the immutable or mutable metadata search
 func TestMetadata(t *testing.T) {
-	mockRepo := repository.NewMockRepoForTest()
+	repo := makeIdentityTestRepo(t)
 
-	identity := NewIdentity("René Descartes", "rene.descartes@example.com")
+	identity, err := NewIdentity(repo, "René Descartes", "rene.descartes@example.com")
+	require.NoError(t, err)
 
 	identity.SetMetadata("key1", "value1")
 	assertHasKeyValue(t, identity.ImmutableMetadata(), "key1", "value1")
 	assertHasKeyValue(t, identity.MutableMetadata(), "key1", "value1")
 
-	err := identity.Commit(mockRepo)
+	err = identity.Commit(repo)
 	require.NoError(t, err)
 
 	assertHasKeyValue(t, identity.ImmutableMetadata(), "key1", "value1")
 	assertHasKeyValue(t, identity.MutableMetadata(), "key1", "value1")
 
 	// try override
-	identity.addVersionForTest(&Version{
-		name:  "René Descartes",
-		email: "rene.descartes@example.com",
+	err = identity.Mutate(repo, func(orig *Mutator) {
+		orig.Email = "rene@descartes.fr"
 	})
+	require.NoError(t, err)
 
 	identity.SetMetadata("key1", "value2")
 	assertHasKeyValue(t, identity.ImmutableMetadata(), "key1", "value1")
 	assertHasKeyValue(t, identity.MutableMetadata(), "key1", "value2")
 
-	err = identity.Commit(mockRepo)
+	err = identity.Commit(repo)
 	require.NoError(t, err)
 
 	// reload
-	loaded, err := ReadLocal(mockRepo, identity.id)
+	loaded, err := ReadLocal(repo, identity.Id())
 	require.NoError(t, err)
 
 	assertHasKeyValue(t, loaded.ImmutableMetadata(), "key1", "value1")
 	assertHasKeyValue(t, loaded.MutableMetadata(), "key1", "value2")
+
+	// set metadata after commit
+	versionCount := len(identity.versions)
+	identity.SetMetadata("foo", "bar")
+	require.True(t, identity.NeedCommit())
+	require.Len(t, identity.versions, versionCount+1)
+
+	err = identity.Commit(repo)
+	require.NoError(t, err)
+	require.Len(t, identity.versions, versionCount+1)
 }
 
 func assertHasKeyValue(t *testing.T, metadata map[string]string, key, value string) {
@@ -235,22 +219,15 @@ func assertHasKeyValue(t *testing.T, metadata map[string]string, key, value stri
 }
 
 func TestJSON(t *testing.T) {
-	mockRepo := repository.NewMockRepoForTest()
+	repo := makeIdentityTestRepo(t)
 
-	identity := &Identity{
-		id: entity.UnsetId,
-		versions: []*Version{
-			{
-				name:  "René Descartes",
-				email: "rene.descartes@example.com",
-			},
-		},
-	}
+	identity, err := NewIdentity(repo, "René Descartes", "rene.descartes@example.com")
+	require.NoError(t, err)
 
 	// commit to make sure we have an Id
-	err := identity.Commit(mockRepo)
+	err = identity.Commit(repo)
 	require.NoError(t, err)
-	require.NotEmpty(t, identity.id)
+	require.NotEmpty(t, identity.Id())
 
 	// serialize
 	data, err := json.Marshal(identity)
@@ -260,10 +237,10 @@ func TestJSON(t *testing.T) {
 	var i Interface
 	i, err = UnmarshalJSON(data)
 	require.NoError(t, err)
-	require.Equal(t, identity.id, i.Id())
+	require.Equal(t, identity.Id(), i.Id())
 
 	// make sure we can load the identity properly
-	i, err = ReadLocal(mockRepo, i.Id())
+	i, err = ReadLocal(repo, i.Id())
 	require.NoError(t, err)
 }
 
@@ -280,7 +257,9 @@ func TestIdentityRemove(t *testing.T) {
 	require.NoError(t, err)
 
 	// generate an identity for testing
-	rene := NewIdentity("René Descartes", "rene@descartes.fr")
+	rene, err := NewIdentity(repo, "René Descartes", "rene@descartes.fr")
+	require.NoError(t, err)
+
 	err = rene.Commit(repo)
 	require.NoError(t, err)
 

identity/interface.go 🔗

@@ -2,6 +2,7 @@ package identity
 
 import (
 	"github.com/MichaelMure/git-bug/entity"
+	"github.com/MichaelMure/git-bug/repository"
 	"github.com/MichaelMure/git-bug/util/lamport"
 	"github.com/MichaelMure/git-bug/util/timestamp"
 )
@@ -13,6 +14,10 @@ type Interface interface {
 	// Can be empty.
 	Name() string
 
+	// DisplayName return a non-empty string to display, representing the
+	// identity, based on the non-empty values.
+	DisplayName() string
+
 	// Email return the last version of the email
 	// Can be empty.
 	Email() string
@@ -32,26 +37,25 @@ type Interface interface {
 	// Can be empty.
 	Keys() []*Key
 
-	// ValidKeysAtTime return the set of keys valid at a given lamport time
+	// SigningKey return the key that should be used to sign new messages. If no key is available, return nil.
+	SigningKey(repo repository.RepoKeyring) (*Key, error)
+
+	// ValidKeysAtTime return the set of keys valid at a given lamport time for a given clock of another entity
 	// Can be empty.
-	ValidKeysAtTime(time lamport.Time) []*Key
+	ValidKeysAtTime(clockName string, time lamport.Time) []*Key
 
-	// DisplayName return a non-empty string to display, representing the
-	// identity, based on the non-empty values.
-	DisplayName() string
+	// LastModification return the timestamp at which the last version of the identity became valid.
+	LastModification() timestamp.Timestamp
 
-	// Validate check if the Identity data is valid
-	Validate() error
+	// LastModificationLamports return the lamport times at which the last version of the identity became valid.
+	LastModificationLamports() map[string]lamport.Time
 
 	// IsProtected return true if the chain of git commits started to be signed.
 	// If that's the case, only signed commit with a valid key for this identity can be added.
 	IsProtected() bool
 
-	// LastModificationLamportTime return the Lamport time at which the last version of the identity became valid.
-	LastModificationLamport() lamport.Time
-
-	// LastModification return the timestamp at which the last version of the identity became valid.
-	LastModification() timestamp.Timestamp
+	// Validate check if the Identity data is valid
+	Validate() error
 
 	// Indicate that the in-memory state changed and need to be commit in the repository
 	NeedCommit() bool

identity/key.go 🔗

@@ -1,18 +1,224 @@
 package identity
 
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"io"
+	"strings"
+	"time"
+
+	"github.com/pkg/errors"
+	"golang.org/x/crypto/openpgp"
+	"golang.org/x/crypto/openpgp/armor"
+	"golang.org/x/crypto/openpgp/packet"
+
+	"github.com/MichaelMure/git-bug/repository"
+)
+
+var errNoPrivateKey = fmt.Errorf("no private key")
+
 type Key struct {
-	// The GPG fingerprint of the key
-	Fingerprint string `json:"fingerprint"`
-	PubKey      string `json:"pub_key"`
+	public  *packet.PublicKey
+	private *packet.PrivateKey
+}
+
+// GenerateKey generate a keypair (public+private)
+// The type and configuration of the key is determined by the default value in go's OpenPGP.
+func GenerateKey() *Key {
+	entity, err := openpgp.NewEntity("", "", "", &packet.Config{
+		// The armored format doesn't include the creation time, which makes the round-trip data not being fully equal.
+		// We don't care about the creation time so we can set it to the zero value.
+		Time: func() time.Time {
+			return time.Time{}
+		},
+	})
+	if err != nil {
+		panic(err)
+	}
+	return &Key{
+		public:  entity.PrimaryKey,
+		private: entity.PrivateKey,
+	}
+}
+
+// generatePublicKey generate only a public key (only useful for testing)
+// See GenerateKey for the details.
+func generatePublicKey() *Key {
+	k := GenerateKey()
+	k.private = nil
+	return k
+}
+
+func (k *Key) Public() *packet.PublicKey {
+	return k.public
+}
+
+func (k *Key) Private() *packet.PrivateKey {
+	return k.private
 }
 
 func (k *Key) Validate() error {
-	// Todo
+	if k.public == nil {
+		return fmt.Errorf("nil public key")
+	}
+	if !k.public.CanSign() {
+		return fmt.Errorf("public key can't sign")
+	}
+
+	if k.private != nil {
+		if !k.private.CanSign() {
+			return fmt.Errorf("private key can't sign")
+		}
+	}
 
 	return nil
 }
 
 func (k *Key) Clone() *Key {
-	clone := *k
-	return &clone
+	clone := &Key{}
+
+	pub := *k.public
+	clone.public = &pub
+
+	if k.private != nil {
+		priv := *k.private
+		clone.private = &priv
+	}
+
+	return clone
+}
+
+func (k *Key) MarshalJSON() ([]byte, error) {
+	// Serialize only the public key, in the armored format.
+	var buf bytes.Buffer
+	w, err := armor.Encode(&buf, openpgp.PublicKeyType, nil)
+	if err != nil {
+		return nil, err
+	}
+
+	err = k.public.Serialize(w)
+	if err != nil {
+		return nil, err
+	}
+	err = w.Close()
+	if err != nil {
+		return nil, err
+	}
+	return json.Marshal(buf.String())
+}
+
+func (k *Key) UnmarshalJSON(data []byte) error {
+	// De-serialize only the public key, in the armored format.
+	var armored string
+	err := json.Unmarshal(data, &armored)
+	if err != nil {
+		return err
+	}
+
+	block, err := armor.Decode(strings.NewReader(armored))
+	if err == io.EOF {
+		return fmt.Errorf("no armored data found")
+	}
+	if err != nil {
+		return err
+	}
+
+	if block.Type != openpgp.PublicKeyType {
+		return fmt.Errorf("invalid key type")
+	}
+
+	p, err := packet.Read(block.Body)
+	if err != nil {
+		return errors.Wrap(err, "failed to read public key packet")
+	}
+
+	public, ok := p.(*packet.PublicKey)
+	if !ok {
+		return errors.New("got no packet.publicKey")
+	}
+
+	// The armored format doesn't include the creation time, which makes the round-trip data not being fully equal.
+	// We don't care about the creation time so we can set it to the zero value.
+	public.CreationTime = time.Time{}
+
+	k.public = public
+	return nil
+}
+
+func (k *Key) loadPrivate(repo repository.RepoKeyring) error {
+	item, err := repo.Keyring().Get(k.public.KeyIdString())
+	if err == repository.ErrKeyringKeyNotFound {
+		return errNoPrivateKey
+	}
+	if err != nil {
+		return err
+	}
+
+	block, err := armor.Decode(bytes.NewReader(item.Data))
+	if err == io.EOF {
+		return fmt.Errorf("no armored data found")
+	}
+	if err != nil {
+		return err
+	}
+
+	if block.Type != openpgp.PrivateKeyType {
+		return fmt.Errorf("invalid key type")
+	}
+
+	p, err := packet.Read(block.Body)
+	if err != nil {
+		return errors.Wrap(err, "failed to read private key packet")
+	}
+
+	private, ok := p.(*packet.PrivateKey)
+	if !ok {
+		return errors.New("got no packet.privateKey")
+	}
+
+	// The armored format doesn't include the creation time, which makes the round-trip data not being fully equal.
+	// We don't care about the creation time so we can set it to the zero value.
+	private.CreationTime = time.Time{}
+
+	k.private = private
+	return nil
+}
+
+// ensurePrivateKey attempt to load the corresponding private key if it is not loaded already.
+// If no private key is found, returns errNoPrivateKey
+func (k *Key) ensurePrivateKey(repo repository.RepoKeyring) error {
+	if k.private != nil {
+		return nil
+	}
+
+	return k.loadPrivate(repo)
+}
+
+func (k *Key) storePrivate(repo repository.RepoKeyring) error {
+	var buf bytes.Buffer
+	w, err := armor.Encode(&buf, openpgp.PrivateKeyType, nil)
+	if err != nil {
+		return err
+	}
+	err = k.private.Serialize(w)
+	if err != nil {
+		return err
+	}
+	err = w.Close()
+	if err != nil {
+		return err
+	}
+
+	return repo.Keyring().Set(repository.Item{
+		Key:  k.public.KeyIdString(),
+		Data: buf.Bytes(),
+	})
+}
+
+func (k *Key) PGPEntity() *openpgp.Entity {
+	return &openpgp.Entity{
+		PrimaryKey: k.public,
+		PrivateKey: k.private,
+	}
 }

identity/key_test.go 🔗

@@ -0,0 +1,60 @@
+package identity
+
+import (
+	"crypto/rsa"
+	"encoding/json"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+
+	"github.com/MichaelMure/git-bug/repository"
+)
+
+func TestPublicKeyJSON(t *testing.T) {
+	k := generatePublicKey()
+
+	dataJSON, err := json.Marshal(k)
+	require.NoError(t, err)
+
+	var read Key
+	err = json.Unmarshal(dataJSON, &read)
+	require.NoError(t, err)
+
+	require.Equal(t, k, &read)
+}
+
+func TestStoreLoad(t *testing.T) {
+	repo := repository.NewMockRepoKeyring()
+
+	// public + private
+	k := GenerateKey()
+
+	// Store
+
+	dataJSON, err := json.Marshal(k)
+	require.NoError(t, err)
+
+	err = k.storePrivate(repo)
+	require.NoError(t, err)
+
+	// Load
+
+	var read Key
+	err = json.Unmarshal(dataJSON, &read)
+	require.NoError(t, err)
+
+	err = read.ensurePrivateKey(repo)
+	require.NoError(t, err)
+
+	require.Equal(t, k.public, read.public)
+
+	require.IsType(t, (*rsa.PrivateKey)(nil), k.private.PrivateKey)
+
+	// See https://github.com/golang/crypto/pull/175
+	rsaPriv := read.private.PrivateKey.(*rsa.PrivateKey)
+	back := rsaPriv.Primes[0]
+	rsaPriv.Primes[0] = rsaPriv.Primes[1]
+	rsaPriv.Primes[1] = back
+
+	require.True(t, k.private.PrivateKey.(*rsa.PrivateKey).Equal(read.private.PrivateKey))
+}

identity/resolver.go 🔗

@@ -1,6 +1,8 @@
 package identity
 
 import (
+	"sync"
+
 	"github.com/MichaelMure/git-bug/entity"
 	"github.com/MichaelMure/git-bug/repository"
 )
@@ -34,3 +36,36 @@ func NewStubResolver() *StubResolver {
 func (s *StubResolver) ResolveIdentity(id entity.Id) (Interface, error) {
 	return &IdentityStub{id: id}, nil
 }
+
+// CachedResolver is a resolver ensuring that loading is done only once through another Resolver.
+type CachedResolver struct {
+	mu         sync.RWMutex
+	resolver   Resolver
+	identities map[entity.Id]Interface
+}
+
+func NewCachedResolver(resolver Resolver) *CachedResolver {
+	return &CachedResolver{
+		resolver:   resolver,
+		identities: make(map[entity.Id]Interface),
+	}
+}
+
+func (c *CachedResolver) ResolveIdentity(id entity.Id) (Interface, error) {
+	c.mu.RLock()
+	if i, ok := c.identities[id]; ok {
+		c.mu.RUnlock()
+		return i, nil
+	}
+	c.mu.RUnlock()
+
+	c.mu.Lock()
+	defer c.mu.Unlock()
+
+	i, err := c.resolver.ResolveIdentity(id)
+	if err != nil {
+		return nil, err
+	}
+	c.identities[id] = i
+	return i, nil
+}

identity/version.go 🔗

@@ -5,6 +5,7 @@ import (
 	"encoding/json"
 	"fmt"
 	"strings"
+	"time"
 
 	"github.com/pkg/errors"
 
@@ -15,76 +16,131 @@ import (
 )
 
 // 1: original format
-const formatVersion = 1
-
-// Version is a complete set of information about an Identity at a point in time.
-type Version struct {
-	// The lamport time at which this version become effective
-	// The reference time is the bug edition lamport clock
-	// It must be the first field in this struct due to https://github.com/golang/go/issues/599
-	//
-	// TODO: BREAKING CHANGE - this need to actually be one edition lamport time **per entity**
-	// This is not a problem right now but will be when more entities are added (pull-request, config ...)
-	time     lamport.Time
-	unixTime int64
+// 2: Identity Ids are generated from the first version serialized data instead of from the first git commit
+//    + Identity hold multiple lamport clocks from other entities, instead of just bug edit
+const formatVersion = 2
 
+// version is a complete set of information about an Identity at a point in time.
+type version struct {
 	name      string
 	email     string // as defined in git or from a bridge when importing the identity
 	login     string // from a bridge when importing the identity
 	avatarURL string
 
+	// The lamport times of the other entities at which this version become effective
+	times    map[string]lamport.Time
+	unixTime int64
+
 	// The set of keys valid at that time, from this version onward, until they get removed
 	// in a new version. This allow to have multiple key for the same identity (e.g. one per
 	// device) as well as revoke key.
 	keys []*Key
 
-	// This optional array is here to ensure a better randomness of the identity id to avoid collisions.
+	// mandatory random bytes to ensure a better randomness of the data of the first
+	// version of an identity, used to later generate the ID
+	// len(Nonce) should be > 20 and < 64 bytes
 	// It has no functional purpose and should be ignored.
-	// It is advised to fill this array if there is not enough entropy, e.g. if there is no keys.
+	// TODO: optional after first version?
 	nonce []byte
 
 	// A set of arbitrary key/value to store metadata about a version or about an Identity in general.
 	metadata map[string]string
 
+	// Not serialized. Store the version's id in memory.
+	id entity.Id
 	// Not serialized
 	commitHash repository.Hash
 }
 
-type VersionJSON struct {
+func newVersion(repo repository.RepoClock, name string, email string, login string, avatarURL string, keys []*Key) (*version, error) {
+	clocks, err := repo.AllClocks()
+	if err != nil {
+		return nil, err
+	}
+
+	times := make(map[string]lamport.Time)
+	for name, clock := range clocks {
+		times[name] = clock.Time()
+	}
+
+	return &version{
+		id:        entity.UnsetId,
+		name:      name,
+		email:     email,
+		login:     login,
+		avatarURL: avatarURL,
+		times:     times,
+		unixTime:  time.Now().Unix(),
+		keys:      keys,
+		nonce:     makeNonce(20),
+	}, nil
+}
+
+type versionJSON struct {
 	// Additional field to version the data
 	FormatVersion uint `json:"version"`
 
-	Time      lamport.Time      `json:"time"`
-	UnixTime  int64             `json:"unix_time"`
-	Name      string            `json:"name,omitempty"`
-	Email     string            `json:"email,omitempty"`
-	Login     string            `json:"login,omitempty"`
-	AvatarUrl string            `json:"avatar_url,omitempty"`
-	Keys      []*Key            `json:"pub_keys,omitempty"`
-	Nonce     []byte            `json:"nonce,omitempty"`
-	Metadata  map[string]string `json:"metadata,omitempty"`
+	Times     map[string]lamport.Time `json:"times"`
+	UnixTime  int64                   `json:"unix_time"`
+	Name      string                  `json:"name,omitempty"`
+	Email     string                  `json:"email,omitempty"`
+	Login     string                  `json:"login,omitempty"`
+	AvatarUrl string                  `json:"avatar_url,omitempty"`
+	Keys      []*Key                  `json:"pub_keys,omitempty"`
+	Nonce     []byte                  `json:"nonce"`
+	Metadata  map[string]string       `json:"metadata,omitempty"`
+}
+
+// Id return the identifier of the version
+func (v *version) Id() entity.Id {
+	if v.id == "" {
+		// something went really wrong
+		panic("version's id not set")
+	}
+	if v.id == entity.UnsetId {
+		// This means we are trying to get the version's Id *before* it has been stored.
+		// As the Id is computed based on the actual bytes written on the disk, we are going to predict
+		// those and then get the Id. This is safe as it will be the exact same code writing on disk later.
+		data, err := json.Marshal(v)
+		if err != nil {
+			panic(err)
+		}
+		v.id = entity.DeriveId(data)
+	}
+	return v.id
 }
 
 // Make a deep copy
-func (v *Version) Clone() *Version {
-	clone := &Version{
-		name:      v.name,
-		email:     v.email,
-		avatarURL: v.avatarURL,
-		keys:      make([]*Key, len(v.keys)),
+func (v *version) Clone() *version {
+	// copy direct fields
+	clone := *v
+
+	// reset some fields
+	clone.commitHash = ""
+	clone.id = entity.UnsetId
+
+	clone.times = make(map[string]lamport.Time)
+	for name, t := range v.times {
+		clone.times[name] = t
 	}
 
+	clone.keys = make([]*Key, len(v.keys))
 	for i, key := range v.keys {
 		clone.keys[i] = key.Clone()
 	}
 
-	return clone
+	clone.nonce = make([]byte, len(v.nonce))
+	copy(clone.nonce, v.nonce)
+
+	// not copying metadata
+
+	return &clone
 }
 
-func (v *Version) MarshalJSON() ([]byte, error) {
-	return json.Marshal(VersionJSON{
+func (v *version) MarshalJSON() ([]byte, error) {
+	return json.Marshal(versionJSON{
 		FormatVersion: formatVersion,
-		Time:          v.time,
+		Times:         v.times,
 		UnixTime:      v.unixTime,
 		Name:          v.name,
 		Email:         v.email,
@@ -96,21 +152,19 @@ func (v *Version) MarshalJSON() ([]byte, error) {
 	})
 }
 
-func (v *Version) UnmarshalJSON(data []byte) error {
-	var aux VersionJSON
+func (v *version) UnmarshalJSON(data []byte) error {
+	var aux versionJSON
 
 	if err := json.Unmarshal(data, &aux); err != nil {
 		return err
 	}
 
-	if aux.FormatVersion < formatVersion {
-		return entity.NewErrOldFormatVersion(aux.FormatVersion)
-	}
-	if aux.FormatVersion > formatVersion {
-		return entity.NewErrNewFormatVersion(aux.FormatVersion)
+	if aux.FormatVersion != formatVersion {
+		return entity.NewErrInvalidFormat(aux.FormatVersion, formatVersion)
 	}
 
-	v.time = aux.Time
+	v.id = entity.DeriveId(data)
+	v.times = aux.Times
 	v.unixTime = aux.UnixTime
 	v.name = aux.Name
 	v.email = aux.Email
@@ -123,23 +177,18 @@ func (v *Version) UnmarshalJSON(data []byte) error {
 	return nil
 }
 
-func (v *Version) Validate() error {
+func (v *version) Validate() error {
 	// time must be set after a commit
 	if v.commitHash != "" && v.unixTime == 0 {
 		return fmt.Errorf("unix time not set")
 	}
-	if v.commitHash != "" && v.time == 0 {
-		return fmt.Errorf("lamport time not set")
-	}
 
 	if text.Empty(v.name) && text.Empty(v.login) {
 		return fmt.Errorf("either name or login should be set")
 	}
-
 	if strings.Contains(v.name, "\n") {
 		return fmt.Errorf("name should be a single line")
 	}
-
 	if !text.Safe(v.name) {
 		return fmt.Errorf("name is not fully printable")
 	}
@@ -147,7 +196,6 @@ func (v *Version) Validate() error {
 	if strings.Contains(v.login, "\n") {
 		return fmt.Errorf("login should be a single line")
 	}
-
 	if !text.Safe(v.login) {
 		return fmt.Errorf("login is not fully printable")
 	}
@@ -155,7 +203,6 @@ func (v *Version) Validate() error {
 	if strings.Contains(v.email, "\n") {
 		return fmt.Errorf("email should be a single line")
 	}
-
 	if !text.Safe(v.email) {
 		return fmt.Errorf("email is not fully printable")
 	}
@@ -167,6 +214,9 @@ func (v *Version) Validate() error {
 	if len(v.nonce) > 64 {
 		return fmt.Errorf("nonce is too big")
 	}
+	if len(v.nonce) < 20 {
+		return fmt.Errorf("nonce is too small")
+	}
 
 	for _, k := range v.keys {
 		if err := k.Validate(); err != nil {
@@ -177,9 +227,9 @@ func (v *Version) Validate() error {
 	return nil
 }
 
-// Write will serialize and store the Version as a git blob and return
+// Write will serialize and store the version as a git blob and return
 // its hash
-func (v *Version) Write(repo repository.Repo) (repository.Hash, error) {
+func (v *version) Write(repo repository.Repo) (repository.Hash, error) {
 	// make sure we don't write invalid data
 	err := v.Validate()
 	if err != nil {
@@ -187,17 +237,18 @@ func (v *Version) Write(repo repository.Repo) (repository.Hash, error) {
 	}
 
 	data, err := json.Marshal(v)
-
 	if err != nil {
 		return "", err
 	}
 
 	hash, err := repo.StoreData(data)
-
 	if err != nil {
 		return "", err
 	}
 
+	// make sure we set the Id when writing in the repo
+	v.id = entity.DeriveId(data)
+
 	return hash, nil
 }
 
@@ -211,22 +262,22 @@ func makeNonce(len int) []byte {
 }
 
 // SetMetadata store arbitrary metadata about a version or an Identity in general
-// If the Version has been commit to git already, it won't be overwritten.
-func (v *Version) SetMetadata(key string, value string) {
+// If the version has been committed to git already, it won't be overwritten.
+// Beware: changing the metadata on a version will change its ID
+func (v *version) SetMetadata(key string, value string) {
 	if v.metadata == nil {
 		v.metadata = make(map[string]string)
 	}
-
 	v.metadata[key] = value
 }
 
-// GetMetadata retrieve arbitrary metadata about the Version
-func (v *Version) GetMetadata(key string) (string, bool) {
+// GetMetadata retrieve arbitrary metadata about the version
+func (v *version) GetMetadata(key string) (string, bool) {
 	val, ok := v.metadata[key]
 	return val, ok
 }
 
-// AllMetadata return all metadata for this Version
-func (v *Version) AllMetadata() map[string]string {
+// AllMetadata return all metadata for this version
+func (v *version) AllMetadata() map[string]string {
 	return v.metadata
 }

identity/version_test.go 🔗

@@ -3,39 +3,76 @@ package identity
 import (
 	"encoding/json"
 	"testing"
+	"time"
 
 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+
+	"github.com/MichaelMure/git-bug/entity"
+	"github.com/MichaelMure/git-bug/repository"
+	"github.com/MichaelMure/git-bug/util/lamport"
 )
 
-func TestVersionSerialize(t *testing.T) {
-	before := &Version{
+func makeIdentityTestRepo(t *testing.T) repository.ClockedRepo {
+	repo := repository.NewMockRepo()
+
+	clock1, err := repo.GetOrCreateClock("foo")
+	require.NoError(t, err)
+	err = clock1.Witness(42)
+	require.NoError(t, err)
+
+	clock2, err := repo.GetOrCreateClock("bar")
+	require.NoError(t, err)
+	err = clock2.Witness(34)
+	require.NoError(t, err)
+
+	return repo
+}
+
+func TestVersionJSON(t *testing.T) {
+	repo := makeIdentityTestRepo(t)
+
+	keys := []*Key{
+		generatePublicKey(),
+		generatePublicKey(),
+	}
+
+	before, err := newVersion(repo, "name", "email", "login", "avatarUrl", keys)
+	require.NoError(t, err)
+
+	before.SetMetadata("key1", "value1")
+	before.SetMetadata("key2", "value2")
+
+	expected := &version{
+		id:        entity.UnsetId,
 		name:      "name",
 		email:     "email",
+		login:     "login",
 		avatarURL: "avatarUrl",
-		keys: []*Key{
-			{
-				Fingerprint: "fingerprint1",
-				PubKey:      "pubkey1",
-			},
-			{
-				Fingerprint: "fingerprint2",
-				PubKey:      "pubkey2",
-			},
+		unixTime:  time.Now().Unix(),
+		times: map[string]lamport.Time{
+			"foo": 42,
+			"bar": 34,
 		},
-		nonce: makeNonce(20),
+		keys:  keys,
+		nonce: before.nonce,
 		metadata: map[string]string{
 			"key1": "value1",
 			"key2": "value2",
 		},
-		time: 3,
 	}
 
+	require.Equal(t, expected, before)
+
 	data, err := json.Marshal(before)
 	assert.NoError(t, err)
 
-	var after Version
+	var after version
 	err = json.Unmarshal(data, &after)
 	assert.NoError(t, err)
 
-	assert.Equal(t, before, &after)
+	// make sure we now have an Id
+	expected.Id()
+
+	assert.Equal(t, expected, &after)
 }

misc/bash_completion/git-bug 🔗

@@ -722,6 +722,38 @@ _git-bug_comment_add()
     noun_aliases=()
 }
 
+_git-bug_comment_edit()
+{
+    last_command="git-bug_comment_edit"
+
+    command_aliases=()
+
+    commands=()
+
+    flags=()
+    two_word_flags=()
+    local_nonpersistent_flags=()
+    flags_with_completion=()
+    flags_completion=()
+
+    flags+=("--file=")
+    two_word_flags+=("--file")
+    two_word_flags+=("-F")
+    local_nonpersistent_flags+=("--file")
+    local_nonpersistent_flags+=("--file=")
+    local_nonpersistent_flags+=("-F")
+    flags+=("--message=")
+    two_word_flags+=("--message")
+    two_word_flags+=("-m")
+    local_nonpersistent_flags+=("--message")
+    local_nonpersistent_flags+=("--message=")
+    local_nonpersistent_flags+=("-m")
+
+    must_have_one_flag=()
+    must_have_one_noun=()
+    noun_aliases=()
+}
+
 _git-bug_comment()
 {
     last_command="git-bug_comment"
@@ -730,6 +762,7 @@ _git-bug_comment()
 
     commands=()
     commands+=("add")
+    commands+=("edit")
 
     flags=()
     two_word_flags=()
@@ -1331,6 +1364,10 @@ _git-bug_webui()
     flags_with_completion=()
     flags_completion=()
 
+    flags+=("--host=")
+    two_word_flags+=("--host")
+    local_nonpersistent_flags+=("--host")
+    local_nonpersistent_flags+=("--host=")
     flags+=("--open")
     local_nonpersistent_flags+=("--open")
     flags+=("--no-open")
@@ -1343,6 +1380,12 @@ _git-bug_webui()
     local_nonpersistent_flags+=("-p")
     flags+=("--read-only")
     local_nonpersistent_flags+=("--read-only")
+    flags+=("--query=")
+    two_word_flags+=("--query")
+    two_word_flags+=("-q")
+    local_nonpersistent_flags+=("--query")
+    local_nonpersistent_flags+=("--query=")
+    local_nonpersistent_flags+=("-q")
 
     must_have_one_flag=()
     must_have_one_noun=()

misc/random_bugs/create_random_bugs.go 🔗

@@ -111,54 +111,8 @@ func generateRandomBugsWithSeed(opts Options, seed int64) []*bug.Bug {
 	return result
 }
 
-func GenerateRandomOperationPacks(packNumber int, opNumber int) []*bug.OperationPack {
-	return GenerateRandomOperationPacksWithSeed(packNumber, opNumber, time.Now().UnixNano())
-}
-
-func GenerateRandomOperationPacksWithSeed(packNumber int, opNumber int, seed int64) []*bug.OperationPack {
-	// Note: this is a bit crude, only generate a Create + Comments
-
-	panic("this piece of code needs to be updated to make sure that the identities " +
-		"are properly commit before usage. That is, generateRandomPersons() need to be called.")
-
-	rand.Seed(seed)
-	fake.Seed(seed)
-
-	result := make([]*bug.OperationPack, packNumber)
-
-	for i := 0; i < packNumber; i++ {
-		opp := &bug.OperationPack{}
-
-		var op bug.Operation
-
-		op = bug.NewCreateOp(
-			randomPerson(),
-			time.Now().Unix(),
-			fake.Sentence(),
-			paragraphs(),
-			nil,
-		)
-
-		opp.Append(op)
-
-		for j := 0; j < opNumber-1; j++ {
-			op = bug.NewAddCommentOp(
-				randomPerson(),
-				time.Now().Unix(),
-				paragraphs(),
-				nil,
-			)
-			opp.Append(op)
-		}
-
-		result[i] = opp
-	}
-
-	return result
-}
-
-func person() *identity.Identity {
-	return identity.NewIdentity(fake.FullName(), fake.EmailAddress())
+func person(repo repository.RepoClock) (*identity.Identity, error) {
+	return identity.NewIdentity(repo, fake.FullName(), fake.EmailAddress())
 }
 
 var persons []*identity.Identity
@@ -166,8 +120,11 @@ var persons []*identity.Identity
 func generateRandomPersons(repo repository.ClockedRepo, n int) {
 	persons = make([]*identity.Identity, n)
 	for i := range persons {
-		p := person()
-		err := p.Commit(repo)
+		p, err := person(repo)
+		if err != nil {
+			panic(err)
+		}
+		err = p.Commit(repo)
 		if err != nil {
 			panic(err)
 		}

repository/common.go 🔗

@@ -0,0 +1,67 @@
+package repository
+
+import (
+	"io"
+
+	"golang.org/x/crypto/openpgp"
+	"golang.org/x/crypto/openpgp/armor"
+	"golang.org/x/crypto/openpgp/errors"
+)
+
+// nonNativeListCommits is an implementation for ListCommits, for the case where
+// the underlying git implementation doesn't support it natively.
+func nonNativeListCommits(repo RepoData, ref string) ([]Hash, error) {
+	var result []Hash
+
+	stack := make([]Hash, 0, 32)
+	visited := make(map[Hash]struct{})
+
+	hash, err := repo.ResolveRef(ref)
+	if err != nil {
+		return nil, err
+	}
+
+	stack = append(stack, hash)
+
+	for len(stack) > 0 {
+		// pop
+		hash := stack[len(stack)-1]
+		stack = stack[:len(stack)-1]
+
+		if _, ok := visited[hash]; ok {
+			continue
+		}
+
+		// mark as visited
+		visited[hash] = struct{}{}
+		result = append(result, hash)
+
+		commit, err := repo.ReadCommit(hash)
+		if err != nil {
+			return nil, err
+		}
+
+		for _, parent := range commit.Parents {
+			stack = append(stack, parent)
+		}
+	}
+
+	// reverse
+	for i, j := 0, len(result)-1; i < j; i, j = i+1, j-1 {
+		result[i], result[j] = result[j], result[i]
+	}
+
+	return result, nil
+}
+
+// deArmorSignature convert an armored (text serialized) signature into raw binary
+func deArmorSignature(armoredSig io.Reader) (io.Reader, error) {
+	block, err := armor.Decode(armoredSig)
+	if err != nil {
+		return nil, err
+	}
+	if block.Type != openpgp.SignatureType {
+		return nil, errors.InvalidArgumentError("expected '" + openpgp.SignatureType + "', got: " + block.Type)
+	}
+	return block.Body, nil
+}

repository/config_mem.go 🔗

@@ -20,6 +20,7 @@ func NewMemConfig() *MemConfig {
 }
 
 func (mc *MemConfig) StoreString(key, value string) error {
+	key = normalizeKey(key)
 	mc.config[key] = value
 	return nil
 }
@@ -33,6 +34,7 @@ func (mc *MemConfig) StoreTimestamp(key string, value time.Time) error {
 }
 
 func (mc *MemConfig) ReadAll(keyPrefix string) (map[string]string, error) {
+	keyPrefix = normalizeKey(keyPrefix)
 	result := make(map[string]string)
 	for key, val := range mc.config {
 		if strings.HasPrefix(key, keyPrefix) {
@@ -44,6 +46,7 @@ func (mc *MemConfig) ReadAll(keyPrefix string) (map[string]string, error) {
 
 func (mc *MemConfig) ReadString(key string) (string, error) {
 	// unlike git, the mock can only store one value for the same key
+	key = normalizeKey(key)
 	val, ok := mc.config[key]
 	if !ok {
 		return "", ErrNoConfigEntry
@@ -54,9 +57,9 @@ func (mc *MemConfig) ReadString(key string) (string, error) {
 
 func (mc *MemConfig) ReadBool(key string) (bool, error) {
 	// unlike git, the mock can only store one value for the same key
-	val, ok := mc.config[key]
-	if !ok {
-		return false, ErrNoConfigEntry
+	val, err := mc.ReadString(key)
+	if err != nil {
+		return false, err
 	}
 
 	return strconv.ParseBool(val)
@@ -78,6 +81,7 @@ func (mc *MemConfig) ReadTimestamp(key string) (time.Time, error) {
 
 // RmConfigs remove all key/value pair matching the key prefix
 func (mc *MemConfig) RemoveAll(keyPrefix string) error {
+	keyPrefix = normalizeKey(keyPrefix)
 	found := false
 	for key := range mc.config {
 		if strings.HasPrefix(key, keyPrefix) {
@@ -92,3 +96,12 @@ func (mc *MemConfig) RemoveAll(keyPrefix string) error {
 
 	return nil
 }
+
+func normalizeKey(key string) string {
+	// this feels so wrong, but that's apparently how git behaves.
+	// only section and final segment are case insensitive, subsection in between are not.
+	s := strings.Split(key, ".")
+	s[0] = strings.ToLower(s[0])
+	s[len(s)-1] = strings.ToLower(s[len(s)-1])
+	return strings.Join(s, ".")
+}

repository/config_testing.go 🔗

@@ -113,4 +113,43 @@ func testConfig(t *testing.T, config Config) {
 		"section.subsection.subsection.opt1": "foo5",
 		"section.subsection.subsection.opt2": "foo6",
 	}, all)
+
+	// missing section + case insensitive
+	val, err = config.ReadString("section2.opt1")
+	require.Error(t, err)
+
+	val, err = config.ReadString("section.opt1")
+	require.NoError(t, err)
+	require.Equal(t, "foo", val)
+
+	val, err = config.ReadString("SECTION.OPT1")
+	require.NoError(t, err)
+	require.Equal(t, "foo", val)
+
+	_, err = config.ReadString("SECTION2.OPT3")
+	require.Error(t, err)
+
+	// missing subsection + case insensitive
+	val, err = config.ReadString("section.subsection.opt1")
+	require.NoError(t, err)
+	require.Equal(t, "foo3", val)
+
+	// for some weird reason, subsections ARE case sensitive
+	_, err = config.ReadString("SECTION.SUBSECTION.OPT1")
+	require.Error(t, err)
+
+	_, err = config.ReadString("SECTION.SUBSECTION1.OPT1")
+	require.Error(t, err)
+
+	// missing sub-subsection + case insensitive
+	val, err = config.ReadString("section.subsection.subsection.opt1")
+	require.NoError(t, err)
+	require.Equal(t, "foo5", val)
+
+	// for some weird reason, subsections ARE case sensitive
+	_, err = config.ReadString("SECTION.SUBSECTION.SUBSECTION.OPT1")
+	require.Error(t, err)
+
+	_, err = config.ReadString("SECTION.SUBSECTION.SUBSECTION1.OPT1")
+	require.Error(t, err)
 }

repository/git.go 🔗

@@ -1,500 +0,0 @@
-// Package repository contains helper methods for working with the Git repo.
-package repository
-
-import (
-	"bytes"
-	"fmt"
-	"os"
-	"path/filepath"
-	"strings"
-	"sync"
-
-	"github.com/blevesearch/bleve"
-	"github.com/go-git/go-billy/v5"
-	"github.com/go-git/go-billy/v5/osfs"
-
-	"github.com/MichaelMure/git-bug/util/lamport"
-)
-
-const (
-	clockPath = "git-bug"
-)
-
-var _ ClockedRepo = &GitRepo{}
-var _ TestedRepo = &GitRepo{}
-
-// GitRepo represents an instance of a (local) git repository.
-type GitRepo struct {
-	gitCli
-	path string
-
-	clocksMutex sync.Mutex
-	clocks      map[string]lamport.Clock
-
-	indexesMutex sync.Mutex
-	indexes      map[string]bleve.Index
-
-	keyring Keyring
-}
-
-// OpenGitRepo determines if the given working directory is inside of a git repository,
-// and returns the corresponding GitRepo instance if it is.
-func OpenGitRepo(path string, clockLoaders []ClockLoader) (*GitRepo, error) {
-	k, err := defaultKeyring()
-	if err != nil {
-		return nil, err
-	}
-
-	repo := &GitRepo{
-		gitCli:  gitCli{path: path},
-		path:    path,
-		clocks:  make(map[string]lamport.Clock),
-		indexes: make(map[string]bleve.Index),
-		keyring: k,
-	}
-
-	// Check the repo and retrieve the root path
-	stdout, err := repo.runGitCommand("rev-parse", "--absolute-git-dir")
-
-	// Now dir is fetched with "git rev-parse --git-dir". May be it can
-	// still return nothing in some cases. Then empty stdout check is
-	// kept.
-	if err != nil || stdout == "" {
-		return nil, ErrNotARepo
-	}
-
-	// Fix the path to be sure we are at the root
-	repo.path = stdout
-	repo.gitCli.path = stdout
-
-	for _, loader := range clockLoaders {
-		allExist := true
-		for _, name := range loader.Clocks {
-			if _, err := repo.getClock(name); err != nil {
-				allExist = false
-			}
-		}
-
-		if !allExist {
-			err = loader.Witnesser(repo)
-			if err != nil {
-				return nil, err
-			}
-		}
-	}
-
-	return repo, nil
-}
-
-// InitGitRepo create a new empty git repo at the given path
-func InitGitRepo(path string) (*GitRepo, error) {
-	repo := &GitRepo{
-		gitCli:  gitCli{path: path},
-		path:    path + "/.git",
-		clocks:  make(map[string]lamport.Clock),
-		indexes: make(map[string]bleve.Index),
-	}
-
-	_, err := repo.runGitCommand("init", path)
-	if err != nil {
-		return nil, err
-	}
-
-	return repo, nil
-}
-
-// InitBareGitRepo create a new --bare empty git repo at the given path
-func InitBareGitRepo(path string) (*GitRepo, error) {
-	repo := &GitRepo{
-		gitCli:  gitCli{path: path},
-		path:    path,
-		clocks:  make(map[string]lamport.Clock),
-		indexes: make(map[string]bleve.Index),
-	}
-
-	_, err := repo.runGitCommand("init", "--bare", path)
-	if err != nil {
-		return nil, err
-	}
-
-	return repo, nil
-}
-
-func (repo *GitRepo) Close() error {
-	var firstErr error
-	for _, index := range repo.indexes {
-		err := index.Close()
-		if err != nil && firstErr == nil {
-			firstErr = err
-		}
-	}
-	return firstErr
-}
-
-// LocalConfig give access to the repository scoped configuration
-func (repo *GitRepo) LocalConfig() Config {
-	return newGitConfig(repo.gitCli, false)
-}
-
-// GlobalConfig give access to the global scoped configuration
-func (repo *GitRepo) GlobalConfig() Config {
-	return newGitConfig(repo.gitCli, true)
-}
-
-// AnyConfig give access to a merged local/global configuration
-func (repo *GitRepo) AnyConfig() ConfigRead {
-	return mergeConfig(repo.LocalConfig(), repo.GlobalConfig())
-}
-
-// Keyring give access to a user-wide storage for secrets
-func (repo *GitRepo) Keyring() Keyring {
-	return repo.keyring
-}
-
-// GetPath returns the path to the repo.
-func (repo *GitRepo) GetPath() string {
-	return repo.path
-}
-
-// GetUserName returns the name the the user has used to configure git
-func (repo *GitRepo) GetUserName() (string, error) {
-	return repo.runGitCommand("config", "user.name")
-}
-
-// GetUserEmail returns the email address that the user has used to configure git.
-func (repo *GitRepo) GetUserEmail() (string, error) {
-	return repo.runGitCommand("config", "user.email")
-}
-
-// GetCoreEditor returns the name of the editor that the user has used to configure git.
-func (repo *GitRepo) GetCoreEditor() (string, error) {
-	return repo.runGitCommand("var", "GIT_EDITOR")
-}
-
-// GetRemotes returns the configured remotes repositories.
-func (repo *GitRepo) GetRemotes() (map[string]string, error) {
-	stdout, err := repo.runGitCommand("remote", "--verbose")
-	if err != nil {
-		return nil, err
-	}
-
-	lines := strings.Split(stdout, "\n")
-	remotes := make(map[string]string, len(lines))
-
-	for _, line := range lines {
-		if strings.TrimSpace(line) == "" {
-			continue
-		}
-		elements := strings.Fields(line)
-		if len(elements) != 3 {
-			return nil, fmt.Errorf("git remote: unexpected output format: %s", line)
-		}
-
-		remotes[elements[0]] = elements[1]
-	}
-
-	return remotes, nil
-}
-
-// LocalStorage return a billy.Filesystem giving access to $RepoPath/.git/git-bug
-func (repo *GitRepo) LocalStorage() billy.Filesystem {
-	return osfs.New(repo.path)
-}
-
-// GetBleveIndex return a bleve.Index that can be used to index documents
-func (repo *GitRepo) GetBleveIndex(name string) (bleve.Index, error) {
-	repo.indexesMutex.Lock()
-	defer repo.indexesMutex.Unlock()
-
-	if index, ok := repo.indexes[name]; ok {
-		return index, nil
-	}
-
-	path := filepath.Join(repo.path, "indexes", name)
-
-	index, err := bleve.Open(path)
-	if err == nil {
-		repo.indexes[name] = index
-		return index, nil
-	}
-
-	err = os.MkdirAll(path, os.ModeDir)
-	if err != nil {
-		return nil, err
-	}
-
-	mapping := bleve.NewIndexMapping()
-	mapping.DefaultAnalyzer = "en"
-
-	index, err = bleve.New(path, mapping)
-	if err != nil {
-		return nil, err
-	}
-
-	repo.indexes[name] = index
-
-	return index, nil
-}
-
-// ClearBleveIndex will wipe the given index
-func (repo *GitRepo) ClearBleveIndex(name string) error {
-	repo.indexesMutex.Lock()
-	defer repo.indexesMutex.Unlock()
-
-	path := filepath.Join(repo.path, "indexes", name)
-
-	err := os.RemoveAll(path)
-	if err != nil {
-		return err
-	}
-
-	delete(repo.indexes, name)
-
-	return nil
-}
-
-// FetchRefs fetch git refs from a remote
-func (repo *GitRepo) FetchRefs(remote, refSpec string) (string, error) {
-	stdout, err := repo.runGitCommand("fetch", remote, refSpec)
-
-	if err != nil {
-		return stdout, fmt.Errorf("failed to fetch from the remote '%s': %v", remote, err)
-	}
-
-	return stdout, err
-}
-
-// PushRefs push git refs to a remote
-func (repo *GitRepo) PushRefs(remote string, refSpec string) (string, error) {
-	stdout, stderr, err := repo.runGitCommandRaw(nil, "push", remote, refSpec)
-
-	if err != nil {
-		return stdout + stderr, fmt.Errorf("failed to push to the remote '%s': %v", remote, stderr)
-	}
-	return stdout + stderr, nil
-}
-
-// StoreData will store arbitrary data and return the corresponding hash
-func (repo *GitRepo) StoreData(data []byte) (Hash, error) {
-	var stdin = bytes.NewReader(data)
-
-	stdout, err := repo.runGitCommandWithStdin(stdin, "hash-object", "--stdin", "-w")
-
-	return Hash(stdout), err
-}
-
-// ReadData will attempt to read arbitrary data from the given hash
-func (repo *GitRepo) ReadData(hash Hash) ([]byte, error) {
-	var stdout bytes.Buffer
-	var stderr bytes.Buffer
-
-	err := repo.runGitCommandWithIO(nil, &stdout, &stderr, "cat-file", "-p", string(hash))
-
-	if err != nil {
-		return []byte{}, err
-	}
-
-	return stdout.Bytes(), nil
-}
-
-// StoreTree will store a mapping key-->Hash as a Git tree
-func (repo *GitRepo) StoreTree(entries []TreeEntry) (Hash, error) {
-	buffer := prepareTreeEntries(entries)
-
-	stdout, err := repo.runGitCommandWithStdin(&buffer, "mktree")
-
-	if err != nil {
-		return "", err
-	}
-
-	return Hash(stdout), nil
-}
-
-// StoreCommit will store a Git commit with the given Git tree
-func (repo *GitRepo) StoreCommit(treeHash Hash) (Hash, error) {
-	stdout, err := repo.runGitCommand("commit-tree", string(treeHash))
-
-	if err != nil {
-		return "", err
-	}
-
-	return Hash(stdout), nil
-}
-
-// StoreCommitWithParent will store a Git commit with the given Git tree
-func (repo *GitRepo) StoreCommitWithParent(treeHash Hash, parent Hash) (Hash, error) {
-	stdout, err := repo.runGitCommand("commit-tree", string(treeHash),
-		"-p", string(parent))
-
-	if err != nil {
-		return "", err
-	}
-
-	return Hash(stdout), nil
-}
-
-// UpdateRef will create or update a Git reference
-func (repo *GitRepo) UpdateRef(ref string, hash Hash) error {
-	_, err := repo.runGitCommand("update-ref", ref, string(hash))
-
-	return err
-}
-
-// RemoveRef will remove a Git reference
-func (repo *GitRepo) RemoveRef(ref string) error {
-	_, err := repo.runGitCommand("update-ref", "-d", ref)
-
-	return err
-}
-
-// ListRefs will return a list of Git ref matching the given refspec
-func (repo *GitRepo) ListRefs(refPrefix string) ([]string, error) {
-	stdout, err := repo.runGitCommand("for-each-ref", "--format=%(refname)", refPrefix)
-
-	if err != nil {
-		return nil, err
-	}
-
-	split := strings.Split(stdout, "\n")
-
-	if len(split) == 1 && split[0] == "" {
-		return []string{}, nil
-	}
-
-	return split, nil
-}
-
-// RefExist will check if a reference exist in Git
-func (repo *GitRepo) RefExist(ref string) (bool, error) {
-	stdout, err := repo.runGitCommand("for-each-ref", ref)
-
-	if err != nil {
-		return false, err
-	}
-
-	return stdout != "", nil
-}
-
-// CopyRef will create a new reference with the same value as another one
-func (repo *GitRepo) CopyRef(source string, dest string) error {
-	_, err := repo.runGitCommand("update-ref", dest, source)
-
-	return err
-}
-
-// ListCommits will return the list of commit hashes of a ref, in chronological order
-func (repo *GitRepo) ListCommits(ref string) ([]Hash, error) {
-	stdout, err := repo.runGitCommand("rev-list", "--first-parent", "--reverse", ref)
-
-	if err != nil {
-		return nil, err
-	}
-
-	split := strings.Split(stdout, "\n")
-
-	casted := make([]Hash, len(split))
-	for i, line := range split {
-		casted[i] = Hash(line)
-	}
-
-	return casted, nil
-
-}
-
-// ReadTree will return the list of entries in a Git tree
-func (repo *GitRepo) ReadTree(hash Hash) ([]TreeEntry, error) {
-	stdout, err := repo.runGitCommand("ls-tree", string(hash))
-
-	if err != nil {
-		return nil, err
-	}
-
-	return readTreeEntries(stdout)
-}
-
-// FindCommonAncestor will return the last common ancestor of two chain of commit
-func (repo *GitRepo) FindCommonAncestor(hash1 Hash, hash2 Hash) (Hash, error) {
-	stdout, err := repo.runGitCommand("merge-base", string(hash1), string(hash2))
-
-	if err != nil {
-		return "", err
-	}
-
-	return Hash(stdout), nil
-}
-
-// GetTreeHash return the git tree hash referenced in a commit
-func (repo *GitRepo) GetTreeHash(commit Hash) (Hash, error) {
-	stdout, err := repo.runGitCommand("rev-parse", string(commit)+"^{tree}")
-
-	if err != nil {
-		return "", err
-	}
-
-	return Hash(stdout), nil
-}
-
-// GetOrCreateClock return a Lamport clock stored in the Repo.
-// If the clock doesn't exist, it's created.
-func (repo *GitRepo) GetOrCreateClock(name string) (lamport.Clock, error) {
-	repo.clocksMutex.Lock()
-	defer repo.clocksMutex.Unlock()
-
-	c, err := repo.getClock(name)
-	if err == nil {
-		return c, nil
-	}
-	if err != ErrClockNotExist {
-		return nil, err
-	}
-
-	c, err = lamport.NewPersistedClock(repo.LocalStorage(), name+"-clock")
-	if err != nil {
-		return nil, err
-	}
-
-	repo.clocks[name] = c
-	return c, nil
-}
-
-func (repo *GitRepo) getClock(name string) (lamport.Clock, error) {
-	if c, ok := repo.clocks[name]; ok {
-		return c, nil
-	}
-
-	c, err := lamport.LoadPersistedClock(repo.LocalStorage(), name+"-clock")
-	if err == nil {
-		repo.clocks[name] = c
-		return c, nil
-	}
-	if err == lamport.ErrClockNotExist {
-		return nil, ErrClockNotExist
-	}
-	return nil, err
-}
-
-// AddRemote add a new remote to the repository
-// Not in the interface because it's only used for testing
-func (repo *GitRepo) AddRemote(name string, url string) error {
-	_, err := repo.runGitCommand("remote", "add", name, url)
-
-	return err
-}
-
-// GetLocalRemote return the URL to use to add this repo as a local remote
-func (repo *GitRepo) GetLocalRemote() string {
-	return repo.path
-}
-
-// EraseFromDisk delete this repository entirely from the disk
-func (repo *GitRepo) EraseFromDisk() error {
-	err := repo.Close()
-	if err != nil {
-		return err
-	}
-
-	path := filepath.Clean(strings.TrimSuffix(repo.path, string(filepath.Separator)+".git"))
-
-	// fmt.Println("Cleaning repo:", path)
-	return os.RemoveAll(path)
-}

repository/git_cli.go 🔗

@@ -1,56 +0,0 @@
-package repository
-
-import (
-	"bytes"
-	"fmt"
-	"io"
-	"os/exec"
-	"strings"
-)
-
-// gitCli is a helper to launch CLI git commands
-type gitCli struct {
-	path string
-}
-
-// Run the given git command with the given I/O reader/writers, returning an error if it fails.
-func (cli gitCli) runGitCommandWithIO(stdin io.Reader, stdout, stderr io.Writer, args ...string) error {
-	// make sure that the working directory for the command
-	// always exist, in particular when running "git init".
-	path := strings.TrimSuffix(cli.path, ".git")
-
-	// fmt.Printf("[%s] Running git %s\n", path, strings.Join(args, " "))
-
-	cmd := exec.Command("git", args...)
-	cmd.Dir = path
-	cmd.Stdin = stdin
-	cmd.Stdout = stdout
-	cmd.Stderr = stderr
-
-	return cmd.Run()
-}
-
-// Run the given git command and return its stdout, or an error if the command fails.
-func (cli gitCli) runGitCommandRaw(stdin io.Reader, args ...string) (string, string, error) {
-	var stdout bytes.Buffer
-	var stderr bytes.Buffer
-	err := cli.runGitCommandWithIO(stdin, &stdout, &stderr, args...)
-	return strings.TrimSpace(stdout.String()), strings.TrimSpace(stderr.String()), err
-}
-
-// Run the given git command and return its stdout, or an error if the command fails.
-func (cli gitCli) runGitCommandWithStdin(stdin io.Reader, args ...string) (string, error) {
-	stdout, stderr, err := cli.runGitCommandRaw(stdin, args...)
-	if err != nil {
-		if stderr == "" {
-			stderr = "Error running git command: " + strings.Join(args, " ")
-		}
-		err = fmt.Errorf(stderr)
-	}
-	return stdout, err
-}
-
-// Run the given git command and return its stdout, or an error if the command fails.
-func (cli gitCli) runGitCommand(args ...string) (string, error) {
-	return cli.runGitCommandWithStdin(nil, args...)
-}

repository/git_config.go 🔗

@@ -1,221 +0,0 @@
-package repository
-
-import (
-	"fmt"
-	"regexp"
-	"strconv"
-	"strings"
-	"time"
-
-	"github.com/blang/semver"
-	"github.com/pkg/errors"
-)
-
-var _ Config = &gitConfig{}
-
-type gitConfig struct {
-	cli          gitCli
-	localityFlag string
-}
-
-func newGitConfig(cli gitCli, global bool) *gitConfig {
-	localityFlag := "--local"
-	if global {
-		localityFlag = "--global"
-	}
-	return &gitConfig{
-		cli:          cli,
-		localityFlag: localityFlag,
-	}
-}
-
-// StoreString store a single key/value pair in the config of the repo
-func (gc *gitConfig) StoreString(key string, value string) error {
-	_, err := gc.cli.runGitCommand("config", gc.localityFlag, "--replace-all", key, value)
-	return err
-}
-
-func (gc *gitConfig) StoreBool(key string, value bool) error {
-	return gc.StoreString(key, strconv.FormatBool(value))
-}
-
-func (gc *gitConfig) StoreTimestamp(key string, value time.Time) error {
-	return gc.StoreString(key, strconv.Itoa(int(value.Unix())))
-}
-
-// ReadAll read all key/value pair matching the key prefix
-func (gc *gitConfig) ReadAll(keyPrefix string) (map[string]string, error) {
-	stdout, err := gc.cli.runGitCommand("config", gc.localityFlag, "--includes", "--get-regexp", keyPrefix)
-
-	//   / \
-	//  / ! \
-	// -------
-	//
-	// There can be a legitimate error here, but I see no portable way to
-	// distinguish them from the git error that say "no matching value exist"
-	if err != nil {
-		return nil, nil
-	}
-
-	lines := strings.Split(stdout, "\n")
-
-	result := make(map[string]string, len(lines))
-
-	for _, line := range lines {
-		if strings.TrimSpace(line) == "" {
-			continue
-		}
-
-		parts := strings.SplitN(line, " ", 2)
-		result[parts[0]] = parts[1]
-	}
-
-	return result, nil
-}
-
-func (gc *gitConfig) ReadString(key string) (string, error) {
-	stdout, err := gc.cli.runGitCommand("config", gc.localityFlag, "--includes", "--get-all", key)
-
-	//   / \
-	//  / ! \
-	// -------
-	//
-	// There can be a legitimate error here, but I see no portable way to
-	// distinguish them from the git error that say "no matching value exist"
-	if err != nil {
-		return "", ErrNoConfigEntry
-	}
-
-	lines := strings.Split(stdout, "\n")
-
-	if len(lines) == 0 {
-		return "", ErrNoConfigEntry
-	}
-	if len(lines) > 1 {
-		return "", ErrMultipleConfigEntry
-	}
-
-	return lines[0], nil
-}
-
-func (gc *gitConfig) ReadBool(key string) (bool, error) {
-	val, err := gc.ReadString(key)
-	if err != nil {
-		return false, err
-	}
-
-	return strconv.ParseBool(val)
-}
-
-func (gc *gitConfig) ReadTimestamp(key string) (time.Time, error) {
-	value, err := gc.ReadString(key)
-	if err != nil {
-		return time.Time{}, err
-	}
-	return ParseTimestamp(value)
-}
-
-func (gc *gitConfig) rmSection(keyPrefix string) error {
-	_, err := gc.cli.runGitCommand("config", gc.localityFlag, "--remove-section", keyPrefix)
-	return err
-}
-
-func (gc *gitConfig) unsetAll(keyPrefix string) error {
-	_, err := gc.cli.runGitCommand("config", gc.localityFlag, "--unset-all", keyPrefix)
-	return err
-}
-
-// return keyPrefix section
-// example: sectionFromKey(a.b.c.d) return a.b.c
-func sectionFromKey(keyPrefix string) string {
-	s := strings.Split(keyPrefix, ".")
-	if len(s) == 1 {
-		return keyPrefix
-	}
-
-	return strings.Join(s[:len(s)-1], ".")
-}
-
-// rmConfigs with git version lesser than 2.18
-func (gc *gitConfig) rmConfigsGitVersionLT218(keyPrefix string) error {
-	// try to remove key/value pair by key
-	err := gc.unsetAll(keyPrefix)
-	if err != nil {
-		return gc.rmSection(keyPrefix)
-	}
-
-	m, err := gc.ReadAll(sectionFromKey(keyPrefix))
-	if err != nil {
-		return err
-	}
-
-	// if section doesn't have any left key/value remove the section
-	if len(m) == 0 {
-		return gc.rmSection(sectionFromKey(keyPrefix))
-	}
-
-	return nil
-}
-
-// RmConfigs remove all key/value pair matching the key prefix
-func (gc *gitConfig) RemoveAll(keyPrefix string) error {
-	// starting from git 2.18.0 sections are automatically deleted when the last existing
-	// key/value is removed. Before 2.18.0 we should remove the section
-	// see https://github.com/git/git/blob/master/Documentation/RelNotes/2.18.0.txt#L379
-	lt218, err := gc.gitVersionLT218()
-	if err != nil {
-		return errors.Wrap(err, "getting git version")
-	}
-
-	if lt218 {
-		return gc.rmConfigsGitVersionLT218(keyPrefix)
-	}
-
-	err = gc.unsetAll(keyPrefix)
-	if err != nil {
-		return gc.rmSection(keyPrefix)
-	}
-
-	return nil
-}
-
-func (gc *gitConfig) gitVersion() (*semver.Version, error) {
-	versionOut, err := gc.cli.runGitCommand("version")
-	if err != nil {
-		return nil, err
-	}
-	return parseGitVersion(versionOut)
-}
-
-func parseGitVersion(versionOut string) (*semver.Version, error) {
-	// extract the version and truncate potential bad parts
-	// ex: 2.23.0.rc1 instead of 2.23.0-rc1
-	r := regexp.MustCompile(`(\d+\.){1,2}\d+`)
-
-	extracted := r.FindString(versionOut)
-	if extracted == "" {
-		return nil, fmt.Errorf("unreadable git version %s", versionOut)
-	}
-
-	version, err := semver.Make(extracted)
-	if err != nil {
-		return nil, err
-	}
-
-	return &version, nil
-}
-
-func (gc *gitConfig) gitVersionLT218() (bool, error) {
-	version, err := gc.gitVersion()
-	if err != nil {
-		return false, err
-	}
-
-	version218string := "2.18.0"
-	gitVersion218, err := semver.Make(version218string)
-	if err != nil {
-		return false, err
-	}
-
-	return version.LT(gitVersion218), nil
-}

repository/git_test.go 🔗

@@ -1,10 +0,0 @@
-// Package repository contains helper methods for working with the Git repo.
-package repository
-
-import (
-	"testing"
-)
-
-func TestGitRepo(t *testing.T) {
-	RepoTest(t, CreateTestRepo, CleanupTestRepos)
-}

repository/git_testing.go 🔗

@@ -1,72 +0,0 @@
-package repository
-
-import (
-	"io/ioutil"
-	"log"
-
-	"github.com/99designs/keyring"
-)
-
-// This is intended for testing only
-
-func CreateTestRepo(bare bool) TestedRepo {
-	dir, err := ioutil.TempDir("", "")
-	if err != nil {
-		log.Fatal(err)
-	}
-
-	var creator func(string) (*GitRepo, error)
-
-	if bare {
-		creator = InitBareGitRepo
-	} else {
-		creator = InitGitRepo
-	}
-
-	repo, err := creator(dir)
-	if err != nil {
-		log.Fatal(err)
-	}
-
-	config := repo.LocalConfig()
-	if err := config.StoreString("user.name", "testuser"); err != nil {
-		log.Fatal("failed to set user.name for test repository: ", err)
-	}
-	if err := config.StoreString("user.email", "testuser@example.com"); err != nil {
-		log.Fatal("failed to set user.email for test repository: ", err)
-	}
-
-	// make sure we use a mock keyring for testing to not interact with the global system
-	return &replaceKeyring{
-		TestedRepo: repo,
-		keyring:    keyring.NewArrayKeyring(nil),
-	}
-}
-
-func SetupReposAndRemote() (repoA, repoB, remote TestedRepo) {
-	repoA = CreateGoGitTestRepo(false)
-	repoB = CreateGoGitTestRepo(false)
-	remote = CreateGoGitTestRepo(true)
-
-	err := repoA.AddRemote("origin", remote.GetLocalRemote())
-	if err != nil {
-		log.Fatal(err)
-	}
-
-	err = repoB.AddRemote("origin", remote.GetLocalRemote())
-	if err != nil {
-		log.Fatal(err)
-	}
-
-	return repoA, repoB, remote
-}
-
-// replaceKeyring allow to replace the Keyring of the underlying repo
-type replaceKeyring struct {
-	TestedRepo
-	keyring Keyring
-}
-
-func (rk replaceKeyring) Keyring() Keyring {
-	return rk.keyring
-}

repository/gogit.go 🔗

@@ -5,7 +5,6 @@ import (
 	"fmt"
 	"io/ioutil"
 	"os"
-	"os/exec"
 	"path/filepath"
 	"sort"
 	"strings"
@@ -20,10 +19,14 @@ import (
 	"github.com/go-git/go-git/v5/plumbing"
 	"github.com/go-git/go-git/v5/plumbing/filemode"
 	"github.com/go-git/go-git/v5/plumbing/object"
+	"golang.org/x/crypto/openpgp"
+	"golang.org/x/sys/execabs"
 
 	"github.com/MichaelMure/git-bug/util/lamport"
 )
 
+const clockPath = "clocks"
+
 var _ ClockedRepo = &GoGitRepo{}
 var _ TestedRepo = &GoGitRepo{}
 
@@ -261,7 +264,7 @@ func (repo *GoGitRepo) GetCoreEditor() (string, error) {
 	}
 
 	for _, cmd := range priorities {
-		if _, err = exec.LookPath(cmd); err == nil {
+		if _, err = execabs.LookPath(cmd); err == nil {
 			return cmd, nil
 		}
 
@@ -332,7 +335,7 @@ func (repo *GoGitRepo) ClearBleveIndex(name string) error {
 	repo.indexesMutex.Lock()
 	defer repo.indexesMutex.Unlock()
 
-	path := filepath.Join(repo.path, "indexes", name)
+	path := filepath.Join(repo.path, "git-bug", "indexes", name)
 
 	err := os.RemoveAll(path)
 	if err != nil {
@@ -350,13 +353,17 @@ func (repo *GoGitRepo) ClearBleveIndex(name string) error {
 	return nil
 }
 
-// FetchRefs fetch git refs from a remote
-func (repo *GoGitRepo) FetchRefs(remote string, refSpec string) (string, error) {
+// FetchRefs fetch git refs matching a directory prefix to a remote
+// Ex: prefix="foo" will fetch any remote refs matching "refs/foo/*" locally.
+// The equivalent git refspec would be "refs/foo/*:refs/remotes/<remote>/foo/*"
+func (repo *GoGitRepo) FetchRefs(remote string, prefix string) (string, error) {
+	refspec := fmt.Sprintf("refs/%s/*:refs/remotes/%s/%s/*", prefix, remote, prefix)
+
 	buf := bytes.NewBuffer(nil)
 
 	err := repo.r.Fetch(&gogit.FetchOptions{
 		RemoteName: remote,
-		RefSpecs:   []config.RefSpec{config.RefSpec(refSpec)},
+		RefSpecs:   []config.RefSpec{config.RefSpec(refspec)},
 		Progress:   buf,
 	})
 	if err == gogit.NoErrAlreadyUpToDate {
@@ -369,13 +376,41 @@ func (repo *GoGitRepo) FetchRefs(remote string, refSpec string) (string, error)
 	return buf.String(), nil
 }
 
-// PushRefs push git refs to a remote
-func (repo *GoGitRepo) PushRefs(remote string, refSpec string) (string, error) {
+// PushRefs push git refs matching a directory prefix to a remote
+// Ex: prefix="foo" will push any local refs matching "refs/foo/*" to the remote.
+// The equivalent git refspec would be "refs/foo/*:refs/foo/*"
+//
+// Additionally, PushRefs will update the local references in refs/remotes/<remote>/foo to match
+// the remote state.
+func (repo *GoGitRepo) PushRefs(remote string, prefix string) (string, error) {
+	refspec := fmt.Sprintf("refs/%s/*:refs/%s/*", prefix, prefix)
+
+	remo, err := repo.r.Remote(remote)
+	if err != nil {
+		return "", err
+	}
+
+	// to make sure that the push also create the corresponding refs/remotes/<remote>/... references,
+	// we need to have a default fetch refspec configured on the remote, to make our refs "track" the remote ones.
+	// This does not change the config on disk, only on memory.
+	hasCustomFetch := false
+	fetchRefspec := fmt.Sprintf("refs/%s/*:refs/remotes/%s/%s/*", prefix, remote, prefix)
+	for _, r := range remo.Config().Fetch {
+		if string(r) == fetchRefspec {
+			hasCustomFetch = true
+			break
+		}
+	}
+
+	if !hasCustomFetch {
+		remo.Config().Fetch = append(remo.Config().Fetch, config.RefSpec(fetchRefspec))
+	}
+
 	buf := bytes.NewBuffer(nil)
 
-	err := repo.r.Push(&gogit.PushOptions{
+	err = remo.Push(&gogit.PushOptions{
 		RemoteName: remote,
-		RefSpecs:   []config.RefSpec{config.RefSpec(refSpec)},
+		RefSpecs:   []config.RefSpec{config.RefSpec(refspec)},
 		Progress:   buf,
 	})
 	if err == gogit.NoErrAlreadyUpToDate {
@@ -519,12 +554,13 @@ func (repo *GoGitRepo) ReadTree(hash Hash) ([]TreeEntry, error) {
 }
 
 // StoreCommit will store a Git commit with the given Git tree
-func (repo *GoGitRepo) StoreCommit(treeHash Hash) (Hash, error) {
-	return repo.StoreCommitWithParent(treeHash, "")
+func (repo *GoGitRepo) StoreCommit(treeHash Hash, parents ...Hash) (Hash, error) {
+	return repo.StoreSignedCommit(treeHash, nil, parents...)
 }
 
-// StoreCommit will store a Git commit with the given Git tree
-func (repo *GoGitRepo) StoreCommitWithParent(treeHash Hash, parent Hash) (Hash, error) {
+// StoreCommit will store a Git commit with the given Git tree. If signKey is not nil, the commit
+// will be signed accordingly.
+func (repo *GoGitRepo) StoreSignedCommit(treeHash Hash, signKey *openpgp.Entity, parents ...Hash) (Hash, error) {
 	cfg, err := repo.r.Config()
 	if err != nil {
 		return "", err
@@ -545,8 +581,28 @@ func (repo *GoGitRepo) StoreCommitWithParent(treeHash Hash, parent Hash) (Hash,
 		TreeHash: plumbing.NewHash(treeHash.String()),
 	}
 
-	if parent != "" {
-		commit.ParentHashes = []plumbing.Hash{plumbing.NewHash(parent.String())}
+	for _, parent := range parents {
+		commit.ParentHashes = append(commit.ParentHashes, plumbing.NewHash(parent.String()))
+	}
+
+	// Compute the signature if needed
+	if signKey != nil {
+		// first get the serialized commit
+		encoded := &plumbing.MemoryObject{}
+		if err := commit.Encode(encoded); err != nil {
+			return "", err
+		}
+		r, err := encoded.Reader()
+		if err != nil {
+			return "", err
+		}
+
+		// sign the data
+		var sig bytes.Buffer
+		if err := openpgp.ArmoredDetachSign(&sig, signKey, r, nil); err != nil {
+			return "", err
+		}
+		commit.PGPSignature = sig.String()
 	}
 
 	obj := repo.r.Storer.NewEncodedObject()
@@ -593,6 +649,14 @@ func (repo *GoGitRepo) FindCommonAncestor(commit1 Hash, commit2 Hash) (Hash, err
 	return Hash(commits[0].Hash.String()), nil
 }
 
+func (repo *GoGitRepo) ResolveRef(ref string) (Hash, error) {
+	r, err := repo.r.Reference(plumbing.ReferenceName(ref), false)
+	if err != nil {
+		return "", err
+	}
+	return Hash(r.Hash().String()), nil
+}
+
 // UpdateRef will create or update a Git reference
 func (repo *GoGitRepo) UpdateRef(ref string, hash Hash) error {
 	return repo.r.Storer.SetReference(plumbing.NewHashReference(plumbing.ReferenceName(ref), plumbing.NewHash(hash.String())))
@@ -647,34 +711,79 @@ func (repo *GoGitRepo) CopyRef(source string, dest string) error {
 
 // ListCommits will return the list of tree hashes of a ref, in chronological order
 func (repo *GoGitRepo) ListCommits(ref string) ([]Hash, error) {
-	r, err := repo.r.Reference(plumbing.ReferenceName(ref), false)
+	return nonNativeListCommits(repo, ref)
+}
+
+func (repo *GoGitRepo) ReadCommit(hash Hash) (Commit, error) {
+	commit, err := repo.r.CommitObject(plumbing.NewHash(hash.String()))
 	if err != nil {
-		return nil, err
+		return Commit{}, err
 	}
 
-	commit, err := repo.r.CommitObject(r.Hash())
-	if err != nil {
-		return nil, err
+	parents := make([]Hash, len(commit.ParentHashes))
+	for i, parentHash := range commit.ParentHashes {
+		parents[i] = Hash(parentHash.String())
 	}
-	hashes := []Hash{Hash(commit.Hash.String())}
 
-	for {
-		commit, err = commit.Parent(0)
-		if err == object.ErrParentNotFound {
-			break
+	result := Commit{
+		Hash:     hash,
+		Parents:  parents,
+		TreeHash: Hash(commit.TreeHash.String()),
+	}
+
+	if commit.PGPSignature != "" {
+		// I can't find a way to just remove the signature when reading the encoded commit so we need to
+		// re-encode the commit without signature.
+
+		encoded := &plumbing.MemoryObject{}
+		err := commit.EncodeWithoutSignature(encoded)
+		if err != nil {
+			return Commit{}, err
 		}
+
+		result.SignedData, err = encoded.Reader()
 		if err != nil {
-			return nil, err
+			return Commit{}, err
 		}
 
-		if commit.NumParents() > 1 {
-			return nil, fmt.Errorf("multiple parents")
+		result.Signature, err = deArmorSignature(strings.NewReader(commit.PGPSignature))
+		if err != nil {
+			return Commit{}, err
 		}
+	}
 
-		hashes = append([]Hash{Hash(commit.Hash.String())}, hashes...)
+	return result, nil
+}
+
+func (repo *GoGitRepo) AllClocks() (map[string]lamport.Clock, error) {
+	repo.clocksMutex.Lock()
+	defer repo.clocksMutex.Unlock()
+
+	result := make(map[string]lamport.Clock)
+
+	files, err := ioutil.ReadDir(filepath.Join(repo.path, "git-bug", clockPath))
+	if os.IsNotExist(err) {
+		return nil, nil
+	}
+	if err != nil {
+		return nil, err
 	}
 
-	return hashes, nil
+	for _, file := range files {
+		name := file.Name()
+		if c, ok := repo.clocks[name]; ok {
+			result[name] = c
+		} else {
+			c, err := lamport.LoadPersistedClock(repo.LocalStorage(), filepath.Join(clockPath, name))
+			if err != nil {
+				return nil, err
+			}
+			repo.clocks[name] = c
+			result[name] = c
+		}
+	}
+
+	return result, nil
 }
 
 // GetOrCreateClock return a Lamport clock stored in the Repo.
@@ -691,7 +800,7 @@ func (repo *GoGitRepo) GetOrCreateClock(name string) (lamport.Clock, error) {
 		return nil, err
 	}
 
-	c, err = lamport.NewPersistedClock(repo.localStorage, name+"-clock")
+	c, err = lamport.NewPersistedClock(repo.LocalStorage(), filepath.Join(clockPath, name))
 	if err != nil {
 		return nil, err
 	}
@@ -705,7 +814,7 @@ func (repo *GoGitRepo) getClock(name string) (lamport.Clock, error) {
 		return c, nil
 	}
 
-	c, err := lamport.LoadPersistedClock(repo.localStorage, name+"-clock")
+	c, err := lamport.LoadPersistedClock(repo.LocalStorage(), filepath.Join(clockPath, name))
 	if err == nil {
 		repo.clocks[name] = c
 		return c, nil
@@ -716,6 +825,24 @@ func (repo *GoGitRepo) getClock(name string) (lamport.Clock, error) {
 	return nil, err
 }
 
+// Increment is equivalent to c = GetOrCreateClock(name) + c.Increment()
+func (repo *GoGitRepo) Increment(name string) (lamport.Time, error) {
+	c, err := repo.GetOrCreateClock(name)
+	if err != nil {
+		return lamport.Time(0), err
+	}
+	return c.Increment()
+}
+
+// Witness is equivalent to c = GetOrCreateClock(name) + c.Witness(time)
+func (repo *GoGitRepo) Witness(name string, time lamport.Time) error {
+	c, err := repo.GetOrCreateClock(name)
+	if err != nil {
+		return err
+	}
+	return c.Witness(time)
+}
+
 // AddRemote add a new remote to the repository
 // Not in the interface because it's only used for testing
 func (repo *GoGitRepo) AddRemote(name string, url string) error {

repository/gogit_config.go 🔗

@@ -134,7 +134,7 @@ func (cr *goGitConfigReader) ReadString(key string) (string, error) {
 		}
 		return section.Option(optionName), nil
 	default:
-		subsectionName := strings.Join(split[1:len(split)-2], ".")
+		subsectionName := strings.Join(split[1:len(split)-1], ".")
 		optionName := split[len(split)-1]
 		if !section.HasSubsection(subsectionName) {
 			return "", ErrNoConfigEntry

repository/gogit_testing.go 🔗

@@ -3,6 +3,8 @@ package repository
 import (
 	"io/ioutil"
 	"log"
+
+	"github.com/99designs/keyring"
 )
 
 // This is intended for testing only
@@ -34,7 +36,11 @@ func CreateGoGitTestRepo(bare bool) TestedRepo {
 		log.Fatal("failed to set user.email for test repository: ", err)
 	}
 
-	return repo
+	// make sure we use a mock keyring for testing to not interact with the global system
+	return &replaceKeyring{
+		TestedRepo: repo,
+		keyring:    keyring.NewArrayKeyring(nil),
+	}
 }
 
 func SetupGoGitReposAndRemote() (repoA, repoB, remote TestedRepo) {

repository/keyring.go 🔗

@@ -15,7 +15,7 @@ var ErrKeyringKeyNotFound = keyring.ErrKeyNotFound
 type Keyring interface {
 	// Returns an Item matching the key or ErrKeyringKeyNotFound
 	Get(key string) (Item, error)
-	// Stores an Item on the keyring
+	// Stores an Item on the keyring. Set is idempotent.
 	Set(item Item) error
 	// Removes the item with matching key
 	Remove(key string) error
@@ -48,3 +48,13 @@ func defaultKeyring() (Keyring, error) {
 		},
 	})
 }
+
+// replaceKeyring allow to replace the Keyring of the underlying repo
+type replaceKeyring struct {
+	TestedRepo
+	keyring Keyring
+}
+
+func (rk replaceKeyring) Keyring() Keyring {
+	return rk.keyring
+}

repository/mock_repo.go 🔗

@@ -1,6 +1,7 @@
 package repository
 
 import (
+	"bytes"
 	"crypto/sha1"
 	"fmt"
 	"strings"
@@ -10,15 +11,16 @@ import (
 	"github.com/blevesearch/bleve"
 	"github.com/go-git/go-billy/v5"
 	"github.com/go-git/go-billy/v5/memfs"
+	"golang.org/x/crypto/openpgp"
 
 	"github.com/MichaelMure/git-bug/util/lamport"
 )
 
-var _ ClockedRepo = &mockRepoForTest{}
-var _ TestedRepo = &mockRepoForTest{}
+var _ ClockedRepo = &mockRepo{}
+var _ TestedRepo = &mockRepo{}
 
-// mockRepoForTest defines an instance of Repo that can be used for testing.
-type mockRepoForTest struct {
+// mockRepo defines an instance of Repo that can be used for testing.
+type mockRepo struct {
 	*mockRepoConfig
 	*mockRepoKeyring
 	*mockRepoCommon
@@ -26,12 +28,13 @@ type mockRepoForTest struct {
 	*mockRepoBleve
 	*mockRepoData
 	*mockRepoClock
+	*mockRepoTest
 }
 
-func (m *mockRepoForTest) Close() error { return nil }
+func (m *mockRepo) Close() error { return nil }
 
-func NewMockRepoForTest() *mockRepoForTest {
-	return &mockRepoForTest{
+func NewMockRepo() *mockRepo {
+	return &mockRepo{
 		mockRepoConfig:  NewMockRepoConfig(),
 		mockRepoKeyring: NewMockRepoKeyring(),
 		mockRepoCommon:  NewMockRepoCommon(),
@@ -39,6 +42,7 @@ func NewMockRepoForTest() *mockRepoForTest {
 		mockRepoBleve:   newMockRepoBleve(),
 		mockRepoData:    NewMockRepoData(),
 		mockRepoClock:   NewMockRepoClock(),
+		mockRepoTest:    NewMockRepoTest(),
 	}
 }
 
@@ -177,7 +181,8 @@ var _ RepoData = &mockRepoData{}
 
 type commit struct {
 	treeHash Hash
-	parent   Hash
+	parents  []Hash
+	sig      string
 }
 
 type mockRepoData struct {
@@ -196,13 +201,13 @@ func NewMockRepoData() *mockRepoData {
 	}
 }
 
-// PushRefs push git refs to a remote
-func (r *mockRepoData) PushRefs(remote string, refSpec string) (string, error) {
-	return "", nil
+func (r *mockRepoData) FetchRefs(remote string, prefix string) (string, error) {
+	panic("implement me")
 }
 
-func (r *mockRepoData) FetchRefs(remote string, refSpec string) (string, error) {
-	return "", nil
+// PushRefs push git refs to a remote
+func (r *mockRepoData) PushRefs(remote string, prefix string) (string, error) {
+	panic("implement me")
 }
 
 func (r *mockRepoData) StoreData(data []byte) (Hash, error) {
@@ -214,7 +219,6 @@ func (r *mockRepoData) StoreData(data []byte) (Hash, error) {
 
 func (r *mockRepoData) ReadData(hash Hash) ([]byte, error) {
 	data, ok := r.blobs[hash]
-
 	if !ok {
 		return nil, fmt.Errorf("unknown hash")
 	}
@@ -231,48 +235,103 @@ func (r *mockRepoData) StoreTree(entries []TreeEntry) (Hash, error) {
 	return hash, nil
 }
 
-func (r *mockRepoData) StoreCommit(treeHash Hash) (Hash, error) {
-	rawHash := sha1.Sum([]byte(treeHash))
-	hash := Hash(fmt.Sprintf("%x", rawHash))
-	r.commits[hash] = commit{
-		treeHash: treeHash,
+func (r *mockRepoData) ReadTree(hash Hash) ([]TreeEntry, error) {
+	var data string
+
+	data, ok := r.trees[hash]
+
+	if !ok {
+		// Git will understand a commit hash to reach a tree
+		commit, ok := r.commits[hash]
+
+		if !ok {
+			return nil, fmt.Errorf("unknown hash")
+		}
+
+		data, ok = r.trees[commit.treeHash]
+
+		if !ok {
+			return nil, fmt.Errorf("unknown hash")
+		}
 	}
-	return hash, nil
+
+	return readTreeEntries(data)
+}
+
+func (r *mockRepoData) StoreCommit(treeHash Hash, parents ...Hash) (Hash, error) {
+	return r.StoreSignedCommit(treeHash, nil, parents...)
 }
 
-func (r *mockRepoData) StoreCommitWithParent(treeHash Hash, parent Hash) (Hash, error) {
-	rawHash := sha1.Sum([]byte(treeHash + parent))
+func (r *mockRepoData) StoreSignedCommit(treeHash Hash, signKey *openpgp.Entity, parents ...Hash) (Hash, error) {
+	hasher := sha1.New()
+	hasher.Write([]byte(treeHash))
+	for _, parent := range parents {
+		hasher.Write([]byte(parent))
+	}
+	rawHash := hasher.Sum(nil)
 	hash := Hash(fmt.Sprintf("%x", rawHash))
-	r.commits[hash] = commit{
+	c := commit{
 		treeHash: treeHash,
-		parent:   parent,
+		parents:  parents,
+	}
+	if signKey != nil {
+		// unlike go-git, we only sign the tree hash for simplicity instead of all the fields (parents ...)
+		var sig bytes.Buffer
+		if err := openpgp.DetachSign(&sig, signKey, strings.NewReader(string(treeHash)), nil); err != nil {
+			return "", err
+		}
+		c.sig = sig.String()
 	}
+	r.commits[hash] = c
 	return hash, nil
 }
 
-func (r *mockRepoData) UpdateRef(ref string, hash Hash) error {
-	r.refs[ref] = hash
-	return nil
-}
+func (r *mockRepoData) ReadCommit(hash Hash) (Commit, error) {
+	c, ok := r.commits[hash]
+	if !ok {
+		return Commit{}, fmt.Errorf("unknown commit")
+	}
 
-func (r *mockRepoData) RemoveRef(ref string) error {
-	delete(r.refs, ref)
-	return nil
-}
+	result := Commit{
+		Hash:     hash,
+		Parents:  c.parents,
+		TreeHash: c.treeHash,
+	}
 
-func (r *mockRepoData) RefExist(ref string) (bool, error) {
-	_, exist := r.refs[ref]
-	return exist, nil
+	if c.sig != "" {
+		// Note: this is actually incorrect as the signed data should be the full commit (+comment, +date ...)
+		// but only the tree hash work for our purpose here.
+		result.SignedData = strings.NewReader(string(c.treeHash))
+		result.Signature = strings.NewReader(c.sig)
+	}
+
+	return result, nil
 }
 
-func (r *mockRepoData) CopyRef(source string, dest string) error {
-	hash, exist := r.refs[source]
+func (r *mockRepoData) GetTreeHash(commit Hash) (Hash, error) {
+	c, ok := r.commits[commit]
+	if !ok {
+		return "", fmt.Errorf("unknown commit")
+	}
 
-	if !exist {
-		return fmt.Errorf("Unknown ref")
+	return c.treeHash, nil
+}
+
+func (r *mockRepoData) ResolveRef(ref string) (Hash, error) {
+	h, ok := r.refs[ref]
+	if !ok {
+		return "", fmt.Errorf("unknown ref")
 	}
+	return h, nil
+}
 
-	r.refs[dest] = hash
+func (r *mockRepoData) UpdateRef(ref string, hash Hash) error {
+	r.refs[ref] = hash
+	return nil
+}
+
+func (r *mockRepoData) RemoveRef(ref string) error {
+	delete(r.refs, ref)
 	return nil
 }
 
@@ -288,46 +347,20 @@ func (r *mockRepoData) ListRefs(refPrefix string) ([]string, error) {
 	return keys, nil
 }
 
-func (r *mockRepoData) ListCommits(ref string) ([]Hash, error) {
-	var hashes []Hash
-
-	hash := r.refs[ref]
-
-	for {
-		commit, ok := r.commits[hash]
-
-		if !ok {
-			break
-		}
-
-		hashes = append([]Hash{hash}, hashes...)
-		hash = commit.parent
-	}
-
-	return hashes, nil
+func (r *mockRepoData) RefExist(ref string) (bool, error) {
+	_, exist := r.refs[ref]
+	return exist, nil
 }
 
-func (r *mockRepoData) ReadTree(hash Hash) ([]TreeEntry, error) {
-	var data string
-
-	data, ok := r.trees[hash]
-
-	if !ok {
-		// Git will understand a commit hash to reach a tree
-		commit, ok := r.commits[hash]
-
-		if !ok {
-			return nil, fmt.Errorf("unknown hash")
-		}
-
-		data, ok = r.trees[commit.treeHash]
+func (r *mockRepoData) CopyRef(source string, dest string) error {
+	hash, exist := r.refs[source]
 
-		if !ok {
-			return nil, fmt.Errorf("unknown hash")
-		}
+	if !exist {
+		return fmt.Errorf("Unknown ref")
 	}
 
-	return readTreeEntries(data)
+	r.refs[dest] = hash
+	return nil
 }
 
 func (r *mockRepoData) FindCommonAncestor(hash1 Hash, hash2 Hash) (Hash, error) {
@@ -338,8 +371,11 @@ func (r *mockRepoData) FindCommonAncestor(hash1 Hash, hash2 Hash) (Hash, error)
 		if !ok {
 			return "", fmt.Errorf("unknown commit %v", hash1)
 		}
-		ancestor1 = append(ancestor1, c.parent)
-		hash1 = c.parent
+		if len(c.parents) == 0 {
+			break
+		}
+		ancestor1 = append(ancestor1, c.parents[0])
+		hash1 = c.parents[0]
 	}
 
 	for {
@@ -354,35 +390,19 @@ func (r *mockRepoData) FindCommonAncestor(hash1 Hash, hash2 Hash) (Hash, error)
 			return "", fmt.Errorf("unknown commit %v", hash1)
 		}
 
-		if c.parent == "" {
+		if c.parents[0] == "" {
 			return "", fmt.Errorf("no ancestor found")
 		}
 
-		hash2 = c.parent
-	}
-}
-
-func (r *mockRepoData) GetTreeHash(commit Hash) (Hash, error) {
-	c, ok := r.commits[commit]
-	if !ok {
-		return "", fmt.Errorf("unknown commit")
+		hash2 = c.parents[0]
 	}
-
-	return c.treeHash, nil
 }
 
-func (r *mockRepoData) AddRemote(name string, url string) error {
-	panic("implement me")
-}
-
-func (m mockRepoForTest) GetLocalRemote() string {
-	panic("implement me")
+func (r *mockRepoData) ListCommits(ref string) ([]Hash, error) {
+	return nonNativeListCommits(r, ref)
 }
 
-func (m mockRepoForTest) EraseFromDisk() error {
-	// nothing to do
-	return nil
-}
+var _ RepoClock = &mockRepoClock{}
 
 type mockRepoClock struct {
 	mu     sync.Mutex
@@ -395,6 +415,10 @@ func NewMockRepoClock() *mockRepoClock {
 	}
 }
 
+func (r *mockRepoClock) AllClocks() (map[string]lamport.Clock, error) {
+	return r.clocks, nil
+}
+
 func (r *mockRepoClock) GetOrCreateClock(name string) (lamport.Clock, error) {
 	r.mu.Lock()
 	defer r.mu.Unlock()
@@ -407,3 +431,40 @@ func (r *mockRepoClock) GetOrCreateClock(name string) (lamport.Clock, error) {
 	r.clocks[name] = c
 	return c, nil
 }
+
+func (r *mockRepoClock) Increment(name string) (lamport.Time, error) {
+	c, err := r.GetOrCreateClock(name)
+	if err != nil {
+		return lamport.Time(0), err
+	}
+	return c.Increment()
+}
+
+func (r *mockRepoClock) Witness(name string, time lamport.Time) error {
+	c, err := r.GetOrCreateClock(name)
+	if err != nil {
+		return err
+	}
+	return c.Witness(time)
+}
+
+var _ repoTest = &mockRepoTest{}
+
+type mockRepoTest struct{}
+
+func NewMockRepoTest() *mockRepoTest {
+	return &mockRepoTest{}
+}
+
+func (r *mockRepoTest) AddRemote(name string, url string) error {
+	panic("implement me")
+}
+
+func (r mockRepoTest) GetLocalRemote() string {
+	panic("implement me")
+}
+
+func (r mockRepoTest) EraseFromDisk() error {
+	// nothing to do
+	return nil
+}

repository/mock_repo_test.go 🔗

@@ -1,9 +1,11 @@
 package repository
 
-import "testing"
+import (
+	"testing"
+)
 
 func TestMockRepo(t *testing.T) {
-	creator := func(bare bool) TestedRepo { return NewMockRepoForTest() }
+	creator := func(bare bool) TestedRepo { return NewMockRepo() }
 	cleaner := func(repos ...Repo) {}
 
 	RepoTest(t, creator, cleaner)

repository/repo.go 🔗

@@ -3,15 +3,17 @@ package repository
 
 import (
 	"errors"
+	"io"
 
 	"github.com/blevesearch/bleve"
 	"github.com/go-git/go-billy/v5"
+	"golang.org/x/crypto/openpgp"
 
 	"github.com/MichaelMure/git-bug/util/lamport"
 )
 
 var (
-	// ErrNotARepo is the error returned when the git repo root wan't be found
+	// ErrNotARepo is the error returned when the git repo root can't be found
 	ErrNotARepo = errors.New("not a git repository")
 	// ErrClockNotExist is the error returned when a clock can't be found
 	ErrClockNotExist = errors.New("clock doesn't exist")
@@ -22,9 +24,9 @@ type Repo interface {
 	RepoConfig
 	RepoKeyring
 	RepoCommon
-	RepoData
 	RepoStorage
 	RepoBleve
+	RepoData
 
 	Close() error
 }
@@ -88,13 +90,28 @@ type RepoBleve interface {
 	ClearBleveIndex(name string) error
 }
 
+type Commit struct {
+	Hash       Hash
+	Parents    []Hash    // hashes of the parents, if any
+	TreeHash   Hash      // hash of the git Tree
+	SignedData io.Reader // if signed, reader for the signed data (likely, the serialized commit)
+	Signature  io.Reader // if signed, reader for the (non-armored) signature
+}
+
 // RepoData give access to the git data storage
 type RepoData interface {
-	// FetchRefs fetch git refs from a remote
-	FetchRefs(remote string, refSpec string) (string, error)
-
-	// PushRefs push git refs to a remote
-	PushRefs(remote string, refSpec string) (string, error)
+	// FetchRefs fetch git refs matching a directory prefix from a remote
+	// Ex: prefix="foo" will fetch any remote refs matching "refs/foo/*" locally.
+	// The equivalent git refspec would be "refs/foo/*:refs/remotes/<remote>/foo/*"
+	FetchRefs(remote string, prefix string) (string, error)
+
+	// PushRefs push git refs matching a directory prefix to a remote
+	// Ex: prefix="foo" will push any local refs matching "refs/foo/*" to the remote.
+	// The equivalent git refspec would be "refs/foo/*:refs/foo/*"
+	//
+	// Additionally, PushRefs will update the local references in refs/remotes/<remote>/foo to match
+	// the remote state.
+	PushRefs(remote string, prefix string) (string, error)
 
 	// StoreData will store arbitrary data and return the corresponding hash
 	StoreData(data []byte) (Hash, error)
@@ -110,21 +127,27 @@ type RepoData interface {
 	ReadTree(hash Hash) ([]TreeEntry, error)
 
 	// StoreCommit will store a Git commit with the given Git tree
-	StoreCommit(treeHash Hash) (Hash, error)
+	StoreCommit(treeHash Hash, parents ...Hash) (Hash, error)
 
-	// StoreCommit will store a Git commit with the given Git tree
-	StoreCommitWithParent(treeHash Hash, parent Hash) (Hash, error)
+	// StoreSignedCommit will store a Git commit with the given Git tree. If signKey is not nil, the commit
+	// will be signed accordingly.
+	StoreSignedCommit(treeHash Hash, signKey *openpgp.Entity, parents ...Hash) (Hash, error)
+
+	// ReadCommit reads a Git commit and returns some of its characteristics
+	ReadCommit(hash Hash) (Commit, error)
 
 	// GetTreeHash return the git tree hash referenced in a commit
+	// Deprecated
 	GetTreeHash(commit Hash) (Hash, error)
 
-	// FindCommonAncestor will return the last common ancestor of two chain of commit
-	FindCommonAncestor(commit1 Hash, commit2 Hash) (Hash, error)
+	// ResolveRef returns the hash of the target commit of the given ref
+	ResolveRef(ref string) (Hash, error)
 
 	// UpdateRef will create or update a Git reference
 	UpdateRef(ref string, hash Hash) error
 
 	// RemoveRef will remove a Git reference
+	// RemoveRef is idempotent.
 	RemoveRef(ref string) error
 
 	// ListRefs will return a list of Git ref matching the given refspec
@@ -136,15 +159,28 @@ type RepoData interface {
 	// CopyRef will create a new reference with the same value as another one
 	CopyRef(source string, dest string) error
 
+	// FindCommonAncestor will return the last common ancestor of two chain of commit
+	// Deprecated
+	FindCommonAncestor(commit1 Hash, commit2 Hash) (Hash, error)
+
 	// ListCommits will return the list of tree hashes of a ref, in chronological order
 	ListCommits(ref string) ([]Hash, error)
 }
 
 // RepoClock give access to Lamport clocks
 type RepoClock interface {
+	// AllClocks return all the known clocks
+	AllClocks() (map[string]lamport.Clock, error)
+
 	// GetOrCreateClock return a Lamport clock stored in the Repo.
 	// If the clock doesn't exist, it's created.
 	GetOrCreateClock(name string) (lamport.Clock, error)
+
+	// Increment is equivalent to c = GetOrCreateClock(name) + c.Increment()
+	Increment(name string) (lamport.Time, error)
+
+	// Witness is equivalent to c = GetOrCreateClock(name) + c.Witness(time)
+	Witness(name string, time lamport.Time) error
 }
 
 // ClockLoader hold which logical clock need to exist for an entity and

repository/repo_testing.go 🔗

@@ -6,10 +6,14 @@ import (
 	"testing"
 
 	"github.com/stretchr/testify/require"
+	"golang.org/x/crypto/openpgp"
 
 	"github.com/MichaelMure/git-bug/util/lamport"
 )
 
+// TODO: add tests for RepoBleve
+// TODO: add tests for RepoStorage
+
 func CleanupTestRepos(repos ...Repo) {
 	var firstErr error
 	for _, repo := range repos {
@@ -44,6 +48,7 @@ func RepoTest(t *testing.T, creator RepoCreator, cleaner RepoCleaner) {
 
 			t.Run("Data", func(t *testing.T) {
 				RepoDataTest(t, repo)
+				RepoDataSignatureTest(t, repo)
 			})
 
 			t.Run("Config", func(t *testing.T) {
@@ -135,7 +140,8 @@ func RepoDataTest(t *testing.T, repo RepoData) {
 	require.NoError(t, err)
 	require.Equal(t, treeHash1, treeHash1Read)
 
-	commit2, err := repo.StoreCommitWithParent(treeHash2, commit1)
+	// commit with a parent
+	commit2, err := repo.StoreCommit(treeHash2, commit1)
 	require.NoError(t, err)
 	require.True(t, commit2.IsValid())
 
@@ -148,6 +154,11 @@ func RepoDataTest(t *testing.T, repo RepoData) {
 	require.NoError(t, err)
 	require.Equal(t, tree1read, tree1)
 
+	c2, err := repo.ReadCommit(commit2)
+	require.NoError(t, err)
+	c2expected := Commit{Hash: commit2, Parents: []Hash{commit1}, TreeHash: treeHash2}
+	require.Equal(t, c2expected, c2)
+
 	// Ref
 
 	exist1, err := repo.RefExist("refs/bugs/ref1")
@@ -161,6 +172,10 @@ func RepoDataTest(t *testing.T, repo RepoData) {
 	require.NoError(t, err)
 	require.True(t, exist1)
 
+	h, err := repo.ResolveRef("refs/bugs/ref1")
+	require.NoError(t, err)
+	require.Equal(t, commit2, h)
+
 	ls, err := repo.ListRefs("refs/bugs")
 	require.NoError(t, err)
 	require.ElementsMatch(t, []string{"refs/bugs/ref1"}, ls)
@@ -178,7 +193,7 @@ func RepoDataTest(t *testing.T, repo RepoData) {
 
 	// Graph
 
-	commit3, err := repo.StoreCommitWithParent(treeHash1, commit1)
+	commit3, err := repo.StoreCommit(treeHash1, commit1)
 	require.NoError(t, err)
 
 	ancestorHash, err := repo.FindCommonAncestor(commit2, commit3)
@@ -187,17 +202,73 @@ func RepoDataTest(t *testing.T, repo RepoData) {
 
 	err = repo.RemoveRef("refs/bugs/ref1")
 	require.NoError(t, err)
+
+	// RemoveRef is idempotent
+	err = repo.RemoveRef("refs/bugs/ref1")
+	require.NoError(t, err)
+}
+
+func RepoDataSignatureTest(t *testing.T, repo RepoData) {
+	data := randomData()
+
+	blobHash, err := repo.StoreData(data)
+	require.NoError(t, err)
+
+	treeHash, err := repo.StoreTree([]TreeEntry{
+		{
+			ObjectType: Blob,
+			Hash:       blobHash,
+			Name:       "blob",
+		},
+	})
+	require.NoError(t, err)
+
+	pgpEntity1, err := openpgp.NewEntity("", "", "", nil)
+	require.NoError(t, err)
+	keyring1 := openpgp.EntityList{pgpEntity1}
+
+	pgpEntity2, err := openpgp.NewEntity("", "", "", nil)
+	require.NoError(t, err)
+	keyring2 := openpgp.EntityList{pgpEntity2}
+
+	commitHash1, err := repo.StoreSignedCommit(treeHash, pgpEntity1)
+	require.NoError(t, err)
+
+	commit1, err := repo.ReadCommit(commitHash1)
+	require.NoError(t, err)
+
+	_, err = openpgp.CheckDetachedSignature(keyring1, commit1.SignedData, commit1.Signature)
+	require.NoError(t, err)
+
+	_, err = openpgp.CheckDetachedSignature(keyring2, commit1.SignedData, commit1.Signature)
+	require.Error(t, err)
+
+	commitHash2, err := repo.StoreSignedCommit(treeHash, pgpEntity1, commitHash1)
+	require.NoError(t, err)
+
+	commit2, err := repo.ReadCommit(commitHash2)
+	require.NoError(t, err)
+
+	_, err = openpgp.CheckDetachedSignature(keyring1, commit2.SignedData, commit2.Signature)
+	require.NoError(t, err)
+
+	_, err = openpgp.CheckDetachedSignature(keyring2, commit2.SignedData, commit2.Signature)
+	require.Error(t, err)
 }
 
 // helper to test a RepoClock
 func RepoClockTest(t *testing.T, repo RepoClock) {
+	allClocks, err := repo.AllClocks()
+	require.NoError(t, err)
+	require.Len(t, allClocks, 0)
+
 	clock, err := repo.GetOrCreateClock("foo")
 	require.NoError(t, err)
 	require.Equal(t, lamport.Time(1), clock.Time())
 
 	time, err := clock.Increment()
 	require.NoError(t, err)
-	require.Equal(t, lamport.Time(1), time)
+	require.Equal(t, lamport.Time(2), time)
 	require.Equal(t, lamport.Time(2), clock.Time())
 
 	clock2, err := repo.GetOrCreateClock("foo")
@@ -207,6 +278,13 @@ func RepoClockTest(t *testing.T, repo RepoClock) {
 	clock3, err := repo.GetOrCreateClock("bar")
 	require.NoError(t, err)
 	require.Equal(t, lamport.Time(1), clock3.Time())
+
+	allClocks, err = repo.AllClocks()
+	require.NoError(t, err)
+	require.Equal(t, map[string]lamport.Clock{
+		"foo": clock,
+		"bar": clock3,
+	}, allClocks)
 }
 
 func randomData() []byte {

repository/tree_entry.go 🔗

@@ -100,3 +100,13 @@ func readTreeEntries(s string) ([]TreeEntry, error) {
 
 	return casted, nil
 }
+
+// SearchTreeEntry search a TreeEntry by name from an array
+func SearchTreeEntry(entries []TreeEntry, name string) (TreeEntry, bool) {
+	for _, entry := range entries {
+		if entry.Name == name {
+			return entry, true
+		}
+	}
+	return TreeEntry{}, false
+}

tests/read_bugs_test.go 🔗

@@ -14,7 +14,7 @@ func TestReadBugs(t *testing.T) {
 
 	random_bugs.FillRepoWithSeed(repo, 15, 42)
 
-	bugs := bug.ReadAllLocal(repo)
+	bugs := bug.ReadAll(repo)
 	for b := range bugs {
 		if b.Err != nil {
 			t.Fatal(b.Err)
@@ -30,7 +30,7 @@ func benchmarkReadBugs(bugNumber int, t *testing.B) {
 	t.ResetTimer()
 
 	for n := 0; n < t.N; n++ {
-		bugs := bug.ReadAllLocal(repo)
+		bugs := bug.ReadAll(repo)
 		for b := range bugs {
 			if b.Err != nil {
 				t.Fatal(b.Err)

util/lamport/clock_testing.go 🔗

@@ -11,14 +11,14 @@ func testClock(t *testing.T, c Clock) {
 
 	val, err := c.Increment()
 	assert.NoError(t, err)
-	assert.Equal(t, Time(1), val)
+	assert.Equal(t, Time(2), val)
 	assert.Equal(t, Time(2), c.Time())
 
-	err = c.Witness(41)
+	err = c.Witness(42)
 	assert.NoError(t, err)
 	assert.Equal(t, Time(42), c.Time())
 
-	err = c.Witness(41)
+	err = c.Witness(42)
 	assert.NoError(t, err)
 	assert.Equal(t, Time(42), c.Time())
 

util/lamport/mem_clock.go 🔗

@@ -25,6 +25,14 @@
 
 */
 
+// Note: this code originally originated from Hashicorp's Serf but has been changed since to fit git-bug's needs.
+
+// Note: this Lamport clock implementation is different from the algorithms you can find, notably on Wikipedia or in the
+//       original Serf implementation. The reason lies in what constitutes an event in this distributed system.
+//       Commonly, events happen when messages are sent or received, whereas in git-bug events happen when some data is
+//       written, but *not* when read. This is why Witness sets the time to the max seen value instead of max seen value +1.
+//       See https://cs.stackexchange.com/a/133730/129795
+
 package lamport
 
 import (
@@ -62,7 +70,7 @@ func (mc *MemClock) Time() Time {
 
 // Increment is used to return the value of the lamport clock and increment it afterwards
 func (mc *MemClock) Increment() (Time, error) {
-	return Time(atomic.AddUint64(&mc.counter, 1) - 1), nil
+	return Time(atomic.AddUint64(&mc.counter, 1)), nil
 }
 
 // Witness is called to update our local clock if necessary after
@@ -72,12 +80,12 @@ WITNESS:
 	// If the other value is old, we do not need to do anything
 	cur := atomic.LoadUint64(&mc.counter)
 	other := uint64(v)
-	if other < cur {
+	if other <= cur {
 		return nil
 	}
 
 	// Ensure that our local clock is at least one ahead.
-	if !atomic.CompareAndSwapUint64(&mc.counter, cur, other+1) {
+	if !atomic.CompareAndSwapUint64(&mc.counter, cur, other) {
 		// CAS: CompareAndSwap
 		// The CAS failed, so we just retry. Eventually our CAS should
 		// succeed or a future witness will pass us by and our witness

webui/.eslintrc.js 🔗

@@ -38,4 +38,5 @@ module.exports = {
   settings: {
     'import/internal-regex': '^src/',
   },
+  ignorePatterns: ['**/*.generated.tsx'],
 };

webui/package-lock.json 🔗

@@ -12572,8 +12572,7 @@
     "growly": {
       "version": "1.3.0",
       "resolved": "https://registry.npmjs.org/growly/-/growly-1.3.0.tgz",
-      "integrity": "sha1-8QdIy+dq+WS3yWyTxrzCivEgwIE=",
-      "optional": true
+      "integrity": "sha1-8QdIy+dq+WS3yWyTxrzCivEgwIE="
     },
     "gzip-size": {
       "version": "5.1.1",
@@ -16116,10 +16115,9 @@
       "integrity": "sha1-jZ2+KJZKSsVxLpExZCEHxx6Q7EA="
     },
     "node-notifier": {
-      "version": "8.0.0",
-      "resolved": "https://registry.npmjs.org/node-notifier/-/node-notifier-8.0.0.tgz",
-      "integrity": "sha512-46z7DUmcjoYdaWyXouuFNNfUo6eFa94t23c53c+lG/9Cvauk4a98rAUp9672X5dxGdQmLpPzTxzu8f/OeEPaFA==",
-      "optional": true,
+      "version": "8.0.1",
+      "resolved": "https://registry.npmjs.org/node-notifier/-/node-notifier-8.0.1.tgz",
+      "integrity": "sha512-BvEXF+UmsnAfYfoapKM9nGxnP+Wn7P91YfXmrKnfcYCx6VBeoN5Ez5Ogck6I8Bi5k4RlpqRYaw75pAwzX9OphA==",
       "requires": {
         "growly": "^1.3.0",
         "is-wsl": "^2.2.0",
@@ -16130,22 +16128,22 @@
       },
       "dependencies": {
         "semver": {
-          "version": "7.3.2",
-          "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.2.tgz",
-          "integrity": "sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ==",
-          "optional": true
+          "version": "7.3.4",
+          "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.4.tgz",
+          "integrity": "sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw==",
+          "requires": {
+            "lru-cache": "^6.0.0"
+          }
         },
         "uuid": {
-          "version": "8.3.0",
-          "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.0.tgz",
-          "integrity": "sha512-fX6Z5o4m6XsXBdli9g7DtWgAx+osMsRRZFKma1mIUsLCz6vRvv+pz5VNbyu9UEDzpMWulZfvpgb/cmDXVulYFQ==",
-          "optional": true
+          "version": "8.3.2",
+          "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
+          "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
         },
         "which": {
           "version": "2.0.2",
           "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
           "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
-          "optional": true,
           "requires": {
             "isexe": "^2.0.0"
           }
@@ -20221,8 +20219,7 @@
     "shellwords": {
       "version": "0.1.1",
       "resolved": "https://registry.npmjs.org/shellwords/-/shellwords-0.1.1.tgz",
-      "integrity": "sha512-vFwSUfQvqybiICwZY5+DAWIPLKsWO31Q91JSKl3UYv+K5c2QRPzn0qzec6QPu1Qc9eHYItiP3NdJqNVqetYAww==",
-      "optional": true
+      "integrity": "sha512-vFwSUfQvqybiICwZY5+DAWIPLKsWO31Q91JSKl3UYv+K5c2QRPzn0qzec6QPu1Qc9eHYItiP3NdJqNVqetYAww=="
     },
     "side-channel": {
       "version": "1.0.3",

webui/src/App.tsx 🔗

@@ -5,6 +5,7 @@ import Layout from './components/Header';
 import BugPage from './pages/bug';
 import ListPage from './pages/list';
 import NewBugPage from './pages/new/NewBugPage';
+import NotFoundPage from './pages/notfound/NotFoundPage';
 
 export default function App() {
   return (
@@ -13,6 +14,7 @@ export default function App() {
         <Route path="/" exact component={ListPage} />
         <Route path="/new" exact component={NewBugPage} />
         <Route path="/bug/:id" exact component={BugPage} />
+        <Route component={NotFoundPage} />
       </Switch>
     </Layout>
   );

webui/src/components/BackToListButton.tsx 🔗

@@ -0,0 +1,38 @@
+import React from 'react';
+import { Link } from 'react-router-dom';
+
+import Button from '@material-ui/core/Button';
+import { makeStyles } from '@material-ui/core/styles';
+import ArrowBackIcon from '@material-ui/icons/ArrowBack';
+
+const useStyles = makeStyles((theme) => ({
+  backButton: {
+    position: 'sticky',
+    top: '80px',
+    backgroundColor: theme.palette.primary.dark,
+    color: theme.palette.primary.contrastText,
+    '&:hover': {
+      backgroundColor: theme.palette.primary.main,
+      color: theme.palette.primary.contrastText,
+    },
+  },
+}));
+
+function BackToListButton() {
+  const classes = useStyles();
+
+  return (
+    <Button
+      variant="contained"
+      className={classes.backButton}
+      aria-label="back to issue list"
+      component={Link}
+      to="/"
+    >
+      <ArrowBackIcon />
+      Back to List
+    </Button>
+  );
+}
+
+export default BackToListButton;

webui/src/components/BugTitleForm/BugTitleForm.tsx 🔗

@@ -1,12 +1,7 @@
 import React, { useState } from 'react';
+import { Link } from 'react-router-dom';
 
-import {
-  Button,
-  fade,
-  makeStyles,
-  TextField,
-  Typography,
-} from '@material-ui/core';
+import { Button, makeStyles, Typography } from '@material-ui/core';
 
 import { TimelineDocument } from '../../pages/bug/TimelineQuery.generated';
 import IfLoggedIn from '../IfLoggedIn/IfLoggedIn';
@@ -14,6 +9,7 @@ import Author from 'src/components/Author';
 import Date from 'src/components/Date';
 import { BugFragment } from 'src/pages/bug/Bug.generated';
 
+import BugTitleInput from './BugTitleInput';
 import { useSetTitleMutation } from './SetTitle.generated';
 
 /**
@@ -45,26 +41,16 @@ const useStyles = makeStyles((theme) => ({
     marginLeft: theme.spacing(2),
   },
   greenButton: {
-    marginLeft: '8px',
-    backgroundColor: '#2ea44fd9',
-    color: '#fff',
+    marginLeft: theme.spacing(1),
+    backgroundColor: theme.palette.success.main,
+    color: theme.palette.success.contrastText,
     '&:hover': {
-      backgroundColor: '#2ea44f',
+      backgroundColor: theme.palette.success.dark,
+      color: theme.palette.primary.contrastText,
     },
   },
-  titleInput: {
-    borderRadius: theme.shape.borderRadius,
-    borderColor: fade(theme.palette.primary.main, 0.2),
-    borderStyle: 'solid',
-    borderWidth: '1px',
-    backgroundColor: fade(theme.palette.primary.main, 0.05),
-    padding: theme.spacing(0, 0),
-    minWidth: 336,
-    transition: theme.transitions.create([
-      'width',
-      'borderColor',
-      'backgroundColor',
-    ]),
+  saveButton: {
+    marginRight: theme.spacing(1),
   },
 }));
 
@@ -85,7 +71,7 @@ function BugTitleForm({ bug }: Props) {
 
   function isFormValid() {
     if (issueTitleInput) {
-      return issueTitleInput.value.length > 0 ? true : false;
+      return issueTitleInput.value.length > 0;
     } else {
       return false;
     }
@@ -122,11 +108,11 @@ function BugTitleForm({ bug }: Props) {
   function editableBugTitle() {
     return (
       <form className={classes.headerTitle} onSubmit={submitNewTitle}>
-        <TextField
+        <BugTitleInput
           inputRef={(node) => {
             issueTitleInput = node;
           }}
-          className={classes.titleInput}
+          label="Title"
           variant="outlined"
           fullWidth
           margin="dense"
@@ -135,6 +121,7 @@ function BugTitleForm({ bug }: Props) {
         />
         <div className={classes.editButtonContainer}>
           <Button
+            className={classes.saveButton}
             size="small"
             variant="contained"
             type="submit"
@@ -171,9 +158,10 @@ function BugTitleForm({ bug }: Props) {
                 className={classes.greenButton}
                 size="small"
                 variant="contained"
-                href="/new"
+                component={Link}
+                to="/new"
               >
-                New issue
+                New bug
               </Button>
             </div>
           )}

webui/src/components/BugTitleForm/BugTitleInput.tsx 🔗

@@ -0,0 +1,40 @@
+import { createStyles, fade, withStyles, TextField } from '@material-ui/core';
+import { Theme } from '@material-ui/core/styles';
+
+const BugTitleInput = withStyles((theme: Theme) =>
+  createStyles({
+    root: {
+      '& .MuiInputLabel-outlined': {
+        color: theme.palette.text.primary,
+      },
+      '& input:valid + fieldset': {
+        color: theme.palette.text.primary,
+        borderColor: theme.palette.divider,
+        borderWidth: 2,
+      },
+      '& input:valid:hover + fieldset': {
+        color: theme.palette.text.primary,
+        borderColor: fade(theme.palette.divider, 0.3),
+        borderWidth: 2,
+      },
+      '& input:valid:focus + fieldset': {
+        color: theme.palette.text.primary,
+        borderColor: theme.palette.divider,
+      },
+      '& input:invalid + fieldset': {
+        borderColor: theme.palette.error.main,
+        borderWidth: 2,
+      },
+      '& input:invalid:hover + fieldset': {
+        borderColor: theme.palette.error.main,
+        borderWidth: 2,
+      },
+      '& input:invalid:focus + fieldset': {
+        borderColor: theme.palette.error.main,
+        borderWidth: 2,
+      },
+    },
+  })
+)(TextField);
+
+export default BugTitleInput;

webui/src/components/CloseBugButton/CloseBugButton.tsx 🔗

@@ -1,12 +1,21 @@
 import React from 'react';
 
 import Button from '@material-ui/core/Button';
+import { makeStyles, Theme } from '@material-ui/core/styles';
+import ErrorOutlineIcon from '@material-ui/icons/ErrorOutline';
 
 import { BugFragment } from 'src/pages/bug/Bug.generated';
 import { TimelineDocument } from 'src/pages/bug/TimelineQuery.generated';
 
 import { useCloseBugMutation } from './CloseBug.generated';
 
+const useStyles = makeStyles((theme: Theme) => ({
+  closeIssueIcon: {
+    color: theme.palette.secondary.dark,
+    paddingTop: '0.1rem',
+  },
+}));
+
 interface Props {
   bug: BugFragment;
   disabled: boolean;
@@ -14,6 +23,7 @@ interface Props {
 
 function CloseBugButton({ bug, disabled }: Props) {
   const [closeBug, { loading, error }] = useCloseBugMutation();
+  const classes = useStyles();
 
   function closeBugAction() {
     closeBug({
@@ -45,8 +55,9 @@ function CloseBugButton({ bug, disabled }: Props) {
         variant="contained"
         onClick={() => closeBugAction()}
         disabled={bug.status === 'CLOSED' || disabled}
+        startIcon={<ErrorOutlineIcon className={classes.closeIssueIcon} />}
       >
-        Close issue
+        Close bug
       </Button>
     </div>
   );

webui/src/components/CommentInput/CommentInput.tsx 🔗

@@ -51,6 +51,7 @@ const a11yProps = (index: number) => ({
 
 type Props = {
   inputProps?: any;
+  inputText?: string;
   loading: boolean;
   onChange: (comment: string) => void;
 };
@@ -62,8 +63,8 @@ type Props = {
  * @param loading Disable input when component not ready yet
  * @param onChange Callback to return input value changes
  */
-function CommentInput({ inputProps, loading, onChange }: Props) {
-  const [input, setInput] = useState<string>('');
+function CommentInput({ inputProps, inputText, loading, onChange }: Props) {
+  const [input, setInput] = useState<string>(inputText ? inputText : '');
   const [tab, setTab] = useState(0);
   const classes = useStyles();
 

webui/src/components/Content/PreTag.tsx 🔗

@@ -11,7 +11,7 @@ const useStyles = makeStyles({
 
 const PreTag = (props: React.HTMLProps<HTMLPreElement>) => {
   const classes = useStyles();
-  return <pre className={classes.tag} {...props}></pre>;
+  return <pre className={classes.tag} {...props} />;
 };
 
 export default PreTag;

webui/src/components/Header/Header.tsx 🔗

@@ -1,11 +1,15 @@
 import React from 'react';
-import { Link } from 'react-router-dom';
+import { Link, useLocation } from 'react-router-dom';
 
 import AppBar from '@material-ui/core/AppBar';
+import Tab, { TabProps } from '@material-ui/core/Tab';
+import Tabs from '@material-ui/core/Tabs';
 import Toolbar from '@material-ui/core/Toolbar';
+import Tooltip from '@material-ui/core/Tooltip/Tooltip';
 import { makeStyles } from '@material-ui/core/styles';
 
 import CurrentIdentity from '../CurrentIdentity/CurrentIdentity';
+import { LightSwitch } from '../Themer';
 
 const useStyles = makeStyles((theme) => ({
   offset: {
@@ -14,35 +18,99 @@ const useStyles = makeStyles((theme) => ({
   filler: {
     flexGrow: 1,
   },
+  appBar: {
+    backgroundColor: theme.palette.primary.dark,
+    color: theme.palette.primary.contrastText,
+  },
   appTitle: {
     ...theme.typography.h6,
-    color: 'white',
+    color: theme.palette.primary.contrastText,
     textDecoration: 'none',
     display: 'flex',
     alignItems: 'center',
   },
+  lightSwitch: {
+    padding: '0 20px',
+  },
   logo: {
     height: '42px',
     marginRight: theme.spacing(2),
   },
 }));
 
+function a11yProps(index: any) {
+  return {
+    id: `nav-tab-${index}`,
+    'aria-controls': `nav-tabpanel-${index}`,
+  };
+}
+
+const DisabledTabWithTooltip = (props: TabProps) => {
+  /*The span elements around disabled tabs are needed, as the tooltip
+   * won't be triggered by disabled elements.
+   * See: https://material-ui.com/components/tooltips/#disabled-elements
+   * This must be done in a wrapper component, otherwise the Tabs component
+   * cannot pass its styles down to the Tab component, resulting in (console)
+   * warnings. This wrapper accepts the passed-down TabProps and passes them
+   * around the span element to the Tab component.
+   */
+  const msg = `This feature doesn't exist yet. Come help us build it.`;
+  return (
+    <Tooltip title={msg}>
+      <span>
+        <Tab disabled {...props} />
+      </span>
+    </Tooltip>
+  );
+};
+
 function Header() {
   const classes = useStyles();
+  const location = useLocation();
+  const [selectedTab, setTab] = React.useState(location.pathname);
+
+  const handleTabClick = (
+    event: React.ChangeEvent<{}>,
+    newTabValue: string
+  ) => {
+    setTab(newTabValue);
+  };
 
   return (
     <>
-      <AppBar position="fixed" color="primary">
+      <AppBar position="fixed" className={classes.appBar}>
         <Toolbar>
           <Link to="/" className={classes.appTitle}>
-            <img src="/logo.svg" className={classes.logo} alt="git-bug" />
+            <img src="/logo.svg" className={classes.logo} alt="git-bug logo" />
             git-bug
           </Link>
-          <div className={classes.filler}></div>
+          <div className={classes.filler} />
+          <div className={classes.lightSwitch}>
+            <LightSwitch />
+          </div>
           <CurrentIdentity />
         </Toolbar>
       </AppBar>
       <div className={classes.offset} />
+      <Tabs
+        centered
+        value={selectedTab}
+        onChange={handleTabClick}
+        aria-label="nav tabs"
+      >
+        <DisabledTabWithTooltip label="Code" value="/code" {...a11yProps(1)} />
+        <Tab label="Bugs" value="/" component={Link} to="/" {...a11yProps(2)} />
+        <DisabledTabWithTooltip
+          label="Pull Requests"
+          value="/pulls"
+          {...a11yProps(3)}
+        />
+        <DisabledTabWithTooltip
+          label="Settings"
+          value="/settings"
+          {...a11yProps(4)}
+        />
+      </Tabs>
     </>
   );
 }

webui/src/components/Themer.tsx 🔗

@@ -0,0 +1,65 @@
+import React, { createContext, useContext, useState } from 'react';
+
+import { fade, ThemeProvider } from '@material-ui/core';
+import IconButton from '@material-ui/core/IconButton/IconButton';
+import Tooltip from '@material-ui/core/Tooltip/Tooltip';
+import { Theme } from '@material-ui/core/styles';
+import { NightsStayRounded, WbSunnyRounded } from '@material-ui/icons';
+import { makeStyles } from '@material-ui/styles';
+
+const ThemeContext = createContext({
+  toggleMode: () => {},
+  mode: '',
+});
+
+const useStyles = makeStyles((theme: Theme) => ({
+  iconButton: {
+    color: fade(theme.palette.primary.contrastText, 0.5),
+  },
+}));
+
+const LightSwitch = () => {
+  const { mode, toggleMode } = useContext(ThemeContext);
+  const nextMode = mode === 'light' ? 'dark' : 'light';
+  const description = `Switch to ${nextMode} theme`;
+  const classes = useStyles();
+
+  return (
+    <Tooltip title={description}>
+      <IconButton
+        onClick={toggleMode}
+        aria-label={description}
+        className={classes.iconButton}
+      >
+        {mode === 'light' ? <WbSunnyRounded /> : <NightsStayRounded />}
+      </IconButton>
+    </Tooltip>
+  );
+};
+
+type Props = {
+  children: React.ReactNode;
+  lightTheme: Theme;
+  darkTheme: Theme;
+};
+const Themer = ({ children, lightTheme, darkTheme }: Props) => {
+  const savedMode = localStorage.getItem('themeMode');
+  const preferedMode = savedMode != null ? savedMode : 'light';
+  const [mode, setMode] = useState(preferedMode);
+
+  const toggleMode = () => {
+    const preferedMode = mode === 'light' ? 'dark' : 'light';
+    localStorage.setItem('themeMode', preferedMode);
+    setMode(preferedMode);
+  };
+
+  const preferedTheme = mode === 'dark' ? darkTheme : lightTheme;
+
+  return (
+    <ThemeContext.Provider value={{ toggleMode: toggleMode, mode: mode }}>
+      <ThemeProvider theme={preferedTheme}>{children}</ThemeProvider>
+    </ThemeContext.Provider>
+  );
+};
+
+export { Themer as default, LightSwitch };

webui/src/index.tsx 🔗

@@ -3,18 +3,17 @@ import React from 'react';
 import ReactDOM from 'react-dom';
 import { BrowserRouter } from 'react-router-dom';
 
-import ThemeProvider from '@material-ui/styles/ThemeProvider';
-
 import App from './App';
 import apolloClient from './apollo';
-import theme from './theme';
+import Themer from './components/Themer';
+import { defaultLightTheme, defaultDarkTheme } from './themes/index';
 
 ReactDOM.render(
   <ApolloProvider client={apolloClient}>
     <BrowserRouter>
-      <ThemeProvider theme={theme}>
+      <Themer lightTheme={defaultLightTheme} darkTheme={defaultDarkTheme}>
         <App />
-      </ThemeProvider>
+      </Themer>
     </BrowserRouter>
   </ApolloProvider>,
   document.getElementById('root')

webui/src/pages/bug/Bug.tsx 🔗

@@ -18,11 +18,17 @@ const useStyles = makeStyles((theme) => ({
     maxWidth: 1000,
     margin: 'auto',
     marginTop: theme.spacing(4),
-    overflow: 'hidden',
   },
   header: {
-    marginLeft: theme.spacing(3) + 40,
     marginRight: theme.spacing(2),
+    marginLeft: theme.spacing(3) + 40,
+  },
+  title: {
+    ...theme.typography.h5,
+  },
+  id: {
+    ...theme.typography.subtitle1,
+    marginLeft: theme.spacing(1),
   },
   container: {
     display: 'flex',
@@ -36,11 +42,11 @@ const useStyles = makeStyles((theme) => ({
     marginRight: theme.spacing(2),
     minWidth: 400,
   },
-  sidebar: {
+  rightSidebar: {
     marginTop: theme.spacing(2),
     flex: '0 0 200px',
   },
-  sidebarTitle: {
+  rightSidebarTitle: {
     fontWeight: 'bold',
   },
   labelList: {
@@ -59,6 +65,7 @@ const useStyles = makeStyles((theme) => ({
     ...theme.typography.body2,
   },
   commentForm: {
+    marginTop: theme.spacing(2),
     marginLeft: 48,
   },
 }));
@@ -75,10 +82,9 @@ function Bug({ bug }: Props) {
       <div className={classes.header}>
         <BugTitleForm bug={bug} />
       </div>
-
       <div className={classes.container}>
         <div className={classes.timeline}>
-          <TimelineQuery id={bug.id} />
+          <TimelineQuery bug={bug} />
           <IfLoggedIn>
             {() => (
               <div className={classes.commentForm}>
@@ -87,8 +93,8 @@ function Bug({ bug }: Props) {
             )}
           </IfLoggedIn>
         </div>
-        <div className={classes.sidebar}>
-          <span className={classes.sidebarTitle}>Labels</span>
+        <div className={classes.rightSidebar}>
+          <span className={classes.rightSidebarTitle}>Labels</span>
           <ul className={classes.labelList}>
             {bug.labels.length === 0 && (
               <span className={classes.noLabel}>None yet</span>

webui/src/pages/bug/BugQuery.tsx 🔗

@@ -3,6 +3,8 @@ import { RouteComponentProps } from 'react-router-dom';
 
 import CircularProgress from '@material-ui/core/CircularProgress';
 
+import NotFoundPage from '../notfound/NotFoundPage';
+
 import Bug from './Bug';
 import { useGetBugQuery } from './BugQuery.generated';
 
@@ -15,8 +17,8 @@ const BugQuery: React.FC<Props> = ({ match }: Props) => {
     variables: { id: match.params.id },
   });
   if (loading) return <CircularProgress />;
+  if (!data?.repository?.bug) return <NotFoundPage />;
   if (error) return <p>Error: {error}</p>;
-  if (!data?.repository?.bug) return <p>404.</p>;
   return <Bug bug={data.repository.bug} />;
 };
 

webui/src/pages/bug/CommentForm.tsx 🔗

@@ -15,7 +15,6 @@ import { TimelineDocument } from './TimelineQuery.generated';
 type StyleProps = { loading: boolean };
 const useStyles = makeStyles<Theme, StyleProps>((theme) => ({
   container: {
-    margin: theme.spacing(2, 0),
     padding: theme.spacing(0, 2, 2, 2),
   },
   textarea: {},
@@ -28,14 +27,16 @@ const useStyles = makeStyles<Theme, StyleProps>((theme) => ({
   },
   actions: {
     display: 'flex',
+    gap: '1em',
     justifyContent: 'flex-end',
   },
   greenButton: {
     marginLeft: '8px',
-    backgroundColor: '#2ea44fd9',
-    color: '#fff',
+    backgroundColor: theme.palette.success.main,
+    color: theme.palette.success.contrastText,
     '&:hover': {
-      backgroundColor: '#2ea44f',
+      backgroundColor: theme.palette.success.dark,
+      color: theme.palette.primary.contrastText,
     },
   },
 }));

webui/src/pages/bug/EditCommentForm.graphql 🔗

@@ -0,0 +1,16 @@
+#import "./MessageCommentFragment.graphql"
+#import "./MessageCreateFragment.graphql"
+
+mutation EditComment($input: EditCommentInput!) {
+  editComment(input: $input) {
+    bug {
+      id
+      timeline {
+        comments: nodes {
+          ...Create
+          ...AddComment
+        }
+      }
+    }
+  }
+}

webui/src/pages/bug/EditCommentForm.tsx 🔗

@@ -0,0 +1,123 @@
+import React, { useState, useRef } from 'react';
+
+import Button from '@material-ui/core/Button';
+import Paper from '@material-ui/core/Paper';
+import { makeStyles, Theme } from '@material-ui/core/styles';
+
+import CommentInput from '../../components/CommentInput/CommentInput';
+
+import { BugFragment } from './Bug.generated';
+import { useEditCommentMutation } from './EditCommentForm.generated';
+import { AddCommentFragment } from './MessageCommentFragment.generated';
+import { CreateFragment } from './MessageCreateFragment.generated';
+
+type StyleProps = { loading: boolean };
+const useStyles = makeStyles<Theme, StyleProps>((theme) => ({
+  container: {
+    padding: theme.spacing(0, 2, 2, 2),
+  },
+  textarea: {},
+  tabContent: {
+    margin: theme.spacing(2, 0),
+  },
+  preview: {
+    borderBottom: `solid 3px ${theme.palette.grey['200']}`,
+    minHeight: '5rem',
+  },
+  actions: {
+    display: 'flex',
+    justifyContent: 'flex-end',
+  },
+  greenButton: {
+    marginLeft: '8px',
+    backgroundColor: theme.palette.success.main,
+    color: theme.palette.success.contrastText,
+    '&:hover': {
+      backgroundColor: theme.palette.success.dark,
+      color: theme.palette.success.contrastText,
+    },
+  },
+}));
+
+type Props = {
+  bug: BugFragment;
+  comment: AddCommentFragment | CreateFragment;
+  onCancel?: () => void;
+  onPostSubmit?: (comments: any) => void;
+};
+
+function EditCommentForm({ bug, comment, onCancel, onPostSubmit }: Props) {
+  const [editComment, { loading }] = useEditCommentMutation();
+  const [message, setMessage] = useState<string>(comment.message);
+  const [inputProp, setInputProp] = useState<any>('');
+  const classes = useStyles({ loading });
+  const form = useRef<HTMLFormElement>(null);
+
+  const submit = () => {
+    editComment({
+      variables: {
+        input: {
+          prefix: bug.id,
+          message: message,
+          target: comment.id,
+        },
+      },
+    }).then((result) => {
+      const comments = result.data?.editComment.bug.timeline.comments as (
+        | AddCommentFragment
+        | CreateFragment
+      )[];
+      // NOTE: Searching for the changed comment could be dropped if GraphQL
+      // gained a filter-by-id argument for timeline items.
+      const modifiedComment = comments.find((elem) => elem.id === comment.id);
+      if (onPostSubmit) onPostSubmit(modifiedComment);
+    });
+    resetForm();
+  };
+
+  function resetForm() {
+    setInputProp({
+      value: '',
+    });
+  }
+
+  const handleSubmit = (e: React.FormEvent<HTMLFormElement>) => {
+    e.preventDefault();
+    if (message.length > 0) submit();
+  };
+
+  function getCancelButton() {
+    return (
+      <Button onClick={onCancel} variant="contained">
+        Cancel
+      </Button>
+    );
+  }
+
+  return (
+    <Paper className={classes.container}>
+      <form onSubmit={handleSubmit} ref={form}>
+        <CommentInput
+          inputProps={inputProp}
+          loading={loading}
+          onChange={(message: string) => setMessage(message)}
+          inputText={comment.message}
+        />
+        <div className={classes.actions}>
+          {onCancel && getCancelButton()}
+          <Button
+            className={classes.greenButton}
+            variant="contained"
+            color="primary"
+            type="submit"
+            disabled={loading || message.length === 0}
+          >
+            Update Comment
+          </Button>
+        </div>
+      </form>
+    </Paper>
+  );
+}
+
+export default EditCommentForm;

webui/src/pages/bug/Message.tsx 🔗

@@ -1,14 +1,22 @@
-import React from 'react';
+import React, { useState } from 'react';
 
+import IconButton from '@material-ui/core/IconButton';
 import Paper from '@material-ui/core/Paper';
+import Tooltip from '@material-ui/core/Tooltip/Tooltip';
 import { makeStyles } from '@material-ui/core/styles';
+import EditIcon from '@material-ui/icons/Edit';
+import HistoryIcon from '@material-ui/icons/History';
 
 import Author, { Avatar } from 'src/components/Author';
 import Content from 'src/components/Content';
 import Date from 'src/components/Date';
+import IfLoggedIn from 'src/components/IfLoggedIn/IfLoggedIn';
 
+import { BugFragment } from './Bug.generated';
+import EditCommentForm from './EditCommentForm';
 import { AddCommentFragment } from './MessageCommentFragment.generated';
 import { CreateFragment } from './MessageCreateFragment.generated';
+import MessageHistoryDialog from './MessageHistoryDialog';
 
 const useStyles = makeStyles((theme) => ({
   author: {
@@ -27,11 +35,13 @@ const useStyles = makeStyles((theme) => ({
   },
   header: {
     ...theme.typography.body1,
-    color: '#444',
     padding: '0.5rem 1rem',
-    borderBottom: '1px solid #ddd',
+    borderBottom: `1px solid ${theme.palette.divider}`,
     display: 'flex',
-    backgroundColor: '#e2f1ff',
+    borderTopRightRadius: theme.shape.borderRadius,
+    borderTopLeftRadius: theme.shape.borderRadius,
+    backgroundColor: theme.palette.info.main,
+    color: theme.palette.info.contrastText,
   },
   title: {
     flex: 1,
@@ -47,32 +57,135 @@ const useStyles = makeStyles((theme) => ({
   },
   body: {
     ...theme.typography.body2,
-    padding: '0 1rem',
+    padding: '0.5rem',
+  },
+  headerActions: {
+    color: theme.palette.info.contrastText,
+    padding: '0rem',
+    marginLeft: theme.spacing(1),
+    fontSize: '0.75rem',
+    '&:hover': {
+      backgroundColor: 'inherit',
+    },
   },
 }));
 
+type HistBtnProps = {
+  bugId: string;
+  commentId: string;
+};
+function HistoryMenuToggleButton({ bugId, commentId }: HistBtnProps) {
+  const classes = useStyles();
+  const [open, setOpen] = React.useState(false);
+
+  const handleClickOpen = () => {
+    setOpen(true);
+  };
+
+  const handleClose = () => {
+    setOpen(false);
+  };
+
+  return (
+    <div>
+      <IconButton
+        aria-label="more"
+        aria-controls="long-menu"
+        aria-haspopup="true"
+        onClick={handleClickOpen}
+        className={classes.headerActions}
+      >
+        <HistoryIcon />
+      </IconButton>
+      {
+        // Render MessageHistoryDialog only when open, to avoid fetching the
+        // history before the history menu is actually opened.
+        open && (
+          <MessageHistoryDialog
+            bugId={bugId}
+            commentId={commentId}
+            open={open}
+            onClose={handleClose}
+          />
+        )
+      }
+    </div>
+  );
+}
+
 type Props = {
+  bug: BugFragment;
   op: AddCommentFragment | CreateFragment;
 };
-
-function Message({ op }: Props) {
+function Message({ bug, op }: Props) {
   const classes = useStyles();
-  return (
-    <article className={classes.container}>
-      <Avatar author={op.author} className={classes.avatar} />
+  const [editMode, switchToEditMode] = useState(false);
+  const [comment, setComment] = useState(op);
+
+  const editComment = (id: String) => {
+    switchToEditMode(true);
+  };
+
+  function readMessageView() {
+    return (
       <Paper elevation={1} className={classes.bubble}>
         <header className={classes.header}>
           <div className={classes.title}>
-            <Author className={classes.author} author={op.author} />
+            <Author className={classes.author} author={comment.author} />
             <span> commented </span>
-            <Date date={op.createdAt} />
+            <Date date={comment.createdAt} />
           </div>
-          {op.edited && <div className={classes.tag}>Edited</div>}
+          {comment.edited && (
+            <HistoryMenuToggleButton bugId={bug.id} commentId={comment.id} />
+          )}
+          <IfLoggedIn>
+            {() => (
+              <Tooltip title="Edit Message" placement="top" arrow={true}>
+                <IconButton
+                  disableRipple
+                  className={classes.headerActions}
+                  aria-label="edit message"
+                  onClick={() => editComment(comment.id)}
+                >
+                  <EditIcon />
+                </IconButton>
+              </Tooltip>
+            )}
+          </IfLoggedIn>
         </header>
         <section className={classes.body}>
-          <Content markdown={op.message} />
+          <Content markdown={comment.message} />
         </section>
       </Paper>
+    );
+  }
+
+  function editMessageView() {
+    const cancelEdition = () => {
+      switchToEditMode(false);
+    };
+
+    const onPostSubmit = (comment: AddCommentFragment | CreateFragment) => {
+      setComment(comment);
+      switchToEditMode(false);
+    };
+
+    return (
+      <div className={classes.bubble}>
+        <EditCommentForm
+          bug={bug}
+          onCancel={cancelEdition}
+          onPostSubmit={onPostSubmit}
+          comment={comment}
+        />
+      </div>
+    );
+  }
+
+  return (
+    <article className={classes.container}>
+      <Avatar author={comment.author} className={classes.avatar} />
+      {editMode ? editMessageView() : readMessageView()}
     </article>
   );
 }

webui/src/pages/bug/MessageHistory.graphql 🔗

@@ -0,0 +1,15 @@
+#import "./MessageCommentFragment.graphql"
+#import "./MessageCreateFragment.graphql"
+
+query MessageHistory($bugIdPrefix: String!) {
+  repository {
+    bug(prefix: $bugIdPrefix) {
+      timeline {
+        comments: nodes {
+          ...Create
+          ...AddComment
+        }
+      }
+    }
+  }
+}

webui/src/pages/bug/MessageHistoryDialog.tsx 🔗

@@ -0,0 +1,235 @@
+import moment from 'moment';
+import React from 'react';
+import Moment from 'react-moment';
+
+import MuiAccordion from '@material-ui/core/Accordion';
+import MuiAccordionDetails from '@material-ui/core/AccordionDetails';
+import MuiAccordionSummary from '@material-ui/core/AccordionSummary';
+import CircularProgress from '@material-ui/core/CircularProgress';
+import Dialog from '@material-ui/core/Dialog';
+import MuiDialogContent from '@material-ui/core/DialogContent';
+import MuiDialogTitle from '@material-ui/core/DialogTitle';
+import Grid from '@material-ui/core/Grid';
+import IconButton from '@material-ui/core/IconButton';
+import Tooltip from '@material-ui/core/Tooltip/Tooltip';
+import Typography from '@material-ui/core/Typography';
+import {
+  createStyles,
+  Theme,
+  withStyles,
+  WithStyles,
+} from '@material-ui/core/styles';
+import CloseIcon from '@material-ui/icons/Close';
+import ExpandMoreIcon from '@material-ui/icons/ExpandMore';
+
+import { AddCommentFragment } from './MessageCommentFragment.generated';
+import { CreateFragment } from './MessageCreateFragment.generated';
+import { useMessageHistoryQuery } from './MessageHistory.generated';
+
+const styles = (theme: Theme) =>
+  createStyles({
+    root: {
+      margin: 0,
+      padding: theme.spacing(2),
+    },
+    closeButton: {
+      position: 'absolute',
+      right: theme.spacing(1),
+      top: theme.spacing(1),
+    },
+  });
+
+export interface DialogTitleProps extends WithStyles<typeof styles> {
+  id: string;
+  children: React.ReactNode;
+  onClose: () => void;
+}
+
+const DialogTitle = withStyles(styles)((props: DialogTitleProps) => {
+  const { children, classes, onClose, ...other } = props;
+  return (
+    <MuiDialogTitle disableTypography className={classes.root} {...other}>
+      <Typography variant="h6">{children}</Typography>
+      {onClose ? (
+        <IconButton
+          aria-label="close"
+          className={classes.closeButton}
+          onClick={onClose}
+        >
+          <CloseIcon />
+        </IconButton>
+      ) : null}
+    </MuiDialogTitle>
+  );
+});
+
+const DialogContent = withStyles((theme: Theme) => ({
+  root: {
+    padding: theme.spacing(2),
+  },
+}))(MuiDialogContent);
+
+const Accordion = withStyles({
+  root: {
+    border: '1px solid rgba(0, 0, 0, .125)',
+    boxShadow: 'none',
+    '&:not(:last-child)': {
+      borderBottom: 0,
+    },
+    '&:before': {
+      display: 'none',
+    },
+    '&$expanded': {
+      margin: 'auto',
+    },
+  },
+  expanded: {},
+})(MuiAccordion);
+
+const AccordionSummary = withStyles((theme) => ({
+  root: {
+    backgroundColor: theme.palette.primary.light,
+    borderBottomWidth: '1px',
+    borderBottomStyle: 'solid',
+    borderBottomColor: theme.palette.divider,
+    marginBottom: -1,
+    minHeight: 56,
+    '&$expanded': {
+      minHeight: 56,
+    },
+  },
+  content: {
+    '&$expanded': {
+      margin: '12px 0',
+    },
+  },
+  expanded: {},
+}))(MuiAccordionSummary);
+
+const AccordionDetails = withStyles((theme) => ({
+  root: {
+    padding: theme.spacing(2),
+  },
+}))(MuiAccordionDetails);
+
+type Props = {
+  bugId: string;
+  commentId: string;
+  open: boolean;
+  onClose: () => void;
+};
+function MessageHistoryDialog({ bugId, commentId, open, onClose }: Props) {
+  const [expanded, setExpanded] = React.useState<string | false>('panel0');
+
+  const { loading, error, data } = useMessageHistoryQuery({
+    variables: { bugIdPrefix: bugId },
+  });
+  if (loading) {
+    return (
+      <Dialog
+        onClose={onClose}
+        aria-labelledby="customized-dialog-title"
+        open={open}
+        fullWidth
+        maxWidth="sm"
+      >
+        <DialogTitle id="customized-dialog-title" onClose={onClose}>
+          Loading...
+        </DialogTitle>
+        <DialogContent dividers>
+          <Grid container justify="center">
+            <CircularProgress />
+          </Grid>
+        </DialogContent>
+      </Dialog>
+    );
+  }
+  if (error) {
+    return (
+      <Dialog
+        onClose={onClose}
+        aria-labelledby="customized-dialog-title"
+        open={open}
+        fullWidth
+        maxWidth="sm"
+      >
+        <DialogTitle id="customized-dialog-title" onClose={onClose}>
+          Something went wrong...
+        </DialogTitle>
+        <DialogContent dividers>
+          <p>Error: {error}</p>
+        </DialogContent>
+      </Dialog>
+    );
+  }
+
+  const comments = data?.repository?.bug?.timeline.comments as (
+    | AddCommentFragment
+    | CreateFragment
+  )[];
+  // NOTE: Searching for the changed comment could be dropped if GraphQL
+  // gained a filter-by-id argument for timeline items.
+  const comment = comments.find((elem) => elem.id === commentId);
+  // Sort by most recent edit first. A copy of the history array is required
+  // because Array.prototype.reverse() modifies it in place.
+  const history = comment?.history.slice().reverse();
+  const editCount = history?.length === undefined ? 0 : history?.length - 1;
+
+  const handleChange = (panel: string) => (
+    event: React.ChangeEvent<{}>,
+    newExpanded: boolean
+  ) => {
+    setExpanded(newExpanded ? panel : false);
+  };
+
+  const getSummary = (index: number, date: Date) => {
+    const desc =
+      index === editCount ? 'Created ' : `#${editCount - index} • Edited `;
+    const mostRecent = index === 0 ? ' (most recent)' : '';
+    return (
+      <>
+        <Tooltip title={moment(date).format('LLLL')}>
+          <span>
+            {desc}
+            <Moment date={date} format="on ll" />
+            {mostRecent}
+          </span>
+        </Tooltip>
+      </>
+    );
+  };
+
+  return (
+    <Dialog
+      onClose={onClose}
+      aria-labelledby="customized-dialog-title"
+      open={open}
+      fullWidth
+      maxWidth="md"
+    >
+      <DialogTitle id="customized-dialog-title" onClose={onClose}>
+        {`Edited ${editCount} ${editCount > 1 ? 'times' : 'time'}.`}
+      </DialogTitle>
+      <DialogContent dividers>
+        {history?.map((edit, index) => (
+          <Accordion
+            square
+            expanded={expanded === 'panel' + index}
+            onChange={handleChange('panel' + index)}
+          >
+            <AccordionSummary
+              expandIcon={<ExpandMoreIcon />}
+              aria-controls="panel1d-content"
+              id="panel1d-header"
+            >
+              <Typography>{getSummary(index, edit.date)}</Typography>
+            </AccordionSummary>
+            <AccordionDetails>{edit.message}</AccordionDetails>
+          </Accordion>
+        ))}
+      </DialogContent>
+    </Dialog>
+  );
+}
+
+export default MessageHistoryDialog;

webui/src/pages/bug/Timeline.tsx 🔗

@@ -2,6 +2,7 @@ import React from 'react';
 
 import { makeStyles } from '@material-ui/core/styles';
 
+import { BugFragment } from './Bug.generated';
 import LabelChange from './LabelChange';
 import Message from './Message';
 import SetStatus from './SetStatus';
@@ -18,9 +19,10 @@ const useStyles = makeStyles((theme) => ({
 
 type Props = {
   ops: Array<TimelineItemFragment>;
+  bug: BugFragment;
 };
 
-function Timeline({ ops }: Props) {
+function Timeline({ bug, ops }: Props) {
   const classes = useStyles();
 
   return (
@@ -28,9 +30,9 @@ function Timeline({ ops }: Props) {
       {ops.map((op, index) => {
         switch (op.__typename) {
           case 'CreateTimelineItem':
-            return <Message key={index} op={op} />;
+            return <Message key={index} op={op} bug={bug} />;
           case 'AddCommentTimelineItem':
-            return <Message key={index} op={op} />;
+            return <Message key={index} op={op} bug={bug} />;
           case 'LabelChangeTimelineItem':
             return <LabelChange key={index} op={op} />;
           case 'SetTitleTimelineItem':

webui/src/pages/bug/TimelineQuery.tsx 🔗

@@ -2,17 +2,18 @@ import React from 'react';
 
 import CircularProgress from '@material-ui/core/CircularProgress';
 
+import { BugFragment } from './Bug.generated';
 import Timeline from './Timeline';
 import { useTimelineQuery } from './TimelineQuery.generated';
 
 type Props = {
-  id: string;
+  bug: BugFragment;
 };
 
-const TimelineQuery = ({ id }: Props) => {
+const TimelineQuery = ({ bug }: Props) => {
   const { loading, error, data } = useTimelineQuery({
     variables: {
-      id,
+      id: bug.id,
       first: 100,
     },
   });
@@ -25,7 +26,7 @@ const TimelineQuery = ({ id }: Props) => {
     return null;
   }
 
-  return <Timeline ops={nodes} />;
+  return <Timeline ops={nodes} bug={bug} />;
 };
 
 export default TimelineQuery;

webui/src/pages/list/BugRow.tsx 🔗

@@ -6,6 +6,7 @@ import TableRow from '@material-ui/core/TableRow/TableRow';
 import Tooltip from '@material-ui/core/Tooltip/Tooltip';
 import { makeStyles } from '@material-ui/core/styles';
 import CheckCircleOutline from '@material-ui/icons/CheckCircleOutline';
+import CommentOutlinedIcon from '@material-ui/icons/CommentOutlined';
 import ErrorOutline from '@material-ui/icons/ErrorOutline';
 
 import Date from 'src/components/Date';
@@ -74,6 +75,13 @@ const useStyles = makeStyles((theme) => ({
       display: 'inline-block',
     },
   },
+  commentCount: {
+    fontSize: '1rem',
+    marginLeft: theme.spacing(0.5),
+  },
+  commentCountCell: {
+    display: 'inline-flex',
+  },
 }));
 
 type Props = {
@@ -82,6 +90,8 @@ type Props = {
 
 function BugRow({ bug }: Props) {
   const classes = useStyles();
+  // Subtract 1 from totalCount as 1 comment is the bug description
+  const commentCount = bug.comments.totalCount - 1;
   return (
     <TableRow hover>
       <TableCell className={classes.cell}>
@@ -105,6 +115,12 @@ function BugRow({ bug }: Props) {
             &nbsp;by {bug.author.displayName}
           </div>
         </div>
+        {commentCount > 0 && (
+          <span className={classes.commentCountCell}>
+            <CommentOutlinedIcon aria-label="Comment count" />
+            <span className={classes.commentCount}>{commentCount}</span>
+          </span>
+        )}
       </TableCell>
     </TableRow>
   );

webui/src/pages/list/Filter.tsx 🔗

@@ -1,14 +1,33 @@
 import clsx from 'clsx';
 import { LocationDescriptor } from 'history';
-import React, { useState, useRef } from 'react';
+import React, { useRef, useState, useEffect } from 'react';
 import { Link } from 'react-router-dom';
 
 import Menu from '@material-ui/core/Menu';
 import MenuItem from '@material-ui/core/MenuItem';
 import { SvgIconProps } from '@material-ui/core/SvgIcon';
-import { makeStyles } from '@material-ui/core/styles';
+import TextField from '@material-ui/core/TextField';
+import { makeStyles, withStyles } from '@material-ui/core/styles';
 import ArrowDropDown from '@material-ui/icons/ArrowDropDown';
 
+const CustomTextField = withStyles((theme) => ({
+  root: {
+    margin: '0 8px 12px 8px',
+    '& label.Mui-focused': {
+      margin: '0 2px',
+      color: theme.palette.text.secondary,
+    },
+    '& .MuiInput-underline::before': {
+      borderBottomColor: theme.palette.divider,
+    },
+    '& .MuiInput-underline::after': {
+      borderBottomColor: theme.palette.divider,
+    },
+  },
+}))(TextField);
+
+const ITEM_HEIGHT = 48;
+
 export type Query = { [key: string]: string[] };
 
 function parse(query: string): Query {
@@ -65,7 +84,7 @@ function stringify(params: Query): string {
 const useStyles = makeStyles((theme) => ({
   element: {
     ...theme.typography.body2,
-    color: '#444',
+    color: theme.palette.text.secondary,
     padding: theme.spacing(0, 1),
     fontWeight: 400,
     textDecoration: 'none',
@@ -75,7 +94,7 @@ const useStyles = makeStyles((theme) => ({
   },
   itemActive: {
     fontWeight: 600,
-    color: '#333',
+    color: theme.palette.text.primary,
   },
   icon: {
     paddingRight: theme.spacing(0.5),
@@ -90,6 +109,7 @@ type FilterDropdownProps = {
   itemActive: (key: string) => boolean;
   icon?: React.ComponentType<SvgIconProps>;
   to: (key: string) => LocationDescriptor;
+  hasFilter?: boolean;
 } & React.ButtonHTMLAttributes<HTMLButtonElement>;
 
 function FilterDropdown({
@@ -98,12 +118,19 @@ function FilterDropdown({
   itemActive,
   icon: Icon,
   to,
+  hasFilter,
   ...props
 }: FilterDropdownProps) {
   const [open, setOpen] = useState(false);
+  const [filter, setFilter] = useState<string>('');
   const buttonRef = useRef<HTMLButtonElement>(null);
+  const searchRef = useRef<HTMLButtonElement>(null);
   const classes = useStyles({ active: false });
 
+  useEffect(() => {
+    searchRef && searchRef.current && searchRef.current.focus();
+  }, [filter]);
+
   const content = (
     <>
       {Icon && <Icon fontSize="small" classes={{ root: classes.icon }} />}
@@ -124,6 +151,7 @@ function FilterDropdown({
       </button>
       <Menu
         getContentAnchorEl={null}
+        ref={searchRef}
         anchorOrigin={{
           vertical: 'bottom',
           horizontal: 'left',
@@ -135,18 +163,37 @@ function FilterDropdown({
         open={open}
         onClose={() => setOpen(false)}
         anchorEl={buttonRef.current}
+        PaperProps={{
+          style: {
+            maxHeight: ITEM_HEIGHT * 4.5,
+            width: '25ch',
+          },
+        }}
       >
-        {dropdown.map(([key, value]) => (
-          <MenuItem
-            component={Link}
-            to={to(key)}
-            className={itemActive(key) ? classes.itemActive : undefined}
-            onClick={() => setOpen(false)}
-            key={key}
-          >
-            {value}
-          </MenuItem>
-        ))}
+        {hasFilter && (
+          <CustomTextField
+            onChange={(e) => {
+              const { value } = e.target;
+              setFilter(value);
+            }}
+            onKeyDown={(e) => e.stopPropagation()}
+            value={filter}
+            label={`Filter ${children}`}
+          />
+        )}
+        {dropdown
+          .filter((d) => d[1].toLowerCase().includes(filter.toLowerCase()))
+          .map(([key, value]) => (
+            <MenuItem
+              component={Link}
+              to={to(key)}
+              className={itemActive(key) ? classes.itemActive : undefined}
+              onClick={() => setOpen(false)}
+              key={key}
+            >
+              {value}
+            </MenuItem>
+          ))}
       </Menu>
     </>
   );
@@ -158,6 +205,7 @@ export type FilterProps = {
   icon?: React.ComponentType<SvgIconProps>;
   children: React.ReactNode;
 };
+
 function Filter({ active, to, children, icon: Icon }: FilterProps) {
   const classes = useStyles();
 

webui/src/pages/list/FilterToolbar.tsx 🔗

@@ -8,19 +8,21 @@ import CheckCircleOutline from '@material-ui/icons/CheckCircleOutline';
 import ErrorOutline from '@material-ui/icons/ErrorOutline';
 
 import {
+  Filter,
   FilterDropdown,
   FilterProps,
-  Filter,
   parse,
-  stringify,
   Query,
+  stringify,
 } from './Filter';
 import { useBugCountQuery } from './FilterToolbar.generated';
+import { useListIdentitiesQuery } from './ListIdentities.generated';
+import { useListLabelsQuery } from './ListLabels.generated';
 
 const useStyles = makeStyles((theme) => ({
   toolbar: {
-    backgroundColor: theme.palette.grey['100'],
-    borderColor: theme.palette.grey['300'],
+    backgroundColor: theme.palette.primary.light,
+    borderColor: theme.palette.divider,
     borderWidth: '1px 0',
     borderStyle: 'solid',
     margin: theme.spacing(0, -1),
@@ -35,12 +37,13 @@ type CountingFilterProps = {
   query: string; // the query used as a source to count the number of element
   children: React.ReactNode;
 } & FilterProps;
+
 function CountingFilter({ query, children, ...props }: CountingFilterProps) {
   const { data, loading, error } = useBugCountQuery({
     variables: { query },
   });
 
-  var prefix;
+  let prefix;
   if (loading) prefix = '...';
   else if (error || !data?.repository) prefix = '???';
   // TODO: better prefixes & error handling
@@ -57,14 +60,44 @@ type Props = {
   query: string;
   queryLocation: (query: string) => LocationDescriptor;
 };
+
 function FilterToolbar({ query, queryLocation }: Props) {
   const classes = useStyles();
   const params: Query = parse(query);
+  const { data: identitiesData } = useListIdentitiesQuery();
+  const { data: labelsData } = useListLabelsQuery();
+
+  let identities: any = [];
+  let labels: any = [];
+
+  if (
+    identitiesData?.repository &&
+    identitiesData.repository.allIdentities &&
+    identitiesData.repository.allIdentities.nodes
+  ) {
+    identities = identitiesData.repository.allIdentities.nodes.map((node) => [
+      node.name,
+      node.name,
+    ]);
+  }
+
+  if (
+    labelsData?.repository &&
+    labelsData.repository.validLabels &&
+    labelsData.repository.validLabels.nodes
+  ) {
+    labels = labelsData.repository.validLabels.nodes.map((node) => [
+      node.name,
+      node.name,
+    ]);
+  }
 
   const hasKey = (key: string): boolean =>
     params[key] && params[key].length > 0;
   const hasValue = (key: string, value: string): boolean =>
     hasKey(key) && params[key].includes(value);
+  const containsValue = (key: string, value: string): boolean =>
+    hasKey(key) && params[key].indexOf(value) !== -1;
   const loc = pipe(stringify, queryLocation);
   const replaceParam = (key: string, value: string) => (
     params: Query
@@ -78,6 +111,20 @@ function FilterToolbar({ query, queryLocation }: Props) {
     ...params,
     [key]: params[key] && params[key].includes(value) ? [] : [value],
   });
+  const toggleOrAddParam = (key: string, value: string) => (
+    params: Query
+  ): Query => {
+    const values = params[key];
+    return {
+      ...params,
+      [key]:
+        params[key] && params[key].includes(value)
+          ? values.filter((v) => v !== value)
+          : values
+          ? [...values, value]
+          : [value],
+    };
+  };
   const clearParam = (key: string) => (params: Query): Query => ({
     ...params,
     [key]: [],
@@ -115,6 +162,22 @@ function FilterToolbar({ query, queryLocation }: Props) {
       <Filter active={hasKey('author')}>Author</Filter>
       <Filter active={hasKey('label')}>Label</Filter>
       */}
+      <FilterDropdown
+        dropdown={identities}
+        itemActive={(key) => hasValue('author', key)}
+        to={(key) => pipe(toggleOrAddParam('author', key), loc)(params)}
+        hasFilter
+      >
+        Author
+      </FilterDropdown>
+      <FilterDropdown
+        dropdown={labels}
+        itemActive={(key) => containsValue('label', key)}
+        to={(key) => pipe(toggleOrAddParam('label', key), loc)(params)}
+        hasFilter
+      >
+        Labels
+      </FilterDropdown>
       <FilterDropdown
         dropdown={[
           ['id', 'ID'],
@@ -124,7 +187,7 @@ function FilterToolbar({ query, queryLocation }: Props) {
           ['edit-asc', 'Least recently updated'],
         ]}
         itemActive={(key) => hasValue('sort', key)}
-        to={(key) => pipe(replaceParam('sort', key), loc)(params)}
+        to={(key) => pipe(toggleParam('sort', key), loc)(params)}
       >
         Sort
       </FilterDropdown>

webui/src/pages/list/ListQuery.tsx 🔗

@@ -1,19 +1,23 @@
 import { ApolloError } from '@apollo/client';
+import { pipe } from '@arrows/composition';
 import React, { useState, useEffect, useRef } from 'react';
 import { useLocation, useHistory, Link } from 'react-router-dom';
 
-import { Button } from '@material-ui/core';
+import { Button, FormControl, Menu, MenuItem } from '@material-ui/core';
 import IconButton from '@material-ui/core/IconButton';
 import InputBase from '@material-ui/core/InputBase';
 import Paper from '@material-ui/core/Paper';
-import { fade, makeStyles, Theme } from '@material-ui/core/styles';
+import { makeStyles, Theme } from '@material-ui/core/styles';
+import ArrowDropDownIcon from '@material-ui/icons/ArrowDropDown';
 import ErrorOutline from '@material-ui/icons/ErrorOutline';
 import KeyboardArrowLeft from '@material-ui/icons/KeyboardArrowLeft';
 import KeyboardArrowRight from '@material-ui/icons/KeyboardArrowRight';
 import Skeleton from '@material-ui/lab/Skeleton';
 
+import { useCurrentIdentityQuery } from '../../components/CurrentIdentity/CurrentIdentity.generated';
 import IfLoggedIn from 'src/components/IfLoggedIn/IfLoggedIn';
 
+import { parse, Query, stringify } from './Filter';
 import FilterToolbar from './FilterToolbar';
 import List from './List';
 import { useListBugsQuery } from './ListQuery.generated';
@@ -35,33 +39,27 @@ const useStyles = makeStyles<Theme, StylesProps>((theme) => ({
   },
   header: {
     display: 'flex',
-    padding: theme.spacing(2),
-    '& > h1': {
-      ...theme.typography.h6,
-      margin: theme.spacing(0, 2),
-    },
-    alignItems: 'center',
-    justifyContent: 'space-between',
+    padding: theme.spacing(1),
   },
   filterissueLabel: {
     fontSize: '14px',
     fontWeight: 'bold',
     paddingRight: '12px',
   },
-  filterissueContainer: {
+  form: {
     display: 'flex',
-    flexDirection: 'row',
-    alignItems: 'flex-start',
-    justifyContents: 'left',
+    flexGrow: 1,
+    marginRight: theme.spacing(1),
   },
   search: {
     borderRadius: theme.shape.borderRadius,
-    borderColor: fade(theme.palette.primary.main, 0.2),
+    color: theme.palette.text.secondary,
+    borderColor: theme.palette.divider,
     borderStyle: 'solid',
     borderWidth: '1px',
-    backgroundColor: fade(theme.palette.primary.main, 0.05),
+    backgroundColor: theme.palette.primary.light,
     padding: theme.spacing(0, 1),
-    width: ({ searching }) => (searching ? '20rem' : '15rem'),
+    width: '100%',
     transition: theme.transitions.create([
       'width',
       'borderColor',
@@ -69,13 +67,11 @@ const useStyles = makeStyles<Theme, StylesProps>((theme) => ({
     ]),
   },
   searchFocused: {
-    borderColor: fade(theme.palette.primary.main, 0.4),
     backgroundColor: theme.palette.background.paper,
-    width: '20rem!important',
   },
   placeholderRow: {
     padding: theme.spacing(1),
-    borderBottomColor: theme.palette.grey['300'],
+    borderBottomColor: theme.palette.divider,
     borderBottomWidth: '1px',
     borderBottomStyle: 'solid',
     display: 'flex',
@@ -91,7 +87,8 @@ const useStyles = makeStyles<Theme, StylesProps>((theme) => ({
     ...theme.typography.h5,
     padding: theme.spacing(8),
     textAlign: 'center',
-    borderBottomColor: theme.palette.grey['300'],
+    color: theme.palette.text.hint,
+    borderBottomColor: theme.palette.divider,
     borderBottomWidth: '1px',
     borderBottomStyle: 'solid',
     '& > p': {
@@ -99,21 +96,25 @@ const useStyles = makeStyles<Theme, StylesProps>((theme) => ({
     },
   },
   errorBox: {
-    color: theme.palette.error.main,
+    color: theme.palette.error.dark,
     '& > pre': {
       fontSize: '1rem',
       textAlign: 'left',
-      backgroundColor: theme.palette.grey['900'],
-      color: theme.palette.common.white,
+      borderColor: theme.palette.divider,
+      borderWidth: '1px',
+      borderRadius: theme.shape.borderRadius,
+      borderStyle: 'solid',
+      color: theme.palette.text.primary,
       marginTop: theme.spacing(4),
       padding: theme.spacing(2, 3),
     },
   },
   greenButton: {
-    backgroundColor: '#2ea44fd9',
-    color: '#fff',
+    backgroundColor: theme.palette.success.main,
+    color: theme.palette.success.contrastText,
     '&:hover': {
-      backgroundColor: '#2ea44f',
+      backgroundColor: theme.palette.success.dark,
+      color: theme.palette.primary.contrastText,
     },
   },
 }));
@@ -188,6 +189,8 @@ function ListQuery() {
   const query = params.has('q') ? params.get('q') || '' : 'status:open';
 
   const [input, setInput] = useState(query);
+  const [filterMenuIsOpen, setFilterMenuIsOpen] = useState(false);
+  const filterButtonRef = useRef<HTMLButtonElement>(null);
 
   const classes = useStyles({ searching: !!input });
 
@@ -289,37 +292,87 @@ function ListQuery() {
     history.push(queryLocation(input));
   };
 
+  const {
+    loading: ciqLoading,
+    error: ciqError,
+    data: ciqData,
+  } = useCurrentIdentityQuery();
+  if (ciqError || ciqLoading || !ciqData?.repository?.userIdentity) {
+    return null;
+  }
+  const user = ciqData.repository.userIdentity;
+
+  const loc = pipe(stringify, queryLocation);
+  const qparams: Query = parse(query);
+  const replaceParam = (key: string, value: string) => (
+    params: Query
+  ): Query => ({
+    ...params,
+    [key]: [value],
+  });
+
   return (
     <Paper className={classes.main}>
       <header className={classes.header}>
-        <div className="filterissueContainer">
-          <form onSubmit={formSubmit}>
-            <label className={classes.filterissueLabel} htmlFor="issuefilter">
-              Filter
-            </label>
-            <InputBase
-              id="issuefilter"
-              placeholder="Filter"
-              value={input}
-              onInput={(e: any) => setInput(e.target.value)}
-              classes={{
-                root: classes.search,
-                focused: classes.searchFocused,
+        <form className={classes.form} onSubmit={formSubmit}>
+          <FormControl>
+            <Button
+              aria-haspopup="true"
+              ref={filterButtonRef}
+              onClick={() => setFilterMenuIsOpen(true)}
+            >
+              Filter <ArrowDropDownIcon />
+            </Button>
+            <Menu
+              open={filterMenuIsOpen}
+              onClose={() => setFilterMenuIsOpen(false)}
+              getContentAnchorEl={null}
+              anchorEl={filterButtonRef.current}
+              anchorOrigin={{
+                vertical: 'bottom',
+                horizontal: 'left',
               }}
-            />
-            <button type="submit" hidden>
-              Search
-            </button>
-          </form>
-        </div>
+              transformOrigin={{
+                vertical: 'top',
+                horizontal: 'left',
+              }}
+            >
+              <MenuItem
+                component={Link}
+                to={pipe(
+                  replaceParam('author', user.displayName),
+                  replaceParam('sort', 'creation'),
+                  loc
+                )(qparams)}
+                onClick={() => setFilterMenuIsOpen(false)}
+              >
+                Your newest issues
+              </MenuItem>
+            </Menu>
+          </FormControl>
+          <InputBase
+            id="issuefilter"
+            placeholder="Filter"
+            value={input}
+            onInput={(e: any) => setInput(e.target.value)}
+            classes={{
+              root: classes.search,
+              focused: classes.searchFocused,
+            }}
+          />
+          <button type="submit" hidden>
+            Search
+          </button>
+        </form>
         <IfLoggedIn>
           {() => (
             <Button
               className={classes.greenButton}
               variant="contained"
-              href="/new"
+              component={Link}
+              to="/new"
             >
-              New issue
+              New bug
             </Button>
           )}
         </IfLoggedIn>

webui/src/pages/new/NewBugPage.tsx 🔗

@@ -1,10 +1,10 @@
 import React, { FormEvent, useState } from 'react';
+import { useHistory } from 'react-router-dom';
 
-import { Button } from '@material-ui/core';
-import Paper from '@material-ui/core/Paper';
-import TextField from '@material-ui/core/TextField/TextField';
-import { fade, makeStyles, Theme } from '@material-ui/core/styles';
+import { Button, Paper } from '@material-ui/core';
+import { makeStyles, Theme } from '@material-ui/core/styles';
 
+import BugTitleInput from '../../components/BugTitleForm/BugTitleInput';
 import CommentInput from '../../components/CommentInput/CommentInput';
 
 import { useNewBugMutation } from './NewBug.generated';
@@ -21,19 +21,6 @@ const useStyles = makeStyles((theme: Theme) => ({
     padding: theme.spacing(2),
     overflow: 'hidden',
   },
-  titleInput: {
-    borderRadius: theme.shape.borderRadius,
-    borderColor: fade(theme.palette.primary.main, 0.2),
-    borderStyle: 'solid',
-    borderWidth: '1px',
-    backgroundColor: fade(theme.palette.primary.main, 0.05),
-    padding: theme.spacing(0, 0),
-    transition: theme.transitions.create([
-      'width',
-      'borderColor',
-      'backgroundColor',
-    ]),
-  },
   form: {
     display: 'flex',
     flexDirection: 'column',
@@ -43,10 +30,11 @@ const useStyles = makeStyles((theme: Theme) => ({
     justifyContent: 'flex-end',
   },
   greenButton: {
-    backgroundColor: '#2ea44fd9',
-    color: '#fff',
+    backgroundColor: theme.palette.success.main,
+    color: theme.palette.success.contrastText,
     '&:hover': {
-      backgroundColor: '#2ea44f',
+      backgroundColor: theme.palette.success.dark,
+      color: theme.palette.primary.contrastText,
     },
   },
 }));
@@ -59,7 +47,9 @@ function NewBugPage() {
   const [issueTitle, setIssueTitle] = useState('');
   const [issueComment, setIssueComment] = useState('');
   const classes = useStyles();
+
   let issueTitleInput: any;
+  const history = useHistory();
 
   function submitNewIssue(e: FormEvent) {
     e.preventDefault();
@@ -71,12 +61,15 @@ function NewBugPage() {
           message: issueComment,
         },
       },
+    }).then(function (data) {
+      const id = data.data?.newBug.bug.humanId;
+      history.push('/bug/' + id);
     });
     issueTitleInput.value = '';
   }
 
   function isFormValid() {
-    return issueTitle.length > 0 && issueComment.length > 0 ? true : false;
+    return issueTitle.length > 0;
   }
 
   if (loading) return <div>Loading...</div>;
@@ -85,12 +78,11 @@ function NewBugPage() {
   return (
     <Paper className={classes.main}>
       <form className={classes.form} onSubmit={submitNewIssue}>
-        <TextField
+        <BugTitleInput
           inputRef={(node) => {
             issueTitleInput = node;
           }}
           label="Title"
-          className={classes.titleInput}
           variant="outlined"
           fullWidth
           margin="dense"
@@ -107,7 +99,7 @@ function NewBugPage() {
             type="submit"
             disabled={isFormValid() ? false : true}
           >
-            Submit new issue
+            Submit new bug
           </Button>
         </div>
       </form>

webui/src/pages/notfound/NotFoundPage.tsx 🔗

@@ -0,0 +1,52 @@
+import React from 'react';
+
+import { makeStyles } from '@material-ui/core/styles';
+
+import BackToListButton from '../../components/BackToListButton';
+
+const useStyles = makeStyles((theme) => ({
+  main: {
+    maxWidth: 1000,
+    margin: 'auto',
+    marginTop: theme.spacing(10),
+  },
+  logo: {
+    height: '350px',
+    display: 'block',
+    marginLeft: 'auto',
+    marginRight: 'auto',
+  },
+  icon: {
+    display: 'block',
+    marginLeft: 'auto',
+    marginRight: 'auto',
+    fontSize: '80px',
+  },
+  backLink: {
+    marginTop: theme.spacing(1),
+    textAlign: 'center',
+  },
+  header: {
+    fontSize: '30px',
+    textAlign: 'center',
+  },
+}));
+
+function NotFoundPage() {
+  const classes = useStyles();
+  return (
+    <main className={classes.main}>
+      <h1 className={classes.header}>404 – Page not found</h1>
+      <img
+        src="/logo-alpha-flat-outline.svg"
+        className={classes.logo}
+        alt="git-bug Logo"
+      />
+      <div className={classes.backLink}>
+        <BackToListButton />
+      </div>
+    </main>
+  );
+}
+
+export default NotFoundPage;

webui/src/theme.ts 🔗

@@ -1,11 +0,0 @@
-import { createMuiTheme } from '@material-ui/core/styles';
-
-const theme = createMuiTheme({
-  palette: {
-    primary: {
-      main: '#263238',
-    },
-  },
-});
-
-export default theme;

webui/src/themes/DefaultDark.ts 🔗

@@ -0,0 +1,26 @@
+import { createMuiTheme } from '@material-ui/core/styles';
+
+const defaultDarkTheme = createMuiTheme({
+  palette: {
+    type: 'dark',
+    primary: {
+      dark: '#263238',
+      main: '#2a393e',
+      light: '#525252',
+    },
+    error: {
+      main: '#f44336',
+      dark: '#ff4949',
+    },
+    info: {
+      main: '#2a393e',
+      contrastText: '#ffffffb3',
+    },
+    success: {
+      main: '#2ea44fd9',
+      contrastText: '#fff',
+    },
+  },
+});
+
+export default defaultDarkTheme;

webui/src/themes/DefaultLight.ts 🔗

@@ -0,0 +1,26 @@
+import { createMuiTheme } from '@material-ui/core/styles';
+
+const defaultLightTheme = createMuiTheme({
+  palette: {
+    type: 'light',
+    primary: {
+      dark: '#263238',
+      main: '#5a6b73',
+      light: '#f5f5f5',
+      contrastText: '#fff',
+    },
+    info: {
+      main: '#e2f1ff',
+      contrastText: '#555',
+    },
+    success: {
+      main: '#2ea44fd9',
+      contrastText: '#fff',
+    },
+    text: {
+      secondary: '#555',
+    },
+  },
+});
+
+export default defaultLightTheme;

webui/src/themes/index.ts 🔗

@@ -0,0 +1,4 @@
+import defaultDarkTheme from './DefaultDark';
+import defaultLightTheme from './DefaultLight';
+
+export { defaultLightTheme, defaultDarkTheme };