Merge pull request #134 from A-Hilaly/gqlgen

Created by Michael Muré

Upgrade gqlgen version to v0.9.0

Change summary

Gopkg.lock                                                        |   6 
Gopkg.toml                                                        |   2 
graphql/connections/connection_template.go                        |  16 
graphql/connections/gen_comment.go                                |  16 
graphql/connections/gen_identity.go                               |  16 
graphql/connections/gen_lazy_bug.go                               |  16 
graphql/connections/gen_lazy_identity.go                          |  16 
graphql/connections/gen_operation.go                              |  16 
graphql/connections/gen_timeline.go                               |  16 
graphql/graph/gen_graph.go                                        | 277 
graphql/models/gen_models.go                                      |  40 
graphql/resolvers/bug.go                                          |  18 
graphql/resolvers/repo.go                                         |  20 
vendor/github.com/99designs/gqlgen/codegen/complexity.go          |   6 
vendor/github.com/99designs/gqlgen/codegen/config/binder.go       |  27 
vendor/github.com/99designs/gqlgen/codegen/config/config.go       |   8 
vendor/github.com/99designs/gqlgen/codegen/generated!.gotpl       |  33 
vendor/github.com/99designs/gqlgen/codegen/object.gotpl           |  20 
vendor/github.com/99designs/gqlgen/codegen/templates/templates.go |  77 
vendor/github.com/99designs/gqlgen/codegen/type.gotpl             |  17 
vendor/github.com/99designs/gqlgen/graphql/any.go                 |  19 
vendor/github.com/99designs/gqlgen/graphql/context.go             |   4 
vendor/github.com/99designs/gqlgen/graphql/exec.go                |  19 
vendor/github.com/99designs/gqlgen/graphql/introspection/type.go  |   4 
vendor/github.com/99designs/gqlgen/graphql/time.go                |   4 
vendor/github.com/99designs/gqlgen/graphql/upload.go              |  26 
vendor/github.com/99designs/gqlgen/graphql/version.go             |   2 
vendor/github.com/99designs/gqlgen/handler/graphql.go             | 220 
vendor/github.com/99designs/gqlgen/handler/mock.go                |  57 
vendor/github.com/99designs/gqlgen/internal/code/imports.go       |  90 
vendor/github.com/99designs/gqlgen/plugin/modelgen/models.go      |  33 
vendor/github.com/99designs/gqlgen/plugin/modelgen/models.gotpl   |   2 
32 files changed, 798 insertions(+), 345 deletions(-)

Detailed changes

Gopkg.lock 🔗

@@ -2,7 +2,7 @@
 
 
 [[projects]]
-  digest = "1:d28d302ab337b9ea3af38c04429e198ab422c1e026592aebcf7aec66ebbc4645"
+  digest = "1:36a35bcd0d31f5082578d975e0c23e5e14c98be262b3832fc2c30f35ca5fc776"
   name = "github.com/99designs/gqlgen"
   packages = [
     "api",
@@ -22,8 +22,8 @@
     "plugin/servergen",
   ]
   pruneopts = "UT"
-  revision = "010a79b66f08732cb70d133dcab297a8ee895572"
-  version = "v0.8.3"
+  revision = "ea4652d223c441dc77b31882781ce08488763d67"
+  version = "v0.9.0"
 
 [[projects]]
   branch = "master"

Gopkg.toml 🔗

@@ -58,7 +58,7 @@
 
 [[constraint]]
   name = "github.com/99designs/gqlgen"
-  version = "0.8.3"
+  version = "0.9.0"
 
 [[constraint]]
   name = "github.com/MichaelMure/gocui"

graphql/connections/connection_template.go 🔗

@@ -26,17 +26,17 @@ type NameEdgeMaker func(value NodeType, offset int) Edge
 
 // NameConMaker define a function that create a ConnectionType
 type NameConMaker func(
-	edges []EdgeType,
+	edges []*EdgeType,
 	nodes []NodeType,
-	info models.PageInfo,
+	info *models.PageInfo,
 	totalCount int) (*ConnectionType, error)
 
 // NameCon will paginate a source according to the input of a relay connection
 func NameCon(source []NodeType, edgeMaker NameEdgeMaker, conMaker NameConMaker, input models.ConnectionInput) (*ConnectionType, error) {
 	var nodes []NodeType
-	var edges []EdgeType
+	var edges []*EdgeType
 	var cursors []string
-	var pageInfo models.PageInfo
+	var pageInfo = &models.PageInfo{}
 	var totalCount = len(source)
 
 	emptyCon, _ := conMaker(edges, nodes, pageInfo, 0)
@@ -66,18 +66,20 @@ func NameCon(source []NodeType, edgeMaker NameEdgeMaker, conMaker NameConMaker,
 				break
 			}
 
-			edges = append(edges, edge.(EdgeType))
+			e := edge.(EdgeType)
+			edges = append(edges, &e)
 			cursors = append(cursors, edge.GetCursor())
 			nodes = append(nodes, value)
 		}
 	} else {
-		edges = make([]EdgeType, len(source))
+		edges = make([]*EdgeType, len(source))
 		cursors = make([]string, len(source))
 		nodes = source
 
 		for i, value := range source {
 			edge := edgeMaker(value, i+offset)
-			edges[i] = edge.(EdgeType)
+			e := edge.(EdgeType)
+			edges[i] = &e
 			cursors[i] = edge.GetCursor()
 		}
 	}
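
gqlgen v0.9.0 maps struct-valued list elements and nested objects to pointers, so the connection template (and every generated gen_*.go file below) now traffics in []*EdgeType and *models.PageInfo. The caller-visible consequence is that the conMaker passed into these helpers must adopt the pointer-based signature. A minimal sketch for the Comment case, using only types that appear in this diff:

    conMaker := func(edges []*models.CommentEdge, nodes []bug.Comment,
        info *models.PageInfo, totalCount int) (*models.CommentConnection, error) {
        return &models.CommentConnection{
            Edges:      edges,
            PageInfo:   info,
            TotalCount: totalCount,
            // Nodes is now []*bug.Comment; see graphql/resolvers/bug.go below
            // for the value-to-pointer conversion.
        }, nil
    }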

graphql/connections/gen_comment.go 🔗

@@ -17,17 +17,17 @@ type CommentEdgeMaker func(value bug.Comment, offset int) Edge
 
 // CommentConMaker define a function that create a models.CommentConnection
 type CommentConMaker func(
-	edges []models.CommentEdge,
+	edges []*models.CommentEdge,
 	nodes []bug.Comment,
-	info models.PageInfo,
+	info *models.PageInfo,
 	totalCount int) (*models.CommentConnection, error)
 
 // CommentCon will paginate a source according to the input of a relay connection
 func CommentCon(source []bug.Comment, edgeMaker CommentEdgeMaker, conMaker CommentConMaker, input models.ConnectionInput) (*models.CommentConnection, error) {
 	var nodes []bug.Comment
-	var edges []models.CommentEdge
+	var edges []*models.CommentEdge
 	var cursors []string
-	var pageInfo models.PageInfo
+	var pageInfo = &models.PageInfo{}
 	var totalCount = len(source)
 
 	emptyCon, _ := conMaker(edges, nodes, pageInfo, 0)
@@ -57,18 +57,20 @@ func CommentCon(source []bug.Comment, edgeMaker CommentEdgeMaker, conMaker Comme
 				break
 			}
 
-			edges = append(edges, edge.(models.CommentEdge))
+			e := edge.(models.CommentEdge)
+			edges = append(edges, &e)
 			cursors = append(cursors, edge.GetCursor())
 			nodes = append(nodes, value)
 		}
 	} else {
-		edges = make([]models.CommentEdge, len(source))
+		edges = make([]*models.CommentEdge, len(source))
 		cursors = make([]string, len(source))
 		nodes = source
 
 		for i, value := range source {
 			edge := edgeMaker(value, i+offset)
-			edges[i] = edge.(models.CommentEdge)
+			e := edge.(models.CommentEdge)
+			edges[i] = &e
 			cursors[i] = edge.GetCursor()
 		}
 	}

graphql/connections/gen_identity.go 🔗

@@ -17,17 +17,17 @@ type IdentityEdgeMaker func(value identity.Interface, offset int) Edge
 
 // IdentityConMaker define a function that create a models.IdentityConnection
 type IdentityConMaker func(
-	edges []models.IdentityEdge,
+	edges []*models.IdentityEdge,
 	nodes []identity.Interface,
-	info models.PageInfo,
+	info *models.PageInfo,
 	totalCount int) (*models.IdentityConnection, error)
 
 // IdentityCon will paginate a source according to the input of a relay connection
 func IdentityCon(source []identity.Interface, edgeMaker IdentityEdgeMaker, conMaker IdentityConMaker, input models.ConnectionInput) (*models.IdentityConnection, error) {
 	var nodes []identity.Interface
-	var edges []models.IdentityEdge
+	var edges []*models.IdentityEdge
 	var cursors []string
-	var pageInfo models.PageInfo
+	var pageInfo = &models.PageInfo{}
 	var totalCount = len(source)
 
 	emptyCon, _ := conMaker(edges, nodes, pageInfo, 0)
@@ -57,18 +57,20 @@ func IdentityCon(source []identity.Interface, edgeMaker IdentityEdgeMaker, conMa
 				break
 			}
 
-			edges = append(edges, edge.(models.IdentityEdge))
+			e := edge.(models.IdentityEdge)
+			edges = append(edges, &e)
 			cursors = append(cursors, edge.GetCursor())
 			nodes = append(nodes, value)
 		}
 	} else {
-		edges = make([]models.IdentityEdge, len(source))
+		edges = make([]*models.IdentityEdge, len(source))
 		cursors = make([]string, len(source))
 		nodes = source
 
 		for i, value := range source {
 			edge := edgeMaker(value, i+offset)
-			edges[i] = edge.(models.IdentityEdge)
+			e := edge.(models.IdentityEdge)
+			edges[i] = &e
 			cursors[i] = edge.GetCursor()
 		}
 	}

graphql/connections/gen_lazy_bug.go 🔗

@@ -16,17 +16,17 @@ type LazyBugEdgeMaker func(value string, offset int) Edge
 
 // LazyBugConMaker define a function that create a models.BugConnection
 type LazyBugConMaker func(
-	edges []LazyBugEdge,
+	edges []*LazyBugEdge,
 	nodes []string,
-	info models.PageInfo,
+	info *models.PageInfo,
 	totalCount int) (*models.BugConnection, error)
 
 // LazyBugCon will paginate a source according to the input of a relay connection
 func LazyBugCon(source []string, edgeMaker LazyBugEdgeMaker, conMaker LazyBugConMaker, input models.ConnectionInput) (*models.BugConnection, error) {
 	var nodes []string
-	var edges []LazyBugEdge
+	var edges []*LazyBugEdge
 	var cursors []string
-	var pageInfo models.PageInfo
+	var pageInfo = &models.PageInfo{}
 	var totalCount = len(source)
 
 	emptyCon, _ := conMaker(edges, nodes, pageInfo, 0)
@@ -56,18 +56,20 @@ func LazyBugCon(source []string, edgeMaker LazyBugEdgeMaker, conMaker LazyBugCon
 				break
 			}
 
-			edges = append(edges, edge.(LazyBugEdge))
+			e := edge.(LazyBugEdge)
+			edges = append(edges, &e)
 			cursors = append(cursors, edge.GetCursor())
 			nodes = append(nodes, value)
 		}
 	} else {
-		edges = make([]LazyBugEdge, len(source))
+		edges = make([]*LazyBugEdge, len(source))
 		cursors = make([]string, len(source))
 		nodes = source
 
 		for i, value := range source {
 			edge := edgeMaker(value, i+offset)
-			edges[i] = edge.(LazyBugEdge)
+			e := edge.(LazyBugEdge)
+			edges[i] = &e
 			cursors[i] = edge.GetCursor()
 		}
 	}

graphql/connections/gen_lazy_identity.go 🔗

@@ -16,17 +16,17 @@ type LazyIdentityEdgeMaker func(value string, offset int) Edge
 
 // LazyIdentityConMaker define a function that create a models.IdentityConnection
 type LazyIdentityConMaker func(
-	edges []LazyIdentityEdge,
+	edges []*LazyIdentityEdge,
 	nodes []string,
-	info models.PageInfo,
+	info *models.PageInfo,
 	totalCount int) (*models.IdentityConnection, error)
 
 // LazyIdentityCon will paginate a source according to the input of a relay connection
 func LazyIdentityCon(source []string, edgeMaker LazyIdentityEdgeMaker, conMaker LazyIdentityConMaker, input models.ConnectionInput) (*models.IdentityConnection, error) {
 	var nodes []string
-	var edges []LazyIdentityEdge
+	var edges []*LazyIdentityEdge
 	var cursors []string
-	var pageInfo models.PageInfo
+	var pageInfo = &models.PageInfo{}
 	var totalCount = len(source)
 
 	emptyCon, _ := conMaker(edges, nodes, pageInfo, 0)
@@ -56,18 +56,20 @@ func LazyIdentityCon(source []string, edgeMaker LazyIdentityEdgeMaker, conMaker
 				break
 			}
 
-			edges = append(edges, edge.(LazyIdentityEdge))
+			e := edge.(LazyIdentityEdge)
+			edges = append(edges, &e)
 			cursors = append(cursors, edge.GetCursor())
 			nodes = append(nodes, value)
 		}
 	} else {
-		edges = make([]LazyIdentityEdge, len(source))
+		edges = make([]*LazyIdentityEdge, len(source))
 		cursors = make([]string, len(source))
 		nodes = source
 
 		for i, value := range source {
 			edge := edgeMaker(value, i+offset)
-			edges[i] = edge.(LazyIdentityEdge)
+			e := edge.(LazyIdentityEdge)
+			edges[i] = &e
 			cursors[i] = edge.GetCursor()
 		}
 	}

graphql/connections/gen_operation.go 🔗

@@ -17,17 +17,17 @@ type OperationEdgeMaker func(value bug.Operation, offset int) Edge
 
 // OperationConMaker define a function that create a models.OperationConnection
 type OperationConMaker func(
-	edges []models.OperationEdge,
+	edges []*models.OperationEdge,
 	nodes []bug.Operation,
-	info models.PageInfo,
+	info *models.PageInfo,
 	totalCount int) (*models.OperationConnection, error)
 
 // OperationCon will paginate a source according to the input of a relay connection
 func OperationCon(source []bug.Operation, edgeMaker OperationEdgeMaker, conMaker OperationConMaker, input models.ConnectionInput) (*models.OperationConnection, error) {
 	var nodes []bug.Operation
-	var edges []models.OperationEdge
+	var edges []*models.OperationEdge
 	var cursors []string
-	var pageInfo models.PageInfo
+	var pageInfo = &models.PageInfo{}
 	var totalCount = len(source)
 
 	emptyCon, _ := conMaker(edges, nodes, pageInfo, 0)
@@ -57,18 +57,20 @@ func OperationCon(source []bug.Operation, edgeMaker OperationEdgeMaker, conMaker
 				break
 			}
 
-			edges = append(edges, edge.(models.OperationEdge))
+			e := edge.(models.OperationEdge)
+			edges = append(edges, &e)
 			cursors = append(cursors, edge.GetCursor())
 			nodes = append(nodes, value)
 		}
 	} else {
-		edges = make([]models.OperationEdge, len(source))
+		edges = make([]*models.OperationEdge, len(source))
 		cursors = make([]string, len(source))
 		nodes = source
 
 		for i, value := range source {
 			edge := edgeMaker(value, i+offset)
-			edges[i] = edge.(models.OperationEdge)
+			e := edge.(models.OperationEdge)
+			edges[i] = &e
 			cursors[i] = edge.GetCursor()
 		}
 	}

graphql/connections/gen_timeline.go 🔗

@@ -17,17 +17,17 @@ type TimelineItemEdgeMaker func(value bug.TimelineItem, offset int) Edge
 
 // TimelineItemConMaker define a function that create a models.TimelineItemConnection
 type TimelineItemConMaker func(
-	edges []models.TimelineItemEdge,
+	edges []*models.TimelineItemEdge,
 	nodes []bug.TimelineItem,
-	info models.PageInfo,
+	info *models.PageInfo,
 	totalCount int) (*models.TimelineItemConnection, error)
 
 // TimelineItemCon will paginate a source according to the input of a relay connection
 func TimelineItemCon(source []bug.TimelineItem, edgeMaker TimelineItemEdgeMaker, conMaker TimelineItemConMaker, input models.ConnectionInput) (*models.TimelineItemConnection, error) {
 	var nodes []bug.TimelineItem
-	var edges []models.TimelineItemEdge
+	var edges []*models.TimelineItemEdge
 	var cursors []string
-	var pageInfo models.PageInfo
+	var pageInfo = &models.PageInfo{}
 	var totalCount = len(source)
 
 	emptyCon, _ := conMaker(edges, nodes, pageInfo, 0)
@@ -57,18 +57,20 @@ func TimelineItemCon(source []bug.TimelineItem, edgeMaker TimelineItemEdgeMaker,
 				break
 			}
 
-			edges = append(edges, edge.(models.TimelineItemEdge))
+			e := edge.(models.TimelineItemEdge)
+			edges = append(edges, &e)
 			cursors = append(cursors, edge.GetCursor())
 			nodes = append(nodes, value)
 		}
 	} else {
-		edges = make([]models.TimelineItemEdge, len(source))
+		edges = make([]*models.TimelineItemEdge, len(source))
 		cursors = make([]string, len(source))
 		nodes = source
 
 		for i, value := range source {
 			edge := edgeMaker(value, i+offset)
-			edges[i] = edge.(models.TimelineItemEdge)
+			e := edge.(models.TimelineItemEdge)
+			edges[i] = &e
 			cursors[i] = edge.GetCursor()
 		}
 	}

graphql/graph/gen_graph.go 🔗

@@ -9,6 +9,7 @@ import (
 	"fmt"
 	"strconv"
 	"sync"
+	"sync/atomic"
 	"time"
 
 	"github.com/99designs/gqlgen/graphql"
@@ -385,105 +386,105 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 	_ = ec
 	switch typeName + "." + field {
 
-	case "AddCommentOperation.Author":
+	case "AddCommentOperation.author":
 		if e.complexity.AddCommentOperation.Author == nil {
 			break
 		}
 
 		return e.complexity.AddCommentOperation.Author(childComplexity), true
 
-	case "AddCommentOperation.Date":
+	case "AddCommentOperation.date":
 		if e.complexity.AddCommentOperation.Date == nil {
 			break
 		}
 
 		return e.complexity.AddCommentOperation.Date(childComplexity), true
 
-	case "AddCommentOperation.Files":
+	case "AddCommentOperation.files":
 		if e.complexity.AddCommentOperation.Files == nil {
 			break
 		}
 
 		return e.complexity.AddCommentOperation.Files(childComplexity), true
 
-	case "AddCommentOperation.Hash":
+	case "AddCommentOperation.hash":
 		if e.complexity.AddCommentOperation.Hash == nil {
 			break
 		}
 
 		return e.complexity.AddCommentOperation.Hash(childComplexity), true
 
-	case "AddCommentOperation.Message":
+	case "AddCommentOperation.message":
 		if e.complexity.AddCommentOperation.Message == nil {
 			break
 		}
 
 		return e.complexity.AddCommentOperation.Message(childComplexity), true
 
-	case "AddCommentTimelineItem.Author":
+	case "AddCommentTimelineItem.author":
 		if e.complexity.AddCommentTimelineItem.Author == nil {
 			break
 		}
 
 		return e.complexity.AddCommentTimelineItem.Author(childComplexity), true
 
-	case "AddCommentTimelineItem.CreatedAt":
+	case "AddCommentTimelineItem.createdAt":
 		if e.complexity.AddCommentTimelineItem.CreatedAt == nil {
 			break
 		}
 
 		return e.complexity.AddCommentTimelineItem.CreatedAt(childComplexity), true
 
-	case "AddCommentTimelineItem.Edited":
+	case "AddCommentTimelineItem.edited":
 		if e.complexity.AddCommentTimelineItem.Edited == nil {
 			break
 		}
 
 		return e.complexity.AddCommentTimelineItem.Edited(childComplexity), true
 
-	case "AddCommentTimelineItem.Files":
+	case "AddCommentTimelineItem.files":
 		if e.complexity.AddCommentTimelineItem.Files == nil {
 			break
 		}
 
 		return e.complexity.AddCommentTimelineItem.Files(childComplexity), true
 
-	case "AddCommentTimelineItem.Hash":
+	case "AddCommentTimelineItem.hash":
 		if e.complexity.AddCommentTimelineItem.Hash == nil {
 			break
 		}
 
 		return e.complexity.AddCommentTimelineItem.Hash(childComplexity), true
 
-	case "AddCommentTimelineItem.History":
+	case "AddCommentTimelineItem.history":
 		if e.complexity.AddCommentTimelineItem.History == nil {
 			break
 		}
 
 		return e.complexity.AddCommentTimelineItem.History(childComplexity), true
 
-	case "AddCommentTimelineItem.LastEdit":
+	case "AddCommentTimelineItem.lastEdit":
 		if e.complexity.AddCommentTimelineItem.LastEdit == nil {
 			break
 		}
 
 		return e.complexity.AddCommentTimelineItem.LastEdit(childComplexity), true
 
-	case "AddCommentTimelineItem.Message":
+	case "AddCommentTimelineItem.message":
 		if e.complexity.AddCommentTimelineItem.Message == nil {
 			break
 		}
 
 		return e.complexity.AddCommentTimelineItem.Message(childComplexity), true
 
-	case "AddCommentTimelineItem.MessageIsEmpty":
+	case "AddCommentTimelineItem.messageIsEmpty":
 		if e.complexity.AddCommentTimelineItem.MessageIsEmpty == nil {
 			break
 		}
 
 		return e.complexity.AddCommentTimelineItem.MessageIsEmpty(childComplexity), true
 
-	case "Bug.Actors":
+	case "Bug.actors":
 		if e.complexity.Bug.Actors == nil {
 			break
 		}
@@ -495,14 +496,14 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 
 		return e.complexity.Bug.Actors(childComplexity, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int)), true
 
-	case "Bug.Author":
+	case "Bug.author":
 		if e.complexity.Bug.Author == nil {
 			break
 		}
 
 		return e.complexity.Bug.Author(childComplexity), true
 
-	case "Bug.Comments":
+	case "Bug.comments":
 		if e.complexity.Bug.Comments == nil {
 			break
 		}
@@ -514,42 +515,42 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 
 		return e.complexity.Bug.Comments(childComplexity, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int)), true
 
-	case "Bug.CreatedAt":
+	case "Bug.createdAt":
 		if e.complexity.Bug.CreatedAt == nil {
 			break
 		}
 
 		return e.complexity.Bug.CreatedAt(childComplexity), true
 
-	case "Bug.HumanId":
+	case "Bug.humanId":
 		if e.complexity.Bug.HumanId == nil {
 			break
 		}
 
 		return e.complexity.Bug.HumanId(childComplexity), true
 
-	case "Bug.Id":
+	case "Bug.id":
 		if e.complexity.Bug.Id == nil {
 			break
 		}
 
 		return e.complexity.Bug.Id(childComplexity), true
 
-	case "Bug.Labels":
+	case "Bug.labels":
 		if e.complexity.Bug.Labels == nil {
 			break
 		}
 
 		return e.complexity.Bug.Labels(childComplexity), true
 
-	case "Bug.LastEdit":
+	case "Bug.lastEdit":
 		if e.complexity.Bug.LastEdit == nil {
 			break
 		}
 
 		return e.complexity.Bug.LastEdit(childComplexity), true
 
-	case "Bug.Operations":
+	case "Bug.operations":
 		if e.complexity.Bug.Operations == nil {
 			break
 		}
@@ -561,7 +562,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 
 		return e.complexity.Bug.Operations(childComplexity, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int)), true
 
-	case "Bug.Participants":
+	case "Bug.participants":
 		if e.complexity.Bug.Participants == nil {
 			break
 		}
@@ -573,14 +574,14 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 
 		return e.complexity.Bug.Participants(childComplexity, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int)), true
 
-	case "Bug.Status":
+	case "Bug.status":
 		if e.complexity.Bug.Status == nil {
 			break
 		}
 
 		return e.complexity.Bug.Status(childComplexity), true
 
-	case "Bug.Timeline":
+	case "Bug.timeline":
 		if e.complexity.Bug.Timeline == nil {
 			break
 		}
@@ -592,448 +593,448 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 
 		return e.complexity.Bug.Timeline(childComplexity, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int)), true
 
-	case "Bug.Title":
+	case "Bug.title":
 		if e.complexity.Bug.Title == nil {
 			break
 		}
 
 		return e.complexity.Bug.Title(childComplexity), true
 
-	case "BugConnection.Edges":
+	case "BugConnection.edges":
 		if e.complexity.BugConnection.Edges == nil {
 			break
 		}
 
 		return e.complexity.BugConnection.Edges(childComplexity), true
 
-	case "BugConnection.Nodes":
+	case "BugConnection.nodes":
 		if e.complexity.BugConnection.Nodes == nil {
 			break
 		}
 
 		return e.complexity.BugConnection.Nodes(childComplexity), true
 
-	case "BugConnection.PageInfo":
+	case "BugConnection.pageInfo":
 		if e.complexity.BugConnection.PageInfo == nil {
 			break
 		}
 
 		return e.complexity.BugConnection.PageInfo(childComplexity), true
 
-	case "BugConnection.TotalCount":
+	case "BugConnection.totalCount":
 		if e.complexity.BugConnection.TotalCount == nil {
 			break
 		}
 
 		return e.complexity.BugConnection.TotalCount(childComplexity), true
 
-	case "BugEdge.Cursor":
+	case "BugEdge.cursor":
 		if e.complexity.BugEdge.Cursor == nil {
 			break
 		}
 
 		return e.complexity.BugEdge.Cursor(childComplexity), true
 
-	case "BugEdge.Node":
+	case "BugEdge.node":
 		if e.complexity.BugEdge.Node == nil {
 			break
 		}
 
 		return e.complexity.BugEdge.Node(childComplexity), true
 
-	case "Comment.Author":
+	case "Comment.author":
 		if e.complexity.Comment.Author == nil {
 			break
 		}
 
 		return e.complexity.Comment.Author(childComplexity), true
 
-	case "Comment.Files":
+	case "Comment.files":
 		if e.complexity.Comment.Files == nil {
 			break
 		}
 
 		return e.complexity.Comment.Files(childComplexity), true
 
-	case "Comment.Message":
+	case "Comment.message":
 		if e.complexity.Comment.Message == nil {
 			break
 		}
 
 		return e.complexity.Comment.Message(childComplexity), true
 
-	case "CommentConnection.Edges":
+	case "CommentConnection.edges":
 		if e.complexity.CommentConnection.Edges == nil {
 			break
 		}
 
 		return e.complexity.CommentConnection.Edges(childComplexity), true
 
-	case "CommentConnection.Nodes":
+	case "CommentConnection.nodes":
 		if e.complexity.CommentConnection.Nodes == nil {
 			break
 		}
 
 		return e.complexity.CommentConnection.Nodes(childComplexity), true
 
-	case "CommentConnection.PageInfo":
+	case "CommentConnection.pageInfo":
 		if e.complexity.CommentConnection.PageInfo == nil {
 			break
 		}
 
 		return e.complexity.CommentConnection.PageInfo(childComplexity), true
 
-	case "CommentConnection.TotalCount":
+	case "CommentConnection.totalCount":
 		if e.complexity.CommentConnection.TotalCount == nil {
 			break
 		}
 
 		return e.complexity.CommentConnection.TotalCount(childComplexity), true
 
-	case "CommentEdge.Cursor":
+	case "CommentEdge.cursor":
 		if e.complexity.CommentEdge.Cursor == nil {
 			break
 		}
 
 		return e.complexity.CommentEdge.Cursor(childComplexity), true
 
-	case "CommentEdge.Node":
+	case "CommentEdge.node":
 		if e.complexity.CommentEdge.Node == nil {
 			break
 		}
 
 		return e.complexity.CommentEdge.Node(childComplexity), true
 
-	case "CommentHistoryStep.Date":
+	case "CommentHistoryStep.date":
 		if e.complexity.CommentHistoryStep.Date == nil {
 			break
 		}
 
 		return e.complexity.CommentHistoryStep.Date(childComplexity), true
 
-	case "CommentHistoryStep.Message":
+	case "CommentHistoryStep.message":
 		if e.complexity.CommentHistoryStep.Message == nil {
 			break
 		}
 
 		return e.complexity.CommentHistoryStep.Message(childComplexity), true
 
-	case "CreateOperation.Author":
+	case "CreateOperation.author":
 		if e.complexity.CreateOperation.Author == nil {
 			break
 		}
 
 		return e.complexity.CreateOperation.Author(childComplexity), true
 
-	case "CreateOperation.Date":
+	case "CreateOperation.date":
 		if e.complexity.CreateOperation.Date == nil {
 			break
 		}
 
 		return e.complexity.CreateOperation.Date(childComplexity), true
 
-	case "CreateOperation.Files":
+	case "CreateOperation.files":
 		if e.complexity.CreateOperation.Files == nil {
 			break
 		}
 
 		return e.complexity.CreateOperation.Files(childComplexity), true
 
-	case "CreateOperation.Hash":
+	case "CreateOperation.hash":
 		if e.complexity.CreateOperation.Hash == nil {
 			break
 		}
 
 		return e.complexity.CreateOperation.Hash(childComplexity), true
 
-	case "CreateOperation.Message":
+	case "CreateOperation.message":
 		if e.complexity.CreateOperation.Message == nil {
 			break
 		}
 
 		return e.complexity.CreateOperation.Message(childComplexity), true
 
-	case "CreateOperation.Title":
+	case "CreateOperation.title":
 		if e.complexity.CreateOperation.Title == nil {
 			break
 		}
 
 		return e.complexity.CreateOperation.Title(childComplexity), true
 
-	case "CreateTimelineItem.Author":
+	case "CreateTimelineItem.author":
 		if e.complexity.CreateTimelineItem.Author == nil {
 			break
 		}
 
 		return e.complexity.CreateTimelineItem.Author(childComplexity), true
 
-	case "CreateTimelineItem.CreatedAt":
+	case "CreateTimelineItem.createdAt":
 		if e.complexity.CreateTimelineItem.CreatedAt == nil {
 			break
 		}
 
 		return e.complexity.CreateTimelineItem.CreatedAt(childComplexity), true
 
-	case "CreateTimelineItem.Edited":
+	case "CreateTimelineItem.edited":
 		if e.complexity.CreateTimelineItem.Edited == nil {
 			break
 		}
 
 		return e.complexity.CreateTimelineItem.Edited(childComplexity), true
 
-	case "CreateTimelineItem.Files":
+	case "CreateTimelineItem.files":
 		if e.complexity.CreateTimelineItem.Files == nil {
 			break
 		}
 
 		return e.complexity.CreateTimelineItem.Files(childComplexity), true
 
-	case "CreateTimelineItem.Hash":
+	case "CreateTimelineItem.hash":
 		if e.complexity.CreateTimelineItem.Hash == nil {
 			break
 		}
 
 		return e.complexity.CreateTimelineItem.Hash(childComplexity), true
 
-	case "CreateTimelineItem.History":
+	case "CreateTimelineItem.history":
 		if e.complexity.CreateTimelineItem.History == nil {
 			break
 		}
 
 		return e.complexity.CreateTimelineItem.History(childComplexity), true
 
-	case "CreateTimelineItem.LastEdit":
+	case "CreateTimelineItem.lastEdit":
 		if e.complexity.CreateTimelineItem.LastEdit == nil {
 			break
 		}
 
 		return e.complexity.CreateTimelineItem.LastEdit(childComplexity), true
 
-	case "CreateTimelineItem.Message":
+	case "CreateTimelineItem.message":
 		if e.complexity.CreateTimelineItem.Message == nil {
 			break
 		}
 
 		return e.complexity.CreateTimelineItem.Message(childComplexity), true
 
-	case "CreateTimelineItem.MessageIsEmpty":
+	case "CreateTimelineItem.messageIsEmpty":
 		if e.complexity.CreateTimelineItem.MessageIsEmpty == nil {
 			break
 		}
 
 		return e.complexity.CreateTimelineItem.MessageIsEmpty(childComplexity), true
 
-	case "EditCommentOperation.Author":
+	case "EditCommentOperation.author":
 		if e.complexity.EditCommentOperation.Author == nil {
 			break
 		}
 
 		return e.complexity.EditCommentOperation.Author(childComplexity), true
 
-	case "EditCommentOperation.Date":
+	case "EditCommentOperation.date":
 		if e.complexity.EditCommentOperation.Date == nil {
 			break
 		}
 
 		return e.complexity.EditCommentOperation.Date(childComplexity), true
 
-	case "EditCommentOperation.Files":
+	case "EditCommentOperation.files":
 		if e.complexity.EditCommentOperation.Files == nil {
 			break
 		}
 
 		return e.complexity.EditCommentOperation.Files(childComplexity), true
 
-	case "EditCommentOperation.Hash":
+	case "EditCommentOperation.hash":
 		if e.complexity.EditCommentOperation.Hash == nil {
 			break
 		}
 
 		return e.complexity.EditCommentOperation.Hash(childComplexity), true
 
-	case "EditCommentOperation.Message":
+	case "EditCommentOperation.message":
 		if e.complexity.EditCommentOperation.Message == nil {
 			break
 		}
 
 		return e.complexity.EditCommentOperation.Message(childComplexity), true
 
-	case "EditCommentOperation.Target":
+	case "EditCommentOperation.target":
 		if e.complexity.EditCommentOperation.Target == nil {
 			break
 		}
 
 		return e.complexity.EditCommentOperation.Target(childComplexity), true
 
-	case "Identity.AvatarURL":
+	case "Identity.avatarUrl":
 		if e.complexity.Identity.AvatarURL == nil {
 			break
 		}
 
 		return e.complexity.Identity.AvatarURL(childComplexity), true
 
-	case "Identity.DisplayName":
+	case "Identity.displayName":
 		if e.complexity.Identity.DisplayName == nil {
 			break
 		}
 
 		return e.complexity.Identity.DisplayName(childComplexity), true
 
-	case "Identity.Email":
+	case "Identity.email":
 		if e.complexity.Identity.Email == nil {
 			break
 		}
 
 		return e.complexity.Identity.Email(childComplexity), true
 
-	case "Identity.HumanID":
+	case "Identity.humanId":
 		if e.complexity.Identity.HumanID == nil {
 			break
 		}
 
 		return e.complexity.Identity.HumanID(childComplexity), true
 
-	case "Identity.ID":
+	case "Identity.id":
 		if e.complexity.Identity.ID == nil {
 			break
 		}
 
 		return e.complexity.Identity.ID(childComplexity), true
 
-	case "Identity.IsProtected":
+	case "Identity.isProtected":
 		if e.complexity.Identity.IsProtected == nil {
 			break
 		}
 
 		return e.complexity.Identity.IsProtected(childComplexity), true
 
-	case "Identity.Login":
+	case "Identity.login":
 		if e.complexity.Identity.Login == nil {
 			break
 		}
 
 		return e.complexity.Identity.Login(childComplexity), true
 
-	case "Identity.Name":
+	case "Identity.name":
 		if e.complexity.Identity.Name == nil {
 			break
 		}
 
 		return e.complexity.Identity.Name(childComplexity), true
 
-	case "IdentityConnection.Edges":
+	case "IdentityConnection.edges":
 		if e.complexity.IdentityConnection.Edges == nil {
 			break
 		}
 
 		return e.complexity.IdentityConnection.Edges(childComplexity), true
 
-	case "IdentityConnection.Nodes":
+	case "IdentityConnection.nodes":
 		if e.complexity.IdentityConnection.Nodes == nil {
 			break
 		}
 
 		return e.complexity.IdentityConnection.Nodes(childComplexity), true
 
-	case "IdentityConnection.PageInfo":
+	case "IdentityConnection.pageInfo":
 		if e.complexity.IdentityConnection.PageInfo == nil {
 			break
 		}
 
 		return e.complexity.IdentityConnection.PageInfo(childComplexity), true
 
-	case "IdentityConnection.TotalCount":
+	case "IdentityConnection.totalCount":
 		if e.complexity.IdentityConnection.TotalCount == nil {
 			break
 		}
 
 		return e.complexity.IdentityConnection.TotalCount(childComplexity), true
 
-	case "IdentityEdge.Cursor":
+	case "IdentityEdge.cursor":
 		if e.complexity.IdentityEdge.Cursor == nil {
 			break
 		}
 
 		return e.complexity.IdentityEdge.Cursor(childComplexity), true
 
-	case "IdentityEdge.Node":
+	case "IdentityEdge.node":
 		if e.complexity.IdentityEdge.Node == nil {
 			break
 		}
 
 		return e.complexity.IdentityEdge.Node(childComplexity), true
 
-	case "LabelChangeOperation.Added":
+	case "LabelChangeOperation.added":
 		if e.complexity.LabelChangeOperation.Added == nil {
 			break
 		}
 
 		return e.complexity.LabelChangeOperation.Added(childComplexity), true
 
-	case "LabelChangeOperation.Author":
+	case "LabelChangeOperation.author":
 		if e.complexity.LabelChangeOperation.Author == nil {
 			break
 		}
 
 		return e.complexity.LabelChangeOperation.Author(childComplexity), true
 
-	case "LabelChangeOperation.Date":
+	case "LabelChangeOperation.date":
 		if e.complexity.LabelChangeOperation.Date == nil {
 			break
 		}
 
 		return e.complexity.LabelChangeOperation.Date(childComplexity), true
 
-	case "LabelChangeOperation.Hash":
+	case "LabelChangeOperation.hash":
 		if e.complexity.LabelChangeOperation.Hash == nil {
 			break
 		}
 
 		return e.complexity.LabelChangeOperation.Hash(childComplexity), true
 
-	case "LabelChangeOperation.Removed":
+	case "LabelChangeOperation.removed":
 		if e.complexity.LabelChangeOperation.Removed == nil {
 			break
 		}
 
 		return e.complexity.LabelChangeOperation.Removed(childComplexity), true
 
-	case "LabelChangeTimelineItem.Added":
+	case "LabelChangeTimelineItem.added":
 		if e.complexity.LabelChangeTimelineItem.Added == nil {
 			break
 		}
 
 		return e.complexity.LabelChangeTimelineItem.Added(childComplexity), true
 
-	case "LabelChangeTimelineItem.Author":
+	case "LabelChangeTimelineItem.author":
 		if e.complexity.LabelChangeTimelineItem.Author == nil {
 			break
 		}
 
 		return e.complexity.LabelChangeTimelineItem.Author(childComplexity), true
 
-	case "LabelChangeTimelineItem.Date":
+	case "LabelChangeTimelineItem.date":
 		if e.complexity.LabelChangeTimelineItem.Date == nil {
 			break
 		}
 
 		return e.complexity.LabelChangeTimelineItem.Date(childComplexity), true
 
-	case "LabelChangeTimelineItem.Hash":
+	case "LabelChangeTimelineItem.hash":
 		if e.complexity.LabelChangeTimelineItem.Hash == nil {
 			break
 		}
 
 		return e.complexity.LabelChangeTimelineItem.Hash(childComplexity), true
 
-	case "LabelChangeTimelineItem.Removed":
+	case "LabelChangeTimelineItem.removed":
 		if e.complexity.LabelChangeTimelineItem.Removed == nil {
 			break
 		}
 
 		return e.complexity.LabelChangeTimelineItem.Removed(childComplexity), true
 
-	case "Mutation.AddComment":
+	case "Mutation.addComment":
 		if e.complexity.Mutation.AddComment == nil {
 			break
 		}
@@ -1045,7 +1046,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 
 		return e.complexity.Mutation.AddComment(childComplexity, args["repoRef"].(*string), args["prefix"].(string), args["message"].(string), args["files"].([]git.Hash)), true
 
-	case "Mutation.ChangeLabels":
+	case "Mutation.changeLabels":
 		if e.complexity.Mutation.ChangeLabels == nil {
 			break
 		}
@@ -1057,7 +1058,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 
 		return e.complexity.Mutation.ChangeLabels(childComplexity, args["repoRef"].(*string), args["prefix"].(string), args["added"].([]string), args["removed"].([]string)), true
 
-	case "Mutation.Close":
+	case "Mutation.close":
 		if e.complexity.Mutation.Close == nil {
 			break
 		}
@@ -1069,7 +1070,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 
 		return e.complexity.Mutation.Close(childComplexity, args["repoRef"].(*string), args["prefix"].(string)), true
 
-	case "Mutation.Commit":
+	case "Mutation.commit":
 		if e.complexity.Mutation.Commit == nil {
 			break
 		}
@@ -1081,7 +1082,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 
 		return e.complexity.Mutation.Commit(childComplexity, args["repoRef"].(*string), args["prefix"].(string)), true
 
-	case "Mutation.NewBug":
+	case "Mutation.newBug":
 		if e.complexity.Mutation.NewBug == nil {
 			break
 		}
@@ -1093,7 +1094,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 
 		return e.complexity.Mutation.NewBug(childComplexity, args["repoRef"].(*string), args["title"].(string), args["message"].(string), args["files"].([]git.Hash)), true
 
-	case "Mutation.Open":
+	case "Mutation.open":
 		if e.complexity.Mutation.Open == nil {
 			break
 		}
@@ -1105,7 +1106,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 
 		return e.complexity.Mutation.Open(childComplexity, args["repoRef"].(*string), args["prefix"].(string)), true
 
-	case "Mutation.SetTitle":
+	case "Mutation.setTitle":
 		if e.complexity.Mutation.SetTitle == nil {
 			break
 		}
@@ -1117,84 +1118,84 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 
 		return e.complexity.Mutation.SetTitle(childComplexity, args["repoRef"].(*string), args["prefix"].(string), args["title"].(string)), true
 
-	case "OperationConnection.Edges":
+	case "OperationConnection.edges":
 		if e.complexity.OperationConnection.Edges == nil {
 			break
 		}
 
 		return e.complexity.OperationConnection.Edges(childComplexity), true
 
-	case "OperationConnection.Nodes":
+	case "OperationConnection.nodes":
 		if e.complexity.OperationConnection.Nodes == nil {
 			break
 		}
 
 		return e.complexity.OperationConnection.Nodes(childComplexity), true
 
-	case "OperationConnection.PageInfo":
+	case "OperationConnection.pageInfo":
 		if e.complexity.OperationConnection.PageInfo == nil {
 			break
 		}
 
 		return e.complexity.OperationConnection.PageInfo(childComplexity), true
 
-	case "OperationConnection.TotalCount":
+	case "OperationConnection.totalCount":
 		if e.complexity.OperationConnection.TotalCount == nil {
 			break
 		}
 
 		return e.complexity.OperationConnection.TotalCount(childComplexity), true
 
-	case "OperationEdge.Cursor":
+	case "OperationEdge.cursor":
 		if e.complexity.OperationEdge.Cursor == nil {
 			break
 		}
 
 		return e.complexity.OperationEdge.Cursor(childComplexity), true
 
-	case "OperationEdge.Node":
+	case "OperationEdge.node":
 		if e.complexity.OperationEdge.Node == nil {
 			break
 		}
 
 		return e.complexity.OperationEdge.Node(childComplexity), true
 
-	case "PageInfo.EndCursor":
+	case "PageInfo.endCursor":
 		if e.complexity.PageInfo.EndCursor == nil {
 			break
 		}
 
 		return e.complexity.PageInfo.EndCursor(childComplexity), true
 
-	case "PageInfo.HasNextPage":
+	case "PageInfo.hasNextPage":
 		if e.complexity.PageInfo.HasNextPage == nil {
 			break
 		}
 
 		return e.complexity.PageInfo.HasNextPage(childComplexity), true
 
-	case "PageInfo.HasPreviousPage":
+	case "PageInfo.hasPreviousPage":
 		if e.complexity.PageInfo.HasPreviousPage == nil {
 			break
 		}
 
 		return e.complexity.PageInfo.HasPreviousPage(childComplexity), true
 
-	case "PageInfo.StartCursor":
+	case "PageInfo.startCursor":
 		if e.complexity.PageInfo.StartCursor == nil {
 			break
 		}
 
 		return e.complexity.PageInfo.StartCursor(childComplexity), true
 
-	case "Query.DefaultRepository":
+	case "Query.defaultRepository":
 		if e.complexity.Query.DefaultRepository == nil {
 			break
 		}
 
 		return e.complexity.Query.DefaultRepository(childComplexity), true
 
-	case "Query.Repository":
+	case "Query.repository":
 		if e.complexity.Query.Repository == nil {
 			break
 		}
@@ -1206,7 +1207,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 
 		return e.complexity.Query.Repository(childComplexity, args["id"].(string)), true
 
-	case "Repository.AllBugs":
+	case "Repository.allBugs":
 		if e.complexity.Repository.AllBugs == nil {
 			break
 		}
@@ -1218,7 +1219,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 
 		return e.complexity.Repository.AllBugs(childComplexity, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int), args["query"].(*string)), true
 
-	case "Repository.AllIdentities":
+	case "Repository.allIdentities":
 		if e.complexity.Repository.AllIdentities == nil {
 			break
 		}
@@ -1230,7 +1231,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 
 		return e.complexity.Repository.AllIdentities(childComplexity, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int)), true
 
-	case "Repository.Bug":
+	case "Repository.bug":
 		if e.complexity.Repository.Bug == nil {
 			break
 		}
@@ -1242,7 +1243,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 
 		return e.complexity.Repository.Bug(childComplexity, args["prefix"].(string)), true
 
-	case "Repository.Identity":
+	case "Repository.identity":
 		if e.complexity.Repository.Identity == nil {
 			break
 		}
@@ -1254,182 +1255,182 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 
 		return e.complexity.Repository.Identity(childComplexity, args["prefix"].(string)), true
 
-	case "Repository.UserIdentity":
+	case "Repository.userIdentity":
 		if e.complexity.Repository.UserIdentity == nil {
 			break
 		}
 
 		return e.complexity.Repository.UserIdentity(childComplexity), true
 
-	case "Repository.ValidLabels":
+	case "Repository.validLabels":
 		if e.complexity.Repository.ValidLabels == nil {
 			break
 		}
 
 		return e.complexity.Repository.ValidLabels(childComplexity), true
 
-	case "SetStatusOperation.Author":
+	case "SetStatusOperation.author":
 		if e.complexity.SetStatusOperation.Author == nil {
 			break
 		}
 
 		return e.complexity.SetStatusOperation.Author(childComplexity), true
 
-	case "SetStatusOperation.Date":
+	case "SetStatusOperation.date":
 		if e.complexity.SetStatusOperation.Date == nil {
 			break
 		}
 
 		return e.complexity.SetStatusOperation.Date(childComplexity), true
 
-	case "SetStatusOperation.Hash":
+	case "SetStatusOperation.hash":
 		if e.complexity.SetStatusOperation.Hash == nil {
 			break
 		}
 
 		return e.complexity.SetStatusOperation.Hash(childComplexity), true
 
-	case "SetStatusOperation.Status":
+	case "SetStatusOperation.status":
 		if e.complexity.SetStatusOperation.Status == nil {
 			break
 		}
 
 		return e.complexity.SetStatusOperation.Status(childComplexity), true
 
-	case "SetStatusTimelineItem.Author":
+	case "SetStatusTimelineItem.author":
 		if e.complexity.SetStatusTimelineItem.Author == nil {
 			break
 		}
 
 		return e.complexity.SetStatusTimelineItem.Author(childComplexity), true
 
-	case "SetStatusTimelineItem.Date":
+	case "SetStatusTimelineItem.date":
 		if e.complexity.SetStatusTimelineItem.Date == nil {
 			break
 		}
 
 		return e.complexity.SetStatusTimelineItem.Date(childComplexity), true
 
-	case "SetStatusTimelineItem.Hash":
+	case "SetStatusTimelineItem.hash":
 		if e.complexity.SetStatusTimelineItem.Hash == nil {
 			break
 		}
 
 		return e.complexity.SetStatusTimelineItem.Hash(childComplexity), true
 
-	case "SetStatusTimelineItem.Status":
+	case "SetStatusTimelineItem.status":
 		if e.complexity.SetStatusTimelineItem.Status == nil {
 			break
 		}
 
 		return e.complexity.SetStatusTimelineItem.Status(childComplexity), true
 
-	case "SetTitleOperation.Author":
+	case "SetTitleOperation.author":
 		if e.complexity.SetTitleOperation.Author == nil {
 			break
 		}
 
 		return e.complexity.SetTitleOperation.Author(childComplexity), true
 
-	case "SetTitleOperation.Date":
+	case "SetTitleOperation.date":
 		if e.complexity.SetTitleOperation.Date == nil {
 			break
 		}
 
 		return e.complexity.SetTitleOperation.Date(childComplexity), true
 
-	case "SetTitleOperation.Hash":
+	case "SetTitleOperation.hash":
 		if e.complexity.SetTitleOperation.Hash == nil {
 			break
 		}
 
 		return e.complexity.SetTitleOperation.Hash(childComplexity), true
 
-	case "SetTitleOperation.Title":
+	case "SetTitleOperation.title":
 		if e.complexity.SetTitleOperation.Title == nil {
 			break
 		}
 
 		return e.complexity.SetTitleOperation.Title(childComplexity), true
 
-	case "SetTitleOperation.Was":
+	case "SetTitleOperation.was":
 		if e.complexity.SetTitleOperation.Was == nil {
 			break
 		}
 
 		return e.complexity.SetTitleOperation.Was(childComplexity), true
 
-	case "SetTitleTimelineItem.Author":
+	case "SetTitleTimelineItem.author":
 		if e.complexity.SetTitleTimelineItem.Author == nil {
 			break
 		}
 
 		return e.complexity.SetTitleTimelineItem.Author(childComplexity), true
 
-	case "SetTitleTimelineItem.Date":
+	case "SetTitleTimelineItem.date":
 		if e.complexity.SetTitleTimelineItem.Date == nil {
 			break
 		}
 
 		return e.complexity.SetTitleTimelineItem.Date(childComplexity), true
 
-	case "SetTitleTimelineItem.Hash":
+	case "SetTitleTimelineItem.hash":
 		if e.complexity.SetTitleTimelineItem.Hash == nil {
 			break
 		}
 
 		return e.complexity.SetTitleTimelineItem.Hash(childComplexity), true
 
-	case "SetTitleTimelineItem.Title":
+	case "SetTitleTimelineItem.title":
 		if e.complexity.SetTitleTimelineItem.Title == nil {
 			break
 		}
 
 		return e.complexity.SetTitleTimelineItem.Title(childComplexity), true
 
-	case "SetTitleTimelineItem.Was":
+	case "SetTitleTimelineItem.was":
 		if e.complexity.SetTitleTimelineItem.Was == nil {
 			break
 		}
 
 		return e.complexity.SetTitleTimelineItem.Was(childComplexity), true
 
-	case "TimelineItemConnection.Edges":
+	case "TimelineItemConnection.edges":
 		if e.complexity.TimelineItemConnection.Edges == nil {
 			break
 		}
 
 		return e.complexity.TimelineItemConnection.Edges(childComplexity), true
 
-	case "TimelineItemConnection.Nodes":
+	case "TimelineItemConnection.nodes":
 		if e.complexity.TimelineItemConnection.Nodes == nil {
 			break
 		}
 
 		return e.complexity.TimelineItemConnection.Nodes(childComplexity), true
 
-	case "TimelineItemConnection.PageInfo":
+	case "TimelineItemConnection.pageInfo":
 		if e.complexity.TimelineItemConnection.PageInfo == nil {
 			break
 		}
 
 		return e.complexity.TimelineItemConnection.PageInfo(childComplexity), true
 
-	case "TimelineItemConnection.TotalCount":
+	case "TimelineItemConnection.totalCount":
 		if e.complexity.TimelineItemConnection.TotalCount == nil {
 			break
 		}
 
 		return e.complexity.TimelineItemConnection.TotalCount(childComplexity), true
 
-	case "TimelineItemEdge.Cursor":
+	case "TimelineItemEdge.cursor":
 		if e.complexity.TimelineItemEdge.Cursor == nil {
 			break
 		}
 
 		return e.complexity.TimelineItemEdge.Cursor(childComplexity), true
 
-	case "TimelineItemEdge.Node":
+	case "TimelineItemEdge.node":
 		if e.complexity.TimelineItemEdge.Node == nil {
 			break
 		}
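
Besides the new sync/atomic import, the regenerated complexity switch is now keyed by the GraphQL (camelCase) field names rather than the Go field names. Registering a per-field complexity function is unaffected: it still goes through the Go-named ComplexityRoot fields. A hedged sketch, assuming the usual gqlgen wiring where the generated graph.Config carries a Complexity root and rootResolver stands in for the project's resolver root:

    cfg := graph.Config{Resolvers: rootResolver} // rootResolver: hypothetical ResolverRoot value
    cfg.Complexity.Bug.Comments = func(childComplexity int, after *string, before *string, first *int, last *int) int {
        // Charge paginated fields in proportion to the requested page size.
        limit := 50
        if first != nil {
            limit = *first
        }
        return childComplexity * limit
    }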

graphql/models/gen_models.go 🔗

@@ -19,10 +19,10 @@ type Authored interface {
 // The connection type for Bug.
 type BugConnection struct {
 	// A list of edges.
-	Edges []BugEdge      `json:"edges"`
-	Nodes []bug.Snapshot `json:"nodes"`
+	Edges []*BugEdge      `json:"edges"`
+	Nodes []*bug.Snapshot `json:"nodes"`
 	// Information to aid in pagination.
-	PageInfo PageInfo `json:"pageInfo"`
+	PageInfo *PageInfo `json:"pageInfo"`
 	// Identifies the total count of items in the connection.
 	TotalCount int `json:"totalCount"`
 }
@@ -32,25 +32,25 @@ type BugEdge struct {
 	// A cursor for use in pagination.
 	Cursor string `json:"cursor"`
 	// The item at the end of the edge.
-	Node bug.Snapshot `json:"node"`
+	Node *bug.Snapshot `json:"node"`
 }
 
 type CommentConnection struct {
-	Edges      []CommentEdge `json:"edges"`
-	Nodes      []bug.Comment `json:"nodes"`
-	PageInfo   PageInfo      `json:"pageInfo"`
-	TotalCount int           `json:"totalCount"`
+	Edges      []*CommentEdge `json:"edges"`
+	Nodes      []*bug.Comment `json:"nodes"`
+	PageInfo   *PageInfo      `json:"pageInfo"`
+	TotalCount int            `json:"totalCount"`
 }
 
 type CommentEdge struct {
-	Cursor string      `json:"cursor"`
-	Node   bug.Comment `json:"node"`
+	Cursor string       `json:"cursor"`
+	Node   *bug.Comment `json:"node"`
 }
 
 type IdentityConnection struct {
-	Edges      []IdentityEdge       `json:"edges"`
+	Edges      []*IdentityEdge      `json:"edges"`
 	Nodes      []identity.Interface `json:"nodes"`
-	PageInfo   PageInfo             `json:"pageInfo"`
+	PageInfo   *PageInfo            `json:"pageInfo"`
 	TotalCount int                  `json:"totalCount"`
 }
 
@@ -61,10 +61,10 @@ type IdentityEdge struct {
 
 // The connection type for an Operation
 type OperationConnection struct {
-	Edges      []OperationEdge `json:"edges"`
-	Nodes      []bug.Operation `json:"nodes"`
-	PageInfo   PageInfo        `json:"pageInfo"`
-	TotalCount int             `json:"totalCount"`
+	Edges      []*OperationEdge `json:"edges"`
+	Nodes      []bug.Operation  `json:"nodes"`
+	PageInfo   *PageInfo        `json:"pageInfo"`
+	TotalCount int              `json:"totalCount"`
 }
 
 // Represent an Operation
@@ -87,10 +87,10 @@ type PageInfo struct {
 
 // The connection type for TimelineItem
 type TimelineItemConnection struct {
-	Edges      []TimelineItemEdge `json:"edges"`
-	Nodes      []bug.TimelineItem `json:"nodes"`
-	PageInfo   PageInfo           `json:"pageInfo"`
-	TotalCount int                `json:"totalCount"`
+	Edges      []*TimelineItemEdge `json:"edges"`
+	Nodes      []bug.TimelineItem  `json:"nodes"`
+	PageInfo   *PageInfo           `json:"pageInfo"`
+	TotalCount int                 `json:"totalCount"`
 }
 
 // Represent a TimelineItem
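
The regenerated models switch edges, struct-valued nodes and PageInfo to pointers, matching the new gqlgen binding rules. A minimal sketch of building one of these values by hand under the new shapes:

    snap := &bug.Snapshot{} // placeholder snapshot, for illustration only
    conn := &models.BugConnection{
        Edges:      []*models.BugEdge{{Cursor: "cursor-1", Node: snap}},
        Nodes:      []*bug.Snapshot{snap},
        PageInfo:   &models.PageInfo{},
        TotalCount: 1,
    }
    _ = conn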

graphql/resolvers/bug.go 🔗

@@ -29,15 +29,19 @@ func (bugResolver) Comments(ctx context.Context, obj *bug.Snapshot, after *strin
 
 	edger := func(comment bug.Comment, offset int) connections.Edge {
 		return models.CommentEdge{
-			Node:   comment,
+			Node:   &comment,
 			Cursor: connections.OffsetToCursor(offset),
 		}
 	}
 
-	conMaker := func(edges []models.CommentEdge, nodes []bug.Comment, info models.PageInfo, totalCount int) (*models.CommentConnection, error) {
+	conMaker := func(edges []*models.CommentEdge, nodes []bug.Comment, info *models.PageInfo, totalCount int) (*models.CommentConnection, error) {
+		var commentNodes []*bug.Comment
+		for _, c := range nodes {
+			commentNodes = append(commentNodes, &c)
+		}
 		return &models.CommentConnection{
 			Edges:      edges,
-			Nodes:      nodes,
+			Nodes:      commentNodes,
 			PageInfo:   info,
 			TotalCount: totalCount,
 		}, nil
@@ -61,7 +65,7 @@ func (bugResolver) Operations(ctx context.Context, obj *bug.Snapshot, after *str
 		}
 	}
 
-	conMaker := func(edges []models.OperationEdge, nodes []bug.Operation, info models.PageInfo, totalCount int) (*models.OperationConnection, error) {
+	conMaker := func(edges []*models.OperationEdge, nodes []bug.Operation, info *models.PageInfo, totalCount int) (*models.OperationConnection, error) {
 		return &models.OperationConnection{
 			Edges:      edges,
 			Nodes:      nodes,
@@ -88,7 +92,7 @@ func (bugResolver) Timeline(ctx context.Context, obj *bug.Snapshot, after *strin
 		}
 	}
 
-	conMaker := func(edges []models.TimelineItemEdge, nodes []bug.TimelineItem, info models.PageInfo, totalCount int) (*models.TimelineItemConnection, error) {
+	conMaker := func(edges []*models.TimelineItemEdge, nodes []bug.TimelineItem, info *models.PageInfo, totalCount int) (*models.TimelineItemConnection, error) {
 		return &models.TimelineItemConnection{
 			Edges:      edges,
 			Nodes:      nodes,
@@ -120,7 +124,7 @@ func (bugResolver) Actors(ctx context.Context, obj *bug.Snapshot, after *string,
 		}
 	}
 
-	conMaker := func(edges []models.IdentityEdge, nodes []identity.Interface, info models.PageInfo, totalCount int) (*models.IdentityConnection, error) {
+	conMaker := func(edges []*models.IdentityEdge, nodes []identity.Interface, info *models.PageInfo, totalCount int) (*models.IdentityConnection, error) {
 		return &models.IdentityConnection{
 			Edges:      edges,
 			Nodes:      nodes,
@@ -147,7 +151,7 @@ func (bugResolver) Participants(ctx context.Context, obj *bug.Snapshot, after *s
 		}
 	}
 
-	conMaker := func(edges []models.IdentityEdge, nodes []identity.Interface, info models.PageInfo, totalCount int) (*models.IdentityConnection, error) {
+	conMaker := func(edges []*models.IdentityEdge, nodes []identity.Interface, info *models.PageInfo, totalCount int) (*models.IdentityConnection, error) {
 		return &models.IdentityConnection{
 			Edges:      edges,
 			Nodes:      nodes,
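
Because CommentConnection.Nodes is now []*bug.Comment while the paginator still hands the conMaker a []bug.Comment, the resolver has to convert the slice. For reference, an index-based sketch of that conversion, which gives each node its own address:

    commentNodes := make([]*bug.Comment, len(nodes))
    for i := range nodes {
        commentNodes[i] = &nodes[i]
    }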

graphql/resolvers/repo.go 🔗

@@ -46,9 +46,9 @@ func (repoResolver) AllBugs(ctx context.Context, obj *models.Repository, after *
 	}
 
 	// The conMaker will finally load and compile bugs from git to replace the selected edges
-	conMaker := func(lazyBugEdges []connections.LazyBugEdge, lazyNode []string, info models.PageInfo, totalCount int) (*models.BugConnection, error) {
-		edges := make([]models.BugEdge, len(lazyBugEdges))
-		nodes := make([]bug.Snapshot, len(lazyBugEdges))
+	conMaker := func(lazyBugEdges []*connections.LazyBugEdge, lazyNode []string, info *models.PageInfo, totalCount int) (*models.BugConnection, error) {
+		edges := make([]*models.BugEdge, len(lazyBugEdges))
+		nodes := make([]*bug.Snapshot, len(lazyBugEdges))
 
 		for i, lazyBugEdge := range lazyBugEdges {
 			b, err := obj.Repo.ResolveBug(lazyBugEdge.Id)
@@ -59,11 +59,11 @@ func (repoResolver) AllBugs(ctx context.Context, obj *models.Repository, after *
 
 			snap := b.Snapshot()
 
-			edges[i] = models.BugEdge{
+			edges[i] = &models.BugEdge{
 				Cursor: lazyBugEdge.Cursor,
-				Node:   *snap,
+				Node:   snap,
 			}
-			nodes[i] = *snap
+			nodes[i] = snap
 		}
 
 		return &models.BugConnection{
@@ -107,8 +107,8 @@ func (repoResolver) AllIdentities(ctx context.Context, obj *models.Repository, a
 	}
 
 	// The conMaker will finally load and compile identities from git to replace the selected edges
-	conMaker := func(lazyIdentityEdges []connections.LazyIdentityEdge, lazyNode []string, info models.PageInfo, totalCount int) (*models.IdentityConnection, error) {
-		edges := make([]models.IdentityEdge, len(lazyIdentityEdges))
+	conMaker := func(lazyIdentityEdges []*connections.LazyIdentityEdge, lazyNode []string, info *models.PageInfo, totalCount int) (*models.IdentityConnection, error) {
+		edges := make([]*models.IdentityEdge, len(lazyIdentityEdges))
 		nodes := make([]identity.Interface, len(lazyIdentityEdges))
 
 		for k, lazyIdentityEdge := range lazyIdentityEdges {
@@ -120,9 +120,9 @@ func (repoResolver) AllIdentities(ctx context.Context, obj *models.Repository, a
 
 			ii := identity.Interface(i.Identity)
 
-			edges[k] = models.IdentityEdge{
+			edges[k] = &models.IdentityEdge{
 				Cursor: lazyIdentityEdge.Cursor,
-				Node:   ii,
+				Node:   i.Identity,
 			}
 			nodes[k] = ii
 		}

vendor/github.com/99designs/gqlgen/codegen/complexity.go 🔗

@@ -1,10 +1,10 @@
 package codegen
 
-func (o *Object) UniqueFields() map[string]*Field {
-	m := map[string]*Field{}
+func (o *Object) UniqueFields() map[string][]*Field {
+	m := map[string][]*Field{}
 
 	for _, f := range o.Fields {
-		m[f.GoFieldName] = f
+		m[f.GoFieldName] = append(m[f.GoFieldName], f)
 	}
 
 	return m
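
UniqueFields now keeps every field that shares a Go field name instead of only the last one seen; the template change further below (codegen/generated!.gotpl) consumes the grouped slice to emit a single complexity case per Go field, listing all of its GraphQL names as labels.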

vendor/github.com/99designs/gqlgen/codegen/config/binder.go 🔗

@@ -238,25 +238,6 @@ func (t *TypeReference) IsScalar() bool {
 	return t.Definition.Kind == ast.Scalar
 }
 
-func (t *TypeReference) HasIsZero() bool {
-	it := t.GO
-	if ptr, isPtr := it.(*types.Pointer); isPtr {
-		it = ptr.Elem()
-	}
-	namedType, ok := it.(*types.Named)
-	if !ok {
-		return false
-	}
-
-	for i := 0; i < namedType.NumMethods(); i++ {
-		switch namedType.Method(i).Name() {
-		case "IsZero":
-			return true
-		}
-	}
-	return false
-}
-
 func (t *TypeReference) UniquenessKey() string {
 	var nullability = "O"
 	if t.GQL.NonNull {
@@ -368,7 +349,7 @@ func (b *Binder) TypeReference(schemaType *ast.Type, bindTarget types.Type) (ret
 		} else if hasMethod(obj.Type(), "MarshalGQL") && hasMethod(obj.Type(), "UnmarshalGQL") {
 			ref.GO = obj.Type()
 			ref.IsMarshaler = true
-		} else if underlying := basicUnderlying(obj.Type()); underlying != nil && underlying.Kind() == types.String {
+		} else if underlying := basicUnderlying(obj.Type()); def.IsLeafType() && underlying != nil && underlying.Kind() == types.String {
 			// Special case for named types wrapping strings. Used by default enum implementations.
 
 			ref.GO = obj.Type()
@@ -402,7 +383,11 @@ func (b *Binder) TypeReference(schemaType *ast.Type, bindTarget types.Type) (ret
 
 func (b *Binder) CopyModifiersFromAst(t *ast.Type, base types.Type) types.Type {
 	if t.Elem != nil {
-		return types.NewSlice(b.CopyModifiersFromAst(t.Elem, base))
+		child := b.CopyModifiersFromAst(t.Elem, base)
+		if _, isStruct := child.Underlying().(*types.Struct); isStruct {
+			child = types.NewPointer(child)
+		}
+		return types.NewSlice(child)
 	}
 
 	var isInterface bool
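
This CopyModifiersFromAst change is the root cause of the pointer-slice churn in the generated files above: when a list's element type maps to a Go struct, the binder now wraps the element in a pointer before building the slice. A small sketch of the resulting construction, using the standard go/types package (illustrative only):

    // sliceFor mirrors the new behaviour: []CommentEdge becomes []*CommentEdge.
    func sliceFor(elem types.Type) types.Type {
        if _, isStruct := elem.Underlying().(*types.Struct); isStruct {
            elem = types.NewPointer(elem)
        }
        return types.NewSlice(elem)
    }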

vendor/github.com/99designs/gqlgen/codegen/config/config.go 🔗

@@ -136,7 +136,7 @@ func (c *PackageConfig) normalize() error {
 	// If Package is not set, first attempt to load the package at the output dir. If that fails
 	// fallback to just the base dir name of the output filename.
 	if c.Package == "" {
-		c.Package = code.NameForPackage(c.ImportPath())
+		c.Package = code.NameForDir(c.Dir())
 	}
 
 	return nil
@@ -363,8 +363,10 @@ func (c *Config) InjectBuiltins(s *ast.Schema) {
 
 	// These are additional types that are injected if defined in the schema as scalars.
 	extraBuiltins := TypeMap{
-		"Time": {Model: StringList{"github.com/99designs/gqlgen/graphql.Time"}},
-		"Map":  {Model: StringList{"github.com/99designs/gqlgen/graphql.Map"}},
+		"Time":   {Model: StringList{"github.com/99designs/gqlgen/graphql.Time"}},
+		"Map":    {Model: StringList{"github.com/99designs/gqlgen/graphql.Map"}},
+		"Upload": {Model: StringList{"github.com/99designs/gqlgen/graphql.Upload"}},
+		"Any":    {Model: StringList{"github.com/99designs/gqlgen/graphql.Any"}},
 	}
 
 	for typeName, entry := range extraBuiltins {

vendor/github.com/99designs/gqlgen/codegen/generated!.gotpl 🔗

@@ -4,6 +4,7 @@
 {{ reserveImport "strconv"  }}
 {{ reserveImport "time"  }}
 {{ reserveImport "sync"  }}
+{{ reserveImport "sync/atomic" }}
 {{ reserveImport "errors"  }}
 {{ reserveImport "bytes"  }}
 
@@ -46,7 +47,8 @@ type ComplexityRoot struct {
 {{ range $object := .Objects }}
 	{{ if not $object.IsReserved -}}
 		{{ $object.Name|go }} struct {
-		{{ range $field := $object.UniqueFields -}}
+		{{ range $_, $fields := $object.UniqueFields }}
+			{{- $field := index $fields 0 -}}
 			{{ if not $field.IsReserved -}}
 				{{ $field.GoFieldName }} {{ $field.ComplexitySignature }}
 			{{ end }}
@@ -84,20 +86,25 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 	switch typeName + "." + field {
 	{{ range $object := .Objects }}
 		{{ if not $object.IsReserved }}
-			{{ range $field := $object.UniqueFields }}
-				{{ if not $field.IsReserved }}
-					case "{{$object.Name}}.{{$field.GoFieldName}}":
-						if e.complexity.{{$object.Name|go}}.{{$field.GoFieldName}} == nil {
-							break
-						}
-						{{ if $field.Args }}
-							args, err := ec.{{ $field.ArgsFunc }}(context.TODO(),rawArgs)
-							if err != nil {
-								return 0, false
+			{{ range $_, $fields := $object.UniqueFields }}
+				{{- $len := len $fields }}
+				{{- range $i, $field := $fields }}
+					{{- $last := eq (add $i 1) $len }}
+					{{- if not $field.IsReserved }}
+						{{- if eq $i 0 }}case {{ end }}"{{$object.Name}}.{{$field.Name}}"{{ if not $last }},{{ else }}:
+							if e.complexity.{{$object.Name|go}}.{{$field.GoFieldName}} == nil {
+								break
 							}
+							{{ if $field.Args }}
+								args, err := ec.{{ $field.ArgsFunc }}(context.TODO(),rawArgs)
+								if err != nil {
+									return 0, false
+								}
+							{{ end }}
+							return e.complexity.{{$object.Name|go}}.{{$field.GoFieldName}}(childComplexity{{if $field.Args}}, {{$field.ComplexityArgs}} {{ end }}), true
 						{{ end }}
-						return e.complexity.{{$object.Name|go}}.{{$field.GoFieldName}}(childComplexity{{if $field.Args}}, {{$field.ComplexityArgs}} {{end}}), true
-				{{ end }}
+					{{- end }}
+				{{- end }}
 			{{ end }}
 		{{ end }}
 	{{ end }}

vendor/github.com/99designs/gqlgen/codegen/object.gotpl 🔗

@@ -4,7 +4,7 @@ var {{ $object.Name|lcFirst}}Implementors = {{$object.Implementors}}
 
 {{- if .Stream }}
 func (ec *executionContext) _{{$object.Name}}(ctx context.Context, sel ast.SelectionSet) func() graphql.Marshaler {
-	fields := graphql.CollectFields(ctx, sel, {{$object.Name|lcFirst}}Implementors)
+	fields := graphql.CollectFields(ec.RequestContext, sel, {{$object.Name|lcFirst}}Implementors)
 	ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
 		Object: {{$object.Name|quote}},
 	})
@@ -24,7 +24,7 @@ func (ec *executionContext) _{{$object.Name}}(ctx context.Context, sel ast.Selec
 }
 {{- else }}
 func (ec *executionContext) _{{$object.Name}}(ctx context.Context, sel ast.SelectionSet{{ if not $object.Root }},obj {{$object.Reference | ref }}{{ end }}) graphql.Marshaler {
-	fields := graphql.CollectFields(ctx, sel, {{$object.Name|lcFirst}}Implementors)
+	fields := graphql.CollectFields(ec.RequestContext, sel, {{$object.Name|lcFirst}}Implementors)
 	{{if $object.Root}}
 		ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
 			Object: {{$object.Name|quote}},
@@ -32,7 +32,7 @@ func (ec *executionContext) _{{$object.Name}}(ctx context.Context, sel ast.Selec
 	{{end}}
 
 	out := graphql.NewFieldSet(fields)
-	invalid := false
+	var invalids uint32
 	for i, field := range fields {
 		switch field.Name {
 		case "__typename":
@@ -50,7 +50,11 @@ func (ec *executionContext) _{{$object.Name}}(ctx context.Context, sel ast.Selec
 					res = ec._{{$object.Name}}_{{$field.Name}}(ctx, field{{if not $object.Root}}, obj{{end}})
 					{{- if $field.TypeReference.GQL.NonNull }}
 						if res == graphql.Null {
-							invalid = true
+							{{- if $object.IsConcurrent }}
+								atomic.AddUint32(&invalids, 1)
+							{{- else }}
+								invalids++
+							{{- end }}
 						}
 					{{- end }}
 					return res
@@ -59,7 +63,11 @@ func (ec *executionContext) _{{$object.Name}}(ctx context.Context, sel ast.Selec
 				out.Values[i] = ec._{{$object.Name}}_{{$field.Name}}(ctx, field{{if not $object.Root}}, obj{{end}})
 				{{- if $field.TypeReference.GQL.NonNull }}
 					if out.Values[i] == graphql.Null {
-						invalid = true
+						{{- if $object.IsConcurrent }}
+							atomic.AddUint32(&invalids, 1)
+						{{- else }}
+							invalids++
+						{{- end }}
 					}
 				{{- end }}
 			{{- end }}
@@ -69,7 +77,7 @@ func (ec *executionContext) _{{$object.Name}}(ctx context.Context, sel ast.Selec
 		}
 	}
 	out.Dispatch()
-	if invalid { return graphql.Null }
+	if invalids > 0 { return graphql.Null }
 	return out
 }
 {{- end }}
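
Swapping the shared invalid bool for an atomic counter fixes a data race: concurrent objects resolve their fields from several goroutines, and the old flag was written without synchronization. A standalone analogue of the new pattern, with the resolver work stubbed out by a trivial condition:

    package main

    import (
        "fmt"
        "sync"
        "sync/atomic"
    )

    func main() {
        var invalids uint32 // was: invalid := false, written from many goroutines
        var wg sync.WaitGroup
        for i := 0; i < 8; i++ {
            wg.Add(1)
            go func(i int) {
                defer wg.Done()
                returnedNull := i%3 == 0 // stand-in for "non-null field resolved to graphql.Null"
                if returnedNull {
                    atomic.AddUint32(&invalids, 1)
                }
            }(i)
        }
        wg.Wait()
        fmt.Println(invalids > 0) // true: the object as a whole marshals to null
    }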

vendor/github.com/99designs/gqlgen/codegen/templates/templates.go 🔗

@@ -19,18 +19,36 @@ import (
 	"github.com/pkg/errors"
 )
 
+// CurrentImports keeps track of all the import declarations that are needed during the execution of a plugin.
 // this is done with a global because subtemplates currently get called in functions. Lets aim to remove this eventually.
 var CurrentImports *Imports
 
+// Options specify various parameters to rendering a template.
 type Options struct {
-	PackageName     string
+	// PackageName is a helper that specifies the package header declaration.
+	// In other words, when you write the template you don't need to specify `package X`
+	// at the top of the file. By providing PackageName in the Options, the Render
+	// function will do that for you.
+	PackageName string
+	// Template is a string of the entire template that
+	// will be parsed and rendered. If it's empty,
+	// the plugin processor will look for .gotpl files
+	// in the same directory of where you wrote the plugin.
+	Template string
+	// Filename is the name of the file that will be
+	// written to the system disk once the template is rendered.
 	Filename        string
 	RegionTags      bool
 	GeneratedHeader bool
-	Data            interface{}
-	Funcs           template.FuncMap
+	// Data will be passed to the template execution.
+	Data  interface{}
+	Funcs template.FuncMap
 }
 
+// Render renders a gql plugin template from the given Options. Render is an
+// abstraction of the text/template package that makes it easier to write gqlgen
+// plugins. If Options.Template is empty, the Render function will look for `.gotpl`
+// files inside the directory where you wrote the plugin.
 func Render(cfg Options) error {
 	if CurrentImports != nil {
 		panic(fmt.Errorf("recursive or concurrent call to RenderToFile detected"))
@@ -48,31 +66,40 @@ func Render(cfg Options) error {
 	t := template.New("").Funcs(funcs)
 
 	var roots []string
-	// load all the templates in the directory
-	err := filepath.Walk(rootDir, func(path string, info os.FileInfo, err error) error {
+	if cfg.Template != "" {
+		var err error
+		t, err = t.New("template.gotpl").Parse(cfg.Template)
 		if err != nil {
-			return err
-		}
-		name := filepath.ToSlash(strings.TrimPrefix(path, rootDir+string(os.PathSeparator)))
-		if !strings.HasSuffix(info.Name(), ".gotpl") {
-			return nil
-		}
-		b, err := ioutil.ReadFile(path)
-		if err != nil {
-			return err
+			return errors.Wrap(err, "error with provided template")
 		}
+		roots = append(roots, "template.gotpl")
+	} else {
+		// load all the templates in the directory
+		err := filepath.Walk(rootDir, func(path string, info os.FileInfo, err error) error {
+			if err != nil {
+				return err
+			}
+			name := filepath.ToSlash(strings.TrimPrefix(path, rootDir+string(os.PathSeparator)))
+			if !strings.HasSuffix(info.Name(), ".gotpl") {
+				return nil
+			}
+			b, err := ioutil.ReadFile(path)
+			if err != nil {
+				return err
+			}
 
-		t, err = t.New(name).Parse(string(b))
-		if err != nil {
-			return errors.Wrap(err, cfg.Filename)
-		}
+			t, err = t.New(name).Parse(string(b))
+			if err != nil {
+				return errors.Wrap(err, cfg.Filename)
+			}
 
-		roots = append(roots, name)
+			roots = append(roots, name)
 
-		return nil
-	})
-	if err != nil {
-		return errors.Wrap(err, "locating templates")
+			return nil
+		})
+		if err != nil {
+			return errors.Wrap(err, "locating templates")
+		}
 	}
 
 	// then execute all the important looking ones in order, adding them to the same file
@@ -91,7 +118,7 @@ func Render(cfg Options) error {
 		if cfg.RegionTags {
 			buf.WriteString("\n// region    " + center(70, "*", " "+root+" ") + "\n")
 		}
-		err = t.Lookup(root).Execute(&buf, cfg.Data)
+		err := t.Lookup(root).Execute(&buf, cfg.Data)
 		if err != nil {
 			return errors.Wrap(err, root)
 		}
@@ -110,7 +137,7 @@ func Render(cfg Options) error {
 	result.WriteString("import (\n")
 	result.WriteString(CurrentImports.String())
 	result.WriteString(")\n")
-	_, err = buf.WriteTo(&result)
+	_, err := buf.WriteTo(&result)
 	if err != nil {
 		return err
 	}
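
The new Options.Template field lets a plugin embed its template as a string instead of shipping .gotpl files next to the plugin source. A minimal sketch of a plugin-side call; the template body, output filename and helper name are invented, only templates.Render and its Options fields come from the code above:

    import "github.com/99designs/gqlgen/codegen/templates"

    // generateHelpers would sit inside a gqlgen plugin.
    func generateHelpers() error {
        return templates.Render(templates.Options{
            PackageName: "mypackage",            // emitted as the package clause
            Filename:    "mypackage/helpers.go", // where the rendered file is written
            Template:    "{{ range .Names }}// {{ . }} is generated.\n{{ end }}",
            Data:        struct{ Names []string }{Names: []string{"Foo", "Bar"}},
        })
    }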

vendor/github.com/99designs/gqlgen/codegen/type.gotpl 🔗

@@ -56,15 +56,6 @@
 					{{- end }}
 					return graphql.Null
 				}
-			{{- else if $type.HasIsZero }}
-				if v.IsZero() {
-					{{- if $type.GQL.NonNull }}
-						if !ec.HasError(graphql.GetResolverContext(ctx)) {
-							ec.Errorf(ctx, "must not be null")
-						}
-					{{- end }}
-					return graphql.Null
-				}
 			{{- end }}
 
 			{{- if $type.IsSlice }}
@@ -119,6 +110,14 @@
 				{{- else if $type.Marshaler }}
 					{{- if $type.IsPtr }}
 						return ec.{{ $type.Elem.MarshalFunc }}(ctx, sel, *v)
+					{{- else if $type.GQL.NonNull }}
+							res := {{ $type.Marshaler | call }}({{- if $type.CastType }}{{ $type.CastType | ref }}(v){{else}}v{{- end }})
+							if res == graphql.Null {
+								if !ec.HasError(graphql.GetResolverContext(ctx)) {
+									ec.Errorf(ctx, "must not be null")
+								}
+							}
+							return res
 					{{- else }}
 						return {{ $type.Marshaler | call }}({{- if $type.CastType }}{{ $type.CastType | ref }}(v){{else}}v{{- end }})
 					{{- end }}

vendor/github.com/99designs/gqlgen/graphql/any.go 🔗

@@ -0,0 +1,19 @@
+package graphql
+
+import (
+	"encoding/json"
+	"io"
+)
+
+func MarshalAny(v interface{}) Marshaler {
+	return WriterFunc(func(w io.Writer) {
+		err := json.NewEncoder(w).Encode(v)
+		if err != nil {
+			panic(err)
+		}
+	})
+}
+
+func UnmarshalAny(v interface{}) (interface{}, error) {
+	return v, nil
+}
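
MarshalAny simply JSON-encodes whatever value the resolver returns, so a schema-level `scalar Any` bound to github.com/99designs/gqlgen/graphql.Any can carry arbitrary structures. A quick standalone check of the marshaller:

    package main

    import (
        "bytes"
        "fmt"

        "github.com/99designs/gqlgen/graphql"
    )

    func main() {
        var buf bytes.Buffer
        // MarshalAny returns a graphql.Marshaler that JSON-encodes the value.
        graphql.MarshalAny(map[string]interface{}{"answer": 42}).MarshalGQL(&buf)
        fmt.Print(buf.String()) // {"answer":42} followed by json.Encoder's trailing newline
    }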

vendor/github.com/99designs/gqlgen/graphql/context.go 🔗

@@ -132,14 +132,14 @@ func WithResolverContext(ctx context.Context, rc *ResolverContext) context.Conte
 // This is just a convenient wrapper method for CollectFields
 func CollectFieldsCtx(ctx context.Context, satisfies []string) []CollectedField {
 	resctx := GetResolverContext(ctx)
-	return CollectFields(ctx, resctx.Field.Selections, satisfies)
+	return CollectFields(GetRequestContext(ctx), resctx.Field.Selections, satisfies)
 }
 
 // CollectAllFields returns a slice of all GraphQL field names that were selected for the current resolver context.
 // The slice will contain the unique set of all field names requested regardless of fragment type conditions.
 func CollectAllFields(ctx context.Context) []string {
 	resctx := GetResolverContext(ctx)
-	collected := CollectFields(ctx, resctx.Field.Selections, nil)
+	collected := CollectFields(GetRequestContext(ctx), resctx.Field.Selections, nil)
 	uniq := make([]string, 0, len(collected))
 Next:
 	for _, f := range collected {

vendor/github.com/99designs/gqlgen/graphql/exec.go 🔗

@@ -19,12 +19,12 @@ type ExecutableSchema interface {
 // CollectFields returns the set of fields from an ast.SelectionSet where all collected fields satisfy at least one of the GraphQL types
 // passed through satisfies. Providing an empty or nil slice for satisfies will return collect all fields regardless of fragment
 // type conditions.
-func CollectFields(ctx context.Context, selSet ast.SelectionSet, satisfies []string) []CollectedField {
-	return collectFields(GetRequestContext(ctx), selSet, satisfies, map[string]bool{})
+func CollectFields(reqCtx *RequestContext, selSet ast.SelectionSet, satisfies []string) []CollectedField {
+	return collectFields(reqCtx, selSet, satisfies, map[string]bool{})
 }
 
 func collectFields(reqCtx *RequestContext, selSet ast.SelectionSet, satisfies []string, visited map[string]bool) []CollectedField {
-	var groupedFields []CollectedField
+	groupedFields := make([]CollectedField, 0, len(selSet))
 
 	for _, sel := range selSet {
 		switch sel := sel.(type) {
@@ -32,7 +32,7 @@ func collectFields(reqCtx *RequestContext, selSet ast.SelectionSet, satisfies []
 			if !shouldIncludeNode(sel.Directives, reqCtx.Variables) {
 				continue
 			}
-			f := getOrCreateField(&groupedFields, sel.Alias, func() CollectedField {
+			f := getOrCreateAndAppendField(&groupedFields, sel.Alias, func() CollectedField {
 				return CollectedField{Field: sel}
 			})
 
@@ -45,7 +45,7 @@ func collectFields(reqCtx *RequestContext, selSet ast.SelectionSet, satisfies []
 				continue
 			}
 			for _, childField := range collectFields(reqCtx, sel.SelectionSet, satisfies, visited) {
-				f := getOrCreateField(&groupedFields, childField.Name, func() CollectedField { return childField })
+				f := getOrCreateAndAppendField(&groupedFields, childField.Name, func() CollectedField { return childField })
 				f.Selections = append(f.Selections, childField.Selections...)
 			}
 
@@ -70,10 +70,9 @@ func collectFields(reqCtx *RequestContext, selSet ast.SelectionSet, satisfies []
 			}
 
 			for _, childField := range collectFields(reqCtx, fragment.SelectionSet, satisfies, visited) {
-				f := getOrCreateField(&groupedFields, childField.Name, func() CollectedField { return childField })
+				f := getOrCreateAndAppendField(&groupedFields, childField.Name, func() CollectedField { return childField })
 				f.Selections = append(f.Selections, childField.Selections...)
 			}
-
 		default:
 			panic(fmt.Errorf("unsupported %T", sel))
 		}
@@ -97,7 +96,7 @@ func instanceOf(val string, satisfies []string) bool {
 	return false
 }
 
-func getOrCreateField(c *[]CollectedField, name string, creator func() CollectedField) *CollectedField {
+func getOrCreateAndAppendField(c *[]CollectedField, name string, creator func() CollectedField) *CollectedField {
 	for i, cf := range *c {
 		if cf.Alias == name {
 			return &(*c)[i]
@@ -111,6 +110,10 @@ func getOrCreateField(c *[]CollectedField, name string, creator func() Collected
 }
 
 func shouldIncludeNode(directives ast.DirectiveList, variables map[string]interface{}) bool {
+	if len(directives) == 0 {
+		return true
+	}
+
 	skip, include := false, true
 
 	if d := directives.ForName("skip"); d != nil {
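
CollectFields now takes the *RequestContext directly instead of digging it out of a context.Context, so any code that called it by hand needs a one-line change; CollectFieldsCtx keeps the old convenience. Roughly, inside a resolver (ctx and satisfies come from the surrounding code and are not defined here):

    // v0.8.x:
    //   fields := graphql.CollectFields(ctx, resctx.Field.Selections, satisfies)

    // v0.9.0: pass the request context explicitly.
    resctx := graphql.GetResolverContext(ctx)
    fields := graphql.CollectFields(graphql.GetRequestContext(ctx), resctx.Field.Selections, satisfies)

    // Or keep using the convenience wrapper, which is unchanged for callers:
    fields = graphql.CollectFieldsCtx(ctx, satisfies)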

vendor/github.com/99designs/gqlgen/graphql/upload.go 🔗

@@ -0,0 +1,26 @@
+package graphql
+
+import (
+	"fmt"
+	"io"
+)
+
+type Upload struct {
+	File     io.Reader
+	Filename string
+	Size     int64
+}
+
+func MarshalUpload(f Upload) Marshaler {
+	return WriterFunc(func(w io.Writer) {
+		io.Copy(w, f.File)
+	})
+}
+
+func UnmarshalUpload(v interface{}) (Upload, error) {
+	upload, ok := v.(Upload)
+	if !ok {
+		return Upload{}, fmt.Errorf("%T is not an Upload", v)
+	}
+	return upload, nil
+}
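
With this scalar in place, declaring `scalar Upload` in the schema (as the mock further down does) binds to graphql.Upload, and mutation resolvers receive the file through the io.Reader. A hypothetical resolver just to show the shape; mutationResolver and model.File are invented names, only graphql.Upload comes from this diff:

    func (r *mutationResolver) SingleUpload(ctx context.Context, file graphql.Upload) (*model.File, error) {
        // file.File is an io.Reader over the multipart part (or a temp file when the
        // upload was large or referenced several times); Filename and Size are set
        // by the handler from the multipart header.
        data, err := ioutil.ReadAll(file.File)
        if err != nil {
            return nil, err
        }
        _ = data // store it somewhere useful
        return &model.File{ID: 1}, nil
    }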

vendor/github.com/99designs/gqlgen/handler/graphql.go 🔗

@@ -3,16 +3,21 @@ package handler
 import (
 	"context"
 	"encoding/json"
+	"errors"
 	"fmt"
 	"io"
+	"io/ioutil"
+	"mime"
 	"net/http"
+	"os"
+	"strconv"
 	"strings"
 	"time"
 
 	"github.com/99designs/gqlgen/complexity"
 	"github.com/99designs/gqlgen/graphql"
 	"github.com/gorilla/websocket"
-	"github.com/hashicorp/golang-lru"
+	lru "github.com/hashicorp/golang-lru"
 	"github.com/vektah/gqlparser/ast"
 	"github.com/vektah/gqlparser/gqlerror"
 	"github.com/vektah/gqlparser/parser"
@@ -37,6 +42,8 @@ type Config struct {
 	complexityLimitFunc             graphql.ComplexityLimitFunc
 	disableIntrospection            bool
 	connectionKeepAlivePingInterval time.Duration
+	uploadMaxMemory                 int64
+	uploadMaxSize                   int64
 }
 
 func (c *Config) newRequestContext(es graphql.ExecutableSchema, doc *ast.QueryDocument, op *ast.OperationDefinition, query string, variables map[string]interface{}) *graphql.RequestContext {
@@ -251,6 +258,23 @@ func CacheSize(size int) Option {
 	}
 }
 
+// UploadMaxSize sets the maximum number of bytes used to parse a request body
+// as multipart/form-data.
+func UploadMaxSize(size int64) Option {
+	return func(cfg *Config) {
+		cfg.uploadMaxSize = size
+	}
+}
+
+// UploadMaxMemory sets the maximum number of bytes used to parse a request body
+// as multipart/form-data in memory, with the remainder stored on disk in
+// temporary files.
+func UploadMaxMemory(size int64) Option {
+	return func(cfg *Config) {
+		cfg.uploadMaxMemory = size
+	}
+}
+
 // WebsocketKeepAliveDuration allows you to reconfigure the keepalive behavior.
 // By default, keepalive is enabled with a DefaultConnectionKeepAlivePingInterval
 // duration. Set handler.connectionKeepAlivePingInterval = 0 to disable keepalive
@@ -264,9 +288,20 @@ func WebsocketKeepAliveDuration(duration time.Duration) Option {
 const DefaultCacheSize = 1000
 const DefaultConnectionKeepAlivePingInterval = 25 * time.Second
 
+// DefaultUploadMaxMemory is the maximum number of bytes used to parse a request body
+// as multipart/form-data in memory, with the remainder stored on disk in
+// temporary files.
+const DefaultUploadMaxMemory = 32 << 20
+
+// DefaultUploadMaxSize is maximum number of bytes used to parse a request body
+// as multipart/form-data.
+const DefaultUploadMaxSize = 32 << 20
+
 func GraphQL(exec graphql.ExecutableSchema, options ...Option) http.HandlerFunc {
 	cfg := &Config{
 		cacheSize:                       DefaultCacheSize,
+		uploadMaxMemory:                 DefaultUploadMaxMemory,
+		uploadMaxSize:                   DefaultUploadMaxSize,
 		connectionKeepAlivePingInterval: DefaultConnectionKeepAlivePingInterval,
 		upgrader: websocket.Upgrader{
 			ReadBufferSize:  1024,
@@ -335,8 +370,36 @@ func (gh *graphqlHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
 			}
 		}
 	case http.MethodPost:
-		if err := jsonDecode(r.Body, &reqParams); err != nil {
-			sendErrorf(w, http.StatusBadRequest, "json body could not be decoded: "+err.Error())
+		mediaType, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
+		if err != nil {
+			sendErrorf(w, http.StatusBadRequest, "error parsing request Content-Type")
+			return
+		}
+
+		switch mediaType {
+		case "application/json":
+			if err := jsonDecode(r.Body, &reqParams); err != nil {
+				sendErrorf(w, http.StatusBadRequest, "json body could not be decoded: "+err.Error())
+				return
+			}
+
+		case "multipart/form-data":
+			var closers []io.Closer
+			var tmpFiles []string
+			defer func() {
+				for i := len(closers) - 1; 0 <= i; i-- {
+					_ = closers[i].Close()
+				}
+				for _, tmpFile := range tmpFiles {
+					_ = os.Remove(tmpFile)
+				}
+			}()
+			if err := processMultipart(w, r, &reqParams, &closers, &tmpFiles, gh.cfg.uploadMaxSize, gh.cfg.uploadMaxMemory); err != nil {
+				sendErrorf(w, http.StatusBadRequest, "multipart body could not be decoded: "+err.Error())
+				return
+			}
+		default:
+			sendErrorf(w, http.StatusBadRequest, "unsupported Content-Type: "+mediaType)
 			return
 		}
 	default:
@@ -493,3 +556,154 @@ func sendError(w http.ResponseWriter, code int, errors ...*gqlerror.Error) {
 func sendErrorf(w http.ResponseWriter, code int, format string, args ...interface{}) {
 	sendError(w, code, &gqlerror.Error{Message: fmt.Sprintf(format, args...)})
 }
+
+type bytesReader struct {
+	s        *[]byte
+	i        int64 // current reading index
+	prevRune int   // index of previous rune; or < 0
+}
+
+func (r *bytesReader) Read(b []byte) (n int, err error) {
+	if r.s == nil {
+		return 0, errors.New("byte slice pointer is nil")
+	}
+	if r.i >= int64(len(*r.s)) {
+		return 0, io.EOF
+	}
+	r.prevRune = -1
+	n = copy(b, (*r.s)[r.i:])
+	r.i += int64(n)
+	return
+}
+
+func processMultipart(w http.ResponseWriter, r *http.Request, request *params, closers *[]io.Closer, tmpFiles *[]string, uploadMaxSize, uploadMaxMemory int64) error {
+	var err error
+	if r.ContentLength > uploadMaxSize {
+		return errors.New("failed to parse multipart form, request body too large")
+	}
+	r.Body = http.MaxBytesReader(w, r.Body, uploadMaxSize)
+	if err = r.ParseMultipartForm(uploadMaxMemory); err != nil {
+		if strings.Contains(err.Error(), "request body too large") {
+			return errors.New("failed to parse multipart form, request body too large")
+		}
+		return errors.New("failed to parse multipart form")
+	}
+	*closers = append(*closers, r.Body)
+
+	if err = jsonDecode(strings.NewReader(r.Form.Get("operations")), &request); err != nil {
+		return errors.New("operations form field could not be decoded")
+	}
+
+	var uploadsMap = map[string][]string{}
+	if err = json.Unmarshal([]byte(r.Form.Get("map")), &uploadsMap); err != nil {
+		return errors.New("map form field could not be decoded")
+	}
+
+	var upload graphql.Upload
+	for key, paths := range uploadsMap {
+		if len(paths) == 0 {
+			return fmt.Errorf("invalid empty operations paths list for key %s", key)
+		}
+		file, header, err := r.FormFile(key)
+		if err != nil {
+			return fmt.Errorf("failed to get key %s from form", key)
+		}
+		*closers = append(*closers, file)
+
+		if len(paths) == 1 {
+			upload = graphql.Upload{
+				File:     file,
+				Size:     header.Size,
+				Filename: header.Filename,
+			}
+			err = addUploadToOperations(request, upload, key, paths[0])
+			if err != nil {
+				return err
+			}
+		} else {
+			if r.ContentLength < uploadMaxMemory {
+				fileBytes, err := ioutil.ReadAll(file)
+				if err != nil {
+					return fmt.Errorf("failed to read file for key %s", key)
+				}
+				for _, path := range paths {
+					upload = graphql.Upload{
+						File:     &bytesReader{s: &fileBytes, i: 0, prevRune: -1},
+						Size:     header.Size,
+						Filename: header.Filename,
+					}
+					err = addUploadToOperations(request, upload, key, path)
+					if err != nil {
+						return err
+					}
+				}
+			} else {
+				tmpFile, err := ioutil.TempFile(os.TempDir(), "gqlgen-")
+				if err != nil {
+					return fmt.Errorf("failed to create temp file for key %s", key)
+				}
+				tmpName := tmpFile.Name()
+				*tmpFiles = append(*tmpFiles, tmpName)
+				_, err = io.Copy(tmpFile, file)
+				if err != nil {
+					if err := tmpFile.Close(); err != nil {
+						return fmt.Errorf("failed to copy to temp file and close temp file for key %s", key)
+					}
+					return fmt.Errorf("failed to copy to temp file for key %s", key)
+				}
+				if err := tmpFile.Close(); err != nil {
+					return fmt.Errorf("failed to close temp file for key %s", key)
+				}
+				for _, path := range paths {
+					pathTmpFile, err := os.Open(tmpName)
+					if err != nil {
+						return fmt.Errorf("failed to open temp file for key %s", key)
+					}
+					*closers = append(*closers, pathTmpFile)
+					upload = graphql.Upload{
+						File:     pathTmpFile,
+						Size:     header.Size,
+						Filename: header.Filename,
+					}
+					err = addUploadToOperations(request, upload, key, path)
+					if err != nil {
+						return err
+					}
+				}
+			}
+		}
+	}
+	return nil
+}
+
+func addUploadToOperations(request *params, upload graphql.Upload, key, path string) error {
+	if !strings.HasPrefix(path, "variables.") {
+		return fmt.Errorf("invalid operations paths for key %s", key)
+	}
+
+	var ptr interface{} = request.Variables
+	parts := strings.Split(path, ".")
+
+	// skip the first part (variables) because we started there
+	for i, p := range parts[1:] {
+		last := i == len(parts)-2
+		if ptr == nil {
+			return fmt.Errorf("path is missing \"variables.\" prefix, key: %s, path: %s", key, path)
+		}
+		if index, parseNbrErr := strconv.Atoi(p); parseNbrErr == nil {
+			if last {
+				ptr.([]interface{})[index] = upload
+			} else {
+				ptr = ptr.([]interface{})[index]
+			}
+		} else {
+			if last {
+				ptr.(map[string]interface{})[p] = upload
+			} else {
+				ptr = ptr.(map[string]interface{})[p]
+			}
+		}
+	}
+
+	return nil
+}
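
The two new options let a server bound multipart uploads when wiring up the handler; both default to 32 MB. A sketch of typical gqlgen server wiring, which is not part of this diff, with only the two options taken from the code above:

    srv := handler.GraphQL(
        graph.NewExecutableSchema(graph.Config{Resolvers: &graph.Resolver{}}),
        handler.UploadMaxSize(10<<20),  // reject multipart request bodies over 10 MB
        handler.UploadMaxMemory(1<<20), // buffer at most 1 MB in memory, spill the rest to temp files
    )
    http.Handle("/query", srv)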

vendor/github.com/99designs/gqlgen/handler/mock.go 🔗

@@ -0,0 +1,57 @@
+package handler
+
+import (
+	"context"
+
+	"github.com/99designs/gqlgen/graphql"
+	"github.com/vektah/gqlparser"
+	"github.com/vektah/gqlparser/ast"
+)
+
+type executableSchemaMock struct {
+	MutationFunc func(ctx context.Context, op *ast.OperationDefinition) *graphql.Response
+}
+
+var _ graphql.ExecutableSchema = &executableSchemaMock{}
+
+func (e *executableSchemaMock) Schema() *ast.Schema {
+	return gqlparser.MustLoadSchema(&ast.Source{Input: `
+		schema { query: Query, mutation: Mutation }
+		type Query {
+			empty: String!
+		}
+		scalar Upload
+        type File {
+            id: Int!
+        }
+        input UploadFile {
+            id: Int!
+            file: Upload!
+        }
+        type Mutation {
+            singleUpload(file: Upload!): File!
+            singleUploadWithPayload(req: UploadFile!): File!
+            multipleUpload(files: [Upload!]!): [File!]!
+            multipleUploadWithPayload(req: [UploadFile!]!): [File!]!
+        }
+	`})
+}
+
+func (e *executableSchemaMock) Complexity(typeName, field string, childComplexity int, args map[string]interface{}) (int, bool) {
+	return 0, false
+}
+
+func (e *executableSchemaMock) Query(ctx context.Context, op *ast.OperationDefinition) *graphql.Response {
+	return graphql.ErrorResponse(ctx, "queries are not supported")
+}
+
+func (e *executableSchemaMock) Mutation(ctx context.Context, op *ast.OperationDefinition) *graphql.Response {
+	return e.MutationFunc(ctx, op)
+}
+
+func (e *executableSchemaMock) Subscription(ctx context.Context, op *ast.OperationDefinition) func() *graphql.Response {
+	return func() *graphql.Response {
+		<-ctx.Done()
+		return nil
+	}
+}

vendor/github.com/99designs/gqlgen/internal/code/imports.go 🔗

@@ -2,44 +2,96 @@ package code
 
 import (
 	"errors"
+	"go/build"
+	"go/parser"
+	"go/token"
+	"io/ioutil"
 	"path/filepath"
+	"regexp"
+	"strings"
 	"sync"
 
 	"golang.org/x/tools/go/packages"
 )
 
-var pathForDirCache = sync.Map{}
+var nameForPackageCache = sync.Map{}
 
-// ImportPathFromDir takes an *absolute* path and returns a golang import path for the package, and returns an error if it isn't on the gopath
-func ImportPathForDir(dir string) string {
-	if v, ok := pathForDirCache.Load(dir); ok {
-		return v.(string)
+var gopaths []string
+
+func init() {
+	gopaths = filepath.SplitList(build.Default.GOPATH)
+	for i, p := range gopaths {
+		gopaths[i] = filepath.ToSlash(filepath.Join(p, "src"))
 	}
+}
 
-	p, _ := packages.Load(&packages.Config{
-		Dir: dir,
-	}, ".")
+// NameForDir manually looks for package stanzas in files located in the given directory. This can be
+// much faster than having to consult go list, because we already know exactly where to look.
+func NameForDir(dir string) string {
+	dir, err := filepath.Abs(dir)
+	if err != nil {
+		return SanitizePackageName(filepath.Base(dir))
+	}
+	files, err := ioutil.ReadDir(dir)
+	if err != nil {
+		return SanitizePackageName(filepath.Base(dir))
+	}
+	fset := token.NewFileSet()
+	for _, file := range files {
+		if !strings.HasSuffix(strings.ToLower(file.Name()), ".go") {
+			continue
+		}
+
+		filename := filepath.Join(dir, file.Name())
+		if src, err := parser.ParseFile(fset, filename, nil, parser.PackageClauseOnly); err == nil {
+			return src.Name.Name
+		}
+	}
 
-	// If the dir dosent exist yet, keep walking up the directory tree trying to find a match
-	if len(p) != 1 {
-		parent, err := filepath.Abs(filepath.Join(dir, ".."))
+	return SanitizePackageName(filepath.Base(dir))
+}
+
+// ImportPathForDir takes a path and returns a golang import path for the package
+func ImportPathForDir(dir string) (res string) {
+	dir, err := filepath.Abs(dir)
+	if err != nil {
+		panic(err)
+	}
+	dir = filepath.ToSlash(dir)
+
+	modDir := dir
+	assumedPart := ""
+	for {
+		f, err := ioutil.ReadFile(filepath.Join(modDir, "/", "go.mod"))
+		if err == nil {
+			// found it, stop searching
+			return string(modregex.FindSubmatch(f)[1]) + assumedPart
+		}
+
+		assumedPart = "/" + filepath.Base(modDir) + assumedPart
+		modDir, err = filepath.Abs(filepath.Join(modDir, ".."))
 		if err != nil {
 			panic(err)
 		}
+
 		// Walked all the way to the root and didnt find anything :'(
-		if parent == dir {
-			return ""
+		if modDir == "/" {
+			break
 		}
-		return ImportPathForDir(parent) + "/" + filepath.Base(dir)
 	}
 
-	pathForDirCache.Store(dir, p[0].PkgPath)
+	for _, gopath := range gopaths {
+		if len(gopath) < len(dir) && strings.EqualFold(gopath, dir[0:len(gopath)]) {
+			return dir[len(gopath)+1:]
+		}
+	}
 
-	return p[0].PkgPath
+	return ""
 }
 
-var nameForPackageCache = sync.Map{}
+var modregex = regexp.MustCompile("module (.*)\n")
 
+// NameForPackage returns the package name for a given import path. This can be really slow.
 func NameForPackage(importPath string) string {
 	if importPath == "" {
 		panic(errors.New("import path can not be empty"))
@@ -48,7 +100,9 @@ func NameForPackage(importPath string) string {
 		return v.(string)
 	}
 	importPath = QualifyPackagePath(importPath)
-	p, _ := packages.Load(nil, importPath)
+	p, _ := packages.Load(&packages.Config{
+		Mode: packages.NeedName,
+	}, importPath)
 
 	if len(p) != 1 || p[0].Name == "" {
 		return SanitizePackageName(filepath.Base(importPath))
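
NameForDir now reads package clauses straight from the .go files, and ImportPathForDir prefers a go.mod module line before falling back to GOPATH, which lets codegen run outside GOPATH. The module-line extraction boils down to the following; the module path and sub-directory are invented:

    package main

    import (
        "fmt"
        "regexp"
    )

    func main() {
        gomod := []byte("module github.com/example/project\n\ngo 1.12\n")
        modregex := regexp.MustCompile("module (.*)\n")
        // Sub-directory segments are appended back on as the search walks upward,
        // e.g. <module path>/graphql for a nested package.
        fmt.Println(string(modregex.FindSubmatch(gomod)[1]) + "/graphql")
        // Output: github.com/example/project/graphql
    }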

vendor/github.com/99designs/gqlgen/plugin/modelgen/models.go 🔗

@@ -1,6 +1,7 @@
 package modelgen
 
 import (
+	"fmt"
 	"go/types"
 	"sort"
 
@@ -110,6 +111,7 @@ func (m *Plugin) MutateConfig(cfg *config.Config) error {
 
 			for _, field := range schemaType.Fields {
 				var typ types.Type
+				fieldDef := schema.Types[field.Type.Name()]
 
 				if cfg.Models.UserDefined(field.Type.Name()) {
 					pkg, typeName := code.PkgAndType(cfg.Models[field.Type.Name()].Model[0])
@@ -118,7 +120,6 @@ func (m *Plugin) MutateConfig(cfg *config.Config) error {
 						return err
 					}
 				} else {
-					fieldDef := schema.Types[field.Type.Name()]
 					switch fieldDef.Kind {
 					case ast.Scalar:
 						// no user defined model, referencing a default scalar
@@ -127,6 +128,7 @@ func (m *Plugin) MutateConfig(cfg *config.Config) error {
 							nil,
 							nil,
 						)
+
 					case ast.Interface, ast.Union:
 						// no user defined model, referencing a generated interface type
 						typ = types.NewNamed(
@@ -134,13 +136,25 @@ func (m *Plugin) MutateConfig(cfg *config.Config) error {
 							types.NewInterfaceType([]*types.Func{}, []types.Type{}),
 							nil,
 						)
-					default:
-						// no user defined model, must reference another generated model
+
+					case ast.Enum:
+						// no user defined model, must reference a generated enum
 						typ = types.NewNamed(
 							types.NewTypeName(0, cfg.Model.Pkg(), templates.ToGo(field.Type.Name()), nil),
 							nil,
 							nil,
 						)
+
+					case ast.Object, ast.InputObject:
+						// no user defined model, must reference a generated struct
+						typ = types.NewNamed(
+							types.NewTypeName(0, cfg.Model.Pkg(), templates.ToGo(field.Type.Name()), nil),
+							types.NewStruct(nil, nil),
+							nil,
+						)
+
+					default:
+						panic(fmt.Errorf("unknown ast type %s", fieldDef.Kind))
 					}
 				}
 
@@ -149,9 +163,15 @@ func (m *Plugin) MutateConfig(cfg *config.Config) error {
 					name = nameOveride
 				}
 
+				typ = binder.CopyModifiersFromAst(field.Type, typ)
+
+				if isStruct(typ) && (fieldDef.Kind == ast.Object || fieldDef.Kind == ast.InputObject) {
+					typ = types.NewPointer(typ)
+				}
+
 				it.Fields = append(it.Fields, &Field{
 					Name:        name,
-					Type:        binder.CopyModifiersFromAst(field.Type, typ),
+					Type:        typ,
 					Description: field.Description,
 					Tag:         `json:"` + field.Name + `"`,
 				})
@@ -205,3 +225,8 @@ func (m *Plugin) MutateConfig(cfg *config.Config) error {
 		GeneratedHeader: true,
 	})
 }
+
+func isStruct(t types.Type) bool {
+	_, is := t.Underlying().(*types.Struct)
+	return is
+}
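
The practical effect on generated models: fields whose GraphQL type is another object or input object are now emitted as pointers, and lists of them as slices of pointers, while scalars and enums stay as values. A hypothetical pair of generated structs to illustrate; the names are invented, not taken from this repository:

    type Comment struct {
        Body string `json:"body"`
    }

    type Author struct {
        Name string `json:"name"`
    }

    type Post struct {
        Title    string     `json:"title"`
        Author   *Author    `json:"author"`   // object-typed field: now a pointer
        Comments []*Comment `json:"comments"` // list of objects: slice of pointers
        Tags     []string   `json:"tags"`     // list of scalars: unchanged
    }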