package parser

import (
	"fmt"

	"github.com/graphql-go/graphql/gqlerrors"
	"github.com/graphql-go/graphql/language/ast"
	"github.com/graphql-go/graphql/language/lexer"
	"github.com/graphql-go/graphql/language/source"
)

type parseFn func(parser *Parser) (interface{}, error)

type ParseOptions struct {
	NoLocation bool
	NoSource   bool
}

type ParseParams struct {
	Source  interface{}
	Options ParseOptions
}

type Parser struct {
	LexToken lexer.Lexer
	Source   *source.Source
	Options  ParseOptions
	PrevEnd  int
	Token    lexer.Token
}

func Parse(p ParseParams) (*ast.Document, error) {
	var sourceObj *source.Source
	switch p.Source.(type) {
	case *source.Source:
		sourceObj = p.Source.(*source.Source)
	default:
		body, _ := p.Source.(string)
		sourceObj = source.NewSource(&source.Source{Body: []byte(body)})
	}
	parser, err := makeParser(sourceObj, p.Options)
	if err != nil {
		return nil, err
	}
	doc, err := parseDocument(parser)
	if err != nil {
		return nil, err
	}
	return doc, nil
}
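
// Illustrative usage (a sketch added for clarity, not part of the original
// file): Source may be a raw string or a prepared *source.Source; any other
// type is treated as an empty string.
//
//	doc, err := Parse(ParseParams{Source: `{ user(id: 4) { name } }`})
//	if err != nil {
//		// err carries the syntax error and its source position
//	}
//	_ = doc.Definitions // a single shorthand-query OperationDefinition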
51
// TODO: test and expose parseValue as a public function.
func parseValue(p ParseParams) (ast.Value, error) {
	var value ast.Value
	var sourceObj *source.Source
	switch p.Source.(type) {
	case *source.Source:
		sourceObj = p.Source.(*source.Source)
	default:
		body, _ := p.Source.(string)
		sourceObj = source.NewSource(&source.Source{Body: []byte(body)})
	}
	parser, err := makeParser(sourceObj, p.Options)
	if err != nil {
		return value, err
	}
	value, err = parseValueLiteral(parser, false)
	if err != nil {
		return value, err
	}
	return value, nil
}

// Converts a name lex token into a name parse node.
func parseName(parser *Parser) (*ast.Name, error) {
	token, err := expect(parser, lexer.TokenKind[lexer.NAME])
	if err != nil {
		return nil, err
	}
	return ast.NewName(&ast.Name{
		Value: token.Value,
		Loc:   loc(parser, token.Start),
	}), nil
}

func makeParser(s *source.Source, opts ParseOptions) (*Parser, error) {
	lexToken := lexer.Lex(s)
	token, err := lexToken(0)
	if err != nil {
		return &Parser{}, err
	}
	return &Parser{
		LexToken: lexToken,
		Source:   s,
		Options:  opts,
		PrevEnd:  0,
		Token:    token,
	}, nil
}

/* Implements the parsing rules in the Document section. */

func parseDocument(parser *Parser) (*ast.Document, error) {
	start := parser.Token.Start
	var nodes []ast.Node
	for {
		if skp, err := skip(parser, lexer.TokenKind[lexer.EOF]); err != nil {
			return nil, err
		} else if skp {
			break
		}
		if peek(parser, lexer.TokenKind[lexer.BRACE_L]) {
			node, err := parseOperationDefinition(parser)
			if err != nil {
				return nil, err
			}
			nodes = append(nodes, node)
		} else if peek(parser, lexer.TokenKind[lexer.NAME]) {
			switch parser.Token.Value {
			case "query":
				fallthrough
			case "mutation":
				fallthrough
			case "subscription": // Note: subscription is an experimental non-spec addition.
				node, err := parseOperationDefinition(parser)
				if err != nil {
					return nil, err
				}
				nodes = append(nodes, node)
			case "fragment":
				node, err := parseFragmentDefinition(parser)
				if err != nil {
					return nil, err
				}
				nodes = append(nodes, node)

			// Note: the Type System IDL is an experimental non-spec addition.
			case "schema":
				fallthrough
			case "scalar":
				fallthrough
			case "type":
				fallthrough
			case "interface":
				fallthrough
			case "union":
				fallthrough
			case "enum":
				fallthrough
			case "input":
				fallthrough
			case "extend":
				fallthrough
			case "directive":
				node, err := parseTypeSystemDefinition(parser)
				if err != nil {
					return nil, err
				}
				nodes = append(nodes, node)
			default:
				if err := unexpected(parser, lexer.Token{}); err != nil {
					return nil, err
				}
			}
		} else if peekDescription(parser) {
			node, err := parseTypeSystemDefinition(parser)
			if err != nil {
				return nil, err
			}
			nodes = append(nodes, node)
		} else {
			if err := unexpected(parser, lexer.Token{}); err != nil {
				return nil, err
			}
		}
	}
	return ast.NewDocument(&ast.Document{
		Loc:         loc(parser, start),
		Definitions: nodes,
	}), nil
}

/* Implements the parsing rules in the Operations section. */

/**
 * OperationDefinition :
 *  - SelectionSet
 *  - OperationType Name? VariableDefinitions? Directives? SelectionSet
 */
func parseOperationDefinition(parser *Parser) (*ast.OperationDefinition, error) {
	start := parser.Token.Start
	if peek(parser, lexer.TokenKind[lexer.BRACE_L]) {
		selectionSet, err := parseSelectionSet(parser)
		if err != nil {
			return nil, err
		}
		return ast.NewOperationDefinition(&ast.OperationDefinition{
			Operation:    ast.OperationTypeQuery,
			Directives:   []*ast.Directive{},
			SelectionSet: selectionSet,
			Loc:          loc(parser, start),
		}), nil
	}
	operation, err := parseOperationType(parser)
	if err != nil {
		return nil, err
	}

	var name *ast.Name
	if peek(parser, lexer.TokenKind[lexer.NAME]) {
		name, err = parseName(parser)
		if err != nil {
			return nil, err
		}
	}
	variableDefinitions, err := parseVariableDefinitions(parser)
	if err != nil {
		return nil, err
	}
	directives, err := parseDirectives(parser)
	if err != nil {
		return nil, err
	}
	selectionSet, err := parseSelectionSet(parser)
	if err != nil {
		return nil, err
	}
	return ast.NewOperationDefinition(&ast.OperationDefinition{
		Operation:           operation,
		Name:                name,
		VariableDefinitions: variableDefinitions,
		Directives:          directives,
		SelectionSet:        selectionSet,
		Loc:                 loc(parser, start),
	}), nil
}
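
// Illustrative sketch (added for clarity; assumes the exported Parse entry
// point): a named operation with variables exercises the second grammar form
// above, while a bare selection set takes the shorthand-query branch.
//
//	doc, _ := Parse(ParseParams{Source: `query GetUser($id: ID!) { user(id: $id) { name } }`})
//	op := doc.Definitions[0].(*ast.OperationDefinition)
//	_ = op.Operation           // "query"
//	_ = op.VariableDefinitions // one definition for $id: ID!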

/**
 * OperationType : one of query mutation subscription
 */
func parseOperationType(parser *Parser) (string, error) {
	operationToken, err := expect(parser, lexer.TokenKind[lexer.NAME])
	if err != nil {
		return "", err
	}
	switch operationToken.Value {
	case ast.OperationTypeQuery:
		return operationToken.Value, nil
	case ast.OperationTypeMutation:
		return operationToken.Value, nil
	case ast.OperationTypeSubscription:
		return operationToken.Value, nil
	default:
		return "", unexpected(parser, operationToken)
	}
}

/**
 * VariableDefinitions : ( VariableDefinition+ )
 */
func parseVariableDefinitions(parser *Parser) ([]*ast.VariableDefinition, error) {
	variableDefinitions := []*ast.VariableDefinition{}
	if peek(parser, lexer.TokenKind[lexer.PAREN_L]) {
		vdefs, err := many(parser, lexer.TokenKind[lexer.PAREN_L], parseVariableDefinition, lexer.TokenKind[lexer.PAREN_R])
		for _, vdef := range vdefs {
			if vdef != nil {
				variableDefinitions = append(variableDefinitions, vdef.(*ast.VariableDefinition))
			}
		}
		if err != nil {
			return variableDefinitions, err
		}
		return variableDefinitions, nil
	}
	return variableDefinitions, nil
}

/**
 * VariableDefinition : Variable : Type DefaultValue?
 */
func parseVariableDefinition(parser *Parser) (interface{}, error) {
	start := parser.Token.Start
	variable, err := parseVariable(parser)
	if err != nil {
		return nil, err
	}
	_, err = expect(parser, lexer.TokenKind[lexer.COLON])
	if err != nil {
		return nil, err
	}
	ttype, err := parseType(parser)
	if err != nil {
		return nil, err
	}
	var defaultValue ast.Value
	if skp, err := skip(parser, lexer.TokenKind[lexer.EQUALS]); err != nil {
		return nil, err
	} else if skp {
		dv, err := parseValueLiteral(parser, true)
		if err != nil {
			return nil, err
		}
		defaultValue = dv
	}
	return ast.NewVariableDefinition(&ast.VariableDefinition{
		Variable:     variable,
		Type:         ttype,
		DefaultValue: defaultValue,
		Loc:          loc(parser, start),
	}), nil
}

/**
 * Variable : $ Name
 */
func parseVariable(parser *Parser) (*ast.Variable, error) {
	start := parser.Token.Start
	_, err := expect(parser, lexer.TokenKind[lexer.DOLLAR])
	if err != nil {
		return nil, err
	}
	name, err := parseName(parser)
	if err != nil {
		return nil, err
	}
	return ast.NewVariable(&ast.Variable{
		Name: name,
		Loc:  loc(parser, start),
	}), nil
}

/**
 * SelectionSet : { Selection+ }
 */
func parseSelectionSet(parser *Parser) (*ast.SelectionSet, error) {
	start := parser.Token.Start
	iSelections, err := many(parser, lexer.TokenKind[lexer.BRACE_L], parseSelection, lexer.TokenKind[lexer.BRACE_R])
	if err != nil {
		return nil, err
	}
	selections := []ast.Selection{}
	for _, iSelection := range iSelections {
		if iSelection != nil {
			// type assert interface{} into Selection interface
			selections = append(selections, iSelection.(ast.Selection))
		}
	}

	return ast.NewSelectionSet(&ast.SelectionSet{
		Selections: selections,
		Loc:        loc(parser, start),
	}), nil
}

/**
 * Selection :
 *  - Field
 *  - FragmentSpread
 *  - InlineFragment
 */
func parseSelection(parser *Parser) (interface{}, error) {
	if peek(parser, lexer.TokenKind[lexer.SPREAD]) {
		r, err := parseFragment(parser)
		return r, err
	}
	return parseField(parser)
}

/**
 * Field : Alias? Name Arguments? Directives? SelectionSet?
 *
 * Alias : Name :
 */
func parseField(parser *Parser) (*ast.Field, error) {
	start := parser.Token.Start
	nameOrAlias, err := parseName(parser)
	if err != nil {
		return nil, err
	}
	var (
		name  *ast.Name
		alias *ast.Name
	)
	if skp, err := skip(parser, lexer.TokenKind[lexer.COLON]); err != nil {
		return nil, err
	} else if skp {
		alias = nameOrAlias
		n, err := parseName(parser)
		if err != nil {
			return nil, err
		}
		name = n
	} else {
		name = nameOrAlias
	}
	arguments, err := parseArguments(parser)
	if err != nil {
		return nil, err
	}
	directives, err := parseDirectives(parser)
	if err != nil {
		return nil, err
	}
	var selectionSet *ast.SelectionSet
	if peek(parser, lexer.TokenKind[lexer.BRACE_L]) {
		sSet, err := parseSelectionSet(parser)
		if err != nil {
			return nil, err
		}
		selectionSet = sSet
	}
	return ast.NewField(&ast.Field{
		Alias:        alias,
		Name:         name,
		Arguments:    arguments,
		Directives:   directives,
		SelectionSet: selectionSet,
		Loc:          loc(parser, start),
	}), nil
}

/**
 * Arguments : ( Argument+ )
 */
func parseArguments(parser *Parser) ([]*ast.Argument, error) {
	arguments := []*ast.Argument{}
	if peek(parser, lexer.TokenKind[lexer.PAREN_L]) {
		iArguments, err := many(parser, lexer.TokenKind[lexer.PAREN_L], parseArgument, lexer.TokenKind[lexer.PAREN_R])
		if err != nil {
			return arguments, err
		}
		for _, iArgument := range iArguments {
			if iArgument != nil {
				arguments = append(arguments, iArgument.(*ast.Argument))
			}
		}
		return arguments, nil
	}
	return arguments, nil
}

/**
 * Argument : Name : Value
 */
func parseArgument(parser *Parser) (interface{}, error) {
	start := parser.Token.Start
	name, err := parseName(parser)
	if err != nil {
		return nil, err
	}
	_, err = expect(parser, lexer.TokenKind[lexer.COLON])
	if err != nil {
		return nil, err
	}
	value, err := parseValueLiteral(parser, false)
	if err != nil {
		return nil, err
	}
	return ast.NewArgument(&ast.Argument{
		Name:  name,
		Value: value,
		Loc:   loc(parser, start),
	}), nil
}

/* Implements the parsing rules in the Fragments section. */

/**
 * Corresponds to both FragmentSpread and InlineFragment in the spec.
 *
 * FragmentSpread : ... FragmentName Directives?
 *
 * InlineFragment : ... TypeCondition? Directives? SelectionSet
 */
func parseFragment(parser *Parser) (interface{}, error) {
	start := parser.Token.Start
	_, err := expect(parser, lexer.TokenKind[lexer.SPREAD])
	if err != nil {
		return nil, err
	}
	if peek(parser, lexer.TokenKind[lexer.NAME]) && parser.Token.Value != "on" {
		name, err := parseFragmentName(parser)
		if err != nil {
			return nil, err
		}
		directives, err := parseDirectives(parser)
		if err != nil {
			return nil, err
		}
		return ast.NewFragmentSpread(&ast.FragmentSpread{
			Name:       name,
			Directives: directives,
			Loc:        loc(parser, start),
		}), nil
	}
	var typeCondition *ast.Named
	if parser.Token.Value == "on" {
		if err := advance(parser); err != nil {
			return nil, err
		}
		name, err := parseNamed(parser)
		if err != nil {
			return nil, err
		}
		typeCondition = name
	}
	directives, err := parseDirectives(parser)
	if err != nil {
		return nil, err
	}
	selectionSet, err := parseSelectionSet(parser)
	if err != nil {
		return nil, err
	}
	return ast.NewInlineFragment(&ast.InlineFragment{
		TypeCondition: typeCondition,
		Directives:    directives,
		SelectionSet:  selectionSet,
		Loc:           loc(parser, start),
	}), nil
}
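
// Illustrative sketch (assumes the exported Parse entry point): the first
// branch above yields a fragment spread, the remainder an inline fragment
// with an optional type condition.
//
//	doc, _ := Parse(ParseParams{Source: `{ ...friendFields ... on User { name } }`})
//	set := doc.Definitions[0].(*ast.OperationDefinition).SelectionSet
//	_ = set.Selections[0].(*ast.FragmentSpread)  // ...friendFields
//	_ = set.Selections[1].(*ast.InlineFragment)  // ... on User { name }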

/**
 * FragmentDefinition :
 *  - fragment FragmentName on TypeCondition Directives? SelectionSet
 *
 * TypeCondition : NamedType
 */
func parseFragmentDefinition(parser *Parser) (*ast.FragmentDefinition, error) {
	start := parser.Token.Start
	_, err := expectKeyWord(parser, "fragment")
	if err != nil {
		return nil, err
	}
	name, err := parseFragmentName(parser)
	if err != nil {
		return nil, err
	}
	_, err = expectKeyWord(parser, "on")
	if err != nil {
		return nil, err
	}
	typeCondition, err := parseNamed(parser)
	if err != nil {
		return nil, err
	}
	directives, err := parseDirectives(parser)
	if err != nil {
		return nil, err
	}
	selectionSet, err := parseSelectionSet(parser)
	if err != nil {
		return nil, err
	}
	return ast.NewFragmentDefinition(&ast.FragmentDefinition{
		Name:          name,
		TypeCondition: typeCondition,
		Directives:    directives,
		SelectionSet:  selectionSet,
		Loc:           loc(parser, start),
	}), nil
}

/**
 * FragmentName : Name but not `on`
 */
func parseFragmentName(parser *Parser) (*ast.Name, error) {
	if parser.Token.Value == "on" {
		return nil, unexpected(parser, lexer.Token{})
	}
	return parseName(parser)
}

/* Implements the parsing rules in the Values section. */

/**
 * Value[Const] :
 *  - [~Const] Variable
 *  - IntValue
 *  - FloatValue
 *  - StringValue
 *  - BooleanValue
 *  - EnumValue
 *  - ListValue[?Const]
 *  - ObjectValue[?Const]
 *
 * BooleanValue : one of `true` `false`
 *
 * EnumValue : Name but not `true`, `false` or `null`
 */
func parseValueLiteral(parser *Parser, isConst bool) (ast.Value, error) {
	token := parser.Token
	switch token.Kind {
	case lexer.TokenKind[lexer.BRACKET_L]:
		return parseList(parser, isConst)
	case lexer.TokenKind[lexer.BRACE_L]:
		return parseObject(parser, isConst)
	case lexer.TokenKind[lexer.INT]:
		if err := advance(parser); err != nil {
			return nil, err
		}
		return ast.NewIntValue(&ast.IntValue{
			Value: token.Value,
			Loc:   loc(parser, token.Start),
		}), nil
	case lexer.TokenKind[lexer.FLOAT]:
		if err := advance(parser); err != nil {
			return nil, err
		}
		return ast.NewFloatValue(&ast.FloatValue{
			Value: token.Value,
			Loc:   loc(parser, token.Start),
		}), nil
	case lexer.TokenKind[lexer.BLOCK_STRING]:
		fallthrough
	case lexer.TokenKind[lexer.STRING]:
		return parseStringLiteral(parser)
	case lexer.TokenKind[lexer.NAME]:
		if token.Value == "true" || token.Value == "false" {
			if err := advance(parser); err != nil {
				return nil, err
			}
			value := true
			if token.Value == "false" {
				value = false
			}
			return ast.NewBooleanValue(&ast.BooleanValue{
				Value: value,
				Loc:   loc(parser, token.Start),
			}), nil
		} else if token.Value != "null" {
			if err := advance(parser); err != nil {
				return nil, err
			}
			return ast.NewEnumValue(&ast.EnumValue{
				Value: token.Value,
				Loc:   loc(parser, token.Start),
			}), nil
		}
	case lexer.TokenKind[lexer.DOLLAR]:
		if !isConst {
			return parseVariable(parser)
		}
	}
	if err := unexpected(parser, lexer.Token{}); err != nil {
		return nil, err
	}
	return nil, nil
}
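
// Illustrative sketch of the literal-to-node mapping (assumes the exported
// Parse entry point). Note that a bare `null` falls through to the unexpected
// error in this version, since no null-value node is produced above.
//
//	doc, _ := Parse(ParseParams{Source: `{ f(a: [1, "two", $three]) }`})
//	field := doc.Definitions[0].(*ast.OperationDefinition).SelectionSet.Selections[0].(*ast.Field)
//	list := field.Arguments[0].Value.(*ast.ListValue)
//	_ = list.Values // *ast.IntValue, *ast.StringValue, *ast.Variable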

func parseConstValue(parser *Parser) (interface{}, error) {
	value, err := parseValueLiteral(parser, true)
	if err != nil {
		return value, err
	}
	return value, nil
}

func parseValueValue(parser *Parser) (interface{}, error) {
	return parseValueLiteral(parser, false)
}

/**
 * ListValue[Const] :
 *  - [ ]
 *  - [ Value[?Const]+ ]
 */
func parseList(parser *Parser, isConst bool) (*ast.ListValue, error) {
	start := parser.Token.Start
	var item parseFn
	if isConst {
		item = parseConstValue
	} else {
		item = parseValueValue
	}
	iValues, err := any(parser, lexer.TokenKind[lexer.BRACKET_L], item, lexer.TokenKind[lexer.BRACKET_R])
	if err != nil {
		return nil, err
	}
	values := []ast.Value{}
	for _, iValue := range iValues {
		values = append(values, iValue.(ast.Value))
	}
	return ast.NewListValue(&ast.ListValue{
		Values: values,
		Loc:    loc(parser, start),
	}), nil
}

/**
 * ObjectValue[Const] :
 *  - { }
 *  - { ObjectField[?Const]+ }
 */
func parseObject(parser *Parser, isConst bool) (*ast.ObjectValue, error) {
	start := parser.Token.Start
	_, err := expect(parser, lexer.TokenKind[lexer.BRACE_L])
	if err != nil {
		return nil, err
	}
	fields := []*ast.ObjectField{}
	for {
		if skp, err := skip(parser, lexer.TokenKind[lexer.BRACE_R]); err != nil {
			return nil, err
		} else if skp {
			break
		}
		field, err := parseObjectField(parser, isConst)
		if err != nil {
			return nil, err
		}
		fields = append(fields, field)
	}
	return ast.NewObjectValue(&ast.ObjectValue{
		Fields: fields,
		Loc:    loc(parser, start),
	}), nil
}

/**
 * ObjectField[Const] : Name : Value[?Const]
 */
func parseObjectField(parser *Parser, isConst bool) (*ast.ObjectField, error) {
	start := parser.Token.Start
	name, err := parseName(parser)
	if err != nil {
		return nil, err
	}
	_, err = expect(parser, lexer.TokenKind[lexer.COLON])
	if err != nil {
		return nil, err
	}
	value, err := parseValueLiteral(parser, isConst)
	if err != nil {
		return nil, err
	}
	return ast.NewObjectField(&ast.ObjectField{
		Name:  name,
		Value: value,
		Loc:   loc(parser, start),
	}), nil
}

/* Implements the parsing rules in the Directives section. */

/**
 * Directives : Directive+
 */
func parseDirectives(parser *Parser) ([]*ast.Directive, error) {
	directives := []*ast.Directive{}
	for {
		if !peek(parser, lexer.TokenKind[lexer.AT]) {
			break
		}
		directive, err := parseDirective(parser)
		if err != nil {
			return directives, err
		}
		directives = append(directives, directive)
	}
	return directives, nil
}

/**
 * Directive : @ Name Arguments?
 */
func parseDirective(parser *Parser) (*ast.Directive, error) {
	start := parser.Token.Start
	_, err := expect(parser, lexer.TokenKind[lexer.AT])
	if err != nil {
		return nil, err
	}
	name, err := parseName(parser)
	if err != nil {
		return nil, err
	}
	args, err := parseArguments(parser)
	if err != nil {
		return nil, err
	}
	return ast.NewDirective(&ast.Directive{
		Name:      name,
		Arguments: args,
		Loc:       loc(parser, start),
	}), nil
}

/* Implements the parsing rules in the Types section. */

/**
 * Type :
 *  - NamedType
 *  - ListType
 *  - NonNullType
 */
func parseType(parser *Parser) (ast.Type, error) {
	start := parser.Token.Start
	var ttype ast.Type
	if skp, err := skip(parser, lexer.TokenKind[lexer.BRACKET_L]); err != nil {
		return nil, err
	} else if skp {
		t, err := parseType(parser)
		if err != nil {
			return t, err
		}
		ttype = t
		_, err = expect(parser, lexer.TokenKind[lexer.BRACKET_R])
		if err != nil {
			return ttype, err
		}
		ttype = ast.NewList(&ast.List{
			Type: ttype,
			Loc:  loc(parser, start),
		})
	} else {
		name, err := parseNamed(parser)
		if err != nil {
			return ttype, err
		}
		ttype = name
	}
	if skp, err := skip(parser, lexer.TokenKind[lexer.BANG]); err != nil {
		return nil, err
	} else if skp {
		ttype = ast.NewNonNull(&ast.NonNull{
			Type: ttype,
			Loc:  loc(parser, start),
		})
		return ttype, nil
	}
	return ttype, nil
}

/**
 * NamedType : Name
 */
func parseNamed(parser *Parser) (*ast.Named, error) {
	start := parser.Token.Start
	name, err := parseName(parser)
	if err != nil {
		return nil, err
	}
	return ast.NewNamed(&ast.Named{
		Name: name,
		Loc:  loc(parser, start),
	}), nil
}

/* Implements the parsing rules in the Type Definition section. */

/**
 * TypeSystemDefinition :
 *  - SchemaDefinition
 *  - TypeDefinition
 *  - TypeExtension
 *  - DirectiveDefinition
 *
 * TypeDefinition :
 *  - ScalarTypeDefinition
 *  - ObjectTypeDefinition
 *  - InterfaceTypeDefinition
 *  - UnionTypeDefinition
 *  - EnumTypeDefinition
 *  - InputObjectTypeDefinition
 */
func parseTypeSystemDefinition(parser *Parser) (ast.Node, error) {
	var err error

	// Many definitions begin with a description and require a lookahead.
	keywordToken := parser.Token
	if peekDescription(parser) {
		keywordToken, err = lookahead(parser)
		if err != nil {
			return nil, err
		}
	}

	if keywordToken.Kind == lexer.NAME {
		switch keywordToken.Value {
		case "schema":
			return parseSchemaDefinition(parser)
		case "scalar":
			return parseScalarTypeDefinition(parser)
		case "type":
			return parseObjectTypeDefinition(parser)
		case "interface":
			return parseInterfaceTypeDefinition(parser)
		case "union":
			return parseUnionTypeDefinition(parser)
		case "enum":
			return parseEnumTypeDefinition(parser)
		case "input":
			return parseInputObjectTypeDefinition(parser)
		case "extend":
			return parseTypeExtensionDefinition(parser)
		case "directive":
			return parseDirectiveDefinition(parser)
		}
	}

	return nil, unexpected(parser, keywordToken)
}
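
// Illustrative sketch (assumes the exported Parse entry point): a small piece
// of type-system IDL is dispatched here on the keyword that follows an
// optional description.
//
//	doc, _ := Parse(ParseParams{Source: `
//		"""An ISO-8601 timestamp"""
//		scalar DateTime
//		type Query { now: DateTime! }
//	`})
//	_ = doc.Definitions // *ast.ScalarDefinition, then *ast.ObjectDefinition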

/**
 * SchemaDefinition : schema Directives? { OperationTypeDefinition+ }
 *
 * OperationTypeDefinition : OperationType : NamedType
 */
func parseSchemaDefinition(parser *Parser) (*ast.SchemaDefinition, error) {
	start := parser.Token.Start
	_, err := expectKeyWord(parser, "schema")
	if err != nil {
		return nil, err
	}
	directives, err := parseDirectives(parser)
	if err != nil {
		return nil, err
	}
	operationTypesI, err := many(
		parser,
		lexer.TokenKind[lexer.BRACE_L],
		parseOperationTypeDefinition,
		lexer.TokenKind[lexer.BRACE_R],
	)
	if err != nil {
		return nil, err
	}
	operationTypes := []*ast.OperationTypeDefinition{}
	for _, op := range operationTypesI {
		if op, ok := op.(*ast.OperationTypeDefinition); ok {
			operationTypes = append(operationTypes, op)
		}
	}
	return ast.NewSchemaDefinition(&ast.SchemaDefinition{
		OperationTypes: operationTypes,
		Directives:     directives,
		Loc:            loc(parser, start),
	}), nil
}

func parseOperationTypeDefinition(parser *Parser) (interface{}, error) {
	start := parser.Token.Start
	operation, err := parseOperationType(parser)
	if err != nil {
		return nil, err
	}
	_, err = expect(parser, lexer.TokenKind[lexer.COLON])
	if err != nil {
		return nil, err
	}
	ttype, err := parseNamed(parser)
	if err != nil {
		return nil, err
	}
	return ast.NewOperationTypeDefinition(&ast.OperationTypeDefinition{
		Operation: operation,
		Type:      ttype,
		Loc:       loc(parser, start),
	}), nil
}

/**
 * ScalarTypeDefinition : Description? scalar Name Directives?
 */
func parseScalarTypeDefinition(parser *Parser) (*ast.ScalarDefinition, error) {
	start := parser.Token.Start
	description, err := parseDescription(parser)
	if err != nil {
		return nil, err
	}
	_, err = expectKeyWord(parser, "scalar")
	if err != nil {
		return nil, err
	}
	name, err := parseName(parser)
	if err != nil {
		return nil, err
	}
	directives, err := parseDirectives(parser)
	if err != nil {
		return nil, err
	}
	def := ast.NewScalarDefinition(&ast.ScalarDefinition{
		Name:        name,
		Description: description,
		Directives:  directives,
		Loc:         loc(parser, start),
	})
	return def, nil
}

/**
 * ObjectTypeDefinition :
 *   Description?
 *   type Name ImplementsInterfaces? Directives? { FieldDefinition+ }
 */
func parseObjectTypeDefinition(parser *Parser) (*ast.ObjectDefinition, error) {
	start := parser.Token.Start
	description, err := parseDescription(parser)
	if err != nil {
		return nil, err
	}
	_, err = expectKeyWord(parser, "type")
	if err != nil {
		return nil, err
	}
	name, err := parseName(parser)
	if err != nil {
		return nil, err
	}
	interfaces, err := parseImplementsInterfaces(parser)
	if err != nil {
		return nil, err
	}
	directives, err := parseDirectives(parser)
	if err != nil {
		return nil, err
	}
	iFields, err := any(parser, lexer.TokenKind[lexer.BRACE_L], parseFieldDefinition, lexer.TokenKind[lexer.BRACE_R])
	if err != nil {
		return nil, err
	}
	fields := []*ast.FieldDefinition{}
	for _, iField := range iFields {
		if iField != nil {
			fields = append(fields, iField.(*ast.FieldDefinition))
		}
	}
	return ast.NewObjectDefinition(&ast.ObjectDefinition{
		Name:        name,
		Description: description,
		Loc:         loc(parser, start),
		Interfaces:  interfaces,
		Directives:  directives,
		Fields:      fields,
	}), nil
}

/**
 * ImplementsInterfaces : implements NamedType+
 */
func parseImplementsInterfaces(parser *Parser) ([]*ast.Named, error) {
	types := []*ast.Named{}
	if parser.Token.Value == "implements" {
		if err := advance(parser); err != nil {
			return nil, err
		}
		for {
			ttype, err := parseNamed(parser)
			if err != nil {
				return types, err
			}
			types = append(types, ttype)
			if !peek(parser, lexer.TokenKind[lexer.NAME]) {
				break
			}
		}
	}
	return types, nil
}

/**
 * FieldDefinition : Description? Name ArgumentsDefinition? : Type Directives?
 */
func parseFieldDefinition(parser *Parser) (interface{}, error) {
	start := parser.Token.Start
	description, err := parseDescription(parser)
	if err != nil {
		return nil, err
	}
	name, err := parseName(parser)
	if err != nil {
		return nil, err
	}
	args, err := parseArgumentDefs(parser)
	if err != nil {
		return nil, err
	}
	_, err = expect(parser, lexer.TokenKind[lexer.COLON])
	if err != nil {
		return nil, err
	}
	ttype, err := parseType(parser)
	if err != nil {
		return nil, err
	}
	directives, err := parseDirectives(parser)
	if err != nil {
		return nil, err
	}
	return ast.NewFieldDefinition(&ast.FieldDefinition{
		Name:        name,
		Description: description,
		Arguments:   args,
		Type:        ttype,
		Directives:  directives,
		Loc:         loc(parser, start),
	}), nil
}

/**
 * ArgumentsDefinition : ( InputValueDefinition+ )
 */
func parseArgumentDefs(parser *Parser) ([]*ast.InputValueDefinition, error) {
	inputValueDefinitions := []*ast.InputValueDefinition{}

	if !peek(parser, lexer.TokenKind[lexer.PAREN_L]) {
		return inputValueDefinitions, nil
	}
	iInputValueDefinitions, err := many(parser, lexer.TokenKind[lexer.PAREN_L], parseInputValueDef, lexer.TokenKind[lexer.PAREN_R])
	if err != nil {
		return inputValueDefinitions, err
	}
	for _, iInputValueDefinition := range iInputValueDefinitions {
		if iInputValueDefinition != nil {
			inputValueDefinitions = append(inputValueDefinitions, iInputValueDefinition.(*ast.InputValueDefinition))
		}
	}
	return inputValueDefinitions, err
}

/**
 * InputValueDefinition : Description? Name : Type DefaultValue? Directives?
 */
func parseInputValueDef(parser *Parser) (interface{}, error) {
	start := parser.Token.Start
	description, err := parseDescription(parser)
	if err != nil {
		return nil, err
	}
	name, err := parseName(parser)
	if err != nil {
		return nil, err
	}
	_, err = expect(parser, lexer.TokenKind[lexer.COLON])
	if err != nil {
		return nil, err
	}
	ttype, err := parseType(parser)
	if err != nil {
		return nil, err
	}
	var defaultValue ast.Value
	if skp, err := skip(parser, lexer.TokenKind[lexer.EQUALS]); err != nil {
		return nil, err
	} else if skp {
		val, err := parseConstValue(parser)
		if err != nil {
			return nil, err
		}
		if val, ok := val.(ast.Value); ok {
			defaultValue = val
		}
	}
	directives, err := parseDirectives(parser)
	if err != nil {
		return nil, err
	}
	return ast.NewInputValueDefinition(&ast.InputValueDefinition{
		Name:         name,
		Description:  description,
		Type:         ttype,
		DefaultValue: defaultValue,
		Directives:   directives,
		Loc:          loc(parser, start),
	}), nil
}

/**
 * InterfaceTypeDefinition :
 *   Description?
 *   interface Name Directives? { FieldDefinition+ }
 */
func parseInterfaceTypeDefinition(parser *Parser) (*ast.InterfaceDefinition, error) {
	start := parser.Token.Start
	description, err := parseDescription(parser)
	if err != nil {
		return nil, err
	}
	_, err = expectKeyWord(parser, "interface")
	if err != nil {
		return nil, err
	}
	name, err := parseName(parser)
	if err != nil {
		return nil, err
	}
	directives, err := parseDirectives(parser)
	if err != nil {
		return nil, err
	}
	iFields, err := any(parser, lexer.TokenKind[lexer.BRACE_L], parseFieldDefinition, lexer.TokenKind[lexer.BRACE_R])
	if err != nil {
		return nil, err
	}
	fields := []*ast.FieldDefinition{}
	for _, iField := range iFields {
		if iField != nil {
			fields = append(fields, iField.(*ast.FieldDefinition))
		}
	}
	return ast.NewInterfaceDefinition(&ast.InterfaceDefinition{
		Name:        name,
		Description: description,
		Directives:  directives,
		Loc:         loc(parser, start),
		Fields:      fields,
	}), nil
}

/**
 * UnionTypeDefinition : Description? union Name Directives? = UnionMembers
 */
func parseUnionTypeDefinition(parser *Parser) (*ast.UnionDefinition, error) {
	start := parser.Token.Start
	description, err := parseDescription(parser)
	if err != nil {
		return nil, err
	}
	_, err = expectKeyWord(parser, "union")
	if err != nil {
		return nil, err
	}
	name, err := parseName(parser)
	if err != nil {
		return nil, err
	}
	directives, err := parseDirectives(parser)
	if err != nil {
		return nil, err
	}
	_, err = expect(parser, lexer.TokenKind[lexer.EQUALS])
	if err != nil {
		return nil, err
	}
	types, err := parseUnionMembers(parser)
	if err != nil {
		return nil, err
	}
	return ast.NewUnionDefinition(&ast.UnionDefinition{
		Name:        name,
		Description: description,
		Directives:  directives,
		Loc:         loc(parser, start),
		Types:       types,
	}), nil
}

/**
 * UnionMembers :
 *  - NamedType
 *  - UnionMembers | NamedType
 */
func parseUnionMembers(parser *Parser) ([]*ast.Named, error) {
	members := []*ast.Named{}
	for {
		member, err := parseNamed(parser)
		if err != nil {
			return members, err
		}
		members = append(members, member)
		if skp, err := skip(parser, lexer.TokenKind[lexer.PIPE]); err != nil {
			return nil, err
		} else if !skp {
			break
		}
	}
	return members, nil
}

/**
 * EnumTypeDefinition : Description? enum Name Directives? { EnumValueDefinition+ }
 */
func parseEnumTypeDefinition(parser *Parser) (*ast.EnumDefinition, error) {
	start := parser.Token.Start
	description, err := parseDescription(parser)
	if err != nil {
		return nil, err
	}
	_, err = expectKeyWord(parser, "enum")
	if err != nil {
		return nil, err
	}
	name, err := parseName(parser)
	if err != nil {
		return nil, err
	}
	directives, err := parseDirectives(parser)
	if err != nil {
		return nil, err
	}
	iEnumValueDefs, err := any(parser, lexer.TokenKind[lexer.BRACE_L], parseEnumValueDefinition, lexer.TokenKind[lexer.BRACE_R])
	if err != nil {
		return nil, err
	}
	values := []*ast.EnumValueDefinition{}
	for _, iEnumValueDef := range iEnumValueDefs {
		if iEnumValueDef != nil {
			values = append(values, iEnumValueDef.(*ast.EnumValueDefinition))
		}
	}
	return ast.NewEnumDefinition(&ast.EnumDefinition{
		Name:        name,
		Description: description,
		Directives:  directives,
		Loc:         loc(parser, start),
		Values:      values,
	}), nil
}

/**
 * EnumValueDefinition : Description? EnumValue Directives?
 *
 * EnumValue : Name
 */
func parseEnumValueDefinition(parser *Parser) (interface{}, error) {
	start := parser.Token.Start
	description, err := parseDescription(parser)
	if err != nil {
		return nil, err
	}
	name, err := parseName(parser)
	if err != nil {
		return nil, err
	}
	directives, err := parseDirectives(parser)
	if err != nil {
		return nil, err
	}
	return ast.NewEnumValueDefinition(&ast.EnumValueDefinition{
		Name:        name,
		Description: description,
		Directives:  directives,
		Loc:         loc(parser, start),
	}), nil
}

/**
 * InputObjectTypeDefinition :
 *  - Description? input Name Directives? { InputValueDefinition+ }
 */
func parseInputObjectTypeDefinition(parser *Parser) (*ast.InputObjectDefinition, error) {
	start := parser.Token.Start
	description, err := parseDescription(parser)
	if err != nil {
		return nil, err
	}
	_, err = expectKeyWord(parser, "input")
	if err != nil {
		return nil, err
	}
	name, err := parseName(parser)
	if err != nil {
		return nil, err
	}
	directives, err := parseDirectives(parser)
	if err != nil {
		return nil, err
	}
	iInputValueDefinitions, err := any(parser, lexer.TokenKind[lexer.BRACE_L], parseInputValueDef, lexer.TokenKind[lexer.BRACE_R])
	if err != nil {
		return nil, err
	}
	fields := []*ast.InputValueDefinition{}
	for _, iInputValueDefinition := range iInputValueDefinitions {
		if iInputValueDefinition != nil {
			fields = append(fields, iInputValueDefinition.(*ast.InputValueDefinition))
		}
	}
	return ast.NewInputObjectDefinition(&ast.InputObjectDefinition{
		Name:        name,
		Description: description,
		Directives:  directives,
		Loc:         loc(parser, start),
		Fields:      fields,
	}), nil
}

/**
 * TypeExtensionDefinition : extend ObjectTypeDefinition
 */
func parseTypeExtensionDefinition(parser *Parser) (*ast.TypeExtensionDefinition, error) {
	start := parser.Token.Start
	_, err := expectKeyWord(parser, "extend")
	if err != nil {
		return nil, err
	}

	definition, err := parseObjectTypeDefinition(parser)
	if err != nil {
		return nil, err
	}
	return ast.NewTypeExtensionDefinition(&ast.TypeExtensionDefinition{
		Loc:        loc(parser, start),
		Definition: definition,
	}), nil
}

/**
 * DirectiveDefinition :
 *  - directive @ Name ArgumentsDefinition? on DirectiveLocations
 */
func parseDirectiveDefinition(parser *Parser) (*ast.DirectiveDefinition, error) {
	start := parser.Token.Start
	description, err := parseDescription(parser)
	if err != nil {
		return nil, err
	}
	_, err = expectKeyWord(parser, "directive")
	if err != nil {
		return nil, err
	}
	_, err = expect(parser, lexer.TokenKind[lexer.AT])
	if err != nil {
		return nil, err
	}
	name, err := parseName(parser)
	if err != nil {
		return nil, err
	}
	args, err := parseArgumentDefs(parser)
	if err != nil {
		return nil, err
	}
	_, err = expectKeyWord(parser, "on")
	if err != nil {
		return nil, err
	}
	locations, err := parseDirectiveLocations(parser)
	if err != nil {
		return nil, err
	}

	return ast.NewDirectiveDefinition(&ast.DirectiveDefinition{
		Loc:         loc(parser, start),
		Name:        name,
		Description: description,
		Arguments:   args,
		Locations:   locations,
	}), nil
}

/**
 * DirectiveLocations :
 *  - Name
 *  - DirectiveLocations | Name
 */
func parseDirectiveLocations(parser *Parser) ([]*ast.Name, error) {
	locations := []*ast.Name{}
	for {
		name, err := parseName(parser)
		if err != nil {
			return locations, err
		}
		locations = append(locations, name)

		hasPipe, err := skip(parser, lexer.TokenKind[lexer.PIPE])
		if err != nil {
			return locations, err
		}
		if !hasPipe {
			break
		}
	}
	return locations, nil
}
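
// Illustrative sketch (assumes the exported Parse entry point): a directive
// definition with arguments and three pipe-separated locations.
//
//	doc, _ := Parse(ParseParams{Source: `directive @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT`})
//	def := doc.Definitions[0].(*ast.DirectiveDefinition)
//	_ = def.Arguments // one InputValueDefinition: if: Boolean!
//	_ = def.Locations // three *ast.Name nodes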

func parseStringLiteral(parser *Parser) (*ast.StringValue, error) {
	token := parser.Token
	if err := advance(parser); err != nil {
		return nil, err
	}
	return ast.NewStringValue(&ast.StringValue{
		Value: token.Value,
		Loc:   loc(parser, token.Start),
	}), nil
}

/**
 * Description : StringValue
 */
func parseDescription(parser *Parser) (*ast.StringValue, error) {
	if peekDescription(parser) {
		return parseStringLiteral(parser)
	}
	return nil, nil
}

/* Core parsing utility functions */

// Returns a location object, used to identify the place in
// the source that created a given parsed object.
func loc(parser *Parser, start int) *ast.Location {
	if parser.Options.NoLocation {
		return nil
	}
	if parser.Options.NoSource {
		return ast.NewLocation(&ast.Location{
			Start: start,
			End:   parser.PrevEnd,
		})
	}
	return ast.NewLocation(&ast.Location{
		Start:  start,
		End:    parser.PrevEnd,
		Source: parser.Source,
	})
}

// Moves the internal parser object to the next lexed token.
func advance(parser *Parser) error {
	prevEnd := parser.Token.End
	parser.PrevEnd = prevEnd
	token, err := parser.LexToken(prevEnd)
	if err != nil {
		return err
	}
	parser.Token = token
	return nil
}

// lookahead retrieves the next token
func lookahead(parser *Parser) (lexer.Token, error) {
	prevEnd := parser.Token.End
	return parser.LexToken(prevEnd)
}

// Determines if the next token is of a given kind
func peek(parser *Parser, Kind int) bool {
	return parser.Token.Kind == Kind
}

// peekDescription determines if the next token is a string value
func peekDescription(parser *Parser) bool {
	return peek(parser, lexer.STRING) || peek(parser, lexer.BLOCK_STRING)
}

// If the next token is of the given kind, return true after advancing
// the parser. Otherwise, do not change the parser state and return false.
func skip(parser *Parser, Kind int) (bool, error) {
	if parser.Token.Kind == Kind {
		err := advance(parser)
		return true, err
	}
	return false, nil
}

// If the next token is of the given kind, return that token after advancing
// the parser. Otherwise, do not change the parser state and return an error.
func expect(parser *Parser, kind int) (lexer.Token, error) {
	token := parser.Token
	if token.Kind == kind {
		err := advance(parser)
		return token, err
	}
	descp := fmt.Sprintf("Expected %s, found %s", lexer.GetTokenKindDesc(kind), lexer.GetTokenDesc(token))
	return token, gqlerrors.NewSyntaxError(parser.Source, token.Start, descp)
}

// If the next token is a keyword with the given value, return that token after
// advancing the parser. Otherwise, do not change the parser state and return an error.
func expectKeyWord(parser *Parser, value string) (lexer.Token, error) {
	token := parser.Token
	if token.Kind == lexer.TokenKind[lexer.NAME] && token.Value == value {
		err := advance(parser)
		return token, err
	}
	descp := fmt.Sprintf("Expected \"%s\", found %s", value, lexer.GetTokenDesc(token))
	return token, gqlerrors.NewSyntaxError(parser.Source, token.Start, descp)
}

// Helper function for creating an error when an unexpected lexed token
// is encountered.
func unexpected(parser *Parser, atToken lexer.Token) error {
	var token lexer.Token
	if (atToken == lexer.Token{}) {
		token = parser.Token
	} else {
		token = atToken
	}
	description := fmt.Sprintf("Unexpected %v", lexer.GetTokenDesc(token))
	return gqlerrors.NewSyntaxError(parser.Source, token.Start, description)
}

// Returns a possibly empty list of parse nodes, determined by
// the parseFn. This list begins with a lex token of openKind
// and ends with a lex token of closeKind. Advances the parser
// to the next lex token after the closing token.
func any(parser *Parser, openKind int, parseFn parseFn, closeKind int) ([]interface{}, error) {
	var nodes []interface{}
	_, err := expect(parser, openKind)
	if err != nil {
		return nodes, err
	}
	for {
		if skp, err := skip(parser, closeKind); err != nil {
			return nil, err
		} else if skp {
			break
		}
		n, err := parseFn(parser)
		if err != nil {
			return nodes, err
		}
		nodes = append(nodes, n)
	}
	return nodes, nil
}

// Returns a non-empty list of parse nodes, determined by
// the parseFn. This list begins with a lex token of openKind
// and ends with a lex token of closeKind. Advances the parser
// to the next lex token after the closing token.
func many(parser *Parser, openKind int, parseFn parseFn, closeKind int) ([]interface{}, error) {
	_, err := expect(parser, openKind)
	if err != nil {
		return nil, err
	}
	var nodes []interface{}
	node, err := parseFn(parser)
	if err != nil {
		return nodes, err
	}
	nodes = append(nodes, node)
	for {
		if skp, err := skip(parser, closeKind); err != nil {
			return nil, err
		} else if skp {
			break
		}
		node, err := parseFn(parser)
		if err != nil {
			return nodes, err
		}
		nodes = append(nodes, node)
	}
	return nodes, nil
}
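
// Illustrative note (a sketch, not part of the original file): many requires
// at least one node between the delimiters, which is why an empty selection
// set is rejected, while any above also accepts an empty list (e.g. the []
// and {} value literals).
//
//	_, err := Parse(ParseParams{Source: `{ }`})
//	// err is non-nil: the selection set must contain at least one selection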