lexer.go

package common

import (
	"fmt"
	"text/scanner"

	"github.com/vektah/gqlgen/neelance/errors"
)

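// syntaxError is the panic payload raised by SyntaxError and recovered by
// CatchSyntaxError.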
type syntaxError string

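// Lexer is a one-token-lookahead tokenizer built on text/scanner. It skips
// commas and collects "#" comments so they can be used as descriptions.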
type Lexer struct {
	sc          *scanner.Scanner
	next        rune
	descComment string
}

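// Ident is an identifier together with the location at which it was scanned.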
type Ident struct {
	Name string
	Loc  errors.Location
}

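// New returns a Lexer reading from sc. It consumes the first token so that
// Peek is immediately usable.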
func New(sc *scanner.Scanner) *Lexer {
	l := &Lexer{sc: sc}
	l.Consume()
	return l
}

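// CatchSyntaxError runs f and converts a panic raised via SyntaxError into a
// *errors.QueryError annotated with the current location. Panics of any other
// type are re-raised.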
func (l *Lexer) CatchSyntaxError(f func()) (errRes *errors.QueryError) {
	defer func() {
		if err := recover(); err != nil {
			if err, ok := err.(syntaxError); ok {
				errRes = errors.Errorf("syntax error: %s", err)
				errRes.Locations = []errors.Location{l.Location()}
				return
			}
			panic(err)
		}
	}()

	f()
	return
}

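// Peek returns the current lookahead token without consuming it.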
func (l *Lexer) Peek() rune {
	return l.next
}

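// Consume advances to the next significant token. Commas are skipped, and
// consecutive "#" comment lines (with one leading space trimmed) are
// accumulated into the description returned by DescComment.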
func (l *Lexer) Consume() {
	l.descComment = ""
	for {
		l.next = l.sc.Scan()
		if l.next == ',' {
			continue
		}
		if l.next == '#' {
			if l.sc.Peek() == ' ' {
				l.sc.Next()
			}
			if l.descComment != "" {
				l.descComment += "\n"
			}
			for {
				next := l.sc.Next()
				if next == '\n' || next == scanner.EOF {
					break
				}
				l.descComment += string(next)
			}
			continue
		}
		break
	}
}

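// ConsumeIdent consumes the current token, which must be an identifier, and
// returns its text.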
func (l *Lexer) ConsumeIdent() string {
	name := l.sc.TokenText()
	l.ConsumeToken(scanner.Ident)
	return name
}

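// ConsumeIdentWithLoc is like ConsumeIdent but also records the identifier's
// location.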
func (l *Lexer) ConsumeIdentWithLoc() Ident {
	loc := l.Location()
	name := l.sc.TokenText()
	l.ConsumeToken(scanner.Ident)
	return Ident{name, loc}
}

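// ConsumeKeyword consumes an identifier token whose text must equal keyword,
// raising a syntax error otherwise.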
func (l *Lexer) ConsumeKeyword(keyword string) {
	if l.next != scanner.Ident || l.sc.TokenText() != keyword {
		l.SyntaxError(fmt.Sprintf("unexpected %q, expecting %q", l.sc.TokenText(), keyword))
	}
	l.Consume()
}

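// ConsumeLiteral consumes the current token and returns it as a BasicLit
// carrying the token type and its raw text.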
func (l *Lexer) ConsumeLiteral() *BasicLit {
	lit := &BasicLit{Type: l.next, Text: l.sc.TokenText()}
	l.Consume()
	return lit
}

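// ConsumeToken consumes the current token if it matches expected, raising a
// syntax error otherwise.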
func (l *Lexer) ConsumeToken(expected rune) {
	if l.next != expected {
		l.SyntaxError(fmt.Sprintf("unexpected %q, expecting %s", l.sc.TokenText(), scanner.TokenString(expected)))
	}
	l.Consume()
}

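// DescComment returns the comment text collected while scanning up to the
// current token.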
func (l *Lexer) DescComment() string {
	return l.descComment
}

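// SyntaxError aborts lexing by panicking with a syntaxError; callers are
// expected to wrap lexing in CatchSyntaxError.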
func (l *Lexer) SyntaxError(message string) {
	panic(syntaxError(message))
}

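// Location returns the scanner's current line and column.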
func (l *Lexer) Location() errors.Location {
	return errors.Location{
		Line:   l.sc.Line,
		Column: l.sc.Column,
	}
}
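
// lexDocument is an illustrative sketch, not part of the original file: it
// shows one way a caller could drive the Lexer, assuming sc has already been
// initialized on the source text with suitable scanner mode flags. It prints
// each identifier and stops at the first syntax error.
func lexDocument(sc *scanner.Scanner) *errors.QueryError {
	l := New(sc)
	return l.CatchSyntaxError(func() {
		// Walk every token until end of input.
		for l.Peek() != scanner.EOF {
			if l.Peek() == scanner.Ident {
				fmt.Println(l.ConsumeIdent())
				continue
			}
			l.Consume()
		}
	})
}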