lexer_test.go

package query

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestTokenize(t *testing.T) {
	tests := []struct {
		input  string
		tokens []token // nil means tokenize is expected to fail
	}{
		// empty value / empty key
		{"status:", nil},
		{":value", nil},

		{"status:open", []token{newTokenKV("status", "open")}},
		{"status:closed", []token{newTokenKV("status", "closed")}},

		{"author:rene", []token{newTokenKV("author", "rene")}},
		{`author:"René Descartes"`, []token{newTokenKV("author", "René Descartes")}},

		{
			`status:open status:closed author:rene author:"René Descartes"`,
			[]token{
				newTokenKV("status", "open"),
				newTokenKV("status", "closed"),
				newTokenKV("author", "rene"),
				newTokenKV("author", "René Descartes"),
			},
		},

		// quotes
		{`key:"value value"`, []token{newTokenKV("key", "value value")}},
		{`key:'value value'`, []token{newTokenKV("key", "value value")}},
		// unmatched quotes
		{`key:'value value`, nil},
		{`key:value value'`, nil},

		// full text search
		{"search", []token{newTokenSearch("search")}},
		{"search more terms", []token{
			newTokenSearch("search"),
			newTokenSearch("more"),
			newTokenSearch("terms"),
		}},
		{"search \"more terms\"", []token{
			newTokenSearch("search"),
			newTokenSearch("more terms"),
		}},
	}

	for _, tc := range tests {
		t.Run(tc.input, func(t *testing.T) {
			tokens, err := tokenize(tc.input)
			if tc.tokens == nil {
				assert.Error(t, err)
				assert.Nil(t, tokens)
			} else {
				assert.NoError(t, err)
				assert.Equal(t, tc.tokens, tokens)
			}
		})
	}
}
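
For reference, here is a minimal sketch of the lexer API these tests exercise. It is an illustrative reconstruction inferred from the test cases alone: the token type, newTokenKV, newTokenSearch, and the tokenize behavior shown below are assumptions, not the package's actual implementation. The sketch splits the query into whitespace-separated fields (keeping quoted spans together and stripping the quotes), then classifies each field as a key:value token or a full-text search token.

lexer.go (hypothetical sketch)

package query

import (
	"fmt"
	"strings"
	"unicode"
)

type tokenKind int

const (
	tokenKindKV tokenKind = iota
	tokenKindSearch
)

type token struct {
	kind      tokenKind
	qualifier string // key of a key:value token, empty for search tokens
	value     string
}

func newTokenKV(qualifier, value string) token {
	return token{kind: tokenKindKV, qualifier: qualifier, value: value}
}

func newTokenSearch(term string) token {
	return token{kind: tokenKindSearch, value: term}
}

// tokenize splits a query into key:value tokens and full-text search
// tokens. Values and search terms may be quoted with ' or " to include
// whitespace; the quotes themselves are stripped.
func tokenize(query string) ([]token, error) {
	fields, err := splitFields(query)
	if err != nil {
		return nil, err
	}

	var tokens []token
	for _, field := range fields {
		key, value, isKV := strings.Cut(field, ":")
		switch {
		case !isKV:
			tokens = append(tokens, newTokenSearch(field))
		case key == "" || value == "":
			return nil, fmt.Errorf("empty key or value in %q", field)
		default:
			tokens = append(tokens, newTokenKV(key, value))
		}
	}
	return tokens, nil
}

// splitFields splits the input on whitespace, keeping spans delimited by
// matching ' or " quotes together and stripping the quotes. An unclosed
// quote is an error.
func splitFields(input string) ([]string, error) {
	var fields []string
	var current strings.Builder
	runes := []rune(input)

	for i := 0; i < len(runes); i++ {
		switch r := runes[i]; {
		case r == '\'' || r == '"':
			// scan ahead for the matching closing quote
			closing := -1
			for j := i + 1; j < len(runes); j++ {
				if runes[j] == r {
					closing = j
					break
				}
			}
			if closing == -1 {
				return nil, fmt.Errorf("unmatched quote in %q", input)
			}
			current.WriteString(string(runes[i+1 : closing]))
			i = closing
		case unicode.IsSpace(r):
			if current.Len() > 0 {
				fields = append(fields, current.String())
				current.Reset()
			}
		default:
			current.WriteRune(r)
		}
	}
	if current.Len() > 0 {
		fields = append(fields, current.String())
	}
	return fields, nil
}

A real lexer would likely track quoting during classification as well, so that a quoted search term containing ':' is not misread as a key:value pair; the sketch trades that robustness for brevity.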