1import gleam/list
2import gleam/result
3import gleeunit
4import nibble
5import nibble/lexer.{Span, Token}
6import node
7import parser
8import quasi_lexer
9import snapshot_helpers
10
/// Test-suite entry point: hands control to the gleeunit runner, which
/// discovers and runs every public function whose name ends in `_test`.
pub fn main() -> Nil {
  gleeunit.main()
}
14
/// For a single-line input, every token the quasi lexer emits should sit on
/// row 1, occupy exactly one column, and carry a value equal to its lexeme.
pub fn simple_quasi_lexer_test() {
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: "let x1 = e1")

  list.index_map(tokens, fn(tok, i) {
    let Token(Span(row_from, col_from, row_to, col_to), lexeme, value) = tok
    assert lexeme == value
    assert row_from == 1
    assert row_to == 1
    assert col_from == i + 1
    assert col_to == col_from + 1
  })
}
28
/// Snapshot the spans the quasi lexer reports when the input crosses a
/// line break, guarding against off-by-one row/column bookkeeping.
pub fn quasi_lexer_off_by_one_test() {
  let source = "let x1 =\n e1"
  let lexed = quasi_lexer.run(quasi_lexer.chars(), on: source)

  snapshot_helpers.snap_lexer_output(
    source,
    lexed,
    "Quasi lexer spans with multiline input",
  )
}
39
/// `exact_string("let", node.Let)` applied to the exact input "let" should
/// succeed; the outcome is snapshotted.
pub fn parse_let_successfully_test() {
  let source = "let"
  let outcome =
    quasi_lexer.run(quasi_lexer.chars(), on: source)
    |> nibble.run(parser.exact_string("let", node.Let))

  snapshot_helpers.snap_parse_result_nodes(
    source,
    result.map(outcome, list.wrap),
    "Should successfully parse 'let' as node.Let",
  )
}
52
/// `exact_string("let", node.Let)` must reject the misspelled input "lt";
/// the failing outcome is snapshotted.
pub fn parse_let_failing_test() {
  let source = "lt"
  let outcome =
    quasi_lexer.run(quasi_lexer.chars(), on: source)
    |> nibble.run(parser.exact_string("let", node.Let))

  snapshot_helpers.snap_parse_result_nodes(
    source,
    result.map(outcome, list.wrap),
    "Should fail to parse 'lt' as node.Let",
  )
}
65
/// A bare LF is a valid (Unix) line ending and parses to EndOfLine.
pub fn parse_unix_line_ending_test() {
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: "\n")
  let assert Ok(node.EndOfLine) = nibble.run(tokens, parser.end_of_line())
}
72
/// A CRLF pair is a valid (Windows) line ending and parses to EndOfLine.
pub fn parse_windows_line_ending_test() {
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: "\r\n")
  let assert Ok(node.EndOfLine) = nibble.run(tokens, parser.end_of_line())
}
79
/// A carriage return without a following LF is not a line ending; the
/// failing outcome is snapshotted.
pub fn parse_line_ending_fails_on_lone_carriage_return_test() {
  let source = "\r"
  let outcome =
    quasi_lexer.run(quasi_lexer.chars(), on: source)
    |> nibble.run(parser.end_of_line())

  snapshot_helpers.snap_parse_result_nodes(
    source,
    result.map(outcome, list.wrap),
    "Line ending should reject lone carriage return",
  )
}
92
/// An ordinary character is not a line ending and must be rejected.
pub fn parse_line_ending_fails_on_other_chars_test() {
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: "x")
  let assert Error(_) = nibble.run(tokens, parser.end_of_line())
}
99
/// A line ending should still parse when it follows other content
/// (here, the `let` keyword).
pub fn parse_line_ending_after_content_test() {
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: "let\n")
  let keyword_then_eol = {
    use _ <- nibble.do(parser.let_keyword())
    parser.end_of_line()
  }
  let assert Ok(node.EndOfLine) = nibble.run(tokens, keyword_then_eol)
}
109
/// Consecutive LF, CRLF and LF endings should each parse as EndOfLine;
/// the combined outcome is snapshotted.
pub fn parse_multiple_line_endings_test() {
  let source = "\n\r\n\n"
  let three_endings = {
    use first <- nibble.do(parser.end_of_line())
    use second <- nibble.do(parser.end_of_line())
    use third <- nibble.do(parser.end_of_line())
    nibble.return([first, second, third])
  }
  let outcome =
    quasi_lexer.run(quasi_lexer.chars(), on: source)
    |> nibble.run(three_endings)

  snapshot_helpers.snap_parse_result_nodes(
    source,
    outcome,
    "Multiple line endings should all parse as EndOfLine",
  )
}
127
128// Tests for printable parser (%x20-7F)
129
/// Space (0x20) is the lowest printable character and must be accepted.
pub fn parse_printable_space_test() {
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: " ")
  let assert Ok(node.Printable(" ")) = nibble.run(tokens, parser.printable())
}
136
/// An ordinary ASCII letter is printable.
pub fn parse_printable_ascii_test() {
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: "a")
  let assert Ok(node.Printable("a")) = nibble.run(tokens, parser.printable())
}
143
/// Tilde (0x7E) sits near the top of the printable range and must be accepted.
pub fn parse_printable_tilde_test() {
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: "~")
  let assert Ok(node.Printable("~")) = nibble.run(tokens, parser.printable())
}
150
/// Tab (0x09) is below 0x20 and therefore not printable.
pub fn parse_printable_rejects_tab_test() {
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: "\t")
  let assert Error(_) = nibble.run(tokens, parser.printable())
}
157
/// LF (0x0A) is below 0x20 and therefore not printable.
pub fn parse_printable_rejects_newline_test() {
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: "\n")
  let assert Error(_) = nibble.run(tokens, parser.printable())
}
164
/// Characters above the ASCII range are not matched by `printable`.
pub fn parse_printable_rejects_non_ascii_test() {
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: "é")
  let assert Error(_) = nibble.run(tokens, parser.printable())
}
171
172// Tests for valid-non-ascii parser
173
/// A Latin-1 letter is accepted by `valid_non_ascii`.
pub fn parse_valid_non_ascii_latin_test() {
  // é is U+00E9, in range 0x80-0xD7FF
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: "é")
  let assert Ok(node.ValidNonAscii("é")) =
    nibble.run(tokens, parser.valid_non_ascii())
}
181
/// A supplementary-plane emoji is accepted by `valid_non_ascii`.
pub fn parse_valid_non_ascii_emoji_test() {
  // 🎉 is U+1F389, in range 0x10000-0x1FFFD
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: "🎉")
  let assert Ok(node.ValidNonAscii("🎉")) =
    nibble.run(tokens, parser.valid_non_ascii())
}
189
/// A CJK ideograph is accepted by `valid_non_ascii`.
pub fn parse_valid_non_ascii_chinese_test() {
  // 中 is U+4E2D, in range 0xE000-0xFFFD
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: "中")
  let assert Ok(node.ValidNonAscii("中")) =
    nibble.run(tokens, parser.valid_non_ascii())
}
197
/// Plain ASCII is below the non-ASCII range and must be rejected.
pub fn parse_valid_non_ascii_rejects_ascii_test() {
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: "a")
  let assert Error(_) = nibble.run(tokens, parser.valid_non_ascii())
}
204
/// Control characters such as tab are rejected by `valid_non_ascii`.
pub fn parse_valid_non_ascii_rejects_tab_test() {
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: "\t")
  let assert Error(_) = nibble.run(tokens, parser.valid_non_ascii())
}
211
212// Tests for not-end-of-line parser
213
/// A printable character counts as not-end-of-line and is wrapped accordingly.
pub fn parse_not_end_of_line_printable_test() {
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: "a")
  let assert Ok(node.NotEndOfLine(node.Printable("a"))) =
    nibble.run(tokens, parser.not_end_of_line())
}
221
/// A space counts as not-end-of-line via the printable branch.
pub fn parse_not_end_of_line_space_test() {
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: " ")
  let assert Ok(node.NotEndOfLine(node.Printable(" "))) =
    nibble.run(tokens, parser.not_end_of_line())
}
229
/// Tab counts as not-end-of-line via its dedicated Tab node.
pub fn parse_not_end_of_line_tab_test() {
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: "\t")
  let assert Ok(node.NotEndOfLine(node.Tab)) =
    nibble.run(tokens, parser.not_end_of_line())
}
236
/// A non-ASCII character counts as not-end-of-line via the valid-non-ascii
/// branch.
pub fn parse_not_end_of_line_valid_non_ascii_test() {
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: "λ")
  let assert Ok(node.NotEndOfLine(node.ValidNonAscii("λ"))) =
    nibble.run(tokens, parser.not_end_of_line())
}
244
/// LF terminates a line, so `not_end_of_line` must reject it.
pub fn parse_not_end_of_line_rejects_newline_test() {
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: "\n")
  let assert Error(_) = nibble.run(tokens, parser.not_end_of_line())
}
251
/// CR belongs to a line ending, so `not_end_of_line` must reject it.
pub fn parse_not_end_of_line_rejects_carriage_return_test() {
  let tokens = quasi_lexer.run(quasi_lexer.chars(), on: "\r")
  let assert Error(_) = nibble.run(tokens, parser.not_end_of_line())
}