//// Snapshot and assertion tests for the quasi lexer and the
//// character-level parsers (`exact_string`, `end_of_line`, `printable`,
//// `valid_non_ascii`, `not_end_of_line`).

import birdie
import gleam/int
import gleam/list
import gleam/result
import gleeunit
import nibble.{Expected}
import nibble/lexer.{Span, Token}
import node
import parser
import quasi_lexer
import snapshot_helpers

pub fn main() -> Nil {
  gleeunit.main()
}

/// Run the quasi lexer over `source` and return the resulting tokens.
fn lex(source: String) {
  quasi_lexer.chars()
  |> quasi_lexer.run(on: source)
}

// --- Quasi lexer -----------------------------------------------------------

pub fn simple_quasi_lexer_test() {
  // Every token is a single character, so each span is one column wide,
  // stays on row 1, and starts at its (1-based) position in the input.
  lex("let x1 = e1")
  |> list.index_map(fn(char_token, position) {
    let Token(span, lexeme, value) = char_token
    assert lexeme == value
    let Span(row_start, col_start, row_end, col_end) = span
    assert row_start == row_end && row_start == 1
    assert col_start == position + 1
    assert col_end == col_start + 1
  })
}

pub fn quasi_lexer_off_by_one_test() {
  let source = "let x1 =\n e1"
  snapshot_helpers.snap_lexer_output(
    source,
    lex(source),
    "Quasi lexer spans with multiline input",
  )
}

// --- Keyword parsing -------------------------------------------------------

pub fn parse_let_successfully_test() {
  let assert Ok(_) =
    nibble.run(lex("let"), parser.exact_string("let", node.Let))
}

pub fn parse_let_failing_test() {
  let assert Error(failure) =
    nibble.run(lex("lt"), parser.exact_string("let", node.Let))
  // A single dead end is expected; snapshot its message, the offending
  // lexeme, and the column where parsing gave up.
  let assert [
    nibble.DeadEnd(Span(_, column, _, _), Expected(msg, got: actual), _),
  ] = failure
  let snap =
    "Msg: "
    <> msg
    <> "\n"
    <> "Got: "
    <> actual
    <> "\n"
    <> "At column: "
    <> int.to_string(column)
    <> "\n"
  birdie.snap(snap, title: "Should fail to parse 'lt' as node.Let")
}

// --- Line endings ----------------------------------------------------------

pub fn parse_unix_line_ending_test() {
  let assert Ok(node.EndOfLine) = nibble.run(lex("\n"), parser.end_of_line())
}

pub fn parse_windows_line_ending_test() {
  let assert Ok(node.EndOfLine) = nibble.run(lex("\r\n"), parser.end_of_line())
}

pub fn parse_line_ending_fails_on_lone_carriage_return_test() {
  let source = "\r"
  let outcome = nibble.run(lex(source), parser.end_of_line())
  snapshot_helpers.snap_parse_result_nodes(
    source,
    outcome |> result.map(list.wrap),
    "Line ending should reject lone carriage return",
  )
}

pub fn parse_line_ending_fails_on_other_chars_test() {
  let assert Error(_) = nibble.run(lex("x"), parser.end_of_line())
}

pub fn parse_line_ending_after_content_test() {
  let keyword_then_newline = {
    use _ <- nibble.do(parser.let_keyword())
    parser.end_of_line()
  }
  let assert Ok(node.EndOfLine) =
    nibble.run(lex("let\n"), keyword_then_newline)
}

pub fn parse_multiple_line_endings_test() {
  // Mixed Unix and Windows endings in sequence.
  let source = "\n\r\n\n"
  let three_line_endings = {
    use first <- nibble.do(parser.end_of_line())
    use second <- nibble.do(parser.end_of_line())
    use third <- nibble.do(parser.end_of_line())
    nibble.return([first, second, third])
  }
  snapshot_helpers.snap_parse_result_nodes(
    source,
    nibble.run(lex(source), three_line_endings),
    "Multiple line endings should all parse as EndOfLine",
  )
}

// --- Printable characters (%x20-7F) ----------------------------------------

pub fn parse_printable_space_test() {
  let assert Ok(node.Printable(" ")) = nibble.run(lex(" "), parser.printable())
}

pub fn parse_printable_ascii_test() {
  let assert Ok(node.Printable("a")) = nibble.run(lex("a"), parser.printable())
}

pub fn parse_printable_tilde_test() {
  let assert Ok(node.Printable("~")) = nibble.run(lex("~"), parser.printable())
}

pub fn parse_printable_rejects_tab_test() {
  let assert Error(_) = nibble.run(lex("\t"), parser.printable())
}

pub fn parse_printable_rejects_newline_test() {
  let assert Error(_) = nibble.run(lex("\n"), parser.printable())
}

pub fn parse_printable_rejects_non_ascii_test() {
  let assert Error(_) = nibble.run(lex("é"), parser.printable())
}

// --- Valid non-ASCII characters --------------------------------------------

pub fn parse_valid_non_ascii_latin_test() {
  // é is U+00E9, inside the 0x80-0xD7FF range.
  let assert Ok(node.ValidNonAscii("é")) =
    nibble.run(lex("é"), parser.valid_non_ascii())
}

pub fn parse_valid_non_ascii_emoji_test() {
  // 🎉 is U+1F389, inside the 0x10000-0x1FFFD range.
  let assert Ok(node.ValidNonAscii("🎉")) =
    nibble.run(lex("🎉"), parser.valid_non_ascii())
}

pub fn parse_valid_non_ascii_chinese_test() {
  // 中 is U+4E2D, inside the 0x80-0xD7FF range.
  let assert Ok(node.ValidNonAscii("中")) =
    nibble.run(lex("中"), parser.valid_non_ascii())
}

pub fn parse_valid_non_ascii_rejects_ascii_test() {
  let assert Error(_) = nibble.run(lex("a"), parser.valid_non_ascii())
}

pub fn parse_valid_non_ascii_rejects_tab_test() {
  let assert Error(_) = nibble.run(lex("\t"), parser.valid_non_ascii())
}

// --- Not-end-of-line -------------------------------------------------------

pub fn parse_not_end_of_line_printable_test() {
  let assert Ok(node.NotEndOfLine(node.Printable("a"))) =
    nibble.run(lex("a"), parser.not_end_of_line())
}

pub fn parse_not_end_of_line_space_test() {
  let assert Ok(node.NotEndOfLine(node.Printable(" "))) =
    nibble.run(lex(" "), parser.not_end_of_line())
}

pub fn parse_not_end_of_line_tab_test() {
  let assert Ok(node.NotEndOfLine(node.Tab)) =
    nibble.run(lex("\t"), parser.not_end_of_line())
}

pub fn parse_not_end_of_line_valid_non_ascii_test() {
  let assert Ok(node.NotEndOfLine(node.ValidNonAscii("λ"))) =
    nibble.run(lex("λ"), parser.not_end_of_line())
}

pub fn parse_not_end_of_line_rejects_newline_test() {
  let assert Error(_) = nibble.run(lex("\n"), parser.not_end_of_line())
}

pub fn parse_not_end_of_line_rejects_carriage_return_test() {
  let assert Error(_) = nibble.run(lex("\r"), parser.not_end_of_line())
}