//// Tests for the brainfuck lexer: scratch/brainfuck/test/gbf_lexer_test.gleam
import gbf/internal/lexer
import gbf/internal/token
import gleeunit/should

| 5 | pub fn can_lex_test() { |
| 6 | "><+-.,[] this is a comment" |
| 7 | |> lexer.new |
| 8 | |> lexer.lex |
| 9 | |> should.equal([ |
| 10 | #(token.IncrementPointer, lexer.Position(0)), |
| 11 | #(token.DecrementPointer, lexer.Position(1)), |
| 12 | #(token.IncrementByte, lexer.Position(2)), |
| 13 | #(token.DecrementByte, lexer.Position(3)), |
| 14 | #(token.OutputByte, lexer.Position(4)), |
| 15 | #(token.InputByte, lexer.Position(5)), |
| 16 | #(token.StartBlock, lexer.Position(6)), |
| 17 | #(token.EndBlock, lexer.Position(7)), |
| 18 | #(token.Comment("this is a comment"), lexer.Position(9)), |
| 19 | ]) |
| 20 | } |
| 21 | |
| 22 | pub fn multiline_test() { |
| 23 | "this is a comment |
| 24 | +++ |
| 25 | <. |
| 26 | " |
| 27 | |> lexer.new |
| 28 | |> lexer.lex |
| 29 | |> should.equal([ |
| 30 | #(token.Comment("this is a comment"), lexer.Position(0)), |
| 31 | #(token.IncrementByte, lexer.Position(18)), |
| 32 | #(token.IncrementByte, lexer.Position(19)), |
| 33 | #(token.IncrementByte, lexer.Position(20)), |
| 34 | #(token.DecrementPointer, lexer.Position(22)), |
| 35 | #(token.OutputByte, lexer.Position(23)), |
| 36 | ]) |
| 37 | } |