19 files changed,
797 insertions(+),
0 deletions(-)
Author:
Oleksandr Smirnov
olexsmir@gmail.com
Committed at:
2025-10-23 13:26:17 +0300
Change ID:
qootszsvnqntntwpxutookswmxtprlzv
Parent:
26124b5
jump to
A
brainfuck/README.md
··· 1 +# gbf 2 + 3 +I was bored and made this :star: gleaming brainfuck interpreter. 4 + 5 +## How to use? 6 +### As library 7 +```gleam 8 +import gbf 9 +import gleam/io 10 + 11 +pub fn main() -> Nil { 12 + let input = 13 + "++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++." 14 + 15 + let assert Ok(virtual_machine) = gbf.run(input) 16 + 17 + virtual_machine 18 + |> gbf.output 19 + |> io.println 20 +//> Hello World! 21 +} 22 +``` 23 + 24 +### As CLI tool 25 +```bash 26 +gleam run -m gbf/run ./examples/helloworld.bf 27 +#> Hello World! 28 +```
A
brainfuck/examples/helloworld.bf
··· 1 +++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.
A
brainfuck/examples/piramide.b
··· 1 +,>++++++++[-<----->], 2 +[>++++++++[-<------>] 3 + <<[->++++++++++<]> 4 +>] 5 +<[->+>+>>>>>>>+<<<<<<<<<] 6 +>>>>++++++++[-<++++>] 7 +>++++++[-<+++++++>] 8 ++>>>++[-<+++++>] 9 +<<<<<<[-[>.<-] 10 + <[-<+>>+<] 11 + <[->+<] 12 + >>>>>[-<.>>+<] 13 + >[-<+>] 14 + >.<<++<<<-<->] 15 +>>>>>>>-[-<<<<<<.>>>>>>] 16 +<<<<<.
A
brainfuck/gleam.toml
··· 1 +name = "gbf" 2 +version = "1.0.0" 3 +description = "gleaming brainfuck" 4 +licences = ["GLWTPL"] 5 +repository = { type = "github", user = "olexsmir", repo = "gbf" } 6 + 7 +[dependencies] 8 +gleam_stdlib = ">= 0.44.0 and < 2.0.0" 9 +splitter = ">= 1.1.0 and < 2.0.0" 10 +argv = ">= 1.0.2 and < 2.0.0" 11 +simplifile = ">= 2.3.0 and < 3.0.0" 12 + 13 +[dev-dependencies] 14 +gleeunit = ">= 1.0.0 and < 2.0.0"
A
brainfuck/manifest.toml
··· 1 +# This file was generated by Gleam 2 +# You typically do not need to edit this file 3 + 4 +packages = [ 5 + { name = "argv", version = "1.0.2", build_tools = ["gleam"], requirements = [], otp_app = "argv", source = "hex", outer_checksum = "BA1FF0929525DEBA1CE67256E5ADF77A7CDDFE729E3E3F57A5BDCAA031DED09D" }, 6 + { name = "filepath", version = "1.1.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "filepath", source = "hex", outer_checksum = "B06A9AF0BF10E51401D64B98E4B627F1D2E48C154967DA7AF4D0914780A6D40A" }, 7 + { name = "gleam_stdlib", version = "0.65.0", build_tools = ["gleam"], requirements = [], otp_app = "gleam_stdlib", source = "hex", outer_checksum = "7C69C71D8C493AE11A5184828A77110EB05A7786EBF8B25B36A72F879C3EE107" }, 8 + { name = "gleeunit", version = "1.6.1", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleeunit", source = "hex", outer_checksum = "FDC68A8C492B1E9B429249062CD9BAC9B5538C6FBF584817205D0998C42E1DAC" }, 9 + { name = "simplifile", version = "2.3.0", build_tools = ["gleam"], requirements = ["filepath", "gleam_stdlib"], otp_app = "simplifile", source = "hex", outer_checksum = "0A868DAC6063D9E983477981839810DC2E553285AB4588B87E3E9C96A7FB4CB4" }, 10 + { name = "splitter", version = "1.1.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "splitter", source = "hex", outer_checksum = "05564A381580395DCDEFF4F88A64B021E8DAFA6540AE99B4623962F52976AA9D" }, 11 +] 12 + 13 +[requirements] 14 +argv = { version = ">= 1.0.2 and < 2.0.0" } 15 +gleam_stdlib = { version = ">= 0.44.0 and < 2.0.0" } 16 +gleeunit = { version = ">= 1.0.0 and < 2.0.0" } 17 +simplifile = { version = ">= 2.3.0 and < 3.0.0" } 18 +splitter = { version = ">= 1.1.0 and < 2.0.0" }
A
brainfuck/src/gbf.gleam
import gbf/internal/ascii
import gbf/internal/eval
import gbf/internal/lexer
import gbf/internal/parser
import gbf/internal/vm.{type VirtualMachine}
import gleam/list
import gleam/result
import gleam/string

/// Any error the interpreter can produce, tagged by the stage that failed.
pub type Error {
  Parser(reason: parser.Error)
  Eval(reason: eval.Error)
}

/// Lexes, parses, and evaluates the given brainfuck source, returning the
/// finished machine on success.
///
/// Note: the machine's input stream is seeded with the ASCII codes of the
/// source text itself, so `,` commands read bytes of the program text.
pub fn run(source: String) -> Result(VirtualMachine, Error) {
  let machine =
    source
    |> string.split(on: "")
    |> list.map(ascii.to_code)
    |> vm.new

  parse_ast(source)
  |> result.try(eval_ast(machine, _))
}

/// Returns everything the evaluated program has printed.
pub fn output(virtual_machine: VirtualMachine) -> String {
  vm.output(virtual_machine)
}

// Lex + parse, wrapping any parser error in this module's Error type.
fn parse_ast(source: String) {
  source
  |> lexer.new
  |> lexer.lex
  |> parser.parse
  |> result.map_error(Parser)
}

// Evaluate, wrapping any evaluator error in this module's Error type.
fn eval_ast(vm, ast) {
  eval.eval(vm, ast)
  |> result.map_error(Eval)
}
A
brainfuck/src/gbf/internal/ascii.gleam
import gleam/bit_array
import gleam/string

/// Converts a single-character string to its byte value.
/// Any string that is not exactly one byte long yields 0.
pub fn to_code(s: String) {
  // `<<value>>` only matches a bit array of exactly one byte, so the
  // explicit byte_size check is folded into the pattern.
  case bit_array.from_string(s) {
    <<value>> -> value
    _ -> 0
  }
}

/// Converts a code in 1..255 to a one-character string.
/// Codes outside that range (including 0) yield "".
pub fn from_code(code: Int) {
  case 1 <= code && code <= 255 {
    False -> ""
    True ->
      case string.utf_codepoint(code) {
        Ok(codepoint) -> string.from_utf_codepoints([codepoint])
        Error(_) -> ""
      }
  }
}
A
brainfuck/src/gbf/internal/eval.gleam
import gbf/internal/lexer
import gbf/internal/parser.{type AST, type Block, type Command}
import gbf/internal/token
import gbf/internal/vm.{type VirtualMachine}
import gleam/int
import gleam/list
import gleam/result

/// Errors that can occur while evaluating a program.
pub type Error {
  /// An unexpected command was encountered at the given position.
  UnexpectedCommand(pos: lexer.Position)

  /// An error occurred in the virtual machine.
  VmError(reason: vm.Error, pos: lexer.Position)
}

/// Evaluates an AST node against the virtual machine.
///
pub fn eval(vm: VirtualMachine, node: AST) -> Result(VirtualMachine, Error) {
  case node {
    parser.Leaf(command) -> eval_command(vm, command)
    parser.Node(block) -> eval_block(vm, block)
  }
}

/// Applies a single non-loop command, tagging any vm error with the
/// command's source position.
fn eval_command(
  vm: VirtualMachine,
  command: Command,
) -> Result(VirtualMachine, Error) {
  case command {
    // Comments have no effect on the machine.
    #(token.Comment(_), _) -> Ok(vm)

    // `>` / `<`: move the data pointer; errors if it runs off the tape.
    #(token.IncrementPointer, p) ->
      vm.set_pointer(vm, vm.pointer + 1) |> wrap_vm_error(p)
    #(token.DecrementPointer, p) ->
      vm.set_pointer(vm, vm.pointer - 1) |> wrap_vm_error(p)

    // `+` / `-`: change the byte under the pointer; errors on over/underflow.
    #(token.IncrementByte, p) -> mut_byte(vm, int.add) |> wrap_vm_error(p)
    #(token.DecrementByte, p) -> mut_byte(vm, int.subtract) |> wrap_vm_error(p)

    // `,` / `.`: byte input and output.
    #(token.InputByte, p) -> vm.input_byte(vm) |> wrap_vm_error(p)
    #(token.OutputByte, p) -> vm.output_byte(vm) |> wrap_vm_error(p)

    // Block delimiters are consumed by the parser; reaching one here means
    // the AST is malformed.
    #(token.StartBlock, p) -> Error(UnexpectedCommand(p))
    #(token.EndBlock, p) -> Error(UnexpectedCommand(p))
    // Covers token.EndOfFile, which is never a runnable command.
    #(_, p) -> Error(UnexpectedCommand(p))
  }
}

/// Evaluates a block's children left to right. `result.try` short-circuits:
/// once a step errors, the fold still walks the remaining children but each
/// step just forwards the error without evaluating anything.
fn eval_block(vm: VirtualMachine, block: Block) -> Result(VirtualMachine, Error) {
  use acc_vm, child <- list.fold(block.children, Ok(vm))
  case child {
    parser.Leaf(command) -> result.try(acc_vm, eval_command(_, command))
    parser.Node(child_block) ->
      result.try(acc_vm, eval_child_block(_, child_block))
  }
}

/// Runs a loop (`[...]`): evaluates the block body repeatedly while the cell
/// under the pointer is non-zero (cells hold 0..255, so `> 0` means non-zero).
fn eval_child_block(vm: VirtualMachine, child_block: Block) {
  use cell_value <- result.try(
    vm.get_cell(vm, vm.pointer)
    |> result.map_error(VmError(_, pos: child_block.position)),
  )

  case cell_value > 0 {
    False -> Ok(vm)
    True -> {
      // Run the body once, then re-check the loop condition recursively.
      let acc = eval_block(vm, child_block)
      result.try(acc, eval_child_block(_, child_block))
    }
  }
}

/// Applies `op(cell, 1)` to the byte under the pointer; bounds checking is
/// delegated to `vm.set_cell`.
fn mut_byte(vm: VirtualMachine, op: fn(Int, Int) -> Int) {
  use cell <- result.try(vm.get_cell(vm, vm.pointer))
  let cell = op(cell, 1)
  vm.set_cell(vm, vm.pointer, cell)
}

/// Converts a `vm.Error` into this module's `Error`, attaching the position.
fn wrap_vm_error(
  r: Result(VirtualMachine, vm.Error),
  pos: lexer.Position,
) -> Result(VirtualMachine, Error) {
  result.map_error(r, VmError(_, pos))
}
A
brainfuck/src/gbf/internal/lexer.gleam
import gbf/internal/token.{type Token}
import gleam/list
import gleam/string
import splitter

/// Lexer state: the remaining source text plus the byte offset consumed so
/// far. `newlines` is a prepared splitter used to cut comments off at the
/// next line break.
pub opaque type Lexer {
  Lexer(source: String, offset: Int, newlines: splitter.Splitter)
}

pub type Position {
  /// A token position in a file, represented as a byte offset from the start
  Position(offset: Int)
}

/// Creates a lexer positioned at the start of `source`.
pub fn new(source) {
  Lexer(source:, offset: 0, newlines: splitter.new(["\r\n", "\n"]))
}

/// Tokenizes the whole source, returning tokens paired with their byte
/// positions, in source order.
pub fn lex(lexer: Lexer) -> List(#(Token, Position)) {
  do_lex(lexer, [])
  |> list.reverse
}

// Accumulates tokens in reverse until EndOfFile is produced.
fn do_lex(lexer: Lexer, tokens: List(#(Token, Position))) {
  case next(lexer) {
    #(_, #(token.EndOfFile, _)) -> tokens
    #(lexer, token) -> do_lex(lexer, [token, ..tokens])
  }
}

// Produces the next token together with the advanced lexer.
fn next(lexer: Lexer) {
  case lexer.source {
    // Skip whitespace one byte at a time ("\r\n" is consumed as two steps,
    // each advancing the offset by 1).
    " " <> source | "\n" <> source | "\r" <> source | "\t" <> source ->
      advance(lexer, source, 1) |> next

    ">" <> source -> token(lexer, token.IncrementPointer, source, 1)
    "<" <> source -> token(lexer, token.DecrementPointer, source, 1)
    "+" <> source -> token(lexer, token.IncrementByte, source, 1)
    "-" <> source -> token(lexer, token.DecrementByte, source, 1)
    "." <> source -> token(lexer, token.OutputByte, source, 1)
    "," <> source -> token(lexer, token.InputByte, source, 1)
    "[" <> source -> token(lexer, token.StartBlock, source, 1)
    "]" <> source -> token(lexer, token.EndBlock, source, 1)

    // Anything else is either the end of input or the start of a comment.
    _ ->
      case string.pop_grapheme(lexer.source) {
        Error(_) -> #(lexer, #(token.EndOfFile, Position(lexer.offset)))
        Ok(_) -> comment(lexer, lexer.offset)
      }
  }
}

// Moves the lexer forward: `source` is the remaining text, `offset` the
// number of bytes just consumed.
fn advance(lexer, source, offset) {
  Lexer(..lexer, source:, offset: lexer.offset + offset)
}

// Pairs an already-built token with the advanced lexer.
fn advanced(
  token: #(Token, Position),
  lexer: Lexer,
  source: String,
  offset: Int,
) -> #(Lexer, #(Token, Position)) {
  #(advance(lexer, source, offset), token)
}

// Builds a token at the current offset and advances past it.
fn token(
  lexer: Lexer,
  token: Token,
  source: String,
  offset: Int,
) -> #(Lexer, #(Token, Position)) {
  #(token, Position(offset: lexer.offset))
  |> advanced(lexer, source, offset)
}

// Consumes everything up to (but not including) the next newline as a single
// Comment token positioned at `start`.
fn comment(lexer: Lexer, start: Int) -> #(Lexer, #(Token, Position)) {
  let #(prefix, suffix) = splitter.split_before(lexer.newlines, lexer.source)
  let eaten = string.byte_size(prefix)
  let lexer = advance(lexer, suffix, eaten)

  #(lexer, #(token.Comment(prefix), Position(start)))
}
A
brainfuck/src/gbf/internal/parser.gleam
import gbf/internal/lexer.{type Position, Position}
import gbf/internal/token.{type Token}
import gleam/list
import gleam/pair
import gleam/result

pub type AST {
  /// A single command
  ///
  Leaf(Command)

  /// A block with nested children (used for loops)
  ///
  Node(Block)
}

/// A token together with its byte position in the source.
pub type Command =
  #(Token, Position)

pub type Block {
  Block(children: List(AST), position: Position)
}

pub type Error {
  /// Parsing stopped with tokens left over (e.g. a stray `]` followed by
  /// more commands).
  FinishedTooEarly
  UnexpectedCommand
  UnexpectedBlock
}

/// Parses a list of tokens into an Abstract Syntax Tree.
///
/// Takes a flat list of tokens with their positions and constructs
/// a hierarchical tree structure where loop blocks become nested nodes.
/// All tokens must be consumed for successful parsing.
///
/// NOTE(review): brackets at the edges are not strictly balanced — an
/// unclosed `[` is treated as closed at end of input, and a trailing stray
/// `]` is accepted silently; confirm this leniency is intended.
///
pub fn parse(tokens: List(#(Token, Position))) -> Result(AST, Error) {
  let root = Node(Block(children: [], position: Position(0)))
  use #(ast, remaining_tokens) <- result.try(parse_tokens(tokens, root))

  case remaining_tokens {
    [] -> Ok(ast)
    _ -> Error(FinishedTooEarly)
  }
}

// Folds tokens into `node` until the list is exhausted or a `]` closes the
// current block. Returns the filled node plus the unconsumed tokens.
fn parse_tokens(tokens: List(#(Token, Position)), node: AST) {
  case tokens {
    [] -> Ok(#(node, []))
    [token, ..rest] -> {
      case token {
        #(token.StartBlock, _) -> parse_block(token, rest, node)
        #(token.EndBlock, _) -> parse_block_end(rest, node)
        _ -> parse_command(token, rest, node)
      }
    }
  }
}

// `]`: stop filling the current block and hand the leftover tokens back to
// the caller (the enclosing parse_block, or parse at top level).
fn parse_block_end(tokens: List(#(Token, Position)), node: AST) {
  Ok(#(node, tokens))
}

// `[`: recursively parse a child block (positioned at the `[` token), append
// it to the current block's children, then continue with whatever tokens the
// child did not consume.
fn parse_block(token, tokens, node) {
  case node {
    Leaf(_) -> Error(UnexpectedCommand)
    Node(block) -> {
      let child_block = Node(Block(children: [], position: pair.second(token)))
      use #(parsed_child_block, remaining_tokens) <- result.try(parse_tokens(
        tokens,
        child_block,
      ))

      let children = list.append(block.children, [parsed_child_block])
      let node = Node(Block(children: children, position: block.position))

      parse_tokens(remaining_tokens, node)
    }
  }
}

// Any plain command: append it to the current block's children as a Leaf and
// keep parsing.
fn parse_command(
  token: #(Token, Position),
  tokens: List(#(Token, Position)),
  node: AST,
) {
  case node {
    Leaf(_) -> Error(UnexpectedBlock)
    Node(block) -> {
      let command = Leaf(token)
      let node =
        Node(Block(
          children: list.append(block.children, [command]),
          position: block.position,
        ))

      parse_tokens(tokens, node)
    }
  }
}
A
brainfuck/src/gbf/internal/token.gleam
pub type Token {
  /// Everything that is not one of the tokens below is considered to be
  /// a comment (carried verbatim so positions stay accurate).
  Comment(String)

  /// Increment the data pointer by one.
  /// `>` symbol
  IncrementPointer

  /// Decrement the data pointer by one.
  /// `<` symbol
  DecrementPointer

  /// Increment the byte at the data pointer by one.
  /// `+` symbol
  IncrementByte

  /// Decrement the byte at the data pointer by one.
  /// `-` symbol
  DecrementByte

  /// Output the byte at the data pointer.
  /// `.` symbol
  OutputByte

  /// Accept one byte of input, storing its value in the byte at the data pointer.
  /// `,` symbol
  InputByte

  /// If the byte at the data pointer is zero, then instead of moving the
  /// instruction pointer forward to the next command, jump it forward to the
  /// command after the matching ] command.
  /// `[` symbol
  StartBlock

  /// If the byte at the data pointer is nonzero, then instead of moving the
  /// instruction pointer forward to the next command, jump it back to the
  /// command after the matching [ command.
  /// `]` symbol
  EndBlock

  /// End of file
  EndOfFile
}
A
brainfuck/src/gbf/internal/vm.gleam
import gbf/internal/ascii
import gleam/dict.{type Dict}
import gleam/list
import gleam/result

/// Number of cells on the tape; valid pointer indices are 0..tape_size - 1.
pub const tape_size = 30_000

/// Maximum value a single 8-bit cell may hold.
pub const cell_size = 255

pub type Error {
  PointerRanOffTape
  IntegerOverflow
  IntegerUnderflow
  EmptyInput
  InvalidChar(Int)
}

/// The machine model we are going to use for this interpreter is very simple:
/// - Our memory consists of 30,000 cells.
/// - There's a data pointer which points to a specific cell and is initialized at
///   the leftmost cell; an error will be reported if the pointer runs off the
///   tape at either end.
/// - A data cell is 8 bits, and an error will be reported if the program tries
///   to perform under- or overflow, i.e. decrement 0 or increment 255.
/// - Two streams of bytes for input and output using the ASCII character
///   encoding.
///
pub type VirtualMachine {
  VirtualMachine(pointer: Index, cells: Cells, output: String, input: List(Int))
}

/// Sparse cell storage: absent keys read as 0.
pub type Cells =
  Dict(Int, Int)

pub type Index =
  Int

/// Creates a fresh machine: pointer at cell 0, all cells zero, empty output,
/// and the given byte stream as pending input.
pub fn new(input: List(Int)) -> VirtualMachine {
  VirtualMachine(input:, pointer: 0, cells: dict.new(), output: "")
}

/// Returns the accumulated output string from the virtual machine.
///
pub fn output(vm: VirtualMachine) -> String {
  vm.output
}

/// Gets the value of the cell at the specified pointer.
/// Returns an error if the pointer is out of bounds.
///
/// ```gleam
/// get_cell(vm, 0) // Ok(0)
/// get_cell(vm, -1) // Error(PointerRanOffTape)
/// ```
pub fn get_cell(vm: VirtualMachine, pointer: Index) -> Result(Int, Error) {
  use pointer <- result.try(validate_tape_size(pointer))

  // Cells that were never written default to 0.
  case dict.get(vm.cells, pointer) {
    Ok(value) -> Ok(value)
    Error(_) -> Ok(0)
  }
}

/// Sets the value of the cell at the specified pointer.
///
/// Returns an updated virtual machine if successful, or an error if:
/// - The pointer is out of bounds (< 0 or >= 30,000)
/// - The value is out of bounds (< 0 or > 255)
///
/// ```gleam
/// set_cell(vm, 0, 65) // Ok(...)
/// set_cell(vm, 0, 256) // Error(IntegerOverflow)
/// ```
pub fn set_cell(
  vm: VirtualMachine,
  pointer: Index,
  value: Int,
) -> Result(VirtualMachine, Error) {
  use pointer <- result.try(validate_tape_size(pointer))
  use value <- result.try(validate_cell_size(value))
  let cells = dict.insert(vm.cells, pointer, value)

  VirtualMachine(..vm, cells:)
  |> Ok
}

/// Moves the data pointer to the specified position.
/// Returns error if pointer is out of bounds.
///
/// ```gleam
/// set_pointer(vm, 100) // Ok(...)
/// set_pointer(vm, -1) // Error(PointerRanOffTape)
/// ```
pub fn set_pointer(
  vm: VirtualMachine,
  pointer: Index,
) -> Result(VirtualMachine, Error) {
  use pointer <- result.try(validate_tape_size(pointer))

  VirtualMachine(..vm, pointer:)
  |> Ok
}

/// Reads a byte from the input stream and stores it in the current cell.
///
/// Consumes the first byte from the input list and writes it to the cell
/// at the current pointer position.
/// Returns an error if the input is empty.
///
/// ```gleam
/// input_byte(vm) // Ok(...)
/// input_byte(empty_vm) // Error(EmptyInput)
/// ```
pub fn input_byte(vm: VirtualMachine) -> Result(VirtualMachine, Error) {
  case vm.input {
    [] -> Error(EmptyInput)
    [first, ..] -> {
      use vm <- result.try(set_cell(vm, vm.pointer, first))

      VirtualMachine(..vm, input: list.drop(vm.input, 1))
      |> Ok
    }
  }
}

/// Reads the value from the current cell and appends it to the output as a character.
///
/// Converts the cell value to an ASCII character and adds it to the output string.
/// Returns an error if the cell value is not a valid ASCII code point.
///
pub fn output_byte(vm: VirtualMachine) -> Result(VirtualMachine, Error) {
  use cell_value <- result.try(get_cell(vm, vm.pointer))

  // ascii.from_code yields "" for codes it cannot represent (including 0).
  case ascii.from_code(cell_value) {
    "" -> Error(InvalidChar(cell_value))
    c ->
      VirtualMachine(..vm, output: vm.output <> c)
      |> Ok
  }
}

// The tape has tape_size cells, so the last valid index is tape_size - 1.
// (The previous `p > tape_size` check accepted index 30,000 — a 30,001st
// cell — which contradicted the documented 30,000-cell model.)
fn validate_tape_size(pointer: Index) {
  case pointer {
    p if p >= tape_size -> Error(PointerRanOffTape)
    p if p < 0 -> Error(PointerRanOffTape)
    _ -> Ok(pointer)
  }
}

// Cells are 8-bit: values must stay within 0..cell_size.
fn validate_cell_size(value: Int) {
  case value {
    v if v > cell_size -> Error(IntegerOverflow)
    v if v < 0 -> Error(IntegerUnderflow)
    _ -> Ok(value)
  }
}
A
brainfuck/src/gbf/run.gleam
import argv
import gbf
import gleam/io
import simplifile

/// CLI entry point: interprets the brainfuck file named by the single
/// command-line argument and prints the program's output. Panics (via
/// `let assert`) if the file cannot be read or the program fails.
pub fn main() -> Nil {
  case argv.load().arguments {
    [filename] -> {
      let assert Ok(source) = simplifile.read(filename)
      let assert Ok(machine) = gbf.run(source)
      io.println(gbf.output(machine))
    }
    _ -> io.println("usage: ./program filename.bf")
  }
}
A
brainfuck/test/gbf_lexer_test.gleam
import gbf/internal/lexer
import gbf/internal/token
import gleeunit/should

/// Positions are byte offsets: the 8 command characters occupy offsets 0-7,
/// the space at offset 8 is skipped, and the comment (without the leading
/// space) starts at offset 9.
pub fn can_lex_test() {
  "><+-.,[] this is a comment"
  |> lexer.new
  |> lexer.lex
  |> should.equal([
    #(token.IncrementPointer, lexer.Position(0)),
    #(token.DecrementPointer, lexer.Position(1)),
    #(token.IncrementByte, lexer.Position(2)),
    #(token.DecrementByte, lexer.Position(3)),
    #(token.OutputByte, lexer.Position(4)),
    #(token.InputByte, lexer.Position(5)),
    #(token.StartBlock, lexer.Position(6)),
    #(token.EndBlock, lexer.Position(7)),
    #(token.Comment("this is a comment"), lexer.Position(9)),
  ])
}

/// Newlines are skipped but still advance the byte offset: the comment
/// spans 0-16, "\n" is 17, "+++" sits at 18-20, "\n" is 21, then "<." at
/// 22-23. Trailing whitespace produces no token.
pub fn multiline_test() {
  "this is a comment
+++
<.
  "
  |> lexer.new
  |> lexer.lex
  |> should.equal([
    #(token.Comment("this is a comment"), lexer.Position(0)),
    #(token.IncrementByte, lexer.Position(18)),
    #(token.IncrementByte, lexer.Position(19)),
    #(token.IncrementByte, lexer.Position(20)),
    #(token.DecrementPointer, lexer.Position(22)),
    #(token.OutputByte, lexer.Position(23)),
  ])
}
A
brainfuck/test/gbf_parser_test.gleam
import gbf/internal/lexer.{Position}
import gbf/internal/parser.{Block, Leaf, Node}
import gbf/internal/token.{DecrementByte, IncrementByte, IncrementPointer}
import gleeunit/should

/// "+[->+]>" should parse into a root block (position 0) holding the leading
/// `+`, a nested block for the loop positioned at the `[` (offset 1) whose
/// children are `-`, `>`, `+` at offsets 2-4, and the trailing `>` at
/// offset 6 (the `]` at offset 5 only closes the block).
pub fn should_parse_test() {
  "+[->+]>"
  |> lexer.new
  |> lexer.lex
  |> parser.parse
  |> should.equal(
    Ok(
      Node(
        Block(position: Position(0), children: [
          Leaf(#(IncrementByte, Position(0))),
          Node(
            Block(position: Position(1), children: [
              Leaf(#(DecrementByte, Position(2))),
              Leaf(#(IncrementPointer, Position(3))),
              Leaf(#(IncrementByte, Position(4))),
            ]),
          ),
          Leaf(#(IncrementPointer, Position(6))),
        ]),
      ),
    ),
  )
}
A
brainfuck/test/gbf_test.gleam
import gbf
import gleeunit
import gleeunit/should

/// Test-runner entry point.
pub fn main() -> Nil {
  gleeunit.main()
}

/// End-to-end check: the classic hello-world program produces exactly
/// "Hello World!\n".
pub fn should_run_hello_world_test() {
  let assert Ok(machine) =
    gbf.run(
      "++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.",
    )

  should.equal(gbf.output(machine), "Hello World!\n")
}
A
brainfuck/test/gbf_vm_test.gleam
import gbf/internal/ascii
import gbf/internal/vm.{type VirtualMachine}
import gleeunit/should

// Builds a fresh machine with the given input byte stream.
fn setup(input) -> VirtualMachine {
  vm.new(input)
}

pub fn get_cell_test() {
  let vm = setup([])

  // Unwritten cells read as zero.
  vm.get_cell(vm, 3)
  |> should.equal(Ok(0))
}

pub fn get_cell_out_of_tape_test() {
  let vm = setup([])

  vm.get_cell(vm, vm.tape_size + 1)
  |> should.equal(Error(vm.PointerRanOffTape))
}

pub fn set_cell_test() {
  let vm = setup([])

  vm.set_cell(vm, 2, 22)
  |> should.be_ok
}

pub fn set_cell_errors_test() {
  let vm = setup([])

  vm.set_cell(vm, vm.tape_size + 1, 22)
  |> should.be_error

  vm.set_cell(vm, 2, vm.cell_size + 1)
  |> should.be_error
}

pub fn set_pointer_test() {
  let vm = setup([])

  vm.set_pointer(vm, 2)
  |> should.be_ok
}

pub fn output_byte_empty_test() {
  let vm = setup([])

  // Cell 0 defaults to 0, which has no printable representation.
  vm.output_byte(vm)
  |> should.equal(Error(vm.InvalidChar(0)))
}

pub fn output_byte_test() {
  // FIX: the old test called output_byte on a machine whose cells were all
  // zero (the input stream is not the tape), so it returned
  // Error(InvalidChar(0)) and result.try short-circuited past the assertion
  // — the test asserted nothing. Read the input byte into cell 0 first, and
  // use `let assert` so any failure panics instead of being swallowed.
  let vm = setup([ascii.to_code("a")])

  let assert Ok(vm) = vm.input_byte(vm)
  let assert Ok(vm) = vm.output_byte(vm)

  should.equal(vm.output, "a")
}

pub fn input_byte_empty_test() {
  let vm = setup([])

  vm.input_byte(vm)
  |> should.equal(Error(vm.EmptyInput))
}

pub fn input_byte_test() {
  let vm = setup([ascii.to_code("a"), ascii.to_code("b")])

  // `let assert` panics on failure, so the assertion below always runs.
  let assert Ok(vm) = vm.input_byte(vm)

  should.equal(vm.input, [ascii.to_code("b")])
}