sexy inter time:

main
Brett 2023-11-22 21:22:10 -05:00
parent 117e0210c8
commit 68aad83637
7 changed files with 9097 additions and 104 deletions

View File

@@ -71,5 +71,6 @@ class brainfuck_interpreter
 
 void interpret_bf(const std::string& program);
 void interpret_bf(bf_tokenizer& tokenizer);
+void interpret_bf_test(bf_tokenizer& tokenizer);
 
 #endif //BRAINFUCK_MISC_BF_INTERPRETER_H

@@ -1 +1 @@
-Subproject commit b4a7ee403560f2413dc4983cf56dae74c14e926f
+Subproject commit 02c62a2d63f704bba8de5eb7dc151babb53473c3

mips2.asm (9072 changed lines)

File diff suppressed because it is too large.

View File

@@ -5,6 +5,8 @@
  */
 #include <bf_interpreter.h>
 #include <iostream>
+#include <functional>
+#include <utility>
 
 template<typename functor>
 static inline void match(functor f, int sp, size_t& index, const std::string& program)
@@ -77,6 +79,27 @@ void interpret_bf(const std::string& program)
     }
 }
 
+std::function<void(brainfuck_interpreter& inter, size_t offset, size_t& index, const std::vector<token_t>& tokens)> funcs[8] = {
+        [](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<token_t>& tokens) { inter.increment_dp(off); },
+        [](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<token_t>& tokens) { inter.decrement_dp(off); },
+        [](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<token_t>& tokens) {
+            inter.increment(static_cast<int8_t>(off));
+        },
+        [](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<token_t>& tokens) {
+            inter.decrement(static_cast<int8_t>(off));
+        },
+        [](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<token_t>& tokens) { inter.print(std::cout); },
+        [](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<token_t>& tokens) { inter.read(std::cin); },
+        [](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<token_t>& tokens) {
+            if (inter.is() == 0)
+                match([](size_t& idx) { return ++idx; }, 1, index, tokens);
+        },
+        [](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<token_t>& tokens) {
+            if (inter.is() != 0)
+                match([](size_t& idx) { return --idx; }, -1, index, tokens);
+        },
+};
+
 void interpret_bf(bf_tokenizer& tokenizer)
 {
     brainfuck_interpreter fuck;
@@ -119,6 +142,20 @@ void interpret_bf(bf_tokenizer& tokenizer)
     }
 }
 
+void interpret_bf_test(bf_tokenizer& tokenizer)
+{
+    brainfuck_interpreter fuck;
+    auto& tokens = tokenizer.data();
+    size_t index = 0;
+    while (index < tokens.size())
+    {
+        auto& c = tokens[index];
+        funcs[static_cast<std::underlying_type_t<bf_token>>(c.type)](fuck, c.offset, index, tokens);
+        index++;
+    }
+}
+
 void brainfuck_interpreter::check_size()
 {
     if (_dp >= _size)
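The new funcs array replaces per-token branching with a dispatch table: each bf_token enum value is cast to its underlying integer and used to index an array of std::function handlers, so interpret_bf_test executes every token with a single indexed call. A minimal, self-contained sketch of the same technique (all names below are illustrative, not the repo's):

#include <array>
#include <cstddef>
#include <functional>
#include <iostream>
#include <vector>

// Illustrative stand-ins for the repo's bf_token/token_t types.
enum class op : std::size_t { INC, DEC, PRINT };

struct cell { int value = 0; };

// One handler per enum value; the enum's underlying value is the array
// index, so dispatch is a single indexed load instead of a switch.
const std::array<std::function<void(cell&)>, 3> handlers = {
    [](cell& c) { ++c.value; },
    [](cell& c) { --c.value; },
    [](cell& c) { std::cout << c.value << '\n'; },
};

int main()
{
    cell c;
    std::vector<op> program{op::INC, op::INC, op::DEC, op::PRINT};
    for (auto o : program)
        handlers[static_cast<std::size_t>(o)](c); // prints 1
}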

View File

@@ -48,12 +48,12 @@ void codegen(bf_tokenizer& tokenizer, std::ostream& out)
             break;
         case bf_token::INC_DV:
             out << "\tlb $t1, ($t0)\n"
-                << "\taddi $t1, $t1, " << static_cast<uint8_t>(token.offset) << "\n"
+                << "\taddi $t1, $t1, " << std::to_string(static_cast<uint8_t>(token.offset)) << "\n"
                 << "\tsb $t1, ($t0)\n";
             break;
         case bf_token::DEC_DV:
             out << "\tlb $t1, ($t0)\n"
-                << "\tsubi $t1, $t1, " << static_cast<uint8_t>(token.offset) << "\n"
+                << "\tsubi $t1, $t1, " << std::to_string(static_cast<uint8_t>(token.offset)) << "\n"
                 << "\tsb $t1, ($t0)\n";
             break;
         case bf_token::PRINT:
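The std::to_string wrapping here fixes a real bug, not a style issue: streaming a uint8_t into an std::ostream selects the unsigned char overload and emits a raw byte, so an offset of 65 would appear in the generated assembly as the letter A rather than the number. std::to_string has no uint8_t overload, so the value promotes to int and renders as digits. A quick demonstration:

#include <cstdint>
#include <iostream>
#include <string>

int main()
{
    std::uint8_t offset = 65;
    // uint8_t is an alias for unsigned char, so operator<< prints a character.
    std::cout << offset << '\n';                  // prints: A
    // to_string promotes to int, producing the numeric text the codegen needs.
    std::cout << std::to_string(offset) << '\n';  // prints: 65
}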

View File

@@ -55,43 +55,59 @@ class characterizer
     }
 };
 
+std::optional<bf_token> getType(char c)
+{
+    switch (c)
+    {
+        case '>':
+            return (bf_token::INC_DP);
+        case '<':
+            return (bf_token::DEC_DP);
+        case '+':
+            return (bf_token::INC_DV);
+        case '-':
+            return (bf_token::DEC_DV);
+        case '.':
+            return (bf_token::PRINT);
+        case ',':
+            return (bf_token::READ);
+        case '[':
+            return (bf_token::OPEN);
+        case ']':
+            return (bf_token::CLOSE);
+        default:
+            return {};
+    }
+}
+
 void bf_tokenizer::tokenize(const std::string& program)
 {
     characterizer tk{program};
 
     while (tk.hasNext())
     {
-        auto dv = tk.fetch();
-        bf_token type = bf_token::PRINT;
-        switch (dv.type)
-        {
-            case '>':
-                type = (bf_token::INC_DP);
-                break;
-            case '<':
-                type = (bf_token::DEC_DP);
-                break;
-            case '+':
-                type = (bf_token::INC_DV);
-                break;
-            case '-':
-                type = (bf_token::DEC_DV);
-                break;
-            case '.':
-                type = (bf_token::PRINT);
-                break;
-            case ',':
-                type = (bf_token::READ);
-                break;
-            case '[':
-                type = (bf_token::OPEN);
-                break;
-            case ']':
-                type = (bf_token::CLOSE);
-                break;
-            default:
-                break;
-        }
-        tokens.emplace_back(type, dv.count);
+        // TODO: something better + an optional wrapper
+        std::optional<bf_token> type = getType(tk.peek());
+        if (type.has_value())
+        {
+            switch (type.value())
+            {
+                case bf_token::INC_DP:
+                case bf_token::DEC_DP:
+                case bf_token::INC_DV:
+                case bf_token::DEC_DV:
+                    tokens.emplace_back(type.value(), tk.fetch().count);
+                    continue;
+                case bf_token::PRINT:
+                case bf_token::READ:
+                case bf_token::OPEN:
+                case bf_token::CLOSE:
+                    tokens.emplace_back(type.value(), 1);
+                    break;
+            }
+        }
+        // weird syntax relying on the continue;
+        tk.advance();
     }
 }
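The reshaped tokenizer separates classification from consumption: getType returns an empty std::optional for non-brainfuck characters (comments), only the four run-length-coalescible operators (>, <, +, -) consume a whole run via fetch().count, and the remaining four emit a single token and fall through to tk.advance(). A small stand-alone sketch of the same idea, with hypothetical names since the characterizer internals aren't shown in this diff:

#include <cstddef>
#include <iostream>
#include <optional>
#include <string>
#include <vector>

// Illustrative classifier: nullopt means "not a brainfuck op", mirroring getType.
static std::optional<char> classify(char c)
{
    const std::string ops = "><+-.,[]";
    return ops.find(c) != std::string::npos ? std::optional<char>{c} : std::nullopt;
}

int main()
{
    const std::string program = "+++ comment >>.";
    std::vector<std::pair<char, std::size_t>> tokens;
    for (std::size_t i = 0; i < program.size();)
    {
        auto op = classify(program[i]);
        if (!op) { ++i; continue; }                  // skip non-op characters
        std::size_t run = 1;
        if (*op == '>' || *op == '<' || *op == '+' || *op == '-')
            while (i + run < program.size() && program[i + run] == *op)
                ++run;                               // coalesce repeated operators
        tokens.emplace_back(*op, run);
        i += run;
    }
    for (auto& [c, n] : tokens)
        std::cout << c << " x" << n << '\n';         // + x3, > x2, . x1
}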

View File

@@ -7,10 +7,11 @@
 #include <bf_mips_codegen.h>
 #include <bf_interpreter.h>
 #include <blt/profiling/profiler_v2.h>
+#include "blt/std/format.h"
 
 int main(int argc, const char** argv)
 {
-    std::string file{"../helloworld.bf"};
+    std::string file{"../mandelbrot.bf"};
     if (argc > 1)
         file = argv[1];
     auto program = blt::fs::loadBrainFuckFile(file);
@@ -21,13 +22,17 @@ int main(int argc, const char** argv)
     codegen(tokenizer, out);
 
     BLT_START_INTERVAL("Interpreters", "Basic");
-    interpret_bf(program);
+    //interpret_bf(program);
     BLT_END_INTERVAL("Interpreters", "Basic");
 
     BLT_START_INTERVAL("Interpreters", "Tokens");
     interpret_bf(tokenizer);
     BLT_END_INTERVAL("Interpreters", "Tokens");
 
+    BLT_START_INTERVAL("Interpreters", "Tokens Test");
+    interpret_bf_test(tokenizer);
+    BLT_END_INTERVAL("Interpreters", "Tokens Test");
+
     BLT_PRINT_PROFILE("Interpreters");
 
     //interpret_bf(program);
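main.cpp now times the table-driven interpret_bf_test alongside the token interpreter, switches the default workload from helloworld.bf to mandelbrot.bf (the classic heavyweight brainfuck benchmark), and comments out the slow string-walking interpreter. For readers without the blt profiler dependency, a rough stand-in using std::chrono (time_interval is a hypothetical helper, not part of BLT):

#include <chrono>
#include <iostream>
#include <utility>

// Hypothetical replacement for the BLT_*_INTERVAL macros: run a callable
// and report its wall-clock duration in milliseconds.
template<typename F>
void time_interval(const char* name, F&& f)
{
    auto start = std::chrono::steady_clock::now();
    std::forward<F>(f)();
    auto end = std::chrono::steady_clock::now();
    auto ms = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
    std::cout << name << ": " << ms << "ms\n";
}

int main()
{
    // Stand-in workload; in the real program this would be, e.g.,
    // time_interval("Tokens Test", [&] { interpret_bf_test(tokenizer); });
    time_interval("Tokens Test", [] {
        volatile long sink = 0;
        for (long i = 0; i < 10'000'000; i++)
            sink = sink + i;
    });
}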