brain fucker

main
Brett 2023-11-24 13:53:27 -05:00
parent 68aad83637
commit 8f4718d699
5 changed files with 65 additions and 15 deletions


@@ -71,6 +71,7 @@ class brainfuck_interpreter
 void interpret_bf(const std::string& program);
 void interpret_bf(bf_tokenizer& tokenizer);
-void interpret_bf_test(bf_tokenizer& tokenizer);
+void interpret_bf_dynamic(bf_tokenizer& tokenizer);
+void interpret_bf_matching(bf_tokenizer& tokenizer);
 #endif //BRAINFUCK_MISC_BF_INTERPRETER_H

@@ -1 +1 @@
-Subproject commit 02c62a2d63f704bba8de5eb7dc151babb53473c3
+Subproject commit 82d960f8338c793a32db1d8742bbba6f50b25403


@@ -131,18 +131,18 @@ void interpret_bf(bf_tokenizer& tokenizer)
                 break;
             case bf_token::OPEN:
                 if (fuck.is() == 0)
-                    match([](size_t& idx) { return ++idx; }, 1, index, tokens);
+                    index = c.offset;
                 break;
             case bf_token::CLOSE:
                 if (fuck.is() != 0)
-                    match([](size_t& idx) { return --idx; }, -1, index, tokens);
+                    index = c.offset;
                 break;
         }
         index++;
     }
 }
-void interpret_bf_test(bf_tokenizer& tokenizer)
+void interpret_bf_dynamic(bf_tokenizer& tokenizer)
 {
     brainfuck_interpreter fuck;
     auto& tokens = tokenizer.data();
@@ -156,6 +156,48 @@ void interpret_bf_test(bf_tokenizer& tokenizer)
     }
 }
+void interpret_bf_matching(bf_tokenizer& tokenizer)
+{
+    brainfuck_interpreter fuck;
+    auto& tokens = tokenizer.data();
+    size_t index = 0;
+    while (index < tokens.size())
+    {
+        auto c = tokens[index];
+        switch (c.type)
+        {
+            case bf_token::INC_DP:
+                fuck.increment_dp(c.offset);
+                break;
+            case bf_token::DEC_DP:
+                fuck.decrement_dp(c.offset);
+                break;
+            case bf_token::INC_DV:
+                fuck.increment(static_cast<int8_t>(c.offset));
+                break;
+            case bf_token::DEC_DV:
+                fuck.decrement(static_cast<int8_t>(c.offset));
+                break;
+            case bf_token::PRINT:
+                fuck.print(std::cout);
+                break;
+            case bf_token::READ:
+                fuck.read(std::cin);
+                break;
+            case bf_token::OPEN:
+                if (fuck.is() == 0)
+                    match([](size_t& idx) { return ++idx; }, 1, index, tokens);
+                break;
+            case bf_token::CLOSE:
+                if (fuck.is() != 0)
+                    match([](size_t& idx) { return --idx; }, -1, index, tokens);
+                break;
+        }
+        index++;
+    }
+}
 void brainfuck_interpreter::check_size()
 {
     if (_dp >= _size)
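The heart of this file's change: interpret_bf no longer scans for the matching bracket at run time. The tokenizer now records each bracket's partner in the token's offset field, so the OPEN/CLOSE handlers collapse to index = c.offset, and the unconditional index++ after the switch steps one token past the matched bracket, exactly where execution should resume. The scan-based variant survives as interpret_bf_matching for benchmarking. The repository's match() helper is not shown in this diff; below is a hedged sketch of what such a depth-counting scanner plausibly looks like, with the token layout and the exact signature assumed rather than taken from the repo.

#include <cstddef>
#include <cstdint>
#include <vector>

// Sketch of a scan-based matcher in the spirit of the match() calls above.
// The token layout and signature are assumptions for illustration only;
// the repository's real match() is not part of this diff.
enum class bf_token { OPEN, CLOSE, OTHER };
struct token_t { bf_token type; };

template <typename Advance>
void match(Advance&& advance, std::int64_t direction, std::size_t& index, const std::vector<token_t>& tokens)
{
    // Keep a running bracket depth: +1/-1 per OPEN/CLOSE, signed so the
    // same loop works forward (+1, from an OPEN) and backward (-1, from a
    // CLOSE). Depth returns to zero exactly on the matching bracket.
    std::int64_t depth = 0;
    while (index < tokens.size())
    {
        if (tokens[index].type == bf_token::OPEN)
            depth += direction;
        else if (tokens[index].type == bf_token::CLOSE)
            depth -= direction;
        if (depth == 0)
            return; // index now rests on the matching bracket
        advance(index); // ++index or --index, as in the call sites above
    }
}

Both strategies leave index on the matching bracket and let the trailing index++ move past it; the difference is that the scan costs O(distance to the partner) on every taken branch, while the precomputed offset is O(1).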


@@ -85,9 +85,8 @@ void bf_tokenizer::tokenize(const std::string& program)
     characterizer tk{program};
     while (tk.hasNext())
     {
-        // TODO: something better + an optional wrapper
-        std::optional<bf_token> type = getType(tk.peek());
-        if (type.has_value())
+        // TODO: something better
+        if (auto type = getType(tk.peek()))
         {
             switch (type.value())
             {
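The condensed condition in tokenize works because std::optional has an explicit operator bool that an if-condition invokes, and a variable declared in the condition stays in scope for the branch body, where type.value() reads it. A minimal standalone illustration; parse_digit is a made-up helper, not code from this repository.

#include <iostream>
#include <optional>

// Hypothetical helper used only to demonstrate the pattern.
std::optional<int> parse_digit(char c)
{
    if (c >= '0' && c <= '9')
        return c - '0';
    return std::nullopt;
}

int main()
{
    // The branch runs only when the optional holds a value; inside it,
    // *d (or d.value()) reads that value, exactly as tokenize() does.
    if (auto d = parse_digit('7'))
        std::cout << *d << '\n'; // prints 7
}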
@@ -129,9 +128,13 @@ void bf_tokenizer::bf_name()
                 if (tokens[search_2].type == bf_token::CLOSE)
                     sp--;
                 if (sp == 0)
+                {
+                    tokens[search_2].offset = static_cast<int64_t>(search_index);
                     break;
+                }
             }
             tokens[search_2].name = name;
+            tokens[search_index].offset = static_cast<int64_t>(search_2);
         }
         search_index++;
     }
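This hunk is where the jump targets come from: when the nested scan in bf_name pairs two brackets, it now writes each partner's index into the other token's offset (the CLOSE gets the OPEN's index, and the OPEN gets the CLOSE's). The same table can be built in a single pass with a stack; here is a self-contained sketch of that classic technique, reusing the field names from the diff, assuming a well-formed program, and not taken from this commit.

#include <cstddef>
#include <cstdint>
#include <stack>
#include <vector>

enum class bf_token { OPEN, CLOSE, OTHER };
struct token_t { bf_token type; std::int64_t offset = 0; };

// Single-pass bracket pairing: push each OPEN's index, and on every CLOSE
// pop the most recent unmatched OPEN and cross-link the two offsets.
// Assumes the program is balanced (every CLOSE has a matching OPEN).
void link_brackets(std::vector<token_t>& tokens)
{
    std::stack<std::size_t> opens;
    for (std::size_t i = 0; i < tokens.size(); i++)
    {
        if (tokens[i].type == bf_token::OPEN)
            opens.push(i);
        else if (tokens[i].type == bf_token::CLOSE)
        {
            std::size_t open = opens.top();
            opens.pop();
            tokens[open].offset = static_cast<std::int64_t>(i);
            tokens[i].offset = static_cast<std::int64_t>(open);
        }
    }
}

Every OPEN index is pushed and popped exactly once, so the whole pass is O(n) in the token count, whereas a rescan per bracket pair degrades toward O(n²) on deeply nested programs.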
@@ -153,19 +156,19 @@ void bf_tokenizer::print(size_t index)
     {
         case bf_token::INC_DP:
             tabulate(sp);
-            std::cout << "Increase DP " << token.offset << "\n";
+            std::cout << "Increase DP " << token.offset << '\n';
             break;
         case bf_token::DEC_DP:
             tabulate(sp);
-            std::cout << "Decrease DP " << token.offset << "\n";
+            std::cout << "Decrease DP " << token.offset << '\n';
             break;
         case bf_token::INC_DV:
             tabulate(sp);
-            std::cout << "Increase DV " << token.offset << "\n";
+            std::cout << "Increase DV " << token.offset << '\n';
             break;
         case bf_token::DEC_DV:
             tabulate(sp);
-            std::cout << "Decrease DV " << token.offset << "\n";
+            std::cout << "Decrease DV " << token.offset << '\n';
             break;
         case bf_token::PRINT:
             tabulate(sp);


@@ -29,9 +29,13 @@ int main(int argc, const char** argv)
     interpret_bf(tokenizer);
     BLT_END_INTERVAL("Interpreters", "Tokens");
-    BLT_START_INTERVAL("Interpreters", "Tokens Test");
-    interpret_bf_test(tokenizer);
-    BLT_END_INTERVAL("Interpreters", "Tokens Test");
+    BLT_START_INTERVAL("Interpreters", "Tokens Dynamic Dispatch");
+    interpret_bf_dynamic(tokenizer);
+    BLT_END_INTERVAL("Interpreters", "Tokens Dynamic Dispatch");
+    BLT_START_INTERVAL("Interpreters", "Tokens Matching");
+    interpret_bf_matching(tokenizer);
+    BLT_END_INTERVAL("Interpreters", "Tokens Matching");
     BLT_PRINT_PROFILE("Interpreters");
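main now benchmarks three variants back to back: the offset-jumping interpret_bf, interpret_bf_dynamic, and the scan-based interpret_bf_matching. The BLT_* profiling macros come from the BLT subproject bumped earlier in this commit and are not reproduced here; as a rough sketch of the underlying idea only, assuming nothing about BLT beyond that the macros time a named region, an RAII timer could look like this.

#include <chrono>
#include <iostream>
#include <string>
#include <utility>

// RAII sketch of a named timing interval. The real BLT macros presumably
// aggregate intervals into a profile printed by BLT_PRINT_PROFILE, which
// this toy version does not attempt.
class interval_timer
{
    std::string name_;
    std::chrono::steady_clock::time_point start_ = std::chrono::steady_clock::now();
public:
    explicit interval_timer(std::string name) : name_(std::move(name)) {}
    ~interval_timer()
    {
        auto us = std::chrono::duration_cast<std::chrono::microseconds>(
                std::chrono::steady_clock::now() - start_).count();
        std::cout << name_ << ": " << us << "us\n";
    }
};

int main()
{
    interval_timer t("Tokens Matching"); // prints the elapsed time on scope exit
    // ... interpret_bf_matching(tokenizer) would run here ...
}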