brain fucker

main
Brett 2023-11-24 13:53:27 -05:00
parent 68aad83637
commit 8f4718d699
5 changed files with 65 additions and 15 deletions

View File

@ -71,6 +71,7 @@ class brainfuck_interpreter
void interpret_bf(const std::string& program);
void interpret_bf(bf_tokenizer& tokenizer);
void interpret_bf_test(bf_tokenizer& tokenizer);
void interpret_bf_dynamic(bf_tokenizer& tokenizer);
void interpret_bf_matching(bf_tokenizer& tokenizer);
#endif //BRAINFUCK_MISC_BF_INTERPRETER_H

@ -1 +1 @@
Subproject commit 02c62a2d63f704bba8de5eb7dc151babb53473c3
Subproject commit 82d960f8338c793a32db1d8742bbba6f50b25403

View File

@ -131,18 +131,18 @@ void interpret_bf(bf_tokenizer& tokenizer)
break;
case bf_token::OPEN:
if (fuck.is() == 0)
match([](size_t& idx) { return ++idx; }, 1, index, tokens);
index = c.offset;
break;
case bf_token::CLOSE:
if (fuck.is() != 0)
match([](size_t& idx) { return --idx; }, -1, index, tokens);
index = c.offset;
break;
}
index++;
}
}
void interpret_bf_test(bf_tokenizer& tokenizer)
void interpret_bf_dynamic(bf_tokenizer& tokenizer)
{
brainfuck_interpreter fuck;
auto& tokens = tokenizer.data();
@ -156,6 +156,48 @@ void interpret_bf_test(bf_tokenizer& tokenizer)
}
}
// Runs the tokenized brainfuck program, resolving loop brackets at runtime:
// on '[' / ']' it scans for the matching bracket via match() instead of
// jumping through a precomputed offset (contrast with the offset-based
// interpreter variant in this file).
void interpret_bf_matching(bf_tokenizer& tokenizer)
{
    brainfuck_interpreter machine;
    auto& tokens = tokenizer.data();
    // pc is also advanced inside match() (passed by reference) when a
    // bracket is skipped; the loop increment then steps past the match.
    for (size_t pc = 0; pc < tokens.size(); pc++)
    {
        const auto tok = tokens[pc];
        if (tok.type == bf_token::INC_DP)
        {
            machine.increment_dp(tok.offset);
        }
        else if (tok.type == bf_token::DEC_DP)
        {
            machine.decrement_dp(tok.offset);
        }
        else if (tok.type == bf_token::INC_DV)
        {
            // offset carries the repeat count; cell arithmetic is 8-bit.
            machine.increment(static_cast<int8_t>(tok.offset));
        }
        else if (tok.type == bf_token::DEC_DV)
        {
            machine.decrement(static_cast<int8_t>(tok.offset));
        }
        else if (tok.type == bf_token::PRINT)
        {
            machine.print(std::cout);
        }
        else if (tok.type == bf_token::READ)
        {
            machine.read(std::cin);
        }
        else if (tok.type == bf_token::OPEN)
        {
            // '[': skip forward to the matching ']' when the cell is zero.
            if (machine.is() == 0)
                match([](size_t& i) { return ++i; }, 1, pc, tokens);
        }
        else if (tok.type == bf_token::CLOSE)
        {
            // ']': scan backward to the matching '[' when the cell is non-zero.
            if (machine.is() != 0)
                match([](size_t& i) { return --i; }, -1, pc, tokens);
        }
    }
}
void brainfuck_interpreter::check_size()
{
if (_dp >= _size)

View File

@ -85,9 +85,8 @@ void bf_tokenizer::tokenize(const std::string& program)
characterizer tk{program};
while (tk.hasNext())
{
// TODO: something better + an optional wrapper
std::optional<bf_token> type = getType(tk.peek());
if (type.has_value())
// TODO: something better
if (auto type = getType(tk.peek()))
{
switch (type.value())
{
@ -129,9 +128,13 @@ void bf_tokenizer::bf_name()
if (tokens[search_2].type == bf_token::CLOSE)
sp--;
if (sp == 0)
{
tokens[search_2].offset = static_cast<int64_t>(search_index);
break;
}
}
tokens[search_2].name = name;
tokens[search_index].offset = static_cast<int64_t>(search_2);
}
search_index++;
}
@ -153,19 +156,19 @@ void bf_tokenizer::print(size_t index)
{
case bf_token::INC_DP:
tabulate(sp);
std::cout << "Increase DP " << token.offset << "\n";
std::cout << "Increase DP " << token.offset << '\n';
break;
case bf_token::DEC_DP:
tabulate(sp);
std::cout << "Decrease DP " << token.offset << "\n";
std::cout << "Decrease DP " << token.offset << '\n';
break;
case bf_token::INC_DV:
tabulate(sp);
std::cout << "Increase DV " << token.offset << "\n";
std::cout << "Increase DV " << token.offset << '\n';
break;
case bf_token::DEC_DV:
tabulate(sp);
std::cout << "Decrease DV " << token.offset << "\n";
std::cout << "Decrease DV " << token.offset << '\n';
break;
case bf_token::PRINT:
tabulate(sp);

View File

@ -29,9 +29,13 @@ int main(int argc, const char** argv)
interpret_bf(tokenizer);
BLT_END_INTERVAL("Interpreters", "Tokens");
BLT_START_INTERVAL("Interpreters", "Tokens Test");
interpret_bf_test(tokenizer);
BLT_END_INTERVAL("Interpreters", "Tokens Test");
BLT_START_INTERVAL("Interpreters", "Tokens Dynamic Dispatch");
interpret_bf_dynamic(tokenizer);
BLT_END_INTERVAL("Interpreters", "Tokens Dynamic Dispatch");
BLT_START_INTERVAL("Interpreters", "Tokens Matching");
interpret_bf_matching(tokenizer);
BLT_END_INTERVAL("Interpreters", "Tokens Matching");
BLT_PRINT_PROFILE("Interpreters");