diff --git a/include/bf_interpreter.h b/include/bf_interpreter.h
index f60047d..53d9464 100644
--- a/include/bf_interpreter.h
+++ b/include/bf_interpreter.h
@@ -71,6 +71,7 @@ class brainfuck_interpreter
 
 void interpret_bf(const std::string& program);
 void interpret_bf(bf_tokenizer& tokenizer);
-void interpret_bf_test(bf_tokenizer& tokenizer);
+void interpret_bf_dynamic(bf_tokenizer& tokenizer);
+void interpret_bf_matching(bf_tokenizer& tokenizer);
 
 #endif //BRAINFUCK_MISC_BF_INTERPRETER_H
diff --git a/libraries/BLT b/libraries/BLT
index 02c62a2..82d960f 160000
--- a/libraries/BLT
+++ b/libraries/BLT
@@ -1 +1 @@
-Subproject commit 02c62a2d63f704bba8de5eb7dc151babb53473c3
+Subproject commit 82d960f8338c793a32db1d8742bbba6f50b25403
diff --git a/src/bf_interpreter.cpp b/src/bf_interpreter.cpp
index fecac08..2451578 100644
--- a/src/bf_interpreter.cpp
+++ b/src/bf_interpreter.cpp
@@ -131,18 +131,18 @@ void interpret_bf(bf_tokenizer& tokenizer)
                 break;
             case bf_token::OPEN:
                 if (fuck.is() == 0)
-                    match([](size_t& idx) { return ++idx; }, 1, index, tokens);
+                    index = c.offset;
                 break;
             case bf_token::CLOSE:
                 if (fuck.is() != 0)
-                    match([](size_t& idx) { return --idx; }, -1, index, tokens);
+                    index = c.offset;
                 break;
         }
         index++;
     }
 }
 
-void interpret_bf_test(bf_tokenizer& tokenizer)
+void interpret_bf_dynamic(bf_tokenizer& tokenizer)
 {
     brainfuck_interpreter fuck;
     auto& tokens = tokenizer.data();
@@ -156,6 +156,48 @@ void interpret_bf_test(bf_tokenizer& tokenizer)
     }
 }
 
+void interpret_bf_matching(bf_tokenizer& tokenizer)
+{
+    brainfuck_interpreter fuck;
+    auto& tokens = tokenizer.data();
+
+    size_t index = 0;
+    while (index < tokens.size())
+    {
+        auto c = tokens[index];
+        switch (c.type)
+        {
+            case bf_token::INC_DP:
+                fuck.increment_dp(c.offset);
+                break;
+            case bf_token::DEC_DP:
+                fuck.decrement_dp(c.offset);
+                break;
+            case bf_token::INC_DV:
+                fuck.increment(static_cast<uint8_t>(c.offset));
+                break;
+            case bf_token::DEC_DV:
+                fuck.decrement(static_cast<uint8_t>(c.offset));
+                break;
+            case bf_token::PRINT:
+                fuck.print(std::cout);
+                break;
+            case bf_token::READ:
+                fuck.read(std::cin);
+                break;
+            case bf_token::OPEN:
+                if (fuck.is() == 0)
+                    match([](size_t& idx) { return ++idx; }, 1, index, tokens);
+                break;
+            case bf_token::CLOSE:
+                if (fuck.is() != 0)
+                    match([](size_t& idx) { return --idx; }, -1, index, tokens);
+                break;
+        }
+        index++;
+    }
+}
+
 void brainfuck_interpreter::check_size()
 {
     if (_dp >= _size)
diff --git a/src/bf_tokenizer.cpp b/src/bf_tokenizer.cpp
index f75748d..4860a5b 100644
--- a/src/bf_tokenizer.cpp
+++ b/src/bf_tokenizer.cpp
@@ -85,9 +85,8 @@ void bf_tokenizer::tokenize(const std::string& program)
     characterizer tk{program};
     while (tk.hasNext())
     {
-        // TODO: something better + an optional wrapper
-        std::optional<bf_token> type = getType(tk.peek());
-        if (type.has_value())
+        // TODO: something better
+        if (auto type = getType(tk.peek()))
         {
             switch (type.value())
             {
@@ -129,9 +128,13 @@ void bf_tokenizer::bf_name()
                 if (tokens[search_2].type == bf_token::CLOSE)
                     sp--;
                 if (sp == 0)
+                {
+                    tokens[search_2].offset = static_cast<size_t>(search_index);
                     break;
+                }
             }
             tokens[search_2].name = name;
+            tokens[search_index].offset = static_cast<size_t>(search_2);
         }
         search_index++;
     }
@@ -153,19 +156,19 @@ void bf_tokenizer::print(size_t index)
         {
             case bf_token::INC_DP:
                 tabulate(sp);
-                std::cout << "Increase DP " << token.offset << "\n";
+                std::cout << "Increase DP " << token.offset << '\n';
                 break;
             case bf_token::DEC_DP:
                 tabulate(sp);
-                std::cout << "Decrease DP " << token.offset << "\n";
+                std::cout << "Decrease DP " << token.offset << '\n';
                 break;
             case bf_token::INC_DV:
                 tabulate(sp);
-                std::cout << "Increase DV " << token.offset << "\n";
+                std::cout << "Increase DV " << token.offset << '\n';
                 break;
             case bf_token::DEC_DV:
                 tabulate(sp);
-                std::cout << "Decrease DV " << token.offset << "\n";
+                std::cout << "Decrease DV " << token.offset << '\n';
                 break;
             case bf_token::PRINT:
                 tabulate(sp);
diff --git a/src/main.cpp b/src/main.cpp
index 9f201a7..d61e814 100644
--- a/src/main.cpp
+++ b/src/main.cpp
@@ -29,9 +29,13 @@ int main(int argc, const char** argv)
     interpret_bf(tokenizer);
     BLT_END_INTERVAL("Interpreters", "Tokens");
 
-    BLT_START_INTERVAL("Interpreters", "Tokens Test");
-    interpret_bf_test(tokenizer);
-    BLT_END_INTERVAL("Interpreters", "Tokens Test");
+    BLT_START_INTERVAL("Interpreters", "Tokens Dynamic Dispatch");
+    interpret_bf_dynamic(tokenizer);
+    BLT_END_INTERVAL("Interpreters", "Tokens Dynamic Dispatch");
+
+    BLT_START_INTERVAL("Interpreters", "Tokens Matching");
+    interpret_bf_matching(tokenizer);
+    BLT_END_INTERVAL("Interpreters", "Tokens Matching");
 
     BLT_PRINT_PROFILE("Interpreters");
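
Note on the matching change above: bf_tokenizer::bf_name() now stores, in each bracket token's offset field, the index of its matching partner, so interpret_bf can take a loop branch with a single `index = c.offset;` instead of re-scanning the token stream with match(); the old linear scan survives as interpret_bf_matching so the two can be profiled side by side from main.cpp. Below is a minimal standalone sketch of the same precompute-the-jump-targets idea, written with an explicit stack rather than the nested counter scan used in bf_name() (compute_jumps and jump_table are illustrative names, not part of this repo):

    #include <cstddef>
    #include <stdexcept>
    #include <string>
    #include <vector>

    // One O(n) pass: for every '[' record the index of its matching ']'
    // and vice versa, so the interpreter can jump in O(1) per bracket.
    std::vector<std::size_t> compute_jumps(const std::string& program)
    {
        std::vector<std::size_t> jump_table(program.size(), 0);
        std::vector<std::size_t> open_stack;
        for (std::size_t i = 0; i < program.size(); i++)
        {
            if (program[i] == '[')
                open_stack.push_back(i);
            else if (program[i] == ']')
            {
                if (open_stack.empty())
                    throw std::runtime_error("unmatched ']'");
                jump_table[open_stack.back()] = i; // '[' jumps to its ']'
                jump_table[i] = open_stack.back(); // ']' jumps to its '['
                open_stack.pop_back();
            }
        }
        if (!open_stack.empty())
            throw std::runtime_error("unmatched '['");
        return jump_table;
    }

With a table like this, the interpreter's OPEN/CLOSE cases reduce to an index assignment, which is exactly the difference the "Tokens" and "Tokens Matching" profiler intervals in main.cpp are set up to measure.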