diff --git a/CMakeLists.txt b/CMakeLists.txt
index a41103c..3a4eb30 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -7,9 +7,9 @@ option(ENABLE_TSAN "Enable the thread data race sanitizer" OFF)
 
 set(CMAKE_CXX_STANDARD 20)
 
-option(BUILD_PROFILING OFF)
-option(BUILD_NBT OFF)
-option(BUILD_PARSE OFF)
+option(BUILD_PROFILING ON)
+option(BUILD_NBT ON)
+option(BUILD_PARSE ON)
 
 add_subdirectory(libraries/BLT)
 include_directories(include/)
diff --git a/include/bf_tokenizer.h b/include/bf_tokenizer.h
index a89e02c..1d2a31d 100644
--- a/include/bf_tokenizer.h
+++ b/include/bf_tokenizer.h
@@ -50,6 +50,12 @@ class bf_tokenizer
         {
             tokenize(program);
             bf_name();
+            if (hasNext()){
+                // skip past comments
+                if (peek().type == bf_token::OPEN)
+                    while (hasNext() && peek().type != bf_token::CLOSE)
+                        advance();
+            }
         }
 
         inline bool hasNext(){
@@ -68,6 +74,10 @@ class bf_tokenizer
             return tokens[currentIndex];
         }
 
+        inline std::vector<token_t>& data(){
+            return tokens;
+        }
+
         void print(size_t index);
         inline void print(){
             print(currentIndex);
diff --git a/src/bf_interpreter.cpp b/src/bf_interpreter.cpp
index bdf614d..4d4fbbc 100644
--- a/src/bf_interpreter.cpp
+++ b/src/bf_interpreter.cpp
@@ -20,6 +20,20 @@ static inline void match(functor f, int sp, size_t& index, const std::string& program)
     }
 }
 
+template<typename functor>
+static inline void match(functor f, int sp, size_t& index, const std::vector<token_t>& program)
+{
+    while (f(index) < program.size())
+    {
+        if (program[index].type == bf_token::OPEN)
+            sp++;
+        if (program[index].type == bf_token::CLOSE)
+            sp--;
+        if (sp == 0)
+            break;
+    }
+}
+
 void interpret_bf(const std::string& program)
 {
     brainfuck_interpreter fuck;
@@ -65,7 +79,44 @@ void interpret_bf(const std::string& program)
 
 void interpret_bf(bf_tokenizer& tokenizer)
 {
-    
+    brainfuck_interpreter fuck;
+    auto& tokens = tokenizer.data();
+
+    size_t index = 0;
+    while (index < tokens.size())
+    {
+        auto c = tokens[index];
+        switch (c.type)
+        {
+            case bf_token::INC_DP:
+                fuck.increment_dp(c.offset);
+                break;
+            case bf_token::DEC_DP:
+                fuck.decrement_dp(c.offset);
+                break;
+            case bf_token::INC_DV:
+                fuck.increment(static_cast<char>(c.offset));
+                break;
+            case bf_token::DEC_DV:
+                fuck.decrement(static_cast<char>(c.offset));
+                break;
+            case bf_token::PRINT:
+                fuck.print(std::cout);
+                break;
+            case bf_token::READ:
+                fuck.read(std::cin);
+                break;
+            case bf_token::OPEN:
+                if (fuck.is() == 0)
+                    match([](size_t& idx) { return ++idx; }, 1, index, tokens);
+                break;
+            case bf_token::CLOSE:
+                if (fuck.is() != 0)
+                    match([](size_t& idx) { return --idx; }, -1, index, tokens);
+                break;
+        }
+        index++;
+    }
 }
 
 void brainfuck_interpreter::check_size()
diff --git a/src/bf_mips_codegen.cpp b/src/bf_mips_codegen.cpp
index 1a0df4b..fcc6c73 100644
--- a/src/bf_mips_codegen.cpp
+++ b/src/bf_mips_codegen.cpp
@@ -29,10 +29,6 @@ void codegen(bf_tokenizer& tokenizer, std::ostream& out)
     out << preamble;
     if (!tokenizer.hasNext())
         throw std::runtime_error("You failed to provide valid BF code");
-    // skip past comments
-    if (tokenizer.peek().type == bf_token::OPEN)
-        while (tokenizer.hasNext() && tokenizer.peek().type != bf_token::CLOSE)
-            tokenizer.advance();
 
     tokenizer.print();
 
diff --git a/src/main.cpp b/src/main.cpp
index 1e89b75..c49b053 100644
--- a/src/main.cpp
+++ b/src/main.cpp
@@ -6,6 +6,7 @@
 
 #include <bf_mips_codegen.h>
 #include <bf_interpreter.h>
+#include <blt/profiling/profiler.h>
 
 int main(int argc, const char** argv)
 {
@@ -19,6 +20,16 @@ int main(int argc, const char** argv)
 
     bf_tokenizer tokenizer(program);
     codegen(tokenizer, out);
+    BLT_START_INTERVAL("Interpreters", "Basic");
+    interpret_bf(program);
+    BLT_END_INTERVAL("Interpreters", "Basic");
+
+    BLT_START_INTERVAL("Interpreters", "Tokens");
+    interpret_bf(tokenizer);
+    BLT_END_INTERVAL("Interpreters", "Tokens");
+
+    BLT_PRINT_PROFILE("Interpreters");
+
     //interpret_bf(program);
 
     return 0;