diff --git a/include/bf_tokenizer.h b/include/bf_tokenizer.h
new file mode 100644
index 0000000..418a233
--- /dev/null
+++ b/include/bf_tokenizer.h
@@ -0,0 +1,29 @@
+#pragma once
+/*
+ * Created by Brett on 17/11/23.
+ * Licensed under GNU General Public License V3.0
+ * See LICENSE file for license detail
+ */
+
+#ifndef BRAINFUCK_MISC_BF_TOKENIZER_H
+#define BRAINFUCK_MISC_BF_TOKENIZER_H
+
+#include <vector>
+#include <string>
+
+enum class bf_token
+{
+    INC_DP,
+    DEC_DP,
+    INC_DV,
+    DEC_DV,
+    PRINT,
+    READ,
+    OPEN,
+    CLOSE
+};
+
+std::vector<bf_token> tokenize(const std::string& program);
+
+
+#endif //BRAINFUCK_MISC_BF_TOKENIZER_H
diff --git a/src/bf_tokenizer.cpp b/src/bf_tokenizer.cpp
new file mode 100644
index 0000000..006321c
--- /dev/null
+++ b/src/bf_tokenizer.cpp
@@ -0,0 +1,49 @@
+/*
+ * Created by Brett on 17/11/23.
+ * Licensed under GNU General Public License V3.0
+ * See LICENSE file for license detail
+ */
+#include <bf_tokenizer.h>
+
+std::vector<bf_token> tokenize(const std::string& program)
+{
+    std::vector<bf_token> tokens;
+
+    size_t index = 0;
+    while (index < program.size())
+    {
+        auto c = program[index];
+        switch (c)
+        {
+            case '>':
+                tokens.push_back(bf_token::INC_DP);
+                break;
+            case '<':
+                tokens.push_back(bf_token::DEC_DP);
+                break;
+            case '+':
+                tokens.push_back(bf_token::INC_DV);
+                break;
+            case '-':
+                tokens.push_back(bf_token::DEC_DV);
+                break;
+            case '.':
+                tokens.push_back(bf_token::PRINT);
+                break;
+            case ',':
+                tokens.push_back(bf_token::READ);
+                break;
+            case '[':
+                tokens.push_back(bf_token::OPEN);
+                break;
+            case ']':
+                tokens.push_back(bf_token::CLOSE);
+                break;
+            default:
+                break;
+        }
+        index++;
+    }
+
+    return tokens;
+}
diff --git a/src/main.cpp b/src/main.cpp
index 9d56c1e..6a56e09 100644
--- a/src/main.cpp
+++ b/src/main.cpp
@@ -6,7 +6,6 @@
 #include <iostream>
 
-
 int main(int argc, const char** argv)
 {
     std::string file{"../life.bf"};