tokenizer

main
Brett 2023-11-17 00:30:48 -05:00
parent 2670e34e25
commit 40858868d2
3 changed files with 78 additions and 1 deletions

29
include/bf_tokenizer.h Normal file
View File

@ -0,0 +1,29 @@
#pragma once
/*
 * Created by Brett on 17/11/23.
 * Licensed under GNU General Public License V3.0
 * See LICENSE file for license detail
 */
#ifndef BRAINFUCK_MISC_BF_TOKENIZER_H
#define BRAINFUCK_MISC_BF_TOKENIZER_H

#include <vector>
#include <string>

// One token per Brainfuck command character (mapping established in tokenize()).
enum class bf_token
{
    INC_DP, // '>' — increment the data pointer
    DEC_DP, // '<' — decrement the data pointer
    INC_DV, // '+' — increment the data value
    DEC_DV, // '-' — decrement the data value
    PRINT,  // '.' — output the current data value
    READ,   // ',' — read one byte into the current cell
    OPEN,   // '[' — loop open
    CLOSE   // ']' — loop close
};

/**
 * Converts Brainfuck source text into a flat token stream.
 * Characters that are not one of the eight Brainfuck commands are skipped.
 *
 * @param program Brainfuck source code.
 * @return tokens in the order the commands appear in the program.
 */
[[nodiscard]] std::vector<bf_token> tokenize(const std::string& program);

#endif //BRAINFUCK_MISC_BF_TOKENIZER_H

49
src/bf_tokenizer.cpp Normal file
View File

@ -0,0 +1,49 @@
/*
* Created by Brett on 17/11/23.
* Licensed under GNU General Public License V3.0
* See LICENSE file for license detail
*/
#include <bf_tokenizer.h>
std::vector<bf_token> tokenize(const std::string& program)
{
std::vector<bf_token> tokens;
size_t index = 0;
while (index < program.size())
{
auto c = program[index];
switch (c)
{
case '>':
tokens.push_back(bf_token::INC_DP);
break;
case '<':
tokens.push_back(bf_token::DEC_DP);
break;
case '+':
tokens.push_back(bf_token::INC_DV);
break;
case '-':
tokens.push_back(bf_token::DEC_DV);
break;
case '.':
tokens.push_back(bf_token::PRINT);
break;
case ',':
tokens.push_back(bf_token::READ);
break;
case '[':
tokens.push_back(bf_token::OPEN);
break;
case ']':
tokens.push_back(bf_token::CLOSE);
break;
default:
break;
}
index++;
}
return tokens;
}

View File

@ -6,7 +6,6 @@
#include <bf_interpreter.h>
int main(int argc, const char** argv)
{
std::string file{"../life.bf"};