More thought required for the AST

main
Brett 2023-11-24 14:41:45 -05:00
parent 8f4718d699
commit 30979b6cdb
3 changed files with 41 additions and 21 deletions

View File

@ -24,20 +24,20 @@ enum class bf_token
CLOSE
};
struct token_t
struct bf_token_t
{
bf_token type;
int64_t offset;
std::optional<uint64_t> name = {};
explicit token_t(bf_token type, int64_t offset = 1): type(type), offset(offset)
explicit bf_token_t(bf_token type, int64_t offset = 1): type(type), offset(offset)
{}
};
class bf_tokenizer
{
private:
std::vector<token_t> tokens;
std::vector<bf_token_t> tokens;
size_t conditionalCount = 0;
size_t currentIndex = 0;
@ -50,7 +50,8 @@ class bf_tokenizer
{
tokenize(program);
bf_name();
if (hasNext()){
if (hasNext())
{
// skip past comments
if (peek().type == bf_token::OPEN)
while (hasNext() && peek().type != bf_token::CLOSE)
@ -58,30 +59,49 @@ class bf_tokenizer
}
}
inline bool hasNext(){
inline bool hasNext()
{
return currentIndex < tokens.size();
}
inline size_t advance(){
inline size_t advance()
{
return currentIndex++;
}
inline const token_t& next() {
inline const bf_token_t& next()
{
return tokens[advance()];
}
inline const token_t& peek(){
inline const bf_token_t& peek()
{
return tokens[currentIndex];
}
inline std::vector<token_t>& data(){
inline std::vector<bf_token_t>& data()
{
return tokens;
}
void print(size_t index);
inline void print(){
inline void print()
{
print(currentIndex);
}
};
// Kinds of node produced when lowering the raw token stream into an AST.
// Values mirror the dispatch order used by the interpreter's handler table.
enum ast_token
{
    INC_DP  = 0, // move the data pointer right
    DEC_DP  = 1, // move the data pointer left
    INC_VAL = 2, // increment the current cell
    DEC_VAL = 3, // decrement the current cell
    PRINT   = 4, // write the current cell to output
    READ    = 5, // read one value into the current cell
    WHILE   = 6  // loop construct ('[' ... ']' pair)
};
// A single node in the (work-in-progress) brainfuck AST.
// NOTE(review): unlike bf_token_t this carries no offset/operand yet;
// presumably more fields follow as the AST design firms up.
struct ast_token_t
{
    // Kind of AST node. Value-initialized so a default-constructed node
    // holds a deterministic value (0 == INC_DP) instead of an
    // indeterminate one; C++17 aggregates keep brace-init working,
    // so ast_token_t{WHILE} still compiles unchanged.
    ast_token type{};
};
#endif //BRAINFUCK_MISC_BF_TOKENIZER_H

View File

@ -23,7 +23,7 @@ static inline void match(functor f, int sp, size_t& index, const std::string& pr
}
template<typename functor>
static inline void match(functor f, int sp, size_t& index, const std::vector<token_t>& program)
static inline void match(functor f, int sp, size_t& index, const std::vector<bf_token_t>& program)
{
while (f(index) < program.size())
{
@ -79,22 +79,22 @@ void interpret_bf(const std::string& program)
}
}
std::function<void(brainfuck_interpreter& inter, size_t offset, size_t& index, const std::vector<token_t>& tokens)> funcs[8] = {
[](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<token_t>& tokens) { inter.increment_dp(off); },
[](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<token_t>& tokens) { inter.decrement_dp(off); },
[](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<token_t>& tokens) {
std::function<void(brainfuck_interpreter& inter, size_t offset, size_t& index, const std::vector<bf_token_t>& tokens)> funcs[8] = {
[](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<bf_token_t>& tokens) { inter.increment_dp(off); },
[](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<bf_token_t>& tokens) { inter.decrement_dp(off); },
[](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<bf_token_t>& tokens) {
inter.increment(static_cast<int8_t>(off));
},
[](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<token_t>& tokens) {
[](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<bf_token_t>& tokens) {
inter.decrement(static_cast<int8_t>(off));
},
[](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<token_t>& tokens) { inter.print(std::cout); },
[](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<token_t>& tokens) { inter.read(std::cin); },
[](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<token_t>& tokens) {
[](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<bf_token_t>& tokens) { inter.print(std::cout); },
[](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<bf_token_t>& tokens) { inter.read(std::cin); },
[](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<bf_token_t>& tokens) {
if (inter.is() == 0)
match([](size_t& idx) { return ++idx; }, 1, index, tokens);
},
[](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<token_t>& tokens) {
[](brainfuck_interpreter& inter, size_t off, size_t& index, const std::vector<bf_token_t>& tokens) {
if (inter.is() != 0)
match([](size_t& idx) { return --idx; }, -1, index, tokens);
},

View File

@ -22,7 +22,7 @@ setup:
bf:
)";
void process_print(const std::vector<token_t>& tokens, size_t index);
void process_print(const std::vector<bf_token_t>& tokens, size_t index);
void codegen(bf_tokenizer& tokenizer, std::ostream& out)
{