2022-12-26 00:31:00 -05:00
|
|
|
/*
|
|
|
|
* Created by Brett on 26/12/22.
|
|
|
|
* Licensed under GNU General Public License V3.0
|
|
|
|
 * See LICENSE file for license details
|
|
|
|
*/
|
|
|
|
|
2023-01-05 12:34:14 -05:00
|
|
|
#ifndef BLT_QUEUE_H
|
|
|
|
#define BLT_QUEUE_H
|
2022-12-26 00:31:00 -05:00
|
|
|
|
2022-12-26 23:36:34 -05:00
|
|
|
/**
|
|
|
|
 * Do not use any queue in this file. They are slower than std::queue.
|
|
|
|
*/
|
2023-01-10 10:45:11 -05:00
|
|
|
namespace blt {
|
2022-12-26 00:31:00 -05:00
|
|
|
|
|
|
|
template<typename T>
|
|
|
|
struct node {
|
|
|
|
T t;
|
|
|
|
node* next;
|
2023-01-05 12:10:38 -05:00
|
|
|
|
|
|
|
node(const T& t, node* next) {
|
2022-12-26 00:57:11 -05:00
|
|
|
this->t = t;
|
|
|
|
this->next = next;
|
|
|
|
}
|
2022-12-26 00:31:00 -05:00
|
|
|
};
|
|
|
|
|
2023-01-05 12:34:14 -05:00
|
|
|
    /**
     * Standard array backed first in last out stack
     * @tparam T type stored in the stack
     */
|
2022-12-26 00:31:00 -05:00
|
|
|
template<typename T>
|
2023-01-05 12:37:15 -05:00
|
|
|
class flat_stack {
|
2022-12-26 01:10:37 -05:00
|
|
|
private:
|
2023-01-05 12:10:38 -05:00
|
|
|
int m_size = 16;
|
|
|
|
int m_insertIndex = 0;
|
|
|
|
T* m_data = new T[m_size];
|
2022-12-26 01:10:37 -05:00
|
|
|
|
2023-01-04 14:35:57 -05:00
|
|
|
/**
|
|
|
|
* Expands the internal array to the new size, copying over the data and shifting its minimal position to index 0
|
|
|
|
* and deletes the old array from memory.
|
|
|
|
* @param newSize new size of the internal array
|
|
|
|
*/
|
2023-01-05 12:10:38 -05:00
|
|
|
void expand(int newSize) {
|
2022-12-26 01:10:37 -05:00
|
|
|
auto tempData = new T[newSize];
|
2023-01-05 12:10:38 -05:00
|
|
|
for (int i = 0; i < m_insertIndex; i++)
|
|
|
|
tempData[i] = m_data[i];
|
|
|
|
delete[] m_data;
|
|
|
|
m_data = tempData;
|
|
|
|
m_size = newSize;
|
2022-12-26 01:10:37 -05:00
|
|
|
}
|
|
|
|
|
2023-01-05 12:10:38 -05:00
|
|
|
public:
|
|
|
|
|
2022-12-26 01:10:37 -05:00
|
|
|
void push(const T& t) {
|
2023-01-05 12:10:38 -05:00
|
|
|
if (m_insertIndex >= m_size) {
|
|
|
|
expand(m_size * 2);
|
2022-12-26 01:10:37 -05:00
|
|
|
}
|
2023-01-05 12:10:38 -05:00
|
|
|
m_data[m_insertIndex++] = t;
|
2022-12-26 01:10:37 -05:00
|
|
|
}
|
2023-01-05 12:10:38 -05:00
|
|
|
|
2023-01-05 01:52:56 -05:00
|
|
|
/**
|
|
|
|
* Warning does not contain runtime error checking!
|
|
|
|
* @return the element at the "front" of the queue.
|
|
|
|
*/
|
2023-01-10 22:05:47 -05:00
|
|
|
[[nodiscard]] const T& top() const {
|
2023-01-05 12:10:38 -05:00
|
|
|
return m_data[m_insertIndex - 1];
|
2022-12-26 01:10:37 -05:00
|
|
|
}
|
2023-01-05 12:10:38 -05:00
|
|
|
|
2022-12-26 01:10:37 -05:00
|
|
|
void pop() {
|
2023-01-05 01:52:56 -05:00
|
|
|
// TODO: throw exception when popping would result in a overflow?
|
|
|
|
// I didn't make it an exception here due to not wanting to import the class.
|
2023-01-05 11:45:27 -05:00
|
|
|
if (isEmpty())
|
2023-01-05 01:52:56 -05:00
|
|
|
return;
|
2023-01-05 12:10:38 -05:00
|
|
|
m_insertIndex--;
|
2023-01-05 01:52:56 -05:00
|
|
|
}
|
|
|
|
|
2023-01-05 12:10:38 -05:00
|
|
|
bool isEmpty() {
|
|
|
|
return m_insertIndex <= 0;
|
2022-12-26 01:10:37 -05:00
|
|
|
}
|
2023-01-05 11:49:45 -05:00
|
|
|
|
2023-01-05 12:10:38 -05:00
|
|
|
int size() {
|
|
|
|
return m_insertIndex;
|
2023-01-05 11:49:45 -05:00
|
|
|
}
|
2023-01-05 12:10:38 -05:00
|
|
|
|
2023-01-05 12:37:15 -05:00
|
|
|
~flat_stack() {
|
2023-01-05 12:10:38 -05:00
|
|
|
delete[](m_data);
|
2022-12-26 01:10:37 -05:00
|
|
|
}
|
2022-12-26 00:31:00 -05:00
|
|
|
};
|
|
|
|
|
2023-01-05 12:34:14 -05:00
|
|
|
    /**
     * Standard array backed first in first out queue
     * @tparam T type stored in the queue
     */
|
|
|
|
template<typename T>
|
2023-01-05 12:37:15 -05:00
|
|
|
class flat_queue {
|
2023-01-05 12:34:14 -05:00
|
|
|
private:
|
|
|
|
int m_size = 16;
|
|
|
|
int m_headIndex = 0;
|
|
|
|
int m_insertIndex = 0;
|
|
|
|
T* m_data = new T[m_size];
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Expands the internal array to the new size, copying over the data and shifting its minimal position to index 0
|
|
|
|
* and deletes the old array from memory.
|
|
|
|
* @param newSize new size of the internal array
|
|
|
|
*/
|
|
|
|
void expand(int newSize) {
|
|
|
|
auto tempData = new T[newSize];
|
|
|
|
for (int i = 0; i < m_size - m_headIndex; i++)
|
|
|
|
tempData[i] = m_data[i + m_headIndex];
|
|
|
|
delete[] m_data;
|
|
|
|
m_insertIndex = m_size - m_headIndex;
|
|
|
|
m_headIndex = 0;
|
|
|
|
m_data = tempData;
|
|
|
|
m_size = newSize;
|
|
|
|
}
|
|
|
|
|
|
|
|
public:
|
|
|
|
|
|
|
|
void push(const T& t) {
|
|
|
|
if (m_insertIndex >= m_size) {
|
|
|
|
expand(m_size * 2);
|
|
|
|
}
|
|
|
|
m_data[m_insertIndex++] = t;
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Warning does not contain runtime error checking!
|
|
|
|
* @return the element at the "front" of the queue.
|
|
|
|
*/
|
|
|
|
[[nodiscard]] const T& front() const {
|
|
|
|
return m_data[m_headIndex];
|
|
|
|
}
|
|
|
|
|
|
|
|
void pop() {
|
|
|
|
// TODO: throw exception when popping would result in a overflow?
|
|
|
|
// I didn't make it an exception here due to not wanting to import the class.
|
|
|
|
if (isEmpty())
|
|
|
|
return;
|
2023-01-05 12:40:08 -05:00
|
|
|
m_headIndex++;
|
2023-01-05 12:34:14 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
bool isEmpty() {
|
|
|
|
return m_headIndex >= m_size;
|
|
|
|
}
|
|
|
|
|
|
|
|
int size() {
|
|
|
|
return m_insertIndex - m_headIndex;
|
|
|
|
}
|
|
|
|
|
2023-01-05 12:37:15 -05:00
|
|
|
~flat_queue() {
|
2023-01-05 12:34:14 -05:00
|
|
|
delete[](m_data);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2023-01-05 01:52:56 -05:00
|
|
|
// avoid this. it is very slow.
|
2022-12-26 00:31:00 -05:00
|
|
|
template<typename T>
|
|
|
|
class node_queue {
|
|
|
|
private:
|
2023-01-05 12:10:38 -05:00
|
|
|
node<T>* m_head;
|
2022-12-26 00:31:00 -05:00
|
|
|
public:
|
2023-01-05 12:10:38 -05:00
|
|
|
|
2022-12-26 01:10:37 -05:00
|
|
|
void push(const T& t) {
|
2023-01-05 12:10:38 -05:00
|
|
|
if (m_head == nullptr)
|
|
|
|
m_head = new node<T>(t, nullptr);
|
2022-12-26 01:02:46 -05:00
|
|
|
else
|
2023-01-05 12:10:38 -05:00
|
|
|
m_head = new node<T>(t, m_head);
|
2022-12-26 00:31:00 -05:00
|
|
|
}
|
2023-01-05 12:10:38 -05:00
|
|
|
|
2022-12-26 00:55:49 -05:00
|
|
|
[[nodiscard]] const T& front() const {
|
2023-01-05 12:10:38 -05:00
|
|
|
return m_head->t;
|
2022-12-26 00:31:00 -05:00
|
|
|
}
|
2023-01-05 12:10:38 -05:00
|
|
|
|
2022-12-26 00:31:00 -05:00
|
|
|
void pop() {
|
2023-01-05 12:10:38 -05:00
|
|
|
auto nextNode = m_head->next;
|
|
|
|
delete (m_head);
|
|
|
|
m_head = nextNode;
|
2022-12-26 00:31:00 -05:00
|
|
|
}
|
2023-01-05 12:10:38 -05:00
|
|
|
|
2022-12-26 00:31:00 -05:00
|
|
|
~node_queue() {
|
2023-01-05 12:10:38 -05:00
|
|
|
auto next = m_head;
|
|
|
|
while (next != nullptr) {
|
2022-12-26 00:31:00 -05:00
|
|
|
auto nextNode = next->next;
|
2023-01-05 12:10:38 -05:00
|
|
|
delete (next);
|
2022-12-26 00:31:00 -05:00
|
|
|
next = nextNode;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
2023-01-05 12:34:14 -05:00
|
|
|
#endif //BLT_QUEUE_H
|