diff --git a/.idea/vcs.xml b/.idea/vcs.xml
index 8bc3e1c..330b065 100644
--- a/.idea/vcs.xml
+++ b/.idea/vcs.xml
@@ -9,5 +9,6 @@
+
\ No newline at end of file
diff --git a/CMakeLists.txt b/CMakeLists.txt
index c8a9fb4..cfdd2b3 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,5 +1,5 @@
cmake_minimum_required(VERSION 3.25)
-project(COSC-4P80-Assignment-2 VERSION 0.0.7)
+project(COSC-4P80-Assignment-2 VERSION 0.0.8)
option(ENABLE_ADDRSAN "Enable the address sanitizer" OFF)
option(ENABLE_UBSAN "Enable the ub sanitizer" OFF)
@@ -16,6 +16,8 @@ endif()
if (ENABLE_GRAPHICS)
add_subdirectory(lib/blt-graphics)
add_compile_definitions(BLT_USE_GRAPHICS)
+ set(EXTRA_SOURCES lib/implot/implot.cpp lib/implot/implot_demo.cpp lib/implot/implot_items.cpp)
+ include_directories(lib/implot)
else ()
add_subdirectory(lib/blt)
endif ()
@@ -25,7 +27,7 @@ endif ()
include_directories(include/)
file(GLOB_RECURSE PROJECT_BUILD_FILES "${CMAKE_CURRENT_SOURCE_DIR}/src/*.cpp")
-add_executable(COSC-4P80-Assignment-2 ${PROJECT_BUILD_FILES})
+add_executable(COSC-4P80-Assignment-2 ${PROJECT_BUILD_FILES} ${EXTRA_SOURCES})
target_compile_options(COSC-4P80-Assignment-2 PRIVATE -Wall -Wextra -Wpedantic -Wno-comment)
target_link_options(COSC-4P80-Assignment-2 PRIVATE -Wall -Wextra -Wpedantic -Wno-comment)
diff --git a/include/assign2/common.h b/include/assign2/common.h
index 9fd58aa..a72db0b 100644
--- a/include/assign2/common.h
+++ b/include/assign2/common.h
@@ -21,11 +21,21 @@
#include
#include
+#include <filesystem>
+
+#ifdef BLT_USE_GRAPHICS
+
+ #include "blt/gfx/renderer/batch_2d_renderer.h"
+ #include "blt/gfx/window.h"
+ #include
+
+#endif
namespace assign2
{
using Scalar = float;
- const inline Scalar learn_rate = 0.1;
+// const inline Scalar learn_rate = 0.001;
+ inline Scalar learn_rate = 0.001;
template<typename T>
decltype(std::cout)& print_vec(const std::vector<T>& vec)
@@ -102,6 +112,22 @@ namespace assign2
class weight_t
{
public:
+ weight_t() = default;
+
+ weight_t(const weight_t& copy) = delete;
+
+ weight_t& operator=(const weight_t& copy) = delete;
+
+ weight_t(weight_t&& move) noexcept: place(std::exchange(move.place, 0)), data(std::move(move.data))
+ {}
+
+ weight_t& operator=(weight_t&& move) noexcept
+ {
+ place = std::exchange(move.place, place);
+ data = std::exchange(move.data, std::move(data));
+ return *this;
+ }
+
void preallocate(blt::size_t amount)
{
data.resize(amount);
@@ -125,6 +151,82 @@ namespace assign2
std::vector<Scalar> data;
};
+ std::vector<std::string> get_data_files(std::string_view path)
+ {
+ std::vector<std::string> files;
+
+ for (const auto& file : std::filesystem::recursive_directory_iterator(path))
+ {
+ if (file.is_directory())
+ continue;
+ auto file_path = file.path().string();
+ if (blt::string::ends_with(file_path, ".out"))
+ files.push_back(blt::fs::getFile(file_path));
+ }
+
+ return files;
+ }
+
+ std::vector<data_file_t> load_data_files(const std::vector<std::string>& files)
+ {
+ std::vector<data_file_t> loaded_data;
+
+ // load all files
+ for (auto file : files)
+ {
+ // we only use unix line endings here...
+ blt::string::replaceAll(file, "\r", "");
+ auto lines = blt::string::split(file, "\n");
+ auto line_it = lines.begin();
+ auto meta = blt::string::split(*line_it, ' ');
+
+ // load data inside files
+ data_file_t data;
+ data.data_points.reserve(std::stoll(meta[0]));
+ auto bin_count = std::stoul(meta[1]);
+
+ for (++line_it; line_it != lines.end(); ++line_it)
+ {
+ auto line_data_meta = blt::string::split(*line_it, ' ');
+ if (line_data_meta.size() != bin_count + 1)
+ continue;
+ auto line_data_it = line_data_meta.begin();
+
+ // load bins
+ data_t line_data;
+ line_data.is_bad = std::stoi(*line_data_it) == 1;
+ line_data.bins.reserve(bin_count);
+ Scalar total = 0;
+ for (++line_data_it; line_data_it != line_data_meta.end(); ++line_data_it)
+ {
+ auto v = std::stof(*line_data_it);
+ total += v * v;
+ line_data.bins.push_back(v);
+ }
+
+ // normalize vector.
+ total = std::sqrt(total);
+//
+ for (auto& v : line_data.bins)
+ v /= total;
+//
+// if (line_data.bins.size() == 32)
+// print_vec(line_data.bins) << std::endl;
+
+ data.data_points.push_back(line_data);
+ }
+
+ loaded_data.push_back(data);
+ }
+
+ return loaded_data;
+ }
+
+ bool is_thinks_bad(const std::vector<Scalar>& out)
+ {
+ return out[0] < out[1];
+ }
+
}
#endif //COSC_4P80_ASSIGNMENT_2_COMMON_H
diff --git a/include/assign2/functions.h b/include/assign2/functions.h
index aa2b739..2742461 100644
--- a/include/assign2/functions.h
+++ b/include/assign2/functions.h
@@ -38,16 +38,19 @@ namespace assign2
}
};
- struct threshold_function : public function_t
+ struct tanh_function : public function_t
{
- [[nodiscard]] Scalar call(const Scalar s) const final
+ [[nodiscard]] Scalar call(Scalar s) const final
{
- return s >= 0 ? 1 : 0;
+ auto x = std::exp(s);
+ auto nx = std::exp(-s);
+ return (x - nx) / (x + nx);
}
[[nodiscard]] Scalar derivative(Scalar s) const final
{
- return 0;
+ auto tanh = std::tanh(s);
+ return 1 - (tanh * tanh);
}
};
@@ -60,7 +63,7 @@ namespace assign2
[[nodiscard]] Scalar derivative(Scalar s) const final
{
- return 0;
+ return s >= 0 ? 1 : 0;
}
};
}
diff --git a/include/assign2/global_magic.h b/include/assign2/global_magic.h
new file mode 100644
index 0000000..3731710
--- /dev/null
+++ b/include/assign2/global_magic.h
@@ -0,0 +1,61 @@
+#pragma once
+/*
+ * Copyright (C) 2024 Brett Terpstra
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+ */
+
+#ifndef COSC_4P80_ASSIGNMENT_2_GLOBAL_MAGIC_H
+#define COSC_4P80_ASSIGNMENT_2_GLOBAL_MAGIC_H
+
+#include <atomic>
+#include <chrono>
+#include <thread>
+#include <vector>
+#include <blt/std/types.h>
+#include <assign2/common.h>
+
+namespace assign2
+{
+
+ inline blt::size_t layer_id_counter = 0;
+ inline const blt::size_t distance_between_layers = 250;
+ inline std::atomic_bool pause_mode = true;
+ inline std::atomic_bool pause_flag = false;
+
+ void await()
+ {
+ if (!pause_mode.load(std::memory_order_relaxed))
+ return;
+ // wait for flag to come in
+ while (!pause_flag.load(std::memory_order_relaxed))
+ std::this_thread::sleep_for(std::chrono::milliseconds(1));
+ // reset the flag back to false
+ auto flag = pause_flag.load(std::memory_order_relaxed);
+ while (!pause_flag.compare_exchange_strong(flag, false, std::memory_order_relaxed))
+ {}
+ }
+
+ struct node_data
+ {
+
+ };
+
+ inline std::vector<Scalar> errors_over_time;
+ inline std::vector<Scalar> error_derivative_over_time;
+ inline std::vector<Scalar> correct_over_time;
+ inline std::vector<node_data> nodes;
+}
+
+#endif //COSC_4P80_ASSIGNMENT_2_GLOBAL_MAGIC_H
diff --git a/include/assign2/initializers.h b/include/assign2/initializers.h
index bdfc2e8..d8c11c2 100644
--- a/include/assign2/initializers.h
+++ b/include/assign2/initializers.h
@@ -33,11 +33,11 @@ namespace assign2
}
};
- struct half_init
+ struct small_init
{
inline Scalar operator()(blt::i32) const
{
- return 0;
+ return 0.01;
}
};
diff --git a/include/assign2/layer.h b/include/assign2/layer.h
index bfe41c0..66197d3 100644
--- a/include/assign2/layer.h
+++ b/include/assign2/layer.h
@@ -17,13 +17,15 @@
*/
#include "blt/std/assert.h"
+
#ifndef COSC_4P80_ASSIGNMENT_2_LAYER_H
#define COSC_4P80_ASSIGNMENT_2_LAYER_H
-
-#include
-#include
-#include "blt/iterator/zip.h"
-#include "blt/iterator/iterator.h"
+
+ #include
+ #include
+ #include "blt/iterator/zip.h"
+ #include "blt/iterator/iterator.h"
+ #include "global_magic.h"
namespace assign2
{
@@ -52,11 +54,12 @@ namespace assign2
{
// delta for weights
error = act->derivative(z) * next_error;
+ db = learn_rate * error;
BLT_ASSERT(previous_outputs.size() == dw.size());
for (auto [prev_out, d_weight] : blt::zip(previous_outputs, dw))
{
- // dw / apply dw
- d_weight = learn_rate * prev_out * error;
+ // dw
+ d_weight = -learn_rate * prev_out * error;
}
}
@@ -64,6 +67,7 @@ namespace assign2
{
for (auto [w, d] : blt::in_pairs(weights, dw))
w += d;
+ bias += db;
}
template
@@ -91,6 +95,7 @@ namespace assign2
float z = 0;
float a = 0;
float bias = 0;
+ float db = 0;
float error = 0;
weight_view dw;
weight_view weights;
@@ -102,7 +107,7 @@ namespace assign2
public:
template<typename WeightFunc, typename BiasFunc>
layer_t(const blt::i32 in, const blt::i32 out, function_t* act_func, WeightFunc w, BiasFunc b):
- in_size(in), out_size(out), act_func(act_func)
+ in_size(in), out_size(out), layer_id(layer_id_counter++), act_func(act_func)
{
neurons.reserve(out_size);
weights.preallocate(in_size * out_size);
@@ -130,38 +135,36 @@ namespace assign2
return outputs;
}
- Scalar back_prop(const std::vector<Scalar>& prev_layer_output,
- const std::variant<blt::ref<const std::vector<Scalar>>, blt::ref<const layer_t>>& data)
+ std::pair<Scalar, Scalar> back_prop(const std::vector<Scalar>& prev_layer_output,
+ const std::variant<blt::ref<const std::vector<Scalar>>, blt::ref<const layer_t>>& data)
{
- return std::visit(blt::lambda_visitor{
+ Scalar total_error = 0;
+ Scalar total_derivative = 0;
+ std::visit(blt::lambda_visitor{
// is provided if we are an output layer, contains output of this net (per neuron) and the expected output (per neuron)
- [this, &prev_layer_output](const std::vector<Scalar>& expected) {
- Scalar total_error = 0;
+ [this, &prev_layer_output, &total_error, &total_derivative](const std::vector<Scalar>& expected) {
for (auto [i, n] : blt::enumerate(neurons))
{
auto d = outputs[i] - expected[i];
auto d2 = 0.5f * (d * d);
total_error += d2;
- n.back_prop(act_func, prev_layer_output, d2);
+ total_derivative += d;
+ n.back_prop(act_func, prev_layer_output, d);
}
- return total_error;
},
// interior layer
[this, &prev_layer_output](const layer_t& layer) {
- Scalar total_error = 0;
for (auto [i, n] : blt::enumerate(neurons))
{
- Scalar weight_error = 0;
+ Scalar w = 0;
// TODO: this is not efficient on the cache!
for (auto nn : layer.neurons)
- weight_error += nn.error * nn.weights[i];
- Scalar w2 = 0.5f * weight_error * weight_error;
- total_error += w2;
- n.back_prop(act_func, prev_layer_output, w2);
+ w += nn.error * nn.weights[i];
+ n.back_prop(act_func, prev_layer_output, w);
}
- return total_error;
}
}, data);
+ return {total_error, total_derivative};
}
void update()
@@ -202,9 +205,19 @@ namespace assign2
std::cout << std::endl;
weights.debug();
}
+
+#ifdef BLT_USE_GRAPHICS
+
+ void render() const
+ {
+
+ }
+
+#endif
private:
const blt::i32 in_size, out_size;
+ const blt::size_t layer_id;
weight_t weights;
weight_t weight_derivatives;
function_t* act_func;
diff --git a/include/assign2/network.h b/include/assign2/network.h
index da81efb..c59cac1 100644
--- a/include/assign2/network.h
+++ b/include/assign2/network.h
@@ -22,6 +22,7 @@
#include
#include
#include "blt/std/assert.h"
+#include "global_magic.h"
namespace assign2
{
@@ -36,14 +37,14 @@ namespace assign2
for (blt::i32 i = 0; i < layer_count; i++)
{
if (i == 0)
- layers.push_back(layer_t{input_size, hidden_size, w, b});
+ layers.push_back(std::make_unique<layer_t>(input_size, hidden_size, w, b));
else
- layers.push_back(layer_t{hidden_size, hidden_size, w, b});
+ layers.push_back(std::make_unique<layer_t>(hidden_size, hidden_size, w, b));
}
- layers.push_back(layer_t{hidden_size, output_size, w, b});
+ layers.push_back(std::make_unique<layer_t>(hidden_size, output_size, w, b));
} else
{
- layers.push_back(layer_t{input_size, output_size, w, b});
+ layers.push_back(std::make_unique<layer_t>(input_size, output_size, w, b));
}
}
@@ -56,18 +57,18 @@ namespace assign2
for (blt::i32 i = 0; i < layer_count; i++)
{
if (i == 0)
- layers.push_back(layer_t{input_size, hidden_size, w, b});
+ layers.push_back(std::make_unique<layer_t>(input_size, hidden_size, w, b));
else
- layers.push_back(layer_t{hidden_size, hidden_size, w, b});
+ layers.push_back(std::make_unique<layer_t>(hidden_size, hidden_size, w, b));
}
- layers.push_back(layer_t{hidden_size, output_size, ow, ob});
+ layers.push_back(std::make_unique<layer_t>(hidden_size, output_size, ow, ob));
} else
{
- layers.push_back(layer_t{input_size, output_size, ow, ob});
+ layers.push_back(std::make_unique<layer_t>(input_size, output_size, ow, ob));
}
}
- explicit network_t(std::vector<layer_t> layers): layers(std::move(layers))
+ explicit network_t(std::vector<std::unique_ptr<layer_t>> layers): layers(std::move(layers))
{}
network_t() = default;
@@ -78,27 +79,12 @@ namespace assign2
outputs.emplace_back(input);
for (auto& v : layers)
- outputs.emplace_back(v.call(outputs.back()));
+ outputs.emplace_back(v->call(outputs.back()));
return outputs.back();
}
- std::pair<Scalar, Scalar> error(const std::vector<Scalar>& outputs, bool is_bad)
- {
- BLT_ASSERT(outputs.size() == 2);
- auto g = is_bad ? 0.0f : 1.0f;
- auto b = is_bad ? 1.0f : 0.0f;
-
- auto g_diff = outputs[0] - g;
- auto b_diff = outputs[1] - b;
-
- auto error = g_diff * g_diff + b_diff * b_diff;
- BLT_INFO("%f %f %f", error, g_diff, b_diff);
-
- return {0.5f * (error * error), error};
- }
-
- Scalar train_epoch(const data_file_t& example)
+ std::pair<Scalar, Scalar> train_epoch(const data_file_t& example)
{
Scalar total_error = 0;
Scalar total_d_error = 0;
@@ -111,28 +97,45 @@ namespace assign2
{
if (i == layers.size() - 1)
{
- auto e = layer.back_prop(layers[i - 1].outputs, expected);
- total_error += e;
+ auto e = layer->back_prop(layers[i - 1]->outputs, expected);
+// layer->update();
+ total_error += e.first;
+ total_d_error += e.second;
} else if (i == 0)
{
- auto e = layer.back_prop(x.bins, layers[i + 1]);
- total_error += e;
+ auto e = layer->back_prop(x.bins, *layers[i + 1]);
+// layer->update();
+ total_error += e.first;
+ total_d_error += e.second;
} else
{
- auto e = layer.back_prop(layers[i - 1].outputs, layers[i + 1]);
- total_error += e;
+ auto e = layer->back_prop(layers[i - 1]->outputs, *layers[i + 1]);
+// layer->update();
+ total_error += e.first;
+ total_d_error += e.second;
}
}
for (auto& l : layers)
- l.update();
+ l->update();
}
- BLT_DEBUG("Total Errors found %f, %f", total_error, total_d_error);
+// errors_over_time.push_back(total_error);
+// BLT_DEBUG("Total Errors found %f, %f", total_error, total_d_error);
- return total_error;
+ return {total_error, total_d_error};
}
+
+#ifdef BLT_USE_GRAPHICS
+
+ void render() const
+ {
+ for (auto& l : layers)
+ l->render();
+ }
+
+#endif
private:
- std::vector<layer_t> layers;
+ std::vector<std::unique_ptr<layer_t>> layers;
};
}
diff --git a/lib/implot b/lib/implot
new file mode 160000
index 0000000..f156599
--- /dev/null
+++ b/lib/implot
@@ -0,0 +1 @@
+Subproject commit f156599faefe316f7dd20fe6c783bf87c8bb6fd9
diff --git a/src/main.cpp b/src/main.cpp
index ef7cab6..f195e3b 100644
--- a/src/main.cpp
+++ b/src/main.cpp
@@ -7,67 +7,317 @@
#include
#include
#include
+#include
+#include
using namespace assign2;
-std::vector<std::string> get_data_files(std::string_view path)
+std::vector<data_file_t> data_files;
+random_init randomizer{619};
+empty_init empty;
+small_init small;
+sigmoid_function sig;
+relu_function relu;
+tanh_function func_tanh;
+
+network_t create_network(blt::i32 input, blt::i32 hidden)
{
- std::vector<std::string> files;
+ auto layer1 = std::make_unique<layer_t>(input, hidden * 2, &sig, randomizer, empty);
+ auto layer2 = std::make_unique<layer_t>(hidden * 2, hidden / 2, &sig, randomizer, empty);
+ auto layer_output = std::make_unique<layer_t>(hidden / 2, 2, &sig, randomizer, empty);
- for (const auto& file : std::filesystem::recursive_directory_iterator(path))
- {
- if (file.is_directory())
- continue;
- auto file_path = file.path().string();
- if (blt::string::ends_with(file_path, ".out"))
- files.push_back(blt::fs::getFile(file_path));
- }
+ std::vector<std::unique_ptr<layer_t>> vec;
+ vec.push_back(std::move(layer1));
+ vec.push_back(std::move(layer2));
+ vec.push_back(std::move(layer_output));
- return files;
+ return network_t{std::move(vec)};
}
-std::vector<data_file_t> load_data_files(const std::vector<std::string>& files)
+#ifdef BLT_USE_GRAPHICS
+
+#include
+#include "blt/gfx/renderer/resource_manager.h"
+#include "blt/gfx/renderer/batch_2d_renderer.h"
+#include "blt/gfx/renderer/camera.h"
+#include "implot.h"
+#include
+
+blt::gfx::matrix_state_manager global_matrices;
+blt::gfx::resource_manager resources;
+blt::gfx::batch_renderer_2d renderer_2d(resources, global_matrices);
+blt::gfx::first_person_camera_2d camera;
+
+blt::hashmap_t<blt::i32, network_t> networks;
+blt::hashmap_t<blt::i32, data_file_t*> file_map;
+std::unique_ptr<std::thread> network_thread;
+std::atomic_bool running = true;
+std::atomic_bool run_exit = true;
+std::atomic_int32_t run_epoch = -1;
+std::atomic_uint64_t epochs = 0;
+blt::i32 time_between_runs = 0;
+blt::size_t correct_recall = 0;
+blt::size_t wrong_recall = 0;
+bool run_network = false;
+
+void init(const blt::gfx::window_data& data)
{
- std::vector<data_file_t> loaded_data;
+ using namespace blt::gfx;
+
+// auto monitor = glfwGetPrimaryMonitor();
+// auto mode = glfwGetVideoMode(monitor);
+// glfwSetWindowMonitor(data.window, monitor, 0, 0, mode->width, mode->height, mode->refreshRate);
- // load all file
- for (auto file : files)
+ global_matrices.create_internals();
+ resources.load_resources();
+ renderer_2d.create();
+ ImPlot::CreateContext();
+
+ for (auto& f : data_files)
{
- // we only use unix line endings here...
- blt::string::replaceAll(file, "\r", "");
- auto lines = blt::string::split(file, "\n");
- auto line_it = lines.begin();
- auto meta = blt::string::split(*line_it, ' ');
+ int input = static_cast<int>(f.data_points.begin()->bins.size());
+ int hidden = input * 1;
- // load data inside files
- data_file_t data;
- data.data_points.reserve(std::stoll(meta[0]));
- auto bin_count = std::stoul(meta[1]);
-
- for (++line_it; line_it != lines.end(); ++line_it)
- {
- auto line_data_meta = blt::string::split(*line_it, ' ');
- if (line_data_meta.size() != bin_count + 1)
- continue;
- auto line_data_it = line_data_meta.begin();
-
- // load bins
- data_t line_data;
- line_data.is_bad = std::stoi(*line_data_it) == 1;
- line_data.bins.reserve(bin_count);
- for (++line_data_it; line_data_it != line_data_meta.end(); ++line_data_it)
- {
- line_data.bins.push_back(std::stof(*line_data_it));
- }
- data.data_points.push_back(line_data);
- }
-
- loaded_data.push_back(data);
+ BLT_INFO("Making network of size %d", input);
+ networks[input] = create_network(input, hidden);
+ file_map[input] = &f;
}
- return loaded_data;
+ errors_over_time.reserve(25000);
+ error_derivative_over_time.reserve(25000);
+ correct_over_time.reserve(25000);
+
+ network_thread = std::make_unique<std::thread>([]() {
+ while (running)
+ {
+ if (run_epoch >= 0)
+ {
+ auto error = networks.at(run_epoch).train_epoch(*file_map[run_epoch]);
+ errors_over_time.push_back(error.first);
+ error_derivative_over_time.push_back(error.second);
+
+ blt::size_t right = 0;
+ blt::size_t wrong = 0;
+ for (auto& d : file_map[run_epoch]->data_points)
+ {
+ auto out = networks.at(run_epoch).execute(d.bins);
+ auto is_bad = is_thinks_bad(out);
+
+ if ((is_bad && d.is_bad) || (!is_bad && !d.is_bad))
+ right++;
+ else
+ wrong++;
+ }
+ correct_recall = right;
+ wrong_recall = wrong;
+ correct_over_time.push_back(static_cast<Scalar>(right) / static_cast<Scalar>(right + wrong) * 100);
+
+ epochs++;
+ run_epoch = -1;
+ std::this_thread::sleep_for(std::chrono::milliseconds(time_between_runs));
+ }
+ }
+ run_exit = false;
+ });
}
+template<typename Func>
+void plot_vector(ImPlotRect& lims, const std::vector<Scalar>& v, std::string name, const std::string& x, const std::string& y, Func axis_func)
+{
+ if (lims.X.Min < 0)
+ lims.X.Min = 0;
+ if (ImPlot::BeginPlot(name.c_str()))
+ {
+ ImPlot::SetupAxes(x.c_str(), y.c_str(), ImPlotAxisFlags_None, ImPlotAxisFlags_None);
+ int minX = static_cast<int>(lims.X.Min);
+ int maxX = static_cast<int>(lims.X.Max);
+
+ if (minX < 0)
+ minX = 0;
+ if (minX >= static_cast<int>(v.size()))
+ minX = static_cast<int>(v.size()) - 1;
+ if (maxX < 0)
+ maxX = 0;
+ if (maxX >= static_cast<int>(v.size()))
+ maxX = static_cast<int>(v.size()) - 1;
+ if (static_cast<int>(v.size()) > 0)
+ {
+ auto min = v[minX];
+ auto max = v[minX];
+ for (int i = minX; i < maxX; i++)
+ {
+ auto val = v[i];
+ if (val < min)
+ min = val;
+ if (val > max)
+ max = val;
+ }
+ ImPlot::SetupAxisLimits(ImAxis_Y1, axis_func(min, true), axis_func(max, false), ImGuiCond_Always);
+ }
+
+ name = "##" + name;
+ ImPlot::SetupAxisLinks(ImAxis_X1, &lims.X.Min, &lims.X.Max);
+ ImPlot::PlotLine(name.c_str(), v.data(), static_cast<int>(v.size()), 1, 0, ImPlotLineFlags_Shaded);
+ ImPlot::EndPlot();
+ }
+}
+
+void update(const blt::gfx::window_data& data)
+{
+ global_matrices.update_perspectives(data.width, data.height, 90, 0.1, 2000);
+
+ camera.update();
+ camera.update_view(global_matrices);
+ global_matrices.update();
+
+ ImGui::ShowDemoWindow();
+ ImPlot::ShowDemoWindow();
+
+ auto net = networks.begin();
+ if (ImGui::Begin("Control", nullptr))
+ {
+ static std::vector<std::unique_ptr<char[]>> owner;
+ static std::vector<const char*> lists;
+ if (lists.empty())
+ {
+ for (auto& n : networks)
+ {
+ auto str = std::to_string(n.first);
+ char* ptr = new char[str.size() + 1];
+ owner.push_back(std::unique_ptr<char[]>(ptr));
+ std::memcpy(ptr, str.data(), str.size());
+ ptr[str.size()] = '\0';
+ lists.push_back(ptr);
+ }
+ }
+ static int selected = 1;
+ for (int i = 0; i < selected; i++)
+ net++;
+ ImGui::Separator();
+ ImGui::Text("Select Network Size");
+ if (ImGui::ListBox("", &selected, lists.data(), static_cast<int>(lists.size()), 4))
+ {
+ errors_over_time.clear();
+ correct_over_time.clear();
+ error_derivative_over_time.clear();
+ run_network = false;
+ }
+ ImGui::Separator();
+ ImGui::Text("Using network %d size %d", selected, net->first);
+ static bool pause = pause_mode.load();
+ ImGui::Checkbox("Stepped Mode", &pause);
+ pause_mode = pause;
+ ImGui::Checkbox("Train Network", &run_network);
+ if (run_network)
+ run_epoch = net->first;
+ ImGui::InputInt("Time Between Runs", &time_between_runs);
+ if (time_between_runs < 0)
+ time_between_runs = 0;
+ std::string str = std::to_string(correct_recall) + "/" + std::to_string(wrong_recall + correct_recall);
+ ImGui::ProgressBar(
+ (wrong_recall + correct_recall != 0) ? static_cast<float>(correct_recall) / static_cast<float>(wrong_recall + correct_recall) : 0,
+ ImVec2(0, 0), str.c_str());
+// const float max_learn = 100000;
+// static float learn = max_learn;
+// ImGui::SliderFloat("Learn Rate", &learn, 1, max_learn, "", ImGuiSliderFlags_Logarithmic);
+// learn_rate = learn / (max_learn * 1000);
+ ImGui::Text("Learn Rate %.9f", learn_rate);
+ if (ImGui::Button("Print Current"))
+ {
+ BLT_INFO("Test Cases:");
+ blt::size_t right = 0;
+ blt::size_t wrong = 0;
+ for (auto& d : file_map[net->first]->data_points)
+ {
+ std::cout << "Good or bad? " << (d.is_bad ? "Bad" : "Good") << " :: ";
+ auto out = net->second.execute(d.bins);
+ auto is_bad = is_thinks_bad(out);
+
+ if ((is_bad && d.is_bad) || (!is_bad && !d.is_bad))
+ right++;
+ else
+ wrong++;
+
+ std::cout << "NN Thinks: " << (is_bad ? "Bad" : "Good") << " || Outs: [";
+ print_vec(out) << "]" << std::endl;
+ }
+ BLT_INFO("NN got %ld right and %ld wrong (%%%lf)", right, wrong, static_cast(right) / static_cast(right + wrong) * 100);
+ }
+ }
+ ImGui::End();
+
+ if (ImGui::Begin("Stats"))
+ {
+ static std::vector<int> x_points;
+ if (errors_over_time.size() != x_points.size())
+ {
+ x_points.clear();
+ for (int i = 0; i < static_cast<int>(errors_over_time.size()); i++)
+ x_points.push_back(i);
+ }
+
+ auto domain = static_cast<blt::i32>(errors_over_time.size());
+ blt::i32 history = std::min(100, domain);
+ static ImPlotRect lims(0, 100, 0, 1);
+ if (ImPlot::BeginAlignedPlots("AlignedGroup"))
+ {
+ plot_vector(lims, errors_over_time, "Error", "Time", "Error", [](auto v, bool b) {
+ float percent = 0.15;
+ if (b)
+ return v < 0 ? v * (1 + percent) : v * (1 - percent);
+ else
+ return v < 0 ? v * (1 - percent) : v * (1 + percent);
+ });
+ plot_vector(lims, correct_over_time, "Correct", "Time", "Correct", [](auto v, bool b) {
+ if (b)
+ return v - 1;
+ else
+ return v + 1;
+ });
+ plot_vector(lims, error_derivative_over_time, "DError/Dw", "Time", "Error", [](auto v, bool b) {
+ float percent = 0.05;
+ if (b)
+ return v < 0 ? v * (1 + percent) : v * (1 - percent);
+ else
+ return v < 0 ? v * (1 - percent) : v * (1 + percent);
+ });
+ ImPlot::EndAlignedPlots();
+ }
+ }
+ ImGui::End();
+
+
+ ImGui::Begin("Hello", nullptr,
+ ImGuiWindowFlags_AlwaysAutoResize | ImGuiWindowFlags_NoBackground | ImGuiWindowFlags_NoCollapse | ImGuiWindowFlags_NoInputs |
+ ImGuiWindowFlags_NoTitleBar);
+ net->second.render();
+ ImGui::End();
+
+ renderer_2d.render(data.width, data.height);
+}
+
+void destroy()
+{
+ running = false;
+ while (run_exit)
+ {
+ if (pause_mode)
+ pause_flag = true;
+ }
+ if (network_thread->joinable())
+ network_thread->join();
+ network_thread = nullptr;
+ networks.clear();
+ file_map.clear();
+ ImPlot::DestroyContext();
+ global_matrices.cleanup();
+ resources.cleanup();
+ renderer_2d.cleanup();
+ blt::gfx::cleanup();
+}
+
+#endif
+
int main(int argc, const char** argv)
{
blt::arg_parse parser;
@@ -76,66 +326,52 @@ int main(int argc, const char** argv)
auto args = parser.parse_args(argc, argv);
std::string data_directory = blt::string::ensure_ends_with_path_separator(args.get("file"));
- auto data_files = load_data_files(get_data_files(data_directory));
-
- random_init randomizer{619};
- empty_init empty;
- sigmoid_function sig;
- relu_function relu;
- threshold_function thresh;
-
- layer_t layer1{16, 16, &sig, randomizer, empty};
- layer1.debug();
- layer_t layer2{16, 16, &sig, randomizer, empty};
- layer2.debug();
- layer_t layer3{16, 16, &sig, randomizer, empty};
- layer3.debug();
- layer_t layer_output{16, 2, &sig, randomizer, empty};
- layer_output.debug();
-
- network_t network{{layer1, layer2, layer3, layer_output}};
+ data_files = load_data_files(get_data_files(data_directory));
+
+#ifdef BLT_USE_GRAPHICS
+ blt::gfx::init(blt::gfx::window_data{"Freeplay Graphics", init, update, 1440, 720}.setSyncInterval(1).setMonitor(glfwGetPrimaryMonitor())
+ .setMaximized(true));
+ destroy();
+ return 0;
+#endif
for (auto f : data_files)
{
- if (f.data_points.begin()->bins.size() == 16)
+ int input = static_cast<int>(f.data_points.begin()->bins.size());
+ int hidden = input * 3;
+
+ if (input != 64)
+ continue;
+
+ BLT_INFO("-----------------");
+ BLT_INFO("Running for size %d", input);
+ BLT_INFO("With hidden layers %d", input);
+ BLT_INFO("-----------------");
+
+ network_t network = create_network(input, hidden);
+
+ for (blt::size_t i = 0; i < 2000; i++)
+ network.train_epoch(f);
+
+ BLT_INFO("Test Cases:");
+ blt::size_t right = 0;
+ blt::size_t wrong = 0;
+ for (auto& d : f.data_points)
{
- for (blt::size_t i = 0; i < 10; i++)
- {
- network.train_epoch(f);
- }
- break;
+ std::cout << "Good or bad? " << (d.is_bad ? "Bad" : "Good") << " :: ";
+ auto out = network.execute(d.bins);
+ auto is_bad = is_thinks_bad(out);
+
+ if ((is_bad && d.is_bad) || (!is_bad && !d.is_bad))
+ right++;
+ else
+ wrong++;
+
+ std::cout << "NN Thinks: " << (is_bad ? "Bad" : "Good") << " || Outs: [";
+ print_vec(out) << "]" << std::endl;
}
+ BLT_INFO("NN got %ld right and %ld wrong (%%%lf)", right, wrong, static_cast(right) / static_cast(right + wrong) * 100);
}
- BLT_INFO("Test Cases:");
-
- for (auto f : data_files)
- {
- if (f.data_points.begin()->bins.size() == 16)
- {
- for (auto& d : f.data_points)
- {
- std::cout << "Good or bad? " << d.is_bad << " :: ";
- print_vec(network.execute(d.bins)) << std::endl;
- }
- }
- }
-// for (auto d : data_files)
-// {
-// BLT_TRACE_STREAM << "\nSilly new file:\n";
-// for (auto point : d.data_points)
-// {
-// BLT_TRACE_STREAM << "Is bad? " << (point.is_bad ? "True" : "False") << " [";
-// for (auto [i, bin] : blt::enumerate(point.bins))
-// {
-// BLT_TRACE_STREAM << bin;
-// if (i != point.bins.size()-1)
-// BLT_TRACE_STREAM << ", ";
-// }
-// BLT_TRACE_STREAM << "]\n";
-// }
-// }
-
-
std::cout << "Hello World!" << std::endl;
}