diff --git a/CMakeLists.txt b/CMakeLists.txt
index b239ee6..48fa62f 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -27,7 +27,7 @@ macro(compile_options target_name)
     sanitizers(${target_name})
 endmacro()
 
-project(blt-gp VERSION 0.3.25)
+project(blt-gp VERSION 0.3.26)
 
 include(CTest)
 
diff --git a/include/blt/gp/fwdecl.h b/include/blt/gp/fwdecl.h
index 313bf40..e13bec7 100644
--- a/include/blt/gp/fwdecl.h
+++ b/include/blt/gp/fwdecl.h
@@ -76,11 +76,11 @@ namespace blt::gp
 
     enum class destroy_t
     {
-        ARGS,
+        PTR,
         RETURN
     };
 
-    using destroy_func_t = std::function;
+    using destroy_func_t = std::function;
 
     using const_op_iter_t = tracked_vector::const_iterator;
     using op_iter_t = tracked_vector::iterator;
diff --git a/include/blt/gp/program.h b/include/blt/gp/program.h
index a574d62..0e375fa 100644
--- a/include/blt/gp/program.h
+++ b/include/blt/gp/program.h
@@ -254,18 +254,15 @@ namespace blt::gp
                     out << "[Printing Value on '" << (op.get_name() ? *op.get_name() : "") << "' Not Supported!]";
                 }
             });
-            storage.destroy_funcs.push_back([](const detail::destroy_t type, stack_allocator& alloc)
+            storage.destroy_funcs.push_back([](const detail::destroy_t type, u8* data)
             {
                 switch (type)
                 {
-                case detail::destroy_t::ARGS:
-                    // alloc.call_destructors();
-                    BLT_ERROR("Unimplemented");
-                    break;
+                case detail::destroy_t::PTR:
                 case detail::destroy_t::RETURN:
                     if constexpr (detail::has_func_drop_v>)
                     {
-                        alloc.from>(0).drop();
+                        reinterpret_cast*>(data)->drop();
                     }
                     break;
                 }
diff --git a/include/blt/gp/stack.h b/include/blt/gp/stack.h
index 8ebc32f..ef046cc 100644
--- a/include/blt/gp/stack.h
+++ b/include/blt/gp/stack.h
@@ -218,20 +218,20 @@ namespace blt::gp
             return *reinterpret_cast(from(aligned_size() + bytes));
         }
 
-        [[nodiscard]] mem::pointer_storage& access_pointer(const size_t bytes, const size_t type_size) const
+        [[nodiscard]] std::pair&> access_pointer(const size_t bytes, const size_t type_size) const
         {
             const auto type_ref = from(bytes);
-            return *std::launder(
+            return {type_ref, *std::launder(
                 reinterpret_cast*>(type_ref + (type_size - detail::aligned_size(
-                    sizeof(std::atomic_uint64_t*)))));
+                    sizeof(std::atomic_uint64_t*)))))};
         }
 
-        [[nodiscard]] mem::pointer_storage& access_pointer_forward(const size_t bytes, const size_t type_size) const
+        [[nodiscard]] std::pair&> access_pointer_forward(const size_t bytes, const size_t type_size) const
         {
             const auto type_ref = data_ + bytes;
-            return *std::launder(
+            return {type_ref, *std::launder(
                 reinterpret_cast*>(type_ref + (type_size - detail::aligned_size(
-                    sizeof(std::atomic_uint64_t*)))));
+                    sizeof(std::atomic_uint64_t*)))))};
         }
 
         template
diff --git a/include/blt/gp/tree.h b/include/blt/gp/tree.h
index fca0a56..b82c786 100644
--- a/include/blt/gp/tree.h
+++ b/include/blt/gp/tree.h
@@ -117,8 +117,10 @@ namespace blt::gp
     class evaluation_ref
     {
     public:
-        explicit evaluation_ref(T& value, evaluation_context& context): m_value(&value), m_context(&context)
+        explicit evaluation_ref(const bool ephemeral, T& value, evaluation_context& context): m_value(&value), m_context(&context)
         {
+            if (ephemeral)
+                m_value.bit(0, true);
         }
 
         evaluation_ref(const evaluation_ref& copy) = delete;
@@ -157,20 +159,26 @@ namespace blt::gp
             return *m_value;
         }
 
+        T* operator->()
+        {
+            return m_value.get();
+        }
+
         ~evaluation_ref()
         {
             if constexpr (detail::has_func_drop_v)
             {
-                if (m_value != nullptr)
+                if (m_value.get() != nullptr)
                 {
-                    m_value->drop();
+                    if (!m_value.bit(0))
+                        m_value->drop();
                     m_context->values.reset();
                 }
             }
         }
 
     private:
-        T* m_value;
+        mem::pointer_storage m_value;
         evaluation_context* m_context;
     };
 
@@ -480,12 +488,8 @@
         {
             auto& ctx = evaluate(context);
             auto val = ctx.values.template from(0);
-            if constexpr (detail::has_func_drop_v)
-            {
-                ctx.values.template from(0).drop();
-            }
-            ctx.values.reset();
-            return val;
+            evaluation_ref ref{operations.front().get_flags().is_ephemeral(), val, ctx};
+            return ref.get();
         }
 
         /**
@@ -498,12 +502,8 @@
         {
             auto& ctx = evaluate();
             auto val = ctx.values.from(0);
-            if constexpr (detail::has_func_drop_v)
-            {
-                ctx.values.from(0).drop();
-            }
-            ctx.values.reset();
-            return val;
+            evaluation_ref ref{operations.front().get_flags().is_ephemeral(), val, ctx};
+            return ref.get();
         }
 
         /**
@@ -515,7 +515,7 @@
         {
             auto& ctx = evaluate(context);
             auto& val = ctx.values.template from(0);
-            return evaluation_ref{val, ctx};
+            return evaluation_ref{operations.front().get_flags().is_ephemeral(), val, ctx};
         }
 
         /**
@@ -527,7 +527,7 @@
         {
             auto& ctx = evaluate();
             auto& val = ctx.values.from(0);
-            return evaluation_ref{val, ctx};
+            return evaluation_ref{operations.front().get_flags().is_ephemeral(), val, ctx};
         }
 
         void print(std::ostream& out, bool print_literals = true, bool pretty_indent = false, bool include_types = false,
@@ -616,14 +616,17 @@
     private:
         void handle_operator_inserted(const op_container_t& op);
 
+        void handle_ptr_empty(const mem::pointer_storage& ptr, u8* data, operator_id id) const;
+
         template
         void handle_refcount_decrement(const Iter iter, const size_t forward_bytes) const
         {
             if (iter->get_flags().is_ephemeral() && iter->has_ephemeral_drop())
             {
-                // TODO
-                auto& ptr = values.access_pointer_forward(forward_bytes, iter->type_size());
+                auto [val, ptr] = values.access_pointer_forward(forward_bytes, iter->type_size());
                 --*ptr;
+                if (*ptr == 0)
+                    handle_ptr_empty(ptr, val, iter->id());
             }
         }
 
@@ -632,9 +635,8 @@
         {
             if (iter->get_flags().is_ephemeral() && iter->has_ephemeral_drop())
             {
-                // TODO
-                auto& ptr = values.access_pointer_forward(forward_bytes, iter->type_size());
-                --*ptr;
+                auto [_, ptr] = values.access_pointer_forward(forward_bytes, iter->type_size());
+                ++*ptr;
             }
         }
 
diff --git a/src/tree.cpp b/src/tree.cpp
index de22fc0..be8ea89 100644
--- a/src/tree.cpp
+++ b/src/tree.cpp
@@ -283,7 +283,7 @@ namespace blt::gp
                 for_bytes += it.type_size();
                 if (it.get_flags().is_ephemeral() && it.has_ephemeral_drop())
                 {
-                    auto& ptr = values.access_pointer(for_bytes + after_bytes, it.type_size());
+                    auto [_, ptr] = values.access_pointer(for_bytes + after_bytes, it.type_size());
                     ++*ptr;
                 }
             }
@@ -295,25 +295,25 @@
 
     void tree_t::swap_subtrees(const subtree_point_t our_subtree, tree_t& other_tree, const subtree_point_t other_subtree)
     {
-        const auto our_point_begin_itr = operations.begin() + our_subtree.pos;
-        const auto our_point_end_itr = operations.begin() + find_endpoint(our_subtree.pos);
+        const auto c1_subtree_begin_itr = operations.begin() + our_subtree.pos;
+        const auto c1_subtree_end_itr = operations.begin() + find_endpoint(our_subtree.pos);
 
-        const auto other_point_begin_itr = other_tree.operations.begin() + other_subtree.pos;
-        const auto other_point_end_itr = other_tree.operations.begin() + other_tree.find_endpoint(other_subtree.pos);
+        const auto c2_subtree_begin_itr = other_tree.operations.begin() + other_subtree.pos;
+        const auto c2_subtree_end_itr = other_tree.operations.begin() + other_tree.find_endpoint(other_subtree.pos);
 
-        thread_local tracked_vector c1_operators;
-        thread_local tracked_vector c2_operators;
-        c1_operators.clear();
-        c2_operators.clear();
+        thread_local tracked_vector c1_subtree_operators;
+        thread_local tracked_vector c2_subtree_operators;
+        c1_subtree_operators.clear();
+        c2_subtree_operators.clear();
 
-        c1_operators.reserve(std::distance(our_point_begin_itr, our_point_end_itr));
-        c2_operators.reserve(std::distance(other_point_begin_itr, other_point_end_itr));
+        c1_subtree_operators.reserve(std::distance(c1_subtree_begin_itr, c1_subtree_end_itr));
+        c2_subtree_operators.reserve(std::distance(c2_subtree_begin_itr, c2_subtree_end_itr));
 
         // i don't think this is required for swapping values, since the total number of additions is net zero
         // the tree isn't destroyed at any point.
 
-        size_t for_our_bytes = 0;
-        for (const auto& it : iterate(our_point_begin_itr, our_point_end_itr))
+        size_t c1_subtree_bytes = 0;
+        for (const auto& it : iterate(c1_subtree_begin_itr, c1_subtree_end_itr))
         {
             if (it.is_value())
             {
@@ -322,13 +322,13 @@
                 // auto& ptr = values.access_pointer_forward(for_our_bytes, it.type_size());
                 // ++*ptr;
                 // }
-                for_our_bytes += it.type_size();
+                c1_subtree_bytes += it.type_size();
             }
-            c1_operators.emplace_back(it);
+            c1_subtree_operators.push_back(it);
         }
 
-        size_t for_other_bytes = 0;
-        for (const auto& it : iterate(other_point_begin_itr, other_point_end_itr))
+        size_t c2_subtree_bytes = 0;
+        for (const auto& it : iterate(c2_subtree_begin_itr, c2_subtree_end_itr))
         {
             if (it.is_value())
             {
@@ -337,20 +337,20 @@
                 // auto& ptr = values.access_pointer_forward(for_other_bytes, it.type_size());
                 // ++*ptr;
                 // }
-                for_other_bytes += it.type_size();
+                c2_subtree_bytes += it.type_size();
             }
-            c2_operators.emplace_back(it);
+            c2_subtree_operators.push_back(it);
         }
 
-        const size_t c1_stack_after_bytes = accumulate_type_sizes(our_point_end_itr, operations.end());
-        const size_t c2_stack_after_bytes = accumulate_type_sizes(other_point_end_itr, other_tree.operations.end());
-        const auto c1_total = static_cast(c1_stack_after_bytes + for_our_bytes);
-        const auto c2_total = static_cast(c2_stack_after_bytes + for_other_bytes);
+        const size_t c1_stack_after_bytes = accumulate_type_sizes(c1_subtree_end_itr, operations.end());
+        const size_t c2_stack_after_bytes = accumulate_type_sizes(c2_subtree_end_itr, other_tree.operations.end());
+        const auto c1_total = static_cast(c1_stack_after_bytes + c1_subtree_bytes);
+        const auto c2_total = static_cast(c2_stack_after_bytes + c2_subtree_bytes);
 
         const auto copy_ptr_c1 = get_thread_pointer_for_size(c1_total);
         const auto copy_ptr_c2 = get_thread_pointer_for_size(c2_total);
-        values.reserve(values.bytes_in_head() - for_our_bytes + for_other_bytes);
-        other_tree.values.reserve(other_tree.values.bytes_in_head() - for_other_bytes + for_our_bytes);
+        values.reserve(values.bytes_in_head() - c1_subtree_bytes + c2_subtree_bytes);
+        other_tree.values.reserve(other_tree.values.bytes_in_head() - c2_subtree_bytes + c1_subtree_bytes);
 
         values.copy_to(copy_ptr_c1, c1_total);
         values.pop_bytes(c1_total);
@@ -358,22 +358,22 @@
         other_tree.values.copy_to(copy_ptr_c2, c2_total);
         other_tree.values.pop_bytes(c2_total);
 
-        other_tree.values.copy_from(copy_ptr_c1, for_our_bytes);
-        other_tree.values.copy_from(copy_ptr_c2 + for_other_bytes, c2_stack_after_bytes);
+        other_tree.values.copy_from(copy_ptr_c1, c1_subtree_bytes);
+        other_tree.values.copy_from(copy_ptr_c2 + c2_subtree_bytes, c2_stack_after_bytes);
 
-        values.copy_from(copy_ptr_c2, for_other_bytes);
-        values.copy_from(copy_ptr_c1 + for_our_bytes, c1_stack_after_bytes);
+        values.copy_from(copy_ptr_c2, c2_subtree_bytes);
+        values.copy_from(copy_ptr_c1 + c1_subtree_bytes, c1_stack_after_bytes);
 
         // now swap the operators
-        auto insert_point_c1 = our_point_begin_itr - 1;
-        auto insert_point_c2 = other_point_begin_itr - 1;
+        auto insert_point_c1 = c1_subtree_begin_itr - 1;
+        auto insert_point_c2 = c2_subtree_begin_itr - 1;
 
         // invalidates [begin, end()) so the insert points should be fine
-        operations.erase(our_point_begin_itr, our_point_end_itr);
-        other_tree.operations.erase(other_point_begin_itr, other_point_end_itr);
+        operations.erase(c1_subtree_begin_itr, c1_subtree_end_itr);
+        other_tree.operations.erase(c2_subtree_begin_itr, c2_subtree_end_itr);
 
-        operations.insert(++insert_point_c1, c2_operators.begin(), c2_operators.end());
-        other_tree.operations.insert(++insert_point_c2, c1_operators.begin(), c1_operators.end());
+        operations.insert(++insert_point_c1, c2_subtree_operators.begin(), c2_subtree_operators.end());
+        other_tree.operations.insert(++insert_point_c2, c1_subtree_operators.begin(), c1_subtree_operators.end());
     }
 
     void tree_t::replace_subtree(const subtree_point_t point, const ptrdiff_t extent, tree_t& other_tree)
@@ -391,12 +391,10 @@
                 for_bytes += it.type_size();
                 if (it.get_flags().is_ephemeral() && it.has_ephemeral_drop())
                 {
-                    auto& ptr = values.access_pointer(for_bytes + after_bytes, it.type_size());
+                    auto [val, ptr] = values.access_pointer(for_bytes + after_bytes, it.type_size());
                     --*ptr;
                     if (*ptr == 0)
-                    {
-                        // TODO
-                    }
+                        handle_ptr_empty(ptr, val, it.id());
                 }
             }
         }
@@ -413,7 +411,7 @@
             {
                 if (v.get_flags().is_ephemeral() && v.has_ephemeral_drop())
                 {
-                    auto& pointer = other_tree.values.access_pointer_forward(copy_bytes, v.type_size());
+                    auto [_, pointer] = other_tree.values.access_pointer_forward(copy_bytes, v.type_size());
                     ++*pointer;
                 }
                 copy_bytes += v.type_size();
@@ -440,12 +438,10 @@
                 for_bytes += it.type_size();
                 if (it.get_flags().is_ephemeral() && it.has_ephemeral_drop())
                 {
-                    auto& ptr = values.access_pointer(for_bytes + after_bytes, it.type_size());
+                    auto [val, ptr] = values.access_pointer(for_bytes + after_bytes, it.type_size());
                     --*ptr;
                     if (*ptr == 0)
-                    {
-                        // TODO
-                    }
+                        handle_ptr_empty(ptr, val, it.id());
                 }
             }
         }
@@ -471,7 +467,7 @@
                 bytes += it.type_size();
                 if (it.get_flags().is_ephemeral() && it.has_ephemeral_drop())
                 {
-                    auto& ptr = other_tree.values.access_pointer(bytes, it.type_size());
+                    auto [_, ptr] = other_tree.values.access_pointer(bytes, it.type_size());
                     ++*ptr;
                 }
             }
@@ -510,13 +506,20 @@
             m_program->get_operator_info(op.id()).func(nullptr, values, values);
             if (m_program->operator_has_ephemeral_drop(op.id()))
             {
-                auto& ptr = values.access_pointer(op.type_size(), op.type_size());
+                auto [_, ptr] = values.access_pointer(op.type_size(), op.type_size());
                 ptr = new std::atomic_uint64_t(1);
                 ptr.bit(0, true);
             }
         }
     }
 
+    void tree_t::handle_ptr_empty(const mem::pointer_storage& ptr, u8* data, const operator_id id) const
+    {
+        m_program->get_destroy_func(id)(detail::destroy_t::RETURN, data);
+        delete ptr.get();
+        // BLT_INFO("Deleting pointer!");
+    }
+
     evaluation_context& tree_t::evaluate(void* ptr) const
     {
         return m_program->get_eval_func()(*this, ptr);
@@ -573,7 +576,7 @@
 
         const auto v1 = results.values.bytes_in_head();
         const auto v2 = static_cast(operations.front().type_size());
-        m_program->get_destroy_func(operations.front().id())(detail::destroy_t::RETURN, results.values);
+        m_program->get_destroy_func(operations.front().id())(detail::destroy_t::RETURN, results.values.from(operations.front().type_size()));
         if (v1 != v2)
         {
             const auto vd = std::abs(v1 - v2);
@@ -596,10 +599,11 @@
 
     void tree_t::find_child_extends(tracked_vector& vec, const size_t parent_node, const size_t argc) const
     {
+        BLT_ASSERT_MSG(vec.empty(), "Vector to find_child_extends should be empty!");
         while (vec.size() < argc)
         {
             const auto current_point = vec.size();
-            child_t prev{};
+            child_t prev; // NOLINT
             if (current_point == 0)
             {
                 // first child.
@@ -628,15 +632,10 @@
         {
             if (op.get_flags().is_ephemeral() && op.has_ephemeral_drop())
             {
-                auto& ptr = values.access_pointer_forward(total_bytes, op.type_size());
+                auto [val, ptr] = values.access_pointer_forward(total_bytes, op.type_size());
                 --*ptr;
-                // TODO
-                // BLT_TRACE(ptr->load());
-                // if (*ptr == 0)
-                // {
-                // BLT_TRACE("Deleting pointers!");
-                // delete ptr.get();
-                // }
+                if (*ptr == 0)
+                    handle_ptr_empty(ptr, val, op.id());
             }
             total_bytes += op.type_size();
         }
@@ -671,12 +670,10 @@
             move_data.move(after_bytes);
             if (operations[point].get_flags().is_ephemeral() && operations[point].has_ephemeral_drop())
             {
-                const auto& ptr = values.access_pointer(operations[point].type_size(), operations[point].type_size());
+                auto [val, ptr] = values.access_pointer(operations[point].type_size(), operations[point].type_size());
                 --*ptr;
                 if (*ptr == 0)
-                {
-                    // TODO:
-                }
+                    handle_ptr_empty(ptr, val, operations[point].id());
             }
             values.pop_bytes(operations[point].type_size());
         }
diff --git a/tests/drop_test.cpp b/tests/drop_test.cpp
index 0bfce8c..7d8fccb 100644
--- a/tests/drop_test.cpp
+++ b/tests/drop_test.cpp
@@ -50,8 +50,10 @@ struct drop_type
     void drop() const
     {
         if (ephemeral)
+        {
+            std::cout << ("Ephemeral drop") << std::endl;
             ++ephemeral_drop;
-        else
+        }else
             ++normal_drop;
     }
 
@@ -73,7 +75,7 @@ prog_config_t config = prog_config_t()
        .set_elite_count(2)
        .set_crossover_chance(0.8)
        .set_mutation_chance(0.0)
-       .set_reproduction_chance(0.0)
+       .set_reproduction_chance(0.1)
        .set_max_generations(50)
        .set_pop_size(50)
        .set_thread_count(1);
@@ -132,6 +134,7 @@ int main()
     program.generate_population(program.get_typesystem().get_type().id(), fitness_function, sel, sel, sel);
     while (!program.should_terminate())
     {
+        BLT_TRACE("---------------{Begin Generation %lu}---------------", program.get_current_generation());
         BLT_TRACE("Creating next generation");
         program.create_next_generation();
         BLT_TRACE("Move to next generation");
@@ -142,8 +145,13 @@ int main()
 
     // program.get_best_individuals<1>()[0].get().tree.print(program, std::cout, true, true);
 
+    regression.get_program().get_current_pop().clear();
+    regression.get_program().next_generation();
+    regression.get_program().get_current_pop().clear();
+
     BLT_TRACE("Created %ld times", normal_construct.load());
     BLT_TRACE("Dropped %ld times", normal_drop.load());
     BLT_TRACE("Ephemeral created %ld times", ephemeral_construct.load());
     BLT_TRACE("Ephemeral dropped %ld times", ephemeral_drop.load());
+
 }
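
Reviewer note: the core of this change is that ephemeral values now carry a heap-allocated std::atomic_uint64_t reference count stored inside the value's slot on the stack allocator (ptr = new std::atomic_uint64_t(1) in handle_operator_inserted, read back through access_pointer, which now returns both the data pointer and the counter). Copying a value into another tree increments the count, removing it decrements the count, and the new tree_t::handle_ptr_empty runs the operator's destroy function and deletes the counter once it reaches zero. The sketch below is a minimal standalone illustration of that lifecycle and is not the blt-gp API; payload_t, ephemeral_value, retain, and release are hypothetical stand-ins for the value type, access_pointer(), handle_refcount_increment()/handle_refcount_decrement(), and handle_ptr_empty().

#include <atomic>
#include <iostream>

// Hypothetical payload standing in for an operator's return type with a drop() hook.
struct payload_t
{
    void drop() const { std::cout << "dropped\n"; }
};

// Hypothetical wrapper: the payload plus a shared heap-allocated reference count,
// mirroring how the patch stores an atomic_uint64_t* alongside each ephemeral value.
struct ephemeral_value
{
    payload_t value;
    std::atomic_uint64_t* refs; // shared by every tree that copied this value

    static ephemeral_value make() { return {payload_t{}, new std::atomic_uint64_t(1)}; }

    // A tree copied the value (analogous to handle_refcount_increment / ++*ptr).
    void retain() const { ++*refs; }

    // A tree discarded the value (analogous to handle_refcount_decrement / --*ptr);
    // the last owner runs drop() and frees the counter, as handle_ptr_empty does.
    void release()
    {
        if (--*refs == 0)
        {
            value.drop();
            delete refs;
            refs = nullptr;
        }
    }
};

int main()
{
    auto v = ephemeral_value::make();
    auto copy = v;   // a second tree now shares the value
    copy.retain();   // refcount: 2
    v.release();     // refcount: 1, no drop yet
    copy.release();  // refcount: 0 -> drop() runs, counter deleted
}

The counter lives next to the data rather than in a separate control block, which is why access_pointer was changed to return a pair: the caller needs the raw data pointer for the destroy function as well as the counter reference for the decrement.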