hey lifetimes!!!
parent 1a070ab5f2
commit d457abc92f
@@ -27,7 +27,7 @@ macro(compile_options target_name)
     sanitizers(${target_name})
 endmacro()
 
-project(blt-gp VERSION 0.3.25)
+project(blt-gp VERSION 0.3.26)
 
 include(CTest)
 
@@ -76,11 +76,11 @@ namespace blt::gp
 
    enum class destroy_t
    {
-        ARGS,
+        PTR,
        RETURN
    };
 
-    using destroy_func_t = std::function<void(destroy_t, stack_allocator&)>;
+    using destroy_func_t = std::function<void(destroy_t, u8*)>;
 
    using const_op_iter_t = tracked_vector<op_container_t>::const_iterator;
    using op_iter_t = tracked_vector<op_container_t>::iterator;
@@ -254,18 +254,15 @@ namespace blt::gp
                    out << "[Printing Value on '" << (op.get_name() ? *op.get_name() : "") << "' Not Supported!]";
                }
            });
-        storage.destroy_funcs.push_back([](const detail::destroy_t type, stack_allocator& alloc)
+        storage.destroy_funcs.push_back([](const detail::destroy_t type, u8* data)
            {
                switch (type)
                {
-                case detail::destroy_t::ARGS:
-                    // alloc.call_destructors<Args...>();
-                    BLT_ERROR("Unimplemented");
-                    break;
+                case detail::destroy_t::PTR:
                case detail::destroy_t::RETURN:
                    if constexpr (detail::has_func_drop_v<remove_cvref_t<Return>>)
                    {
-                        alloc.from<detail::remove_cv_ref<Return>>(0).drop();
+                        reinterpret_cast<detail::remove_cv_ref<Return>*>(data)->drop();
                    }
                    break;
                }
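
Note on the two hunks above: destroy_t::ARGS becomes destroy_t::PTR and the registered destroy callback now receives the raw byte address of the value (u8*) instead of the whole stack_allocator. A minimal, self-contained sketch of that callback shape; every name below is illustrative only, not blt-gp's actual API:

    #include <cstdint>
    #include <functional>
    #include <iostream>
    #include <new>
    #include <vector>

    enum class destroy_kind { ptr, ret };        // stand-in for detail::destroy_t

    struct value_with_drop
    {
        void drop() const { std::cout << "drop() called\n"; }
    };

    // type-erased drop function: a tag plus the raw bytes of the stored value
    using drop_fn = std::function<void(destroy_kind, std::uint8_t*)>;

    int main()
    {
        std::vector<drop_fn> destroy_funcs;
        destroy_funcs.push_back([](destroy_kind, std::uint8_t* data)
        {
            // the callback no longer needs the allocator: it is handed the exact
            // address of the value and reinterprets it to invoke drop()
            std::launder(reinterpret_cast<value_with_drop*>(data))->drop();
        });

        alignas(value_with_drop) std::uint8_t storage[sizeof(value_with_drop)];
        new (storage) value_with_drop{};          // value lives in raw storage
        destroy_funcs[0](destroy_kind::ret, storage);
        return 0;
    }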
@@ -218,20 +218,20 @@ namespace blt::gp
            return *reinterpret_cast<NO_REF*>(from(aligned_size<NO_REF>() + bytes));
        }
 
-        [[nodiscard]] mem::pointer_storage<std::atomic_uint64_t>& access_pointer(const size_t bytes, const size_t type_size) const
+        [[nodiscard]] std::pair<u8*, mem::pointer_storage<std::atomic_uint64_t>&> access_pointer(const size_t bytes, const size_t type_size) const
        {
            const auto type_ref = from(bytes);
-            return *std::launder(
+            return {type_ref, *std::launder(
                reinterpret_cast<mem::pointer_storage<std::atomic_uint64_t>*>(type_ref + (type_size - detail::aligned_size(
-                    sizeof(std::atomic_uint64_t*)))));
+                    sizeof(std::atomic_uint64_t*)))))};
        }
 
-        [[nodiscard]] mem::pointer_storage<std::atomic_uint64_t>& access_pointer_forward(const size_t bytes, const size_t type_size) const
+        [[nodiscard]] std::pair<u8*, mem::pointer_storage<std::atomic_uint64_t>&> access_pointer_forward(const size_t bytes, const size_t type_size) const
        {
            const auto type_ref = data_ + bytes;
-            return *std::launder(
+            return {type_ref, *std::launder(
                reinterpret_cast<mem::pointer_storage<std::atomic_uint64_t>*>(type_ref + (type_size - detail::aligned_size(
-                    sizeof(std::atomic_uint64_t*)))));
+                    sizeof(std::atomic_uint64_t*)))))};
        }
 
        template <typename T>
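
The access_pointer helpers now hand back both the value's byte address and a reference to the trailing counter slot, which is why call sites later in this commit read `auto [val, ptr] = values.access_pointer(...)`. A standalone sketch of that pair-returning shape using plain standard types (names here are illustrative):

    #include <atomic>
    #include <cstdint>
    #include <iostream>
    #include <utility>

    // a value region followed by a pointer-sized slot holding the shared counter
    struct entry
    {
        std::uint8_t value_bytes[16];
        std::atomic_uint64_t* counter;
    };

    // returns {address of the value bytes, reference to the counter slot}
    std::pair<std::uint8_t*, std::atomic_uint64_t*&> access(entry& e)
    {
        return {e.value_bytes, e.counter};
    }

    int main()
    {
        entry e{};
        e.counter = new std::atomic_uint64_t(1);

        auto [val, ptr] = access(e);      // same shape as: auto [val, ptr] = values.access_pointer(...)
        --*ptr;                           // release one reference
        if (*ptr == 0)
        {
            std::cout << "last reference released for value at " << static_cast<void*>(val) << "\n";
            delete ptr;                   // the counter itself is heap-allocated
        }
        return 0;
    }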
@@ -117,8 +117,10 @@ namespace blt::gp
    class evaluation_ref
    {
    public:
-        explicit evaluation_ref(T& value, evaluation_context& context): m_value(&value), m_context(&context)
+        explicit evaluation_ref(const bool ephemeral, T& value, evaluation_context& context): m_value(&value), m_context(&context)
        {
+            if (ephemeral)
+                m_value.bit(0, true);
        }
 
        evaluation_ref(const evaluation_ref& copy) = delete;
@@ -157,20 +159,26 @@ namespace blt::gp
            return *m_value;
        }
 
+        T* operator->()
+        {
+            return m_value.get();
+        }
+
        ~evaluation_ref()
        {
            if constexpr (detail::has_func_drop_v<T>)
            {
-                if (m_value != nullptr)
+                if (m_value.get() != nullptr)
                {
-                    m_value->drop();
+                    if (!m_value.bit(0))
+                        m_value->drop();
                    m_context->values.reset();
                }
            }
        }
 
    private:
-        T* m_value;
+        mem::pointer_storage<T> m_value;
        evaluation_context* m_context;
    };
 
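evaluation_ref now stores its value through mem::pointer_storage<T> and sets bit 0 when the result is ephemeral, so the destructor skips drop() for values the tree still owns. pointer_storage itself is not part of this diff; the sketch below is only a guess at the usual low-bit tagging idea behind an interface like bit()/get(), with a hypothetical tagged_ptr:

    #include <cstdint>
    #include <iostream>

    // Because allocations are aligned, bit 0 of a pointer is normally free and can
    // carry a flag such as "this value is ephemeral, do not drop it from the wrapper".
    template <typename T>
    class tagged_ptr
    {
    public:
        explicit tagged_ptr(T* ptr): bits_(reinterpret_cast<std::uintptr_t>(ptr)) {}

        void bit(const unsigned index, const bool value)
        {
            if (value) bits_ |= (std::uintptr_t{1} << index);
            else       bits_ &= ~(std::uintptr_t{1} << index);
        }

        [[nodiscard]] bool bit(const unsigned index) const { return (bits_ >> index) & 1u; }

        [[nodiscard]] T* get() const
        {
            // mask off bit 0 before handing the pointer back
            return reinterpret_cast<T*>(bits_ & ~std::uintptr_t{1});
        }

        T* operator->() const { return get(); }

    private:
        std::uintptr_t bits_;
    };

    struct value_t
    {
        void drop() { std::cout << "drop() ran\n"; }
    };

    int main()
    {
        value_t owned;
        tagged_ptr<value_t> ref(&owned);
        ref.bit(0, true);                 // mark as ephemeral, like evaluation_ref's constructor
        if (!ref.bit(0))                  // destructor-style guard: only drop non-ephemeral values
            ref->drop();
        else
            std::cout << "ephemeral value: drop() skipped\n";
        return 0;
    }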
@@ -480,12 +488,8 @@ namespace blt::gp
        {
            auto& ctx = evaluate(context);
            auto val = ctx.values.template from<T>(0);
-            if constexpr (detail::has_func_drop_v<T>)
-            {
-                ctx.values.template from<T>(0).drop();
-            }
-            ctx.values.reset();
-            return val;
+            evaluation_ref<T> ref{operations.front().get_flags().is_ephemeral(), val, ctx};
+            return ref.get();
        }
 
        /**
@@ -498,12 +502,8 @@ namespace blt::gp
        {
            auto& ctx = evaluate();
            auto val = ctx.values.from<T>(0);
-            if constexpr (detail::has_func_drop_v<T>)
-            {
-                ctx.values.from<T>(0).drop();
-            }
-            ctx.values.reset();
-            return val;
+            evaluation_ref<T> ref{operations.front().get_flags().is_ephemeral(), val, ctx};
+            return ref.get();
        }
 
        /**
@@ -515,7 +515,7 @@ namespace blt::gp
        {
            auto& ctx = evaluate(context);
            auto& val = ctx.values.template from<T>(0);
-            return evaluation_ref<T>{val, ctx};
+            return evaluation_ref<T>{operations.front().get_flags().is_ephemeral(), val, ctx};
        }
 
        /**
@@ -527,7 +527,7 @@ namespace blt::gp
        {
            auto& ctx = evaluate();
            auto& val = ctx.values.from<T>(0);
-            return evaluation_ref<T>{val, ctx};
+            return evaluation_ref<T>{operations.front().get_flags().is_ephemeral(), val, ctx};
        }
 
        void print(std::ostream& out, bool print_literals = true, bool pretty_indent = false, bool include_types = false,
@@ -616,14 +616,17 @@ namespace blt::gp
    private:
        void handle_operator_inserted(const op_container_t& op);
 
+        void handle_ptr_empty(const mem::pointer_storage<std::atomic_uint64_t>& ptr, u8* data, operator_id id) const;
+
        template <typename Iter>
        void handle_refcount_decrement(const Iter iter, const size_t forward_bytes) const
        {
            if (iter->get_flags().is_ephemeral() && iter->has_ephemeral_drop())
            {
-                // TODO
-                auto& ptr = values.access_pointer_forward(forward_bytes, iter->type_size());
+                auto [val, ptr] = values.access_pointer_forward(forward_bytes, iter->type_size());
                --*ptr;
+                if (*ptr == 0)
+                    handle_ptr_empty(ptr, val, iter->id());
            }
        }
 
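handle_refcount_decrement now resolves both the value bytes and the counter slot in one lookup and hands exhausted values to the new handle_ptr_empty; the following hunk fixes the increment helper, which previously decremented by mistake. A standalone sketch of that counting discipline (illustrative names, plain atomics; the counter appears to live at the end of each ephemeral value's slot in the real code):

    #include <atomic>
    #include <iostream>

    struct shared_value
    {
        std::atomic_uint64_t* refs;        // shared counter, allocated with an initial count of 1
        void drop() const { std::cout << "value dropped\n"; }
    };

    void on_copied(shared_value& v)        // analogue of handle_refcount_increment
    {
        ++*v.refs;
    }

    void on_removed(shared_value& v)       // analogue of handle_refcount_decrement
    {
        if (--*v.refs == 0)
        {
            // analogue of handle_ptr_empty: run the type's drop, then free the counter
            v.drop();
            delete v.refs;
            v.refs = nullptr;
        }
    }

    int main()
    {
        shared_value v{new std::atomic_uint64_t(1)};
        on_copied(v);                      // a second tree now refers to the same value
        on_removed(v);                     // first owner goes away: count 2 -> 1, nothing dropped
        on_removed(v);                     // last owner goes away: count 1 -> 0, drop + delete
        return 0;
    }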
@@ -632,9 +635,8 @@ namespace blt::gp
        {
            if (iter->get_flags().is_ephemeral() && iter->has_ephemeral_drop())
            {
-                // TODO
-                auto& ptr = values.access_pointer_forward(forward_bytes, iter->type_size());
-                --*ptr;
+                auto [_, ptr] = values.access_pointer_forward(forward_bytes, iter->type_size());
+                ++*ptr;
            }
        }
 
src/tree.cpp
@@ -283,7 +283,7 @@ namespace blt::gp
                for_bytes += it.type_size();
                if (it.get_flags().is_ephemeral() && it.has_ephemeral_drop())
                {
-                    auto& ptr = values.access_pointer(for_bytes + after_bytes, it.type_size());
+                    auto [_, ptr] = values.access_pointer(for_bytes + after_bytes, it.type_size());
                    ++*ptr;
                }
            }
@@ -295,25 +295,25 @@ namespace blt::gp
 
    void tree_t::swap_subtrees(const subtree_point_t our_subtree, tree_t& other_tree, const subtree_point_t other_subtree)
    {
-        const auto our_point_begin_itr = operations.begin() + our_subtree.pos;
-        const auto our_point_end_itr = operations.begin() + find_endpoint(our_subtree.pos);
+        const auto c1_subtree_begin_itr = operations.begin() + our_subtree.pos;
+        const auto c1_subtree_end_itr = operations.begin() + find_endpoint(our_subtree.pos);
 
-        const auto other_point_begin_itr = other_tree.operations.begin() + other_subtree.pos;
-        const auto other_point_end_itr = other_tree.operations.begin() + other_tree.find_endpoint(other_subtree.pos);
+        const auto c2_subtree_begin_itr = other_tree.operations.begin() + other_subtree.pos;
+        const auto c2_subtree_end_itr = other_tree.operations.begin() + other_tree.find_endpoint(other_subtree.pos);
 
-        thread_local tracked_vector<op_container_t> c1_operators;
-        thread_local tracked_vector<op_container_t> c2_operators;
-        c1_operators.clear();
-        c2_operators.clear();
+        thread_local tracked_vector<op_container_t> c1_subtree_operators;
+        thread_local tracked_vector<op_container_t> c2_subtree_operators;
+        c1_subtree_operators.clear();
+        c2_subtree_operators.clear();
 
-        c1_operators.reserve(std::distance(our_point_begin_itr, our_point_end_itr));
-        c2_operators.reserve(std::distance(other_point_begin_itr, other_point_end_itr));
+        c1_subtree_operators.reserve(std::distance(c1_subtree_begin_itr, c1_subtree_end_itr));
+        c2_subtree_operators.reserve(std::distance(c2_subtree_begin_itr, c2_subtree_end_itr));
 
        // i don't think this is required for swapping values, since the total number of additions is net zero
        // the tree isn't destroyed at any point.
 
-        size_t for_our_bytes = 0;
-        for (const auto& it : iterate(our_point_begin_itr, our_point_end_itr))
+        size_t c1_subtree_bytes = 0;
+        for (const auto& it : iterate(c1_subtree_begin_itr, c1_subtree_end_itr))
        {
            if (it.is_value())
            {
@@ -322,13 +322,13 @@ namespace blt::gp
                // auto& ptr = values.access_pointer_forward(for_our_bytes, it.type_size());
                // ++*ptr;
                // }
-                for_our_bytes += it.type_size();
+                c1_subtree_bytes += it.type_size();
            }
-            c1_operators.emplace_back(it);
+            c1_subtree_operators.push_back(it);
        }
 
-        size_t for_other_bytes = 0;
-        for (const auto& it : iterate(other_point_begin_itr, other_point_end_itr))
+        size_t c2_subtree_bytes = 0;
+        for (const auto& it : iterate(c2_subtree_begin_itr, c2_subtree_end_itr))
        {
            if (it.is_value())
            {
@@ -337,20 +337,20 @@ namespace blt::gp
                // auto& ptr = values.access_pointer_forward(for_other_bytes, it.type_size());
                // ++*ptr;
                // }
-                for_other_bytes += it.type_size();
+                c2_subtree_bytes += it.type_size();
            }
-            c2_operators.emplace_back(it);
+            c2_subtree_operators.push_back(it);
        }
 
-        const size_t c1_stack_after_bytes = accumulate_type_sizes(our_point_end_itr, operations.end());
-        const size_t c2_stack_after_bytes = accumulate_type_sizes(other_point_end_itr, other_tree.operations.end());
-        const auto c1_total = static_cast<ptrdiff_t>(c1_stack_after_bytes + for_our_bytes);
-        const auto c2_total = static_cast<ptrdiff_t>(c2_stack_after_bytes + for_other_bytes);
+        const size_t c1_stack_after_bytes = accumulate_type_sizes(c1_subtree_end_itr, operations.end());
+        const size_t c2_stack_after_bytes = accumulate_type_sizes(c2_subtree_end_itr, other_tree.operations.end());
+        const auto c1_total = static_cast<ptrdiff_t>(c1_stack_after_bytes + c1_subtree_bytes);
+        const auto c2_total = static_cast<ptrdiff_t>(c2_stack_after_bytes + c2_subtree_bytes);
        const auto copy_ptr_c1 = get_thread_pointer_for_size<struct c1_t>(c1_total);
        const auto copy_ptr_c2 = get_thread_pointer_for_size<struct c2_t>(c2_total);
 
-        values.reserve(values.bytes_in_head() - for_our_bytes + for_other_bytes);
-        other_tree.values.reserve(other_tree.values.bytes_in_head() - for_other_bytes + for_our_bytes);
+        values.reserve(values.bytes_in_head() - c1_subtree_bytes + c2_subtree_bytes);
+        other_tree.values.reserve(other_tree.values.bytes_in_head() - c2_subtree_bytes + c1_subtree_bytes);
 
        values.copy_to(copy_ptr_c1, c1_total);
        values.pop_bytes(c1_total);
@@ -358,22 +358,22 @@ namespace blt::gp
        other_tree.values.copy_to(copy_ptr_c2, c2_total);
        other_tree.values.pop_bytes(c2_total);
 
-        other_tree.values.copy_from(copy_ptr_c1, for_our_bytes);
-        other_tree.values.copy_from(copy_ptr_c2 + for_other_bytes, c2_stack_after_bytes);
+        other_tree.values.copy_from(copy_ptr_c1, c1_subtree_bytes);
+        other_tree.values.copy_from(copy_ptr_c2 + c2_subtree_bytes, c2_stack_after_bytes);
 
-        values.copy_from(copy_ptr_c2, for_other_bytes);
-        values.copy_from(copy_ptr_c1 + for_our_bytes, c1_stack_after_bytes);
+        values.copy_from(copy_ptr_c2, c2_subtree_bytes);
+        values.copy_from(copy_ptr_c1 + c1_subtree_bytes, c1_stack_after_bytes);
 
        // now swap the operators
-        auto insert_point_c1 = our_point_begin_itr - 1;
-        auto insert_point_c2 = other_point_begin_itr - 1;
+        auto insert_point_c1 = c1_subtree_begin_itr - 1;
+        auto insert_point_c2 = c2_subtree_begin_itr - 1;
 
        // invalidates [begin, end()) so the insert points should be fine
-        operations.erase(our_point_begin_itr, our_point_end_itr);
-        other_tree.operations.erase(other_point_begin_itr, other_point_end_itr);
+        operations.erase(c1_subtree_begin_itr, c1_subtree_end_itr);
+        other_tree.operations.erase(c2_subtree_begin_itr, c2_subtree_end_itr);
 
-        operations.insert(++insert_point_c1, c2_operators.begin(), c2_operators.end());
-        other_tree.operations.insert(++insert_point_c2, c1_operators.begin(), c1_operators.end());
+        operations.insert(++insert_point_c1, c2_subtree_operators.begin(), c2_subtree_operators.end());
+        other_tree.operations.insert(++insert_point_c2, c1_subtree_operators.begin(), c1_subtree_operators.end());
    }
 
    void tree_t::replace_subtree(const subtree_point_t point, const ptrdiff_t extent, tree_t& other_tree)
@@ -391,12 +391,10 @@ namespace blt::gp
                for_bytes += it.type_size();
                if (it.get_flags().is_ephemeral() && it.has_ephemeral_drop())
                {
-                    auto& ptr = values.access_pointer(for_bytes + after_bytes, it.type_size());
+                    auto [val, ptr] = values.access_pointer(for_bytes + after_bytes, it.type_size());
                    --*ptr;
                    if (*ptr == 0)
-                    {
-                        // TODO
-                    }
+                        handle_ptr_empty(ptr, val, it.id());
                }
            }
        }
@@ -413,7 +411,7 @@ namespace blt::gp
            {
                if (v.get_flags().is_ephemeral() && v.has_ephemeral_drop())
                {
-                    auto& pointer = other_tree.values.access_pointer_forward(copy_bytes, v.type_size());
+                    auto [_, pointer] = other_tree.values.access_pointer_forward(copy_bytes, v.type_size());
                    ++*pointer;
                }
                copy_bytes += v.type_size();
@@ -440,12 +438,10 @@ namespace blt::gp
                for_bytes += it.type_size();
                if (it.get_flags().is_ephemeral() && it.has_ephemeral_drop())
                {
-                    auto& ptr = values.access_pointer(for_bytes + after_bytes, it.type_size());
+                    auto [val, ptr] = values.access_pointer(for_bytes + after_bytes, it.type_size());
                    --*ptr;
                    if (*ptr == 0)
-                    {
-                        // TODO
-                    }
+                        handle_ptr_empty(ptr, val, it.id());
                }
            }
        }
@@ -471,7 +467,7 @@ namespace blt::gp
                bytes += it.type_size();
                if (it.get_flags().is_ephemeral() && it.has_ephemeral_drop())
                {
-                    auto& ptr = other_tree.values.access_pointer(bytes, it.type_size());
+                    auto [_, ptr] = other_tree.values.access_pointer(bytes, it.type_size());
                    ++*ptr;
                }
            }
@@ -510,13 +506,20 @@ namespace blt::gp
            m_program->get_operator_info(op.id()).func(nullptr, values, values);
            if (m_program->operator_has_ephemeral_drop(op.id()))
            {
-                auto& ptr = values.access_pointer(op.type_size(), op.type_size());
+                auto [_, ptr] = values.access_pointer(op.type_size(), op.type_size());
                ptr = new std::atomic_uint64_t(1);
                ptr.bit(0, true);
            }
        }
    }
 
+    void tree_t::handle_ptr_empty(const mem::pointer_storage<std::atomic_uint64_t>& ptr, u8* data, const operator_id id) const
+    {
+        m_program->get_destroy_func(id)(detail::destroy_t::RETURN, data);
+        delete ptr.get();
+        // BLT_INFO("Deleting pointer!");
+    }
+
    evaluation_context& tree_t::evaluate(void* ptr) const
    {
        return m_program->get_eval_func()(*this, ptr);
@@ -573,7 +576,7 @@ namespace blt::gp
        const auto v1 = results.values.bytes_in_head();
        const auto v2 = static_cast<ptrdiff_t>(operations.front().type_size());
 
-        m_program->get_destroy_func(operations.front().id())(detail::destroy_t::RETURN, results.values);
+        m_program->get_destroy_func(operations.front().id())(detail::destroy_t::RETURN, results.values.from(operations.front().type_size()));
        if (v1 != v2)
        {
            const auto vd = std::abs(v1 - v2);
@@ -596,10 +599,11 @@ namespace blt::gp
 
    void tree_t::find_child_extends(tracked_vector<child_t>& vec, const size_t parent_node, const size_t argc) const
    {
+        BLT_ASSERT_MSG(vec.empty(), "Vector to find_child_extends should be empty!");
        while (vec.size() < argc)
        {
            const auto current_point = vec.size();
-            child_t prev{};
+            child_t prev; // NOLINT
            if (current_point == 0)
            {
                // first child.
@@ -628,15 +632,10 @@ namespace blt::gp
        {
            if (op.get_flags().is_ephemeral() && op.has_ephemeral_drop())
            {
-                auto& ptr = values.access_pointer_forward(total_bytes, op.type_size());
+                auto [val, ptr] = values.access_pointer_forward(total_bytes, op.type_size());
                --*ptr;
-                // TODO
-                // BLT_TRACE(ptr->load());
-                // if (*ptr == 0)
-                // {
-                //     BLT_TRACE("Deleting pointers!");
-                //     delete ptr.get();
-                // }
+                if (*ptr == 0)
+                    handle_ptr_empty(ptr, val, op.id());
            }
            total_bytes += op.type_size();
        }
@@ -671,12 +670,10 @@ namespace blt::gp
            move_data.move(after_bytes);
            if (operations[point].get_flags().is_ephemeral() && operations[point].has_ephemeral_drop())
            {
-                const auto& ptr = values.access_pointer(operations[point].type_size(), operations[point].type_size());
+                auto [val, ptr] = values.access_pointer(operations[point].type_size(), operations[point].type_size());
                --*ptr;
                if (*ptr == 0)
-                {
-                    // TODO:
-                }
+                    handle_ptr_empty(ptr, val, operations[point].id());
            }
            values.pop_bytes(operations[point].type_size());
        }
@@ -50,8 +50,10 @@ struct drop_type
    void drop() const
    {
        if (ephemeral)
+        {
+            std::cout << ("Ephemeral drop") << std::endl;
            ++ephemeral_drop;
-        else
+        }else
            ++normal_drop;
    }
 
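The example's drop_type counts constructions and drops through the normal_/ephemeral_ atomics that main() traces at the end of the run. A reduced, standalone sketch of that drop-accounting pattern (names are illustrative, not the example's exact code):

    #include <atomic>
    #include <iostream>

    std::atomic_int64_t normal_construct{0};
    std::atomic_int64_t normal_drop{0};

    struct drop_type_sketch
    {
        drop_type_sketch() { ++normal_construct; }
        void drop() const { ++normal_drop; }   // cleanup hook the owner must invoke exactly once
    };

    int main()
    {
        {
            drop_type_sketch a;
            drop_type_sketch b;
            a.drop();
            b.drop();
        }
        std::cout << "Created " << normal_construct.load() << " times\n";
        std::cout << "Dropped " << normal_drop.load() << " times\n";
        // a leak in the ownership scheme would show up as created > dropped
        return (normal_construct.load() == normal_drop.load()) ? 0 : 1;
    }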
@@ -73,7 +75,7 @@ prog_config_t config = prog_config_t()
        .set_elite_count(2)
        .set_crossover_chance(0.8)
        .set_mutation_chance(0.0)
-        .set_reproduction_chance(0.0)
+        .set_reproduction_chance(0.1)
        .set_max_generations(50)
        .set_pop_size(50)
        .set_thread_count(1);
@@ -132,6 +134,7 @@ int main()
    program.generate_population(program.get_typesystem().get_type<drop_type>().id(), fitness_function, sel, sel, sel);
    while (!program.should_terminate())
    {
+        BLT_TRACE("---------------{Begin Generation %lu}---------------", program.get_current_generation());
        BLT_TRACE("Creating next generation");
        program.create_next_generation();
        BLT_TRACE("Move to next generation");
@@ -142,8 +145,13 @@ int main()
 
        // program.get_best_individuals<1>()[0].get().tree.print(program, std::cout, true, true);
+
+        regression.get_program().get_current_pop().clear();
+        regression.get_program().next_generation();
+        regression.get_program().get_current_pop().clear();
+
        BLT_TRACE("Created %ld times", normal_construct.load());
        BLT_TRACE("Dropped %ld times", normal_drop.load());
        BLT_TRACE("Ephemeral created %ld times", ephemeral_construct.load());
        BLT_TRACE("Ephemeral dropped %ld times", ephemeral_drop.load());
 
    }