tmp: bug fixes for namespaces and construct_at when iterator empty
Yan-Tong Lin committed Jul 18, 2024
1 parent 9036968 commit 0c139d2
Showing 2 changed files with 214 additions and 152 deletions.
144 changes: 100 additions & 44 deletions include/fixed_containers/struct_view.hpp
@@ -23,6 +23,8 @@
#include <type_traits>
#include <utility>

#include <iostream>

/**
* Terminologies
*
@@ -174,9 +176,13 @@ constexpr void for_each_path_dfs_helper(S&& instance,
fixed_containers::in_out<PathNameChain> chain)
{
std::forward<PreFunction>(pre_fn)(std::as_const(*chain), std::forward<S>(instance));
// into data
// recurse into an element instance, construct a dummy instance at `begin()` if the range is empty
chain->push_back(ITERABLE_PATH_NAME);
for_each_path_dfs_helper(*std::forward<S>(instance.data()),
if (std::ranges::empty(instance))
{
std::ranges::construct_at(instance.data());
}
for_each_path_dfs_helper(*instance.data(),
std::forward<PreFunction>(pre_fn),
std::forward<PostFunction>(post_fn),
fixed_containers::in_out{*chain});
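
The hunk above is the construct_at fix from the commit message: when the iterable is empty there is no element instance to recurse into, so a throwaway element is constructed in the container's storage before the DFS descends. A minimal sketch of the idea, assuming the usual fixed_containers/fixed_vector.hpp header and a hypothetical Point element type:

#include <fixed_containers/fixed_vector.hpp>
#include <memory>

struct Point
{
    double x{};
    double y{};
};

void recurse_into_element_even_if_empty()
{
    fixed_containers::FixedVector<Point, 8> points{};  // empty, but data() still points at storage
    if (points.empty())
    {
        std::construct_at(points.data());  // dummy Point in slot 0; size() remains 0
    }
    const Point& element = *points.data();  // a live element instance the DFS can descend into
    (void)element;
}
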
@@ -232,9 +238,9 @@ constexpr void for_each_index_helper(const FixedTensorView<MAXIMUM_SIZE>& offset,
auto&& func,
Indices<MAXIMUM_SIZE>& indices)
{
// DIM == std::size(indices)
if (DIM == offset.dim)
{
assert_or_abort(offset.dim == std::size(indices));
func(indices);
return;
}
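
For context, this helper enumerates every index tuple of a fixed-rank shape and calls func once per tuple; the change above promotes the old DIM == std::size(indices) comment into an assert_or_abort. A self-contained sketch of the same recursion pattern (Shape and visit_indices are illustrative names, not the library's API):

#include <array>
#include <cstddef>

template <std::size_t MAX_DIM>
struct Shape
{
    std::size_t dim{};                           // number of used dimensions
    std::array<std::size_t, MAX_DIM> extents{};  // extent per used dimension
};

template <std::size_t DIM, std::size_t MAX_DIM, typename Func>
void visit_indices(const Shape<MAX_DIM>& shape,
                   Func&& func,
                   std::array<std::size_t, MAX_DIM>& indices)
{
    if (DIM == shape.dim)  // base case: a full index tuple has been assembled
    {
        func(indices);
        return;
    }
    if constexpr (DIM < MAX_DIM)
    {
        for (std::size_t i = 0; i < shape.extents[DIM]; ++i)
        {
            indices[DIM] = i;
            visit_indices<DIM + 1>(shape, func, indices);
        }
    }
}

Calling visit_indices<0> with Shape<2>{2, {3, 4}} visits all twelve (i, j) index pairs.
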
@@ -298,59 +304,60 @@ enum class StructTreeNodeType
template <typename S>
struct StructTreeNodeTypeOf
{
static constexpr StructTreeNodeType value = StructTreeNodeType::NOT_SUPPORTED;
}
static constexpr StructTreeNodeType value = StructTreeNodeType::UNREACHABLE;
};

template <typename S>
inline constexpr StructTreeNodeType struct_tree_node_type_of_v = StructTreeNodeTypeOf<S>::value;

template <Optional T>
struct StructTreeNodeTypeOf
template <struct_view_detail::Optional T>
struct StructTreeNodeTypeOf<T>
{
static constexpr StructTreeNodeType value = StructTreeNodeType::OPTIONAL;
}
};

template <Iterable S>
struct StructTreeNodeTypeOf
template <struct_view_detail::Iterable S>
struct StructTreeNodeTypeOf<S>
{
static constexpr StructTreeNodeType value = StructTreeNodeType::ITER_NOT_SUPPORTED;
}
};

template <typename T, std::size_t MAXIMUM_SIZE>
struct StructTreeNodeTypeOf<fixed_containers::FixedVector<T, MAXIMUM_SIZE>>
{
static constexpr StructTreeNodeType value = StructTreeNodeType::FIXED_VECTOR;
}
};

template <typename T, std::size_t MAXIMUM_SIZE>
struct StructTreeNodeTypeOf<std::array<T, MAXIMUM_SIZE>>
{
static constexpr StructTreeNodeType value = StructTreeNodeType::STD_ARRAY;
}
};

template <Terminal T>
struct StructTreeNodeTypeOf
template <struct_view_detail::Terminal T>
struct StructTreeNodeTypeOf<T>
{
static constexpr StructTreeNodeType value = StructTreeNodeType::TERMINAL_NOT_SUPPORTED;
}
};

template <Terminal T>
requires(std::is_arithmetic_v<T>)
struct StructTreeNodeTypeOf
template <struct_view_detail::Terminal T>
requires((std::is_pointer_v<T> && std::is_arithmetic_v<std::remove_pointer_t<T>>) || std::is_arithmetic_v<T>)
struct StructTreeNodeTypeOf<T>
{
static constexpr StructTreeNodeType value = StructTreeNodeType::PRIMITIVE;
}
};
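
The namespace fixes above are twofold: the concepts are now qualified with struct_view_detail::, and each specialization correctly names StructTreeNodeTypeOf<T> (terminated with };) instead of redeclaring the primary template. The resulting dispatch can be checked at compile time; a hedged sketch, assuming the trait is reachable from the current namespace (adjust qualification as needed):

#include <array>
#include <optional>
#include <fixed_containers/fixed_vector.hpp>
#include <fixed_containers/struct_view.hpp>

static_assert(struct_tree_node_type_of_v<double> == StructTreeNodeType::PRIMITIVE);
static_assert(struct_tree_node_type_of_v<std::optional<int>> == StructTreeNodeType::OPTIONAL);
static_assert(struct_tree_node_type_of_v<fixed_containers::FixedVector<float, 4>> ==
              StructTreeNodeType::FIXED_VECTOR);
static_assert(struct_tree_node_type_of_v<std::array<int, 3>> == StructTreeNodeType::STD_ARRAY);

The widened requires-clause above also admits pointers to arithmetic types (e.g. double*) as PRIMITIVE, provided they satisfy the Terminal concept.
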

// helper functions for reading run-time values through type-erased field pointers
inline bool optional_has_value(const void* field_ptr)
{
return static_cast<const std::optional<std::byte>*>(field_ptr)->has_value();
}

using FixedVectorProxyType = fixed_containers::FixedVector<std::byte, 100>;

inline std::size_t fixed_vector_size(const void* field_ptr)
{
return static_cast<const fixed_containers::FixedVector<std::byte, 0>*>(field_ptr)->size();
return static_cast<const FixedVectorProxyType*>(field_ptr)->size();
}
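
Both helpers read run-time state through a type-erased const void*: the field pointer is cast to a fixed proxy type (std::optional<std::byte>, or FixedVectorProxyType for vectors) whose has_value()/size() layout is assumed not to depend on the element type or capacity. A hedged usage sketch (namespace qualification of the helpers is assumed):

#include <cstddef>
#include <optional>
#include <fixed_containers/fixed_vector.hpp>
#include <fixed_containers/struct_view.hpp>

void inspect_type_erased_fields()
{
    const std::optional<double> maybe_value{3.5};
    const fixed_containers::FixedVector<int, 17> numbers{1, 2, 3};

    const bool engaged = optional_has_value(&maybe_value);  // true
    const std::size_t count = fixed_vector_size(&numbers);  // 3, regardless of the proxy's capacity of 100
    (void)engaged;
    (void)count;
}
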

// TODO: we may need a std::variant of metadata types instead of an enum to support more complex types
@@ -470,8 +477,8 @@ auto extract_path_properties_of_filtered(
{
PathPropertiesMap<MAXIMUM_SIZE_OUT> paths{};
std::size_t dim = 0;
Capacity<MAXIMUM_SIZE_OUT> capacity;
Strides<MAXIMUM_SIZE_OUT> strides;
Capacity capacity;
Strides strides;

for_each_path_dfs(
instance,
@@ -482,14 +489,6 @@
return;
}

if constexpr (struct_view_detail::Iterable<F>)
{
auto type = struct_tree_node_type_of_v<F>;
++dim;
strides.push_back(sizeof(typename std::ranges::range_value_t<F>));
capacity.push_back(type == FIXED_VECTOR ? field.capacity() : std::size(field));
}

// only these node types get a path-property entry
if constexpr (struct_view_detail::Terminal<F> || struct_view_detail::Iterable<F> || struct_view_detail::Optional<F>)
{
@@ -506,6 +505,21 @@
});
assert_or_abort(was_inserted);
}

if constexpr (struct_view_detail::Iterable<F>)
{
constexpr auto type = struct_tree_node_type_of_v<F>;
++dim;
strides.push_back(sizeof(typename std::ranges::range_value_t<F>));
if constexpr (type == StructTreeNodeType::FIXED_VECTOR)
{
capacity.push_back(field.capacity());
}
else
{
capacity.push_back(std::size(field));
}
}
},
[&]<typename F>(const PathNameChain& /*chain*/, const F& /*field*/)
{
@@ -531,8 +545,7 @@ void for_each_index(const FixedTensorView& offset, auto&& func)
for_each_index_helper<0>(offset, func, indices);
}

// When InPointer is const, OutPointer must also be const
// They can only be byte or void pointers
// Get the field pointer of a struct given a path and indices
template <typename InPointer, typename OutPointer>
inline OutPointer get_field(InPointer instance,
const PathProperties& path_properties,
@@ -541,10 +554,13 @@ inline OutPointer get_field(InPointer instance,
using BytePointer = std::conditional_t<std::is_const_v<std::remove_pointer_t<InPointer>>,
const std::byte*,
std::byte*>;
using VoidPointer = std::conditional_t<std::is_const_v<std::remove_pointer_t<InPointer>>,
const void*,
void*>;

return static_cast<OutPointer>(
return static_cast<OutPointer>(static_cast<VoidPointer>(
std::next(static_cast<BytePointer>(instance),
static_cast<std::ptrdiff_t>(path_properties.offset.get_offset(indices))));
static_cast<std::ptrdiff_t>(path_properties.offset.get_offset(indices)))));
}
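
get_field is plain pointer arithmetic: the instance's base address plus the byte offset that FixedTensorView::get_offset computes from the indices. The newly added intermediate void* cast matters because static_cast cannot convert std::byte* directly to an unrelated object pointer type such as the FixedVectorProxyType* used below. The arithmetic reduces to something like this sketch, with a hypothetical Pose struct and a hand-written offset standing in for the offset machinery:

#include <cstddef>

struct Pose
{
    double position[3];
    double heading;
};

inline const double* heading_of(const void* instance)
{
    constexpr std::size_t heading_offset = offsetof(Pose, heading);
    const std::byte* base = static_cast<const std::byte*>(instance);
    const void* field = static_cast<const void*>(base + heading_offset);
    return static_cast<const double*>(field);  // byte* -> void* -> typed pointer
}
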

// This is the user facing interface for a view of a struct
@@ -626,13 +642,13 @@ template <typename Function, std::size_t MAXIMUM_SIZE = MAX_NUM_PATHS>
void for_each_field(const StructView<MAXIMUM_SIZE>& struct_view,
void* base_pointer,
Function&& func)
requires(std::invocable<Function, const PathNameChain&, const Indices&, void*>)
requires(std::invocable<Function, const PathNameChain&, const PathProperties&, const Indices&, void*>)
{
for (const auto& [path, path_properties] : struct_view.get_path_map_ref())
{
for_each_index(path_properties.offset,
[&](const Indices& indices)
{ func(path, indices, get_field<void*, void*>(base_pointer, path_properties, indices)); });
{ func(path, path_properties, indices, get_field<void*, void*>(base_pointer, path_properties, indices)); });
}
}
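
Passing the PathProperties through to the visitor lets callers branch on the node type without a second lookup in the path map. A hedged usage sketch (the generic lambda sidesteps the exact Indices alias; qualify StructView and StructTreeNodeType as needed):

#include <fixed_containers/struct_view.hpp>

template <std::size_t MAXIMUM_SIZE>
void visit_primitive_fields(const StructView<MAXIMUM_SIZE>& view, void* base_pointer)
{
    for_each_field(view,
                   base_pointer,
                   [](const auto& path, const auto& path_properties, const auto& indices, void* field_ptr)
                   {
                       if (path_properties.type != StructTreeNodeType::PRIMITIVE)
                       {
                           return;  // skip optional/iterable bookkeeping nodes
                       }
                       // field_ptr is the address of one primitive leaf at `indices` under `path`
                       (void)path;
                       (void)indices;
                       (void)field_ptr;
                   });
}
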

@@ -646,17 +662,34 @@ void sub_struct_view_of(const void* base_super_struct_pointer,
{
for (const auto& [path, path_properties] : sub_struct_view.get_path_map_ref())
{
std::cout << "path: " << path_to_string(path) << "\n";
FixedTensorView sub_struct_offset = path_properties.offset;
for_each_index(sub_struct_offset,
[&](const auto& indices)
{
const std::byte* super_struct_field_ptr =
get_field<const void*, const std::byte*>(
base_super_struct_pointer, super_struct_view.at(path), indices);
std::byte* sub_struct_field_ptr = get_field<void*, std::byte*>(
base_sub_struct_pointer, sub_struct_view.at(path), indices);
*reinterpret_cast<std::uintptr_t*>(sub_struct_field_ptr) =
reinterpret_cast<std::uintptr_t>(super_struct_field_ptr);
std::cout << "indices: ";
for(auto&& index: indices) std::cout << index << " ";
std::cout << "\n";

if (path_properties.type == StructTreeNodeType::FIXED_VECTOR)
{
const auto super_struct_field_ptr = get_field<const void*, const FixedVectorProxyType*>(
base_super_struct_pointer, super_struct_view.at(path), indices);
const auto sub_struct_field_ptr = get_field<void*, FixedVectorProxyType*>(
base_sub_struct_pointer, sub_struct_view.at(path), indices);
sub_struct_field_ptr->resize(super_struct_field_ptr->size());
}

if (path_properties.type == StructTreeNodeType::PRIMITIVE)
{
const std::byte* super_struct_field_ptr =
get_field<const void*, const std::byte*>(
base_super_struct_pointer, super_struct_view.at(path), indices);
std::byte* sub_struct_field_ptr = get_field<void*, std::byte*>(
base_sub_struct_pointer, sub_struct_view.at(path), indices);
*reinterpret_cast<std::uintptr_t*>(sub_struct_field_ptr) =
reinterpret_cast<std::uintptr_t>(super_struct_field_ptr);
}
});
}
}
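
The loop now does two different things per path: FIXED_VECTOR nodes resize the sub struct's vector to the source's current size, and PRIMITIVE leaves store the address of the corresponding super-struct field into the pointer-sized slot of the sub struct. Stripped of the view machinery, the rebinding step amounts to this sketch (hypothetical structs; it mirrors the uintptr_t write above and assumes pointers round-trip through std::uintptr_t):

#include <cstddef>
#include <cstdint>

struct SuperExample
{
    double temperature{21.5};
};

struct SubExampleView
{
    const double* temperature{nullptr};  // pointer-sized slot filled by the rebinding step
};

inline void rebind(const SuperExample& super, SubExampleView& sub)
{
    const std::byte* super_field_ptr = reinterpret_cast<const std::byte*>(&super.temperature);
    std::byte* sub_field_ptr = reinterpret_cast<std::byte*>(&sub.temperature);
    *reinterpret_cast<std::uintptr_t*>(sub_field_ptr) =
        reinterpret_cast<std::uintptr_t>(super_field_ptr);
    // sub.temperature now refers to super.temperature
}
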
@@ -680,6 +713,29 @@ void sub_struct_view_of(const SuperStruct& super_struct,
base_super_struct_pointer, super_struct_view, base_sub_struct_pointer, sub_struct_view);
}

// Adaptors for classes
template <typename T>
class OptionalView
{
private:
bool has_value_;
T value_;

public:
bool has_value()
{
return has_value_;
}
const T& value()
{
return value_;
}
};

// use `FixedVector` as the iterable adaptor for now and resize it directly
template <typename T, std::size_t MAXIMUM_SIZE>
using IterableView = fixed_containers::FixedVector<T, MAXIMUM_SIZE>;
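
One plausible shape of a sub struct built from these adaptors, mirroring an optional and an iterable field of a super struct; the field layout is illustrative only, since how sub_struct_view_of populates the adaptors is not shown in this hunk:

#include <optional>
#include <fixed_containers/fixed_vector.hpp>
#include <fixed_containers/struct_view.hpp>

struct SuperRobotState
{
    std::optional<double> battery_voltage;
    fixed_containers::FixedVector<double, 16> joint_angles;
};

struct SubRobotStateView
{
    OptionalView<const double*> battery_voltage;   // flag plus a rebindable pointer slot
    IterableView<const double*, 16> joint_angles;  // resized to match the source vector
};
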

template <typename SubStruct, std::size_t MAXIMUM_SIZE = MAX_NUM_PATHS>
class ContiguousRangeSubStructView
{
