22 const int count = counts_to_offsets[i];
24 counts_to_offsets[i] = offset;
30 counts_to_offsets.
last() = offset;
46 const int count = counts_to_offsets[i];
48 counts_to_offsets[i] = offset;
51 counts_to_offsets.
last() = offset;
52 const bool has_overflow = offset >= std::numeric_limits<int>::max();
62 threading::parallel_for(offsets.index_range(), 1024, [&](const IndexRange range) {
63 for (const int64_t i : range) {
64 offsets[i] = size * i + start_offset;
75 [&](
const int64_t i) { sizes[i] = offsets[i].
size(); });
91 threading::memory_bandwidth_bound_task(
93 threading::parallel_for(indices.index_range(), 4096, [&](const IndexRange range) {
94 for (const int i : range) {
95 sizes[i] = offsets[indices[i]].size();
113 mask.foreach_segment_optimized([&](
const auto segment) {
114 if constexpr (std::is_same_v<std::decay_t<
decltype(segment)>,
IndexRange>) {
118 for (
const int64_t i : segment) {
136 dst_offsets[
pos] = offset;
137 offset += src_offsets[i].
size();
139 dst_offsets.
last() = offset;
146 for (const int64_t i : range) {
147 r_map.slice(offsets[i]).fill(i);
155 array_utils::count_indices(
indices, offsets);
156 offset_indices::accumulate_counts_to_offsets(offsets);
/* No-op stub: expands to nothing, discarding both the asserted expression `a`
 * and the message `msg`. Presumably the assert-disabled (release/NDEBUG)
 * variant of the macro — TODO confirm against the debug-build definition
 * before relying on side effects inside the asserted expression (they are
 * silently dropped here). */
#define BLI_assert_msg(a, msg)
/* No-op stub: expands to nothing for any argument list, so the named
 * variables are simply left untouched in this configuration. NOTE(review):
 * the _NDEBUG suffix suggests a counterpart build where this marks variables
 * as intentionally unused to silence compiler warnings — verify against the
 * other build configuration's definition. */
#define UNUSED_VARS_NDEBUG(...)
static DBVT_INLINE btScalar size(const btDbvtVolume &a)
constexpr IndexRange drop_back(int64_t n) const
constexpr int64_t size_in_bytes() const
constexpr T * end() const
constexpr T * begin() const
constexpr IndexRange index_range() const
constexpr T & last(const int64_t n=0) const
void foreach_index_optimized(Fn &&fn) const
IndexRange index_range() const
ccl_device_inline float2 mask(const MaskType mask, const float2 a)
void copy_group_sizes(OffsetIndices< int > offsets, const IndexMask &mask, MutableSpan< int > sizes)
OffsetIndices< int > accumulate_counts_to_offsets(MutableSpan< int > counts_to_offsets, int start_offset=0)
std::optional< OffsetIndices< int > > accumulate_counts_to_offsets_with_overflow_check(MutableSpan< int > counts_to_offsets, int start_offset=0)
void gather_group_sizes(OffsetIndices< int > offsets, const IndexMask &mask, MutableSpan< int > sizes)
void fill_constant_group_size(int size, int start_offset, MutableSpan< int > offsets)
void build_reverse_offsets(Span< int > indices, MutableSpan< int > offsets)
int sum_group_sizes(OffsetIndices< int > offsets, const IndexMask &mask)
OffsetIndices< int > gather_selected_offsets(OffsetIndices< int > src_offsets, const IndexMask &selection, int start_offset, MutableSpan< int > dst_offsets)
void memory_bandwidth_bound_task(const int64_t approximate_bytes_touched, const Function &function)