1#ifndef BM__H__INCLUDED__
2#define BM__H__INCLUDED__
30# include <initializer_list>
37#pragma warning( push )
38#pragma warning( disable : 4311 4312 4127)
47# define BM_DECLARE_TEMP_BLOCK(x) bm::bit_block_t x;
158 position_(ref.position_)
160 bv_.set(position_, ref.bv_.get_bit(position_));
165 return bv_.get_bit(position_);
170 bv_.set(position_, (
bool)ref);
176 bv_.set(position_, value);
182 return bool(*
this) == bool(ref);
188 bv_.set_bit_and(position_, value);
195 if (value != bv_.get_bit(position_))
197 bv_.set_bit(position_);
205 bv_.set(position_, value != bv_.get_bit(position_));
212 return !bv_.get_bit(position_);
218 return !bv_.get_bit(position_);
297 if (this->
bv_ != ib.bv_)
return false;
298 if (this->
position_ != ib.position_)
return false;
299 if (this->
block_ != ib.block_)
return false;
300 if (this->
block_type_ != ib.block_type_)
return false;
301 if (this->
block_idx_ != ib.block_idx_)
return false;
312 for (
unsigned i = 0; i < bd.
bit_.
cnt; ++i)
428 bvect_->set_bit_no_check(n);
500 buf_ =
bvect_->blockman_.get_allocator().alloc_bit_block();
517 buf_ = iit.buf_; iit.buf_ = 0;
538 buf_ = ii.buf_; ii.buf_ = 0;
695 return this->
valid();
712 typedef typename iterator_base::block_descr block_descr_type;
714 static bool decode_wave(block_descr_type* bdescr)
BMNOEXCEPT;
715 bool decode_bit_group(block_descr_type* bdescr)
BMNOEXCEPT;
716 bool decode_bit_group(block_descr_type* bdescr,
743 bit_count_ = this->
valid();
751 this->bit_count_ = 1;
758 this->bit_count_ += this->
valid();
766 this->bit_count_ += this->
valid();
792 friend class iterator_base;
793 friend class enumerator;
846 const Alloc& alloc = Alloc())
847 : blockman_(glevel_len, bv_size, alloc),
848 new_blocks_strat_(strat),
858 const Alloc& alloc = Alloc())
859 : blockman_(glevel_len, bv_size, alloc),
860 new_blocks_strat_(strat),
868 : blockman_(
bvect.blockman_),
869 new_blocks_strat_(
bvect.new_blocks_strat_),
878 : blockman_(
bvect.blockman_.glevel_len_,
bvect.blockman_.max_bits_,
bvect.blockman_.alloc_),
879 new_blocks_strat_(
bvect.new_blocks_strat_),
882 if (!
bvect.blockman_.is_init())
893 : blockman_(
bvect.blockman_.glevel_len_,
bvect.blockman_.max_bits_,
bvect.blockman_.alloc_),
894 new_blocks_strat_(
bvect.new_blocks_strat_),
897 if (!
bvect.blockman_.is_init())
900 blockman_.copy_to_arena(
bvect.blockman_);
902 blockman_.copy(
bvect.blockman_);
924 void init(
unsigned top_size,
bool alloc_subs);
942 blockman_.move_from(
bvect.blockman_);
944 new_blocks_strat_ =
bvect.new_blocks_strat_;
952 new_blocks_strat_(
BM_BIT),
956 std::initializer_list<size_type>::const_iterator it_start = il.begin();
957 std::initializer_list<size_type>::const_iterator it_end = il.end();
958 for (; it_start < it_end; ++it_start)
1035 {
return blockman_.get_allocator(); }
1041 { blockman_.get_allocator().set_pool(pool_ptr); }
1046 {
return blockman_.get_allocator().get_pool(); }
1287 insert_iterator
inserter() {
return insert_iterator(*
this); }
1921 {
return new_blocks_strat_; }
1944 statistics* stat = 0);
2059 {
return blockman_; }
2067 {
return blockman_; }
2081 const size_type ids_size,
bool opt_flag);
2198 void keep_range_no_check(
size_type left,
2207 unsigned nbit_right,
2217 unsigned nbit_right,
2234template<class Alloc>
2245template<
class Alloc>
2256template<
class Alloc>
2267template<
class Alloc>
2279template<
typename Alloc>
2283 blockman_.init_tree();
2288template<
typename Alloc>
2292 if (!blockman_.is_init())
2293 blockman_.init_tree(top_size);
2295 for (
unsigned nb = 0; nb < top_size; ++nb)
2296 blockman_.alloc_top_subblock(nb);
2302template<
typename Alloc>
2307 blockman_.deinit_tree();
2313 blockman_.copy_to_arena(
bvect.blockman_);
2314 size_ =
bvect.size();
2318 blockman_.copy(
bvect.blockman_);
2323 blockman_.copy_to_arena(
bvect.blockman_);
2324 size_ =
bvect.size();
2327 blockman_.copy(
bvect.blockman_);
2339template<
typename Alloc>
2344 blockman_.move_from(
bvect.blockman_);
2345 size_ =
bvect.size_;
2346 new_blocks_strat_ =
bvect.new_blocks_strat_;
2352template<
class Alloc>
2356 if (!blockman_.is_init())
2362 keep_range_no_check(left, right);
2367template<
typename Alloc>
2374 if (!blockman_.is_init() && !
value)
2400template<
typename Alloc>
2403 if (!blockman_.is_init())
2406 word_t*** blk_root = blockman_.top_blocks_root();
2410 unsigned top_blocks = blockman_.top_block_size();
2411 for (
unsigned i = 0; i < top_blocks; ++i)
2420 blk_blk = blk_root[i];
2434 cnt += blockman_.block_bitcount(blk_blk[j]);
2436 cnt += blockman_.block_bitcount(blk_blk[j+1]);
2438 cnt += blockman_.block_bitcount(blk_blk[j+2]);
2440 cnt += blockman_.block_bitcount(blk_blk[j+3]);
2450template<
typename Alloc>
2453 word_t*** blk_root = blockman_.top_blocks_root();
2456 typename blocks_manager_type::block_any_func func(blockman_);
2462template<
typename Alloc>
2467 if (size_ == new_size)
return;
2468 if (size_ < new_size)
2470 if (!blockman_.is_init())
2471 blockman_.init_tree();
2473 blockman_.reserve(new_size);
2485template<
typename Alloc>
2494 if (found && last >= size_)
2500template<
typename Alloc>
2515 if (!blockman_.is_init())
2528 unsigned real_top_blocks = blockman_.find_real_top_blocks();
2529 unsigned max_top_blocks = blockman_.find_max_top_blocks();
2532 rs_idx->set_total(nb + 1);
2533 rs_idx->resize(nb + 1);
2534 rs_idx->resize_effective_super_blocks(real_top_blocks);
2538 BM_ASSERT(max_top_blocks <= blockman_.top_block_size());
2539 bm::word_t*** blk_root = blockman_.top_blocks_root();
2540 for (
unsigned i = 0; i < max_top_blocks; ++i)
2545 rs_idx->set_null_super_block(i);
2550 rs_idx->set_full_super_block(i);
2566 bcount[j] = 0; sub_count[j] = 0;
2569 unsigned local_first, local_second, local_third;
2591 if (is_set) aux0 |= 1;
2594 if (is_set) aux1 |= 1;
2613 unsigned cnt = local_first + local_second + local_third;
2614 BM_ASSERT(cnt == blockman_.block_bitcount(block));
2617 if (bv_blocks && cnt)
2620 BM_ASSERT(cnt >= local_first + local_second);
2621 sub_count[j] =
bm::id64_t(local_first | (local_second << 16)) |
2622 (aux0 << 32) | (aux1 << 48);
2626 rs_idx->register_super_block(i, &bcount[0], &sub_count[0]);
2635template<
typename Alloc>
2639 bm::word_t*** blk_root = blockman_.top_blocks_root();
2642 typename blocks_manager_type::block_count_arr_func func(blockman_, &(arr[0]));
2644 return func.last_block();
2649#define BM_BORDER_TEST(blk, idx) bool(blk[idx >> bm::set_word_shift] & (1u << (idx & bm::set_word_mask)))
2652#if (defined(BMSSE42OPT) || defined(BMAVX2OPT) || defined(BMAVX512OPT) || \
2653 defined(BMWASMSIMDOPT) || defined(BMNEONOPT))
2656template<
typename Alloc>
2658bvector<Alloc>::block_count_to(
const bm::word_t* block,
2660 unsigned nbit_right,
2667 unsigned sub_cnt = unsigned(sub);
2668 unsigned first = sub_cnt & 0xFFFF;
2669 unsigned second = sub_cnt >> 16;
2676 unsigned cnt, aux0(
bm::gap_word_t(sub >> 32)); (void)cnt; (void)aux0;
2678 unsigned cnt1, aux1(
bm::gap_word_t(sub >> 48)); (void)aux1; (void) cnt1;
2683 #if defined(BM64_SSE4) || defined(BM64_AVX2) || defined(BM64_AVX512)
2686 const unsigned cutoff_bias = 0;
2689 unsigned sub_range = rs_idx.find_sub_range(nbit_right);
2730 unsigned bc_second_range =
first + second;
2734 c = bc_second_range;
2748 c = bc_second_range - c;
2777 c +=
first + second;
2802 c = rs_idx.count(nb);
2844template<
typename Alloc>
2848 unsigned nbit_right,
2854 unsigned sub_cnt = unsigned(sub);
2855 unsigned first = sub_cnt & 0xFFFF;
2856 unsigned second = sub_cnt >> 16;
2863 unsigned cnt, aux0(
bm::gap_word_t(sub >> 32)); (void)cnt; (void)aux0;
2865 unsigned cnt1, aux1(
bm::gap_word_t(sub >> 48)); (void)aux1; (void) cnt1;
2870 unsigned sub_choice =
2896 c =
first + second - c;
2910 c +=
first + second;
2923 c = rs_idx.count(nb);
2929 c = rs_idx.count(nb) - c;
2946#undef BM_BORDER_TEST
2950template<
typename Alloc>
2954 unsigned nbit_right,
2986 unsigned sub_cnt = unsigned(sub);
2988 unsigned second = (sub_cnt >> 16);
2997 unsigned sub_range = rs_idx.find_sub_range(nbit_right);
3043 c =
first + second - c;
3055 c +=
first + second;
3062 c = rs_idx.count(nb);
3086template<
typename Alloc>
3092 if (!blockman_.is_init())
3100 if (nb_right >= rs_idx.get_total())
3102 cnt = rs_idx.count();
3105 cnt = nb_right ? rs_idx.rcount(nb_right-1) : 0;
3109 const bm::word_t* block = blockman_.get_block_ptr(i, j);
3118 c = this->gap_count_to(gap_block, nb_right, nbit_right, rs_idx);
3124 return cnt + nbit_right + 1;
3128 c = this->block_count_to(block, nb_right, nbit_right, rs_idx);
3139template<
typename Alloc>
3145 if (!blockman_.is_init())
3152 const bm::word_t* block = blockman_.get_block_ptr(i, j);
3161 cnt = gap_count_to(gap_blk, nb_right, nbit_right, rs_idx,
true);
3182 cnt = block_count_to(block, nb_right, nbit_right, rs_idx);
3189 cnt += nb_right ? rs_idx.rcount(nb_right - 1) : 0;
3195template<
typename Alloc>
3201 if (!blockman_.is_init())
3207 size_type cnt = nblock_right ? rs_idx.rcount(nblock_right - 1) : 0;
3211 const bm::word_t* block = blockman_.get_block_ptr(i, j);
3228 cnt += block_count_to(block, nblock_right, nbit_right, rs_idx);
3240template<
typename Alloc>
3245 if (!blockman_.is_init())
3256template<
typename Alloc>
3268 const bm::word_t* block = blockman_.get_block(i0, j0);
3278 typename blocks_manager_type::block_count_func func(blockman_);
3285 cnt += func.count();
3302 if (nblock_left == nblock_right)
3308 word_t*** blk_root = blockman_.top_blocks_root();
3312 nblock_left+1, nblock_right-1, func);
3313 cnt += func.count();
3317 block = blockman_.get_block(i0, j0);
3337template<
typename Alloc>
3341 if (!blockman_.is_init())
3358 const bm::word_t* block = blockman_.get_block(i0, j0);
3360 if (nblock_left == nblock_right)
3380 block = blockman_.get_block(i0, j0);
3390 if (nblock_left <= nblock_right)
3392 unsigned i_from, j_from, i_to, j_to;
3396 bm::word_t*** blk_root = blockman_.top_blocks_root();
3398 for (
unsigned i = i_from; i <= i_to; ++i)
3406 unsigned j = (i == i_from) ? j_from : 0;
3413 }
while (++j < j_limit);
3421template<
typename Alloc>
3426 if (!blockman_.is_init())
3441 const bm::word_t* block = blockman_.get_block(i0, j0);
3443 if (nblock_left == nblock_right)
3463 block = blockman_.get_block(i0, j0);
3473 if (nblock_left <= nblock_right)
3475 unsigned i_from, j_from, i_to, j_to;
3479 bm::word_t*** blk_root = blockman_.top_blocks_root();
3482 if (i_from >= top_size)
3484 if (i_to >= top_size)
3486 i_to = unsigned(top_size-1);
3491 for (
unsigned i = i_from; i <= i_to; ++i)
3499 unsigned j = (i == i_from) ? j_from : 0;
3506 }
while (++j < j_limit);
3514template<
typename Alloc>
3528template<
typename Alloc>
3536 return this->
test(left);
3539 cnt_l = this->
count_to(left-1, rs_idx);
3542 cnt_r = this->
count_to(right, rs_idx);
3544 return cnt_r - cnt_l;
3549template<
typename Alloc>
3558 bm::word_t*** blk_root = blockman_.top_blocks_root();
3559 for (
unsigned i = 0; i < top_blocks; ++i)
3580 blockman_.set_block_ptr(i, j, 0);
3601template<
typename Alloc>
3612 if (!blockman_.top_blocks_ || i >= blockman_.top_block_size_)
3615 const bm::word_t*
const* blk_blk = blockman_.top_blocks_[i];
3618 if (!blk_blk)
return false;
3634template<
typename Alloc>
3641 if (!blockman_.is_init())
3648 temp_block = blockman_.check_allocate_tempblock();
3657 blockman_.optimize_tree(temp_block, opt_mode, stat);
3660 blockman_.stat_correction(stat);
3661 stat->
memory_used += (unsigned)(
sizeof(*
this) -
sizeof(blockman_));
3665 blockman_.free_temp_block();
3670template<
typename Alloc>
3684 for (; nblock_left <= nblock_right; ++nblock_left)
3688 if (i0 >= blockman_.top_block_size())
3690 bm::word_t* block = blockman_.get_block_ptr(i0, j0);
3692 blockman_.optimize_block(i0, j0, block, temp_block, opt_mode, 0);
3699template<
typename Alloc>
3703 if (!blockman_.is_init())
3713 ::memcpy(opt_glen, st.gap_levels, bm::gap_levels * sizeof(*opt_glen));
3716 st.gap_length + st.gap_blocks,
3725template<typename Alloc>
3730 if (blockman_.is_init())
3732 word_t*** blk_root = blockman_.top_blocks_root();
3734 blocks_manager_type::gap_level_func gl_func(blockman_, glevel_len);
3738 blockman_.set_glen(glevel_len);
3743template<
typename Alloc>
3747 unsigned top_blocks = blockman_.top_block_size();
3748 unsigned bvect_top_blocks = bv.blockman_.top_block_size();
3750 if (bvect_top_blocks > top_blocks) top_blocks = bvect_top_blocks;
3752 for (
unsigned i = 0; i < top_blocks; ++i)
3754 const bm::word_t*
const* blk_blk = blockman_.get_topblock(i);
3755 const bm::word_t*
const* arg_blk_blk = bv.blockman_.get_topblock(i);
3757 if (blk_blk == arg_blk_blk)
3767 arg_blk = arg_blk_blk ? arg_blk_blk[j] : 0;
3775 blk = blk_blk ? blk_blk[j] : 0;
3779 if (blk == arg_blk)
continue;
3784 if (!blk || !arg_blk)
3861template<
typename Alloc>
3866 unsigned top_blocks = blockman_.top_block_size();
3867 bm::word_t*** top_root = blockman_.top_blocks_root();
3869 if (!top_blocks || !top_root)
3871 return bvect.find(pos);
3874 unsigned i_to, j_to;
3876 unsigned bvect_top_blocks =
bvect.blockman_.top_block_size();
3877 if (!bvect_top_blocks || !arg_top_root)
3879 bool f = this->
find(pos);
3882 if (pos > search_to)
3888 if (bvect_top_blocks > top_blocks)
3889 top_blocks = bvect_top_blocks;
3893 if (i_to < top_blocks)
3894 top_blocks = i_to+1;
3896 for (
unsigned i = 0; i < top_blocks; ++i)
3898 const bm::word_t*
const* blk_blk = blockman_.get_topblock(i);
3899 const bm::word_t*
const* arg_blk_blk =
bvect.blockman_.get_topblock(i);
3901 if (blk_blk == arg_blk_blk)
3945 if (pos > search_to)
3965template<
typename Alloc>
3970 blockman_.swap(
bvect.blockman_);
3977template<
typename Alloc>
3984 ::memcpy(st->gap_levels,
3987 st->max_serialize_mem = unsigned(
sizeof(
bm::id_t) * 4);
3988 unsigned top_size = blockman_.top_block_size();
3990 size_t blocks_mem =
sizeof(blockman_);
3993 blocks_mem +=
sizeof(
bm::word_t**) * top_size;
3994 bm::word_t*** blk_root = blockman_.top_blocks_root();
3998 for (
unsigned i = 0; i < top_size; ++i)
4000 const bm::word_t*
const* blk_blk = blk_root[i];
4007 blk_blk = blk_root[i];
4014 st->ptr_sub_blocks++;
4028 st->add_gap_block(cap, len, level);
4031 st->add_bit_block();
4036 size_t full_null_size = blockman_.calc_serialization_null_full();
4037 st->max_serialize_mem += full_null_size;
4041 size_t safe_inc = st->max_serialize_mem / 10;
4042 if (!safe_inc) safe_inc = 256;
4043 st->max_serialize_mem += safe_inc;
4046 st->memory_used += unsigned(
sizeof(*
this) -
sizeof(blockman_));
4048 st->memory_used += blocks_mem;
4050 st->memory_used +=
sizeof(
typename blocks_manager_type::arena);
4057template<
typename Alloc>
4062 unsigned top_size = blockman_.top_block_size();
4063 bm::word_t*** blk_root = blockman_.top_blocks_root();
4066 for (
unsigned i = 0; i < top_size; ++i)
4068 const bm::word_t*
const* blk_blk = blk_root[i];
4087template<
class Alloc>
4093 if (!ids || !ids_size)
4095 if (!blockman_.is_init())
4096 blockman_.init_tree();
4098 import(ids, ids_size, so);
4104template<
class Alloc>
4110 if (!ids || !ids_size || !blockman_.is_init())
4116 bv_tmp.
import(ids, ids_size, so);
4134template<
class Alloc>
4140 blockman_.destroy_arena();
4143 blockman_.set_all_zero(free_mem);
4148template<
class Alloc>
4154 if (!ids || !ids_size || !blockman_.is_init())
4159 bv_tmp.
import(ids, ids_size, so);
4176template<
class Alloc>
4187template<
class Alloc>
4198template<
class Alloc>
4201 if (val == condition)
return false;
4212template<
class Alloc>
4219 if (!blockman_.is_init())
4220 blockman_.init_tree();
4226template<
class Alloc>
4232 if (!blockman_.is_init())
4233 blockman_.init_tree();
4244template<
class Alloc>
4264 if (nblock == nblock_end)
4272 blockman_.reserve_top_blocks(i+1);
4291 }
while (start < size_in);
4296template<
class Alloc>
4312 if (nblock == nblock_end)
4316 if (opt_flag && nbit == 65535)
4338 }
while (start < size_in);
4347 nblock_end += bool(nbit == 65535);
4353 }
while (nblock < nblock_end);
4362template<
class Alloc>
4371 blockman_.check_allocate_block(nblock, 1, 0, &block_type,
4377 if (stop-start == 1)
4383 blk = blockman_.deoptimize_block(nblock);
4390 if (new_blocks_strat_ ==
BM_GAP)
4395 blockman_.optimize_block(i0, j0, blk, temp_blk,
opt_compress, 0);
4405template<
class Alloc>
4416 unsigned nbit1, nbit2;
4424 block1 = blockman_.get_block_ptr(i0, j0);
4436 block2 = blockman_.get_block_ptr(i0, j0);
4437 if (block1 == block2)
4447 bool b1 = block1[nword1] & (1u << nbit1);
4448 bool b2 = block1[nword2] & (1u << nbit2);
4451 nbit1 = 1u << nbit1; nbit2 = 1u << nbit2;
4452 auto w = block1[nword1];
4453 (b2) ? w |= nbit1 : w &= ~nbit1;
4456 (b1) ? w |= nbit2 : w &= ~nbit2;
4465 block1 = blockman_.get_block_ptr(i0, j0);
4467 block2 = blockman_.get_block_ptr(i0, j0);
4469 if (block1 == block2)
4473 unsigned cpos1{ 0 }, cpos2{ 0 };
4474 bool b1, b2, b1real, b2real;
4478 b1 =
false; b1real =
false;
4483 b1 =
true; b1real =
false;
4505 b2 =
false; b2real =
false;
4510 b2 =
true; b2real =
false;
4537 unsigned new_len, old_len;
4538 unsigned is_set = b1;
4541 if (old_len < new_len)
4543 unsigned threshold =
bm::gap_limit(gblk1, blockman_.glen());
4544 if (new_len > threshold)
4545 blockman_.extend_gap_block(nb1, gblk1);
4552 auto w = block1[nword1];
4553 (b2) ? w |= nbit1 : w &= ~nbit1;
4566 unsigned new_len, old_len;
4567 unsigned is_set = b2;
4570 if (old_len < new_len)
4572 unsigned threshold =
bm::gap_limit(gblk2, blockman_.glen());
4573 if (new_len > threshold)
4574 blockman_.extend_gap_block(nb2, gblk2);
4581 auto w = block2[nword2];
4582 (b1) ? w |= nbit2 : w &= ~nbit2;
4596template<
class Alloc>
4607 blockman_.check_allocate_block(nblock,
4630 val = ~(*word & mask);
4636 val = ~(*word & mask);
4645template<
class Alloc>
4651 const bool val =
true;
4658 blockman_.check_allocate_block(nblock,
4673 blk[nword] |= (1u << nbit);
4679template<
class Alloc>
4685 const bool val =
false;
4690 blockman_.check_allocate_block(nblock,
4706 blk[nword] &= ~(1u << nbit);
4713template<
class Alloc>
4718 unsigned is_set, new_len, old_len;
4721 if (old_len < new_len)
4723 unsigned threshold =
bm::gap_limit(gap_blk, blockman_.glen());
4724 if (new_len > threshold)
4725 blockman_.extend_gap_block(nblock, gap_blk);
4732template<
class Alloc>
4736 unsigned new_len, old_len;
4739 if (old_len < new_len)
4741 unsigned threshold =
bm::gap_limit(gap_blk, blockman_.glen());
4742 if (new_len > threshold)
4743 blockman_.extend_gap_block(nblock, gap_blk);
4750template<
class Alloc>
4757 blockman_.check_allocate_block(nblock,
4775 is_set = ((*word) & mask);
4783template<
class Alloc>
4792 blockman_.check_allocate_block(nblock,
4802 if (block_type == 1)
4805 unsigned is_set =
gap_block_set(gap_blk, val, nblock, nbit);
4815 bool is_set = ((*word) & mask) != 0;
4817 if (is_set != condition)
4835template<
class Alloc>
4844 blockman_.check_allocate_block(nblock,
4854 if (block_type == 1)
4859 bool new_val = val & old_val;
4860 if (new_val != old_val)
4862 unsigned is_set =
gap_block_set(gap_blk, val, nblock, nbit);
4874 bool is_set = ((*word) & mask) != 0;
4876 bool new_val = is_set & val;
4895template<
class Alloc>
4910template<
class Alloc>
4915 unsigned top_blocks = blockman_.top_block_size();
4918 for (
unsigned i = top_blocks-1;
true; --i)
4920 const bm::word_t*
const* blk_blk = blockman_.get_topblock(i);
4949 pos = base_idx + block_pos;
4967template<
class Alloc>
4971 if (!blockman_.is_init())
4976 return this->
test(from);
4983 const bm::word_t* block = blockman_.get_block_ptr(i0, j0);
4987 unsigned found_nbit;
4993 pos = base_idx + found_nbit;
5004 const bm::word_t*
const* blk_blk = blockman_.get_topblock(i0);
5009 for (
unsigned j = j0;
true; --j)
5032 pos = base_idx + block_pos;
5045 for (
unsigned i = i0;
true; --i)
5047 blk_blk = blockman_.get_topblock(i);
5075 pos = base_idx + block_pos;
5092template<
class Alloc>
5097 unsigned top_blocks = blockman_.top_block_size();
5098 for (
unsigned i = 0; i < top_blocks; ++i)
5100 const bm::word_t*
const* blk_blk = blockman_.get_topblock(i);
5114 found =
true; block_pos = 0;
5126 pos = base_idx + block_pos;
5138template<
class Alloc>
5142 bool found =
find(in_first);
5151 in_first = in_last = 0;
5158template<
class Alloc>
5167 if (!rank_in || !blockman_.is_init())
5172 unsigned bit_pos = 0;
5177 const bm::word_t* block = blockman_.get_block(nb, &no_more_blocks);
5182 unsigned cnt = blockman_.block_bitcount(block);
5211template<
class Alloc>
5222 !blockman_.is_init() ||
5223 (rs_idx.count() < rank_in))
5231 nb = rs_idx.find(rank_in);
5232 BM_ASSERT(rs_idx.rcount(nb) >= rank_in);
5234 rank_in -= rs_idx.rcount(nb-1);
5238 unsigned bit_pos = 0;
5240 for (; nb < rs_idx.get_total(); ++nb)
5243 const bm::word_t* block = blockman_.get_block(nb, &no_more_blocks);
5248 unsigned block_bc = rs_idx.count(nb);
5249 if (rank_in <= block_bc)
5251 nbit = rs_idx.select_sub_range(nb, rank_in);
5257 rank_in -= block_bc;
5282template<
class Alloc>
5289 !blockman_.is_init() ||
5290 (rs_idx.count() < rank_in))
5295 bool found = rs_idx.find(&rank_in, &nb, &sub_range_from);
5301 const bm::word_t* block = blockman_.get_block_ptr(i, j);
5305 unsigned bit_pos = 0;
5309 bit_pos = sub_range_from + unsigned(rank_in) - 1;
5322template<
class Alloc>
5326 if (!blockman_.is_init())
5333 const bm::word_t* block = blockman_.get_block_ptr(i, j);
5363 for (; i < top_blocks; ++i)
5365 const bm::word_t*
const* blk_blk = blockman_.get_topblock(i);
5380 found =
true; block_pos = 0;
5392 prev = base_idx + block_pos;
5406template<
class Alloc>
5411 if (!blockman_.is_init())
5422template<
class Alloc>
5431template<
class Alloc>
5435 bool b = this->
test(0);
5442template<
class Alloc>
5450 if (!blockman_.is_init())
5468 bm::word_t* block = blockman_.get_block_ptr(i, j);
5476 goto insert_bit_check;
5484 unsigned new_block_len;
5487 unsigned threshold =
bm::gap_limit(gap_blk, blockman_.glen());
5488 if (new_block_len > threshold)
5489 blockman_.extend_gap_block(nb, gap_blk);
5497 block = blockman_.deoptimize_block(nb);
5518 unsigned top_blocks = blockman_.top_block_size();
5519 bm::word_t*** blk_root = blockman_.top_blocks_root();
5525 if (i >= top_blocks)
5532 blk_blk = blk_root[i];
5543 blockman_.check_allocate_block(nblock, 0, 0, &block_type,
false);
5544 block[0] |= carry_over;
5547 blk_root = blockman_.top_blocks_root();
5548 blk_blk = blk_root[i];
5549 top_blocks = blockman_.top_block_size();
5560 blk_blk = blockman_.check_alloc_top_subblock(i);
5574 carry_over = 0; block = 0;
5579 if (0 != (block = blk_blk[j]))
5594 block = blockman_.deoptimize_block(nblock);
5595 block[0] <<= (carry_over = 1);
5604 block = blockman_.deoptimize_block(nblock);
5608 unsigned new_block_len;
5612 unsigned threshold =
bm::gap_limit(gap_blk, blockman_.glen());
5613 if (new_block_len > threshold)
5614 blockman_.extend_gap_block(nblock, gap_blk);
5629 blockman_.zero_block(nblock);
5633 blockman_.zero_block(nblock);
5645template<
class Alloc>
5651 if (!blockman_.is_init())
5663 bm::word_t* block = blockman_.get_block_ptr(i, j);
5664 bool carry_over = test_first_block_bit(nb+1);
5669 block = blockman_.check_allocate_block(nb,
BM_BIT);
5676 block = blockman_.deoptimize_block(nb);
5688 unsigned top_blocks = blockman_.top_block_size();
5689 bm::word_t*** blk_root = blockman_.top_blocks_root();
5695 if (i >= top_blocks)
5698 blk_blk = blk_root[i];
5710 bool carry_over = 0;
5714 carry_over = this->
test(co_idx);
5718 blk_blk = blockman_.check_alloc_top_subblock(i);
5722 blk_blk = blockman_.check_alloc_top_subblock(i);
5730 bool carry_over = 0;
5735 bool no_blocks = (j == 0);
5738 if (0 != (block = blk_blk[j]))
5748 blockman_.zero_block(i, j-1);
5756 carry_over = test_first_block_bit(nblock+1);
5761 block = blockman_.deoptimize_block(nblock);
5769 carry_over = test_first_block_bit(nblock+1);
5770 unsigned new_block_len;
5774 unsigned threshold =
bm::gap_limit(gap_blk, blockman_.glen());
5775 if (new_block_len > threshold)
5776 blockman_.extend_gap_block(nblock, gap_blk);
5780 blockman_.zero_block(i, j);
5788 blockman_.zero_block(i, j);
5791 if (carry_over && nblock)
5804template<
class Alloc>
5815template<
class Alloc>
5820 if (!bv.blockman_.is_init())
5829 unsigned top_blocks = blockman_.top_block_size();
5830 if (size_ < bv.size_)
5834 unsigned arg_top_blocks = bv.blockman_.top_block_size();
5835 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
5838 bm::word_t*** blk_root = blockman_.top_blocks_root();
5839 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
5841 for (
unsigned i = 0; i < top_blocks; ++i)
5844 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
5845 if (blk_blk == blk_blk_arg || !blk_blk_arg)
5847 if (!blk_blk && blk_blk_arg)
5851 blk_root[i] = blk_root_arg[i];
5852 blk_root_arg[i] = 0;
5859 blockman_.deallocate_top_subblock(i);
5869 blk = blk_blk[j]; arg_blk = blk_blk_arg[j];
5872 if (!blk && arg_blk)
5874 blockman_.set_block_ptr(i, j, arg_blk);
5875 bv.blockman_.set_block_ptr(i, j, 0);
5888template<
class Alloc>
5896 bm::word_t* blk = blockman_.get_block_ptr(i0, j0);
5904template<
class Alloc>
5912 if (blockman_.is_init())
5913 blockman_.deinit_tree();
5934 unsigned top_blocks1 = bman1.top_block_size();
5935 unsigned top_blocks2 = bman2.top_block_size();
5936 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
5937 top_blocks = blockman_.reserve_top_blocks(top_blocks);
5940 if (size_ < bv2.size_)
5943 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
5949 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
5956 for (
unsigned i = 0; i < top_blocks; ++i)
5958 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
5959 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
5961 if (blk_blk_arg1 == blk_blk_arg2)
5964 bm::word_t*** blk_root = blockman_.top_blocks_root();
5965 blk_root[i] = blk_blk_arg1;
5971 bm::word_t*** blk_root = blockman_.top_blocks_root();
5975 bm::word_t** blk_blk = blockman_.alloc_top_subblock(i);
5976 bool any_blocks =
false;
5980 const bm::word_t* arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
5981 const bm::word_t* arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
5982 if (arg_blk1 == arg_blk2 && !arg_blk1)
5986 blockman_.optimize_bit_block(i, j, opt_mode);
5987 any_blocks |= bool(blk_blk[j]);
5991 blockman_.free_top_subblock(i);
5996 blockman_.free_temp_block();
6003template<
class Alloc>
6011 if (blockman_.is_init())
6012 blockman_.deinit_tree();
6029 if (!bman1.is_init())
6035 if (!bman2.is_init())
6041 unsigned top_blocks1 = bman1.top_block_size();
6042 unsigned top_blocks2 = bman2.top_block_size();
6043 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
6044 top_blocks = blockman_.reserve_top_blocks(top_blocks);
6047 if (size_ < bv2.size_)
6050 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
6051 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
6053 for (
unsigned i = 0; i < top_blocks; ++i)
6055 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
6056 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
6058 if (blk_blk_arg1 == blk_blk_arg2)
6063 blockman_.deallocate_top_subblock(i);
6071 bm::word_t*** blk_root= blockman_.top_blocks_root();
6084 bm::word_t** blk_blk = blockman_.alloc_top_subblock(i);
6085 bool any_blocks =
false;
6090 arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
6091 arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
6093 if ((arg_blk1 == arg_blk2) &&
6099 blockman_.optimize_bit_block(i, j, opt_mode);
6100 any_blocks |= bool(blk_blk[j]);
6104 blockman_.free_top_subblock(i);
6109 blockman_.free_temp_block();
6116template<
class Alloc>
6139 if (blockman_.is_init())
6140 blockman_.deinit_tree();
6144 if (!bman1.is_init() || !bman2.is_init())
6147 unsigned top_blocks1 = bman1.top_block_size();
6148 unsigned top_blocks2 = bman2.top_block_size();
6149 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
6150 top_blocks = blockman_.reserve_top_blocks(top_blocks);
6153 if (size_ < bv2.size_)
6156 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
6157 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
6159 for (
unsigned i = 0; i < top_blocks; ++i)
6161 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
6162 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
6164 if (blk_blk_arg1 == blk_blk_arg2)
6170 bm::word_t*** blk_root = blockman_.top_blocks_root();
6180 bm::word_t** blk_blk = blockman_.alloc_top_subblock(i);
6181 bool any_blocks =
false;
6186 arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
6187 arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
6189 if ((arg_blk1 == arg_blk2) && !arg_blk1)
6194 blockman_.optimize_bit_block(i, j, opt_mode);
6195 any_blocks |= bool(blk_blk[j]);
6199 blockman_.free_top_subblock(i);
6204 blockman_.free_temp_block();
6211template<
class Alloc>
6237 if (!bman1.is_init() || !bman2.is_init())
6240 unsigned top_blocks1 = bman1.top_block_size();
6241 unsigned top_blocks2 = bman2.top_block_size();
6242 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
6243 top_blocks = blockman_.reserve_top_blocks(top_blocks);
6246 if (new_size < bv2.size_)
6247 new_size = bv2.size_;
6248 if (size_ < new_size)
6251 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
6252 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
6254 for (
unsigned i = 0; i < top_blocks; ++i)
6256 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
6257 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
6259 if (blk_blk_arg1 == blk_blk_arg2)
6265 bm::word_t*** blk_root = blockman_.top_blocks_root();
6267 blockman_.deallocate_top_subblock(i);
6278 bm::word_t** blk_blk = blockman_.check_alloc_top_subblock(i);
6279 bool any_blocks =
false;
6289 arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
6290 arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
6292 if ((arg_blk1 == arg_blk2) && !arg_blk1)
6307 blockman_.optimize_bit_block(i, j, opt_mode);
6309 any_blocks |= bool(blk_blk[j]);
6314 blockman_.free_top_subblock(i);
6319 blockman_.free_temp_block();
6328template<
class Alloc>
6336 if (blockman_.is_init())
6337 blockman_.deinit_tree();
6355 if (!bman1.is_init())
6359 if (!bman2.is_init())
6365 unsigned top_blocks1 = bman1.top_block_size();
6366 unsigned top_blocks2 = bman2.top_block_size();
6367 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
6368 top_blocks = blockman_.reserve_top_blocks(top_blocks);
6371 if (size_ < bv2.size_)
6374 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
6375 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
6377 for (
unsigned i = 0; i < top_blocks; ++i)
6379 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
6380 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
6382 if (blk_blk_arg1 == blk_blk_arg2)
6389 bm::word_t** blk_blk = blockman_.alloc_top_subblock(i);
6390 bool any_blocks =
false;
6394 const bm::word_t* arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
6395 const bm::word_t* arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
6396 if ((arg_blk1 == arg_blk2) && !arg_blk1)
6401 blockman_.optimize_bit_block(i, j, opt_mode);
6402 any_blocks |= bool(blk_blk[j]);
6406 blockman_.free_top_subblock(i);
6411 blockman_.free_temp_block();
6419#define BM_OR_OP(x) \
6421 blk = blk_blk[j+x]; arg_blk = blk_blk_arg[j+x]; \
6422 if (blk != arg_blk) \
6423 combine_operation_block_or(i, j+x, blk, arg_blk); \
6426template<
class Alloc>
6429 if (!bv.blockman_.is_init())
6432 unsigned top_blocks = blockman_.top_block_size();
6433 if (size_ < bv.size_)
6436 unsigned arg_top_blocks = bv.blockman_.top_block_size();
6437 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
6439 bm::word_t*** blk_root = blockman_.top_blocks_root();
6440 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
6442 for (
unsigned i = 0; i < top_blocks; ++i)
6445 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
6446 if (blk_blk == blk_blk_arg || !blk_blk_arg)
6452 blockman_.deallocate_top_subblock(i);
6457 blk_blk = blockman_.alloc_top_subblock(i);
6464 #if defined(BM64_AVX2) || defined(BM64_AVX512)
6473 #elif defined(BM64_SSE4)
6492#define BM_XOR_OP(x) \
6494 blk = blk_blk[j+x]; arg_blk = blk_blk_arg[j+x]; \
6495 combine_operation_block_xor(i, j+x, blk, arg_blk); \
6498template<
class Alloc>
6501 if (!bv.blockman_.is_init())
6503 if (!blockman_.is_init())
6509 unsigned top_blocks = blockman_.top_block_size();
6510 if (size_ < bv.size_)
6514 unsigned arg_top_blocks = bv.blockman_.top_block_size();
6515 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
6517 bm::word_t*** blk_root = blockman_.top_blocks_root();
6518 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
6520 for (
unsigned i = 0; i < top_blocks; ++i)
6522 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
6526 if (blk_blk == blk_blk_arg)
6536 blk_blk = blockman_.check_alloc_top_subblock(i);
6549 blk_blk = blockman_.alloc_top_subblock(i);
6556 #if defined(BM64_AVX2) || defined(BM64_AVX512)
6565 #elif defined(BM64_SSE4)
6585#define BM_AND_OP(x) if (0 != (blk = blk_blk[j+x])) \
6587 if (0 != (arg_blk = blk_blk_arg[j+x])) \
6589 combine_operation_block_and(i, j+x, blk, arg_blk); \
6590 if (opt_mode == opt_compress) \
6591 blockman_.optimize_bit_block(i, j+x, opt_mode); \
6594 blockman_.zero_block(i, j+x); \
6597template<
class Alloc>
6601 if (!blockman_.is_init())
6603 if (!bv.blockman_.is_init())
6609 unsigned top_blocks = blockman_.top_block_size();
6610 if (size_ < bv.size_)
6613 unsigned arg_top_blocks = bv.blockman_.top_block_size();
6614 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
6617 bm::word_t*** blk_root = blockman_.top_blocks_root();
6618 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
6620 for (
unsigned i = 0; i < top_blocks; ++i)
6625 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
6635 blockman_.zero_block(i, j);
6636 blockman_.deallocate_top_subblock(i);
6644 blk_blk = blockman_.check_alloc_top_subblock(i);
6651 #if defined(BM64_AVX2) || defined(BM64_AVX512)
6660 #elif defined(BM64_SSE4)
6680#define BM_SUB_OP(x) \
6681 if ((0 != (blk = blk_blk[j+x])) && (0 != (arg_blk = blk_blk_arg[j+x]))) \
6682 combine_operation_block_sub(i, j+x, blk, arg_blk);
6685template<
class Alloc>
6688 if (!blockman_.is_init() || !bv.blockman_.is_init())
6691 unsigned top_blocks = blockman_.top_block_size();
6692 if (size_ < bv.size_)
6695 unsigned arg_top_blocks = bv.blockman_.top_block_size();
6696 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
6698 bm::word_t*** blk_root = blockman_.top_blocks_root();
6699 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
6701 for (
unsigned i = 0; i < top_blocks; ++i)
6704 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
6705 if (!blk_blk || !blk_blk_arg)
6710 blockman_.deallocate_top_subblock(i);
6714 blk_blk = blockman_.check_alloc_top_subblock(i);
6721 #if defined(BM64_AVX2) || defined(BM64_AVX512)
6730 #elif defined(BM64_SSE4)
6749template<
class Alloc>
6754 if (!blockman_.is_init())
6758 blockman_.init_tree();
6761 unsigned top_blocks = blockman_.top_block_size();
6762 unsigned arg_top_blocks = bv.blockman_.top_block_size();
6764 if (arg_top_blocks > top_blocks)
6765 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
6767 if (size_ < bv.size_)
6771 blockman_.reserve_top_blocks(arg_top_blocks);
6772 top_blocks = blockman_.top_block_size();
6775 if (size_ > bv.size_)
6780 if (arg_top_blocks < top_blocks)
6783 top_blocks = arg_top_blocks;
6788 bm::word_t*** blk_root = blockman_.top_blocks_root();
6789 unsigned block_idx = 0; (void) block_idx;
6793 top_blocks = blockman_.top_block_size();
6794 if (top_blocks < bv.blockman_.top_block_size())
6798 top_blocks = bv.blockman_.top_block_size();
6802 for (i = 0; i < top_blocks; ++i)
6812 const bm::word_t*
const* bvbb = bv.blockman_.get_topblock(i);
6822 const bm::word_t* arg_blk = bv.blockman_.get_block(i, j);
6840 const bm::word_t* arg_blk = bv.blockman_.get_block(i, j);
6847 blockman_.zero_block(i, j);
6858 const bm::word_t* arg_blk = bv.blockman_.get_block(i, j);
6871template<
class Alloc>
6880 blockman_.clone_assign_block(i, j, arg_blk2);
6885 blockman_.clone_assign_block(i, j, arg_blk1);
6897 if (is_gap1 | is_gap2)
6899 if (is_gap1 & is_gap2)
6905 blockman_.clone_gap_block(i, j, tmp_buf, res_len);
6913 arg_block = arg_blk2;
6918 arg_block = arg_blk1;
6921 bm::word_t* block = blockman_.clone_assign_block(i, j, arg_block);
6929 bm::word_t* block = blockman_.borrow_tempblock();
6930 blockman_.set_block_ptr(i, j, block);
6936 blockman_.return_tempblock(block);
6944template<
class Alloc>
6954 blockman_.clone_assign_block(i, j, arg_blk2);
6959 blockman_.clone_assign_block(i, j, arg_blk1);
6965 blockman_.clone_assign_block(i, j, arg_blk2,
true);
6971 blockman_.clone_assign_block(i, j, arg_blk1,
true);
6978 if (is_gap1 | is_gap2)
6980 if (is_gap1 & is_gap2)
6986 blockman_.clone_gap_block(i, j, tmp_buf, res_len);
6994 arg_block = arg_blk2;
6999 arg_block = arg_blk1;
7002 bm::word_t* block = blockman_.clone_assign_block(i, j, arg_block);
7010 bm::word_t* block = blockman_.borrow_tempblock();
7011 blockman_.set_block_ptr(i, j, block);
7016 blockman_.set_block_ptr(i, j, 0);
7017 blockman_.return_tempblock(block);
7026template<
class Alloc>
7034 if (!arg_blk1 || !arg_blk2)
7043 blockman_.clone_assign_block(i, j, arg_blk2);
7048 blockman_.clone_assign_block(i, j, arg_blk1);
7055 if (is_gap1 | is_gap2)
7057 if (is_gap1 & is_gap2)
7063 blockman_.clone_gap_block(i, j, tmp_buf, res_len);
7071 arg_block = arg_blk2;
7076 arg_block = arg_blk1;
7079 bm::word_t* block = blockman_.clone_assign_block(i, j, arg_block);
7085 blockman_.set_block_ptr(i, j, 0);
7086 blockman_.return_tempblock(block);
7095 bm::word_t* block = blockman_.borrow_tempblock();
7096 blockman_.set_block_ptr(i, j, block);
7101 blockman_.set_block_ptr(i, j, 0);
7102 blockman_.return_tempblock(block);
7111template<
class Alloc>
7118 bm::word_t* blk = blockman_.get_block_ptr(i, j);
7121 if (!arg_blk1 || !arg_blk2)
7126 blockman_.zero_block(i, j);
7141 blk = blockman_.deoptimize_block_no_check(blk, i, j);
7146 if (is_gap1 | is_gap2)
7148 if (is_gap1 & is_gap2)
7157 blockman_.return_tempblock(blk);
7168 arg_block = arg_blk2;
7173 arg_block = arg_blk1;
7176 bm::word_t* tmp_blk = blockman_.check_allocate_tempblock();
7183 blockman_.return_tempblock(blk);
7195 if (digest == digest_and)
7200 blockman_.return_tempblock(blk);
7211template<
class Alloc>
7223 blockman_.clone_assign_block(i, j, arg_blk1);
7234 if (is_gap1 | is_gap2)
7236 if (is_gap1 & is_gap2)
7242 blockman_.clone_gap_block(i, j, tmp_buf, res_len);
7248 bm::word_t* block = blockman_.borrow_tempblock();
7249 blockman_.set_block_ptr(i, j, block);
7254 blockman_.set_block_ptr(i, j, 0);
7255 blockman_.return_tempblock(block);
7261 bm::word_t* block = blockman_.clone_assign_block(i, j, arg_blk1);
7269 bm::word_t* block = blockman_.borrow_tempblock();
7270 blockman_.set_block_ptr(i, j, block);
7275 blockman_.set_block_ptr(i, j, 0);
7276 blockman_.return_tempblock(block);
7285template<
class Alloc>
7287 unsigned i,
unsigned j,
7297 blockman_.zero_block(i, j);
7311 blockman_.assign_gap_check(i, j, res, ++res_len, blk, tmp_buf);
7316 bm::word_t* new_blk = blockman_.get_allocator().alloc_bit_block();
7320 blockman_.get_allocator().free_gap_block(gap_blk, blockman_.glen());
7321 blockman_.set_block_ptr(i, j, new_blk);
7333 blk = blockman_.clone_gap_block(arg_gap, gap);
7334 blockman_.set_block(i, j, blk, gap);
7346 blk = blockman_.alloc_.alloc_bit_block();
7348 blockman_.set_block_ptr(i, j, blk);
7356 blockman_.get_allocator().free_bit_block(blk);
7364template<
class Alloc>
7366 unsigned i,
unsigned j,
7382 blockman_.set_block_ptr(i, j, 0);
7398 blk = blockman_.get_allocator().alloc_bit_block();
7400 blockman_.set_block_ptr(i, j, blk);
7416 blockman_.assign_gap_check(i, j, res, ++res_len, blk, tmp_buf);
7421 bm::word_t* new_blk = blockman_.get_allocator().alloc_bit_block();
7425 blockman_.get_allocator().free_gap_block(gap_blk, blockman_.glen());
7426 blockman_.set_block_ptr(i, j, new_blk);
7438 blk = blockman_.clone_gap_block(arg_gap, gap);
7439 blockman_.set_block(i, j, blk, gap);
7450 blk = blockman_.alloc_.alloc_bit_block();
7452 blockman_.set_block_ptr(i, j, blk);
7459 blockman_.get_allocator().free_bit_block(blk);
7460 blockman_.set_block_ptr(i, j, 0);
7468template<
class Alloc>
7491 blockman_.assign_gap_check(i, j, res, ++res_len, blk, tmp_buf);
7496 bm::word_t* new_blk = blockman_.get_allocator().alloc_bit_block();
7507 blockman_.get_allocator().free_bit_block(new_blk);
7514 blockman_.get_allocator().free_gap_block(gap_blk, blockman_.glen());
7515 blockman_.set_block_ptr(i, j, new_blk);
7526 blockman_.zero_block(i, j);
7534 bm::word_t* new_blk = blockman_.clone_gap_block(arg_gap, is_new_gap);
7538 blockman_.set_block_ptr(i, j, new_blk);
7546 blockman_.zero_block(i, j);
7556 bm::word_t* new_blk = blockman_.get_allocator().alloc_bit_block();
7558 blockman_.set_block_ptr(i, j, new_blk);
7565 blockman_.get_allocator().free_bit_block(blk);
7566 blockman_.set_block_ptr(i, j, 0);
7572template<
class Alloc>
7580 blockman_.zero_block(i, j);
7599 BM_ASSERT(!(res == tmp_buf && res_len == 0));
7601 blockman_.assign_gap_check(i, j, res, ++res_len, blk, tmp_buf);
7605 blk = blockman_.convert_gap2bitset(i, j, gap_blk);
7617 blockman_.zero_block(i, j);
7632 if (!dst || !arg_blk)
7636 if (ret && ret == arg_blk)
7638 ret = blockman_.get_allocator().alloc_bit_block();
7644 blockman_.set_block_ptr(i, j, ret);
7646 blockman_.get_allocator().free_bit_block(dst);
7652template<
class Alloc>
7667 if (!blk && arg_gap)
7669 blk = blockman_.clone_gap_block(
BMGAP_PTR(arg_blk), gap);
7670 blockman_.set_block(nb, blk, gap);
7689 BM_ASSERT(!(res == tmp_buf && res_len == 0));
7694 blockman_.zero_block(nb);
7696 blockman_.assign_gap(nb, res, ++res_len, blk, tmp_buf);
7709 blockman_.zero_block(nb);
7718 blk = blockman_.convert_gap2bitset(nb, gap_blk);
7755 if (dst == 0 && arg_blk == 0)
7767 ret = blockman_.get_allocator().alloc_bit_block();
7789 }
while (wrd_ptr < wrd_end);
7799 ret = blockman_.get_allocator().alloc_bit_block();
7806 if (ret && ret == arg_blk)
7808 ret = blockman_.get_allocator().alloc_bit_block();
7819 blockman_.set_block(nb, ret);
7822 blockman_.get_allocator().free_bit_block(dst);
7829template<
class Alloc>
7861 block = blockman_.get_block_ptr(i, j);
7868 if (nblock_left == nblock_right)
7880 blockman_.set_all_set(nb, nb_to-1);
7883 if (nb_to > nblock_right)
7887 block = blockman_.get_block_ptr(i, j);
7903template<
class Alloc>
7936 block = blockman_.get_block_ptr(i, j);
7944 if (nblock_left == nblock_right)
7946 nb = nblock_left + 1;
7956 blockman_.set_all_zero(nb, nb_to - 1u);
7959 if (nb_to > nblock_right)
7963 block = blockman_.get_block_ptr(i, j);
7980template<
class Alloc>
7985 if (!
bvect.blockman_.is_init())
7991 if (blockman_.is_init())
7993 blockman_.deinit_tree();
8003template<
class Alloc>
8017 if (found && (last > right))
8025template<
class Alloc>
8037 blockman_.copy(
bvect.blockman_, nblock_left, nblock_right);
8049 if (found && (last > right))
8057template<
class Alloc>
8068template<
class Alloc>
8072 throw std::bad_alloc();
8074 BM_THROW(BM_ERR_BADALLOC);
8082template<
class Alloc>
8087 block_descr_type* bdescr = &(this->
bdescr_);
8098 unsigned val = *(++(bdescr->
gap_.
ptr));
8104 val = *(++(bdescr->
gap_.
ptr));
8112 unsigned short idx = ++(bdescr->
bit_.
idx);
8113 if (idx < bdescr->bit_.cnt)
8121 if (decode_bit_group(bdescr))
8125 if (search_in_blocks())
8135template<
class Alloc>
8141 return this->
valid();
8145 block_descr_type* bdescr = &(this->
bdescr_);
8151 unsigned short idx = ++(bdescr->
bit_.
idx);
8152 if (idx < bdescr->bit_.cnt)
8161 if (!decode_bit_group(bdescr,
rank))
8176 unsigned int val = *(++(bdescr->
gap_.
ptr));
8183 val = *(++(bdescr->
gap_.
ptr));
8194 if (!search_in_blocks())
8201 return this->
valid();
8208template<
class Alloc>
8214 return this->
valid();
8217 size_type new_pos = this->
bv_->check_or_next(pos);
8230 this->
bv_->get_blocks_manager();
8233 this->
block_ = bman.get_block(i0, j0);
8239 block_descr_type* bdescr = &(this->
bdescr_);
8245 search_in_gapblock();
8248 return this->
valid();
8256 bdescr->
gap_.
ptr = gptr + gpos;
8272 search_in_bitblock();
8273 return this->
valid();
8286 nbit += 32 * parity;
8287 for (
unsigned i = 0; i < bdescr->
bit_.
cnt; ++i)
8290 return this->
valid();
8295 return this->
valid();
8300template<
class Alloc>
8306 if (!bman->is_init())
8312 bm::word_t*** blk_root = bman->top_blocks_root();
8316 for (i = 0; i < bman->top_block_size(); ++i)
8331 this->
block_ = blk_blk[j];
8340 if (search_in_gapblock())
8348 if (search_in_bitblock())
8359template<
class Alloc>
8361bvector<Alloc>::enumerator::decode_wave(block_descr_type* bdescr)
BMNOEXCEPT
8364 if (bdescr->bit_.cnt)
8366 bdescr->bit_.idx = 0;
8374template<
class Alloc>
8376bvector<Alloc>::enumerator::decode_bit_group(block_descr_type* bdescr)
BMNOEXCEPT
8379 for (; bdescr->bit_.ptr < block_end;)
8381 if (decode_wave(bdescr))
8384 this->
position_ += bdescr->bit_.bits[0];
8395template<
class Alloc>
8401 for (; bdescr->bit_.ptr < block_end;)
8404 #if defined(BMAVX512OPT) || defined(BMAVX2OPT) || defined(BM64OPT) || defined(BM64_SSE4)
8427 if (decode_wave(bdescr))
8430 this->
position_ += bdescr->bit_.bits[0];
8443template<
class Alloc>
8448 block_descr_type* bdescr = &(this->
bdescr_);
8449 bdescr->bit_.ptr = this->
block_;
8450 return decode_bit_group(bdescr);
8455template<
class Alloc>
8460 block_descr_type* bdescr = &(this->
bdescr_);
8462 unsigned bitval = *(bdescr->gap_.ptr) & 1;
8464 ++(bdescr->gap_.ptr);
8468 unsigned val = *(bdescr->gap_.ptr);
8472 if (bdescr->gap_.ptr ==
first)
8474 bdescr->gap_.gap_len = (
gap_word_t)(val + 1);
8478 bdescr->gap_.gap_len =
8487 ++(bdescr->gap_.ptr);
8494template<
class Alloc>
8501 bm::word_t*** blk_root = bman.top_blocks_root();
8502 for (; i < top_block_size; ++i)
8510 for (++i; i < top_block_size; ++i)
8519 if ((i < top_block_size) && blk_root[i])
8530 this->
block_ = blk_blk[j];
8539 if (search_in_gapblock())
8546 if (search_in_bitblock())
8561#pragma warning( pop )
#define BM_BORDER_TEST(blk, idx)
#define BM_DECLARE_TEMP_BLOCK(x)
Default SIMD friendly allocator.
#define VECT_XOR_ARR_2_MASK(dst, src, src_end, mask)
Constants, lookup tables and typedefs.
#define IS_FULL_BLOCK(addr)
#define IS_VALID_ADDR(addr)
#define BMSET_PTRGAP(ptr)
#define BLOCK_ADDR_SAN(addr)
#define BM_ASSERT_THROW(x, xerrcode)
#define FULL_BLOCK_FAKE_ADDR
#define FULL_SUB_BLOCK_REAL_ADDR
#define FULL_BLOCK_REAL_ADDR
Bit manipulation primitives (internal).
SIMD target version definitions.
bulk_insert_iterator(const insert_iterator &iit)
bulk_insert_iterator & operator=(bulk_insert_iterator &&ii) BMNOEXCEPT
size_type * buf_
bulk insert buffer
bvector_type * get_bvector() const BMNOEXCEPT
bm::sort_order sorted_
sort order hint
bulk_insert_iterator(const bulk_insert_iterator &iit)
bulk_insert_iterator(bulk_insert_iterator &&iit) BMNOEXCEPT
static size_type buf_size_max() BMNOEXCEPT
bulk_insert_iterator & operator++(int)
bvector_type::size_type size_type
size_type buf_size_
current buffer size
std::output_iterator_tag iterator_category
bvector_type * bvect_
target bvector
bulk_insert_iterator & operator++()
bulk_insert_iterator(bvector< Alloc > &bvect, bm::sort_order so=BM_UNKNOWN) BMNOEXCEPT
bulk_insert_iterator & operator=(const bulk_insert_iterator &ii)
bvector_type::size_type value_type
bulk_insert_iterator() BMNOEXCEPT
bm::bvector< Alloc > bvector_type
bulk_insert_iterator & operator=(size_type n)
bulk_insert_iterator & operator*()
Constant iterator designed to enumerate "ON" bits counted_enumerator keeps bitcount,...
counted_enumerator & operator=(const enumerator &en) BMNOEXCEPT
counted_enumerator(const enumerator &en) BMNOEXCEPT
size_type count() const BMNOEXCEPT
Number of bits ON starting from the .
counted_enumerator() BMNOEXCEPT
counted_enumerator operator++(int)
std::input_iterator_tag iterator_category
counted_enumerator & operator++() BMNOEXCEPT
std::input_iterator_tag iterator_category
enumerator(const bvector< Alloc > &bv, size_type pos=0) BMNOEXCEPT
Construct enumerator for bit vector.
void go_first() BMNOEXCEPT
Position enumerator to the first available bit.
size_type value() const BMNOEXCEPT
Get current position (value).
size_type operator*() const BMNOEXCEPT
Get current position (value).
enumerator operator++(int) BMNOEXCEPT
Advance enumerator forward to the next available bit. Possibly do NOT use this operator it is slower ...
enumerator & operator++() BMNOEXCEPT
Advance enumerator forward to the next available bit.
bool go_to(size_type pos) BMNOEXCEPT
go to a specific position in the bit-vector (or next)
enumerator(const bvector< Alloc > *bv) BMNOEXCEPT
Construct enumerator associated with a vector. Important: This construction creates unpositioned iter...
bool skip(size_type rank) BMNOEXCEPT
Skip specified number of bits from enumeration.
bool go_up() BMNOEXCEPT
Advance enumerator to the next available bit.
bool skip_to_rank(size_type rank) BMNOEXCEPT
Skip to specified relative rank.
bool advance() BMNOEXCEPT
enumerator(const bvector< Alloc > *bv, size_type pos) BMNOEXCEPT
Construct enumerator for bit vector.
Output iterator designed to set "ON" bits based on an input sequence of integers (bit indices).
insert_iterator & operator*()
insert_iterator & operator++(int)
bvector_type * get_bvector() const
insert_iterator(const insert_iterator &iit)
insert_iterator(bvector< Alloc > &bvect) BMNOEXCEPT
insert_iterator() BMNOEXCEPT
friend class bulk_insert_iterator
insert_iterator & operator++()
bm::bvector< Alloc > bvector_type
insert_iterator & operator=(const insert_iterator &ii)
insert_iterator & operator=(size_type n)
std::output_iterator_tag iterator_category
size_type position_
Bit position (bit idx).
iterator_base() BMNOEXCEPT
bool valid() const BMNOEXCEPT
Checks if iterator is still valid.
bool operator!=(const iterator_base &it) const BMNOEXCEPT
unsigned block_type_
Type of block. 0-Bit, 1-GAP.
bool compare_state(const iterator_base &ib) const BMNOEXCEPT
Compare FSMs for testing purposes.
union bm::bvector::iterator_base::block_descr bdescr_
void invalidate() BMNOEXCEPT
Turns iterator into an invalid state.
bool operator<(const iterator_base &it) const BMNOEXCEPT
bm::bvector< Alloc > * bv_
Pointer on parent bitvector.
bool operator==(const iterator_base &it) const BMNOEXCEPT
const bm::word_t * block_
Block pointer.(NULL-invalid).
bool operator>=(const iterator_base &it) const BMNOEXCEPT
bool operator>(const iterator_base &it) const BMNOEXCEPT
bool operator<=(const iterator_base &it) const BMNOEXCEPT
block_idx_type block_idx_
Block index.
bool operator==(const reference &ref) const BMNOEXCEPT
bool operator~() const BMNOEXCEPT
const reference & operator|=(bool value) const
const reference & operator=(const reference &ref) const
const reference & operator^=(bool value) const
const reference & operator=(bool value) const BMNOEXCEPT
const reference & operator&=(bool value) const
reference(const reference &ref) BMNOEXCEPT
bool operator!() const BMNOEXCEPT
reference(bvector< Alloc > &bv, size_type position) BMNOEXCEPT
Bitvector Bit-vector container with runtime compression of bits.
bm::bvector< Alloc > & bit_or(const bm::bvector< Alloc > &bv1, const bm::bvector< Alloc > &bv2, typename bm::bvector< Alloc >::optmode opt_mode=opt_none)
3-operand OR : this := bv1 OR bv2
void import_block(const size_type *ids, block_idx_type nblock, size_type start, size_type stop)
optmode
Optimization mode Every next level means additional checks (better compression vs time).
bool find(size_type &pos) const BMNOEXCEPT
Finds index of first 1 bit.
void swap(size_type idx1, size_type idx2)
swap values of bits
bvector & operator=(bvector< Alloc > &&bvect) BMNOEXCEPT
Move assignment operator.
bvector(const bvector< Alloc > &bvect, bm::finalization is_final)
Copy-constructor for mutable/immutable initialization.
bm::bvector< Alloc > & bit_and(const bm::bvector< Alloc > &bv, optmode opt_mode=opt_none)
2 operand logical AND
void operator&=(const bvector< Alloc > &bv)
friend class deserializer
bool combine_operation_block_and_or(unsigned i, unsigned j, const bm::word_t *arg_blk1, const bm::word_t *arg_blk2)
void set_gap_levels(const gap_word_t *glevel_len)
Sets new GAP lengths table. All GAP blocks will be reallocated to match the new scheme.
bvector(strategy strat=BM_BIT, const gap_word_t *glevel_len=bm::gap_len_table< true >::_len, size_type bv_size=bm::id_max, const Alloc &alloc=Alloc())
Constructs bvector class.
bool test(size_type n) const BMNOEXCEPT
returns true if bit n is set and false is bit n is 0.
allocator_pool_type * get_allocator_pool() BMNOEXCEPT
Get curent allocator pool (if set).
bvector(bvector< Alloc > &&bvect) BMNOEXCEPT
Move constructor.
void combine_operation_with_block(block_idx_type nb, bool gap, bm::word_t *blk, const bm::word_t *arg_blk, bool arg_gap, bm::operation opcode)
void merge(bm::bvector< Alloc > &bvect)
Merge/move content from another vector.
bool equal(const bvector< Alloc > &bvect) const BMNOEXCEPT
Equal comparison with an arg bit-vector.
void clear_range_no_check(size_type left, size_type right)
Clear range without validity/bounds checking.
bvector< Alloc > & invert()
Invert/NEG all bits It should be noted, invert is affected by size() if size is set - it only inverts...
void combine_operation_block_or(unsigned i, unsigned j, bm::word_t *blk, const bm::word_t *arg_blk)
bool set_bit_and(size_type n, bool val=true)
Sets bit n using bit AND with the provided value.
allocator_type::allocator_pool_type allocator_pool_type
void init(unsigned top_size, bool alloc_subs)
Explicit post-construction initialization. Must be called right after construction, strictly before any...
size_type size() const BMNOEXCEPT
Returns bvector's capacity (number of bits it can store).
size_type count() const BMNOEXCEPT
population count (count of ON bits)
void combine_operation_xor(const bm::bvector< Alloc > &bvect)
perform a set-algebra operation XOR
bool set_bit_conditional_impl(size_type n, bool val, bool condition)
bool clear_bit(size_type n)
Clears bit n.
bool operator[](size_type n) const BMNOEXCEPT
bool is_init() const BMNOEXCEPT
Return true if bvector is initialized at all.
bm::bvector< Alloc > & bit_and(const bm::bvector< Alloc > &bv1, const bm::bvector< Alloc > &bv2, typename bm::bvector< Alloc >::optmode opt_mode=opt_none)
3-operand AND : this := bv1 AND bv2
static void throw_bad_alloc()
bool insert(size_type n, bool value)
Insert bit into specified position All the vector content after insert position is shifted right.
bool combine_operation_block_or(unsigned i, unsigned j, const bm::word_t *arg_blk1, const bm::word_t *arg_blk2)
bm::bvector< Alloc > & bit_or(const bm::bvector< Alloc > &bv)
2 operand logical OR
bvector< Alloc > operator~() const
void sync_size()
Synchronize size if it got extended due to bulk import.
void clear(bool free_mem=true) BMNOEXCEPT
Clears every bit in the bitvector.
bool find_range(size_type &first, size_type &last) const BMNOEXCEPT
Finds dynamic range of bit-vector [first, last].
size_type check_or_next_extract(size_type prev)
check if specified bit is 1, and set it to 0 if specified bit is 0, scan for the next 1 and returns i...
bvector< Alloc > & set(size_type n, bool val=true)
Sets bit n if val is true, clears bit n if val is false.
bool any_range(size_type left, size_type right) const BMNOEXCEPT
bvector< dbg_alloc > & reset() BMNOEXCEPT
void resize(size_type new_size)
Change size of the bvector.
size_type check_or_next(size_type prev) const BMNOEXCEPT
reference operator[](size_type n)
bm::bvector< Alloc > & bit_sub(const bm::bvector< Alloc > &bv1, const bm::bvector< Alloc > &bv2, typename bm::bvector< Alloc >::optmode opt_mode=opt_none)
3-operand SUB : this := bv1 MINUS bv2 SUBtraction is also known as AND NOT
void set_allocator_pool(allocator_pool_type *pool_ptr) BMNOEXCEPT
Set allocator pool for local (non-threaded) cyclic memory operations (lots of alloc-free ops).
size_type extract_next(size_type prev)
Finds the number of the next bit ON and sets it to 0.
bvector(const bvector< Alloc > &bvect)
Copy constructor.
strategy get_new_blocks_strat() const BMNOEXCEPT
Returns blocks allocation strategy.
void optimize(bm::word_t *temp_block=0, optmode opt_mode=opt_compress, statistics *stat=0)
Optimize memory bitvector's memory allocation.
void forget_count() BMNOEXCEPT
size_type count_to_test(size_type n, const rs_index_type &rs_idx) const BMNOEXCEPT
popcount in [0..right] range if test(right) == true
void set_new_blocks_strat(strategy strat)
Sets new blocks allocation strategy.
bool find_reverse(size_type from, size_type &pos) const BMNOEXCEPT
Reverse finds next(prev) index of 1 bit.
size_type rank(size_type n, const rs_index_type &rs_idx) const BMNOEXCEPT
Returns rank of specified bit position (same as count_to()).
bvector(std::initializer_list< size_type > il)
Brace constructor.
bvector< Alloc > & flip()
Flips all bits.
void fill_alloc_digest(bvector< Alloc > &bv_blocks) const
Calculate blocks digest vector (for diagnostics purposes) 1 is added if NB is a real,...
bool any() const BMNOEXCEPT
Returns true if any bits in this bitset are set, and otherwise returns false.
friend class operation_deserializer
bool set_bit(size_type n, bool val=true)
Sets bit n.
insert_iterator inserter()
blocks_manager< Alloc > blocks_manager_type
bool empty() const BMNOEXCEPT
Returns true if the set is empty (no bits are set, otherwise returns false) Please note that this is ...
void keep_range(size_type left, size_type right)
Sets all bits to zero outside of the closed interval [left,right] Expected result: 00000....
void combine_operation_block_sub(unsigned i, unsigned j, bm::word_t *blk, const bm::word_t *arg_blk)
const blocks_manager_type & get_blocks_manager() const BMNOEXCEPT
get access to memory manager (internal) Use only if you are BitMagic library
void operator-=(const bvector< Alloc > &bv)
bool is_all_one_range(size_type left, size_type right) const BMNOEXCEPT
bool select(size_type rank, size_type &pos, const rs_index_type &rs_idx) const BMNOEXCEPT
select bit-vector position for the specified rank(bitcount)
Alloc get_allocator() const
bool and_bit_no_check(size_type n, bool val)
AND specified bit without checking preconditions (size, etc).
bvector_size_type size_type
size_type get_next(size_type prev) const BMNOEXCEPT
Finds the number of the next bit ON.
void clear_range(size_type left, size_type right)
Sets all bits to zero in the specified closed interval [left,right] Interval must be inside the bvect...
bool combine_operation_block_and(unsigned i, unsigned j, const bm::word_t *arg_blk1, const bm::word_t *arg_blk2)
void combine_operation_and(const bm::bvector< Alloc > &bvect, optmode opt_mode)
perform a set-algebra operation AND
bool find_first_mismatch(const bvector< Alloc > &bvect, size_type &pos, size_type search_to=bm::id_max) const BMNOEXCEPT
Find index of first bit different between this and the agr vector.
bool combine_operation_block_sub(unsigned i, unsigned j, const bm::word_t *arg_blk1, const bm::word_t *arg_blk2)
bool combine_operation_block_xor(unsigned i, unsigned j, const bm::word_t *arg_blk1, const bm::word_t *arg_blk2)
bvector< Alloc > & set()
Sets every bit in this bitset to 1.
bool shift_left()
Shift left by 1 bit, fill with zero return carry out.
void swap(bvector< Alloc > &bvect) BMNOEXCEPT
Exchanges content of bv and this bvector.
size_type count_range(size_type left, size_type right, const rs_index_type &rs_idx) const BMNOEXCEPT
void freeze()
Turn current vector to read-only (immutable vector).
bool find_rank(size_type rank, size_type from, size_type &pos) const BMNOEXCEPT
Find bit-vector position for the specified rank(bitcount).
void combine_operation_block_xor(unsigned i, unsigned j, bm::word_t *blk, const bm::word_t *arg_blk)
void optimize_gap_size()
Optimize sizes of GAP blocks.
bool set_bit_no_check(size_type n, bool val)
Set specified bit without checking preconditions (size, etc).
void init()
Explicit post-construction initialization. Must be called to make sure safe use of *_no_check() method...
bool shift_right()
Shift right by 1 bit, fill with zero return carry out.
bool find_rank(size_type rank, size_type from, size_type &pos, const rs_index_type &rs_idx) const BMNOEXCEPT
Find bit-vector position for the specified rank(bitcount).
bvector< Alloc > & set_range(size_type left, size_type right, bool value=true)
Sets all bits in the specified closed interval [left,right] Interval must be inside the bvector's siz...
void erase(size_type n)
Erase bit in the specified position All the vector content after erase position is shifted left.
size_type get_first() const BMNOEXCEPT
find first 1 bit in vector. Function may return 0 and this requires an extra check if bit 0 is actual...
void set(const size_type *ids, size_type ids_size, bm::sort_order so=bm::BM_UNKNOWN)
Set list of bits in this bitset to 1.
void copy_range(const bvector< Alloc > &bvect, size_type left, size_type right)
Copy all bits in the specified closed interval [left,right].
bool is_ro() const BMNOEXCEPT
Returns true if vector is read-only.
bm::bvector< Alloc > & bit_xor(const bm::bvector< Alloc > &bv)
2 operand logical XOR
enumerator get_enumerator(size_type pos) const
Returns enumerator pointing on specified or the next available bit.
bm::bvector< Alloc > & bit_sub(const bm::bvector< Alloc > &bv)
2 operand logical SUB(AND NOT). Also known as MINUS.
size_type rank_corrected(size_type n, const rs_index_type &rs_idx) const BMNOEXCEPT
Returns rank corrected by the requested border value (as -1).
void import(const size_type *ids, size_type ids_size, bm::sort_order sorted_idx)
Import integers (set bits).
bvector & operator=(const bvector< Alloc > &bvect)
Copy assignment operator.
void copy_range_no_check(const bvector< Alloc > &bvect, size_type left, size_type right)
void combine_operation_or(const bm::bvector< Alloc > &bvect)
perform a set-algebra operation OR
blocks_manager_type::block_idx_type block_idx_type
void copy(const bvector< Alloc > &bvect, bm::finalization is_final)
Copy bvector from the argument bvector.
void combine_operation_block_and(unsigned i, unsigned j, bm::word_t *blk, const bm::word_t *arg_blk)
void operator^=(const bvector< Alloc > &bv)
void operator|=(const bvector< Alloc > &bv)
bvector< Alloc > & flip(size_type n)
Flips bit n.
void clear(const size_type *ids, size_type ids_size, bm::sort_order so=bm::BM_UNKNOWN)
clear list of bits in this bitset
void import_sorted(const size_type *ids, const size_type ids_size, bool opt_flag)
Import sorted integers (set bits).
void optimize_range(size_type left, size_type right, bm::word_t *temp_block, optmode opt_mode=opt_compress)
block_idx_type count_blocks(unsigned *arr) const BMNOEXCEPT
rs_index< allocator_type > rs_index_type
size_type count_range_no_check(size_type left, size_type right) const BMNOEXCEPT
bvector(const bvector< Alloc > &bvect, size_type left, size_type right)
Copy constructor for range copy [left..right].
void combine_operation_sub(const bm::bvector< Alloc > &bvect)
perform a set-algebra operation MINUS (AND NOT)
int compare(const bvector< Alloc > &bvect) const BMNOEXCEPT
Lexicographical comparison with a bitvector.
blocks_manager_type & get_blocks_manager() BMNOEXCEPT
get access to memory manager (internal) Use only if you are BitMagic library
size_type count_to(size_type n, const rs_index_type &rs_idx) const BMNOEXCEPT
rs_index< allocator_type > blocks_count
bool none() const BMNOEXCEPT
bool set_bit_conditional(size_type n, bool val, bool condition)
Sets bit n only if current value equals the condition.
bool find_reverse(size_type &pos) const BMNOEXCEPT
Finds last index of 1 bit.
void build_rs_index(rs_index_type *rs_idx, bvector< dbg_alloc > *bv_blocks=0) const
void combine_operation_with_block(block_idx_type nb, const bm::word_t *arg_blk, bool arg_gap, bm::operation opcode)
void clear_bit_no_check(size_type n)
Clears bit n without precondition checks.
void gap_block_set_no_ret(bm::gap_word_t *gap_blk, bool val, block_idx_type nblock, unsigned nbit)
set bit in GAP block with GAP block length control
bvector(size_type bv_size, strategy strat=BM_BIT, const gap_word_t *glevel_len=bm::gap_len_table< true >::_len, const Alloc &alloc=Alloc())
Constructs bvector class.
void combine_operation(const bm::bvector< Alloc > &bvect, bm::operation opcode)
perform a set-algebra operation by operation code
void keep(const size_type *ids, size_type ids_size, bm::sort_order so=bm::BM_UNKNOWN)
Keep list of bits in this bitset, others are cleared.
bm::bvector< Alloc > & bit_or_and(const bm::bvector< Alloc > &bv1, const bm::bvector< Alloc > &bv2, typename bm::bvector< Alloc >::optmode opt_mode=opt_none)
3-operand AND where result is ORed into the target vector : this |= bv1 AND bv2 TARGET := TARGET OR (...
enumerator end() const
Returns enumerator pointing on the next bit after the last.
bool get_bit(size_type n) const BMNOEXCEPT
returns true if bit n is set and false if bit n is 0.
bool gap_block_set(bm::gap_word_t *gap_blk, bool val, block_idx_type nblock, unsigned nbit)
set bit in GAP block with GAP block length control
void calc_stat(struct bm::bvector< Alloc >::statistics *st) const BMNOEXCEPT
Calculates bitvector statistics.
bm::bvector< Alloc > & bit_xor(const bm::bvector< Alloc > &bv1, const bm::bvector< Alloc > &bv2, typename bm::bvector< Alloc >::optmode opt_mode=opt_none)
3-operand XOR : this := bv1 XOR bv2
void set_range_no_check(size_type left, size_type right)
Set range without validity/bounds checking.
size_type recalc_count() BMNOEXCEPT
void move_from(bvector< Alloc > &bvect) BMNOEXCEPT
Move bvector content from another bvector.
bool find(size_type from, size_type &pos) const BMNOEXCEPT
Find index of 1 bit starting from position.
bool inc(size_type n)
Increment the specified element.
void set_bit_no_check(size_type n)
Set bit without checking preconditions (size, etc).
Encoding utilities for serialization (internal).
BMFORCEINLINE bool avx2_test_all_zero_wave(const void *ptr)
check if wave of pointers is all NULL
BMFORCEINLINE bool avx2_test_all_zero_wave2(const void *ptr0, const void *ptr1)
check if 2 wave of pointers are all NULL
BMFORCEINLINE bool avx2_test_all_eq_wave2(const void *ptr0, const void *ptr1)
check if 2 wave of pointers are all the same (NULL or FULL)
BMFORCEINLINE bool sse42_test_all_eq_wave2(const void *ptr0, const void *ptr1) BMNOEXCEPT
check if wave of 2 pointers are the same (null or FULL)
BMFORCEINLINE bool sse42_test_all_zero_wave(const void *ptr) BMNOEXCEPT
check if wave of pointers is all NULL
BMFORCEINLINE bool sse42_test_all_zero_wave2(const void *ptr0, const void *ptr1) BMNOEXCEPT
check if 2 waves of pointers are all NULL
unsigned bit_block_find(const bm::word_t *BMRESTRICT block, unsigned nbit, unsigned *BMRESTRICT pos) BMNOEXCEPT
Searches for the next 1 bit in the BIT block.
bool bit_block_or_2way(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src1, const bm::word_t *BMRESTRICT src2) BMNOEXCEPT
2 way (target := source1 | source2) bitblock OR operation.
BMFORCEINLINE bm::id_t word_bitcount(bm::id_t w) BMNOEXCEPT
void bit_block_copy(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
Bitblock copy operation.
bm::id64_t bit_block_sub_2way(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src1, const bm::word_t *BMRESTRICT src2, bm::id64_t digest) BMNOEXCEPT
digest based bitblock SUB (AND NOT) operation (3 operand)
bm::id64_t bit_block_and(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
Plain bitblock AND operation. Function does not analyse availability of source and destination blocks...
bm::word_t * bit_operation_xor(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
bitblock XOR operation.
bool bit_block_shift_r1_unr(bm::word_t *BMRESTRICT block, bm::word_t *BMRESTRICT empty_acc, bm::word_t co_flag) BMNOEXCEPT
Right bit-shift of bit-block by 1 bit (loop unrolled).
BMFORCEINLINE unsigned word_bitcount64(bm::id64_t x) BMNOEXCEPT
bm::id_t bit_block_calc_count_to(const bm::word_t *block, bm::word_t right) BMNOEXCEPT
void bit_block_erase(bm::word_t *block, unsigned bitpos, bool carry_over) BMNOEXCEPT
erase bit from position and shift the rest right with carryover
bm::word_t * bit_operation_or(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
Block OR operation. Makes analysis if block is 0 or FULL.
bool bit_block_shift_l1_unr(bm::word_t *block, bm::word_t *empty_acc, bm::word_t co_flag) BMNOEXCEPT
Left bit-shift of bit-block by 1 bit (loop unrolled).
bm::id64_t calc_block_digest0(const bm::word_t *const block) BMNOEXCEPT
Compute digest for 64 non-zero areas.
bm::id64_t bit_block_and_2way(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src1, const bm::word_t *BMRESTRICT src2, bm::id64_t digest) BMNOEXCEPT
digest based bit-block AND
bm::word_t * bit_operation_sub(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
bitblock SUB operation.
unsigned short bitscan_wave(const bm::word_t *BMRESTRICT w_ptr, unsigned char *BMRESTRICT bits) BMNOEXCEPT
Unpacks word wave (Nx 32-bit words).
bool bit_find_first(const bm::word_t *BMRESTRICT block, unsigned *BMRESTRICT pos) BMNOEXCEPT
BIT block find the first set bit.
void bit_invert(T *start) BMNOEXCEPT
int bitcmp(const T *buf1, const T *buf2, unsigned len) BMNOEXCEPT
Lexicographical comparison of BIT buffers.
bm::id64_t bit_block_and_or_2way(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src1, const bm::word_t *BMRESTRICT src2, bm::id64_t digest) BMNOEXCEPT
digest based bit-block AND - OR
bm::id64_t bit_block_xor(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
Plain bitblock XOR operation. Function does not analyse availability of source and destination blocks...
bm::word_t * bit_operation_and(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
bitblock AND operation.
void bit_block_set(bm::word_t *BMRESTRICT dst, bm::word_t value) BMNOEXCEPT
Bitblock memset operation.
bool bit_block_or(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
Plain bitblock OR operation. Function does not analyse availability of source and destination blocks.
bool bit_is_all_zero(const bm::word_t *BMRESTRICT start) BMNOEXCEPT
Returns "true" if all bits in the block are 0.
unsigned bit_find_last(const bm::word_t *BMRESTRICT block, unsigned *BMRESTRICT last) BMNOEXCEPT
BIT block find the last set bit (backward search).
bm::id_t bit_block_calc_count_range(const bm::word_t *block, bm::word_t left, bm::word_t right) BMNOEXCEPT
void bit_andnot_arr_ffmask(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
bitblock AND NOT with constant ~0 mask operation.
bm::word_t bit_block_insert(bm::word_t *BMRESTRICT block, unsigned bitpos, bool value) BMNOEXCEPT
insert bit into position and shift the rest right with carryover
bm::id64_t bit_block_sub(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
Plain bitblock SUB (AND NOT) operation. Function does not analyse availability of source and destinat...
bm::id64_t bit_block_xor_2way(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src1, const bm::word_t *BMRESTRICT src2) BMNOEXCEPT
2 way (target := source1 ^ source2) bitblock XOR operation.
bool is_bits_one(const bm::wordop_t *start) BMNOEXCEPT
Returns "true" if all bits in the block are 1.
sort_order
Sort order declaration.
bm::alloc_pool_guard< allocator_pool_type, bvector< Alloc > > mem_pool_guard
int(* bit_visitor_callback_type)(void *handle_ptr, bm::id_t bit_idx)
Callback type to visit (callback style) bits in bit-vector(s).
finalization
copy strategy
strategy
Block allocation strategies.
@ BM_SORTED
input set is sorted (ascending order)
@ BM_UNKNOWN
sort order unknown
@ READWRITE
mutable (read-write object)
@ READONLY
immutable (read-only object)
@ BM_BIT
No GAP compression strategy. All new blocks are bit blocks.
@ BM_GAP
GAP compression is ON.
gap_word_t * gap_operation_or(const gap_word_t *BMRESTRICT vect1, const gap_word_t *BMRESTRICT vect2, gap_word_t *BMRESTRICT tmp_buf, unsigned &dsize) BMNOEXCEPT
GAP OR operation.
unsigned gap_test_unr(const T *BMRESTRICT buf, const unsigned pos) BMNOEXCEPT
Tests if bit = pos is true. Analog of bm::gap_test with SIMD unrolling.
unsigned gap_bit_count_range(const T *const buf, unsigned left, unsigned right) BMNOEXCEPT
Counts 1 bits in GAP buffer in the closed [left, right] range.
unsigned gap_find_last(const T *BMRESTRICT buf, unsigned *BMRESTRICT last) BMNOEXCEPT
GAP block find the last set bit.
void gap_and_to_bitset(unsigned *BMRESTRICT dest, const T *BMRESTRICT pcurr) BMNOEXCEPT
ANDs GAP block to bitblock.
gap_word_t * gap_operation_xor(const gap_word_t *BMRESTRICT vect1, const gap_word_t *BMRESTRICT vect2, gap_word_t *BMRESTRICT tmp_buf, unsigned &dsize) BMNOEXCEPT
GAP XOR operation.
unsigned gap_set_value(unsigned val, T *BMRESTRICT buf, unsigned pos, unsigned *BMRESTRICT is_set) BMNOEXCEPT
Sets or clears bit in the GAP buffer.
T gap_level(const T *BMRESTRICT buf) BMNOEXCEPT
Returns GAP blocks capacity level.
void gap_xor_to_bitset(unsigned *BMRESTRICT dest, const T *BMRESTRICT pcurr) BMNOEXCEPT
XOR GAP block to bitblock.
bool gap_shift_r1(T *BMRESTRICT buf, unsigned co_flag, unsigned *BMRESTRICT new_len) BMNOEXCEPT
Right shift GAP block by 1 bit.
int gapcmp(const T *buf1, const T *buf2) BMNOEXCEPT
Lexicographical comparison of GAP buffers.
void gap_convert_to_bitset(unsigned *BMRESTRICT dest, const T *BMRESTRICT buf, unsigned len=0) BMNOEXCEPT
GAP block to bitblock conversion.
unsigned gap_find_first(const T *BMRESTRICT buf, unsigned *BMRESTRICT first) BMNOEXCEPT
GAP block find the first set bit.
bool gap_insert(T *BMRESTRICT buf, unsigned pos, unsigned val, unsigned *BMRESTRICT new_len) BMNOEXCEPT
Insert bit into GAP compressed block
void gap_invert(T *buf) BMNOEXCEPT
Inverts all bits in the GAP buffer.
unsigned * gap_convert_to_bitset_smart(unsigned *BMRESTRICT dest, const T *BMRESTRICT buf, id_t set_max) BMNOEXCEPT
Smart GAP block to bitblock conversion.
void gap_sub_to_bitset(unsigned *BMRESTRICT dest, const T *BMRESTRICT pcurr) BMNOEXCEPT
SUB (AND NOT) GAP block to bitblock.
unsigned gap_block_find(const T *BMRESTRICT buf, unsigned nbit, bm::id_t *BMRESTRICT prev) BMNOEXCEPT
Searches for the next 1 bit in the GAP block.
void gap_init_range_block(T *buf, T from, T to, T value) BMNOEXCEPT
Init gap block so it has a block in it (can be the whole block).
gap_word_t * gap_operation_and(const gap_word_t *BMRESTRICT vect1, const gap_word_t *BMRESTRICT vect2, gap_word_t *BMRESTRICT tmp_buf, unsigned &dsize) BMNOEXCEPT
GAP AND operation.
BMFORCEINLINE bool gap_is_all_zero(const bm::gap_word_t *BMRESTRICT buf) BMNOEXCEPT
Checks if GAP block is all-zero.
bool gap_shift_l1(T *BMRESTRICT buf, unsigned co_flag, unsigned *BMRESTRICT new_len) BMNOEXCEPT
Left shift GAP block by 1 bit.
void gap_add_to_bitset(unsigned *BMRESTRICT dest, const T *BMRESTRICT pcurr, unsigned len) BMNOEXCEPT
Adds(OR) GAP block to bitblock.
unsigned gap_bit_count_range_hint(const T *const buf, unsigned left, unsigned right, unsigned hint) BMNOEXCEPT
Counts 1 bits in GAP buffer in the closed [left, right] range using position hint to avoid bfind.
gap_word_t * gap_operation_sub(const gap_word_t *BMRESTRICT vect1, const gap_word_t *BMRESTRICT vect2, gap_word_t *BMRESTRICT tmp_buf, unsigned &dsize) BMNOEXCEPT
GAP SUB (AND NOT) operation.
bool improve_gap_levels(const T *length, const T *length_end, T *glevel_len) BMNOEXCEPT
Finds optimal gap blocks lengths.
unsigned gap_capacity(const T *BMRESTRICT buf, const T *BMRESTRICT glevel_len) BMNOEXCEPT
Returns GAP block capacity.
unsigned gap_limit(const T *BMRESTRICT buf, const T *BMRESTRICT glevel_len) BMNOEXCEPT
Returns GAP block capacity limit.
BMFORCEINLINE bm::gap_word_t gap_length(const bm::gap_word_t *BMRESTRICT buf) BMNOEXCEPT
Returns GAP block length.
unsigned gap_set_value_cpos(unsigned val, T *BMRESTRICT buf, unsigned pos, unsigned *BMRESTRICT is_set, unsigned curr) BMNOEXCEPT
Sets or clears bit in the GAP buffer.
const unsigned set_array_mask
bool block_any(const bm::word_t *const BMRESTRICT block) BMNOEXCEPT
Returns "true" if one bit is set in the block Function check for block varieties.
bvector< Alloc > operator|(const bvector< Alloc > &bv1, const bvector< Alloc > &bv2)
const id64_t all_bits_mask
void for_each_nzblock(T ***root, unsigned size1, F &f)
bool block_is_all_one_range(const bm::word_t *const BMRESTRICT block, unsigned left, unsigned right) BMNOEXCEPT
Returns "true" if all bits are 1 in the block [left, right] Function check for block varieties.
const unsigned set_block_mask
const unsigned char lzcnt_table< T >::_lut[16]
unsigned gap_bit_count_to(const T *const buf, T right) BMNOEXCEPT
bvector< Alloc > operator-(const bvector< Alloc > &bv1, const bvector< Alloc > &bv2)
bool check_block_one(const bm::word_t *blk, bool deep_scan) BMNOEXCEPT
Checks if block has only 1 bits.
unsigned gap_bfind(const T *BMRESTRICT buf, unsigned pos, unsigned *BMRESTRICT is_set) BMNOEXCEPT
BMFORCEINLINE RTYPE get_block_start(unsigned i, unsigned j) BMNOEXCEPT
Compute bit address of the first bit in a block.
const unsigned set_sub_array_size
BMFORCEINLINE void get_block_coord(BI_TYPE nb, unsigned &i, unsigned &j) BMNOEXCEPT
Recalc linear bvector block index into 2D matrix coordinates.
const unsigned bits_in_array
const unsigned set_total_blocks
bvector< Alloc > operator^(const bvector< Alloc > &bv1, const bvector< Alloc > &bv2)
unsigned char get_nibble(const unsigned char *arr, unsigned idx) BMNOEXCEPT
get nibble from the array
gap_word_t *(* gap_operation_func_type)(const gap_word_t *BMRESTRICT, const gap_word_t *BMRESTRICT, gap_word_t *BMRESTRICT, unsigned &)
const unsigned set_block_size_op
bool block_find_first_diff(const bm::word_t *BMRESTRICT blk, const bm::word_t *BMRESTRICT arg_blk, unsigned *BMRESTRICT pos) BMNOEXCEPT
Find first bit which is different between two blocks (GAP or bit).
const unsigned rs3_half_span
const unsigned gap_levels
const unsigned set_word_shift
BMFORCEINLINE void xor_swap(W &x, W &y) BMNOEXCEPT
XOR swap two variables.
bm::id64_t idx_arr_block_lookup_u64(const bm::id64_t *idx, bm::id64_t size, bm::id64_t nb, bm::id64_t start) BMNOEXCEPT
block boundaries look ahead U64
const gap_word_t gap_len_table< T >::_len[bm::gap_levels]
const unsigned set_block_size
unsigned long long int id64_t
bool block_any_range(const bm::word_t *const BMRESTRICT block, unsigned left, unsigned right) BMNOEXCEPT
Returns "true" if one bit is set in the block [left, right] Function check for block varieties.
const unsigned gap_equiv_len
const unsigned rs3_border0_1
const unsigned rs3_border1
void set_block_bits_u32(bm::word_t *BMRESTRICT block, const unsigned *BMRESTRICT idx, unsigned start, unsigned stop) BMNOEXCEPT
set bits in a bit-block using global index
unsigned idx_arr_block_lookup_u32(const unsigned *idx, unsigned size, unsigned nb, unsigned start) BMNOEXCEPT
block boundaries look ahead U32
const unsigned short set_bitscan_wave_size
Size of bit decode wave in words.
const unsigned set_array_shift
unsigned short gap_word_t
const unsigned rs3_border1_1
void(* gap_operation_to_bitset_func_type)(unsigned *, const gap_word_t *)
bm::id_t bvector_size_type
const unsigned rs3_border0
const unsigned gap_max_bits
const unsigned set_top_array_size
const unsigned set_block_shift
void for_each_nzblock_range(T ***root, N top_size, N nb_from, N nb_to, F &f) BMNOEXCEPT
const unsigned set_word_mask
bool find_not_null_ptr(const bm::word_t *const *const *arr, N start, N size, N *pos) BMNOEXCEPT
const unsigned bits_in_block
bool block_find_reverse(const bm::word_t *BMRESTRICT block, unsigned nbit_from, unsigned *BMRESTRICT found_nbit) BMNOEXCEPT
Reverse find 1.
void set_block_bits_u64(bm::word_t *BMRESTRICT block, const bm::id64_t *BMRESTRICT idx, bm::id64_t start, bm::id64_t stop) BMNOEXCEPT
set bits in a bit-block using global index
bool for_each_nzblock_if(T ***root, BI size1, F &f) BMNOEXCEPT
SIZE_TYPE block_find_rank(const bm::word_t *const block, SIZE_TYPE rank, unsigned nbit_from, unsigned &nbit_pos) BMNOEXCEPT
Find rank in block (GAP or BIT).
bm::bvector< dbg_alloc > bvect
bv_statistics() BMNOEXCEPT
gap_word_t gap_levels[bm::gap_levels]
GAP block lengths in the bvect.
size_t max_serialize_mem
estimated maximum memory for serialization
void reset() BMNOEXCEPT
Reset statistics.
size_t memory_used
memory usage for all blocks and service tables
allocation_policy(bm::strategy s=BM_BIT, const gap_word_t *glevels=bm::gap_len_table< true >::_len) BMNOEXCEPT
const gap_word_t * glevel_len
unsigned short idx
Current position in the bit list.
unsigned char bits[set_bitscan_wave_size *32]
bit list
size_type pos
Last bit position decode before.
unsigned short cnt
Number of ON bits.
const bm::word_t * ptr
Word pointer.
Information about current DGAP block.
const gap_word_t * ptr
Word pointer.
gap_word_t gap_len
Current dgap length.
Statistical information about bitset's memory allocation details.
Default GAP lengths table.
static const gap_word_t _len[bm::gap_levels]
static gap_operation_to_bitset_func_type gap_op_to_bit(unsigned i)
static gap_operation_func_type gap_operation(unsigned i)
dgap_descr gap_
DGAP block related info.
bitblock_descr bit_
BitBlock related info.