/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */
5 #ifndef __INCLUDE_RTE_BITMAP_H__
6 #define __INCLUDE_RTE_BITMAP_H__
/**
 * @file
 * RTE Bitmap
 *
 * The bitmap component provides a mechanism to manage large arrays of bits
 * through bit get/set/clear and bit array scan operations.
 *
 * The bitmap scan operation is optimized for 64-bit CPUs using 64/128 byte cache
 * lines. The bitmap is hierarchically organized using two arrays (array1 and
 * array2), with each bit in array1 being associated with a full cache line
 * (512/1024 bits) of bitmap bits, which are stored in array2: the bit in array1
 * is set only when there is at least one bit set within its associated array2
 * bits, otherwise the bit in array1 is cleared. The read and write operations
 * for array1 and array2 are always done in slabs of 64 bits.
 *
 * This bitmap is not thread safe. For lock free operation on a specific bitmap
 * instance, a single writer thread performing bit set/clear operations is
 * allowed, only the writer thread can do bitmap scan operations, while there
 * can be several reader threads performing bit get operations in parallel with
 * the writer thread. When the use of locking primitives is acceptable, the
 * serialization of the bit set/clear and bitmap scan operations needs to be
 * enforced by the caller, while the bit get operation does not require locking
 * the bitmap.
 */
#include <stdint.h>
#include <string.h>

#include <rte_common.h>
#include <rte_config.h>
#include <rte_debug.h>
#include <rte_memory.h>
#include <rte_branch_prediction.h>
#include <rte_prefetch.h>
#ifndef RTE_BITMAP_OPTIMIZATIONS
/* Non-zero: rte_bsf64() uses the hardware bit-scan builtin instead of a
 * portable linear scan.
 */
#define RTE_BITMAP_OPTIMIZATIONS 1
#endif
/* Slab: the 64-bit granule in which array1/array2 are read and written */
#define RTE_BITMAP_SLAB_BIT_SIZE 64
#define RTE_BITMAP_SLAB_BIT_SIZE_LOG2 6
#define RTE_BITMAP_SLAB_BIT_MASK (RTE_BITMAP_SLAB_BIT_SIZE - 1)

/* Cache line (CL): one array1 bit covers one full cache line of array2 bits */
#define RTE_BITMAP_CL_BIT_SIZE (RTE_CACHE_LINE_SIZE * 8)
#define RTE_BITMAP_CL_BIT_SIZE_LOG2 (RTE_CACHE_LINE_SIZE_LOG2 + 3)
#define RTE_BITMAP_CL_BIT_MASK (RTE_BITMAP_CL_BIT_SIZE - 1)

#define RTE_BITMAP_CL_SLAB_SIZE (RTE_BITMAP_CL_BIT_SIZE / RTE_BITMAP_SLAB_BIT_SIZE)
#define RTE_BITMAP_CL_SLAB_SIZE_LOG2 (RTE_BITMAP_CL_BIT_SIZE_LOG2 - RTE_BITMAP_SLAB_BIT_SIZE_LOG2)
#define RTE_BITMAP_CL_SLAB_MASK (RTE_BITMAP_CL_SLAB_SIZE - 1)
/** Bitmap data structure */
struct rte_bitmap {
	/* Context for array1 and array2 */
	uint64_t *array1; /**< Bitmap array1 */
	uint64_t *array2; /**< Bitmap array2 */
	uint32_t array1_size; /**< Number of 64-bit slabs in array1 that are actually used */
	uint32_t array2_size; /**< Number of 64-bit slabs in array2 */

	/* Context for the "scan next" operation */
	uint32_t index1; /**< Bitmap scan: Index of current array1 slab */
	uint32_t offset1; /**< Bitmap scan: Offset of current bit within current array1 slab */
	uint32_t index2; /**< Bitmap scan: Index of current array2 slab */
	uint32_t go2; /**< Bitmap scan: Go/stop condition for current array2 cache line */

	/* Storage space for array1 and array2 (flexible array member; the
	 * bitmap is carved out of a single caller-provided memory block)
	 */
	uint8_t memory[];
} __rte_cache_aligned;
83 __rte_bitmap_index1_inc(struct rte_bitmap *bmp)
85 bmp->index1 = (bmp->index1 + 1) & (bmp->array1_size - 1);
88 static inline uint64_t
89 __rte_bitmap_mask1_get(struct rte_bitmap *bmp)
91 return (~1lu) << bmp->offset1;
95 __rte_bitmap_index2_set(struct rte_bitmap *bmp)
97 bmp->index2 = (((bmp->index1 << RTE_BITMAP_SLAB_BIT_SIZE_LOG2) + bmp->offset1) << RTE_BITMAP_CL_SLAB_SIZE_LOG2);
100 #if RTE_BITMAP_OPTIMIZATIONS
103 rte_bsf64(uint64_t slab, uint32_t *pos)
105 if (likely(slab == 0)) {
109 *pos = __builtin_ctzll(slab);
116 rte_bsf64(uint64_t slab, uint32_t *pos)
121 if (likely(slab == 0)) {
125 for (i = 0, mask = 1; i < RTE_BITMAP_SLAB_BIT_SIZE; i ++, mask <<= 1) {
126 if (unlikely(slab & mask)) {
137 static inline uint32_t
138 __rte_bitmap_get_memory_footprint(uint32_t n_bits,
139 uint32_t *array1_byte_offset, uint32_t *array1_slabs,
140 uint32_t *array2_byte_offset, uint32_t *array2_slabs)
142 uint32_t n_slabs_context, n_slabs_array1, n_cache_lines_context_and_array1;
143 uint32_t n_cache_lines_array2;
144 uint32_t n_bytes_total;
146 n_cache_lines_array2 = (n_bits + RTE_BITMAP_CL_BIT_SIZE - 1) / RTE_BITMAP_CL_BIT_SIZE;
147 n_slabs_array1 = (n_cache_lines_array2 + RTE_BITMAP_SLAB_BIT_SIZE - 1) / RTE_BITMAP_SLAB_BIT_SIZE;
148 n_slabs_array1 = rte_align32pow2(n_slabs_array1);
149 n_slabs_context = (sizeof(struct rte_bitmap) + (RTE_BITMAP_SLAB_BIT_SIZE / 8) - 1) / (RTE_BITMAP_SLAB_BIT_SIZE / 8);
150 n_cache_lines_context_and_array1 = (n_slabs_context + n_slabs_array1 + RTE_BITMAP_CL_SLAB_SIZE - 1) / RTE_BITMAP_CL_SLAB_SIZE;
151 n_bytes_total = (n_cache_lines_context_and_array1 + n_cache_lines_array2) * RTE_CACHE_LINE_SIZE;
153 if (array1_byte_offset) {
154 *array1_byte_offset = n_slabs_context * (RTE_BITMAP_SLAB_BIT_SIZE / 8);
157 *array1_slabs = n_slabs_array1;
159 if (array2_byte_offset) {
160 *array2_byte_offset = n_cache_lines_context_and_array1 * RTE_CACHE_LINE_SIZE;
163 *array2_slabs = n_cache_lines_array2 * RTE_BITMAP_CL_SLAB_SIZE;
166 return n_bytes_total;
170 __rte_bitmap_scan_init(struct rte_bitmap *bmp)
172 bmp->index1 = bmp->array1_size - 1;
173 bmp->offset1 = RTE_BITMAP_SLAB_BIT_SIZE - 1;
174 __rte_bitmap_index2_set(bmp);
175 bmp->index2 += RTE_BITMAP_CL_SLAB_SIZE;
/**
 * Bitmap memory footprint calculation
 *
 * @param n_bits
 *   Number of bits in the bitmap
 * @return
 *   Bitmap memory footprint measured in bytes on success, 0 on error
 */
static inline uint32_t
rte_bitmap_get_memory_footprint(uint32_t n_bits)
{
	/* Check input arguments */
	if (n_bits == 0) {
		return 0;
	}

	return __rte_bitmap_get_memory_footprint(n_bits, NULL, NULL, NULL, NULL);
}
199 * Bitmap initialization
202 * Minimum expected size of bitmap.
204 * Base address of array1 and array2.
206 * Number of pre-allocated bits in array2. Must be non-zero and multiple of 512.
208 * Handle to bitmap instance.
210 static inline struct rte_bitmap *
211 rte_bitmap_init(uint32_t n_bits, uint8_t *mem, uint32_t mem_size)
213 struct rte_bitmap *bmp;
214 uint32_t array1_byte_offset, array1_slabs, array2_byte_offset, array2_slabs;
217 /* Check input arguments */
222 if ((mem == NULL) || (((uintptr_t) mem) & RTE_CACHE_LINE_MASK)) {
226 size = __rte_bitmap_get_memory_footprint(n_bits,
227 &array1_byte_offset, &array1_slabs,
228 &array2_byte_offset, &array2_slabs);
229 if (size < mem_size) {
234 memset(mem, 0, size);
235 bmp = (struct rte_bitmap *) mem;
237 bmp->array1 = (uint64_t *) &mem[array1_byte_offset];
238 bmp->array1_size = array1_slabs;
239 bmp->array2 = (uint64_t *) &mem[array2_byte_offset];
240 bmp->array2_size = array2_slabs;
242 __rte_bitmap_scan_init(bmp);
/**
 * Bitmap free
 *
 * The bitmap does not own its memory block, so freeing only validates the
 * handle; the caller releases the backing memory.
 *
 * @param bmp
 *   Handle to bitmap instance
 * @return
 *   0 upon success, error code otherwise
 */
static inline int
rte_bitmap_free(struct rte_bitmap *bmp)
{
	/* Check input arguments */
	if (bmp == NULL) {
		return -1;
	}

	return 0;
}
270 * Handle to bitmap instance
273 rte_bitmap_reset(struct rte_bitmap *bmp)
275 memset(bmp->array1, 0, bmp->array1_size * sizeof(uint64_t));
276 memset(bmp->array2, 0, bmp->array2_size * sizeof(uint64_t));
277 __rte_bitmap_scan_init(bmp);
281 * Bitmap location prefetch into CPU L1 cache
284 * Handle to bitmap instance
288 * 0 upon success, error code otherwise
291 rte_bitmap_prefetch0(struct rte_bitmap *bmp, uint32_t pos)
296 index2 = pos >> RTE_BITMAP_SLAB_BIT_SIZE_LOG2;
297 slab2 = bmp->array2 + index2;
298 rte_prefetch0((void *) slab2);
305 * Handle to bitmap instance
309 * 0 when bit is cleared, non-zero when bit is set
311 static inline uint64_t
312 rte_bitmap_get(struct rte_bitmap *bmp, uint32_t pos)
315 uint32_t index2, offset2;
317 index2 = pos >> RTE_BITMAP_SLAB_BIT_SIZE_LOG2;
318 offset2 = pos & RTE_BITMAP_SLAB_BIT_MASK;
319 slab2 = bmp->array2 + index2;
320 return (*slab2) & (1lu << offset2);
327 * Handle to bitmap instance
332 rte_bitmap_set(struct rte_bitmap *bmp, uint32_t pos)
334 uint64_t *slab1, *slab2;
335 uint32_t index1, index2, offset1, offset2;
337 /* Set bit in array2 slab and set bit in array1 slab */
338 index2 = pos >> RTE_BITMAP_SLAB_BIT_SIZE_LOG2;
339 offset2 = pos & RTE_BITMAP_SLAB_BIT_MASK;
340 index1 = pos >> (RTE_BITMAP_SLAB_BIT_SIZE_LOG2 + RTE_BITMAP_CL_BIT_SIZE_LOG2);
341 offset1 = (pos >> RTE_BITMAP_CL_BIT_SIZE_LOG2) & RTE_BITMAP_SLAB_BIT_MASK;
342 slab2 = bmp->array2 + index2;
343 slab1 = bmp->array1 + index1;
345 *slab2 |= 1lu << offset2;
346 *slab1 |= 1lu << offset1;
353 * Handle to bitmap instance
355 * Bit position identifying the array2 slab
357 * Value to be assigned to the 64-bit slab in array2
360 rte_bitmap_set_slab(struct rte_bitmap *bmp, uint32_t pos, uint64_t slab)
362 uint64_t *slab1, *slab2;
363 uint32_t index1, index2, offset1;
365 /* Set bits in array2 slab and set bit in array1 slab */
366 index2 = pos >> RTE_BITMAP_SLAB_BIT_SIZE_LOG2;
367 index1 = pos >> (RTE_BITMAP_SLAB_BIT_SIZE_LOG2 + RTE_BITMAP_CL_BIT_SIZE_LOG2);
368 offset1 = (pos >> RTE_BITMAP_CL_BIT_SIZE_LOG2) & RTE_BITMAP_SLAB_BIT_MASK;
369 slab2 = bmp->array2 + index2;
370 slab1 = bmp->array1 + index1;
373 *slab1 |= 1lu << offset1;
/* Check whether an array2 cache line has any bit set: OR-reduce its eight
 * slabs pairwise to keep the dependency chain short. Returns non-zero when
 * at least one bit is set. NOTE(review): reads exactly 8 slabs (64-byte
 * cache line); confirm behavior on 128-byte cache line targets.
 */
static inline uint64_t
__rte_bitmap_line_not_empty(uint64_t *slab2)
{
	uint64_t v1, v2, v3, v4;

	v1 = slab2[0] | slab2[1];
	v2 = slab2[2] | slab2[3];
	v3 = slab2[4] | slab2[5];
	v4 = slab2[6] | slab2[7];
	v1 |= v2;
	v3 |= v4;

	return v1 | v3;
}
395 * Handle to bitmap instance
400 rte_bitmap_clear(struct rte_bitmap *bmp, uint32_t pos)
402 uint64_t *slab1, *slab2;
403 uint32_t index1, index2, offset1, offset2;
405 /* Clear bit in array2 slab */
406 index2 = pos >> RTE_BITMAP_SLAB_BIT_SIZE_LOG2;
407 offset2 = pos & RTE_BITMAP_SLAB_BIT_MASK;
408 slab2 = bmp->array2 + index2;
410 /* Return if array2 slab is not all-zeros */
411 *slab2 &= ~(1lu << offset2);
416 /* Check the entire cache line of array2 for all-zeros */
417 index2 &= ~ RTE_BITMAP_CL_SLAB_MASK;
418 slab2 = bmp->array2 + index2;
419 if (__rte_bitmap_line_not_empty(slab2)) {
423 /* The array2 cache line is all-zeros, so clear bit in array1 slab */
424 index1 = pos >> (RTE_BITMAP_SLAB_BIT_SIZE_LOG2 + RTE_BITMAP_CL_BIT_SIZE_LOG2);
425 offset1 = (pos >> RTE_BITMAP_CL_BIT_SIZE_LOG2) & RTE_BITMAP_SLAB_BIT_MASK;
426 slab1 = bmp->array1 + index1;
427 *slab1 &= ~(1lu << offset1);
433 __rte_bitmap_scan_search(struct rte_bitmap *bmp)
438 /* Check current array1 slab */
439 value1 = bmp->array1[bmp->index1];
440 value1 &= __rte_bitmap_mask1_get(bmp);
442 if (rte_bsf64(value1, &bmp->offset1)) {
446 __rte_bitmap_index1_inc(bmp);
449 /* Look for another array1 slab */
450 for (i = 0; i < bmp->array1_size; i ++, __rte_bitmap_index1_inc(bmp)) {
451 value1 = bmp->array1[bmp->index1];
453 if (rte_bsf64(value1, &bmp->offset1)) {
462 __rte_bitmap_scan_read_init(struct rte_bitmap *bmp)
464 __rte_bitmap_index2_set(bmp);
466 rte_prefetch1((void *)(bmp->array2 + bmp->index2 + 8));
470 __rte_bitmap_scan_read(struct rte_bitmap *bmp, uint32_t *pos, uint64_t *slab)
474 slab2 = bmp->array2 + bmp->index2;
475 for ( ; bmp->go2 ; bmp->index2 ++, slab2 ++, bmp->go2 = bmp->index2 & RTE_BITMAP_CL_SLAB_MASK) {
477 *pos = bmp->index2 << RTE_BITMAP_SLAB_BIT_SIZE_LOG2;
482 bmp->go2 = bmp->index2 & RTE_BITMAP_CL_SLAB_MASK;
/**
 * Bitmap scan (with automatic wrap-around)
 *
 * @param bmp
 *   Handle to bitmap instance
 * @param pos
 *   When function call returns 1, pos contains the position of the next set
 *   bit, otherwise not modified
 * @param slab
 *   When function call returns 1, slab contains the value of the entire 64-bit
 *   slab where the bit indicated by pos is located. Slabs are always 64-bit
 *   aligned, so the position of the first bit of the slab (this bit is not
 *   necessarily set) is pos / 64. Once a slab has been returned by the bitmap
 *   scan operation, the internal pointers of the bitmap are updated to point
 *   after this slab, so the same slab will not be returned again if it
 *   contains more than one bit which is set. When function call returns 0,
 *   slab is not modified.
 * @return
 *   0 if there is no bit set in the bitmap, 1 otherwise
 */
static inline int
rte_bitmap_scan(struct rte_bitmap *bmp, uint32_t *pos, uint64_t *slab)
{
	/* Return data from current array2 line if available */
	if (__rte_bitmap_scan_read(bmp, pos, slab)) {
		return 1;
	}

	/* Look for non-empty array2 line */
	if (__rte_bitmap_scan_search(bmp)) {
		__rte_bitmap_scan_read_init(bmp);
		__rte_bitmap_scan_read(bmp, pos, slab);
		return 1;
	}

	/* Empty bitmap */
	return 0;
}
533 #endif /* __INCLUDE_RTE_BITMAP_H__ */