/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2010-2014 Intel Corporation. All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef __INCLUDE_RTE_BITMAP_H__
#define __INCLUDE_RTE_BITMAP_H__

#ifdef __cplusplus
extern "C" {
#endif

/**
 * @file
 * RTE Bitmap
 *
 * The bitmap component provides a mechanism to manage large arrays of bits
 * through bit get/set/clear and bit array scan operations.
 *
 * The bitmap scan operation is optimized for 64-bit CPUs using 64/128 byte
 * cache lines. The bitmap is hierarchically organized using two arrays
 * (array1 and array2), with each bit in array1 being associated with a full
 * cache line (512/1024 bits) of bitmap bits, which are stored in array2: the
 * bit in array1 is set only when there is at least one bit set within its
 * associated array2 bits, otherwise the bit in array1 is cleared. The read
 * and write operations for array1 and array2 are always done in slabs of 64
 * bits.
 *
 * This bitmap is not thread safe. For lock free operation on a specific
 * bitmap instance, a single writer thread performing bit set/clear operations
 * is allowed; only the writer thread can perform bitmap scan operations,
 * while several reader threads may perform bit get operations in parallel
 * with the writer thread. When the use of locking primitives is acceptable,
 * the serialization of the bit set/clear and bitmap scan operations needs to
 * be enforced by the caller, while the bit get operation does not require
 * locking the bitmap.
 */

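/*
 * Worked example of the two-level indexing (a sketch, assuming 64-byte cache
 * lines, i.e. 512 bitmap bits per array2 cache line and 64-bit slabs):
 *
 *   pos = 1000
 *   index2  = pos / 64         = 15  (array2 slab holding the bit)
 *   offset2 = pos % 64         = 40  (bit position within that slab)
 *   index1  = pos / 512 / 64   = 0   (array1 slab covering the cache line)
 *   offset1 = (pos / 512) % 64 = 1   (bit position within that array1 slab)
 *
 * Setting bit 1000 therefore sets bit 40 of array2[15] and bit 1 of array1[0];
 * clearing it clears bit 40 of array2[15], and clears bit 1 of array1[0] only
 * once all 512 bits of that array2 cache line are zero.
 */
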
#include <stdint.h>
#include <string.h>

#include <rte_common.h>
#include <rte_debug.h>
#include <rte_memory.h>
#include <rte_branch_prediction.h>
#include <rte_prefetch.h>

#ifndef RTE_BITMAP_OPTIMIZATIONS
#define RTE_BITMAP_OPTIMIZATIONS          1
#endif

/* Slab */
#define RTE_BITMAP_SLAB_BIT_SIZE          64
#define RTE_BITMAP_SLAB_BIT_SIZE_LOG2     6
#define RTE_BITMAP_SLAB_BIT_MASK          (RTE_BITMAP_SLAB_BIT_SIZE - 1)

/* Cache line (CL) */
#define RTE_BITMAP_CL_BIT_SIZE            (RTE_CACHE_LINE_SIZE * 8)
#define RTE_BITMAP_CL_BIT_SIZE_LOG2       (RTE_CACHE_LINE_SIZE_LOG2 + 3)
#define RTE_BITMAP_CL_BIT_MASK            (RTE_BITMAP_CL_BIT_SIZE - 1)

#define RTE_BITMAP_CL_SLAB_SIZE           (RTE_BITMAP_CL_BIT_SIZE / RTE_BITMAP_SLAB_BIT_SIZE)
#define RTE_BITMAP_CL_SLAB_SIZE_LOG2      (RTE_BITMAP_CL_BIT_SIZE_LOG2 - RTE_BITMAP_SLAB_BIT_SIZE_LOG2)
#define RTE_BITMAP_CL_SLAB_MASK           (RTE_BITMAP_CL_SLAB_SIZE - 1)

/** Bitmap data structure */
struct rte_bitmap {
    /* Context for array1 and array2 */
    uint64_t *array1;     /**< Bitmap array1 */
    uint64_t *array2;     /**< Bitmap array2 */
    uint32_t array1_size; /**< Number of 64-bit slabs in array1 that are actually used */
    uint32_t array2_size; /**< Number of 64-bit slabs in array2 */

    /* Context for the "scan next" operation */
    uint32_t index1;  /**< Bitmap scan: Index of current array1 slab */
    uint32_t offset1; /**< Bitmap scan: Offset of current bit within current array1 slab */
    uint32_t index2;  /**< Bitmap scan: Index of current array2 slab */
    uint32_t go2;     /**< Bitmap scan: Go/stop condition for current array2 cache line */

    /* Storage space for array1 and array2 */
    uint8_t memory[];
} __rte_cache_aligned;

static inline void
__rte_bitmap_index1_inc(struct rte_bitmap *bmp)
{
    bmp->index1 = (bmp->index1 + 1) & (bmp->array1_size - 1);
}

static inline uint64_t
__rte_bitmap_mask1_get(struct rte_bitmap *bmp)
{
    return (~1lu) << bmp->offset1;
}

static inline void
__rte_bitmap_index2_set(struct rte_bitmap *bmp)
{
    bmp->index2 = (((bmp->index1 << RTE_BITMAP_SLAB_BIT_SIZE_LOG2) + bmp->offset1) << RTE_BITMAP_CL_SLAB_SIZE_LOG2);
}

#if RTE_BITMAP_OPTIMIZATIONS

/* Find the least significant set bit in slab. Returns 0 when the slab is
 * empty, otherwise returns 1 and stores the bit position in *pos. */
static inline int
rte_bsf64(uint64_t slab, uint32_t *pos)
{
    if (likely(slab == 0)) {
        return 0;
    }

    *pos = __builtin_ctzll(slab);
    return 1;
}

#else

static inline int
rte_bsf64(uint64_t slab, uint32_t *pos)
{
    uint64_t mask;
    uint32_t i;

    if (likely(slab == 0)) {
        return 0;
    }

    for (i = 0, mask = 1; i < RTE_BITMAP_SLAB_BIT_SIZE; i++, mask <<= 1) {
        if (unlikely(slab & mask)) {
            *pos = i;
            return 1;
        }
    }

    return 0;
}

#endif

static inline uint32_t
__rte_bitmap_get_memory_footprint(uint32_t n_bits,
    uint32_t *array1_byte_offset, uint32_t *array1_slabs,
    uint32_t *array2_byte_offset, uint32_t *array2_slabs)
{
    uint32_t n_slabs_context, n_slabs_array1, n_cache_lines_context_and_array1;
    uint32_t n_cache_lines_array2;
    uint32_t n_bytes_total;

    /* The bitmap context and array1 share the leading cache lines,
     * array2 occupies the remaining cache lines. */
    n_cache_lines_array2 = (n_bits + RTE_BITMAP_CL_BIT_SIZE - 1) / RTE_BITMAP_CL_BIT_SIZE;
    n_slabs_array1 = (n_cache_lines_array2 + RTE_BITMAP_SLAB_BIT_SIZE - 1) / RTE_BITMAP_SLAB_BIT_SIZE;
    n_slabs_array1 = rte_align32pow2(n_slabs_array1);
    n_slabs_context = (sizeof(struct rte_bitmap) + (RTE_BITMAP_SLAB_BIT_SIZE / 8) - 1) / (RTE_BITMAP_SLAB_BIT_SIZE / 8);
    n_cache_lines_context_and_array1 = (n_slabs_context + n_slabs_array1 + RTE_BITMAP_CL_SLAB_SIZE - 1) / RTE_BITMAP_CL_SLAB_SIZE;
    n_bytes_total = (n_cache_lines_context_and_array1 + n_cache_lines_array2) * RTE_CACHE_LINE_SIZE;

    if (array1_byte_offset) {
        *array1_byte_offset = n_slabs_context * (RTE_BITMAP_SLAB_BIT_SIZE / 8);
    }
    if (array1_slabs) {
        *array1_slabs = n_slabs_array1;
    }
    if (array2_byte_offset) {
        *array2_byte_offset = n_cache_lines_context_and_array1 * RTE_CACHE_LINE_SIZE;
    }
    if (array2_slabs) {
        *array2_slabs = n_cache_lines_array2 * RTE_BITMAP_CL_SLAB_SIZE;
    }

    return n_bytes_total;
}

static inline void
__rte_bitmap_scan_init(struct rte_bitmap *bmp)
{
    bmp->index1 = bmp->array1_size - 1;
    bmp->offset1 = RTE_BITMAP_SLAB_BIT_SIZE - 1;
    __rte_bitmap_index2_set(bmp);
    bmp->index2 += RTE_BITMAP_CL_SLAB_SIZE;

    bmp->go2 = 0;
}

/**
 * Bitmap memory footprint calculation
 *
 * @param n_bits
 *   Number of bits in the bitmap
 * @return
 *   Bitmap memory footprint measured in bytes on success, 0 on error
 */
static inline uint32_t
rte_bitmap_get_memory_footprint(uint32_t n_bits)
{
    /* Check input arguments */
    if (n_bits == 0) {
        return 0;
    }

    return __rte_bitmap_get_memory_footprint(n_bits, NULL, NULL, NULL, NULL);
}

/**
 * Bitmap initialization
 *
 * @param n_bits
 *   Number of pre-allocated bits in array2. Must be non-zero and a multiple
 *   of 512.
 * @param mem
 *   Base address of the pre-allocated, cache line aligned memory used for
 *   array1 and array2.
 * @param mem_size
 *   Size of the pre-allocated memory in bytes. Must be at least the value
 *   returned by rte_bitmap_get_memory_footprint(n_bits).
 * @return
 *   Handle to bitmap instance on success, NULL on error.
 */
static inline struct rte_bitmap *
rte_bitmap_init(uint32_t n_bits, uint8_t *mem, uint32_t mem_size)
{
    struct rte_bitmap *bmp;
    uint32_t array1_byte_offset, array1_slabs, array2_byte_offset, array2_slabs;
    uint32_t size;

    /* Check input arguments */
    if (n_bits == 0) {
        return NULL;
    }

    if ((mem == NULL) || (((uintptr_t) mem) & RTE_CACHE_LINE_MASK)) {
        return NULL;
    }

    size = __rte_bitmap_get_memory_footprint(n_bits,
        &array1_byte_offset, &array1_slabs,
        &array2_byte_offset, &array2_slabs);
    if (mem_size < size) {
        return NULL;
    }

    /* Setup bitmap */
    memset(mem, 0, size);
    bmp = (struct rte_bitmap *) mem;

    bmp->array1 = (uint64_t *) &mem[array1_byte_offset];
    bmp->array1_size = array1_slabs;
    bmp->array2 = (uint64_t *) &mem[array2_byte_offset];
    bmp->array2_size = array2_slabs;

    __rte_bitmap_scan_init(bmp);

    return bmp;
}

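/*
 * Minimal setup sketch (illustration only, not part of the API): compute the
 * footprint, obtain cache line aligned memory and initialize the bitmap.
 * posix_memalign() is used here just to keep the example self-contained; a
 * DPDK application would more likely carve the memory out of a hugepage
 * allocation, e.g. with rte_zmalloc().
 *
 *   #include <stdlib.h>
 *
 *   uint32_t n_bits = 4096;
 *   uint32_t size = rte_bitmap_get_memory_footprint(n_bits);
 *   void *mem = NULL;
 *   struct rte_bitmap *bmp = NULL;
 *
 *   if (size != 0 && posix_memalign(&mem, RTE_CACHE_LINE_SIZE, size) == 0)
 *       bmp = rte_bitmap_init(n_bits, mem, size);
 *   if (bmp == NULL)
 *       rte_panic("Cannot setup bitmap\n");
 */
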
/**
 * Bitmap free
 *
 * @param bmp
 *   Handle to bitmap instance
 * @return
 *   0 upon success, error code otherwise
 */
static inline int
rte_bitmap_free(struct rte_bitmap *bmp)
{
    /* Check input arguments */
    if (bmp == NULL) {
        return -1;
    }

    return 0;
}

/**
 * Bitmap reset
 *
 * @param bmp
 *   Handle to bitmap instance
 */
static inline void
rte_bitmap_reset(struct rte_bitmap *bmp)
{
    memset(bmp->array1, 0, bmp->array1_size * sizeof(uint64_t));
    memset(bmp->array2, 0, bmp->array2_size * sizeof(uint64_t));
    __rte_bitmap_scan_init(bmp);
}

/**
 * Bitmap location prefetch into CPU L1 cache
 *
 * @param bmp
 *   Handle to bitmap instance
 * @param pos
 *   Bit position
 */
static inline void
rte_bitmap_prefetch0(struct rte_bitmap *bmp, uint32_t pos)
{
    uint64_t *slab2;
    uint32_t index2;

    index2 = pos >> RTE_BITMAP_SLAB_BIT_SIZE_LOG2;
    slab2 = bmp->array2 + index2;
    rte_prefetch0((void *) slab2);
}

/**
 * Bitmap bit get
 *
 * @param bmp
 *   Handle to bitmap instance
 * @param pos
 *   Bit position
 * @return
 *   0 when bit is cleared, non-zero when bit is set
 */
static inline uint64_t
rte_bitmap_get(struct rte_bitmap *bmp, uint32_t pos)
{
    uint64_t *slab2;
    uint32_t index2, offset2;

    index2 = pos >> RTE_BITMAP_SLAB_BIT_SIZE_LOG2;
    offset2 = pos & RTE_BITMAP_SLAB_BIT_MASK;
    slab2 = bmp->array2 + index2;
    return (*slab2) & (1lu << offset2);
}

/**
 * Bitmap bit set
 *
 * @param bmp
 *   Handle to bitmap instance
 * @param pos
 *   Bit position
 */
static inline void
rte_bitmap_set(struct rte_bitmap *bmp, uint32_t pos)
{
    uint64_t *slab1, *slab2;
    uint32_t index1, index2, offset1, offset2;

    /* Set bit in array2 slab and set bit in array1 slab */
    index2 = pos >> RTE_BITMAP_SLAB_BIT_SIZE_LOG2;
    offset2 = pos & RTE_BITMAP_SLAB_BIT_MASK;
    index1 = pos >> (RTE_BITMAP_SLAB_BIT_SIZE_LOG2 + RTE_BITMAP_CL_BIT_SIZE_LOG2);
    offset1 = (pos >> RTE_BITMAP_CL_BIT_SIZE_LOG2) & RTE_BITMAP_SLAB_BIT_MASK;
    slab2 = bmp->array2 + index2;
    slab1 = bmp->array1 + index1;

    *slab2 |= 1lu << offset2;
    *slab1 |= 1lu << offset1;
}

/**
 * Bitmap slab set
 *
 * @param bmp
 *   Handle to bitmap instance
 * @param pos
 *   Bit position identifying the array2 slab
 * @param slab
 *   Value whose set bits are added (OR-ed) to the 64-bit slab in array2
 */
static inline void
rte_bitmap_set_slab(struct rte_bitmap *bmp, uint32_t pos, uint64_t slab)
{
    uint64_t *slab1, *slab2;
    uint32_t index1, index2, offset1;

    /* Set bits in array2 slab and set bit in array1 slab */
    index2 = pos >> RTE_BITMAP_SLAB_BIT_SIZE_LOG2;
    index1 = pos >> (RTE_BITMAP_SLAB_BIT_SIZE_LOG2 + RTE_BITMAP_CL_BIT_SIZE_LOG2);
    offset1 = (pos >> RTE_BITMAP_CL_BIT_SIZE_LOG2) & RTE_BITMAP_SLAB_BIT_MASK;
    slab2 = bmp->array2 + index2;
    slab1 = bmp->array1 + index1;

    *slab2 |= slab;
    *slab1 |= 1lu << offset1;
}

static inline uint64_t
__rte_bitmap_line_not_empty(uint64_t *slab2)
{
    uint64_t v1, v2, v3, v4;

    v1 = slab2[0] | slab2[1];
    v2 = slab2[2] | slab2[3];
    v3 = slab2[4] | slab2[5];
    v4 = slab2[6] | slab2[7];
    v1 |= v2;
    v3 |= v4;

    return v1 | v3;
}

/**
 * Bitmap bit clear
 *
 * @param bmp
 *   Handle to bitmap instance
 * @param pos
 *   Bit position
 */
static inline void
rte_bitmap_clear(struct rte_bitmap *bmp, uint32_t pos)
{
    uint64_t *slab1, *slab2;
    uint32_t index1, index2, offset1, offset2;

    /* Clear bit in array2 slab */
    index2 = pos >> RTE_BITMAP_SLAB_BIT_SIZE_LOG2;
    offset2 = pos & RTE_BITMAP_SLAB_BIT_MASK;
    slab2 = bmp->array2 + index2;

    /* Return if array2 slab is not all-zeros */
    *slab2 &= ~(1lu << offset2);
    if (*slab2) {
        return;
    }

    /* Check the entire cache line of array2 for all-zeros */
    index2 &= ~RTE_BITMAP_CL_SLAB_MASK;
    slab2 = bmp->array2 + index2;
    if (__rte_bitmap_line_not_empty(slab2)) {
        return;
    }

    /* The array2 cache line is all-zeros, so clear bit in array1 slab */
    index1 = pos >> (RTE_BITMAP_SLAB_BIT_SIZE_LOG2 + RTE_BITMAP_CL_BIT_SIZE_LOG2);
    offset1 = (pos >> RTE_BITMAP_CL_BIT_SIZE_LOG2) & RTE_BITMAP_SLAB_BIT_MASK;
    slab1 = bmp->array1 + index1;
    *slab1 &= ~(1lu << offset1);
}

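/*
 * Single-bit usage sketch (illustration only; "bmp" is assumed to be a bitmap
 * created with rte_bitmap_init() for at least 1024 bits):
 *
 *   rte_bitmap_set(bmp, 1000);
 *   if (rte_bitmap_get(bmp, 1000) != 0) {
 *       // bit 1000 is set
 *   }
 *   rte_bitmap_clear(bmp, 1000);
 *
 * Note that rte_bitmap_get() returns the 64-bit slab masked to the requested
 * bit, so the result is non-zero (but not necessarily 1) when the bit is set.
 */
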
static inline int
__rte_bitmap_scan_search(struct rte_bitmap *bmp)
{
    uint64_t value1;
    uint32_t i;

    /* Check current array1 slab */
    value1 = bmp->array1[bmp->index1];
    value1 &= __rte_bitmap_mask1_get(bmp);

    if (rte_bsf64(value1, &bmp->offset1)) {
        return 1;
    }

    __rte_bitmap_index1_inc(bmp);

    /* Look for another array1 slab */
    for (i = 0; i < bmp->array1_size; i++, __rte_bitmap_index1_inc(bmp)) {
        value1 = bmp->array1[bmp->index1];

        if (rte_bsf64(value1, &bmp->offset1)) {
            return 1;
        }
    }

    return 0;
}

static inline void
__rte_bitmap_scan_read_init(struct rte_bitmap *bmp)
{
    __rte_bitmap_index2_set(bmp);
    bmp->go2 = 1;
    rte_prefetch1((void *)(bmp->array2 + bmp->index2 + 8));
}

static inline int
__rte_bitmap_scan_read(struct rte_bitmap *bmp, uint32_t *pos, uint64_t *slab)
{
    uint64_t *slab2;

    /* Walk the remaining slabs of the current array2 cache line */
    slab2 = bmp->array2 + bmp->index2;
    for ( ; bmp->go2; bmp->index2++, slab2++, bmp->go2 = bmp->index2 & RTE_BITMAP_CL_SLAB_MASK) {
        if (*slab2) {
            *pos = bmp->index2 << RTE_BITMAP_SLAB_BIT_SIZE_LOG2;
            *slab = *slab2;

            bmp->index2++;
            slab2++;
            bmp->go2 = bmp->index2 & RTE_BITMAP_CL_SLAB_MASK;
            return 1;
        }
    }

    return 0;
}

/**
 * Bitmap scan (with automatic wrap-around)
 *
 * @param bmp
 *   Handle to bitmap instance
 * @param pos
 *   When function call returns 1, pos contains the position of the next set
 *   bit, rounded down to a multiple of 64 (i.e. the position of the first bit
 *   of the slab that contains it); otherwise pos is not modified.
 * @param slab
 *   When function call returns 1, slab contains the value of the entire 64-bit
 *   slab starting at bit position pos; at least one bit in this slab is set.
 *   Once a slab has been returned by the bitmap scan operation, the internal
 *   pointers of the bitmap are updated to point after this slab, so the same
 *   slab will not be returned again even if it contains more than one set bit.
 *   When function call returns 0, slab is not modified.
 * @return
 *   0 if there is no bit set in the bitmap, 1 otherwise
 */
static inline int
rte_bitmap_scan(struct rte_bitmap *bmp, uint32_t *pos, uint64_t *slab)
{
    /* Return data from current array2 line if available */
    if (__rte_bitmap_scan_read(bmp, pos, slab)) {
        return 1;
    }

    /* Look for non-empty array2 line */
    if (__rte_bitmap_scan_search(bmp)) {
        __rte_bitmap_scan_read_init(bmp);
        __rte_bitmap_scan_read(bmp, pos, slab);
        return 1;
    }

    /* Empty bitmap */
    return 0;
}

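/*
 * Scan usage sketch (illustration only): drain all set bits from "bmp". The
 * scan returns one non-empty 64-bit slab at a time; the individual set bits
 * are then extracted from the returned slab by the caller, here with the
 * rte_bsf64() helper defined above.
 *
 *   uint32_t pos, offset;
 *   uint64_t slab;
 *
 *   while (rte_bitmap_scan(bmp, &pos, &slab)) {
 *       while (rte_bsf64(slab, &offset)) {
 *           uint32_t bit = pos + offset;
 *
 *           // process bit, then clear it so the loop eventually terminates
 *           rte_bitmap_clear(bmp, bit);
 *           slab &= ~(1lu << offset);
 *       }
 *   }
 *
 * Because the scan wraps around automatically, it only returns 0 once the
 * bitmap is completely empty, which is why every processed bit is cleared in
 * the loop above.
 */
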
#ifdef __cplusplus
}
#endif

#endif /* __INCLUDE_RTE_BITMAP_H__ */