nrank = 1;
/* process new object size */
- new_obj_size = (obj_size + RTE_CACHE_LINE_MASK) / RTE_CACHE_LINE_SIZE;
+ new_obj_size = (obj_size + RTE_MEMPOOL_ALIGN_MASK) / RTE_MEMPOOL_ALIGN;
while (get_gcd(new_obj_size, nrank * nchan) != 1)
new_obj_size++;
- return new_obj_size * RTE_CACHE_LINE_SIZE;
+ return new_obj_size * RTE_MEMPOOL_ALIGN;
}
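For reference, a minimal standalone sketch of the padding logic the hunk above switches over to RTE_MEMPOOL_ALIGN. It assumes a 64-byte alignment, one rank and four memory channels; every name except RTE_MEMPOOL_ALIGN is hypothetical and only illustrates the technique:

#include <stdio.h>

#define RTE_MEMPOOL_ALIGN 64	/* assumed default; the patch makes it overridable */

/* hypothetical helper with the same role as get_gcd() in the patch */
static unsigned int gcd(unsigned int a, unsigned int b)
{
	while (b != 0) {
		unsigned int t = b;
		b = a % b;
		a = t;
	}
	return a;
}

/* pad obj_size until its size in alignment units is coprime with
 * nrank * nchan, so consecutive objects spread across channels/ranks */
static unsigned int pad_for_spreading(unsigned int obj_size,
		unsigned int nrank, unsigned int nchan)
{
	unsigned int units = (obj_size + RTE_MEMPOOL_ALIGN - 1) / RTE_MEMPOOL_ALIGN;

	while (gcd(units, nrank * nchan) != 1)
		units++;
	return units * RTE_MEMPOOL_ALIGN;
}

int main(void)
{
	printf("%u\n", pad_for_spreading(256, 1, 4));	/* 4 units, gcd(4,4) != 1 -> padded to 5 units = 320 */
	printf("%u\n", pad_for_spreading(320, 1, 4));	/* already 5 units, gcd(5,4) == 1 -> stays 320 */
	return 0;
}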
static void
#endif
if ((flags & MEMPOOL_F_NO_CACHE_ALIGN) == 0)
sz->header_size = RTE_ALIGN_CEIL(sz->header_size,
- RTE_CACHE_LINE_SIZE);
+ RTE_MEMPOOL_ALIGN);
/* trailer contains the cookie in debug mode */
sz->trailer_size = 0;
if ((flags & MEMPOOL_F_NO_CACHE_ALIGN) == 0) {
sz->total_size = sz->header_size + sz->elt_size +
sz->trailer_size;
- sz->trailer_size += ((RTE_CACHE_LINE_SIZE -
- (sz->total_size & RTE_CACHE_LINE_MASK)) &
- RTE_CACHE_LINE_MASK);
+ sz->trailer_size += ((RTE_MEMPOOL_ALIGN -
+ (sz->total_size & RTE_MEMPOOL_ALIGN_MASK)) &
+ RTE_MEMPOOL_ALIGN_MASK);
}
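As a worked example, assuming the default 64-byte alignment and MEMPOOL_F_NO_CACHE_ALIGN not set: if the object header is, say, 40 bytes it is first rounded up to 64; for a 100-byte element the running total is then 64 + 100 + 0 = 164 bytes, so the trailer grows by (64 - (164 & 63)) & 63 = 28 bytes and each finished object occupies 192 bytes, a multiple of RTE_MEMPOOL_ALIGN. The 40-byte header is only an illustration; the real size comes from the object header structure (plus the cookie in debug mode).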
/*
 * reserve a memory zone for this mempool: private data is
 * cache-aligned
 */
private_data_size = (private_data_size +
- RTE_CACHE_LINE_MASK) & (~RTE_CACHE_LINE_MASK);
+ RTE_MEMPOOL_ALIGN_MASK) & (~RTE_MEMPOOL_ALIGN_MASK);
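The expression above is the usual round-up-to-a-power-of-two idiom: assuming a 64-byte RTE_MEMPOOL_ALIGN, a private_data_size of 100 becomes (100 + 63) & ~63 = 128, so the private area that sits between the mempool header and the objects keeps the same alignment as the objects themselves.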
if (! rte_eal_has_hugepages()) {
/*
 * If user provided an external memory buffer, then use it to
 * store mempool objects. Otherwise reserve a memzone that is large
 * enough to hold mempool header and metadata plus mempool objects.
 */
mempool_size = MEMPOOL_HEADER_SIZE(mp, pg_num) + private_data_size;
+ mempool_size = RTE_ALIGN_CEIL(mempool_size, RTE_MEMPOOL_ALIGN);
if (vaddr == NULL)
mempool_size += (size_t)objsz.total_size * n;
/* calculate address of the first element for continuous mempool. */
obj = (char *)mp + MEMPOOL_HEADER_SIZE(mp, pg_num) +
private_data_size;
+ obj = RTE_PTR_ALIGN_CEIL(obj, RTE_MEMPOOL_ALIGN);
/* populate address translation fields. */
mp->pg_num = pg_num;
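The two added lines work together, assuming the default cache-aligned layout: RTE_ALIGN_CEIL grows the reservation so that pushing the first element up still fits, and RTE_PTR_ALIGN_CEIL rounds the first element's address up to an RTE_MEMPOOL_ALIGN boundary (with 64-byte alignment, an address ending in 0x234 moves up to 0x240). Every later element then stays aligned as well, since with MEMPOOL_F_NO_CACHE_ALIGN unset each object's total_size is already a multiple of the alignment.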
/** Mempool over one chunk of physically continuous memory */
#define MEMPOOL_PG_NUM_DEFAULT 1
+#ifndef RTE_MEMPOOL_ALIGN
+#define RTE_MEMPOOL_ALIGN RTE_CACHE_LINE_SIZE
+#endif
+
+#define RTE_MEMPOOL_ALIGN_MASK (RTE_MEMPOOL_ALIGN - 1)
+
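Since the definition is wrapped in #ifndef, a build can pre-define RTE_MEMPOOL_ALIGN (for instance on the compiler command line or in a configuration header seen before this one) to pick a larger power-of-two alignment; the 128-byte value and the standalone check below are only an illustration of how the pair of macros behaves:

/* e.g. pass -DRTE_MEMPOOL_ALIGN=128 when building, before this header is seen */
#include <assert.h>

#ifndef RTE_MEMPOOL_ALIGN
#define RTE_MEMPOOL_ALIGN 64	/* stand-in for RTE_CACHE_LINE_SIZE in this sketch */
#endif
#define RTE_MEMPOOL_ALIGN_MASK (RTE_MEMPOOL_ALIGN - 1)

int main(void)
{
	/* the mask idiom assumes a power-of-two alignment */
	assert((RTE_MEMPOOL_ALIGN & RTE_MEMPOOL_ALIGN_MASK) == 0);

	/* round an arbitrary size up to the configured alignment */
	unsigned int sz = (100 + RTE_MEMPOOL_ALIGN_MASK) & ~RTE_MEMPOOL_ALIGN_MASK;

	assert(sz % RTE_MEMPOOL_ALIGN == 0);
	return 0;
}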
/**
* Mempool object header structure
*