rte_iova_t iova;
unsigned mz_id, n;
int ret;
- bool no_contig, try_contig, no_pageshift, external;
+ bool need_iova_contig_obj;
+ bool try_iova_contig_mempool;
+ bool alloc_in_ext_mem;
ret = mempool_ops_alloc_once(mp);
if (ret != 0)
return ret;
- /* check if we can retrieve a valid socket ID */
- ret = rte_malloc_heap_socket_is_external(mp->socket_id);
- if (ret < 0)
- return -EINVAL;
- external = ret;
-
/* mempool must not be populated */
if (mp->nb_mem_chunks != 0)
return -EEXIST;
- no_contig = mp->flags & MEMPOOL_F_NO_IOVA_CONTIG;
-
/*
* the following section calculates page shift and page size values.
*
* we should also try to go for contiguous memory even if we're in
* no-huge mode, because external memory may in fact be IOVA-contiguous.
*/
- no_pageshift = no_contig ||
- (!external && rte_eal_iova_mode() == RTE_IOVA_VA);
- try_contig = !no_contig && !no_pageshift &&
- (rte_eal_has_hugepages() || external);
- if (no_pageshift) {
+ /* check if we can retrieve a valid socket ID */
+ ret = rte_malloc_heap_socket_is_external(mp->socket_id);
+ if (ret < 0)
+ return -EINVAL;
+ alloc_in_ext_mem = (ret == 1);
+ need_iova_contig_obj = !(mp->flags & MEMPOOL_F_NO_IOVA_CONTIG);
+ try_iova_contig_mempool = false;
+
+ if (!need_iova_contig_obj) {
+ pg_sz = 0;
+ pg_shift = 0;
+ } else if (!alloc_in_ext_mem && rte_eal_iova_mode() == RTE_IOVA_VA) {
pg_sz = 0;
pg_shift = 0;
- } else if (try_contig) {
+ } else if (rte_eal_has_hugepages() || alloc_in_ext_mem) {
+ try_iova_contig_mempool = true;
pg_sz = get_min_page_size(mp->socket_id);
pg_shift = rte_bsf32(pg_sz);
} else {
size_t min_chunk_size;
unsigned int flags;
- if (try_contig || no_pageshift)
+ if (try_iova_contig_mempool || pg_sz == 0)
mem_size = rte_mempool_ops_calc_mem_size(mp, n,
0, &min_chunk_size, &align);
else
/* if we're trying to reserve contiguous memory, add appropriate
* memzone flag.
*/
- if (try_contig)
+ if (try_iova_contig_mempool)
flags |= RTE_MEMZONE_IOVA_CONTIG;
mz = rte_memzone_reserve_aligned(mz_name, mem_size,
/* if we were trying to reserve contiguous memory but failed, and the
 * minimum required contiguous chunk fits within a single page, adjust
 * the memzone size to the page size and try again.
*/
- if (mz == NULL && try_contig && min_chunk_size <= pg_sz) {
- try_contig = false;
+ if (mz == NULL && try_iova_contig_mempool &&
+ min_chunk_size <= pg_sz) {
+ try_iova_contig_mempool = false;
flags &= ~RTE_MEMZONE_IOVA_CONTIG;
mem_size = rte_mempool_ops_calc_mem_size(mp, n,
goto fail;
}
- if (no_contig)
- iova = RTE_BAD_IOVA;
- else
+ if (need_iova_contig_obj)
iova = mz->iova;
+ else
+ iova = RTE_BAD_IOVA;
- if (no_pageshift || try_contig)
+ if (try_iova_contig_mempool || pg_sz == 0)
ret = rte_mempool_populate_iova(mp, mz->addr,
iova, mz->len,
rte_mempool_memchunk_mz_free,
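
For reference, the renamed control flow above can be read as one small decision
function. The sketch below is illustrative only and is not part of the patch:
decide_pg_sz() and its plain boolean parameters are invented stand-ins for the
EAL queries in the real code (rte_eal_iova_mode() == RTE_IOVA_VA,
rte_eal_has_hugepages(), rte_malloc_heap_socket_is_external()), and
min_page_size stands in for get_min_page_size(mp->socket_id).

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

/* Illustrative condensation of the patched logic; not part of the patch.
 * The boolean inputs stand in for the EAL queries used in the real code,
 * and min_page_size stands in for get_min_page_size(mp->socket_id).
 */
static void
decide_pg_sz(bool need_iova_contig_obj, bool alloc_in_ext_mem,
		bool iova_as_va, bool have_hugepages, size_t min_page_size,
		size_t *pg_sz, bool *try_iova_contig_mempool)
{
	*try_iova_contig_mempool = false;

	if (!need_iova_contig_obj) {
		/* MEMPOOL_F_NO_IOVA_CONTIG was set: objects need not be
		 * IOVA-contiguous, so page boundaries don't matter */
		*pg_sz = 0;
	} else if (!alloc_in_ext_mem && iova_as_va) {
		/* IOVA as VA: internal allocations already look
		 * IOVA-contiguous to the hardware */
		*pg_sz = 0;
	} else if (have_hugepages || alloc_in_ext_mem) {
		/* first try one IOVA-contiguous memzone; on failure, fall
		 * back to chunks of the smallest available page size */
		*try_iova_contig_mempool = true;
		*pg_sz = min_page_size;
	} else {
		/* no-huge mode with internal memory: populate one regular
		 * page at a time (getpagesize() in the real code) */
		*pg_sz = 4096;
	}
}

int
main(void)
{
	size_t pg_sz;
	bool try_contig_mz;

	/* example: hugepages available, internal memory, IOVA as PA */
	decide_pg_sz(true, false, false, true, 2UL << 20,
			&pg_sz, &try_contig_mz);
	printf("pg_sz=%zu try_contig_mz=%d\n", pg_sz, try_contig_mz);
	return 0;
}

In the patch itself, pg_shift is then derived as rte_bsf32(pg_sz) whenever
pg_sz is nonzero, and RTE_MEMZONE_IOVA_CONTIG is requested only while
try_iova_contig_mempool is set, being dropped on the retry path when the
contiguous reservation fails.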