iova = get_iova(addr + off);
- if (iova == RTE_BAD_IOVA && rte_eal_has_hugepages()) {
- ret = -EINVAL;
- goto fail;
- }
-
/* populate with the largest group of contiguous pages */
for (phys_len = RTE_MIN(
(size_t)(RTE_PTR_ALIGN_CEIL(addr + off + 1, pg_sz) -
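
The hunk above is in rte_mempool_populate_virt(): the early bail-out on an
unresolvable IOVA (RTE_BAD_IOVA while hugepages are in use) is dropped, and the
loop that follows simply groups as many virtually contiguous pages as it can.
As a rough illustration of how this path gets exercised, a caller can hand
plain page-aligned heap memory to rte_mempool_populate_virt(); the helper
names and error handling below are a hypothetical sketch, not part of the patch.

    #include <errno.h>
    #include <stdlib.h>
    #include <unistd.h>
    #include <rte_mempool.h>

    /* Hypothetical free callback: releases the heap chunk passed as opaque. */
    static void
    heap_chunk_free(struct rte_mempool_memhdr *memhdr, void *opaque)
    {
            (void)memhdr;
            free(opaque);
    }

    /* Hypothetical helper: populate mp from page-aligned heap memory whose
     * IOVA may be unknown; the library resolves IOVAs page by page. */
    static int
    populate_from_heap(struct rte_mempool *mp, size_t len)
    {
            size_t pg_sz = (size_t)getpagesize();
            void *addr = NULL;
            int ret;

            if (posix_memalign(&addr, pg_sz, len) != 0)
                    return -ENOMEM;

            ret = rte_mempool_populate_virt(mp, addr, len, pg_sz,
                            heap_chunk_free, addr);
            if (ret < 0)
                    free(addr);
            return ret; /* objects added, or a negative errno value */
    }

The next hunks are from what appears to be rte_mempool_populate_default().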
unsigned mz_id, n;
int ret;
bool need_iova_contig_obj;
+ size_t max_alloc_size = SIZE_MAX;
ret = mempool_ops_alloc_once(mp);
if (ret != 0)
if (min_chunk_size == (size_t)mem_size)
mz_flags |= RTE_MEMZONE_IOVA_CONTIG;
- mz = rte_memzone_reserve_aligned(mz_name, mem_size,
+ /* Allocate a memzone, retrying with a smaller area on ENOMEM */
+ do {
+ mz = rte_memzone_reserve_aligned(mz_name,
+ RTE_MIN((size_t)mem_size, max_alloc_size),
mp->socket_id, mz_flags, align);
- /* don't try reserving with 0 size if we were asked to reserve
- * IOVA-contiguous memory.
- */
- if (min_chunk_size < (size_t)mem_size && mz == NULL) {
- /* not enough memory, retry with the biggest zone we
- * have
- */
- mz = rte_memzone_reserve_aligned(mz_name, 0,
- mp->socket_id, mz_flags, align);
- }
+ if (mz == NULL && rte_errno != ENOMEM)
+ break;
+
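+ /* Halve the cap for the next request; it never exceeds mem_size. */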
+ max_alloc_size = RTE_MIN(max_alloc_size,
+ (size_t)mem_size) / 2;
+ } while (mz == NULL && max_alloc_size >= min_chunk_size);
+
if (mz == NULL) {
ret = -rte_errno;
goto fail;
}
- if (mz->len < min_chunk_size) {
- rte_memzone_free(mz);
- ret = -ENOMEM;
- goto fail;
- }
-
if (need_iova_contig_obj)
iova = mz->iova;
else
}
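
In place of the old "retry with size 0" fallback, the memzone reservation now
backs off gradually: an ENOMEM halves the requested length (capped at mem_size)
and the reservation is retried until it succeeds or the next attempt would no
longer cover min_chunk_size, after which the existing mz == NULL error path
applies; any failure other than ENOMEM stops the retries immediately. A minimal
standalone sketch of the same back-off pattern, with malloc() standing in for
rte_memzone_reserve_aligned() and hypothetical names throughout:

    #include <stdint.h>
    #include <stdlib.h>

    /* Hypothetical helper: try to obtain close to 'want' bytes, halving the
     * request on failure, and give up once a retry would drop below 'min'. */
    static void *
    alloc_backoff(size_t want, size_t min, size_t *out_len)
    {
            size_t cap = SIZE_MAX;  /* counterpart of max_alloc_size */
            void *p = NULL;

            do {
                    *out_len = want < cap ? want : cap;
                    p = malloc(*out_len);
                    /* The mempool code also stops retrying when the failure
                     * is anything other than ENOMEM. */
                    cap = *out_len / 2;
            } while (p == NULL && cap >= min);

            return p;               /* NULL if every attempt failed */
    }

The remaining hunks are in the anonymous-memory populate path
(rte_mempool_populate_anon()).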
ret = mempool_ops_alloc_once(mp);
- if (ret != 0)
- return ret;
+ if (ret < 0) {
+ rte_errno = -ret;
+ return 0;
+ }
size = get_anon_size(mp);
if (size < 0) {
ret = rte_mempool_populate_virt(mp, addr, size, getpagesize(),
rte_mempool_memchunk_anon_free, addr);
- if (ret == 0)
+ if (ret < 0) {
+ rte_errno = -ret;
goto fail;
+ }
return mp->populated_size;
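
With these changes the anonymous populate path reports failure through
rte_errno: it returns 0 and sets rte_errno when nothing could be populated,
and otherwise returns the number of objects now in the pool
(mp->populated_size). A hedged caller-side sketch; the wrapper name and
message text are illustrative only:

    #include <stdio.h>
    #include <rte_errno.h>
    #include <rte_mempool.h>

    /* Hypothetical wrapper showing how a caller checks the 0/rte_errno
     * convention used by rte_mempool_populate_anon(). */
    static int
    fill_pool_anon(struct rte_mempool *mp)
    {
            int n = rte_mempool_populate_anon(mp);

            if (n == 0) {
                    /* Nothing was populated; rte_errno holds the cause. */
                    fprintf(stderr, "populate_anon: %s\n",
                                    rte_strerror(rte_errno));
                    return -rte_errno;
            }
            return n; /* objects available in the pool */
    }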