#include <rte_string_fns.h>
#include <rte_spinlock.h>
#include <rte_tailq.h>
+#include <rte_function_versioning.h>
#include "rte_mempool.h"
return 0;
}
+int
+rte_mempool_populate_iova_v20_0_1(struct rte_mempool *mp, char *vaddr,
+ rte_iova_t iova, size_t len, rte_mempool_memchunk_free_cb_t *free_cb,
+ void *opaque);
+
/* Add objects in the pool, using a physically contiguous memory
* zone. Return the number of objects added, or a negative value
* on error.
*/
int
-rte_mempool_populate_iova(struct rte_mempool *mp, char *vaddr,
+rte_mempool_populate_iova_v20_0_1(struct rte_mempool *mp, char *vaddr,
rte_iova_t iova, size_t len, rte_mempool_memchunk_free_cb_t *free_cb,
void *opaque)
{
off = RTE_PTR_ALIGN_CEIL(vaddr, RTE_MEMPOOL_ALIGN) - vaddr;
if (off > len) {
- ret = -EINVAL;
+ ret = -ENOBUFS;
goto fail;
}
/* not enough room to store one object */
if (i == 0) {
- ret = -EINVAL;
+ ret = -ENOBUFS;
goto fail;
}
rte_free(memhdr);
return ret;
}
+BIND_DEFAULT_SYMBOL(rte_mempool_populate_iova, _v20_0_1, 20.0.1);
+MAP_STATIC_SYMBOL(
+ int rte_mempool_populate_iova(struct rte_mempool *mp, char *vaddr,
+ rte_iova_t iova, size_t len,
+ rte_mempool_memchunk_free_cb_t *free_cb,
+ void *opaque),
+ rte_mempool_populate_iova_v20_0_1);
+
+int
+rte_mempool_populate_iova_v20_0(struct rte_mempool *mp, char *vaddr,
+ rte_iova_t iova, size_t len, rte_mempool_memchunk_free_cb_t *free_cb,
+ void *opaque);
+int
+rte_mempool_populate_iova_v20_0(struct rte_mempool *mp, char *vaddr,
+ rte_iova_t iova, size_t len, rte_mempool_memchunk_free_cb_t *free_cb,
+ void *opaque)
+{
+ int ret;
+
+ ret = rte_mempool_populate_iova_v20_0_1(mp, vaddr, iova, len, free_cb,
+ opaque);
+ if (ret == -ENOBUFS)
+ ret = -EINVAL;
+
+ return ret;
+}
+VERSION_SYMBOL(rte_mempool_populate_iova, _v20_0, 20.0);
static rte_iova_t
get_iova(void *addr)
size_t off, phys_len;
int ret, cnt = 0;
+ /* Call alloc_once() in advance: it prevents -ENOBUFS returned by
+ * rte_mempool_populate_iova() from being misinterpreted below.
+ */
+ ret = mempool_ops_alloc_once(mp);
+ if (ret != 0)
+ return ret;
+
if (mp->flags & MEMPOOL_F_NO_IOVA_CONTIG)
return rte_mempool_populate_iova(mp, addr, RTE_BAD_IOVA,
len, free_cb, opaque);
break;
}
- ret = rte_mempool_populate_iova(mp, addr + off, iova,
+ ret = rte_mempool_populate_iova_v20_0_1(mp, addr + off, iova,
phys_len, free_cb, opaque);
+ if (ret == -ENOBUFS)
+ continue;
if (ret < 0)
goto fail;
/* no need to call the free callback for next chunks */
* @return
* The number of objects added on success.
* On error, the chunk is not added in the memory list of the
- * mempool and a negative errno is returned.
+ * mempool and a negative errno is returned:
+ * (-ENOBUFS): not enough room in chunk for one object.
+ * (-ENOSPC): mempool is already populated.
+ * (-ENOMEM): allocation failure.
*/
int rte_mempool_populate_iova(struct rte_mempool *mp, char *vaddr,
rte_iova_t iova, size_t len, rte_mempool_memchunk_free_cb_t *free_cb,