nb = pg_num * sizeof(*pa);
if ((fd = open(PAGEMAP_FNAME, O_RDONLY)) < 0)
- return (ENOENT);
+ return ENOENT;
if ((rc = pread(fd, pa, nb, ofs)) < 0 || (rc -= nb) != 0) {
for (i = 0; i != pg_num; i++)
pa[i] = (pa[i] & PAGEMAP_PFN_MASK) * pg_sz;
- return (rc);
+ return rc;
}
struct rte_mempool *
pg_sz = getpagesize();
if (rte_is_power_of_2(pg_sz) == 0) {
rte_errno = EINVAL;
- return (mp);
+ return mp;
}
pg_shift = rte_bsf32(pg_sz);
"error code: %d\n",
__func__, name, sz, errno);
rte_errno = rc;
- return (mp);
+ return mp;
}
/* extract physical mappings of the allocated memory. */
(rc = get_phys_map(va, pa, pg_num, pg_sz)) == 0) {
/*
- * Check that allocated size is big enough to hold elt_num
- * objects and a calcualte how many bytes are actually required.
+ * Check that allocated size is big enough to hold elt_num
+ * objects and to calculate how many bytes are actually required.
*/
if ((usz = rte_mempool_xmem_usage(va, elt_num, total_size, pa,
}
free(pa);
- return (mp);
+ return mp;
}
#else /* RTE_EXEC_ENV_LINUXAPP */
__rte_unused int socket_id, __rte_unused unsigned flags)
{
rte_errno = ENOTSUP;
- return (NULL);
+ return NULL;
}
#endif /* RTE_EXEC_ENV_LINUXAPP */