/*
* Trigger hardware to begin performing enqueued operations
*/
-static __rte_always_inline void
+static __rte_always_inline int
__ioat_perform_ops(int dev_id)
{
	struct rte_ioat_rawdev *ioat =
			(struct rte_ioat_rawdev *)rte_rawdevs[dev_id].dev_private;
	rte_compiler_barrier();
*ioat->doorbell = ioat->next_write;
ioat->xstats.started = ioat->xstats.enqueued;
+
+ return 0;
}
static __rte_always_inline void
__idxd_movdir64b(volatile void *dst, const void *src)
{
	asm volatile (".byte 0x66, 0x0f, 0x38, 0xf8, 0x02"
			: : "a" (dst), "d" (src));
}
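/*
 * Trigger hardware to begin performing enqueued operations (idxd version):
 * mark the current batch as submitted and advance to the next batch slot
 */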
-static __rte_always_inline void
+static __rte_always_inline int
__idxd_perform_ops(int dev_id)
{
	struct rte_idxd_rawdev *idxd =
			(struct rte_idxd_rawdev *)rte_rawdevs[dev_id].dev_private;
struct rte_idxd_desc_batch *b = &idxd->batch_ring[idxd->next_batch];
if (b->submitted || b->op_count == 0)
- return;
+ return 0;
b->hdl_end = idxd->next_free_hdl;
b->comp.status = 0;
b->submitted = 1;
if (++idxd->next_batch == idxd->batch_ring_sz)
idxd->next_batch = 0;
idxd->xstats.started = idxd->xstats.enqueued;
+ return 0;
}
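/*
 * A minimal usage sketch (flush_copies() is a hypothetical application helper,
 * not part of this patch; it assumes the application includes rte_ioat_rawdev.h):
 * with perform_ops now returning int rather than void, the caller can check the
 * result of kicking the doorbell and propagate a failure instead of assuming the
 * submission always succeeds.
 */
static int
flush_copies(int dev_id)
{
	/* start all previously enqueued copies; a negative value is assumed
	 * to indicate a submission error under the new int return type */
	if (rte_ioat_perform_ops(dev_id) < 0)
		return -1;
	return 0;
}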
static inline int
rte_ioat_fence(int dev_id)
{
	enum rte_ioat_dev_type *type = (enum rte_ioat_dev_type *)rte_rawdevs[dev_id].dev_private;
	return (*type == RTE_IDXD_DEV) ? __idxd_fence(dev_id) : __ioat_fence(dev_id);
}
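/*
 * Public wrapper: start the enqueued operations on the given rawdev,
 * dispatching to the idxd or ioat implementation based on the device type
 */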
-static inline void
+static inline int
rte_ioat_perform_ops(int dev_id)
{
enum rte_ioat_dev_type *type =