#include "rte_compressdev.h"
#include "rte_compressdev_internal.h"
+const char *
+rte_comp_get_feature_name(uint64_t flag)
+{
+ switch (flag) {
+ case RTE_COMP_FF_STATEFUL_COMPRESSION:
+ return "STATEFUL_COMPRESSION";
+ case RTE_COMP_FF_STATEFUL_DECOMPRESSION:
+ return "STATEFUL_DECOMPRESSION";
+ case RTE_COMP_FF_OOP_SGL_IN_SGL_OUT:
+ return "OOP_SGL_IN_SGL_OUT";
+ case RTE_COMP_FF_OOP_SGL_IN_LB_OUT:
+ return "OOP_SGL_IN_LB_OUT";
+ case RTE_COMP_FF_OOP_LB_IN_SGL_OUT:
+ return "OOP_LB_IN_SGL_OUT";
+ case RTE_COMP_FF_MULTI_PKT_CHECKSUM:
+ return "MULTI_PKT_CHECKSUM";
+ case RTE_COMP_FF_ADLER32_CHECKSUM:
+ return "ADLER32_CHECKSUM";
+ case RTE_COMP_FF_CRC32_CHECKSUM:
+ return "CRC32_CHECKSUM";
+ case RTE_COMP_FF_CRC32_ADLER32_CHECKSUM:
+ return "CRC32_ADLER32_CHECKSUM";
+ case RTE_COMP_FF_NONCOMPRESSED_BLOCKS:
+ return "NONCOMPRESSED_BLOCKS";
+ case RTE_COMP_FF_SHA1_HASH:
+ return "SHA1_HASH";
+ case RTE_COMP_FF_SHA2_SHA256_HASH:
+ return "SHA2_SHA256_HASH";
+ case RTE_COMP_FF_SHAREABLE_PRIV_XFORM:
+ return "SHAREABLE_PRIV_XFORM";
+ case RTE_COMP_FF_HUFFMAN_FIXED:
+ return "HUFFMAN_FIXED";
+ case RTE_COMP_FF_HUFFMAN_DYNAMIC:
+ return "HUFFMAN_DYNAMIC";
+ default:
+ return NULL;
+ }
+}
+
/**
 * Bulk-allocate raw (not yet reset) operations from a mempool.
*
* @param nb_ops
* Number of operations to allocate
* @return
- * - 0: Success
- * - -ENOENT: Not enough entries in the mempool; no ops are retrieved.
+ * - nb_ops: Success, the nb_ops requested was allocated
+ * - 0: Not enough entries in the mempool; no ops are retrieved.
*/
static inline int
rte_comp_op_raw_bulk_alloc(struct rte_mempool *mempool,
op->mempool = mempool;
}
-struct rte_mempool * __rte_experimental
+struct rte_mempool *
rte_comp_op_pool_create(const char *name,
unsigned int nb_elts, unsigned int cache_size,
uint16_t user_size, int socket_id)
return mp;
}
-struct rte_comp_op * __rte_experimental
+struct rte_comp_op *
rte_comp_op_alloc(struct rte_mempool *mempool)
{
struct rte_comp_op *op = NULL;
int retval;
retval = rte_comp_op_raw_bulk_alloc(mempool, &op, 1);
- if (unlikely(retval < 0))
+ if (unlikely(retval != 1))
return NULL;
rte_comp_op_reset(op);
return op;
}
-int __rte_experimental
+int
rte_comp_op_bulk_alloc(struct rte_mempool *mempool,
struct rte_comp_op **ops, uint16_t nb_ops)
{
- int ret;
+ int retval;
uint16_t i;
- ret = rte_comp_op_raw_bulk_alloc(mempool, ops, nb_ops);
- if (unlikely(ret < nb_ops))
- return ret;
+ retval = rte_comp_op_raw_bulk_alloc(mempool, ops, nb_ops);
+ if (unlikely(retval != nb_ops))
+ return 0;
for (i = 0; i < nb_ops; i++)
rte_comp_op_reset(ops[i]);
* @param op
* Compress operation
*/
-void __rte_experimental
+void
rte_comp_op_free(struct rte_comp_op *op)
{
if (op != NULL && op->mempool != NULL)
rte_mempool_put(op->mempool, op);
}
+
+void
+rte_comp_op_bulk_free(struct rte_comp_op **ops, uint16_t nb_ops)
+{
+ uint16_t i;
+
+ for (i = 0; i < nb_ops; i++) {
+ if (ops[i] != NULL && ops[i]->mempool != NULL)
+ rte_mempool_put(ops[i]->mempool, ops[i]);
+ ops[i] = NULL;
+ }
+}