1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2018 Intel Corporation
5 #include <rte_malloc.h>
8 #include <rte_compressdev.h>
10 #include "comp_perf_test_verify.h"
11 #include "comp_perf_test_common.h"
/*
 * Tear-down callback for the verify test: releases the buffer/memory
 * resources embedded in the per-lcore context via the common helper.
 * NOTE(review): freeing of the context allocation itself (rte_free)
 * appears to live in lines elided from this view — confirm against the
 * full source.
 */
14 cperf_verify_test_destructor(void *arg)
17 comp_perf_free_memory(&((struct cperf_verify_ctx *)arg)->mem);
/*
 * Build a verify-test context for one device / queue-pair combination.
 * Allocates the context, records the device id, queue-pair id and the
 * shared test options, then delegates buffer allocation and mbuf
 * preparation to the common helpers.  On any failure the destructor is
 * invoked to release whatever was acquired.
 * NOTE(review): the NULL check on the rte_malloc result and the
 * success-path return appear to live in lines elided from this view.
 */
23 cperf_verify_test_constructor(uint8_t dev_id, uint16_t qp_id,
24 struct comp_test_data *options)
26 struct cperf_verify_ctx *ctx = NULL;
28 ctx = rte_malloc(NULL, sizeof(struct cperf_verify_ctx), 0);
33 ctx->mem.dev_id = dev_id;
34 ctx->mem.qp_id = qp_id;
35 ctx->options = options;
/* Both helpers return 0 on success; any non-zero result falls through
 * to the cleanup path below.
 */
37 if (!comp_perf_allocate_memory(ctx->options, &ctx->mem) &&
38 !prepare_bufs(ctx->options, &ctx->mem))
/* Error path: release the partially-initialised context. */
41 cperf_verify_test_destructor(ctx);
/*
 * Run one full pass (compress or decompress, selected by @type) over
 * all test buffers on the context's device/queue pair, and copy every
 * produced byte into the context's linear output area so the caller can
 * verify it against the original input.
 *
 * @param ctx   per-lcore verify context (device, qp, buffers, options)
 * @param type  RTE_COMP_COMPRESS or RTE_COMP_DECOMPRESS
 *
 * NOTE(review): error-handling branches, closing braces and several
 * declarations (e.g. 'out_seg_sz', 'allocated', 'num_iter') are elided
 * from this view of the source.
 */
46 main_loop(struct cperf_verify_ctx *ctx, enum rte_comp_xform_type type)
48 struct comp_test_data *test_data = ctx->options;
/* Direction-dependent cursors: where produced data is copied to, and
 * where its total size is reported (comp_data_sz vs decomp_data_sz).
 */
49 uint8_t *output_data_ptr;
50 size_t *output_data_sz;
51 struct cperf_mem_resources *mem = &ctx->mem;
53 uint8_t dev_id = mem->dev_id;
54 uint32_t i, iter, num_iter;
55 struct rte_comp_op **ops, **deq_ops;
56 void *priv_xform = NULL;
57 struct rte_comp_xform xform;
58 size_t output_size = 0;
59 struct rte_mbuf **input_bufs, **output_bufs;
/* Guard: a zero burst size would make the enqueue loop meaningless. */
64 if (test_data == NULL || !test_data->burst_sz) {
66 "Unknown burst size\n");
/* One allocation holds both halves: the first total_bufs entries are
 * the enqueue op array, the second half is the dequeue scratch array.
 */
70 ops = rte_zmalloc_socket(NULL,
71 2 * mem->total_bufs * sizeof(struct rte_comp_op *),
76 "Can't allocate memory for ops strucures\n");
80 deq_ops = &ops[mem->total_bufs];
/* Build the xform and pick source/destination buffer sets for the
 * requested direction.  Compression reads the plain-text mbufs and
 * writes the compressed set; decompression is the mirror image.
 */
82 if (type == RTE_COMP_COMPRESS) {
83 xform = (struct rte_comp_xform) {
84 .type = RTE_COMP_COMPRESS,
86 .algo = RTE_COMP_ALGO_DEFLATE,
87 .deflate.huffman = test_data->huffman_enc,
88 .level = test_data->level,
89 .window_size = test_data->window_sz,
90 .chksum = RTE_COMP_CHECKSUM_NONE,
91 .hash_algo = RTE_COMP_HASH_ALGO_NONE
94 output_data_ptr = ctx->mem.compressed_data;
95 output_data_sz = &ctx->comp_data_sz;
96 input_bufs = mem->decomp_bufs;
97 output_bufs = mem->comp_bufs;
98 out_seg_sz = test_data->out_seg_sz;
100 xform = (struct rte_comp_xform) {
101 .type = RTE_COMP_DECOMPRESS,
103 .algo = RTE_COMP_ALGO_DEFLATE,
104 .chksum = RTE_COMP_CHECKSUM_NONE,
105 .window_size = test_data->window_sz,
106 .hash_algo = RTE_COMP_HASH_ALGO_NONE
109 output_data_ptr = ctx->mem.decompressed_data;
110 output_data_sz = &ctx->decomp_data_sz;
111 input_bufs = mem->comp_bufs;
112 output_bufs = mem->decomp_bufs;
113 out_seg_sz = test_data->seg_sz;
/* Create private xform */
116 /* Create private xform */
117 if (rte_compressdev_private_xform_create(dev_id, &xform,
119 RTE_LOG(ERR, USER1, "Private xform could not be created\n");
/* Outer test loop: each iteration pushes all total_bufs operations
 * through the device.  NOTE(review): num_iter is assigned in elided
 * lines — presumably from test_data; confirm.
 */
126 for (iter = 0; iter < num_iter; iter++) {
127 uint32_t total_ops = mem->total_bufs;
128 uint32_t remaining_ops = mem->total_bufs;
129 uint32_t total_deq_ops = 0;
130 uint32_t total_enq_ops = 0;
131 uint16_t ops_unused = 0;
132 uint16_t num_enq = 0;
133 uint16_t num_deq = 0;
/* Enqueue in bursts until every operation has been submitted. */
137 while (remaining_ops > 0) {
138 uint16_t num_ops = RTE_MIN(remaining_ops,
139 test_data->burst_sz);
140 uint16_t ops_needed = num_ops - ops_unused;
/* Ops the device refused last burst are slid to the front of the
 * array so their submission order is preserved.
 */
143 * Move the unused operations from the previous
144 * enqueue_burst call to the front, to maintain order
146 if ((ops_unused > 0) && (num_enq > 0)) {
148 ops_unused * sizeof(struct rte_comp_op *);
150 memmove(ops, &ops[num_enq], nb_b_to_mov);
/* Allocate compression operations */
153 /* Allocate compression operations */
154 if (ops_needed && !rte_comp_op_bulk_alloc(
159 "Could not allocate enough operations\n");
/* Track outstanding ops so they can be returned to the pool on exit. */
163 allocated += ops_needed;
/* Attach source/destination mbufs to each newly allocated op. */
165 for (i = 0; i < ops_needed; i++) {
167 * Calculate next buffer to attach to operation
169 uint32_t buf_id = total_enq_ops + i +
171 uint16_t op_id = ops_unused + i;
172 /* Reset all data in output buffers */
173 struct rte_mbuf *m = output_bufs[buf_id];
/* Restore full writable capacity on the output chain before reuse. */
175 m->pkt_len = out_seg_sz * m->nb_segs;
177 m->data_len = m->buf_len - m->data_off;
180 ops[op_id]->m_src = input_bufs[buf_id];
181 ops[op_id]->m_dst = output_bufs[buf_id];
182 ops[op_id]->src.offset = 0;
183 ops[op_id]->src.length =
184 rte_pktmbuf_pkt_len(input_bufs[buf_id]);
185 ops[op_id]->dst.offset = 0;
/* Stateless operation: each op carries a complete payload. */
186 ops[op_id]->flush_flag = RTE_COMP_FLUSH_FINAL;
/* Buffer index stashed in input_chksum — presumably to recover
 * buffer identity after out-of-order completion; verify against
 * full source.
 */
187 ops[op_id]->input_chksum = buf_id;
188 ops[op_id]->private_xform = priv_xform;
191 num_enq = rte_compressdev_enqueue_burst(dev_id,
/* Query device stats to distinguish "queue full" from real errors. */
195 struct rte_compressdev_stats stats;
197 rte_compressdev_stats_get(dev_id, &stats);
198 if (stats.enqueue_err_count) {
/* Ops not accepted this burst are retried on the next pass. */
204 ops_unused = num_ops - num_enq;
205 remaining_ops -= num_enq;
206 total_enq_ops += num_enq;
/* Drain completions inline so the queue pair never stalls. */
208 num_deq = rte_compressdev_dequeue_burst(dev_id,
211 test_data->burst_sz);
212 total_deq_ops += num_deq;
/* Check status and append each op's produced bytes to the linear
 * verification buffer.
 */
214 for (i = 0; i < num_deq; i++) {
215 struct rte_comp_op *op = deq_ops[i];
217 if (op->status != RTE_COMP_OP_STATUS_SUCCESS) {
219 "Some operations were not successful\n");
/* rte_pktmbuf_read() copies into output_data_ptr only when the
 * data is segmented; for contiguous mbufs it returns a direct
 * pointer, hence the explicit copy below.
 */
223 const void *read_data_addr =
224 rte_pktmbuf_read(op->m_dst, 0,
225 op->produced, output_data_ptr);
226 if (read_data_addr == NULL) {
228 "Could not copy buffer in destination\n");
233 if (read_data_addr != output_data_ptr)
234 rte_memcpy(output_data_ptr,
235 rte_pktmbuf_mtod(op->m_dst,
238 output_data_ptr += op->produced;
239 output_size += op->produced;
/* On the final iteration trim each destination mbuf chain so its
 * pkt_len/data_len reflect the bytes actually produced (needed by the
 * opposite-direction pass that consumes these buffers next).
 */
244 if (iter == num_iter - 1) {
245 for (i = 0; i < num_deq; i++) {
246 struct rte_comp_op *op = deq_ops[i];
247 struct rte_mbuf *m = op->m_dst;
249 m->pkt_len = op->produced;
250 uint32_t remaining_data = op->produced;
251 uint16_t data_to_append;
253 while (remaining_data > 0) {
255 RTE_MIN(remaining_data,
257 m->data_len = data_to_append;
/* Return completed ops to the mempool. */
264 rte_mempool_put_bulk(mem->op_pool,
265 (void **)deq_ops, num_deq);
266 allocated -= num_deq;
/* Dequeue the last operations */
269 /* Dequeue the last operations */
270 while (total_deq_ops < total_ops) {
271 num_deq = rte_compressdev_dequeue_burst(dev_id,
274 test_data->burst_sz);
276 struct rte_compressdev_stats stats;
278 rte_compressdev_stats_get(dev_id, &stats);
279 if (stats.dequeue_err_count) {
285 total_deq_ops += num_deq;
/* Same verification/copy as in the main loop, for straggler ops. */
287 for (i = 0; i < num_deq; i++) {
288 struct rte_comp_op *op = deq_ops[i];
290 if (op->status != RTE_COMP_OP_STATUS_SUCCESS) {
292 "Some operations were not successful\n");
296 const void *read_data_addr =
297 rte_pktmbuf_read(op->m_dst,
299 op->produced, output_data_ptr);
300 if (read_data_addr == NULL) {
302 "Could not copy buffer in destination\n");
307 if (read_data_addr != output_data_ptr)
308 rte_memcpy(output_data_ptr,
310 op->m_dst, uint8_t *),
312 output_data_ptr += op->produced;
313 output_size += op->produced;
/* Final-iteration mbuf trimming for the drained ops (mirrors above). */
317 if (iter == num_iter - 1) {
318 for (i = 0; i < num_deq; i++) {
319 struct rte_comp_op *op = deq_ops[i];
320 struct rte_mbuf *m = op->m_dst;
322 m->pkt_len = op->produced;
323 uint32_t remaining_data = op->produced;
324 uint16_t data_to_append;
326 while (remaining_data > 0) {
328 RTE_MIN(remaining_data,
330 m->data_len = data_to_append;
337 rte_mempool_put_bulk(mem->op_pool,
338 (void **)deq_ops, num_deq);
339 allocated -= num_deq;
/* Publish the total produced size for the caller's verification. */
344 *output_data_sz = output_size;
/* Cleanup: return any still-outstanding ops and free the xform. */
346 rte_mempool_put_bulk(mem->op_pool, (void **)ops, allocated);
347 rte_compressdev_private_xform_free(dev_id, priv_xform);
353 cperf_verify_test_runner(void *test_ctx)
355 struct cperf_verify_ctx *ctx = test_ctx;
356 struct comp_test_data *test_data = ctx->options;
357 int ret = EXIT_SUCCESS;
358 static rte_atomic16_t display_once = RTE_ATOMIC16_INIT(0);
359 uint32_t lcore = rte_lcore_id();
361 ctx->mem.lcore_id = lcore;
363 test_data->ratio = 0;
365 if (main_loop(ctx, RTE_COMP_COMPRESS) < 0) {
370 if (main_loop(ctx, RTE_COMP_DECOMPRESS) < 0) {
375 if (ctx->decomp_data_sz != test_data->input_data_sz) {
377 "Decompressed data length not equal to input data length\n");
379 "Decompressed size = %zu, expected = %zu\n",
380 ctx->decomp_data_sz, test_data->input_data_sz);
384 if (memcmp(ctx->mem.decompressed_data,
385 test_data->input_data,
386 test_data->input_data_sz) != 0) {
388 "Decompressed data is not the same as file data\n");
394 ctx->ratio = (double) ctx->comp_data_sz /
395 test_data->input_data_sz * 100;
398 if (rte_atomic16_test_and_set(&display_once)) {
399 printf("%12s%6s%12s%17s\n",
400 "lcore id", "Level", "Comp size", "Comp ratio [%]");
402 printf("%12u%6u%12zu%17.2f\n",
404 test_data->level, ctx->comp_data_sz, ctx->ratio);