1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2018 Intel Corporation
5 #include <rte_malloc.h>
8 #include <rte_compressdev.h>
10 #include "comp_perf_test_verify.h"
/*
 * NOTE(review): this is a subsampled extract of the original file -- the
 * return-type line, several local declarations (num_iter, out_seg_sz,
 * num_enq, num_deq, allocated, the result variable), else-branches, goto
 * targets and closing braces are not visible here. Code is left byte-for-byte
 * untouched; only comments were added.
 *
 * Run num_iter iterations of compression or decompression (selected by
 * @type) over test_data's pre-populated mbuf chains, verifying each dequeued
 * operation succeeded and copying all produced output, in order, into
 * @output_data_ptr. The total number of produced bytes is reported through
 * @output_data_sz. Presumably returns 0 on success and a negative value on
 * failure (return statements not visible) -- the caller below treats < 0 as
 * error.
 */
13 main_loop(struct comp_test_data *test_data, uint8_t level,
14 enum rte_comp_xform_type type,
15 uint8_t *output_data_ptr,
16 size_t *output_data_sz)
18 uint8_t dev_id = test_data->cdev_id;
19 uint32_t i, iter, num_iter;
20 struct rte_comp_op **ops, **deq_ops;
21 void *priv_xform = NULL;
22 struct rte_comp_xform xform;
23 size_t output_size = 0;
24 struct rte_mbuf **input_bufs, **output_bufs;
/* Guard: a zero burst size would make the enqueue loop below useless. */
29 if (test_data == NULL || !test_data->burst_sz) {
31 "Unknown burst size\n");
/*
 * One contiguous array holds both op pointer sets: the first half for
 * enqueue, the second half (deq_ops, set below) for dequeue.
 */
35 ops = rte_zmalloc_socket(NULL,
36 2 * test_data->total_bufs * sizeof(struct rte_comp_op *),
41 "Can't allocate memory for ops strucures\n");
45 deq_ops = &ops[test_data->total_bufs];
/*
 * Build the transform and pick src/dst mbuf arrays according to the
 * direction: compression reads decomp_bufs and writes comp_bufs;
 * decompression does the reverse.
 */
47 if (type == RTE_COMP_COMPRESS) {
48 xform = (struct rte_comp_xform) {
49 .type = RTE_COMP_COMPRESS,
51 .algo = RTE_COMP_ALGO_DEFLATE,
52 .deflate.huffman = test_data->huffman_enc,
54 .window_size = test_data->window_sz,
55 .chksum = RTE_COMP_CHECKSUM_NONE,
56 .hash_algo = RTE_COMP_HASH_ALGO_NONE
59 input_bufs = test_data->decomp_bufs;
60 output_bufs = test_data->comp_bufs;
61 out_seg_sz = test_data->out_seg_sz;
63 xform = (struct rte_comp_xform) {
64 .type = RTE_COMP_DECOMPRESS,
66 .algo = RTE_COMP_ALGO_DEFLATE,
67 .chksum = RTE_COMP_CHECKSUM_NONE,
68 .window_size = test_data->window_sz,
69 .hash_algo = RTE_COMP_HASH_ALGO_NONE
72 input_bufs = test_data->comp_bufs;
73 output_bufs = test_data->decomp_bufs;
74 out_seg_sz = test_data->seg_sz;
77 /* Create private xform */
78 if (rte_compressdev_private_xform_create(dev_id, &xform,
80 RTE_LOG(ERR, USER1, "Private xform could not be created\n");
/* Outer iteration loop; per-iteration bookkeeping counters reset here. */
87 for (iter = 0; iter < num_iter; iter++) {
88 uint32_t total_ops = test_data->total_bufs;
89 uint32_t remaining_ops = test_data->total_bufs;
90 uint32_t total_deq_ops = 0;
91 uint32_t total_enq_ops = 0;
92 uint16_t ops_unused = 0;
/* Enqueue all buffers in bursts of at most burst_sz. */
98 while (remaining_ops > 0) {
99 uint16_t num_ops = RTE_MIN(remaining_ops,
100 test_data->burst_sz);
101 uint16_t ops_needed = num_ops - ops_unused;
104 * Move the unused operations from the previous
105 * enqueue_burst call to the front, to maintain order
107 if ((ops_unused > 0) && (num_enq > 0)) {
109 ops_unused * sizeof(struct rte_comp_op *);
111 memmove(ops, &ops[num_enq], nb_b_to_mov);
114 /* Allocate compression operations */
115 if (ops_needed && !rte_comp_op_bulk_alloc(
120 "Could not allocate enough operations\n");
/* Track live ops so the cleanup path can return them to the pool. */
124 allocated += ops_needed;
126 for (i = 0; i < ops_needed; i++) {
128 * Calculate next buffer to attach to operation
130 uint32_t buf_id = total_enq_ops + i +
132 uint16_t op_id = ops_unused + i;
133 /* Reset all data in output buffers */
134 struct rte_mbuf *m = output_bufs[buf_id];
136 m->pkt_len = out_seg_sz * m->nb_segs;
138 m->data_len = m->buf_len - m->data_off;
141 ops[op_id]->m_src = input_bufs[buf_id];
142 ops[op_id]->m_dst = output_bufs[buf_id];
143 ops[op_id]->src.offset = 0;
144 ops[op_id]->src.length =
145 rte_pktmbuf_pkt_len(input_bufs[buf_id]);
146 ops[op_id]->dst.offset = 0;
147 ops[op_id]->flush_flag = RTE_COMP_FLUSH_FINAL;
/* input_chksum is (ab)used here to remember the buffer index. */
148 ops[op_id]->input_chksum = buf_id;
149 ops[op_id]->private_xform = priv_xform;
152 num_enq = rte_compressdev_enqueue_burst(dev_id, 0, ops,
/* Check device stats for enqueue errors after each burst. */
155 struct rte_compressdev_stats stats;
157 rte_compressdev_stats_get(dev_id, &stats);
158 if (stats.enqueue_err_count) {
/* Ops the PMD did not accept are retried in the next burst. */
164 ops_unused = num_ops - num_enq;
165 remaining_ops -= num_enq;
166 total_enq_ops += num_enq;
168 num_deq = rte_compressdev_dequeue_burst(dev_id, 0,
170 test_data->burst_sz);
171 total_deq_ops += num_deq;
/* Verify each dequeued op and append its output to output_data_ptr. */
173 for (i = 0; i < num_deq; i++) {
174 struct rte_comp_op *op = deq_ops[i];
176 if (op->status != RTE_COMP_OP_STATUS_SUCCESS) {
178 "Some operations were not successful\n");
/*
 * rte_pktmbuf_read() copies into output_data_ptr only when the
 * data is segmented; for a contiguous mbuf it returns a direct
 * pointer, hence the explicit copy below when the pointers differ.
 */
182 const void *read_data_addr =
183 rte_pktmbuf_read(op->m_dst, 0,
184 op->produced, output_data_ptr);
185 if (read_data_addr == NULL) {
187 "Could not copy buffer in destination\n");
192 if (read_data_addr != output_data_ptr)
193 rte_memcpy(output_data_ptr,
194 rte_pktmbuf_mtod(op->m_dst,
197 output_data_ptr += op->produced;
198 output_size += op->produced;
/*
 * On the final iteration, trim the destination mbuf chain so its
 * pkt_len/data_len reflect the bytes actually produced (the chain
 * was sized to out_seg_sz * nb_segs above).
 */
203 if (iter == num_iter - 1) {
204 for (i = 0; i < num_deq; i++) {
205 struct rte_comp_op *op = deq_ops[i];
206 struct rte_mbuf *m = op->m_dst;
208 m->pkt_len = op->produced;
209 uint32_t remaining_data = op->produced;
210 uint16_t data_to_append;
212 while (remaining_data > 0) {
214 RTE_MIN(remaining_data,
216 m->data_len = data_to_append;
/* Return processed ops to the mempool for reuse. */
223 rte_mempool_put_bulk(test_data->op_pool,
224 (void **)deq_ops, num_deq);
225 allocated -= num_deq;
228 /* Dequeue the last operations */
229 while (total_deq_ops < total_ops) {
230 num_deq = rte_compressdev_dequeue_burst(dev_id, 0,
231 deq_ops, test_data->burst_sz);
233 struct rte_compressdev_stats stats;
235 rte_compressdev_stats_get(dev_id, &stats);
236 if (stats.dequeue_err_count) {
242 total_deq_ops += num_deq;
/* Same verify-and-copy handling as the inline dequeue loop above. */
244 for (i = 0; i < num_deq; i++) {
245 struct rte_comp_op *op = deq_ops[i];
247 if (op->status != RTE_COMP_OP_STATUS_SUCCESS) {
249 "Some operations were not successful\n");
253 const void *read_data_addr =
254 rte_pktmbuf_read(op->m_dst,
256 op->produced, output_data_ptr);
257 if (read_data_addr == NULL) {
259 "Could not copy buffer in destination\n");
264 if (read_data_addr != output_data_ptr)
265 rte_memcpy(output_data_ptr,
267 op->m_dst, uint8_t *),
269 output_data_ptr += op->produced;
270 output_size += op->produced;
/* Same last-iteration mbuf trimming as in the inline dequeue loop. */
274 if (iter == num_iter - 1) {
275 for (i = 0; i < num_deq; i++) {
276 struct rte_comp_op *op = deq_ops[i];
277 struct rte_mbuf *m = op->m_dst;
279 m->pkt_len = op->produced;
280 uint32_t remaining_data = op->produced;
281 uint16_t data_to_append;
283 while (remaining_data > 0) {
285 RTE_MIN(remaining_data,
287 m->data_len = data_to_append;
294 rte_mempool_put_bulk(test_data->op_pool,
295 (void **)deq_ops, num_deq);
296 allocated -= num_deq;
/* Report total produced bytes back to the caller. */
301 *output_data_sz = output_size;
/* Cleanup: release any still-allocated ops and the private xform. */
303 rte_mempool_put_bulk(test_data->op_pool, (void **)ops, allocated);
304 rte_compressdev_private_xform_free(dev_id, priv_xform);
/*
 * NOTE(review): subsampled extract -- the return-type line, error-path lines
 * (ret assignments / goto) and the function's tail (return, closing brace)
 * are not visible here; code left byte-for-byte untouched, comments only.
 *
 * Round-trip verification: compress the input data, decompress the result,
 * then check that the decompressed output matches the original input both in
 * length and content, and compute the compression ratio (percent) into
 * test_data->ratio.
 */
312 cperf_verification(struct comp_test_data *test_data, uint8_t level)
314 int ret = EXIT_SUCCESS;
316 test_data->ratio = 0;
/* Compress input -> compressed_data / comp_data_sz. */
318 if (main_loop(test_data, level, RTE_COMP_COMPRESS,
319 test_data->compressed_data,
320 &test_data->comp_data_sz) < 0) {
/* Decompress compressed_data -> decompressed_data / decomp_data_sz. */
325 if (main_loop(test_data, level, RTE_COMP_DECOMPRESS,
326 test_data->decompressed_data,
327 &test_data->decomp_data_sz) < 0) {
/* Length check: decompressed size must equal the original input size. */
332 if (test_data->decomp_data_sz != test_data->input_data_sz) {
334 "Decompressed data length not equal to input data length\n");
336 "Decompressed size = %zu, expected = %zu\n",
337 test_data->decomp_data_sz, test_data->input_data_sz);
/* Content check: byte-for-byte comparison against the original input. */
341 if (memcmp(test_data->decompressed_data,
342 test_data->input_data,
343 test_data->input_data_sz) != 0) {
345 "Decompressed data is not the same as file data\n");
/* Compression ratio as a percentage of the original input size. */
351 test_data->ratio = (double) test_data->comp_data_sz /
352 test_data->input_data_sz * 100;