1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2018 Intel Corporation
5 #include <rte_malloc.h>
8 #include <rte_compressdev.h>
10 #include "comp_perf_test_verify.h"
/*
 * Run one full compression or decompression pass over the test buffers on
 * device test_data->cdev_id, copying every byte the PMD produces into the
 * caller-supplied flat buffer output_data_ptr and reporting the total byte
 * count through *output_data_sz.
 *
 * NOTE(review): this extraction is missing intermediate source lines (the
 * number at the start of each line is the original file's line number), so
 * the comments below describe only the code that is visible here.
 */
13 main_loop(struct comp_test_data *test_data, uint8_t level,
14 enum rte_comp_xform_type type,
15 uint8_t *output_data_ptr,
16 size_t *output_data_sz)
18 uint8_t dev_id = test_data->cdev_id;
19 uint32_t i, iter, num_iter;
20 struct rte_comp_op **ops, **deq_ops;
21 void *priv_xform = NULL;
22 struct rte_comp_xform xform;
23 size_t output_size = 0;
24 struct rte_mbuf **input_bufs, **output_bufs;
/* A zero burst size would make the enqueue loop below do nothing. */
28 if (test_data == NULL || !test_data->burst_sz) {
30 "Unknown burst size\n");
/*
 * Single allocation sized for two op-pointer arrays: the first half is
 * used for enqueue ("ops"), the second half ("deq_ops", aliased below)
 * for dequeue.
 */
34 ops = rte_zmalloc_socket(NULL,
35 2 * test_data->total_bufs * sizeof(struct rte_comp_op *),
40 "Can't allocate memory for ops strucures\n");
44 deq_ops = &ops[test_data->total_bufs];
/*
 * Direction selects both the xform contents and which mbuf arrays act as
 * source/destination: compression reads decomp_bufs and writes comp_bufs,
 * decompression is the reverse.
 */
46 if (type == RTE_COMP_COMPRESS) {
47 xform = (struct rte_comp_xform) {
48 .type = RTE_COMP_COMPRESS,
50 .algo = RTE_COMP_ALGO_DEFLATE,
51 .deflate.huffman = test_data->huffman_enc,
53 .window_size = test_data->window_sz,
54 .chksum = RTE_COMP_CHECKSUM_NONE,
55 .hash_algo = RTE_COMP_HASH_ALGO_NONE
58 input_bufs = test_data->decomp_bufs;
59 output_bufs = test_data->comp_bufs;
61 xform = (struct rte_comp_xform) {
62 .type = RTE_COMP_DECOMPRESS,
64 .algo = RTE_COMP_ALGO_DEFLATE,
65 .chksum = RTE_COMP_CHECKSUM_NONE,
66 .window_size = test_data->window_sz,
67 .hash_algo = RTE_COMP_HASH_ALGO_NONE
70 input_bufs = test_data->comp_bufs;
71 output_bufs = test_data->decomp_bufs;
74 /* Create private xform */
75 if (rte_compressdev_private_xform_create(dev_id, &xform,
77 RTE_LOG(ERR, USER1, "Private xform could not be created\n");
/*
 * Outer loop: repeat the whole enqueue/dequeue cycle num_iter times
 * (num_iter is assigned on lines not visible in this extraction).
 */
84 for (iter = 0; iter < num_iter; iter++) {
85 uint32_t total_ops = test_data->total_bufs;
86 uint32_t remaining_ops = test_data->total_bufs;
87 uint32_t total_deq_ops = 0;
88 uint32_t total_enq_ops = 0;
89 uint16_t ops_unused = 0;
/*
 * Keep submitting bursts until every buffer has been enqueued,
 * dequeueing opportunistically after each enqueue.
 */
95 while (remaining_ops > 0) {
96 uint16_t num_ops = RTE_MIN(remaining_ops,
98 uint16_t ops_needed = num_ops - ops_unused;
101 * Move the unused operations from the previous
102 * enqueue_burst call to the front, to maintain order
104 if ((ops_unused > 0) && (num_enq > 0)) {
106 ops_unused * sizeof(struct rte_comp_op *);
108 memmove(ops, &ops[num_enq], nb_b_to_mov);
111 /* Allocate compression operations */
112 if (ops_needed && !rte_comp_op_bulk_alloc(
117 "Could not allocate enough operations\n");
121 allocated += ops_needed;
123 for (i = 0; i < ops_needed; i++) {
125 * Calculate next buffer to attach to operation
127 uint32_t buf_id = total_enq_ops + i +
129 uint16_t op_id = ops_unused + i;
130 /* Reset all data in output buffers */
131 struct rte_mbuf *m = output_bufs[buf_id];
133 m->pkt_len = test_data->seg_sz * m->nb_segs;
135 m->data_len = m->buf_len - m->data_off;
/*
 * Attach source/destination mbufs and request a final flush so
 * each op is a complete, self-contained (de)compression.
 * input_chksum doubles as the buffer id for later bookkeeping.
 */
138 ops[op_id]->m_src = input_bufs[buf_id];
139 ops[op_id]->m_dst = output_bufs[buf_id];
140 ops[op_id]->src.offset = 0;
141 ops[op_id]->src.length =
142 rte_pktmbuf_pkt_len(input_bufs[buf_id]);
143 ops[op_id]->dst.offset = 0;
144 ops[op_id]->flush_flag = RTE_COMP_FLUSH_FINAL;
145 ops[op_id]->input_chksum = buf_id;
146 ops[op_id]->private_xform = priv_xform;
149 num_enq = rte_compressdev_enqueue_burst(dev_id, 0, ops,
/* Device stats reveal enqueue errors the burst call itself hides. */
152 struct rte_compressdev_stats stats;
154 rte_compressdev_stats_get(dev_id, &stats);
155 if (stats.enqueue_err_count) {
/* Ops not accepted this burst are retried on the next pass. */
161 ops_unused = num_ops - num_enq;
162 remaining_ops -= num_enq;
163 total_enq_ops += num_enq;
165 num_deq = rte_compressdev_dequeue_burst(dev_id, 0,
167 test_data->burst_sz);
168 total_deq_ops += num_deq;
170 for (i = 0; i < num_deq; i++) {
171 struct rte_comp_op *op = deq_ops[i];
173 if (op->status != RTE_COMP_OP_STATUS_SUCCESS) {
175 "Some operations were not successful\n");
/*
 * rte_pktmbuf_read() copies into output_data_ptr only when the
 * mbuf is segmented; if the data was already contiguous it
 * returns a pointer into the mbuf, so copy it explicitly below.
 */
179 const void *read_data_addr =
180 rte_pktmbuf_read(op->m_dst, 0,
181 op->produced, output_data_ptr);
182 if (read_data_addr == NULL) {
184 "Could not copy buffer in destination\n");
189 if (read_data_addr != output_data_ptr)
190 rte_memcpy(output_data_ptr,
191 rte_pktmbuf_mtod(op->m_dst,
194 output_data_ptr += op->produced;
195 output_size += op->produced;
/*
 * On the final iteration, trim each destination mbuf chain so its
 * pkt_len/data_len reflect exactly the bytes produced — the buffers
 * are consumed as-is by the next stage of the verification.
 */
200 if (iter == num_iter - 1) {
201 for (i = 0; i < num_deq; i++) {
202 struct rte_comp_op *op = deq_ops[i];
203 struct rte_mbuf *m = op->m_dst;
205 m->pkt_len = op->produced;
206 uint32_t remaining_data = op->produced;
207 uint16_t data_to_append;
209 while (remaining_data > 0) {
211 RTE_MIN(remaining_data,
213 m->data_len = data_to_append;
/* Return the dequeued ops to the pool; they are done. */
220 rte_mempool_put_bulk(test_data->op_pool,
221 (void **)deq_ops, num_deq);
222 allocated -= num_deq;
225 /* Dequeue the last operations */
226 while (total_deq_ops < total_ops) {
227 num_deq = rte_compressdev_dequeue_burst(dev_id, 0,
228 deq_ops, test_data->burst_sz);
230 struct rte_compressdev_stats stats;
232 rte_compressdev_stats_get(dev_id, &stats);
233 if (stats.dequeue_err_count) {
239 total_deq_ops += num_deq;
/* Same per-op verification/copy as in the main loop above. */
241 for (i = 0; i < num_deq; i++) {
242 struct rte_comp_op *op = deq_ops[i];
244 if (op->status != RTE_COMP_OP_STATUS_SUCCESS) {
246 "Some operations were not successful\n");
250 const void *read_data_addr =
251 rte_pktmbuf_read(op->m_dst,
253 op->produced, output_data_ptr);
254 if (read_data_addr == NULL) {
256 "Could not copy buffer in destination\n");
261 if (read_data_addr != output_data_ptr)
262 rte_memcpy(output_data_ptr,
264 op->m_dst, uint8_t *),
266 output_data_ptr += op->produced;
267 output_size += op->produced;
/* Final-iteration mbuf trimming, mirroring the main loop. */
271 if (iter == num_iter - 1) {
272 for (i = 0; i < num_deq; i++) {
273 struct rte_comp_op *op = deq_ops[i];
274 struct rte_mbuf *m = op->m_dst;
276 m->pkt_len = op->produced;
277 uint32_t remaining_data = op->produced;
278 uint16_t data_to_append;
280 while (remaining_data > 0) {
282 RTE_MIN(remaining_data,
284 m->data_len = data_to_append;
291 rte_mempool_put_bulk(test_data->op_pool,
292 (void **)deq_ops, num_deq);
293 allocated -= num_deq;
/* Report total bytes produced across all iterations. */
298 *output_data_sz = output_size;
/* Release any ops still held, then the device-side xform. */
300 rte_mempool_put_bulk(test_data->op_pool, (void **)ops, allocated);
301 rte_compressdev_private_xform_free(dev_id, priv_xform);
/*
 * Verify one compression level: compress the input data, decompress the
 * result, check the round trip reproduces the original input exactly, and
 * record the compression ratio (percent of input size) in test_data->ratio.
 *
 * NOTE(review): intermediate lines (error-path jumps/returns, closing
 * braces) are missing from this extraction; the comments below describe
 * only the visible code.
 */
309 cperf_verification(struct comp_test_data *test_data, uint8_t level)
311 int ret = EXIT_SUCCESS;
313 test_data->ratio = 0;
/* Pass 1: compress input into test_data->compressed_data. */
315 if (main_loop(test_data, level, RTE_COMP_COMPRESS,
316 test_data->compressed_data,
317 &test_data->comp_data_sz) < 0) {
/* Pass 2: decompress the compressed output back out. */
322 if (main_loop(test_data, level, RTE_COMP_DECOMPRESS,
323 test_data->decompressed_data,
324 &test_data->decomp_data_sz) < 0) {
/* Round-trip length must equal the original input length... */
329 if (test_data->decomp_data_sz != test_data->input_data_sz) {
331 "Decompressed data length not equal to input data length\n");
333 "Decompressed size = %zu, expected = %zu\n",
334 test_data->decomp_data_sz, test_data->input_data_sz);
/* ...and the contents must match byte-for-byte. */
338 if (memcmp(test_data->decompressed_data,
339 test_data->input_data,
340 test_data->input_data_sz) != 0) {
342 "Decompressed data is not the same as file data\n");
/* Compression ratio expressed as a percentage of the input size. */
348 test_data->ratio = (double) test_data->comp_data_sz /
349 test_data->input_data_sz * 100;