1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2018 - 2019 Intel Corporation
11 #include <rte_cycles.h>
12 #include <rte_malloc.h>
13 #include <rte_mempool.h>
15 #include <rte_compressdev.h>
16 #include <rte_string_fns.h>
18 #include "test_compressdev_test_buffer.h"
/*
 * Test-wide constants.
 * NOTE(review): this extract is missing interleaved source lines; the
 * numbered lines below are preserved verbatim from the original file.
 */
/* ceiling integer division; correct for the non-negative operands used here */
21 #define DIV_CEIL(a, b) ((a) / (b) + ((a) % (b) != 0))
23 #define DEFAULT_WINDOW_SIZE 15
24 #define DEFAULT_MEM_LEVEL 8
/* retry budget and per-retry wait (us) when draining dequeues */
25 #define MAX_DEQD_RETRIES 10
26 #define DEQUEUE_WAIT_TIME 10000
29 * 30% extra size for compressed data compared to original data,
30 * in case data size cannot be reduced and it is actually bigger
31 * due to the compress block headers
33 #define COMPRESS_BUF_SIZE_RATIO 1.3
34 #define COMPRESS_BUF_SIZE_RATIO_DISABLED 1.0
/* deliberately undersized output ratio used by the overflow tests */
35 #define COMPRESS_BUF_SIZE_RATIO_OVERFLOW 0.2
36 #define NUM_LARGE_MBUFS 16
37 #define SMALL_SEG_SIZE 256
40 #define NUM_MAX_XFORMS 16
41 #define NUM_MAX_INFLIGHT_OPS 128
/* zlib/gzip framing sizes; trimmed off the mbuf when a checksum is used */
44 #define ZLIB_CRC_CHECKSUM_WINDOW_BITS 31
45 #define ZLIB_HEADER_SIZE 2
46 #define ZLIB_TRAILER_SIZE 4
47 #define GZIP_HEADER_SIZE 10
48 #define GZIP_TRAILER_SIZE 8
/* 1-byte destination used to force out-of-space status */
50 #define OUT_OF_SPACE_BUF 1
52 #define MAX_MBUF_SEGMENT_SIZE 65535
53 #define MAX_DATA_MBUF_SIZE (MAX_MBUF_SEGMENT_SIZE - RTE_PKTMBUF_HEADROOM)
54 #define NUM_BIG_MBUFS (512 + 1)
55 #define BIG_DATA_TEST_SIZE (MAX_DATA_MBUF_SIZE * 2)
57 /* constants for "im buffer" tests start here */
59 /* number of mbufs lower than number of inflight ops */
60 #define IM_BUF_NUM_MBUFS 3
61 /* above threshold (QAT_FALLBACK_THLD) and below max mbuf size */
62 #define IM_BUF_DATA_TEST_SIZE_LB 59600
63 /* data size smaller than the queue capacity */
64 #define IM_BUF_DATA_TEST_SIZE_SGL (MAX_DATA_MBUF_SIZE * IM_BUF_NUM_MBUFS)
65 /* number of mbufs bigger than number of inflight ops */
66 #define IM_BUF_NUM_MBUFS_OVER (NUM_MAX_INFLIGHT_OPS + 1)
67 /* data size bigger than the queue capacity */
68 #define IM_BUF_DATA_TEST_SIZE_OVER (MAX_DATA_MBUF_SIZE * IM_BUF_NUM_MBUFS_OVER)
69 /* number of mid-size mbufs */
70 #define IM_BUF_NUM_MBUFS_MID ((NUM_MAX_INFLIGHT_OPS / 3) + 1)
71 /* capacity of mid-size mbufs */
72 #define IM_BUF_DATA_TEST_SIZE_MID (MAX_DATA_MBUF_SIZE * IM_BUF_NUM_MBUFS_MID)
/*
 * Shared test types and suite-wide state.
 * NOTE(review): several declarator lines (e.g. the array's element type,
 * enum/struct headers and closing braces) are missing from this extract;
 * the numbered lines are preserved verbatim.
 */
/* printable names indexed by enum rte_comp_huffman */
76 huffman_type_strings[] = {
77 [RTE_COMP_HUFFMAN_DEFAULT] = "PMD default",
78 [RTE_COMP_HUFFMAN_FIXED] = "Fixed",
79 [RTE_COMP_HUFFMAN_DYNAMIC] = "Dynamic"
/* buffer layout variants: linear (LB) vs chained scatter-gather (SGL) */
90 LB_BOTH = 0, /* both input and output are linear*/
91 SGL_BOTH, /* both input and output are chained */
92 SGL_TO_LB, /* input buffer is chained */
93 LB_TO_SGL /* output buffer is chained */
/* direction selector shared by the setup/run helpers below */
106 enum operation_type {
107 OPERATION_COMPRESSION,
108 OPERATION_DECOMPRESSION
/* per-op private area (sized via rte_comp_op_pool_create below) */
111 struct priv_op_data {
/* suite-level pools and default xforms, freed in testsuite_teardown() */
115 struct comp_testsuite_params {
116 struct rte_mempool *large_mbuf_pool;
117 struct rte_mempool *small_mbuf_pool;
118 struct rte_mempool *big_mbuf_pool;
119 struct rte_mempool *op_pool;
120 struct rte_comp_xform *def_comp_xform;
121 struct rte_comp_xform *def_decomp_xform;
/* per-test-case input buffers and transformation sets */
124 struct interim_data_params {
125 const char * const *test_bufs;
126 unsigned int num_bufs;
128 struct rte_comp_xform **compress_xforms;
129 struct rte_comp_xform **decompress_xforms;
130 unsigned int num_xforms;
/* user-selected knobs for one test run (buffer shape, zlib dir, etc.) */
133 struct test_data_params {
134 enum rte_comp_op_type compress_state;
135 enum rte_comp_op_type decompress_state;
136 enum varied_buff buff_type;
137 enum zlib_direction zlib_dir;
138 unsigned int out_of_space;
139 unsigned int big_data;
140 /* stateful decompression specific parameters */
141 unsigned int decompress_output_block_size;
142 unsigned int decompress_steps_max;
143 /* external mbufs specific parameters */
144 unsigned int use_external_mbufs;
145 unsigned int inbuf_data_size;
146 const struct rte_memzone *inbuf_memzone;
147 const struct rte_memzone *compbuf_memzone;
148 const struct rte_memzone *uncompbuf_memzone;
149 /* overflow test activation */
150 enum overflow_test overflow;
151 enum ratio_switch ratio;
/* aggregation of all per-run scratch arrays, passed between helpers */
154 struct test_private_arrays {
155 struct rte_mbuf **uncomp_bufs;
156 struct rte_mbuf **comp_bufs;
157 struct rte_comp_op **ops;
158 struct rte_comp_op **ops_processed;
160 uint64_t *compress_checksum;
161 uint32_t *compressed_data_size;
163 char **all_decomp_data;
164 unsigned int *decomp_produced_data_size;
165 uint16_t num_priv_xforms;
/* single global suite state, zero-initialized */
168 static struct comp_testsuite_params testsuite_params = { 0 };
/*
 * Suite teardown: warn about any mbufs/ops still outstanding in the
 * suite pools (indicates a leak in a test case), then release all
 * suite-level pools and the default xforms.
 * NOTE(review): extract is missing lines (signature/braces); numbered
 * lines preserved verbatim. rte_mempool_free()/rte_free() tolerate NULL.
 */
172 testsuite_teardown(void)
174 struct comp_testsuite_params *ts_params = &testsuite_params;
176 if (rte_mempool_in_use_count(ts_params->large_mbuf_pool))
177 RTE_LOG(ERR, USER1, "Large mbuf pool still has unfreed bufs\n");
178 if (rte_mempool_in_use_count(ts_params->small_mbuf_pool))
179 RTE_LOG(ERR, USER1, "Small mbuf pool still has unfreed bufs\n");
180 if (rte_mempool_in_use_count(ts_params->big_mbuf_pool))
181 RTE_LOG(ERR, USER1, "Big mbuf pool still has unfreed bufs\n");
182 if (rte_mempool_in_use_count(ts_params->op_pool))
183 RTE_LOG(ERR, USER1, "op pool still has unfreed ops\n");
185 rte_mempool_free(ts_params->large_mbuf_pool);
186 rte_mempool_free(ts_params->small_mbuf_pool);
187 rte_mempool_free(ts_params->big_mbuf_pool);
188 rte_mempool_free(ts_params->op_pool);
189 rte_free(ts_params->def_comp_xform);
190 rte_free(ts_params->def_decomp_xform);
/*
 * Suite setup: requires at least one compressdev; sizes the large pool
 * from the longest test string (scaled by COMPRESS_BUF_SIZE_RATIO so the
 * same buffers serve compression and decompression), creates the
 * small/big mbuf pools for SGL tests and the comp-op pool, then fills in
 * default DEFLATE compress/decompress xforms.
 * NOTE(review): extract is missing lines (error returns, pool args);
 * numbered lines preserved verbatim.
 */
194 testsuite_setup(void)
196 struct comp_testsuite_params *ts_params = &testsuite_params;
197 uint32_t max_buf_size = 0;
200 if (rte_compressdev_count() == 0) {
201 RTE_LOG(WARNING, USER1, "Need at least one compress device\n");
205 RTE_LOG(NOTICE, USER1, "Running tests on device %s\n",
206 rte_compressdev_name_get(0));
/* longest test vector, incl. NUL terminator, drives the pool elt size */
208 for (i = 0; i < RTE_DIM(compress_test_bufs); i++)
209 max_buf_size = RTE_MAX(max_buf_size,
210 strlen(compress_test_bufs[i]) + 1);
213 * Buffers to be used in compression and decompression.
214 * Since decompressed data might be larger than
215 * compressed data (due to block header),
216 * buffers should be big enough for both cases.
218 max_buf_size *= COMPRESS_BUF_SIZE_RATIO;
219 ts_params->large_mbuf_pool = rte_pktmbuf_pool_create("large_mbuf_pool",
222 max_buf_size + RTE_PKTMBUF_HEADROOM,
224 if (ts_params->large_mbuf_pool == NULL) {
225 RTE_LOG(ERR, USER1, "Large mbuf pool could not be created\n");
229 /* Create mempool with smaller buffers for SGL testing */
230 ts_params->small_mbuf_pool = rte_pktmbuf_pool_create("small_mbuf_pool",
231 NUM_LARGE_MBUFS * MAX_SEGS,
233 SMALL_SEG_SIZE + RTE_PKTMBUF_HEADROOM,
235 if (ts_params->small_mbuf_pool == NULL) {
236 RTE_LOG(ERR, USER1, "Small mbuf pool could not be created\n");
240 /* Create mempool with big buffers for SGL testing */
241 ts_params->big_mbuf_pool = rte_pktmbuf_pool_create("big_mbuf_pool",
244 MAX_MBUF_SEGMENT_SIZE,
246 if (ts_params->big_mbuf_pool == NULL) {
247 RTE_LOG(ERR, USER1, "Big mbuf pool could not be created\n");
/* per-op private area carries struct priv_op_data (orig_idx) */
251 ts_params->op_pool = rte_comp_op_pool_create("op_pool", NUM_OPS,
252 0, sizeof(struct priv_op_data),
254 if (ts_params->op_pool == NULL) {
255 RTE_LOG(ERR, USER1, "Operation pool could not be created\n");
259 ts_params->def_comp_xform =
260 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
261 if (ts_params->def_comp_xform == NULL) {
263 "Default compress xform could not be created\n");
266 ts_params->def_decomp_xform =
267 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
268 if (ts_params->def_decomp_xform == NULL) {
270 "Default decompress xform could not be created\n");
274 /* Initializes default values for compress/decompress xforms */
275 ts_params->def_comp_xform->type = RTE_COMP_COMPRESS;
/* note: trailing comma here is a comma operator chaining onto the next
 * assignment — unusual but harmless (present in upstream too) */
276 ts_params->def_comp_xform->compress.algo = RTE_COMP_ALGO_DEFLATE,
277 ts_params->def_comp_xform->compress.deflate.huffman =
278 RTE_COMP_HUFFMAN_DEFAULT;
279 ts_params->def_comp_xform->compress.level = RTE_COMP_LEVEL_PMD_DEFAULT;
280 ts_params->def_comp_xform->compress.chksum = RTE_COMP_CHECKSUM_NONE;
281 ts_params->def_comp_xform->compress.window_size = DEFAULT_WINDOW_SIZE;
283 ts_params->def_decomp_xform->type = RTE_COMP_DECOMPRESS;
284 ts_params->def_decomp_xform->decompress.algo = RTE_COMP_ALGO_DEFLATE,
285 ts_params->def_decomp_xform->decompress.chksum = RTE_COMP_CHECKSUM_NONE;
286 ts_params->def_decomp_xform->decompress.window_size = DEFAULT_WINDOW_SIZE;
/* error-path cleanup (original labels not visible in this extract) */
291 testsuite_teardown();
/*
 * Per-test setup: configure device 0 with one queue pair (depth
 * NUM_MAX_INFLIGHT_OPS) and NUM_MAX_XFORMS private xforms, then start it.
 * NOTE(review): extract is missing lines (return statements, braces);
 * numbered lines preserved verbatim.
 */
297 generic_ut_setup(void)
299 /* Configure compressdev (one device, one queue pair) */
300 struct rte_compressdev_config config = {
301 .socket_id = rte_socket_id(),
303 .max_nb_priv_xforms = NUM_MAX_XFORMS,
307 if (rte_compressdev_configure(0, &config) < 0) {
308 RTE_LOG(ERR, USER1, "Device configuration failed\n");
312 if (rte_compressdev_queue_pair_setup(0, 0, NUM_MAX_INFLIGHT_OPS,
313 rte_socket_id()) < 0) {
314 RTE_LOG(ERR, USER1, "Queue pair setup failed\n");
318 if (rte_compressdev_start(0) < 0) {
319 RTE_LOG(ERR, USER1, "Device could not be started\n");
/* Per-test teardown: stop then close device 0; only close can fail. */
327 generic_ut_teardown(void)
329 rte_compressdev_stop(0);
330 if (rte_compressdev_close(0) < 0)
331 RTE_LOG(ERR, USER1, "Device could not be closed\n");
/*
 * Negative test: device configuration/queue-pair setup must FAIL for
 * (a) zero queue pairs, (b) more queue pairs than the device maximum
 * (only when the device advertises a maximum), and (c) queue-pair setup
 * on an unconfigured device. TEST_ASSERT_FAIL expects a non-zero return.
 * NOTE(review): extract is missing lines; numbered lines preserved.
 */
335 test_compressdev_invalid_configuration(void)
337 struct rte_compressdev_config invalid_config;
338 struct rte_compressdev_config valid_config = {
339 .socket_id = rte_socket_id(),
341 .max_nb_priv_xforms = NUM_MAX_XFORMS,
344 struct rte_compressdev_info dev_info;
346 RTE_LOG(INFO, USER1, "This is a negative test, errors are expected\n");
348 /* Invalid configuration with 0 queue pairs */
349 memcpy(&invalid_config, &valid_config,
350 sizeof(struct rte_compressdev_config));
351 invalid_config.nb_queue_pairs = 0;
353 TEST_ASSERT_FAIL(rte_compressdev_configure(0, &invalid_config),
354 "Device configuration was successful "
355 "with no queue pairs (invalid)\n");
358 * Invalid configuration with too many queue pairs
359 * (if there is an actual maximum number of queue pairs)
361 rte_compressdev_info_get(0, &dev_info);
/* max_nb_queue_pairs == 0 means "no limit" — skip the over-limit case */
362 if (dev_info.max_nb_queue_pairs != 0) {
363 memcpy(&invalid_config, &valid_config,
364 sizeof(struct rte_compressdev_config));
365 invalid_config.nb_queue_pairs = dev_info.max_nb_queue_pairs + 1;
367 TEST_ASSERT_FAIL(rte_compressdev_configure(0, &invalid_config),
368 "Device configuration was successful "
369 "with too many queue pairs (invalid)\n");
372 /* Invalid queue pair setup, with no number of queue pairs set */
373 TEST_ASSERT_FAIL(rte_compressdev_queue_pair_setup(0, 0,
374 NUM_MAX_INFLIGHT_OPS, rte_socket_id()),
375 "Queue pair setup was successful "
376 "with no queue pairs set (invalid)\n");
/*
 * Compare two buffers: lengths must match, then contents via memcmp.
 * Logs and returns on mismatch (return lines not visible in extract).
 */
382 compare_buffers(const char *buffer1, uint32_t buffer1_len,
383 const char *buffer2, uint32_t buffer2_len)
385 if (buffer1_len != buffer2_len) {
386 RTE_LOG(ERR, USER1, "Buffer lengths are different\n");
390 if (memcmp(buffer1, buffer2, buffer1_len) != 0) {
391 RTE_LOG(ERR, USER1, "Buffers are different\n");
399 * Maps compressdev and Zlib flush flags
/* translate rte_comp_flush_flag to the matching zlib Z_* flush value;
 * the returned zlib constants are on lines missing from this extract */
402 map_zlib_flush_flag(enum rte_comp_flush_flag flag)
405 case RTE_COMP_FLUSH_NONE:
407 case RTE_COMP_FLUSH_SYNC:
409 case RTE_COMP_FLUSH_FULL:
411 case RTE_COMP_FLUSH_FINAL:
414 * There should be only the values above,
415 * so this should never happen
/*
 * Reference compression path using zlib deflate (stateless, one shot).
 * Chained (SGL) src/dst mbufs are flattened into temporary linear
 * buffers; on success the result is copied back into the dst SGL chain.
 * When an ADLER32/CRC32 checksum is requested, the zlib/gzip framing is
 * stripped from the dst mbuf and op->produced is adjusted accordingly,
 * so the output matches raw-DEFLATE PMD output.
 * NOTE(review): extract is missing lines (gotos, zlib constants, some
 * offsets); numbered lines preserved verbatim.
 */
423 compress_zlib(struct rte_comp_op *op,
424 const struct rte_comp_xform *xform, int mem_level)
428 int strategy, window_bits, comp_level;
429 int ret = TEST_FAILED;
430 uint8_t *single_src_buf = NULL;
431 uint8_t *single_dst_buf = NULL;
433 /* initialize zlib stream */
434 stream.zalloc = Z_NULL;
435 stream.zfree = Z_NULL;
436 stream.opaque = Z_NULL;
/* FIXED huffman maps to a zlib strategy (value on a missing line) */
438 if (xform->compress.deflate.huffman == RTE_COMP_HUFFMAN_FIXED)
441 strategy = Z_DEFAULT_STRATEGY;
444 * Window bits is the base two logarithm of the window size (in bytes).
445 * When doing raw DEFLATE, this number will be negative.
447 window_bits = -(xform->compress.window_size);
448 if (xform->compress.chksum == RTE_COMP_CHECKSUM_ADLER32)
450 else if (xform->compress.chksum == RTE_COMP_CHECKSUM_CRC32)
451 window_bits = ZLIB_CRC_CHECKSUM_WINDOW_BITS;
453 comp_level = xform->compress.level;
455 if (comp_level != RTE_COMP_LEVEL_NONE)
456 ret = deflateInit2(&stream, comp_level, Z_DEFLATED,
457 window_bits, mem_level, strategy);
459 ret = deflateInit(&stream, Z_NO_COMPRESSION);
462 printf("Zlib deflate could not be initialized\n");
466 /* Assuming stateless operation */
/* flatten a chained source mbuf into one linear scratch buffer */
468 if (op->m_src->nb_segs > 1) {
469 single_src_buf = rte_malloc(NULL,
470 rte_pktmbuf_pkt_len(op->m_src), 0);
471 if (single_src_buf == NULL) {
472 RTE_LOG(ERR, USER1, "Buffer could not be allocated\n");
476 if (rte_pktmbuf_read(op->m_src, op->src.offset,
477 rte_pktmbuf_pkt_len(op->m_src) -
479 single_src_buf) == NULL) {
481 "Buffer could not be read entirely\n");
485 stream.avail_in = op->src.length;
486 stream.next_in = single_src_buf;
489 stream.avail_in = op->src.length;
490 stream.next_in = rte_pktmbuf_mtod_offset(op->m_src, uint8_t *,
/* chained destination: deflate into a linear scratch, copy back later */
494 if (op->m_dst->nb_segs > 1) {
496 single_dst_buf = rte_malloc(NULL,
497 rte_pktmbuf_pkt_len(op->m_dst), 0);
498 if (single_dst_buf == NULL) {
500 "Buffer could not be allocated\n");
504 stream.avail_out = op->m_dst->pkt_len;
505 stream.next_out = single_dst_buf;
507 } else {/* linear output */
508 stream.avail_out = op->m_dst->data_len;
509 stream.next_out = rte_pktmbuf_mtod_offset(op->m_dst, uint8_t *,
513 /* Stateless operation, all buffer will be compressed in one go */
514 zlib_flush = map_zlib_flush_flag(op->flush_flag);
515 ret = deflate(&stream, zlib_flush);
517 if (stream.avail_in != 0) {
518 RTE_LOG(ERR, USER1, "Buffer could not be read entirely\n");
522 if (ret != Z_STREAM_END)
525 /* Copy data to destination SGL */
526 if (op->m_dst->nb_segs > 1) {
527 uint32_t remaining_data = stream.total_out;
528 uint8_t *src_data = single_dst_buf;
529 struct rte_mbuf *dst_buf = op->m_dst;
531 while (remaining_data > 0) {
532 uint8_t *dst_data = rte_pktmbuf_mtod_offset(dst_buf,
533 uint8_t *, op->dst.offset);
/* last (partial) segment: copy the remainder and stop */
535 if (remaining_data < dst_buf->data_len) {
536 memcpy(dst_data, src_data, remaining_data);
539 memcpy(dst_data, src_data, dst_buf->data_len);
540 remaining_data -= dst_buf->data_len;
541 src_data += dst_buf->data_len;
542 dst_buf = dst_buf->next;
547 op->consumed = stream.total_in;
/* strip zlib/gzip framing so output looks like raw DEFLATE */
548 if (xform->compress.chksum == RTE_COMP_CHECKSUM_ADLER32) {
549 rte_pktmbuf_adj(op->m_dst, ZLIB_HEADER_SIZE);
550 rte_pktmbuf_trim(op->m_dst, ZLIB_TRAILER_SIZE);
551 op->produced = stream.total_out - (ZLIB_HEADER_SIZE +
553 } else if (xform->compress.chksum == RTE_COMP_CHECKSUM_CRC32) {
554 rte_pktmbuf_adj(op->m_dst, GZIP_HEADER_SIZE);
555 rte_pktmbuf_trim(op->m_dst, GZIP_TRAILER_SIZE);
556 op->produced = stream.total_out - (GZIP_HEADER_SIZE +
559 op->produced = stream.total_out;
561 op->status = RTE_COMP_OP_STATUS_SUCCESS;
/* stream.adler holds adler32 or crc32 depending on window_bits mode */
562 op->output_chksum = stream.adler;
564 deflateReset(&stream);
/* cleanup path: rte_free(NULL) is a no-op */
569 rte_free(single_src_buf);
570 rte_free(single_dst_buf);
/*
 * Reference decompression path using zlib inflate (stateless, one shot).
 * A chained source mbuf is flattened into a linear scratch buffer and a
 * matching linear dst scratch is allocated; output is copied back into
 * the dst chain afterwards.
 * NOTE(review): extract is missing lines (gotos, some declarations);
 * numbered lines preserved verbatim. The copy-out at line 650 is gated
 * on m_src's segment count — consistent with the allocation condition at
 * line 605, but presumably leaves an SGL dst unhandled when the src is
 * linear; confirm against the full file before relying on LB_TO_SGL here.
 */
576 decompress_zlib(struct rte_comp_op *op,
577 const struct rte_comp_xform *xform)
582 int ret = TEST_FAILED;
583 uint8_t *single_src_buf = NULL;
584 uint8_t *single_dst_buf = NULL;
586 /* initialize zlib stream */
587 stream.zalloc = Z_NULL;
588 stream.zfree = Z_NULL;
589 stream.opaque = Z_NULL;
592 * Window bits is the base two logarithm of the window size (in bytes).
593 * When doing raw DEFLATE, this number will be negative.
595 window_bits = -(xform->decompress.window_size);
596 ret = inflateInit2(&stream, window_bits);
/* message says "deflate" but this is the inflate init path */
599 printf("Zlib deflate could not be initialized\n");
603 /* Assuming stateless operation */
605 if (op->m_src->nb_segs > 1) {
606 single_src_buf = rte_malloc(NULL,
607 rte_pktmbuf_pkt_len(op->m_src), 0);
608 if (single_src_buf == NULL) {
609 RTE_LOG(ERR, USER1, "Buffer could not be allocated\n");
612 single_dst_buf = rte_malloc(NULL,
613 rte_pktmbuf_pkt_len(op->m_dst), 0);
614 if (single_dst_buf == NULL) {
615 RTE_LOG(ERR, USER1, "Buffer could not be allocated\n");
618 if (rte_pktmbuf_read(op->m_src, 0,
619 rte_pktmbuf_pkt_len(op->m_src),
620 single_src_buf) == NULL) {
622 "Buffer could not be read entirely\n");
626 stream.avail_in = op->src.length;
627 stream.next_in = single_src_buf;
628 stream.avail_out = rte_pktmbuf_pkt_len(op->m_dst);
629 stream.next_out = single_dst_buf;
632 stream.avail_in = op->src.length;
633 stream.next_in = rte_pktmbuf_mtod(op->m_src, uint8_t *);
634 stream.avail_out = op->m_dst->data_len;
635 stream.next_out = rte_pktmbuf_mtod(op->m_dst, uint8_t *);
638 /* Stateless operation, all buffer will be compressed in one go */
639 zlib_flush = map_zlib_flush_flag(op->flush_flag);
640 ret = inflate(&stream, zlib_flush);
642 if (stream.avail_in != 0) {
643 RTE_LOG(ERR, USER1, "Buffer could not be read entirely\n");
647 if (ret != Z_STREAM_END)
/* copy the linear scratch output back into the chained dst mbuf */
650 if (op->m_src->nb_segs > 1) {
651 uint32_t remaining_data = stream.total_out;
652 uint8_t *src_data = single_dst_buf;
653 struct rte_mbuf *dst_buf = op->m_dst;
655 while (remaining_data > 0) {
656 uint8_t *dst_data = rte_pktmbuf_mtod(dst_buf,
659 if (remaining_data < dst_buf->data_len) {
660 memcpy(dst_data, src_data, remaining_data);
663 memcpy(dst_data, src_data, dst_buf->data_len);
664 remaining_data -= dst_buf->data_len;
665 src_data += dst_buf->data_len;
666 dst_buf = dst_buf->next;
671 op->consumed = stream.total_in;
672 op->produced = stream.total_out;
673 op->status = RTE_COMP_OP_STATUS_SUCCESS;
675 inflateReset(&stream);
/*
 * Build a chained (SGL) mbuf of total_data_size bytes in head_buf:
 * appends up to seg_size bytes per segment, allocating extra segments
 * from small_mbuf_pool (or large_mbuf_pool for an oversized final
 * segment) and chaining them. If test_buf is non-NULL its bytes are
 * copied into the segments; a NULL test_buf just reserves space
 * (used for output buffers). limit_segs_in_sgl caps the chain length
 * (0 = no cap).
 * NOTE(review): extract is missing lines (returns, loop header parts);
 * numbered lines preserved verbatim.
 */
685 prepare_sgl_bufs(const char *test_buf, struct rte_mbuf *head_buf,
686 uint32_t total_data_size,
687 struct rte_mempool *small_mbuf_pool,
688 struct rte_mempool *large_mbuf_pool,
689 uint8_t limit_segs_in_sgl,
692 uint32_t remaining_data = total_data_size;
693 uint16_t num_remaining_segs = DIV_CEIL(remaining_data, seg_size);
694 struct rte_mempool *pool;
695 struct rte_mbuf *next_seg;
698 const char *data_ptr = test_buf;
/* clamp segment count when a limit is requested (head counts as one) */
702 if (limit_segs_in_sgl != 0 && num_remaining_segs > limit_segs_in_sgl)
703 num_remaining_segs = limit_segs_in_sgl - 1;
706 * Allocate data in the first segment (header) and
707 * copy data if test buffer is provided
709 if (remaining_data < seg_size)
710 data_size = remaining_data;
712 data_size = seg_size;
714 buf_ptr = rte_pktmbuf_append(head_buf, data_size);
715 if (buf_ptr == NULL) {
717 "Not enough space in the 1st buffer\n");
721 if (data_ptr != NULL) {
722 /* Copy characters without NULL terminator */
723 memcpy(buf_ptr, data_ptr, data_size);
724 data_ptr += data_size;
726 remaining_data -= data_size;
727 num_remaining_segs--;
730 * Allocate the rest of the segments,
731 * copy the rest of the data and chain the segments.
733 for (i = 0; i < num_remaining_segs; i++) {
735 if (i == (num_remaining_segs - 1)) {
/* last segment absorbs all leftover data; pick a pool that fits */
737 if (remaining_data > seg_size)
738 pool = large_mbuf_pool;
740 pool = small_mbuf_pool;
741 data_size = remaining_data;
743 data_size = seg_size;
744 pool = small_mbuf_pool;
747 next_seg = rte_pktmbuf_alloc(pool);
748 if (next_seg == NULL) {
750 "New segment could not be allocated "
751 "from the mempool\n");
754 buf_ptr = rte_pktmbuf_append(next_seg, data_size);
755 if (buf_ptr == NULL) {
757 "Not enough space in the buffer\n");
758 rte_pktmbuf_free(next_seg);
761 if (data_ptr != NULL) {
762 /* Copy characters without NULL terminator */
763 memcpy(buf_ptr, data_ptr, data_size);
764 data_ptr += data_size;
766 remaining_data -= data_size;
768 ret = rte_pktmbuf_chain(head_buf, next_seg);
770 rte_pktmbuf_free(next_seg);
772 "Segment could not chained\n");
/* no-op free callback for externally attached mbuf buffers (the backing
 * memzones are owned and freed by the test cases themselves) */
781 extbuf_free_callback(void *addr __rte_unused, void *opaque __rte_unused)
/*
 * Enqueue all ops on device 0 / qp 0 in one burst, then poll dequeues
 * until the same number comes back. Retries up to MAX_DEQD_RETRIES
 * times, sleeping DEQUEUE_WAIT_TIME us between retries so the driver
 * can make progress; bails out if the device never returns everything.
 * NOTE(review): extract is missing lines (error-return statements);
 * numbered lines preserved verbatim.
 */
786 test_run_enqueue_dequeue(struct rte_comp_op **ops,
787 struct rte_comp_op **ops_processed,
788 unsigned int num_bufs)
790 uint16_t num_enqd, num_deqd, num_total_deqd;
791 unsigned int deqd_retries = 0;
794 /* Enqueue and dequeue all operations */
795 num_enqd = rte_compressdev_enqueue_burst(0, 0, ops, num_bufs);
796 if (num_enqd < num_bufs) {
798 "Some operations could not be enqueued\n");
802 /* dequeue ops even on error (same number of ops as was enqueued) */
805 while (num_total_deqd < num_enqd) {
807 * If retrying a dequeue call, wait for 10 ms to allow
808 * enough time to the driver to process the operations
810 if (deqd_retries != 0) {
812 * Avoid infinite loop if not all the
813 * operations get out of the device
815 if (deqd_retries == MAX_DEQD_RETRIES) {
817 "Not all operations could be dequeued\n");
821 usleep(DEQUEUE_WAIT_TIME);
823 num_deqd = rte_compressdev_dequeue_burst(0, 0,
824 &ops_processed[num_total_deqd], num_bufs);
825 num_total_deqd += num_deqd;
834 * Arrays initialization. Input buffers preparation for compression.
836 * API that initializes all the private arrays to NULL
837 * and allocates input buffers to perform compression operations.
840 * Interim data containing session/transformation objects.
842 * The test parameters set by users (command line parameters).
843 * @param test_priv_data
844 * A container used for aggregation all the private test arrays.
/*
 * Zero all per-run scratch arrays, then allocate and fill the source
 * (uncompressed) mbufs: either attach external memzone buffers, build
 * SGL chains via prepare_sgl_bufs(), or append the test string into a
 * linear mbuf. Pool choice depends on big_data / buff_type. For
 * stateful decompression a collect-all output buffer is also allocated.
 * NOTE(review): extract is missing lines (returns, some allocations);
 * numbered lines preserved verbatim.
 */
850 test_setup_com_bufs(const struct interim_data_params *int_data,
851 const struct test_data_params *test_data,
852 const struct test_private_arrays *test_priv_data)
854 /* local variables: */
859 char **all_decomp_data = test_priv_data->all_decomp_data;
861 struct comp_testsuite_params *ts_params = &testsuite_params;
864 const char * const *test_bufs = int_data->test_bufs;
865 unsigned int num_bufs = int_data->num_bufs;
867 /* from test_data: */
868 unsigned int buff_type = test_data->buff_type;
869 unsigned int big_data = test_data->big_data;
871 /* from test_priv_data: */
872 struct rte_mbuf **uncomp_bufs = test_priv_data->uncomp_bufs;
873 struct rte_mempool *buf_pool;
875 static struct rte_mbuf_ext_shared_info inbuf_info;
877 size_t array_size = sizeof(void *) * num_bufs;
879 /* Initialize all arrays to NULL */
880 memset(test_priv_data->uncomp_bufs, 0, array_size);
881 memset(test_priv_data->comp_bufs, 0, array_size);
882 memset(test_priv_data->ops, 0, array_size);
883 memset(test_priv_data->ops_processed, 0, array_size);
884 memset(test_priv_data->priv_xforms, 0, array_size);
885 memset(test_priv_data->compressed_data_size,
886 0, sizeof(uint32_t) * num_bufs);
/* stateful decompress collects output across steps into one buffer */
888 if (test_data->decompress_state == RTE_COMP_OP_STATEFUL) {
889 data_size = strlen(test_bufs[0]) + 1;
890 *all_decomp_data = rte_malloc(NULL, data_size,
891 RTE_CACHE_LINE_SIZE);
895 buf_pool = ts_params->big_mbuf_pool;
896 else if (buff_type == SGL_BOTH)
897 buf_pool = ts_params->small_mbuf_pool;
899 buf_pool = ts_params->large_mbuf_pool;
901 /* for compression uncomp_bufs is used as a source buffer */
902 /* allocation from buf_pool (mempool type) */
903 ret = rte_pktmbuf_alloc_bulk(buf_pool,
904 uncomp_bufs, num_bufs);
907 "Source mbufs could not be allocated "
908 "from the mempool\n");
/* external-buffer mode: attach the shared input memzone to every mbuf */
912 if (test_data->use_external_mbufs) {
913 inbuf_info.free_cb = extbuf_free_callback;
914 inbuf_info.fcb_opaque = NULL;
915 rte_mbuf_ext_refcnt_set(&inbuf_info, 1);
916 for (i = 0; i < num_bufs; i++) {
917 rte_pktmbuf_attach_extbuf(uncomp_bufs[i],
918 test_data->inbuf_memzone->addr,
919 test_data->inbuf_memzone->iova,
920 test_data->inbuf_data_size,
922 buf_ptr = rte_pktmbuf_append(uncomp_bufs[i],
923 test_data->inbuf_data_size);
924 if (buf_ptr == NULL) {
926 "Append extra bytes to the source mbuf failed\n");
930 } else if (buff_type == SGL_BOTH || buff_type == SGL_TO_LB) {
931 for (i = 0; i < num_bufs; i++) {
932 data_size = strlen(test_bufs[i]) + 1;
933 if (prepare_sgl_bufs(test_bufs[i], uncomp_bufs[i],
935 big_data ? buf_pool : ts_params->small_mbuf_pool,
936 big_data ? buf_pool : ts_params->large_mbuf_pool,
937 big_data ? 0 : MAX_SEGS,
938 big_data ? MAX_DATA_MBUF_SIZE : SMALL_SEG_SIZE) < 0)
/* linear source: copy the NUL-terminated test string into the mbuf */
942 for (i = 0; i < num_bufs; i++) {
943 data_size = strlen(test_bufs[i]) + 1;
945 buf_ptr = rte_pktmbuf_append(uncomp_bufs[i], data_size);
946 if (buf_ptr == NULL) {
948 "Append extra bytes to the source mbuf failed\n");
951 strlcpy(buf_ptr, test_bufs[i], data_size);
959 * Data size calculation (for both compression and decompression).
961 * Calculate size of anticipated output buffer required for both
962 * compression and decompression operations based on input int_data.
965 * Operation type: compress or decompress
966 * @param out_of_space_and_zlib
967 * Boolean value to switch into "out of space" buffer if set.
968 * To test "out-of-space" data size, zlib_decompress must be set as well.
969 * @param test_priv_data
970 * A container used for aggregation all the private test arrays.
972 * Interim data containing session/transformation objects.
974 * The test parameters set by users (command line parameters).
976 * current buffer index
980 static inline uint32_t
/*
 * Compute the destination buffer size for op number i.
 * Out-of-space tests get a 1-byte buffer; compression sizes the output
 * as input length times a ratio (normal, ratio-disabled, or the
 * deliberately undersized overflow ratio when the PMD — not zlib — is
 * compressing); decompression sizes it from the original test string
 * looked up via the op's stored orig_idx.
 * NOTE(review): extract is missing lines (else branches, return);
 * numbered lines preserved verbatim.
 */
981 test_mbufs_calculate_data_size(
982 enum operation_type op_type,
983 unsigned int out_of_space_and_zlib,
984 const struct test_private_arrays *test_priv_data,
985 const struct interim_data_params *int_data,
986 const struct test_data_params *test_data,
989 /* local variables: */
991 struct priv_op_data *priv_data;
993 enum ratio_switch ratio = test_data->ratio;
995 uint8_t not_zlib_compr; /* true if zlib isn't current compression dev */
996 enum overflow_test overflow = test_data->overflow;
998 /* from test_priv_data: */
999 struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
1001 /* from int_data: */
1002 const char * const *test_bufs = int_data->test_bufs;
1004 if (out_of_space_and_zlib)
1005 data_size = OUT_OF_SPACE_BUF;
1007 if (op_type == OPERATION_COMPRESSION) {
1008 not_zlib_compr = (test_data->zlib_dir == ZLIB_DECOMPRESS
1009 || test_data->zlib_dir == ZLIB_NONE);
1011 ratio_val = (ratio == RATIO_ENABLED) ?
1012 COMPRESS_BUF_SIZE_RATIO :
1013 COMPRESS_BUF_SIZE_RATIO_DISABLED;
/* overflow test shrinks the dst buffer only for PMD compression */
1015 ratio_val = (not_zlib_compr &&
1016 (overflow == OVERFLOW_ENABLED)) ?
1017 COMPRESS_BUF_SIZE_RATIO_OVERFLOW :
1020 data_size = strlen(test_bufs[i]) * ratio_val;
/* decompression: recover the original index from the op's priv area */
1022 priv_data = (struct priv_op_data *)
1023 (ops_processed[i] + 1);
1024 data_size = strlen(test_bufs[priv_data->orig_idx]) + 1;
1033 * Memory buffers preparation (for both compression and decompression).
1035 * Function allocates output buffers to perform compression
1036 * or decompression operations depending on value of op_type.
1039 * Operation type: compress or decompress
1040 * @param out_of_space_and_zlib
1041 * Boolean value to switch into "out of space" buffer if set.
1042 * To test "out-of-space" data size, zlib_decompress must be set as well.
1043 * @param test_priv_data
1044 * A container used for aggregation all the private test arrays.
1046 * Interim data containing session/transformation objects.
1048 * The test parameters set by users (command line parameters).
1049 * @param current_extbuf_info,
1050 * The structure containing all the information related to external mbufs
/*
 * Allocate the destination mbufs for one direction: comp_bufs for
 * compression, uncomp_bufs (reused) for decompression. Either attaches
 * the direction-specific external memzone, builds SGL chains, or
 * appends linear space sized by test_mbufs_calculate_data_size().
 * Non-dynamic huffman decompression output is padded by the compress
 * ratio as extra headroom.
 * NOTE(review): extract is missing lines (returns, some args);
 * numbered lines preserved verbatim.
 */
1056 test_setup_output_bufs(
1057 enum operation_type op_type,
1058 unsigned int out_of_space_and_zlib,
1059 const struct test_private_arrays *test_priv_data,
1060 const struct interim_data_params *int_data,
1061 const struct test_data_params *test_data,
1062 struct rte_mbuf_ext_shared_info *current_extbuf_info)
1064 /* local variables: */
1070 /* from test_priv_data: */
1071 struct rte_mbuf **current_bufs;
1073 /* from int_data: */
1074 unsigned int num_bufs = int_data->num_bufs;
1076 /* from test_data: */
1077 unsigned int buff_type = test_data->buff_type;
1078 unsigned int big_data = test_data->big_data;
1079 const struct rte_memzone *current_memzone;
1081 struct comp_testsuite_params *ts_params = &testsuite_params;
1082 struct rte_mempool *buf_pool;
1085 buf_pool = ts_params->big_mbuf_pool;
1086 else if (buff_type == SGL_BOTH)
1087 buf_pool = ts_params->small_mbuf_pool;
1089 buf_pool = ts_params->large_mbuf_pool;
1091 if (op_type == OPERATION_COMPRESSION)
1092 current_bufs = test_priv_data->comp_bufs;
1094 current_bufs = test_priv_data->uncomp_bufs;
1096 /* the mbufs allocation*/
1097 ret = rte_pktmbuf_alloc_bulk(buf_pool, current_bufs, num_bufs);
1100 "Destination mbufs could not be allocated "
1101 "from the mempool\n");
1105 if (test_data->use_external_mbufs) {
1106 current_extbuf_info->free_cb = extbuf_free_callback;
1107 current_extbuf_info->fcb_opaque = NULL;
1108 rte_mbuf_ext_refcnt_set(current_extbuf_info, 1);
1109 if (op_type == OPERATION_COMPRESSION)
1110 current_memzone = test_data->compbuf_memzone;
1112 current_memzone = test_data->uncompbuf_memzone;
1114 for (i = 0; i < num_bufs; i++) {
1115 rte_pktmbuf_attach_extbuf(current_bufs[i],
1116 current_memzone->addr,
1117 current_memzone->iova,
1118 current_memzone->len,
1119 current_extbuf_info);
1120 rte_pktmbuf_append(current_bufs[i],
1121 current_memzone->len);
1124 for (i = 0; i < num_bufs; i++) {
1126 enum rte_comp_huffman comp_huffman =
1127 ts_params->def_comp_xform->compress.deflate.huffman;
1129 /* data size calculation */
1130 data_size = test_mbufs_calculate_data_size(
1132 out_of_space_and_zlib,
/* non-dynamic huffman can expand; pad decompress dst by the ratio */
1138 if (comp_huffman != RTE_COMP_HUFFMAN_DYNAMIC) {
1139 if (op_type == OPERATION_DECOMPRESSION)
1140 data_size *= COMPRESS_BUF_SIZE_RATIO;
1143 /* data allocation */
1144 if (buff_type == SGL_BOTH || buff_type == LB_TO_SGL) {
/* NULL test_buf: reserve space only, no data copied in */
1145 ret = prepare_sgl_bufs(NULL, current_bufs[i],
1147 big_data ? buf_pool :
1148 ts_params->small_mbuf_pool,
1149 big_data ? buf_pool :
1150 ts_params->large_mbuf_pool,
1151 big_data ? 0 : MAX_SEGS,
1152 big_data ? MAX_DATA_MBUF_SIZE :
1157 buf_ptr = rte_pktmbuf_append(current_bufs[i],
1159 if (buf_ptr == NULL) {
1161 "Append extra bytes to the destination mbuf failed\n");
1172 * The main compression function.
1174 * Function performs compression operation.
1175 * Operation(s) configuration, depending on CLI parameters.
1176 * Operation(s) processing.
1179 * Interim data containing session/transformation objects.
1181 * The test parameters set by users (command line parameters).
1182 * @param test_priv_data
1183 * A container used for aggregation all the private test arrays.
/*
 * Run the compression pass: allocate ops, wire src/dst mbufs, stamp each
 * op's private area with its original index (dequeue order is not
 * guaranteed), then either compress via zlib (reference path) or create
 * private xforms — shared once per xform if the PMD advertises
 * RTE_COMP_FF_SHAREABLE_PRIV_XFORM, otherwise one per op — and push the
 * burst through the device. OUT_OF_SPACE_RECOVERABLE ops have their
 * src/dst windows advanced for a retry pass. Frees ops and private
 * xforms on the error path.
 * NOTE(review): extract is missing lines (returns, loop bodies, labels);
 * numbered lines preserved verbatim.
 */
1189 test_deflate_comp_run(const struct interim_data_params *int_data,
1190 const struct test_data_params *test_data,
1191 const struct test_private_arrays *test_priv_data)
1193 /* local variables: */
1194 struct priv_op_data *priv_data;
1196 uint16_t num_priv_xforms = 0;
1201 struct comp_testsuite_params *ts_params = &testsuite_params;
1203 /* from test_data: */
1204 enum rte_comp_op_type operation_type = test_data->compress_state;
1205 unsigned int zlib_compress =
1206 (test_data->zlib_dir == ZLIB_ALL ||
1207 test_data->zlib_dir == ZLIB_COMPRESS);
1209 /* from int_data: */
1210 struct rte_comp_xform **compress_xforms = int_data->compress_xforms;
1211 unsigned int num_xforms = int_data->num_xforms;
1212 unsigned int num_bufs = int_data->num_bufs;
1214 /* from test_priv_data: */
1215 struct rte_mbuf **comp_bufs = test_priv_data->comp_bufs;
1216 struct rte_mbuf **uncomp_bufs = test_priv_data->uncomp_bufs;
1217 struct rte_comp_op **ops = test_priv_data->ops;
1218 struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
1219 void **priv_xforms = test_priv_data->priv_xforms;
1221 const struct rte_compressdev_capabilities *capa =
1222 rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
1224 /* Build the compression operations */
1225 ret = rte_comp_op_bulk_alloc(ts_params->op_pool, ops, num_bufs);
1228 "Compress operations could not be allocated "
1229 "from the mempool\n");
1234 for (i = 0; i < num_bufs; i++) {
1235 ops[i]->m_src = uncomp_bufs[i];
1236 ops[i]->m_dst = comp_bufs[i];
1237 ops[i]->src.offset = 0;
1238 ops[i]->src.length = rte_pktmbuf_pkt_len(uncomp_bufs[i]);
1239 ops[i]->dst.offset = 0;
1241 RTE_LOG(DEBUG, USER1,
1242 "Uncompressed buffer length = %u compressed buffer length = %u",
1243 rte_pktmbuf_pkt_len(uncomp_bufs[i]),
1244 rte_pktmbuf_pkt_len(comp_bufs[i]));
1246 if (operation_type == RTE_COMP_OP_STATELESS) {
1247 ops[i]->flush_flag = RTE_COMP_FLUSH_FINAL;
1250 "Compression: stateful operations are not "
1251 "supported in these tests yet\n");
1255 ops[i]->input_chksum = 0;
1257 * Store original operation index in private data,
1258 * since ordering does not have to be maintained,
1259 * when dequeueing from compressdev, so a comparison
1260 * at the end of the test can be done.
1262 priv_data = (struct priv_op_data *) (ops[i] + 1);
1263 priv_data->orig_idx = i;
1266 /* Compress data (either with Zlib API or compressdev API */
1267 if (zlib_compress) {
1268 for (i = 0; i < num_bufs; i++) {
1269 const struct rte_comp_xform *compress_xform =
1270 compress_xforms[i % num_xforms];
1271 ret = compress_zlib(ops[i], compress_xform,
/* zlib path is synchronous: processed op is the op itself */
1278 ops_processed[i] = ops[i];
1281 /* Create compress private xform data */
1282 for (i = 0; i < num_xforms; i++) {
1283 ret = rte_compressdev_private_xform_create(0,
1284 (const struct rte_comp_xform *)
1289 "Compression private xform "
1290 "could not be created\n");
1296 if (capa->comp_feature_flags &
1297 RTE_COMP_FF_SHAREABLE_PRIV_XFORM) {
1298 /* Attach shareable private xform data to ops */
1299 for (i = 0; i < num_bufs; i++)
1300 ops[i]->private_xform =
1301 priv_xforms[i % num_xforms];
1303 /* Create rest of the private xforms for the other ops */
1304 for (i = num_xforms; i < num_bufs; i++) {
1305 ret = rte_compressdev_private_xform_create(0,
1306 compress_xforms[i % num_xforms],
1310 "Compression private xform "
1311 "could not be created\n");
1317 /* Attach non shareable private xform data to ops */
1318 for (i = 0; i < num_bufs; i++)
1319 ops[i]->private_xform = priv_xforms[i];
1323 ret = test_run_enqueue_dequeue(ops, ops_processed, num_bufs);
1326 "Compression: enqueue/dequeue operation failed\n");
1331 for (i = 0; i < num_bufs; i++) {
1332 test_priv_data->compressed_data_size[i] +=
1333 ops_processed[i]->produced;
/* recoverable out-of-space: advance src/dst windows for a retry */
1335 if (ops_processed[i]->status ==
1336 RTE_COMP_OP_STATUS_OUT_OF_SPACE_RECOVERABLE) {
1339 RTE_COMP_OP_STATUS_NOT_PROCESSED;
1340 ops[i]->src.offset +=
1341 ops_processed[i]->consumed;
1342 ops[i]->src.length -=
1343 ops_processed[i]->consumed;
1344 ops[i]->dst.offset +=
1345 ops_processed[i]->produced;
1347 buf_ptr = rte_pktmbuf_append(
1349 ops_processed[i]->produced);
1351 if (buf_ptr == NULL) {
1353 "Data recovery: append extra bytes to the current mbuf failed\n");
1363 /* Free resources */
1365 for (i = 0; i < num_bufs; i++) {
1366 rte_comp_op_free(ops[i]);
1368 ops_processed[i] = NULL;
1371 /* Free compress private xforms */
1372 for (i = 0; i < num_priv_xforms; i++) {
1373 if (priv_xforms[i] != NULL) {
1374 rte_compressdev_private_xform_free(0, priv_xforms[i]);
1375 priv_xforms[i] = NULL;
1383 * Prints out the test report. Memory freeing.
1385 * Called after successful compression.
1386 * Operation(s) status validation and decompression buffers freeing.
1388 * -1 returned if the function fails.
1391 * Interim data containing session/transformation objects.
1393 * The test parameters set by users (command line parameters).
1394 * @param test_priv_data
1395 * A container used for aggregating all the private test arrays.
1397 * - 2: Some operation is not supported
1398 * - 1: Decompression should be skipped
1403 test_deflate_comp_finalize(const struct interim_data_params *int_data,
1404 const struct test_data_params *test_data,
1405 const struct test_private_arrays *test_priv_data)
1407 /* local variables: */
1409 struct priv_op_data *priv_data;
1411 /* from int_data: */
1412 unsigned int num_xforms = int_data->num_xforms;
1413 struct rte_comp_xform **compress_xforms = int_data->compress_xforms;
1414 uint16_t *buf_idx = int_data->buf_idx;
1415 unsigned int num_bufs = int_data->num_bufs;
1417 /* from test_priv_data: */
1418 struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
1419 uint64_t *compress_checksum = test_priv_data->compress_checksum;
1420 struct rte_mbuf **uncomp_bufs = test_priv_data->uncomp_bufs;
1421 struct rte_comp_op **ops = test_priv_data->ops;
1423 /* from test_data: */
1424 unsigned int out_of_space = test_data->out_of_space;
1425 unsigned int zlib_compress =
1426 (test_data->zlib_dir == ZLIB_ALL ||
1427 test_data->zlib_dir == ZLIB_COMPRESS);
1428 unsigned int zlib_decompress =
1429 (test_data->zlib_dir == ZLIB_ALL ||
1430 test_data->zlib_dir == ZLIB_DECOMPRESS);
1432 for (i = 0; i < num_bufs; i++) {
1433 priv_data = (struct priv_op_data *)(ops_processed[i] + 1);
1434 uint16_t xform_idx = priv_data->orig_idx % num_xforms;
1435 const struct rte_comp_compress_xform *compress_xform =
1436 &compress_xforms[xform_idx]->compress;
1437 enum rte_comp_huffman huffman_type =
1438 compress_xform->deflate.huffman;
1439 char engine[] = "zlib (directly, not PMD)";
1440 if (zlib_decompress)
1441 strlcpy(engine, "PMD", sizeof(engine));
1443 RTE_LOG(DEBUG, USER1, "Buffer %u compressed by %s from %u to"
1444 " %u bytes (level = %d, huffman = %s)\n",
1445 buf_idx[priv_data->orig_idx], engine,
1446 ops_processed[i]->consumed, ops_processed[i]->produced,
1447 compress_xform->level,
1448 huffman_type_strings[huffman_type]);
1449 RTE_LOG(DEBUG, USER1, "Compression ratio = %.2f\n",
1450 ops_processed[i]->consumed == 0 ? 0 :
1451 (float)ops_processed[i]->produced /
1452 ops_processed[i]->consumed * 100);
1453 if (compress_xform->chksum != RTE_COMP_CHECKSUM_NONE)
1454 compress_checksum[i] = ops_processed[i]->output_chksum;
1459 * Check operation status and free source mbufs (destination mbuf and
1460 * compress operation information is needed for the decompression stage)
1462 for (i = 0; i < num_bufs; i++) {
1463 if (out_of_space && !zlib_compress) {
1464 if (ops_processed[i]->status !=
1465 RTE_COMP_OP_STATUS_OUT_OF_SPACE_TERMINATED) {
1467 "Operation without expected out of "
1468 "space status error\n");
1474 if (ops_processed[i]->status != RTE_COMP_OP_STATUS_SUCCESS) {
1475 if (test_data->overflow == OVERFLOW_ENABLED) {
1476 if (ops_processed[i]->status ==
1477 RTE_COMP_OP_STATUS_OUT_OF_SPACE_TERMINATED) {
1478 RTE_LOG(INFO, USER1,
1479 "Out-of-space-recoverable functionality"
1480 " is not supported on this device\n");
1486 "Comp: Some operations were not successful\n");
1489 priv_data = (struct priv_op_data *)(ops_processed[i] + 1);
1490 rte_pktmbuf_free(uncomp_bufs[priv_data->orig_idx]);
1491 uncomp_bufs[priv_data->orig_idx] = NULL;
1494 if (out_of_space && !zlib_compress)
1501 * The main decompression function.
1503 * The function performs the decompression operation.
1504 * Operation(s) configuration, depending on CLI parameters.
1505 * Operation(s) processing.
1508 * Interim data containing session/transformation objects.
1510 * The test parameters set by users (command line parameters).
1511 * @param test_priv_data
1512 * A container used for aggregating all the private test arrays.
1518 test_deflate_decomp_run(const struct interim_data_params *int_data,
1519 const struct test_data_params *test_data,
1520 struct test_private_arrays *test_priv_data)
1523 /* local variables: */
1524 struct priv_op_data *priv_data;
1526 uint16_t num_priv_xforms = 0;
1530 struct comp_testsuite_params *ts_params = &testsuite_params;
1532 /* from test_data: */
1533 enum rte_comp_op_type operation_type = test_data->decompress_state;
1534 unsigned int zlib_decompress =
1535 (test_data->zlib_dir == ZLIB_ALL ||
1536 test_data->zlib_dir == ZLIB_DECOMPRESS);
1538 /* from int_data: */
1539 struct rte_comp_xform **decompress_xforms = int_data->decompress_xforms;
1540 unsigned int num_xforms = int_data->num_xforms;
1541 unsigned int num_bufs = int_data->num_bufs;
1543 /* from test_priv_data: */
1544 struct rte_mbuf **uncomp_bufs = test_priv_data->uncomp_bufs;
1545 struct rte_mbuf **comp_bufs = test_priv_data->comp_bufs;
1546 struct rte_comp_op **ops = test_priv_data->ops;
1547 struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
1548 void **priv_xforms = test_priv_data->priv_xforms;
1549 uint32_t *compressed_data_size = test_priv_data->compressed_data_size;
1550 void **stream = test_priv_data->stream;
1552 const struct rte_compressdev_capabilities *capa =
1553 rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
1555 ret = rte_comp_op_bulk_alloc(ts_params->op_pool, ops, num_bufs);
1558 "Decompress operations could not be allocated "
1559 "from the mempool\n");
1564 /* Source buffer is the compressed data from the previous operations */
1565 for (i = 0; i < num_bufs; i++) {
1566 ops[i]->m_src = comp_bufs[i];
1567 ops[i]->m_dst = uncomp_bufs[i];
1568 ops[i]->src.offset = 0;
1570 * Set the length of the compressed data to the
1571 * number of bytes that were produced in the previous stage
1574 if (compressed_data_size[i])
1575 ops[i]->src.length = compressed_data_size[i];
1577 ops[i]->src.length = ops_processed[i]->produced;
1579 ops[i]->dst.offset = 0;
1581 if (operation_type == RTE_COMP_OP_STATELESS) {
1582 ops[i]->flush_flag = RTE_COMP_FLUSH_FINAL;
1583 ops[i]->op_type = RTE_COMP_OP_STATELESS;
1584 } else if (!zlib_decompress) {
1585 ops[i]->flush_flag = RTE_COMP_FLUSH_SYNC;
1586 ops[i]->op_type = RTE_COMP_OP_STATEFUL;
1589 "Decompression: stateful operations are"
1590 " not supported in these tests yet\n");
1594 ops[i]->input_chksum = 0;
1596 * Copy private data from previous operations,
1597 * to keep the pointer to the original buffer
1599 memcpy(ops[i] + 1, ops_processed[i] + 1,
1600 sizeof(struct priv_op_data));
1604 * Free the previous compress operations,
1605 * as they are not needed anymore
1607 rte_comp_op_bulk_free(ops_processed, num_bufs);
1609 /* Decompress data (either with Zlib API or compressdev API */
1610 if (zlib_decompress) {
1611 for (i = 0; i < num_bufs; i++) {
1612 priv_data = (struct priv_op_data *)(ops[i] + 1);
1613 uint16_t xform_idx = priv_data->orig_idx % num_xforms;
1614 const struct rte_comp_xform *decompress_xform =
1615 decompress_xforms[xform_idx];
1617 ret = decompress_zlib(ops[i], decompress_xform);
1623 ops_processed[i] = ops[i];
1626 if (operation_type == RTE_COMP_OP_STATELESS) {
1627 /* Create decompress private xform data */
1628 for (i = 0; i < num_xforms; i++) {
1629 ret = rte_compressdev_private_xform_create(0,
1630 (const struct rte_comp_xform *)
1631 decompress_xforms[i],
1635 "Decompression private xform "
1636 "could not be created\n");
1643 if (capa->comp_feature_flags &
1644 RTE_COMP_FF_SHAREABLE_PRIV_XFORM) {
1645 /* Attach shareable private xform data to ops */
1646 for (i = 0; i < num_bufs; i++) {
1647 priv_data = (struct priv_op_data *)
1649 uint16_t xform_idx =
1650 priv_data->orig_idx % num_xforms;
1651 ops[i]->private_xform =
1652 priv_xforms[xform_idx];
1655 /* Create rest of the private xforms */
1656 /* for the other ops */
1657 for (i = num_xforms; i < num_bufs; i++) {
1659 rte_compressdev_private_xform_create(0,
1660 decompress_xforms[i % num_xforms],
1664 "Decompression private xform"
1665 " could not be created\n");
1672 /* Attach non shareable private xform data */
1674 for (i = 0; i < num_bufs; i++) {
1675 priv_data = (struct priv_op_data *)
1677 uint16_t xform_idx =
1678 priv_data->orig_idx;
1679 ops[i]->private_xform =
1680 priv_xforms[xform_idx];
1684 /* Create a stream object for stateful decompression */
1685 ret = rte_compressdev_stream_create(0,
1686 decompress_xforms[0], stream);
1689 "Decompression stream could not be created, error %d\n",
1694 /* Attach stream to ops */
1695 for (i = 0; i < num_bufs; i++)
1696 ops[i]->stream = *stream;
1699 test_priv_data->num_priv_xforms = num_priv_xforms;
1707 * Prints out the test report. Memory freeing.
1709 * Called after successful decompression.
1710 * Operation(s) status validation and compression buffers freeing.
1712 * -1 returned if the function fails.
1715 * Interim data containing session/transformation objects.
1717 * The test parameters set by users (command line parameters).
1718 * @param test_priv_data
1719 * A container used for aggregating all the private test arrays.
1721 * - 2: Next step must be executed by the caller (stateful decompression only)
1722 * - 1: On success (caller should stop and exit)
1727 test_deflate_decomp_finalize(const struct interim_data_params *int_data,
1728 const struct test_data_params *test_data,
1729 const struct test_private_arrays *test_priv_data)
1731 /* local variables: */
1733 struct priv_op_data *priv_data;
1734 static unsigned int step;
1736 /* from int_data: */
1737 uint16_t *buf_idx = int_data->buf_idx;
1738 unsigned int num_bufs = int_data->num_bufs;
1739 const char * const *test_bufs = int_data->test_bufs;
1740 struct rte_comp_xform **compress_xforms = int_data->compress_xforms;
1742 /* from test_priv_data: */
1743 struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
1744 struct rte_mbuf **comp_bufs = test_priv_data->comp_bufs;
1745 struct rte_comp_op **ops = test_priv_data->ops;
1746 uint64_t *compress_checksum = test_priv_data->compress_checksum;
1747 unsigned int *decomp_produced_data_size =
1748 test_priv_data->decomp_produced_data_size;
1749 char **all_decomp_data = test_priv_data->all_decomp_data;
1751 /* from test_data: */
1752 unsigned int out_of_space = test_data->out_of_space;
1753 enum rte_comp_op_type operation_type = test_data->decompress_state;
1755 unsigned int zlib_compress =
1756 (test_data->zlib_dir == ZLIB_ALL ||
1757 test_data->zlib_dir == ZLIB_COMPRESS);
1758 unsigned int zlib_decompress =
1759 (test_data->zlib_dir == ZLIB_ALL ||
1760 test_data->zlib_dir == ZLIB_DECOMPRESS);
1762 for (i = 0; i < num_bufs; i++) {
1763 priv_data = (struct priv_op_data *)(ops_processed[i] + 1);
1764 char engine[] = "zlib, (directly, no PMD)";
1766 strlcpy(engine, "pmd", sizeof(engine));
1767 RTE_LOG(DEBUG, USER1,
1768 "Buffer %u decompressed by %s from %u to %u bytes\n",
1769 buf_idx[priv_data->orig_idx], engine,
1770 ops_processed[i]->consumed, ops_processed[i]->produced);
1775 * Check operation status and free source mbuf (destination mbuf and
1776 * compress operation information is still needed)
1778 for (i = 0; i < num_bufs; i++) {
1779 if (out_of_space && !zlib_decompress) {
1780 if (ops_processed[i]->status !=
1781 RTE_COMP_OP_STATUS_OUT_OF_SPACE_TERMINATED) {
1784 "Operation without expected out of "
1785 "space status error\n");
1791 if (operation_type == RTE_COMP_OP_STATEFUL
1792 && (ops_processed[i]->status ==
1793 RTE_COMP_OP_STATUS_OUT_OF_SPACE_RECOVERABLE
1794 || ops_processed[i]->status ==
1795 RTE_COMP_OP_STATUS_SUCCESS)) {
1797 RTE_LOG(DEBUG, USER1,
1798 ".............RECOVERABLE\n");
1800 /* collect the output into all_decomp_data */
1801 const void *ptr = rte_pktmbuf_read(
1802 ops_processed[i]->m_dst,
1803 ops_processed[i]->dst.offset,
1804 ops_processed[i]->produced,
1806 *decomp_produced_data_size);
1807 if (ptr != *all_decomp_data +
1808 *decomp_produced_data_size)
1809 rte_memcpy(*all_decomp_data +
1810 *decomp_produced_data_size,
1811 ptr, ops_processed[i]->produced);
1813 *decomp_produced_data_size +=
1814 ops_processed[i]->produced;
1815 if (ops_processed[i]->src.length >
1816 ops_processed[i]->consumed) {
1817 if (ops_processed[i]->status ==
1818 RTE_COMP_OP_STATUS_SUCCESS) {
1820 "Operation finished too early\n");
1824 if (step >= test_data->decompress_steps_max) {
1826 "Operation exceeded maximum steps\n");
1829 ops[i] = ops_processed[i];
1831 RTE_COMP_OP_STATUS_NOT_PROCESSED;
1832 ops[i]->src.offset +=
1833 ops_processed[i]->consumed;
1834 ops[i]->src.length -=
1835 ops_processed[i]->consumed;
1836 /* repeat the operation */
1839 /* Compare the original stream with the */
1840 /* decompressed stream (in size and the data) */
1841 priv_data = (struct priv_op_data *)
1842 (ops_processed[i] + 1);
1844 test_bufs[priv_data->orig_idx];
1845 const char *buf2 = *all_decomp_data;
1847 if (compare_buffers(buf1, strlen(buf1) + 1,
1848 buf2, *decomp_produced_data_size) < 0)
1850 /* Test checksums */
1851 if (compress_xforms[0]->compress.chksum
1852 != RTE_COMP_CHECKSUM_NONE) {
1853 if (ops_processed[i]->output_chksum
1854 != compress_checksum[i]) {
1856 "The checksums differ\n"
1857 "Compression Checksum: %" PRIu64 "\tDecompression "
1858 "Checksum: %" PRIu64 "\n", compress_checksum[i],
1859 ops_processed[i]->output_chksum);
1864 } else if (ops_processed[i]->status !=
1865 RTE_COMP_OP_STATUS_SUCCESS) {
1867 "Decomp: Some operations were not successful, status = %u\n",
1868 ops_processed[i]->status);
1871 priv_data = (struct priv_op_data *)(ops_processed[i] + 1);
1872 rte_pktmbuf_free(comp_bufs[priv_data->orig_idx]);
1873 comp_bufs[priv_data->orig_idx] = NULL;
1876 if (out_of_space && !zlib_decompress)
1883 * Validation of the output (compression/decompression) data.
1885 * The function compares the source stream with the output stream,
1886 * after decompression, to check if compression/decompression
1888 * -1 returned if the function fails.
1891 * Interim data containing session/transformation objects.
1893 * The test parameters set by users (command line parameters).
1894 * @param test_priv_data
1895 * A container used for aggregating all the private test arrays.
1901 test_results_validation(const struct interim_data_params *int_data,
1902 const struct test_data_params *test_data,
1903 const struct test_private_arrays *test_priv_data)
1905 /* local variables: */
1907 struct priv_op_data *priv_data;
1910 char *contig_buf = NULL;
1913 /* from int_data: */
1914 struct rte_comp_xform **compress_xforms = int_data->compress_xforms;
1915 unsigned int num_bufs = int_data->num_bufs;
1916 const char * const *test_bufs = int_data->test_bufs;
1918 /* from test_priv_data: */
1919 uint64_t *compress_checksum = test_priv_data->compress_checksum;
1920 struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
1923 * Compare the original stream with the decompressed stream
1924 * (in size and the data)
1926 for (i = 0; i < num_bufs; i++) {
1927 priv_data = (struct priv_op_data *)(ops_processed[i] + 1);
1928 buf1 = test_data->use_external_mbufs ?
1929 test_data->inbuf_memzone->addr :
1930 test_bufs[priv_data->orig_idx];
1931 data_size = test_data->use_external_mbufs ?
1932 test_data->inbuf_data_size :
1935 contig_buf = rte_malloc(NULL, ops_processed[i]->produced, 0);
1936 if (contig_buf == NULL) {
1937 RTE_LOG(ERR, USER1, "Contiguous buffer could not "
1942 buf2 = rte_pktmbuf_read(ops_processed[i]->m_dst, 0,
1943 ops_processed[i]->produced, contig_buf);
1944 if (compare_buffers(buf1, data_size,
1945 buf2, ops_processed[i]->produced) < 0)
1948 /* Test checksums */
1949 if (compress_xforms[0]->compress.chksum !=
1950 RTE_COMP_CHECKSUM_NONE) {
1951 if (ops_processed[i]->output_chksum !=
1952 compress_checksum[i]) {
1953 RTE_LOG(ERR, USER1, "The checksums differ\n"
1954 "Compression Checksum: %" PRIu64 "\tDecompression "
1955 "Checksum: %" PRIu64 "\n", compress_checksum[i],
1956 ops_processed[i]->output_chksum);
1961 rte_free(contig_buf);
1967 rte_free(contig_buf);
1972 * Compresses and decompresses the input stream with the compressdev API and the Zlib API
1974 * Basic test function. Common for all the functional tests.
1975 * -1 returned if the function fails.
1978 * Interim data containing session/transformation objects.
1980 * The test parameters set by users (command line parameters).
1982 * - 1: Some operation not supported
1988 test_deflate_comp_decomp(const struct interim_data_params *int_data,
1989 const struct test_data_params *test_data)
1991 unsigned int num_bufs = int_data->num_bufs;
1992 unsigned int out_of_space = test_data->out_of_space;
1994 void *stream = NULL;
1995 char *all_decomp_data = NULL;
1996 unsigned int decomp_produced_data_size = 0;
1998 int ret_status = -1;
2000 struct rte_mbuf *uncomp_bufs[num_bufs];
2001 struct rte_mbuf *comp_bufs[num_bufs];
2002 struct rte_comp_op *ops[num_bufs];
2003 struct rte_comp_op *ops_processed[num_bufs];
2004 void *priv_xforms[num_bufs];
2007 uint64_t compress_checksum[num_bufs];
2008 uint32_t compressed_data_size[num_bufs];
2009 char *contig_buf = NULL;
2011 struct rte_mbuf_ext_shared_info compbuf_info;
2012 struct rte_mbuf_ext_shared_info decompbuf_info;
2014 const struct rte_compressdev_capabilities *capa;
2016 /* Compressing with CompressDev */
2017 unsigned int zlib_compress =
2018 (test_data->zlib_dir == ZLIB_ALL ||
2019 test_data->zlib_dir == ZLIB_COMPRESS);
2020 unsigned int zlib_decompress =
2021 (test_data->zlib_dir == ZLIB_ALL ||
2022 test_data->zlib_dir == ZLIB_DECOMPRESS);
2024 struct test_private_arrays test_priv_data;
2026 test_priv_data.uncomp_bufs = uncomp_bufs;
2027 test_priv_data.comp_bufs = comp_bufs;
2028 test_priv_data.ops = ops;
2029 test_priv_data.ops_processed = ops_processed;
2030 test_priv_data.priv_xforms = priv_xforms;
2031 test_priv_data.compress_checksum = compress_checksum;
2032 test_priv_data.compressed_data_size = compressed_data_size;
2034 test_priv_data.stream = &stream;
2035 test_priv_data.all_decomp_data = &all_decomp_data;
2036 test_priv_data.decomp_produced_data_size = &decomp_produced_data_size;
2038 test_priv_data.num_priv_xforms = 0; /* it's used for deompression only */
2040 capa = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2043 "Compress device does not support DEFLATE\n");
2047 /* Prepare the source mbufs with the data */
2048 ret = test_setup_com_bufs(int_data, test_data, &test_priv_data);
2054 RTE_LOG(DEBUG, USER1, "<<< COMPRESSION >>>\n");
2058 /* Prepare output (destination) mbufs for compressed data */
2059 ret = test_setup_output_bufs(
2060 OPERATION_COMPRESSION,
2061 out_of_space == 1 && !zlib_compress,
2071 /* Run compression */
2072 ret = test_deflate_comp_run(int_data, test_data, &test_priv_data);
2078 ret = test_deflate_comp_finalize(int_data, test_data, &test_priv_data);
2082 } else if (ret == 1) {
2085 } else if (ret == 2) {
2086 ret_status = 1; /* some operation not supported */
2092 RTE_LOG(DEBUG, USER1, "<<< DECOMPRESSION >>>\n");
2094 /* Prepare output (destination) mbufs for decompressed data */
2095 ret = test_setup_output_bufs(
2096 OPERATION_DECOMPRESSION,
2097 out_of_space == 1 && !zlib_decompress,
2107 /* Run decompression */
2108 ret = test_deflate_decomp_run(int_data, test_data, &test_priv_data);
2114 if (!zlib_decompress) {
2115 next_step: /* next step for stateful decompression only */
2116 ret = test_run_enqueue_dequeue(ops, ops_processed, num_bufs);
2120 "Decompression: enqueue/dequeue operation failed\n");
2124 ret = test_deflate_decomp_finalize(int_data, test_data, &test_priv_data);
2128 } else if (ret == 1) {
2131 } else if (ret == 2) {
2135 /* FINAL PROCESSING */
2137 ret = test_results_validation(int_data, test_data, &test_priv_data);
2145 /* Free resources */
2148 rte_compressdev_stream_free(0, stream);
2149 if (all_decomp_data != NULL)
2150 rte_free(all_decomp_data);
2152 /* Free compress private xforms */
2153 for (i = 0; i < test_priv_data.num_priv_xforms; i++) {
2154 if (priv_xforms[i] != NULL) {
2155 rte_compressdev_private_xform_free(0, priv_xforms[i]);
2156 priv_xforms[i] = NULL;
2159 for (i = 0; i < num_bufs; i++) {
2160 rte_pktmbuf_free(uncomp_bufs[i]);
2161 rte_pktmbuf_free(comp_bufs[i]);
2162 rte_comp_op_free(ops[i]);
2163 rte_comp_op_free(ops_processed[i]);
2165 rte_free(contig_buf);
2171 test_compressdev_deflate_stateless_fixed(void)
2173 struct comp_testsuite_params *ts_params = &testsuite_params;
2176 const struct rte_compressdev_capabilities *capab;
2178 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2179 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2181 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_FIXED) == 0)
2184 struct rte_comp_xform *compress_xform =
2185 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2187 if (compress_xform == NULL) {
2189 "Compress xform could not be created\n");
2194 memcpy(compress_xform, ts_params->def_comp_xform,
2195 sizeof(struct rte_comp_xform));
2196 compress_xform->compress.deflate.huffman = RTE_COMP_HUFFMAN_FIXED;
2198 struct interim_data_params int_data = {
2203 &ts_params->def_decomp_xform,
2207 struct test_data_params test_data = {
2208 .compress_state = RTE_COMP_OP_STATELESS,
2209 .decompress_state = RTE_COMP_OP_STATELESS,
2210 .buff_type = LB_BOTH,
2211 .zlib_dir = ZLIB_DECOMPRESS,
2214 .overflow = OVERFLOW_DISABLED,
2215 .ratio = RATIO_ENABLED
2218 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2219 int_data.test_bufs = &compress_test_bufs[i];
2220 int_data.buf_idx = &i;
2222 /* Compress with compressdev, decompress with Zlib */
2223 test_data.zlib_dir = ZLIB_DECOMPRESS;
2224 ret = test_deflate_comp_decomp(&int_data, &test_data);
2228 /* Compress with Zlib, decompress with compressdev */
2229 test_data.zlib_dir = ZLIB_COMPRESS;
2230 ret = test_deflate_comp_decomp(&int_data, &test_data);
2238 rte_free(compress_xform);
2243 test_compressdev_deflate_stateless_dynamic(void)
2245 struct comp_testsuite_params *ts_params = &testsuite_params;
2248 struct rte_comp_xform *compress_xform =
2249 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2251 const struct rte_compressdev_capabilities *capab;
2253 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2254 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2256 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
2259 if (compress_xform == NULL) {
2261 "Compress xform could not be created\n");
2266 memcpy(compress_xform, ts_params->def_comp_xform,
2267 sizeof(struct rte_comp_xform));
2268 compress_xform->compress.deflate.huffman = RTE_COMP_HUFFMAN_DYNAMIC;
2270 struct interim_data_params int_data = {
2275 &ts_params->def_decomp_xform,
2279 struct test_data_params test_data = {
2280 .compress_state = RTE_COMP_OP_STATELESS,
2281 .decompress_state = RTE_COMP_OP_STATELESS,
2282 .buff_type = LB_BOTH,
2283 .zlib_dir = ZLIB_DECOMPRESS,
2286 .overflow = OVERFLOW_DISABLED,
2287 .ratio = RATIO_ENABLED
2290 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2291 int_data.test_bufs = &compress_test_bufs[i];
2292 int_data.buf_idx = &i;
2294 /* Compress with compressdev, decompress with Zlib */
2295 test_data.zlib_dir = ZLIB_DECOMPRESS;
2296 ret = test_deflate_comp_decomp(&int_data, &test_data);
2300 /* Compress with Zlib, decompress with compressdev */
2301 test_data.zlib_dir = ZLIB_COMPRESS;
2302 ret = test_deflate_comp_decomp(&int_data, &test_data);
2310 rte_free(compress_xform);
2315 test_compressdev_deflate_stateless_multi_op(void)
2317 struct comp_testsuite_params *ts_params = &testsuite_params;
2318 uint16_t num_bufs = RTE_DIM(compress_test_bufs);
2319 uint16_t buf_idx[num_bufs];
2323 for (i = 0; i < num_bufs; i++)
2326 struct interim_data_params int_data = {
2330 &ts_params->def_comp_xform,
2331 &ts_params->def_decomp_xform,
2335 struct test_data_params test_data = {
2336 .compress_state = RTE_COMP_OP_STATELESS,
2337 .decompress_state = RTE_COMP_OP_STATELESS,
2338 .buff_type = LB_BOTH,
2339 .zlib_dir = ZLIB_DECOMPRESS,
2342 .overflow = OVERFLOW_DISABLED,
2343 .ratio = RATIO_ENABLED
2346 /* Compress with compressdev, decompress with Zlib */
2347 test_data.zlib_dir = ZLIB_DECOMPRESS;
2348 ret = test_deflate_comp_decomp(&int_data, &test_data);
2352 /* Compress with Zlib, decompress with compressdev */
2353 test_data.zlib_dir = ZLIB_COMPRESS;
2354 ret = test_deflate_comp_decomp(&int_data, &test_data);
2358 return TEST_SUCCESS;
2362 test_compressdev_deflate_stateless_multi_level(void)
2364 struct comp_testsuite_params *ts_params = &testsuite_params;
2368 struct rte_comp_xform *compress_xform =
2369 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2371 if (compress_xform == NULL) {
2373 "Compress xform could not be created\n");
2378 memcpy(compress_xform, ts_params->def_comp_xform,
2379 sizeof(struct rte_comp_xform));
2381 struct interim_data_params int_data = {
2386 &ts_params->def_decomp_xform,
2390 struct test_data_params test_data = {
2391 .compress_state = RTE_COMP_OP_STATELESS,
2392 .decompress_state = RTE_COMP_OP_STATELESS,
2393 .buff_type = LB_BOTH,
2394 .zlib_dir = ZLIB_DECOMPRESS,
2397 .overflow = OVERFLOW_DISABLED,
2398 .ratio = RATIO_ENABLED
2401 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2402 int_data.test_bufs = &compress_test_bufs[i];
2403 int_data.buf_idx = &i;
2405 for (level = RTE_COMP_LEVEL_MIN; level <= RTE_COMP_LEVEL_MAX;
2407 compress_xform->compress.level = level;
2408 /* Compress with compressdev, decompress with Zlib */
2409 test_data.zlib_dir = ZLIB_DECOMPRESS;
2410 ret = test_deflate_comp_decomp(&int_data, &test_data);
2419 rte_free(compress_xform);
2423 #define NUM_XFORMS 3
2425 test_compressdev_deflate_stateless_multi_xform(void)
2427 struct comp_testsuite_params *ts_params = &testsuite_params;
2428 uint16_t num_bufs = NUM_XFORMS;
2429 struct rte_comp_xform *compress_xforms[NUM_XFORMS] = {NULL};
2430 struct rte_comp_xform *decompress_xforms[NUM_XFORMS] = {NULL};
2431 const char *test_buffers[NUM_XFORMS];
2433 unsigned int level = RTE_COMP_LEVEL_MIN;
2434 uint16_t buf_idx[num_bufs];
2437 /* Create multiple xforms with various levels */
2438 for (i = 0; i < NUM_XFORMS; i++) {
2439 compress_xforms[i] = rte_malloc(NULL,
2440 sizeof(struct rte_comp_xform), 0);
2441 if (compress_xforms[i] == NULL) {
2443 "Compress xform could not be created\n");
2448 memcpy(compress_xforms[i], ts_params->def_comp_xform,
2449 sizeof(struct rte_comp_xform));
2450 compress_xforms[i]->compress.level = level;
2453 decompress_xforms[i] = rte_malloc(NULL,
2454 sizeof(struct rte_comp_xform), 0);
2455 if (decompress_xforms[i] == NULL) {
2457 "Decompress xform could not be created\n");
2462 memcpy(decompress_xforms[i], ts_params->def_decomp_xform,
2463 sizeof(struct rte_comp_xform));
2466 for (i = 0; i < NUM_XFORMS; i++) {
2468 /* Use the same buffer in all sessions */
2469 test_buffers[i] = compress_test_bufs[0];
2472 struct interim_data_params int_data = {
2481 struct test_data_params test_data = {
2482 .compress_state = RTE_COMP_OP_STATELESS,
2483 .decompress_state = RTE_COMP_OP_STATELESS,
2484 .buff_type = LB_BOTH,
2485 .zlib_dir = ZLIB_DECOMPRESS,
2488 .overflow = OVERFLOW_DISABLED,
2489 .ratio = RATIO_ENABLED
2492 /* Compress with compressdev, decompress with Zlib */
2493 ret = test_deflate_comp_decomp(&int_data, &test_data);
2500 for (i = 0; i < NUM_XFORMS; i++) {
2501 rte_free(compress_xforms[i]);
2502 rte_free(decompress_xforms[i]);
2509 test_compressdev_deflate_stateless_sgl(void)
2511 struct comp_testsuite_params *ts_params = &testsuite_params;
2514 const struct rte_compressdev_capabilities *capab;
2516 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2517 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2519 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
2522 struct interim_data_params int_data = {
2526 &ts_params->def_comp_xform,
2527 &ts_params->def_decomp_xform,
2531 struct test_data_params test_data = {
2532 .compress_state = RTE_COMP_OP_STATELESS,
2533 .decompress_state = RTE_COMP_OP_STATELESS,
2534 .buff_type = SGL_BOTH,
2535 .zlib_dir = ZLIB_DECOMPRESS,
2538 .overflow = OVERFLOW_DISABLED,
2539 .ratio = RATIO_ENABLED
2542 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2543 int_data.test_bufs = &compress_test_bufs[i];
2544 int_data.buf_idx = &i;
2546 /* Compress with compressdev, decompress with Zlib */
2547 test_data.zlib_dir = ZLIB_DECOMPRESS;
2548 ret = test_deflate_comp_decomp(&int_data, &test_data);
2552 /* Compress with Zlib, decompress with compressdev */
2553 test_data.zlib_dir = ZLIB_COMPRESS;
2554 ret = test_deflate_comp_decomp(&int_data, &test_data);
2558 if (capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_LB_OUT) {
2559 /* Compress with compressdev, decompress with Zlib */
2560 test_data.zlib_dir = ZLIB_DECOMPRESS;
2561 test_data.buff_type = SGL_TO_LB;
2562 ret = test_deflate_comp_decomp(&int_data, &test_data);
2566 /* Compress with Zlib, decompress with compressdev */
2567 test_data.zlib_dir = ZLIB_COMPRESS;
2568 test_data.buff_type = SGL_TO_LB;
2569 ret = test_deflate_comp_decomp(&int_data, &test_data);
2574 if (capab->comp_feature_flags & RTE_COMP_FF_OOP_LB_IN_SGL_OUT) {
2575 /* Compress with compressdev, decompress with Zlib */
2576 test_data.zlib_dir = ZLIB_DECOMPRESS;
2577 test_data.buff_type = LB_TO_SGL;
2578 ret = test_deflate_comp_decomp(&int_data, &test_data);
2582 /* Compress with Zlib, decompress with compressdev */
2583 test_data.zlib_dir = ZLIB_COMPRESS;
2584 test_data.buff_type = LB_TO_SGL;
2585 ret = test_deflate_comp_decomp(&int_data, &test_data);
2591 return TEST_SUCCESS;
/*
 * Stateless deflate test exercising per-op checksum generation.
 * For each checksum the driver advertises (CRC32, Adler32, combined
 * CRC32+Adler32 — per RTE_COMP_FF_* capability flags of device 0) it
 * runs every buffer in compress_test_bufs twice: once cross-checking
 * against Zlib (ZLIB_COMPRESS) and once driver-only (ZLIB_NONE).
 * The combined checksum is driver-only because Zlib cannot produce it.
 * Skips (is expected to return "unsupported") when no checksum flag is set.
 * NOTE(review): some initializer fields and the error/exit paths are not
 * visible in this chunk — confirm cleanup on failure against the full file.
 */
2595 test_compressdev_deflate_stateless_checksum(void)
2597 struct comp_testsuite_params *ts_params = &testsuite_params;
2600 const struct rte_compressdev_capabilities *capab;
2602 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2603 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2605 /* Check if driver supports any checksum */
2606 if ((capab->comp_feature_flags & RTE_COMP_FF_CRC32_CHECKSUM) == 0 &&
2607 (capab->comp_feature_flags &
2608 RTE_COMP_FF_ADLER32_CHECKSUM) == 0 &&
2609 (capab->comp_feature_flags &
2610 RTE_COMP_FF_CRC32_ADLER32_CHECKSUM) == 0)
/* Private copies of the default xforms so chksum can be varied per case */
2613 struct rte_comp_xform *compress_xform =
2614 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2615 if (compress_xform == NULL) {
2616 RTE_LOG(ERR, USER1, "Compress xform could not be created\n");
2620 memcpy(compress_xform, ts_params->def_comp_xform,
2621 sizeof(struct rte_comp_xform));
2623 struct rte_comp_xform *decompress_xform =
2624 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2625 if (decompress_xform == NULL) {
2626 RTE_LOG(ERR, USER1, "Decompress xform could not be created\n");
2627 rte_free(compress_xform);
2631 memcpy(decompress_xform, ts_params->def_decomp_xform,
2632 sizeof(struct rte_comp_xform));
2634 struct interim_data_params int_data = {
2643 struct test_data_params test_data = {
2644 .compress_state = RTE_COMP_OP_STATELESS,
2645 .decompress_state = RTE_COMP_OP_STATELESS,
2646 .buff_type = LB_BOTH,
2647 .zlib_dir = ZLIB_DECOMPRESS,
2650 .overflow = OVERFLOW_DISABLED,
2651 .ratio = RATIO_ENABLED
2654 /* Check if driver supports crc32 checksum and test */
2655 if ((capab->comp_feature_flags & RTE_COMP_FF_CRC32_CHECKSUM)) {
2656 compress_xform->compress.chksum = RTE_COMP_CHECKSUM_CRC32;
2657 decompress_xform->decompress.chksum = RTE_COMP_CHECKSUM_CRC32;
2659 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2660 /* Compress with compressdev, decompress with Zlib */
2661 int_data.test_bufs = &compress_test_bufs[i];
2662 int_data.buf_idx = &i;
2664 /* Generate zlib checksum and test against selected
2665 * drivers decompression checksum
2667 test_data.zlib_dir = ZLIB_COMPRESS;
2668 ret = test_deflate_comp_decomp(&int_data, &test_data);
2672 /* Generate compression and decompression
2673 * checksum of selected driver
2675 test_data.zlib_dir = ZLIB_NONE;
2676 ret = test_deflate_comp_decomp(&int_data, &test_data);
2682 /* Check if driver supports adler32 checksum and test */
2683 if ((capab->comp_feature_flags & RTE_COMP_FF_ADLER32_CHECKSUM)) {
2684 compress_xform->compress.chksum = RTE_COMP_CHECKSUM_ADLER32;
2685 decompress_xform->decompress.chksum = RTE_COMP_CHECKSUM_ADLER32;
2687 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2688 int_data.test_bufs = &compress_test_bufs[i];
2689 int_data.buf_idx = &i;
2691 /* Generate zlib checksum and test against selected
2692 * drivers decompression checksum
2694 test_data.zlib_dir = ZLIB_COMPRESS;
2695 ret = test_deflate_comp_decomp(&int_data, &test_data);
2698 /* Generate compression and decompression
2699 * checksum of selected driver
2701 test_data.zlib_dir = ZLIB_NONE;
2702 ret = test_deflate_comp_decomp(&int_data, &test_data);
2708 /* Check if driver supports combined crc and adler checksum and test */
2709 if ((capab->comp_feature_flags & RTE_COMP_FF_CRC32_ADLER32_CHECKSUM)) {
2710 compress_xform->compress.chksum =
2711 RTE_COMP_CHECKSUM_CRC32_ADLER32;
2712 decompress_xform->decompress.chksum =
2713 RTE_COMP_CHECKSUM_CRC32_ADLER32;
2715 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2716 int_data.test_bufs = &compress_test_bufs[i];
2717 int_data.buf_idx = &i;
2719 /* Generate compression and decompression
2720 * checksum of selected driver
2722 test_data.zlib_dir = ZLIB_NONE;
2723 ret = test_deflate_comp_decomp(&int_data, &test_data);
2732 rte_free(compress_xform);
2733 rte_free(decompress_xform);
/*
 * Negative test: requests the out-of-space scenario (out_of_space = 1)
 * so the destination buffer is deliberately undersized and the PMD is
 * expected to report the out-of-space status rather than succeed.
 * Runs both directions (compressdev->Zlib and Zlib->compressdev) with
 * linear buffers, then repeats with SGL buffers if the device supports
 * RTE_COMP_FF_OOP_SGL_IN_SGL_OUT. Requires fixed Huffman support.
 */
2738 test_compressdev_out_of_space_buffer(void)
2740 struct comp_testsuite_params *ts_params = &testsuite_params;
2743 const struct rte_compressdev_capabilities *capab;
2745 RTE_LOG(INFO, USER1, "This is a negative test, errors are expected\n");
2747 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2748 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2750 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_FIXED) == 0)
2753 struct interim_data_params int_data = {
2754 &compress_test_bufs[0],
2757 &ts_params->def_comp_xform,
2758 &ts_params->def_decomp_xform,
2762 struct test_data_params test_data = {
2763 .compress_state = RTE_COMP_OP_STATELESS,
2764 .decompress_state = RTE_COMP_OP_STATELESS,
2765 .buff_type = LB_BOTH,
2766 .zlib_dir = ZLIB_DECOMPRESS,
2767 .out_of_space = 1, /* run out-of-space test */
2769 .overflow = OVERFLOW_DISABLED,
2770 .ratio = RATIO_ENABLED
2772 /* Compress with compressdev, decompress with Zlib */
2773 test_data.zlib_dir = ZLIB_DECOMPRESS;
2774 ret = test_deflate_comp_decomp(&int_data, &test_data);
2778 /* Compress with Zlib, decompress with compressdev */
2779 test_data.zlib_dir = ZLIB_COMPRESS;
2780 ret = test_deflate_comp_decomp(&int_data, &test_data);
2784 if (capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) {
2785 /* Compress with compressdev, decompress with Zlib */
2786 test_data.zlib_dir = ZLIB_DECOMPRESS;
2787 test_data.buff_type = SGL_BOTH;
2788 ret = test_deflate_comp_decomp(&int_data, &test_data);
2792 /* Compress with Zlib, decompress with compressdev */
2793 test_data.zlib_dir = ZLIB_COMPRESS;
2794 test_data.buff_type = SGL_BOTH;
2795 ret = test_deflate_comp_decomp(&int_data, &test_data);
/*
 * Stateless dynamic-Huffman test on a big pseudo-random buffer of
 * BIG_DATA_TEST_SIZE bytes, using SGL mbuf chains on both sides.
 * RATIO_DISABLED: random data may not shrink, so the compressed-size
 * check is skipped. Requires dynamic Huffman and SGL-in/SGL-out.
 * srand() is seeded with a constant so the data is reproducible;
 * every byte gets bit 0 set ("| 1") so no interior byte is 0, and the
 * final byte is explicitly 0 to NUL-terminate the buffer.
 * Restores the default Huffman mode on the xform before returning.
 */
2807 test_compressdev_deflate_stateless_dynamic_big(void)
2809 struct comp_testsuite_params *ts_params = &testsuite_params;
2813 const struct rte_compressdev_capabilities *capab;
2814 char *test_buffer = NULL;
2816 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2817 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2819 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
2822 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
2825 test_buffer = rte_malloc(NULL, BIG_DATA_TEST_SIZE, 0);
2826 if (test_buffer == NULL) {
2828 "Can't allocate buffer for big-data\n");
2832 struct interim_data_params int_data = {
2833 (const char * const *)&test_buffer,
2836 &ts_params->def_comp_xform,
2837 &ts_params->def_decomp_xform,
2841 struct test_data_params test_data = {
2842 .compress_state = RTE_COMP_OP_STATELESS,
2843 .decompress_state = RTE_COMP_OP_STATELESS,
2844 .buff_type = SGL_BOTH,
2845 .zlib_dir = ZLIB_DECOMPRESS,
2848 .overflow = OVERFLOW_DISABLED,
2849 .ratio = RATIO_DISABLED
2852 ts_params->def_comp_xform->compress.deflate.huffman =
2853 RTE_COMP_HUFFMAN_DYNAMIC;
2855 /* fill the buffer with data based on rand. data */
2856 srand(BIG_DATA_TEST_SIZE);
2857 for (j = 0; j < BIG_DATA_TEST_SIZE - 1; ++j)
2858 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
2859 test_buffer[BIG_DATA_TEST_SIZE - 1] = 0;
2861 /* Compress with compressdev, decompress with Zlib */
2862 test_data.zlib_dir = ZLIB_DECOMPRESS;
2863 ret = test_deflate_comp_decomp(&int_data, &test_data);
2867 /* Compress with Zlib, decompress with compressdev */
2868 test_data.zlib_dir = ZLIB_COMPRESS;
2869 ret = test_deflate_comp_decomp(&int_data, &test_data);
2876 ts_params->def_comp_xform->compress.deflate.huffman =
2877 RTE_COMP_HUFFMAN_DEFAULT;
2878 rte_free(test_buffer);
/*
 * Stateful decompression test: compress with Zlib (ZLIB_COMPRESS),
 * then decompress through compressdev as a STATEFUL operation consumed
 * in output blocks of 2000 bytes, at most 4 steps. Repeats with SGL
 * buffers when the device supports RTE_COMP_FF_OOP_SGL_IN_SGL_OUT.
 * Skipped when RTE_COMP_FF_STATEFUL_DECOMPRESSION is not advertised.
 */
2883 test_compressdev_deflate_stateful_decomp(void)
2885 struct comp_testsuite_params *ts_params = &testsuite_params;
2888 const struct rte_compressdev_capabilities *capab;
2890 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2891 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2893 if (!(capab->comp_feature_flags & RTE_COMP_FF_STATEFUL_DECOMPRESSION))
2896 struct interim_data_params int_data = {
2897 &compress_test_bufs[0],
2900 &ts_params->def_comp_xform,
2901 &ts_params->def_decomp_xform,
2905 struct test_data_params test_data = {
2906 .compress_state = RTE_COMP_OP_STATELESS,
2907 .decompress_state = RTE_COMP_OP_STATEFUL,
2908 .buff_type = LB_BOTH,
2909 .zlib_dir = ZLIB_COMPRESS,
2912 .decompress_output_block_size = 2000,
2913 .decompress_steps_max = 4,
2914 .overflow = OVERFLOW_DISABLED,
2915 .ratio = RATIO_ENABLED
2918 /* Compress with Zlib, decompress with compressdev */
2919 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
2924 if (capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) {
2925 /* Now test with SGL buffers */
2926 test_data.buff_type = SGL_BOTH;
2927 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
/*
 * Stateful decompression combined with checksum verification.
 * For each checksum the device advertises (CRC32, Adler32, combined
 * CRC32+Adler32), compresses via Zlib and decompresses statefully
 * through compressdev (2000-byte output blocks, max 4 steps), first
 * with linear buffers and then with SGL when SGL-in/SGL-out is
 * supported. The combined checksum runs driver-only (ZLIB_NONE)
 * because Zlib cannot generate it. Private xform copies are used so
 * chksum can be changed per case without touching the defaults.
 * NOTE(review): error/exit paths between cases are elided in this view.
 */
2940 test_compressdev_deflate_stateful_decomp_checksum(void)
2942 struct comp_testsuite_params *ts_params = &testsuite_params;
2945 const struct rte_compressdev_capabilities *capab;
2947 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2948 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2950 if (!(capab->comp_feature_flags & RTE_COMP_FF_STATEFUL_DECOMPRESSION))
2953 /* Check if driver supports any checksum */
2954 if (!(capab->comp_feature_flags &
2955 (RTE_COMP_FF_CRC32_CHECKSUM | RTE_COMP_FF_ADLER32_CHECKSUM |
2956 RTE_COMP_FF_CRC32_ADLER32_CHECKSUM)))
2959 struct rte_comp_xform *compress_xform =
2960 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2961 if (compress_xform == NULL) {
2962 RTE_LOG(ERR, USER1, "Compress xform could not be created\n");
2966 memcpy(compress_xform, ts_params->def_comp_xform,
2967 sizeof(struct rte_comp_xform));
2969 struct rte_comp_xform *decompress_xform =
2970 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2971 if (decompress_xform == NULL) {
2972 RTE_LOG(ERR, USER1, "Decompress xform could not be created\n");
2973 rte_free(compress_xform);
2977 memcpy(decompress_xform, ts_params->def_decomp_xform,
2978 sizeof(struct rte_comp_xform));
2980 struct interim_data_params int_data = {
2981 &compress_test_bufs[0],
2989 struct test_data_params test_data = {
2990 .compress_state = RTE_COMP_OP_STATELESS,
2991 .decompress_state = RTE_COMP_OP_STATEFUL,
2992 .buff_type = LB_BOTH,
2993 .zlib_dir = ZLIB_COMPRESS,
2996 .decompress_output_block_size = 2000,
2997 .decompress_steps_max = 4,
2998 .overflow = OVERFLOW_DISABLED,
2999 .ratio = RATIO_ENABLED
3002 /* Check if driver supports crc32 checksum and test */
3003 if (capab->comp_feature_flags & RTE_COMP_FF_CRC32_CHECKSUM) {
3004 compress_xform->compress.chksum = RTE_COMP_CHECKSUM_CRC32;
3005 decompress_xform->decompress.chksum = RTE_COMP_CHECKSUM_CRC32;
3006 /* Compress with Zlib, decompress with compressdev */
3007 test_data.buff_type = LB_BOTH;
3008 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3012 if (capab->comp_feature_flags &
3013 RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) {
3014 /* Now test with SGL buffers */
3015 test_data.buff_type = SGL_BOTH;
3016 if (test_deflate_comp_decomp(&int_data,
3024 /* Check if driver supports adler32 checksum and test */
3025 if (capab->comp_feature_flags & RTE_COMP_FF_ADLER32_CHECKSUM) {
3026 compress_xform->compress.chksum = RTE_COMP_CHECKSUM_ADLER32;
3027 decompress_xform->decompress.chksum = RTE_COMP_CHECKSUM_ADLER32;
3028 /* Compress with Zlib, decompress with compressdev */
3029 test_data.buff_type = LB_BOTH;
3030 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3034 if (capab->comp_feature_flags &
3035 RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) {
3036 /* Now test with SGL buffers */
3037 test_data.buff_type = SGL_BOTH;
3038 if (test_deflate_comp_decomp(&int_data,
3046 /* Check if driver supports combined crc and adler checksum and test */
3047 if (capab->comp_feature_flags & RTE_COMP_FF_CRC32_ADLER32_CHECKSUM) {
3048 compress_xform->compress.chksum =
3049 RTE_COMP_CHECKSUM_CRC32_ADLER32;
3050 decompress_xform->decompress.chksum =
3051 RTE_COMP_CHECKSUM_CRC32_ADLER32;
3052 /* Zlib doesn't support combined checksum */
3053 test_data.zlib_dir = ZLIB_NONE;
3054 /* Compress stateless, decompress stateful with compressdev */
3055 test_data.buff_type = LB_BOTH;
3056 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3060 if (capab->comp_feature_flags &
3061 RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) {
3062 /* Now test with SGL buffers */
3063 test_data.buff_type = SGL_BOTH;
3064 if (test_deflate_comp_decomp(&int_data,
3075 rte_free(compress_xform);
3076 rte_free(decompress_xform);
/*
 * Look up (or create) a memzone named "<name>_<socket_id>" of exactly
 * `size` bytes, IOVA-contiguous and cache-line aligned, on the caller's
 * socket. An existing zone of a different size is freed so it can be
 * re-reserved at the requested size. Logs an error when reservation
 * fails; presumably returns the zone pointer (or NULL) — the return
 * statement is elided in this view, TODO confirm against the full file.
 */
3080 static const struct rte_memzone *
3081 make_memzone(const char *name, size_t size)
3083 unsigned int socket_id = rte_socket_id();
3084 char mz_name[RTE_MEMZONE_NAMESIZE];
3085 const struct rte_memzone *memzone;
3087 snprintf(mz_name, RTE_MEMZONE_NAMESIZE, "%s_%u", name, socket_id);
3088 memzone = rte_memzone_lookup(mz_name);
3089 if (memzone != NULL && memzone->len != size) {
3090 rte_memzone_free(memzone);
3093 if (memzone == NULL) {
3094 memzone = rte_memzone_reserve_aligned(mz_name, size, socket_id,
3095 RTE_MEMZONE_IOVA_CONTIG, RTE_CACHE_LINE_SIZE);
3096 if (memzone == NULL)
3097 RTE_LOG(ERR, USER1, "Can't allocate memory zone %s",
/*
 * Exercise ops on mbufs whose data area is externally attached memory
 * (use_external_mbufs = 1) backed by memzones. The zones are sized for
 * the longest string in compress_test_bufs (incl. NUL); the compressed
 * zone gets the COMPRESS_BUF_SIZE_RATIO headroom for incompressible
 * input. Each test buffer is copied into the input zone and run in both
 * directions (compressdev<->Zlib). All three memzones are freed at the
 * end regardless of outcome.
 */
3104 test_compressdev_external_mbufs(void)
3106 struct comp_testsuite_params *ts_params = &testsuite_params;
3107 size_t data_len = 0;
3109 int ret = TEST_FAILED;
3111 for (i = 0; i < RTE_DIM(compress_test_bufs); i++)
3112 data_len = RTE_MAX(data_len, strlen(compress_test_bufs[i]) + 1);
3114 struct interim_data_params int_data = {
3118 &ts_params->def_comp_xform,
3119 &ts_params->def_decomp_xform,
3123 struct test_data_params test_data = {
3124 .compress_state = RTE_COMP_OP_STATELESS,
3125 .decompress_state = RTE_COMP_OP_STATELESS,
3126 .buff_type = LB_BOTH,
3127 .zlib_dir = ZLIB_DECOMPRESS,
3130 .use_external_mbufs = 1,
3131 .inbuf_data_size = data_len,
3132 .inbuf_memzone = make_memzone("inbuf", data_len),
3133 .compbuf_memzone = make_memzone("compbuf", data_len *
3134 COMPRESS_BUF_SIZE_RATIO),
3135 .uncompbuf_memzone = make_memzone("decompbuf", data_len),
3136 .overflow = OVERFLOW_DISABLED
3139 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
3140 /* prepare input data */
3141 data_len = strlen(compress_test_bufs[i]) + 1;
3142 rte_memcpy(test_data.inbuf_memzone->addr, compress_test_bufs[i],
3144 test_data.inbuf_data_size = data_len;
3145 int_data.buf_idx = &i;
3147 /* Compress with compressdev, decompress with Zlib */
3148 test_data.zlib_dir = ZLIB_DECOMPRESS;
3149 if (test_deflate_comp_decomp(&int_data, &test_data) < 0)
3152 /* Compress with Zlib, decompress with compressdev */
3153 test_data.zlib_dir = ZLIB_COMPRESS;
3154 if (test_deflate_comp_decomp(&int_data, &test_data) < 0)
3161 rte_memzone_free(test_data.inbuf_memzone);
3162 rte_memzone_free(test_data.compbuf_memzone);
3163 rte_memzone_free(test_data.uncompbuf_memzone);
/*
 * Fixed-Huffman stateless test with OVERFLOW_ENABLED: the harness
 * provokes an out-of-space condition that the PMD should report as
 * recoverable. test_deflate_comp_decomp() returning > 0 is treated as
 * "overflow not handled by this device" (tolerated, see comp_result
 * branches) while < 0 is a hard failure. Runs every test buffer in
 * both directions. Uses a private xform copy forced to
 * RTE_COMP_HUFFMAN_FIXED, freed at the end.
 */
3168 test_compressdev_deflate_stateless_fixed_oos_recoverable(void)
3170 struct comp_testsuite_params *ts_params = &testsuite_params;
3174 const struct rte_compressdev_capabilities *capab;
3176 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3177 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3179 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_FIXED) == 0)
3182 struct rte_comp_xform *compress_xform =
3183 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
3185 if (compress_xform == NULL) {
3187 "Compress xform could not be created\n");
3192 memcpy(compress_xform, ts_params->def_comp_xform,
3193 sizeof(struct rte_comp_xform));
3194 compress_xform->compress.deflate.huffman = RTE_COMP_HUFFMAN_FIXED;
3196 struct interim_data_params int_data = {
3201 &ts_params->def_decomp_xform,
3205 struct test_data_params test_data = {
3206 .compress_state = RTE_COMP_OP_STATELESS,
3207 .decompress_state = RTE_COMP_OP_STATELESS,
3208 .buff_type = LB_BOTH,
3209 .zlib_dir = ZLIB_DECOMPRESS,
3212 .overflow = OVERFLOW_ENABLED,
3213 .ratio = RATIO_ENABLED
3216 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
3217 int_data.test_bufs = &compress_test_bufs[i];
3218 int_data.buf_idx = &i;
3220 /* Compress with compressdev, decompress with Zlib */
3221 test_data.zlib_dir = ZLIB_DECOMPRESS;
3222 comp_result = test_deflate_comp_decomp(&int_data, &test_data);
3223 if (comp_result < 0) {
3226 } else if (comp_result > 0) {
3231 /* Compress with Zlib, decompress with compressdev */
3232 test_data.zlib_dir = ZLIB_COMPRESS;
3233 comp_result = test_deflate_comp_decomp(&int_data, &test_data);
3234 if (comp_result < 0) {
3237 } else if (comp_result > 0) {
3246 rte_free(compress_xform);
/*
 * QAT intermediate-buffer ("im buffer") test: a single stateless op on
 * one linear buffer of IM_BUF_DATA_TEST_SIZE_LB (59600) random bytes —
 * above the QAT fallback threshold but below the max mbuf size (see
 * header constants). Both compress and decompress run on compressdev
 * (ZLIB_NONE); ratio check disabled since random data may grow.
 * Dynamic Huffman is forced for the test and restored afterwards.
 */
3251 test_compressdev_deflate_im_buffers_LB_1op(void)
3253 struct comp_testsuite_params *ts_params = &testsuite_params;
3255 int ret = TEST_SUCCESS;
3257 const struct rte_compressdev_capabilities *capab;
3258 char *test_buffer = NULL;
3260 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3261 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3263 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3266 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3269 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_LB, 0);
3270 if (test_buffer == NULL) {
3272 "Can't allocate buffer for 'im buffer' test\n");
3276 struct interim_data_params int_data = {
3277 (const char * const *)&test_buffer,
3280 &ts_params->def_comp_xform,
3281 &ts_params->def_decomp_xform,
3285 struct test_data_params test_data = {
3286 .compress_state = RTE_COMP_OP_STATELESS,
3287 .decompress_state = RTE_COMP_OP_STATELESS,
3288 /* must be LB to SGL,
3289 * input LB buffer reaches its maximum,
3290 * if ratio 1.3 than another mbuf must be
3291 * created and attached
3293 .buff_type = LB_BOTH,
3294 .zlib_dir = ZLIB_NONE,
3297 .overflow = OVERFLOW_DISABLED,
3298 .ratio = RATIO_DISABLED
3301 ts_params->def_comp_xform->compress.deflate.huffman =
3302 RTE_COMP_HUFFMAN_DYNAMIC;
3304 /* fill the buffer with data based on rand. data */
3305 srand(IM_BUF_DATA_TEST_SIZE_LB);
3306 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_LB - 1; ++j)
3307 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3309 /* Compress with compressdev, decompress with compressdev */
3310 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3316 ts_params->def_comp_xform->compress.deflate.huffman =
3317 RTE_COMP_HUFFMAN_DEFAULT;
3318 rte_free(test_buffer);
/*
 * IM-buffer test, 2 linear-buffer ops where the LARGE (59600-byte
 * random) buffer is the FIRST op and a small static test string is the
 * second — ordering matters for the QAT intermediate-buffer fallback
 * path being exercised. Dynamic Huffman forced, ZLIB_NONE (driver does
 * both directions), ratio check off. Huffman mode restored at the end.
 */
3323 test_compressdev_deflate_im_buffers_LB_2ops_first(void)
3325 struct comp_testsuite_params *ts_params = &testsuite_params;
3327 int ret = TEST_SUCCESS;
3329 const struct rte_compressdev_capabilities *capab;
3330 char *test_buffer = NULL;
3331 const char *test_buffers[2];
3333 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3334 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3336 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3339 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3342 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_LB, 0);
3343 if (test_buffer == NULL) {
3345 "Can't allocate buffer for 'im buffer' test\n");
3349 test_buffers[0] = test_buffer;
3350 test_buffers[1] = compress_test_bufs[0];
3352 struct interim_data_params int_data = {
3353 (const char * const *)test_buffers,
3356 &ts_params->def_comp_xform,
3357 &ts_params->def_decomp_xform,
3361 struct test_data_params test_data = {
3362 .compress_state = RTE_COMP_OP_STATELESS,
3363 .decompress_state = RTE_COMP_OP_STATELESS,
3364 .buff_type = LB_BOTH,
3365 .zlib_dir = ZLIB_NONE,
3368 .overflow = OVERFLOW_DISABLED,
3369 .ratio = RATIO_DISABLED
3372 ts_params->def_comp_xform->compress.deflate.huffman =
3373 RTE_COMP_HUFFMAN_DYNAMIC;
3375 /* fill the buffer with data based on rand. data */
3376 srand(IM_BUF_DATA_TEST_SIZE_LB);
3377 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_LB - 1; ++j)
3378 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3380 /* Compress with compressdev, decompress with compressdev */
3381 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3387 ts_params->def_comp_xform->compress.deflate.huffman =
3388 RTE_COMP_HUFFMAN_DEFAULT;
3389 rte_free(test_buffer);
/*
 * IM-buffer test, 2 linear-buffer ops with the large random buffer as
 * the SECOND op (small static string first) — mirror ordering of the
 * _2ops_first variant to cover the QAT fallback on a later op in the
 * burst. Same setup otherwise: dynamic Huffman, ZLIB_NONE, ratio off.
 */
3394 test_compressdev_deflate_im_buffers_LB_2ops_second(void)
3396 struct comp_testsuite_params *ts_params = &testsuite_params;
3398 int ret = TEST_SUCCESS;
3400 const struct rte_compressdev_capabilities *capab;
3401 char *test_buffer = NULL;
3402 const char *test_buffers[2];
3404 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3405 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3407 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3410 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3413 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_LB, 0);
3414 if (test_buffer == NULL) {
3416 "Can't allocate buffer for 'im buffer' test\n");
3420 test_buffers[0] = compress_test_bufs[0];
3421 test_buffers[1] = test_buffer;
3423 struct interim_data_params int_data = {
3424 (const char * const *)test_buffers,
3427 &ts_params->def_comp_xform,
3428 &ts_params->def_decomp_xform,
3432 struct test_data_params test_data = {
3433 .compress_state = RTE_COMP_OP_STATELESS,
3434 .decompress_state = RTE_COMP_OP_STATELESS,
3435 .buff_type = LB_BOTH,
3436 .zlib_dir = ZLIB_NONE,
3439 .overflow = OVERFLOW_DISABLED,
3440 .ratio = RATIO_DISABLED
3443 ts_params->def_comp_xform->compress.deflate.huffman =
3444 RTE_COMP_HUFFMAN_DYNAMIC;
3446 /* fill the buffer with data based on rand. data */
3447 srand(IM_BUF_DATA_TEST_SIZE_LB);
3448 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_LB - 1; ++j)
3449 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3451 /* Compress with compressdev, decompress with compressdev */
3452 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3458 ts_params->def_comp_xform->compress.deflate.huffman =
3459 RTE_COMP_HUFFMAN_DEFAULT;
3460 rte_free(test_buffer);
/*
 * IM-buffer test, 3 linear-buffer ops in one burst: small / LARGE
 * random / small, placing the fallback-triggering buffer in the middle.
 * Dynamic Huffman forced, ZLIB_NONE, ratio check disabled; Huffman
 * mode restored and the random buffer freed on exit.
 */
3465 test_compressdev_deflate_im_buffers_LB_3ops(void)
3467 struct comp_testsuite_params *ts_params = &testsuite_params;
3469 int ret = TEST_SUCCESS;
3471 const struct rte_compressdev_capabilities *capab;
3472 char *test_buffer = NULL;
3473 const char *test_buffers[3];
3475 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3476 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3478 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3481 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3484 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_LB, 0);
3485 if (test_buffer == NULL) {
3487 "Can't allocate buffer for 'im buffer' test\n");
3491 test_buffers[0] = compress_test_bufs[0];
3492 test_buffers[1] = test_buffer;
3493 test_buffers[2] = compress_test_bufs[1];
3495 struct interim_data_params int_data = {
3496 (const char * const *)test_buffers,
3499 &ts_params->def_comp_xform,
3500 &ts_params->def_decomp_xform,
3504 struct test_data_params test_data = {
3505 .compress_state = RTE_COMP_OP_STATELESS,
3506 .decompress_state = RTE_COMP_OP_STATELESS,
3507 .buff_type = LB_BOTH,
3508 .zlib_dir = ZLIB_NONE,
3511 .overflow = OVERFLOW_DISABLED,
3512 .ratio = RATIO_DISABLED
3515 ts_params->def_comp_xform->compress.deflate.huffman =
3516 RTE_COMP_HUFFMAN_DYNAMIC;
3518 /* fill the buffer with data based on rand. data */
3519 srand(IM_BUF_DATA_TEST_SIZE_LB);
3520 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_LB - 1; ++j)
3521 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3523 /* Compress with compressdev, decompress with compressdev */
3524 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3530 ts_params->def_comp_xform->compress.deflate.huffman =
3531 RTE_COMP_HUFFMAN_DEFAULT;
3532 rte_free(test_buffer);
/*
 * IM-buffer test, 4 linear-buffer ops: small / LARGE / small / LARGE
 * (the same random buffer is used for ops 1 and 3), interleaving
 * fallback-triggering and normal ops in a single burst. Dynamic
 * Huffman forced, ZLIB_NONE, ratio check disabled.
 */
3537 test_compressdev_deflate_im_buffers_LB_4ops(void)
3539 struct comp_testsuite_params *ts_params = &testsuite_params;
3541 int ret = TEST_SUCCESS;
3543 const struct rte_compressdev_capabilities *capab;
3544 char *test_buffer = NULL;
3545 const char *test_buffers[4];
3547 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3548 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3550 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3553 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3556 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_LB, 0);
3557 if (test_buffer == NULL) {
3559 "Can't allocate buffer for 'im buffer' test\n");
3563 test_buffers[0] = compress_test_bufs[0];
3564 test_buffers[1] = test_buffer;
3565 test_buffers[2] = compress_test_bufs[1];
3566 test_buffers[3] = test_buffer;
3568 struct interim_data_params int_data = {
3569 (const char * const *)test_buffers,
3572 &ts_params->def_comp_xform,
3573 &ts_params->def_decomp_xform,
3577 struct test_data_params test_data = {
3578 .compress_state = RTE_COMP_OP_STATELESS,
3579 .decompress_state = RTE_COMP_OP_STATELESS,
3580 .buff_type = LB_BOTH,
3581 .zlib_dir = ZLIB_NONE,
3584 .overflow = OVERFLOW_DISABLED,
3585 .ratio = RATIO_DISABLED
3588 ts_params->def_comp_xform->compress.deflate.huffman =
3589 RTE_COMP_HUFFMAN_DYNAMIC;
3591 /* fill the buffer with data based on rand. data */
3592 srand(IM_BUF_DATA_TEST_SIZE_LB);
3593 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_LB - 1; ++j)
3594 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3596 /* Compress with compressdev, decompress with compressdev */
3597 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3603 ts_params->def_comp_xform->compress.deflate.huffman =
3604 RTE_COMP_HUFFMAN_DEFAULT;
3605 rte_free(test_buffer);
/*
 * IM-buffer test, SGL variant: one stateless op on an
 * IM_BUF_DATA_TEST_SIZE_SGL-byte random buffer (3 max-size mbufs'
 * worth, chained as scatter-gather on both input and output).
 * Dynamic Huffman forced, ZLIB_NONE (driver compresses and
 * decompresses), ratio check disabled for random data.
 */
3611 test_compressdev_deflate_im_buffers_SGL_1op(void)
3613 struct comp_testsuite_params *ts_params = &testsuite_params;
3615 int ret = TEST_SUCCESS;
3617 const struct rte_compressdev_capabilities *capab;
3618 char *test_buffer = NULL;
3620 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3621 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3623 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3626 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3629 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_SGL, 0);
3630 if (test_buffer == NULL) {
3632 "Can't allocate buffer for big-data\n");
3636 struct interim_data_params int_data = {
3637 (const char * const *)&test_buffer,
3640 &ts_params->def_comp_xform,
3641 &ts_params->def_decomp_xform,
3645 struct test_data_params test_data = {
3646 .compress_state = RTE_COMP_OP_STATELESS,
3647 .decompress_state = RTE_COMP_OP_STATELESS,
3648 .buff_type = SGL_BOTH,
3649 .zlib_dir = ZLIB_NONE,
3652 .overflow = OVERFLOW_DISABLED,
3653 .ratio = RATIO_DISABLED
3656 ts_params->def_comp_xform->compress.deflate.huffman =
3657 RTE_COMP_HUFFMAN_DYNAMIC;
3659 /* fill the buffer with data based on rand. data */
3660 srand(IM_BUF_DATA_TEST_SIZE_SGL);
3661 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_SGL - 1; ++j)
3662 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3664 /* Compress with compressdev, decompress with compressdev */
3665 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3671 ts_params->def_comp_xform->compress.deflate.huffman =
3672 RTE_COMP_HUFFMAN_DEFAULT;
3673 rte_free(test_buffer);
/*
 * IM-buffer SGL test, 2 ops with the LARGE SGL-sized random buffer as
 * the FIRST op and a small static string second. SGL chains on both
 * sides, dynamic Huffman forced, ZLIB_NONE, ratio check disabled.
 */
3678 test_compressdev_deflate_im_buffers_SGL_2ops_first(void)
3680 struct comp_testsuite_params *ts_params = &testsuite_params;
3682 int ret = TEST_SUCCESS;
3684 const struct rte_compressdev_capabilities *capab;
3685 char *test_buffer = NULL;
3686 const char *test_buffers[2];
3688 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3689 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3691 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3694 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3697 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_SGL, 0);
3698 if (test_buffer == NULL) {
3700 "Can't allocate buffer for big-data\n");
3704 test_buffers[0] = test_buffer;
3705 test_buffers[1] = compress_test_bufs[0];
3707 struct interim_data_params int_data = {
3708 (const char * const *)test_buffers,
3711 &ts_params->def_comp_xform,
3712 &ts_params->def_decomp_xform,
3716 struct test_data_params test_data = {
3717 .compress_state = RTE_COMP_OP_STATELESS,
3718 .decompress_state = RTE_COMP_OP_STATELESS,
3719 .buff_type = SGL_BOTH,
3720 .zlib_dir = ZLIB_NONE,
3723 .overflow = OVERFLOW_DISABLED,
3724 .ratio = RATIO_DISABLED
3727 ts_params->def_comp_xform->compress.deflate.huffman =
3728 RTE_COMP_HUFFMAN_DYNAMIC;
3730 /* fill the buffer with data based on rand. data */
3731 srand(IM_BUF_DATA_TEST_SIZE_SGL);
3732 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_SGL - 1; ++j)
3733 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3735 /* Compress with compressdev, decompress with compressdev */
3736 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3742 ts_params->def_comp_xform->compress.deflate.huffman =
3743 RTE_COMP_HUFFMAN_DEFAULT;
3744 rte_free(test_buffer);
/*
 * IM-buffer SGL test, 2 ops with the LARGE random buffer as the SECOND
 * op (small static string first) — mirror ordering of _2ops_first.
 * SGL chains both sides, dynamic Huffman forced, ZLIB_NONE, ratio off.
 */
3749 test_compressdev_deflate_im_buffers_SGL_2ops_second(void)
3751 struct comp_testsuite_params *ts_params = &testsuite_params;
3753 int ret = TEST_SUCCESS;
3755 const struct rte_compressdev_capabilities *capab;
3756 char *test_buffer = NULL;
3757 const char *test_buffers[2];
3759 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3760 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3762 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3765 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3768 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_SGL, 0);
3769 if (test_buffer == NULL) {
3771 "Can't allocate buffer for big-data\n");
3775 test_buffers[0] = compress_test_bufs[0];
3776 test_buffers[1] = test_buffer;
3778 struct interim_data_params int_data = {
3779 (const char * const *)test_buffers,
3782 &ts_params->def_comp_xform,
3783 &ts_params->def_decomp_xform,
3787 struct test_data_params test_data = {
3788 .compress_state = RTE_COMP_OP_STATELESS,
3789 .decompress_state = RTE_COMP_OP_STATELESS,
3790 .buff_type = SGL_BOTH,
3791 .zlib_dir = ZLIB_NONE,
3794 .overflow = OVERFLOW_DISABLED,
3795 .ratio = RATIO_DISABLED
3798 ts_params->def_comp_xform->compress.deflate.huffman =
3799 RTE_COMP_HUFFMAN_DYNAMIC;
3801 /* fill the buffer with data based on rand. data */
3802 srand(IM_BUF_DATA_TEST_SIZE_SGL);
3803 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_SGL - 1; ++j)
3804 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3806 /* Compress with compressdev, decompress with compressdev */
3807 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3813 ts_params->def_comp_xform->compress.deflate.huffman =
3814 RTE_COMP_HUFFMAN_DEFAULT;
3815 rte_free(test_buffer);
/*
 * IM-buffer SGL test, 3 ops in one burst: small / LARGE random /
 * small, with the fallback-triggering SGL buffer in the middle.
 * Dynamic Huffman forced, ZLIB_NONE, ratio check disabled.
 */
3820 test_compressdev_deflate_im_buffers_SGL_3ops(void)
3822 struct comp_testsuite_params *ts_params = &testsuite_params;
3824 int ret = TEST_SUCCESS;
3826 const struct rte_compressdev_capabilities *capab;
3827 char *test_buffer = NULL;
3828 const char *test_buffers[3];
3830 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3831 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3833 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3836 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3839 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_SGL, 0);
3840 if (test_buffer == NULL) {
3842 "Can't allocate buffer for big-data\n");
3846 test_buffers[0] = compress_test_bufs[0];
3847 test_buffers[1] = test_buffer;
3848 test_buffers[2] = compress_test_bufs[1];
3850 struct interim_data_params int_data = {
3851 (const char * const *)test_buffers,
3854 &ts_params->def_comp_xform,
3855 &ts_params->def_decomp_xform,
3859 struct test_data_params test_data = {
3860 .compress_state = RTE_COMP_OP_STATELESS,
3861 .decompress_state = RTE_COMP_OP_STATELESS,
3862 .buff_type = SGL_BOTH,
3863 .zlib_dir = ZLIB_NONE,
3866 .overflow = OVERFLOW_DISABLED,
3867 .ratio = RATIO_DISABLED
3870 ts_params->def_comp_xform->compress.deflate.huffman =
3871 RTE_COMP_HUFFMAN_DYNAMIC;
3873 /* fill the buffer with data based on rand. data */
3874 srand(IM_BUF_DATA_TEST_SIZE_SGL);
3875 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_SGL - 1; ++j)
3876 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3878 /* Compress with compressdev, decompress with compressdev */
3879 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3885 ts_params->def_comp_xform->compress.deflate.huffman =
3886 RTE_COMP_HUFFMAN_DEFAULT;
3887 rte_free(test_buffer);
/*
 * IM-buffer SGL test, 4 ops: small / LARGE / small / LARGE (same
 * random buffer reused for ops 1 and 3), interleaving normal and
 * fallback-triggering SGL ops in a single burst. Dynamic Huffman
 * forced, ZLIB_NONE, ratio check disabled; state restored on exit.
 */
3893 test_compressdev_deflate_im_buffers_SGL_4ops(void)
3895 struct comp_testsuite_params *ts_params = &testsuite_params;
3897 int ret = TEST_SUCCESS;
3899 const struct rte_compressdev_capabilities *capab;
3900 char *test_buffer = NULL;
3901 const char *test_buffers[4];
3903 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3904 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3906 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3909 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3912 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_SGL, 0);
3913 if (test_buffer == NULL) {
3915 "Can't allocate buffer for big-data\n");
3919 test_buffers[0] = compress_test_bufs[0];
3920 test_buffers[1] = test_buffer;
3921 test_buffers[2] = compress_test_bufs[1];
3922 test_buffers[3] = test_buffer;
3924 struct interim_data_params int_data = {
3925 (const char * const *)test_buffers,
3928 &ts_params->def_comp_xform,
3929 &ts_params->def_decomp_xform,
3933 struct test_data_params test_data = {
3934 .compress_state = RTE_COMP_OP_STATELESS,
3935 .decompress_state = RTE_COMP_OP_STATELESS,
3936 .buff_type = SGL_BOTH,
3937 .zlib_dir = ZLIB_NONE,
3940 .overflow = OVERFLOW_DISABLED,
3941 .ratio = RATIO_DISABLED
3944 ts_params->def_comp_xform->compress.deflate.huffman =
3945 RTE_COMP_HUFFMAN_DYNAMIC;
3947 /* fill the buffer with data based on rand. data */
3948 srand(IM_BUF_DATA_TEST_SIZE_SGL);
3949 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_SGL - 1; ++j)
3950 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3952 /* Compress with compressdev, decompress with compressdev */
3953 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3959 ts_params->def_comp_xform->compress.deflate.huffman =
3960 RTE_COMP_HUFFMAN_DEFAULT;
3961 rte_free(test_buffer);
3966 test_compressdev_deflate_im_buffers_SGL_over_1op(void)
3968 struct comp_testsuite_params *ts_params = &testsuite_params;
3970 int ret = TEST_SUCCESS;
3972 const struct rte_compressdev_capabilities *capab;
3973 char *test_buffer = NULL;
3975 RTE_LOG(INFO, USER1, "This is a negative test, errors are expected\n");
3977 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3978 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3980 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3983 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3986 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_OVER, 0);
3987 if (test_buffer == NULL) {
3989 "Can't allocate buffer for big-data\n");
3993 struct interim_data_params int_data = {
3994 (const char * const *)&test_buffer,
3997 &ts_params->def_comp_xform,
3998 &ts_params->def_decomp_xform,
4002 struct test_data_params test_data = {
4003 .compress_state = RTE_COMP_OP_STATELESS,
4004 .decompress_state = RTE_COMP_OP_STATELESS,
4005 .buff_type = SGL_BOTH,
4006 .zlib_dir = ZLIB_NONE,
4009 .overflow = OVERFLOW_DISABLED,
4010 .ratio = RATIO_DISABLED
4013 ts_params->def_comp_xform->compress.deflate.huffman =
4014 RTE_COMP_HUFFMAN_DYNAMIC;
4016 /* fill the buffer with data based on rand. data */
4017 srand(IM_BUF_DATA_TEST_SIZE_OVER);
4018 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_OVER - 1; ++j)
4019 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
4021 /* Compress with compressdev, decompress with compressdev */
4022 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
4028 ts_params->def_comp_xform->compress.deflate.huffman =
4029 RTE_COMP_HUFFMAN_DEFAULT;
4030 rte_free(test_buffer);
4037 test_compressdev_deflate_im_buffers_SGL_over_2ops_first(void)
4039 struct comp_testsuite_params *ts_params = &testsuite_params;
4041 int ret = TEST_SUCCESS;
4043 const struct rte_compressdev_capabilities *capab;
4044 char *test_buffer = NULL;
4045 const char *test_buffers[2];
4047 RTE_LOG(INFO, USER1, "This is a negative test, errors are expected\n");
4049 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
4050 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
4052 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
4055 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
4058 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_OVER, 0);
4059 if (test_buffer == NULL) {
4061 "Can't allocate buffer for big-data\n");
4065 test_buffers[0] = test_buffer;
4066 test_buffers[1] = compress_test_bufs[0];
4068 struct interim_data_params int_data = {
4069 (const char * const *)test_buffers,
4072 &ts_params->def_comp_xform,
4073 &ts_params->def_decomp_xform,
4077 struct test_data_params test_data = {
4078 .compress_state = RTE_COMP_OP_STATELESS,
4079 .decompress_state = RTE_COMP_OP_STATELESS,
4080 .buff_type = SGL_BOTH,
4081 .zlib_dir = ZLIB_NONE,
4084 .overflow = OVERFLOW_DISABLED,
4085 .ratio = RATIO_DISABLED
4088 ts_params->def_comp_xform->compress.deflate.huffman =
4089 RTE_COMP_HUFFMAN_DYNAMIC;
4091 /* fill the buffer with data based on rand. data */
4092 srand(IM_BUF_DATA_TEST_SIZE_OVER);
4093 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_OVER - 1; ++j)
4094 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
4096 /* Compress with compressdev, decompress with compressdev */
4097 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
4103 ts_params->def_comp_xform->compress.deflate.huffman =
4104 RTE_COMP_HUFFMAN_DEFAULT;
4105 rte_free(test_buffer);
4110 test_compressdev_deflate_im_buffers_SGL_over_2ops_second(void)
4112 struct comp_testsuite_params *ts_params = &testsuite_params;
4114 int ret = TEST_SUCCESS;
4116 const struct rte_compressdev_capabilities *capab;
4117 char *test_buffer = NULL;
4118 const char *test_buffers[2];
4120 RTE_LOG(INFO, USER1, "This is a negative test, errors are expected\n");
4122 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
4123 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
4125 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
4128 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
4131 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_OVER, 0);
4132 if (test_buffer == NULL) {
4134 "Can't allocate buffer for big-data\n");
4138 test_buffers[0] = compress_test_bufs[0];
4139 test_buffers[1] = test_buffer;
4141 struct interim_data_params int_data = {
4142 (const char * const *)test_buffers,
4145 &ts_params->def_comp_xform,
4146 &ts_params->def_decomp_xform,
4150 struct test_data_params test_data = {
4151 .compress_state = RTE_COMP_OP_STATELESS,
4152 .decompress_state = RTE_COMP_OP_STATELESS,
4153 .buff_type = SGL_BOTH,
4154 .zlib_dir = ZLIB_NONE,
4157 .overflow = OVERFLOW_DISABLED,
4158 .ratio = RATIO_DISABLED
4161 ts_params->def_comp_xform->compress.deflate.huffman =
4162 RTE_COMP_HUFFMAN_DYNAMIC;
4164 /* fill the buffer with data based on rand. data */
4165 srand(IM_BUF_DATA_TEST_SIZE_OVER);
4166 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_OVER - 1; ++j)
4167 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
4169 /* Compress with compressdev, decompress with compressdev */
4170 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
4176 ts_params->def_comp_xform->compress.deflate.huffman =
4177 RTE_COMP_HUFFMAN_DEFAULT;
4178 rte_free(test_buffer);
4182 static struct unit_test_suite compressdev_testsuite = {
4183 .suite_name = "compressdev unit test suite",
4184 .setup = testsuite_setup,
4185 .teardown = testsuite_teardown,
4186 .unit_test_cases = {
4187 TEST_CASE_ST(NULL, NULL,
4188 test_compressdev_invalid_configuration),
4189 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4190 test_compressdev_deflate_stateless_fixed),
4191 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4192 test_compressdev_deflate_stateless_dynamic),
4193 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4194 test_compressdev_deflate_stateless_dynamic_big),
4195 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4196 test_compressdev_deflate_stateless_multi_op),
4197 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4198 test_compressdev_deflate_stateless_multi_level),
4199 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4200 test_compressdev_deflate_stateless_multi_xform),
4201 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4202 test_compressdev_deflate_stateless_sgl),
4203 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4204 test_compressdev_deflate_stateless_checksum),
4205 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4206 test_compressdev_out_of_space_buffer),
4207 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4208 test_compressdev_deflate_stateful_decomp),
4209 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4210 test_compressdev_deflate_stateful_decomp_checksum),
4211 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4212 test_compressdev_external_mbufs),
4213 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4214 test_compressdev_deflate_stateless_fixed_oos_recoverable),
4216 /* Positive test cases for IM buffer handling verification */
4217 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4218 test_compressdev_deflate_im_buffers_LB_1op),
4219 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4220 test_compressdev_deflate_im_buffers_LB_2ops_first),
4221 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4222 test_compressdev_deflate_im_buffers_LB_2ops_second),
4223 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4224 test_compressdev_deflate_im_buffers_LB_3ops),
4226 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4227 test_compressdev_deflate_im_buffers_LB_4ops),
4228 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4229 test_compressdev_deflate_im_buffers_SGL_1op),
4231 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4232 test_compressdev_deflate_im_buffers_SGL_2ops_first),
4233 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4234 test_compressdev_deflate_im_buffers_SGL_2ops_second),
4235 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4236 test_compressdev_deflate_im_buffers_SGL_3ops),
4237 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4238 test_compressdev_deflate_im_buffers_SGL_4ops),
4240 /* Negative test cases for IM buffer handling verification */
4242 /* For this test huge mempool is necessary.
4243 * It tests one case:
4244 * only one op containing big amount of data, so that
4245 * number of requested descriptors higher than number
4246 * of available descriptors (128)
4248 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4249 test_compressdev_deflate_im_buffers_SGL_over_1op),
4251 /* For this test huge mempool is necessary.
4252 * 2 ops. First op contains big amount of data:
4253 * number of requested descriptors higher than number
4254 * of available descriptors (128), the second op is
4255 * relatively small. In this case both ops are rejected
4257 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4258 test_compressdev_deflate_im_buffers_SGL_over_2ops_first),
4260 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4261 test_compressdev_deflate_im_buffers_SGL_over_2ops_second),
4263 TEST_CASES_END() /**< NULL terminate unit test array */
4268 test_compressdev(void)
4270 return unit_test_suite_runner(&compressdev_testsuite);
4273 REGISTER_TEST_COMMAND(compressdev_autotest, test_compressdev);