diff --git a/app/test-bbdev/test_bbdev_perf.c b/app/test-bbdev/test_bbdev_perf.c
index 729abf6..59b37ed 100644
--- a/app/test-bbdev/test_bbdev_perf.c
+++ b/app/test-bbdev/test_bbdev_perf.c
@@ -24,7 +24,7 @@
 #define GET_SOCKET(socket_id) (((socket_id) == SOCKET_ID_ANY) ? 0 : (socket_id))
 
 #define MAX_QUEUES RTE_MAX_LCORE
-#define TEST_REPETITIONS 1000
+#define TEST_REPETITIONS 100
 #define WAIT_OFFLOAD_US 1000
 
 #ifdef RTE_BASEBAND_FPGA_LTE_FEC
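
TEST_REPETITIONS is the iteration count the performance loops in this file use for each burst of operations, so dropping it from 1000 to 100 shortens those runs by roughly an order of magnitude while averaging over fewer samples. A hypothetical sketch (illustrative names, not the harness code) of how such a constant typically bounds a measurement loop:

/*
 * Hypothetical sketch, not part of test_bbdev_perf.c: a repetitions constant
 * bounding a measurement loop.  run_one_burst() stands in for the real
 * enqueue/dequeue work and its cycle counting.
 */
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define TEST_REPETITIONS 100	/* was 1000: ~10x fewer iterations */

static uint64_t
run_one_burst(void)
{
	return 42;	/* stand-in for measured cycles of one burst */
}

int
main(void)
{
	uint64_t total_cycles = 0;
	int i;

	for (i = 0; i < TEST_REPETITIONS; ++i)
		total_cycles += run_one_burst();

	printf("avg cycles/burst: %" PRIu64 "\n",
			total_cycles / TEST_REPETITIONS);
	return 0;
}
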
@@ -3719,7 +3719,11 @@ bler_test(struct active_device *ad,
                        RTE_ALIGN(sizeof(struct thread_params) * num_lcores,
                                RTE_CACHE_LINE_SIZE));
 
-       if (test_vector.op_type == RTE_BBDEV_OP_LDPC_DEC)
+       if ((test_vector.op_type == RTE_BBDEV_OP_LDPC_DEC) &&
+                       !check_bit(test_vector.ldpc_dec.op_flags,
+                       RTE_BBDEV_LDPC_INTERNAL_HARQ_MEMORY_LOOPBACK)
+                       && !check_bit(test_vector.ldpc_dec.op_flags,
+                       RTE_BBDEV_LDPC_LLR_COMPRESSION))
                bler_function = bler_pmd_lcore_ldpc_dec;
        else
                return TEST_SKIPPED;
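
The widened guard selects the LDPC-decode BLER lcore only when the test vector sets neither RTE_BBDEV_LDPC_INTERNAL_HARQ_MEMORY_LOOPBACK nor RTE_BBDEV_LDPC_LLR_COMPRESSION in its op_flags; any other combination now falls through to TEST_SKIPPED. A hedged sketch of the gating, assuming check_bit() is the usual bitmask helper used throughout this file:

/*
 * Hedged sketch of the condition above.  check_bit() is assumed to be the
 * plain "is this capability bit set" helper; use_ldpc_dec_bler() is an
 * illustrative wrapper, not a function from test_bbdev_perf.c.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#include <rte_bbdev_op.h>

static inline bool
check_bit(uint32_t bitmap, uint32_t bitmask)
{
	return bitmap & bitmask;
}

/* True when the LDPC-decode BLER path can be used for this test vector. */
static bool
use_ldpc_dec_bler(enum rte_bbdev_op_type op_type, uint32_t op_flags)
{
	return op_type == RTE_BBDEV_OP_LDPC_DEC &&
		!check_bit(op_flags,
			RTE_BBDEV_LDPC_INTERNAL_HARQ_MEMORY_LOOPBACK) &&
		!check_bit(op_flags, RTE_BBDEV_LDPC_LLR_COMPRESSION);
}

int
main(void)
{
	uint32_t op_flags = RTE_BBDEV_LDPC_LLR_COMPRESSION;

	printf("BLER path usable: %d\n",
			use_ldpc_dec_bler(RTE_BBDEV_OP_LDPC_DEC, op_flags));
	return 0;
}
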
@@ -4396,8 +4400,8 @@ offload_latency_test_dec(struct rte_mempool *mempool, struct test_buffers *bufs,
                /* Dequeue one operation */
                do {
                        deq += rte_bbdev_dequeue_dec_ops(dev_id, queue_id,
-                                       &ops_deq[deq], 1);
-               } while (unlikely(deq != 1));
+                                       &ops_deq[deq], enq);
+               } while (unlikely(deq == 0));
 
                deq_last_time = rte_rdtsc_precise() - deq_start_time;
                time_st->deq_max_time = RTE_MAX(time_st->deq_max_time,
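
The old loop requested exactly one operation per call and spun until deq reached 1, so when several operations had been enqueued as one burst only the first was ever collected. The reworked loop asks for up to enq operations, indexes the output array by what has already come back, and retries only while nothing has been dequeued yet; the same fix is repeated for the LDPC-decode, encode and LDPC-encode variants in the hunks below. A hedged, self-contained sketch of the pattern, with a generic callback standing in for the rte_bbdev_dequeue_*_ops() calls:

/*
 * Hedged sketch of the burst-drain pattern above.  dequeue_burst_t stands in
 * for the rte_bbdev_dequeue_*_ops() calls; a real PMD may return fewer ops
 * than requested, including zero while the device is still processing.
 */
#include <stdint.h>
#include <stdio.h>

typedef uint16_t (*dequeue_burst_t)(void *ops[], uint16_t num_ops);

/* Collect the operations belonging to one enqueued burst of 'enq' ops. */
static uint16_t
drain_enqueued_burst(dequeue_burst_t dequeue_burst, void *ops_deq[],
		uint16_t enq)
{
	uint16_t deq = 0;

	/* Ask for up to 'enq' ops and retry only while nothing has been
	 * dequeued yet, instead of requesting exactly one op per call. */
	do {
		deq += dequeue_burst(&ops_deq[deq], enq);
	} while (deq == 0);

	return deq;
}

/* Toy dequeue callback: returns nothing on the first call and the whole
 * burst on the second, to exercise the retry path. */
static uint16_t
mock_dequeue(void *ops[], uint16_t num_ops)
{
	static int calls;

	(void)ops;
	return (++calls < 2) ? 0 : num_ops;
}

int
main(void)
{
	void *ops[4] = { 0 };
	uint16_t got = drain_enqueued_burst(mock_dequeue, ops, 4);

	printf("dequeued %u of 4 ops\n", (unsigned int)got);
	return 0;
}
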
@@ -4487,8 +4491,8 @@ offload_latency_test_ldpc_dec(struct rte_mempool *mempool,
                /* Dequeue one operation */
                do {
                        deq += rte_bbdev_dequeue_ldpc_dec_ops(dev_id, queue_id,
-                                       &ops_deq[deq], 1);
-               } while (unlikely(deq != 1));
+                                       &ops_deq[deq], enq);
+               } while (unlikely(deq == 0));
 
                deq_last_time = rte_rdtsc_precise() - deq_start_time;
                time_st->deq_max_time = RTE_MAX(time_st->deq_max_time,
@@ -4575,8 +4579,8 @@ offload_latency_test_enc(struct rte_mempool *mempool, struct test_buffers *bufs,
                /* Dequeue one operation */
                do {
                        deq += rte_bbdev_dequeue_enc_ops(dev_id, queue_id,
-                                       &ops_deq[deq], 1);
-               } while (unlikely(deq != 1));
+                                       &ops_deq[deq], enq);
+               } while (unlikely(deq == 0));
 
                deq_last_time = rte_rdtsc_precise() - deq_start_time;
                time_st->deq_max_time = RTE_MAX(time_st->deq_max_time,
@@ -4658,8 +4662,8 @@ offload_latency_test_ldpc_enc(struct rte_mempool *mempool,
                /* Dequeue one operation */
                do {
                        deq += rte_bbdev_dequeue_ldpc_enc_ops(dev_id, queue_id,
-                                       &ops_deq[deq], 1);
-               } while (unlikely(deq != 1));
+                                       &ops_deq[deq], enq);
+               } while (unlikely(deq == 0));
 
                deq_last_time = rte_rdtsc_precise() - deq_start_time;
                time_st->deq_max_time = RTE_MAX(time_st->deq_max_time,