1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2016-2017 Intel Corporation
5 #include <rte_cryptodev.h>
8 #include "cperf_test_vectors.h"
10 #ifdef RTE_LIBRTE_SECURITY
/*
 * Populate a burst of crypto ops for a rte_security (lookaside-protocol)
 * session: reset each op's status, attach the security session, and point
 * m_src at the mbuf that lives inline in the op's private area.
 *
 * NOTE(review): dst_buf_offset and options are tagged __rte_unused in the
 * parameter list, yet both are read below (options->segment_sz and the
 * dst_buf_offset == 0 test) -- the __rte_unused tags look stale; confirm
 * against the full source and drop them.
 * NOTE(review): several lines of this function are elided in this extract.
 */
12 cperf_set_ops_security(struct rte_crypto_op **ops,
13 uint32_t src_buf_offset __rte_unused,
14 uint32_t dst_buf_offset __rte_unused,
15 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
16 const struct cperf_options *options __rte_unused,
17 const struct cperf_test_vector *test_vector __rte_unused,
18 uint16_t iv_offset __rte_unused,
19 uint32_t *imix_idx __rte_unused)
23 for (i = 0; i < nb_ops; i++) {
24 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
/* The sym session pointer is reinterpreted as a security session here. */
25 struct rte_security_session *sec_sess =
26 (struct rte_security_session *)sess;
28 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
29 rte_security_attach_session(ops[i], sec_sess);
/* Source mbuf is stored inline after the op, at src_buf_offset. */
30 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
/* Refresh mbuf lengths each burst: previous iterations may have changed them. */
32 sym_op->m_src->buf_len = options->segment_sz;
33 sym_op->m_src->data_len = options->test_buffer_size;
34 sym_op->m_src->pkt_len = sym_op->m_src->data_len;
36 /* dst_buf_offset == 0: no separate destination mbuf was laid out, so m_dst is cleared (in-place); otherwise m_dst points at the inline dst mbuf. */
37 if (dst_buf_offset == 0)
40 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
/*
 * Populate a burst of ops for a NULL-cipher session: attach the sym session,
 * wire m_src/m_dst to the mbufs stored inline after each op, and set the
 * cipher data length/offset (no IV or key material is needed for NULL).
 * With IMIX enabled, per-op lengths are drawn round-robin from
 * imix_buffer_sizes via *imix_idx (modulo pool_sz).
 * NOTE(review): several lines of this function are elided in this extract.
 */
49 cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
50 uint32_t src_buf_offset, uint32_t dst_buf_offset,
51 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
52 const struct cperf_options *options,
53 const struct cperf_test_vector *test_vector __rte_unused,
54 uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
58 for (i = 0; i < nb_ops; i++) {
59 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
61 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
62 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf lives inline after the op, at src_buf_offset. */
64 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
67 /* dst_buf_offset == 0: no separate destination mbuf; m_dst cleared (in-place). */
68 if (dst_buf_offset == 0)
71 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
74 /* cipher parameters */
75 if (options->imix_distribution_count) {
76 sym_op->cipher.data.length =
77 options->imix_buffer_sizes[*imix_idx];
/* Advance the shared IMIX cursor; wraps at pool_sz. */
78 *imix_idx = (*imix_idx + 1) % options->pool_sz;
80 sym_op->cipher.data.length = options->test_buffer_size;
81 sym_op->cipher.data.offset = 0;
/*
 * Populate a burst of ops for a NULL-auth session: attach the sym session,
 * wire m_src/m_dst to the inline mbufs, and set the auth data length/offset
 * (no digest or IV handling is needed for NULL auth).
 * With IMIX enabled, per-op lengths are drawn round-robin from
 * imix_buffer_sizes via *imix_idx (modulo pool_sz).
 * NOTE(review): several lines of this function are elided in this extract.
 */
88 cperf_set_ops_null_auth(struct rte_crypto_op **ops,
89 uint32_t src_buf_offset, uint32_t dst_buf_offset,
90 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
91 const struct cperf_options *options,
92 const struct cperf_test_vector *test_vector __rte_unused,
93 uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
97 for (i = 0; i < nb_ops; i++) {
98 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
100 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
101 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf lives inline after the op, at src_buf_offset. */
103 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
106 /* dst_buf_offset == 0: no separate destination mbuf; m_dst cleared (in-place). */
107 if (dst_buf_offset == 0)
108 sym_op->m_dst = NULL;
110 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
113 /* auth parameters */
114 if (options->imix_distribution_count) {
115 sym_op->auth.data.length =
116 options->imix_buffer_sizes[*imix_idx];
/* Advance the shared IMIX cursor; wraps at pool_sz. */
117 *imix_idx = (*imix_idx + 1) % options->pool_sz;
119 sym_op->auth.data.length = options->test_buffer_size;
120 sym_op->auth.data.offset = 0;
/*
 * Populate a burst of cipher-only ops: attach the session, wire the inline
 * m_src/m_dst mbufs, set cipher length/offset, and -- in VERIFY mode only --
 * copy the test-vector cipher IV into each op at iv_offset.
 * For SNOW3G/KASUMI/ZUC the cipher length is converted from bytes to bits
 * (<<= 3), as those wireless algorithms take bit-level lengths.
 * NOTE(review): several lines of this function are elided in this extract.
 */
127 cperf_set_ops_cipher(struct rte_crypto_op **ops,
128 uint32_t src_buf_offset, uint32_t dst_buf_offset,
129 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
130 const struct cperf_options *options,
131 const struct cperf_test_vector *test_vector,
132 uint16_t iv_offset, uint32_t *imix_idx)
136 for (i = 0; i < nb_ops; i++) {
137 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
139 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
140 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf lives inline after the op, at src_buf_offset. */
142 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
145 /* dst_buf_offset == 0: no separate destination mbuf; m_dst cleared (in-place). */
146 if (dst_buf_offset == 0)
147 sym_op->m_dst = NULL;
149 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
152 /* cipher parameters */
153 if (options->imix_distribution_count) {
154 sym_op->cipher.data.length =
155 options->imix_buffer_sizes[*imix_idx];
156 *imix_idx = (*imix_idx + 1) % options->pool_sz;
158 sym_op->cipher.data.length = options->test_buffer_size;
/* Wireless algorithms express data length in bits, not bytes. */
160 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
161 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
162 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
163 sym_op->cipher.data.length <<= 3;
165 sym_op->cipher.data.offset = 0;
/* Only the verify test needs a real IV; throughput tests skip the copy. */
168 if (options->test == CPERF_TEST_TYPE_VERIFY) {
169 for (i = 0; i < nb_ops; i++) {
170 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
171 uint8_t *, iv_offset);
173 memcpy(iv_ptr, test_vector->cipher_iv.data,
174 test_vector->cipher_iv.length);
/*
 * Populate a burst of auth-only ops: attach the session, wire the inline
 * m_src/m_dst mbufs, optionally copy an auth IV, and place the digest.
 * Digest placement: for VERIFY-op sessions the reference digest from the
 * test vector is used directly; for GENERATE the digest is placed in the
 * mbuf chain right after the payload, walking segments to find one with
 * enough tail room (digest_sz bytes).
 * For SNOW3G/KASUMI/ZUC the auth length is converted from bytes to bits.
 * NOTE(review): several lines of this function are elided in this extract.
 */
183 cperf_set_ops_auth(struct rte_crypto_op **ops,
184 uint32_t src_buf_offset, uint32_t dst_buf_offset,
185 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
186 const struct cperf_options *options,
187 const struct cperf_test_vector *test_vector,
188 uint16_t iv_offset, uint32_t *imix_idx)
192 for (i = 0; i < nb_ops; i++) {
193 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
195 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
196 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf lives inline after the op, at src_buf_offset. */
198 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
201 /* dst_buf_offset == 0: no separate destination mbuf; m_dst cleared (in-place). */
202 if (dst_buf_offset == 0)
203 sym_op->m_dst = NULL;
205 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
/* Algorithms with an auth IV get it copied into the op at iv_offset. */
208 if (test_vector->auth_iv.length) {
209 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
212 memcpy(iv_ptr, test_vector->auth_iv.data,
213 test_vector->auth_iv.length);
216 /* authentication parameters */
/* VERIFY sessions check against the known-good digest from the vector. */
217 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
218 sym_op->auth.digest.data = test_vector->digest.data;
219 sym_op->auth.digest.phys_addr =
220 test_vector->digest.phys_addr;
/* GENERATE path: digest goes just past the payload in the mbuf chain. */
223 uint32_t offset = options->test_buffer_size;
224 struct rte_mbuf *buf, *tbuf;
226 if (options->out_of_place) {
/* Walk segments until 'offset' lands inside the current one. */
229 tbuf = sym_op->m_src;
230 while ((tbuf->next != NULL) &&
231 (offset >= tbuf->data_len)) {
232 offset -= tbuf->data_len;
236 * If there is not enough room in segment,
237 * place the digest in the next segment
239 if ((tbuf->data_len - offset) < options->digest_sz) {
246 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
248 sym_op->auth.digest.phys_addr =
249 rte_pktmbuf_iova_offset(buf, offset);
253 if (options->imix_distribution_count) {
254 sym_op->auth.data.length =
255 options->imix_buffer_sizes[*imix_idx];
256 *imix_idx = (*imix_idx + 1) % options->pool_sz;
258 sym_op->auth.data.length = options->test_buffer_size;
/* Wireless algorithms express auth length in bits, not bytes. */
260 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
261 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
262 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
263 sym_op->auth.data.length <<= 3;
265 sym_op->auth.data.offset = 0;
/* Verify test: (re)copy the auth IV for every op, when one exists. */
268 if (options->test == CPERF_TEST_TYPE_VERIFY) {
269 if (test_vector->auth_iv.length) {
270 for (i = 0; i < nb_ops; i++) {
271 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
272 uint8_t *, iv_offset);
274 memcpy(iv_ptr, test_vector->auth_iv.data,
275 test_vector->auth_iv.length);
/*
 * Populate a burst of chained cipher+auth ops. Combines the cipher-only and
 * auth-only setup: attach session, wire inline m_src/m_dst, set cipher and
 * auth lengths/offsets (bit lengths for SNOW3G/KASUMI/ZUC), place the digest
 * (vector digest for VERIFY-op, in-mbuf past the payload for GENERATE), and
 * in VERIFY test mode copy the cipher IV -- and the auth IV immediately
 * after it -- into each op at iv_offset.
 * NOTE(review): several lines of this function are elided in this extract.
 */
283 cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
284 uint32_t src_buf_offset, uint32_t dst_buf_offset,
285 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
286 const struct cperf_options *options,
287 const struct cperf_test_vector *test_vector,
288 uint16_t iv_offset, uint32_t *imix_idx)
292 for (i = 0; i < nb_ops; i++) {
293 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
295 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
296 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf lives inline after the op, at src_buf_offset. */
298 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
301 /* dst_buf_offset == 0: no separate destination mbuf; m_dst cleared (in-place). */
302 if (dst_buf_offset == 0)
303 sym_op->m_dst = NULL;
305 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
308 /* cipher parameters */
309 if (options->imix_distribution_count) {
310 sym_op->cipher.data.length =
311 options->imix_buffer_sizes[*imix_idx];
312 *imix_idx = (*imix_idx + 1) % options->pool_sz;
314 sym_op->cipher.data.length = options->test_buffer_size;
/* Wireless algorithms express cipher length in bits, not bytes. */
316 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
317 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
318 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
319 sym_op->cipher.data.length <<= 3;
321 sym_op->cipher.data.offset = 0;
323 /* authentication parameters */
/* VERIFY sessions check against the known-good digest from the vector. */
324 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
325 sym_op->auth.digest.data = test_vector->digest.data;
326 sym_op->auth.digest.phys_addr =
327 test_vector->digest.phys_addr;
/* GENERATE path: digest goes just past the payload in the mbuf chain. */
330 uint32_t offset = options->test_buffer_size;
331 struct rte_mbuf *buf, *tbuf;
333 if (options->out_of_place) {
/* Walk segments until 'offset' lands inside the current one. */
336 tbuf = sym_op->m_src;
337 while ((tbuf->next != NULL) &&
338 (offset >= tbuf->data_len)) {
339 offset -= tbuf->data_len;
343 * If there is not enough room in segment,
344 * place the digest in the next segment
346 if ((tbuf->data_len - offset) < options->digest_sz) {
353 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
355 sym_op->auth.digest.phys_addr =
356 rte_pktmbuf_iova_offset(buf, offset);
359 if (options->imix_distribution_count) {
360 sym_op->auth.data.length =
361 options->imix_buffer_sizes[*imix_idx];
362 *imix_idx = (*imix_idx + 1) % options->pool_sz;
364 sym_op->auth.data.length = options->test_buffer_size;
/* Wireless algorithms express auth length in bits, not bytes. */
366 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
367 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
368 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
369 sym_op->auth.data.length <<= 3;
371 sym_op->auth.data.offset = 0;
/* Verify test: copy cipher IV, then the auth IV directly after it. */
374 if (options->test == CPERF_TEST_TYPE_VERIFY) {
375 for (i = 0; i < nb_ops; i++) {
376 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
377 uint8_t *, iv_offset);
379 memcpy(iv_ptr, test_vector->cipher_iv.data,
380 test_vector->cipher_iv.length);
381 if (test_vector->auth_iv.length) {
383 * Copy IV after the crypto operation and
386 iv_ptr += test_vector->cipher_iv.length;
387 memcpy(iv_ptr, test_vector->auth_iv.data,
388 test_vector->auth_iv.length);
/*
 * Populate a burst of AEAD ops: attach the session, wire inline m_src/m_dst,
 * set AEAD data length/offset, point aad.data/phys_addr at the AAD area
 * (placed after the 16-byte-aligned IV in the op's private area), and place
 * the digest (vector digest for DECRYPT, in-mbuf past the payload for
 * ENCRYPT). In VERIFY test mode the IV and AAD are copied in, with special
 * offsets for AES-CCM (nonce at IV+1, AAD at AAD+18 per the CCM layout the
 * PMDs expect).
 * NOTE(review): several lines of this function are elided in this extract.
 */
398 cperf_set_ops_aead(struct rte_crypto_op **ops,
399 uint32_t src_buf_offset, uint32_t dst_buf_offset,
400 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
401 const struct cperf_options *options,
402 const struct cperf_test_vector *test_vector,
403 uint16_t iv_offset, uint32_t *imix_idx)
406 /* AAD is placed after the IV, with the IV area rounded up to 16 bytes. */
407 uint16_t aad_offset = iv_offset +
408 RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);
410 for (i = 0; i < nb_ops; i++) {
411 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
413 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
414 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf lives inline after the op, at src_buf_offset. */
416 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
419 /* dst_buf_offset == 0: no separate destination mbuf; m_dst cleared (in-place). */
420 if (dst_buf_offset == 0)
421 sym_op->m_dst = NULL;
423 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
426 /* AEAD parameters */
427 if (options->imix_distribution_count) {
428 sym_op->aead.data.length =
429 options->imix_buffer_sizes[*imix_idx];
430 *imix_idx = (*imix_idx + 1) % options->pool_sz;
432 sym_op->aead.data.length = options->test_buffer_size;
433 sym_op->aead.data.offset = 0;
/* AAD pointers (virtual + IOVA) reference the op's own private area. */
435 sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
436 uint8_t *, aad_offset);
437 sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
/* DECRYPT verifies against the known-good digest from the vector. */
440 if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
441 sym_op->aead.digest.data = test_vector->digest.data;
442 sym_op->aead.digest.phys_addr =
443 test_vector->digest.phys_addr;
/* ENCRYPT path: digest goes just past the AEAD payload in the mbuf chain. */
446 uint32_t offset = sym_op->aead.data.length +
447 sym_op->aead.data.offset;
448 struct rte_mbuf *buf, *tbuf;
450 if (options->out_of_place) {
/* Walk segments until 'offset' lands inside the current one. */
453 tbuf = sym_op->m_src;
454 while ((tbuf->next != NULL) &&
455 (offset >= tbuf->data_len)) {
456 offset -= tbuf->data_len;
460 * If there is not enough room in segment,
461 * place the digest in the next segment
463 if ((tbuf->data_len - offset) < options->digest_sz) {
470 sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
472 sym_op->aead.digest.phys_addr =
473 rte_pktmbuf_iova_offset(buf, offset);
/* Verify test: copy IV/nonce and AAD into each op. */
477 if (options->test == CPERF_TEST_TYPE_VERIFY) {
478 for (i = 0; i < nb_ops; i++) {
479 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
480 uint8_t *, iv_offset);
483 * If doing AES-CCM, nonce is copied one byte
484 * after the start of IV field, and AAD is copied
485 * 18 bytes after the start of the AAD field.
487 if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
488 memcpy(iv_ptr + 1, test_vector->aead_iv.data,
489 test_vector->aead_iv.length);
491 memcpy(ops[i]->sym->aead.aad.data + 18,
492 test_vector->aad.data,
493 test_vector->aad.length);
495 memcpy(iv_ptr, test_vector->aead_iv.data,
496 test_vector->aead_iv.length);
498 memcpy(ops[i]->sym->aead.aad.data,
499 test_vector->aad.data,
500 test_vector->aad.length);
/*
 * Build and initialize the session for the configured op_type.
 *
 * PDCP (under RTE_LIBRTE_SECURITY): builds a cipher xform (optionally
 * chained with an auth xform when auth_algo != 0) and returns an
 * rte_security session created on the device's security context -- note it
 * is cast through void* to the sym-session return type.
 *
 * All other op types allocate a sym session from sess_mp and initialize it
 * on dev_id with the xform chain matching the op type:
 *   CIPHER_ONLY / AUTH_ONLY -> single xform;
 *   CIPHER_THEN_AUTH / AUTH_THEN_CIPHER -> both xforms, chained in order;
 *   otherwise (AEAD) -> a single AEAD xform.
 * NULL cipher/auth algorithms get no key/IV material (lengths zeroed).
 *
 * NOTE(review): several lines of this function are elided in this extract;
 * return value of rte_cryptodev_sym_session_create() does not appear to be
 * checked before use in the visible lines -- confirm against full source.
 */
508 static struct rte_cryptodev_sym_session *
509 cperf_create_session(struct rte_mempool *sess_mp,
510 struct rte_mempool *priv_mp,
512 const struct cperf_options *options,
513 const struct cperf_test_vector *test_vector,
516 struct rte_crypto_sym_xform cipher_xform;
517 struct rte_crypto_sym_xform auth_xform;
518 struct rte_crypto_sym_xform aead_xform;
519 struct rte_cryptodev_sym_session *sess = NULL;
521 #ifdef RTE_LIBRTE_SECURITY
525 if (options->op_type == CPERF_PDCP) {
526 /* Setup Cipher Parameters */
527 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
528 cipher_xform.next = NULL;
529 cipher_xform.cipher.algo = options->cipher_algo;
530 cipher_xform.cipher.op = options->cipher_op;
531 cipher_xform.cipher.iv.offset = iv_offset;
533 /* cipher different than null: take key/IV from the test vector */
534 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
535 cipher_xform.cipher.key.data = test_vector->cipher_key.data;
536 cipher_xform.cipher.key.length = test_vector->cipher_key.length;
537 cipher_xform.cipher.iv.length = test_vector->cipher_iv.length;
539 cipher_xform.cipher.key.data = NULL;
540 cipher_xform.cipher.key.length = 0;
541 cipher_xform.cipher.iv.length = 0;
544 /* Setup Auth Parameters (only when an auth algo is configured) */
545 if (options->auth_algo != 0) {
546 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
547 auth_xform.next = NULL;
548 auth_xform.auth.algo = options->auth_algo;
549 auth_xform.auth.op = options->auth_op;
/* Auth IV is laid out directly after the cipher IV in the op. */
550 auth_xform.auth.iv.offset = iv_offset +
551 cipher_xform.cipher.iv.length;
553 /* auth different than null: take key/IV/digest size from config */
554 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
555 auth_xform.auth.digest_length = options->digest_sz;
556 auth_xform.auth.key.length = test_vector->auth_key.length;
557 auth_xform.auth.key.data = test_vector->auth_key.data;
558 auth_xform.auth.iv.length = test_vector->auth_iv.length;
560 auth_xform.auth.digest_length = 0;
561 auth_xform.auth.key.length = 0;
562 auth_xform.auth.key.data = NULL;
563 auth_xform.auth.iv.length = 0;
/* Chain auth after cipher for PDCP when auth is in use. */
566 cipher_xform.next = &auth_xform;
568 cipher_xform.next = NULL;
/* Lookaside-protocol PDCP security session configuration. */
571 struct rte_security_session_conf sess_conf = {
572 .action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
573 .protocol = RTE_SECURITY_PROTOCOL_PDCP,
576 .domain = options->pdcp_domain,
578 .sn_size = options->pdcp_sn_sz,
/* NOTE(review): hfn_threshold hard-coded to 0x70C0A -- rationale not visible here. */
580 .hfn_threshold = 0x70C0A,
582 .crypto_xform = &cipher_xform
585 struct rte_security_ctx *ctx = (struct rte_security_ctx *)
586 rte_cryptodev_get_sec_ctx(dev_id);
588 /* Create security session; cast to the sym-session return type. */
589 return (void *)rte_security_session_create(ctx,
590 &sess_conf, sess_mp);
/* Non-security path: allocate a plain symmetric session. */
593 sess = rte_cryptodev_sym_session_create(sess_mp);
597 if (options->op_type == CPERF_CIPHER_ONLY) {
598 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
599 cipher_xform.next = NULL;
600 cipher_xform.cipher.algo = options->cipher_algo;
601 cipher_xform.cipher.op = options->cipher_op;
602 cipher_xform.cipher.iv.offset = iv_offset;
604 /* cipher different than null: take key/IV from the test vector */
605 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
606 cipher_xform.cipher.key.data =
607 test_vector->cipher_key.data;
608 cipher_xform.cipher.key.length =
609 test_vector->cipher_key.length;
610 cipher_xform.cipher.iv.length =
611 test_vector->cipher_iv.length;
613 cipher_xform.cipher.key.data = NULL;
614 cipher_xform.cipher.key.length = 0;
615 cipher_xform.cipher.iv.length = 0;
617 /* create crypto session */
618 rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
623 } else if (options->op_type == CPERF_AUTH_ONLY) {
624 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
625 auth_xform.next = NULL;
626 auth_xform.auth.algo = options->auth_algo;
627 auth_xform.auth.op = options->auth_op;
628 auth_xform.auth.iv.offset = iv_offset;
630 /* auth different than null: take key/IV/digest size from config */
631 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
632 auth_xform.auth.digest_length =
634 auth_xform.auth.key.length =
635 test_vector->auth_key.length;
636 auth_xform.auth.key.data = test_vector->auth_key.data;
637 auth_xform.auth.iv.length =
638 test_vector->auth_iv.length;
640 auth_xform.auth.digest_length = 0;
641 auth_xform.auth.key.length = 0;
642 auth_xform.auth.key.data = NULL;
643 auth_xform.auth.iv.length = 0;
645 /* create crypto session */
646 rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
651 } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
652 || options->op_type == CPERF_AUTH_THEN_CIPHER) {
/* Chained mode: build both xforms, then link them in the chosen order. */
656 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
657 cipher_xform.next = NULL;
658 cipher_xform.cipher.algo = options->cipher_algo;
659 cipher_xform.cipher.op = options->cipher_op;
660 cipher_xform.cipher.iv.offset = iv_offset;
662 /* cipher different than null: take key/IV from the test vector */
663 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
664 cipher_xform.cipher.key.data =
665 test_vector->cipher_key.data;
666 cipher_xform.cipher.key.length =
667 test_vector->cipher_key.length;
668 cipher_xform.cipher.iv.length =
669 test_vector->cipher_iv.length;
671 cipher_xform.cipher.key.data = NULL;
672 cipher_xform.cipher.key.length = 0;
673 cipher_xform.cipher.iv.length = 0;
679 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
680 auth_xform.next = NULL;
681 auth_xform.auth.algo = options->auth_algo;
682 auth_xform.auth.op = options->auth_op;
/* Auth IV is laid out directly after the cipher IV in the op. */
683 auth_xform.auth.iv.offset = iv_offset +
684 cipher_xform.cipher.iv.length;
686 /* auth different than null: take key/IV/digest size from config */
687 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
688 auth_xform.auth.digest_length = options->digest_sz;
689 auth_xform.auth.iv.length = test_vector->auth_iv.length;
690 auth_xform.auth.key.length =
691 test_vector->auth_key.length;
692 auth_xform.auth.key.data =
693 test_vector->auth_key.data;
695 auth_xform.auth.digest_length = 0;
696 auth_xform.auth.key.length = 0;
697 auth_xform.auth.key.data = NULL;
698 auth_xform.auth.iv.length = 0;
701 /* cipher then auth */
702 if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
703 cipher_xform.next = &auth_xform;
704 /* create crypto session */
705 rte_cryptodev_sym_session_init(dev_id,
706 sess, &cipher_xform, priv_mp);
707 } else { /* auth then cipher */
708 auth_xform.next = &cipher_xform;
709 /* create crypto session */
710 rte_cryptodev_sym_session_init(dev_id,
711 sess, &auth_xform, priv_mp);
713 } else { /* options->op_type == CPERF_AEAD */
714 aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
715 aead_xform.next = NULL;
716 aead_xform.aead.algo = options->aead_algo;
717 aead_xform.aead.op = options->aead_op;
718 aead_xform.aead.iv.offset = iv_offset;
720 aead_xform.aead.key.data =
721 test_vector->aead_key.data;
722 aead_xform.aead.key.length =
723 test_vector->aead_key.length;
724 aead_xform.aead.iv.length = test_vector->aead_iv.length;
726 aead_xform.aead.digest_length = options->digest_sz;
727 aead_xform.aead.aad_length =
728 options->aead_aad_sz;
730 /* Create crypto session */
731 rte_cryptodev_sym_session_init(dev_id,
732 sess, &aead_xform, priv_mp);
739 cperf_get_op_functions(const struct cperf_options *options,
740 struct cperf_op_fns *op_fns)
742 memset(op_fns, 0, sizeof(struct cperf_op_fns));
744 op_fns->sess_create = cperf_create_session;
746 if (options->op_type == CPERF_AEAD) {
747 op_fns->populate_ops = cperf_set_ops_aead;
751 if (options->op_type == CPERF_AUTH_THEN_CIPHER
752 || options->op_type == CPERF_CIPHER_THEN_AUTH) {
753 op_fns->populate_ops = cperf_set_ops_cipher_auth;
756 if (options->op_type == CPERF_AUTH_ONLY) {
757 if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
758 op_fns->populate_ops = cperf_set_ops_null_auth;
760 op_fns->populate_ops = cperf_set_ops_auth;
763 if (options->op_type == CPERF_CIPHER_ONLY) {
764 if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
765 op_fns->populate_ops = cperf_set_ops_null_cipher;
767 op_fns->populate_ops = cperf_set_ops_cipher;
770 #ifdef RTE_LIBRTE_SECURITY
771 if (options->op_type == CPERF_PDCP) {
772 op_fns->populate_ops = cperf_set_ops_security;