1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2016-2017 Intel Corporation
5 #include <rte_cryptodev.h>
9 #include "cperf_test_vectors.h"
11 #ifdef RTE_LIB_SECURITY
/*
 * Populate a burst of crypto ops for a security (lookaside protocol)
 * session: PDCP or DOCSIS, selected via options->op_type.
 *
 * NOTE(review): this view of the file has gaps (missing declarations,
 * else-branches and closing braces); code below is left byte-identical.
 *
 * ops            - array of nb_ops pre-allocated crypto ops to fill
 * sess           - symmetric session, reinterpreted as a security session
 * imix_idx       - in/out index into options->imix_buffer_sizes (DOCSIS only)
 * Other offset/vector parameters are unused for the security path.
 */
13 cperf_set_ops_security(struct rte_crypto_op **ops,
14 uint32_t src_buf_offset __rte_unused,
15 uint32_t dst_buf_offset __rte_unused,
16 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
17 const struct cperf_options *options __rte_unused,
18 const struct cperf_test_vector *test_vector __rte_unused,
19 uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
23 for (i = 0; i < nb_ops; i++) {
24 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
/* The sym session pointer doubles as the security session handle. */
25 struct rte_security_session *sec_sess =
26 (struct rte_security_session *)sess;
/*
 * Per-packet HFN lives in the op's private area at iv_offset.
 * When the session already carries the HFN (pdcp_ses_hfn_en),
 * the per-packet value is zeroed; otherwise the default is used.
 */
29 uint32_t *per_pkt_hfn = rte_crypto_op_ctod_offset(ops[i],
30 uint32_t *, iv_offset);
31 *per_pkt_hfn = options->pdcp_ses_hfn_en ? 0 : PDCP_DEFAULT_HFN;
33 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
34 rte_security_attach_session(ops[i], sec_sess);
/* Source mbuf is laid out at a fixed offset from the op itself. */
35 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
38 if (options->op_type == CPERF_PDCP) {
39 sym_op->m_src->buf_len = options->segment_sz;
40 sym_op->m_src->data_len = options->test_buffer_size;
41 sym_op->m_src->pkt_len = sym_op->m_src->data_len;
44 if (options->op_type == CPERF_DOCSIS) {
/* IMIX: pick the next buffer size from the distribution table. */
45 if (options->imix_distribution_count) {
46 buf_sz = options->imix_buffer_sizes[*imix_idx];
47 *imix_idx = (*imix_idx + 1) % options->pool_sz;
49 buf_sz = options->test_buffer_size;
51 sym_op->m_src->buf_len = options->segment_sz;
52 sym_op->m_src->data_len = buf_sz;
53 sym_op->m_src->pkt_len = buf_sz;
55 /* DOCSIS header is not CRC'ed */
56 sym_op->auth.data.offset = options->docsis_hdr_sz;
57 sym_op->auth.data.length = buf_sz -
58 sym_op->auth.data.offset - RTE_ETHER_CRC_LEN;
60 * DOCSIS header and SRC and DST MAC addresses are not
/* Cipher region starts after the unencrypted MAC addresses. */
63 sym_op->cipher.data.offset = sym_op->auth.data.offset +
64 RTE_ETHER_HDR_LEN - RTE_ETHER_TYPE_LEN;
65 sym_op->cipher.data.length = buf_sz -
66 sym_op->cipher.data.offset;
69 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
70 if (dst_buf_offset == 0)
73 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
/*
 * Populate a burst of crypto ops for a NULL-cipher session.
 * Attaches the sym session, points m_src/m_dst at the mbufs stored at
 * fixed offsets from each op, and sets the cipher data length either
 * from the IMIX distribution or from the fixed test buffer size.
 *
 * NOTE(review): original has gaps here (else keyword / closing braces
 * not visible); code left byte-identical.
 */
82 cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
83 uint32_t src_buf_offset, uint32_t dst_buf_offset,
84 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
85 const struct cperf_options *options,
86 const struct cperf_test_vector *test_vector __rte_unused,
87 uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
91 for (i = 0; i < nb_ops; i++) {
92 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
94 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
95 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf sits at src_buf_offset bytes past the op. */
97 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
100 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
101 if (dst_buf_offset == 0)
102 sym_op->m_dst = NULL;
104 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
107 /* cipher parameters */
/* IMIX mode: walk the per-packet buffer-size distribution. */
108 if (options->imix_distribution_count) {
109 sym_op->cipher.data.length =
110 options->imix_buffer_sizes[*imix_idx];
111 *imix_idx = (*imix_idx + 1) % options->pool_sz;
113 sym_op->cipher.data.length = options->test_buffer_size;
114 sym_op->cipher.data.offset = 0;
/*
 * Populate a burst of crypto ops for a NULL-auth session.
 * Mirrors cperf_set_ops_null_cipher but fills the auth.data fields
 * instead of cipher.data.
 *
 * NOTE(review): original has gaps here; code left byte-identical.
 */
121 cperf_set_ops_null_auth(struct rte_crypto_op **ops,
122 uint32_t src_buf_offset, uint32_t dst_buf_offset,
123 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
124 const struct cperf_options *options,
125 const struct cperf_test_vector *test_vector __rte_unused,
126 uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
130 for (i = 0; i < nb_ops; i++) {
131 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
133 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
134 rte_crypto_op_attach_sym_session(ops[i], sess);
136 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
139 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
140 if (dst_buf_offset == 0)
141 sym_op->m_dst = NULL;
143 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
146 /* auth parameters */
/* IMIX mode: per-packet auth length from the distribution table. */
147 if (options->imix_distribution_count) {
148 sym_op->auth.data.length =
149 options->imix_buffer_sizes[*imix_idx];
150 *imix_idx = (*imix_idx + 1) % options->pool_sz;
152 sym_op->auth.data.length = options->test_buffer_size;
153 sym_op->auth.data.offset = 0;
/*
 * Populate a burst of crypto ops for a cipher-only session.
 * Sets mbuf pointers and cipher region; for bit-level algorithms
 * (SNOW3G UEA2 / KASUMI F8 / ZUC EEA3) the length is converted from
 * bytes to bits (<<= 3). In VERIFY mode the cipher IV from the test
 * vector is copied into each op's IV area at iv_offset.
 *
 * NOTE(review): original has gaps here; code left byte-identical.
 */
160 cperf_set_ops_cipher(struct rte_crypto_op **ops,
161 uint32_t src_buf_offset, uint32_t dst_buf_offset,
162 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
163 const struct cperf_options *options,
164 const struct cperf_test_vector *test_vector,
165 uint16_t iv_offset, uint32_t *imix_idx)
169 for (i = 0; i < nb_ops; i++) {
170 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
172 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
173 rte_crypto_op_attach_sym_session(ops[i], sess);
175 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
178 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
179 if (dst_buf_offset == 0)
180 sym_op->m_dst = NULL;
182 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
185 /* cipher parameters */
186 if (options->imix_distribution_count) {
187 sym_op->cipher.data.length =
188 options->imix_buffer_sizes[*imix_idx];
189 *imix_idx = (*imix_idx + 1) % options->pool_sz;
191 sym_op->cipher.data.length = options->test_buffer_size;
/* Wireless algorithms express the cipher length in bits. */
193 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
194 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
195 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
196 sym_op->cipher.data.length <<= 3;
198 sym_op->cipher.data.offset = 0;
/* VERIFY test: seed every op's IV with the known test-vector IV. */
201 if (options->test == CPERF_TEST_TYPE_VERIFY) {
202 for (i = 0; i < nb_ops; i++) {
203 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
204 uint8_t *, iv_offset);
206 memcpy(iv_ptr, test_vector->cipher_iv.data,
207 test_vector->cipher_iv.length);
/*
 * Populate a burst of crypto ops for an auth-only session.
 * For VERIFY ops the digest comes from the test vector; otherwise the
 * digest is placed in the mbuf chain just past the payload (walking
 * segments, skipping to the next segment if the current one lacks
 * room for digest_sz bytes). Bit-level auth algorithms get their
 * length converted to bits.
 *
 * NOTE(review): original has gaps here (e.g. the else-branch of the
 * out_of_place check and loop-advance lines are not visible); code
 * left byte-identical.
 */
216 cperf_set_ops_auth(struct rte_crypto_op **ops,
217 uint32_t src_buf_offset, uint32_t dst_buf_offset,
218 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
219 const struct cperf_options *options,
220 const struct cperf_test_vector *test_vector,
221 uint16_t iv_offset, uint32_t *imix_idx)
225 for (i = 0; i < nb_ops; i++) {
226 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
228 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
229 rte_crypto_op_attach_sym_session(ops[i], sess);
231 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
234 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
235 if (dst_buf_offset == 0)
236 sym_op->m_dst = NULL;
238 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
/* Copy the auth IV (if the algorithm uses one) into the op. */
241 if (test_vector->auth_iv.length) {
242 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
245 memcpy(iv_ptr, test_vector->auth_iv.data,
246 test_vector->auth_iv.length);
249 /* authentication parameters */
250 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
251 sym_op->auth.digest.data = test_vector->digest.data;
252 sym_op->auth.digest.phys_addr =
253 test_vector->digest.phys_addr;
/* GENERATE path: locate digest placement after the payload. */
256 uint32_t offset = options->test_buffer_size;
257 struct rte_mbuf *buf, *tbuf;
259 if (options->out_of_place) {
/* Walk the segment chain until `offset` lands inside a segment. */
262 tbuf = sym_op->m_src;
263 while ((tbuf->next != NULL) &&
264 (offset >= tbuf->data_len)) {
265 offset -= tbuf->data_len;
269 * If there is not enough room in segment,
270 * place the digest in the next segment
272 if ((tbuf->data_len - offset) < options->digest_sz) {
279 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
281 sym_op->auth.digest.phys_addr =
282 rte_pktmbuf_iova_offset(buf, offset);
286 if (options->imix_distribution_count) {
287 sym_op->auth.data.length =
288 options->imix_buffer_sizes[*imix_idx];
289 *imix_idx = (*imix_idx + 1) % options->pool_sz;
291 sym_op->auth.data.length = options->test_buffer_size;
/* Wireless auth algorithms express the length in bits. */
293 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
294 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
295 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
296 sym_op->auth.data.length <<= 3;
298 sym_op->auth.data.offset = 0;
/* VERIFY test: re-seed the auth IV for every op. */
301 if (options->test == CPERF_TEST_TYPE_VERIFY) {
302 if (test_vector->auth_iv.length) {
303 for (i = 0; i < nb_ops; i++) {
304 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
305 uint8_t *, iv_offset);
307 memcpy(iv_ptr, test_vector->auth_iv.data,
308 test_vector->auth_iv.length);
/*
 * Populate a burst of crypto ops for a chained cipher+auth session.
 * Combines the cipher setup of cperf_set_ops_cipher with the digest
 * placement of cperf_set_ops_auth. In VERIFY mode both the cipher IV
 * and (when present) the auth IV are copied into the op, the auth IV
 * immediately following the cipher IV.
 *
 * NOTE(review): original has gaps here; code left byte-identical.
 */
316 cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
317 uint32_t src_buf_offset, uint32_t dst_buf_offset,
318 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
319 const struct cperf_options *options,
320 const struct cperf_test_vector *test_vector,
321 uint16_t iv_offset, uint32_t *imix_idx)
325 for (i = 0; i < nb_ops; i++) {
326 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
328 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
329 rte_crypto_op_attach_sym_session(ops[i], sess);
331 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
334 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
335 if (dst_buf_offset == 0)
336 sym_op->m_dst = NULL;
338 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
341 /* cipher parameters */
342 if (options->imix_distribution_count) {
343 sym_op->cipher.data.length =
344 options->imix_buffer_sizes[*imix_idx];
345 *imix_idx = (*imix_idx + 1) % options->pool_sz;
347 sym_op->cipher.data.length = options->test_buffer_size;
/* Wireless cipher algorithms express the length in bits. */
349 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
350 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
351 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
352 sym_op->cipher.data.length <<= 3;
354 sym_op->cipher.data.offset = 0;
356 /* authentication parameters */
357 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
358 sym_op->auth.digest.data = test_vector->digest.data;
359 sym_op->auth.digest.phys_addr =
360 test_vector->digest.phys_addr;
/* GENERATE path: place the digest just past the payload. */
363 uint32_t offset = options->test_buffer_size;
364 struct rte_mbuf *buf, *tbuf;
366 if (options->out_of_place) {
369 tbuf = sym_op->m_src;
370 while ((tbuf->next != NULL) &&
371 (offset >= tbuf->data_len)) {
372 offset -= tbuf->data_len;
376 * If there is not enough room in segment,
377 * place the digest in the next segment
379 if ((tbuf->data_len - offset) < options->digest_sz) {
386 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
388 sym_op->auth.digest.phys_addr =
389 rte_pktmbuf_iova_offset(buf, offset);
392 if (options->imix_distribution_count) {
393 sym_op->auth.data.length =
394 options->imix_buffer_sizes[*imix_idx];
395 *imix_idx = (*imix_idx + 1) % options->pool_sz;
397 sym_op->auth.data.length = options->test_buffer_size;
399 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
400 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
401 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
402 sym_op->auth.data.length <<= 3;
404 sym_op->auth.data.offset = 0;
/* VERIFY test: cipher IV first, auth IV appended right after it. */
407 if (options->test == CPERF_TEST_TYPE_VERIFY) {
408 for (i = 0; i < nb_ops; i++) {
409 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
410 uint8_t *, iv_offset);
412 memcpy(iv_ptr, test_vector->cipher_iv.data,
413 test_vector->cipher_iv.length);
414 if (test_vector->auth_iv.length) {
416 * Copy IV after the crypto operation and
419 iv_ptr += test_vector->cipher_iv.length;
420 memcpy(iv_ptr, test_vector->auth_iv.data,
421 test_vector->auth_iv.length);
/*
 * Populate a burst of crypto ops for an AEAD session.
 * The AAD is stored in the op's private area immediately after the IV,
 * 16-byte aligned. For DECRYPT the digest comes from the test vector;
 * for ENCRYPT it is placed in the mbuf chain past the AEAD payload.
 * In VERIFY mode AES-CCM gets special layout: nonce copied one byte
 * into the IV field and AAD copied 18 bytes into the AAD field.
 *
 * NOTE(review): original has gaps here; code left byte-identical.
 */
431 cperf_set_ops_aead(struct rte_crypto_op **ops,
432 uint32_t src_buf_offset, uint32_t dst_buf_offset,
433 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
434 const struct cperf_options *options,
435 const struct cperf_test_vector *test_vector,
436 uint16_t iv_offset, uint32_t *imix_idx)
439 /* AAD is placed after the IV */
440 uint16_t aad_offset = iv_offset +
441 RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);
443 for (i = 0; i < nb_ops; i++) {
444 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
446 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
447 rte_crypto_op_attach_sym_session(ops[i], sess);
449 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
452 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
453 if (dst_buf_offset == 0)
454 sym_op->m_dst = NULL;
456 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
459 /* AEAD parameters */
460 if (options->imix_distribution_count) {
461 sym_op->aead.data.length =
462 options->imix_buffer_sizes[*imix_idx];
463 *imix_idx = (*imix_idx + 1) % options->pool_sz;
465 sym_op->aead.data.length = options->test_buffer_size;
466 sym_op->aead.data.offset = 0;
/* AAD virtual and physical addresses inside the op private area. */
468 sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
469 uint8_t *, aad_offset);
470 sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
473 if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
474 sym_op->aead.digest.data = test_vector->digest.data;
475 sym_op->aead.digest.phys_addr =
476 test_vector->digest.phys_addr;
/* ENCRYPT path: digest goes right after the AEAD data region. */
479 uint32_t offset = sym_op->aead.data.length +
480 sym_op->aead.data.offset;
481 struct rte_mbuf *buf, *tbuf;
483 if (options->out_of_place) {
486 tbuf = sym_op->m_src;
487 while ((tbuf->next != NULL) &&
488 (offset >= tbuf->data_len)) {
489 offset -= tbuf->data_len;
493 * If there is not enough room in segment,
494 * place the digest in the next segment
496 if ((tbuf->data_len - offset) < options->digest_sz) {
503 sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
505 sym_op->aead.digest.phys_addr =
506 rte_pktmbuf_iova_offset(buf, offset);
510 if (options->test == CPERF_TEST_TYPE_VERIFY) {
511 for (i = 0; i < nb_ops; i++) {
512 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
513 uint8_t *, iv_offset);
516 * If doing AES-CCM, nonce is copied one byte
517 * after the start of IV field, and AAD is copied
518 * 18 bytes after the start of the AAD field.
520 if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
521 memcpy(iv_ptr + 1, test_vector->aead_iv.data,
522 test_vector->aead_iv.length);
524 memcpy(ops[i]->sym->aead.aad.data + 18,
525 test_vector->aad.data,
526 test_vector->aad.length);
528 memcpy(iv_ptr, test_vector->aead_iv.data,
529 test_vector->aead_iv.length);
531 memcpy(ops[i]->sym->aead.aad.data,
532 test_vector->aad.data,
533 test_vector->aad.length);
/*
 * Create the session used by the performance test, based on
 * options->op_type:
 *   - CPERF_PDCP / CPERF_DOCSIS: build a lookaside-protocol security
 *     session via rte_security_session_create() on the device's
 *     security context (returned cast to the sym-session type).
 *   - Otherwise: create a plain symmetric session and initialize it
 *     with the cipher / auth / chained / AEAD transform(s) built from
 *     the options and test vector.
 *
 * sess_mp / priv_mp - mempools for the session and its private data.
 *
 * NOTE(review): this view of the file has gaps (missing lines such as
 * the dev_id parameter line, NULL-checks and return statements are not
 * visible); code left byte-identical.
 */
541 static struct rte_cryptodev_sym_session *
542 cperf_create_session(struct rte_mempool *sess_mp,
543 struct rte_mempool *priv_mp,
545 const struct cperf_options *options,
546 const struct cperf_test_vector *test_vector,
549 struct rte_crypto_sym_xform cipher_xform;
550 struct rte_crypto_sym_xform auth_xform;
551 struct rte_crypto_sym_xform aead_xform;
552 struct rte_cryptodev_sym_session *sess = NULL;
554 #ifdef RTE_LIB_SECURITY
/* ---- PDCP lookaside-protocol session ---- */
558 if (options->op_type == CPERF_PDCP) {
559 /* Setup Cipher Parameters */
560 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
561 cipher_xform.next = NULL;
562 cipher_xform.cipher.algo = options->cipher_algo;
563 cipher_xform.cipher.op = options->cipher_op;
564 cipher_xform.cipher.iv.offset = iv_offset;
565 cipher_xform.cipher.iv.length = 4;
567 /* cipher different than null */
568 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
569 cipher_xform.cipher.key.data = test_vector->cipher_key.data;
570 cipher_xform.cipher.key.length = test_vector->cipher_key.length;
572 cipher_xform.cipher.key.data = NULL;
573 cipher_xform.cipher.key.length = 0;
576 /* Setup Auth Parameters */
577 if (options->auth_algo != 0) {
578 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
579 auth_xform.next = NULL;
580 auth_xform.auth.algo = options->auth_algo;
581 auth_xform.auth.op = options->auth_op;
/* Auth IV sits right after the cipher IV in the op. */
582 auth_xform.auth.iv.offset = iv_offset +
583 cipher_xform.cipher.iv.length;
585 /* auth different than null */
586 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
587 auth_xform.auth.digest_length = options->digest_sz;
588 auth_xform.auth.key.length = test_vector->auth_key.length;
589 auth_xform.auth.key.data = test_vector->auth_key.data;
590 auth_xform.auth.iv.length = test_vector->auth_iv.length;
592 auth_xform.auth.digest_length = 0;
593 auth_xform.auth.key.length = 0;
594 auth_xform.auth.key.data = NULL;
595 auth_xform.auth.iv.length = 0;
/* Chain cipher -> auth when an auth algorithm was requested. */
598 cipher_xform.next = &auth_xform;
600 cipher_xform.next = NULL;
603 struct rte_security_session_conf sess_conf = {
604 .action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
605 .protocol = RTE_SECURITY_PROTOCOL_PDCP,
608 .domain = options->pdcp_domain,
610 .sn_size = options->pdcp_sn_sz,
/* Session-owned HFN when enabled, else per-op HFN override. */
611 .hfn = options->pdcp_ses_hfn_en ?
612 PDCP_DEFAULT_HFN : 0,
613 .hfn_threshold = 0x70C0A,
614 .hfn_ovrd = !(options->pdcp_ses_hfn_en),
616 .crypto_xform = &cipher_xform
619 struct rte_security_ctx *ctx = (struct rte_security_ctx *)
620 rte_cryptodev_get_sec_ctx(dev_id);
622 /* Create security session */
623 return (void *)rte_security_session_create(ctx,
624 &sess_conf, sess_mp, priv_mp);
/* ---- DOCSIS lookaside-protocol session ---- */
626 if (options->op_type == CPERF_DOCSIS) {
627 enum rte_security_docsis_direction direction;
629 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
630 cipher_xform.next = NULL;
631 cipher_xform.cipher.algo = options->cipher_algo;
632 cipher_xform.cipher.op = options->cipher_op;
633 cipher_xform.cipher.iv.offset = iv_offset;
634 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
635 cipher_xform.cipher.key.data =
636 test_vector->cipher_key.data;
637 cipher_xform.cipher.key.length =
638 test_vector->cipher_key.length;
639 cipher_xform.cipher.iv.length =
640 test_vector->cipher_iv.length;
642 cipher_xform.cipher.key.data = NULL;
643 cipher_xform.cipher.key.length = 0;
644 cipher_xform.cipher.iv.length = 0;
646 cipher_xform.next = NULL;
/* Encrypt maps to downlink, decrypt to uplink. */
648 if (options->cipher_op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
649 direction = RTE_SECURITY_DOCSIS_DOWNLINK;
651 direction = RTE_SECURITY_DOCSIS_UPLINK;
653 struct rte_security_session_conf sess_conf = {
655 RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
656 .protocol = RTE_SECURITY_PROTOCOL_DOCSIS,
658 .direction = direction,
660 .crypto_xform = &cipher_xform
662 struct rte_security_ctx *ctx = (struct rte_security_ctx *)
663 rte_cryptodev_get_sec_ctx(dev_id);
665 /* Create security session */
666 return (void *)rte_security_session_create(ctx,
667 &sess_conf, sess_mp, priv_mp);
/* ---- Plain symmetric crypto session ---- */
670 sess = rte_cryptodev_sym_session_create(sess_mp);
674 if (options->op_type == CPERF_CIPHER_ONLY) {
675 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
676 cipher_xform.next = NULL;
677 cipher_xform.cipher.algo = options->cipher_algo;
678 cipher_xform.cipher.op = options->cipher_op;
679 cipher_xform.cipher.iv.offset = iv_offset;
681 /* cipher different than null */
682 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
683 cipher_xform.cipher.key.data =
684 test_vector->cipher_key.data;
685 cipher_xform.cipher.key.length =
686 test_vector->cipher_key.length;
687 cipher_xform.cipher.iv.length =
688 test_vector->cipher_iv.length;
690 cipher_xform.cipher.key.data = NULL;
691 cipher_xform.cipher.key.length = 0;
692 cipher_xform.cipher.iv.length = 0;
694 /* create crypto session */
695 rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
700 } else if (options->op_type == CPERF_AUTH_ONLY) {
701 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
702 auth_xform.next = NULL;
703 auth_xform.auth.algo = options->auth_algo;
704 auth_xform.auth.op = options->auth_op;
705 auth_xform.auth.iv.offset = iv_offset;
707 /* auth different than null */
708 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
709 auth_xform.auth.digest_length =
711 auth_xform.auth.key.length =
712 test_vector->auth_key.length;
713 auth_xform.auth.key.data = test_vector->auth_key.data;
714 auth_xform.auth.iv.length =
715 test_vector->auth_iv.length;
717 auth_xform.auth.digest_length = 0;
718 auth_xform.auth.key.length = 0;
719 auth_xform.auth.key.data = NULL;
720 auth_xform.auth.iv.length = 0;
722 /* create crypto session */
723 rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
728 } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
729 || options->op_type == CPERF_AUTH_THEN_CIPHER) {
733 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
734 cipher_xform.next = NULL;
735 cipher_xform.cipher.algo = options->cipher_algo;
736 cipher_xform.cipher.op = options->cipher_op;
737 cipher_xform.cipher.iv.offset = iv_offset;
739 /* cipher different than null */
740 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
741 cipher_xform.cipher.key.data =
742 test_vector->cipher_key.data;
743 cipher_xform.cipher.key.length =
744 test_vector->cipher_key.length;
745 cipher_xform.cipher.iv.length =
746 test_vector->cipher_iv.length;
748 cipher_xform.cipher.key.data = NULL;
749 cipher_xform.cipher.key.length = 0;
750 cipher_xform.cipher.iv.length = 0;
756 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
757 auth_xform.next = NULL;
758 auth_xform.auth.algo = options->auth_algo;
759 auth_xform.auth.op = options->auth_op;
760 auth_xform.auth.iv.offset = iv_offset +
761 cipher_xform.cipher.iv.length;
763 /* auth different than null */
764 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
765 auth_xform.auth.digest_length = options->digest_sz;
766 auth_xform.auth.iv.length = test_vector->auth_iv.length;
767 auth_xform.auth.key.length =
768 test_vector->auth_key.length;
769 auth_xform.auth.key.data =
770 test_vector->auth_key.data;
772 auth_xform.auth.digest_length = 0;
773 auth_xform.auth.key.length = 0;
774 auth_xform.auth.key.data = NULL;
775 auth_xform.auth.iv.length = 0;
/* Chain order depends on which operation runs first. */
778 /* cipher then auth */
779 if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
780 cipher_xform.next = &auth_xform;
781 /* create crypto session */
782 rte_cryptodev_sym_session_init(dev_id,
783 sess, &cipher_xform, priv_mp);
784 } else { /* auth then cipher */
785 auth_xform.next = &cipher_xform;
786 /* create crypto session */
787 rte_cryptodev_sym_session_init(dev_id,
788 sess, &auth_xform, priv_mp);
790 } else { /* options->op_type == CPERF_AEAD */
791 aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
792 aead_xform.next = NULL;
793 aead_xform.aead.algo = options->aead_algo;
794 aead_xform.aead.op = options->aead_op;
795 aead_xform.aead.iv.offset = iv_offset;
797 aead_xform.aead.key.data =
798 test_vector->aead_key.data;
799 aead_xform.aead.key.length =
800 test_vector->aead_key.length;
801 aead_xform.aead.iv.length = test_vector->aead_iv.length;
803 aead_xform.aead.digest_length = options->digest_sz;
804 aead_xform.aead.aad_length =
805 options->aead_aad_sz;
807 /* Create crypto session */
808 rte_cryptodev_sym_session_init(dev_id,
809 sess, &aead_xform, priv_mp);
/*
 * Fill op_fns with the session-create and op-populate callbacks that
 * match options->op_type (and, for single-algorithm types, whether the
 * algorithm is NULL). Security op types (PDCP/DOCSIS) share a single
 * populate function and are only wired up when RTE_LIB_SECURITY is
 * enabled.
 *
 * NOTE(review): return statements and the final brace are outside this
 * view; code left byte-identical.
 */
816 cperf_get_op_functions(const struct cperf_options *options,
817 struct cperf_op_fns *op_fns)
819 memset(op_fns, 0, sizeof(struct cperf_op_fns));
821 op_fns->sess_create = cperf_create_session;
823 if (options->op_type == CPERF_AEAD) {
824 op_fns->populate_ops = cperf_set_ops_aead;
828 if (options->op_type == CPERF_AUTH_THEN_CIPHER
829 || options->op_type == CPERF_CIPHER_THEN_AUTH) {
830 op_fns->populate_ops = cperf_set_ops_cipher_auth;
/* NULL algorithms get dedicated lightweight populate functions. */
833 if (options->op_type == CPERF_AUTH_ONLY) {
834 if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
835 op_fns->populate_ops = cperf_set_ops_null_auth;
837 op_fns->populate_ops = cperf_set_ops_auth;
840 if (options->op_type == CPERF_CIPHER_ONLY) {
841 if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
842 op_fns->populate_ops = cperf_set_ops_null_cipher;
844 op_fns->populate_ops = cperf_set_ops_cipher;
847 #ifdef RTE_LIB_SECURITY
848 if (options->op_type == CPERF_PDCP) {
849 op_fns->populate_ops = cperf_set_ops_security;
852 if (options->op_type == CPERF_DOCSIS) {
853 op_fns->populate_ops = cperf_set_ops_security;