1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2016-2017 Intel Corporation
5 #include <rte_cryptodev.h>
9 #include "cperf_test_vectors.h"
11 #ifdef RTE_LIBRTE_SECURITY
/*
 * Populate a burst of crypto ops for the rte_security (lookaside protocol)
 * test path. Each op gets NOT_PROCESSED status, the security session
 * attached, and m_src/m_dst pointed at the mbufs co-located after the op
 * in the same mempool object (src_buf_offset/dst_buf_offset bytes in).
 *
 * NOTE(review): dst_buf_offset and options are tagged __rte_unused but are
 * both read below (options->op_type, dst_buf_offset) — the qualifiers look
 * stale and should be removed.
 * NOTE(review): several original lines are elided in this view (e.g. the
 * declarations of `i` and `buf_sz`, closing braces, `else` arms); comments
 * below describe only the visible code.
 */
13 cperf_set_ops_security(struct rte_crypto_op **ops,
14 uint32_t src_buf_offset __rte_unused,
15 uint32_t dst_buf_offset __rte_unused,
16 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
17 const struct cperf_options *options __rte_unused,
18 const struct cperf_test_vector *test_vector __rte_unused,
19 uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
23 for (i = 0; i < nb_ops; i++) {
24 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
/* Session pointer is reinterpreted as a security session for this path. */
25 struct rte_security_session *sec_sess =
26 (struct rte_security_session *)sess;
29 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
30 rte_security_attach_session(ops[i], sec_sess);
/* Source mbuf lives in the same mempool element, after the op itself. */
31 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
/* PDCP: the whole test buffer is the payload; sizes set explicitly. */
34 if (options->op_type == CPERF_PDCP) {
35 sym_op->m_src->buf_len = options->segment_sz;
36 sym_op->m_src->data_len = options->test_buffer_size;
37 sym_op->m_src->pkt_len = sym_op->m_src->data_len;
/* DOCSIS: pick per-op buffer size from the IMIX table when enabled. */
40 if (options->op_type == CPERF_DOCSIS) {
41 if (options->imix_distribution_count) {
42 buf_sz = options->imix_buffer_sizes[*imix_idx];
43 *imix_idx = (*imix_idx + 1) % options->pool_sz;
45 buf_sz = options->test_buffer_size;
47 /* DOCSIS header is not CRC'ed */
48 sym_op->auth.data.offset = options->docsis_hdr_sz;
49 sym_op->auth.data.length = buf_sz -
50 sym_op->auth.data.offset - RTE_ETHER_CRC_LEN;
52 * DOCSIS header and SRC and DST MAC addresses are not
/* Cipher region starts after the MAC addresses (Ethertype is ciphered). */
55 sym_op->cipher.data.offset = sym_op->auth.data.offset +
56 RTE_ETHER_HDR_LEN - RTE_ETHER_TYPE_LEN;
57 sym_op->cipher.data.length = buf_sz -
58 sym_op->cipher.data.offset;
61 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
62 if (dst_buf_offset == 0)
65 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
/*
 * Populate a burst of ops for NULL-cipher tests: attach the symmetric
 * session, wire up m_src/m_dst from the op's own mempool element, and set
 * the cipher data length (per-op IMIX size or the fixed test buffer size).
 * No IV or key material is needed for the NULL algorithm.
 *
 * NOTE(review): source view is elided (loop-index declaration, `else`
 * keywords and closing braces are missing between the visible lines).
 */
74 cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
75 uint32_t src_buf_offset, uint32_t dst_buf_offset,
76 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
77 const struct cperf_options *options,
78 const struct cperf_test_vector *test_vector __rte_unused,
79 uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
83 for (i = 0; i < nb_ops; i++) {
84 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
86 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
87 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf is co-located after the op in the same mempool object. */
89 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
92 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
93 if (dst_buf_offset == 0)
96 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
99 /* cipher parameters */
100 if (options->imix_distribution_count) {
101 sym_op->cipher.data.length =
102 options->imix_buffer_sizes[*imix_idx];
103 *imix_idx = (*imix_idx + 1) % options->pool_sz;
105 sym_op->cipher.data.length = options->test_buffer_size;
106 sym_op->cipher.data.offset = 0;
/*
 * Populate a burst of ops for NULL-auth tests: mirror image of
 * cperf_set_ops_null_cipher but filling sym_op->auth.data instead of
 * cipher.data. No digest or IV is configured for the NULL algorithm.
 *
 * NOTE(review): source view is elided (loop-index declaration, `else`
 * keywords and closing braces are missing between the visible lines).
 */
113 cperf_set_ops_null_auth(struct rte_crypto_op **ops,
114 uint32_t src_buf_offset, uint32_t dst_buf_offset,
115 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
116 const struct cperf_options *options,
117 const struct cperf_test_vector *test_vector __rte_unused,
118 uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
122 for (i = 0; i < nb_ops; i++) {
123 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
125 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
126 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf is co-located after the op in the same mempool object. */
128 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
131 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
132 if (dst_buf_offset == 0)
133 sym_op->m_dst = NULL;
135 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
138 /* auth parameters */
139 if (options->imix_distribution_count) {
140 sym_op->auth.data.length =
141 options->imix_buffer_sizes[*imix_idx];
142 *imix_idx = (*imix_idx + 1) % options->pool_sz;
144 sym_op->auth.data.length = options->test_buffer_size;
145 sym_op->auth.data.offset = 0;
/*
 * Populate a burst of ops for cipher-only tests. Besides the common mbuf
 * wiring, the cipher data length is converted to BITS (<<= 3) for the
 * wireless algorithms (SNOW3G UEA2 / KASUMI F8 / ZUC EEA3), which the
 * cryptodev API specifies in bit units. In VERIFY mode the cipher IV from
 * the test vector is copied into each op's IV area at iv_offset.
 *
 * NOTE(review): source view is elided (loop-index declaration, `else`
 * keywords and closing braces are missing between the visible lines).
 */
152 cperf_set_ops_cipher(struct rte_crypto_op **ops,
153 uint32_t src_buf_offset, uint32_t dst_buf_offset,
154 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
155 const struct cperf_options *options,
156 const struct cperf_test_vector *test_vector,
157 uint16_t iv_offset, uint32_t *imix_idx)
161 for (i = 0; i < nb_ops; i++) {
162 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
164 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
165 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf is co-located after the op in the same mempool object. */
167 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
170 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
171 if (dst_buf_offset == 0)
172 sym_op->m_dst = NULL;
174 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
177 /* cipher parameters */
178 if (options->imix_distribution_count) {
179 sym_op->cipher.data.length =
180 options->imix_buffer_sizes[*imix_idx];
181 *imix_idx = (*imix_idx + 1) % options->pool_sz;
183 sym_op->cipher.data.length = options->test_buffer_size;
/* Wireless algos take the cipher length in bits, not bytes. */
185 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
186 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
187 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
188 sym_op->cipher.data.length <<= 3;
190 sym_op->cipher.data.offset = 0;
/* VERIFY mode: seed every op with the known cipher IV for checking. */
193 if (options->test == CPERF_TEST_TYPE_VERIFY) {
194 for (i = 0; i < nb_ops; i++) {
195 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
196 uint8_t *, iv_offset);
198 memcpy(iv_ptr, test_vector->cipher_iv.data,
199 test_vector->cipher_iv.length);
/*
 * Populate a burst of ops for auth-only tests. For VERIFY-direction auth
 * the digest comes from the test vector; otherwise the digest is placed in
 * the mbuf chain right after the authenticated data, walking segments so
 * the digest lands in a segment with room for options->digest_sz bytes.
 * Wireless algos (SNOW3G UIA2 / KASUMI F9 / ZUC EIA3) get the auth length
 * in bits (<<= 3). In VERIFY test mode the auth IV is copied per op.
 *
 * NOTE(review): source view is elided (loop-index declaration, `else`
 * keywords, the out_of_place branch body and closing braces are missing
 * between the visible lines).
 */
208 cperf_set_ops_auth(struct rte_crypto_op **ops,
209 uint32_t src_buf_offset, uint32_t dst_buf_offset,
210 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
211 const struct cperf_options *options,
212 const struct cperf_test_vector *test_vector,
213 uint16_t iv_offset, uint32_t *imix_idx)
217 for (i = 0; i < nb_ops; i++) {
218 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
220 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
221 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf is co-located after the op in the same mempool object. */
223 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
226 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
227 if (dst_buf_offset == 0)
228 sym_op->m_dst = NULL;
230 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
/* Copy the auth IV (if the algo uses one) into the op's IV area. */
233 if (test_vector->auth_iv.length) {
234 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
237 memcpy(iv_ptr, test_vector->auth_iv.data,
238 test_vector->auth_iv.length);
241 /* authentication parameters */
242 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
/* VERIFY: the device compares against the precomputed digest. */
243 sym_op->auth.digest.data = test_vector->digest.data;
244 sym_op->auth.digest.phys_addr =
245 test_vector->digest.phys_addr;
/* GENERATE: find where the digest goes, after test_buffer_size bytes. */
248 uint32_t offset = options->test_buffer_size;
249 struct rte_mbuf *buf, *tbuf;
251 if (options->out_of_place) {
/* Walk the segment chain until `offset` falls inside `tbuf`. */
254 tbuf = sym_op->m_src;
255 while ((tbuf->next != NULL) &&
256 (offset >= tbuf->data_len)) {
257 offset -= tbuf->data_len;
261 * If there is not enough room in segment,
262 * place the digest in the next segment
264 if ((tbuf->data_len - offset) < options->digest_sz) {
271 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
273 sym_op->auth.digest.phys_addr =
274 rte_pktmbuf_iova_offset(buf, offset);
278 if (options->imix_distribution_count) {
279 sym_op->auth.data.length =
280 options->imix_buffer_sizes[*imix_idx];
281 *imix_idx = (*imix_idx + 1) % options->pool_sz;
283 sym_op->auth.data.length = options->test_buffer_size;
/* Wireless algos take the auth length in bits, not bytes. */
285 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
286 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
287 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
288 sym_op->auth.data.length <<= 3;
290 sym_op->auth.data.offset = 0;
/* VERIFY mode: re-seed the known auth IV into every op. */
293 if (options->test == CPERF_TEST_TYPE_VERIFY) {
294 if (test_vector->auth_iv.length) {
295 for (i = 0; i < nb_ops; i++) {
296 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
297 uint8_t *, iv_offset);
299 memcpy(iv_ptr, test_vector->auth_iv.data,
300 test_vector->auth_iv.length);
/*
 * Populate a burst of ops for chained cipher+auth tests. Combines the
 * cipher setup of cperf_set_ops_cipher and the digest placement of
 * cperf_set_ops_auth. In VERIFY test mode, the cipher IV is copied at
 * iv_offset and the auth IV (if any) is laid out directly after it —
 * matching the iv.offset values programmed into the session xforms.
 *
 * NOTE(review): source view is elided (loop-index declaration, `else`
 * keywords, the out_of_place branch body and closing braces are missing
 * between the visible lines).
 */
308 cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
309 uint32_t src_buf_offset, uint32_t dst_buf_offset,
310 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
311 const struct cperf_options *options,
312 const struct cperf_test_vector *test_vector,
313 uint16_t iv_offset, uint32_t *imix_idx)
317 for (i = 0; i < nb_ops; i++) {
318 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
320 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
321 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf is co-located after the op in the same mempool object. */
323 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
326 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
327 if (dst_buf_offset == 0)
328 sym_op->m_dst = NULL;
330 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
333 /* cipher parameters */
334 if (options->imix_distribution_count) {
335 sym_op->cipher.data.length =
336 options->imix_buffer_sizes[*imix_idx];
337 *imix_idx = (*imix_idx + 1) % options->pool_sz;
339 sym_op->cipher.data.length = options->test_buffer_size;
/* Wireless algos take the cipher length in bits, not bytes. */
341 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
342 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
343 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
344 sym_op->cipher.data.length <<= 3;
346 sym_op->cipher.data.offset = 0;
348 /* authentication parameters */
349 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
/* VERIFY: the device compares against the precomputed digest. */
350 sym_op->auth.digest.data = test_vector->digest.data;
351 sym_op->auth.digest.phys_addr =
352 test_vector->digest.phys_addr;
/* GENERATE: digest goes after the test buffer, in a segment with room. */
355 uint32_t offset = options->test_buffer_size;
356 struct rte_mbuf *buf, *tbuf;
358 if (options->out_of_place) {
361 tbuf = sym_op->m_src;
362 while ((tbuf->next != NULL) &&
363 (offset >= tbuf->data_len)) {
364 offset -= tbuf->data_len;
368 * If there is not enough room in segment,
369 * place the digest in the next segment
371 if ((tbuf->data_len - offset) < options->digest_sz) {
378 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
380 sym_op->auth.digest.phys_addr =
381 rte_pktmbuf_iova_offset(buf, offset);
384 if (options->imix_distribution_count) {
385 sym_op->auth.data.length =
386 options->imix_buffer_sizes[*imix_idx];
387 *imix_idx = (*imix_idx + 1) % options->pool_sz;
389 sym_op->auth.data.length = options->test_buffer_size;
/* Wireless algos take the auth length in bits, not bytes. */
391 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
392 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
393 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
394 sym_op->auth.data.length <<= 3;
396 sym_op->auth.data.offset = 0;
/* VERIFY mode: cipher IV at iv_offset, auth IV immediately after it. */
399 if (options->test == CPERF_TEST_TYPE_VERIFY) {
400 for (i = 0; i < nb_ops; i++) {
401 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
402 uint8_t *, iv_offset);
404 memcpy(iv_ptr, test_vector->cipher_iv.data,
405 test_vector->cipher_iv.length);
406 if (test_vector->auth_iv.length) {
408 * Copy IV after the crypto operation and
411 iv_ptr += test_vector->cipher_iv.length;
412 memcpy(iv_ptr, test_vector->auth_iv.data,
413 test_vector->auth_iv.length);
/*
 * Populate a burst of ops for AEAD tests. The AAD is stored in the op's
 * private area right after the IV, 16-byte aligned. DECRYPT direction uses
 * the test vector's precomputed digest; ENCRYPT places the digest in the
 * mbuf chain after the AEAD data. In VERIFY mode, AES-CCM needs special
 * layout: the nonce is copied one byte into the IV field and the AAD
 * 18 bytes into the AAD field (per the CCM spec'd B0/AAD formatting).
 *
 * NOTE(review): source view is elided (loop-index declaration, `else`
 * keywords, the out_of_place branch body and closing braces are missing
 * between the visible lines).
 */
423 cperf_set_ops_aead(struct rte_crypto_op **ops,
424 uint32_t src_buf_offset, uint32_t dst_buf_offset,
425 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
426 const struct cperf_options *options,
427 const struct cperf_test_vector *test_vector,
428 uint16_t iv_offset, uint32_t *imix_idx)
431 /* AAD is placed after the IV */
432 uint16_t aad_offset = iv_offset +
433 RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);
435 for (i = 0; i < nb_ops; i++) {
436 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
438 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
439 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf is co-located after the op in the same mempool object. */
441 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
444 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
445 if (dst_buf_offset == 0)
446 sym_op->m_dst = NULL;
448 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
451 /* AEAD parameters */
452 if (options->imix_distribution_count) {
453 sym_op->aead.data.length =
454 options->imix_buffer_sizes[*imix_idx];
455 *imix_idx = (*imix_idx + 1) % options->pool_sz;
457 sym_op->aead.data.length = options->test_buffer_size;
458 sym_op->aead.data.offset = 0;
/* AAD lives in the op's private data area, after the (aligned) IV. */
460 sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
461 uint8_t *, aad_offset);
462 sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
465 if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
/* DECRYPT: verify against the test vector's known digest. */
466 sym_op->aead.digest.data = test_vector->digest.data;
467 sym_op->aead.digest.phys_addr =
468 test_vector->digest.phys_addr;
/* ENCRYPT: digest goes right after the AEAD data in the chain. */
471 uint32_t offset = sym_op->aead.data.length +
472 sym_op->aead.data.offset;
473 struct rte_mbuf *buf, *tbuf;
475 if (options->out_of_place) {
478 tbuf = sym_op->m_src;
479 while ((tbuf->next != NULL) &&
480 (offset >= tbuf->data_len)) {
481 offset -= tbuf->data_len;
485 * If there is not enough room in segment,
486 * place the digest in the next segment
488 if ((tbuf->data_len - offset) < options->digest_sz) {
495 sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
497 sym_op->aead.digest.phys_addr =
498 rte_pktmbuf_iova_offset(buf, offset);
502 if (options->test == CPERF_TEST_TYPE_VERIFY) {
503 for (i = 0; i < nb_ops; i++) {
504 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
505 uint8_t *, iv_offset);
508 * If doing AES-CCM, nonce is copied one byte
509 * after the start of IV field, and AAD is copied
510 * 18 bytes after the start of the AAD field.
512 if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
513 memcpy(iv_ptr + 1, test_vector->aead_iv.data,
514 test_vector->aead_iv.length);
516 memcpy(ops[i]->sym->aead.aad.data + 18,
517 test_vector->aad.data,
518 test_vector->aad.length);
520 memcpy(iv_ptr, test_vector->aead_iv.data,
521 test_vector->aead_iv.length);
523 memcpy(ops[i]->sym->aead.aad.data,
524 test_vector->aad.data,
525 test_vector->aad.length);
/*
 * Build and initialize the session used by the perf test, driven by
 * options->op_type:
 *  - CPERF_PDCP / CPERF_DOCSIS (RTE_LIBRTE_SECURITY only): build the
 *    crypto xform chain, wrap it in an rte_security_session_conf and
 *    return the security session cast to rte_cryptodev_sym_session *.
 *  - Otherwise: create a plain symmetric session from sess_mp and init it
 *    with the cipher / auth / chained / AEAD xform matching op_type.
 *
 * NOTE(review): the return values of rte_cryptodev_sym_session_init()
 * calls below are never checked — failures would surface later as
 * enqueue errors rather than at setup.
 * NOTE(review): source view is elided (parameters such as dev_id and
 * iv_offset, some `else` keywords, struct-initializer fields and closing
 * braces are missing between the visible lines).
 */
533 static struct rte_cryptodev_sym_session *
534 cperf_create_session(struct rte_mempool *sess_mp,
535 struct rte_mempool *priv_mp,
537 const struct cperf_options *options,
538 const struct cperf_test_vector *test_vector,
541 struct rte_crypto_sym_xform cipher_xform;
542 struct rte_crypto_sym_xform auth_xform;
543 struct rte_crypto_sym_xform aead_xform;
544 struct rte_cryptodev_sym_session *sess = NULL;
546 #ifdef RTE_LIBRTE_SECURITY
550 if (options->op_type == CPERF_PDCP) {
551 /* Setup Cipher Parameters */
552 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
553 cipher_xform.next = NULL;
554 cipher_xform.cipher.algo = options->cipher_algo;
555 cipher_xform.cipher.op = options->cipher_op;
556 cipher_xform.cipher.iv.offset = iv_offset;
558 /* cipher different than null */
559 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
560 cipher_xform.cipher.key.data = test_vector->cipher_key.data;
561 cipher_xform.cipher.key.length = test_vector->cipher_key.length;
562 cipher_xform.cipher.iv.length = test_vector->cipher_iv.length;
564 cipher_xform.cipher.key.data = NULL;
565 cipher_xform.cipher.key.length = 0;
566 cipher_xform.cipher.iv.length = 0;
569 /* Setup Auth Parameters */
570 if (options->auth_algo != 0) {
571 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
572 auth_xform.next = NULL;
573 auth_xform.auth.algo = options->auth_algo;
574 auth_xform.auth.op = options->auth_op;
/* Auth IV is laid out directly after the cipher IV in the op. */
575 auth_xform.auth.iv.offset = iv_offset +
576 cipher_xform.cipher.iv.length;
578 /* auth different than null */
579 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
580 auth_xform.auth.digest_length = options->digest_sz;
581 auth_xform.auth.key.length = test_vector->auth_key.length;
582 auth_xform.auth.key.data = test_vector->auth_key.data;
583 auth_xform.auth.iv.length = test_vector->auth_iv.length;
585 auth_xform.auth.digest_length = 0;
586 auth_xform.auth.key.length = 0;
587 auth_xform.auth.key.data = NULL;
588 auth_xform.auth.iv.length = 0;
/* Chain cipher -> auth when an auth algo was requested. */
591 cipher_xform.next = &auth_xform;
593 cipher_xform.next = NULL;
596 struct rte_security_session_conf sess_conf = {
597 .action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
598 .protocol = RTE_SECURITY_PROTOCOL_PDCP,
601 .domain = options->pdcp_domain,
603 .sn_size = options->pdcp_sn_sz,
/* Fixed HFN threshold used for all PDCP perf sessions. */
605 .hfn_threshold = 0x70C0A,
607 .crypto_xform = &cipher_xform
610 struct rte_security_ctx *ctx = (struct rte_security_ctx *)
611 rte_cryptodev_get_sec_ctx(dev_id);
613 /* Create security session */
614 return (void *)rte_security_session_create(ctx,
615 &sess_conf, sess_mp);
617 if (options->op_type == CPERF_DOCSIS) {
618 enum rte_security_docsis_direction direction;
620 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
621 cipher_xform.next = NULL;
622 cipher_xform.cipher.algo = options->cipher_algo;
623 cipher_xform.cipher.op = options->cipher_op;
624 cipher_xform.cipher.iv.offset = iv_offset;
625 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
626 cipher_xform.cipher.key.data =
627 test_vector->cipher_key.data;
628 cipher_xform.cipher.key.length =
629 test_vector->cipher_key.length;
630 cipher_xform.cipher.iv.length =
631 test_vector->cipher_iv.length;
633 cipher_xform.cipher.key.data = NULL;
634 cipher_xform.cipher.key.length = 0;
635 cipher_xform.cipher.iv.length = 0;
637 cipher_xform.next = NULL;
/* DOCSIS: encrypt == downlink (CMTS->CM), decrypt == uplink. */
639 if (options->cipher_op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
640 direction = RTE_SECURITY_DOCSIS_DOWNLINK;
642 direction = RTE_SECURITY_DOCSIS_UPLINK;
644 struct rte_security_session_conf sess_conf = {
646 RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
647 .protocol = RTE_SECURITY_PROTOCOL_DOCSIS,
649 .direction = direction,
651 .crypto_xform = &cipher_xform
653 struct rte_security_ctx *ctx = (struct rte_security_ctx *)
654 rte_cryptodev_get_sec_ctx(dev_id);
656 /* Create security session */
657 return (void *)rte_security_session_create(ctx,
658 &sess_conf, priv_mp);
/* Non-security path: plain symmetric session. */
661 sess = rte_cryptodev_sym_session_create(sess_mp);
665 if (options->op_type == CPERF_CIPHER_ONLY) {
666 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
667 cipher_xform.next = NULL;
668 cipher_xform.cipher.algo = options->cipher_algo;
669 cipher_xform.cipher.op = options->cipher_op;
670 cipher_xform.cipher.iv.offset = iv_offset;
672 /* cipher different than null */
673 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
674 cipher_xform.cipher.key.data =
675 test_vector->cipher_key.data;
676 cipher_xform.cipher.key.length =
677 test_vector->cipher_key.length;
678 cipher_xform.cipher.iv.length =
679 test_vector->cipher_iv.length;
681 cipher_xform.cipher.key.data = NULL;
682 cipher_xform.cipher.key.length = 0;
683 cipher_xform.cipher.iv.length = 0;
685 /* create crypto session */
686 rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
691 } else if (options->op_type == CPERF_AUTH_ONLY) {
692 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
693 auth_xform.next = NULL;
694 auth_xform.auth.algo = options->auth_algo;
695 auth_xform.auth.op = options->auth_op;
696 auth_xform.auth.iv.offset = iv_offset;
698 /* auth different than null */
699 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
700 auth_xform.auth.digest_length =
702 auth_xform.auth.key.length =
703 test_vector->auth_key.length;
704 auth_xform.auth.key.data = test_vector->auth_key.data;
705 auth_xform.auth.iv.length =
706 test_vector->auth_iv.length;
708 auth_xform.auth.digest_length = 0;
709 auth_xform.auth.key.length = 0;
710 auth_xform.auth.key.data = NULL;
711 auth_xform.auth.iv.length = 0;
713 /* create crypto session */
714 rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
719 } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
720 || options->op_type == CPERF_AUTH_THEN_CIPHER) {
724 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
725 cipher_xform.next = NULL;
726 cipher_xform.cipher.algo = options->cipher_algo;
727 cipher_xform.cipher.op = options->cipher_op;
728 cipher_xform.cipher.iv.offset = iv_offset;
730 /* cipher different than null */
731 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
732 cipher_xform.cipher.key.data =
733 test_vector->cipher_key.data;
734 cipher_xform.cipher.key.length =
735 test_vector->cipher_key.length;
736 cipher_xform.cipher.iv.length =
737 test_vector->cipher_iv.length;
739 cipher_xform.cipher.key.data = NULL;
740 cipher_xform.cipher.key.length = 0;
741 cipher_xform.cipher.iv.length = 0;
747 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
748 auth_xform.next = NULL;
749 auth_xform.auth.algo = options->auth_algo;
750 auth_xform.auth.op = options->auth_op;
/* Auth IV follows the cipher IV, matching the op layout. */
751 auth_xform.auth.iv.offset = iv_offset +
752 cipher_xform.cipher.iv.length;
754 /* auth different than null */
755 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
756 auth_xform.auth.digest_length = options->digest_sz;
757 auth_xform.auth.iv.length = test_vector->auth_iv.length;
758 auth_xform.auth.key.length =
759 test_vector->auth_key.length;
760 auth_xform.auth.key.data =
761 test_vector->auth_key.data;
763 auth_xform.auth.digest_length = 0;
764 auth_xform.auth.key.length = 0;
765 auth_xform.auth.key.data = NULL;
766 auth_xform.auth.iv.length = 0;
769 /* cipher then auth */
770 if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
771 cipher_xform.next = &auth_xform;
772 /* create crypto session */
773 rte_cryptodev_sym_session_init(dev_id,
774 sess, &cipher_xform, priv_mp);
775 } else { /* auth then cipher */
776 auth_xform.next = &cipher_xform;
777 /* create crypto session */
778 rte_cryptodev_sym_session_init(dev_id,
779 sess, &auth_xform, priv_mp);
781 } else { /* options->op_type == CPERF_AEAD */
782 aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
783 aead_xform.next = NULL;
784 aead_xform.aead.algo = options->aead_algo;
785 aead_xform.aead.op = options->aead_op;
786 aead_xform.aead.iv.offset = iv_offset;
788 aead_xform.aead.key.data =
789 test_vector->aead_key.data;
790 aead_xform.aead.key.length =
791 test_vector->aead_key.length;
792 aead_xform.aead.iv.length = test_vector->aead_iv.length;
794 aead_xform.aead.digest_length = options->digest_sz;
795 aead_xform.aead.aad_length =
796 options->aead_aad_sz;
798 /* Create crypto session */
799 rte_cryptodev_sym_session_init(dev_id,
800 sess, &aead_xform, priv_mp);
807 cperf_get_op_functions(const struct cperf_options *options,
808 struct cperf_op_fns *op_fns)
810 memset(op_fns, 0, sizeof(struct cperf_op_fns));
812 op_fns->sess_create = cperf_create_session;
814 if (options->op_type == CPERF_AEAD) {
815 op_fns->populate_ops = cperf_set_ops_aead;
819 if (options->op_type == CPERF_AUTH_THEN_CIPHER
820 || options->op_type == CPERF_CIPHER_THEN_AUTH) {
821 op_fns->populate_ops = cperf_set_ops_cipher_auth;
824 if (options->op_type == CPERF_AUTH_ONLY) {
825 if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
826 op_fns->populate_ops = cperf_set_ops_null_auth;
828 op_fns->populate_ops = cperf_set_ops_auth;
831 if (options->op_type == CPERF_CIPHER_ONLY) {
832 if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
833 op_fns->populate_ops = cperf_set_ops_null_cipher;
835 op_fns->populate_ops = cperf_set_ops_cipher;
838 #ifdef RTE_LIBRTE_SECURITY
839 if (options->op_type == CPERF_PDCP) {
840 op_fns->populate_ops = cperf_set_ops_security;
843 if (options->op_type == CPERF_DOCSIS) {
844 op_fns->populate_ops = cperf_set_ops_security;