1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2016-2017 Intel Corporation
5 #include <rte_cryptodev.h>
9 #include "cperf_test_vectors.h"
11 #ifdef RTE_LIBRTE_SECURITY
13 cperf_set_ops_security(struct rte_crypto_op **ops,
14 uint32_t src_buf_offset __rte_unused,
15 uint32_t dst_buf_offset __rte_unused,
16 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
17 const struct cperf_options *options __rte_unused,
18 const struct cperf_test_vector *test_vector __rte_unused,
19 uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
23 for (i = 0; i < nb_ops; i++) {
24 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
25 struct rte_security_session *sec_sess =
26 (struct rte_security_session *)sess;
29 uint32_t *per_pkt_hfn = rte_crypto_op_ctod_offset(ops[i],
30 uint32_t *, iv_offset);
31 *per_pkt_hfn = options->pdcp_ses_hfn_en ? 0 : PDCP_DEFAULT_HFN;
33 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
34 rte_security_attach_session(ops[i], sec_sess);
35 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
38 if (options->op_type == CPERF_PDCP) {
39 sym_op->m_src->buf_len = options->segment_sz;
40 sym_op->m_src->data_len = options->test_buffer_size;
41 sym_op->m_src->pkt_len = sym_op->m_src->data_len;
44 if (options->op_type == CPERF_DOCSIS) {
45 if (options->imix_distribution_count) {
46 buf_sz = options->imix_buffer_sizes[*imix_idx];
47 *imix_idx = (*imix_idx + 1) % options->pool_sz;
49 buf_sz = options->test_buffer_size;
51 /* DOCSIS header is not CRC'ed */
52 sym_op->auth.data.offset = options->docsis_hdr_sz;
53 sym_op->auth.data.length = buf_sz -
54 sym_op->auth.data.offset - RTE_ETHER_CRC_LEN;
56 * DOCSIS header and SRC and DST MAC addresses are not
59 sym_op->cipher.data.offset = sym_op->auth.data.offset +
60 RTE_ETHER_HDR_LEN - RTE_ETHER_TYPE_LEN;
61 sym_op->cipher.data.length = buf_sz -
62 sym_op->cipher.data.offset;
65 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
66 if (dst_buf_offset == 0)
69 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
/*
 * Populate ops for a NULL-cipher session: attach the sym session, point
 * m_src (and optionally m_dst) at the mbufs pre-allocated alongside each
 * op, and set the cipher length — per-op IMIX size when an IMIX
 * distribution is configured, otherwise the fixed test buffer size.
 */
78 cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
79 uint32_t src_buf_offset, uint32_t dst_buf_offset,
80 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
81 const struct cperf_options *options,
82 const struct cperf_test_vector *test_vector __rte_unused,
83 uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
87 for (i = 0; i < nb_ops; i++) {
88 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
90 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
91 rte_crypto_op_attach_sym_session(ops[i], sess);
/* mbufs are laid out at fixed offsets from the op structure itself. */
93 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
96 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
97 if (dst_buf_offset == 0)
100 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
103 /* cipher parameters */
104 if (options->imix_distribution_count) {
105 sym_op->cipher.data.length =
106 options->imix_buffer_sizes[*imix_idx];
/* imix index wraps at pool_sz so the pattern repeats over the pool. */
107 *imix_idx = (*imix_idx + 1) % options->pool_sz;
109 sym_op->cipher.data.length = options->test_buffer_size;
110 sym_op->cipher.data.offset = 0;
/*
 * Populate ops for a NULL-auth session: same mbuf wiring as the
 * null-cipher variant, but the length/offset are written into the auth
 * portion of the sym op instead of the cipher portion.
 */
117 cperf_set_ops_null_auth(struct rte_crypto_op **ops,
118 uint32_t src_buf_offset, uint32_t dst_buf_offset,
119 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
120 const struct cperf_options *options,
121 const struct cperf_test_vector *test_vector __rte_unused,
122 uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
126 for (i = 0; i < nb_ops; i++) {
127 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
129 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
130 rte_crypto_op_attach_sym_session(ops[i], sess);
/* mbufs are laid out at fixed offsets from the op structure itself. */
132 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
135 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
136 if (dst_buf_offset == 0)
137 sym_op->m_dst = NULL;
139 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
142 /* auth parameters */
143 if (options->imix_distribution_count) {
144 sym_op->auth.data.length =
145 options->imix_buffer_sizes[*imix_idx];
146 *imix_idx = (*imix_idx + 1) % options->pool_sz;
148 sym_op->auth.data.length = options->test_buffer_size;
149 sym_op->auth.data.offset = 0;
/*
 * Populate ops for a cipher-only session.  Besides the common mbuf/session
 * wiring, the cipher length is converted to bits for SNOW3G/KASUMI/ZUC
 * (those algorithms express data length in bits, not bytes), and in VERIFY
 * mode the test-vector cipher IV is copied into every op so results are
 * reproducible and checkable.
 */
156 cperf_set_ops_cipher(struct rte_crypto_op **ops,
157 uint32_t src_buf_offset, uint32_t dst_buf_offset,
158 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
159 const struct cperf_options *options,
160 const struct cperf_test_vector *test_vector,
161 uint16_t iv_offset, uint32_t *imix_idx)
165 for (i = 0; i < nb_ops; i++) {
166 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
168 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
169 rte_crypto_op_attach_sym_session(ops[i], sess);
171 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
174 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
175 if (dst_buf_offset == 0)
176 sym_op->m_dst = NULL;
178 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
181 /* cipher parameters */
182 if (options->imix_distribution_count) {
183 sym_op->cipher.data.length =
184 options->imix_buffer_sizes[*imix_idx];
185 *imix_idx = (*imix_idx + 1) % options->pool_sz;
187 sym_op->cipher.data.length = options->test_buffer_size;
/* These wireless algorithms take the cipher length in bits (<<= 3). */
189 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
190 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
191 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
192 sym_op->cipher.data.length <<= 3;
194 sym_op->cipher.data.offset = 0;
/* VERIFY mode: seed every op with the known cipher IV from the vector. */
197 if (options->test == CPERF_TEST_TYPE_VERIFY) {
198 for (i = 0; i < nb_ops; i++) {
199 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
200 uint8_t *, iv_offset);
202 memcpy(iv_ptr, test_vector->cipher_iv.data,
203 test_vector->cipher_iv.length);
/*
 * Populate ops for an auth-only session.  For AUTH_OP_VERIFY the digest
 * pointer comes straight from the test vector; otherwise the digest is
 * placed in the packet itself just past the authenticated data, walking
 * the mbuf segment chain to find the segment (and intra-segment offset)
 * that can hold digest_sz bytes.  Auth length is converted to bits for
 * SNOW3G/KASUMI/ZUC, and VERIFY test mode seeds the auth IV per op.
 */
212 cperf_set_ops_auth(struct rte_crypto_op **ops,
213 uint32_t src_buf_offset, uint32_t dst_buf_offset,
214 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
215 const struct cperf_options *options,
216 const struct cperf_test_vector *test_vector,
217 uint16_t iv_offset, uint32_t *imix_idx)
221 for (i = 0; i < nb_ops; i++) {
222 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
224 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
225 rte_crypto_op_attach_sym_session(ops[i], sess);
227 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
230 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
231 if (dst_buf_offset == 0)
232 sym_op->m_dst = NULL;
234 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
/* Only algorithms that use an auth IV get one copied in here. */
237 if (test_vector->auth_iv.length) {
238 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
241 memcpy(iv_ptr, test_vector->auth_iv.data,
242 test_vector->auth_iv.length);
245 /* authentication parameters */
246 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
247 sym_op->auth.digest.data = test_vector->digest.data;
248 sym_op->auth.digest.phys_addr =
249 test_vector->digest.phys_addr;
/* Digest goes right after the authenticated region in the packet. */
252 uint32_t offset = options->test_buffer_size;
253 struct rte_mbuf *buf, *tbuf;
255 if (options->out_of_place) {
/* Walk the segment chain until 'offset' falls inside a segment. */
258 tbuf = sym_op->m_src;
259 while ((tbuf->next != NULL) &&
260 (offset >= tbuf->data_len)) {
261 offset -= tbuf->data_len;
265 * If there is not enough room in segment,
266 * place the digest in the next segment
268 if ((tbuf->data_len - offset) < options->digest_sz) {
275 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
277 sym_op->auth.digest.phys_addr =
278 rte_pktmbuf_iova_offset(buf, offset);
282 if (options->imix_distribution_count) {
283 sym_op->auth.data.length =
284 options->imix_buffer_sizes[*imix_idx];
285 *imix_idx = (*imix_idx + 1) % options->pool_sz;
287 sym_op->auth.data.length = options->test_buffer_size;
/* These wireless algorithms take the auth length in bits (<<= 3). */
289 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
290 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
291 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
292 sym_op->auth.data.length <<= 3;
294 sym_op->auth.data.offset = 0;
/* VERIFY mode: seed the known auth IV so output can be checked. */
297 if (options->test == CPERF_TEST_TYPE_VERIFY) {
298 if (test_vector->auth_iv.length) {
299 for (i = 0; i < nb_ops; i++) {
300 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
301 uint8_t *, iv_offset);
303 memcpy(iv_ptr, test_vector->auth_iv.data,
304 test_vector->auth_iv.length);
/*
 * Populate ops for a chained cipher+auth session: combines the cipher-only
 * setup (length in bits for SNOW3G/KASUMI/ZUC) with the auth-only digest
 * placement (digest after the data, segment-chain walk).  In VERIFY test
 * mode the cipher IV — and, when present, the auth IV immediately after
 * it — are copied into each op.
 */
312 cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
313 uint32_t src_buf_offset, uint32_t dst_buf_offset,
314 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
315 const struct cperf_options *options,
316 const struct cperf_test_vector *test_vector,
317 uint16_t iv_offset, uint32_t *imix_idx)
321 for (i = 0; i < nb_ops; i++) {
322 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
324 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
325 rte_crypto_op_attach_sym_session(ops[i], sess);
327 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
330 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
331 if (dst_buf_offset == 0)
332 sym_op->m_dst = NULL;
334 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
337 /* cipher parameters */
338 if (options->imix_distribution_count) {
339 sym_op->cipher.data.length =
340 options->imix_buffer_sizes[*imix_idx];
341 *imix_idx = (*imix_idx + 1) % options->pool_sz;
343 sym_op->cipher.data.length = options->test_buffer_size;
/* These wireless algorithms take the cipher length in bits (<<= 3). */
345 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
346 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
347 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
348 sym_op->cipher.data.length <<= 3;
350 sym_op->cipher.data.offset = 0;
352 /* authentication parameters */
353 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
354 sym_op->auth.digest.data = test_vector->digest.data;
355 sym_op->auth.digest.phys_addr =
356 test_vector->digest.phys_addr;
/* Digest goes right after the authenticated region in the packet. */
359 uint32_t offset = options->test_buffer_size;
360 struct rte_mbuf *buf, *tbuf;
362 if (options->out_of_place) {
/* Walk the segment chain until 'offset' falls inside a segment. */
365 tbuf = sym_op->m_src;
366 while ((tbuf->next != NULL) &&
367 (offset >= tbuf->data_len)) {
368 offset -= tbuf->data_len;
372 * If there is not enough room in segment,
373 * place the digest in the next segment
375 if ((tbuf->data_len - offset) < options->digest_sz) {
382 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
384 sym_op->auth.digest.phys_addr =
385 rte_pktmbuf_iova_offset(buf, offset);
388 if (options->imix_distribution_count) {
389 sym_op->auth.data.length =
390 options->imix_buffer_sizes[*imix_idx];
391 *imix_idx = (*imix_idx + 1) % options->pool_sz;
393 sym_op->auth.data.length = options->test_buffer_size;
/* These wireless algorithms take the auth length in bits (<<= 3). */
395 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
396 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
397 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
398 sym_op->auth.data.length <<= 3;
400 sym_op->auth.data.offset = 0;
/* VERIFY mode: cipher IV first, auth IV packed directly after it. */
403 if (options->test == CPERF_TEST_TYPE_VERIFY) {
404 for (i = 0; i < nb_ops; i++) {
405 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
406 uint8_t *, iv_offset);
408 memcpy(iv_ptr, test_vector->cipher_iv.data,
409 test_vector->cipher_iv.length);
410 if (test_vector->auth_iv.length) {
412 * Copy IV after the crypto operation and
415 iv_ptr += test_vector->cipher_iv.length;
416 memcpy(iv_ptr, test_vector->auth_iv.data,
417 test_vector->auth_iv.length);
/*
 * Populate ops for an AEAD session.  The AAD lives in the op's private
 * area immediately after the IV (IV space rounded up to 16 bytes).  For
 * DECRYPT the digest comes from the test vector; for ENCRYPT it is placed
 * in the packet just past the AEAD data, walking the segment chain as in
 * the auth case.  VERIFY mode copies IV and AAD into every op, with the
 * AES-CCM quirks noted inline below.
 */
427 cperf_set_ops_aead(struct rte_crypto_op **ops,
428 uint32_t src_buf_offset, uint32_t dst_buf_offset,
429 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
430 const struct cperf_options *options,
431 const struct cperf_test_vector *test_vector,
432 uint16_t iv_offset, uint32_t *imix_idx)
435 /* AAD is placed after the IV */
436 uint16_t aad_offset = iv_offset +
437 RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);
439 for (i = 0; i < nb_ops; i++) {
440 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
442 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
443 rte_crypto_op_attach_sym_session(ops[i], sess);
445 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
448 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
449 if (dst_buf_offset == 0)
450 sym_op->m_dst = NULL;
452 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
455 /* AEAD parameters */
456 if (options->imix_distribution_count) {
457 sym_op->aead.data.length =
458 options->imix_buffer_sizes[*imix_idx];
459 *imix_idx = (*imix_idx + 1) % options->pool_sz;
461 sym_op->aead.data.length = options->test_buffer_size;
462 sym_op->aead.data.offset = 0;
/* Virtual and physical AAD addresses both point into the op itself. */
464 sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
465 uint8_t *, aad_offset);
466 sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
469 if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
470 sym_op->aead.digest.data = test_vector->digest.data;
471 sym_op->aead.digest.phys_addr =
472 test_vector->digest.phys_addr;
/* ENCRYPT: digest goes right after the AEAD data in the packet. */
475 uint32_t offset = sym_op->aead.data.length +
476 sym_op->aead.data.offset;
477 struct rte_mbuf *buf, *tbuf;
479 if (options->out_of_place) {
/* Walk the segment chain until 'offset' falls inside a segment. */
482 tbuf = sym_op->m_src;
483 while ((tbuf->next != NULL) &&
484 (offset >= tbuf->data_len)) {
485 offset -= tbuf->data_len;
489 * If there is not enough room in segment,
490 * place the digest in the next segment
492 if ((tbuf->data_len - offset) < options->digest_sz) {
499 sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
501 sym_op->aead.digest.phys_addr =
502 rte_pktmbuf_iova_offset(buf, offset);
506 if (options->test == CPERF_TEST_TYPE_VERIFY) {
507 for (i = 0; i < nb_ops; i++) {
508 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
509 uint8_t *, iv_offset);
512 * If doing AES-CCM, nonce is copied one byte
513 * after the start of IV field, and AAD is copied
514 * 18 bytes after the start of the AAD field.
516 if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
517 memcpy(iv_ptr + 1, test_vector->aead_iv.data,
518 test_vector->aead_iv.length);
520 memcpy(ops[i]->sym->aead.aad.data + 18,
521 test_vector->aad.data,
522 test_vector->aad.length);
524 memcpy(iv_ptr, test_vector->aead_iv.data,
525 test_vector->aead_iv.length);
527 memcpy(ops[i]->sym->aead.aad.data,
528 test_vector->aad.data,
529 test_vector->aad.length);
/*
 * Create the session matching options->op_type.
 *
 * PDCP and DOCSIS take the rte_security path and return an
 * rte_security_session cast to the sym-session pointer type; all other op
 * types build rte_crypto_sym_xform chains and create/init a regular
 * cryptodev symmetric session from sess_mp/priv_mp.
 *
 * NOTE(review): PDCP creates its security session from sess_mp while
 * DOCSIS uses priv_mp — confirm this asymmetry is intentional.
 */
537 static struct rte_cryptodev_sym_session *
538 cperf_create_session(struct rte_mempool *sess_mp,
539 struct rte_mempool *priv_mp,
541 const struct cperf_options *options,
542 const struct cperf_test_vector *test_vector,
545 struct rte_crypto_sym_xform cipher_xform;
546 struct rte_crypto_sym_xform auth_xform;
547 struct rte_crypto_sym_xform aead_xform;
548 struct rte_cryptodev_sym_session *sess = NULL;
550 #ifdef RTE_LIBRTE_SECURITY
/* --- PDCP: cipher xform (optionally chained with auth) + security session --- */
554 if (options->op_type == CPERF_PDCP) {
555 /* Setup Cipher Parameters */
556 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
557 cipher_xform.next = NULL;
558 cipher_xform.cipher.algo = options->cipher_algo;
559 cipher_xform.cipher.op = options->cipher_op;
560 cipher_xform.cipher.iv.offset = iv_offset;
/* 4-byte IV slot: reused as the per-packet HFN by cperf_set_ops_security. */
561 cipher_xform.cipher.iv.length = 4;
563 /* cipher different than null */
564 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
565 cipher_xform.cipher.key.data = test_vector->cipher_key.data;
566 cipher_xform.cipher.key.length = test_vector->cipher_key.length;
568 cipher_xform.cipher.key.data = NULL;
569 cipher_xform.cipher.key.length = 0;
572 /* Setup Auth Parameters */
573 if (options->auth_algo != 0) {
574 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
575 auth_xform.next = NULL;
576 auth_xform.auth.algo = options->auth_algo;
577 auth_xform.auth.op = options->auth_op;
/* Auth IV is laid out directly after the cipher IV in the op. */
578 auth_xform.auth.iv.offset = iv_offset +
579 cipher_xform.cipher.iv.length;
581 /* auth different than null */
582 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
583 auth_xform.auth.digest_length = options->digest_sz;
584 auth_xform.auth.key.length = test_vector->auth_key.length;
585 auth_xform.auth.key.data = test_vector->auth_key.data;
586 auth_xform.auth.iv.length = test_vector->auth_iv.length;
588 auth_xform.auth.digest_length = 0;
589 auth_xform.auth.key.length = 0;
590 auth_xform.auth.key.data = NULL;
591 auth_xform.auth.iv.length = 0;
594 cipher_xform.next = &auth_xform;
596 cipher_xform.next = NULL;
599 struct rte_security_session_conf sess_conf = {
600 .action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
601 .protocol = RTE_SECURITY_PROTOCOL_PDCP,
604 .domain = options->pdcp_domain,
606 .sn_size = options->pdcp_sn_sz,
/* Session HFN is only meaningful when session-based HFN is enabled;
 * otherwise the per-packet override (hfn_ovrd) carries it. */
607 .hfn = options->pdcp_ses_hfn_en ?
608 PDCP_DEFAULT_HFN : 0,
609 .hfn_threshold = 0x70C0A,
610 .hfn_ovrd = !(options->pdcp_ses_hfn_en),
612 .crypto_xform = &cipher_xform
615 struct rte_security_ctx *ctx = (struct rte_security_ctx *)
616 rte_cryptodev_get_sec_ctx(dev_id);
618 /* Create security session */
619 return (void *)rte_security_session_create(ctx,
620 &sess_conf, sess_mp);
/* --- DOCSIS: single cipher xform + security session --- */
622 if (options->op_type == CPERF_DOCSIS) {
623 enum rte_security_docsis_direction direction;
625 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
626 cipher_xform.next = NULL;
627 cipher_xform.cipher.algo = options->cipher_algo;
628 cipher_xform.cipher.op = options->cipher_op;
629 cipher_xform.cipher.iv.offset = iv_offset;
630 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
631 cipher_xform.cipher.key.data =
632 test_vector->cipher_key.data;
633 cipher_xform.cipher.key.length =
634 test_vector->cipher_key.length;
635 cipher_xform.cipher.iv.length =
636 test_vector->cipher_iv.length;
638 cipher_xform.cipher.key.data = NULL;
639 cipher_xform.cipher.key.length = 0;
640 cipher_xform.cipher.iv.length = 0;
642 cipher_xform.next = NULL;
/* Encrypt maps to downlink, decrypt to uplink. */
644 if (options->cipher_op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
645 direction = RTE_SECURITY_DOCSIS_DOWNLINK;
647 direction = RTE_SECURITY_DOCSIS_UPLINK;
649 struct rte_security_session_conf sess_conf = {
651 RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
652 .protocol = RTE_SECURITY_PROTOCOL_DOCSIS,
654 .direction = direction,
656 .crypto_xform = &cipher_xform
658 struct rte_security_ctx *ctx = (struct rte_security_ctx *)
659 rte_cryptodev_get_sec_ctx(dev_id);
661 /* Create security session */
662 return (void *)rte_security_session_create(ctx,
663 &sess_conf, priv_mp);
/* --- All remaining op types use a plain cryptodev symmetric session --- */
666 sess = rte_cryptodev_sym_session_create(sess_mp);
670 if (options->op_type == CPERF_CIPHER_ONLY) {
671 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
672 cipher_xform.next = NULL;
673 cipher_xform.cipher.algo = options->cipher_algo;
674 cipher_xform.cipher.op = options->cipher_op;
675 cipher_xform.cipher.iv.offset = iv_offset;
677 /* cipher different than null */
678 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
679 cipher_xform.cipher.key.data =
680 test_vector->cipher_key.data;
681 cipher_xform.cipher.key.length =
682 test_vector->cipher_key.length;
683 cipher_xform.cipher.iv.length =
684 test_vector->cipher_iv.length;
686 cipher_xform.cipher.key.data = NULL;
687 cipher_xform.cipher.key.length = 0;
688 cipher_xform.cipher.iv.length = 0;
690 /* create crypto session */
691 rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
696 } else if (options->op_type == CPERF_AUTH_ONLY) {
697 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
698 auth_xform.next = NULL;
699 auth_xform.auth.algo = options->auth_algo;
700 auth_xform.auth.op = options->auth_op;
701 auth_xform.auth.iv.offset = iv_offset;
703 /* auth different than null */
704 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
705 auth_xform.auth.digest_length =
707 auth_xform.auth.key.length =
708 test_vector->auth_key.length;
709 auth_xform.auth.key.data = test_vector->auth_key.data;
710 auth_xform.auth.iv.length =
711 test_vector->auth_iv.length;
713 auth_xform.auth.digest_length = 0;
714 auth_xform.auth.key.length = 0;
715 auth_xform.auth.key.data = NULL;
716 auth_xform.auth.iv.length = 0;
718 /* create crypto session */
719 rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
/* Chained modes share one setup; only the xform chaining order differs. */
724 } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
725 || options->op_type == CPERF_AUTH_THEN_CIPHER) {
729 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
730 cipher_xform.next = NULL;
731 cipher_xform.cipher.algo = options->cipher_algo;
732 cipher_xform.cipher.op = options->cipher_op;
733 cipher_xform.cipher.iv.offset = iv_offset;
735 /* cipher different than null */
736 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
737 cipher_xform.cipher.key.data =
738 test_vector->cipher_key.data;
739 cipher_xform.cipher.key.length =
740 test_vector->cipher_key.length;
741 cipher_xform.cipher.iv.length =
742 test_vector->cipher_iv.length;
744 cipher_xform.cipher.key.data = NULL;
745 cipher_xform.cipher.key.length = 0;
746 cipher_xform.cipher.iv.length = 0;
752 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
753 auth_xform.next = NULL;
754 auth_xform.auth.algo = options->auth_algo;
755 auth_xform.auth.op = options->auth_op;
/* Auth IV follows the cipher IV in the op's private area. */
756 auth_xform.auth.iv.offset = iv_offset +
757 cipher_xform.cipher.iv.length;
759 /* auth different than null */
760 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
761 auth_xform.auth.digest_length = options->digest_sz;
762 auth_xform.auth.iv.length = test_vector->auth_iv.length;
763 auth_xform.auth.key.length =
764 test_vector->auth_key.length;
765 auth_xform.auth.key.data =
766 test_vector->auth_key.data;
768 auth_xform.auth.digest_length = 0;
769 auth_xform.auth.key.length = 0;
770 auth_xform.auth.key.data = NULL;
771 auth_xform.auth.iv.length = 0;
774 /* cipher then auth */
775 if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
776 cipher_xform.next = &auth_xform;
777 /* create crypto session */
778 rte_cryptodev_sym_session_init(dev_id,
779 sess, &cipher_xform, priv_mp);
780 } else { /* auth then cipher */
781 auth_xform.next = &cipher_xform;
782 /* create crypto session */
783 rte_cryptodev_sym_session_init(dev_id,
784 sess, &auth_xform, priv_mp);
786 } else { /* options->op_type == CPERF_AEAD */
787 aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
788 aead_xform.next = NULL;
789 aead_xform.aead.algo = options->aead_algo;
790 aead_xform.aead.op = options->aead_op;
791 aead_xform.aead.iv.offset = iv_offset;
793 aead_xform.aead.key.data =
794 test_vector->aead_key.data;
795 aead_xform.aead.key.length =
796 test_vector->aead_key.length;
797 aead_xform.aead.iv.length = test_vector->aead_iv.length;
799 aead_xform.aead.digest_length = options->digest_sz;
800 aead_xform.aead.aad_length =
801 options->aead_aad_sz;
803 /* Create crypto session */
804 rte_cryptodev_sym_session_init(dev_id,
805 sess, &aead_xform, priv_mp);
812 cperf_get_op_functions(const struct cperf_options *options,
813 struct cperf_op_fns *op_fns)
815 memset(op_fns, 0, sizeof(struct cperf_op_fns));
817 op_fns->sess_create = cperf_create_session;
819 if (options->op_type == CPERF_AEAD) {
820 op_fns->populate_ops = cperf_set_ops_aead;
824 if (options->op_type == CPERF_AUTH_THEN_CIPHER
825 || options->op_type == CPERF_CIPHER_THEN_AUTH) {
826 op_fns->populate_ops = cperf_set_ops_cipher_auth;
829 if (options->op_type == CPERF_AUTH_ONLY) {
830 if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
831 op_fns->populate_ops = cperf_set_ops_null_auth;
833 op_fns->populate_ops = cperf_set_ops_auth;
836 if (options->op_type == CPERF_CIPHER_ONLY) {
837 if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
838 op_fns->populate_ops = cperf_set_ops_null_cipher;
840 op_fns->populate_ops = cperf_set_ops_cipher;
843 #ifdef RTE_LIBRTE_SECURITY
844 if (options->op_type == CPERF_PDCP) {
845 op_fns->populate_ops = cperf_set_ops_security;
848 if (options->op_type == CPERF_DOCSIS) {
849 op_fns->populate_ops = cperf_set_ops_security;