1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2016-2017 Intel Corporation
5 #include <rte_cryptodev.h>
9 #include "cperf_test_vectors.h"
/*
 * Populate a burst of asymmetric (modular exponentiation) crypto ops.
 * Each op gets the shared perf_base operand, a result buffer sized to the
 * modulus, and the asym session attached. Buffer offsets, IV offset and
 * imix index are unused for modexp, hence __rte_unused.
 */
12 cperf_set_ops_asym(struct rte_crypto_op **ops,
13 uint32_t src_buf_offset __rte_unused,
14 uint32_t dst_buf_offset __rte_unused, uint16_t nb_ops,
15 struct rte_cryptodev_sym_session *sess,
16 const struct cperf_options *options __rte_unused,
17 const struct cperf_test_vector *test_vector __rte_unused,
18 uint16_t iv_offset __rte_unused,
19 uint32_t *imix_idx __rte_unused)
/* NOTE(review): `result` is a stack local and every op's result.data
 * points into this frame; all ops in the burst also share it, so results
 * overwrite each other. Presumably acceptable for throughput-only
 * measurement, but confirm ops complete before this frame unwinds. */
22 uint8_t result[sizeof(perf_mod_p)] = { 0 };
/* The framework hands a single opaque session pointer; for the asym
 * op type it actually holds an asym session, hence the cast. */
23 struct rte_cryptodev_asym_session *asym_sess = (void *)sess;
25 for (i = 0; i < nb_ops; i++) {
26 struct rte_crypto_asym_op *asym_op = ops[i]->asym;
/* Reset status so completion can be detected after dequeue. */
28 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
29 asym_op->modex.base.data = perf_base;
30 asym_op->modex.base.length = sizeof(perf_base);
31 asym_op->modex.result.data = result;
32 asym_op->modex.result.length = sizeof(result);
33 rte_crypto_op_attach_asym_session(ops[i], asym_sess);
38 #ifdef RTE_LIB_SECURITY
/*
 * Populate a burst of rte_security ops (PDCP or DOCSIS protocol offload).
 * Attaches the security session, writes the per-packet HFN into the op's
 * private area at iv_offset, wires up m_src (and m_dst for out-of-place),
 * and sets the protocol-specific data lengths/offsets.
 */
40 cperf_set_ops_security(struct rte_crypto_op **ops,
41 uint32_t src_buf_offset __rte_unused,
42 uint32_t dst_buf_offset __rte_unused,
43 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
44 const struct cperf_options *options __rte_unused,
45 const struct cperf_test_vector *test_vector __rte_unused,
46 uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
50 for (i = 0; i < nb_ops; i++) {
51 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
/* The opaque sym session pointer actually holds a security session
 * for this op type. */
52 struct rte_security_session *sec_sess =
53 (struct rte_security_session *)sess;
/* Per-packet HFN lives in the op private area at iv_offset; it is
 * only meaningful when the session-level HFN is disabled, so write 0
 * when pdcp_ses_hfn_en is set and the default HFN otherwise. */
56 uint32_t *per_pkt_hfn = rte_crypto_op_ctod_offset(ops[i],
57 uint32_t *, iv_offset);
58 *per_pkt_hfn = options->pdcp_ses_hfn_en ? 0 : PDCP_DEFAULT_HFN;
60 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
61 rte_security_attach_session(ops[i], sec_sess);
/* The source mbuf was allocated in the same mempool element as the
 * op, at a fixed offset from the op pointer. */
62 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
65 if (options->op_type == CPERF_PDCP) {
66 sym_op->m_src->buf_len = options->segment_sz;
67 sym_op->m_src->data_len = options->test_buffer_size;
68 sym_op->m_src->pkt_len = sym_op->m_src->data_len;
71 if (options->op_type == CPERF_DOCSIS) {
/* imix: cycle through the configured buffer-size distribution. */
72 if (options->imix_distribution_count) {
73 buf_sz = options->imix_buffer_sizes[*imix_idx];
74 *imix_idx = (*imix_idx + 1) % options->pool_sz;
76 buf_sz = options->test_buffer_size;
78 sym_op->m_src->buf_len = options->segment_sz;
79 sym_op->m_src->data_len = buf_sz;
80 sym_op->m_src->pkt_len = buf_sz;
82 /* DOCSIS header is not CRC'ed */
83 sym_op->auth.data.offset = options->docsis_hdr_sz;
84 sym_op->auth.data.length = buf_sz -
85 sym_op->auth.data.offset - RTE_ETHER_CRC_LEN;
87 * DOCSIS header and SRC and DST MAC addresses are not
/* Cipher region starts after the Ethernet header minus the
 * EtherType (per the DOCSIS BPI+ encryption boundary). */
90 sym_op->cipher.data.offset = sym_op->auth.data.offset +
91 RTE_ETHER_HDR_LEN - RTE_ETHER_TYPE_LEN;
92 sym_op->cipher.data.length = buf_sz -
93 sym_op->cipher.data.offset;
/* dst_buf_offset == 0 means in-place operation (no separate dst). */
96 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
97 if (dst_buf_offset == 0)
100 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
/*
 * Populate a burst of NULL-cipher ops: attach the sym session, wire up
 * m_src/m_dst relative to the op pointer, and set the cipher data length
 * (fixed test size, or per-packet imix size when a distribution is set).
 */
109 cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
110 uint32_t src_buf_offset, uint32_t dst_buf_offset,
111 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
112 const struct cperf_options *options,
113 const struct cperf_test_vector *test_vector __rte_unused,
114 uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
118 for (i = 0; i < nb_ops; i++) {
119 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
121 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
122 rte_crypto_op_attach_sym_session(ops[i], sess);
/* mbufs live in the same mempool element as the op, at fixed offsets. */
124 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
/* dst_buf_offset == 0 means in-place operation (no separate dst). */
127 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
128 if (dst_buf_offset == 0)
129 sym_op->m_dst = NULL;
131 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
134 /* cipher parameters */
135 if (options->imix_distribution_count) {
136 sym_op->cipher.data.length =
137 options->imix_buffer_sizes[*imix_idx];
138 *imix_idx = (*imix_idx + 1) % options->pool_sz;
140 sym_op->cipher.data.length = options->test_buffer_size;
141 sym_op->cipher.data.offset = 0;
/*
 * Populate a burst of NULL-auth ops: identical structure to the null-cipher
 * case, but the data length/offset are set on the auth portion of the op.
 */
148 cperf_set_ops_null_auth(struct rte_crypto_op **ops,
149 uint32_t src_buf_offset, uint32_t dst_buf_offset,
150 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
151 const struct cperf_options *options,
152 const struct cperf_test_vector *test_vector __rte_unused,
153 uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
157 for (i = 0; i < nb_ops; i++) {
158 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
160 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
161 rte_crypto_op_attach_sym_session(ops[i], sess);
/* mbufs live in the same mempool element as the op, at fixed offsets. */
163 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
/* dst_buf_offset == 0 means in-place operation (no separate dst). */
166 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
167 if (dst_buf_offset == 0)
168 sym_op->m_dst = NULL;
170 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
173 /* auth parameters */
174 if (options->imix_distribution_count) {
175 sym_op->auth.data.length =
176 options->imix_buffer_sizes[*imix_idx];
177 *imix_idx = (*imix_idx + 1) % options->pool_sz;
179 sym_op->auth.data.length = options->test_buffer_size;
180 sym_op->auth.data.offset = 0;
/*
 * Populate a burst of cipher-only ops. Sets session, m_src/m_dst, and the
 * cipher data length (bits for the 3GPP wireless algorithms, bytes
 * otherwise). In verify mode the cipher IV from the test vector is copied
 * into each op's private area so results can be checked.
 */
187 cperf_set_ops_cipher(struct rte_crypto_op **ops,
188 uint32_t src_buf_offset, uint32_t dst_buf_offset,
189 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
190 const struct cperf_options *options,
191 const struct cperf_test_vector *test_vector,
192 uint16_t iv_offset, uint32_t *imix_idx)
196 for (i = 0; i < nb_ops; i++) {
197 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
199 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
200 rte_crypto_op_attach_sym_session(ops[i], sess);
/* mbufs live in the same mempool element as the op, at fixed offsets. */
202 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
/* dst_buf_offset == 0 means in-place operation (no separate dst). */
205 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
206 if (dst_buf_offset == 0)
207 sym_op->m_dst = NULL;
209 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
212 /* cipher parameters */
213 if (options->imix_distribution_count) {
214 sym_op->cipher.data.length =
215 options->imix_buffer_sizes[*imix_idx];
216 *imix_idx = (*imix_idx + 1) % options->pool_sz;
218 sym_op->cipher.data.length = options->test_buffer_size;
/* SNOW3G/KASUMI/ZUC express the cipher length in bits, so convert
 * bytes -> bits. */
220 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
221 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
222 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
223 sym_op->cipher.data.length <<= 3;
225 sym_op->cipher.data.offset = 0;
/* Verify mode needs the known IV so output can be compared against
 * the test vector. */
228 if (options->test == CPERF_TEST_TYPE_VERIFY) {
229 for (i = 0; i < nb_ops; i++) {
230 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
231 uint8_t *, iv_offset);
233 memcpy(iv_ptr, test_vector->cipher_iv.data,
234 test_vector->cipher_iv.length);
/*
 * Populate a burst of auth-only ops. Besides session/mbuf setup, this
 * places the digest: for VERIFY the known digest from the test vector is
 * referenced directly; for GENERATE the digest is placed in the mbuf chain
 * right after the payload, walking segments to find the one with room.
 */
243 cperf_set_ops_auth(struct rte_crypto_op **ops,
244 uint32_t src_buf_offset, uint32_t dst_buf_offset,
245 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
246 const struct cperf_options *options,
247 const struct cperf_test_vector *test_vector,
248 uint16_t iv_offset, uint32_t *imix_idx)
252 for (i = 0; i < nb_ops; i++) {
253 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
255 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
256 rte_crypto_op_attach_sym_session(ops[i], sess);
/* mbufs live in the same mempool element as the op, at fixed offsets. */
258 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
/* dst_buf_offset == 0 means in-place operation (no separate dst). */
261 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
262 if (dst_buf_offset == 0)
263 sym_op->m_dst = NULL;
265 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
/* Algorithms with an auth IV (e.g. SNOW3G UIA2) get it copied into
 * the op private area. */
268 if (test_vector->auth_iv.length) {
269 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
272 memcpy(iv_ptr, test_vector->auth_iv.data,
273 test_vector->auth_iv.length);
276 /* authentication parameters */
277 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
/* VERIFY: point at the precomputed digest in the test vector. */
278 sym_op->auth.digest.data = test_vector->digest.data;
279 sym_op->auth.digest.phys_addr =
280 test_vector->digest.phys_addr;
/* GENERATE: digest goes just past the payload inside the mbuf
 * chain; start at the payload end offset. */
283 uint32_t offset = options->test_buffer_size;
284 struct rte_mbuf *buf, *tbuf;
286 if (options->out_of_place) {
/* Walk segments until `offset` falls inside one. */
289 tbuf = sym_op->m_src;
290 while ((tbuf->next != NULL) &&
291 (offset >= tbuf->data_len)) {
292 offset -= tbuf->data_len;
296 * If there is not enough room in segment,
297 * place the digest in the next segment
299 if ((tbuf->data_len - offset) < options->digest_sz) {
306 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
308 sym_op->auth.digest.phys_addr =
309 rte_pktmbuf_iova_offset(buf, offset);
313 if (options->imix_distribution_count) {
314 sym_op->auth.data.length =
315 options->imix_buffer_sizes[*imix_idx];
316 *imix_idx = (*imix_idx + 1) % options->pool_sz;
318 sym_op->auth.data.length = options->test_buffer_size;
/* SNOW3G/KASUMI/ZUC express the auth length in bits. */
320 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
321 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
322 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
323 sym_op->auth.data.length <<= 3;
325 sym_op->auth.data.offset = 0;
/* Verify mode: copy the known auth IV so results are comparable. */
328 if (options->test == CPERF_TEST_TYPE_VERIFY) {
329 if (test_vector->auth_iv.length) {
330 for (i = 0; i < nb_ops; i++) {
331 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
332 uint8_t *, iv_offset);
334 memcpy(iv_ptr, test_vector->auth_iv.data,
335 test_vector->auth_iv.length);
/*
 * Populate a burst of chained cipher+auth ops. Combines the cipher-only and
 * auth-only setup: both data regions are configured (bit lengths for the
 * 3GPP wireless algorithms), and the digest is either taken from the test
 * vector (VERIFY) or placed after the payload in the mbuf chain (GENERATE).
 * In verify mode the cipher IV — and the auth IV, stored directly after
 * it — are copied into each op's private area.
 */
343 cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
344 uint32_t src_buf_offset, uint32_t dst_buf_offset,
345 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
346 const struct cperf_options *options,
347 const struct cperf_test_vector *test_vector,
348 uint16_t iv_offset, uint32_t *imix_idx)
352 for (i = 0; i < nb_ops; i++) {
353 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
355 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
356 rte_crypto_op_attach_sym_session(ops[i], sess);
/* mbufs live in the same mempool element as the op, at fixed offsets. */
358 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
/* dst_buf_offset == 0 means in-place operation (no separate dst). */
361 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
362 if (dst_buf_offset == 0)
363 sym_op->m_dst = NULL;
365 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
368 /* cipher parameters */
369 if (options->imix_distribution_count) {
370 sym_op->cipher.data.length =
371 options->imix_buffer_sizes[*imix_idx];
372 *imix_idx = (*imix_idx + 1) % options->pool_sz;
374 sym_op->cipher.data.length = options->test_buffer_size;
/* SNOW3G/KASUMI/ZUC express the cipher length in bits. */
376 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
377 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
378 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
379 sym_op->cipher.data.length <<= 3;
381 sym_op->cipher.data.offset = 0;
383 /* authentication parameters */
384 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
/* VERIFY: reference the precomputed digest. */
385 sym_op->auth.digest.data = test_vector->digest.data;
386 sym_op->auth.digest.phys_addr =
387 test_vector->digest.phys_addr;
/* GENERATE: find the mbuf segment holding the payload end and
 * place the digest there (or in the next segment if it lacks room). */
390 uint32_t offset = options->test_buffer_size;
391 struct rte_mbuf *buf, *tbuf;
393 if (options->out_of_place) {
396 tbuf = sym_op->m_src;
397 while ((tbuf->next != NULL) &&
398 (offset >= tbuf->data_len)) {
399 offset -= tbuf->data_len;
403 * If there is not enough room in segment,
404 * place the digest in the next segment
406 if ((tbuf->data_len - offset) < options->digest_sz) {
413 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
415 sym_op->auth.digest.phys_addr =
416 rte_pktmbuf_iova_offset(buf, offset);
419 if (options->imix_distribution_count) {
420 sym_op->auth.data.length =
421 options->imix_buffer_sizes[*imix_idx];
422 *imix_idx = (*imix_idx + 1) % options->pool_sz;
424 sym_op->auth.data.length = options->test_buffer_size;
/* SNOW3G/KASUMI/ZUC express the auth length in bits. */
426 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
427 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
428 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
429 sym_op->auth.data.length <<= 3;
431 sym_op->auth.data.offset = 0;
434 if (options->test == CPERF_TEST_TYPE_VERIFY) {
435 for (i = 0; i < nb_ops; i++) {
436 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
437 uint8_t *, iv_offset);
439 memcpy(iv_ptr, test_vector->cipher_iv.data,
440 test_vector->cipher_iv.length);
441 if (test_vector->auth_iv.length) {
443 * Copy IV after the crypto operation and
/* Auth IV is stored immediately after the cipher IV. */
446 iv_ptr += test_vector->cipher_iv.length;
447 memcpy(iv_ptr, test_vector->auth_iv.data,
448 test_vector->auth_iv.length);
/*
 * Populate a burst of AEAD ops. The AAD is stored in the op private area
 * immediately after the IV, 16-byte aligned. For DECRYPT the digest comes
 * from the test vector; otherwise it is placed after the AEAD data region
 * in the mbuf chain. In verify mode the IV and AAD are copied in, with the
 * AES-CCM layout quirks (nonce at IV+1, AAD at +18) honoured.
 */
458 cperf_set_ops_aead(struct rte_crypto_op **ops,
459 uint32_t src_buf_offset, uint32_t dst_buf_offset,
460 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
461 const struct cperf_options *options,
462 const struct cperf_test_vector *test_vector,
463 uint16_t iv_offset, uint32_t *imix_idx)
466 /* AAD is placed after the IV */
467 uint16_t aad_offset = iv_offset +
468 RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);
470 for (i = 0; i < nb_ops; i++) {
471 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
473 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
474 rte_crypto_op_attach_sym_session(ops[i], sess);
/* mbufs live in the same mempool element as the op, at fixed offsets. */
476 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
/* dst_buf_offset == 0 means in-place operation (no separate dst). */
479 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
480 if (dst_buf_offset == 0)
481 sym_op->m_dst = NULL;
483 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
486 /* AEAD parameters */
487 if (options->imix_distribution_count) {
488 sym_op->aead.data.length =
489 options->imix_buffer_sizes[*imix_idx];
490 *imix_idx = (*imix_idx + 1) % options->pool_sz;
492 sym_op->aead.data.length = options->test_buffer_size;
493 sym_op->aead.data.offset = 0;
495 sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
496 uint8_t *, aad_offset);
497 sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
/* DECRYPT verifies against the known tag from the test vector. */
500 if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
501 sym_op->aead.digest.data = test_vector->digest.data;
502 sym_op->aead.digest.phys_addr =
503 test_vector->digest.phys_addr;
/* ENCRYPT: place the digest after the AEAD data region, walking the
 * mbuf chain to the segment that contains that offset. */
506 uint32_t offset = sym_op->aead.data.length +
507 sym_op->aead.data.offset;
508 struct rte_mbuf *buf, *tbuf;
510 if (options->out_of_place) {
513 tbuf = sym_op->m_src;
514 while ((tbuf->next != NULL) &&
515 (offset >= tbuf->data_len)) {
516 offset -= tbuf->data_len;
520 * If there is not enough room in segment,
521 * place the digest in the next segment
523 if ((tbuf->data_len - offset) < options->digest_sz) {
530 sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
532 sym_op->aead.digest.phys_addr =
533 rte_pktmbuf_iova_offset(buf, offset);
537 if (options->test == CPERF_TEST_TYPE_VERIFY) {
538 for (i = 0; i < nb_ops; i++) {
539 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
540 uint8_t *, iv_offset);
543 * If doing AES-CCM, nonce is copied one byte
544 * after the start of IV field, and AAD is copied
545 * 18 bytes after the start of the AAD field.
547 if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
548 memcpy(iv_ptr + 1, test_vector->aead_iv.data,
549 test_vector->aead_iv.length);
551 memcpy(ops[i]->sym->aead.aad.data + 18,
552 test_vector->aad.data,
553 test_vector->aad.length);
555 memcpy(iv_ptr, test_vector->aead_iv.data,
556 test_vector->aead_iv.length);
558 memcpy(ops[i]->sym->aead.aad.data,
559 test_vector->aad.data,
560 test_vector->aad.length);
/*
 * Create the crypto session for the configured perf test type.
 *
 * Dispatches on options->op_type:
 *  - CPERF_ASYM_MODEX: creates/inits an asym modexp session (returned via
 *    the opaque sym-session pointer the framework uses for all types).
 *  - CPERF_PDCP / CPERF_DOCSIS (RTE_LIB_SECURITY): builds a cipher (and,
 *    for PDCP, optional auth) xform and creates an rte_security session on
 *    the device's security context.
 *  - Otherwise: builds cipher/auth/chained/AEAD xforms from the options
 *    and test vector and initializes a plain symmetric session.
 *
 * Returns the session pointer; the caller owns it and must free it with
 * the matching clear/free call for its type.
 */
568 static struct rte_cryptodev_sym_session *
569 cperf_create_session(struct rte_mempool *sess_mp,
570 struct rte_mempool *priv_mp,
572 const struct cperf_options *options,
573 const struct cperf_test_vector *test_vector,
576 struct rte_crypto_sym_xform cipher_xform;
577 struct rte_crypto_sym_xform auth_xform;
578 struct rte_crypto_sym_xform aead_xform;
579 struct rte_cryptodev_sym_session *sess = NULL;
580 struct rte_crypto_asym_xform xform = {0};
/* --- Asymmetric modexp session ------------------------------------ */
583 if (options->op_type == CPERF_ASYM_MODEX) {
585 xform.xform_type = RTE_CRYPTO_ASYM_XFORM_MODEX;
586 xform.modex.modulus.data = perf_mod_p;
587 xform.modex.modulus.length = sizeof(perf_mod_p);
588 xform.modex.exponent.data = perf_mod_e;
589 xform.modex.exponent.length = sizeof(perf_mod_e);
591 sess = (void *)rte_cryptodev_asym_session_create(sess_mp);
594 rc = rte_cryptodev_asym_session_init(dev_id, (void *)sess,
/* On init failure, tear the session back down before bailing out. */
598 rte_cryptodev_asym_session_clear(dev_id,
600 rte_cryptodev_asym_session_free((void *)sess);
606 #ifdef RTE_LIB_SECURITY
/* --- PDCP security session ---------------------------------------- */
610 if (options->op_type == CPERF_PDCP) {
611 /* Setup Cipher Parameters */
612 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
613 cipher_xform.next = NULL;
614 cipher_xform.cipher.algo = options->cipher_algo;
615 cipher_xform.cipher.op = options->cipher_op;
616 cipher_xform.cipher.iv.offset = iv_offset;
/* NOTE(review): PDCP IV length is hard-coded to 4 here rather than
 * taken from the test vector — presumably the PDCP count size;
 * confirm against the PMDs exercised. */
617 cipher_xform.cipher.iv.length = 4;
619 /* cipher different than null */
620 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
621 cipher_xform.cipher.key.data = test_vector->cipher_key.data;
622 cipher_xform.cipher.key.length = test_vector->cipher_key.length;
624 cipher_xform.cipher.key.data = NULL;
625 cipher_xform.cipher.key.length = 0;
628 /* Setup Auth Parameters */
629 if (options->auth_algo != 0) {
630 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
631 auth_xform.next = NULL;
632 auth_xform.auth.algo = options->auth_algo;
633 auth_xform.auth.op = options->auth_op;
/* Auth IV is laid out directly after the cipher IV. */
634 auth_xform.auth.iv.offset = iv_offset +
635 cipher_xform.cipher.iv.length;
637 /* auth different than null */
638 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
639 auth_xform.auth.digest_length = options->digest_sz;
640 auth_xform.auth.key.length = test_vector->auth_key.length;
641 auth_xform.auth.key.data = test_vector->auth_key.data;
642 auth_xform.auth.iv.length = test_vector->auth_iv.length;
644 auth_xform.auth.digest_length = 0;
645 auth_xform.auth.key.length = 0;
646 auth_xform.auth.key.data = NULL;
647 auth_xform.auth.iv.length = 0;
/* Chain auth after cipher when auth is configured. */
650 cipher_xform.next = &auth_xform;
652 cipher_xform.next = NULL;
655 struct rte_security_session_conf sess_conf = {
656 .action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
657 .protocol = RTE_SECURITY_PROTOCOL_PDCP,
660 .domain = options->pdcp_domain,
662 .sn_size = options->pdcp_sn_sz,
/* Session-level HFN only when enabled; otherwise it is supplied
 * per packet (see cperf_set_ops_security). */
663 .hfn = options->pdcp_ses_hfn_en ?
664 PDCP_DEFAULT_HFN : 0,
665 .hfn_threshold = 0x70C0A,
666 .hfn_ovrd = !(options->pdcp_ses_hfn_en),
668 .crypto_xform = &cipher_xform
671 struct rte_security_ctx *ctx = (struct rte_security_ctx *)
672 rte_cryptodev_get_sec_ctx(dev_id);
674 /* Create security session */
675 return (void *)rte_security_session_create(ctx,
676 &sess_conf, sess_mp, priv_mp);
/* --- DOCSIS security session -------------------------------------- */
678 if (options->op_type == CPERF_DOCSIS) {
679 enum rte_security_docsis_direction direction;
681 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
682 cipher_xform.next = NULL;
683 cipher_xform.cipher.algo = options->cipher_algo;
684 cipher_xform.cipher.op = options->cipher_op;
685 cipher_xform.cipher.iv.offset = iv_offset;
686 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
687 cipher_xform.cipher.key.data =
688 test_vector->cipher_key.data;
689 cipher_xform.cipher.key.length =
690 test_vector->cipher_key.length;
691 cipher_xform.cipher.iv.length =
692 test_vector->cipher_iv.length;
694 cipher_xform.cipher.key.data = NULL;
695 cipher_xform.cipher.key.length = 0;
696 cipher_xform.cipher.iv.length = 0;
698 cipher_xform.next = NULL;
/* DOCSIS: encryption is the downlink direction, decryption uplink. */
700 if (options->cipher_op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
701 direction = RTE_SECURITY_DOCSIS_DOWNLINK;
703 direction = RTE_SECURITY_DOCSIS_UPLINK;
705 struct rte_security_session_conf sess_conf = {
707 RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
708 .protocol = RTE_SECURITY_PROTOCOL_DOCSIS,
710 .direction = direction,
712 .crypto_xform = &cipher_xform
714 struct rte_security_ctx *ctx = (struct rte_security_ctx *)
715 rte_cryptodev_get_sec_ctx(dev_id);
717 /* Create security session */
718 return (void *)rte_security_session_create(ctx,
719 &sess_conf, sess_mp, priv_mp);
/* --- Plain symmetric sessions ------------------------------------- */
722 sess = rte_cryptodev_sym_session_create(sess_mp);
/* Cipher-only. */
726 if (options->op_type == CPERF_CIPHER_ONLY) {
727 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
728 cipher_xform.next = NULL;
729 cipher_xform.cipher.algo = options->cipher_algo;
730 cipher_xform.cipher.op = options->cipher_op;
731 cipher_xform.cipher.iv.offset = iv_offset;
733 /* cipher different than null */
734 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
735 cipher_xform.cipher.key.data =
736 test_vector->cipher_key.data;
737 cipher_xform.cipher.key.length =
738 test_vector->cipher_key.length;
739 cipher_xform.cipher.iv.length =
740 test_vector->cipher_iv.length;
742 cipher_xform.cipher.key.data = NULL;
743 cipher_xform.cipher.key.length = 0;
744 cipher_xform.cipher.iv.length = 0;
746 /* create crypto session */
747 rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
/* Auth-only. */
752 } else if (options->op_type == CPERF_AUTH_ONLY) {
753 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
754 auth_xform.next = NULL;
755 auth_xform.auth.algo = options->auth_algo;
756 auth_xform.auth.op = options->auth_op;
757 auth_xform.auth.iv.offset = iv_offset;
759 /* auth different than null */
760 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
761 auth_xform.auth.digest_length =
763 auth_xform.auth.key.length =
764 test_vector->auth_key.length;
765 auth_xform.auth.key.data = test_vector->auth_key.data;
766 auth_xform.auth.iv.length =
767 test_vector->auth_iv.length;
769 auth_xform.auth.digest_length = 0;
770 auth_xform.auth.key.length = 0;
771 auth_xform.auth.key.data = NULL;
772 auth_xform.auth.iv.length = 0;
774 /* create crypto session */
775 rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
/* Chained cipher+auth (either order). */
780 } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
781 || options->op_type == CPERF_AUTH_THEN_CIPHER) {
785 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
786 cipher_xform.next = NULL;
787 cipher_xform.cipher.algo = options->cipher_algo;
788 cipher_xform.cipher.op = options->cipher_op;
789 cipher_xform.cipher.iv.offset = iv_offset;
791 /* cipher different than null */
792 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
793 cipher_xform.cipher.key.data =
794 test_vector->cipher_key.data;
795 cipher_xform.cipher.key.length =
796 test_vector->cipher_key.length;
797 cipher_xform.cipher.iv.length =
798 test_vector->cipher_iv.length;
800 cipher_xform.cipher.key.data = NULL;
801 cipher_xform.cipher.key.length = 0;
802 cipher_xform.cipher.iv.length = 0;
808 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
809 auth_xform.next = NULL;
810 auth_xform.auth.algo = options->auth_algo;
811 auth_xform.auth.op = options->auth_op;
/* Auth IV follows the cipher IV in the op private area. */
812 auth_xform.auth.iv.offset = iv_offset +
813 cipher_xform.cipher.iv.length;
815 /* auth different than null */
816 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
817 auth_xform.auth.digest_length = options->digest_sz;
818 auth_xform.auth.iv.length = test_vector->auth_iv.length;
819 auth_xform.auth.key.length =
820 test_vector->auth_key.length;
821 auth_xform.auth.key.data =
822 test_vector->auth_key.data;
824 auth_xform.auth.digest_length = 0;
825 auth_xform.auth.key.length = 0;
826 auth_xform.auth.key.data = NULL;
827 auth_xform.auth.iv.length = 0;
830 /* cipher then auth */
831 if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
832 cipher_xform.next = &auth_xform;
833 /* create crypto session */
834 rte_cryptodev_sym_session_init(dev_id,
835 sess, &cipher_xform, priv_mp);
836 } else { /* auth then cipher */
837 auth_xform.next = &cipher_xform;
838 /* create crypto session */
839 rte_cryptodev_sym_session_init(dev_id,
840 sess, &auth_xform, priv_mp);
/* AEAD (the only remaining op type at this point). */
842 } else { /* options->op_type == CPERF_AEAD */
843 aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
844 aead_xform.next = NULL;
845 aead_xform.aead.algo = options->aead_algo;
846 aead_xform.aead.op = options->aead_op;
847 aead_xform.aead.iv.offset = iv_offset;
849 aead_xform.aead.key.data =
850 test_vector->aead_key.data;
851 aead_xform.aead.key.length =
852 test_vector->aead_key.length;
853 aead_xform.aead.iv.length = test_vector->aead_iv.length;
855 aead_xform.aead.digest_length = options->digest_sz;
856 aead_xform.aead.aad_length =
857 options->aead_aad_sz;
859 /* Create crypto session */
860 rte_cryptodev_sym_session_init(dev_id,
861 sess, &aead_xform, priv_mp);
868 cperf_get_op_functions(const struct cperf_options *options,
869 struct cperf_op_fns *op_fns)
871 memset(op_fns, 0, sizeof(struct cperf_op_fns));
873 op_fns->sess_create = cperf_create_session;
875 if (options->op_type == CPERF_ASYM_MODEX) {
876 op_fns->populate_ops = cperf_set_ops_asym;
880 if (options->op_type == CPERF_AEAD) {
881 op_fns->populate_ops = cperf_set_ops_aead;
885 if (options->op_type == CPERF_AUTH_THEN_CIPHER
886 || options->op_type == CPERF_CIPHER_THEN_AUTH) {
887 op_fns->populate_ops = cperf_set_ops_cipher_auth;
890 if (options->op_type == CPERF_AUTH_ONLY) {
891 if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
892 op_fns->populate_ops = cperf_set_ops_null_auth;
894 op_fns->populate_ops = cperf_set_ops_auth;
897 if (options->op_type == CPERF_CIPHER_ONLY) {
898 if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
899 op_fns->populate_ops = cperf_set_ops_null_cipher;
901 op_fns->populate_ops = cperf_set_ops_cipher;
904 #ifdef RTE_LIB_SECURITY
905 if (options->op_type == CPERF_PDCP) {
906 op_fns->populate_ops = cperf_set_ops_security;
909 if (options->op_type == CPERF_DOCSIS) {
910 op_fns->populate_ops = cperf_set_ops_security;