1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2016-2017 Intel Corporation
5 #include <rte_cryptodev.h>
10 #include "cperf_test_vectors.h"
/*
 * cperf_set_ops_asym() - populate a burst of asymmetric (modular
 * exponentiation) crypto operations and attach the asym session.
 *
 * The framework hands every populate callback a sym-session pointer;
 * for modexp it is reinterpreted as an asym session.  Buffer offsets,
 * options, test vector, IV offset, imix index and tsc accounting are
 * all unused for this op type (marked __rte_unused).
 * NOTE(review): several original lines are not visible in this view;
 * comments describe only the visible statements.
 */
13 cperf_set_ops_asym(struct rte_crypto_op **ops,
14 uint32_t src_buf_offset __rte_unused,
15 uint32_t dst_buf_offset __rte_unused, uint16_t nb_ops,
16 struct rte_cryptodev_sym_session *sess,
17 const struct cperf_options *options __rte_unused,
18 const struct cperf_test_vector *test_vector __rte_unused,
19 uint16_t iv_offset __rte_unused,
20 uint32_t *imix_idx __rte_unused,
21 uint64_t *tsc_start __rte_unused)
/* Reinterpret the generic session pointer as an asymmetric session. */
24 struct rte_cryptodev_asym_session *asym_sess = (void *)sess;
26 for (i = 0; i < nb_ops; i++) {
27 struct rte_crypto_asym_op *asym_op = ops[i]->asym;
29 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/* Every op points at the same static perf test data buffers. */
30 asym_op->modex.base.data = perf_base;
31 asym_op->modex.base.length = sizeof(perf_base);
32 asym_op->modex.result.data = perf_mod_result;
33 asym_op->modex.result.length = sizeof(perf_mod_result);
34 rte_crypto_op_attach_asym_session(ops[i], asym_sess);
39 #ifdef RTE_LIB_SECURITY
/*
 * test_ipsec_vec_populate() - for outbound (encrypt) IPsec flows, copy
 * the plaintext test vector into the mbuf and fix up the IPv4 header's
 * total_length so it matches the mbuf payload size.
 * NOTE(review): assumes test_vector->plaintext.data holds at least
 * m->data_len bytes — confirm against vector initialization.
 */
41 test_ipsec_vec_populate(struct rte_mbuf *m, const struct cperf_options *options,
42 const struct cperf_test_vector *test_vector)
44 struct rte_ipv4_hdr *ip = rte_pktmbuf_mtod(m, struct rte_ipv4_hdr *);
/* Only encrypt direction needs the plaintext payload staged. */
46 if ((options->aead_op == RTE_CRYPTO_AEAD_OP_ENCRYPT) ||
47 (options->cipher_op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)) {
48 memcpy(ip, test_vector->plaintext.data, m->data_len);
/* total_length is big-endian on the wire. */
50 ip->total_length = rte_cpu_to_be_16(m->data_len);
/*
 * cperf_set_ops_security() - populate a burst of rte_security
 * (PDCP / IPsec / DOCSIS lookaside) operations.
 *
 * For each op: attach the security session, locate m_src relative to
 * the op structure, then apply per-protocol mbuf and offset setup.
 * For IPsec throughput tests the time spent repopulating the packet
 * vector is measured and added to *tsc_start so it can be excluded
 * from the throughput figures.
 * NOTE(review): several lines (else branches, closing braces, some
 * declarations) are not visible in this view.
 */
55 cperf_set_ops_security(struct rte_crypto_op **ops,
56 uint32_t src_buf_offset __rte_unused,
57 uint32_t dst_buf_offset __rte_unused,
58 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
59 const struct cperf_options *options,
60 const struct cperf_test_vector *test_vector,
61 uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
64 uint64_t tsc_start_temp, tsc_end_temp;
67 for (i = 0; i < nb_ops; i++) {
68 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
/* Generic session pointer reinterpreted as a security session. */
69 struct rte_security_session *sec_sess =
70 (struct rte_security_session *)sess;
/* Per-packet HFN lives in the op's IV area (PDCP HFN override). */
73 uint32_t *per_pkt_hfn = rte_crypto_op_ctod_offset(ops[i],
74 uint32_t *, iv_offset);
75 *per_pkt_hfn = options->pdcp_ses_hfn_en ? 0 : PDCP_DEFAULT_HFN;
77 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
78 rte_security_attach_session(ops[i], sec_sess);
/* m_src is laid out at a fixed offset from the op itself. */
79 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
82 if (options->op_type == CPERF_PDCP ||
83 options->op_type == CPERF_IPSEC) {
84 /* In case of IPsec, headroom is consumed by PMD,
87 sym_op->m_src->data_off = options->headroom_sz;
89 sym_op->m_src->buf_len = options->segment_sz;
90 sym_op->m_src->data_len = options->test_buffer_size;
91 sym_op->m_src->pkt_len = sym_op->m_src->data_len;
/* Repopulate the packet each burst; exclude the cost from tput. */
93 if ((options->op_type == CPERF_IPSEC) &&
94 (options->test_file == NULL) &&
95 (options->test == CPERF_TEST_TYPE_THROUGHPUT)) {
96 tsc_start_temp = rte_rdtsc_precise();
97 test_ipsec_vec_populate(sym_op->m_src, options,
99 tsc_end_temp = rte_rdtsc_precise();
101 *tsc_start += (tsc_end_temp - tsc_start_temp);
105 if (options->op_type == CPERF_DOCSIS) {
/* imix: cycle through the configured buffer-size distribution. */
106 if (options->imix_distribution_count) {
107 buf_sz = options->imix_buffer_sizes[*imix_idx];
108 *imix_idx = (*imix_idx + 1) % options->pool_sz;
110 buf_sz = options->test_buffer_size;
112 sym_op->m_src->buf_len = options->segment_sz;
113 sym_op->m_src->data_len = buf_sz;
114 sym_op->m_src->pkt_len = buf_sz;
116 /* DOCSIS header is not CRC'ed */
117 sym_op->auth.data.offset = options->docsis_hdr_sz;
118 sym_op->auth.data.length = buf_sz -
119 sym_op->auth.data.offset - RTE_ETHER_CRC_LEN;
121 * DOCSIS header and SRC and DST MAC addresses are not
124 sym_op->cipher.data.offset = sym_op->auth.data.offset +
125 RTE_ETHER_HDR_LEN - RTE_ETHER_TYPE_LEN;
126 sym_op->cipher.data.length = buf_sz -
127 sym_op->cipher.data.offset;
130 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
131 if (dst_buf_offset == 0)
132 sym_op->m_dst = NULL;
134 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
/*
 * cperf_set_ops_null_cipher() - populate a burst of NULL-cipher ops:
 * attach the sym session, wire up m_src/m_dst at fixed offsets from
 * each op, and set cipher data length from either the imix
 * distribution or the fixed test buffer size.  No IV or key material
 * is needed for the NULL algorithm.
 */
143 cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
144 uint32_t src_buf_offset, uint32_t dst_buf_offset,
145 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
146 const struct cperf_options *options,
147 const struct cperf_test_vector *test_vector __rte_unused,
148 uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
149 uint64_t *tsc_start __rte_unused)
153 for (i = 0; i < nb_ops; i++) {
154 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
156 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
157 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf is co-located with the op at src_buf_offset. */
159 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
162 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
163 if (dst_buf_offset == 0)
164 sym_op->m_dst = NULL;
166 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
169 /* cipher parameters */
170 if (options->imix_distribution_count) {
171 sym_op->cipher.data.length =
172 options->imix_buffer_sizes[*imix_idx];
173 *imix_idx = (*imix_idx + 1) % options->pool_sz;
175 sym_op->cipher.data.length = options->test_buffer_size;
176 sym_op->cipher.data.offset = 0;
/*
 * cperf_set_ops_null_auth() - populate a burst of NULL-auth ops.
 * Mirrors cperf_set_ops_null_cipher() but fills the auth data
 * offset/length instead of the cipher fields; no digest or key
 * material is needed for the NULL algorithm.
 */
183 cperf_set_ops_null_auth(struct rte_crypto_op **ops,
184 uint32_t src_buf_offset, uint32_t dst_buf_offset,
185 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
186 const struct cperf_options *options,
187 const struct cperf_test_vector *test_vector __rte_unused,
188 uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
189 uint64_t *tsc_start __rte_unused)
193 for (i = 0; i < nb_ops; i++) {
194 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
196 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
197 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf is co-located with the op at src_buf_offset. */
199 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
202 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
203 if (dst_buf_offset == 0)
204 sym_op->m_dst = NULL;
206 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
209 /* auth parameters */
210 if (options->imix_distribution_count) {
211 sym_op->auth.data.length =
212 options->imix_buffer_sizes[*imix_idx];
213 *imix_idx = (*imix_idx + 1) % options->pool_sz;
215 sym_op->auth.data.length = options->test_buffer_size;
216 sym_op->auth.data.offset = 0;
/*
 * cperf_set_ops_cipher() - populate a burst of cipher-only ops:
 * attach the session, wire up m_src/m_dst, set cipher data
 * offset/length (converted to bits for SNOW3G/KASUMI/ZUC, whose
 * APIs express lengths in bits), and for verify-mode tests copy the
 * cipher IV from the test vector into each op's IV area.
 */
223 cperf_set_ops_cipher(struct rte_crypto_op **ops,
224 uint32_t src_buf_offset, uint32_t dst_buf_offset,
225 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
226 const struct cperf_options *options,
227 const struct cperf_test_vector *test_vector,
228 uint16_t iv_offset, uint32_t *imix_idx,
229 uint64_t *tsc_start __rte_unused)
233 for (i = 0; i < nb_ops; i++) {
234 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
236 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
237 rte_crypto_op_attach_sym_session(ops[i], sess);
239 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
242 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
243 if (dst_buf_offset == 0)
244 sym_op->m_dst = NULL;
246 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
249 /* cipher parameters */
250 if (options->imix_distribution_count) {
251 sym_op->cipher.data.length =
252 options->imix_buffer_sizes[*imix_idx];
253 *imix_idx = (*imix_idx + 1) % options->pool_sz;
255 sym_op->cipher.data.length = options->test_buffer_size;
/* Wireless algos (SNOW3G/KASUMI/ZUC) take lengths in bits. */
257 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
258 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
259 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
260 sym_op->cipher.data.length <<= 3;
262 sym_op->cipher.data.offset = 0;
/* Verify tests need the known IV so output can be checked. */
265 if (options->test == CPERF_TEST_TYPE_VERIFY) {
266 for (i = 0; i < nb_ops; i++) {
267 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
268 uint8_t *, iv_offset);
270 memcpy(iv_ptr, test_vector->cipher_iv.data,
271 test_vector->cipher_iv.length);
/*
 * cperf_set_ops_auth() - populate a burst of auth-only ops.
 *
 * For VERIFY-direction auth the digest comes from the test vector;
 * for GENERATE the digest is placed just past the payload in the
 * mbuf chain, walking segments so the digest never straddles a
 * segment boundary.  Auth data length is converted to bits for
 * SNOW3G/KASUMI/ZUC.  In verify-mode tests the auth IV (if any) is
 * copied from the test vector into each op.
 * NOTE(review): several lines (else branches, segment-walk interior,
 * closing braces) are not visible in this view.
 */
280 cperf_set_ops_auth(struct rte_crypto_op **ops,
281 uint32_t src_buf_offset, uint32_t dst_buf_offset,
282 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
283 const struct cperf_options *options,
284 const struct cperf_test_vector *test_vector,
285 uint16_t iv_offset, uint32_t *imix_idx,
286 uint64_t *tsc_start __rte_unused)
290 for (i = 0; i < nb_ops; i++) {
291 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
293 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
294 rte_crypto_op_attach_sym_session(ops[i], sess);
296 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
299 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
300 if (dst_buf_offset == 0)
301 sym_op->m_dst = NULL;
303 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
/* Algorithms with an auth IV get it staged in the op's IV area. */
306 if (test_vector->auth_iv.length) {
307 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
310 memcpy(iv_ptr, test_vector->auth_iv.data,
311 test_vector->auth_iv.length);
314 /* authentication parameters */
315 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
/* Verify direction: digest is the known-good reference. */
316 sym_op->auth.digest.data = test_vector->digest.data;
317 sym_op->auth.digest.phys_addr =
318 test_vector->digest.phys_addr;
/* Generate direction: place digest right after the payload. */
321 uint32_t offset = options->test_buffer_size;
322 struct rte_mbuf *buf, *tbuf;
324 if (options->out_of_place) {
/* Walk the segment chain to the segment holding 'offset'. */
327 tbuf = sym_op->m_src;
328 while ((tbuf->next != NULL) &&
329 (offset >= tbuf->data_len)) {
330 offset -= tbuf->data_len;
334 * If there is not enough room in segment,
335 * place the digest in the next segment
337 if ((tbuf->data_len - offset) < options->digest_sz) {
344 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
346 sym_op->auth.digest.phys_addr =
347 rte_pktmbuf_iova_offset(buf, offset);
351 if (options->imix_distribution_count) {
352 sym_op->auth.data.length =
353 options->imix_buffer_sizes[*imix_idx];
354 *imix_idx = (*imix_idx + 1) % options->pool_sz;
356 sym_op->auth.data.length = options->test_buffer_size;
/* Wireless algos (SNOW3G/KASUMI/ZUC) take lengths in bits. */
358 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
359 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
360 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
361 sym_op->auth.data.length <<= 3;
363 sym_op->auth.data.offset = 0;
/* Verify tests restage the known auth IV into every op. */
366 if (options->test == CPERF_TEST_TYPE_VERIFY) {
367 if (test_vector->auth_iv.length) {
368 for (i = 0; i < nb_ops; i++) {
369 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
370 uint8_t *, iv_offset);
372 memcpy(iv_ptr, test_vector->auth_iv.data,
373 test_vector->auth_iv.length);
/*
 * cperf_set_ops_cipher_auth() - populate a burst of chained
 * cipher+auth ops.  Combines the cipher setup of
 * cperf_set_ops_cipher() with the digest placement logic of
 * cperf_set_ops_auth().  In verify-mode tests the cipher IV is copied
 * into each op, with the auth IV (if any) placed immediately after it.
 * NOTE(review): several lines (else branches, segment-walk interior,
 * closing braces) are not visible in this view.
 */
381 cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
382 uint32_t src_buf_offset, uint32_t dst_buf_offset,
383 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
384 const struct cperf_options *options,
385 const struct cperf_test_vector *test_vector,
386 uint16_t iv_offset, uint32_t *imix_idx,
387 uint64_t *tsc_start __rte_unused)
391 for (i = 0; i < nb_ops; i++) {
392 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
394 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
395 rte_crypto_op_attach_sym_session(ops[i], sess);
397 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
400 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
401 if (dst_buf_offset == 0)
402 sym_op->m_dst = NULL;
404 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
407 /* cipher parameters */
408 if (options->imix_distribution_count) {
409 sym_op->cipher.data.length =
410 options->imix_buffer_sizes[*imix_idx];
411 *imix_idx = (*imix_idx + 1) % options->pool_sz;
413 sym_op->cipher.data.length = options->test_buffer_size;
/* Wireless algos (SNOW3G/KASUMI/ZUC) take lengths in bits. */
415 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
416 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
417 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
418 sym_op->cipher.data.length <<= 3;
420 sym_op->cipher.data.offset = 0;
422 /* authentication parameters */
423 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
/* Verify direction: digest is the known-good reference. */
424 sym_op->auth.digest.data = test_vector->digest.data;
425 sym_op->auth.digest.phys_addr =
426 test_vector->digest.phys_addr;
/* Generate direction: place digest right after the payload. */
429 uint32_t offset = options->test_buffer_size;
430 struct rte_mbuf *buf, *tbuf;
432 if (options->out_of_place) {
/* Walk the segment chain to the segment holding 'offset'. */
435 tbuf = sym_op->m_src;
436 while ((tbuf->next != NULL) &&
437 (offset >= tbuf->data_len)) {
438 offset -= tbuf->data_len;
442 * If there is not enough room in segment,
443 * place the digest in the next segment
445 if ((tbuf->data_len - offset) < options->digest_sz) {
452 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
454 sym_op->auth.digest.phys_addr =
455 rte_pktmbuf_iova_offset(buf, offset);
458 if (options->imix_distribution_count) {
459 sym_op->auth.data.length =
460 options->imix_buffer_sizes[*imix_idx];
461 *imix_idx = (*imix_idx + 1) % options->pool_sz;
463 sym_op->auth.data.length = options->test_buffer_size;
/* Wireless algos (SNOW3G/KASUMI/ZUC) take lengths in bits. */
465 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
466 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
467 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
468 sym_op->auth.data.length <<= 3;
470 sym_op->auth.data.offset = 0;
/* Verify tests: stage cipher IV, then auth IV right after it. */
473 if (options->test == CPERF_TEST_TYPE_VERIFY) {
474 for (i = 0; i < nb_ops; i++) {
475 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
476 uint8_t *, iv_offset);
478 memcpy(iv_ptr, test_vector->cipher_iv.data,
479 test_vector->cipher_iv.length);
480 if (test_vector->auth_iv.length) {
482 * Copy IV after the crypto operation and
485 iv_ptr += test_vector->cipher_iv.length;
486 memcpy(iv_ptr, test_vector->auth_iv.data,
487 test_vector->auth_iv.length);
/*
 * cperf_set_ops_aead() - populate a burst of AEAD ops.
 *
 * AAD is stored in the op's private area right after the IV, aligned
 * to 16 bytes.  For DECRYPT the digest comes from the test vector;
 * for ENCRYPT it is placed just past the AEAD data in the mbuf chain
 * (walking segments so it never straddles a boundary).  Verify-mode
 * tests stage the IV and AAD per op, with special offsets for AES-CCM
 * (nonce at IV+1, AAD at AAD+18 per the CCM formatting rules).
 * NOTE(review): several lines (else branches, segment-walk interior,
 * closing braces) are not visible in this view.
 */
497 cperf_set_ops_aead(struct rte_crypto_op **ops,
498 uint32_t src_buf_offset, uint32_t dst_buf_offset,
499 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
500 const struct cperf_options *options,
501 const struct cperf_test_vector *test_vector,
502 uint16_t iv_offset, uint32_t *imix_idx,
503 uint64_t *tsc_start __rte_unused)
506 /* AAD is placed after the IV */
507 uint16_t aad_offset = iv_offset +
508 RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);
510 for (i = 0; i < nb_ops; i++) {
511 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
513 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
514 rte_crypto_op_attach_sym_session(ops[i], sess);
516 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
519 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
520 if (dst_buf_offset == 0)
521 sym_op->m_dst = NULL;
523 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
526 /* AEAD parameters */
527 if (options->imix_distribution_count) {
528 sym_op->aead.data.length =
529 options->imix_buffer_sizes[*imix_idx];
530 *imix_idx = (*imix_idx + 1) % options->pool_sz;
532 sym_op->aead.data.length = options->test_buffer_size;
533 sym_op->aead.data.offset = 0;
/* AAD virtual and physical addresses both point into the op. */
535 sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
536 uint8_t *, aad_offset);
537 sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
540 if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
/* Decrypt direction: digest is the known-good reference. */
541 sym_op->aead.digest.data = test_vector->digest.data;
542 sym_op->aead.digest.phys_addr =
543 test_vector->digest.phys_addr;
/* Encrypt direction: place digest right after the AEAD data. */
546 uint32_t offset = sym_op->aead.data.length +
547 sym_op->aead.data.offset;
548 struct rte_mbuf *buf, *tbuf;
550 if (options->out_of_place) {
/* Walk the segment chain to the segment holding 'offset'. */
553 tbuf = sym_op->m_src;
554 while ((tbuf->next != NULL) &&
555 (offset >= tbuf->data_len)) {
556 offset -= tbuf->data_len;
560 * If there is not enough room in segment,
561 * place the digest in the next segment
563 if ((tbuf->data_len - offset) < options->digest_sz) {
570 sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
572 sym_op->aead.digest.phys_addr =
573 rte_pktmbuf_iova_offset(buf, offset);
577 if (options->test == CPERF_TEST_TYPE_VERIFY) {
578 for (i = 0; i < nb_ops; i++) {
579 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
580 uint8_t *, iv_offset);
583 * If doing AES-CCM, nonce is copied one byte
584 * after the start of IV field, and AAD is copied
585 * 18 bytes after the start of the AAD field.
587 if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
588 memcpy(iv_ptr + 1, test_vector->aead_iv.data,
589 test_vector->aead_iv.length);
591 memcpy(ops[i]->sym->aead.aad.data + 18,
592 test_vector->aad.data,
593 test_vector->aad.length);
595 memcpy(iv_ptr, test_vector->aead_iv.data,
596 test_vector->aead_iv.length);
598 memcpy(ops[i]->sym->aead.aad.data,
599 test_vector->aad.data,
600 test_vector->aad.length);
/*
 * create_ipsec_session() - build an rte_security lookaside-protocol
 * IPsec (ESP, IPv4 tunnel) session for the perf test.
 *
 * Crypto transform selection: AEAD if an AEAD algo is configured,
 * otherwise a cipher xform chained to an auth xform (NULL algos get
 * zeroed key/IV/digest fields).  SA direction is egress when the
 * configured ops are encrypt/generate, ingress otherwise.  The SPI is
 * the lcore id so each core gets a distinct session.  The returned
 * security session is cast to the generic sym-session pointer type
 * used by the framework.
 * NOTE(review): some lines (struct nesting, closing braces, return
 * paths) are not visible in this view.
 */
608 static struct rte_cryptodev_sym_session *
609 create_ipsec_session(struct rte_mempool *sess_mp,
610 struct rte_mempool *priv_mp,
612 const struct cperf_options *options,
613 const struct cperf_test_vector *test_vector,
616 struct rte_crypto_sym_xform xform = {0};
617 struct rte_crypto_sym_xform auth_xform = {0};
/* Prefer AEAD when configured; else require cipher + auth pair. */
619 if (options->aead_algo != 0) {
620 /* Setup AEAD Parameters */
621 xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
623 xform.aead.algo = options->aead_algo;
624 xform.aead.op = options->aead_op;
625 xform.aead.iv.offset = iv_offset;
626 xform.aead.key.data = test_vector->aead_key.data;
627 xform.aead.key.length = test_vector->aead_key.length;
628 xform.aead.iv.length = test_vector->aead_iv.length;
629 xform.aead.digest_length = options->digest_sz;
630 xform.aead.aad_length = options->aead_aad_sz;
631 } else if (options->cipher_algo != 0 && options->auth_algo != 0) {
632 /* Setup Cipher Parameters */
633 xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
635 xform.cipher.algo = options->cipher_algo;
636 xform.cipher.op = options->cipher_op;
637 xform.cipher.iv.offset = iv_offset;
638 xform.cipher.iv.length = test_vector->cipher_iv.length;
639 /* cipher different than null */
640 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
641 xform.cipher.key.data = test_vector->cipher_key.data;
642 xform.cipher.key.length =
643 test_vector->cipher_key.length;
645 xform.cipher.key.data = NULL;
646 xform.cipher.key.length = 0;
649 /* Setup Auth Parameters */
650 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
651 auth_xform.next = NULL;
652 auth_xform.auth.algo = options->auth_algo;
653 auth_xform.auth.op = options->auth_op;
/* Auth IV lives directly after the cipher IV in the op. */
654 auth_xform.auth.iv.offset = iv_offset +
655 xform.cipher.iv.length;
656 /* auth different than null */
657 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
658 auth_xform.auth.digest_length = options->digest_sz;
659 auth_xform.auth.key.length =
660 test_vector->auth_key.length;
661 auth_xform.auth.key.data = test_vector->auth_key.data;
662 auth_xform.auth.iv.length = test_vector->auth_iv.length;
664 auth_xform.auth.digest_length = 0;
665 auth_xform.auth.key.length = 0;
666 auth_xform.auth.key.data = NULL;
667 auth_xform.auth.iv.length = 0;
/* Chain: cipher -> auth. */
670 xform.next = &auth_xform;
675 #define CPERF_IPSEC_SRC_IP 0x01010101
676 #define CPERF_IPSEC_DST_IP 0x02020202
677 #define CPERF_IPSEC_SALT 0x0
678 #define CPERF_IPSEC_DEFTTL 64
679 struct rte_security_ipsec_tunnel_param tunnel = {
680 .type = RTE_SECURITY_IPSEC_TUNNEL_IPV4,
682 .src_ip = { .s_addr = CPERF_IPSEC_SRC_IP},
683 .dst_ip = { .s_addr = CPERF_IPSEC_DST_IP},
686 .ttl = CPERF_IPSEC_DEFTTL,
689 struct rte_security_session_conf sess_conf = {
690 .action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
691 .protocol = RTE_SECURITY_PROTOCOL_IPSEC,
693 .spi = rte_lcore_id(),
694 /**< For testing sake, lcore_id is taken as SPI so that
695 * for every core a different session is created.
697 .salt = CPERF_IPSEC_SALT,
/* Egress SA for encrypt/generate configs, ingress otherwise. */
701 ((options->cipher_op ==
702 RTE_CRYPTO_CIPHER_OP_ENCRYPT) &&
704 RTE_CRYPTO_AUTH_OP_GENERATE)) ||
706 RTE_CRYPTO_AEAD_OP_ENCRYPT) ?
707 RTE_SECURITY_IPSEC_SA_DIR_EGRESS :
708 RTE_SECURITY_IPSEC_SA_DIR_INGRESS,
709 .proto = RTE_SECURITY_IPSEC_SA_PROTO_ESP,
710 .mode = RTE_SECURITY_IPSEC_SA_MODE_TUNNEL,
714 .crypto_xform = &xform
717 struct rte_security_ctx *ctx = (struct rte_security_ctx *)
718 rte_cryptodev_get_sec_ctx(dev_id);
720 /* Create security session */
721 return (void *)rte_security_session_create(ctx,
722 &sess_conf, sess_mp, priv_mp);
/*
 * cperf_create_session() - create the crypto/security session matching
 * options->op_type.
 *
 * Dispatch (visible branches):
 *   - CPERF_ASYM_MODEX: asym modexp session (cast to sym pointer).
 *   - CPERF_PDCP / CPERF_IPSEC / CPERF_DOCSIS (RTE_LIB_SECURITY):
 *     rte_security lookaside-protocol sessions.
 *   - CPERF_CIPHER_ONLY / CPERF_AUTH_ONLY / CIPHER_THEN_AUTH /
 *     AUTH_THEN_CIPHER / AEAD: plain symmetric sessions built from
 *     the option- and test-vector-derived transforms.
 * NOTE(review): many lines (error handling, some else branches,
 * closing braces, return statements) are not visible in this view;
 * comments cover only the visible statements.
 */
725 static struct rte_cryptodev_sym_session *
726 cperf_create_session(struct rte_mempool *sess_mp,
727 struct rte_mempool *priv_mp,
729 const struct cperf_options *options,
730 const struct cperf_test_vector *test_vector,
733 struct rte_crypto_sym_xform cipher_xform;
734 struct rte_crypto_sym_xform auth_xform;
735 struct rte_crypto_sym_xform aead_xform;
736 struct rte_cryptodev_sym_session *sess = NULL;
737 struct rte_crypto_asym_xform xform = {0};
/* Asymmetric modexp: modulus/exponent come from static perf data. */
740 if (options->op_type == CPERF_ASYM_MODEX) {
742 xform.xform_type = RTE_CRYPTO_ASYM_XFORM_MODEX;
743 xform.modex.modulus.data = perf_mod_p;
744 xform.modex.modulus.length = sizeof(perf_mod_p);
745 xform.modex.exponent.data = perf_mod_e;
746 xform.modex.exponent.length = sizeof(perf_mod_e);
748 sess = (void *)rte_cryptodev_asym_session_create(sess_mp);
751 rc = rte_cryptodev_asym_session_init(dev_id, (void *)sess,
/* On init failure, tear the session back down. */
755 rte_cryptodev_asym_session_clear(dev_id,
757 rte_cryptodev_asym_session_free((void *)sess);
763 #ifdef RTE_LIB_SECURITY
767 if (options->op_type == CPERF_PDCP) {
768 /* Setup Cipher Parameters */
769 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
770 cipher_xform.next = NULL;
771 cipher_xform.cipher.algo = options->cipher_algo;
772 cipher_xform.cipher.op = options->cipher_op;
773 cipher_xform.cipher.iv.offset = iv_offset;
/* PDCP count (IV) is 4 bytes. */
774 cipher_xform.cipher.iv.length = 4;
776 /* cipher different than null */
777 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
778 cipher_xform.cipher.key.data = test_vector->cipher_key.data;
779 cipher_xform.cipher.key.length = test_vector->cipher_key.length;
781 cipher_xform.cipher.key.data = NULL;
782 cipher_xform.cipher.key.length = 0;
785 /* Setup Auth Parameters */
786 if (options->auth_algo != 0) {
787 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
788 auth_xform.next = NULL;
789 auth_xform.auth.algo = options->auth_algo;
790 auth_xform.auth.op = options->auth_op;
/* Auth IV follows the cipher IV in the op's private area. */
791 auth_xform.auth.iv.offset = iv_offset +
792 cipher_xform.cipher.iv.length;
794 /* auth different than null */
795 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
796 auth_xform.auth.digest_length = options->digest_sz;
797 auth_xform.auth.key.length = test_vector->auth_key.length;
798 auth_xform.auth.key.data = test_vector->auth_key.data;
799 auth_xform.auth.iv.length = test_vector->auth_iv.length;
801 auth_xform.auth.digest_length = 0;
802 auth_xform.auth.key.length = 0;
803 auth_xform.auth.key.data = NULL;
804 auth_xform.auth.iv.length = 0;
807 cipher_xform.next = &auth_xform;
809 cipher_xform.next = NULL;
812 struct rte_security_session_conf sess_conf = {
813 .action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
814 .protocol = RTE_SECURITY_PROTOCOL_PDCP,
817 .domain = options->pdcp_domain,
819 .sn_size = options->pdcp_sn_sz,
/* Session-level HFN when enabled; else per-packet override. */
820 .hfn = options->pdcp_ses_hfn_en ?
821 PDCP_DEFAULT_HFN : 0,
822 .hfn_threshold = 0x70C0A,
823 .hfn_ovrd = !(options->pdcp_ses_hfn_en),
825 .crypto_xform = &cipher_xform
828 struct rte_security_ctx *ctx = (struct rte_security_ctx *)
829 rte_cryptodev_get_sec_ctx(dev_id);
831 /* Create security session */
832 return (void *)rte_security_session_create(ctx,
833 &sess_conf, sess_mp, priv_mp);
836 if (options->op_type == CPERF_IPSEC) {
837 return create_ipsec_session(sess_mp, priv_mp, dev_id,
838 options, test_vector, iv_offset);
841 if (options->op_type == CPERF_DOCSIS) {
842 enum rte_security_docsis_direction direction;
844 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
845 cipher_xform.next = NULL;
846 cipher_xform.cipher.algo = options->cipher_algo;
847 cipher_xform.cipher.op = options->cipher_op;
848 cipher_xform.cipher.iv.offset = iv_offset;
849 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
850 cipher_xform.cipher.key.data =
851 test_vector->cipher_key.data;
852 cipher_xform.cipher.key.length =
853 test_vector->cipher_key.length;
854 cipher_xform.cipher.iv.length =
855 test_vector->cipher_iv.length;
857 cipher_xform.cipher.key.data = NULL;
858 cipher_xform.cipher.key.length = 0;
859 cipher_xform.cipher.iv.length = 0;
861 cipher_xform.next = NULL;
/* DOCSIS: encrypt maps to downlink, decrypt to uplink. */
863 if (options->cipher_op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
864 direction = RTE_SECURITY_DOCSIS_DOWNLINK;
866 direction = RTE_SECURITY_DOCSIS_UPLINK;
868 struct rte_security_session_conf sess_conf = {
870 RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
871 .protocol = RTE_SECURITY_PROTOCOL_DOCSIS,
873 .direction = direction,
875 .crypto_xform = &cipher_xform
877 struct rte_security_ctx *ctx = (struct rte_security_ctx *)
878 rte_cryptodev_get_sec_ctx(dev_id);
880 /* Create security session */
881 return (void *)rte_security_session_create(ctx,
882 &sess_conf, sess_mp, priv_mp);
/* Plain symmetric path: allocate the session, then init per type. */
885 sess = rte_cryptodev_sym_session_create(sess_mp);
889 if (options->op_type == CPERF_CIPHER_ONLY) {
890 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
891 cipher_xform.next = NULL;
892 cipher_xform.cipher.algo = options->cipher_algo;
893 cipher_xform.cipher.op = options->cipher_op;
894 cipher_xform.cipher.iv.offset = iv_offset;
896 /* cipher different than null */
897 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
898 cipher_xform.cipher.key.data =
899 test_vector->cipher_key.data;
900 cipher_xform.cipher.key.length =
901 test_vector->cipher_key.length;
902 cipher_xform.cipher.iv.length =
903 test_vector->cipher_iv.length;
905 cipher_xform.cipher.key.data = NULL;
906 cipher_xform.cipher.key.length = 0;
907 cipher_xform.cipher.iv.length = 0;
909 /* create crypto session */
910 rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
915 } else if (options->op_type == CPERF_AUTH_ONLY) {
916 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
917 auth_xform.next = NULL;
918 auth_xform.auth.algo = options->auth_algo;
919 auth_xform.auth.op = options->auth_op;
920 auth_xform.auth.iv.offset = iv_offset;
922 /* auth different than null */
923 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
924 auth_xform.auth.digest_length =
926 auth_xform.auth.key.length =
927 test_vector->auth_key.length;
928 auth_xform.auth.key.data = test_vector->auth_key.data;
929 auth_xform.auth.iv.length =
930 test_vector->auth_iv.length;
932 auth_xform.auth.digest_length = 0;
933 auth_xform.auth.key.length = 0;
934 auth_xform.auth.key.data = NULL;
935 auth_xform.auth.iv.length = 0;
937 /* create crypto session */
938 rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
943 } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
944 || options->op_type == CPERF_AUTH_THEN_CIPHER) {
948 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
949 cipher_xform.next = NULL;
950 cipher_xform.cipher.algo = options->cipher_algo;
951 cipher_xform.cipher.op = options->cipher_op;
952 cipher_xform.cipher.iv.offset = iv_offset;
954 /* cipher different than null */
955 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
956 cipher_xform.cipher.key.data =
957 test_vector->cipher_key.data;
958 cipher_xform.cipher.key.length =
959 test_vector->cipher_key.length;
960 cipher_xform.cipher.iv.length =
961 test_vector->cipher_iv.length;
963 cipher_xform.cipher.key.data = NULL;
964 cipher_xform.cipher.key.length = 0;
965 cipher_xform.cipher.iv.length = 0;
971 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
972 auth_xform.next = NULL;
973 auth_xform.auth.algo = options->auth_algo;
974 auth_xform.auth.op = options->auth_op;
/* Auth IV follows the cipher IV in the op's private area. */
975 auth_xform.auth.iv.offset = iv_offset +
976 cipher_xform.cipher.iv.length;
978 /* auth different than null */
979 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
980 auth_xform.auth.digest_length = options->digest_sz;
981 auth_xform.auth.iv.length = test_vector->auth_iv.length;
982 auth_xform.auth.key.length =
983 test_vector->auth_key.length;
984 auth_xform.auth.key.data =
985 test_vector->auth_key.data;
987 auth_xform.auth.digest_length = 0;
988 auth_xform.auth.key.length = 0;
989 auth_xform.auth.key.data = NULL;
990 auth_xform.auth.iv.length = 0;
993 /* cipher then auth */
994 if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
995 cipher_xform.next = &auth_xform;
996 /* create crypto session */
997 rte_cryptodev_sym_session_init(dev_id,
998 sess, &cipher_xform, priv_mp);
999 } else { /* auth then cipher */
1000 auth_xform.next = &cipher_xform;
1001 /* create crypto session */
1002 rte_cryptodev_sym_session_init(dev_id,
1003 sess, &auth_xform, priv_mp);
1005 } else { /* options->op_type == CPERF_AEAD */
1006 aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
1007 aead_xform.next = NULL;
1008 aead_xform.aead.algo = options->aead_algo;
1009 aead_xform.aead.op = options->aead_op;
1010 aead_xform.aead.iv.offset = iv_offset;
1012 aead_xform.aead.key.data =
1013 test_vector->aead_key.data;
1014 aead_xform.aead.key.length =
1015 test_vector->aead_key.length;
1016 aead_xform.aead.iv.length = test_vector->aead_iv.length;
1018 aead_xform.aead.digest_length = options->digest_sz;
1019 aead_xform.aead.aad_length =
1020 options->aead_aad_sz;
1022 /* Create crypto session */
1023 rte_cryptodev_sym_session_init(dev_id,
1024 sess, &aead_xform, priv_mp);
1031 cperf_get_op_functions(const struct cperf_options *options,
1032 struct cperf_op_fns *op_fns)
1034 memset(op_fns, 0, sizeof(struct cperf_op_fns));
1036 op_fns->sess_create = cperf_create_session;
1038 switch (options->op_type) {
1040 op_fns->populate_ops = cperf_set_ops_aead;
1043 case CPERF_AUTH_THEN_CIPHER:
1044 case CPERF_CIPHER_THEN_AUTH:
1045 op_fns->populate_ops = cperf_set_ops_cipher_auth;
1047 case CPERF_AUTH_ONLY:
1048 if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
1049 op_fns->populate_ops = cperf_set_ops_null_auth;
1051 op_fns->populate_ops = cperf_set_ops_auth;
1053 case CPERF_CIPHER_ONLY:
1054 if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
1055 op_fns->populate_ops = cperf_set_ops_null_cipher;
1057 op_fns->populate_ops = cperf_set_ops_cipher;
1059 case CPERF_ASYM_MODEX:
1060 op_fns->populate_ops = cperf_set_ops_asym;
1062 #ifdef RTE_LIB_SECURITY
1066 op_fns->populate_ops = cperf_set_ops_security;