1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2016-2017 Intel Corporation
5 #include <rte_cryptodev.h>
10 #include "cperf_test_vectors.h"
/*
 * Populate a burst of asymmetric crypto ops for modular-exponentiation
 * perf tests: reset status, point base/result buffers at the shared
 * perf_base/perf_mod_result vectors and attach the asym session.
 * NOTE(review): this chunk has extraction gaps (missing return type,
 * local declarations, closing braces); visible code kept verbatim.
 */
13 cperf_set_ops_asym(struct rte_crypto_op **ops,
14 uint32_t src_buf_offset __rte_unused,
15 uint32_t dst_buf_offset __rte_unused, uint16_t nb_ops,
16 struct rte_cryptodev_sym_session *sess,
17 const struct cperf_options *options __rte_unused,
18 const struct cperf_test_vector *test_vector __rte_unused,
19 uint16_t iv_offset __rte_unused,
20 uint32_t *imix_idx __rte_unused,
21 uint64_t *tsc_start __rte_unused)
/* The sym-session pointer actually carries an asym session here;
 * presumably the caller created it via the asym path — TODO confirm. */
24 void *asym_sess = (void *)sess;
26 for (i = 0; i < nb_ops; i++) {
27 struct rte_crypto_asym_op *asym_op = ops[i]->asym;
29 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
30 asym_op->modex.base.data = perf_base;
31 asym_op->modex.base.length = sizeof(perf_base);
32 asym_op->modex.result.data = perf_mod_result;
33 asym_op->modex.result.length = sizeof(perf_mod_result);
34 rte_crypto_op_attach_asym_session(ops[i], asym_sess);
39 #ifdef RTE_LIB_SECURITY
/*
 * Fill an mbuf with the IPsec test plaintext for the encrypt direction
 * and patch the IPv4 total-length field to the mbuf data length.
 * NOTE(review): lines are missing from this chunk (return type, braces);
 * visible code kept verbatim.
 */
41 test_ipsec_vec_populate(struct rte_mbuf *m, const struct cperf_options *options,
42 const struct cperf_test_vector *test_vector)
44 struct rte_ipv4_hdr *ip = rte_pktmbuf_mtod(m, struct rte_ipv4_hdr *);
/* Only the outbound (encrypt) direction is populated from plaintext. */
46 if ((options->aead_op == RTE_CRYPTO_AEAD_OP_ENCRYPT) ||
47 (options->cipher_op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)) {
48 memcpy(ip, test_vector->plaintext.data, m->data_len);
50 ip->total_length = rte_cpu_to_be_16(m->data_len);
/*
 * Populate a burst of rte_security ops (PDCP / DOCSIS / IPsec):
 * attach the security session, wire src/dst mbufs placed in the same
 * mempool element after the op, and set per-protocol lengths/offsets.
 * For IPsec throughput runs, the vector-populate cost is measured and
 * accumulated into *tsc_start so it can be excluded from results.
 * NOTE(review): extraction gaps — missing locals (i, buf_sz), else
 * branches and closing braces; visible code kept verbatim.
 */
55 cperf_set_ops_security(struct rte_crypto_op **ops,
56 uint32_t src_buf_offset __rte_unused,
57 uint32_t dst_buf_offset __rte_unused,
58 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
59 const struct cperf_options *options,
60 const struct cperf_test_vector *test_vector,
61 uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
64 uint64_t tsc_start_temp, tsc_end_temp;
67 for (i = 0; i < nb_ops; i++) {
68 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
69 struct rte_security_session *sec_sess =
70 (struct rte_security_session *)sess;
/* Per-packet HFN is carried in the op's IV area for PDCP. */
73 uint32_t *per_pkt_hfn = rte_crypto_op_ctod_offset(ops[i],
74 uint32_t *, iv_offset);
75 *per_pkt_hfn = options->pdcp_ses_hfn_en ? 0 : PDCP_DEFAULT_HFN;
77 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
78 rte_security_attach_session(ops[i], sec_sess);
/* src mbuf lives at a fixed offset inside the op's mempool element. */
79 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
82 if (options->op_type == CPERF_PDCP ||
83 options->op_type == CPERF_IPSEC) {
84 /* In case of IPsec, headroom is consumed by PMD,
87 sym_op->m_src->data_off = options->headroom_sz;
89 sym_op->m_src->buf_len = options->segment_sz;
90 sym_op->m_src->data_len = options->test_buffer_size;
91 sym_op->m_src->pkt_len = sym_op->m_src->data_len;
/* Only re-populate (and time) the vector for pure throughput runs. */
93 if ((options->op_type == CPERF_IPSEC) &&
94 (options->test_file == NULL) &&
95 (options->test == CPERF_TEST_TYPE_THROUGHPUT)) {
96 tsc_start_temp = rte_rdtsc_precise();
97 test_ipsec_vec_populate(sym_op->m_src, options,
99 tsc_end_temp = rte_rdtsc_precise();
101 *tsc_start += (tsc_end_temp - tsc_start_temp);
105 if (options->op_type == CPERF_DOCSIS) {
106 if (options->imix_distribution_count) {
107 buf_sz = options->imix_buffer_sizes[*imix_idx];
108 *imix_idx = (*imix_idx + 1) % options->pool_sz;
110 buf_sz = options->test_buffer_size;
112 sym_op->m_src->buf_len = options->segment_sz;
113 sym_op->m_src->data_len = buf_sz;
114 sym_op->m_src->pkt_len = buf_sz;
116 /* DOCSIS header is not CRC'ed */
117 sym_op->auth.data.offset = options->docsis_hdr_sz;
118 sym_op->auth.data.length = buf_sz -
119 sym_op->auth.data.offset - RTE_ETHER_CRC_LEN;
121 * DOCSIS header and SRC and DST MAC addresses are not
124 sym_op->cipher.data.offset = sym_op->auth.data.offset +
125 RTE_ETHER_HDR_LEN - RTE_ETHER_TYPE_LEN;
126 sym_op->cipher.data.length = buf_sz -
127 sym_op->cipher.data.offset;
130 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
131 if (dst_buf_offset == 0)
132 sym_op->m_dst = NULL;
134 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
/*
 * Populate a burst of sym ops for NULL-cipher tests: attach the session,
 * wire src/dst mbufs embedded after each op, and set the cipher data
 * length (per-op IMIX size when configured, else test_buffer_size).
 * NOTE(review): extraction gaps — missing `i` declaration, else branch
 * and closing braces; visible code kept verbatim.
 */
143 cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
144 uint32_t src_buf_offset, uint32_t dst_buf_offset,
145 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
146 const struct cperf_options *options,
147 const struct cperf_test_vector *test_vector __rte_unused,
148 uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
149 uint64_t *tsc_start __rte_unused)
153 for (i = 0; i < nb_ops; i++) {
154 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
156 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
157 rte_crypto_op_attach_sym_session(ops[i], sess);
159 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
162 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
163 if (dst_buf_offset == 0)
164 sym_op->m_dst = NULL;
166 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
169 /* cipher parameters */
170 if (options->imix_distribution_count) {
171 sym_op->cipher.data.length =
172 options->imix_buffer_sizes[*imix_idx];
173 *imix_idx = (*imix_idx + 1) % options->pool_sz;
175 sym_op->cipher.data.length = options->test_buffer_size;
176 sym_op->cipher.data.offset = 0;
/*
 * Populate a burst of sym ops for NULL-auth tests — mirror image of
 * cperf_set_ops_null_cipher but setting auth data length/offset.
 * NOTE(review): extraction gaps — missing `i` declaration, else branch
 * and closing braces; visible code kept verbatim.
 */
183 cperf_set_ops_null_auth(struct rte_crypto_op **ops,
184 uint32_t src_buf_offset, uint32_t dst_buf_offset,
185 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
186 const struct cperf_options *options,
187 const struct cperf_test_vector *test_vector __rte_unused,
188 uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
189 uint64_t *tsc_start __rte_unused)
193 for (i = 0; i < nb_ops; i++) {
194 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
196 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
197 rte_crypto_op_attach_sym_session(ops[i], sess);
199 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
202 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
203 if (dst_buf_offset == 0)
204 sym_op->m_dst = NULL;
206 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
209 /* auth parameters */
210 if (options->imix_distribution_count) {
211 sym_op->auth.data.length =
212 options->imix_buffer_sizes[*imix_idx];
213 *imix_idx = (*imix_idx + 1) % options->pool_sz;
215 sym_op->auth.data.length = options->test_buffer_size;
216 sym_op->auth.data.offset = 0;
/*
 * Populate a burst of cipher-only sym ops: attach session, wire mbufs,
 * set cipher length (converted to bits for SNOW3G/KASUMI/ZUC, which
 * take lengths in bits), and for VERIFY tests copy the cipher IV from
 * the test vector into each op's IV area.
 * NOTE(review): extraction gaps — missing `i` declaration, else branch
 * and closing braces; visible code kept verbatim.
 */
223 cperf_set_ops_cipher(struct rte_crypto_op **ops,
224 uint32_t src_buf_offset, uint32_t dst_buf_offset,
225 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
226 const struct cperf_options *options,
227 const struct cperf_test_vector *test_vector,
228 uint16_t iv_offset, uint32_t *imix_idx,
229 uint64_t *tsc_start __rte_unused)
233 for (i = 0; i < nb_ops; i++) {
234 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
236 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
237 rte_crypto_op_attach_sym_session(ops[i], sess);
239 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
242 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
243 if (dst_buf_offset == 0)
244 sym_op->m_dst = NULL;
246 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
249 /* cipher parameters */
250 if (options->imix_distribution_count) {
251 sym_op->cipher.data.length =
252 options->imix_buffer_sizes[*imix_idx];
253 *imix_idx = (*imix_idx + 1) % options->pool_sz;
255 sym_op->cipher.data.length = options->test_buffer_size;
257 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
258 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
259 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
/* These wireless algos express data length in bits. */
260 sym_op->cipher.data.length <<= 3;
262 sym_op->cipher.data.offset = 0;
265 if (options->test == CPERF_TEST_TYPE_VERIFY) {
266 for (i = 0; i < nb_ops; i++) {
267 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
268 uint8_t *, iv_offset);
270 memcpy(iv_ptr, test_vector->cipher_iv.data,
271 test_vector->cipher_iv.length);
/*
 * Populate a burst of auth-only sym ops. For VERIFY the digest pointer
 * is taken from the test vector; for GENERATE it is placed in the mbuf
 * segment containing byte `test_buffer_size` (moved to the next segment
 * when the current one lacks room for digest_sz). Lengths are converted
 * to bits for SNOW3G/KASUMI/ZUC; VERIFY also copies the auth IV.
 * NOTE(review): extraction gaps — missing locals, else branches and
 * closing braces; visible code kept verbatim.
 */
280 cperf_set_ops_auth(struct rte_crypto_op **ops,
281 uint32_t src_buf_offset, uint32_t dst_buf_offset,
282 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
283 const struct cperf_options *options,
284 const struct cperf_test_vector *test_vector,
285 uint16_t iv_offset, uint32_t *imix_idx,
286 uint64_t *tsc_start __rte_unused)
290 for (i = 0; i < nb_ops; i++) {
291 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
293 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
294 rte_crypto_op_attach_sym_session(ops[i], sess);
296 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
299 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
300 if (dst_buf_offset == 0)
301 sym_op->m_dst = NULL;
303 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
306 if (test_vector->auth_iv.length) {
307 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
310 memcpy(iv_ptr, test_vector->auth_iv.data,
311 test_vector->auth_iv.length);
314 /* authentication parameters */
315 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
316 sym_op->auth.digest.data = test_vector->digest.data;
317 sym_op->auth.digest.phys_addr =
318 test_vector->digest.phys_addr;
/* GENERATE path: walk the chain to the segment holding the digest. */
321 uint32_t offset = options->test_buffer_size;
322 struct rte_mbuf *buf, *tbuf;
324 if (options->out_of_place) {
327 tbuf = sym_op->m_src;
328 while ((tbuf->next != NULL) &&
329 (offset >= tbuf->data_len)) {
330 offset -= tbuf->data_len;
334 * If there is not enough room in segment,
335 * place the digest in the next segment
337 if ((tbuf->data_len - offset) < options->digest_sz) {
344 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
346 sym_op->auth.digest.phys_addr =
347 rte_pktmbuf_iova_offset(buf, offset);
351 if (options->imix_distribution_count) {
352 sym_op->auth.data.length =
353 options->imix_buffer_sizes[*imix_idx];
354 *imix_idx = (*imix_idx + 1) % options->pool_sz;
356 sym_op->auth.data.length = options->test_buffer_size;
358 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
359 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
360 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
/* These wireless algos express data length in bits. */
361 sym_op->auth.data.length <<= 3;
363 sym_op->auth.data.offset = 0;
366 if (options->test == CPERF_TEST_TYPE_VERIFY) {
367 if (test_vector->auth_iv.length) {
368 for (i = 0; i < nb_ops; i++) {
369 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
370 uint8_t *, iv_offset);
372 memcpy(iv_ptr, test_vector->auth_iv.data,
373 test_vector->auth_iv.length);
/*
 * Populate a burst of chained cipher+auth sym ops: combines the cipher
 * and auth population logic (IMIX sizing, bit-length conversion for
 * wireless algos, digest placement for GENERATE, vector digest for
 * VERIFY). For VERIFY tests, the cipher IV — and auth IV, laid out
 * immediately after it — are copied into each op's IV area.
 * NOTE(review): extraction gaps — missing locals, else branches and
 * closing braces; visible code kept verbatim.
 */
381 cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
382 uint32_t src_buf_offset, uint32_t dst_buf_offset,
383 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
384 const struct cperf_options *options,
385 const struct cperf_test_vector *test_vector,
386 uint16_t iv_offset, uint32_t *imix_idx,
387 uint64_t *tsc_start __rte_unused)
391 for (i = 0; i < nb_ops; i++) {
392 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
394 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
395 rte_crypto_op_attach_sym_session(ops[i], sess);
397 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
400 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
401 if (dst_buf_offset == 0)
402 sym_op->m_dst = NULL;
404 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
407 /* cipher parameters */
408 if (options->imix_distribution_count) {
409 sym_op->cipher.data.length =
410 options->imix_buffer_sizes[*imix_idx];
411 *imix_idx = (*imix_idx + 1) % options->pool_sz;
413 sym_op->cipher.data.length = options->test_buffer_size;
415 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
416 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
417 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
/* These wireless algos express data length in bits. */
418 sym_op->cipher.data.length <<= 3;
420 sym_op->cipher.data.offset = 0;
422 /* authentication parameters */
423 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
424 sym_op->auth.digest.data = test_vector->digest.data;
425 sym_op->auth.digest.phys_addr =
426 test_vector->digest.phys_addr;
/* GENERATE path: walk the chain to the segment holding the digest. */
429 uint32_t offset = options->test_buffer_size;
430 struct rte_mbuf *buf, *tbuf;
432 if (options->out_of_place) {
435 tbuf = sym_op->m_src;
436 while ((tbuf->next != NULL) &&
437 (offset >= tbuf->data_len)) {
438 offset -= tbuf->data_len;
442 * If there is not enough room in segment,
443 * place the digest in the next segment
445 if ((tbuf->data_len - offset) < options->digest_sz) {
452 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
454 sym_op->auth.digest.phys_addr =
455 rte_pktmbuf_iova_offset(buf, offset);
458 if (options->imix_distribution_count) {
459 sym_op->auth.data.length =
460 options->imix_buffer_sizes[*imix_idx];
461 *imix_idx = (*imix_idx + 1) % options->pool_sz;
463 sym_op->auth.data.length = options->test_buffer_size;
465 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
466 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
467 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
468 sym_op->auth.data.length <<= 3;
470 sym_op->auth.data.offset = 0;
473 if (options->test == CPERF_TEST_TYPE_VERIFY) {
474 for (i = 0; i < nb_ops; i++) {
475 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
476 uint8_t *, iv_offset);
478 memcpy(iv_ptr, test_vector->cipher_iv.data,
479 test_vector->cipher_iv.length);
480 if (test_vector->auth_iv.length) {
482 * Copy IV after the crypto operation and
485 iv_ptr += test_vector->cipher_iv.length;
486 memcpy(iv_ptr, test_vector->auth_iv.data,
487 test_vector->auth_iv.length);
/*
 * Populate a burst of AEAD sym ops. AAD sits in the op's private area
 * right after the IV (16-byte aligned). For DECRYPT the digest comes
 * from the test vector; for ENCRYPT it is placed just past the AEAD
 * data region in the mbuf chain. VERIFY tests copy the IV and AAD from
 * the vector, with the AES-CCM-specific +1 / +18 byte layout offsets.
 * NOTE(review): extraction gaps — missing locals, else branches and
 * closing braces; visible code kept verbatim.
 */
497 cperf_set_ops_aead(struct rte_crypto_op **ops,
498 uint32_t src_buf_offset, uint32_t dst_buf_offset,
499 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
500 const struct cperf_options *options,
501 const struct cperf_test_vector *test_vector,
502 uint16_t iv_offset, uint32_t *imix_idx,
503 uint64_t *tsc_start __rte_unused)
506 /* AAD is placed after the IV */
507 uint16_t aad_offset = iv_offset +
508 RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);
510 for (i = 0; i < nb_ops; i++) {
511 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
513 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
514 rte_crypto_op_attach_sym_session(ops[i], sess);
516 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
519 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
520 if (dst_buf_offset == 0)
521 sym_op->m_dst = NULL;
523 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
526 /* AEAD parameters */
527 if (options->imix_distribution_count) {
528 sym_op->aead.data.length =
529 options->imix_buffer_sizes[*imix_idx];
530 *imix_idx = (*imix_idx + 1) % options->pool_sz;
532 sym_op->aead.data.length = options->test_buffer_size;
533 sym_op->aead.data.offset = 0;
535 sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
536 uint8_t *, aad_offset);
537 sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
540 if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
541 sym_op->aead.digest.data = test_vector->digest.data;
542 sym_op->aead.digest.phys_addr =
543 test_vector->digest.phys_addr;
/* ENCRYPT path: digest lands right after the AEAD data region. */
546 uint32_t offset = sym_op->aead.data.length +
547 sym_op->aead.data.offset;
548 struct rte_mbuf *buf, *tbuf;
550 if (options->out_of_place) {
553 tbuf = sym_op->m_src;
554 while ((tbuf->next != NULL) &&
555 (offset >= tbuf->data_len)) {
556 offset -= tbuf->data_len;
560 * If there is not enough room in segment,
561 * place the digest in the next segment
563 if ((tbuf->data_len - offset) < options->digest_sz) {
570 sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
572 sym_op->aead.digest.phys_addr =
573 rte_pktmbuf_iova_offset(buf, offset);
577 if (options->test == CPERF_TEST_TYPE_VERIFY) {
578 for (i = 0; i < nb_ops; i++) {
579 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
580 uint8_t *, iv_offset);
583 * If doing AES-CCM, nonce is copied one byte
584 * after the start of IV field, and AAD is copied
585 * 18 bytes after the start of the AAD field.
587 if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
588 memcpy(iv_ptr + 1, test_vector->aead_iv.data,
589 test_vector->aead_iv.length);
591 memcpy(ops[i]->sym->aead.aad.data + 18,
592 test_vector->aad.data,
593 test_vector->aad.length);
595 memcpy(iv_ptr, test_vector->aead_iv.data,
596 test_vector->aead_iv.length);
598 memcpy(ops[i]->sym->aead.aad.data,
599 test_vector->aad.data,
600 test_vector->aad.length);
608 static struct rte_cryptodev_sym_session *
/*
 * Build an IPsec (ESP, IPv4 tunnel) lookaside-protocol security session
 * from the perf options: an AEAD xform when aead_algo is set, otherwise
 * a cipher xform chained to an auth xform. Direction (egress/ingress)
 * follows the configured encrypt/decrypt ops; lcore_id is used as the
 * SPI so each core gets a distinct SA.
 * NOTE(review): extraction gaps — missing parameter lines (dev_id,
 * iv_offset — TODO confirm), some struct members and closing braces;
 * visible code kept verbatim. Returns NULL on failure, presumably via
 * rte_security_session_create — verify against full source.
 */
609 create_ipsec_session(struct rte_mempool *sess_mp,
610 struct rte_mempool *priv_mp,
612 const struct cperf_options *options,
613 const struct cperf_test_vector *test_vector,
616 struct rte_crypto_sym_xform xform = {0};
617 struct rte_crypto_sym_xform auth_xform = {0};
619 if (options->aead_algo != 0) {
620 /* Setup AEAD Parameters */
621 xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
623 xform.aead.algo = options->aead_algo;
624 xform.aead.op = options->aead_op;
625 xform.aead.iv.offset = iv_offset;
626 xform.aead.key.data = test_vector->aead_key.data;
627 xform.aead.key.length = test_vector->aead_key.length;
628 xform.aead.iv.length = test_vector->aead_iv.length;
629 xform.aead.digest_length = options->digest_sz;
630 xform.aead.aad_length = options->aead_aad_sz;
631 } else if (options->cipher_algo != 0 && options->auth_algo != 0) {
632 /* Setup Cipher Parameters */
633 xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
635 xform.cipher.algo = options->cipher_algo;
636 xform.cipher.op = options->cipher_op;
637 xform.cipher.iv.offset = iv_offset;
638 xform.cipher.iv.length = test_vector->cipher_iv.length;
639 /* cipher different than null */
640 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
641 xform.cipher.key.data = test_vector->cipher_key.data;
642 xform.cipher.key.length =
643 test_vector->cipher_key.length;
645 xform.cipher.key.data = NULL;
646 xform.cipher.key.length = 0;
649 /* Setup Auth Parameters */
650 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
651 auth_xform.next = NULL;
652 auth_xform.auth.algo = options->auth_algo;
653 auth_xform.auth.op = options->auth_op;
654 auth_xform.auth.iv.offset = iv_offset +
655 xform.cipher.iv.length;
656 /* auth different than null */
657 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
658 auth_xform.auth.digest_length = options->digest_sz;
659 auth_xform.auth.key.length =
660 test_vector->auth_key.length;
661 auth_xform.auth.key.data = test_vector->auth_key.data;
662 auth_xform.auth.iv.length = test_vector->auth_iv.length;
664 auth_xform.auth.digest_length = 0;
665 auth_xform.auth.key.length = 0;
666 auth_xform.auth.key.data = NULL;
667 auth_xform.auth.iv.length = 0;
670 xform.next = &auth_xform;
675 #define CPERF_IPSEC_SRC_IP 0x01010101
676 #define CPERF_IPSEC_DST_IP 0x02020202
677 #define CPERF_IPSEC_SALT 0x0
678 #define CPERF_IPSEC_DEFTTL 64
679 struct rte_security_ipsec_tunnel_param tunnel = {
680 .type = RTE_SECURITY_IPSEC_TUNNEL_IPV4,
682 .src_ip = { .s_addr = CPERF_IPSEC_SRC_IP},
683 .dst_ip = { .s_addr = CPERF_IPSEC_DST_IP},
686 .ttl = CPERF_IPSEC_DEFTTL,
689 struct rte_security_session_conf sess_conf = {
690 .action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
691 .protocol = RTE_SECURITY_PROTOCOL_IPSEC,
693 .spi = rte_lcore_id(),
694 /**< For testing sake, lcore_id is taken as SPI so that
695 * for every core a different session is created.
697 .salt = CPERF_IPSEC_SALT,
701 ((options->cipher_op ==
702 RTE_CRYPTO_CIPHER_OP_ENCRYPT) &&
704 RTE_CRYPTO_AUTH_OP_GENERATE)) ||
706 RTE_CRYPTO_AEAD_OP_ENCRYPT) ?
707 RTE_SECURITY_IPSEC_SA_DIR_EGRESS :
708 RTE_SECURITY_IPSEC_SA_DIR_INGRESS,
709 .proto = RTE_SECURITY_IPSEC_SA_PROTO_ESP,
710 .mode = RTE_SECURITY_IPSEC_SA_MODE_TUNNEL,
714 .crypto_xform = &xform
717 struct rte_security_ctx *ctx = (struct rte_security_ctx *)
718 rte_cryptodev_get_sec_ctx(dev_id);
720 /* Create security session */
721 return (void *)rte_security_session_create(ctx,
722 &sess_conf, sess_mp, priv_mp);
725 static struct rte_cryptodev_sym_session *
/*
 * Create the session appropriate for the configured op type:
 *  - CPERF_ASYM_MODEX: asym modexp session (returned via sym pointer);
 *  - CPERF_PDCP / CPERF_IPSEC / CPERF_DOCSIS: rte_security session
 *    (returned early, cast to the sym-session pointer type);
 *  - otherwise: plain sym session initialised with cipher/auth/AEAD
 *    xforms built from options and the test vector.
 * NOTE(review): extraction gaps — missing parameters (dev_id,
 * iv_offset — TODO confirm), error-handling lines, else keywords and
 * closing braces; visible code kept verbatim.
 */
726 cperf_create_session(struct rte_mempool *sess_mp,
727 struct rte_mempool *priv_mp,
729 const struct cperf_options *options,
730 const struct cperf_test_vector *test_vector,
733 struct rte_crypto_sym_xform cipher_xform;
734 struct rte_crypto_sym_xform auth_xform;
735 struct rte_crypto_sym_xform aead_xform;
736 struct rte_cryptodev_sym_session *sess = NULL;
737 struct rte_crypto_asym_xform xform = {0};
739 if (options->op_type == CPERF_ASYM_MODEX) {
741 xform.xform_type = RTE_CRYPTO_ASYM_XFORM_MODEX;
742 xform.modex.modulus.data = perf_mod_p;
743 xform.modex.modulus.length = sizeof(perf_mod_p);
744 xform.modex.exponent.data = perf_mod_e;
745 xform.modex.exponent.length = sizeof(perf_mod_e);
747 sess = (void *)rte_cryptodev_asym_session_create(dev_id, &xform, sess_mp);
753 #ifdef RTE_LIB_SECURITY
757 if (options->op_type == CPERF_PDCP) {
758 /* Setup Cipher Parameters */
759 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
760 cipher_xform.next = NULL;
761 cipher_xform.cipher.algo = options->cipher_algo;
762 cipher_xform.cipher.op = options->cipher_op;
763 cipher_xform.cipher.iv.offset = iv_offset;
/* PDCP count (IV) is 4 bytes regardless of algo. */
764 cipher_xform.cipher.iv.length = 4;
766 /* cipher different than null */
767 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
768 cipher_xform.cipher.key.data = test_vector->cipher_key.data;
769 cipher_xform.cipher.key.length = test_vector->cipher_key.length;
771 cipher_xform.cipher.key.data = NULL;
772 cipher_xform.cipher.key.length = 0;
775 /* Setup Auth Parameters */
776 if (options->auth_algo != 0) {
777 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
778 auth_xform.next = NULL;
779 auth_xform.auth.algo = options->auth_algo;
780 auth_xform.auth.op = options->auth_op;
781 auth_xform.auth.iv.offset = iv_offset +
782 cipher_xform.cipher.iv.length;
784 /* auth different than null */
785 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
786 auth_xform.auth.digest_length = options->digest_sz;
787 auth_xform.auth.key.length = test_vector->auth_key.length;
788 auth_xform.auth.key.data = test_vector->auth_key.data;
789 auth_xform.auth.iv.length = test_vector->auth_iv.length;
791 auth_xform.auth.digest_length = 0;
792 auth_xform.auth.key.length = 0;
793 auth_xform.auth.key.data = NULL;
794 auth_xform.auth.iv.length = 0;
797 cipher_xform.next = &auth_xform;
799 cipher_xform.next = NULL;
802 struct rte_security_session_conf sess_conf = {
803 .action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
804 .protocol = RTE_SECURITY_PROTOCOL_PDCP,
807 .domain = options->pdcp_domain,
809 .sn_size = options->pdcp_sn_sz,
810 .hfn = options->pdcp_ses_hfn_en ?
811 PDCP_DEFAULT_HFN : 0,
812 .hfn_threshold = 0x70C0A,
/* Per-packet HFN override is on when session HFN is disabled. */
813 .hfn_ovrd = !(options->pdcp_ses_hfn_en),
815 .crypto_xform = &cipher_xform
818 struct rte_security_ctx *ctx = (struct rte_security_ctx *)
819 rte_cryptodev_get_sec_ctx(dev_id);
821 /* Create security session */
822 return (void *)rte_security_session_create(ctx,
823 &sess_conf, sess_mp, priv_mp);
826 if (options->op_type == CPERF_IPSEC) {
827 return create_ipsec_session(sess_mp, priv_mp, dev_id,
828 options, test_vector, iv_offset);
831 if (options->op_type == CPERF_DOCSIS) {
832 enum rte_security_docsis_direction direction;
834 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
835 cipher_xform.next = NULL;
836 cipher_xform.cipher.algo = options->cipher_algo;
837 cipher_xform.cipher.op = options->cipher_op;
838 cipher_xform.cipher.iv.offset = iv_offset;
839 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
840 cipher_xform.cipher.key.data =
841 test_vector->cipher_key.data;
842 cipher_xform.cipher.key.length =
843 test_vector->cipher_key.length;
844 cipher_xform.cipher.iv.length =
845 test_vector->cipher_iv.length;
847 cipher_xform.cipher.key.data = NULL;
848 cipher_xform.cipher.key.length = 0;
849 cipher_xform.cipher.iv.length = 0;
851 cipher_xform.next = NULL;
/* Encrypt maps to downlink (CMTS->CM), decrypt to uplink. */
853 if (options->cipher_op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
854 direction = RTE_SECURITY_DOCSIS_DOWNLINK;
856 direction = RTE_SECURITY_DOCSIS_UPLINK;
858 struct rte_security_session_conf sess_conf = {
860 RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
861 .protocol = RTE_SECURITY_PROTOCOL_DOCSIS,
863 .direction = direction,
865 .crypto_xform = &cipher_xform
867 struct rte_security_ctx *ctx = (struct rte_security_ctx *)
868 rte_cryptodev_get_sec_ctx(dev_id);
870 /* Create security session */
871 return (void *)rte_security_session_create(ctx,
872 &sess_conf, sess_mp, priv_mp);
/* Non-security path: plain symmetric session. */
875 sess = rte_cryptodev_sym_session_create(sess_mp);
879 if (options->op_type == CPERF_CIPHER_ONLY) {
880 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
881 cipher_xform.next = NULL;
882 cipher_xform.cipher.algo = options->cipher_algo;
883 cipher_xform.cipher.op = options->cipher_op;
884 cipher_xform.cipher.iv.offset = iv_offset;
886 /* cipher different than null */
887 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
888 cipher_xform.cipher.key.data =
889 test_vector->cipher_key.data;
890 cipher_xform.cipher.key.length =
891 test_vector->cipher_key.length;
892 cipher_xform.cipher.iv.length =
893 test_vector->cipher_iv.length;
895 cipher_xform.cipher.key.data = NULL;
896 cipher_xform.cipher.key.length = 0;
897 cipher_xform.cipher.iv.length = 0;
899 /* create crypto session */
900 rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
905 } else if (options->op_type == CPERF_AUTH_ONLY) {
906 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
907 auth_xform.next = NULL;
908 auth_xform.auth.algo = options->auth_algo;
909 auth_xform.auth.op = options->auth_op;
910 auth_xform.auth.iv.offset = iv_offset;
912 /* auth different than null */
913 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
914 auth_xform.auth.digest_length =
916 auth_xform.auth.key.length =
917 test_vector->auth_key.length;
918 auth_xform.auth.key.data = test_vector->auth_key.data;
919 auth_xform.auth.iv.length =
920 test_vector->auth_iv.length;
922 auth_xform.auth.digest_length = 0;
923 auth_xform.auth.key.length = 0;
924 auth_xform.auth.key.data = NULL;
925 auth_xform.auth.iv.length = 0;
927 /* create crypto session */
928 rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
933 } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
934 || options->op_type == CPERF_AUTH_THEN_CIPHER) {
938 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
939 cipher_xform.next = NULL;
940 cipher_xform.cipher.algo = options->cipher_algo;
941 cipher_xform.cipher.op = options->cipher_op;
942 cipher_xform.cipher.iv.offset = iv_offset;
944 /* cipher different than null */
945 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
946 cipher_xform.cipher.key.data =
947 test_vector->cipher_key.data;
948 cipher_xform.cipher.key.length =
949 test_vector->cipher_key.length;
950 cipher_xform.cipher.iv.length =
951 test_vector->cipher_iv.length;
953 cipher_xform.cipher.key.data = NULL;
954 cipher_xform.cipher.key.length = 0;
955 cipher_xform.cipher.iv.length = 0;
961 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
962 auth_xform.next = NULL;
963 auth_xform.auth.algo = options->auth_algo;
964 auth_xform.auth.op = options->auth_op;
/* Auth IV is laid out right after the cipher IV in the op. */
965 auth_xform.auth.iv.offset = iv_offset +
966 cipher_xform.cipher.iv.length;
968 /* auth different than null */
969 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
970 auth_xform.auth.digest_length = options->digest_sz;
971 auth_xform.auth.iv.length = test_vector->auth_iv.length;
972 auth_xform.auth.key.length =
973 test_vector->auth_key.length;
974 auth_xform.auth.key.data =
975 test_vector->auth_key.data;
977 auth_xform.auth.digest_length = 0;
978 auth_xform.auth.key.length = 0;
979 auth_xform.auth.key.data = NULL;
980 auth_xform.auth.iv.length = 0;
983 /* cipher then auth */
984 if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
985 cipher_xform.next = &auth_xform;
986 /* create crypto session */
987 rte_cryptodev_sym_session_init(dev_id,
988 sess, &cipher_xform, priv_mp);
989 } else { /* auth then cipher */
990 auth_xform.next = &cipher_xform;
991 /* create crypto session */
992 rte_cryptodev_sym_session_init(dev_id,
993 sess, &auth_xform, priv_mp);
995 } else { /* options->op_type == CPERF_AEAD */
996 aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
997 aead_xform.next = NULL;
998 aead_xform.aead.algo = options->aead_algo;
999 aead_xform.aead.op = options->aead_op;
1000 aead_xform.aead.iv.offset = iv_offset;
1002 aead_xform.aead.key.data =
1003 test_vector->aead_key.data;
1004 aead_xform.aead.key.length =
1005 test_vector->aead_key.length;
1006 aead_xform.aead.iv.length = test_vector->aead_iv.length;
1008 aead_xform.aead.digest_length = options->digest_sz;
1009 aead_xform.aead.aad_length =
1010 options->aead_aad_sz;
1012 /* Create crypto session */
1013 rte_cryptodev_sym_session_init(dev_id,
1014 sess, &aead_xform, priv_mp);
/*
 * Select the session-create and op-populate callbacks for the
 * configured op type. NULL cipher/auth get the lightweight populate
 * variants; security op types share cperf_set_ops_security.
 * NOTE(review): function continues past the visible chunk (missing
 * case labels, breaks and return); visible code kept verbatim.
 */
1021 cperf_get_op_functions(const struct cperf_options *options,
1022 struct cperf_op_fns *op_fns)
1024 memset(op_fns, 0, sizeof(struct cperf_op_fns));
1026 op_fns->sess_create = cperf_create_session;
1028 switch (options->op_type) {
1030 op_fns->populate_ops = cperf_set_ops_aead;
1033 case CPERF_AUTH_THEN_CIPHER:
1034 case CPERF_CIPHER_THEN_AUTH:
1035 op_fns->populate_ops = cperf_set_ops_cipher_auth;
1037 case CPERF_AUTH_ONLY:
1038 if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
1039 op_fns->populate_ops = cperf_set_ops_null_auth;
1041 op_fns->populate_ops = cperf_set_ops_auth;
1043 case CPERF_CIPHER_ONLY:
1044 if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
1045 op_fns->populate_ops = cperf_set_ops_null_cipher;
1047 op_fns->populate_ops = cperf_set_ops_cipher;
1049 case CPERF_ASYM_MODEX:
1050 op_fns->populate_ops = cperf_set_ops_asym;
1052 #ifdef RTE_LIB_SECURITY
1056 op_fns->populate_ops = cperf_set_ops_security;