1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2016-2017 Intel Corporation
5 #include <rte_cryptodev.h>
10 #include "cperf_test_vectors.h"
/*
 * Populate a burst of asymmetric (modular exponentiation) crypto ops.
 *
 * Buffer-layout parameters (src/dst offsets, IV offset, imix index) are
 * unused for asym ops.  Every op is reset to NOT_PROCESSED, pointed at
 * the shared perf_base input and perf_mod_result output buffers, and
 * attached to the session.  The rte_cryptodev_sym_session pointer is
 * an opaque carrier for an asym session handle here (cast to void *).
 * NOTE(review): some interior lines are elided in this excerpt.
 */
13 cperf_set_ops_asym(struct rte_crypto_op **ops,
14 uint32_t src_buf_offset __rte_unused,
15 uint32_t dst_buf_offset __rte_unused, uint16_t nb_ops,
16 struct rte_cryptodev_sym_session *sess,
17 const struct cperf_options *options __rte_unused,
18 const struct cperf_test_vector *test_vector __rte_unused,
19 uint16_t iv_offset __rte_unused,
20 uint32_t *imix_idx __rte_unused,
21 uint64_t *tsc_start __rte_unused)
/* Session mempool stores asym sessions for the modex test case. */
24 void *asym_sess = (void *)sess;
26 for (i = 0; i < nb_ops; i++) {
27 struct rte_crypto_asym_op *asym_op = ops[i]->asym;
29 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/* All ops share the same static modexp input/output vectors. */
30 asym_op->modex.base.data = perf_base;
31 asym_op->modex.base.length = sizeof(perf_base);
32 asym_op->modex.result.data = perf_mod_result;
33 asym_op->modex.result.length = sizeof(perf_mod_result);
34 rte_crypto_op_attach_asym_session(ops[i], asym_sess);
39 #ifdef RTE_LIB_SECURITY
/*
 * Copy the IPsec test vector's IPv4 header into the start of mbuf @m.
 *
 * Only done for the outbound (encrypt) direction — AEAD encrypt or
 * cipher encrypt — since inbound packets already carry valid headers.
 * total_length is patched to the mbuf's data_len so the header stays
 * consistent with the actual test buffer size.
 */
41 test_ipsec_vec_populate(struct rte_mbuf *m, const struct cperf_options *options,
42 const struct cperf_test_vector *test_vector)
44 struct rte_ipv4_hdr *ip = rte_pktmbuf_mtod(m, struct rte_ipv4_hdr *);
46 if ((options->aead_op == RTE_CRYPTO_AEAD_OP_ENCRYPT) ||
47 (options->cipher_op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)) {
/* Plaintext begins with a prepared IPv4 header in the vector. */
48 memcpy(ip, test_vector->plaintext.data,
49 sizeof(struct rte_ipv4_hdr));
/* Keep the header length field in sync with the payload size. */
51 ip->total_length = rte_cpu_to_be_16(m->data_len);
/*
 * Populate a burst of lookaside-security (PDCP / DOCSIS) crypto ops.
 *
 * For each op: write the per-packet HFN override word at iv_offset
 * (PDCP), reset status, attach the security session (the sym session
 * pointer is reinterpreted as an rte_security_session), and derive
 * m_src/m_dst from fixed offsets relative to the op itself.  PDCP ops
 * get fixed-size buffers; DOCSIS ops honour the imix distribution and
 * set auth/cipher regions around the DOCSIS header and Ethernet CRC.
 * NOTE(review): interior lines (declarations of i/buf_sz, else
 * branches, offset additions) are elided in this excerpt.
 */
56 cperf_set_ops_security(struct rte_crypto_op **ops,
57 uint32_t src_buf_offset __rte_unused,
58 uint32_t dst_buf_offset __rte_unused,
59 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
60 const struct cperf_options *options,
61 const struct cperf_test_vector *test_vector,
62 uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
67 for (i = 0; i < nb_ops; i++) {
68 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
69 struct rte_security_session *sec_sess =
70 (struct rte_security_session *)sess;
/* Per-packet HFN lives where the IV would otherwise be. */
73 uint32_t *per_pkt_hfn = rte_crypto_op_ctod_offset(ops[i],
74 uint32_t *, iv_offset);
/* 0 when the session itself carries the HFN, else the default. */
75 *per_pkt_hfn = options->pdcp_ses_hfn_en ? 0 : PDCP_DEFAULT_HFN;
77 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
78 rte_security_attach_session(ops[i], sec_sess);
/* mbufs are laid out at a fixed offset after each op structure. */
79 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
82 if (options->op_type == CPERF_PDCP) {
83 sym_op->m_src->buf_len = options->segment_sz;
84 sym_op->m_src->data_len = options->test_buffer_size;
85 sym_op->m_src->pkt_len = sym_op->m_src->data_len;
88 if (options->op_type == CPERF_DOCSIS) {
/* imix: cycle through the configured buffer-size list. */
89 if (options->imix_distribution_count) {
90 buf_sz = options->imix_buffer_sizes[*imix_idx];
91 *imix_idx = (*imix_idx + 1) % options->pool_sz;
93 buf_sz = options->test_buffer_size;
95 sym_op->m_src->buf_len = options->segment_sz;
96 sym_op->m_src->data_len = buf_sz;
97 sym_op->m_src->pkt_len = buf_sz;
99 /* DOCSIS header is not CRC'ed */
100 sym_op->auth.data.offset = options->docsis_hdr_sz;
101 sym_op->auth.data.length = buf_sz -
102 sym_op->auth.data.offset - RTE_ETHER_CRC_LEN;
104 * DOCSIS header and SRC and DST MAC addresses are not
/* Ciphering starts after DA/SA, i.e. header + 12 bytes. */
107 sym_op->cipher.data.offset = sym_op->auth.data.offset +
108 RTE_ETHER_HDR_LEN - RTE_ETHER_TYPE_LEN;
109 sym_op->cipher.data.length = buf_sz -
110 sym_op->cipher.data.offset;
113 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
114 if (dst_buf_offset == 0)
115 sym_op->m_dst = NULL;
117 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
121 RTE_SET_USED(tsc_start);
122 RTE_SET_USED(test_vector);
/*
 * Populate a burst of lookaside IPsec crypto ops.
 *
 * Attaches the security session to each op, resets the mbuf geometry
 * (headroom, lengths) since the IPsec PMD consumes headroom, and —
 * when no external test file supplies packet contents — fills each
 * mbuf from the test vector.  The rdtsc cost of that vector population
 * is accumulated into *tsc_start so callers can subtract it from the
 * measured cycle count.
 * NOTE(review): interior lines (loop-index declaration, offset
 * additions, closing braces) are elided in this excerpt.
 */
128 cperf_set_ops_security_ipsec(struct rte_crypto_op **ops,
129 uint32_t src_buf_offset __rte_unused,
130 uint32_t dst_buf_offset __rte_unused,
131 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
132 const struct cperf_options *options,
133 const struct cperf_test_vector *test_vector,
134 uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
/* Sym session pointer is an opaque carrier for the security session. */
137 struct rte_security_session *sec_sess =
138 (struct rte_security_session *)sess;
139 const uint32_t test_buffer_size = options->test_buffer_size;
140 const uint32_t headroom_sz = options->headroom_sz;
141 const uint32_t segment_sz = options->segment_sz;
142 uint64_t tsc_start_temp, tsc_end_temp;
145 RTE_SET_USED(imix_idx);
147 for (i = 0; i < nb_ops; i++) {
148 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
149 struct rte_mbuf *m = sym_op->m_src;
151 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
152 rte_security_attach_session(ops[i], sec_sess);
/* mbuf sits at a fixed offset after the op structure. */
153 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
156 /* In case of IPsec, headroom is consumed by PMD,
157 * hence resetting it.
159 m->data_off = headroom_sz;
161 m->buf_len = segment_sz;
162 m->data_len = test_buffer_size;
163 m->pkt_len = test_buffer_size;
/* IPsec path is always in-place. */
165 sym_op->m_dst = NULL;
/* Packets supplied from a file need no vector population. */
168 if (options->test_file != NULL)
/* Time the population pass so it can be excluded from results. */
171 tsc_start_temp = rte_rdtsc_precise();
173 for (i = 0; i < nb_ops; i++) {
174 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
175 struct rte_mbuf *m = sym_op->m_src;
177 test_ipsec_vec_populate(m, options, test_vector);
180 tsc_end_temp = rte_rdtsc_precise();
181 *tsc_start += tsc_end_temp - tsc_start_temp;
/*
 * Populate a burst of NULL-cipher sym crypto ops.
 *
 * Attaches the sym session, derives m_src (and m_dst for out-of-place)
 * from fixed offsets relative to the op, and sets the cipher region to
 * the configured (or imix-cycled) test buffer size at offset 0.
 * NOTE(review): interior lines (loop-index declaration, offset
 * additions, else keyword) are elided in this excerpt.
 */
189 cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
190 uint32_t src_buf_offset, uint32_t dst_buf_offset,
191 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
192 const struct cperf_options *options,
193 const struct cperf_test_vector *test_vector __rte_unused,
194 uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
195 uint64_t *tsc_start __rte_unused)
199 for (i = 0; i < nb_ops; i++) {
200 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
202 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
203 rte_crypto_op_attach_sym_session(ops[i], sess);
/* mbufs live at fixed offsets after each op structure. */
205 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
208 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
209 if (dst_buf_offset == 0)
210 sym_op->m_dst = NULL;
212 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
215 /* cipher parameters */
216 if (options->imix_distribution_count) {
217 sym_op->cipher.data.length =
218 options->imix_buffer_sizes[*imix_idx];
219 *imix_idx = (*imix_idx + 1) % options->pool_sz;
221 sym_op->cipher.data.length = options->test_buffer_size;
222 sym_op->cipher.data.offset = 0;
/*
 * Populate a burst of NULL-auth sym crypto ops.
 *
 * Mirrors cperf_set_ops_null_cipher but fills the auth region instead
 * of the cipher region: session attach, m_src/m_dst from fixed offsets,
 * auth length from the test buffer size (or imix cycle) at offset 0.
 * NOTE(review): interior lines are elided in this excerpt.
 */
229 cperf_set_ops_null_auth(struct rte_crypto_op **ops,
230 uint32_t src_buf_offset, uint32_t dst_buf_offset,
231 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
232 const struct cperf_options *options,
233 const struct cperf_test_vector *test_vector __rte_unused,
234 uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
235 uint64_t *tsc_start __rte_unused)
239 for (i = 0; i < nb_ops; i++) {
240 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
242 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
243 rte_crypto_op_attach_sym_session(ops[i], sess);
/* mbufs live at fixed offsets after each op structure. */
245 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
248 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
249 if (dst_buf_offset == 0)
250 sym_op->m_dst = NULL;
252 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
255 /* auth parameters */
256 if (options->imix_distribution_count) {
257 sym_op->auth.data.length =
258 options->imix_buffer_sizes[*imix_idx];
259 *imix_idx = (*imix_idx + 1) % options->pool_sz;
261 sym_op->auth.data.length = options->test_buffer_size;
262 sym_op->auth.data.offset = 0;
/*
 * Populate a burst of cipher-only sym crypto ops.
 *
 * Per op: session attach, m_src/m_dst from fixed offsets, cipher
 * length from the test buffer size (or imix cycle).  Bit-level
 * algorithms (SNOW3G UEA2, KASUMI F8, ZUC EEA3) express length in
 * bits, hence the <<= 3.  In VERIFY mode the cipher IV from the test
 * vector is copied into each op at iv_offset.
 * NOTE(review): interior lines are elided in this excerpt.
 */
269 cperf_set_ops_cipher(struct rte_crypto_op **ops,
270 uint32_t src_buf_offset, uint32_t dst_buf_offset,
271 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
272 const struct cperf_options *options,
273 const struct cperf_test_vector *test_vector,
274 uint16_t iv_offset, uint32_t *imix_idx,
275 uint64_t *tsc_start __rte_unused)
279 for (i = 0; i < nb_ops; i++) {
280 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
282 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
283 rte_crypto_op_attach_sym_session(ops[i], sess);
/* mbufs live at fixed offsets after each op structure. */
285 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
288 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
289 if (dst_buf_offset == 0)
290 sym_op->m_dst = NULL;
292 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
295 /* cipher parameters */
296 if (options->imix_distribution_count) {
297 sym_op->cipher.data.length =
298 options->imix_buffer_sizes[*imix_idx];
299 *imix_idx = (*imix_idx + 1) % options->pool_sz;
301 sym_op->cipher.data.length = options->test_buffer_size;
/* These wireless algos take their data length in bits. */
303 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
304 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
305 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
306 sym_op->cipher.data.length <<= 3;
308 sym_op->cipher.data.offset = 0;
/* VERIFY mode needs the known-answer IV in every op. */
311 if (options->test == CPERF_TEST_TYPE_VERIFY) {
312 for (i = 0; i < nb_ops; i++) {
313 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
314 uint8_t *, iv_offset);
316 memcpy(iv_ptr, test_vector->cipher_iv.data,
317 test_vector->cipher_iv.length);
/*
 * Populate a burst of auth-only sym crypto ops.
 *
 * Per op: session attach, m_src/m_dst from fixed offsets, optional
 * auth IV copy, then digest placement.  On VERIFY the digest comes
 * from the test vector; on GENERATE it is placed in the mbuf chain
 * just past the test data, walking segments so the digest does not
 * straddle a segment boundary.  Bit-level auth algorithms (SNOW3G
 * UIA2, KASUMI F9, ZUC EIA3) express length in bits (<<= 3).
 * NOTE(review): interior lines (loop-index declaration, the
 * out-of-place branch body, offset additions) are elided here.
 */
326 cperf_set_ops_auth(struct rte_crypto_op **ops,
327 uint32_t src_buf_offset, uint32_t dst_buf_offset,
328 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
329 const struct cperf_options *options,
330 const struct cperf_test_vector *test_vector,
331 uint16_t iv_offset, uint32_t *imix_idx,
332 uint64_t *tsc_start __rte_unused)
336 for (i = 0; i < nb_ops; i++) {
337 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
339 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
340 rte_crypto_op_attach_sym_session(ops[i], sess);
/* mbufs live at fixed offsets after each op structure. */
342 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
345 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
346 if (dst_buf_offset == 0)
347 sym_op->m_dst = NULL;
349 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
/* Algorithms with an auth IV get it copied next to the op. */
352 if (test_vector->auth_iv.length) {
353 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
356 memcpy(iv_ptr, test_vector->auth_iv.data,
357 test_vector->auth_iv.length);
360 /* authentication parameters */
361 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
/* VERIFY: compare against the precomputed vector digest. */
362 sym_op->auth.digest.data = test_vector->digest.data;
363 sym_op->auth.digest.phys_addr =
364 test_vector->digest.phys_addr;
/* GENERATE: digest goes right after the data in the mbuf. */
367 uint32_t offset = options->test_buffer_size;
368 struct rte_mbuf *buf, *tbuf;
370 if (options->out_of_place) {
/* Walk the chain to the segment holding `offset`. */
373 tbuf = sym_op->m_src;
374 while ((tbuf->next != NULL) &&
375 (offset >= tbuf->data_len)) {
376 offset -= tbuf->data_len;
380 * If there is not enough room in segment,
381 * place the digest in the next segment
383 if ((tbuf->data_len - offset) < options->digest_sz) {
390 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
392 sym_op->auth.digest.phys_addr =
393 rte_pktmbuf_iova_offset(buf, offset);
397 if (options->imix_distribution_count) {
398 sym_op->auth.data.length =
399 options->imix_buffer_sizes[*imix_idx];
400 *imix_idx = (*imix_idx + 1) % options->pool_sz;
402 sym_op->auth.data.length = options->test_buffer_size;
/* These wireless algos take their data length in bits. */
404 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
405 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
406 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
407 sym_op->auth.data.length <<= 3;
409 sym_op->auth.data.offset = 0;
/* VERIFY mode: reload the known-answer auth IVs in bulk. */
412 if (options->test == CPERF_TEST_TYPE_VERIFY) {
413 if (test_vector->auth_iv.length) {
414 for (i = 0; i < nb_ops; i++) {
415 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
416 uint8_t *, iv_offset);
418 memcpy(iv_ptr, test_vector->auth_iv.data,
419 test_vector->auth_iv.length);
/*
 * Populate a burst of chained cipher+auth sym crypto ops.
 *
 * Combines the cipher-only and auth-only population logic: session
 * attach, m_src/m_dst from fixed offsets, cipher region, then digest
 * placement (vector digest on VERIFY, in-mbuf past the data on
 * GENERATE).  In VERIFY mode both the cipher IV and — when present —
 * the auth IV (stored contiguously after the cipher IV) are copied in.
 * NOTE(review): interior lines (loop-index declaration, out-of-place
 * branch body, offset additions, else keywords) are elided here.
 */
427 cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
428 uint32_t src_buf_offset, uint32_t dst_buf_offset,
429 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
430 const struct cperf_options *options,
431 const struct cperf_test_vector *test_vector,
432 uint16_t iv_offset, uint32_t *imix_idx,
433 uint64_t *tsc_start __rte_unused)
437 for (i = 0; i < nb_ops; i++) {
438 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
440 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
441 rte_crypto_op_attach_sym_session(ops[i], sess);
/* mbufs live at fixed offsets after each op structure. */
443 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
446 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
447 if (dst_buf_offset == 0)
448 sym_op->m_dst = NULL;
450 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
453 /* cipher parameters */
454 if (options->imix_distribution_count) {
455 sym_op->cipher.data.length =
456 options->imix_buffer_sizes[*imix_idx];
457 *imix_idx = (*imix_idx + 1) % options->pool_sz;
459 sym_op->cipher.data.length = options->test_buffer_size;
/* These wireless algos take their data length in bits. */
461 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
462 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
463 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
464 sym_op->cipher.data.length <<= 3;
466 sym_op->cipher.data.offset = 0;
468 /* authentication parameters */
469 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
/* VERIFY: compare against the precomputed vector digest. */
470 sym_op->auth.digest.data = test_vector->digest.data;
471 sym_op->auth.digest.phys_addr =
472 test_vector->digest.phys_addr;
/* GENERATE: digest lands right after the data in the mbuf. */
475 uint32_t offset = options->test_buffer_size;
476 struct rte_mbuf *buf, *tbuf;
478 if (options->out_of_place) {
/* Walk the chain to the segment holding `offset`. */
481 tbuf = sym_op->m_src;
482 while ((tbuf->next != NULL) &&
483 (offset >= tbuf->data_len)) {
484 offset -= tbuf->data_len;
488 * If there is not enough room in segment,
489 * place the digest in the next segment
491 if ((tbuf->data_len - offset) < options->digest_sz) {
498 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
500 sym_op->auth.digest.phys_addr =
501 rte_pktmbuf_iova_offset(buf, offset);
504 if (options->imix_distribution_count) {
505 sym_op->auth.data.length =
506 options->imix_buffer_sizes[*imix_idx];
507 *imix_idx = (*imix_idx + 1) % options->pool_sz;
509 sym_op->auth.data.length = options->test_buffer_size;
/* These wireless algos take their data length in bits. */
511 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
512 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
513 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
514 sym_op->auth.data.length <<= 3;
516 sym_op->auth.data.offset = 0;
/* VERIFY mode: copy cipher IV, then auth IV stored after it. */
519 if (options->test == CPERF_TEST_TYPE_VERIFY) {
520 for (i = 0; i < nb_ops; i++) {
521 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
522 uint8_t *, iv_offset);
524 memcpy(iv_ptr, test_vector->cipher_iv.data,
525 test_vector->cipher_iv.length);
526 if (test_vector->auth_iv.length) {
528 * Copy IV after the crypto operation and
531 iv_ptr += test_vector->cipher_iv.length;
532 memcpy(iv_ptr, test_vector->auth_iv.data,
533 test_vector->auth_iv.length);
/*
 * Populate a burst of AEAD sym crypto ops.
 *
 * AAD is stored in each op's private area at aad_offset — immediately
 * after the IV, with the IV length rounded up to 16 bytes.  Per op:
 * session attach, m_src/m_dst from fixed offsets, AEAD data region,
 * AAD pointers, and digest placement (vector digest on DECRYPT,
 * in-mbuf past the data on ENCRYPT).  In VERIFY mode the IV and AAD
 * are (re)copied; AES-CCM needs the nonce one byte into the IV field
 * and the AAD 18 bytes into the AAD field per its spec layout.
 * NOTE(review): interior lines (loop-index declaration, out-of-place
 * branch body, else keywords) are elided in this excerpt.
 */
543 cperf_set_ops_aead(struct rte_crypto_op **ops,
544 uint32_t src_buf_offset, uint32_t dst_buf_offset,
545 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
546 const struct cperf_options *options,
547 const struct cperf_test_vector *test_vector,
548 uint16_t iv_offset, uint32_t *imix_idx,
549 uint64_t *tsc_start __rte_unused)
552 /* AAD is placed after the IV */
553 uint16_t aad_offset = iv_offset +
554 RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);
556 for (i = 0; i < nb_ops; i++) {
557 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
559 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
560 rte_crypto_op_attach_sym_session(ops[i], sess);
/* mbufs live at fixed offsets after each op structure. */
562 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
565 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
566 if (dst_buf_offset == 0)
567 sym_op->m_dst = NULL;
569 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
572 /* AEAD parameters */
573 if (options->imix_distribution_count) {
574 sym_op->aead.data.length =
575 options->imix_buffer_sizes[*imix_idx];
576 *imix_idx = (*imix_idx + 1) % options->pool_sz;
578 sym_op->aead.data.length = options->test_buffer_size;
579 sym_op->aead.data.offset = 0;
/* AAD lives in the op's private area, after the aligned IV. */
581 sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
582 uint8_t *, aad_offset);
583 sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
586 if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
/* DECRYPT: verify against the precomputed vector digest. */
587 sym_op->aead.digest.data = test_vector->digest.data;
588 sym_op->aead.digest.phys_addr =
589 test_vector->digest.phys_addr;
/* ENCRYPT: digest lands right after the AEAD data region. */
592 uint32_t offset = sym_op->aead.data.length +
593 sym_op->aead.data.offset;
594 struct rte_mbuf *buf, *tbuf;
596 if (options->out_of_place) {
/* Walk the chain to the segment holding `offset`. */
599 tbuf = sym_op->m_src;
600 while ((tbuf->next != NULL) &&
601 (offset >= tbuf->data_len)) {
602 offset -= tbuf->data_len;
606 * If there is not enough room in segment,
607 * place the digest in the next segment
609 if ((tbuf->data_len - offset) < options->digest_sz) {
616 sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
618 sym_op->aead.digest.phys_addr =
619 rte_pktmbuf_iova_offset(buf, offset);
/* VERIFY mode: reload IV and AAD with CCM's special layout. */
623 if (options->test == CPERF_TEST_TYPE_VERIFY) {
624 for (i = 0; i < nb_ops; i++) {
625 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
626 uint8_t *, iv_offset);
629 * If doing AES-CCM, nonce is copied one byte
630 * after the start of IV field, and AAD is copied
631 * 18 bytes after the start of the AAD field.
633 if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
634 memcpy(iv_ptr + 1, test_vector->aead_iv.data,
635 test_vector->aead_iv.length);
637 memcpy(ops[i]->sym->aead.aad.data + 18,
638 test_vector->aad.data,
639 test_vector->aad.length);
641 memcpy(iv_ptr, test_vector->aead_iv.data,
642 test_vector->aead_iv.length);
644 memcpy(ops[i]->sym->aead.aad.data,
645 test_vector->aad.data,
646 test_vector->aad.length);
/*
 * Build a lookaside-protocol IPsec security session for perf testing.
 *
 * Chooses the transform chain from the options: AEAD when an AEAD
 * algorithm is set, otherwise cipher + auth chained.  A fixed IPv4
 * ESP tunnel SA is configured; the SPI is set to the lcore id so each
 * worker gets a distinct session, and the SA direction (egress vs
 * ingress) follows the encrypt/generate vs decrypt/verify options.
 * Returns the security session cast to the sym-session return type.
 * NOTE(review): interior lines (the "neither AEAD nor cipher+auth"
 * error path, parts of the sess_conf initializer) are elided here.
 */
654 static struct rte_cryptodev_sym_session *
655 create_ipsec_session(struct rte_mempool *sess_mp,
656 struct rte_mempool *priv_mp,
658 const struct cperf_options *options,
659 const struct cperf_test_vector *test_vector,
662 struct rte_crypto_sym_xform xform = {0};
663 struct rte_crypto_sym_xform auth_xform = {0};
665 if (options->aead_algo != 0) {
666 /* Setup AEAD Parameters */
667 xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
669 xform.aead.algo = options->aead_algo;
670 xform.aead.op = options->aead_op;
671 xform.aead.iv.offset = iv_offset;
672 xform.aead.key.data = test_vector->aead_key.data;
673 xform.aead.key.length = test_vector->aead_key.length;
674 xform.aead.iv.length = test_vector->aead_iv.length;
675 xform.aead.digest_length = options->digest_sz;
676 xform.aead.aad_length = options->aead_aad_sz;
677 } else if (options->cipher_algo != 0 && options->auth_algo != 0) {
678 /* Setup Cipher Parameters */
679 xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
681 xform.cipher.algo = options->cipher_algo;
682 xform.cipher.op = options->cipher_op;
683 xform.cipher.iv.offset = iv_offset;
684 xform.cipher.iv.length = test_vector->cipher_iv.length;
685 /* cipher different than null */
686 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
687 xform.cipher.key.data = test_vector->cipher_key.data;
688 xform.cipher.key.length =
689 test_vector->cipher_key.length;
691 xform.cipher.key.data = NULL;
692 xform.cipher.key.length = 0;
695 /* Setup Auth Parameters */
696 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
697 auth_xform.next = NULL;
698 auth_xform.auth.algo = options->auth_algo;
699 auth_xform.auth.op = options->auth_op;
/* Auth IV is stored directly after the cipher IV in the op. */
700 auth_xform.auth.iv.offset = iv_offset +
701 xform.cipher.iv.length;
702 /* auth different than null */
703 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
704 auth_xform.auth.digest_length = options->digest_sz;
705 auth_xform.auth.key.length =
706 test_vector->auth_key.length;
707 auth_xform.auth.key.data = test_vector->auth_key.data;
708 auth_xform.auth.iv.length = test_vector->auth_iv.length;
710 auth_xform.auth.digest_length = 0;
711 auth_xform.auth.key.length = 0;
712 auth_xform.auth.key.data = NULL;
713 auth_xform.auth.iv.length = 0;
/* Chain cipher -> auth for the IPsec SA. */
716 xform.next = &auth_xform;
/* Fixed test SA parameters for the synthetic IPv4 ESP tunnel. */
721 #define CPERF_IPSEC_SRC_IP 0x01010101
722 #define CPERF_IPSEC_DST_IP 0x02020202
723 #define CPERF_IPSEC_SALT 0x0
724 #define CPERF_IPSEC_DEFTTL 64
725 struct rte_security_ipsec_tunnel_param tunnel = {
726 .type = RTE_SECURITY_IPSEC_TUNNEL_IPV4,
728 .src_ip = { .s_addr = CPERF_IPSEC_SRC_IP},
729 .dst_ip = { .s_addr = CPERF_IPSEC_DST_IP},
732 .ttl = CPERF_IPSEC_DEFTTL,
735 struct rte_security_session_conf sess_conf = {
736 .action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
737 .protocol = RTE_SECURITY_PROTOCOL_IPSEC,
739 .spi = rte_lcore_id(),
740 /**< For testing sake, lcore_id is taken as SPI so that
741 * for every core a different session is created.
743 .salt = CPERF_IPSEC_SALT,
/* Egress SA for encrypt/generate directions, else ingress. */
747 ((options->cipher_op ==
748 RTE_CRYPTO_CIPHER_OP_ENCRYPT) &&
750 RTE_CRYPTO_AUTH_OP_GENERATE)) ||
752 RTE_CRYPTO_AEAD_OP_ENCRYPT) ?
753 RTE_SECURITY_IPSEC_SA_DIR_EGRESS :
754 RTE_SECURITY_IPSEC_SA_DIR_INGRESS,
755 .proto = RTE_SECURITY_IPSEC_SA_PROTO_ESP,
756 .mode = RTE_SECURITY_IPSEC_SA_MODE_TUNNEL,
760 .crypto_xform = &xform
763 struct rte_security_ctx *ctx = (struct rte_security_ctx *)
764 rte_cryptodev_get_sec_ctx(dev_id);
766 /* Create security session */
767 return (void *)rte_security_session_create(ctx,
768 &sess_conf, sess_mp, priv_mp);
/*
 * Create the session matching the configured perf op type.
 *
 * Dispatches on options->op_type:
 *  - CPERF_ASYM_MODEX: asym mod-exp session (returned via the sym
 *    session pointer as an opaque carrier);
 *  - CPERF_PDCP / CPERF_IPSEC / CPERF_DOCSIS (RTE_LIB_SECURITY):
 *    lookaside-protocol security sessions;
 *  - otherwise: plain sym session initialized with a cipher, auth,
 *    chained cipher/auth, or AEAD transform built from the options
 *    and test vector.
 * NOTE(review): interior lines (error/return paths, some initializer
 * fields, closing braces) are elided in this excerpt.
 */
771 static struct rte_cryptodev_sym_session *
772 cperf_create_session(struct rte_mempool *sess_mp,
773 struct rte_mempool *priv_mp,
775 const struct cperf_options *options,
776 const struct cperf_test_vector *test_vector,
779 struct rte_crypto_sym_xform cipher_xform;
780 struct rte_crypto_sym_xform auth_xform;
781 struct rte_crypto_sym_xform aead_xform;
782 struct rte_cryptodev_sym_session *sess = NULL;
783 void *asym_sess = NULL;
784 struct rte_crypto_asym_xform xform = {0};
/* Asym mod-exp: fixed modulus/exponent from the test vectors. */
787 if (options->op_type == CPERF_ASYM_MODEX) {
789 xform.xform_type = RTE_CRYPTO_ASYM_XFORM_MODEX;
790 xform.modex.modulus.data = perf_mod_p;
791 xform.modex.modulus.length = sizeof(perf_mod_p);
792 xform.modex.exponent.data = perf_mod_e;
793 xform.modex.exponent.length = sizeof(perf_mod_e);
795 ret = rte_cryptodev_asym_session_create(dev_id, &xform,
796 sess_mp, &asym_sess);
798 RTE_LOG(ERR, USER1, "Asym session create failed\n");
803 #ifdef RTE_LIB_SECURITY
/* PDCP: cipher (optionally chained with auth) security session. */
807 if (options->op_type == CPERF_PDCP) {
808 /* Setup Cipher Parameters */
809 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
810 cipher_xform.next = NULL;
811 cipher_xform.cipher.algo = options->cipher_algo;
812 cipher_xform.cipher.op = options->cipher_op;
813 cipher_xform.cipher.iv.offset = iv_offset;
/* PDCP count/IV is 4 bytes. */
814 cipher_xform.cipher.iv.length = 4;
816 /* cipher different than null */
817 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
818 cipher_xform.cipher.key.data = test_vector->cipher_key.data;
819 cipher_xform.cipher.key.length = test_vector->cipher_key.length;
821 cipher_xform.cipher.key.data = NULL;
822 cipher_xform.cipher.key.length = 0;
825 /* Setup Auth Parameters */
826 if (options->auth_algo != 0) {
827 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
828 auth_xform.next = NULL;
829 auth_xform.auth.algo = options->auth_algo;
830 auth_xform.auth.op = options->auth_op;
/* Auth IV directly follows the cipher IV in the op. */
831 auth_xform.auth.iv.offset = iv_offset +
832 cipher_xform.cipher.iv.length;
834 /* auth different than null */
835 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
836 auth_xform.auth.digest_length = options->digest_sz;
837 auth_xform.auth.key.length = test_vector->auth_key.length;
838 auth_xform.auth.key.data = test_vector->auth_key.data;
839 auth_xform.auth.iv.length = test_vector->auth_iv.length;
841 auth_xform.auth.digest_length = 0;
842 auth_xform.auth.key.length = 0;
843 auth_xform.auth.key.data = NULL;
844 auth_xform.auth.iv.length = 0;
847 cipher_xform.next = &auth_xform;
849 cipher_xform.next = NULL;
852 struct rte_security_session_conf sess_conf = {
853 .action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
854 .protocol = RTE_SECURITY_PROTOCOL_PDCP,
857 .domain = options->pdcp_domain,
859 .sn_size = options->pdcp_sn_sz,
/* Session-owned HFN when enabled, else per-packet override. */
860 .hfn = options->pdcp_ses_hfn_en ?
861 PDCP_DEFAULT_HFN : 0,
862 .hfn_threshold = 0x70C0A,
863 .hfn_ovrd = !(options->pdcp_ses_hfn_en),
865 .crypto_xform = &cipher_xform
868 struct rte_security_ctx *ctx = (struct rte_security_ctx *)
869 rte_cryptodev_get_sec_ctx(dev_id);
871 /* Create security session */
872 return (void *)rte_security_session_create(ctx,
873 &sess_conf, sess_mp, priv_mp);
876 if (options->op_type == CPERF_IPSEC) {
877 return create_ipsec_session(sess_mp, priv_mp, dev_id,
878 options, test_vector, iv_offset);
/* DOCSIS: cipher-only security session; direction from cipher op. */
881 if (options->op_type == CPERF_DOCSIS) {
882 enum rte_security_docsis_direction direction;
884 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
885 cipher_xform.next = NULL;
886 cipher_xform.cipher.algo = options->cipher_algo;
887 cipher_xform.cipher.op = options->cipher_op;
888 cipher_xform.cipher.iv.offset = iv_offset;
889 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
890 cipher_xform.cipher.key.data =
891 test_vector->cipher_key.data;
892 cipher_xform.cipher.key.length =
893 test_vector->cipher_key.length;
894 cipher_xform.cipher.iv.length =
895 test_vector->cipher_iv.length;
897 cipher_xform.cipher.key.data = NULL;
898 cipher_xform.cipher.key.length = 0;
899 cipher_xform.cipher.iv.length = 0;
901 cipher_xform.next = NULL;
903 if (options->cipher_op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
904 direction = RTE_SECURITY_DOCSIS_DOWNLINK;
906 direction = RTE_SECURITY_DOCSIS_UPLINK;
908 struct rte_security_session_conf sess_conf = {
910 RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
911 .protocol = RTE_SECURITY_PROTOCOL_DOCSIS,
913 .direction = direction,
915 .crypto_xform = &cipher_xform
917 struct rte_security_ctx *ctx = (struct rte_security_ctx *)
918 rte_cryptodev_get_sec_ctx(dev_id);
920 /* Create security session */
921 return (void *)rte_security_session_create(ctx,
922 &sess_conf, sess_mp, priv_mp);
/* Plain symmetric crypto: allocate then init with the xform chain. */
925 sess = rte_cryptodev_sym_session_create(sess_mp);
929 if (options->op_type == CPERF_CIPHER_ONLY) {
930 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
931 cipher_xform.next = NULL;
932 cipher_xform.cipher.algo = options->cipher_algo;
933 cipher_xform.cipher.op = options->cipher_op;
934 cipher_xform.cipher.iv.offset = iv_offset;
936 /* cipher different than null */
937 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
938 cipher_xform.cipher.key.data =
939 test_vector->cipher_key.data;
940 cipher_xform.cipher.key.length =
941 test_vector->cipher_key.length;
942 cipher_xform.cipher.iv.length =
943 test_vector->cipher_iv.length;
945 cipher_xform.cipher.key.data = NULL;
946 cipher_xform.cipher.key.length = 0;
947 cipher_xform.cipher.iv.length = 0;
949 /* create crypto session */
950 rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
955 } else if (options->op_type == CPERF_AUTH_ONLY) {
956 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
957 auth_xform.next = NULL;
958 auth_xform.auth.algo = options->auth_algo;
959 auth_xform.auth.op = options->auth_op;
960 auth_xform.auth.iv.offset = iv_offset;
962 /* auth different than null */
963 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
964 auth_xform.auth.digest_length =
966 auth_xform.auth.key.length =
967 test_vector->auth_key.length;
968 auth_xform.auth.key.data = test_vector->auth_key.data;
969 auth_xform.auth.iv.length =
970 test_vector->auth_iv.length;
972 auth_xform.auth.digest_length = 0;
973 auth_xform.auth.key.length = 0;
974 auth_xform.auth.key.data = NULL;
975 auth_xform.auth.iv.length = 0;
977 /* create crypto session */
978 rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
983 } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
984 || options->op_type == CPERF_AUTH_THEN_CIPHER) {
988 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
989 cipher_xform.next = NULL;
990 cipher_xform.cipher.algo = options->cipher_algo;
991 cipher_xform.cipher.op = options->cipher_op;
992 cipher_xform.cipher.iv.offset = iv_offset;
994 /* cipher different than null */
995 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
996 cipher_xform.cipher.key.data =
997 test_vector->cipher_key.data;
998 cipher_xform.cipher.key.length =
999 test_vector->cipher_key.length;
1000 cipher_xform.cipher.iv.length =
1001 test_vector->cipher_iv.length;
1003 cipher_xform.cipher.key.data = NULL;
1004 cipher_xform.cipher.key.length = 0;
1005 cipher_xform.cipher.iv.length = 0;
1011 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
1012 auth_xform.next = NULL;
1013 auth_xform.auth.algo = options->auth_algo;
1014 auth_xform.auth.op = options->auth_op;
/* Auth IV directly follows the cipher IV in the op. */
1015 auth_xform.auth.iv.offset = iv_offset +
1016 cipher_xform.cipher.iv.length;
1018 /* auth different than null */
1019 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
1020 auth_xform.auth.digest_length = options->digest_sz;
1021 auth_xform.auth.iv.length = test_vector->auth_iv.length;
1022 auth_xform.auth.key.length =
1023 test_vector->auth_key.length;
1024 auth_xform.auth.key.data =
1025 test_vector->auth_key.data;
1027 auth_xform.auth.digest_length = 0;
1028 auth_xform.auth.key.length = 0;
1029 auth_xform.auth.key.data = NULL;
1030 auth_xform.auth.iv.length = 0;
1033 /* cipher then auth */
1034 if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
1035 cipher_xform.next = &auth_xform;
1036 /* create crypto session */
1037 rte_cryptodev_sym_session_init(dev_id,
1038 sess, &cipher_xform, priv_mp);
1039 } else { /* auth then cipher */
1040 auth_xform.next = &cipher_xform;
1041 /* create crypto session */
1042 rte_cryptodev_sym_session_init(dev_id,
1043 sess, &auth_xform, priv_mp);
1045 } else { /* options->op_type == CPERF_AEAD */
1046 aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
1047 aead_xform.next = NULL;
1048 aead_xform.aead.algo = options->aead_algo;
1049 aead_xform.aead.op = options->aead_op;
1050 aead_xform.aead.iv.offset = iv_offset;
1052 aead_xform.aead.key.data =
1053 test_vector->aead_key.data;
1054 aead_xform.aead.key.length =
1055 test_vector->aead_key.length;
1056 aead_xform.aead.iv.length = test_vector->aead_iv.length;
1058 aead_xform.aead.digest_length = options->digest_sz;
1059 aead_xform.aead.aad_length =
1060 options->aead_aad_sz;
1062 /* Create crypto session */
1063 rte_cryptodev_sym_session_init(dev_id,
1064 sess, &aead_xform, priv_mp);
1071 cperf_get_op_functions(const struct cperf_options *options,
1072 struct cperf_op_fns *op_fns)
1074 memset(op_fns, 0, sizeof(struct cperf_op_fns));
1076 op_fns->sess_create = cperf_create_session;
1078 switch (options->op_type) {
1080 op_fns->populate_ops = cperf_set_ops_aead;
1083 case CPERF_AUTH_THEN_CIPHER:
1084 case CPERF_CIPHER_THEN_AUTH:
1085 op_fns->populate_ops = cperf_set_ops_cipher_auth;
1087 case CPERF_AUTH_ONLY:
1088 if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
1089 op_fns->populate_ops = cperf_set_ops_null_auth;
1091 op_fns->populate_ops = cperf_set_ops_auth;
1093 case CPERF_CIPHER_ONLY:
1094 if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
1095 op_fns->populate_ops = cperf_set_ops_null_cipher;
1097 op_fns->populate_ops = cperf_set_ops_cipher;
1099 case CPERF_ASYM_MODEX:
1100 op_fns->populate_ops = cperf_set_ops_asym;
1102 #ifdef RTE_LIB_SECURITY
1105 op_fns->populate_ops = cperf_set_ops_security;
1108 op_fns->populate_ops = cperf_set_ops_security_ipsec;