1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2016-2017 Intel Corporation
5 #include <rte_cryptodev.h>
10 #include "cperf_test_vectors.h"
/*
 * Populate a batch of asymmetric (modexp) crypto ops for the perf test.
 * Each op gets the shared perf_base operand, a result buffer, and the
 * asymmetric session attached; status is reset to NOT_PROCESSED.
 * Most parameters are unused here (shared populate_ops signature).
 */
13 cperf_set_ops_asym(struct rte_crypto_op **ops,
14 uint32_t src_buf_offset __rte_unused,
15 uint32_t dst_buf_offset __rte_unused, uint16_t nb_ops,
16 struct rte_cryptodev_sym_session *sess,
17 const struct cperf_options *options __rte_unused,
18 const struct cperf_test_vector *test_vector __rte_unused,
19 uint16_t iv_offset __rte_unused,
20 uint32_t *imix_idx __rte_unused,
21 uint64_t *tsc_start __rte_unused)
/* NOTE(review): result is a stack array whose address is stored into every
 * op below; this assumes the ops are processed (or the result is ignored)
 * before this frame is left — confirm against the enqueue path.
 */
24 uint8_t result[sizeof(perf_mod_p)] = { 0 };
/* The generic populate_ops signature passes a sym session pointer; for the
 * asym case it actually carries an asym session, hence the cast. */
25 struct rte_cryptodev_asym_session *asym_sess = (void *)sess;
27 for (i = 0; i < nb_ops; i++) {
28 struct rte_crypto_asym_op *asym_op = ops[i]->asym;
30 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/* Same base/result buffers are shared by all ops in the burst. */
31 asym_op->modex.base.data = perf_base;
32 asym_op->modex.base.length = sizeof(perf_base);
33 asym_op->modex.result.data = result;
34 asym_op->modex.result.length = sizeof(result);
35 rte_crypto_op_attach_asym_session(ops[i], asym_sess);
40 #ifdef RTE_LIB_SECURITY
/*
 * Fill an mbuf with the IPsec test plaintext and fix up the IPv4 header.
 * Only done for the encrypt direction (AEAD or cipher encrypt); the
 * total_length field is rewritten in network byte order to match data_len.
 */
42 test_ipsec_vec_populate(struct rte_mbuf *m, const struct cperf_options *options,
43 const struct cperf_test_vector *test_vector)
/* The plaintext begins with an IPv4 header, so the mbuf start is viewed
 * directly as rte_ipv4_hdr. */
45 struct rte_ipv4_hdr *ip = rte_pktmbuf_mtod(m, struct rte_ipv4_hdr *);
47 if ((options->aead_op == RTE_CRYPTO_AEAD_OP_ENCRYPT) ||
48 (options->cipher_op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)) {
/* NOTE(review): assumes test_vector->plaintext holds at least
 * m->data_len bytes — confirm against vector setup. */
49 memcpy(ip, test_vector->plaintext.data, m->data_len);
51 ip->total_length = rte_cpu_to_be_16(m->data_len);
/*
 * Populate a batch of rte_security (PDCP / DOCSIS / IPsec) ops.
 * Attaches the security session, points m_src at the buffer placed after
 * each op, and fills protocol-specific offsets/lengths. For IPsec
 * throughput tests the vector-population cost is measured with rdtsc and
 * accumulated into *tsc_start so it can be excluded from the results.
 */
56 cperf_set_ops_security(struct rte_crypto_op **ops,
57 uint32_t src_buf_offset __rte_unused,
58 uint32_t dst_buf_offset __rte_unused,
59 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
60 const struct cperf_options *options,
61 const struct cperf_test_vector *test_vector,
62 uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
65 uint64_t tsc_start_temp, tsc_end_temp;
68 for (i = 0; i < nb_ops; i++) {
69 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
/* The shared populate_ops signature carries the security session through
 * the sym-session pointer; cast it back. */
70 struct rte_security_session *sec_sess =
71 (struct rte_security_session *)sess;
/* Per-packet HFN word lives in the op's private area at iv_offset;
 * written only when the session does not own the HFN. */
74 uint32_t *per_pkt_hfn = rte_crypto_op_ctod_offset(ops[i],
75 uint32_t *, iv_offset);
76 *per_pkt_hfn = options->pdcp_ses_hfn_en ? 0 : PDCP_DEFAULT_HFN;
78 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
79 rte_security_attach_session(ops[i], sec_sess);
/* Source mbuf is laid out at a fixed offset after the op itself. */
80 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
83 if (options->op_type == CPERF_PDCP ||
84 options->op_type == CPERF_IPSEC) {
85 /* In case of IPsec, headroom is consumed by PMD,
88 sym_op->m_src->data_off = options->headroom_sz;
90 sym_op->m_src->buf_len = options->segment_sz;
91 sym_op->m_src->data_len = options->test_buffer_size;
92 sym_op->m_src->pkt_len = sym_op->m_src->data_len;
/* Throughput-mode IPsec without a user test file: (re)populate the
 * vector and keep the cost out of the measured cycles. */
94 if ((options->op_type == CPERF_IPSEC) &&
95 (options->test_file == NULL) &&
96 (options->test == CPERF_TEST_TYPE_THROUGHPUT)) {
97 tsc_start_temp = rte_rdtsc_precise();
98 test_ipsec_vec_populate(sym_op->m_src, options,
100 tsc_end_temp = rte_rdtsc_precise();
102 *tsc_start += (tsc_end_temp - tsc_start_temp);
106 if (options->op_type == CPERF_DOCSIS) {
/* IMIX: walk the configured size distribution round-robin. */
107 if (options->imix_distribution_count) {
108 buf_sz = options->imix_buffer_sizes[*imix_idx];
109 *imix_idx = (*imix_idx + 1) % options->pool_sz;
111 buf_sz = options->test_buffer_size;
113 sym_op->m_src->buf_len = options->segment_sz;
114 sym_op->m_src->data_len = buf_sz;
115 sym_op->m_src->pkt_len = buf_sz;
117 /* DOCSIS header is not CRC'ed */
118 sym_op->auth.data.offset = options->docsis_hdr_sz;
119 sym_op->auth.data.length = buf_sz -
120 sym_op->auth.data.offset - RTE_ETHER_CRC_LEN;
122 * DOCSIS header and SRC and DST MAC addresses are not
125 sym_op->cipher.data.offset = sym_op->auth.data.offset +
126 RTE_ETHER_HDR_LEN - RTE_ETHER_TYPE_LEN;
127 sym_op->cipher.data.length = buf_sz -
128 sym_op->cipher.data.offset;
131 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
132 if (dst_buf_offset == 0)
133 sym_op->m_dst = NULL;
135 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
/*
 * Populate ops for NULL-cipher tests: attach the sym session, wire up the
 * src/dst mbufs located after each op, and set cipher length (fixed size
 * or IMIX distribution) with offset 0. No IV or test-vector data needed.
 */
144 cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
145 uint32_t src_buf_offset, uint32_t dst_buf_offset,
146 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
147 const struct cperf_options *options,
148 const struct cperf_test_vector *test_vector __rte_unused,
149 uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
150 uint64_t *tsc_start __rte_unused)
154 for (i = 0; i < nb_ops; i++) {
155 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
157 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
158 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf sits at a fixed offset after the op structure. */
160 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
163 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
164 if (dst_buf_offset == 0)
165 sym_op->m_dst = NULL;
167 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
170 /* cipher parameters */
171 if (options->imix_distribution_count) {
172 sym_op->cipher.data.length =
173 options->imix_buffer_sizes[*imix_idx];
174 *imix_idx = (*imix_idx + 1) % options->pool_sz;
176 sym_op->cipher.data.length = options->test_buffer_size;
177 sym_op->cipher.data.offset = 0;
/*
 * Populate ops for NULL-auth tests: mirrors cperf_set_ops_null_cipher but
 * fills the auth data length/offset instead of the cipher fields.
 */
184 cperf_set_ops_null_auth(struct rte_crypto_op **ops,
185 uint32_t src_buf_offset, uint32_t dst_buf_offset,
186 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
187 const struct cperf_options *options,
188 const struct cperf_test_vector *test_vector __rte_unused,
189 uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
190 uint64_t *tsc_start __rte_unused)
194 for (i = 0; i < nb_ops; i++) {
195 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
197 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
198 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf sits at a fixed offset after the op structure. */
200 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
203 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
204 if (dst_buf_offset == 0)
205 sym_op->m_dst = NULL;
207 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
210 /* auth parameters */
211 if (options->imix_distribution_count) {
212 sym_op->auth.data.length =
213 options->imix_buffer_sizes[*imix_idx];
214 *imix_idx = (*imix_idx + 1) % options->pool_sz;
216 sym_op->auth.data.length = options->test_buffer_size;
217 sym_op->auth.data.offset = 0;
/*
 * Populate ops for cipher-only tests. Sets cipher length (fixed or IMIX);
 * for bit-level algorithms (SNOW3G UEA2, KASUMI F8, ZUC EEA3) the length
 * is converted from bytes to bits. In verify mode the cipher IV from the
 * test vector is copied into each op's IV area.
 */
224 cperf_set_ops_cipher(struct rte_crypto_op **ops,
225 uint32_t src_buf_offset, uint32_t dst_buf_offset,
226 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
227 const struct cperf_options *options,
228 const struct cperf_test_vector *test_vector,
229 uint16_t iv_offset, uint32_t *imix_idx,
230 uint64_t *tsc_start __rte_unused)
234 for (i = 0; i < nb_ops; i++) {
235 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
237 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
238 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf sits at a fixed offset after the op structure. */
240 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
243 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
244 if (dst_buf_offset == 0)
245 sym_op->m_dst = NULL;
247 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
250 /* cipher parameters */
251 if (options->imix_distribution_count) {
252 sym_op->cipher.data.length =
253 options->imix_buffer_sizes[*imix_idx];
254 *imix_idx = (*imix_idx + 1) % options->pool_sz;
256 sym_op->cipher.data.length = options->test_buffer_size;
/* Wireless algorithms express data length in bits, not bytes. */
258 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
259 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
260 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
261 sym_op->cipher.data.length <<= 3;
263 sym_op->cipher.data.offset = 0;
/* Verify mode: seed each op's IV region with the known-answer IV. */
266 if (options->test == CPERF_TEST_TYPE_VERIFY) {
267 for (i = 0; i < nb_ops; i++) {
268 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
269 uint8_t *, iv_offset);
271 memcpy(iv_ptr, test_vector->cipher_iv.data,
272 test_vector->cipher_iv.length);
/*
 * Populate ops for auth-only tests. For VERIFY ops the digest comes from
 * the test vector; for GENERATE the digest is placed in the mbuf chain
 * right after the payload (walking segments so it never straddles a
 * segment boundary). Bit-level auth algorithms get byte->bit conversion.
 */
281 cperf_set_ops_auth(struct rte_crypto_op **ops,
282 uint32_t src_buf_offset, uint32_t dst_buf_offset,
283 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
284 const struct cperf_options *options,
285 const struct cperf_test_vector *test_vector,
286 uint16_t iv_offset, uint32_t *imix_idx,
287 uint64_t *tsc_start __rte_unused)
291 for (i = 0; i < nb_ops; i++) {
292 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
294 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
295 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf sits at a fixed offset after the op structure. */
297 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
300 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
301 if (dst_buf_offset == 0)
302 sym_op->m_dst = NULL;
304 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
/* Algorithms with an auth IV (e.g. wireless) need it copied per op. */
307 if (test_vector->auth_iv.length) {
308 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
311 memcpy(iv_ptr, test_vector->auth_iv.data,
312 test_vector->auth_iv.length);
315 /* authentication parameters */
316 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
/* Verify against the precomputed digest from the vector. */
317 sym_op->auth.digest.data = test_vector->digest.data;
318 sym_op->auth.digest.phys_addr =
319 test_vector->digest.phys_addr;
/* Generate path: find where the payload ends in the segment chain. */
322 uint32_t offset = options->test_buffer_size;
323 struct rte_mbuf *buf, *tbuf;
325 if (options->out_of_place) {
328 tbuf = sym_op->m_src;
329 while ((tbuf->next != NULL) &&
330 (offset >= tbuf->data_len)) {
331 offset -= tbuf->data_len;
335 * If there is not enough room in segment,
336 * place the digest in the next segment
338 if ((tbuf->data_len - offset) < options->digest_sz) {
345 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
347 sym_op->auth.digest.phys_addr =
348 rte_pktmbuf_iova_offset(buf, offset);
352 if (options->imix_distribution_count) {
353 sym_op->auth.data.length =
354 options->imix_buffer_sizes[*imix_idx];
355 *imix_idx = (*imix_idx + 1) % options->pool_sz;
357 sym_op->auth.data.length = options->test_buffer_size;
/* Wireless algorithms express data length in bits, not bytes. */
359 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
360 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
361 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
362 sym_op->auth.data.length <<= 3;
364 sym_op->auth.data.offset = 0;
/* Verify mode: (re)seed the auth IV for the known-answer check. */
367 if (options->test == CPERF_TEST_TYPE_VERIFY) {
368 if (test_vector->auth_iv.length) {
369 for (i = 0; i < nb_ops; i++) {
370 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
371 uint8_t *, iv_offset);
373 memcpy(iv_ptr, test_vector->auth_iv.data,
374 test_vector->auth_iv.length);
/*
 * Populate ops for chained cipher+auth tests. Combines the cipher setup
 * (length fixed/IMIX, bit-length conversion for wireless algos) with the
 * auth digest placement logic of cperf_set_ops_auth. In verify mode both
 * the cipher IV and, if present, the auth IV (stored immediately after
 * the cipher IV) are copied from the test vector.
 */
382 cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
383 uint32_t src_buf_offset, uint32_t dst_buf_offset,
384 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
385 const struct cperf_options *options,
386 const struct cperf_test_vector *test_vector,
387 uint16_t iv_offset, uint32_t *imix_idx,
388 uint64_t *tsc_start __rte_unused)
392 for (i = 0; i < nb_ops; i++) {
393 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
395 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
396 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf sits at a fixed offset after the op structure. */
398 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
401 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
402 if (dst_buf_offset == 0)
403 sym_op->m_dst = NULL;
405 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
408 /* cipher parameters */
409 if (options->imix_distribution_count) {
410 sym_op->cipher.data.length =
411 options->imix_buffer_sizes[*imix_idx];
412 *imix_idx = (*imix_idx + 1) % options->pool_sz;
414 sym_op->cipher.data.length = options->test_buffer_size;
/* Wireless algorithms express data length in bits, not bytes. */
416 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
417 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
418 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
419 sym_op->cipher.data.length <<= 3;
421 sym_op->cipher.data.offset = 0;
423 /* authentication parameters */
424 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
/* Verify against the precomputed digest from the vector. */
425 sym_op->auth.digest.data = test_vector->digest.data;
426 sym_op->auth.digest.phys_addr =
427 test_vector->digest.phys_addr;
/* Generate path: locate payload end in the (possibly segmented) mbuf. */
430 uint32_t offset = options->test_buffer_size;
431 struct rte_mbuf *buf, *tbuf;
433 if (options->out_of_place) {
436 tbuf = sym_op->m_src;
437 while ((tbuf->next != NULL) &&
438 (offset >= tbuf->data_len)) {
439 offset -= tbuf->data_len;
443 * If there is not enough room in segment,
444 * place the digest in the next segment
446 if ((tbuf->data_len - offset) < options->digest_sz) {
453 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
455 sym_op->auth.digest.phys_addr =
456 rte_pktmbuf_iova_offset(buf, offset);
459 if (options->imix_distribution_count) {
460 sym_op->auth.data.length =
461 options->imix_buffer_sizes[*imix_idx];
462 *imix_idx = (*imix_idx + 1) % options->pool_sz;
464 sym_op->auth.data.length = options->test_buffer_size;
/* Wireless algorithms express data length in bits, not bytes. */
466 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
467 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
468 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
469 sym_op->auth.data.length <<= 3;
471 sym_op->auth.data.offset = 0;
/* Verify mode: seed cipher IV, then auth IV right behind it. */
474 if (options->test == CPERF_TEST_TYPE_VERIFY) {
475 for (i = 0; i < nb_ops; i++) {
476 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
477 uint8_t *, iv_offset);
479 memcpy(iv_ptr, test_vector->cipher_iv.data,
480 test_vector->cipher_iv.length);
481 if (test_vector->auth_iv.length) {
483 * Copy IV after the crypto operation and
486 iv_ptr += test_vector->cipher_iv.length;
487 memcpy(iv_ptr, test_vector->auth_iv.data,
488 test_vector->auth_iv.length);
/*
 * Populate ops for AEAD tests. The AAD lives in the op private area just
 * after the (16-byte aligned) IV. Decrypt ops reference the precomputed
 * digest from the vector; encrypt ops place the digest in the mbuf chain
 * after the AEAD payload. Verify mode additionally seeds IV and AAD, with
 * AES-CCM's special layout (nonce at IV+1, AAD data at AAD+18).
 */
498 cperf_set_ops_aead(struct rte_crypto_op **ops,
499 uint32_t src_buf_offset, uint32_t dst_buf_offset,
500 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
501 const struct cperf_options *options,
502 const struct cperf_test_vector *test_vector,
503 uint16_t iv_offset, uint32_t *imix_idx,
504 uint64_t *tsc_start __rte_unused)
507 /* AAD is placed after the IV */
508 uint16_t aad_offset = iv_offset +
509 RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);
511 for (i = 0; i < nb_ops; i++) {
512 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
514 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
515 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf sits at a fixed offset after the op structure. */
517 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
520 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
521 if (dst_buf_offset == 0)
522 sym_op->m_dst = NULL;
524 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
527 /* AEAD parameters */
528 if (options->imix_distribution_count) {
529 sym_op->aead.data.length =
530 options->imix_buffer_sizes[*imix_idx];
531 *imix_idx = (*imix_idx + 1) % options->pool_sz;
533 sym_op->aead.data.length = options->test_buffer_size;
534 sym_op->aead.data.offset = 0;
/* AAD pointer/IOVA both derived from the op's private area. */
536 sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
537 uint8_t *, aad_offset);
538 sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
541 if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
/* Decrypt verifies against the vector's precomputed digest. */
542 sym_op->aead.digest.data = test_vector->digest.data;
543 sym_op->aead.digest.phys_addr =
544 test_vector->digest.phys_addr;
/* Encrypt: digest goes right after the AEAD payload in the chain. */
547 uint32_t offset = sym_op->aead.data.length +
548 sym_op->aead.data.offset;
549 struct rte_mbuf *buf, *tbuf;
551 if (options->out_of_place) {
554 tbuf = sym_op->m_src;
555 while ((tbuf->next != NULL) &&
556 (offset >= tbuf->data_len)) {
557 offset -= tbuf->data_len;
561 * If there is not enough room in segment,
562 * place the digest in the next segment
564 if ((tbuf->data_len - offset) < options->digest_sz) {
571 sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
573 sym_op->aead.digest.phys_addr =
574 rte_pktmbuf_iova_offset(buf, offset);
/* Verify mode: seed IV and AAD per op. */
578 if (options->test == CPERF_TEST_TYPE_VERIFY) {
579 for (i = 0; i < nb_ops; i++) {
580 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
581 uint8_t *, iv_offset);
584 * If doing AES-CCM, nonce is copied one byte
585 * after the start of IV field, and AAD is copied
586 * 18 bytes after the start of the AAD field.
588 if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
589 memcpy(iv_ptr + 1, test_vector->aead_iv.data,
590 test_vector->aead_iv.length);
592 memcpy(ops[i]->sym->aead.aad.data + 18,
593 test_vector->aad.data,
594 test_vector->aad.length);
596 memcpy(iv_ptr, test_vector->aead_iv.data,
597 test_vector->aead_iv.length);
599 memcpy(ops[i]->sym->aead.aad.data,
600 test_vector->aad.data,
601 test_vector->aad.length);
/*
 * Build an rte_security IPsec (ESP, tunnel mode) session for the perf
 * test. The crypto transform is AEAD if an AEAD algo is configured,
 * otherwise a cipher+auth chain. Direction (egress/ingress) is derived
 * from the configured op directions. Returns the security session cast
 * to the generic sym-session pointer used by the rest of this file.
 */
609 static struct rte_cryptodev_sym_session *
610 create_ipsec_session(struct rte_mempool *sess_mp,
611 struct rte_mempool *priv_mp,
613 const struct cperf_options *options,
614 const struct cperf_test_vector *test_vector,
617 struct rte_crypto_sym_xform xform = {0};
618 struct rte_crypto_sym_xform auth_xform = {0};
620 if (options->aead_algo != 0) {
621 /* Setup AEAD Parameters */
622 xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
624 xform.aead.algo = options->aead_algo;
625 xform.aead.op = options->aead_op;
626 xform.aead.iv.offset = iv_offset;
627 xform.aead.key.data = test_vector->aead_key.data;
628 xform.aead.key.length = test_vector->aead_key.length;
629 xform.aead.iv.length = test_vector->aead_iv.length;
630 xform.aead.digest_length = options->digest_sz;
631 xform.aead.aad_length = options->aead_aad_sz;
632 } else if (options->cipher_algo != 0 && options->auth_algo != 0) {
633 /* Setup Cipher Parameters */
634 xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
636 xform.cipher.algo = options->cipher_algo;
637 xform.cipher.op = options->cipher_op;
638 xform.cipher.iv.offset = iv_offset;
639 xform.cipher.iv.length = test_vector->cipher_iv.length;
640 /* cipher different than null */
641 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
642 xform.cipher.key.data = test_vector->cipher_key.data;
643 xform.cipher.key.length =
644 test_vector->cipher_key.length;
646 xform.cipher.key.data = NULL;
647 xform.cipher.key.length = 0;
650 /* Setup Auth Parameters */
651 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
652 auth_xform.next = NULL;
653 auth_xform.auth.algo = options->auth_algo;
654 auth_xform.auth.op = options->auth_op;
/* Auth IV is placed directly after the cipher IV in the op. */
655 auth_xform.auth.iv.offset = iv_offset +
656 xform.cipher.iv.length;
657 /* auth different than null */
658 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
659 auth_xform.auth.digest_length = options->digest_sz;
660 auth_xform.auth.key.length =
661 test_vector->auth_key.length;
662 auth_xform.auth.key.data = test_vector->auth_key.data;
663 auth_xform.auth.iv.length = test_vector->auth_iv.length;
665 auth_xform.auth.digest_length = 0;
666 auth_xform.auth.key.length = 0;
667 auth_xform.auth.key.data = NULL;
668 auth_xform.auth.iv.length = 0;
671 xform.next = &auth_xform;
/* Fixed test addresses/salt/TTL for the synthetic IPv4 tunnel. */
676 #define CPERF_IPSEC_SRC_IP 0x01010101
677 #define CPERF_IPSEC_DST_IP 0x02020202
678 #define CPERF_IPSEC_SALT 0x0
679 #define CPERF_IPSEC_DEFTTL 64
680 struct rte_security_ipsec_tunnel_param tunnel = {
681 .type = RTE_SECURITY_IPSEC_TUNNEL_IPV4,
683 .src_ip = { .s_addr = CPERF_IPSEC_SRC_IP},
684 .dst_ip = { .s_addr = CPERF_IPSEC_DST_IP},
687 .ttl = CPERF_IPSEC_DEFTTL,
690 struct rte_security_session_conf sess_conf = {
691 .action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
692 .protocol = RTE_SECURITY_PROTOCOL_IPSEC,
694 .spi = rte_lcore_id(),
695 /**< For testing sake, lcore_id is taken as SPI so that
696 * for every core a different session is created.
698 .salt = CPERF_IPSEC_SALT,
/* Egress when encrypt+generate (or AEAD encrypt), ingress otherwise. */
702 ((options->cipher_op ==
703 RTE_CRYPTO_CIPHER_OP_ENCRYPT) &&
705 RTE_CRYPTO_AUTH_OP_GENERATE)) ||
707 RTE_CRYPTO_AEAD_OP_ENCRYPT) ?
708 RTE_SECURITY_IPSEC_SA_DIR_EGRESS :
709 RTE_SECURITY_IPSEC_SA_DIR_INGRESS,
710 .proto = RTE_SECURITY_IPSEC_SA_PROTO_ESP,
711 .mode = RTE_SECURITY_IPSEC_SA_MODE_TUNNEL,
715 .crypto_xform = &xform
718 struct rte_security_ctx *ctx = (struct rte_security_ctx *)
719 rte_cryptodev_get_sec_ctx(dev_id);
721 /* Create security session */
722 return (void *)rte_security_session_create(ctx,
723 &sess_conf, sess_mp, priv_mp);
/*
 * Create the session for the configured op type. Dispatches to:
 *  - asym modexp session (CPERF_ASYM_MODEX),
 *  - rte_security sessions for PDCP / IPsec / DOCSIS,
 *  - plain symmetric sessions for cipher-only, auth-only,
 *    cipher+auth chains (either order) and AEAD.
 * All variants are returned through the generic sym-session pointer;
 * security/asym sessions are cast, matching the populate_ops casts above.
 */
726 static struct rte_cryptodev_sym_session *
727 cperf_create_session(struct rte_mempool *sess_mp,
728 struct rte_mempool *priv_mp,
730 const struct cperf_options *options,
731 const struct cperf_test_vector *test_vector,
734 struct rte_crypto_sym_xform cipher_xform;
735 struct rte_crypto_sym_xform auth_xform;
736 struct rte_crypto_sym_xform aead_xform;
737 struct rte_cryptodev_sym_session *sess = NULL;
738 struct rte_crypto_asym_xform xform = {0};
741 if (options->op_type == CPERF_ASYM_MODEX) {
/* Modexp over the fixed perf_mod_p / perf_mod_e test operands. */
743 xform.xform_type = RTE_CRYPTO_ASYM_XFORM_MODEX;
744 xform.modex.modulus.data = perf_mod_p;
745 xform.modex.modulus.length = sizeof(perf_mod_p);
746 xform.modex.exponent.data = perf_mod_e;
747 xform.modex.exponent.length = sizeof(perf_mod_e);
749 sess = (void *)rte_cryptodev_asym_session_create(sess_mp);
752 rc = rte_cryptodev_asym_session_init(dev_id, (void *)sess,
/* On init failure the session is cleared and freed before returning. */
756 rte_cryptodev_asym_session_clear(dev_id,
758 rte_cryptodev_asym_session_free((void *)sess);
764 #ifdef RTE_LIB_SECURITY
768 if (options->op_type == CPERF_PDCP) {
769 /* Setup Cipher Parameters */
770 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
771 cipher_xform.next = NULL;
772 cipher_xform.cipher.algo = options->cipher_algo;
773 cipher_xform.cipher.op = options->cipher_op;
774 cipher_xform.cipher.iv.offset = iv_offset;
/* NOTE(review): PDCP IV length is hard-coded to 4 bytes here —
 * presumably the per-packet HFN word; confirm against the PDCP spec
 * usage in cperf_set_ops_security. */
775 cipher_xform.cipher.iv.length = 4;
777 /* cipher different than null */
778 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
779 cipher_xform.cipher.key.data = test_vector->cipher_key.data;
780 cipher_xform.cipher.key.length = test_vector->cipher_key.length;
782 cipher_xform.cipher.key.data = NULL;
783 cipher_xform.cipher.key.length = 0;
786 /* Setup Auth Parameters */
787 if (options->auth_algo != 0) {
788 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
789 auth_xform.next = NULL;
790 auth_xform.auth.algo = options->auth_algo;
791 auth_xform.auth.op = options->auth_op;
792 auth_xform.auth.iv.offset = iv_offset +
793 cipher_xform.cipher.iv.length;
795 /* auth different than null */
796 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
797 auth_xform.auth.digest_length = options->digest_sz;
798 auth_xform.auth.key.length = test_vector->auth_key.length;
799 auth_xform.auth.key.data = test_vector->auth_key.data;
800 auth_xform.auth.iv.length = test_vector->auth_iv.length;
802 auth_xform.auth.digest_length = 0;
803 auth_xform.auth.key.length = 0;
804 auth_xform.auth.key.data = NULL;
805 auth_xform.auth.iv.length = 0;
808 cipher_xform.next = &auth_xform;
810 cipher_xform.next = NULL;
813 struct rte_security_session_conf sess_conf = {
814 .action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
815 .protocol = RTE_SECURITY_PROTOCOL_PDCP,
818 .domain = options->pdcp_domain,
820 .sn_size = options->pdcp_sn_sz,
/* Session-owned HFN when enabled; otherwise per-packet override. */
821 .hfn = options->pdcp_ses_hfn_en ?
822 PDCP_DEFAULT_HFN : 0,
823 .hfn_threshold = 0x70C0A,
824 .hfn_ovrd = !(options->pdcp_ses_hfn_en),
826 .crypto_xform = &cipher_xform
829 struct rte_security_ctx *ctx = (struct rte_security_ctx *)
830 rte_cryptodev_get_sec_ctx(dev_id);
832 /* Create security session */
833 return (void *)rte_security_session_create(ctx,
834 &sess_conf, sess_mp, priv_mp);
837 if (options->op_type == CPERF_IPSEC) {
838 return create_ipsec_session(sess_mp, priv_mp, dev_id,
839 options, test_vector, iv_offset);
842 if (options->op_type == CPERF_DOCSIS) {
843 enum rte_security_docsis_direction direction;
845 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
846 cipher_xform.next = NULL;
847 cipher_xform.cipher.algo = options->cipher_algo;
848 cipher_xform.cipher.op = options->cipher_op;
849 cipher_xform.cipher.iv.offset = iv_offset;
850 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
851 cipher_xform.cipher.key.data =
852 test_vector->cipher_key.data;
853 cipher_xform.cipher.key.length =
854 test_vector->cipher_key.length;
855 cipher_xform.cipher.iv.length =
856 test_vector->cipher_iv.length;
858 cipher_xform.cipher.key.data = NULL;
859 cipher_xform.cipher.key.length = 0;
860 cipher_xform.cipher.iv.length = 0;
862 cipher_xform.next = NULL;
/* DOCSIS: encrypt maps to downlink, otherwise uplink. */
864 if (options->cipher_op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
865 direction = RTE_SECURITY_DOCSIS_DOWNLINK;
867 direction = RTE_SECURITY_DOCSIS_UPLINK;
869 struct rte_security_session_conf sess_conf = {
871 RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
872 .protocol = RTE_SECURITY_PROTOCOL_DOCSIS,
874 .direction = direction,
876 .crypto_xform = &cipher_xform
878 struct rte_security_ctx *ctx = (struct rte_security_ctx *)
879 rte_cryptodev_get_sec_ctx(dev_id);
881 /* Create security session */
882 return (void *)rte_security_session_create(ctx,
883 &sess_conf, sess_mp, priv_mp);
/* Non-security path: plain symmetric session. */
886 sess = rte_cryptodev_sym_session_create(sess_mp);
890 if (options->op_type == CPERF_CIPHER_ONLY) {
891 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
892 cipher_xform.next = NULL;
893 cipher_xform.cipher.algo = options->cipher_algo;
894 cipher_xform.cipher.op = options->cipher_op;
895 cipher_xform.cipher.iv.offset = iv_offset;
897 /* cipher different than null */
898 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
899 cipher_xform.cipher.key.data =
900 test_vector->cipher_key.data;
901 cipher_xform.cipher.key.length =
902 test_vector->cipher_key.length;
903 cipher_xform.cipher.iv.length =
904 test_vector->cipher_iv.length;
906 cipher_xform.cipher.key.data = NULL;
907 cipher_xform.cipher.key.length = 0;
908 cipher_xform.cipher.iv.length = 0;
910 /* create crypto session */
911 rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
916 } else if (options->op_type == CPERF_AUTH_ONLY) {
917 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
918 auth_xform.next = NULL;
919 auth_xform.auth.algo = options->auth_algo;
920 auth_xform.auth.op = options->auth_op;
921 auth_xform.auth.iv.offset = iv_offset;
923 /* auth different than null */
924 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
925 auth_xform.auth.digest_length =
927 auth_xform.auth.key.length =
928 test_vector->auth_key.length;
929 auth_xform.auth.key.data = test_vector->auth_key.data;
930 auth_xform.auth.iv.length =
931 test_vector->auth_iv.length;
933 auth_xform.auth.digest_length = 0;
934 auth_xform.auth.key.length = 0;
935 auth_xform.auth.key.data = NULL;
936 auth_xform.auth.iv.length = 0;
938 /* create crypto session */
939 rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
944 } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
945 || options->op_type == CPERF_AUTH_THEN_CIPHER) {
949 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
950 cipher_xform.next = NULL;
951 cipher_xform.cipher.algo = options->cipher_algo;
952 cipher_xform.cipher.op = options->cipher_op;
953 cipher_xform.cipher.iv.offset = iv_offset;
955 /* cipher different than null */
956 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
957 cipher_xform.cipher.key.data =
958 test_vector->cipher_key.data;
959 cipher_xform.cipher.key.length =
960 test_vector->cipher_key.length;
961 cipher_xform.cipher.iv.length =
962 test_vector->cipher_iv.length;
964 cipher_xform.cipher.key.data = NULL;
965 cipher_xform.cipher.key.length = 0;
966 cipher_xform.cipher.iv.length = 0;
972 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
973 auth_xform.next = NULL;
974 auth_xform.auth.algo = options->auth_algo;
975 auth_xform.auth.op = options->auth_op;
/* Auth IV follows the cipher IV in the op private area. */
976 auth_xform.auth.iv.offset = iv_offset +
977 cipher_xform.cipher.iv.length;
979 /* auth different than null */
980 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
981 auth_xform.auth.digest_length = options->digest_sz;
982 auth_xform.auth.iv.length = test_vector->auth_iv.length;
983 auth_xform.auth.key.length =
984 test_vector->auth_key.length;
985 auth_xform.auth.key.data =
986 test_vector->auth_key.data;
988 auth_xform.auth.digest_length = 0;
989 auth_xform.auth.key.length = 0;
990 auth_xform.auth.key.data = NULL;
991 auth_xform.auth.iv.length = 0;
994 /* cipher then auth */
995 if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
996 cipher_xform.next = &auth_xform;
997 /* create crypto session */
998 rte_cryptodev_sym_session_init(dev_id,
999 sess, &cipher_xform, priv_mp);
1000 } else { /* auth then cipher */
1001 auth_xform.next = &cipher_xform;
1002 /* create crypto session */
1003 rte_cryptodev_sym_session_init(dev_id,
1004 sess, &auth_xform, priv_mp);
1006 } else { /* options->op_type == CPERF_AEAD */
1007 aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
1008 aead_xform.next = NULL;
1009 aead_xform.aead.algo = options->aead_algo;
1010 aead_xform.aead.op = options->aead_op;
1011 aead_xform.aead.iv.offset = iv_offset;
1013 aead_xform.aead.key.data =
1014 test_vector->aead_key.data;
1015 aead_xform.aead.key.length =
1016 test_vector->aead_key.length;
1017 aead_xform.aead.iv.length = test_vector->aead_iv.length;
1019 aead_xform.aead.digest_length = options->digest_sz;
1020 aead_xform.aead.aad_length =
1021 options->aead_aad_sz;
1023 /* Create crypto session */
1024 rte_cryptodev_sym_session_init(dev_id,
1025 sess, &aead_xform, priv_mp);
1032 cperf_get_op_functions(const struct cperf_options *options,
1033 struct cperf_op_fns *op_fns)
1035 memset(op_fns, 0, sizeof(struct cperf_op_fns));
1037 op_fns->sess_create = cperf_create_session;
1039 switch (options->op_type) {
1041 op_fns->populate_ops = cperf_set_ops_aead;
1044 case CPERF_AUTH_THEN_CIPHER:
1045 case CPERF_CIPHER_THEN_AUTH:
1046 op_fns->populate_ops = cperf_set_ops_cipher_auth;
1048 case CPERF_AUTH_ONLY:
1049 if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
1050 op_fns->populate_ops = cperf_set_ops_null_auth;
1052 op_fns->populate_ops = cperf_set_ops_auth;
1054 case CPERF_CIPHER_ONLY:
1055 if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
1056 op_fns->populate_ops = cperf_set_ops_null_cipher;
1058 op_fns->populate_ops = cperf_set_ops_cipher;
1060 case CPERF_ASYM_MODEX:
1061 op_fns->populate_ops = cperf_set_ops_asym;
1063 #ifdef RTE_LIB_SECURITY
1067 op_fns->populate_ops = cperf_set_ops_security;