4 * Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
10 * * Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * * Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in
14 * the documentation and/or other materials provided with the
16 * * Neither the name of Intel Corporation nor the names of its
17 * contributors may be used to endorse or promote products derived
18 * from this software without specific prior written permission.
20 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
23 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
24 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
25 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
26 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
27 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
28 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
29 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33 #include <rte_cryptodev.h>
35 #include "cperf_ops.h"
36 #include "cperf_test_vectors.h"
/*
 * Populate a burst of crypto ops for the NULL-cipher fast path.
 *
 * ops            - array of pre-allocated crypto ops to fill in
 * src_buf_offset - byte offset from each op to its source mbuf
 * dst_buf_offset - byte offset from each op to its dest mbuf; 0 means
 *                  in-place (no separate destination buffer)
 * nb_ops         - number of ops in the burst
 * sess           - symmetric session attached to every op
 * options        - test configuration (buffer size, etc.)
 *
 * NOTE(review): this listing has gaps (embedded line numbers are
 * non-contiguous) — the return type, opening brace, the 'else' branch
 * for the m_dst assignment, the return statement and several
 * expression continuations are not visible here.
 */
39 cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
40 uint32_t src_buf_offset, uint32_t dst_buf_offset,
41 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
42 const struct cperf_options *options,
43 const struct cperf_test_vector *test_vector __rte_unused,
44 uint16_t iv_offset __rte_unused)
48 for (i = 0; i < nb_ops; i++) {
49 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
/* Reset status so the PMD result of a previous run is not reused. */
51 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
52 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf lives at a fixed offset inside the op's own mempool
 * element, so it is recovered by pointer arithmetic on the op. */
54 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
57 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
58 if (dst_buf_offset == 0)
/* NOTE(review): the 'sym_op->m_dst = NULL;' then-branch and the
 * 'else' keyword are among the lines missing from this listing. */
61 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
64 /* cipher parameters */
/* NULL cipher: whole test buffer, starting at offset 0. */
65 sym_op->cipher.data.length = options->test_buffer_size;
66 sym_op->cipher.data.offset = 0;
/*
 * Populate a burst of crypto ops for the NULL-auth fast path.
 * Mirrors cperf_set_ops_null_cipher() but fills the auth fields
 * instead of the cipher fields.
 *
 * NOTE(review): listing has gaps — return type, braces, the
 * m_dst 'else' branch and the return statement are not visible.
 */
73 cperf_set_ops_null_auth(struct rte_crypto_op **ops,
74 uint32_t src_buf_offset, uint32_t dst_buf_offset,
75 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
76 const struct cperf_options *options,
77 const struct cperf_test_vector *test_vector __rte_unused,
78 uint16_t iv_offset __rte_unused)
82 for (i = 0; i < nb_ops; i++) {
83 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
85 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
86 rte_crypto_op_attach_sym_session(ops[i], sess);
/* Source mbuf is co-located with the op at a fixed offset. */
88 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
91 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
92 if (dst_buf_offset == 0)
95 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
/* authentication parameters: hash the whole test buffer from 0. */
99 sym_op->auth.data.length = options->test_buffer_size;
100 sym_op->auth.data.offset = 0;
/*
 * Populate a burst of crypto ops for cipher-only tests.
 *
 * For VERIFY-type tests the known cipher IV from the test vector is
 * copied into each op (at iv_offset past the op start) so the result
 * can be checked against the vector; for throughput tests the IV left
 * in the op from setup is reused.
 *
 * NOTE(review): listing has gaps — return type, braces, 'else'
 * keywords and the return statement are not visible here.
 */
107 cperf_set_ops_cipher(struct rte_crypto_op **ops,
108 uint32_t src_buf_offset, uint32_t dst_buf_offset,
109 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
110 const struct cperf_options *options,
111 const struct cperf_test_vector *test_vector,
116 for (i = 0; i < nb_ops; i++) {
117 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
119 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
120 rte_crypto_op_attach_sym_session(ops[i], sess);
122 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
125 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
126 if (dst_buf_offset == 0)
127 sym_op->m_dst = NULL;
/* NOTE(review): 'else' on the line before this assignment is
 * among the lines missing from this listing. */
129 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
132 /* cipher parameters */
/* SNOW3G/KASUMI/ZUC take the length in bits, hence the << 3. */
133 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
134 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
135 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
136 sym_op->cipher.data.length = options->test_buffer_size << 3;
138 sym_op->cipher.data.length = options->test_buffer_size;
140 sym_op->cipher.data.offset = 0;
/* Verify mode: seed every op with the vector's known cipher IV. */
143 if (options->test == CPERF_TEST_TYPE_VERIFY) {
144 for (i = 0; i < nb_ops; i++) {
145 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
146 uint8_t *, iv_offset);
148 memcpy(iv_ptr, test_vector->cipher_iv.data,
149 test_vector->cipher_iv.length);
/*
 * Populate a burst of crypto ops for auth-only tests.
 *
 * For GENERATE the digest is placed in the data mbuf right after the
 * payload (walking the segment chain to find room); for VERIFY the
 * digest pointer is taken from the test vector so the PMD compares
 * against the known-good value.
 *
 * NOTE(review): listing has gaps — return type, braces, 'else'
 * keywords, the out-of-place buf selection and parts of the
 * segment-walk are not visible here.
 */
158 cperf_set_ops_auth(struct rte_crypto_op **ops,
159 uint32_t src_buf_offset, uint32_t dst_buf_offset,
160 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
161 const struct cperf_options *options,
162 const struct cperf_test_vector *test_vector,
167 for (i = 0; i < nb_ops; i++) {
168 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
170 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
171 rte_crypto_op_attach_sym_session(ops[i], sess);
173 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
176 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
177 if (dst_buf_offset == 0)
178 sym_op->m_dst = NULL;
180 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
/* Algorithms with an auth IV (e.g. wireless hashes) need it copied
 * into the op's private area at iv_offset. */
183 if (test_vector->auth_iv.length) {
184 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
187 memcpy(iv_ptr, test_vector->auth_iv.data,
188 test_vector->auth_iv.length);
191 /* authentication parameters */
192 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
/* VERIFY: point at the reference digest from the test vector. */
193 sym_op->auth.digest.data = test_vector->digest.data;
194 sym_op->auth.digest.phys_addr =
195 test_vector->digest.phys_addr;
/* GENERATE: digest goes right after the payload in the mbuf. */
198 uint32_t offset = options->test_buffer_size;
199 struct rte_mbuf *buf, *tbuf;
201 if (options->out_of_place) {
/* NOTE(review): the out-of-place branch body (using m_dst) is
 * among the lines missing from this listing. */
204 tbuf = sym_op->m_src;
/* Walk the segment chain until 'offset' lands inside a segment. */
205 while ((tbuf->next != NULL) &&
206 (offset >= tbuf->data_len)) {
207 offset -= tbuf->data_len;
211 * If there is not enough room in segment,
212 * place the digest in the next segment
214 if ((tbuf->data_len - offset) < options->digest_sz) {
221 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
223 sym_op->auth.digest.phys_addr =
224 rte_pktmbuf_mtophys_offset(buf, offset);
/* Bit-oriented wireless hashes take length in bits (<< 3). */
228 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
229 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
230 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
231 sym_op->auth.data.length = options->test_buffer_size << 3;
233 sym_op->auth.data.length = options->test_buffer_size;
235 sym_op->auth.data.offset = 0;
/* Verify mode: seed ops with the vector's auth IV, if any. */
238 if (options->test == CPERF_TEST_TYPE_VERIFY) {
239 if (test_vector->auth_iv.length) {
240 for (i = 0; i < nb_ops; i++) {
241 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
242 uint8_t *, iv_offset);
244 memcpy(iv_ptr, test_vector->auth_iv.data,
245 test_vector->auth_iv.length);
/*
 * Populate a burst of crypto ops for chained cipher+auth tests.
 * Combines the cipher setup of cperf_set_ops_cipher() with the
 * digest placement of cperf_set_ops_auth(); in verify mode both the
 * cipher IV and (if present) the auth IV are copied into the op, the
 * auth IV immediately after the cipher IV.
 *
 * NOTE(review): listing has gaps — return type, braces, 'else'
 * keywords and parts of the digest segment-walk are not visible.
 */
253 cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
254 uint32_t src_buf_offset, uint32_t dst_buf_offset,
255 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
256 const struct cperf_options *options,
257 const struct cperf_test_vector *test_vector,
262 for (i = 0; i < nb_ops; i++) {
263 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
265 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
266 rte_crypto_op_attach_sym_session(ops[i], sess);
268 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
271 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
272 if (dst_buf_offset == 0)
273 sym_op->m_dst = NULL;
275 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
278 /* cipher parameters */
/* Wireless ciphers take length in bits, hence << 3. */
279 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
280 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
281 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
282 sym_op->cipher.data.length = options->test_buffer_size << 3;
284 sym_op->cipher.data.length = options->test_buffer_size;
286 sym_op->cipher.data.offset = 0;
288 /* authentication parameters */
289 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
/* VERIFY: reference digest comes from the test vector. */
290 sym_op->auth.digest.data = test_vector->digest.data;
291 sym_op->auth.digest.phys_addr =
292 test_vector->digest.phys_addr;
/* GENERATE: place digest after payload, walking segments. */
295 uint32_t offset = options->test_buffer_size;
296 struct rte_mbuf *buf, *tbuf;
298 if (options->out_of_place) {
301 tbuf = sym_op->m_src;
302 while ((tbuf->next != NULL) &&
303 (offset >= tbuf->data_len)) {
304 offset -= tbuf->data_len;
308 * If there is not enough room in segment,
309 * place the digest in the next segment
311 if ((tbuf->data_len - offset) < options->digest_sz) {
318 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
320 sym_op->auth.digest.phys_addr =
321 rte_pktmbuf_mtophys_offset(buf, offset);
/* Bit-oriented wireless hashes take length in bits (<< 3). */
324 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
325 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
326 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
327 sym_op->auth.data.length = options->test_buffer_size << 3;
329 sym_op->auth.data.length = options->test_buffer_size;
331 sym_op->auth.data.offset = 0;
/* Verify mode: copy cipher IV, then auth IV right after it. */
334 if (options->test == CPERF_TEST_TYPE_VERIFY) {
335 for (i = 0; i < nb_ops; i++) {
336 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
337 uint8_t *, iv_offset);
339 memcpy(iv_ptr, test_vector->cipher_iv.data,
340 test_vector->cipher_iv.length);
341 if (test_vector->auth_iv.length) {
343 * Copy IV after the crypto operation and
346 iv_ptr += test_vector->cipher_iv.length;
347 memcpy(iv_ptr, test_vector->auth_iv.data,
348 test_vector->auth_iv.length);
/*
 * Populate a burst of crypto ops for AEAD tests.
 *
 * The AAD is stored in the op's private area right after the IV,
 * 16-byte aligned. For decrypt, the reference digest is taken from
 * the test vector; for encrypt, the digest is written after the AEAD
 * data region in the mbuf (walking segments for room). In verify
 * mode AES-CCM gets special layout: the nonce is copied 1 byte into
 * the IV field and the AAD 18 bytes into the AAD field.
 *
 * NOTE(review): listing has gaps — return type, braces, 'else'
 * keywords and parts of the digest segment-walk are not visible.
 */
358 cperf_set_ops_aead(struct rte_crypto_op **ops,
359 uint32_t src_buf_offset, uint32_t dst_buf_offset,
360 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
361 const struct cperf_options *options,
362 const struct cperf_test_vector *test_vector,
366 /* AAD is placed after the IV */
367 uint16_t aad_offset = iv_offset +
368 RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);
370 for (i = 0; i < nb_ops; i++) {
371 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
373 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
374 rte_crypto_op_attach_sym_session(ops[i], sess);
376 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
379 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
380 if (dst_buf_offset == 0)
381 sym_op->m_dst = NULL;
383 sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
386 /* AEAD parameters */
387 sym_op->aead.data.length = options->test_buffer_size;
388 sym_op->aead.data.offset = 0;
/* AAD lives in the op's private area at aad_offset. */
390 sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
391 uint8_t *, aad_offset);
392 sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
395 if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
/* DECRYPT: reference digest comes from the test vector. */
396 sym_op->aead.digest.data = test_vector->digest.data;
397 sym_op->aead.digest.phys_addr =
398 test_vector->digest.phys_addr;
/* ENCRYPT: place digest after the AEAD data region. */
401 uint32_t offset = sym_op->aead.data.length +
402 sym_op->aead.data.offset;
403 struct rte_mbuf *buf, *tbuf;
405 if (options->out_of_place) {
408 tbuf = sym_op->m_src;
409 while ((tbuf->next != NULL) &&
410 (offset >= tbuf->data_len)) {
411 offset -= tbuf->data_len;
415 * If there is not enough room in segment,
416 * place the digest in the next segment
418 if ((tbuf->data_len - offset) < options->digest_sz) {
425 sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
427 sym_op->aead.digest.phys_addr =
428 rte_pktmbuf_mtophys_offset(buf, offset);
432 if (options->test == CPERF_TEST_TYPE_VERIFY) {
433 for (i = 0; i < nb_ops; i++) {
434 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
435 uint8_t *, iv_offset);
438 * If doing AES-CCM, nonce is copied one byte
439 * after the start of IV field, and AAD is copied
440 * 18 bytes after the start of the AAD field.
442 if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
443 memcpy(iv_ptr + 1, test_vector->aead_iv.data,
444 test_vector->aead_iv.length);
446 memcpy(ops[i]->sym->aead.aad.data + 18,
447 test_vector->aad.data,
448 test_vector->aad.length);
450 memcpy(iv_ptr, test_vector->aead_iv.data,
451 test_vector->aead_iv.length);
453 memcpy(ops[i]->sym->aead.aad.data,
454 test_vector->aad.data,
455 test_vector->aad.length);
/*
 * Create and initialize a symmetric crypto session for the configured
 * operation type.
 *
 * Builds the appropriate xform(s) — cipher-only, auth-only, a
 * cipher+auth chain (order per op_type), or AEAD — from the options
 * and test vector, then calls rte_cryptodev_sym_session_init() on a
 * session allocated from sess_mp. NULL cipher/auth algorithms get
 * zero-length keys and IVs.
 *
 * NOTE(review): listing has gaps — the NULL-check after session
 * creation, 'else' keywords, closing braces and the final 'return
 * sess;' are not visible here. Error handling of the *_init() return
 * values cannot be confirmed from this view.
 */
463 static struct rte_cryptodev_sym_session *
464 cperf_create_session(struct rte_mempool *sess_mp,
466 const struct cperf_options *options,
467 const struct cperf_test_vector *test_vector,
/* Xforms live on the stack; the session copies what it needs. */
470 struct rte_crypto_sym_xform cipher_xform;
471 struct rte_crypto_sym_xform auth_xform;
472 struct rte_crypto_sym_xform aead_xform;
473 struct rte_cryptodev_sym_session *sess = NULL;
475 sess = rte_cryptodev_sym_session_create(sess_mp);
479 if (options->op_type == CPERF_CIPHER_ONLY) {
480 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
481 cipher_xform.next = NULL;
482 cipher_xform.cipher.algo = options->cipher_algo;
483 cipher_xform.cipher.op = options->cipher_op;
484 cipher_xform.cipher.iv.offset = iv_offset;
486 /* cipher different than null */
487 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
488 cipher_xform.cipher.key.data =
489 test_vector->cipher_key.data;
490 cipher_xform.cipher.key.length =
491 test_vector->cipher_key.length;
492 cipher_xform.cipher.iv.length =
493 test_vector->cipher_iv.length;
/* NULL cipher: no key, no IV. */
495 cipher_xform.cipher.key.data = NULL;
496 cipher_xform.cipher.key.length = 0;
497 cipher_xform.cipher.iv.length = 0;
499 /* create crypto session */
500 rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
505 } else if (options->op_type == CPERF_AUTH_ONLY) {
506 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
507 auth_xform.next = NULL;
508 auth_xform.auth.algo = options->auth_algo;
509 auth_xform.auth.op = options->auth_op;
511 /* auth different than null */
512 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
513 auth_xform.auth.digest_length =
515 auth_xform.auth.key.length =
516 test_vector->auth_key.length;
517 auth_xform.auth.key.data = test_vector->auth_key.data;
518 auth_xform.auth.iv.length =
519 test_vector->auth_iv.length;
/* NULL auth: no digest, key, or IV. */
521 auth_xform.auth.digest_length = 0;
522 auth_xform.auth.key.length = 0;
523 auth_xform.auth.key.data = NULL;
524 auth_xform.auth.iv.length = 0;
526 /* create crypto session */
527 rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
532 } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
533 || options->op_type == CPERF_AUTH_THEN_CIPHER) {
/* Chained case: build both xforms, link them by op_type order. */
537 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
538 cipher_xform.next = NULL;
539 cipher_xform.cipher.algo = options->cipher_algo;
540 cipher_xform.cipher.op = options->cipher_op;
541 cipher_xform.cipher.iv.offset = iv_offset;
543 /* cipher different than null */
544 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
545 cipher_xform.cipher.key.data =
546 test_vector->cipher_key.data;
547 cipher_xform.cipher.key.length =
548 test_vector->cipher_key.length;
549 cipher_xform.cipher.iv.length =
550 test_vector->cipher_iv.length;
552 cipher_xform.cipher.key.data = NULL;
553 cipher_xform.cipher.key.length = 0;
554 cipher_xform.cipher.iv.length = 0;
560 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
561 auth_xform.next = NULL;
562 auth_xform.auth.algo = options->auth_algo;
563 auth_xform.auth.op = options->auth_op;
565 /* auth different than null */
566 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
567 auth_xform.auth.digest_length = options->digest_sz;
568 auth_xform.auth.iv.length = test_vector->auth_iv.length;
569 auth_xform.auth.key.length =
570 test_vector->auth_key.length;
571 auth_xform.auth.key.data =
572 test_vector->auth_key.data;
574 auth_xform.auth.digest_length = 0;
575 auth_xform.auth.key.length = 0;
576 auth_xform.auth.key.data = NULL;
577 auth_xform.auth.iv.length = 0;
580 /* cipher then auth */
581 if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
582 cipher_xform.next = &auth_xform;
583 /* create crypto session */
584 rte_cryptodev_sym_session_init(dev_id,
585 sess, &cipher_xform, sess_mp);
586 } else { /* auth then cipher */
587 auth_xform.next = &cipher_xform;
588 /* create crypto session */
589 rte_cryptodev_sym_session_init(dev_id,
590 sess, &auth_xform, sess_mp);
592 } else { /* options->op_type == CPERF_AEAD */
593 aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
594 aead_xform.next = NULL;
595 aead_xform.aead.algo = options->aead_algo;
596 aead_xform.aead.op = options->aead_op;
597 aead_xform.aead.iv.offset = iv_offset;
599 aead_xform.aead.key.data =
600 test_vector->aead_key.data;
601 aead_xform.aead.key.length =
602 test_vector->aead_key.length;
603 aead_xform.aead.iv.length = test_vector->aead_iv.length;
605 aead_xform.aead.digest_length = options->digest_sz;
606 aead_xform.aead.aad_length =
607 options->aead_aad_sz;
609 /* Create crypto session */
610 rte_cryptodev_sym_session_init(dev_id,
611 sess, &aead_xform, sess_mp);
/*
 * Select the op-population callback matching the configured op_type
 * and store it (together with the session-create callback) in op_fns.
 * NULL cipher/auth algorithms get their dedicated fast-path setters.
 *
 * NOTE(review): listing has gaps — the return type, opening brace,
 * the 'else' keywords before the non-NULL setter assignments and the
 * return statements after each branch are not visible here.
 */
618 cperf_get_op_functions(const struct cperf_options *options,
619 struct cperf_op_fns *op_fns)
621 memset(op_fns, 0, sizeof(struct cperf_op_fns));
623 op_fns->sess_create = cperf_create_session;
625 if (options->op_type == CPERF_AEAD) {
626 op_fns->populate_ops = cperf_set_ops_aead;
630 if (options->op_type == CPERF_AUTH_THEN_CIPHER
631 || options->op_type == CPERF_CIPHER_THEN_AUTH) {
632 op_fns->populate_ops = cperf_set_ops_cipher_auth;
635 if (options->op_type == CPERF_AUTH_ONLY) {
636 if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
637 op_fns->populate_ops = cperf_set_ops_null_auth;
639 op_fns->populate_ops = cperf_set_ops_auth;
642 if (options->op_type == CPERF_CIPHER_ONLY) {
643 if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
644 op_fns->populate_ops = cperf_set_ops_null_cipher;
646 op_fns->populate_ops = cperf_set_ops_cipher;