/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
33 #include <rte_cryptodev.h>
35 #include "cperf_ops.h"
36 #include "cperf_test_vectors.h"
39 cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
40 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
41 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
42 const struct cperf_options *options,
43 const struct cperf_test_vector *test_vector __rte_unused,
44 uint16_t iv_offset __rte_unused)
48 for (i = 0; i < nb_ops; i++) {
49 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
51 rte_crypto_op_attach_sym_session(ops[i], sess);
53 sym_op->m_src = bufs_in[i];
54 sym_op->m_dst = bufs_out[i];
56 /* cipher parameters */
57 sym_op->cipher.data.length = options->test_buffer_size;
58 sym_op->cipher.data.offset = 0;
65 cperf_set_ops_null_auth(struct rte_crypto_op **ops,
66 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
67 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
68 const struct cperf_options *options,
69 const struct cperf_test_vector *test_vector __rte_unused,
70 uint16_t iv_offset __rte_unused)
74 for (i = 0; i < nb_ops; i++) {
75 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
77 rte_crypto_op_attach_sym_session(ops[i], sess);
79 sym_op->m_src = bufs_in[i];
80 sym_op->m_dst = bufs_out[i];
83 sym_op->auth.data.length = options->test_buffer_size;
84 sym_op->auth.data.offset = 0;
91 cperf_set_ops_cipher(struct rte_crypto_op **ops,
92 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
93 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
94 const struct cperf_options *options,
95 const struct cperf_test_vector *test_vector,
100 for (i = 0; i < nb_ops; i++) {
101 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
103 rte_crypto_op_attach_sym_session(ops[i], sess);
105 sym_op->m_src = bufs_in[i];
106 sym_op->m_dst = bufs_out[i];
108 /* cipher parameters */
109 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
110 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
111 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
112 sym_op->cipher.data.length = options->test_buffer_size << 3;
114 sym_op->cipher.data.length = options->test_buffer_size;
116 sym_op->cipher.data.offset = 0;
119 if (options->test == CPERF_TEST_TYPE_VERIFY) {
120 for (i = 0; i < nb_ops; i++) {
121 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
122 uint8_t *, iv_offset);
124 memcpy(iv_ptr, test_vector->cipher_iv.data,
125 test_vector->cipher_iv.length);
134 cperf_set_ops_auth(struct rte_crypto_op **ops,
135 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
136 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
137 const struct cperf_options *options,
138 const struct cperf_test_vector *test_vector,
143 for (i = 0; i < nb_ops; i++) {
144 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
146 rte_crypto_op_attach_sym_session(ops[i], sess);
148 sym_op->m_src = bufs_in[i];
149 sym_op->m_dst = bufs_out[i];
151 if (test_vector->auth_iv.length) {
152 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
155 memcpy(iv_ptr, test_vector->auth_iv.data,
156 test_vector->auth_iv.length);
159 /* authentication parameters */
160 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
161 sym_op->auth.digest.data = test_vector->digest.data;
162 sym_op->auth.digest.phys_addr =
163 test_vector->digest.phys_addr;
166 uint32_t offset = options->test_buffer_size;
167 struct rte_mbuf *buf, *tbuf;
169 if (options->out_of_place) {
173 while ((tbuf->next != NULL) &&
174 (offset >= tbuf->data_len)) {
175 offset -= tbuf->data_len;
179 * If there is not enough room in segment,
180 * place the digest in the next segment
182 if ((tbuf->data_len - offset) < options->digest_sz) {
189 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
191 sym_op->auth.digest.phys_addr =
192 rte_pktmbuf_mtophys_offset(buf, offset);
196 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
197 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
198 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
199 sym_op->auth.data.length = options->test_buffer_size << 3;
201 sym_op->auth.data.length = options->test_buffer_size;
203 sym_op->auth.data.offset = 0;
206 if (options->test == CPERF_TEST_TYPE_VERIFY) {
207 if (test_vector->auth_iv.length) {
208 for (i = 0; i < nb_ops; i++) {
209 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
210 uint8_t *, iv_offset);
212 memcpy(iv_ptr, test_vector->auth_iv.data,
213 test_vector->auth_iv.length);
221 cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
222 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
223 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
224 const struct cperf_options *options,
225 const struct cperf_test_vector *test_vector,
230 for (i = 0; i < nb_ops; i++) {
231 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
233 rte_crypto_op_attach_sym_session(ops[i], sess);
235 sym_op->m_src = bufs_in[i];
236 sym_op->m_dst = bufs_out[i];
238 /* cipher parameters */
239 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
240 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
241 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
242 sym_op->cipher.data.length = options->test_buffer_size << 3;
244 sym_op->cipher.data.length = options->test_buffer_size;
246 sym_op->cipher.data.offset = 0;
248 /* authentication parameters */
249 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
250 sym_op->auth.digest.data = test_vector->digest.data;
251 sym_op->auth.digest.phys_addr =
252 test_vector->digest.phys_addr;
255 uint32_t offset = options->test_buffer_size;
256 struct rte_mbuf *buf, *tbuf;
258 if (options->out_of_place) {
262 while ((tbuf->next != NULL) &&
263 (offset >= tbuf->data_len)) {
264 offset -= tbuf->data_len;
268 * If there is not enough room in segment,
269 * place the digest in the next segment
271 if ((tbuf->data_len - offset) < options->digest_sz) {
278 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
280 sym_op->auth.digest.phys_addr =
281 rte_pktmbuf_mtophys_offset(buf, offset);
284 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
285 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
286 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
287 sym_op->auth.data.length = options->test_buffer_size << 3;
289 sym_op->auth.data.length = options->test_buffer_size;
291 sym_op->auth.data.offset = 0;
294 if (options->test == CPERF_TEST_TYPE_VERIFY) {
295 for (i = 0; i < nb_ops; i++) {
296 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
297 uint8_t *, iv_offset);
299 memcpy(iv_ptr, test_vector->cipher_iv.data,
300 test_vector->cipher_iv.length);
301 if (test_vector->auth_iv.length) {
303 * Copy IV after the crypto operation and
306 iv_ptr += test_vector->cipher_iv.length;
307 memcpy(iv_ptr, test_vector->auth_iv.data,
308 test_vector->auth_iv.length);
318 cperf_set_ops_aead(struct rte_crypto_op **ops,
319 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
320 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
321 const struct cperf_options *options,
322 const struct cperf_test_vector *test_vector,
326 uint16_t aad_offset = iv_offset +
327 RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);
329 for (i = 0; i < nb_ops; i++) {
330 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
332 rte_crypto_op_attach_sym_session(ops[i], sess);
334 sym_op->m_src = bufs_in[i];
335 sym_op->m_dst = bufs_out[i];
337 /* AEAD parameters */
338 sym_op->aead.data.length = options->test_buffer_size;
339 sym_op->aead.data.offset = 0;
341 sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
342 uint8_t *, aad_offset);
343 sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
346 if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
347 sym_op->aead.digest.data = test_vector->digest.data;
348 sym_op->aead.digest.phys_addr =
349 test_vector->digest.phys_addr;
352 uint32_t offset = sym_op->aead.data.length +
353 sym_op->aead.data.offset;
354 struct rte_mbuf *buf, *tbuf;
356 if (options->out_of_place) {
360 while ((tbuf->next != NULL) &&
361 (offset >= tbuf->data_len)) {
362 offset -= tbuf->data_len;
366 * If there is not enough room in segment,
367 * place the digest in the next segment
369 if ((tbuf->data_len - offset) < options->digest_sz) {
376 sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
378 sym_op->aead.digest.phys_addr =
379 rte_pktmbuf_mtophys_offset(buf, offset);
383 if (options->test == CPERF_TEST_TYPE_VERIFY) {
384 for (i = 0; i < nb_ops; i++) {
385 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
386 uint8_t *, iv_offset);
388 memcpy(iv_ptr, test_vector->aead_iv.data,
389 test_vector->aead_iv.length);
391 /* Copy AAD after the IV */
392 memcpy(ops[i]->sym->aead.aad.data,
393 test_vector->aad.data,
394 test_vector->aad.length);
401 static struct rte_cryptodev_sym_session *
402 cperf_create_session(struct rte_mempool *sess_mp,
404 const struct cperf_options *options,
405 const struct cperf_test_vector *test_vector,
408 struct rte_crypto_sym_xform cipher_xform;
409 struct rte_crypto_sym_xform auth_xform;
410 struct rte_crypto_sym_xform aead_xform;
411 struct rte_cryptodev_sym_session *sess = NULL;
413 sess = rte_cryptodev_sym_session_create(sess_mp);
417 if (options->op_type == CPERF_CIPHER_ONLY) {
418 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
419 cipher_xform.next = NULL;
420 cipher_xform.cipher.algo = options->cipher_algo;
421 cipher_xform.cipher.op = options->cipher_op;
422 cipher_xform.cipher.iv.offset = iv_offset;
424 /* cipher different than null */
425 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
426 cipher_xform.cipher.key.data =
427 test_vector->cipher_key.data;
428 cipher_xform.cipher.key.length =
429 test_vector->cipher_key.length;
430 cipher_xform.cipher.iv.length =
431 test_vector->cipher_iv.length;
433 cipher_xform.cipher.key.data = NULL;
434 cipher_xform.cipher.key.length = 0;
435 cipher_xform.cipher.iv.length = 0;
437 /* create crypto session */
438 rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
443 } else if (options->op_type == CPERF_AUTH_ONLY) {
444 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
445 auth_xform.next = NULL;
446 auth_xform.auth.algo = options->auth_algo;
447 auth_xform.auth.op = options->auth_op;
449 /* auth different than null */
450 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
451 auth_xform.auth.digest_length =
453 auth_xform.auth.key.length =
454 test_vector->auth_key.length;
455 auth_xform.auth.key.data = test_vector->auth_key.data;
456 auth_xform.auth.iv.length =
457 test_vector->auth_iv.length;
459 auth_xform.auth.digest_length = 0;
460 auth_xform.auth.key.length = 0;
461 auth_xform.auth.key.data = NULL;
462 auth_xform.auth.iv.length = 0;
464 /* create crypto session */
465 rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
470 } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
471 || options->op_type == CPERF_AUTH_THEN_CIPHER) {
475 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
476 cipher_xform.next = NULL;
477 cipher_xform.cipher.algo = options->cipher_algo;
478 cipher_xform.cipher.op = options->cipher_op;
479 cipher_xform.cipher.iv.offset = iv_offset;
481 /* cipher different than null */
482 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
483 cipher_xform.cipher.key.data =
484 test_vector->cipher_key.data;
485 cipher_xform.cipher.key.length =
486 test_vector->cipher_key.length;
487 cipher_xform.cipher.iv.length =
488 test_vector->cipher_iv.length;
490 cipher_xform.cipher.key.data = NULL;
491 cipher_xform.cipher.key.length = 0;
492 cipher_xform.cipher.iv.length = 0;
498 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
499 auth_xform.next = NULL;
500 auth_xform.auth.algo = options->auth_algo;
501 auth_xform.auth.op = options->auth_op;
503 /* auth different than null */
504 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
505 auth_xform.auth.digest_length = options->digest_sz;
506 auth_xform.auth.iv.length = test_vector->auth_iv.length;
507 auth_xform.auth.key.length =
508 test_vector->auth_key.length;
509 auth_xform.auth.key.data =
510 test_vector->auth_key.data;
512 auth_xform.auth.digest_length = 0;
513 auth_xform.auth.key.length = 0;
514 auth_xform.auth.key.data = NULL;
515 auth_xform.auth.iv.length = 0;
518 /* cipher then auth */
519 if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
520 cipher_xform.next = &auth_xform;
521 /* create crypto session */
522 rte_cryptodev_sym_session_init(dev_id,
523 sess, &cipher_xform, sess_mp);
524 } else { /* auth then cipher */
525 auth_xform.next = &cipher_xform;
526 /* create crypto session */
527 rte_cryptodev_sym_session_init(dev_id,
528 sess, &auth_xform, sess_mp);
530 } else { /* options->op_type == CPERF_AEAD */
531 aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
532 aead_xform.next = NULL;
533 aead_xform.aead.algo = options->aead_algo;
534 aead_xform.aead.op = options->aead_op;
535 aead_xform.aead.iv.offset = iv_offset;
537 aead_xform.aead.key.data =
538 test_vector->aead_key.data;
539 aead_xform.aead.key.length =
540 test_vector->aead_key.length;
541 aead_xform.aead.iv.length = test_vector->aead_iv.length;
543 aead_xform.aead.digest_length = options->digest_sz;
544 aead_xform.aead.aad_length =
545 options->aead_aad_sz;
547 /* Create crypto session */
548 rte_cryptodev_sym_session_init(dev_id,
549 sess, &aead_xform, sess_mp);
556 cperf_get_op_functions(const struct cperf_options *options,
557 struct cperf_op_fns *op_fns)
559 memset(op_fns, 0, sizeof(struct cperf_op_fns));
561 op_fns->sess_create = cperf_create_session;
563 if (options->op_type == CPERF_AEAD) {
564 op_fns->populate_ops = cperf_set_ops_aead;
568 if (options->op_type == CPERF_AUTH_THEN_CIPHER
569 || options->op_type == CPERF_CIPHER_THEN_AUTH) {
570 op_fns->populate_ops = cperf_set_ops_cipher_auth;
573 if (options->op_type == CPERF_AUTH_ONLY) {
574 if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
575 op_fns->populate_ops = cperf_set_ops_null_auth;
577 op_fns->populate_ops = cperf_set_ops_auth;
580 if (options->op_type == CPERF_CIPHER_ONLY) {
581 if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
582 op_fns->populate_ops = cperf_set_ops_null_cipher;
584 op_fns->populate_ops = cperf_set_ops_cipher;