/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include <string.h>	/* memcpy()/memset(); added for self-containment */

#include <rte_cryptodev.h>

#include "cperf_ops.h"
#include "cperf_test_vectors.h"
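
/*
 * Populate a burst of crypto ops for the NULL cipher case: attach the
 * session, set source/destination mbufs and cover the whole test buffer.
 * No key or IV material is needed, so test_vector and iv_offset are unused.
 */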
static int
cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* cipher parameters */
		sym_op->cipher.data.length = options->test_buffer_size;
		sym_op->cipher.data.offset = 0;
	}

	return 0;
}
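
/*
 * Same as the NULL cipher case above, but for the NULL auth algorithm:
 * only the auth data length/offset need to be set on each op.
 */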
static int
cperf_set_ops_null_auth(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* auth parameters */
		sym_op->auth.data.length = options->test_buffer_size;
		sym_op->auth.data.offset = 0;
	}

	return 0;
}
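
/*
 * Populate ops for cipher-only tests. SNOW 3G, KASUMI and ZUC take their
 * cipher data length in bits, hence the << 3 on the byte count. For
 * verify-type tests the reference cipher IV is copied into each op at
 * iv_offset.
 */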
static int
cperf_set_ops_cipher(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* cipher parameters */
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			sym_op->cipher.data.length = options->test_buffer_size << 3;
		else
			sym_op->cipher.data.length = options->test_buffer_size;

		sym_op->cipher.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
		}
	}

	return 0;
}
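
/*
 * Populate ops for auth-only tests. For GENERATE the digest is written at
 * the end of the test data, walking chained mbufs to find the segment it
 * lands in; for VERIFY the precomputed reference digest from the test
 * vector is used instead.
 */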
static int
cperf_set_ops_auth(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		if (test_vector->auth_iv.length) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->auth_iv.data,
					test_vector->auth_iv.length);
		}

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = bufs_out[i];
			} else {
				tbuf = bufs_in[i];
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_mtophys_offset(buf, offset);
		}

		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length = options->test_buffer_size << 3;
		else
			sym_op->auth.data.length = options->test_buffer_size;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		if (test_vector->auth_iv.length) {
			for (i = 0; i < nb_ops; i++) {
				uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
						uint8_t *, iv_offset);

				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}
	}

	return 0;
}
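
/*
 * Populate ops for chained cipher+auth tests: a combination of the two
 * functions above, filling in both the cipher and auth data lengths and
 * offsets. For verify-type tests the cipher IV and, if present, the auth
 * IV are copied back to back starting at iv_offset.
 */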
static int
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* cipher parameters */
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			sym_op->cipher.data.length = options->test_buffer_size << 3;
		else
			sym_op->cipher.data.length = options->test_buffer_size;

		sym_op->cipher.data.offset = 0;

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = bufs_out[i];
			} else {
				tbuf = bufs_in[i];
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_mtophys_offset(buf, offset);
		}

		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length = options->test_buffer_size << 3;
		else
			sym_op->auth.data.length = options->test_buffer_size;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
			if (test_vector->auth_iv.length) {
				/*
				 * Copy auth IV after the cipher IV, so the
				 * two sit back to back in the op.
				 */
				iv_ptr += test_vector->cipher_iv.length;
				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}
	}

	return 0;
}
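
/*
 * Populate ops for AEAD tests (e.g. AES-GCM). The AAD sits at the start of
 * the mbuf data area, so the AEAD data region begins after it, rounded up
 * to a 16-byte boundary. For ENCRYPT the digest is placed right after the
 * AEAD data; for DECRYPT the reference digest from the test vector is used.
 */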
static int
cperf_set_ops_aead(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* AEAD parameters */
		sym_op->aead.data.length = options->test_buffer_size;
		sym_op->aead.data.offset =
				RTE_ALIGN_CEIL(options->aead_aad_sz, 16);

		sym_op->aead.aad.data = rte_pktmbuf_mtod(bufs_in[i], uint8_t *);
		sym_op->aead.aad.phys_addr = rte_pktmbuf_mtophys(bufs_in[i]);

		if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
			sym_op->aead.digest.data = test_vector->digest.data;
			sym_op->aead.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			uint32_t offset = sym_op->aead.data.length +
					sym_op->aead.data.offset;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = bufs_out[i];
			} else {
				tbuf = bufs_in[i];
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				buf = tbuf;
			}

			sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->aead.digest.phys_addr =
					rte_pktmbuf_mtophys_offset(buf, offset);
		}
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->aead_iv.data,
					test_vector->aead_iv.length);
		}
	}

	return 0;
}
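
/*
 * Create and initialise a symmetric session from the test options: a
 * single cipher, auth or AEAD transform, or a cipher+auth chain ordered
 * according to the op type. NULL algorithms get no key or IV material.
 */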
static struct rte_cryptodev_sym_session *
cperf_create_session(struct rte_mempool *sess_mp,
		uint8_t dev_id,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	struct rte_crypto_sym_xform cipher_xform;
	struct rte_crypto_sym_xform auth_xform;
	struct rte_crypto_sym_xform aead_xform;
	struct rte_cryptodev_sym_session *sess = NULL;

	sess = rte_cryptodev_sym_session_create(sess_mp);
	/*
	 * cipher only
	 */
	if (options->op_type == CPERF_CIPHER_ONLY) {
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;

		/* cipher other than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
					test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}
		/* create crypto session */
		rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
				sess_mp);
	/*
	 * auth only
	 */
	} else if (options->op_type == CPERF_AUTH_ONLY) {
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;

		/* auth other than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length =
					options->digest_sz;
			auth_xform.auth.key.length =
					test_vector->auth_key.length;
			auth_xform.auth.key.data = test_vector->auth_key.data;
			auth_xform.auth.iv.length =
					test_vector->auth_iv.length;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}
		/* create crypto session */
		rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
				sess_mp);
	/*
	 * cipher and auth
	 */
	} else if (options->op_type == CPERF_CIPHER_THEN_AUTH
			|| options->op_type == CPERF_AUTH_THEN_CIPHER) {
		/*
		 * cipher
		 */
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;

		/* cipher other than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
					test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}

		/*
		 * auth
		 */
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;

		/* auth other than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length = options->digest_sz;
			auth_xform.auth.iv.length = test_vector->auth_iv.length;
			auth_xform.auth.key.length =
					test_vector->auth_key.length;
			auth_xform.auth.key.data =
					test_vector->auth_key.data;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}

		/* cipher then auth */
		if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
			cipher_xform.next = &auth_xform;
			/* create crypto session */
			rte_cryptodev_sym_session_init(dev_id,
					sess, &cipher_xform, sess_mp);
		} else { /* auth then cipher */
			auth_xform.next = &cipher_xform;
			/* create crypto session */
			rte_cryptodev_sym_session_init(dev_id,
					sess, &auth_xform, sess_mp);
		}
	} else { /* options->op_type == CPERF_AEAD */
		aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
		aead_xform.next = NULL;
		aead_xform.aead.algo = options->aead_algo;
		aead_xform.aead.op = options->aead_op;
		aead_xform.aead.iv.offset = iv_offset;

		aead_xform.aead.key.data =
				test_vector->aead_key.data;
		aead_xform.aead.key.length =
				test_vector->aead_key.length;
		aead_xform.aead.iv.length = test_vector->aead_iv.length;

		aead_xform.aead.digest_length = options->digest_sz;
		aead_xform.aead.aad_length =
				options->aead_aad_sz;

		/* Create crypto session */
		rte_cryptodev_sym_session_init(dev_id,
				sess, &aead_xform, sess_mp);
	}

	return sess;
}
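
/*
 * Select the session-create and op-populate callbacks matching the
 * requested op type, using the NULL-algorithm fast paths where applicable.
 * Returns 0 on success, -1 for an unknown op type.
 */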
int
cperf_get_op_functions(const struct cperf_options *options,
		struct cperf_op_fns *op_fns)
{
	memset(op_fns, 0, sizeof(struct cperf_op_fns));

	op_fns->sess_create = cperf_create_session;

	if (options->op_type == CPERF_AEAD) {
		op_fns->populate_ops = cperf_set_ops_aead;
		return 0;
	}

	if (options->op_type == CPERF_AUTH_THEN_CIPHER
			|| options->op_type == CPERF_CIPHER_THEN_AUTH) {
		op_fns->populate_ops = cperf_set_ops_cipher_auth;
		return 0;
	}
	if (options->op_type == CPERF_AUTH_ONLY) {
		if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
			op_fns->populate_ops = cperf_set_ops_null_auth;
		else
			op_fns->populate_ops = cperf_set_ops_auth;
		return 0;
	}
	if (options->op_type == CPERF_CIPHER_ONLY) {
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
			op_fns->populate_ops = cperf_set_ops_null_cipher;
		else
			op_fns->populate_ops = cperf_set_ops_cipher;
		return 0;
	}
	return -1;
}