/*
 * Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in
 *     the documentation and/or other materials provided with the
 *     distribution.
 *   * Neither the name of Intel Corporation nor the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include <string.h>

#include <rte_cryptodev.h>

#include "cperf_ops.h"
#include "cperf_test_vectors.h"
39 cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
40 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
41 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
42 const struct cperf_options *options,
43 const struct cperf_test_vector *test_vector __rte_unused,
44 uint16_t iv_offset __rte_unused)
48 for (i = 0; i < nb_ops; i++) {
49 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
51 rte_crypto_op_attach_sym_session(ops[i], sess);
53 sym_op->m_src = bufs_in[i];
54 sym_op->m_dst = bufs_out[i];
56 /* cipher parameters */
57 sym_op->cipher.data.length = options->test_buffer_size;
58 sym_op->cipher.data.offset = 0;
65 cperf_set_ops_null_auth(struct rte_crypto_op **ops,
66 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
67 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
68 const struct cperf_options *options,
69 const struct cperf_test_vector *test_vector __rte_unused,
70 uint16_t iv_offset __rte_unused)
74 for (i = 0; i < nb_ops; i++) {
75 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
77 rte_crypto_op_attach_sym_session(ops[i], sess);
79 sym_op->m_src = bufs_in[i];
80 sym_op->m_dst = bufs_out[i];
83 sym_op->auth.data.length = options->test_buffer_size;
84 sym_op->auth.data.offset = 0;
91 cperf_set_ops_cipher(struct rte_crypto_op **ops,
92 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
93 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
94 const struct cperf_options *options,
95 const struct cperf_test_vector *test_vector,
100 for (i = 0; i < nb_ops; i++) {
101 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
103 rte_crypto_op_attach_sym_session(ops[i], sess);
105 sym_op->m_src = bufs_in[i];
106 sym_op->m_dst = bufs_out[i];
108 /* cipher parameters */
109 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
110 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
111 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
112 sym_op->cipher.data.length = options->test_buffer_size << 3;
114 sym_op->cipher.data.length = options->test_buffer_size;
116 sym_op->cipher.data.offset = 0;
119 if (options->test == CPERF_TEST_TYPE_VERIFY) {
120 for (i = 0; i < nb_ops; i++) {
121 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
122 uint8_t *, iv_offset);
124 memcpy(iv_ptr, test_vector->cipher_iv.data,
125 test_vector->cipher_iv.length);
134 cperf_set_ops_auth(struct rte_crypto_op **ops,
135 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
136 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
137 const struct cperf_options *options,
138 const struct cperf_test_vector *test_vector,
143 for (i = 0; i < nb_ops; i++) {
144 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
146 rte_crypto_op_attach_sym_session(ops[i], sess);
148 sym_op->m_src = bufs_in[i];
149 sym_op->m_dst = bufs_out[i];
151 if (test_vector->auth_iv.length) {
152 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
155 memcpy(iv_ptr, test_vector->auth_iv.data,
156 test_vector->auth_iv.length);
159 /* authentication parameters */
160 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
161 sym_op->auth.digest.data = test_vector->digest.data;
162 sym_op->auth.digest.phys_addr =
163 test_vector->digest.phys_addr;
164 sym_op->auth.digest.length = options->auth_digest_sz;
167 uint32_t offset = options->test_buffer_size;
168 struct rte_mbuf *buf, *tbuf;
170 if (options->out_of_place) {
174 while ((tbuf->next != NULL) &&
175 (offset >= tbuf->data_len)) {
176 offset -= tbuf->data_len;
182 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
184 sym_op->auth.digest.phys_addr =
185 rte_pktmbuf_mtophys_offset(buf, offset);
186 sym_op->auth.digest.length = options->auth_digest_sz;
187 sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
188 sym_op->auth.aad.data = test_vector->aad.data;
189 sym_op->auth.aad.length = options->auth_aad_sz;
193 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
194 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
195 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
196 sym_op->auth.data.length = options->test_buffer_size << 3;
198 sym_op->auth.data.length = options->test_buffer_size;
200 sym_op->auth.data.offset = 0;
203 if (options->test == CPERF_TEST_TYPE_VERIFY) {
204 if (test_vector->auth_iv.length) {
205 for (i = 0; i < nb_ops; i++) {
206 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
207 uint8_t *, iv_offset);
209 memcpy(iv_ptr, test_vector->auth_iv.data,
210 test_vector->auth_iv.length);
218 cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
219 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
220 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
221 const struct cperf_options *options,
222 const struct cperf_test_vector *test_vector,
227 for (i = 0; i < nb_ops; i++) {
228 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
230 rte_crypto_op_attach_sym_session(ops[i], sess);
232 sym_op->m_src = bufs_in[i];
233 sym_op->m_dst = bufs_out[i];
235 /* cipher parameters */
236 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
237 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
238 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
239 sym_op->cipher.data.length = options->test_buffer_size << 3;
241 sym_op->cipher.data.length = options->test_buffer_size;
243 sym_op->cipher.data.offset = 0;
245 /* authentication parameters */
246 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
247 sym_op->auth.digest.data = test_vector->digest.data;
248 sym_op->auth.digest.phys_addr =
249 test_vector->digest.phys_addr;
250 sym_op->auth.digest.length = options->auth_digest_sz;
253 uint32_t offset = options->test_buffer_size;
254 struct rte_mbuf *buf, *tbuf;
256 if (options->out_of_place) {
260 while ((tbuf->next != NULL) &&
261 (offset >= tbuf->data_len)) {
262 offset -= tbuf->data_len;
268 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
270 sym_op->auth.digest.phys_addr =
271 rte_pktmbuf_mtophys_offset(buf, offset);
272 sym_op->auth.digest.length = options->auth_digest_sz;
273 sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
274 sym_op->auth.aad.data = test_vector->aad.data;
275 sym_op->auth.aad.length = options->auth_aad_sz;
278 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
279 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
280 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
281 sym_op->auth.data.length = options->test_buffer_size << 3;
283 sym_op->auth.data.length = options->test_buffer_size;
285 sym_op->auth.data.offset = 0;
288 if (options->test == CPERF_TEST_TYPE_VERIFY) {
289 for (i = 0; i < nb_ops; i++) {
290 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
291 uint8_t *, iv_offset);
293 memcpy(iv_ptr, test_vector->cipher_iv.data,
294 test_vector->cipher_iv.length);
295 if (test_vector->auth_iv.length) {
297 * Copy IV after the crypto operation and
300 iv_ptr += test_vector->cipher_iv.length;
301 memcpy(iv_ptr, test_vector->auth_iv.data,
302 test_vector->auth_iv.length);
312 cperf_set_ops_aead(struct rte_crypto_op **ops,
313 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
314 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
315 const struct cperf_options *options,
316 const struct cperf_test_vector *test_vector,
321 for (i = 0; i < nb_ops; i++) {
322 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
324 rte_crypto_op_attach_sym_session(ops[i], sess);
326 sym_op->m_src = bufs_in[i];
327 sym_op->m_dst = bufs_out[i];
329 /* cipher parameters */
330 sym_op->cipher.data.length = options->test_buffer_size;
331 sym_op->cipher.data.offset =
332 RTE_ALIGN_CEIL(options->auth_aad_sz, 16);
334 sym_op->auth.aad.data = rte_pktmbuf_mtod(bufs_in[i], uint8_t *);
335 sym_op->auth.aad.phys_addr = rte_pktmbuf_mtophys(bufs_in[i]);
336 sym_op->auth.aad.length = options->auth_aad_sz;
338 /* authentication parameters */
339 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
340 sym_op->auth.digest.data = test_vector->digest.data;
341 sym_op->auth.digest.phys_addr =
342 test_vector->digest.phys_addr;
343 sym_op->auth.digest.length = options->auth_digest_sz;
346 uint32_t offset = sym_op->cipher.data.length +
347 sym_op->cipher.data.offset;
348 struct rte_mbuf *buf, *tbuf;
350 if (options->out_of_place) {
354 while ((tbuf->next != NULL) &&
355 (offset >= tbuf->data_len)) {
356 offset -= tbuf->data_len;
362 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
364 sym_op->auth.digest.phys_addr =
365 rte_pktmbuf_mtophys_offset(buf, offset);
367 sym_op->auth.digest.length = options->auth_digest_sz;
370 sym_op->auth.data.length = options->test_buffer_size;
371 sym_op->auth.data.offset = options->auth_aad_sz;
374 if (options->test == CPERF_TEST_TYPE_VERIFY) {
375 for (i = 0; i < nb_ops; i++) {
376 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
377 uint8_t *, iv_offset);
379 memcpy(iv_ptr, test_vector->cipher_iv.data,
380 test_vector->cipher_iv.length);
387 static struct rte_cryptodev_sym_session *
388 cperf_create_session(uint8_t dev_id,
389 const struct cperf_options *options,
390 const struct cperf_test_vector *test_vector,
393 struct rte_crypto_sym_xform cipher_xform;
394 struct rte_crypto_sym_xform auth_xform;
395 struct rte_cryptodev_sym_session *sess = NULL;
400 if (options->op_type == CPERF_CIPHER_ONLY) {
401 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
402 cipher_xform.next = NULL;
403 cipher_xform.cipher.algo = options->cipher_algo;
404 cipher_xform.cipher.op = options->cipher_op;
405 cipher_xform.cipher.iv.offset = iv_offset;
407 /* cipher different than null */
408 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
409 cipher_xform.cipher.key.data =
410 test_vector->cipher_key.data;
411 cipher_xform.cipher.key.length =
412 test_vector->cipher_key.length;
413 cipher_xform.cipher.iv.length =
414 test_vector->cipher_iv.length;
416 cipher_xform.cipher.key.data = NULL;
417 cipher_xform.cipher.key.length = 0;
418 cipher_xform.cipher.iv.length = 0;
420 /* create crypto session */
421 sess = rte_cryptodev_sym_session_create(dev_id, &cipher_xform);
425 } else if (options->op_type == CPERF_AUTH_ONLY) {
426 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
427 auth_xform.next = NULL;
428 auth_xform.auth.algo = options->auth_algo;
429 auth_xform.auth.op = options->auth_op;
431 /* auth different than null */
432 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
433 auth_xform.auth.digest_length =
434 options->auth_digest_sz;
435 auth_xform.auth.add_auth_data_length =
436 options->auth_aad_sz;
437 auth_xform.auth.key.length =
438 test_vector->auth_key.length;
439 auth_xform.auth.key.data = test_vector->auth_key.data;
440 auth_xform.auth.iv.length =
441 test_vector->auth_iv.length;
443 auth_xform.auth.digest_length = 0;
444 auth_xform.auth.add_auth_data_length = 0;
445 auth_xform.auth.key.length = 0;
446 auth_xform.auth.key.data = NULL;
447 auth_xform.auth.iv.length = 0;
449 /* create crypto session */
450 sess = rte_cryptodev_sym_session_create(dev_id, &auth_xform);
454 } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
455 || options->op_type == CPERF_AUTH_THEN_CIPHER
456 || options->op_type == CPERF_AEAD) {
461 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
462 cipher_xform.next = NULL;
463 cipher_xform.cipher.algo = options->cipher_algo;
464 cipher_xform.cipher.op = options->cipher_op;
465 cipher_xform.cipher.iv.offset = iv_offset;
467 /* cipher different than null */
468 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
469 cipher_xform.cipher.key.data =
470 test_vector->cipher_key.data;
471 cipher_xform.cipher.key.length =
472 test_vector->cipher_key.length;
473 cipher_xform.cipher.iv.length =
474 test_vector->cipher_iv.length;
476 cipher_xform.cipher.key.data = NULL;
477 cipher_xform.cipher.key.length = 0;
478 cipher_xform.cipher.iv.length = 0;
484 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
485 auth_xform.next = NULL;
486 auth_xform.auth.algo = options->auth_algo;
487 auth_xform.auth.op = options->auth_op;
489 /* auth different than null */
490 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
491 auth_xform.auth.digest_length = options->auth_digest_sz;
492 auth_xform.auth.add_auth_data_length =
493 options->auth_aad_sz;
494 /* auth options for aes gcm */
495 if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
496 options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM) {
497 auth_xform.auth.key.length = 0;
498 auth_xform.auth.key.data = NULL;
499 auth_xform.auth.iv.length = 0;
500 } else { /* auth options for others */
501 auth_xform.auth.key.length =
502 test_vector->auth_key.length;
503 auth_xform.auth.key.data =
504 test_vector->auth_key.data;
505 auth_xform.auth.iv.length =
506 test_vector->auth_iv.length;
509 auth_xform.auth.digest_length = 0;
510 auth_xform.auth.add_auth_data_length = 0;
511 auth_xform.auth.key.length = 0;
512 auth_xform.auth.key.data = NULL;
513 auth_xform.auth.iv.length = 0;
516 /* create crypto session for aes gcm */
517 if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM) {
518 if (options->cipher_op ==
519 RTE_CRYPTO_CIPHER_OP_ENCRYPT) {
520 cipher_xform.next = &auth_xform;
521 /* create crypto session */
522 sess = rte_cryptodev_sym_session_create(dev_id,
524 } else { /* decrypt */
525 auth_xform.next = &cipher_xform;
526 /* create crypto session */
527 sess = rte_cryptodev_sym_session_create(dev_id,
530 } else { /* create crypto session for other */
531 /* cipher then auth */
532 if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
533 cipher_xform.next = &auth_xform;
534 /* create crypto session */
535 sess = rte_cryptodev_sym_session_create(dev_id,
537 } else { /* auth then cipher */
538 auth_xform.next = &cipher_xform;
539 /* create crypto session */
540 sess = rte_cryptodev_sym_session_create(dev_id,
549 cperf_get_op_functions(const struct cperf_options *options,
550 struct cperf_op_fns *op_fns)
552 memset(op_fns, 0, sizeof(struct cperf_op_fns));
554 op_fns->sess_create = cperf_create_session;
556 if (options->op_type == CPERF_AEAD
557 || options->op_type == CPERF_AUTH_THEN_CIPHER
558 || options->op_type == CPERF_CIPHER_THEN_AUTH) {
559 if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
560 options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM)
561 op_fns->populate_ops = cperf_set_ops_aead;
563 op_fns->populate_ops = cperf_set_ops_cipher_auth;
566 if (options->op_type == CPERF_AUTH_ONLY) {
567 if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
568 op_fns->populate_ops = cperf_set_ops_null_auth;
570 op_fns->populate_ops = cperf_set_ops_auth;
573 if (options->op_type == CPERF_CIPHER_ONLY) {
574 if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
575 op_fns->populate_ops = cperf_set_ops_null_cipher;
577 op_fns->populate_ops = cperf_set_ops_cipher;