4 * Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
10 * * Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * * Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in
14 * the documentation and/or other materials provided with the
16 * * Neither the name of Intel Corporation nor the names of its
17 * contributors may be used to endorse or promote products derived
18 * from this software without specific prior written permission.
20 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
23 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
24 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
25 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
26 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
27 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
28 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
29 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33 #include <rte_cryptodev.h>
35 #include "cperf_ops.h"
36 #include "cperf_test_vectors.h"
/*
 * Populate nb_ops crypto ops for a NULL-cipher perf test: attach the
 * session, wire up source/destination mbufs, and set the cipher data
 * length/offset from the test options. No IV or key material is needed
 * for the NULL cipher, so test_vector and iv_offset are unused.
 * NOTE(review): this chunk is missing surrounding lines (return type,
 * loop-variable declaration, closing braces) — do not edit structurally.
 */
39 cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
40 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
41 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
42 const struct cperf_options *options,
43 const struct cperf_test_vector *test_vector __rte_unused,
44 uint16_t iv_offset __rte_unused)
48 for (i = 0; i < nb_ops; i++) {
49 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
/* bind the pre-created session to this op */
51 rte_crypto_op_attach_sym_session(ops[i], sess);
53 sym_op->m_src = bufs_in[i];
54 sym_op->m_dst = bufs_out[i];
56 /* cipher parameters */
/* whole test buffer is "ciphered" starting at offset 0 */
57 sym_op->cipher.data.length = options->test_buffer_size;
58 sym_op->cipher.data.offset = 0;
/*
 * Populate nb_ops crypto ops for a NULL-auth perf test: attach the
 * session, wire up source/destination mbufs, and set the auth data
 * length/offset from the test options. No digest/key material is needed
 * for NULL auth, so test_vector and iv_offset are unused.
 * NOTE(review): surrounding structural lines (return type, loop-variable
 * declaration, closing braces) are not visible in this chunk.
 */
65 cperf_set_ops_null_auth(struct rte_crypto_op **ops,
66 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
67 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
68 const struct cperf_options *options,
69 const struct cperf_test_vector *test_vector __rte_unused,
70 uint16_t iv_offset __rte_unused)
74 for (i = 0; i < nb_ops; i++) {
75 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
/* bind the pre-created session to this op */
77 rte_crypto_op_attach_sym_session(ops[i], sess);
79 sym_op->m_src = bufs_in[i];
80 sym_op->m_dst = bufs_out[i];
/* authenticate the whole test buffer starting at offset 0 */
83 sym_op->auth.data.length = options->test_buffer_size;
84 sym_op->auth.data.offset = 0;
/*
 * Populate nb_ops crypto ops for a cipher-only perf test. For the
 * bit-oriented algorithms (SNOW3G UEA2, KASUMI F8, ZUC EEA3) the cipher
 * data length is expressed in bits, hence the << 3 conversion from the
 * byte-sized test buffer. For VERIFY-type tests the test vector's IV is
 * copied into each op at iv_offset so results are reproducible.
 * NOTE(review): the else-branch line between the bit-length and
 * byte-length assignments, plus braces/returns, are elided in this chunk.
 */
91 cperf_set_ops_cipher(struct rte_crypto_op **ops,
92 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
93 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
94 const struct cperf_options *options,
95 const struct cperf_test_vector *test_vector,
100 for (i = 0; i < nb_ops; i++) {
101 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
103 rte_crypto_op_attach_sym_session(ops[i], sess);
105 sym_op->m_src = bufs_in[i];
106 sym_op->m_dst = bufs_out[i];
108 /* cipher parameters */
/* bit-level algorithms take the data length in bits */
109 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
110 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
111 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
112 sym_op->cipher.data.length = options->test_buffer_size << 3;
114 sym_op->cipher.data.length = options->test_buffer_size;
116 sym_op->cipher.data.offset = 0;
/* verify tests need the known IV so output can be checked */
119 if (options->test == CPERF_TEST_TYPE_VERIFY) {
120 for (i = 0; i < nb_ops; i++) {
121 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
122 uint8_t *, iv_offset);
124 memcpy(iv_ptr, test_vector->iv.data,
125 test_vector->iv.length);
/*
 * Populate nb_ops crypto ops for an auth-only perf test.
 *
 * Digest placement: for AUTH_OP_VERIFY the known digest from the test
 * vector is referenced directly; otherwise the digest is placed in the
 * packet itself, immediately after the payload — the while loop walks a
 * chained mbuf (out-of-place uses the dst chain, per the out_of_place
 * branch) to find the segment/offset where the digest lands. AAD always
 * comes from the test vector. Bit-oriented algorithms (SNOW3G UIA2,
 * KASUMI F9, ZUC EIA3) take auth.data.length in bits (<< 3).
 * NOTE(review): else-branches, brace lines and the buf/tbuf seeding
 * statements are elided in this chunk — structure must not be inferred.
 */
132 cperf_set_ops_auth(struct rte_crypto_op **ops,
133 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
134 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
135 const struct cperf_options *options,
136 const struct cperf_test_vector *test_vector,
137 uint16_t iv_offset __rte_unused)
141 for (i = 0; i < nb_ops; i++) {
142 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
144 rte_crypto_op_attach_sym_session(ops[i], sess);
146 sym_op->m_src = bufs_in[i];
147 sym_op->m_dst = bufs_out[i];
149 /* authentication parameters */
150 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
151 sym_op->auth.digest.data = test_vector->digest.data;
152 sym_op->auth.digest.phys_addr =
153 test_vector->digest.phys_addr;
154 sym_op->auth.digest.length = options->auth_digest_sz;
/* generate path: digest goes right after the payload in the mbuf */
157 uint32_t offset = options->test_buffer_size;
158 struct rte_mbuf *buf, *tbuf;
160 if (options->out_of_place) {
/* walk chained segments until offset falls inside one */
164 while ((tbuf->next != NULL) &&
165 (offset >= tbuf->data_len)) {
166 offset -= tbuf->data_len;
172 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
174 sym_op->auth.digest.phys_addr =
175 rte_pktmbuf_mtophys_offset(buf, offset);
176 sym_op->auth.digest.length = options->auth_digest_sz;
177 sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
178 sym_op->auth.aad.data = test_vector->aad.data;
179 sym_op->auth.aad.length = options->auth_aad_sz;
/* bit-level algorithms take the data length in bits */
183 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
184 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
185 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
186 sym_op->auth.data.length = options->test_buffer_size << 3;
188 sym_op->auth.data.length = options->test_buffer_size;
190 sym_op->auth.data.offset = 0;
/*
 * Populate nb_ops crypto ops for a chained cipher+auth perf test.
 * Combines the cipher setup (bit-length << 3 conversion for SNOW3G/
 * KASUMI/ZUC) with the auth setup (digest from the test vector for
 * VERIFY ops, otherwise appended after the payload by walking the mbuf
 * chain). For VERIFY-type tests the known IV is copied into each op at
 * iv_offset after the loop.
 * NOTE(review): else-branches, brace lines and the buf/tbuf seeding
 * statements are elided in this chunk — structure must not be inferred.
 */
197 cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
198 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
199 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
200 const struct cperf_options *options,
201 const struct cperf_test_vector *test_vector,
206 for (i = 0; i < nb_ops; i++) {
207 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
209 rte_crypto_op_attach_sym_session(ops[i], sess);
211 sym_op->m_src = bufs_in[i];
212 sym_op->m_dst = bufs_out[i];
214 /* cipher parameters */
/* bit-level algorithms take the data length in bits */
215 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
216 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
217 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
218 sym_op->cipher.data.length = options->test_buffer_size << 3;
220 sym_op->cipher.data.length = options->test_buffer_size;
222 sym_op->cipher.data.offset = 0;
224 /* authentication parameters */
225 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
226 sym_op->auth.digest.data = test_vector->digest.data;
227 sym_op->auth.digest.phys_addr =
228 test_vector->digest.phys_addr;
229 sym_op->auth.digest.length = options->auth_digest_sz;
/* generate path: digest goes right after the payload in the mbuf */
232 uint32_t offset = options->test_buffer_size;
233 struct rte_mbuf *buf, *tbuf;
235 if (options->out_of_place) {
/* walk chained segments until offset falls inside one */
239 while ((tbuf->next != NULL) &&
240 (offset >= tbuf->data_len)) {
241 offset -= tbuf->data_len;
247 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
249 sym_op->auth.digest.phys_addr =
250 rte_pktmbuf_mtophys_offset(buf, offset);
251 sym_op->auth.digest.length = options->auth_digest_sz;
252 sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
253 sym_op->auth.aad.data = test_vector->aad.data;
254 sym_op->auth.aad.length = options->auth_aad_sz;
/* bit-level algorithms take the data length in bits */
257 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
258 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
259 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
260 sym_op->auth.data.length = options->test_buffer_size << 3;
262 sym_op->auth.data.length = options->test_buffer_size;
264 sym_op->auth.data.offset = 0;
/* verify tests need the known IV so output can be checked */
267 if (options->test == CPERF_TEST_TYPE_VERIFY) {
268 for (i = 0; i < nb_ops; i++) {
269 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
270 uint8_t *, iv_offset);
272 memcpy(iv_ptr, test_vector->iv.data,
273 test_vector->iv.length);
/*
 * Populate nb_ops crypto ops for an AEAD (e.g. AES-GCM) perf test.
 * Layout in the mbuf: AAD at the start of the buffer, then the cipher
 * payload at a 16-byte-aligned offset past the AAD, then the digest
 * after the payload (found by walking the mbuf chain; dst chain when
 * out_of_place). AUTH_OP_VERIFY ops reference the known digest from the
 * test vector instead. For VERIFY-type tests the known IV is copied
 * into each op at iv_offset after the loop.
 * NOTE(review): else-branches, brace lines and the buf/tbuf seeding
 * statements are elided in this chunk — structure must not be inferred.
 */
281 cperf_set_ops_aead(struct rte_crypto_op **ops,
282 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
283 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
284 const struct cperf_options *options,
285 const struct cperf_test_vector *test_vector,
290 for (i = 0; i < nb_ops; i++) {
291 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
293 rte_crypto_op_attach_sym_session(ops[i], sess);
295 sym_op->m_src = bufs_in[i];
296 sym_op->m_dst = bufs_out[i];
298 /* cipher parameters */
299 sym_op->cipher.data.length = options->test_buffer_size;
/* payload starts after the AAD, rounded up to a 16-byte boundary */
300 sym_op->cipher.data.offset =
301 RTE_ALIGN_CEIL(options->auth_aad_sz, 16);
/* AAD lives at the head of the source mbuf */
303 sym_op->auth.aad.data = rte_pktmbuf_mtod(bufs_in[i], uint8_t *);
304 sym_op->auth.aad.phys_addr = rte_pktmbuf_mtophys(bufs_in[i]);
305 sym_op->auth.aad.length = options->auth_aad_sz;
307 /* authentication parameters */
308 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
309 sym_op->auth.digest.data = test_vector->digest.data;
310 sym_op->auth.digest.phys_addr =
311 test_vector->digest.phys_addr;
312 sym_op->auth.digest.length = options->auth_digest_sz;
/* generate path: digest goes right after the ciphered payload */
315 uint32_t offset = sym_op->cipher.data.length +
316 sym_op->cipher.data.offset;
317 struct rte_mbuf *buf, *tbuf;
319 if (options->out_of_place) {
/* walk chained segments until offset falls inside one */
323 while ((tbuf->next != NULL) &&
324 (offset >= tbuf->data_len)) {
325 offset -= tbuf->data_len;
331 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
333 sym_op->auth.digest.phys_addr =
334 rte_pktmbuf_mtophys_offset(buf, offset);
336 sym_op->auth.digest.length = options->auth_digest_sz;
339 sym_op->auth.data.length = options->test_buffer_size;
340 sym_op->auth.data.offset = options->auth_aad_sz;
/* verify tests need the known IV so output can be checked */
343 if (options->test == CPERF_TEST_TYPE_VERIFY) {
344 for (i = 0; i < nb_ops; i++) {
345 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
346 uint8_t *, iv_offset);
348 memcpy(iv_ptr, test_vector->iv.data,
349 test_vector->iv.length);
/*
 * Build the crypto transform chain(s) from the perf-test options and
 * create a symmetric session on device dev_id.
 *
 * - CIPHER_ONLY: single cipher xform; key/IV come from the test vector
 *   unless the NULL cipher is selected (then they are zeroed).
 * - AUTH_ONLY: single auth xform; key/digest/AAD sizing from options and
 *   test vector unless NULL auth.
 * - CIPHER_THEN_AUTH / AUTH_THEN_CIPHER / AEAD: both xforms are built and
 *   chained via the .next pointers. For AES-GCM the chain order follows
 *   the cipher direction (encrypt: cipher->auth, decrypt: auth->cipher)
 *   and no separate auth key is set; otherwise the chain order follows
 *   the op type.
 *
 * Returns the created session (NULL is the initial value; error-path
 * lines are not visible in this chunk).
 * NOTE(review): this uses the pre-17.08 session API
 * rte_cryptodev_sym_session_create(dev_id, xform); several call-argument
 * lines, else-lines and closing braces are elided in this chunk.
 */
356 static struct rte_cryptodev_sym_session *
357 cperf_create_session(uint8_t dev_id,
358 const struct cperf_options *options,
359 const struct cperf_test_vector *test_vector,
362 struct rte_crypto_sym_xform cipher_xform;
363 struct rte_crypto_sym_xform auth_xform;
364 struct rte_cryptodev_sym_session *sess = NULL;
369 if (options->op_type == CPERF_CIPHER_ONLY) {
370 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
371 cipher_xform.next = NULL;
372 cipher_xform.cipher.algo = options->cipher_algo;
373 cipher_xform.cipher.op = options->cipher_op;
374 cipher_xform.cipher.iv.offset = iv_offset;
376 /* cipher different than null */
377 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
378 cipher_xform.cipher.key.data =
379 test_vector->cipher_key.data;
380 cipher_xform.cipher.key.length =
381 test_vector->cipher_key.length;
382 cipher_xform.cipher.iv.length = test_vector->iv.length;
/* NULL cipher: no key or IV required */
385 cipher_xform.cipher.key.data = NULL;
386 cipher_xform.cipher.key.length = 0;
387 cipher_xform.cipher.iv.length = 0;
389 /* create crypto session */
390 sess = rte_cryptodev_sym_session_create(dev_id, &cipher_xform);
394 } else if (options->op_type == CPERF_AUTH_ONLY) {
395 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
396 auth_xform.next = NULL;
397 auth_xform.auth.algo = options->auth_algo;
398 auth_xform.auth.op = options->auth_op;
400 /* auth different than null */
401 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
402 auth_xform.auth.digest_length =
403 options->auth_digest_sz;
404 auth_xform.auth.add_auth_data_length =
405 options->auth_aad_sz;
406 auth_xform.auth.key.length =
407 test_vector->auth_key.length;
408 auth_xform.auth.key.data = test_vector->auth_key.data;
/* NULL auth: no key, digest or AAD required */
410 auth_xform.auth.digest_length = 0;
411 auth_xform.auth.add_auth_data_length = 0;
412 auth_xform.auth.key.length = 0;
413 auth_xform.auth.key.data = NULL;
415 /* create crypto session */
416 sess = rte_cryptodev_sym_session_create(dev_id, &auth_xform);
420 } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
421 || options->op_type == CPERF_AUTH_THEN_CIPHER
422 || options->op_type == CPERF_AEAD) {
/* chained case: build both xforms, then link them below */
427 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
428 cipher_xform.next = NULL;
429 cipher_xform.cipher.algo = options->cipher_algo;
430 cipher_xform.cipher.op = options->cipher_op;
431 cipher_xform.cipher.iv.offset = iv_offset;
433 /* cipher different than null */
434 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
435 cipher_xform.cipher.key.data =
436 test_vector->cipher_key.data;
437 cipher_xform.cipher.key.length =
438 test_vector->cipher_key.length;
439 cipher_xform.cipher.iv.length = test_vector->iv.length;
441 cipher_xform.cipher.key.data = NULL;
442 cipher_xform.cipher.key.length = 0;
443 cipher_xform.cipher.iv.length = 0;
449 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
450 auth_xform.next = NULL;
451 auth_xform.auth.algo = options->auth_algo;
452 auth_xform.auth.op = options->auth_op;
454 /* auth different than null */
455 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
456 auth_xform.auth.digest_length = options->auth_digest_sz;
457 auth_xform.auth.add_auth_data_length =
458 options->auth_aad_sz;
459 /* auth options for aes gcm */
460 if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
461 options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM) {
462 auth_xform.auth.key.length = 0;
463 auth_xform.auth.key.data = NULL;
464 } else { /* auth options for others */
465 auth_xform.auth.key.length =
466 test_vector->auth_key.length;
467 auth_xform.auth.key.data =
468 test_vector->auth_key.data;
471 auth_xform.auth.digest_length = 0;
472 auth_xform.auth.add_auth_data_length = 0;
473 auth_xform.auth.key.length = 0;
474 auth_xform.auth.key.data = NULL;
477 /* create crypto session for aes gcm */
478 if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM) {
479 if (options->cipher_op ==
480 RTE_CRYPTO_CIPHER_OP_ENCRYPT) {
/* GCM encrypt: cipher first, then auth */
481 cipher_xform.next = &auth_xform;
482 /* create crypto session */
483 sess = rte_cryptodev_sym_session_create(dev_id,
485 } else { /* decrypt */
/* GCM decrypt: auth first, then cipher */
486 auth_xform.next = &cipher_xform;
487 /* create crypto session */
488 sess = rte_cryptodev_sym_session_create(dev_id,
491 } else { /* create crypto session for other */
492 /* cipher then auth */
493 if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
494 cipher_xform.next = &auth_xform;
495 /* create crypto session */
496 sess = rte_cryptodev_sym_session_create(dev_id,
498 } else { /* auth then cipher */
499 auth_xform.next = &cipher_xform;
500 /* create crypto session */
501 sess = rte_cryptodev_sym_session_create(dev_id,
/*
 * Select the op-population callback for the configured test: AES-GCM
 * chained modes use the AEAD populator, other chained modes use
 * cipher+auth, and the single-op modes pick the NULL or real variant by
 * algorithm. sess_create is always cperf_create_session.
 * NOTE(review): return-type line, else-lines and the trailing
 * return/closing brace are not visible in this chunk.
 */
510 cperf_get_op_functions(const struct cperf_options *options,
511 struct cperf_op_fns *op_fns)
513 memset(op_fns, 0, sizeof(struct cperf_op_fns));
515 op_fns->sess_create = cperf_create_session;
517 if (options->op_type == CPERF_AEAD
518 || options->op_type == CPERF_AUTH_THEN_CIPHER
519 || options->op_type == CPERF_CIPHER_THEN_AUTH) {
/* AES-GCM on both sides means a true AEAD op */
520 if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
521 options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM)
522 op_fns->populate_ops = cperf_set_ops_aead;
524 op_fns->populate_ops = cperf_set_ops_cipher_auth;
527 if (options->op_type == CPERF_AUTH_ONLY) {
528 if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
529 op_fns->populate_ops = cperf_set_ops_null_auth;
531 op_fns->populate_ops = cperf_set_ops_auth;
534 if (options->op_type == CPERF_CIPHER_ONLY) {
535 if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
536 op_fns->populate_ops = cperf_set_ops_null_cipher;
538 op_fns->populate_ops = cperf_set_ops_cipher;