4 * Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
10 * * Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * * Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in
14 * the documentation and/or other materials provided with the
16 * * Neither the name of Intel Corporation nor the names of its
17 * contributors may be used to endorse or promote products derived
18 * from this software without specific prior written permission.
20 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
23 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
24 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
25 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
26 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
27 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
28 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
29 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33 #include <rte_cryptodev.h>
35 #include "cperf_ops.h"
36 #include "cperf_test_vectors.h"
/*
 * Populate @nb_ops crypto ops for NULL-cipher benchmarking: attach the
 * pre-created symmetric session to each op and wire up the source /
 * destination mbufs and the cipher data region.  The test vector and IV
 * offset are unused — the NULL cipher needs neither key material nor IV.
 * NOTE(review): this view elides some original lines (opening brace,
 * loop-index declaration, closing braces and return) — see the gaps in
 * the embedded numbering.
 */
39 cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
40 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
41 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
42 const struct cperf_options *options,
43 const struct cperf_test_vector *test_vector __rte_unused,
44 uint16_t iv_offset __rte_unused)
48 for (i = 0; i < nb_ops; i++) {
49 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
/* associate the op with the session created by cperf_create_session() */
51 rte_crypto_op_attach_sym_session(ops[i], sess);
53 sym_op->m_src = bufs_in[i];
54 sym_op->m_dst = bufs_out[i];
56 /* cipher parameters */
/* the whole test buffer is processed, starting at offset 0 */
57 sym_op->cipher.data.length = options->test_buffer_size;
58 sym_op->cipher.data.offset = 0;
/*
 * Populate @nb_ops crypto ops for NULL-auth benchmarking: attach the
 * session and set source/destination mbufs plus the auth data region.
 * Test vector and IV offset are unused — NULL auth needs no key or IV.
 * NOTE(review): elided original lines (brace, loop index, return) are
 * visible as gaps in the embedded numbering.
 */
65 cperf_set_ops_null_auth(struct rte_crypto_op **ops,
66 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
67 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
68 const struct cperf_options *options,
69 const struct cperf_test_vector *test_vector __rte_unused,
70 uint16_t iv_offset __rte_unused)
74 for (i = 0; i < nb_ops; i++) {
75 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
/* associate the op with the pre-created session */
77 rte_crypto_op_attach_sym_session(ops[i], sess);
79 sym_op->m_src = bufs_in[i];
80 sym_op->m_dst = bufs_out[i];
/* authenticate the whole test buffer from offset 0 */
83 sym_op->auth.data.length = options->test_buffer_size;
84 sym_op->auth.data.offset = 0;
/*
 * Populate @nb_ops crypto ops for cipher-only benchmarking: attach the
 * session, set src/dst mbufs, and fill in the cipher IV location and
 * data region.  For VERIFY-type tests the known-answer IV from the test
 * vector is additionally copied into each op so output can be checked.
 * NOTE(review): several original lines are elided in this view (opening
 * brace, loop index, the `else` between the two data.length assignments,
 * closing braces and return) — gaps in the embedded numbering.
 */
91 cperf_set_ops_cipher(struct rte_crypto_op **ops,
92 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
93 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
94 const struct cperf_options *options,
95 const struct cperf_test_vector *test_vector,
100 for (i = 0; i < nb_ops; i++) {
101 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
103 rte_crypto_op_attach_sym_session(ops[i], sess);
105 sym_op->m_src = bufs_in[i];
106 sym_op->m_dst = bufs_out[i];
108 /* cipher parameters */
/* IV lives in the op's private area at iv_offset */
109 sym_op->cipher.iv.offset = iv_offset;
110 sym_op->cipher.iv.length = test_vector->iv.length;
/*
 * SNOW3G/KASUMI/ZUC are bit-oriented: data length is expressed in
 * bits, hence the << 3 (bytes -> bits); byte algos use bytes directly.
 */
112 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
113 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
114 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
115 sym_op->cipher.data.length = options->test_buffer_size << 3;
117 sym_op->cipher.data.length = options->test_buffer_size;
119 sym_op->cipher.data.offset = 0;
/* verify tests need a deterministic IV: copy it from the test vector */
122 if (options->test == CPERF_TEST_TYPE_VERIFY) {
123 for (i = 0; i < nb_ops; i++) {
124 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
125 uint8_t *, iv_offset);
127 memcpy(iv_ptr, test_vector->iv.data,
128 test_vector->iv.length);
/*
 * Populate @nb_ops crypto ops for auth-only benchmarking: attach the
 * session, set src/dst mbufs, and point digest/AAD/data fields at the
 * right buffers.  VERIFY auth ops reference the precomputed digest in
 * the test vector; GENERATE ops place the digest just past the payload
 * in the (possibly chained) destination mbuf.
 * NOTE(review): elided original lines include the opening brace, loop
 * index, the buf/tbuf chain-selection assignments inside the
 * out_of_place branch, an `else`, and the closing braces/return.
 */
135 cperf_set_ops_auth(struct rte_crypto_op **ops,
136 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
137 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
138 const struct cperf_options *options,
139 const struct cperf_test_vector *test_vector,
140 uint16_t iv_offset __rte_unused)
144 for (i = 0; i < nb_ops; i++) {
145 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
147 rte_crypto_op_attach_sym_session(ops[i], sess);
149 sym_op->m_src = bufs_in[i];
150 sym_op->m_dst = bufs_out[i];
152 /* authentication parameters */
/* VERIFY: compare against the known-answer digest from the vector */
153 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
154 sym_op->auth.digest.data = test_vector->digest.data;
155 sym_op->auth.digest.phys_addr =
156 test_vector->digest.phys_addr;
157 sym_op->auth.digest.length = options->auth_digest_sz;
/* GENERATE: digest goes right after the payload in the dst mbuf */
160 uint32_t offset = options->test_buffer_size;
161 struct rte_mbuf *buf, *tbuf;
163 if (options->out_of_place) {
/* walk the mbuf chain until `offset` falls inside a segment */
167 while ((tbuf->next != NULL) &&
168 (offset >= tbuf->data_len)) {
169 offset -= tbuf->data_len;
175 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
177 sym_op->auth.digest.phys_addr =
178 rte_pktmbuf_mtophys_offset(buf, offset);
179 sym_op->auth.digest.length = options->auth_digest_sz;
180 sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
181 sym_op->auth.aad.data = test_vector->aad.data;
182 sym_op->auth.aad.length = options->auth_aad_sz;
/* bit-oriented hash algos take the length in bits (bytes << 3) */
186 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
187 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
188 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
189 sym_op->auth.data.length = options->test_buffer_size << 3;
191 sym_op->auth.data.length = options->test_buffer_size;
193 sym_op->auth.data.offset = 0;
/*
 * Populate @nb_ops crypto ops for chained cipher+auth benchmarking.
 * This is the union of cperf_set_ops_cipher() and cperf_set_ops_auth():
 * per-op session attach, src/dst mbufs, cipher IV/data region, digest
 * placement (test-vector digest for VERIFY, end-of-payload in the dst
 * mbuf chain for GENERATE), AAD from the test vector, and — for
 * VERIFY-type tests — the known IV copied into each op.
 * NOTE(review): same elisions as the sibling functions (braces, loop
 * index, `else` lines, chain-selection assignments, return).
 */
200 cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
201 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
202 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
203 const struct cperf_options *options,
204 const struct cperf_test_vector *test_vector,
209 for (i = 0; i < nb_ops; i++) {
210 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
212 rte_crypto_op_attach_sym_session(ops[i], sess);
214 sym_op->m_src = bufs_in[i];
215 sym_op->m_dst = bufs_out[i];
217 /* cipher parameters */
218 sym_op->cipher.iv.offset = iv_offset;
219 sym_op->cipher.iv.length = test_vector->iv.length;
/* bit-oriented ciphers express the length in bits (bytes << 3) */
221 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
222 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
223 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
224 sym_op->cipher.data.length = options->test_buffer_size << 3;
226 sym_op->cipher.data.length = options->test_buffer_size;
228 sym_op->cipher.data.offset = 0;
230 /* authentication parameters */
/* VERIFY: reference the precomputed digest from the test vector */
231 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
232 sym_op->auth.digest.data = test_vector->digest.data;
233 sym_op->auth.digest.phys_addr =
234 test_vector->digest.phys_addr;
235 sym_op->auth.digest.length = options->auth_digest_sz;
/* GENERATE: digest is written just past the payload in the dst mbuf */
238 uint32_t offset = options->test_buffer_size;
239 struct rte_mbuf *buf, *tbuf;
241 if (options->out_of_place) {
/* walk the chain until `offset` falls inside a segment */
245 while ((tbuf->next != NULL) &&
246 (offset >= tbuf->data_len)) {
247 offset -= tbuf->data_len;
253 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
255 sym_op->auth.digest.phys_addr =
256 rte_pktmbuf_mtophys_offset(buf, offset);
257 sym_op->auth.digest.length = options->auth_digest_sz;
258 sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
259 sym_op->auth.aad.data = test_vector->aad.data;
260 sym_op->auth.aad.length = options->auth_aad_sz;
/* bit-oriented hash algos take the length in bits (bytes << 3) */
263 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
264 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
265 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
266 sym_op->auth.data.length = options->test_buffer_size << 3;
268 sym_op->auth.data.length = options->test_buffer_size;
270 sym_op->auth.data.offset = 0;
/* verify tests need a deterministic IV: copy it from the test vector */
273 if (options->test == CPERF_TEST_TYPE_VERIFY) {
274 for (i = 0; i < nb_ops; i++) {
275 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
276 uint8_t *, iv_offset);
278 memcpy(iv_ptr, test_vector->iv.data,
279 test_vector->iv.length);
/*
 * Populate @nb_ops crypto ops for AEAD (AES-GCM) benchmarking.  Buffer
 * layout per op: AAD at the start of the mbuf, then the ciphered
 * payload at the next 16-byte-aligned offset, then (for GENERATE) the
 * digest immediately after the payload.  VERIFY ops reference the
 * precomputed digest from the test vector and get the known IV copied
 * into each op at the end.
 * NOTE(review): elided original lines include the opening brace, loop
 * index, the buf/tbuf chain-selection assignments, an `else`, and the
 * closing braces/return.
 */
287 cperf_set_ops_aead(struct rte_crypto_op **ops,
288 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
289 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
290 const struct cperf_options *options,
291 const struct cperf_test_vector *test_vector,
296 for (i = 0; i < nb_ops; i++) {
297 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
299 rte_crypto_op_attach_sym_session(ops[i], sess);
301 sym_op->m_src = bufs_in[i];
302 sym_op->m_dst = bufs_out[i];
304 /* cipher parameters */
305 sym_op->cipher.iv.offset = iv_offset;
306 sym_op->cipher.iv.length = test_vector->iv.length;
/* payload starts after the AAD, rounded up to a 16-byte boundary */
308 sym_op->cipher.data.length = options->test_buffer_size;
309 sym_op->cipher.data.offset =
310 RTE_ALIGN_CEIL(options->auth_aad_sz, 16);
/* AAD occupies the head of the source mbuf */
312 sym_op->auth.aad.data = rte_pktmbuf_mtod(bufs_in[i], uint8_t *);
313 sym_op->auth.aad.phys_addr = rte_pktmbuf_mtophys(bufs_in[i]);
314 sym_op->auth.aad.length = options->auth_aad_sz;
316 /* authentication parameters */
/* VERIFY: compare against the known-answer digest from the vector */
317 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
318 sym_op->auth.digest.data = test_vector->digest.data;
319 sym_op->auth.digest.phys_addr =
320 test_vector->digest.phys_addr;
321 sym_op->auth.digest.length = options->auth_digest_sz;
/* GENERATE: digest goes right after AAD + payload in the dst mbuf */
324 uint32_t offset = sym_op->cipher.data.length +
325 sym_op->cipher.data.offset;
326 struct rte_mbuf *buf, *tbuf;
328 if (options->out_of_place) {
/* walk the chain until `offset` falls inside a segment */
332 while ((tbuf->next != NULL) &&
333 (offset >= tbuf->data_len)) {
334 offset -= tbuf->data_len;
340 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
342 sym_op->auth.digest.phys_addr =
343 rte_pktmbuf_mtophys_offset(buf, offset);
345 sym_op->auth.digest.length = options->auth_digest_sz;
/* auth covers the payload only; it begins right after the AAD */
348 sym_op->auth.data.length = options->test_buffer_size;
349 sym_op->auth.data.offset = options->auth_aad_sz;
/* verify tests need a deterministic IV: copy it from the test vector */
352 if (options->test == CPERF_TEST_TYPE_VERIFY) {
353 for (i = 0; i < nb_ops; i++) {
354 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
355 uint8_t *, iv_offset);
357 memcpy(iv_ptr, test_vector->iv.data,
358 test_vector->iv.length);
/*
 * Build the rte_crypto_sym_xform chain(s) described by @options and
 * @test_vector and create a symmetric crypto session on device @dev_id.
 *
 * Shapes produced:
 *  - CIPHER_ONLY: single cipher xform (key NULL/0 for the NULL cipher).
 *  - AUTH_ONLY:   single auth xform (digest/AAD/key zeroed for NULL).
 *  - CIPHER_THEN_AUTH / AUTH_THEN_CIPHER / AEAD: both xforms, chained
 *    per op type; AES-GCM is special-cased (auth key left NULL — the
 *    key is carried by the cipher xform — and chaining order follows
 *    the cipher direction: encrypt = cipher->auth, decrypt = auth->cipher).
 *
 * Returns the created session (NULL presumably indicates failure —
 * elided lines prevent confirming the exact error path from this view).
 * NOTE(review): opening brace, some locals, trailing arguments of the
 * rte_cryptodev_sym_session_create() calls, several `else` lines and
 * the final return are elided (gaps in the embedded numbering).
 */
365 static struct rte_cryptodev_sym_session *
366 cperf_create_session(uint8_t dev_id,
367 const struct cperf_options *options,
368 const struct cperf_test_vector *test_vector)
370 struct rte_crypto_sym_xform cipher_xform;
371 struct rte_crypto_sym_xform auth_xform;
372 struct rte_cryptodev_sym_session *sess = NULL;
/* cipher-only: one cipher transform, no chaining */
377 if (options->op_type == CPERF_CIPHER_ONLY) {
378 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
379 cipher_xform.next = NULL;
380 cipher_xform.cipher.algo = options->cipher_algo;
381 cipher_xform.cipher.op = options->cipher_op;
383 /* cipher different than null */
384 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
385 cipher_xform.cipher.key.data =
386 test_vector->cipher_key.data;
387 cipher_xform.cipher.key.length =
388 test_vector->cipher_key.length;
/* NULL cipher takes no key */
390 cipher_xform.cipher.key.data = NULL;
391 cipher_xform.cipher.key.length = 0;
393 /* create crypto session */
394 sess = rte_cryptodev_sym_session_create(dev_id, &cipher_xform);
/* auth-only: one auth transform, no chaining */
398 } else if (options->op_type == CPERF_AUTH_ONLY) {
399 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
400 auth_xform.next = NULL;
401 auth_xform.auth.algo = options->auth_algo;
402 auth_xform.auth.op = options->auth_op;
404 /* auth different than null */
405 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
406 auth_xform.auth.digest_length =
407 options->auth_digest_sz;
408 auth_xform.auth.add_auth_data_length =
409 options->auth_aad_sz;
410 auth_xform.auth.key.length =
411 test_vector->auth_key.length;
412 auth_xform.auth.key.data = test_vector->auth_key.data;
/* NULL auth: no digest, AAD or key */
414 auth_xform.auth.digest_length = 0;
415 auth_xform.auth.add_auth_data_length = 0;
416 auth_xform.auth.key.length = 0;
417 auth_xform.auth.key.data = NULL;
419 /* create crypto session */
420 sess = rte_cryptodev_sym_session_create(dev_id, &auth_xform);
/* chained modes: build both transforms, then link them */
424 } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
425 || options->op_type == CPERF_AUTH_THEN_CIPHER
426 || options->op_type == CPERF_AEAD) {
431 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
432 cipher_xform.next = NULL;
433 cipher_xform.cipher.algo = options->cipher_algo;
434 cipher_xform.cipher.op = options->cipher_op;
436 /* cipher different than null */
437 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
438 cipher_xform.cipher.key.data =
439 test_vector->cipher_key.data;
440 cipher_xform.cipher.key.length =
441 test_vector->cipher_key.length;
443 cipher_xform.cipher.key.data = NULL;
444 cipher_xform.cipher.key.length = 0;
450 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
451 auth_xform.next = NULL;
452 auth_xform.auth.algo = options->auth_algo;
453 auth_xform.auth.op = options->auth_op;
455 /* auth different than null */
456 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
457 auth_xform.auth.digest_length = options->auth_digest_sz;
458 auth_xform.auth.add_auth_data_length =
459 options->auth_aad_sz;
460 /* auth options for aes gcm */
/* GCM carries the key in the cipher xform; auth key stays empty */
461 if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
462 options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM) {
463 auth_xform.auth.key.length = 0;
464 auth_xform.auth.key.data = NULL;
465 } else { /* auth options for others */
466 auth_xform.auth.key.length =
467 test_vector->auth_key.length;
468 auth_xform.auth.key.data =
469 test_vector->auth_key.data;
472 auth_xform.auth.digest_length = 0;
473 auth_xform.auth.add_auth_data_length = 0;
474 auth_xform.auth.key.length = 0;
475 auth_xform.auth.key.data = NULL;
478 /* create crypto session for aes gcm */
/* GCM chaining order follows cipher direction, not op_type */
479 if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM) {
480 if (options->cipher_op ==
481 RTE_CRYPTO_CIPHER_OP_ENCRYPT) {
482 cipher_xform.next = &auth_xform;
483 /* create crypto session */
484 sess = rte_cryptodev_sym_session_create(dev_id,
486 } else { /* decrypt */
487 auth_xform.next = &cipher_xform;
488 /* create crypto session */
489 sess = rte_cryptodev_sym_session_create(dev_id,
492 } else { /* create crypto session for other */
493 /* cipher then auth */
494 if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
495 cipher_xform.next = &auth_xform;
496 /* create crypto session */
497 sess = rte_cryptodev_sym_session_create(dev_id,
499 } else { /* auth then cipher */
500 auth_xform.next = &cipher_xform;
501 /* create crypto session */
502 sess = rte_cryptodev_sym_session_create(dev_id,
/*
 * Select the op-population callback matching @options->op_type and
 * store it (together with the session-create helper) in @op_fns:
 * AEAD/chained -> cperf_set_ops_aead when both sides are AES-GCM, else
 * cperf_set_ops_cipher_auth; AUTH_ONLY / CIPHER_ONLY -> the NULL
 * variant when the algorithm is NULL, else the regular one.
 * NOTE(review): the opening brace, `else` lines, returns and closing
 * braces are elided in this view (gaps in the embedded numbering);
 * presumably the function returns a success/error code — confirm
 * against the full file.
 */
511 cperf_get_op_functions(const struct cperf_options *options,
512 struct cperf_op_fns *op_fns)
/* start from a clean slate so unset callbacks are NULL */
514 memset(op_fns, 0, sizeof(struct cperf_op_fns));
516 op_fns->sess_create = cperf_create_session;
518 if (options->op_type == CPERF_AEAD
519 || options->op_type == CPERF_AUTH_THEN_CIPHER
520 || options->op_type == CPERF_CIPHER_THEN_AUTH) {
/* true AEAD path only when both directions are AES-GCM */
521 if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
522 options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM)
523 op_fns->populate_ops = cperf_set_ops_aead;
525 op_fns->populate_ops = cperf_set_ops_cipher_auth;
528 if (options->op_type == CPERF_AUTH_ONLY) {
529 if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
530 op_fns->populate_ops = cperf_set_ops_null_auth;
532 op_fns->populate_ops = cperf_set_ops_auth;
535 if (options->op_type == CPERF_CIPHER_ONLY) {
536 if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
537 op_fns->populate_ops = cperf_set_ops_null_cipher;
539 op_fns->populate_ops = cperf_set_ops_cipher;