/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2016-2017 Intel Corporation
 */

#include <rte_cryptodev.h>

#include "cperf_ops.h"
#include "cperf_test_vectors.h"
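
/*
 * Each cperf_set_ops_* helper below fills a burst of crypto ops for one
 * session type. The source/destination mbufs live at fixed offsets from
 * the rte_crypto_op itself, so they are recovered with pointer arithmetic
 * rather than carried separately.
 *
 * NULL cipher: only data length/offset are set, since the NULL algorithm
 * needs no key, IV or digest.
 */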
static int
cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		sym_op->cipher.data.length = options->test_buffer_size;
		sym_op->cipher.data.offset = 0;
	}

	return 0;
}
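
/*
 * NULL auth: as with the NULL cipher, no IV, key or digest is attached;
 * only the auth data length/offset are set.
 */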
static int
cperf_set_ops_null_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);
		/* auth parameters */
		sym_op->auth.data.length = options->test_buffer_size;
		sym_op->auth.data.offset = 0;
	}

	return 0;
}
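
/*
 * Cipher-only ops. SNOW3G/KASUMI/ZUC take their data length in bits
 * (hence the << 3); in verify mode the reference cipher IV is copied
 * into each op so the output can be checked against the test vector.
 */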
static int
cperf_set_ops_cipher(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			/* Wireless algorithms take their length in bits */
			sym_op->cipher.data.length = options->test_buffer_size << 3;
		else
			sym_op->cipher.data.length = options->test_buffer_size;

		sym_op->cipher.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
		}
	}

	return 0;
}
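
/*
 * Auth-only ops. When generating, the digest is written at the end of
 * the test buffer, spilling into the next mbuf segment if it does not
 * fit; when verifying, the reference digest from the test vector is
 * used instead.
 */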
static int
cperf_set_ops_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		if (test_vector->auth_iv.length) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->auth_iv.data,
					test_vector->auth_iv.length);
		}

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			/* Digest is generated at the end of the test data */
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				/* Find the segment that holds the offset */
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) <
						options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}

		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			/* Wireless algorithms take their length in bits */
			sym_op->auth.data.length = options->test_buffer_size << 3;
		else
			sym_op->auth.data.length = options->test_buffer_size;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		if (test_vector->auth_iv.length) {
			for (i = 0; i < nb_ops; i++) {
				uint8_t *iv_ptr = rte_crypto_op_ctod_offset(
						ops[i], uint8_t *, iv_offset);

				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}
	}

	return 0;
}
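
/*
 * Chained cipher+auth ops: combines the cipher and auth setup above.
 * In verify mode the auth IV (if any) is copied right after the cipher
 * IV in the op's private data.
 */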
static int
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			/* Wireless algorithms take their length in bits */
			sym_op->cipher.data.length = options->test_buffer_size << 3;
		else
			sym_op->cipher.data.length = options->test_buffer_size;

		sym_op->cipher.data.offset = 0;

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			/* Digest is generated at the end of the test data */
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				/* Find the segment that holds the offset */
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) <
						options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}

		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length = options->test_buffer_size << 3;
		else
			sym_op->auth.data.length = options->test_buffer_size;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
			if (test_vector->auth_iv.length) {
				/*
				 * Copy the auth IV right after the
				 * cipher IV
				 */
				iv_ptr += test_vector->cipher_iv.length;
				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}
	}

	return 0;
}
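
/*
 * AEAD ops. The AAD sits in the op's private data after the IV, which
 * is padded to a 16-byte boundary. Note the AES-CCM layout quirks in
 * verify mode: the nonce starts one byte into the IV field and the AAD
 * 18 bytes into the AAD field.
 */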
static int
cperf_set_ops_aead(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;
	/* AAD is placed after the IV */
	uint16_t aad_offset = iv_offset +
			RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* AEAD parameters */
		sym_op->aead.data.length = options->test_buffer_size;
		sym_op->aead.data.offset = 0;

		sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, aad_offset);
		sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
					aad_offset);

		if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
			sym_op->aead.digest.data = test_vector->digest.data;
			sym_op->aead.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			/* Digest is generated right after the AEAD data */
			uint32_t offset = sym_op->aead.data.length +
						sym_op->aead.data.offset;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				/* Find the segment that holds the offset */
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) <
						options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->aead.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			/*
			 * If doing AES-CCM, nonce is copied one byte
			 * after the start of IV field, and AAD is copied
			 * 18 bytes after the start of the AAD field.
			 */
			if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
				memcpy(iv_ptr + 1, test_vector->aead_iv.data,
					test_vector->aead_iv.length);

				memcpy(ops[i]->sym->aead.aad.data + 18,
					test_vector->aad.data,
					test_vector->aad.length);
			} else {
				memcpy(iv_ptr, test_vector->aead_iv.data,
					test_vector->aead_iv.length);

				memcpy(ops[i]->sym->aead.aad.data,
					test_vector->aad.data,
					test_vector->aad.length);
			}
		}
	}

	return 0;
}
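
/*
 * Build the transform chain for the requested op type and initialize a
 * symmetric session against the given device. NULL algorithms are
 * configured with empty keys and IVs.
 */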
static struct rte_cryptodev_sym_session *
cperf_create_session(struct rte_mempool *sess_mp,
	uint8_t dev_id,
	const struct cperf_options *options,
	const struct cperf_test_vector *test_vector,
	uint16_t iv_offset)
{
	struct rte_crypto_sym_xform cipher_xform;
	struct rte_crypto_sym_xform auth_xform;
	struct rte_crypto_sym_xform aead_xform;
	struct rte_cryptodev_sym_session *sess = NULL;

	sess = rte_cryptodev_sym_session_create(sess_mp);
	if (sess == NULL)
		return NULL;
	/*
	 * cipher only
	 */
	if (options->op_type == CPERF_CIPHER_ONLY) {
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;

		/* cipher different than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
					test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}
		/* create crypto session */
		rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
				sess_mp);
	/*
	 * auth only
	 */
	} else if (options->op_type == CPERF_AUTH_ONLY) {
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;

		/* auth different than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length =
					options->digest_sz;
			auth_xform.auth.key.length =
					test_vector->auth_key.length;
			auth_xform.auth.key.data = test_vector->auth_key.data;
			auth_xform.auth.iv.length =
					test_vector->auth_iv.length;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}
		/* create crypto session */
		rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
				sess_mp);
	/*
	 * cipher and auth
	 */
	} else if (options->op_type == CPERF_CIPHER_THEN_AUTH
			|| options->op_type == CPERF_AUTH_THEN_CIPHER) {
		/*
		 * cipher
		 */
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;

		/* cipher different than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
					test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}

		/*
		 * auth
		 */
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;

		/* auth different than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length = options->digest_sz;
			auth_xform.auth.iv.length = test_vector->auth_iv.length;
			auth_xform.auth.key.length =
					test_vector->auth_key.length;
			auth_xform.auth.key.data =
					test_vector->auth_key.data;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}

		/* cipher then auth */
		if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
			cipher_xform.next = &auth_xform;
			/* create crypto session */
			rte_cryptodev_sym_session_init(dev_id,
					sess, &cipher_xform, sess_mp);
		} else { /* auth then cipher */
			auth_xform.next = &cipher_xform;
			/* create crypto session */
			rte_cryptodev_sym_session_init(dev_id,
					sess, &auth_xform, sess_mp);
		}
	} else { /* options->op_type == CPERF_AEAD */
		aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
		aead_xform.next = NULL;
		aead_xform.aead.algo = options->aead_algo;
		aead_xform.aead.op = options->aead_op;
		aead_xform.aead.iv.offset = iv_offset;

		aead_xform.aead.key.data =
				test_vector->aead_key.data;
		aead_xform.aead.key.length =
				test_vector->aead_key.length;
		aead_xform.aead.iv.length = test_vector->aead_iv.length;

		aead_xform.aead.digest_length = options->digest_sz;
		aead_xform.aead.aad_length =
				options->aead_aad_sz;

		/* Create crypto session */
		rte_cryptodev_sym_session_init(dev_id,
				sess, &aead_xform, sess_mp);
	}

	return sess;
}
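
/*
 * Pick the session-create and op-populate callbacks for the configured
 * op type; NULL cipher/auth use the lighter-weight populate variants.
 */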
int
cperf_get_op_functions(const struct cperf_options *options,
		struct cperf_op_fns *op_fns)
{
	memset(op_fns, 0, sizeof(struct cperf_op_fns));

	op_fns->sess_create = cperf_create_session;

	if (options->op_type == CPERF_AEAD) {
		op_fns->populate_ops = cperf_set_ops_aead;
		return 0;
	}
	if (options->op_type == CPERF_AUTH_THEN_CIPHER
			|| options->op_type == CPERF_CIPHER_THEN_AUTH) {
		op_fns->populate_ops = cperf_set_ops_cipher_auth;
		return 0;
	}
	if (options->op_type == CPERF_AUTH_ONLY) {
		if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
			op_fns->populate_ops = cperf_set_ops_null_auth;
		else
			op_fns->populate_ops = cperf_set_ops_auth;
		return 0;
	}
	if (options->op_type == CPERF_CIPHER_ONLY) {
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
			op_fns->populate_ops = cperf_set_ops_null_cipher;
		else
			op_fns->populate_ops = cperf_set_ops_cipher;
		return 0;
	}

	return -1;
}