/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include <rte_cryptodev.h>

#include "cperf_ops.h"
#include "cperf_test_vectors.h"
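
/*
 * Populate a burst of ops for the NULL cipher case: attach the session and
 * point each op at its source/destination mbufs. No IV or key material is
 * needed; only the cipher data region is sized.
 */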
static int
cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* cipher parameters */
		sym_op->cipher.data.length = options->test_buffer_size;
		sym_op->cipher.data.offset = 0;
	}

	return 0;
}
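
/*
 * Populate a burst of ops for the NULL auth case: same shape as the NULL
 * cipher case, but the data region is set on the auth portion of the op.
 */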
static int
cperf_set_ops_null_auth(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* auth parameters */
		sym_op->auth.data.length = options->test_buffer_size;
		sym_op->auth.data.offset = 0;
	}

	return 0;
}
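
/*
 * Populate a burst of ops for a cipher-only run: attach the session, take
 * the IV from the test vector and size the cipher region. The wireless
 * algorithms (SNOW 3G UEA2, KASUMI F8, ZUC EEA3) express the data length
 * in bits, hence the << 3.
 */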
static int
cperf_set_ops_cipher(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* cipher parameters */
		sym_op->cipher.iv.data = test_vector->iv.data;
		sym_op->cipher.iv.phys_addr = test_vector->iv.phys_addr;
		sym_op->cipher.iv.length = test_vector->iv.length;

		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			sym_op->cipher.data.length = options->test_buffer_size << 3;
		else
			sym_op->cipher.data.length = options->test_buffer_size;

		sym_op->cipher.data.offset = 0;
	}

	return 0;
}
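
/*
 * Populate a burst of ops for an auth-only run. For verify, the digest
 * comes from the test vector; for generate, the digest is placed at the
 * end of the data, walking the (possibly segmented) mbuf chain to find
 * the segment that holds that offset.
 */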
static int
cperf_set_ops_auth(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
			sym_op->auth.digest.length = options->auth_digest_sz;
		} else {
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = bufs_out[i];
			} else {
				tbuf = bufs_in[i];
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_mtophys_offset(buf, offset);
			sym_op->auth.digest.length = options->auth_digest_sz;
			sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
			sym_op->auth.aad.data = test_vector->aad.data;
			sym_op->auth.aad.length = options->auth_aad_sz;
		}

		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length = options->test_buffer_size << 3;
		else
			sym_op->auth.data.length = options->test_buffer_size;

		sym_op->auth.data.offset = 0;
	}

	return 0;
}
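
/*
 * Populate a burst of ops for a chained cipher + auth run: the cipher
 * setup mirrors cperf_set_ops_cipher() and the auth setup mirrors
 * cperf_set_ops_auth(), applied to the same op.
 */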
static int
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* cipher parameters */
		sym_op->cipher.iv.data = test_vector->iv.data;
		sym_op->cipher.iv.phys_addr = test_vector->iv.phys_addr;
		sym_op->cipher.iv.length = test_vector->iv.length;

		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			sym_op->cipher.data.length = options->test_buffer_size << 3;
		else
			sym_op->cipher.data.length = options->test_buffer_size;

		sym_op->cipher.data.offset = 0;

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
			sym_op->auth.digest.length = options->auth_digest_sz;
		} else {
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = bufs_out[i];
			} else {
				tbuf = bufs_in[i];
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_mtophys_offset(buf, offset);
			sym_op->auth.digest.length = options->auth_digest_sz;
			sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
			sym_op->auth.aad.data = test_vector->aad.data;
			sym_op->auth.aad.length = options->auth_aad_sz;
		}

		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length = options->test_buffer_size << 3;
		else
			sym_op->auth.data.length = options->test_buffer_size;

		sym_op->auth.data.offset = 0;
	}

	return 0;
}
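
/*
 * Populate a burst of ops for an AEAD (AES-GCM) run: the AAD sits at the
 * start of the buffer, the cipher data begins at the next 16-byte aligned
 * offset behind it, and for digest generation the digest is placed right
 * after the ciphertext.
 */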
static int
cperf_set_ops_aead(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* cipher parameters */
		sym_op->cipher.iv.data = test_vector->iv.data;
		sym_op->cipher.iv.phys_addr = test_vector->iv.phys_addr;
		sym_op->cipher.iv.length = test_vector->iv.length;

		sym_op->cipher.data.length = options->test_buffer_size;
		sym_op->cipher.data.offset =
				RTE_ALIGN_CEIL(options->auth_aad_sz, 16);

		sym_op->auth.aad.data = rte_pktmbuf_mtod(bufs_in[i], uint8_t *);
		sym_op->auth.aad.phys_addr = rte_pktmbuf_mtophys(bufs_in[i]);
		sym_op->auth.aad.length = options->auth_aad_sz;

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
			sym_op->auth.digest.length = options->auth_digest_sz;
		} else {
			uint32_t offset = sym_op->cipher.data.length +
					sym_op->cipher.data.offset;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = bufs_out[i];
			} else {
				tbuf = bufs_in[i];
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_mtophys_offset(buf, offset);
			sym_op->auth.digest.length = options->auth_digest_sz;
		}

		sym_op->auth.data.length = options->test_buffer_size;
		sym_op->auth.data.offset = options->auth_aad_sz;
	}

	return 0;
}
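
/*
 * Build the symmetric session matching the requested op type. For chained
 * operations the two transforms are linked in the order implied by the op
 * type, except for AES-GCM, where the chain order follows the cipher
 * direction (cipher first on encrypt, auth first on decrypt).
 */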
static struct rte_cryptodev_sym_session *
cperf_create_session(uint8_t dev_id,
	const struct cperf_options *options,
	const struct cperf_test_vector *test_vector)
{
	struct rte_crypto_sym_xform cipher_xform;
	struct rte_crypto_sym_xform auth_xform;
	struct rte_cryptodev_sym_session *sess = NULL;

	/*
	 * cipher only
	 */
	if (options->op_type == CPERF_CIPHER_ONLY) {
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;

		/* cipher other than NULL */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
		}
		/* create crypto session */
		sess = rte_cryptodev_sym_session_create(dev_id, &cipher_xform);
	/*
	 * auth only
	 */
	} else if (options->op_type == CPERF_AUTH_ONLY) {
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;

		/* auth other than NULL */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length =
					options->auth_digest_sz;
			auth_xform.auth.add_auth_data_length =
					options->auth_aad_sz;
			auth_xform.auth.key.length =
					test_vector->auth_key.length;
			auth_xform.auth.key.data = test_vector->auth_key.data;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.add_auth_data_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
		}
		/* create crypto session */
		sess = rte_cryptodev_sym_session_create(dev_id, &auth_xform);
	/*
	 * cipher and auth
	 */
	} else if (options->op_type == CPERF_CIPHER_THEN_AUTH
			|| options->op_type == CPERF_AUTH_THEN_CIPHER
			|| options->op_type == CPERF_AEAD) {
		/*
		 * cipher
		 */
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;

		/* cipher other than NULL */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
		}

		/*
		 * auth
		 */
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;

		/* auth other than NULL */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length = options->auth_digest_sz;
			auth_xform.auth.add_auth_data_length =
					options->auth_aad_sz;
			/* auth options for AES-GCM */
			if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
					options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM) {
				auth_xform.auth.key.length = 0;
				auth_xform.auth.key.data = NULL;
			} else { /* auth options for others */
				auth_xform.auth.key.length =
					test_vector->auth_key.length;
				auth_xform.auth.key.data =
						test_vector->auth_key.data;
			}
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.add_auth_data_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
		}

		/* create crypto session for AES-GCM */
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM) {
			if (options->cipher_op ==
					RTE_CRYPTO_CIPHER_OP_ENCRYPT) {
				cipher_xform.next = &auth_xform;
				/* create crypto session */
				sess = rte_cryptodev_sym_session_create(dev_id,
					&cipher_xform);
			} else { /* decrypt */
				auth_xform.next = &cipher_xform;
				/* create crypto session */
				sess = rte_cryptodev_sym_session_create(dev_id,
					&auth_xform);
			}
		} else { /* create crypto session for others */
			/* cipher then auth */
			if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
				cipher_xform.next = &auth_xform;
				/* create crypto session */
				sess = rte_cryptodev_sym_session_create(dev_id,
					&cipher_xform);
			} else { /* auth then cipher */
				auth_xform.next = &cipher_xform;
				/* create crypto session */
				sess = rte_cryptodev_sym_session_create(dev_id,
					&auth_xform);
			}
		}
	}

	return sess;
}
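
/*
 * Select the session-create and op-populate callbacks for the requested
 * op type. Returns 0 on success, -1 if the op type is not recognized.
 */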
int
cperf_get_op_functions(const struct cperf_options *options,
		struct cperf_op_fns *op_fns)
{
	memset(op_fns, 0, sizeof(struct cperf_op_fns));

	op_fns->sess_create = cperf_create_session;

	if (options->op_type == CPERF_AEAD
			|| options->op_type == CPERF_AUTH_THEN_CIPHER
			|| options->op_type == CPERF_CIPHER_THEN_AUTH) {
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
				options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM)
			op_fns->populate_ops = cperf_set_ops_aead;
		else
			op_fns->populate_ops = cperf_set_ops_cipher_auth;
		return 0;
	}
	if (options->op_type == CPERF_AUTH_ONLY) {
		if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
			op_fns->populate_ops = cperf_set_ops_null_auth;
		else
			op_fns->populate_ops = cperf_set_ops_auth;
		return 0;
	}
	if (options->op_type == CPERF_CIPHER_ONLY) {
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
			op_fns->populate_ops = cperf_set_ops_null_cipher;
		else
			op_fns->populate_ops = cperf_set_ops_cipher;
		return 0;
	}

	return -1;
}
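
/*
 * Typical caller flow, as a minimal sketch. It assumes a configured
 * cryptodev and queue pair, and ops/mbufs already allocated by the test
 * runner; dev_id, qp_id, ops, bufs_in, bufs_out and nb_ops are
 * illustrative names, not part of this file:
 *
 *	struct cperf_op_fns fns;
 *
 *	if (cperf_get_op_functions(options, &fns) != 0)
 *		return -1;
 *	sess = fns.sess_create(dev_id, options, test_vector);
 *	fns.populate_ops(ops, bufs_in, bufs_out, nb_ops, sess,
 *			options, test_vector);
 *	rte_cryptodev_enqueue_burst(dev_id, qp_id, ops, nb_ops);
 */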