4 * Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
10 * * Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * * Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in
14 * the documentation and/or other materials provided with the
16 * * Neither the name of Intel Corporation nor the names of its
17 * contributors may be used to endorse or promote products derived
18 * from this software without specific prior written permission.
20 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
23 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
24 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
25 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
26 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
27 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
28 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
29 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33 #include <rte_cryptodev.h>
35 #include "cperf_ops.h"
36 #include "cperf_test_vectors.h"
/*
 * Populate @nb_ops symmetric crypto ops for a NULL-cipher perf run:
 * attach @sess to each op, wire the op to its input/output mbuf and
 * cover @options->test_buffer_size bytes starting at offset 0.
 * The NULL cipher needs no key/IV material, so @test_vector is unused.
 *
 * NOTE(review): this chunk elides some original lines (return type,
 * loop-counter declaration, closing braces/return); comments describe
 * only the code visible here.
 */
39 cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
40 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
41 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
42 const struct cperf_options *options,
43 const struct cperf_test_vector *test_vector __rte_unused)
47 for (i = 0; i < nb_ops; i++) {
48 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
/* bind the preconfigured session to this op */
50 rte_crypto_op_attach_sym_session(ops[i], sess);
52 sym_op->m_src = bufs_in[i];
53 sym_op->m_dst = bufs_out[i];
55 /* cipher parameters */
56 sym_op->cipher.data.length = options->test_buffer_size;
57 sym_op->cipher.data.offset = 0;
/*
 * Populate @nb_ops crypto ops for a NULL-auth perf run: attach @sess,
 * wire the in/out mbufs and authenticate @options->test_buffer_size
 * bytes from offset 0.  NULL auth produces no digest and needs no key,
 * so @test_vector is unused.
 *
 * NOTE(review): some original lines (return type, loop-counter
 * declaration, closing braces/return) are elided in this chunk.
 */
64 cperf_set_ops_null_auth(struct rte_crypto_op **ops,
65 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
66 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
67 const struct cperf_options *options,
68 const struct cperf_test_vector *test_vector __rte_unused)
72 for (i = 0; i < nb_ops; i++) {
73 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
/* bind the preconfigured session to this op */
75 rte_crypto_op_attach_sym_session(ops[i], sess);
77 sym_op->m_src = bufs_in[i];
78 sym_op->m_dst = bufs_out[i];
/* authentication parameters: whole test buffer, no digest/AAD set */
81 sym_op->auth.data.length = options->test_buffer_size;
82 sym_op->auth.data.offset = 0;
/*
 * Populate @nb_ops crypto ops for a cipher-only perf run.  Unlike the
 * NULL variant, each op also gets the IV taken from @test_vector
 * (virtual address, physical address and length).
 *
 * NOTE(review): some original lines (return type, loop-counter
 * declaration, closing braces/return) are elided in this chunk.
 */
89 cperf_set_ops_cipher(struct rte_crypto_op **ops,
90 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
91 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
92 const struct cperf_options *options,
93 const struct cperf_test_vector *test_vector)
97 for (i = 0; i < nb_ops; i++) {
98 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
/* bind the preconfigured session to this op */
100 rte_crypto_op_attach_sym_session(ops[i], sess);
102 sym_op->m_src = bufs_in[i];
103 sym_op->m_dst = bufs_out[i];
105 /* cipher parameters */
/* IV is shared across all ops: same buffer from the test vector */
106 sym_op->cipher.iv.data = test_vector->iv.data;
107 sym_op->cipher.iv.phys_addr = test_vector->iv.phys_addr;
108 sym_op->cipher.iv.length = test_vector->iv.length;
110 sym_op->cipher.data.length = options->test_buffer_size;
111 sym_op->cipher.data.offset = 0;
/*
 * Populate @nb_ops crypto ops for an auth-only perf run.
 *
 * Digest placement depends on the auth operation:
 *  - VERIFY: the reference digest from @test_vector is used directly.
 *  - otherwise (generate): the digest is written into the packet
 *    itself at byte offset @options->test_buffer_size, so the mbuf
 *    segment chain is walked to locate the segment holding that byte.
 *
 * NOTE(review): several original lines are elided in this chunk — the
 * else-branch selecting @buf for the in-place case (presumably
 * tbuf = bufs_in[i] before the segment walk), the statement advancing
 * tbuf in the while loop, and closing braces/return.  Comments describe
 * only the visible code.
 */
118 cperf_set_ops_auth(struct rte_crypto_op **ops,
119 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
120 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
121 const struct cperf_options *options,
122 const struct cperf_test_vector *test_vector)
126 for (i = 0; i < nb_ops; i++) {
127 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
/* bind the preconfigured session to this op */
129 rte_crypto_op_attach_sym_session(ops[i], sess);
131 sym_op->m_src = bufs_in[i];
132 sym_op->m_dst = bufs_out[i];
134 /* authentication parameters */
/* VERIFY: compare against the known-good digest from the vector */
135 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
136 sym_op->auth.digest.data = test_vector->digest.data;
137 sym_op->auth.digest.phys_addr =
138 test_vector->digest.phys_addr;
139 sym_op->auth.digest.length = options->auth_digest_sz;
/* generate: digest lands right after the payload in the packet */
142 uint32_t offset = options->test_buffer_size;
143 struct rte_mbuf *buf, *tbuf;
/* out-of-place: digest goes into the destination mbuf */
145 if (options->out_of_place) {
/* walk the chain until @offset falls inside the current segment */
151 while ((tbuf->next != NULL) &&
152 (offset >= tbuf->data_len)) {
153 offset -= tbuf->data_len;
/* @offset is now relative to segment @buf */
158 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
160 sym_op->auth.digest.phys_addr =
161 rte_pktmbuf_mtophys_offset(buf, offset);
162 sym_op->auth.digest.length = options->auth_digest_sz;
/* AAD comes from the shared test vector, not from the packet */
163 sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
164 sym_op->auth.aad.data = test_vector->aad.data;
165 sym_op->auth.aad.length = options->auth_aad_sz;
169 sym_op->auth.data.length = options->test_buffer_size;
170 sym_op->auth.data.offset = 0;
/*
 * Populate @nb_ops crypto ops for a chained cipher+auth perf run.
 * Combines cperf_set_ops_cipher() (IV from @test_vector, whole-buffer
 * cipher range) with cperf_set_ops_auth() (digest from the vector on
 * VERIFY, otherwise placed in the packet after the payload).
 *
 * NOTE(review): as in cperf_set_ops_auth(), this chunk elides the
 * in-place/out-of-place @buf selection lines, the tbuf-advance
 * statement in the segment walk, and closing braces/return.
 */
177 cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
178 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
179 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
180 const struct cperf_options *options,
181 const struct cperf_test_vector *test_vector)
185 for (i = 0; i < nb_ops; i++) {
186 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
/* bind the preconfigured session to this op */
188 rte_crypto_op_attach_sym_session(ops[i], sess);
190 sym_op->m_src = bufs_in[i];
191 sym_op->m_dst = bufs_out[i];
193 /* cipher parameters */
194 sym_op->cipher.iv.data = test_vector->iv.data;
195 sym_op->cipher.iv.phys_addr = test_vector->iv.phys_addr;
196 sym_op->cipher.iv.length = test_vector->iv.length;
198 sym_op->cipher.data.length = options->test_buffer_size;
199 sym_op->cipher.data.offset = 0;
201 /* authentication parameters */
/* VERIFY: compare against the known-good digest from the vector */
202 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
203 sym_op->auth.digest.data = test_vector->digest.data;
204 sym_op->auth.digest.phys_addr =
205 test_vector->digest.phys_addr;
206 sym_op->auth.digest.length = options->auth_digest_sz;
/* generate: digest lands right after the payload in the packet */
209 uint32_t offset = options->test_buffer_size;
210 struct rte_mbuf *buf, *tbuf;
/* out-of-place: digest goes into the destination mbuf */
212 if (options->out_of_place) {
/* walk the chain until @offset falls inside the current segment */
218 while ((tbuf->next != NULL) &&
219 (offset >= tbuf->data_len)) {
220 offset -= tbuf->data_len;
/* @offset is now relative to segment @buf */
225 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
227 sym_op->auth.digest.phys_addr =
228 rte_pktmbuf_mtophys_offset(buf, offset);
229 sym_op->auth.digest.length = options->auth_digest_sz;
/* AAD comes from the shared test vector, not from the packet */
230 sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
231 sym_op->auth.aad.data = test_vector->aad.data;
232 sym_op->auth.aad.length = options->auth_aad_sz;
235 sym_op->auth.data.length = options->test_buffer_size;
236 sym_op->auth.data.offset = 0;
/*
 * Populate @nb_ops crypto ops for an AEAD (e.g. AES-GCM) perf run.
 *
 * Packet layout assumed here: the AAD sits at the start of the mbuf
 * data area, and the cipher payload begins at the AAD size rounded up
 * to a 16-byte boundary.  On digest generation, the digest is placed
 * immediately after the ciphertext (cipher offset + cipher length),
 * locating the correct mbuf segment for that byte offset.
 *
 * NOTE(review): auth.data.offset uses the raw auth_aad_sz while
 * cipher.data.offset uses the 16-byte-aligned value — verify this
 * asymmetry is intentional for the targeted PMDs.
 * NOTE(review): this chunk elides the in-place/out-of-place @buf
 * selection lines, the tbuf-advance statement, and braces/return.
 */
243 cperf_set_ops_aead(struct rte_crypto_op **ops,
244 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
245 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
246 const struct cperf_options *options,
247 const struct cperf_test_vector *test_vector)
251 for (i = 0; i < nb_ops; i++) {
252 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
/* bind the preconfigured session to this op */
254 rte_crypto_op_attach_sym_session(ops[i], sess);
256 sym_op->m_src = bufs_in[i];
257 sym_op->m_dst = bufs_out[i];
259 /* cipher parameters */
260 sym_op->cipher.iv.data = test_vector->iv.data;
261 sym_op->cipher.iv.phys_addr = test_vector->iv.phys_addr;
262 sym_op->cipher.iv.length = test_vector->iv.length;
264 sym_op->cipher.data.length = options->test_buffer_size;
/* ciphertext starts after the AAD, rounded up to 16 bytes */
265 sym_op->cipher.data.offset =
266 RTE_ALIGN_CEIL(options->auth_aad_sz, 16);
/* AAD lives in the packet itself, at the head of the source mbuf */
268 sym_op->auth.aad.data = rte_pktmbuf_mtod(bufs_in[i], uint8_t *);
269 sym_op->auth.aad.phys_addr = rte_pktmbuf_mtophys(bufs_in[i]);
270 sym_op->auth.aad.length = options->auth_aad_sz;
272 /* authentication parameters */
/* VERIFY: compare against the known-good digest from the vector */
273 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
274 sym_op->auth.digest.data = test_vector->digest.data;
275 sym_op->auth.digest.phys_addr =
276 test_vector->digest.phys_addr;
277 sym_op->auth.digest.length = options->auth_digest_sz;
/* generate: digest goes right after the ciphertext */
280 uint32_t offset = sym_op->cipher.data.length +
281 sym_op->cipher.data.offset;
282 struct rte_mbuf *buf, *tbuf;
/* out-of-place: digest goes into the destination mbuf */
284 if (options->out_of_place) {
/* walk the chain until @offset falls inside the current segment */
290 while ((tbuf->next != NULL) &&
291 (offset >= tbuf->data_len)) {
292 offset -= tbuf->data_len;
/* @offset is now relative to segment @buf */
297 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
299 sym_op->auth.digest.phys_addr =
300 rte_pktmbuf_mtophys_offset(buf, offset);
302 sym_op->auth.digest.length = options->auth_digest_sz;
305 sym_op->auth.data.length = options->test_buffer_size;
306 sym_op->auth.data.offset = options->auth_aad_sz;
/*
 * Build the crypto transform chain described by @options/@test_vector
 * and create a symmetric session on device @dev_id.
 *
 * Branches by op_type:
 *  - CIPHER_ONLY: single cipher xform (key omitted for NULL cipher).
 *  - AUTH_ONLY: single auth xform (digest/AAD/key zeroed for NULL auth).
 *  - CIPHER_THEN_AUTH / AUTH_THEN_CIPHER / AEAD: both xforms built and
 *    chained; for AES-GCM the chain order follows the cipher direction
 *    (cipher->auth on encrypt, auth->cipher on decrypt) and no separate
 *    auth key is set.
 *
 * Returns the created session (NULL is the initial value on failure
 * paths).  The xform structs are stack locals — assumes
 * rte_cryptodev_sym_session_create() copies what it needs; TODO confirm
 * against the rte_cryptodev API docs for this DPDK version.
 *
 * NOTE(review): this chunk elides some lines (else keywords, the xform
 * argument of several session_create calls, braces/returns); comments
 * describe only the visible code.
 */
312 static struct rte_cryptodev_sym_session *
313 cperf_create_session(uint8_t dev_id,
314 const struct cperf_options *options,
315 const struct cperf_test_vector *test_vector)
317 struct rte_crypto_sym_xform cipher_xform;
318 struct rte_crypto_sym_xform auth_xform;
319 struct rte_cryptodev_sym_session *sess = NULL;
/* cipher-only: one unchained cipher xform */
324 if (options->op_type == CPERF_CIPHER_ONLY) {
325 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
326 cipher_xform.next = NULL;
327 cipher_xform.cipher.algo = options->cipher_algo;
328 cipher_xform.cipher.op = options->cipher_op;
330 /* cipher different than null */
331 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
332 cipher_xform.cipher.key.data =
333 test_vector->cipher_key.data;
334 cipher_xform.cipher.key.length =
335 test_vector->cipher_key.length;
/* NULL cipher: no key material */
337 cipher_xform.cipher.key.data = NULL;
338 cipher_xform.cipher.key.length = 0;
340 /* create crypto session */
341 sess = rte_cryptodev_sym_session_create(dev_id, &cipher_xform);
/* auth-only: one unchained auth xform */
345 } else if (options->op_type == CPERF_AUTH_ONLY) {
346 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
347 auth_xform.next = NULL;
348 auth_xform.auth.algo = options->auth_algo;
349 auth_xform.auth.op = options->auth_op;
351 /* auth different than null */
352 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
353 auth_xform.auth.digest_length =
354 options->auth_digest_sz;
355 auth_xform.auth.add_auth_data_length =
356 options->auth_aad_sz;
357 auth_xform.auth.key.length =
358 test_vector->auth_key.length;
359 auth_xform.auth.key.data = test_vector->auth_key.data;
/* NULL auth: zero digest/AAD lengths, no key */
361 auth_xform.auth.digest_length = 0;
362 auth_xform.auth.add_auth_data_length = 0;
363 auth_xform.auth.key.length = 0;
364 auth_xform.auth.key.data = NULL;
366 /* create crypto session */
367 sess = rte_cryptodev_sym_session_create(dev_id, &auth_xform);
/* chained modes: build both xforms, link per algo/direction below */
371 } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
372 || options->op_type == CPERF_AUTH_THEN_CIPHER
373 || options->op_type == CPERF_AEAD) {
378 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
379 cipher_xform.next = NULL;
380 cipher_xform.cipher.algo = options->cipher_algo;
381 cipher_xform.cipher.op = options->cipher_op;
383 /* cipher different than null */
384 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
385 cipher_xform.cipher.key.data =
386 test_vector->cipher_key.data;
387 cipher_xform.cipher.key.length =
388 test_vector->cipher_key.length;
390 cipher_xform.cipher.key.data = NULL;
391 cipher_xform.cipher.key.length = 0;
397 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
398 auth_xform.next = NULL;
399 auth_xform.auth.algo = options->auth_algo;
400 auth_xform.auth.op = options->auth_op;
402 /* auth different than null */
403 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
404 auth_xform.auth.digest_length = options->auth_digest_sz;
405 auth_xform.auth.add_auth_data_length =
406 options->auth_aad_sz;
407 /* auth options for aes gcm */
/* GCM derives auth from the cipher key: no separate auth key */
408 if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
409 options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM) {
410 auth_xform.auth.key.length = 0;
411 auth_xform.auth.key.data = NULL;
412 } else { /* auth options for others */
413 auth_xform.auth.key.length =
414 test_vector->auth_key.length;
415 auth_xform.auth.key.data =
416 test_vector->auth_key.data;
/* NULL auth in a chain: zero everything */
419 auth_xform.auth.digest_length = 0;
420 auth_xform.auth.add_auth_data_length = 0;
421 auth_xform.auth.key.length = 0;
422 auth_xform.auth.key.data = NULL;
425 /* create crypto session for aes gcm */
/* GCM chain order follows the cipher direction */
426 if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM) {
427 if (options->cipher_op ==
428 RTE_CRYPTO_CIPHER_OP_ENCRYPT) {
429 cipher_xform.next = &auth_xform;
430 /* create crypto session */
431 sess = rte_cryptodev_sym_session_create(dev_id,
433 } else { /* decrypt */
434 auth_xform.next = &cipher_xform;
435 /* create crypto session */
436 sess = rte_cryptodev_sym_session_create(dev_id,
439 } else { /* create crypto session for other */
440 /* cipher then auth */
441 if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
442 cipher_xform.next = &auth_xform;
443 /* create crypto session */
444 sess = rte_cryptodev_sym_session_create(dev_id,
446 } else { /* auth then cipher */
447 auth_xform.next = &cipher_xform;
448 /* create crypto session */
449 sess = rte_cryptodev_sym_session_create(dev_id,
/*
 * Fill @op_fns with the session-create and op-populate callbacks
 * matching @options: chained/AEAD modes get cperf_set_ops_aead (only
 * for AES-GCM cipher+auth) or cperf_set_ops_cipher_auth; the *_ONLY
 * modes get their plain or NULL-algorithm variant.
 *
 * NOTE(review): the return statements and the else keywords between
 * the populate_ops assignments are elided in this chunk; the pairs of
 * assignments under each if are if/else alternatives by structure.
 */
458 cperf_get_op_functions(const struct cperf_options *options,
459 struct cperf_op_fns *op_fns)
/* start from a clean slate so unset callbacks are NULL */
461 memset(op_fns, 0, sizeof(struct cperf_op_fns));
463 op_fns->sess_create = cperf_create_session;
465 if (options->op_type == CPERF_AEAD
466 || options->op_type == CPERF_AUTH_THEN_CIPHER
467 || options->op_type == CPERF_CIPHER_THEN_AUTH) {
/* AES-GCM on both sides: use the AEAD populate path */
468 if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
469 options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM)
470 op_fns->populate_ops = cperf_set_ops_aead;
472 op_fns->populate_ops = cperf_set_ops_cipher_auth;
475 if (options->op_type == CPERF_AUTH_ONLY) {
476 if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
477 op_fns->populate_ops = cperf_set_ops_null_auth;
479 op_fns->populate_ops = cperf_set_ops_auth;
482 if (options->op_type == CPERF_CIPHER_ONLY) {
483 if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
484 op_fns->populate_ops = cperf_set_ops_null_cipher;
486 op_fns->populate_ops = cperf_set_ops_cipher;