4 * Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
10 * * Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * * Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in
14 * the documentation and/or other materials provided with the
16 * * Neither the name of Intel Corporation nor the names of its
17 * contributors may be used to endorse or promote products derived
18 * from this software without specific prior written permission.
20 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
23 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
24 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
25 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
26 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
27 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
28 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
29 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33 #include <rte_cryptodev.h>
35 #include "cperf_ops.h"
36 #include "cperf_test_vectors.h"
/*
 * Populate a burst of symmetric crypto ops for a NULL-cipher run.
 * For each of the nb_ops ops: attach the (shared) session, wire up the
 * source/destination mbufs, and set the cipher region to cover the whole
 * test buffer starting at offset 0.
 * test_vector is __rte_unused: the NULL cipher needs no key/IV material.
 */
39 cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
40 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
41 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
42 const struct cperf_options *options,
43 const struct cperf_test_vector *test_vector __rte_unused)
47 for (i = 0; i < nb_ops; i++) {
48 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
50 rte_crypto_op_attach_sym_session(ops[i], sess);
/* same session for every op in the burst; per-op src/dst mbufs */
52 sym_op->m_src = bufs_in[i];
53 sym_op->m_dst = bufs_out[i];
55 /* cipher parameters */
56 sym_op->cipher.data.length = options->buffer_sz;
57 sym_op->cipher.data.offset = 0;
/*
 * Populate a burst of symmetric crypto ops for a NULL-auth run.
 * Mirrors cperf_set_ops_null_cipher but fills the auth region instead:
 * whole test buffer, offset 0.  No digest/AAD fields are set because the
 * NULL auth algorithm produces no digest.
 * test_vector is __rte_unused: NULL auth needs no key material.
 */
64 cperf_set_ops_null_auth(struct rte_crypto_op **ops,
65 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
66 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
67 const struct cperf_options *options,
68 const struct cperf_test_vector *test_vector __rte_unused)
72 for (i = 0; i < nb_ops; i++) {
73 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
75 rte_crypto_op_attach_sym_session(ops[i], sess);
77 sym_op->m_src = bufs_in[i];
78 sym_op->m_dst = bufs_out[i];
/* authentication parameters: authenticate the full buffer */
81 sym_op->auth.data.length = options->buffer_sz;
82 sym_op->auth.data.offset = 0;
/*
 * Populate a burst of ops for a cipher-only run.
 * Per op: attach the session, set src/dst mbufs, point the IV at the
 * shared test-vector IV (data + phys addr + length), and cipher the whole
 * buffer from offset 0.  The IV buffer is shared across all ops in the
 * burst -- fine for throughput measurement, ops must not modify it.
 */
89 cperf_set_ops_cipher(struct rte_crypto_op **ops,
90 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
91 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
92 const struct cperf_options *options,
93 const struct cperf_test_vector *test_vector)
97 for (i = 0; i < nb_ops; i++) {
98 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
100 rte_crypto_op_attach_sym_session(ops[i], sess);
102 sym_op->m_src = bufs_in[i];
103 sym_op->m_dst = bufs_out[i];
105 /* cipher parameters */
106 sym_op->cipher.iv.data = test_vector->iv.data;
107 sym_op->cipher.iv.phys_addr = test_vector->iv.phys_addr;
108 sym_op->cipher.iv.length = test_vector->iv.length;
110 sym_op->cipher.data.length = options->buffer_sz;
111 sym_op->cipher.data.offset = 0;
/*
 * Populate a burst of ops for an auth-only run.
 * Digest placement depends on the direction:
 *  - VERIFY: reuse the precomputed digest from the shared test vector.
 *  - otherwise (generate): place the digest inside the mbuf just past the
 *    payload (offset buffer_sz), walking the segment chain so the offset
 *    is relative to the segment that actually holds it.
 * NOTE(review): the extraction reviewed here is missing lines between the
 * verify branch and the chain walk (the else opener and the buf/tbuf
 * initialisation, presumably selecting bufs_out vs bufs_in based on
 * out_of_place) -- confirm against the complete file.
 */
118 cperf_set_ops_auth(struct rte_crypto_op **ops,
119 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
120 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
121 const struct cperf_options *options,
122 const struct cperf_test_vector *test_vector)
126 for (i = 0; i < nb_ops; i++) {
127 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
129 rte_crypto_op_attach_sym_session(ops[i], sess);
131 sym_op->m_src = bufs_in[i];
132 sym_op->m_dst = bufs_out[i];
134 /* authentication parameters */
135 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
136 sym_op->auth.digest.data = test_vector->digest.data;
137 sym_op->auth.digest.phys_addr =
138 test_vector->digest.phys_addr;
139 sym_op->auth.digest.length = options->auth_digest_sz;
/* digest goes right after the payload in the (possibly chained) mbuf */
142 uint32_t offset = options->buffer_sz;
143 struct rte_mbuf *buf, *tbuf;
145 if (options->out_of_place) {
/* walk the chain until the segment containing `offset` */
151 while ((tbuf->next != NULL) &&
152 (offset >= tbuf->data_len)) {
153 offset -= tbuf->data_len;
158 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
160 sym_op->auth.digest.phys_addr =
161 rte_pktmbuf_mtophys_offset(buf, offset);
162 sym_op->auth.digest.length = options->auth_digest_sz;
/* AAD comes from the shared test vector */
163 sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
164 sym_op->auth.aad.data = test_vector->aad.data;
165 sym_op->auth.aad.length = options->auth_aad_sz;
/* authenticate the full buffer from offset 0 */
169 sym_op->auth.data.length = options->buffer_sz;
170 sym_op->auth.data.offset = 0;
/*
 * Populate a burst of ops for a chained cipher+auth run.
 * Combines cperf_set_ops_cipher (IV from the shared test vector, cipher
 * region = whole buffer) with cperf_set_ops_auth's digest placement:
 * VERIFY reuses the test-vector digest, generate places the digest in the
 * mbuf just past the payload, walking the segment chain as needed.
 * NOTE(review): as in cperf_set_ops_auth, the extraction is missing the
 * else opener and the buf/tbuf initialisation before the chain walk --
 * confirm against the complete file.
 */
177 cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
178 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
179 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
180 const struct cperf_options *options,
181 const struct cperf_test_vector *test_vector)
185 for (i = 0; i < nb_ops; i++) {
186 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
188 rte_crypto_op_attach_sym_session(ops[i], sess);
190 sym_op->m_src = bufs_in[i];
191 sym_op->m_dst = bufs_out[i];
193 /* cipher parameters */
194 sym_op->cipher.iv.data = test_vector->iv.data;
195 sym_op->cipher.iv.phys_addr = test_vector->iv.phys_addr;
196 sym_op->cipher.iv.length = test_vector->iv.length;
198 sym_op->cipher.data.length = options->buffer_sz;
199 sym_op->cipher.data.offset = 0;
201 /* authentication parameters */
202 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
203 sym_op->auth.digest.data = test_vector->digest.data;
204 sym_op->auth.digest.phys_addr =
205 test_vector->digest.phys_addr;
206 sym_op->auth.digest.length = options->auth_digest_sz;
/* digest goes right after the payload in the (possibly chained) mbuf */
209 uint32_t offset = options->buffer_sz;
210 struct rte_mbuf *buf, *tbuf;
212 if (options->out_of_place) {
/* walk the chain until the segment containing `offset` */
218 while ((tbuf->next != NULL) &&
219 (offset >= tbuf->data_len)) {
220 offset -= tbuf->data_len;
225 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
227 sym_op->auth.digest.phys_addr =
228 rte_pktmbuf_mtophys_offset(buf, offset);
229 sym_op->auth.digest.length = options->auth_digest_sz;
230 sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
231 sym_op->auth.aad.data = test_vector->aad.data;
232 sym_op->auth.aad.length = options->auth_aad_sz;
235 sym_op->auth.data.length = options->buffer_sz;
236 sym_op->auth.data.offset = 0;
/*
 * Populate a burst of ops for an AEAD (combined cipher+auth, e.g. AES-GCM
 * per cperf_get_op_functions) run.  Buffer layout differs from the chained
 * case: the AAD sits at the START of the source mbuf, and the ciphered
 * payload begins at the AAD size rounded up to a 16-byte boundary
 * (presumably an alignment requirement of the AEAD implementation --
 * confirm against the PMD docs).  Generated digests are placed right after
 * the ciphered region, walking the segment chain as needed.
 * NOTE(review): the else opener and buf/tbuf initialisation before the
 * chain walk are missing from this extraction -- confirm against the
 * complete file.
 */
243 cperf_set_ops_aead(struct rte_crypto_op **ops,
244 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
245 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
246 const struct cperf_options *options,
247 const struct cperf_test_vector *test_vector)
251 for (i = 0; i < nb_ops; i++) {
252 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
254 rte_crypto_op_attach_sym_session(ops[i], sess);
256 sym_op->m_src = bufs_in[i];
257 sym_op->m_dst = bufs_out[i];
259 /* cipher parameters */
260 sym_op->cipher.iv.data = test_vector->iv.data;
261 sym_op->cipher.iv.phys_addr = test_vector->iv.phys_addr;
262 sym_op->cipher.iv.length = test_vector->iv.length;
/* payload starts after the AAD, rounded up to 16 bytes */
264 sym_op->cipher.data.length = options->buffer_sz;
265 sym_op->cipher.data.offset =
266 RTE_ALIGN_CEIL(options->auth_aad_sz, 16);
/* AAD lives in-place at the head of the source mbuf */
268 sym_op->auth.aad.data = rte_pktmbuf_mtod(bufs_in[i], uint8_t *);
269 sym_op->auth.aad.phys_addr = rte_pktmbuf_mtophys(bufs_in[i]);
270 sym_op->auth.aad.length = options->auth_aad_sz;
272 /* authentication parameters */
273 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
274 sym_op->auth.digest.data = test_vector->digest.data;
275 sym_op->auth.digest.phys_addr =
276 test_vector->digest.phys_addr;
277 sym_op->auth.digest.length = options->auth_digest_sz;
/* generated digest goes right after the ciphered region */
280 uint32_t offset = sym_op->cipher.data.length +
281 sym_op->cipher.data.offset;
282 struct rte_mbuf *buf, *tbuf;
284 if (options->out_of_place) {
/* walk the chain until the segment containing `offset` */
290 while ((tbuf->next != NULL) &&
291 (offset >= tbuf->data_len)) {
292 offset -= tbuf->data_len;
297 sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
299 sym_op->auth.digest.phys_addr =
300 rte_pktmbuf_mtophys_offset(buf, offset);
302 sym_op->auth.digest.length = options->auth_digest_sz;
/* auth region: the payload; offset is the UNALIGNED aad size here,
 * unlike cipher.data.offset above -- looks intentional but worth a
 * second look against the full file */
305 sym_op->auth.data.length = options->buffer_sz;
306 sym_op->auth.data.offset = options->auth_aad_sz;
/*
 * Build the rte_crypto_sym_xform chain matching options->op_type and create
 * a symmetric session on dev_id with it.  Keys/IVs come from test_vector;
 * sizes come from options.  Returns the new session, or (per the visible
 * init at line 319) NULL if no branch creates one.
 * Chain order: CIPHER_THEN_AUTH -> cipher first; AUTH_THEN_CIPHER -> auth
 * first; AES-GCM is special-cased on the cipher direction (encrypt: cipher
 * first, decrypt: auth first) and uses a zero-length auth key.
 * NOTE(review): several closing braces and the xform argument of the later
 * rte_cryptodev_sym_session_create() calls are missing from this
 * extraction -- confirm against the complete file.
 */
313 cperf_create_session(uint8_t dev_id,
314 const struct cperf_options *options,
315 const struct cperf_test_vector *test_vector)
317 struct rte_crypto_sym_xform cipher_xform;
318 struct rte_crypto_sym_xform auth_xform;
319 struct rte_cryptodev_sym_session *sess = NULL;
324 if (options->op_type == CPERF_CIPHER_ONLY) {
/* single cipher xform, no chaining */
325 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
326 cipher_xform.next = NULL;
327 cipher_xform.cipher.algo = options->cipher_algo;
328 cipher_xform.cipher.op = options->cipher_op;
330 /* cipher different than null */
331 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
332 cipher_xform.cipher.key.data =
333 test_vector->cipher_key.data;
334 cipher_xform.cipher.key.length =
335 test_vector->cipher_key.length;
337 /* create crypto session */
338 sess = rte_cryptodev_sym_session_create(dev_id, &cipher_xform);
342 } else if (options->op_type == CPERF_AUTH_ONLY) {
/* single auth xform, no chaining */
343 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
344 auth_xform.next = NULL;
345 auth_xform.auth.algo = options->auth_algo;
346 auth_xform.auth.op = options->auth_op;
348 /* auth different than null */
349 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
350 auth_xform.auth.digest_length =
351 options->auth_digest_sz;
352 auth_xform.auth.add_auth_data_length =
353 options->auth_aad_sz;
354 auth_xform.auth.key.length =
355 test_vector->auth_key.length;
356 auth_xform.auth.key.data = test_vector->auth_key.data;
358 /* create crypto session */
359 sess = rte_cryptodev_sym_session_create(dev_id, &auth_xform);
363 } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
364 || options->op_type == CPERF_AUTH_THEN_CIPHER
365 || options->op_type == CPERF_AEAD) {
/* chained case: set up both xforms, then link them per op type */
370 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
371 cipher_xform.next = NULL;
372 cipher_xform.cipher.algo = options->cipher_algo;
373 cipher_xform.cipher.op = options->cipher_op;
375 /* cipher different than null */
376 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
377 cipher_xform.cipher.key.data =
378 test_vector->cipher_key.data;
379 cipher_xform.cipher.key.length =
380 test_vector->cipher_key.length;
386 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
387 auth_xform.next = NULL;
388 auth_xform.auth.algo = options->auth_algo;
389 auth_xform.auth.op = options->auth_op;
391 /* auth different than null */
392 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
393 auth_xform.auth.digest_length = options->auth_digest_sz;
394 auth_xform.auth.add_auth_data_length =
395 options->auth_aad_sz;
396 /* auth options for aes gcm */
397 if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
398 options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM) {
/* GCM derives the auth key from the cipher key: none needed here */
399 auth_xform.auth.key.length = 0;
400 auth_xform.auth.key.data = NULL;
401 } else { /* auth options for others */
402 auth_xform.auth.key.length =
403 test_vector->auth_key.length;
404 auth_xform.auth.key.data =
405 test_vector->auth_key.data;
409 /* create crypto session for aes gcm */
410 if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM) {
411 if (options->cipher_op ==
412 RTE_CRYPTO_CIPHER_OP_ENCRYPT) {
413 cipher_xform.next = &auth_xform;
414 /* create crypto session */
415 sess = rte_cryptodev_sym_session_create(dev_id,
417 } else { /* decrypt */
418 auth_xform.next = &cipher_xform;
419 /* create crypto session */
420 sess = rte_cryptodev_sym_session_create(dev_id,
423 } else { /* create crypto session for other */
424 /* cipher then auth */
425 if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
426 cipher_xform.next = &auth_xform;
427 /* create crypto session */
428 sess = rte_cryptodev_sym_session_create(dev_id,
430 } else { /* auth then cipher */
431 auth_xform.next = &cipher_xform;
432 /* create crypto session */
433 sess = rte_cryptodev_sym_session_create(dev_id,
/*
 * Fill op_fns with the session-create callback and the populate_ops
 * function matching options->op_type:
 *  - chained/AEAD types: AES-GCM (cipher+auth both GCM) -> set_ops_aead,
 *    anything else -> set_ops_cipher_auth;
 *  - AUTH_ONLY: NULL auth -> set_ops_null_auth, else set_ops_auth;
 *  - CIPHER_ONLY: NULL cipher -> set_ops_null_cipher, else set_ops_cipher.
 * op_fns is zeroed first, so an unmatched op_type leaves populate_ops NULL.
 * NOTE(review): the else lines, early returns and closing braces between
 * the paired assignments are missing from this extraction -- confirm the
 * exact branch structure against the complete file.
 */
443 cperf_get_op_functions(const struct cperf_options *options,
444 struct cperf_op_fns *op_fns)
445 memset(op_fns, 0, sizeof(struct cperf_op_fns));
447 op_fns->sess_create = cperf_create_session;
449 if (options->op_type == CPERF_AEAD
450 || options->op_type == CPERF_AUTH_THEN_CIPHER
451 || options->op_type == CPERF_CIPHER_THEN_AUTH) {
452 if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
453 options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM)
454 op_fns->populate_ops = cperf_set_ops_aead;
456 op_fns->populate_ops = cperf_set_ops_cipher_auth;
459 if (options->op_type == CPERF_AUTH_ONLY) {
460 if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
461 op_fns->populate_ops = cperf_set_ops_null_auth;
463 op_fns->populate_ops = cperf_set_ops_auth;
466 if (options->op_type == CPERF_CIPHER_ONLY) {
467 if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
468 op_fns->populate_ops = cperf_set_ops_null_cipher;
470 op_fns->populate_ops = cperf_set_ops_cipher;