app/crypto-perf: use single mempool
[dpdk.git] / app / test-crypto-perf / cperf_ops.c
1 /*-
2  *   BSD LICENSE
3  *
4  *   Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
5  *
6  *   Redistribution and use in source and binary forms, with or without
7  *   modification, are permitted provided that the following conditions
8  *   are met:
9  *
10  *     * Redistributions of source code must retain the above copyright
11  *       notice, this list of conditions and the following disclaimer.
12  *     * Redistributions in binary form must reproduce the above copyright
13  *       notice, this list of conditions and the following disclaimer in
14  *       the documentation and/or other materials provided with the
15  *       distribution.
16  *     * Neither the name of Intel Corporation nor the names of its
17  *       contributors may be used to endorse or promote products derived
18  *       from this software without specific prior written permission.
19  *
20  *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21  *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22  *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
23  *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
24  *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
25  *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
26  *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
27  *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
28  *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
29  *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30  *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31  */
32
33 #include <rte_cryptodev.h>
34
35 #include "cperf_ops.h"
36 #include "cperf_test_vectors.h"
37
38 static int
39 cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
40                 uint32_t src_buf_offset, uint32_t dst_buf_offset,
41                 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
42                 const struct cperf_options *options,
43                 const struct cperf_test_vector *test_vector __rte_unused,
44                 uint16_t iv_offset __rte_unused)
45 {
46         uint16_t i;
47
48         for (i = 0; i < nb_ops; i++) {
49                 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
50
51                 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
52                 rte_crypto_op_attach_sym_session(ops[i], sess);
53
54                 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
55                                                         src_buf_offset);
56
57                 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
58                 if (dst_buf_offset == 0)
59                         sym_op->m_dst = NULL;
60                 else
61                         sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
62                                                         dst_buf_offset);
63
64                 /* cipher parameters */
65                 sym_op->cipher.data.length = options->test_buffer_size;
66                 sym_op->cipher.data.offset = 0;
67         }
68
69         return 0;
70 }
71
72 static int
73 cperf_set_ops_null_auth(struct rte_crypto_op **ops,
74                 uint32_t src_buf_offset, uint32_t dst_buf_offset,
75                 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
76                 const struct cperf_options *options,
77                 const struct cperf_test_vector *test_vector __rte_unused,
78                 uint16_t iv_offset __rte_unused)
79 {
80         uint16_t i;
81
82         for (i = 0; i < nb_ops; i++) {
83                 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
84
85                 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
86                 rte_crypto_op_attach_sym_session(ops[i], sess);
87
88                 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
89                                                         src_buf_offset);
90
91                 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
92                 if (dst_buf_offset == 0)
93                         sym_op->m_dst = NULL;
94                 else
95                         sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
96                                                         dst_buf_offset);
97
98                 /* auth parameters */
99                 sym_op->auth.data.length = options->test_buffer_size;
100                 sym_op->auth.data.offset = 0;
101         }
102
103         return 0;
104 }
105
106 static int
107 cperf_set_ops_cipher(struct rte_crypto_op **ops,
108                 uint32_t src_buf_offset, uint32_t dst_buf_offset,
109                 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
110                 const struct cperf_options *options,
111                 const struct cperf_test_vector *test_vector,
112                 uint16_t iv_offset)
113 {
114         uint16_t i;
115
116         for (i = 0; i < nb_ops; i++) {
117                 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
118
119                 ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
120                 rte_crypto_op_attach_sym_session(ops[i], sess);
121
122                 sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
123                                                         src_buf_offset);
124
125                 /* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
126                 if (dst_buf_offset == 0)
127                         sym_op->m_dst = NULL;
128                 else
129                         sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
130                                                         dst_buf_offset);
131
132                 /* cipher parameters */
133                 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
134                                 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
135                                 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
136                         sym_op->cipher.data.length = options->test_buffer_size << 3;
137                 else
138                         sym_op->cipher.data.length = options->test_buffer_size;
139
140                 sym_op->cipher.data.offset = 0;
141         }
142
143         if (options->test == CPERF_TEST_TYPE_VERIFY) {
144                 for (i = 0; i < nb_ops; i++) {
145                         uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
146                                         uint8_t *, iv_offset);
147
148                         memcpy(iv_ptr, test_vector->cipher_iv.data,
149                                         test_vector->cipher_iv.length);
150
151                 }
152         }
153
154         return 0;
155 }
156
/*
 * Populate ops for auth-only tests.
 *
 * Attaches the session, recovers src/dst mbufs by pointer arithmetic
 * from each op (mbufs live at fixed offsets in the same mempool
 * element), copies the auth IV if the algorithm uses one, and decides
 * where the digest lives: the test vector's reference digest for
 * VERIFY ops, or a slot right after the payload (walking the mbuf
 * segment chain) for GENERATE ops. Returns 0 always.
 */
static int
cperf_set_ops_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		/* Source mbuf sits at a fixed offset from the op itself */
		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* dst_buf_offset == 0 means in-place: no separate dst mbuf */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* Copy the auth IV (only some algorithms have one) */
		if (test_vector->auth_iv.length) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
								uint8_t *,
								iv_offset);
			memcpy(iv_ptr, test_vector->auth_iv.data,
					test_vector->auth_iv.length);
		}

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			/* VERIFY: compare against the vector's reference digest */
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			/* GENERATE: digest goes right after the payload */
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				/* Walk the segment chain until the segment
				 * containing payload byte `offset` is found
				 */
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_mtophys_offset(buf, offset);

		}

		/* Wireless algorithms express the auth length in bits */
		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length = options->test_buffer_size << 3;
		else
			sym_op->auth.data.length = options->test_buffer_size;

		sym_op->auth.data.offset = 0;
	}

	/* Verify tests re-stamp the reference IV into every op */
	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		if (test_vector->auth_iv.length) {
			for (i = 0; i < nb_ops; i++) {
				uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
						uint8_t *, iv_offset);

				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}
	}
	return 0;
}
251
/*
 * Populate ops for chained cipher+auth tests.
 *
 * Combines the cipher-only and auth-only setup: session attach, mbuf
 * recovery by pointer arithmetic, bit-vs-byte lengths for wireless
 * algorithms, and digest placement (reference digest for VERIFY,
 * post-payload slot for GENERATE). For verify-type tests, the cipher
 * IV — and the auth IV if present — are copied into each op, with the
 * auth IV stored immediately after the cipher IV. Returns 0 always.
 */
static int
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		/* Source mbuf sits at a fixed offset from the op itself */
		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* dst_buf_offset == 0 means in-place: no separate dst mbuf */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters (bits for wireless algos, else bytes) */
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			sym_op->cipher.data.length = options->test_buffer_size << 3;
		else
			sym_op->cipher.data.length = options->test_buffer_size;

		sym_op->cipher.data.offset = 0;

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			/* VERIFY: compare against the vector's reference digest */
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			/* GENERATE: digest goes right after the payload */
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				/* Walk the segment chain until the segment
				 * containing payload byte `offset` is found
				 */
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_mtophys_offset(buf, offset);
		}

		/* Wireless algorithms express the auth length in bits */
		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length = options->test_buffer_size << 3;
		else
			sym_op->auth.data.length = options->test_buffer_size;

		sym_op->auth.data.offset = 0;
	}

	/* Verify tests re-stamp the reference IVs into every op */
	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
			if (test_vector->auth_iv.length) {
				/*
				 * Copy IV after the crypto operation and
				 * the cipher IV
				 */
				iv_ptr += test_vector->cipher_iv.length;
				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}

	}

	return 0;
}
356
/*
 * Populate ops for AEAD tests (e.g. AES-GCM/CCM).
 *
 * The AAD region is placed in the op's private area right after the
 * IV, aligned to 16 bytes. Digest placement follows the same rule as
 * the auth cases: reference digest from the vector when decrypting
 * (digest is verified), post-payload slot when encrypting (digest is
 * generated). For verify-type tests the AEAD IV and AAD from the
 * vector are copied into each op. Returns 0 always.
 */
static int
cperf_set_ops_aead(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;
	/* AAD lives after the IV, 16-byte aligned */
	uint16_t aad_offset = iv_offset +
			RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		/* Source mbuf sits at a fixed offset from the op itself */
		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* dst_buf_offset == 0 means in-place: no separate dst mbuf */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* AEAD parameters */
		sym_op->aead.data.length = options->test_buffer_size;
		sym_op->aead.data.offset = 0;

		sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, aad_offset);
		sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
					aad_offset);

		if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
			/* DECRYPT verifies against the reference digest */
			sym_op->aead.digest.data = test_vector->digest.data;
			sym_op->aead.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			/* ENCRYPT: digest goes right after the payload */
			uint32_t offset = sym_op->aead.data.length +
						sym_op->aead.data.offset;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				/* Walk the segment chain until the segment
				 * containing payload byte `offset` is found
				 */
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->aead.digest.phys_addr =
					rte_pktmbuf_mtophys_offset(buf, offset);
		}
	}

	/* Verify tests stamp the reference IV and AAD into every op */
	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->aead_iv.data,
					test_vector->aead_iv.length);

			/* Copy AAD after the IV */
			memcpy(ops[i]->sym->aead.aad.data,
				test_vector->aad.data,
				test_vector->aad.length);
		}
	}

	return 0;
}
448
449 static struct rte_cryptodev_sym_session *
450 cperf_create_session(struct rte_mempool *sess_mp,
451         uint8_t dev_id,
452         const struct cperf_options *options,
453         const struct cperf_test_vector *test_vector,
454         uint16_t iv_offset)
455 {
456         struct rte_crypto_sym_xform cipher_xform;
457         struct rte_crypto_sym_xform auth_xform;
458         struct rte_crypto_sym_xform aead_xform;
459         struct rte_cryptodev_sym_session *sess = NULL;
460
461         sess = rte_cryptodev_sym_session_create(sess_mp);
462         /*
463          * cipher only
464          */
465         if (options->op_type == CPERF_CIPHER_ONLY) {
466                 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
467                 cipher_xform.next = NULL;
468                 cipher_xform.cipher.algo = options->cipher_algo;
469                 cipher_xform.cipher.op = options->cipher_op;
470                 cipher_xform.cipher.iv.offset = iv_offset;
471
472                 /* cipher different than null */
473                 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
474                         cipher_xform.cipher.key.data =
475                                         test_vector->cipher_key.data;
476                         cipher_xform.cipher.key.length =
477                                         test_vector->cipher_key.length;
478                         cipher_xform.cipher.iv.length =
479                                         test_vector->cipher_iv.length;
480                 } else {
481                         cipher_xform.cipher.key.data = NULL;
482                         cipher_xform.cipher.key.length = 0;
483                         cipher_xform.cipher.iv.length = 0;
484                 }
485                 /* create crypto session */
486                 rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
487                                 sess_mp);
488         /*
489          *  auth only
490          */
491         } else if (options->op_type == CPERF_AUTH_ONLY) {
492                 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
493                 auth_xform.next = NULL;
494                 auth_xform.auth.algo = options->auth_algo;
495                 auth_xform.auth.op = options->auth_op;
496
497                 /* auth different than null */
498                 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
499                         auth_xform.auth.digest_length =
500                                         options->digest_sz;
501                         auth_xform.auth.key.length =
502                                         test_vector->auth_key.length;
503                         auth_xform.auth.key.data = test_vector->auth_key.data;
504                         auth_xform.auth.iv.length =
505                                         test_vector->auth_iv.length;
506                 } else {
507                         auth_xform.auth.digest_length = 0;
508                         auth_xform.auth.key.length = 0;
509                         auth_xform.auth.key.data = NULL;
510                         auth_xform.auth.iv.length = 0;
511                 }
512                 /* create crypto session */
513                 rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
514                                 sess_mp);
515         /*
516          * cipher and auth
517          */
518         } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
519                         || options->op_type == CPERF_AUTH_THEN_CIPHER) {
520                 /*
521                  * cipher
522                  */
523                 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
524                 cipher_xform.next = NULL;
525                 cipher_xform.cipher.algo = options->cipher_algo;
526                 cipher_xform.cipher.op = options->cipher_op;
527                 cipher_xform.cipher.iv.offset = iv_offset;
528
529                 /* cipher different than null */
530                 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
531                         cipher_xform.cipher.key.data =
532                                         test_vector->cipher_key.data;
533                         cipher_xform.cipher.key.length =
534                                         test_vector->cipher_key.length;
535                         cipher_xform.cipher.iv.length =
536                                         test_vector->cipher_iv.length;
537                 } else {
538                         cipher_xform.cipher.key.data = NULL;
539                         cipher_xform.cipher.key.length = 0;
540                         cipher_xform.cipher.iv.length = 0;
541                 }
542
543                 /*
544                  * auth
545                  */
546                 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
547                 auth_xform.next = NULL;
548                 auth_xform.auth.algo = options->auth_algo;
549                 auth_xform.auth.op = options->auth_op;
550
551                 /* auth different than null */
552                 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
553                         auth_xform.auth.digest_length = options->digest_sz;
554                         auth_xform.auth.iv.length = test_vector->auth_iv.length;
555                         auth_xform.auth.key.length =
556                                         test_vector->auth_key.length;
557                         auth_xform.auth.key.data =
558                                         test_vector->auth_key.data;
559                 } else {
560                         auth_xform.auth.digest_length = 0;
561                         auth_xform.auth.key.length = 0;
562                         auth_xform.auth.key.data = NULL;
563                         auth_xform.auth.iv.length = 0;
564                 }
565
566                 /* cipher then auth */
567                 if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
568                         cipher_xform.next = &auth_xform;
569                         /* create crypto session */
570                         rte_cryptodev_sym_session_init(dev_id,
571                                         sess, &cipher_xform, sess_mp);
572                 } else { /* auth then cipher */
573                         auth_xform.next = &cipher_xform;
574                         /* create crypto session */
575                         rte_cryptodev_sym_session_init(dev_id,
576                                         sess, &auth_xform, sess_mp);
577                 }
578         } else { /* options->op_type == CPERF_AEAD */
579                 aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
580                 aead_xform.next = NULL;
581                 aead_xform.aead.algo = options->aead_algo;
582                 aead_xform.aead.op = options->aead_op;
583                 aead_xform.aead.iv.offset = iv_offset;
584
585                 aead_xform.aead.key.data =
586                                         test_vector->aead_key.data;
587                 aead_xform.aead.key.length =
588                                         test_vector->aead_key.length;
589                 aead_xform.aead.iv.length = test_vector->aead_iv.length;
590
591                 aead_xform.aead.digest_length = options->digest_sz;
592                 aead_xform.aead.aad_length =
593                                         options->aead_aad_sz;
594
595                 /* Create crypto session */
596                 rte_cryptodev_sym_session_init(dev_id,
597                                         sess, &aead_xform, sess_mp);
598         }
599
600         return sess;
601 }
602
603 int
604 cperf_get_op_functions(const struct cperf_options *options,
605                 struct cperf_op_fns *op_fns)
606 {
607         memset(op_fns, 0, sizeof(struct cperf_op_fns));
608
609         op_fns->sess_create = cperf_create_session;
610
611         if (options->op_type == CPERF_AEAD) {
612                 op_fns->populate_ops = cperf_set_ops_aead;
613                 return 0;
614         }
615
616         if (options->op_type == CPERF_AUTH_THEN_CIPHER
617                         || options->op_type == CPERF_CIPHER_THEN_AUTH) {
618                 op_fns->populate_ops = cperf_set_ops_cipher_auth;
619                 return 0;
620         }
621         if (options->op_type == CPERF_AUTH_ONLY) {
622                 if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
623                         op_fns->populate_ops = cperf_set_ops_null_auth;
624                 else
625                         op_fns->populate_ops = cperf_set_ops_auth;
626                 return 0;
627         }
628         if (options->op_type == CPERF_CIPHER_ONLY) {
629                 if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
630                         op_fns->populate_ops = cperf_set_ops_null_cipher;
631                 else
632                         op_fns->populate_ops = cperf_set_ops_cipher;
633                 return 0;
634         }
635
636         return -1;
637 }