/* app/test-crypto-perf/cperf_ops.c */
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2016-2017 Intel Corporation
 */

#include <rte_cryptodev.h>

#include "cperf_ops.h"
#include "cperf_test_vectors.h"

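/*
 * Each cperf_set_ops_*() below fills one burst of symmetric crypto ops:
 * it resets the op status, attaches the session, recovers the source
 * (and optional destination) mbuf from the op address via
 * src/dst_buf_offset, and sets the data length either from the fixed
 * test buffer size or, with an IMIX distribution, from the
 * pre-generated imix_buffer_sizes[] table.
 */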
static int
cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
                uint32_t src_buf_offset, uint32_t dst_buf_offset,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector __rte_unused,
                uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        src_buf_offset);

                /* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
                if (dst_buf_offset == 0)
                        sym_op->m_dst = NULL;
                else
                        sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        dst_buf_offset);

                /* cipher parameters */
                if (options->imix_distribution_count) {
                        sym_op->cipher.data.length =
                                options->imix_buffer_sizes[*imix_idx];
                        *imix_idx = (*imix_idx + 1) % options->pool_sz;
                } else
                        sym_op->cipher.data.length = options->test_buffer_size;
                sym_op->cipher.data.offset = 0;
        }

        return 0;
}
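/*
 * A note on the pointer arithmetic above: the op, its IV/AAD area and
 * its mbuf(s) are laid out in a single mempool object, so the mbuf is
 * recovered from the op address alone. A minimal sketch, assuming "op"
 * and the offset come from the test framework:
 *
 *	struct rte_mbuf *src = (struct rte_mbuf *)
 *			((uint8_t *)op + src_buf_offset);
 *
 * This keeps the hot path to one mempool dequeue per op.
 */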

static int
cperf_set_ops_null_auth(struct rte_crypto_op **ops,
                uint32_t src_buf_offset, uint32_t dst_buf_offset,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector __rte_unused,
                uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        src_buf_offset);

                /* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
                if (dst_buf_offset == 0)
                        sym_op->m_dst = NULL;
                else
                        sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        dst_buf_offset);

                /* auth parameters */
                if (options->imix_distribution_count) {
                        sym_op->auth.data.length =
                                options->imix_buffer_sizes[*imix_idx];
                        *imix_idx = (*imix_idx + 1) % options->pool_sz;
                } else
                        sym_op->auth.data.length = options->test_buffer_size;
                sym_op->auth.data.offset = 0;
        }

        return 0;
}
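/*
 * Note: with an IMIX distribution the per-op length comes from
 * imix_buffer_sizes[], a table of options->pool_sz entries pre-filled
 * according to the requested distribution; *imix_idx simply wraps
 * modulo pool_sz, so the sizes repeat in a fixed order over the op
 * pool rather than being drawn per burst.
 */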

static int
cperf_set_ops_cipher(struct rte_crypto_op **ops,
                uint32_t src_buf_offset, uint32_t dst_buf_offset,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector,
                uint16_t iv_offset, uint32_t *imix_idx)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        src_buf_offset);

                /* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
                if (dst_buf_offset == 0)
                        sym_op->m_dst = NULL;
                else
                        sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        dst_buf_offset);

                /* cipher parameters */
                if (options->imix_distribution_count) {
                        sym_op->cipher.data.length =
                                options->imix_buffer_sizes[*imix_idx];
                        *imix_idx = (*imix_idx + 1) % options->pool_sz;
                } else
                        sym_op->cipher.data.length = options->test_buffer_size;

                if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
                        sym_op->cipher.data.length <<= 3;

                sym_op->cipher.data.offset = 0;
        }

        if (options->test == CPERF_TEST_TYPE_VERIFY) {
                for (i = 0; i < nb_ops; i++) {
                        uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                        uint8_t *, iv_offset);

                        memcpy(iv_ptr, test_vector->cipher_iv.data,
                                        test_vector->cipher_iv.length);
                }
        }

        return 0;
}
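/*
 * Note on the "<<= 3" above: SNOW3G (UEA2), KASUMI (F8) and ZUC (EEA3)
 * take cipher.data.length and offset in bits rather than bytes, so the
 * byte count from the options is shifted left by three. For example, a
 * 64-byte test buffer becomes a 512-bit cipher length.
 */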

static int
cperf_set_ops_auth(struct rte_crypto_op **ops,
                uint32_t src_buf_offset, uint32_t dst_buf_offset,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector,
                uint16_t iv_offset, uint32_t *imix_idx)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        src_buf_offset);

                /* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
                if (dst_buf_offset == 0)
                        sym_op->m_dst = NULL;
                else
                        sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        dst_buf_offset);

                if (test_vector->auth_iv.length) {
                        uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                                                uint8_t *,
                                                                iv_offset);
                        memcpy(iv_ptr, test_vector->auth_iv.data,
                                        test_vector->auth_iv.length);
                }

                /* authentication parameters */
                if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
                        sym_op->auth.digest.data = test_vector->digest.data;
                        sym_op->auth.digest.phys_addr =
                                        test_vector->digest.phys_addr;
                } else {
                        uint32_t offset = options->test_buffer_size;
                        struct rte_mbuf *buf, *tbuf;

                        if (options->out_of_place) {
                                buf = sym_op->m_dst;
                        } else {
                                tbuf = sym_op->m_src;
                                while ((tbuf->next != NULL) &&
                                                (offset >= tbuf->data_len)) {
                                        offset -= tbuf->data_len;
                                        tbuf = tbuf->next;
                                }
                                /*
                                 * If there is not enough room in this
                                 * segment, place the digest in the next one
                                 */
                                if ((tbuf->data_len - offset) < options->digest_sz) {
                                        tbuf = tbuf->next;
                                        offset = 0;
                                }
                                buf = tbuf;
                        }

                        sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
                                        uint8_t *, offset);
                        sym_op->auth.digest.phys_addr =
                                        rte_pktmbuf_iova_offset(buf, offset);
                }

                if (options->imix_distribution_count) {
                        sym_op->auth.data.length =
                                options->imix_buffer_sizes[*imix_idx];
                        *imix_idx = (*imix_idx + 1) % options->pool_sz;
                } else
                        sym_op->auth.data.length = options->test_buffer_size;

                if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
                        sym_op->auth.data.length <<= 3;

                sym_op->auth.data.offset = 0;
        }

        if (options->test == CPERF_TEST_TYPE_VERIFY) {
                if (test_vector->auth_iv.length) {
                        for (i = 0; i < nb_ops; i++) {
                                uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                                uint8_t *, iv_offset);

                                memcpy(iv_ptr, test_vector->auth_iv.data,
                                                test_vector->auth_iv.length);
                        }
                }
        }
        return 0;
}
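/*
 * The digest placement above is worth spelling out: for in-place ops
 * the digest is appended right after the payload, which may sit in any
 * segment of a chained mbuf. The walk first skips whole segments that
 * precede the digest offset, then spills into the next segment if the
 * current one cannot hold the full digest. A stand-alone sketch
 * (hypothetical helper, not part of this file):
 *
 *	static struct rte_mbuf *
 *	digest_segment(struct rte_mbuf *m, uint32_t *off, uint16_t dsz)
 *	{
 *		while (m->next != NULL && *off >= m->data_len) {
 *			*off -= m->data_len;
 *			m = m->next;
 *		}
 *		if ((m->data_len - *off) < dsz) {
 *			m = m->next;
 *			*off = 0;
 *		}
 *		return m;
 *	}
 *
 * The same walk is repeated in the cipher-auth and AEAD variants below.
 */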

static int
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
                uint32_t src_buf_offset, uint32_t dst_buf_offset,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector,
                uint16_t iv_offset, uint32_t *imix_idx)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        src_buf_offset);

                /* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
                if (dst_buf_offset == 0)
                        sym_op->m_dst = NULL;
                else
                        sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        dst_buf_offset);

                /* cipher parameters */
                if (options->imix_distribution_count) {
                        sym_op->cipher.data.length =
                                options->imix_buffer_sizes[*imix_idx];
                        *imix_idx = (*imix_idx + 1) % options->pool_sz;
                } else
                        sym_op->cipher.data.length = options->test_buffer_size;

                if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
                        sym_op->cipher.data.length <<= 3;

                sym_op->cipher.data.offset = 0;

                /* authentication parameters */
                if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
                        sym_op->auth.digest.data = test_vector->digest.data;
                        sym_op->auth.digest.phys_addr =
                                        test_vector->digest.phys_addr;
                } else {
                        uint32_t offset = options->test_buffer_size;
                        struct rte_mbuf *buf, *tbuf;

                        if (options->out_of_place) {
                                buf = sym_op->m_dst;
                        } else {
                                tbuf = sym_op->m_src;
                                while ((tbuf->next != NULL) &&
                                                (offset >= tbuf->data_len)) {
                                        offset -= tbuf->data_len;
                                        tbuf = tbuf->next;
                                }
                                /*
                                 * If there is not enough room in this
                                 * segment, place the digest in the next one
                                 */
                                if ((tbuf->data_len - offset) < options->digest_sz) {
                                        tbuf = tbuf->next;
                                        offset = 0;
                                }
                                buf = tbuf;
                        }

                        sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
                                        uint8_t *, offset);
                        sym_op->auth.digest.phys_addr =
                                        rte_pktmbuf_iova_offset(buf, offset);
                }

                if (options->imix_distribution_count) {
                        sym_op->auth.data.length =
                                options->imix_buffer_sizes[*imix_idx];
                        *imix_idx = (*imix_idx + 1) % options->pool_sz;
                } else
                        sym_op->auth.data.length = options->test_buffer_size;

                if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
                        sym_op->auth.data.length <<= 3;

                sym_op->auth.data.offset = 0;
        }

        if (options->test == CPERF_TEST_TYPE_VERIFY) {
                for (i = 0; i < nb_ops; i++) {
                        uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                        uint8_t *, iv_offset);

                        memcpy(iv_ptr, test_vector->cipher_iv.data,
                                        test_vector->cipher_iv.length);
                        if (test_vector->auth_iv.length) {
                                /*
                                 * The auth IV is copied right after
                                 * the cipher IV
                                 */
                                iv_ptr += test_vector->cipher_iv.length;
                                memcpy(iv_ptr, test_vector->auth_iv.data,
                                                test_vector->auth_iv.length);
                        }
                }
        }

        return 0;
}
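/*
 * Two details of the combined case above: first, the cipher IV lives at
 * iv_offset and the auth IV immediately follows it (see the
 * auth_xform.auth.iv.offset computation in cperf_create_session()
 * below). Second, with an IMIX distribution *imix_idx is advanced once
 * for the cipher length and again for the auth length, so the two
 * lengths of a single op can come from different entries of the table.
 */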

static int
cperf_set_ops_aead(struct rte_crypto_op **ops,
                uint32_t src_buf_offset, uint32_t dst_buf_offset,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector,
                uint16_t iv_offset, uint32_t *imix_idx)
{
        uint16_t i;
        /* AAD is placed after the IV */
        uint16_t aad_offset = iv_offset +
                        RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        src_buf_offset);

                /* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
                if (dst_buf_offset == 0)
                        sym_op->m_dst = NULL;
                else
                        sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        dst_buf_offset);

                /* AEAD parameters */
                if (options->imix_distribution_count) {
                        sym_op->aead.data.length =
                                options->imix_buffer_sizes[*imix_idx];
                        *imix_idx = (*imix_idx + 1) % options->pool_sz;
                } else
                        sym_op->aead.data.length = options->test_buffer_size;
                sym_op->aead.data.offset = 0;

                sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
                                        uint8_t *, aad_offset);
                sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
                                        aad_offset);

                if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
                        sym_op->aead.digest.data = test_vector->digest.data;
                        sym_op->aead.digest.phys_addr =
                                        test_vector->digest.phys_addr;
                } else {
                        uint32_t offset = sym_op->aead.data.length +
                                                sym_op->aead.data.offset;
                        struct rte_mbuf *buf, *tbuf;

                        if (options->out_of_place) {
                                buf = sym_op->m_dst;
                        } else {
                                tbuf = sym_op->m_src;
                                while ((tbuf->next != NULL) &&
                                                (offset >= tbuf->data_len)) {
                                        offset -= tbuf->data_len;
                                        tbuf = tbuf->next;
                                }
                                /*
                                 * If there is not enough room in this
                                 * segment, place the digest in the next one
                                 */
                                if ((tbuf->data_len - offset) < options->digest_sz) {
                                        tbuf = tbuf->next;
                                        offset = 0;
                                }
                                buf = tbuf;
                        }

                        sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
                                        uint8_t *, offset);
                        sym_op->aead.digest.phys_addr =
                                        rte_pktmbuf_iova_offset(buf, offset);
                }
        }

        if (options->test == CPERF_TEST_TYPE_VERIFY) {
                for (i = 0; i < nb_ops; i++) {
                        uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                        uint8_t *, iv_offset);

                        /*
                         * If doing AES-CCM, the nonce is copied one byte
                         * after the start of the IV field, and the AAD is
                         * copied 18 bytes after the start of the AAD field.
                         */
                        if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
                                memcpy(iv_ptr + 1, test_vector->aead_iv.data,
                                        test_vector->aead_iv.length);

                                memcpy(ops[i]->sym->aead.aad.data + 18,
                                        test_vector->aad.data,
                                        test_vector->aad.length);
                        } else {
                                memcpy(iv_ptr, test_vector->aead_iv.data,
                                        test_vector->aead_iv.length);

                                memcpy(ops[i]->sym->aead.aad.data,
                                        test_vector->aad.data,
                                        test_vector->aad.length);
                        }
                }
        }

        return 0;
}
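/*
 * Note: the AAD sits right after the IV, rounded up to a 16-byte
 * boundary (aad_offset above). The AES-CCM offsets follow the cryptodev
 * convention: the first byte of the IV field is reserved for the CCM
 * flags byte, and the first 18 bytes of the AAD field leave room for
 * the B0 block (16 bytes) plus the 2-byte encoded AAD length.
 */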

static struct rte_cryptodev_sym_session *
cperf_create_session(struct rte_mempool *sess_mp,
        struct rte_mempool *priv_mp,
        uint8_t dev_id,
        const struct cperf_options *options,
        const struct cperf_test_vector *test_vector,
        uint16_t iv_offset)
{
        struct rte_crypto_sym_xform cipher_xform;
        struct rte_crypto_sym_xform auth_xform;
        struct rte_crypto_sym_xform aead_xform;
        struct rte_cryptodev_sym_session *sess = NULL;

        sess = rte_cryptodev_sym_session_create(sess_mp);
        /*
         * cipher only
         */
        if (options->op_type == CPERF_CIPHER_ONLY) {
                cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
                cipher_xform.next = NULL;
                cipher_xform.cipher.algo = options->cipher_algo;
                cipher_xform.cipher.op = options->cipher_op;
                cipher_xform.cipher.iv.offset = iv_offset;

                /* cipher other than null */
                if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
                        cipher_xform.cipher.key.data =
                                        test_vector->cipher_key.data;
                        cipher_xform.cipher.key.length =
                                        test_vector->cipher_key.length;
                        cipher_xform.cipher.iv.length =
                                        test_vector->cipher_iv.length;
                } else {
                        cipher_xform.cipher.key.data = NULL;
                        cipher_xform.cipher.key.length = 0;
                        cipher_xform.cipher.iv.length = 0;
                }
                /* create crypto session */
                rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
                                priv_mp);
        /*
         * auth only
         */
        } else if (options->op_type == CPERF_AUTH_ONLY) {
                auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
                auth_xform.next = NULL;
                auth_xform.auth.algo = options->auth_algo;
                auth_xform.auth.op = options->auth_op;
                auth_xform.auth.iv.offset = iv_offset;

                /* auth other than null */
                if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
                        auth_xform.auth.digest_length =
                                        options->digest_sz;
                        auth_xform.auth.key.length =
                                        test_vector->auth_key.length;
                        auth_xform.auth.key.data = test_vector->auth_key.data;
                        auth_xform.auth.iv.length =
                                        test_vector->auth_iv.length;
                } else {
                        auth_xform.auth.digest_length = 0;
                        auth_xform.auth.key.length = 0;
                        auth_xform.auth.key.data = NULL;
                        auth_xform.auth.iv.length = 0;
                }
                /* create crypto session */
                rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
                                priv_mp);
        /*
         * cipher and auth
         */
        } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
                        || options->op_type == CPERF_AUTH_THEN_CIPHER) {
                /*
                 * cipher
                 */
                cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
                cipher_xform.next = NULL;
                cipher_xform.cipher.algo = options->cipher_algo;
                cipher_xform.cipher.op = options->cipher_op;
                cipher_xform.cipher.iv.offset = iv_offset;

                /* cipher other than null */
                if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
                        cipher_xform.cipher.key.data =
                                        test_vector->cipher_key.data;
                        cipher_xform.cipher.key.length =
                                        test_vector->cipher_key.length;
                        cipher_xform.cipher.iv.length =
                                        test_vector->cipher_iv.length;
                } else {
                        cipher_xform.cipher.key.data = NULL;
                        cipher_xform.cipher.key.length = 0;
                        cipher_xform.cipher.iv.length = 0;
                }

                /*
                 * auth
                 */
                auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
                auth_xform.next = NULL;
                auth_xform.auth.algo = options->auth_algo;
                auth_xform.auth.op = options->auth_op;
                auth_xform.auth.iv.offset = iv_offset +
                        cipher_xform.cipher.iv.length;

                /* auth other than null */
                if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
                        auth_xform.auth.digest_length = options->digest_sz;
                        auth_xform.auth.iv.length = test_vector->auth_iv.length;
                        auth_xform.auth.key.length =
                                        test_vector->auth_key.length;
                        auth_xform.auth.key.data =
                                        test_vector->auth_key.data;
                } else {
                        auth_xform.auth.digest_length = 0;
                        auth_xform.auth.key.length = 0;
                        auth_xform.auth.key.data = NULL;
                        auth_xform.auth.iv.length = 0;
                }

                /* cipher then auth */
                if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
                        cipher_xform.next = &auth_xform;
                        /* create crypto session */
                        rte_cryptodev_sym_session_init(dev_id,
                                        sess, &cipher_xform, priv_mp);
                } else { /* auth then cipher */
                        auth_xform.next = &cipher_xform;
                        /* create crypto session */
                        rte_cryptodev_sym_session_init(dev_id,
                                        sess, &auth_xform, priv_mp);
                }
        } else { /* options->op_type == CPERF_AEAD */
                aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
                aead_xform.next = NULL;
                aead_xform.aead.algo = options->aead_algo;
                aead_xform.aead.op = options->aead_op;
                aead_xform.aead.iv.offset = iv_offset;

                aead_xform.aead.key.data =
                                        test_vector->aead_key.data;
                aead_xform.aead.key.length =
                                        test_vector->aead_key.length;
                aead_xform.aead.iv.length = test_vector->aead_iv.length;

                aead_xform.aead.digest_length = options->digest_sz;
                aead_xform.aead.aad_length =
                                        options->aead_aad_sz;

                /* Create crypto session */
                rte_cryptodev_sym_session_init(dev_id,
                                        sess, &aead_xform, priv_mp);
        }

        return sess;
}
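/*
 * A minimal usage sketch for the two-step session model above
 * (hypothetical caller; the mempools and iv_offset come from the test
 * framework):
 *
 *	struct rte_cryptodev_sym_session *sess;
 *
 *	sess = cperf_create_session(sess_mp, priv_mp, dev_id,
 *			options, test_vector, iv_offset);
 *	if (sess == NULL)
 *		return -ENOMEM;
 *
 * rte_cryptodev_sym_session_create() allocates the device-agnostic
 * session header from sess_mp, and rte_cryptodev_sym_session_init()
 * fills in the per-device private data from the transform chain using
 * priv_mp.
 */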

int
cperf_get_op_functions(const struct cperf_options *options,
                struct cperf_op_fns *op_fns)
{
        memset(op_fns, 0, sizeof(struct cperf_op_fns));

        op_fns->sess_create = cperf_create_session;

        if (options->op_type == CPERF_AEAD) {
                op_fns->populate_ops = cperf_set_ops_aead;
                return 0;
        }

        if (options->op_type == CPERF_AUTH_THEN_CIPHER
                        || options->op_type == CPERF_CIPHER_THEN_AUTH) {
                op_fns->populate_ops = cperf_set_ops_cipher_auth;
                return 0;
        }
        if (options->op_type == CPERF_AUTH_ONLY) {
                if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
                        op_fns->populate_ops = cperf_set_ops_null_auth;
                else
                        op_fns->populate_ops = cperf_set_ops_auth;
                return 0;
        }
        if (options->op_type == CPERF_CIPHER_ONLY) {
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
                        op_fns->populate_ops = cperf_set_ops_null_cipher;
                else
                        op_fns->populate_ops = cperf_set_ops_cipher;
                return 0;
        }

        return -1;
}
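/*
 * Dispatch sketch: a test case resolves its callbacks once, then calls
 * the populate function per burst (names of the locals are
 * hypothetical):
 *
 *	struct cperf_op_fns fns;
 *
 *	if (cperf_get_op_functions(options, &fns) < 0)
 *		return -EINVAL;
 *	fns.populate_ops(ops, src_buf_offset, dst_buf_offset, burst_sz,
 *			sess, options, test_vector, iv_offset, &imix_idx);
 *
 * NULL cipher/auth get dedicated populate functions because they skip
 * the IV and digest handling entirely.
 */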