app/test-crypto-perf/cperf_ops.c
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2016-2017 Intel Corporation
 */

#include <rte_cryptodev.h>
#ifdef RTE_LIBRTE_SECURITY
#include <rte_security.h> /* security session helpers used by the PDCP path */
#endif

#include "cperf_ops.h"
#include "cperf_test_vectors.h"

#ifdef RTE_LIBRTE_SECURITY
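/*
 * Populate ops for the lookaside-protocol security path (PDCP): attach the
 * security session and point m_src/m_dst at the mbufs laid out after each op.
 */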
static int
cperf_set_ops_security(struct rte_crypto_op **ops,
                uint32_t src_buf_offset,
                uint32_t dst_buf_offset,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector __rte_unused,
                uint16_t iv_offset __rte_unused,
                uint32_t *imix_idx __rte_unused)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;
                struct rte_security_session *sec_sess =
                        (struct rte_security_session *)sess;

                ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
                rte_security_attach_session(ops[i], sec_sess);
                sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        src_buf_offset);
                sym_op->m_src->buf_len = options->segment_sz;
                sym_op->m_src->data_len = options->test_buffer_size;
                sym_op->m_src->pkt_len = sym_op->m_src->data_len;

                /* Set dest mbuf to NULL for in-place operation (dst_buf_offset == 0) */
                if (dst_buf_offset == 0)
                        sym_op->m_dst = NULL;
                else
                        sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        dst_buf_offset);
        }

        return 0;
}
#endif

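/*
 * Populate ops for the NULL cipher case: attach the crypto session, wire up
 * source/destination mbufs and set the cipher region length (fixed or IMIX).
 */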
static int
cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
                uint32_t src_buf_offset, uint32_t dst_buf_offset,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector __rte_unused,
                uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        src_buf_offset);

                /* Set dest mbuf to NULL for in-place operation (dst_buf_offset == 0) */
                if (dst_buf_offset == 0)
                        sym_op->m_dst = NULL;
                else
                        sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        dst_buf_offset);

                /* cipher parameters */
                if (options->imix_distribution_count) {
                        sym_op->cipher.data.length =
                                options->imix_buffer_sizes[*imix_idx];
                        *imix_idx = (*imix_idx + 1) % options->pool_sz;
                } else
                        sym_op->cipher.data.length = options->test_buffer_size;
                sym_op->cipher.data.offset = 0;
        }

        return 0;
}

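/*
 * Populate ops for the NULL auth case: identical mbuf setup, but the length
 * and offset are applied to the auth region instead of the cipher region.
 */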
static int
cperf_set_ops_null_auth(struct rte_crypto_op **ops,
                uint32_t src_buf_offset, uint32_t dst_buf_offset,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector __rte_unused,
                uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        src_buf_offset);

                /* Set dest mbuf to NULL for in-place operation (dst_buf_offset == 0) */
                if (dst_buf_offset == 0)
                        sym_op->m_dst = NULL;
                else
                        sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        dst_buf_offset);

                /* auth parameters */
                if (options->imix_distribution_count) {
                        sym_op->auth.data.length =
                                options->imix_buffer_sizes[*imix_idx];
                        *imix_idx = (*imix_idx + 1) % options->pool_sz;
                } else
                        sym_op->auth.data.length = options->test_buffer_size;
                sym_op->auth.data.offset = 0;
        }

        return 0;
}

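/*
 * Populate ops for cipher-only tests. SNOW3G/KASUMI/ZUC take their lengths
 * in bits, and the cipher IV is copied into each op for verify-type tests.
 */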
static int
cperf_set_ops_cipher(struct rte_crypto_op **ops,
                uint32_t src_buf_offset, uint32_t dst_buf_offset,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector,
                uint16_t iv_offset, uint32_t *imix_idx)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        src_buf_offset);

                /* Set dest mbuf to NULL for in-place operation (dst_buf_offset == 0) */
                if (dst_buf_offset == 0)
                        sym_op->m_dst = NULL;
                else
                        sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        dst_buf_offset);

                /* cipher parameters */
                if (options->imix_distribution_count) {
                        sym_op->cipher.data.length =
                                options->imix_buffer_sizes[*imix_idx];
                        *imix_idx = (*imix_idx + 1) % options->pool_sz;
                } else
                        sym_op->cipher.data.length = options->test_buffer_size;

                /* these algorithms take the data length in bits */
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
                        sym_op->cipher.data.length <<= 3;

                sym_op->cipher.data.offset = 0;
        }

        if (options->test == CPERF_TEST_TYPE_VERIFY) {
                for (i = 0; i < nb_ops; i++) {
                        uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                        uint8_t *, iv_offset);

                        memcpy(iv_ptr, test_vector->cipher_iv.data,
                                        test_vector->cipher_iv.length);
                }
        }

        return 0;
}

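/*
 * Populate ops for auth-only tests: the digest is either taken from the test
 * vector (verify direction) or placed after the data in the destination (or
 * in-place source) mbuf chain; an auth IV is copied in when required.
 */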
static int
cperf_set_ops_auth(struct rte_crypto_op **ops,
                uint32_t src_buf_offset, uint32_t dst_buf_offset,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector,
                uint16_t iv_offset, uint32_t *imix_idx)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        src_buf_offset);

                /* Set dest mbuf to NULL for in-place operation (dst_buf_offset == 0) */
                if (dst_buf_offset == 0)
                        sym_op->m_dst = NULL;
                else
                        sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        dst_buf_offset);

                if (test_vector->auth_iv.length) {
                        uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                                                uint8_t *,
                                                                iv_offset);
                        memcpy(iv_ptr, test_vector->auth_iv.data,
                                        test_vector->auth_iv.length);
                }

                /* authentication parameters */
                if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
                        sym_op->auth.digest.data = test_vector->digest.data;
                        sym_op->auth.digest.phys_addr =
                                        test_vector->digest.phys_addr;
                } else {
                        uint32_t offset = options->test_buffer_size;
                        struct rte_mbuf *buf, *tbuf;

                        if (options->out_of_place) {
                                buf = sym_op->m_dst;
                        } else {
                                tbuf = sym_op->m_src;
                                while ((tbuf->next != NULL) &&
                                                (offset >= tbuf->data_len)) {
                                        offset -= tbuf->data_len;
                                        tbuf = tbuf->next;
                                }
                                /*
                                 * If there is not enough room in the segment,
                                 * place the digest in the next segment
                                 */
                                if ((tbuf->data_len - offset) < options->digest_sz) {
                                        tbuf = tbuf->next;
                                        offset = 0;
                                }
                                buf = tbuf;
                        }

                        sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
                                        uint8_t *, offset);
                        sym_op->auth.digest.phys_addr =
                                        rte_pktmbuf_iova_offset(buf, offset);
                }

                if (options->imix_distribution_count) {
                        sym_op->auth.data.length =
                                options->imix_buffer_sizes[*imix_idx];
                        *imix_idx = (*imix_idx + 1) % options->pool_sz;
                } else
                        sym_op->auth.data.length = options->test_buffer_size;

                /* these algorithms take the data length in bits */
                if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
                        sym_op->auth.data.length <<= 3;

                sym_op->auth.data.offset = 0;
        }

        if (options->test == CPERF_TEST_TYPE_VERIFY) {
                if (test_vector->auth_iv.length) {
                        for (i = 0; i < nb_ops; i++) {
                                uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                                uint8_t *, iv_offset);

                                memcpy(iv_ptr, test_vector->auth_iv.data,
                                                test_vector->auth_iv.length);
                        }
                }
        }
        return 0;
}

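/*
 * Populate ops for chained cipher + auth tests: combines the cipher and auth
 * setup above; for verify tests the cipher IV and (optional) auth IV are
 * copied back to back into each op.
 */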
static int
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
                uint32_t src_buf_offset, uint32_t dst_buf_offset,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector,
                uint16_t iv_offset, uint32_t *imix_idx)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        src_buf_offset);

                /* Set dest mbuf to NULL for in-place operation (dst_buf_offset == 0) */
                if (dst_buf_offset == 0)
                        sym_op->m_dst = NULL;
                else
                        sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        dst_buf_offset);

                /* cipher parameters */
                if (options->imix_distribution_count) {
                        sym_op->cipher.data.length =
                                options->imix_buffer_sizes[*imix_idx];
                        *imix_idx = (*imix_idx + 1) % options->pool_sz;
                } else
                        sym_op->cipher.data.length = options->test_buffer_size;

                /* these algorithms take the data length in bits */
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
                        sym_op->cipher.data.length <<= 3;

                sym_op->cipher.data.offset = 0;

                /* authentication parameters */
                if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
                        sym_op->auth.digest.data = test_vector->digest.data;
                        sym_op->auth.digest.phys_addr =
                                        test_vector->digest.phys_addr;
                } else {
                        uint32_t offset = options->test_buffer_size;
                        struct rte_mbuf *buf, *tbuf;

                        if (options->out_of_place) {
                                buf = sym_op->m_dst;
                        } else {
                                tbuf = sym_op->m_src;
                                while ((tbuf->next != NULL) &&
                                                (offset >= tbuf->data_len)) {
                                        offset -= tbuf->data_len;
                                        tbuf = tbuf->next;
                                }
                                /*
                                 * If there is not enough room in the segment,
                                 * place the digest in the next segment
                                 */
                                if ((tbuf->data_len - offset) < options->digest_sz) {
                                        tbuf = tbuf->next;
                                        offset = 0;
                                }
                                buf = tbuf;
                        }

                        sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
                                        uint8_t *, offset);
                        sym_op->auth.digest.phys_addr =
                                        rte_pktmbuf_iova_offset(buf, offset);
                }

                if (options->imix_distribution_count) {
                        sym_op->auth.data.length =
                                options->imix_buffer_sizes[*imix_idx];
                        *imix_idx = (*imix_idx + 1) % options->pool_sz;
                } else
                        sym_op->auth.data.length = options->test_buffer_size;

                /* these algorithms take the data length in bits */
                if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
                        sym_op->auth.data.length <<= 3;

                sym_op->auth.data.offset = 0;
        }

        if (options->test == CPERF_TEST_TYPE_VERIFY) {
                for (i = 0; i < nb_ops; i++) {
                        uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                        uint8_t *, iv_offset);

                        memcpy(iv_ptr, test_vector->cipher_iv.data,
                                        test_vector->cipher_iv.length);
                        if (test_vector->auth_iv.length) {
                                /* The auth IV is stored right after the cipher IV */
                                iv_ptr += test_vector->cipher_iv.length;
                                memcpy(iv_ptr, test_vector->auth_iv.data,
                                                test_vector->auth_iv.length);
                        }
                }
        }

        return 0;
}

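/*
 * Populate ops for AEAD tests: the AAD sits right after the 16-byte aligned
 * IV in the op private data; the digest comes from the test vector on decrypt
 * or is appended after the AEAD data region otherwise.
 */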
static int
cperf_set_ops_aead(struct rte_crypto_op **ops,
                uint32_t src_buf_offset, uint32_t dst_buf_offset,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector,
                uint16_t iv_offset, uint32_t *imix_idx)
{
        uint16_t i;
        /* AAD is placed after the IV */
        uint16_t aad_offset = iv_offset +
                        RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        src_buf_offset);

                /* Set dest mbuf to NULL for in-place operation (dst_buf_offset == 0) */
                if (dst_buf_offset == 0)
                        sym_op->m_dst = NULL;
                else
                        sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
                                                        dst_buf_offset);

                /* AEAD parameters */
                if (options->imix_distribution_count) {
                        sym_op->aead.data.length =
                                options->imix_buffer_sizes[*imix_idx];
                        *imix_idx = (*imix_idx + 1) % options->pool_sz;
                } else
                        sym_op->aead.data.length = options->test_buffer_size;
                sym_op->aead.data.offset = 0;

                sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
                                        uint8_t *, aad_offset);
                sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
                                        aad_offset);

                if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
                        sym_op->aead.digest.data = test_vector->digest.data;
                        sym_op->aead.digest.phys_addr =
                                        test_vector->digest.phys_addr;
                } else {
                        uint32_t offset = sym_op->aead.data.length +
                                                sym_op->aead.data.offset;
                        struct rte_mbuf *buf, *tbuf;

                        if (options->out_of_place) {
                                buf = sym_op->m_dst;
                        } else {
                                tbuf = sym_op->m_src;
                                while ((tbuf->next != NULL) &&
                                                (offset >= tbuf->data_len)) {
                                        offset -= tbuf->data_len;
                                        tbuf = tbuf->next;
                                }
                                /*
                                 * If there is not enough room in the segment,
                                 * place the digest in the next segment
                                 */
                                if ((tbuf->data_len - offset) < options->digest_sz) {
                                        tbuf = tbuf->next;
                                        offset = 0;
                                }
                                buf = tbuf;
                        }

                        sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
                                        uint8_t *, offset);
                        sym_op->aead.digest.phys_addr =
                                        rte_pktmbuf_iova_offset(buf, offset);
                }
        }

        if (options->test == CPERF_TEST_TYPE_VERIFY) {
                for (i = 0; i < nb_ops; i++) {
                        uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                        uint8_t *, iv_offset);

                        /*
                         * If doing AES-CCM, nonce is copied one byte
                         * after the start of IV field, and AAD is copied
                         * 18 bytes after the start of the AAD field.
                         */
                        if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
                                memcpy(iv_ptr + 1, test_vector->aead_iv.data,
                                        test_vector->aead_iv.length);

                                memcpy(ops[i]->sym->aead.aad.data + 18,
                                        test_vector->aad.data,
                                        test_vector->aad.length);
                        } else {
                                memcpy(iv_ptr, test_vector->aead_iv.data,
                                        test_vector->aead_iv.length);

                                memcpy(ops[i]->sym->aead.aad.data,
                                        test_vector->aad.data,
                                        test_vector->aad.length);
                        }
                }
        }

        return 0;
}

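/*
 * Build the transform chain requested on the command line and create the
 * matching symmetric crypto session, or a PDCP security session when the op
 * type is CPERF_PDCP and rte_security support is compiled in.
 */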
static struct rte_cryptodev_sym_session *
cperf_create_session(struct rte_mempool *sess_mp,
        struct rte_mempool *priv_mp,
        uint8_t dev_id,
        const struct cperf_options *options,
        const struct cperf_test_vector *test_vector,
        uint16_t iv_offset)
{
        struct rte_crypto_sym_xform cipher_xform;
        struct rte_crypto_sym_xform auth_xform;
        struct rte_crypto_sym_xform aead_xform;
        struct rte_cryptodev_sym_session *sess = NULL;

#ifdef RTE_LIBRTE_SECURITY
        /*
         * security only
         */
        if (options->op_type == CPERF_PDCP) {
                /* Setup Cipher Parameters */
                cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
                cipher_xform.next = NULL;
                cipher_xform.cipher.algo = options->cipher_algo;
                cipher_xform.cipher.op = options->cipher_op;
                cipher_xform.cipher.iv.offset = iv_offset;

                /* cipher algorithm other than null */
                if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
                        cipher_xform.cipher.key.data = test_vector->cipher_key.data;
                        cipher_xform.cipher.key.length = test_vector->cipher_key.length;
                        cipher_xform.cipher.iv.length = test_vector->cipher_iv.length;
                } else {
                        cipher_xform.cipher.key.data = NULL;
                        cipher_xform.cipher.key.length = 0;
                        cipher_xform.cipher.iv.length = 0;
                }

                /* Setup Auth Parameters */
                if (options->auth_algo != 0) {
                        auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
                        auth_xform.next = NULL;
                        auth_xform.auth.algo = options->auth_algo;
                        auth_xform.auth.op = options->auth_op;
                        auth_xform.auth.iv.offset = iv_offset +
                                cipher_xform.cipher.iv.length;

                        /* auth algorithm other than null */
                        if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
                                auth_xform.auth.digest_length = options->digest_sz;
                                auth_xform.auth.key.length = test_vector->auth_key.length;
                                auth_xform.auth.key.data = test_vector->auth_key.data;
                                auth_xform.auth.iv.length = test_vector->auth_iv.length;
                        } else {
                                auth_xform.auth.digest_length = 0;
                                auth_xform.auth.key.length = 0;
                                auth_xform.auth.key.data = NULL;
                                auth_xform.auth.iv.length = 0;
                        }

                        cipher_xform.next = &auth_xform;
                } else {
                        cipher_xform.next = NULL;
                }

                struct rte_security_session_conf sess_conf = {
                        .action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
                        .protocol = RTE_SECURITY_PROTOCOL_PDCP,
                        {.pdcp = {
                                .bearer = 0x16,
                                .domain = options->pdcp_domain,
                                .pkt_dir = 0,
                                .sn_size = options->pdcp_sn_sz,
                                .hfn = 0x1,
                                .hfn_threshold = 0x70C0A,
                        } },
                        .crypto_xform = &cipher_xform
                };

                struct rte_security_ctx *ctx = (struct rte_security_ctx *)
                                        rte_cryptodev_get_sec_ctx(dev_id);

                /* Create security session */
                return (void *)rte_security_session_create(ctx,
                                        &sess_conf, sess_mp);
        }
#endif
        sess = rte_cryptodev_sym_session_create(sess_mp);
        /*
         * cipher only
         */
        if (options->op_type == CPERF_CIPHER_ONLY) {
                cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
                cipher_xform.next = NULL;
                cipher_xform.cipher.algo = options->cipher_algo;
                cipher_xform.cipher.op = options->cipher_op;
                cipher_xform.cipher.iv.offset = iv_offset;

                /* cipher algorithm other than null */
                if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
                        cipher_xform.cipher.key.data =
                                        test_vector->cipher_key.data;
                        cipher_xform.cipher.key.length =
                                        test_vector->cipher_key.length;
                        cipher_xform.cipher.iv.length =
                                        test_vector->cipher_iv.length;
                } else {
                        cipher_xform.cipher.key.data = NULL;
                        cipher_xform.cipher.key.length = 0;
                        cipher_xform.cipher.iv.length = 0;
                }
                /* create crypto session */
                rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
                                priv_mp);
        /*
         * auth only
         */
        } else if (options->op_type == CPERF_AUTH_ONLY) {
                auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
                auth_xform.next = NULL;
                auth_xform.auth.algo = options->auth_algo;
                auth_xform.auth.op = options->auth_op;
                auth_xform.auth.iv.offset = iv_offset;

                /* auth algorithm other than null */
                if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
                        auth_xform.auth.digest_length =
                                        options->digest_sz;
                        auth_xform.auth.key.length =
                                        test_vector->auth_key.length;
                        auth_xform.auth.key.data = test_vector->auth_key.data;
                        auth_xform.auth.iv.length =
                                        test_vector->auth_iv.length;
                } else {
                        auth_xform.auth.digest_length = 0;
                        auth_xform.auth.key.length = 0;
                        auth_xform.auth.key.data = NULL;
                        auth_xform.auth.iv.length = 0;
                }
                /* create crypto session */
                rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
                                priv_mp);
        /*
         * cipher and auth
         */
        } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
                        || options->op_type == CPERF_AUTH_THEN_CIPHER) {
                /*
                 * cipher
                 */
                cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
                cipher_xform.next = NULL;
                cipher_xform.cipher.algo = options->cipher_algo;
                cipher_xform.cipher.op = options->cipher_op;
                cipher_xform.cipher.iv.offset = iv_offset;

                /* cipher algorithm other than null */
                if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
                        cipher_xform.cipher.key.data =
                                        test_vector->cipher_key.data;
                        cipher_xform.cipher.key.length =
                                        test_vector->cipher_key.length;
                        cipher_xform.cipher.iv.length =
                                        test_vector->cipher_iv.length;
                } else {
                        cipher_xform.cipher.key.data = NULL;
                        cipher_xform.cipher.key.length = 0;
                        cipher_xform.cipher.iv.length = 0;
                }

                /*
                 * auth
                 */
                auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
                auth_xform.next = NULL;
                auth_xform.auth.algo = options->auth_algo;
                auth_xform.auth.op = options->auth_op;
                auth_xform.auth.iv.offset = iv_offset +
                        cipher_xform.cipher.iv.length;

                /* auth algorithm other than null */
                if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
                        auth_xform.auth.digest_length = options->digest_sz;
                        auth_xform.auth.iv.length = test_vector->auth_iv.length;
                        auth_xform.auth.key.length =
                                        test_vector->auth_key.length;
                        auth_xform.auth.key.data =
                                        test_vector->auth_key.data;
                } else {
                        auth_xform.auth.digest_length = 0;
                        auth_xform.auth.key.length = 0;
                        auth_xform.auth.key.data = NULL;
                        auth_xform.auth.iv.length = 0;
                }

                /* cipher then auth */
                if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
                        cipher_xform.next = &auth_xform;
                        /* create crypto session */
                        rte_cryptodev_sym_session_init(dev_id,
                                        sess, &cipher_xform, priv_mp);
                } else { /* auth then cipher */
                        auth_xform.next = &cipher_xform;
                        /* create crypto session */
                        rte_cryptodev_sym_session_init(dev_id,
                                        sess, &auth_xform, priv_mp);
                }
        } else { /* options->op_type == CPERF_AEAD */
                aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
                aead_xform.next = NULL;
                aead_xform.aead.algo = options->aead_algo;
                aead_xform.aead.op = options->aead_op;
                aead_xform.aead.iv.offset = iv_offset;

                aead_xform.aead.key.data =
                                        test_vector->aead_key.data;
                aead_xform.aead.key.length =
                                        test_vector->aead_key.length;
                aead_xform.aead.iv.length = test_vector->aead_iv.length;

                aead_xform.aead.digest_length = options->digest_sz;
                aead_xform.aead.aad_length =
                                        options->aead_aad_sz;

                /* Create crypto session */
                rte_cryptodev_sym_session_init(dev_id,
                                        sess, &aead_xform, priv_mp);
        }

        return sess;
}

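/*
 * Map the configured op type (and NULL algorithms) to the session-create and
 * populate_ops callbacks used by the test runners.
 */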
int
cperf_get_op_functions(const struct cperf_options *options,
                struct cperf_op_fns *op_fns)
{
        memset(op_fns, 0, sizeof(struct cperf_op_fns));

        op_fns->sess_create = cperf_create_session;

        if (options->op_type == CPERF_AEAD) {
                op_fns->populate_ops = cperf_set_ops_aead;
                return 0;
        }

        if (options->op_type == CPERF_AUTH_THEN_CIPHER
                        || options->op_type == CPERF_CIPHER_THEN_AUTH) {
                op_fns->populate_ops = cperf_set_ops_cipher_auth;
                return 0;
        }
        if (options->op_type == CPERF_AUTH_ONLY) {
                if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
                        op_fns->populate_ops = cperf_set_ops_null_auth;
                else
                        op_fns->populate_ops = cperf_set_ops_auth;
                return 0;
        }
        if (options->op_type == CPERF_CIPHER_ONLY) {
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
                        op_fns->populate_ops = cperf_set_ops_null_cipher;
                else
                        op_fns->populate_ops = cperf_set_ops_cipher;
                return 0;
        }
#ifdef RTE_LIBRTE_SECURITY
        if (options->op_type == CPERF_PDCP) {
                op_fns->populate_ops = cperf_set_ops_security;
                return 0;
        }
#endif
        return -1;
}