app/crypto-perf: parse segment size
[dpdk.git] / app / test-crypto-perf / cperf_ops.c
1 /*-
2  *   BSD LICENSE
3  *
4  *   Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
5  *
6  *   Redistribution and use in source and binary forms, with or without
7  *   modification, are permitted provided that the following conditions
8  *   are met:
9  *
10  *     * Redistributions of source code must retain the above copyright
11  *       notice, this list of conditions and the following disclaimer.
12  *     * Redistributions in binary form must reproduce the above copyright
13  *       notice, this list of conditions and the following disclaimer in
14  *       the documentation and/or other materials provided with the
15  *       distribution.
16  *     * Neither the name of Intel Corporation nor the names of its
17  *       contributors may be used to endorse or promote products derived
18  *       from this software without specific prior written permission.
19  *
20  *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21  *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22  *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
23  *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
24  *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
25  *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
26  *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
27  *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
28  *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
29  *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30  *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31  */
32
33 #include <rte_cryptodev.h>
34
35 #include "cperf_ops.h"
36 #include "cperf_test_vectors.h"
37
38 static int
39 cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
40                 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
41                 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
42                 const struct cperf_options *options,
43                 const struct cperf_test_vector *test_vector __rte_unused,
44                 uint16_t iv_offset __rte_unused)
45 {
46         uint16_t i;
47
48         for (i = 0; i < nb_ops; i++) {
49                 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
50
51                 rte_crypto_op_attach_sym_session(ops[i], sess);
52
53                 sym_op->m_src = bufs_in[i];
54                 sym_op->m_dst = bufs_out[i];
55
56                 /* cipher parameters */
57                 sym_op->cipher.data.length = options->test_buffer_size;
58                 sym_op->cipher.data.offset = 0;
59         }
60
61         return 0;
62 }
63
64 static int
65 cperf_set_ops_null_auth(struct rte_crypto_op **ops,
66                 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
67                 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
68                 const struct cperf_options *options,
69                 const struct cperf_test_vector *test_vector __rte_unused,
70                 uint16_t iv_offset __rte_unused)
71 {
72         uint16_t i;
73
74         for (i = 0; i < nb_ops; i++) {
75                 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
76
77                 rte_crypto_op_attach_sym_session(ops[i], sess);
78
79                 sym_op->m_src = bufs_in[i];
80                 sym_op->m_dst = bufs_out[i];
81
82                 /* auth parameters */
83                 sym_op->auth.data.length = options->test_buffer_size;
84                 sym_op->auth.data.offset = 0;
85         }
86
87         return 0;
88 }
89
90 static int
91 cperf_set_ops_cipher(struct rte_crypto_op **ops,
92                 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
93                 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
94                 const struct cperf_options *options,
95                 const struct cperf_test_vector *test_vector,
96                 uint16_t iv_offset)
97 {
98         uint16_t i;
99
100         for (i = 0; i < nb_ops; i++) {
101                 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
102
103                 rte_crypto_op_attach_sym_session(ops[i], sess);
104
105                 sym_op->m_src = bufs_in[i];
106                 sym_op->m_dst = bufs_out[i];
107
108                 /* cipher parameters */
109                 if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
110                                 options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
111                                 options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
112                         sym_op->cipher.data.length = options->test_buffer_size << 3;
113                 else
114                         sym_op->cipher.data.length = options->test_buffer_size;
115
116                 sym_op->cipher.data.offset = 0;
117         }
118
119         if (options->test == CPERF_TEST_TYPE_VERIFY) {
120                 for (i = 0; i < nb_ops; i++) {
121                         uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
122                                         uint8_t *, iv_offset);
123
124                         memcpy(iv_ptr, test_vector->cipher_iv.data,
125                                         test_vector->cipher_iv.length);
126
127                 }
128         }
129
130         return 0;
131 }
132
133 static int
134 cperf_set_ops_auth(struct rte_crypto_op **ops,
135                 struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
136                 uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
137                 const struct cperf_options *options,
138                 const struct cperf_test_vector *test_vector,
139                 uint16_t iv_offset)
140 {
141         uint16_t i;
142
143         for (i = 0; i < nb_ops; i++) {
144                 struct rte_crypto_sym_op *sym_op = ops[i]->sym;
145
146                 rte_crypto_op_attach_sym_session(ops[i], sess);
147
148                 sym_op->m_src = bufs_in[i];
149                 sym_op->m_dst = bufs_out[i];
150
151                 if (test_vector->auth_iv.length) {
152                         uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
153                                                                 uint8_t *,
154                                                                 iv_offset);
155                         memcpy(iv_ptr, test_vector->auth_iv.data,
156                                         test_vector->auth_iv.length);
157                 }
158
159                 /* authentication parameters */
160                 if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
161                         sym_op->auth.digest.data = test_vector->digest.data;
162                         sym_op->auth.digest.phys_addr =
163                                         test_vector->digest.phys_addr;
164                 } else {
165
166                         uint32_t offset = options->test_buffer_size;
167                         struct rte_mbuf *buf, *tbuf;
168
169                         if (options->out_of_place) {
170                                 buf =  bufs_out[i];
171                         } else {
172                                 tbuf =  bufs_in[i];
173                                 while ((tbuf->next != NULL) &&
174                                                 (offset >= tbuf->data_len)) {
175                                         offset -= tbuf->data_len;
176                                         tbuf = tbuf->next;
177                                 }
178                                 /*
179                                  * If there is not enough room in segment,
180                                  * place the digest in the next segment
181                                  */
182                                 if ((tbuf->data_len - offset) < options->digest_sz) {
183                                         tbuf = tbuf->next;
184                                         offset = 0;
185                                 }
186                                 buf = tbuf;
187                         }
188
189                         sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
190                                         uint8_t *, offset);
191                         sym_op->auth.digest.phys_addr =
192                                         rte_pktmbuf_mtophys_offset(buf, offset);
193
194                 }
195
196                 if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
197                                 options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
198                                 options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
199                         sym_op->auth.data.length = options->test_buffer_size << 3;
200                 else
201                         sym_op->auth.data.length = options->test_buffer_size;
202
203                 sym_op->auth.data.offset = 0;
204         }
205
206         if (options->test == CPERF_TEST_TYPE_VERIFY) {
207                 if (test_vector->auth_iv.length) {
208                         for (i = 0; i < nb_ops; i++) {
209                                 uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
210                                                 uint8_t *, iv_offset);
211
212                                 memcpy(iv_ptr, test_vector->auth_iv.data,
213                                                 test_vector->auth_iv.length);
214                         }
215                 }
216         }
217         return 0;
218 }
219
/*
 * Populate crypto ops for a chained cipher+auth test.
 *
 * Each op is attached to the session and given its source/destination
 * mbufs, cipher and auth regions spanning the whole test buffer, and a
 * digest location. For verify-type tests the reference cipher IV (and
 * auth IV, when the vector has one) is copied into each op's private IV
 * area at iv_offset.
 */
static int
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* cipher parameters */
		/* SNOW3G/KASUMI/ZUC express the cipher length in bits */
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			sym_op->cipher.data.length = options->test_buffer_size << 3;
		else
			sym_op->cipher.data.length = options->test_buffer_size;

		sym_op->cipher.data.offset = 0;

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			/* Verify against the precomputed reference digest */
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			/* Generate: place the digest just after the payload */
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf =  bufs_out[i];
			} else {
				/* Walk the chain to the segment holding
				 * byte `offset` of the payload. */
				tbuf =  bufs_in[i];
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 *
				 * NOTE(review): assumes a next segment exists
				 * here, otherwise tbuf becomes NULL — confirm
				 * the mbuf allocator reserves digest_sz room.
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_mtophys_offset(buf, offset);
		}

		/* SNOW3G/KASUMI/ZUC express the auth length in bits */
		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length = options->test_buffer_size << 3;
		else
			sym_op->auth.data.length = options->test_buffer_size;

		sym_op->auth.data.offset = 0;
	}

	/* Verify tests check against the reference vector, so load its IVs */
	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
			if (test_vector->auth_iv.length) {
				/*
				 * Copy IV after the crypto operation and
				 * the cipher IV
				 */
				iv_ptr += test_vector->cipher_iv.length;
				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}

	}

	return 0;
}
316
/*
 * Populate crypto ops for an AEAD test.
 *
 * Each op is attached to the session and given its source/destination
 * mbufs, an AEAD data region covering the test buffer, a pointer to the
 * AAD area (placed in the op's private area right after the 16-byte
 * aligned IV) and a digest location. For verify-type tests the reference
 * AEAD IV and AAD are copied into each op.
 */
static int
cperf_set_ops_aead(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;
	/* AAD lives after the IV, which is padded to a 16-byte boundary */
	uint16_t aad_offset = iv_offset +
			RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* AEAD parameters */
		sym_op->aead.data.length = options->test_buffer_size;
		sym_op->aead.data.offset = 0;

		sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, aad_offset);
		sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
					aad_offset);

		if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
			/* Decrypt verifies against the reference digest */
			sym_op->aead.digest.data = test_vector->digest.data;
			sym_op->aead.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			/* Encrypt: place the digest just after the payload */
			uint32_t offset = sym_op->aead.data.length +
						sym_op->aead.data.offset;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf =  bufs_out[i];
			} else {
				/* Walk the chain to the segment holding
				 * byte `offset` of the payload. */
				tbuf =  bufs_in[i];
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 *
				 * NOTE(review): assumes a next segment exists
				 * here, otherwise tbuf becomes NULL — confirm
				 * the mbuf allocator reserves digest_sz room.
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->aead.digest.phys_addr =
					rte_pktmbuf_mtophys_offset(buf, offset);
		}
	}

	/* Verify tests check against the reference vector: load IV and AAD */
	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->aead_iv.data,
					test_vector->aead_iv.length);

			/* Copy AAD after the IV */
			memcpy(ops[i]->sym->aead.aad.data,
				test_vector->aad.data,
				test_vector->aad.length);
		}
	}

	return 0;
}
400
401 static struct rte_cryptodev_sym_session *
402 cperf_create_session(struct rte_mempool *sess_mp,
403         uint8_t dev_id,
404         const struct cperf_options *options,
405         const struct cperf_test_vector *test_vector,
406         uint16_t iv_offset)
407 {
408         struct rte_crypto_sym_xform cipher_xform;
409         struct rte_crypto_sym_xform auth_xform;
410         struct rte_crypto_sym_xform aead_xform;
411         struct rte_cryptodev_sym_session *sess = NULL;
412
413         sess = rte_cryptodev_sym_session_create(sess_mp);
414         /*
415          * cipher only
416          */
417         if (options->op_type == CPERF_CIPHER_ONLY) {
418                 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
419                 cipher_xform.next = NULL;
420                 cipher_xform.cipher.algo = options->cipher_algo;
421                 cipher_xform.cipher.op = options->cipher_op;
422                 cipher_xform.cipher.iv.offset = iv_offset;
423
424                 /* cipher different than null */
425                 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
426                         cipher_xform.cipher.key.data =
427                                         test_vector->cipher_key.data;
428                         cipher_xform.cipher.key.length =
429                                         test_vector->cipher_key.length;
430                         cipher_xform.cipher.iv.length =
431                                         test_vector->cipher_iv.length;
432                 } else {
433                         cipher_xform.cipher.key.data = NULL;
434                         cipher_xform.cipher.key.length = 0;
435                         cipher_xform.cipher.iv.length = 0;
436                 }
437                 /* create crypto session */
438                 rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
439                                 sess_mp);
440         /*
441          *  auth only
442          */
443         } else if (options->op_type == CPERF_AUTH_ONLY) {
444                 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
445                 auth_xform.next = NULL;
446                 auth_xform.auth.algo = options->auth_algo;
447                 auth_xform.auth.op = options->auth_op;
448
449                 /* auth different than null */
450                 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
451                         auth_xform.auth.digest_length =
452                                         options->digest_sz;
453                         auth_xform.auth.key.length =
454                                         test_vector->auth_key.length;
455                         auth_xform.auth.key.data = test_vector->auth_key.data;
456                         auth_xform.auth.iv.length =
457                                         test_vector->auth_iv.length;
458                 } else {
459                         auth_xform.auth.digest_length = 0;
460                         auth_xform.auth.key.length = 0;
461                         auth_xform.auth.key.data = NULL;
462                         auth_xform.auth.iv.length = 0;
463                 }
464                 /* create crypto session */
465                 rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
466                                 sess_mp);
467         /*
468          * cipher and auth
469          */
470         } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
471                         || options->op_type == CPERF_AUTH_THEN_CIPHER) {
472                 /*
473                  * cipher
474                  */
475                 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
476                 cipher_xform.next = NULL;
477                 cipher_xform.cipher.algo = options->cipher_algo;
478                 cipher_xform.cipher.op = options->cipher_op;
479                 cipher_xform.cipher.iv.offset = iv_offset;
480
481                 /* cipher different than null */
482                 if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
483                         cipher_xform.cipher.key.data =
484                                         test_vector->cipher_key.data;
485                         cipher_xform.cipher.key.length =
486                                         test_vector->cipher_key.length;
487                         cipher_xform.cipher.iv.length =
488                                         test_vector->cipher_iv.length;
489                 } else {
490                         cipher_xform.cipher.key.data = NULL;
491                         cipher_xform.cipher.key.length = 0;
492                         cipher_xform.cipher.iv.length = 0;
493                 }
494
495                 /*
496                  * auth
497                  */
498                 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
499                 auth_xform.next = NULL;
500                 auth_xform.auth.algo = options->auth_algo;
501                 auth_xform.auth.op = options->auth_op;
502
503                 /* auth different than null */
504                 if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
505                         auth_xform.auth.digest_length = options->digest_sz;
506                         auth_xform.auth.iv.length = test_vector->auth_iv.length;
507                         auth_xform.auth.key.length =
508                                         test_vector->auth_key.length;
509                         auth_xform.auth.key.data =
510                                         test_vector->auth_key.data;
511                 } else {
512                         auth_xform.auth.digest_length = 0;
513                         auth_xform.auth.key.length = 0;
514                         auth_xform.auth.key.data = NULL;
515                         auth_xform.auth.iv.length = 0;
516                 }
517
518                 /* cipher then auth */
519                 if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
520                         cipher_xform.next = &auth_xform;
521                         /* create crypto session */
522                         rte_cryptodev_sym_session_init(dev_id,
523                                         sess, &cipher_xform, sess_mp);
524                 } else { /* auth then cipher */
525                         auth_xform.next = &cipher_xform;
526                         /* create crypto session */
527                         rte_cryptodev_sym_session_init(dev_id,
528                                         sess, &auth_xform, sess_mp);
529                 }
530         } else { /* options->op_type == CPERF_AEAD */
531                 aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
532                 aead_xform.next = NULL;
533                 aead_xform.aead.algo = options->aead_algo;
534                 aead_xform.aead.op = options->aead_op;
535                 aead_xform.aead.iv.offset = iv_offset;
536
537                 aead_xform.aead.key.data =
538                                         test_vector->aead_key.data;
539                 aead_xform.aead.key.length =
540                                         test_vector->aead_key.length;
541                 aead_xform.aead.iv.length = test_vector->aead_iv.length;
542
543                 aead_xform.aead.digest_length = options->digest_sz;
544                 aead_xform.aead.aad_length =
545                                         options->aead_aad_sz;
546
547                 /* Create crypto session */
548                 rte_cryptodev_sym_session_init(dev_id,
549                                         sess, &aead_xform, sess_mp);
550         }
551
552         return sess;
553 }
554
555 int
556 cperf_get_op_functions(const struct cperf_options *options,
557                 struct cperf_op_fns *op_fns)
558 {
559         memset(op_fns, 0, sizeof(struct cperf_op_fns));
560
561         op_fns->sess_create = cperf_create_session;
562
563         if (options->op_type == CPERF_AEAD) {
564                 op_fns->populate_ops = cperf_set_ops_aead;
565                 return 0;
566         }
567
568         if (options->op_type == CPERF_AUTH_THEN_CIPHER
569                         || options->op_type == CPERF_CIPHER_THEN_AUTH) {
570                 op_fns->populate_ops = cperf_set_ops_cipher_auth;
571                 return 0;
572         }
573         if (options->op_type == CPERF_AUTH_ONLY) {
574                 if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
575                         op_fns->populate_ops = cperf_set_ops_null_auth;
576                 else
577                         op_fns->populate_ops = cperf_set_ops_auth;
578                 return 0;
579         }
580         if (options->op_type == CPERF_CIPHER_ONLY) {
581                 if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
582                         op_fns->populate_ops = cperf_set_ops_null_cipher;
583                 else
584                         op_fns->populate_ops = cperf_set_ops_cipher;
585                 return 0;
586         }
587
588         return -1;
589 }