app/crypto-perf: set AAD after the crypto operation
[dpdk.git] / app/test-crypto-perf/cperf_ops.c
/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <rte_cryptodev.h>

#include "cperf_ops.h"
#include "cperf_test_vectors.h"

static int
cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector __rte_unused,
                uint16_t iv_offset __rte_unused)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* cipher parameters */
                sym_op->cipher.data.length = options->test_buffer_size;
                sym_op->cipher.data.offset = 0;
        }

        return 0;
}

static int
cperf_set_ops_null_auth(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector __rte_unused,
                uint16_t iv_offset __rte_unused)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* auth parameters */
                sym_op->auth.data.length = options->test_buffer_size;
                sym_op->auth.data.offset = 0;
        }

        return 0;
}

static int
cperf_set_ops_cipher(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector,
                uint16_t iv_offset)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* cipher parameters */
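                /* SNOW 3G, KASUMI and ZUC ciphers expect the data length in bits */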
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
                        sym_op->cipher.data.length = options->test_buffer_size << 3;
                else
                        sym_op->cipher.data.length = options->test_buffer_size;

                sym_op->cipher.data.offset = 0;
        }

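        /* IV contents only matter when the output is checked against the test vector */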
        if (options->test == CPERF_TEST_TYPE_VERIFY) {
                for (i = 0; i < nb_ops; i++) {
                        uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                        uint8_t *, iv_offset);

                        memcpy(iv_ptr, test_vector->cipher_iv.data,
                                        test_vector->cipher_iv.length);

                }
        }

        return 0;
}

static int
cperf_set_ops_auth(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector,
                uint16_t iv_offset)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                if (test_vector->auth_iv.length) {
                        uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                                                uint8_t *,
                                                                iv_offset);
                        memcpy(iv_ptr, test_vector->auth_iv.data,
                                        test_vector->auth_iv.length);
                }

                /* authentication parameters */
                if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
                        sym_op->auth.digest.data = test_vector->digest.data;
                        sym_op->auth.digest.phys_addr =
                                        test_vector->digest.phys_addr;
                } else {

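                        /*
                         * The digest is appended right after the test data;
                         * walk the segment chain to find the mbuf and offset
                         * where it will be written.
                         */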
                        uint32_t offset = options->test_buffer_size;
                        struct rte_mbuf *buf, *tbuf;

                        if (options->out_of_place) {
                                buf = bufs_out[i];
                        } else {
                                tbuf = bufs_in[i];
                                while ((tbuf->next != NULL) &&
                                                (offset >= tbuf->data_len)) {
                                        offset -= tbuf->data_len;
                                        tbuf = tbuf->next;
                                }
                                buf = tbuf;
                        }

                        sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
                                        uint8_t *, offset);
                        sym_op->auth.digest.phys_addr =
                                        rte_pktmbuf_mtophys_offset(buf, offset);

                }

                if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
                        sym_op->auth.data.length = options->test_buffer_size << 3;
                else
                        sym_op->auth.data.length = options->test_buffer_size;

                sym_op->auth.data.offset = 0;
        }

        if (options->test == CPERF_TEST_TYPE_VERIFY) {
                if (test_vector->auth_iv.length) {
                        for (i = 0; i < nb_ops; i++) {
                                uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                                uint8_t *, iv_offset);

                                memcpy(iv_ptr, test_vector->auth_iv.data,
                                                test_vector->auth_iv.length);
                        }
                }
        }
        return 0;
}

static int
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector,
                uint16_t iv_offset)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* cipher parameters */
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
                        sym_op->cipher.data.length = options->test_buffer_size << 3;
                else
                        sym_op->cipher.data.length = options->test_buffer_size;

                sym_op->cipher.data.offset = 0;

                /* authentication parameters */
                if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
                        sym_op->auth.digest.data = test_vector->digest.data;
                        sym_op->auth.digest.phys_addr =
                                        test_vector->digest.phys_addr;
                } else {

                        uint32_t offset = options->test_buffer_size;
                        struct rte_mbuf *buf, *tbuf;

                        if (options->out_of_place) {
                                buf = bufs_out[i];
                        } else {
                                tbuf = bufs_in[i];
                                while ((tbuf->next != NULL) &&
                                                (offset >= tbuf->data_len)) {
                                        offset -= tbuf->data_len;
                                        tbuf = tbuf->next;
                                }
                                buf = tbuf;
                        }

                        sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
                                        uint8_t *, offset);
                        sym_op->auth.digest.phys_addr =
                                        rte_pktmbuf_mtophys_offset(buf, offset);
                }

                if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
                        sym_op->auth.data.length = options->test_buffer_size << 3;
                else
                        sym_op->auth.data.length = options->test_buffer_size;

                sym_op->auth.data.offset = 0;
        }

        if (options->test == CPERF_TEST_TYPE_VERIFY) {
                for (i = 0; i < nb_ops; i++) {
                        uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                        uint8_t *, iv_offset);

                        memcpy(iv_ptr, test_vector->cipher_iv.data,
                                        test_vector->cipher_iv.length);
                        if (test_vector->auth_iv.length) {
                                /*
                                 * The auth IV goes in the op private area,
                                 * right after the cipher IV
                                 */
                                iv_ptr += test_vector->cipher_iv.length;
                                memcpy(iv_ptr, test_vector->auth_iv.data,
                                                test_vector->auth_iv.length);
                        }
                }

        }

        return 0;
}

static int
cperf_set_ops_aead(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector,
                uint16_t iv_offset)
{
        uint16_t i;
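        /*
         * AAD is placed in the op private area right after the IV,
         * with the IV region padded to a 16-byte boundary.
         */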
        uint16_t aad_offset = iv_offset +
                        RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* AEAD parameters */
                sym_op->aead.data.length = options->test_buffer_size;
                sym_op->aead.data.offset = 0;

                sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
                                        uint8_t *, aad_offset);
                sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
                                        aad_offset);

                if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
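                        /* decryption verifies against the precomputed digest from the test vector */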
                        sym_op->aead.digest.data = test_vector->digest.data;
                        sym_op->aead.digest.phys_addr =
                                        test_vector->digest.phys_addr;
                } else {

                        uint32_t offset = sym_op->aead.data.length +
                                                sym_op->aead.data.offset;
                        struct rte_mbuf *buf, *tbuf;

                        if (options->out_of_place) {
                                buf = bufs_out[i];
                        } else {
                                tbuf = bufs_in[i];
                                while ((tbuf->next != NULL) &&
                                                (offset >= tbuf->data_len)) {
                                        offset -= tbuf->data_len;
                                        tbuf = tbuf->next;
                                }
                                buf = tbuf;
                        }

                        sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
                                        uint8_t *, offset);
                        sym_op->aead.digest.phys_addr =
                                        rte_pktmbuf_mtophys_offset(buf, offset);
                }
        }

        if (options->test == CPERF_TEST_TYPE_VERIFY) {
                for (i = 0; i < nb_ops; i++) {
                        uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                        uint8_t *, iv_offset);

                        memcpy(iv_ptr, test_vector->aead_iv.data,
                                        test_vector->aead_iv.length);

                        /* Copy AAD after the IV */
                        memcpy(ops[i]->sym->aead.aad.data,
                                test_vector->aad.data,
                                test_vector->aad.length);
                }
        }

        return 0;
}

static struct rte_cryptodev_sym_session *
cperf_create_session(struct rte_mempool *sess_mp,
        uint8_t dev_id,
        const struct cperf_options *options,
        const struct cperf_test_vector *test_vector,
        uint16_t iv_offset)
{
        struct rte_crypto_sym_xform cipher_xform;
        struct rte_crypto_sym_xform auth_xform;
        struct rte_crypto_sym_xform aead_xform;
        struct rte_cryptodev_sym_session *sess = NULL;

        sess = rte_cryptodev_sym_session_create(sess_mp);
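        /* the session is created empty and initialised below with the xform chain for the selected op type */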
        /*
         * cipher only
         */
        if (options->op_type == CPERF_CIPHER_ONLY) {
                cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
                cipher_xform.next = NULL;
                cipher_xform.cipher.algo = options->cipher_algo;
                cipher_xform.cipher.op = options->cipher_op;
                cipher_xform.cipher.iv.offset = iv_offset;

                /* cipher different than null */
                if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
                        cipher_xform.cipher.key.data =
                                        test_vector->cipher_key.data;
                        cipher_xform.cipher.key.length =
                                        test_vector->cipher_key.length;
                        cipher_xform.cipher.iv.length =
                                        test_vector->cipher_iv.length;
                } else {
                        cipher_xform.cipher.key.data = NULL;
                        cipher_xform.cipher.key.length = 0;
                        cipher_xform.cipher.iv.length = 0;
                }
                /* create crypto session */
                rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
                                sess_mp);
        /*
         *  auth only
         */
        } else if (options->op_type == CPERF_AUTH_ONLY) {
                auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
                auth_xform.next = NULL;
                auth_xform.auth.algo = options->auth_algo;
                auth_xform.auth.op = options->auth_op;

                /* auth different than null */
                if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
                        auth_xform.auth.digest_length =
                                        options->digest_sz;
                        auth_xform.auth.key.length =
                                        test_vector->auth_key.length;
                        auth_xform.auth.key.data = test_vector->auth_key.data;
                        auth_xform.auth.iv.length =
                                        test_vector->auth_iv.length;
                } else {
                        auth_xform.auth.digest_length = 0;
                        auth_xform.auth.key.length = 0;
                        auth_xform.auth.key.data = NULL;
                        auth_xform.auth.iv.length = 0;
                }
                /* create crypto session */
                rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
                                sess_mp);
        /*
         * cipher and auth
         */
        } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
                        || options->op_type == CPERF_AUTH_THEN_CIPHER) {
                /*
                 * cipher
                 */
                cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
                cipher_xform.next = NULL;
                cipher_xform.cipher.algo = options->cipher_algo;
                cipher_xform.cipher.op = options->cipher_op;
                cipher_xform.cipher.iv.offset = iv_offset;

                /* cipher different than null */
                if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
                        cipher_xform.cipher.key.data =
                                        test_vector->cipher_key.data;
                        cipher_xform.cipher.key.length =
                                        test_vector->cipher_key.length;
                        cipher_xform.cipher.iv.length =
                                        test_vector->cipher_iv.length;
                } else {
                        cipher_xform.cipher.key.data = NULL;
                        cipher_xform.cipher.key.length = 0;
                        cipher_xform.cipher.iv.length = 0;
                }

                /*
                 * auth
                 */
                auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
                auth_xform.next = NULL;
                auth_xform.auth.algo = options->auth_algo;
                auth_xform.auth.op = options->auth_op;

                /* auth different than null */
                if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
                        auth_xform.auth.digest_length = options->digest_sz;
                        auth_xform.auth.iv.length = test_vector->auth_iv.length;
                        auth_xform.auth.key.length =
                                        test_vector->auth_key.length;
                        auth_xform.auth.key.data =
                                        test_vector->auth_key.data;
                } else {
                        auth_xform.auth.digest_length = 0;
                        auth_xform.auth.key.length = 0;
                        auth_xform.auth.key.data = NULL;
                        auth_xform.auth.iv.length = 0;
                }

                /* cipher then auth */
                if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
                        cipher_xform.next = &auth_xform;
                        /* create crypto session */
                        rte_cryptodev_sym_session_init(dev_id,
                                        sess, &cipher_xform, sess_mp);
                } else { /* auth then cipher */
                        auth_xform.next = &cipher_xform;
                        /* create crypto session */
                        rte_cryptodev_sym_session_init(dev_id,
                                        sess, &auth_xform, sess_mp);
                }
        } else { /* options->op_type == CPERF_AEAD */
                aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
                aead_xform.next = NULL;
                aead_xform.aead.algo = options->aead_algo;
                aead_xform.aead.op = options->aead_op;
                aead_xform.aead.iv.offset = iv_offset;

                aead_xform.aead.key.data =
                                        test_vector->aead_key.data;
                aead_xform.aead.key.length =
                                        test_vector->aead_key.length;
                aead_xform.aead.iv.length = test_vector->aead_iv.length;

                aead_xform.aead.digest_length = options->digest_sz;
                aead_xform.aead.aad_length =
                                        options->aead_aad_sz;

                /* Create crypto session */
                rte_cryptodev_sym_session_init(dev_id,
                                        sess, &aead_xform, sess_mp);
        }

        return sess;
}

int
cperf_get_op_functions(const struct cperf_options *options,
                struct cperf_op_fns *op_fns)
{
        memset(op_fns, 0, sizeof(struct cperf_op_fns));

        op_fns->sess_create = cperf_create_session;

        if (options->op_type == CPERF_AEAD) {
                op_fns->populate_ops = cperf_set_ops_aead;
                return 0;
        }

        if (options->op_type == CPERF_AUTH_THEN_CIPHER
                        || options->op_type == CPERF_CIPHER_THEN_AUTH) {
                op_fns->populate_ops = cperf_set_ops_cipher_auth;
                return 0;
        }
        if (options->op_type == CPERF_AUTH_ONLY) {
                if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
                        op_fns->populate_ops = cperf_set_ops_null_auth;
                else
                        op_fns->populate_ops = cperf_set_ops_auth;
                return 0;
        }
        if (options->op_type == CPERF_CIPHER_ONLY) {
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
                        op_fns->populate_ops = cperf_set_ops_null_cipher;
                else
                        op_fns->populate_ops = cperf_set_ops_cipher;
                return 0;
        }

        return -1;
}