cryptodev: remove digest length from crypto op
[dpdk.git] app/test-crypto-perf/cperf_ops.c
/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <rte_cryptodev.h>

#include "cperf_ops.h"
#include "cperf_test_vectors.h"

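/*
 * Note (added commentary): the cperf_set_ops_*() helpers below share one
 * signature. Each populates a burst of nb_ops crypto operations with the
 * session, source/destination mbufs and the cipher/auth offsets and lengths
 * derived from the test options and test vector; they are intended to be
 * called once per burst by the test runners.
 */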
static int
cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector __rte_unused,
                uint16_t iv_offset __rte_unused)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* cipher parameters */
                sym_op->cipher.data.length = options->test_buffer_size;
                sym_op->cipher.data.offset = 0;
        }

        return 0;
}

static int
cperf_set_ops_null_auth(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector __rte_unused,
                uint16_t iv_offset __rte_unused)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* auth parameters */
                sym_op->auth.data.length = options->test_buffer_size;
                sym_op->auth.data.offset = 0;
        }

        return 0;
}

static int
cperf_set_ops_cipher(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector,
                uint16_t iv_offset)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* cipher parameters */
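                /*
                 * Added note: SNOW3G/KASUMI/ZUC take the cipher length in
                 * bits, so the buffer size (in bytes) is shifted left by 3;
                 * the auth setups below apply the same byte-to-bit
                 * conversion.
                 */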
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
                        sym_op->cipher.data.length = options->test_buffer_size << 3;
                else
                        sym_op->cipher.data.length = options->test_buffer_size;

                sym_op->cipher.data.offset = 0;
        }

        if (options->test == CPERF_TEST_TYPE_VERIFY) {
                for (i = 0; i < nb_ops; i++) {
                        uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                        uint8_t *, iv_offset);

                        memcpy(iv_ptr, test_vector->cipher_iv.data,
                                        test_vector->cipher_iv.length);

                }
        }

        return 0;
}

static int
cperf_set_ops_auth(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector,
                uint16_t iv_offset)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                if (test_vector->auth_iv.length) {
                        uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                                                uint8_t *,
                                                                iv_offset);
                        memcpy(iv_ptr, test_vector->auth_iv.data,
                                        test_vector->auth_iv.length);
                }

                /* authentication parameters */
                if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
                        sym_op->auth.digest.data = test_vector->digest.data;
                        sym_op->auth.digest.phys_addr =
                                        test_vector->digest.phys_addr;
                } else {

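                        /*
                         * Added note: for digest generation the tag is
                         * written right after the test payload; walk the
                         * segment chain so that, for chained mbufs, the
                         * offset lands in the segment that actually holds
                         * it.
                         */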
                        uint32_t offset = options->test_buffer_size;
                        struct rte_mbuf *buf, *tbuf;

                        if (options->out_of_place) {
                                buf = bufs_out[i];
                        } else {
                                tbuf = bufs_in[i];
                                while ((tbuf->next != NULL) &&
                                                (offset >= tbuf->data_len)) {
                                        offset -= tbuf->data_len;
                                        tbuf = tbuf->next;
                                }
                                buf = tbuf;
                        }

                        sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
                                        uint8_t *, offset);
                        sym_op->auth.digest.phys_addr =
                                        rte_pktmbuf_mtophys_offset(buf, offset);
                        sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
                        sym_op->auth.aad.data = test_vector->aad.data;

                }

                if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
                        sym_op->auth.data.length = options->test_buffer_size << 3;
                else
                        sym_op->auth.data.length = options->test_buffer_size;

                sym_op->auth.data.offset = 0;
        }

        if (options->test == CPERF_TEST_TYPE_VERIFY) {
                if (test_vector->auth_iv.length) {
                        for (i = 0; i < nb_ops; i++) {
                                uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                                uint8_t *, iv_offset);

                                memcpy(iv_ptr, test_vector->auth_iv.data,
                                                test_vector->auth_iv.length);
                        }
                }
        }
        return 0;
}

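/*
 * Added note: chained cipher + auth combines the cipher setup from
 * cperf_set_ops_cipher() with the auth setup from cperf_set_ops_auth().
 * For verify tests both IVs are copied into the op, the auth IV placed
 * directly after the cipher IV.
 */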
static int
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector,
                uint16_t iv_offset)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* cipher parameters */
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
                        sym_op->cipher.data.length = options->test_buffer_size << 3;
                else
                        sym_op->cipher.data.length = options->test_buffer_size;

                sym_op->cipher.data.offset = 0;

                /* authentication parameters */
                if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
                        sym_op->auth.digest.data = test_vector->digest.data;
                        sym_op->auth.digest.phys_addr =
                                        test_vector->digest.phys_addr;
                } else {

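                        /*
                         * Added note: digest placement mirrors
                         * cperf_set_ops_auth(): append the tag right after
                         * the payload, walking chained mbufs to find the
                         * right segment.
                         */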
                        uint32_t offset = options->test_buffer_size;
                        struct rte_mbuf *buf, *tbuf;

                        if (options->out_of_place) {
                                buf = bufs_out[i];
                        } else {
                                tbuf = bufs_in[i];
                                while ((tbuf->next != NULL) &&
                                                (offset >= tbuf->data_len)) {
                                        offset -= tbuf->data_len;
                                        tbuf = tbuf->next;
                                }
                                buf = tbuf;
                        }

                        sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
                                        uint8_t *, offset);
                        sym_op->auth.digest.phys_addr =
                                        rte_pktmbuf_mtophys_offset(buf, offset);
                        sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
                        sym_op->auth.aad.data = test_vector->aad.data;
                }

                if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
                        sym_op->auth.data.length = options->test_buffer_size << 3;
                else
                        sym_op->auth.data.length = options->test_buffer_size;

                sym_op->auth.data.offset = 0;
        }

        if (options->test == CPERF_TEST_TYPE_VERIFY) {
                for (i = 0; i < nb_ops; i++) {
                        uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                        uint8_t *, iv_offset);

                        memcpy(iv_ptr, test_vector->cipher_iv.data,
                                        test_vector->cipher_iv.length);
                        if (test_vector->auth_iv.length) {
                                /*
                                 * Copy IV after the crypto operation and
                                 * the cipher IV
                                 */
                                iv_ptr += test_vector->cipher_iv.length;
                                memcpy(iv_ptr, test_vector->auth_iv.data,
                                                test_vector->auth_iv.length);
                        }
                }

        }

        return 0;
}

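/*
 * Added note: AEAD here means AES-GCM, still expressed as a cipher + auth
 * pair in this version of the tool. The AAD sits at the start of the mbuf
 * and the ciphertext begins at the next 16-byte aligned offset after it,
 * which is why cipher.data.offset is rounded up with RTE_ALIGN_CEIL().
 */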
static int
cperf_set_ops_aead(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector,
                uint16_t iv_offset)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* cipher parameters */
                sym_op->cipher.data.length = options->test_buffer_size;
                sym_op->cipher.data.offset =
                                RTE_ALIGN_CEIL(options->auth_aad_sz, 16);

                sym_op->auth.aad.data = rte_pktmbuf_mtod(bufs_in[i], uint8_t *);
                sym_op->auth.aad.phys_addr = rte_pktmbuf_mtophys(bufs_in[i]);

                /* authentication parameters */
                if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
                        sym_op->auth.digest.data = test_vector->digest.data;
                        sym_op->auth.digest.phys_addr =
                                        test_vector->digest.phys_addr;
                } else {

                        uint32_t offset = sym_op->cipher.data.length +
                                                sym_op->cipher.data.offset;
                        struct rte_mbuf *buf, *tbuf;

                        if (options->out_of_place) {
                                buf = bufs_out[i];
                        } else {
                                tbuf = bufs_in[i];
                                while ((tbuf->next != NULL) &&
                                                (offset >= tbuf->data_len)) {
                                        offset -= tbuf->data_len;
                                        tbuf = tbuf->next;
                                }
                                buf = tbuf;
                        }

                        sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
                                        uint8_t *, offset);
                        sym_op->auth.digest.phys_addr =
                                        rte_pktmbuf_mtophys_offset(buf, offset);
                }

                sym_op->auth.data.length = options->test_buffer_size;
                sym_op->auth.data.offset = options->auth_aad_sz;
        }

        if (options->test == CPERF_TEST_TYPE_VERIFY) {
                for (i = 0; i < nb_ops; i++) {
                        uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                        uint8_t *, iv_offset);

                        memcpy(iv_ptr, test_vector->cipher_iv.data,
                                        test_vector->cipher_iv.length);
                }
        }

        return 0;
}

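/*
 * Added note: build the cipher and/or auth transforms on the stack from the
 * test options and test vector, then create the symmetric session with
 * rte_cryptodev_sym_session_create(). For chained operations the xform
 * order follows the operation type; for AES-GCM it follows the cipher
 * direction.
 */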
static struct rte_cryptodev_sym_session *
cperf_create_session(uint8_t dev_id,
        const struct cperf_options *options,
        const struct cperf_test_vector *test_vector,
        uint16_t iv_offset)
{
        struct rte_crypto_sym_xform cipher_xform;
        struct rte_crypto_sym_xform auth_xform;
        struct rte_cryptodev_sym_session *sess = NULL;

        /*
         * cipher only
         */
        if (options->op_type == CPERF_CIPHER_ONLY) {
                cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
                cipher_xform.next = NULL;
                cipher_xform.cipher.algo = options->cipher_algo;
                cipher_xform.cipher.op = options->cipher_op;
                cipher_xform.cipher.iv.offset = iv_offset;

                /* cipher different than null */
                if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
                        cipher_xform.cipher.key.data =
                                        test_vector->cipher_key.data;
                        cipher_xform.cipher.key.length =
                                        test_vector->cipher_key.length;
                        cipher_xform.cipher.iv.length =
                                        test_vector->cipher_iv.length;
                } else {
                        cipher_xform.cipher.key.data = NULL;
                        cipher_xform.cipher.key.length = 0;
                        cipher_xform.cipher.iv.length = 0;
                }
                /* create crypto session */
                sess = rte_cryptodev_sym_session_create(dev_id, &cipher_xform);
        /*
         * auth only
         */
        } else if (options->op_type == CPERF_AUTH_ONLY) {
                auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
                auth_xform.next = NULL;
                auth_xform.auth.algo = options->auth_algo;
                auth_xform.auth.op = options->auth_op;

                /* auth different than null */
                if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
                        auth_xform.auth.digest_length =
                                        options->auth_digest_sz;
                        auth_xform.auth.add_auth_data_length =
                                        options->auth_aad_sz;
                        auth_xform.auth.key.length =
                                        test_vector->auth_key.length;
                        auth_xform.auth.key.data = test_vector->auth_key.data;
                        auth_xform.auth.iv.length =
                                        test_vector->auth_iv.length;
                } else {
                        auth_xform.auth.digest_length = 0;
                        auth_xform.auth.add_auth_data_length = 0;
                        auth_xform.auth.key.length = 0;
                        auth_xform.auth.key.data = NULL;
                        auth_xform.auth.iv.length = 0;
                }
                /* create crypto session */
                sess = rte_cryptodev_sym_session_create(dev_id, &auth_xform);
        /*
         * cipher and auth
         */
        } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
                        || options->op_type == CPERF_AUTH_THEN_CIPHER
                        || options->op_type == CPERF_AEAD) {

                /*
                 * cipher
                 */
                cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
                cipher_xform.next = NULL;
                cipher_xform.cipher.algo = options->cipher_algo;
                cipher_xform.cipher.op = options->cipher_op;
                cipher_xform.cipher.iv.offset = iv_offset;

                /* cipher different than null */
                if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
                        cipher_xform.cipher.key.data =
                                        test_vector->cipher_key.data;
                        cipher_xform.cipher.key.length =
                                        test_vector->cipher_key.length;
                        cipher_xform.cipher.iv.length =
                                        test_vector->cipher_iv.length;
                } else {
                        cipher_xform.cipher.key.data = NULL;
                        cipher_xform.cipher.key.length = 0;
                        cipher_xform.cipher.iv.length = 0;
                }

                /*
                 * auth
                 */
                auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
                auth_xform.next = NULL;
                auth_xform.auth.algo = options->auth_algo;
                auth_xform.auth.op = options->auth_op;

                /* auth different than null */
                if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
                        auth_xform.auth.digest_length = options->auth_digest_sz;
                        auth_xform.auth.add_auth_data_length =
                                        options->auth_aad_sz;
                        /* auth options for aes gcm */
                        if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
                                options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM) {
                                auth_xform.auth.key.length = 0;
                                auth_xform.auth.key.data = NULL;
                                auth_xform.auth.iv.length = 0;
                        } else { /* auth options for others */
                                auth_xform.auth.key.length =
                                        test_vector->auth_key.length;
                                auth_xform.auth.key.data =
                                                test_vector->auth_key.data;
                                auth_xform.auth.iv.length =
                                                test_vector->auth_iv.length;
                        }
                } else {
                        auth_xform.auth.digest_length = 0;
                        auth_xform.auth.add_auth_data_length = 0;
                        auth_xform.auth.key.length = 0;
                        auth_xform.auth.key.data = NULL;
                        auth_xform.auth.iv.length = 0;
                }

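                /*
                 * Added note: for AES-GCM the two xforms are chained, with
                 * the cipher xform first for encryption and the auth xform
                 * first for decryption; for the other chained cases the
                 * order follows the requested op type.
                 */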
                /* create crypto session for aes gcm */
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM) {
                        if (options->cipher_op ==
                                        RTE_CRYPTO_CIPHER_OP_ENCRYPT) {
                                cipher_xform.next = &auth_xform;
                                /* create crypto session */
                                sess = rte_cryptodev_sym_session_create(dev_id,
                                        &cipher_xform);
                        } else { /* decrypt */
                                auth_xform.next = &cipher_xform;
                                /* create crypto session */
                                sess = rte_cryptodev_sym_session_create(dev_id,
                                        &auth_xform);
                        }
                } else { /* create crypto session for other */
                        /* cipher then auth */
                        if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
                                cipher_xform.next = &auth_xform;
                                /* create crypto session */
                                sess = rte_cryptodev_sym_session_create(dev_id,
                                                &cipher_xform);
                        } else { /* auth then cipher */
                                auth_xform.next = &cipher_xform;
                                /* create crypto session */
                                sess = rte_cryptodev_sym_session_create(dev_id,
                                                &auth_xform);
                        }
                }
        }
        return sess;
}

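/*
 * Added note: select the session-create and populate-ops callbacks for the
 * requested operation type and algorithms. Returns 0 on success, -1 for an
 * unsupported op type.
 */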
int
cperf_get_op_functions(const struct cperf_options *options,
                struct cperf_op_fns *op_fns)
{
        memset(op_fns, 0, sizeof(struct cperf_op_fns));

        op_fns->sess_create = cperf_create_session;

        if (options->op_type == CPERF_AEAD
                        || options->op_type == CPERF_AUTH_THEN_CIPHER
                        || options->op_type == CPERF_CIPHER_THEN_AUTH) {
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
                                options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM)
                        op_fns->populate_ops = cperf_set_ops_aead;
                else
                        op_fns->populate_ops = cperf_set_ops_cipher_auth;
                return 0;
        }
        if (options->op_type == CPERF_AUTH_ONLY) {
                if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
                        op_fns->populate_ops = cperf_set_ops_null_auth;
                else
                        op_fns->populate_ops = cperf_set_ops_auth;
                return 0;
        }
        if (options->op_type == CPERF_CIPHER_ONLY) {
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
                        op_fns->populate_ops = cperf_set_ops_null_cipher;
                else
                        op_fns->populate_ops = cperf_set_ops_cipher;
                return 0;
        }

        return -1;
}