cryptodev: move IV parameters to session
[dpdk.git] app/test-crypto-perf/cperf_ops.c
/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <string.h>	/* memcpy(), memset() */

#include <rte_cryptodev.h>

#include "cperf_ops.h"
#include "cperf_test_vectors.h"

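/*
 * Populate crypto operations for a NULL cipher session: attach the session
 * and the source/destination mbufs; no IV or key material is required, so
 * iv_offset and the test vector are unused.
 */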
static int
cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector __rte_unused,
                uint16_t iv_offset __rte_unused)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* cipher parameters */
                sym_op->cipher.data.length = options->test_buffer_size;
                sym_op->cipher.data.offset = 0;
        }

        return 0;
}

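/*
 * Populate crypto operations for a NULL auth session: same as the NULL
 * cipher case, but filling in the auth data length and offset instead.
 */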
static int
cperf_set_ops_null_auth(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector __rte_unused,
                uint16_t iv_offset __rte_unused)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* auth parameters */
                sym_op->auth.data.length = options->test_buffer_size;
                sym_op->auth.data.offset = 0;
        }

        return 0;
}

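/*
 * Populate cipher-only operations. Wireless algorithms (SNOW3G, KASUMI,
 * ZUC) take their data length in bits, hence the << 3. In verify mode the
 * test vector IV is copied into each operation at iv_offset, since the IV
 * data now lives in the operation while its offset/length belong to the
 * session.
 */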
static int
cperf_set_ops_cipher(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector,
                uint16_t iv_offset)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* cipher parameters */
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
                        sym_op->cipher.data.length = options->test_buffer_size << 3;
                else
                        sym_op->cipher.data.length = options->test_buffer_size;

                sym_op->cipher.data.offset = 0;
        }

        if (options->test == CPERF_TEST_TYPE_VERIFY) {
                for (i = 0; i < nb_ops; i++) {
                        uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                        uint8_t *, iv_offset);

                        memcpy(iv_ptr, test_vector->iv.data,
                                        test_vector->iv.length);
                }
        }

        return 0;
}

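/*
 * Populate auth-only operations. For verification the digest comes from
 * the test vector; otherwise it is written into the (possibly chained)
 * mbuf right after the test buffer, together with the AAD pointers.
 * Wireless auth algorithms again use bit lengths.
 */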
static int
cperf_set_ops_auth(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector,
                uint16_t iv_offset __rte_unused)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* authentication parameters */
                if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
                        sym_op->auth.digest.data = test_vector->digest.data;
                        sym_op->auth.digest.phys_addr =
                                        test_vector->digest.phys_addr;
                        sym_op->auth.digest.length = options->auth_digest_sz;
                } else {
                        uint32_t offset = options->test_buffer_size;
                        struct rte_mbuf *buf, *tbuf;

                        if (options->out_of_place) {
                                buf = bufs_out[i];
                        } else {
                                tbuf = bufs_in[i];
                                while ((tbuf->next != NULL) &&
                                                (offset >= tbuf->data_len)) {
                                        offset -= tbuf->data_len;
                                        tbuf = tbuf->next;
                                }
                                buf = tbuf;
                        }

                        sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
                                        uint8_t *, offset);
                        sym_op->auth.digest.phys_addr =
                                        rte_pktmbuf_mtophys_offset(buf, offset);
                        sym_op->auth.digest.length = options->auth_digest_sz;
                        sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
                        sym_op->auth.aad.data = test_vector->aad.data;
                        sym_op->auth.aad.length = options->auth_aad_sz;
                }

                if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
                        sym_op->auth.data.length = options->test_buffer_size << 3;
                else
                        sym_op->auth.data.length = options->test_buffer_size;

                sym_op->auth.data.offset = 0;
        }

        return 0;
}

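/*
 * Populate chained cipher + auth operations: a combination of the
 * cipher-only and auth-only setup above, plus the IV copy for verify tests.
 */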
static int
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector,
                uint16_t iv_offset)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* cipher parameters */
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
                        sym_op->cipher.data.length = options->test_buffer_size << 3;
                else
                        sym_op->cipher.data.length = options->test_buffer_size;

                sym_op->cipher.data.offset = 0;

                /* authentication parameters */
                if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
                        sym_op->auth.digest.data = test_vector->digest.data;
                        sym_op->auth.digest.phys_addr =
                                        test_vector->digest.phys_addr;
                        sym_op->auth.digest.length = options->auth_digest_sz;
                } else {
                        uint32_t offset = options->test_buffer_size;
                        struct rte_mbuf *buf, *tbuf;

                        if (options->out_of_place) {
                                buf = bufs_out[i];
                        } else {
                                tbuf = bufs_in[i];
                                while ((tbuf->next != NULL) &&
                                                (offset >= tbuf->data_len)) {
                                        offset -= tbuf->data_len;
                                        tbuf = tbuf->next;
                                }
                                buf = tbuf;
                        }

                        sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
                                        uint8_t *, offset);
                        sym_op->auth.digest.phys_addr =
                                        rte_pktmbuf_mtophys_offset(buf, offset);
                        sym_op->auth.digest.length = options->auth_digest_sz;
                        sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
                        sym_op->auth.aad.data = test_vector->aad.data;
                        sym_op->auth.aad.length = options->auth_aad_sz;
                }

                if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
                        sym_op->auth.data.length = options->test_buffer_size << 3;
                else
                        sym_op->auth.data.length = options->test_buffer_size;

                sym_op->auth.data.offset = 0;
        }

        if (options->test == CPERF_TEST_TYPE_VERIFY) {
                for (i = 0; i < nb_ops; i++) {
                        uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                        uint8_t *, iv_offset);

                        memcpy(iv_ptr, test_vector->iv.data,
                                        test_vector->iv.length);
                }
        }

        return 0;
}

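/*
 * Populate AEAD (AES-GCM) operations: the AAD sits at the start of the
 * mbuf, the cipher data starts at the next 16-byte aligned offset after
 * it, and the digest is placed right after the ciphertext (or taken from
 * the test vector when verifying).
 */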
static int
cperf_set_ops_aead(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector,
                uint16_t iv_offset)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* cipher parameters */
                sym_op->cipher.data.length = options->test_buffer_size;
                sym_op->cipher.data.offset =
                                RTE_ALIGN_CEIL(options->auth_aad_sz, 16);

                sym_op->auth.aad.data = rte_pktmbuf_mtod(bufs_in[i], uint8_t *);
                sym_op->auth.aad.phys_addr = rte_pktmbuf_mtophys(bufs_in[i]);
                sym_op->auth.aad.length = options->auth_aad_sz;

                /* authentication parameters */
                if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
                        sym_op->auth.digest.data = test_vector->digest.data;
                        sym_op->auth.digest.phys_addr =
                                        test_vector->digest.phys_addr;
                        sym_op->auth.digest.length = options->auth_digest_sz;
                } else {
                        uint32_t offset = sym_op->cipher.data.length +
                                                sym_op->cipher.data.offset;
                        struct rte_mbuf *buf, *tbuf;

                        if (options->out_of_place) {
                                buf = bufs_out[i];
                        } else {
                                tbuf = bufs_in[i];
                                while ((tbuf->next != NULL) &&
                                                (offset >= tbuf->data_len)) {
                                        offset -= tbuf->data_len;
                                        tbuf = tbuf->next;
                                }
                                buf = tbuf;
                        }

                        sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
                                        uint8_t *, offset);
                        sym_op->auth.digest.phys_addr =
                                        rte_pktmbuf_mtophys_offset(buf, offset);
                        sym_op->auth.digest.length = options->auth_digest_sz;
                }

                sym_op->auth.data.length = options->test_buffer_size;
                sym_op->auth.data.offset = options->auth_aad_sz;
        }

        if (options->test == CPERF_TEST_TYPE_VERIFY) {
                for (i = 0; i < nb_ops; i++) {
                        uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
                                        uint8_t *, iv_offset);

                        memcpy(iv_ptr, test_vector->iv.data,
                                        test_vector->iv.length);
                }
        }

        return 0;
}

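/*
 * Build the cipher/auth transforms from the test options and vector and
 * create the symmetric session. Since the IV parameters were moved to the
 * session, the IV offset and length are filled in the cipher transform
 * here rather than in each operation.
 */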
static struct rte_cryptodev_sym_session *
cperf_create_session(uint8_t dev_id,
        const struct cperf_options *options,
        const struct cperf_test_vector *test_vector,
        uint16_t iv_offset)
{
        struct rte_crypto_sym_xform cipher_xform;
        struct rte_crypto_sym_xform auth_xform;
        struct rte_cryptodev_sym_session *sess = NULL;

        /*
         * cipher only
         */
        if (options->op_type == CPERF_CIPHER_ONLY) {
                cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
                cipher_xform.next = NULL;
                cipher_xform.cipher.algo = options->cipher_algo;
                cipher_xform.cipher.op = options->cipher_op;
                cipher_xform.cipher.iv.offset = iv_offset;

                /* cipher different than null */
                if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
                        cipher_xform.cipher.key.data =
                                        test_vector->cipher_key.data;
                        cipher_xform.cipher.key.length =
                                        test_vector->cipher_key.length;
                        cipher_xform.cipher.iv.length = test_vector->iv.length;
                } else {
                        cipher_xform.cipher.key.data = NULL;
                        cipher_xform.cipher.key.length = 0;
                        cipher_xform.cipher.iv.length = 0;
                }
                /* create crypto session */
                sess = rte_cryptodev_sym_session_create(dev_id, &cipher_xform);
        /*
         * auth only
         */
        } else if (options->op_type == CPERF_AUTH_ONLY) {
                auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
                auth_xform.next = NULL;
                auth_xform.auth.algo = options->auth_algo;
                auth_xform.auth.op = options->auth_op;

                /* auth different than null */
                if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
                        auth_xform.auth.digest_length =
                                        options->auth_digest_sz;
                        auth_xform.auth.add_auth_data_length =
                                        options->auth_aad_sz;
                        auth_xform.auth.key.length =
                                        test_vector->auth_key.length;
                        auth_xform.auth.key.data = test_vector->auth_key.data;
                } else {
                        auth_xform.auth.digest_length = 0;
                        auth_xform.auth.add_auth_data_length = 0;
                        auth_xform.auth.key.length = 0;
                        auth_xform.auth.key.data = NULL;
                }
                /* create crypto session */
                sess = rte_cryptodev_sym_session_create(dev_id, &auth_xform);
        /*
         * cipher and auth
         */
        } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
                        || options->op_type == CPERF_AUTH_THEN_CIPHER
                        || options->op_type == CPERF_AEAD) {

                /*
                 * cipher
                 */
                cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
                cipher_xform.next = NULL;
                cipher_xform.cipher.algo = options->cipher_algo;
                cipher_xform.cipher.op = options->cipher_op;
                cipher_xform.cipher.iv.offset = iv_offset;

                /* cipher different than null */
                if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
                        cipher_xform.cipher.key.data =
                                        test_vector->cipher_key.data;
                        cipher_xform.cipher.key.length =
                                        test_vector->cipher_key.length;
                        cipher_xform.cipher.iv.length = test_vector->iv.length;
                } else {
                        cipher_xform.cipher.key.data = NULL;
                        cipher_xform.cipher.key.length = 0;
                        cipher_xform.cipher.iv.length = 0;
                }

                /*
                 * auth
                 */
                auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
                auth_xform.next = NULL;
                auth_xform.auth.algo = options->auth_algo;
                auth_xform.auth.op = options->auth_op;

                /* auth different than null */
                if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
                        auth_xform.auth.digest_length = options->auth_digest_sz;
                        auth_xform.auth.add_auth_data_length =
                                        options->auth_aad_sz;
                        /* auth options for aes gcm */
                        if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
                                options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM) {
                                auth_xform.auth.key.length = 0;
                                auth_xform.auth.key.data = NULL;
                        } else { /* auth options for others */
                                auth_xform.auth.key.length =
                                        test_vector->auth_key.length;
                                auth_xform.auth.key.data =
                                                test_vector->auth_key.data;
                        }
                } else {
                        auth_xform.auth.digest_length = 0;
                        auth_xform.auth.add_auth_data_length = 0;
                        auth_xform.auth.key.length = 0;
                        auth_xform.auth.key.data = NULL;
                }

                /* create crypto session for aes gcm */
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM) {
                        if (options->cipher_op ==
                                        RTE_CRYPTO_CIPHER_OP_ENCRYPT) {
                                cipher_xform.next = &auth_xform;
                                /* create crypto session */
                                sess = rte_cryptodev_sym_session_create(dev_id,
                                        &cipher_xform);
                        } else { /* decrypt */
                                auth_xform.next = &cipher_xform;
                                /* create crypto session */
                                sess = rte_cryptodev_sym_session_create(dev_id,
                                        &auth_xform);
                        }
                } else { /* create crypto session for other */
                        /* cipher then auth */
                        if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
                                cipher_xform.next = &auth_xform;
                                /* create crypto session */
                                sess = rte_cryptodev_sym_session_create(dev_id,
                                                &cipher_xform);
                        } else { /* auth then cipher */
                                auth_xform.next = &cipher_xform;
                                /* create crypto session */
                                sess = rte_cryptodev_sym_session_create(dev_id,
                                                &auth_xform);
                        }
                }
        }
        return sess;
}

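/*
 * Select the session-create and op-populate callbacks that match the
 * requested operation type and algorithms.
 */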
int
cperf_get_op_functions(const struct cperf_options *options,
                struct cperf_op_fns *op_fns)
{
        memset(op_fns, 0, sizeof(struct cperf_op_fns));

        op_fns->sess_create = cperf_create_session;

        if (options->op_type == CPERF_AEAD
                        || options->op_type == CPERF_AUTH_THEN_CIPHER
                        || options->op_type == CPERF_CIPHER_THEN_AUTH) {
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
                                options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM)
                        op_fns->populate_ops = cperf_set_ops_aead;
                else
                        op_fns->populate_ops = cperf_set_ops_cipher_auth;
                return 0;
        }
        if (options->op_type == CPERF_AUTH_ONLY) {
                if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
                        op_fns->populate_ops = cperf_set_ops_null_auth;
                else
                        op_fns->populate_ops = cperf_set_ops_auth;
                return 0;
        }
        if (options->op_type == CPERF_CIPHER_ONLY) {
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
                        op_fns->populate_ops = cperf_set_ops_null_cipher;
                else
                        op_fns->populate_ops = cperf_set_ops_cipher;
                return 0;
        }

        return -1;
}