cryptodev: move IV parameters to session
dpdk.git: drivers/crypto/armv8/rte_armv8_pmd.c
/*
 *   BSD LICENSE
 *
 *   Copyright (C) Cavium networks Ltd. 2017.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Cavium networks nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <stdbool.h>

#include <rte_common.h>
#include <rte_hexdump.h>
#include <rte_cryptodev.h>
#include <rte_cryptodev_pmd.h>
#include <rte_cryptodev_vdev.h>
#include <rte_vdev.h>
#include <rte_malloc.h>
#include <rte_cpuflags.h>

#include "armv8_crypto_defs.h"

#include "rte_armv8_pmd_private.h"

static int cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev);

/**
 * Pointers to the supported combined mode crypto functions are stored
 * in static tables. Each combined (chained) cryptographic operation
 * can be described by a set of numbers:
 * - order:     order of operations (cipher, auth) or (auth, cipher)
 * - direction: encryption or decryption
 * - calg:      cipher algorithm such as AES_CBC, AES_CTR, etc.
 * - aalg:      authentication algorithm such as SHA1, SHA256, etc.
 * - keyl:      cipher key length, for example 128, 192, 256 bits
 *
 * In order to quickly acquire each function pointer based on those numbers,
 * a hierarchy of arrays is maintained. The final level, a 3D array, is
 * indexed by the combined mode function parameters only (cipher algorithm,
 * authentication algorithm and key length).
 *
 * This gives 3 memory accesses to obtain a function pointer, instead of
 * traversing the array manually and comparing function parameters in each
 * iteration.
 *
 *                   +--+CRYPTO_FUNC
 *            +--+ENC|
 *      +--+CA|
 *      |     +--+DEC
 * ORDER|
 *      |     +--+ENC
 *      +--+AC|
 *            +--+DEC
 *
 */

/**
 * 3D array type for ARM combined mode crypto function pointers.
 * CRYPTO_CIPHER_MAX:                   max cipher ID number
 * CRYPTO_AUTH_MAX:                     max auth ID number
 * CRYPTO_CIPHER_KEYLEN_MAX:            max key length ID number
 */
typedef const crypto_func_t
crypto_func_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_AUTH_MAX][CRYPTO_CIPHER_KEYLEN_MAX];

/* Evaluate to key length definition */
#define KEYL(keyl)              (ARMV8_CRYPTO_CIPHER_KEYLEN_ ## keyl)

/* Local aliases for supported ciphers */
#define CIPH_AES_CBC            RTE_CRYPTO_CIPHER_AES_CBC
/* Local aliases for supported hashes */
#define AUTH_SHA1_HMAC          RTE_CRYPTO_AUTH_SHA1_HMAC
#define AUTH_SHA256_HMAC        RTE_CRYPTO_AUTH_SHA256_HMAC

/**
 * Arrays containing pointers to particular cryptographic combined mode
 * functions.
 * crypto_op_ca_encrypt:        cipher (encrypt), authenticate
 * crypto_op_ca_decrypt:        cipher (decrypt), authenticate
 * crypto_op_ac_encrypt:        authenticate, cipher (encrypt)
 * crypto_op_ac_decrypt:        authenticate, cipher (decrypt)
 */
static const crypto_func_tbl_t
crypto_op_ca_encrypt = {
        /* [cipher alg][auth alg][key length] = crypto_function, */
        [CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] = aes128cbc_sha1_hmac,
        [CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] = aes128cbc_sha256_hmac,
};

static const crypto_func_tbl_t
crypto_op_ca_decrypt = {
        NULL
};

static const crypto_func_tbl_t
crypto_op_ac_encrypt = {
        NULL
};

static const crypto_func_tbl_t
crypto_op_ac_decrypt = {
        /* [cipher alg][auth alg][key length] = crypto_function, */
        [CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] = sha1_hmac_aes128cbc_dec,
        [CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] = sha256_hmac_aes128cbc_dec,
};

/**
 * Arrays containing pointers to particular cryptographic function sets,
 * covering given cipher operation directions (encrypt, decrypt)
 * for each order of cipher and authentication pairs.
 */
static const crypto_func_tbl_t *
crypto_cipher_auth[] = {
        &crypto_op_ca_encrypt,
        &crypto_op_ca_decrypt,
        NULL
};

static const crypto_func_tbl_t *
crypto_auth_cipher[] = {
        &crypto_op_ac_encrypt,
        &crypto_op_ac_decrypt,
        NULL
};

/**
 * Top level array containing pointers to particular cryptographic
 * function sets, covering given order of chained operations.
 * crypto_cipher_auth:  cipher first, authenticate after
 * crypto_auth_cipher:  authenticate first, cipher after
 */
static const crypto_func_tbl_t **
crypto_chain_order[] = {
        crypto_cipher_auth,
        crypto_auth_cipher,
        NULL
};

/**
 * Extract particular combined mode crypto function from the 3D array.
 */
#define CRYPTO_GET_ALGO(order, cop, calg, aalg, keyl)                   \
({                                                                      \
        crypto_func_tbl_t *func_tbl =                                   \
                                (crypto_chain_order[(order)])[(cop)];   \
                                                                        \
        ((*func_tbl)[(calg)][(aalg)][KEYL(keyl)]);                      \
})
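
/*
 * Illustrative lookup (not part of the driver logic; assumes the enum values
 * index the tables in the order they are laid out above): a cipher-then-auth
 * session doing AES-128-CBC encryption with SHA1-HMAC would resolve as
 *
 *   crypto_func_t f = CRYPTO_GET_ALGO(ARMV8_CRYPTO_CHAIN_CIPHER_AUTH,
 *                      RTE_CRYPTO_CIPHER_OP_ENCRYPT,
 *                      CIPH_AES_CBC, AUTH_SHA1_HMAC, 128);
 *
 * i.e. f == aes128cbc_sha1_hmac from crypto_op_ca_encrypt.
 */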

/*----------------------------------------------------------------------------*/

/**
 * 2D array type for ARM key schedule function pointers.
 * CRYPTO_CIPHER_MAX:                   max cipher ID number
 * CRYPTO_CIPHER_KEYLEN_MAX:            max key length ID number
 */
typedef const crypto_key_sched_t
crypto_key_sched_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_CIPHER_KEYLEN_MAX];

static const crypto_key_sched_tbl_t
crypto_key_sched_encrypt = {
        /* [cipher alg][key length] = key_expand_func, */
        [CIPH_AES_CBC][KEYL(128)] = aes128_key_sched_enc,
};

static const crypto_key_sched_tbl_t
crypto_key_sched_decrypt = {
        /* [cipher alg][key length] = key_expand_func, */
        [CIPH_AES_CBC][KEYL(128)] = aes128_key_sched_dec,
};

/**
 * Top level array containing pointers to particular key generation
 * function sets, covering given operation direction.
 * crypto_key_sched_encrypt:    keys for encryption
 * crypto_key_sched_decrypt:    keys for decryption
 */
static const crypto_key_sched_tbl_t *
crypto_key_sched_dir[] = {
        &crypto_key_sched_encrypt,
        &crypto_key_sched_decrypt,
        NULL
};

/**
 * Extract particular key schedule function from the 2D array.
 */
#define CRYPTO_GET_KEY_SCHED(cop, calg, keyl)                           \
({                                                                      \
        crypto_key_sched_tbl_t *ks_tbl = crypto_key_sched_dir[(cop)];   \
                                                                        \
        ((*ks_tbl)[(calg)][KEYL(keyl)]);                                \
})
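
/*
 * Illustrative lookup (same assumptions as above): the key expansion routine
 * for an AES-128-CBC encrypt session would resolve as
 *
 *   crypto_key_sched_t ks = CRYPTO_GET_KEY_SCHED(RTE_CRYPTO_CIPHER_OP_ENCRYPT,
 *                            CIPH_AES_CBC, 128);
 *
 * i.e. ks == aes128_key_sched_enc from crypto_key_sched_encrypt.
 */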

/*----------------------------------------------------------------------------*/

/*
 *------------------------------------------------------------------------------
 * Session Prepare
 *------------------------------------------------------------------------------
 */

/** Get xform chain order */
static enum armv8_crypto_chain_order
armv8_crypto_get_chain_order(const struct rte_crypto_sym_xform *xform)
{

        /*
         * This driver currently covers only chained operations.
         * Ignore cipher-only or authentication-only operations,
         * as well as chains longer than 2 xform structures.
         */
        if (xform->next == NULL || xform->next->next != NULL)
                return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;

        if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
                if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
                        return ARMV8_CRYPTO_CHAIN_AUTH_CIPHER;
        }

        if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
                if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
                        return ARMV8_CRYPTO_CHAIN_CIPHER_AUTH;
        }

        return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;
}

static inline void
auth_hmac_pad_prepare(struct armv8_crypto_session *sess,
                                const struct rte_crypto_sym_xform *xform)
{
        size_t i;

        /* Generate i_key_pad and o_key_pad */
        memset(sess->auth.hmac.i_key_pad, 0, sizeof(sess->auth.hmac.i_key_pad));
        rte_memcpy(sess->auth.hmac.i_key_pad, sess->auth.hmac.key,
                                                        xform->auth.key.length);
        memset(sess->auth.hmac.o_key_pad, 0, sizeof(sess->auth.hmac.o_key_pad));
        rte_memcpy(sess->auth.hmac.o_key_pad, sess->auth.hmac.key,
                                                        xform->auth.key.length);
        /*
         * XOR key with IPAD/OPAD values to obtain i_key_pad
         * and o_key_pad.
         * A byte-by-byte operation may seem less efficient here, but in
         * fact it is not: the resulting assembly is likely to operate on
         * NEON registers (load the auth key to Qx, load IPAD/OPAD to
         * multiple elements of Qy, then EOR 128 bits at once).
         */
        for (i = 0; i < SHA_BLOCK_MAX; i++) {
                sess->auth.hmac.i_key_pad[i] ^= HMAC_IPAD_VALUE;
                sess->auth.hmac.o_key_pad[i] ^= HMAC_OPAD_VALUE;
        }
}
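
/*
 * Background note (standard HMAC construction, RFC 2104):
 *
 *   HMAC(K, m) = H((K0 ^ opad) || H((K0 ^ ipad) || m))
 *
 * where K0 is the key zero-padded (or pre-hashed) to the hash block size.
 * The i_key_pad and o_key_pad buffers prepared above hold K0 ^ ipad and
 * K0 ^ opad; auth_set_prerequisites() below replaces them with partial hash
 * states that seed the final HMAC computation in the combined mode functions.
 */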

static inline int
auth_set_prerequisites(struct armv8_crypto_session *sess,
                        const struct rte_crypto_sym_xform *xform)
{
        uint8_t partial[64] = { 0 };
        int error;

        switch (xform->auth.algo) {
        case RTE_CRYPTO_AUTH_SHA1_HMAC:
                /*
                 * Generate authentication key, i_key_pad and o_key_pad.
                 */
                /* Zero memory under key */
                memset(sess->auth.hmac.key, 0, SHA1_AUTH_KEY_LENGTH);

                if (xform->auth.key.length > SHA1_AUTH_KEY_LENGTH) {
                        /*
                         * In case the key is longer than 160 bits
                         * the algorithm will use SHA1(key) instead.
                         */
                        error = sha1_block(NULL, xform->auth.key.data,
                                sess->auth.hmac.key, xform->auth.key.length);
                        if (error != 0)
                                return -1;
                } else {
                        /*
                         * Copy the given authentication key to the session
                         * key. Since the session key is zeroed above, no
                         * additional zero padding is needed if the key is
                         * shorter than SHA1_AUTH_KEY_LENGTH.
                         */
                        rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
                                                        xform->auth.key.length);
                }

                /* Prepare HMAC padding: key|pattern */
                auth_hmac_pad_prepare(sess, xform);
                /*
                 * Calculate partial hash values for i_key_pad and o_key_pad.
                 * Will be used as initialization state for final HMAC.
                 */
                error = sha1_block_partial(NULL, sess->auth.hmac.i_key_pad,
                    partial, SHA1_BLOCK_SIZE);
                if (error != 0)
                        return -1;
                memcpy(sess->auth.hmac.i_key_pad, partial, SHA1_BLOCK_SIZE);

                error = sha1_block_partial(NULL, sess->auth.hmac.o_key_pad,
                    partial, SHA1_BLOCK_SIZE);
                if (error != 0)
                        return -1;
                memcpy(sess->auth.hmac.o_key_pad, partial, SHA1_BLOCK_SIZE);

                break;
        case RTE_CRYPTO_AUTH_SHA256_HMAC:
                /*
                 * Generate authentication key, i_key_pad and o_key_pad.
                 */
                /* Zero memory under key */
                memset(sess->auth.hmac.key, 0, SHA256_AUTH_KEY_LENGTH);

                if (xform->auth.key.length > SHA256_AUTH_KEY_LENGTH) {
                        /*
                         * In case the key is longer than 256 bits
                         * the algorithm will use SHA256(key) instead.
                         */
                        error = sha256_block(NULL, xform->auth.key.data,
                                sess->auth.hmac.key, xform->auth.key.length);
                        if (error != 0)
                                return -1;
                } else {
                        /*
                         * Copy the given authentication key to the session
                         * key. Since the session key is zeroed above, no
                         * additional zero padding is needed if the key is
                         * shorter than SHA256_AUTH_KEY_LENGTH.
                         */
                        rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
                                                        xform->auth.key.length);
                }

                /* Prepare HMAC padding: key|pattern */
                auth_hmac_pad_prepare(sess, xform);
                /*
                 * Calculate partial hash values for i_key_pad and o_key_pad.
                 * Will be used as initialization state for final HMAC.
                 */
                error = sha256_block_partial(NULL, sess->auth.hmac.i_key_pad,
                    partial, SHA256_BLOCK_SIZE);
                if (error != 0)
                        return -1;
                memcpy(sess->auth.hmac.i_key_pad, partial, SHA256_BLOCK_SIZE);

                error = sha256_block_partial(NULL, sess->auth.hmac.o_key_pad,
                    partial, SHA256_BLOCK_SIZE);
                if (error != 0)
                        return -1;
                memcpy(sess->auth.hmac.o_key_pad, partial, SHA256_BLOCK_SIZE);

                break;
        default:
                break;
        }

        return 0;
}

static inline int
cipher_set_prerequisites(struct armv8_crypto_session *sess,
                        const struct rte_crypto_sym_xform *xform)
{
        crypto_key_sched_t cipher_key_sched;

        cipher_key_sched = sess->cipher.key_sched;
        if (likely(cipher_key_sched != NULL)) {
                /* Set up cipher session key */
                cipher_key_sched(sess->cipher.key.data, xform->cipher.key.data);
        }

        return 0;
}

static int
armv8_crypto_set_session_chained_parameters(struct armv8_crypto_session *sess,
                const struct rte_crypto_sym_xform *cipher_xform,
                const struct rte_crypto_sym_xform *auth_xform)
{
        enum armv8_crypto_chain_order order;
        enum armv8_crypto_cipher_operation cop;
        enum rte_crypto_cipher_algorithm calg;
        enum rte_crypto_auth_algorithm aalg;

        /* Validate and prepare scratch order of combined operations */
        switch (sess->chain_order) {
        case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
        case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
                order = sess->chain_order;
                break;
        default:
                return -EINVAL;
        }
        /* Select cipher direction */
        sess->cipher.direction = cipher_xform->cipher.op;
        /* Select cipher key */
        sess->cipher.key.length = cipher_xform->cipher.key.length;
        /* Set cipher direction */
        cop = sess->cipher.direction;
        /* Set cipher algorithm */
        calg = cipher_xform->cipher.algo;

        /* Select cipher algo */
        switch (calg) {
        /* Cover supported cipher algorithms */
        case RTE_CRYPTO_CIPHER_AES_CBC:
                sess->cipher.algo = calg;
                /* IV len is always 16 bytes (block size) for AES CBC */
                sess->cipher.iv.length = 16;
                break;
        default:
                return -EINVAL;
        }
        /* Select auth generate/verify */
        sess->auth.operation = auth_xform->auth.op;

        /* Select auth algo */
        switch (auth_xform->auth.algo) {
        /* Cover supported hash algorithms */
        case RTE_CRYPTO_AUTH_SHA1_HMAC:
        case RTE_CRYPTO_AUTH_SHA256_HMAC: /* Fall through */
                aalg = auth_xform->auth.algo;
                sess->auth.mode = ARMV8_CRYPTO_AUTH_AS_HMAC;
                break;
        default:
                return -EINVAL;
        }

        /* Verify supported key lengths and extract proper algorithm */
        switch (cipher_xform->cipher.key.length << 3) {
        case 128:
                sess->crypto_func =
                                CRYPTO_GET_ALGO(order, cop, calg, aalg, 128);
                sess->cipher.key_sched =
                                CRYPTO_GET_KEY_SCHED(cop, calg, 128);
                break;
        case 192:
        case 256:
                /* These key lengths are not supported yet */
        default: /* Fall through */
                sess->crypto_func = NULL;
                sess->cipher.key_sched = NULL;
                return -EINVAL;
        }

        if (unlikely(sess->crypto_func == NULL)) {
                /*
                 * If we reach this point, there must be a bug in the
                 * algorithm selection above. Nevertheless, keep this check
                 * to catch such a bug immediately and avoid a NULL pointer
                 * dereference during op processing.
                 */
                ARMV8_CRYPTO_LOG_ERR(
                        "No appropriate crypto function for given parameters");
                return -EINVAL;
        }

        /* Set up cipher session prerequisites */
        if (cipher_set_prerequisites(sess, cipher_xform) != 0)
                return -EINVAL;

        /* Set up authentication session prerequisites */
        if (auth_set_prerequisites(sess, auth_xform) != 0)
                return -EINVAL;

        return 0;
}

/** Parse crypto xform chain and set private session parameters */
int
armv8_crypto_set_session_parameters(struct armv8_crypto_session *sess,
                const struct rte_crypto_sym_xform *xform)
{
        const struct rte_crypto_sym_xform *cipher_xform = NULL;
        const struct rte_crypto_sym_xform *auth_xform = NULL;
        bool is_chained_op;
        int ret;

        /* Filter out spurious/broken requests */
        if (xform == NULL)
                return -EINVAL;

        sess->chain_order = armv8_crypto_get_chain_order(xform);
        switch (sess->chain_order) {
        case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
                cipher_xform = xform;
                auth_xform = xform->next;
                is_chained_op = true;
                break;
        case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
                auth_xform = xform;
                cipher_xform = xform->next;
                is_chained_op = true;
                break;
        default:
                is_chained_op = false;
                return -EINVAL;
        }

        /* Set IV offset */
        sess->cipher.iv.offset = cipher_xform->cipher.iv.offset;

        if (is_chained_op) {
                ret = armv8_crypto_set_session_chained_parameters(sess,
                                                cipher_xform, auth_xform);
                if (unlikely(ret != 0)) {
                        ARMV8_CRYPTO_LOG_ERR(
                        "Invalid/unsupported chained (cipher/auth) parameters");
                        return -EINVAL;
                }
        } else {
                ARMV8_CRYPTO_LOG_ERR("Invalid/unsupported operation");
                return -EINVAL;
        }

        return 0;
}
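
/*
 * Illustrative xform chain accepted by the parser above (a sketch, not part
 * of the driver): cipher-then-auth, AES-128-CBC encryption with SHA1-HMAC
 * generation. The key buffers and IV_OFFSET are application-supplied
 * placeholders.
 *
 *   struct rte_crypto_sym_xform auth_xf = {
 *           .type = RTE_CRYPTO_SYM_XFORM_AUTH,
 *           .next = NULL,
 *           .auth = {
 *                   .op = RTE_CRYPTO_AUTH_OP_GENERATE,
 *                   .algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
 *                   .key = { .data = hmac_key, .length = 20 },
 *                   .digest_length = 20,
 *           },
 *   };
 *   struct rte_crypto_sym_xform cipher_xf = {
 *           .type = RTE_CRYPTO_SYM_XFORM_CIPHER,
 *           .next = &auth_xf,
 *           .cipher = {
 *                   .op = RTE_CRYPTO_CIPHER_OP_ENCRYPT,
 *                   .algo = RTE_CRYPTO_CIPHER_AES_CBC,
 *                   .key = { .data = aes_key, .length = 16 },
 *                   .iv = { .offset = IV_OFFSET, .length = 16 },
 *           },
 *   };
 *
 * Passing &cipher_xf to armv8_crypto_set_session_parameters() selects the
 * aes128cbc_sha1_hmac combined function via the tables at the top of this
 * file.
 */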

/** Provide session for operation */
static inline struct armv8_crypto_session *
get_session(struct armv8_crypto_qp *qp, struct rte_crypto_op *op)
{
        struct armv8_crypto_session *sess = NULL;

        if (op->sess_type == RTE_CRYPTO_OP_WITH_SESSION) {
                /* get existing session */
                if (likely(op->sym->session != NULL &&
                                op->sym->session->dev_type ==
                                RTE_CRYPTODEV_ARMV8_PMD)) {
                        sess = (struct armv8_crypto_session *)
                                op->sym->session->_private;
                }
        } else {
                /* provide internal session */
                void *_sess = NULL;

                if (!rte_mempool_get(qp->sess_mp, (void **)&_sess)) {
                        sess = (struct armv8_crypto_session *)
                                ((struct rte_cryptodev_sym_session *)_sess)
                                ->_private;

                        if (unlikely(armv8_crypto_set_session_parameters(
                                        sess, op->sym->xform) != 0)) {
                                rte_mempool_put(qp->sess_mp, _sess);
                                sess = NULL;
                        } else
                                op->sym->session = _sess;
                }
        }

        if (unlikely(sess == NULL))
                op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;

        return sess;
}

/*
 *------------------------------------------------------------------------------
 * Process Operations
 *------------------------------------------------------------------------------
 */

/*----------------------------------------------------------------------------*/

/** Process chained (cipher/auth) operation */
static inline void
process_armv8_chained_op
                (struct rte_crypto_op *op, struct armv8_crypto_session *sess,
                struct rte_mbuf *mbuf_src, struct rte_mbuf *mbuf_dst)
{
        crypto_func_t crypto_func;
        crypto_arg_t arg;
        struct rte_mbuf *m_asrc, *m_adst;
        uint8_t *csrc, *cdst;
        uint8_t *adst, *asrc;
        uint64_t clen, alen;
        int error;

        clen = op->sym->cipher.data.length;
        alen = op->sym->auth.data.length;

        csrc = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *,
                        op->sym->cipher.data.offset);
        cdst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
                        op->sym->cipher.data.offset);

        switch (sess->chain_order) {
        case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
                m_asrc = m_adst = mbuf_dst;
                break;
        case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
                m_asrc = mbuf_src;
                m_adst = mbuf_dst;
                break;
        default:
                op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
                return;
        }
        asrc = rte_pktmbuf_mtod_offset(m_asrc, uint8_t *,
                                op->sym->auth.data.offset);

        switch (sess->auth.mode) {
        case ARMV8_CRYPTO_AUTH_AS_AUTH:
                /* Nothing to do here, just verify correct option */
                break;
        case ARMV8_CRYPTO_AUTH_AS_HMAC:
                arg.digest.hmac.key = sess->auth.hmac.key;
                arg.digest.hmac.i_key_pad = sess->auth.hmac.i_key_pad;
                arg.digest.hmac.o_key_pad = sess->auth.hmac.o_key_pad;
                break;
        default:
                op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
                return;
        }

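        /*
         * Digest placement (descriptive note): for GENERATE the digest is
         * written to the location supplied in the op or, if none is given,
         * right after the authenticated region of the destination mbuf.
         * For VERIFY a scratch area of digest.length bytes is appended to
         * the auth source mbuf, the digest is computed there, compared
         * against the digest supplied in the op and then trimmed off again.
         */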
        if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_GENERATE) {
                adst = op->sym->auth.digest.data;
                if (adst == NULL) {
                        adst = rte_pktmbuf_mtod_offset(m_adst,
                                        uint8_t *,
                                        op->sym->auth.data.offset +
                                        op->sym->auth.data.length);
                }
        } else {
                adst = (uint8_t *)rte_pktmbuf_append(m_asrc,
                                op->sym->auth.digest.length);
        }

        arg.cipher.iv = rte_crypto_op_ctod_offset(op, uint8_t *,
                                        sess->cipher.iv.offset);
        arg.cipher.key = sess->cipher.key.data;
        /* Acquire combined mode function */
        crypto_func = sess->crypto_func;
        ARMV8_CRYPTO_ASSERT(crypto_func != NULL);
        error = crypto_func(csrc, cdst, clen, asrc, adst, alen, &arg);
        if (error != 0) {
                op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
                return;
        }

        op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
        if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
                if (memcmp(adst, op->sym->auth.digest.data,
                                op->sym->auth.digest.length) != 0) {
                        op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
                }
                /* Trim area used for digest from mbuf. */
                rte_pktmbuf_trim(m_asrc,
                                op->sym->auth.digest.length);
        }
}

/** Process crypto operation for mbuf */
static inline int
process_op(const struct armv8_crypto_qp *qp, struct rte_crypto_op *op,
                struct armv8_crypto_session *sess)
{
        struct rte_mbuf *msrc, *mdst;

        msrc = op->sym->m_src;
        mdst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;

        op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;

        switch (sess->chain_order) {
        case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
        case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER: /* Fall through */
                process_armv8_chained_op(op, sess, msrc, mdst);
                break;
        default:
                op->status = RTE_CRYPTO_OP_STATUS_ERROR;
                break;
        }

        /* Free session if a session-less crypto op */
        if (op->sess_type == RTE_CRYPTO_OP_SESSIONLESS) {
                memset(sess, 0, sizeof(struct armv8_crypto_session));
                rte_mempool_put(qp->sess_mp, op->sym->session);
                op->sym->session = NULL;
        }

        if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
                op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;

        if (unlikely(op->status == RTE_CRYPTO_OP_STATUS_ERROR))
                return -1;

        return 0;
}

/*
 *------------------------------------------------------------------------------
 * PMD Framework
 *------------------------------------------------------------------------------
 */

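/*
 * Data path note: this PMD processes each operation synchronously inside the
 * enqueue call and pushes finished ops onto the queue pair's processed_ops
 * ring; the dequeue call simply drains that ring. There is no asynchronous
 * hardware offload behind these functions.
 */
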
/** Enqueue burst */
static uint16_t
armv8_crypto_pmd_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops,
                uint16_t nb_ops)
{
        struct armv8_crypto_session *sess;
        struct armv8_crypto_qp *qp = queue_pair;
        int i, retval;

        for (i = 0; i < nb_ops; i++) {
                sess = get_session(qp, ops[i]);
                if (unlikely(sess == NULL))
                        goto enqueue_err;

                retval = process_op(qp, ops[i], sess);
                if (unlikely(retval < 0))
                        goto enqueue_err;
        }

        retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i,
                        NULL);
        qp->stats.enqueued_count += retval;

        return retval;

enqueue_err:
        retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i,
                        NULL);
        if (ops[i] != NULL)
                ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;

        qp->stats.enqueue_err_count++;
        return retval;
}

/** Dequeue burst */
static uint16_t
armv8_crypto_pmd_dequeue_burst(void *queue_pair, struct rte_crypto_op **ops,
                uint16_t nb_ops)
{
        struct armv8_crypto_qp *qp = queue_pair;

        unsigned int nb_dequeued = 0;

        nb_dequeued = rte_ring_dequeue_burst(qp->processed_ops,
                        (void **)ops, nb_ops, NULL);
        qp->stats.dequeued_count += nb_dequeued;

        return nb_dequeued;
}

/** Create ARMv8 crypto device */
static int
cryptodev_armv8_crypto_create(const char *name,
                        struct rte_vdev_device *vdev,
                        struct rte_crypto_vdev_init_params *init_params)
{
        struct rte_cryptodev *dev;
        struct armv8_crypto_private *internals;

        /* Check CPU support for the AES instruction set */
        if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_AES)) {
                ARMV8_CRYPTO_LOG_ERR(
                        "AES instructions not supported by CPU");
                return -EFAULT;
        }

        /* Check CPU support for the SHA instruction set */
        if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA1) ||
            !rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA2)) {
                ARMV8_CRYPTO_LOG_ERR(
                        "SHA1/SHA2 instructions not supported by CPU");
                return -EFAULT;
        }

        /* Check CPU support for the Advanced SIMD (NEON) instruction set */
        if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_NEON)) {
                ARMV8_CRYPTO_LOG_ERR(
                        "Advanced SIMD instructions not supported by CPU");
                return -EFAULT;
        }

        if (init_params->name[0] == '\0')
                snprintf(init_params->name, sizeof(init_params->name),
                                "%s", name);

        dev = rte_cryptodev_vdev_pmd_init(init_params->name,
                                sizeof(struct armv8_crypto_private),
                                init_params->socket_id,
                                vdev);
        if (dev == NULL) {
                ARMV8_CRYPTO_LOG_ERR("failed to create cryptodev vdev");
                goto init_error;
        }

        dev->dev_type = RTE_CRYPTODEV_ARMV8_PMD;
        dev->dev_ops = rte_armv8_crypto_pmd_ops;

        /* Register rx/tx burst functions for data path */
        dev->dequeue_burst = armv8_crypto_pmd_dequeue_burst;
        dev->enqueue_burst = armv8_crypto_pmd_enqueue_burst;

        dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
                        RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING |
                        RTE_CRYPTODEV_FF_CPU_NEON |
                        RTE_CRYPTODEV_FF_CPU_ARM_CE;

        /* Set vector instructions mode supported */
        internals = dev->data->dev_private;

        internals->max_nb_qpairs = init_params->max_nb_queue_pairs;
        internals->max_nb_sessions = init_params->max_nb_sessions;

        return 0;

init_error:
        ARMV8_CRYPTO_LOG_ERR(
                "driver %s: cryptodev_armv8_crypto_create failed",
                init_params->name);

        cryptodev_armv8_crypto_uninit(vdev);
        return -EFAULT;
}

/** Initialise ARMv8 crypto device */
static int
cryptodev_armv8_crypto_init(struct rte_vdev_device *vdev)
{
        struct rte_crypto_vdev_init_params init_params = {
                RTE_CRYPTODEV_VDEV_DEFAULT_MAX_NB_QUEUE_PAIRS,
                RTE_CRYPTODEV_VDEV_DEFAULT_MAX_NB_SESSIONS,
                rte_socket_id(),
                {0}
        };
        const char *name;
        const char *input_args;

        name = rte_vdev_device_name(vdev);
        if (name == NULL)
                return -EINVAL;
        input_args = rte_vdev_device_args(vdev);
        rte_cryptodev_vdev_parse_init_params(&init_params, input_args);

        RTE_LOG(INFO, PMD, "Initialising %s on NUMA node %d\n", name,
                        init_params.socket_id);
        if (init_params.name[0] != '\0') {
                RTE_LOG(INFO, PMD, "  User defined name = %s\n",
                        init_params.name);
        }
        RTE_LOG(INFO, PMD, "  Max number of queue pairs = %d\n",
                        init_params.max_nb_queue_pairs);
        RTE_LOG(INFO, PMD, "  Max number of sessions = %d\n",
                        init_params.max_nb_sessions);

        return cryptodev_armv8_crypto_create(name, vdev, &init_params);
}

/** Uninitialise ARMv8 crypto device */
static int
cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev)
{
        const char *name;

        name = rte_vdev_device_name(vdev);
        if (name == NULL)
                return -EINVAL;

        RTE_LOG(INFO, PMD,
                "Closing ARMv8 crypto device %s on numa socket %u\n",
                name, rte_socket_id());

        return 0;
}

static struct rte_vdev_driver armv8_crypto_drv = {
        .probe = cryptodev_armv8_crypto_init,
        .remove = cryptodev_armv8_crypto_uninit
};

RTE_PMD_REGISTER_VDEV(CRYPTODEV_NAME_ARMV8_PMD, armv8_crypto_drv);
RTE_PMD_REGISTER_ALIAS(CRYPTODEV_NAME_ARMV8_PMD, cryptodev_armv8_pmd);
RTE_PMD_REGISTER_PARAM_STRING(CRYPTODEV_NAME_ARMV8_PMD,
        "max_nb_queue_pairs=<int> "
        "max_nb_sessions=<int> "
        "socket_id=<int>");
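
/*
 * Usage note (illustrative, not part of the driver): this vdev can be
 * instantiated from the EAL command line, e.g.
 *
 *   --vdev "crypto_armv8,max_nb_queue_pairs=8,max_nb_sessions=1024,socket_id=0"
 *
 * assuming CRYPTODEV_NAME_ARMV8_PMD expands to "crypto_armv8"; see the
 * RTE_PMD_REGISTER_PARAM_STRING() entry above for the accepted parameters.
 */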