cryptodev: remove digest length from crypto op
drivers/crypto/armv8/rte_armv8_pmd.c
/*
 *   BSD LICENSE
 *
 *   Copyright (C) Cavium networks Ltd. 2017.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Cavium networks nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <stdbool.h>

#include <rte_common.h>
#include <rte_hexdump.h>
#include <rte_cryptodev.h>
#include <rte_cryptodev_pmd.h>
#include <rte_cryptodev_vdev.h>
#include <rte_vdev.h>
#include <rte_malloc.h>
#include <rte_cpuflags.h>

#include "armv8_crypto_defs.h"

#include "rte_armv8_pmd_private.h"

static int cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev);

/**
 * Pointers to the supported combined mode crypto functions are stored
 * in the static tables. Each combined (chained) cryptographic operation
 * can be described by a set of numbers:
 * - order:     order of operations (cipher, auth) or (auth, cipher)
 * - direction: encryption or decryption
 * - calg:      cipher algorithm such as AES_CBC, AES_CTR, etc.
 * - aalg:      authentication algorithm such as SHA1, SHA256, etc.
 * - keyl:      cipher key length, for example 128, 192, 256 bits
 *
 * In order to quickly acquire each function pointer based on those numbers,
 * a hierarchy of arrays is maintained. The final level, a 3D array, is indexed
 * by the combined mode function parameters only (cipher algorithm,
 * authentication algorithm and key length).
 *
 * This gives 3 memory accesses to obtain a function pointer instead of
 * traversing the array manually and comparing function parameters on each loop.
 *
 *                   +--+CRYPTO_FUNC
 *            +--+ENC|
 *      +--+CA|
 *      |     +--+DEC
 * ORDER|
 *      |     +--+ENC
 *      +--+AC|
 *            +--+DEC
 *
 */

/**
 * 3D array type for ARM Combined Mode crypto function pointers.
 * CRYPTO_CIPHER_MAX:                   max cipher ID number
 * CRYPTO_AUTH_MAX:                     max auth ID number
 * CRYPTO_CIPHER_KEYLEN_MAX:            max key length ID number
 */
typedef const crypto_func_t
crypto_func_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_AUTH_MAX][CRYPTO_CIPHER_KEYLEN_MAX];

/* Evaluate to key length definition */
#define KEYL(keyl)              (ARMV8_CRYPTO_CIPHER_KEYLEN_ ## keyl)

/* Local aliases for supported ciphers */
#define CIPH_AES_CBC            RTE_CRYPTO_CIPHER_AES_CBC
/* Local aliases for supported hashes */
#define AUTH_SHA1_HMAC          RTE_CRYPTO_AUTH_SHA1_HMAC
#define AUTH_SHA256_HMAC        RTE_CRYPTO_AUTH_SHA256_HMAC

/**
 * Arrays containing pointers to particular cryptographic,
 * combined mode functions.
 * crypto_op_ca_encrypt:        cipher (encrypt), authenticate
 * crypto_op_ca_decrypt:        cipher (decrypt), authenticate
 * crypto_op_ac_encrypt:        authenticate, cipher (encrypt)
 * crypto_op_ac_decrypt:        authenticate, cipher (decrypt)
 */
static const crypto_func_tbl_t
crypto_op_ca_encrypt = {
        /* [cipher alg][auth alg][key length] = crypto_function, */
        [CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] = aes128cbc_sha1_hmac,
        [CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] = aes128cbc_sha256_hmac,
};

static const crypto_func_tbl_t
crypto_op_ca_decrypt = {
        NULL
};

static const crypto_func_tbl_t
crypto_op_ac_encrypt = {
        NULL
};

static const crypto_func_tbl_t
crypto_op_ac_decrypt = {
        /* [cipher alg][auth alg][key length] = crypto_function, */
        [CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] = sha1_hmac_aes128cbc_dec,
        [CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] = sha256_hmac_aes128cbc_dec,
};

/**
 * Arrays containing pointers to particular cryptographic function sets,
 * covering given cipher operation directions (encrypt, decrypt)
 * for each order of cipher and authentication pairs.
 */
static const crypto_func_tbl_t *
crypto_cipher_auth[] = {
        &crypto_op_ca_encrypt,
        &crypto_op_ca_decrypt,
        NULL
};

static const crypto_func_tbl_t *
crypto_auth_cipher[] = {
        &crypto_op_ac_encrypt,
        &crypto_op_ac_decrypt,
        NULL
};

/**
 * Top level array containing pointers to particular cryptographic
 * function sets, covering given order of chained operations.
 * crypto_cipher_auth:  cipher first, authenticate after
 * crypto_auth_cipher:  authenticate first, cipher after
 */
static const crypto_func_tbl_t **
crypto_chain_order[] = {
        crypto_cipher_auth,
        crypto_auth_cipher,
        NULL
};

/**
 * Extract particular combined mode crypto function from the 3D array.
 */
#define CRYPTO_GET_ALGO(order, cop, calg, aalg, keyl)                   \
({                                                                      \
        crypto_func_tbl_t *func_tbl =                                   \
                                (crypto_chain_order[(order)])[(cop)];   \
                                                                        \
        ((*func_tbl)[(calg)][(aalg)][KEYL(keyl)]);                      \
})
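
/*
 * Illustrative example (not part of the original driver): for an AES-128-CBC
 * cipher chained with SHA1-HMAC in the (cipher, auth) order and the encrypt
 * direction, the lookup below is expected to resolve to aes128cbc_sha1_hmac:
 *
 *   crypto_func_t f = CRYPTO_GET_ALGO(ARMV8_CRYPTO_CHAIN_CIPHER_AUTH,
 *                                     RTE_CRYPTO_CIPHER_OP_ENCRYPT,
 *                                     CIPH_AES_CBC, AUTH_SHA1_HMAC, 128);
 */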

/*----------------------------------------------------------------------------*/

/**
 * 2D array type for ARM key schedule function pointers.
 * CRYPTO_CIPHER_MAX:                   max cipher ID number
 * CRYPTO_CIPHER_KEYLEN_MAX:            max key length ID number
 */
typedef const crypto_key_sched_t
crypto_key_sched_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_CIPHER_KEYLEN_MAX];

static const crypto_key_sched_tbl_t
crypto_key_sched_encrypt = {
        /* [cipher alg][key length] = key_expand_func, */
        [CIPH_AES_CBC][KEYL(128)] = aes128_key_sched_enc,
};

static const crypto_key_sched_tbl_t
crypto_key_sched_decrypt = {
        /* [cipher alg][key length] = key_expand_func, */
        [CIPH_AES_CBC][KEYL(128)] = aes128_key_sched_dec,
};

/**
 * Top level array containing pointers to particular key generation
 * function sets, covering given operation direction.
 * crypto_key_sched_encrypt:    keys for encryption
 * crypto_key_sched_decrypt:    keys for decryption
 */
static const crypto_key_sched_tbl_t *
crypto_key_sched_dir[] = {
        &crypto_key_sched_encrypt,
        &crypto_key_sched_decrypt,
        NULL
};

/**
 * Extract particular key schedule function from the 2D array.
 */
#define CRYPTO_GET_KEY_SCHED(cop, calg, keyl)                           \
({                                                                      \
        crypto_key_sched_tbl_t *ks_tbl = crypto_key_sched_dir[(cop)];   \
                                                                        \
        ((*ks_tbl)[(calg)][KEYL(keyl)]);                                \
})
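
/*
 * Illustrative example (not part of the original driver): the AES-128
 * encryption key expansion routine would be obtained as
 *
 *   crypto_key_sched_t ks = CRYPTO_GET_KEY_SCHED(RTE_CRYPTO_CIPHER_OP_ENCRYPT,
 *                                                CIPH_AES_CBC, 128);
 *
 * which is expected to resolve to aes128_key_sched_enc.
 */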

/*----------------------------------------------------------------------------*/

/*
 *------------------------------------------------------------------------------
 * Session Prepare
 *------------------------------------------------------------------------------
 */

/** Get xform chain order */
static enum armv8_crypto_chain_order
armv8_crypto_get_chain_order(const struct rte_crypto_sym_xform *xform)
{

        /*
         * This driver currently covers only chained operations.
         * Ignore cipher-only and authentication-only operations,
         * as well as chains longer than 2 xform structures.
         */
        if (xform->next == NULL || xform->next->next != NULL)
                return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;

        if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
                if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
                        return ARMV8_CRYPTO_CHAIN_AUTH_CIPHER;
        }

        if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
                if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
                        return ARMV8_CRYPTO_CHAIN_CIPHER_AUTH;
        }

        return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;
}
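
/*
 * Illustrative example (not part of the original driver): a chain accepted by
 * this PMD consists of exactly two linked xforms, e.g. cipher followed by
 * authentication (remaining fields omitted for brevity):
 *
 *   struct rte_crypto_sym_xform auth_xf = {
 *           .type = RTE_CRYPTO_SYM_XFORM_AUTH,
 *           .next = NULL,
 *   };
 *   struct rte_crypto_sym_xform cipher_xf = {
 *           .type = RTE_CRYPTO_SYM_XFORM_CIPHER,
 *           .next = &auth_xf,
 *   };
 *
 * For this chain armv8_crypto_get_chain_order(&cipher_xf) returns
 * ARMV8_CRYPTO_CHAIN_CIPHER_AUTH.
 */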

static inline void
auth_hmac_pad_prepare(struct armv8_crypto_session *sess,
                                const struct rte_crypto_sym_xform *xform)
{
        size_t i;

        /* Generate i_key_pad and o_key_pad */
        memset(sess->auth.hmac.i_key_pad, 0, sizeof(sess->auth.hmac.i_key_pad));
        rte_memcpy(sess->auth.hmac.i_key_pad, sess->auth.hmac.key,
                                                        xform->auth.key.length);
        memset(sess->auth.hmac.o_key_pad, 0, sizeof(sess->auth.hmac.o_key_pad));
        rte_memcpy(sess->auth.hmac.o_key_pad, sess->auth.hmac.key,
                                                        xform->auth.key.length);
        /*
         * XOR the key with the IPAD/OPAD values to obtain i_key_pad
         * and o_key_pad.
         * A byte-by-byte operation may seem less efficient here,
         * but in fact the opposite is true.
         * The resulting ASM code is likely to operate on NEON registers
         * (load the auth key to Qx, load IPAD/OPAD to multiple
         * elements of Qy, then EOR 128 bits at once).
         */
        for (i = 0; i < SHA_BLOCK_MAX; i++) {
                sess->auth.hmac.i_key_pad[i] ^= HMAC_IPAD_VALUE;
                sess->auth.hmac.o_key_pad[i] ^= HMAC_OPAD_VALUE;
        }
}
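
/*
 * Reference note (added for clarity, not part of the original driver): the
 * pads prepared above implement the standard HMAC construction
 *
 *   HMAC(K, m) = H((K ^ opad) || H((K ^ ipad) || m))
 *
 * where H is SHA1 or SHA256 and K is the (possibly pre-hashed) auth key
 * zero-padded to the block size.
 */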

static inline int
auth_set_prerequisites(struct armv8_crypto_session *sess,
                        const struct rte_crypto_sym_xform *xform)
{
        uint8_t partial[64] = { 0 };
        int error;

        switch (xform->auth.algo) {
        case RTE_CRYPTO_AUTH_SHA1_HMAC:
                /*
                 * Generate authentication key, i_key_pad and o_key_pad.
                 */
                /* Zero memory under key */
                memset(sess->auth.hmac.key, 0, SHA1_AUTH_KEY_LENGTH);

                if (xform->auth.key.length > SHA1_AUTH_KEY_LENGTH) {
                        /*
                         * In case the key is longer than 160 bits
                         * the algorithm will use SHA1(key) instead.
                         */
                        error = sha1_block(NULL, xform->auth.key.data,
                                sess->auth.hmac.key, xform->auth.key.length);
                        if (error != 0)
                                return -1;
                } else {
                        /*
                         * Now copy the given authentication key to the session
                         * key. Since the session key has been zeroed, there is
                         * no need for additional zero padding if the key is
                         * shorter than SHA1_AUTH_KEY_LENGTH.
                         */
                        rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
                                                        xform->auth.key.length);
                }

                /* Prepare HMAC padding: key|pattern */
                auth_hmac_pad_prepare(sess, xform);
                /*
                 * Calculate partial hash values for i_key_pad and o_key_pad.
                 * They will be used as the initialization state for the
                 * final HMAC.
                 */
                error = sha1_block_partial(NULL, sess->auth.hmac.i_key_pad,
                    partial, SHA1_BLOCK_SIZE);
                if (error != 0)
                        return -1;
                memcpy(sess->auth.hmac.i_key_pad, partial, SHA1_BLOCK_SIZE);

                error = sha1_block_partial(NULL, sess->auth.hmac.o_key_pad,
                    partial, SHA1_BLOCK_SIZE);
                if (error != 0)
                        return -1;
                memcpy(sess->auth.hmac.o_key_pad, partial, SHA1_BLOCK_SIZE);

                break;
        case RTE_CRYPTO_AUTH_SHA256_HMAC:
                /*
                 * Generate authentication key, i_key_pad and o_key_pad.
                 */
                /* Zero memory under key */
                memset(sess->auth.hmac.key, 0, SHA256_AUTH_KEY_LENGTH);

                if (xform->auth.key.length > SHA256_AUTH_KEY_LENGTH) {
                        /*
                         * In case the key is longer than 256 bits
                         * the algorithm will use SHA256(key) instead.
                         */
                        error = sha256_block(NULL, xform->auth.key.data,
                                sess->auth.hmac.key, xform->auth.key.length);
                        if (error != 0)
                                return -1;
                } else {
                        /*
                         * Now copy the given authentication key to the session
                         * key. Since the session key has been zeroed, there is
                         * no need for additional zero padding if the key is
                         * shorter than SHA256_AUTH_KEY_LENGTH.
                         */
                        rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
                                                        xform->auth.key.length);
                }

                /* Prepare HMAC padding: key|pattern */
                auth_hmac_pad_prepare(sess, xform);
                /*
                 * Calculate partial hash values for i_key_pad and o_key_pad.
                 * They will be used as the initialization state for the
                 * final HMAC.
                 */
                error = sha256_block_partial(NULL, sess->auth.hmac.i_key_pad,
                    partial, SHA256_BLOCK_SIZE);
                if (error != 0)
                        return -1;
                memcpy(sess->auth.hmac.i_key_pad, partial, SHA256_BLOCK_SIZE);

                error = sha256_block_partial(NULL, sess->auth.hmac.o_key_pad,
                    partial, SHA256_BLOCK_SIZE);
                if (error != 0)
                        return -1;
                memcpy(sess->auth.hmac.o_key_pad, partial, SHA256_BLOCK_SIZE);

                break;
        default:
                break;
        }

        return 0;
}
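
/*
 * Note (added for clarity, not part of the original driver): the partial
 * SHA1/SHA256 states computed above are stored in the session and handed to
 * the combined mode functions at operation time (see the arg.digest.hmac
 * fields in process_armv8_chained_op()), so the pads do not have to be
 * re-hashed for every crypto operation.
 */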

static inline int
cipher_set_prerequisites(struct armv8_crypto_session *sess,
                        const struct rte_crypto_sym_xform *xform)
{
        crypto_key_sched_t cipher_key_sched;

        cipher_key_sched = sess->cipher.key_sched;
        if (likely(cipher_key_sched != NULL)) {
                /* Set up cipher session key */
                cipher_key_sched(sess->cipher.key.data, xform->cipher.key.data);
        }

        return 0;
}

static int
armv8_crypto_set_session_chained_parameters(struct armv8_crypto_session *sess,
                const struct rte_crypto_sym_xform *cipher_xform,
                const struct rte_crypto_sym_xform *auth_xform)
{
        enum armv8_crypto_chain_order order;
        enum armv8_crypto_cipher_operation cop;
        enum rte_crypto_cipher_algorithm calg;
        enum rte_crypto_auth_algorithm aalg;

        /* Validate and prepare scratch order of combined operations */
        switch (sess->chain_order) {
        case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
        case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
                order = sess->chain_order;
                break;
        default:
                return -EINVAL;
        }
        /* Select cipher direction */
        sess->cipher.direction = cipher_xform->cipher.op;
        /* Select cipher key */
        sess->cipher.key.length = cipher_xform->cipher.key.length;
        /* Set cipher direction */
        cop = sess->cipher.direction;
        /* Set cipher algorithm */
        calg = cipher_xform->cipher.algo;

        /* Select cipher algo */
        switch (calg) {
        /* Cover supported cipher algorithms */
        case RTE_CRYPTO_CIPHER_AES_CBC:
                sess->cipher.algo = calg;
                /* IV len is always 16 bytes (block size) for AES CBC */
                sess->cipher.iv.length = 16;
                break;
        default:
                return -EINVAL;
        }
        /* Select auth generate/verify */
        sess->auth.operation = auth_xform->auth.op;

        /* Select auth algo */
        switch (auth_xform->auth.algo) {
        /* Cover supported hash algorithms */
        case RTE_CRYPTO_AUTH_SHA1_HMAC:
        case RTE_CRYPTO_AUTH_SHA256_HMAC: /* Fall through */
                aalg = auth_xform->auth.algo;
                sess->auth.mode = ARMV8_CRYPTO_AUTH_AS_HMAC;
                break;
        default:
                return -EINVAL;
        }

        /* Set the digest length */
        sess->auth.digest_length = auth_xform->auth.digest_length;

        /* Verify supported key lengths and extract proper algorithm */
        switch (cipher_xform->cipher.key.length << 3) {
        case 128:
                sess->crypto_func =
                                CRYPTO_GET_ALGO(order, cop, calg, aalg, 128);
                sess->cipher.key_sched =
                                CRYPTO_GET_KEY_SCHED(cop, calg, 128);
                break;
        case 192:
        case 256:
                /* These key lengths are not supported yet */
        default: /* Fall through */
                sess->crypto_func = NULL;
                sess->cipher.key_sched = NULL;
                return -EINVAL;
        }

        if (unlikely(sess->crypto_func == NULL)) {
                /*
                 * If we got here, there must be a bug in the algorithm
                 * selection above. Nevertheless, keep this check to catch
                 * the bug immediately and avoid a NULL pointer dereference
                 * during op processing.
                 */
                ARMV8_CRYPTO_LOG_ERR(
                        "No appropriate crypto function for given parameters");
                return -EINVAL;
        }

        /* Set up cipher session prerequisites */
        if (cipher_set_prerequisites(sess, cipher_xform) != 0)
                return -EINVAL;

        /* Set up authentication session prerequisites */
        if (auth_set_prerequisites(sess, auth_xform) != 0)
                return -EINVAL;

        return 0;
}

/** Parse crypto xform chain and set private session parameters */
int
armv8_crypto_set_session_parameters(struct armv8_crypto_session *sess,
                const struct rte_crypto_sym_xform *xform)
{
        const struct rte_crypto_sym_xform *cipher_xform = NULL;
        const struct rte_crypto_sym_xform *auth_xform = NULL;
        bool is_chained_op;
        int ret;

        /* Filter out spurious/broken requests */
        if (xform == NULL)
                return -EINVAL;

        sess->chain_order = armv8_crypto_get_chain_order(xform);
        switch (sess->chain_order) {
        case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
                cipher_xform = xform;
                auth_xform = xform->next;
                is_chained_op = true;
                break;
        case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
                auth_xform = xform;
                cipher_xform = xform->next;
                is_chained_op = true;
                break;
        default:
                is_chained_op = false;
                return -EINVAL;
        }

        /* Set IV offset */
        sess->cipher.iv.offset = cipher_xform->cipher.iv.offset;

        if (is_chained_op) {
                ret = armv8_crypto_set_session_chained_parameters(sess,
                                                cipher_xform, auth_xform);
                if (unlikely(ret != 0)) {
                        ARMV8_CRYPTO_LOG_ERR(
                        "Invalid/unsupported chained (cipher/auth) parameters");
                        return -EINVAL;
                }
        } else {
                ARMV8_CRYPTO_LOG_ERR("Invalid/unsupported operation");
                return -EINVAL;
        }

        return 0;
}

/** Provide session for operation */
static inline struct armv8_crypto_session *
get_session(struct armv8_crypto_qp *qp, struct rte_crypto_op *op)
{
        struct armv8_crypto_session *sess = NULL;

        if (op->sess_type == RTE_CRYPTO_OP_WITH_SESSION) {
                /* get existing session */
                if (likely(op->sym->session != NULL &&
                                op->sym->session->dev_type ==
                                RTE_CRYPTODEV_ARMV8_PMD)) {
                        sess = (struct armv8_crypto_session *)
                                op->sym->session->_private;
                }
        } else {
                /* provide internal session */
                void *_sess = NULL;

                if (!rte_mempool_get(qp->sess_mp, (void **)&_sess)) {
                        sess = (struct armv8_crypto_session *)
                                ((struct rte_cryptodev_sym_session *)_sess)
                                ->_private;

                        if (unlikely(armv8_crypto_set_session_parameters(
                                        sess, op->sym->xform) != 0)) {
                                rte_mempool_put(qp->sess_mp, _sess);
                                sess = NULL;
                        } else
                                op->sym->session = _sess;
                }
        }

        if (unlikely(sess == NULL))
                op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;

        return sess;
}

/*
 *------------------------------------------------------------------------------
 * Process Operations
 *------------------------------------------------------------------------------
 */

/*----------------------------------------------------------------------------*/

/** Process chained (cipher/auth) operation */
static inline void
process_armv8_chained_op
                (struct rte_crypto_op *op, struct armv8_crypto_session *sess,
                struct rte_mbuf *mbuf_src, struct rte_mbuf *mbuf_dst)
{
        crypto_func_t crypto_func;
        crypto_arg_t arg;
        struct rte_mbuf *m_asrc, *m_adst;
        uint8_t *csrc, *cdst;
        uint8_t *adst, *asrc;
        uint64_t clen, alen;
        int error;

        clen = op->sym->cipher.data.length;
        alen = op->sym->auth.data.length;

        csrc = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *,
                        op->sym->cipher.data.offset);
        cdst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
                        op->sym->cipher.data.offset);

        switch (sess->chain_order) {
        case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
                m_asrc = m_adst = mbuf_dst;
                break;
        case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
                m_asrc = mbuf_src;
                m_adst = mbuf_dst;
                break;
        default:
                op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
                return;
        }
        asrc = rte_pktmbuf_mtod_offset(m_asrc, uint8_t *,
                                op->sym->auth.data.offset);

        switch (sess->auth.mode) {
        case ARMV8_CRYPTO_AUTH_AS_AUTH:
                /* Nothing to do here, just verify correct option */
                break;
        case ARMV8_CRYPTO_AUTH_AS_HMAC:
                arg.digest.hmac.key = sess->auth.hmac.key;
                arg.digest.hmac.i_key_pad = sess->auth.hmac.i_key_pad;
                arg.digest.hmac.o_key_pad = sess->auth.hmac.o_key_pad;
                break;
        default:
                op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
                return;
        }

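        /*
         * Digest handling (comment added for clarity): for digest generation
         * the result is written either to the location supplied in
         * op->sym->auth.digest.data or, if that is NULL, directly after the
         * authenticated data in the destination mbuf. For verification a
         * scratch area of digest_length bytes is appended to the source mbuf,
         * compared against the supplied digest below and trimmed afterwards.
         */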
        if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_GENERATE) {
                adst = op->sym->auth.digest.data;
                if (adst == NULL) {
                        adst = rte_pktmbuf_mtod_offset(m_adst,
                                        uint8_t *,
                                        op->sym->auth.data.offset +
                                        op->sym->auth.data.length);
                }
        } else {
                adst = (uint8_t *)rte_pktmbuf_append(m_asrc,
                                sess->auth.digest_length);
        }

        arg.cipher.iv = rte_crypto_op_ctod_offset(op, uint8_t *,
                                        sess->cipher.iv.offset);
        arg.cipher.key = sess->cipher.key.data;
        /* Acquire combined mode function */
        crypto_func = sess->crypto_func;
        ARMV8_CRYPTO_ASSERT(crypto_func != NULL);
        error = crypto_func(csrc, cdst, clen, asrc, adst, alen, &arg);
        if (error != 0) {
                op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
                return;
        }

        op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
        if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
                if (memcmp(adst, op->sym->auth.digest.data,
                                sess->auth.digest_length) != 0) {
                        op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
                }
                /* Trim area used for digest from mbuf. */
                rte_pktmbuf_trim(m_asrc,
                                sess->auth.digest_length);
        }
}

/** Process crypto operation for mbuf */
static inline int
process_op(const struct armv8_crypto_qp *qp, struct rte_crypto_op *op,
                struct armv8_crypto_session *sess)
{
        struct rte_mbuf *msrc, *mdst;

        msrc = op->sym->m_src;
        mdst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;

        op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;

        switch (sess->chain_order) {
        case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
        case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER: /* Fall through */
                process_armv8_chained_op(op, sess, msrc, mdst);
                break;
        default:
                op->status = RTE_CRYPTO_OP_STATUS_ERROR;
                break;
        }

        /* Free session if a session-less crypto op */
        if (op->sess_type == RTE_CRYPTO_OP_SESSIONLESS) {
                memset(sess, 0, sizeof(struct armv8_crypto_session));
                rte_mempool_put(qp->sess_mp, op->sym->session);
                op->sym->session = NULL;
        }

        if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
                op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;

        if (unlikely(op->status == RTE_CRYPTO_OP_STATUS_ERROR))
                return -1;

        return 0;
}

/*
 *------------------------------------------------------------------------------
 * PMD Framework
 *------------------------------------------------------------------------------
 */

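/*
 * Note (added for clarity): operations are processed synchronously at enqueue
 * time and the completed ops are stored in the queue pair's processed_ops
 * ring, from which the dequeue burst callback later retrieves them.
 */
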
/** Enqueue burst */
static uint16_t
armv8_crypto_pmd_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops,
                uint16_t nb_ops)
{
        struct armv8_crypto_session *sess;
        struct armv8_crypto_qp *qp = queue_pair;
        int i, retval;

        for (i = 0; i < nb_ops; i++) {
                sess = get_session(qp, ops[i]);
                if (unlikely(sess == NULL))
                        goto enqueue_err;

                retval = process_op(qp, ops[i], sess);
                if (unlikely(retval < 0))
                        goto enqueue_err;
        }

        retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i,
                        NULL);
        qp->stats.enqueued_count += retval;

        return retval;

enqueue_err:
        retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i,
                        NULL);
        if (ops[i] != NULL)
                ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;

        qp->stats.enqueue_err_count++;
        return retval;
}

/** Dequeue burst */
static uint16_t
armv8_crypto_pmd_dequeue_burst(void *queue_pair, struct rte_crypto_op **ops,
                uint16_t nb_ops)
{
        struct armv8_crypto_qp *qp = queue_pair;

        unsigned int nb_dequeued = 0;

        nb_dequeued = rte_ring_dequeue_burst(qp->processed_ops,
                        (void **)ops, nb_ops, NULL);
        qp->stats.dequeued_count += nb_dequeued;

        return nb_dequeued;
}

/** Create ARMv8 crypto device */
static int
cryptodev_armv8_crypto_create(const char *name,
                        struct rte_vdev_device *vdev,
                        struct rte_crypto_vdev_init_params *init_params)
{
        struct rte_cryptodev *dev;
        struct armv8_crypto_private *internals;

        /* Check CPU support for the AES instruction set */
        if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_AES)) {
                ARMV8_CRYPTO_LOG_ERR(
                        "AES instructions not supported by CPU");
                return -EFAULT;
        }

        /* Check CPU support for the SHA instruction set */
        if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA1) ||
            !rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA2)) {
                ARMV8_CRYPTO_LOG_ERR(
                        "SHA1/SHA2 instructions not supported by CPU");
                return -EFAULT;
        }

        /* Check CPU support for the Advanced SIMD instruction set */
        if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_NEON)) {
                ARMV8_CRYPTO_LOG_ERR(
                        "Advanced SIMD instructions not supported by CPU");
                return -EFAULT;
        }

        if (init_params->name[0] == '\0')
                snprintf(init_params->name, sizeof(init_params->name),
                                "%s", name);

        dev = rte_cryptodev_vdev_pmd_init(init_params->name,
                                sizeof(struct armv8_crypto_private),
                                init_params->socket_id,
                                vdev);
        if (dev == NULL) {
                ARMV8_CRYPTO_LOG_ERR("failed to create cryptodev vdev");
                goto init_error;
        }

        dev->dev_type = RTE_CRYPTODEV_ARMV8_PMD;
        dev->dev_ops = rte_armv8_crypto_pmd_ops;

        /* register rx/tx burst functions for data path */
        dev->dequeue_burst = armv8_crypto_pmd_dequeue_burst;
        dev->enqueue_burst = armv8_crypto_pmd_enqueue_burst;

        dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
                        RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING |
                        RTE_CRYPTODEV_FF_CPU_NEON |
                        RTE_CRYPTODEV_FF_CPU_ARM_CE;

        /* Set vector instructions mode supported */
        internals = dev->data->dev_private;

        internals->max_nb_qpairs = init_params->max_nb_queue_pairs;
        internals->max_nb_sessions = init_params->max_nb_sessions;

        return 0;

init_error:
        ARMV8_CRYPTO_LOG_ERR(
                "driver %s: cryptodev_armv8_crypto_create failed",
                init_params->name);

        cryptodev_armv8_crypto_uninit(vdev);
        return -EFAULT;
}

/** Initialise ARMv8 crypto device */
static int
cryptodev_armv8_crypto_init(struct rte_vdev_device *vdev)
{
        struct rte_crypto_vdev_init_params init_params = {
                RTE_CRYPTODEV_VDEV_DEFAULT_MAX_NB_QUEUE_PAIRS,
                RTE_CRYPTODEV_VDEV_DEFAULT_MAX_NB_SESSIONS,
                rte_socket_id(),
                {0}
        };
        const char *name;
        const char *input_args;

        name = rte_vdev_device_name(vdev);
        if (name == NULL)
                return -EINVAL;
        input_args = rte_vdev_device_args(vdev);
        rte_cryptodev_vdev_parse_init_params(&init_params, input_args);

        RTE_LOG(INFO, PMD, "Initialising %s on NUMA node %d\n", name,
                        init_params.socket_id);
        if (init_params.name[0] != '\0') {
                RTE_LOG(INFO, PMD, "  User defined name = %s\n",
                        init_params.name);
        }
        RTE_LOG(INFO, PMD, "  Max number of queue pairs = %d\n",
                        init_params.max_nb_queue_pairs);
        RTE_LOG(INFO, PMD, "  Max number of sessions = %d\n",
                        init_params.max_nb_sessions);

        return cryptodev_armv8_crypto_create(name, vdev, &init_params);
}

/** Uninitialise ARMv8 crypto device */
static int
cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev)
{
        const char *name;

        name = rte_vdev_device_name(vdev);
        if (name == NULL)
                return -EINVAL;

        RTE_LOG(INFO, PMD,
                "Closing ARMv8 crypto device %s on numa socket %u\n",
                name, rte_socket_id());

        return 0;
}

static struct rte_vdev_driver armv8_crypto_drv = {
        .probe = cryptodev_armv8_crypto_init,
        .remove = cryptodev_armv8_crypto_uninit
};

RTE_PMD_REGISTER_VDEV(CRYPTODEV_NAME_ARMV8_PMD, armv8_crypto_drv);
RTE_PMD_REGISTER_ALIAS(CRYPTODEV_NAME_ARMV8_PMD, cryptodev_armv8_pmd);
RTE_PMD_REGISTER_PARAM_STRING(CRYPTODEV_NAME_ARMV8_PMD,
        "max_nb_queue_pairs=<int> "
        "max_nb_sessions=<int> "
        "socket_id=<int>");
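
/*
 * Illustrative usage (not part of the original driver): the PMD can be
 * instantiated as a virtual device from the EAL command line, for example
 * (assuming the registered vdev name expands to "crypto_armv8"):
 *
 *   --vdev "crypto_armv8,max_nb_queue_pairs=2,max_nb_sessions=1024,socket_id=0"
 */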