cryptodev: remove crypto device type enumeration
[dpdk.git] drivers/crypto/armv8/rte_armv8_pmd.c
/*
 *   BSD LICENSE
 *
 *   Copyright (C) Cavium networks Ltd. 2017.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Cavium networks nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <stdbool.h>

#include <rte_common.h>
#include <rte_hexdump.h>
#include <rte_cryptodev.h>
#include <rte_cryptodev_pmd.h>
#include <rte_cryptodev_vdev.h>
#include <rte_vdev.h>
#include <rte_malloc.h>
#include <rte_cpuflags.h>

#include "armv8_crypto_defs.h"

#include "rte_armv8_pmd_private.h"

static uint8_t cryptodev_driver_id;

static int cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev);

/**
 * Pointers to the supported combined mode crypto functions are stored
 * in the static tables. Each combined (chained) cryptographic operation
 * can be described by a set of numbers:
 * - order:     order of operations (cipher, auth) or (auth, cipher)
 * - direction: encryption or decryption
 * - calg:      cipher algorithm such as AES_CBC, AES_CTR, etc.
 * - aalg:      authentication algorithm such as SHA1, SHA256, etc.
 * - keyl:      cipher key length, for example 128, 192, 256 bits
 *
 * In order to quickly acquire each function pointer based on those numbers,
 * a hierarchy of arrays is maintained. The final level, a 3D array, is
 * indexed by the combined mode function parameters only (cipher algorithm,
 * authentication algorithm and key length).
 *
 * This gives 3 memory accesses to obtain a function pointer instead of
 * traversing the array manually and comparing function parameters on each
 * iteration.
 *
 *                   +--+CRYPTO_FUNC
 *            +--+ENC|
 *      +--+CA|
 *      |     +--+DEC
 * ORDER|
 *      |     +--+ENC
 *      +--+AC|
 *            +--+DEC
 *
 */

/**
 * 3D array type for ARM Combined Mode crypto function pointers.
 * CRYPTO_CIPHER_MAX:                   max cipher ID number
 * CRYPTO_AUTH_MAX:                     max auth ID number
 * CRYPTO_CIPHER_KEYLEN_MAX:            max key length ID number
 */
typedef const crypto_func_t
crypto_func_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_AUTH_MAX][CRYPTO_CIPHER_KEYLEN_MAX];

/* Evaluate to key length definition */
#define KEYL(keyl)              (ARMV8_CRYPTO_CIPHER_KEYLEN_ ## keyl)

/* Local aliases for supported ciphers */
#define CIPH_AES_CBC            RTE_CRYPTO_CIPHER_AES_CBC
/* Local aliases for supported hashes */
#define AUTH_SHA1_HMAC          RTE_CRYPTO_AUTH_SHA1_HMAC
#define AUTH_SHA256_HMAC        RTE_CRYPTO_AUTH_SHA256_HMAC

/**
 * Arrays containing pointers to particular cryptographic,
 * combined mode functions.
 * crypto_op_ca_encrypt:        cipher (encrypt), authenticate
 * crypto_op_ca_decrypt:        cipher (decrypt), authenticate
 * crypto_op_ac_encrypt:        authenticate, cipher (encrypt)
 * crypto_op_ac_decrypt:        authenticate, cipher (decrypt)
 */
static const crypto_func_tbl_t
crypto_op_ca_encrypt = {
        /* [cipher alg][auth alg][key length] = crypto_function, */
        [CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] = aes128cbc_sha1_hmac,
        [CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] = aes128cbc_sha256_hmac,
};

static const crypto_func_tbl_t
crypto_op_ca_decrypt = {
        NULL
};

static const crypto_func_tbl_t
crypto_op_ac_encrypt = {
        NULL
};

static const crypto_func_tbl_t
crypto_op_ac_decrypt = {
        /* [cipher alg][auth alg][key length] = crypto_function, */
        [CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] = sha1_hmac_aes128cbc_dec,
        [CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] = sha256_hmac_aes128cbc_dec,
};

/**
 * Arrays containing pointers to particular cryptographic function sets,
 * covering given cipher operation directions (encrypt, decrypt)
 * for each order of cipher and authentication pairs.
 */
static const crypto_func_tbl_t *
crypto_cipher_auth[] = {
        &crypto_op_ca_encrypt,
        &crypto_op_ca_decrypt,
        NULL
};

static const crypto_func_tbl_t *
crypto_auth_cipher[] = {
        &crypto_op_ac_encrypt,
        &crypto_op_ac_decrypt,
        NULL
};

/**
 * Top level array containing pointers to particular cryptographic
 * function sets, covering given order of chained operations.
 * crypto_cipher_auth:  cipher first, authenticate after
 * crypto_auth_cipher:  authenticate first, cipher after
 */
static const crypto_func_tbl_t **
crypto_chain_order[] = {
        crypto_cipher_auth,
        crypto_auth_cipher,
        NULL
};

/**
 * Extract particular combined mode crypto function from the 3D array.
 */
#define CRYPTO_GET_ALGO(order, cop, calg, aalg, keyl)                   \
({                                                                      \
        crypto_func_tbl_t *func_tbl =                                   \
                                (crypto_chain_order[(order)])[(cop)];   \
                                                                        \
        ((*func_tbl)[(calg)][(aalg)][KEYL(keyl)]);              \
})
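
/*
 * Illustrative example (assumes the enum values in rte_armv8_pmd_private.h
 * follow the table layout above): for a cipher-then-authenticate chain
 * performing AES-128-CBC encryption with SHA1-HMAC,
 *   CRYPTO_GET_ALGO(ARMV8_CRYPTO_CHAIN_CIPHER_AUTH,
 *                   RTE_CRYPTO_CIPHER_OP_ENCRYPT,
 *                   CIPH_AES_CBC, AUTH_SHA1_HMAC, 128)
 * resolves to aes128cbc_sha1_hmac from crypto_op_ca_encrypt.
 */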

/*----------------------------------------------------------------------------*/

/**
 * 2D array type for ARM key schedule function pointers.
 * CRYPTO_CIPHER_MAX:                   max cipher ID number
 * CRYPTO_CIPHER_KEYLEN_MAX:            max key length ID number
 */
typedef const crypto_key_sched_t
crypto_key_sched_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_CIPHER_KEYLEN_MAX];

static const crypto_key_sched_tbl_t
crypto_key_sched_encrypt = {
        /* [cipher alg][key length] = key_expand_func, */
        [CIPH_AES_CBC][KEYL(128)] = aes128_key_sched_enc,
};

static const crypto_key_sched_tbl_t
crypto_key_sched_decrypt = {
        /* [cipher alg][key length] = key_expand_func, */
        [CIPH_AES_CBC][KEYL(128)] = aes128_key_sched_dec,
};

/**
 * Top level array containing pointers to particular key generation
 * function sets, covering given operation direction.
 * crypto_key_sched_encrypt:    keys for encryption
 * crypto_key_sched_decrypt:    keys for decryption
 */
static const crypto_key_sched_tbl_t *
crypto_key_sched_dir[] = {
        &crypto_key_sched_encrypt,
        &crypto_key_sched_decrypt,
        NULL
};

/**
 * Extract the key schedule function for the given cipher from the 2D array.
 */
#define CRYPTO_GET_KEY_SCHED(cop, calg, keyl)                           \
({                                                                      \
        crypto_key_sched_tbl_t *ks_tbl = crypto_key_sched_dir[(cop)];   \
                                                                        \
        ((*ks_tbl)[(calg)][KEYL(keyl)]);                                \
})
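
/*
 * Illustrative example (assuming RTE_CRYPTO_CIPHER_OP_ENCRYPT indexes the
 * encrypt table):
 *   CRYPTO_GET_KEY_SCHED(RTE_CRYPTO_CIPHER_OP_ENCRYPT, CIPH_AES_CBC, 128)
 * resolves to aes128_key_sched_enc from crypto_key_sched_encrypt.
 */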

/*----------------------------------------------------------------------------*/

/*
 *------------------------------------------------------------------------------
 * Session Prepare
 *------------------------------------------------------------------------------
 */

/** Get xform chain order */
static enum armv8_crypto_chain_order
armv8_crypto_get_chain_order(const struct rte_crypto_sym_xform *xform)
{

        /*
         * This driver currently covers only chained operations.
         * Ignore cipher-only or authentication-only operations
         * and chains longer than 2 xform structures.
         */
        if (xform->next == NULL || xform->next->next != NULL)
                return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;

        if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
                if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
                        return ARMV8_CRYPTO_CHAIN_AUTH_CIPHER;
        }

        if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
                if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
                        return ARMV8_CRYPTO_CHAIN_CIPHER_AUTH;
        }

        return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;
}

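/** Prepare HMAC i_key_pad and o_key_pad from the session authentication key */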
static inline void
auth_hmac_pad_prepare(struct armv8_crypto_session *sess,
                                const struct rte_crypto_sym_xform *xform)
{
        size_t i;

        /* Generate i_key_pad and o_key_pad */
        memset(sess->auth.hmac.i_key_pad, 0, sizeof(sess->auth.hmac.i_key_pad));
        rte_memcpy(sess->auth.hmac.i_key_pad, sess->auth.hmac.key,
                                                        xform->auth.key.length);
        memset(sess->auth.hmac.o_key_pad, 0, sizeof(sess->auth.hmac.o_key_pad));
        rte_memcpy(sess->auth.hmac.o_key_pad, sess->auth.hmac.key,
                                                        xform->auth.key.length);
        /*
         * XOR the key with the IPAD/OPAD values to obtain i_key_pad
         * and o_key_pad.
         * A byte-by-byte operation may seem less efficient here, but in
         * fact it is the opposite: the resulting assembly code is likely
         * to operate on NEON registers (load the auth key into Qx, load
         * IPAD/OPAD into multiple elements of Qy, then EOR 128 bits at
         * once).
         */
        for (i = 0; i < SHA_BLOCK_MAX; i++) {
                sess->auth.hmac.i_key_pad[i] ^= HMAC_IPAD_VALUE;
                sess->auth.hmac.o_key_pad[i] ^= HMAC_OPAD_VALUE;
        }
}

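/**
 * Set up authentication prerequisites: derive the session HMAC key
 * (hashed down with SHA1/SHA256 when it exceeds SHA*_AUTH_KEY_LENGTH) and
 * precompute the partial hash states of (key XOR ipad) and (key XOR opad),
 * stored back in i_key_pad/o_key_pad as the initialization state for the
 * final HMAC.
 */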
static inline int
auth_set_prerequisites(struct armv8_crypto_session *sess,
                        const struct rte_crypto_sym_xform *xform)
{
        uint8_t partial[64] = { 0 };
        int error;

        switch (xform->auth.algo) {
        case RTE_CRYPTO_AUTH_SHA1_HMAC:
                /*
                 * Generate authentication key, i_key_pad and o_key_pad.
                 */
                /* Zero memory under key */
                memset(sess->auth.hmac.key, 0, SHA1_AUTH_KEY_LENGTH);

                if (xform->auth.key.length > SHA1_AUTH_KEY_LENGTH) {
                        /*
                         * In case the key is longer than 160 bits
                         * the algorithm will use SHA1(key) instead.
                         */
                        error = sha1_block(NULL, xform->auth.key.data,
                                sess->auth.hmac.key, xform->auth.key.length);
                        if (error != 0)
                                return -1;
                } else {
                        /*
                         * Copy the given authentication key to the session
                         * key. Since the session key was zeroed above, no
                         * additional zero padding is needed if the key is
                         * shorter than SHA1_AUTH_KEY_LENGTH.
                         */
                        rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
                                                        xform->auth.key.length);
                }

                /* Prepare HMAC padding: key|pattern */
                auth_hmac_pad_prepare(sess, xform);
                /*
                 * Calculate partial hash values for i_key_pad and o_key_pad.
                 * They will be used as the initialization state for the
                 * final HMAC.
                 */
                error = sha1_block_partial(NULL, sess->auth.hmac.i_key_pad,
                    partial, SHA1_BLOCK_SIZE);
                if (error != 0)
                        return -1;
                memcpy(sess->auth.hmac.i_key_pad, partial, SHA1_BLOCK_SIZE);

                error = sha1_block_partial(NULL, sess->auth.hmac.o_key_pad,
                    partial, SHA1_BLOCK_SIZE);
                if (error != 0)
                        return -1;
                memcpy(sess->auth.hmac.o_key_pad, partial, SHA1_BLOCK_SIZE);

                break;
        case RTE_CRYPTO_AUTH_SHA256_HMAC:
                /*
                 * Generate authentication key, i_key_pad and o_key_pad.
                 */
                /* Zero memory under key */
                memset(sess->auth.hmac.key, 0, SHA256_AUTH_KEY_LENGTH);

                if (xform->auth.key.length > SHA256_AUTH_KEY_LENGTH) {
                        /*
                         * In case the key is longer than 256 bits
                         * the algorithm will use SHA256(key) instead.
                         */
                        error = sha256_block(NULL, xform->auth.key.data,
                                sess->auth.hmac.key, xform->auth.key.length);
                        if (error != 0)
                                return -1;
                } else {
                        /*
                         * Copy the given authentication key to the session
                         * key. Since the session key was zeroed above, no
                         * additional zero padding is needed if the key is
                         * shorter than SHA256_AUTH_KEY_LENGTH.
                         */
                        rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
                                                        xform->auth.key.length);
                }

                /* Prepare HMAC padding: key|pattern */
                auth_hmac_pad_prepare(sess, xform);
                /*
                 * Calculate partial hash values for i_key_pad and o_key_pad.
                 * They will be used as the initialization state for the
                 * final HMAC.
                 */
                error = sha256_block_partial(NULL, sess->auth.hmac.i_key_pad,
                    partial, SHA256_BLOCK_SIZE);
                if (error != 0)
                        return -1;
                memcpy(sess->auth.hmac.i_key_pad, partial, SHA256_BLOCK_SIZE);

                error = sha256_block_partial(NULL, sess->auth.hmac.o_key_pad,
                    partial, SHA256_BLOCK_SIZE);
                if (error != 0)
                        return -1;
                memcpy(sess->auth.hmac.o_key_pad, partial, SHA256_BLOCK_SIZE);

                break;
        default:
                break;
        }

        return 0;
}

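/** Expand the cipher key using the key schedule selected for the session */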
static inline int
cipher_set_prerequisites(struct armv8_crypto_session *sess,
                        const struct rte_crypto_sym_xform *xform)
{
        crypto_key_sched_t cipher_key_sched;

        cipher_key_sched = sess->cipher.key_sched;
        if (likely(cipher_key_sched != NULL)) {
                /* Set up cipher session key */
                cipher_key_sched(sess->cipher.key.data, xform->cipher.key.data);
        }

        return 0;
}

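/** Set session parameters for a chained (cipher + auth) operation */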
static int
armv8_crypto_set_session_chained_parameters(struct armv8_crypto_session *sess,
                const struct rte_crypto_sym_xform *cipher_xform,
                const struct rte_crypto_sym_xform *auth_xform)
{
        enum armv8_crypto_chain_order order;
        enum armv8_crypto_cipher_operation cop;
        enum rte_crypto_cipher_algorithm calg;
        enum rte_crypto_auth_algorithm aalg;

        /* Validate and prepare scratch order of combined operations */
        switch (sess->chain_order) {
        case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
        case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
                order = sess->chain_order;
                break;
        default:
                return -EINVAL;
        }
        /* Select cipher direction */
        sess->cipher.direction = cipher_xform->cipher.op;
        /* Set cipher key length */
        sess->cipher.key.length = cipher_xform->cipher.key.length;
        /* Set cipher direction */
        cop = sess->cipher.direction;
        /* Set cipher algorithm */
        calg = cipher_xform->cipher.algo;

        /* Select cipher algo */
        switch (calg) {
        /* Cover supported cipher algorithms */
        case RTE_CRYPTO_CIPHER_AES_CBC:
                sess->cipher.algo = calg;
                /* IV len is always 16 bytes (block size) for AES CBC */
                sess->cipher.iv.length = 16;
                break;
        default:
                return -EINVAL;
        }
        /* Select auth generate/verify */
        sess->auth.operation = auth_xform->auth.op;

        /* Select auth algo */
        switch (auth_xform->auth.algo) {
        /* Cover supported hash algorithms */
        case RTE_CRYPTO_AUTH_SHA1_HMAC:
        case RTE_CRYPTO_AUTH_SHA256_HMAC: /* Fall through */
                aalg = auth_xform->auth.algo;
                sess->auth.mode = ARMV8_CRYPTO_AUTH_AS_HMAC;
                break;
        default:
                return -EINVAL;
        }

        /* Set the digest length */
        sess->auth.digest_length = auth_xform->auth.digest_length;

        /* Verify supported key lengths and extract proper algorithm */
        switch (cipher_xform->cipher.key.length << 3) {
        case 128:
                sess->crypto_func =
                                CRYPTO_GET_ALGO(order, cop, calg, aalg, 128);
                sess->cipher.key_sched =
                                CRYPTO_GET_KEY_SCHED(cop, calg, 128);
                break;
        case 192:
        case 256:
                /* These key lengths are not supported yet */
        default: /* Fall through */
                sess->crypto_func = NULL;
                sess->cipher.key_sched = NULL;
                return -EINVAL;
        }

        if (unlikely(sess->crypto_func == NULL)) {
                /*
                 * If we got here, there must be a bug in the algorithm
                 * selection above. Keep this check anyway to catch such a
                 * bug immediately and avoid a NULL pointer dereference
                 * during op processing.
                 */
                ARMV8_CRYPTO_LOG_ERR(
                        "No appropriate crypto function for given parameters");
                return -EINVAL;
        }

        /* Set up cipher session prerequisites */
        if (cipher_set_prerequisites(sess, cipher_xform) != 0)
                return -EINVAL;

        /* Set up authentication session prerequisites */
        if (auth_set_prerequisites(sess, auth_xform) != 0)
                return -EINVAL;

        return 0;
}

/** Parse crypto xform chain and set private session parameters */
int
armv8_crypto_set_session_parameters(struct armv8_crypto_session *sess,
                const struct rte_crypto_sym_xform *xform)
{
        const struct rte_crypto_sym_xform *cipher_xform = NULL;
        const struct rte_crypto_sym_xform *auth_xform = NULL;
        bool is_chained_op;
        int ret;

        /* Filter out spurious/broken requests */
        if (xform == NULL)
                return -EINVAL;

        sess->chain_order = armv8_crypto_get_chain_order(xform);
        switch (sess->chain_order) {
        case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
                cipher_xform = xform;
                auth_xform = xform->next;
                is_chained_op = true;
                break;
        case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
                auth_xform = xform;
                cipher_xform = xform->next;
                is_chained_op = true;
                break;
        default:
                is_chained_op = false;
                return -EINVAL;
        }

        /* Set IV offset */
        sess->cipher.iv.offset = cipher_xform->cipher.iv.offset;

        if (is_chained_op) {
                ret = armv8_crypto_set_session_chained_parameters(sess,
                                                cipher_xform, auth_xform);
                if (unlikely(ret != 0)) {
                        ARMV8_CRYPTO_LOG_ERR(
                        "Invalid/unsupported chained (cipher/auth) parameters");
                        return -EINVAL;
                }
        } else {
                ARMV8_CRYPTO_LOG_ERR("Invalid/unsupported operation");
                return -EINVAL;
        }

        return 0;
}

/** Provide session for operation */
static inline struct armv8_crypto_session *
get_session(struct armv8_crypto_qp *qp, struct rte_crypto_op *op)
{
        struct armv8_crypto_session *sess = NULL;

        if (op->sess_type == RTE_CRYPTO_OP_WITH_SESSION) {
                /* get existing session */
                if (likely(op->sym->session != NULL &&
                                op->sym->session->driver_id ==
                                cryptodev_driver_id)) {
                        sess = (struct armv8_crypto_session *)
                                op->sym->session->_private;
                }
        } else {
                /* provide internal session */
                void *_sess = NULL;

                if (!rte_mempool_get(qp->sess_mp, (void **)&_sess)) {
                        sess = (struct armv8_crypto_session *)
                                ((struct rte_cryptodev_sym_session *)_sess)
                                ->_private;

                        if (unlikely(armv8_crypto_set_session_parameters(
                                        sess, op->sym->xform) != 0)) {
                                rte_mempool_put(qp->sess_mp, _sess);
                                sess = NULL;
                        } else
                                op->sym->session = _sess;
                }
        }

        if (unlikely(sess == NULL))
                op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;

        return sess;
}

/*
 *------------------------------------------------------------------------------
 * Process Operations
 *------------------------------------------------------------------------------
 */

/*----------------------------------------------------------------------------*/

/** Process chained (cipher + auth) operation */
static inline void
process_armv8_chained_op
                (struct rte_crypto_op *op, struct armv8_crypto_session *sess,
                struct rte_mbuf *mbuf_src, struct rte_mbuf *mbuf_dst)
{
        crypto_func_t crypto_func;
        crypto_arg_t arg;
        struct rte_mbuf *m_asrc, *m_adst;
        uint8_t *csrc, *cdst;
        uint8_t *adst, *asrc;
        uint64_t clen, alen;
        int error;

        clen = op->sym->cipher.data.length;
        alen = op->sym->auth.data.length;

        csrc = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *,
                        op->sym->cipher.data.offset);
        cdst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
                        op->sym->cipher.data.offset);

        switch (sess->chain_order) {
        case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
                m_asrc = m_adst = mbuf_dst;
                break;
        case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
                m_asrc = mbuf_src;
                m_adst = mbuf_dst;
                break;
        default:
                op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
                return;
        }
        asrc = rte_pktmbuf_mtod_offset(m_asrc, uint8_t *,
                                op->sym->auth.data.offset);

        switch (sess->auth.mode) {
        case ARMV8_CRYPTO_AUTH_AS_AUTH:
                /* Nothing to do here, just verify correct option */
                break;
        case ARMV8_CRYPTO_AUTH_AS_HMAC:
                arg.digest.hmac.key = sess->auth.hmac.key;
                arg.digest.hmac.i_key_pad = sess->auth.hmac.i_key_pad;
                arg.digest.hmac.o_key_pad = sess->auth.hmac.o_key_pad;
                break;
        default:
                op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
                return;
        }

        if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_GENERATE) {
                adst = op->sym->auth.digest.data;
                if (adst == NULL) {
                        adst = rte_pktmbuf_mtod_offset(m_adst,
                                        uint8_t *,
                                        op->sym->auth.data.offset +
                                        op->sym->auth.data.length);
                }
        } else {
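                /*
                 * For verification, append scratch space at the tail of the
                 * auth source mbuf to hold the computed digest; it is
                 * compared against the reference digest and trimmed again
                 * below.
                 */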
                adst = (uint8_t *)rte_pktmbuf_append(m_asrc,
                                sess->auth.digest_length);
        }

        arg.cipher.iv = rte_crypto_op_ctod_offset(op, uint8_t *,
                                        sess->cipher.iv.offset);
        arg.cipher.key = sess->cipher.key.data;
        /* Acquire combined mode function */
        crypto_func = sess->crypto_func;
        ARMV8_CRYPTO_ASSERT(crypto_func != NULL);
        error = crypto_func(csrc, cdst, clen, asrc, adst, alen, &arg);
        if (error != 0) {
                op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
                return;
        }

        op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
        if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
                if (memcmp(adst, op->sym->auth.digest.data,
                                sess->auth.digest_length) != 0) {
                        op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
                }
                /* Trim area used for digest from mbuf. */
                rte_pktmbuf_trim(m_asrc,
                                sess->auth.digest_length);
        }
}

/** Process crypto operation for mbuf */
static inline int
process_op(const struct armv8_crypto_qp *qp, struct rte_crypto_op *op,
                struct armv8_crypto_session *sess)
{
        struct rte_mbuf *msrc, *mdst;

        msrc = op->sym->m_src;
        mdst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;

        op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;

        switch (sess->chain_order) {
        case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
        case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER: /* Fall through */
                process_armv8_chained_op(op, sess, msrc, mdst);
                break;
        default:
                op->status = RTE_CRYPTO_OP_STATUS_ERROR;
                break;
        }

        /* Free session if a session-less crypto op */
        if (op->sess_type == RTE_CRYPTO_OP_SESSIONLESS) {
                memset(sess, 0, sizeof(struct armv8_crypto_session));
                rte_mempool_put(qp->sess_mp, op->sym->session);
                op->sym->session = NULL;
        }

        if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
                op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;

        if (unlikely(op->status == RTE_CRYPTO_OP_STATUS_ERROR))
                return -1;

        return 0;
}

/*
 *------------------------------------------------------------------------------
 * PMD Framework
 *------------------------------------------------------------------------------
 */

/** Enqueue burst */
static uint16_t
armv8_crypto_pmd_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops,
                uint16_t nb_ops)
{
        struct armv8_crypto_session *sess;
        struct armv8_crypto_qp *qp = queue_pair;
        int i, retval;

        for (i = 0; i < nb_ops; i++) {
                sess = get_session(qp, ops[i]);
                if (unlikely(sess == NULL))
                        goto enqueue_err;

                retval = process_op(qp, ops[i], sess);
                if (unlikely(retval < 0))
                        goto enqueue_err;
        }

        retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i,
                        NULL);
        qp->stats.enqueued_count += retval;

        return retval;

enqueue_err:
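        /*
         * Partial failure: the ops processed so far (indices [0, i)) are
         * still flushed to the processed ring; the op that failed is marked
         * invalid and the caller sees a shortened enqueue count.
         */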
        retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i,
                        NULL);
        if (ops[i] != NULL)
                ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;

        qp->stats.enqueue_err_count++;
        return retval;
}

/** Dequeue burst */
static uint16_t
armv8_crypto_pmd_dequeue_burst(void *queue_pair, struct rte_crypto_op **ops,
                uint16_t nb_ops)
{
        struct armv8_crypto_qp *qp = queue_pair;

        unsigned int nb_dequeued = 0;

        nb_dequeued = rte_ring_dequeue_burst(qp->processed_ops,
                        (void **)ops, nb_ops, NULL);
        qp->stats.dequeued_count += nb_dequeued;

        return nb_dequeued;
}

/** Create ARMv8 crypto device */
static int
cryptodev_armv8_crypto_create(const char *name,
                        struct rte_vdev_device *vdev,
                        struct rte_crypto_vdev_init_params *init_params)
{
        struct rte_cryptodev *dev;
        struct armv8_crypto_private *internals;

        /* Check CPU for support for AES instruction set */
        if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_AES)) {
                ARMV8_CRYPTO_LOG_ERR(
                        "AES instructions not supported by CPU");
                return -EFAULT;
        }

        /* Check CPU for support for SHA instruction set */
        if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA1) ||
            !rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA2)) {
                ARMV8_CRYPTO_LOG_ERR(
                        "SHA1/SHA2 instructions not supported by CPU");
                return -EFAULT;
        }

        /* Check CPU for support for Advanced SIMD instruction set */
        if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_NEON)) {
                ARMV8_CRYPTO_LOG_ERR(
                        "Advanced SIMD instructions not supported by CPU");
                return -EFAULT;
        }

        if (init_params->name[0] == '\0')
                snprintf(init_params->name, sizeof(init_params->name),
                                "%s", name);

        dev = rte_cryptodev_vdev_pmd_init(init_params->name,
                                sizeof(struct armv8_crypto_private),
                                init_params->socket_id,
                                vdev);
        if (dev == NULL) {
                ARMV8_CRYPTO_LOG_ERR("failed to create cryptodev vdev");
                goto init_error;
        }

        dev->driver_id = cryptodev_driver_id;
        dev->dev_ops = rte_armv8_crypto_pmd_ops;

        /* register rx/tx burst functions for data path */
        dev->dequeue_burst = armv8_crypto_pmd_dequeue_burst;
        dev->enqueue_burst = armv8_crypto_pmd_enqueue_burst;

        dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
                        RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING |
                        RTE_CRYPTODEV_FF_CPU_NEON |
                        RTE_CRYPTODEV_FF_CPU_ARM_CE;

        /* Set vector instructions mode supported */
        internals = dev->data->dev_private;

        internals->max_nb_qpairs = init_params->max_nb_queue_pairs;
        internals->max_nb_sessions = init_params->max_nb_sessions;

        return 0;

init_error:
        ARMV8_CRYPTO_LOG_ERR(
                "driver %s: cryptodev_armv8_crypto_create failed",
                init_params->name);

        cryptodev_armv8_crypto_uninit(vdev);
        return -EFAULT;
}

/** Initialise ARMv8 crypto device */
static int
cryptodev_armv8_crypto_init(struct rte_vdev_device *vdev)
{
        struct rte_crypto_vdev_init_params init_params = {
                RTE_CRYPTODEV_VDEV_DEFAULT_MAX_NB_QUEUE_PAIRS,
                RTE_CRYPTODEV_VDEV_DEFAULT_MAX_NB_SESSIONS,
                rte_socket_id(),
                {0}
        };
        const char *name;
        const char *input_args;

        name = rte_vdev_device_name(vdev);
        if (name == NULL)
                return -EINVAL;
        input_args = rte_vdev_device_args(vdev);
        rte_cryptodev_vdev_parse_init_params(&init_params, input_args);

        RTE_LOG(INFO, PMD, "Initialising %s on NUMA node %d\n", name,
                        init_params.socket_id);
        if (init_params.name[0] != '\0') {
                RTE_LOG(INFO, PMD, "  User defined name = %s\n",
                        init_params.name);
        }
        RTE_LOG(INFO, PMD, "  Max number of queue pairs = %d\n",
                        init_params.max_nb_queue_pairs);
        RTE_LOG(INFO, PMD, "  Max number of sessions = %d\n",
                        init_params.max_nb_sessions);

        return cryptodev_armv8_crypto_create(name, vdev, &init_params);
}

/** Uninitialise ARMv8 crypto device */
static int
cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev)
{
        const char *name;

        name = rte_vdev_device_name(vdev);
        if (name == NULL)
                return -EINVAL;

        RTE_LOG(INFO, PMD,
                "Closing ARMv8 crypto device %s on numa socket %u\n",
                name, rte_socket_id());

        return 0;
}

static struct rte_vdev_driver armv8_crypto_drv = {
        .probe = cryptodev_armv8_crypto_init,
        .remove = cryptodev_armv8_crypto_uninit
};

RTE_PMD_REGISTER_VDEV(CRYPTODEV_NAME_ARMV8_PMD, armv8_crypto_drv);
RTE_PMD_REGISTER_ALIAS(CRYPTODEV_NAME_ARMV8_PMD, cryptodev_armv8_pmd);
RTE_PMD_REGISTER_PARAM_STRING(CRYPTODEV_NAME_ARMV8_PMD,
        "max_nb_queue_pairs=<int> "
        "max_nb_sessions=<int> "
        "socket_id=<int>");
RTE_PMD_REGISTER_CRYPTO_DRIVER(armv8_crypto_drv, cryptodev_driver_id);
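
/*
 * Usage note (assuming CRYPTODEV_NAME_ARMV8_PMD expands to "crypto_armv8"):
 * the PMD can be instantiated from the EAL command line with the parameters
 * registered above, e.g.:
 *   --vdev "crypto_armv8,max_nb_queue_pairs=8,max_nb_sessions=2048,socket_id=0"
 */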