/*
 * Copyright (C) Cavium networks Ltd. 2017.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in
 *     the documentation and/or other materials provided with the
 *     distribution.
 *   * Neither the name of Cavium networks nor the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <stdbool.h>

#include <rte_common.h>
#include <rte_hexdump.h>
#include <rte_cryptodev.h>
#include <rte_cryptodev_pmd.h>
#include <rte_cryptodev_vdev.h>
#include <rte_vdev.h>
#include <rte_malloc.h>
#include <rte_cpuflags.h>

#include "armv8_crypto_defs.h"

#include "rte_armv8_pmd_private.h"

static int cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev);

/**
 * Pointers to the supported combined mode crypto functions are stored
 * in the static tables. Each combined (chained) cryptographic operation
 * can be described by a set of numbers:
 * - order:	order of operations (cipher, auth) or (auth, cipher)
 * - direction:	encryption or decryption
 * - calg:	cipher algorithm such as AES_CBC, AES_CTR, etc.
 * - aalg:	authentication algorithm such as SHA1, SHA256, etc.
 * - keyl:	cipher key length, for example 128, 192 or 256 bits
 *
 * In order to quickly acquire each function pointer based on those numbers,
 * a hierarchy of arrays is maintained. The final level, a 3D array, is indexed
 * by the combined mode function parameters only (cipher algorithm,
 * authentication algorithm and key length).
 *
 * This gives 3 memory accesses to obtain a function pointer, instead of
 * traversing the array manually and comparing function parameters on each
 * iteration.
 */
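
/*
 * Example lookup (illustration only): for a cipher-then-auth chain doing
 * AES-128-CBC encryption with SHA1-HMAC, the hierarchy below resolves as
 *
 *	crypto_chain_order[ARMV8_CRYPTO_CHAIN_CIPHER_AUTH]  -> crypto_cipher_auth
 *	crypto_cipher_auth[<encrypt direction>]             -> &crypto_op_ca_encrypt
 *	crypto_op_ca_encrypt[CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)]
 *	                                                    -> aes128cbc_sha1_hmac
 *
 * which is exactly what the CRYPTO_GET_ALGO() macro below evaluates to.
 */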

/**
 * 3D array type for ARM Combined Mode crypto function pointers.
 * CRYPTO_CIPHER_MAX:		max cipher ID number
 * CRYPTO_AUTH_MAX:		max auth ID number
 * CRYPTO_CIPHER_KEYLEN_MAX:	max key length ID number
 */
typedef const crypto_func_t
crypto_func_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_AUTH_MAX][CRYPTO_CIPHER_KEYLEN_MAX];

/* Evaluate to key length definition */
#define KEYL(keyl)		(ARMV8_CRYPTO_CIPHER_KEYLEN_ ## keyl)

/* Local aliases for supported ciphers */
#define CIPH_AES_CBC		RTE_CRYPTO_CIPHER_AES_CBC
/* Local aliases for supported hashes */
#define AUTH_SHA1_HMAC		RTE_CRYPTO_AUTH_SHA1_HMAC
#define AUTH_SHA256_HMAC	RTE_CRYPTO_AUTH_SHA256_HMAC

/**
 * Arrays containing pointers to particular cryptographic
 * combined mode functions.
 * crypto_op_ca_encrypt:	cipher (encrypt), authenticate
 * crypto_op_ca_decrypt:	cipher (decrypt), authenticate
 * crypto_op_ac_encrypt:	authenticate, cipher (encrypt)
 * crypto_op_ac_decrypt:	authenticate, cipher (decrypt)
 */
static const crypto_func_tbl_t
crypto_op_ca_encrypt = {
	/* [cipher alg][auth alg][key length] = crypto_function, */
	[CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] = aes128cbc_sha1_hmac,
	[CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] = aes128cbc_sha256_hmac,
};

static const crypto_func_tbl_t
crypto_op_ca_decrypt = {
	NULL
};

static const crypto_func_tbl_t
crypto_op_ac_encrypt = {
	NULL
};

static const crypto_func_tbl_t
crypto_op_ac_decrypt = {
	/* [cipher alg][auth alg][key length] = crypto_function, */
	[CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] = sha1_hmac_aes128cbc_dec,
	[CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] = sha256_hmac_aes128cbc_dec,
};

/**
 * Arrays containing pointers to particular cryptographic function sets,
 * covering given cipher operation directions (encrypt, decrypt)
 * for each order of cipher and authentication pairs.
 */
static const crypto_func_tbl_t *
crypto_cipher_auth[] = {
	&crypto_op_ca_encrypt,
	&crypto_op_ca_decrypt,
	NULL
};

static const crypto_func_tbl_t *
crypto_auth_cipher[] = {
	&crypto_op_ac_encrypt,
	&crypto_op_ac_decrypt,
	NULL
};

/**
 * Top level array containing pointers to particular cryptographic
 * function sets, covering given order of chained operations.
 * crypto_cipher_auth:	cipher first, authenticate after
 * crypto_auth_cipher:	authenticate first, cipher after
 */
static const crypto_func_tbl_t **
crypto_chain_order[] = {
	crypto_cipher_auth,
	crypto_auth_cipher,
	NULL
};

/**
 * Extract particular combined mode crypto function from the 3D array.
 */
#define CRYPTO_GET_ALGO(order, cop, calg, aalg, keyl)			\
({									\
	crypto_func_tbl_t *func_tbl =					\
				(crypto_chain_order[(order)])[(cop)];	\
									\
	((*func_tbl)[(calg)][(aalg)][KEYL(keyl)]);			\
})

/*----------------------------------------------------------------------------*/

/**
 * 2D array type for ARM key schedule function pointers.
 * CRYPTO_CIPHER_MAX:		max cipher ID number
 * CRYPTO_CIPHER_KEYLEN_MAX:	max key length ID number
 */
typedef const crypto_key_sched_t
crypto_key_sched_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_CIPHER_KEYLEN_MAX];

static const crypto_key_sched_tbl_t
crypto_key_sched_encrypt = {
	/* [cipher alg][key length] = key_expand_func, */
	[CIPH_AES_CBC][KEYL(128)] = aes128_key_sched_enc,
};

static const crypto_key_sched_tbl_t
crypto_key_sched_decrypt = {
	/* [cipher alg][key length] = key_expand_func, */
	[CIPH_AES_CBC][KEYL(128)] = aes128_key_sched_dec,
};

/**
 * Top level array containing pointers to particular key generation
 * function sets, covering given operation direction.
 * crypto_key_sched_encrypt:	keys for encryption
 * crypto_key_sched_decrypt:	keys for decryption
 */
static const crypto_key_sched_tbl_t *
crypto_key_sched_dir[] = {
	&crypto_key_sched_encrypt,
	&crypto_key_sched_decrypt,
	NULL
};

/**
 * Extract particular key schedule function from the 2D array.
 */
#define CRYPTO_GET_KEY_SCHED(cop, calg, keyl)				\
({									\
	crypto_key_sched_tbl_t *ks_tbl = crypto_key_sched_dir[(cop)];	\
									\
	((*ks_tbl)[(calg)][KEYL(keyl)]);				\
})
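
/*
 * Example use of the two lookup macros above (these mirror the assignments
 * made in armv8_crypto_set_session_chained_parameters() below):
 *
 *	sess->crypto_func = CRYPTO_GET_ALGO(order, cop, calg, aalg, 128);
 *	sess->cipher.key_sched = CRYPTO_GET_KEY_SCHED(cop, calg, 128);
 */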

/*----------------------------------------------------------------------------*/

/*
 *------------------------------------------------------------------------------
 * Session Prepare
 *------------------------------------------------------------------------------
 */

/** Get xform chain order */
static enum armv8_crypto_chain_order
armv8_crypto_get_chain_order(const struct rte_crypto_sym_xform *xform)
{
	/*
	 * This driver currently covers only chained operations.
	 * Reject cipher-only or authentication-only operations,
	 * and chains longer than 2 xform structures.
	 */
	if (xform->next == NULL || xform->next->next != NULL)
		return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;

	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
		if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
			return ARMV8_CRYPTO_CHAIN_AUTH_CIPHER;
	}

	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
		if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
			return ARMV8_CRYPTO_CHAIN_CIPHER_AUTH;
	}

	return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;
}
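
/*
 * For illustration only (a simplified application-side sketch; all fields
 * other than .type and .next are omitted): a chain requesting AES-CBC
 * encryption followed by SHA1-HMAC generation links two xforms as
 *
 *	struct rte_crypto_sym_xform auth_xf = {
 *		.type = RTE_CRYPTO_SYM_XFORM_AUTH,
 *		.next = NULL,
 *	};
 *	struct rte_crypto_sym_xform cipher_xf = {
 *		.type = RTE_CRYPTO_SYM_XFORM_CIPHER,
 *		.next = &auth_xf,
 *	};
 *
 * which armv8_crypto_get_chain_order() above classifies as
 * ARMV8_CRYPTO_CHAIN_CIPHER_AUTH.
 */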

static inline void
auth_hmac_pad_prepare(struct armv8_crypto_session *sess,
				const struct rte_crypto_sym_xform *xform)
{
	size_t i;

	/* Generate i_key_pad and o_key_pad */
	memset(sess->auth.hmac.i_key_pad, 0, sizeof(sess->auth.hmac.i_key_pad));
	rte_memcpy(sess->auth.hmac.i_key_pad, sess->auth.hmac.key,
							xform->auth.key.length);
	memset(sess->auth.hmac.o_key_pad, 0, sizeof(sess->auth.hmac.o_key_pad));
	rte_memcpy(sess->auth.hmac.o_key_pad, sess->auth.hmac.key,
							xform->auth.key.length);
	/*
	 * XOR the key with the IPAD/OPAD values to obtain i_key_pad
	 * and o_key_pad.
	 * A byte-by-byte operation may seem less efficient here,
	 * but in fact it is the opposite: the resulting assembly is
	 * likely to operate on NEON registers (load the auth key to Qx,
	 * load IPAD/OPAD into multiple elements of Qy, then EOR
	 * 128 bits at once).
	 */
	for (i = 0; i < SHA_BLOCK_MAX; i++) {
		sess->auth.hmac.i_key_pad[i] ^= HMAC_IPAD_VALUE;
		sess->auth.hmac.o_key_pad[i] ^= HMAC_OPAD_VALUE;
	}
}
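
/*
 * Background note (illustration only): the pads prepared above follow the
 * standard HMAC construction
 *
 *	HMAC(key, msg) = H((key ^ opad) || H((key ^ ipad) || msg))
 *
 * auth_set_prerequisites() below hashes one block of each pad and stores the
 * partial digests, so per-packet processing can resume from that saved state
 * instead of re-hashing the pads for every operation.
 */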

static inline int
auth_set_prerequisites(struct armv8_crypto_session *sess,
			const struct rte_crypto_sym_xform *xform)
{
	uint8_t partial[64] = { 0 };
	int error;

	switch (xform->auth.algo) {
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
		/*
		 * Generate authentication key, i_key_pad and o_key_pad.
		 */
		/* Zero memory under key */
		memset(sess->auth.hmac.key, 0, SHA1_AUTH_KEY_LENGTH);

		if (xform->auth.key.length > SHA1_AUTH_KEY_LENGTH) {
			/*
			 * In case the key is longer than 160 bits
			 * the algorithm will use SHA1(key) instead.
			 */
			error = sha1_block(NULL, xform->auth.key.data,
				sess->auth.hmac.key, xform->auth.key.length);
			if (error != 0)
				return -1;
		} else {
			/*
			 * Copy the given authentication key to the session
			 * key. Assuming the session key is zeroed, there is
			 * no need for additional zero padding if the key is
			 * shorter than SHA1_AUTH_KEY_LENGTH.
			 */
			rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
							xform->auth.key.length);
		}

		/* Prepare HMAC padding: key|pattern */
		auth_hmac_pad_prepare(sess, xform);
		/*
		 * Calculate partial hash values for i_key_pad and o_key_pad.
		 * Will be used as initialization state for final HMAC.
		 */
		error = sha1_block_partial(NULL, sess->auth.hmac.i_key_pad,
				partial, SHA1_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.i_key_pad, partial, SHA1_BLOCK_SIZE);

		error = sha1_block_partial(NULL, sess->auth.hmac.o_key_pad,
				partial, SHA1_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.o_key_pad, partial, SHA1_BLOCK_SIZE);
		break;
	case RTE_CRYPTO_AUTH_SHA256_HMAC:
		/*
		 * Generate authentication key, i_key_pad and o_key_pad.
		 */
		/* Zero memory under key */
		memset(sess->auth.hmac.key, 0, SHA256_AUTH_KEY_LENGTH);

		if (xform->auth.key.length > SHA256_AUTH_KEY_LENGTH) {
			/*
			 * In case the key is longer than 256 bits
			 * the algorithm will use SHA256(key) instead.
			 */
			error = sha256_block(NULL, xform->auth.key.data,
				sess->auth.hmac.key, xform->auth.key.length);
			if (error != 0)
				return -1;
		} else {
			/*
			 * Copy the given authentication key to the session
			 * key. Assuming the session key is zeroed, there is
			 * no need for additional zero padding if the key is
			 * shorter than SHA256_AUTH_KEY_LENGTH.
			 */
			rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
							xform->auth.key.length);
		}

		/* Prepare HMAC padding: key|pattern */
		auth_hmac_pad_prepare(sess, xform);
		/*
		 * Calculate partial hash values for i_key_pad and o_key_pad.
		 * Will be used as initialization state for final HMAC.
		 */
		error = sha256_block_partial(NULL, sess->auth.hmac.i_key_pad,
				partial, SHA256_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.i_key_pad, partial, SHA256_BLOCK_SIZE);

		error = sha256_block_partial(NULL, sess->auth.hmac.o_key_pad,
				partial, SHA256_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.o_key_pad, partial, SHA256_BLOCK_SIZE);
		break;
	default:
		break;
	}

	return 0;
}

static inline int
cipher_set_prerequisites(struct armv8_crypto_session *sess,
			const struct rte_crypto_sym_xform *xform)
{
	crypto_key_sched_t cipher_key_sched;

	cipher_key_sched = sess->cipher.key_sched;
	if (likely(cipher_key_sched != NULL)) {
		/* Set up cipher session key */
		cipher_key_sched(sess->cipher.key.data, xform->cipher.key.data);
	}

	return 0;
}

static int
armv8_crypto_set_session_chained_parameters(struct armv8_crypto_session *sess,
		const struct rte_crypto_sym_xform *cipher_xform,
		const struct rte_crypto_sym_xform *auth_xform)
{
	enum armv8_crypto_chain_order order;
	enum armv8_crypto_cipher_operation cop;
	enum rte_crypto_cipher_algorithm calg;
	enum rte_crypto_auth_algorithm aalg;

	/* Validate and prepare scratch order of combined operations */
	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		order = sess->chain_order;
		break;
	default:
		return -ENOTSUP;
	}
	/* Select cipher direction */
	sess->cipher.direction = cipher_xform->cipher.op;
	/* Select cipher key */
	sess->cipher.key.length = cipher_xform->cipher.key.length;
	/* Set cipher direction */
	cop = sess->cipher.direction;
	/* Set cipher algorithm */
	calg = cipher_xform->cipher.algo;

	/* Select cipher algo */
	switch (calg) {
	/* Cover supported cipher algorithms */
	case RTE_CRYPTO_CIPHER_AES_CBC:
		sess->cipher.algo = calg;
		/* IV len is always 16 bytes (block size) for AES CBC */
		sess->cipher.iv.length = 16;
		break;
	default:
		return -ENOTSUP;
	}
	/* Select auth generate/verify */
	sess->auth.operation = auth_xform->auth.op;

	/* Select auth algo */
	switch (auth_xform->auth.algo) {
	/* Cover supported hash algorithms */
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
	case RTE_CRYPTO_AUTH_SHA256_HMAC: /* Fall through */
		aalg = auth_xform->auth.algo;
		sess->auth.mode = ARMV8_CRYPTO_AUTH_AS_HMAC;
		break;
	default:
		return -ENOTSUP;
	}

	/* Set the digest length */
	sess->auth.digest_length = auth_xform->auth.digest_length;

	/* Verify supported key lengths and extract proper algorithm */
	switch (cipher_xform->cipher.key.length << 3) {
	case 128:
		sess->crypto_func =
				CRYPTO_GET_ALGO(order, cop, calg, aalg, 128);
		sess->cipher.key_sched =
				CRYPTO_GET_KEY_SCHED(cop, calg, 128);
		break;
	case 192:
	case 256:
		/* These key lengths are not supported yet */
	default: /* Fall through */
		sess->crypto_func = NULL;
		sess->cipher.key_sched = NULL;
		return -ENOTSUP;
	}

	if (unlikely(sess->crypto_func == NULL)) {
		/*
		 * Getting here would mean there is a bug in the algorithm
		 * selection above. Keep the check anyway to catch such a bug
		 * immediately and avoid a NULL pointer dereference during
		 * op processing.
		 */
		ARMV8_CRYPTO_LOG_ERR(
			"No appropriate crypto function for given parameters");
		return -EINVAL;
	}

	/* Set up cipher session prerequisites */
	if (cipher_set_prerequisites(sess, cipher_xform) != 0)
		return -EINVAL;

	/* Set up authentication session prerequisites */
	if (auth_set_prerequisites(sess, auth_xform) != 0)
		return -EINVAL;

	return 0;
}

/** Parse crypto xform chain and set private session parameters */
int
armv8_crypto_set_session_parameters(struct armv8_crypto_session *sess,
		const struct rte_crypto_sym_xform *xform)
{
	const struct rte_crypto_sym_xform *cipher_xform = NULL;
	const struct rte_crypto_sym_xform *auth_xform = NULL;
	bool is_chained_op;
	int ret;

	/* Filter out spurious/broken requests */
	if (xform == NULL)
		return -EINVAL;

	sess->chain_order = armv8_crypto_get_chain_order(xform);
	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
		cipher_xform = xform;
		auth_xform = xform->next;
		is_chained_op = true;
		break;
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		auth_xform = xform;
		cipher_xform = xform->next;
		is_chained_op = true;
		break;
	default:
		is_chained_op = false;
		return -EINVAL;
	}
	/* Set IV offset */
	sess->cipher.iv.offset = cipher_xform->cipher.iv.offset;

	if (is_chained_op) {
		ret = armv8_crypto_set_session_chained_parameters(sess,
						cipher_xform, auth_xform);
		if (unlikely(ret != 0)) {
			ARMV8_CRYPTO_LOG_ERR(
			"Invalid/unsupported chained (cipher/auth) parameters");
			return -EINVAL;
		}
	} else {
		ARMV8_CRYPTO_LOG_ERR("Invalid/unsupported operation");
		return -EINVAL;
	}

	return 0;
}

/** Provide session for operation */
static inline struct armv8_crypto_session *
get_session(struct armv8_crypto_qp *qp, struct rte_crypto_op *op)
{
	struct armv8_crypto_session *sess = NULL;

	if (op->sess_type == RTE_CRYPTO_OP_WITH_SESSION) {
		/* get existing session */
		if (likely(op->sym->session != NULL &&
				op->sym->session->dev_type ==
				RTE_CRYPTODEV_ARMV8_PMD)) {
			sess = (struct armv8_crypto_session *)
				op->sym->session->_private;
		}
	} else {
		/* provide internal session */
		void *_sess = NULL;

		if (!rte_mempool_get(qp->sess_mp, (void **)&_sess)) {
			sess = (struct armv8_crypto_session *)
				((struct rte_cryptodev_sym_session *)_sess)
				->_private;

			if (unlikely(armv8_crypto_set_session_parameters(
					sess, op->sym->xform) != 0)) {
				rte_mempool_put(qp->sess_mp, _sess);
				sess = NULL;
			} else
				op->sym->session = _sess;
		}
	}

	if (unlikely(sess == NULL))
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;

	return sess;
}

/*
 *------------------------------------------------------------------------------
 * Process Operations
 *------------------------------------------------------------------------------
 */

/*----------------------------------------------------------------------------*/

/** Process chained (cipher and authentication) operation */
static inline void
process_armv8_chained_op
	(struct rte_crypto_op *op, struct armv8_crypto_session *sess,
		struct rte_mbuf *mbuf_src, struct rte_mbuf *mbuf_dst)
{
	crypto_func_t crypto_func;
	crypto_arg_t arg;
	struct rte_mbuf *m_asrc, *m_adst;
	uint8_t *csrc, *cdst;
	uint8_t *adst, *asrc;
	uint64_t clen, alen;
	int error;

	clen = op->sym->cipher.data.length;
	alen = op->sym->auth.data.length;

	csrc = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *,
			op->sym->cipher.data.offset);
	cdst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
			op->sym->cipher.data.offset);

	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
		m_asrc = m_adst = mbuf_dst;
		break;
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		m_asrc = mbuf_src;
		m_adst = mbuf_dst;
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}
	asrc = rte_pktmbuf_mtod_offset(m_asrc, uint8_t *,
				op->sym->auth.data.offset);

	switch (sess->auth.mode) {
	case ARMV8_CRYPTO_AUTH_AS_AUTH:
		/* Nothing to do here, just verify correct option */
		break;
	case ARMV8_CRYPTO_AUTH_AS_HMAC:
		arg.digest.hmac.key = sess->auth.hmac.key;
		arg.digest.hmac.i_key_pad = sess->auth.hmac.i_key_pad;
		arg.digest.hmac.o_key_pad = sess->auth.hmac.o_key_pad;
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}

	if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_GENERATE) {
		adst = op->sym->auth.digest.data;
		if (adst == NULL) {
			adst = rte_pktmbuf_mtod_offset(m_adst,
					uint8_t *,
					op->sym->auth.data.offset +
					op->sym->auth.data.length);
		}
	} else {
		adst = (uint8_t *)rte_pktmbuf_append(m_asrc,
				sess->auth.digest_length);
	}

	arg.cipher.iv = rte_crypto_op_ctod_offset(op, uint8_t *,
					sess->cipher.iv.offset);
	arg.cipher.key = sess->cipher.key.data;
	/* Acquire combined mode function */
	crypto_func = sess->crypto_func;
	ARMV8_CRYPTO_ASSERT(crypto_func != NULL);
	error = crypto_func(csrc, cdst, clen, asrc, adst, alen, &arg);
	if (error != 0) {
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}

	op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
	if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
		if (memcmp(adst, op->sym->auth.digest.data,
				sess->auth.digest_length) != 0) {
			op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
		}
		/* Trim area used for digest from mbuf. */
		rte_pktmbuf_trim(m_asrc,
				sess->auth.digest_length);
	}
}

/** Process crypto operation for mbuf */
static inline int
process_op(const struct armv8_crypto_qp *qp, struct rte_crypto_op *op,
		struct armv8_crypto_session *sess)
{
	struct rte_mbuf *msrc, *mdst;

	msrc = op->sym->m_src;
	mdst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;

	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;

	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER: /* Fall through */
		process_armv8_chained_op(op, sess, msrc, mdst);
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
		break;
	}

	/* Free session if a session-less crypto op */
	if (op->sess_type == RTE_CRYPTO_OP_SESSIONLESS) {
		memset(sess, 0, sizeof(struct armv8_crypto_session));
		rte_mempool_put(qp->sess_mp, op->sym->session);
		op->sym->session = NULL;
	}

	if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
		op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;

	if (unlikely(op->status == RTE_CRYPTO_OP_STATUS_ERROR))
		return -1;

	return 0;
}

/*
 *------------------------------------------------------------------------------
 * PMD Framework
 *------------------------------------------------------------------------------
 */

static uint16_t
armv8_crypto_pmd_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	struct armv8_crypto_session *sess;
	struct armv8_crypto_qp *qp = queue_pair;
	int i, retval;

	for (i = 0; i < nb_ops; i++) {
		sess = get_session(qp, ops[i]);
		if (unlikely(sess == NULL))
			goto enqueue_err;
		retval = process_op(qp, ops[i], sess);
		if (unlikely(retval < 0))
			goto enqueue_err;
	}

	retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i,
			NULL);
	qp->stats.enqueued_count += retval;
	return retval;

enqueue_err:
	retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i,
			NULL);
	ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
	qp->stats.enqueue_err_count++;
	return retval;
}

static uint16_t
armv8_crypto_pmd_dequeue_burst(void *queue_pair, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	struct armv8_crypto_qp *qp = queue_pair;
	unsigned int nb_dequeued = 0;

	nb_dequeued = rte_ring_dequeue_burst(qp->processed_ops,
			(void **)ops, nb_ops, NULL);
	qp->stats.dequeued_count += nb_dequeued;
	return nb_dequeued;
}
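
/*
 * Application-side sketch (illustration only; not part of this PMD): ops are
 * submitted and retrieved through the generic cryptodev API, which in turn
 * calls the enqueue/dequeue handlers above, e.g.
 *
 *	nb_enq = rte_cryptodev_enqueue_burst(dev_id, qp_id, ops, nb_ops);
 *	...
 *	nb_deq = rte_cryptodev_dequeue_burst(dev_id, qp_id, ops, nb_ops);
 */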

/** Create ARMv8 crypto device */
static int
cryptodev_armv8_crypto_create(const char *name,
			struct rte_vdev_device *vdev,
			struct rte_crypto_vdev_init_params *init_params)
{
	struct rte_cryptodev *dev;
	struct armv8_crypto_private *internals;

	/* Check CPU for support for AES instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_AES)) {
		ARMV8_CRYPTO_LOG_ERR(
			"AES instructions not supported by CPU");
		return -EFAULT;
	}

	/* Check CPU for support for SHA instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA1) ||
	    !rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA2)) {
		ARMV8_CRYPTO_LOG_ERR(
			"SHA1/SHA2 instructions not supported by CPU");
		return -EFAULT;
	}

	/* Check CPU for support for Advanced SIMD instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_NEON)) {
		ARMV8_CRYPTO_LOG_ERR(
			"Advanced SIMD instructions not supported by CPU");
		return -EFAULT;
	}

	if (init_params->name[0] == '\0')
		snprintf(init_params->name, sizeof(init_params->name),
				"%s", name);

	dev = rte_cryptodev_vdev_pmd_init(init_params->name,
			sizeof(struct armv8_crypto_private),
			init_params->socket_id,
			vdev);
	if (dev == NULL) {
		ARMV8_CRYPTO_LOG_ERR("failed to create cryptodev vdev");
		goto init_error;
	}

	dev->dev_type = RTE_CRYPTODEV_ARMV8_PMD;
	dev->dev_ops = rte_armv8_crypto_pmd_ops;

	/* register rx/tx burst functions for data path */
	dev->dequeue_burst = armv8_crypto_pmd_dequeue_burst;
	dev->enqueue_burst = armv8_crypto_pmd_enqueue_burst;

	dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
			RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING |
			RTE_CRYPTODEV_FF_CPU_NEON |
			RTE_CRYPTODEV_FF_CPU_ARM_CE;

	/* Set vector instructions mode supported */
	internals = dev->data->dev_private;

	internals->max_nb_qpairs = init_params->max_nb_queue_pairs;
	internals->max_nb_sessions = init_params->max_nb_sessions;

	return 0;

init_error:
	ARMV8_CRYPTO_LOG_ERR(
		"driver %s: cryptodev_armv8_crypto_create failed",
		init_params->name);

	cryptodev_armv8_crypto_uninit(vdev);
	return -EFAULT;
}

/** Initialise ARMv8 crypto device */
static int
cryptodev_armv8_crypto_init(struct rte_vdev_device *vdev)
{
	struct rte_crypto_vdev_init_params init_params = {
		RTE_CRYPTODEV_VDEV_DEFAULT_MAX_NB_QUEUE_PAIRS,
		RTE_CRYPTODEV_VDEV_DEFAULT_MAX_NB_SESSIONS,
		rte_socket_id(),
		{0}
	};
	const char *name;
	const char *input_args;

	name = rte_vdev_device_name(vdev);
	if (name == NULL)
		return -EINVAL;
	input_args = rte_vdev_device_args(vdev);
	rte_cryptodev_vdev_parse_init_params(&init_params, input_args);

	RTE_LOG(INFO, PMD, "Initialising %s on NUMA node %d\n", name,
			init_params.socket_id);
	if (init_params.name[0] != '\0') {
		RTE_LOG(INFO, PMD, "  User defined name = %s\n",
			init_params.name);
	}
	RTE_LOG(INFO, PMD, "  Max number of queue pairs = %d\n",
			init_params.max_nb_queue_pairs);
	RTE_LOG(INFO, PMD, "  Max number of sessions = %d\n",
			init_params.max_nb_sessions);

	return cryptodev_armv8_crypto_create(name, vdev, &init_params);
}

/** Uninitialise ARMv8 crypto device */
static int
cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev)
{
	const char *name;

	name = rte_vdev_device_name(vdev);
	RTE_LOG(INFO, PMD,
		"Closing ARMv8 crypto device %s on numa socket %u\n",
		name, rte_socket_id());

	return 0;
}

static struct rte_vdev_driver armv8_crypto_drv = {
	.probe = cryptodev_armv8_crypto_init,
	.remove = cryptodev_armv8_crypto_uninit
};

RTE_PMD_REGISTER_VDEV(CRYPTODEV_NAME_ARMV8_PMD, armv8_crypto_drv);
RTE_PMD_REGISTER_ALIAS(CRYPTODEV_NAME_ARMV8_PMD, cryptodev_armv8_pmd);
RTE_PMD_REGISTER_PARAM_STRING(CRYPTODEV_NAME_ARMV8_PMD,
	"max_nb_queue_pairs=<int> "
	"max_nb_sessions=<int> "
	"socket_id=<int>");
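
/*
 * Example EAL usage (illustration only; the vdev name is taken from
 * CRYPTODEV_NAME_ARMV8_PMD, assumed here to expand to "crypto_armv8"):
 *
 *	./app -l 0-1 --vdev "crypto_armv8,max_nb_queue_pairs=2,socket_id=0"
 */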