/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2017 Cavium, Inc
 */

#include <stdbool.h>

#include <rte_common.h>
#include <rte_hexdump.h>
#include <rte_cryptodev.h>
#include <rte_cryptodev_pmd.h>
#include <rte_bus_vdev.h>
#include <rte_malloc.h>
#include <rte_cpuflags.h>

#include "armv8_crypto_defs.h"

#include "rte_armv8_pmd_private.h"

static uint8_t cryptodev_driver_id;

static int cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev);

/**
 * Pointers to the supported combined mode crypto functions are stored
 * in static tables. Each combined (chained) cryptographic operation
 * can be described by a set of numbers:
 * - order:	order of operations (cipher, auth) or (auth, cipher)
 * - direction:	encryption or decryption
 * - calg:	cipher algorithm such as AES_CBC, AES_CTR, etc.
 * - aalg:	authentication algorithm such as SHA1, SHA256, etc.
 * - keyl:	cipher key length in bits, for example 128, 192 or 256
 *
 * To quickly acquire each function pointer based on those numbers,
 * a hierarchy of arrays is maintained. The final level, a 3D array,
 * is indexed by the combined mode function parameters only (cipher
 * algorithm, authentication algorithm and key length).
 *
 * This gives 3 memory accesses to obtain a function pointer instead of
 * traversing the arrays manually and comparing function parameters on
 * each loop iteration.
 */

/**
 * 3D array type for ARM Combined Mode crypto function pointers.
 * CRYPTO_CIPHER_MAX:		max cipher ID number
 * CRYPTO_AUTH_MAX:		max auth ID number
 * CRYPTO_CIPHER_KEYLEN_MAX:	max key length ID number
 */
typedef const crypto_func_t
crypto_func_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_AUTH_MAX][CRYPTO_CIPHER_KEYLEN_MAX];
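
/*
 * Note: combinations not explicitly initialized in the tables below
 * evaluate to NULL. The tables have static storage duration and are
 * therefore zero-initialized, so a lookup for an unsupported parameter
 * set yields a NULL function pointer, which the session setup code
 * checks for before use.
 */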

/* Evaluate to the key length definition */
#define KEYL(keyl)		(ARMV8_CRYPTO_CIPHER_KEYLEN_ ## keyl)

/* Local aliases for supported ciphers */
#define CIPH_AES_CBC		RTE_CRYPTO_CIPHER_AES_CBC
/* Local aliases for supported hashes */
#define AUTH_SHA1_HMAC		RTE_CRYPTO_AUTH_SHA1_HMAC
#define AUTH_SHA256_HMAC	RTE_CRYPTO_AUTH_SHA256_HMAC

/**
 * Arrays containing pointers to particular cryptographic,
 * combined mode functions.
 * crypto_op_ca_encrypt:	cipher (encrypt), authenticate
 * crypto_op_ca_decrypt:	cipher (decrypt), authenticate
 * crypto_op_ac_encrypt:	authenticate, cipher (encrypt)
 * crypto_op_ac_decrypt:	authenticate, cipher (decrypt)
 */
static const crypto_func_tbl_t
crypto_op_ca_encrypt = {
	/* [cipher alg][auth alg][key length] = crypto_function, */
	[CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] = aes128cbc_sha1_hmac,
	[CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] = aes128cbc_sha256_hmac,
};

static const crypto_func_tbl_t
crypto_op_ca_decrypt = {
	NULL
};

static const crypto_func_tbl_t
crypto_op_ac_encrypt = {
	NULL
};

static const crypto_func_tbl_t
crypto_op_ac_decrypt = {
	/* [cipher alg][auth alg][key length] = crypto_function, */
	[CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] = sha1_hmac_aes128cbc_dec,
	[CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] = sha256_hmac_aes128cbc_dec,
};

/**
 * Arrays containing pointers to particular cryptographic function sets,
 * covering given cipher operation directions (encrypt, decrypt)
 * for each order of cipher and authentication pairs.
 */
static const crypto_func_tbl_t *
crypto_cipher_auth[] = {
	&crypto_op_ca_encrypt,
	&crypto_op_ca_decrypt,
	NULL
};

static const crypto_func_tbl_t *
crypto_auth_cipher[] = {
	&crypto_op_ac_encrypt,
	&crypto_op_ac_decrypt,
	NULL
};

/**
 * Top level array containing pointers to particular cryptographic
 * function sets, covering given order of chained operations.
 * crypto_cipher_auth:	cipher first, authenticate after
 * crypto_auth_cipher:	authenticate first, cipher after
 */
static const crypto_func_tbl_t **
crypto_chain_order[] = {
	crypto_cipher_auth,
	crypto_auth_cipher,
	NULL
};

/**
 * Extract a particular combined mode crypto function from the 3D array.
 */
#define CRYPTO_GET_ALGO(order, cop, calg, aalg, keyl)			\
({									\
	crypto_func_tbl_t *func_tbl =					\
				(crypto_chain_order[(order)])[(cop)];	\
									\
	((*func_tbl)[(calg)][(aalg)][KEYL(keyl)]);			\
})
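
/*
 * Illustrative lookup (a sketch only; the direction enum value name is
 * an assumption for this example): fetch the AES-128-CBC + SHA1-HMAC
 * routine for the cipher-then-authenticate order, encrypt direction:
 *
 *	crypto_func_t f = CRYPTO_GET_ALGO(ARMV8_CRYPTO_CHAIN_CIPHER_AUTH,
 *			ARMV8_CRYPTO_CIPHER_OP_ENCRYPT,
 *			CIPH_AES_CBC, AUTH_SHA1_HMAC, 128);
 *
 * This expands to three indexed loads and evaluates to
 * aes128cbc_sha1_hmac per the tables above.
 */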

/*----------------------------------------------------------------------------*/

/**
 * 2D array type for ARM key schedule function pointers.
 * CRYPTO_CIPHER_MAX:		max cipher ID number
 * CRYPTO_CIPHER_KEYLEN_MAX:	max key length ID number
 */
typedef const crypto_key_sched_t
crypto_key_sched_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_CIPHER_KEYLEN_MAX];

static const crypto_key_sched_tbl_t
crypto_key_sched_encrypt = {
	/* [cipher alg][key length] = key_expand_func, */
	[CIPH_AES_CBC][KEYL(128)] = aes128_key_sched_enc,
};

static const crypto_key_sched_tbl_t
crypto_key_sched_decrypt = {
	/* [cipher alg][key length] = key_expand_func, */
	[CIPH_AES_CBC][KEYL(128)] = aes128_key_sched_dec,
};

/**
 * Top level array containing pointers to particular key generation
 * function sets, covering given operation direction.
 * crypto_key_sched_encrypt:	keys for encryption
 * crypto_key_sched_decrypt:	keys for decryption
 */
static const crypto_key_sched_tbl_t *
crypto_key_sched_dir[] = {
	&crypto_key_sched_encrypt,
	&crypto_key_sched_decrypt,
	NULL
};

/**
 * Extract a particular key schedule function from the 2D array.
 */
#define CRYPTO_GET_KEY_SCHED(cop, calg, keyl)				\
({									\
	crypto_key_sched_tbl_t *ks_tbl = crypto_key_sched_dir[(cop)];	\
									\
	((*ks_tbl)[(calg)][KEYL(keyl)]);				\
})
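
/*
 * Illustrative lookup (sketch only): CRYPTO_GET_KEY_SCHED(cop, calg, 128)
 * with cop set to the encrypt direction and calg == CIPH_AES_CBC
 * evaluates to aes128_key_sched_enc. As used in
 * cipher_set_prerequisites() below, the resulting function expands the
 * user-supplied key into the round-key schedule stored in the session
 * (destination buffer first, user key second).
 */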

/*----------------------------------------------------------------------------*/

/*
 *------------------------------------------------------------------------------
 * Session Prepare
 *------------------------------------------------------------------------------
 */

/** Get xform chain order */
static enum armv8_crypto_chain_order
armv8_crypto_get_chain_order(const struct rte_crypto_sym_xform *xform)
{
	/*
	 * This driver currently covers only chained operations.
	 * Ignore only-cipher or only-authentication operations
	 * and chains longer than 2 xform structures.
	 */
	if (xform->next == NULL || xform->next->next != NULL)
		return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;

	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
		if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
			return ARMV8_CRYPTO_CHAIN_AUTH_CIPHER;
	}

	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
		if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
			return ARMV8_CRYPTO_CHAIN_CIPHER_AUTH;
	}

	return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;
}
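
/*
 * For reference, an application requests the CIPHER_AUTH order by
 * linking exactly two xforms via the public symmetric crypto API
 * (a minimal sketch):
 *
 *	struct rte_crypto_sym_xform auth = {
 *		.type = RTE_CRYPTO_SYM_XFORM_AUTH,
 *		.auth.algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
 *	};
 *	struct rte_crypto_sym_xform cipher = {
 *		.type = RTE_CRYPTO_SYM_XFORM_CIPHER,
 *		.cipher.algo = RTE_CRYPTO_CIPHER_AES_CBC,
 *		.next = &auth,
 *	};
 */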

static inline void
auth_hmac_pad_prepare(struct armv8_crypto_session *sess,
				const struct rte_crypto_sym_xform *xform)
{
	size_t i;

	/* Generate i_key_pad and o_key_pad */
	memset(sess->auth.hmac.i_key_pad, 0, sizeof(sess->auth.hmac.i_key_pad));
	rte_memcpy(sess->auth.hmac.i_key_pad, sess->auth.hmac.key,
							xform->auth.key.length);
	memset(sess->auth.hmac.o_key_pad, 0, sizeof(sess->auth.hmac.o_key_pad));
	rte_memcpy(sess->auth.hmac.o_key_pad, sess->auth.hmac.key,
							xform->auth.key.length);
	/*
	 * XOR the key with the IPAD/OPAD values to obtain i_key_pad
	 * and o_key_pad.
	 * A byte-by-byte operation may seem less efficient here, but in
	 * fact it is the opposite: the resulting assembly is likely to
	 * operate on NEON registers (load the auth key to Qx, load
	 * IPAD/OPAD to multiple elements of Qy, then EOR 128 bits at
	 * once).
	 */
	for (i = 0; i < SHA_BLOCK_MAX; i++) {
		sess->auth.hmac.i_key_pad[i] ^= HMAC_IPAD_VALUE;
		sess->auth.hmac.o_key_pad[i] ^= HMAC_OPAD_VALUE;
	}
}
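
/*
 * The pads computed above implement the standard HMAC construction
 * (RFC 2104): HMAC(K, m) = H((K ^ opad) || H((K ^ ipad) || m)),
 * where ipad is 0x36 and opad is 0x5c repeated over one hash block.
 */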

static inline int
auth_set_prerequisites(struct armv8_crypto_session *sess,
			const struct rte_crypto_sym_xform *xform)
{
	uint8_t partial[64] = { 0 };
	int error;

	switch (xform->auth.algo) {
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
		/* Generate authentication key, i_key_pad and o_key_pad. */
		/* Zero memory under key */
		memset(sess->auth.hmac.key, 0, SHA1_BLOCK_SIZE);
		/* Copy the given authentication key to the session key */
		rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
						xform->auth.key.length);
		/* Prepare HMAC padding: key|pattern */
		auth_hmac_pad_prepare(sess, xform);
		/*
		 * Calculate partial hash values for i_key_pad and o_key_pad.
		 * They will be used as the initialization state for the
		 * final HMAC.
		 */
		error = sha1_block_partial(NULL, sess->auth.hmac.i_key_pad,
		    partial, SHA1_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.i_key_pad, partial, SHA1_BLOCK_SIZE);

		error = sha1_block_partial(NULL, sess->auth.hmac.o_key_pad,
		    partial, SHA1_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.o_key_pad, partial, SHA1_BLOCK_SIZE);
		break;
	case RTE_CRYPTO_AUTH_SHA256_HMAC:
		/* Generate authentication key, i_key_pad and o_key_pad. */
		/* Zero memory under key */
		memset(sess->auth.hmac.key, 0, SHA256_BLOCK_SIZE);
		/* Copy the given authentication key to the session key */
		rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
						xform->auth.key.length);
		/* Prepare HMAC padding: key|pattern */
		auth_hmac_pad_prepare(sess, xform);
		/*
		 * Calculate partial hash values for i_key_pad and o_key_pad.
		 * They will be used as the initialization state for the
		 * final HMAC.
		 */
		error = sha256_block_partial(NULL, sess->auth.hmac.i_key_pad,
		    partial, SHA256_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.i_key_pad, partial, SHA256_BLOCK_SIZE);

		error = sha256_block_partial(NULL, sess->auth.hmac.o_key_pad,
		    partial, SHA256_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.o_key_pad, partial, SHA256_BLOCK_SIZE);
		break;
	default:
		break;
	}

	return 0;
}
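
/*
 * Storing the partially hashed i_key_pad/o_key_pad as the digest
 * initialization state reduces the per-packet HMAC cost to hashing the
 * message plus one extra block, instead of re-hashing the padded key on
 * every operation.
 */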

static inline int
cipher_set_prerequisites(struct armv8_crypto_session *sess,
			const struct rte_crypto_sym_xform *xform)
{
	crypto_key_sched_t cipher_key_sched;

	cipher_key_sched = sess->cipher.key_sched;
	if (likely(cipher_key_sched != NULL)) {
		/* Set up cipher session key */
		cipher_key_sched(sess->cipher.key.data,
					xform->cipher.key.data);
	}

	return 0;
}

static int
armv8_crypto_set_session_chained_parameters(struct armv8_crypto_session *sess,
		const struct rte_crypto_sym_xform *cipher_xform,
		const struct rte_crypto_sym_xform *auth_xform)
{
	enum armv8_crypto_chain_order order;
	enum armv8_crypto_cipher_operation cop;
	enum rte_crypto_cipher_algorithm calg;
	enum rte_crypto_auth_algorithm aalg;

	/* Validate and prepare scratch order of combined operations */
	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		order = sess->chain_order;
		break;
	default:
		return -ENOTSUP;
	}
	/* Select cipher direction */
	sess->cipher.direction = cipher_xform->cipher.op;
	/* Select cipher key */
	sess->cipher.key.length = cipher_xform->cipher.key.length;
	/* Set cipher direction */
	cop = sess->cipher.direction;
	/* Set cipher algorithm */
	calg = cipher_xform->cipher.algo;

	/* Select cipher algo */
	switch (calg) {
	/* Cover supported cipher algorithms */
	case RTE_CRYPTO_CIPHER_AES_CBC:
		sess->cipher.algo = calg;
		/* IV len is always 16 bytes (block size) for AES CBC */
		sess->cipher.iv.length = 16;
		break;
	default:
		return -ENOTSUP;
	}
	/* Select auth generate/verify */
	sess->auth.operation = auth_xform->auth.op;

	/* Select auth algo */
	switch (auth_xform->auth.algo) {
	/* Cover supported hash algorithms */
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
	case RTE_CRYPTO_AUTH_SHA256_HMAC:	/* Fall through */
		aalg = auth_xform->auth.algo;
		sess->auth.mode = ARMV8_CRYPTO_AUTH_AS_HMAC;
		break;
	default:
		return -ENOTSUP;
	}

	/* Set the digest length */
	sess->auth.digest_length = auth_xform->auth.digest_length;

	/*
	 * Verify supported key lengths and extract the proper function.
	 * The key length is given in bytes; << 3 converts it to bits.
	 */
	switch (cipher_xform->cipher.key.length << 3) {
	case 128:
		sess->crypto_func =
				CRYPTO_GET_ALGO(order, cop, calg, aalg, 128);
		sess->cipher.key_sched =
				CRYPTO_GET_KEY_SCHED(cop, calg, 128);
		break;
	case 192:
	case 256:
		/* These key lengths are not supported yet */
	default: /* Fall through */
		sess->crypto_func = NULL;
		sess->cipher.key_sched = NULL;
		return -ENOTSUP;
	}

	if (unlikely(sess->crypto_func == NULL)) {
		/*
		 * If we got here, there must be a bug in the algorithm
		 * selection above. Nevertheless, keep this check to catch
		 * the bug immediately and avoid a NULL pointer dereference
		 * during op processing.
		 */
		ARMV8_CRYPTO_LOG_ERR(
			"No appropriate crypto function for given parameters");
		return -EINVAL;
	}

	/* Set up cipher session prerequisites */
	if (cipher_set_prerequisites(sess, cipher_xform) != 0)
		return -EINVAL;

	/* Set up authentication session prerequisites */
	if (auth_set_prerequisites(sess, auth_xform) != 0)
		return -EINVAL;

	return 0;
}

/** Parse crypto xform chain and set private session parameters */
int
armv8_crypto_set_session_parameters(struct armv8_crypto_session *sess,
		const struct rte_crypto_sym_xform *xform)
{
	const struct rte_crypto_sym_xform *cipher_xform = NULL;
	const struct rte_crypto_sym_xform *auth_xform = NULL;
	bool is_chained_op;
	int ret;

	/* Filter out spurious/broken requests */
	if (xform == NULL)
		return -EINVAL;

	sess->chain_order = armv8_crypto_get_chain_order(xform);
	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
		cipher_xform = xform;
		auth_xform = xform->next;
		is_chained_op = true;
		break;
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		auth_xform = xform;
		cipher_xform = xform->next;
		is_chained_op = true;
		break;
	default:
		is_chained_op = false;
		return -ENOTSUP;
	}

	/* Set IV offset */
	sess->cipher.iv.offset = cipher_xform->cipher.iv.offset;

	if (is_chained_op) {
		ret = armv8_crypto_set_session_chained_parameters(sess,
						cipher_xform, auth_xform);
		if (unlikely(ret != 0)) {
			ARMV8_CRYPTO_LOG_ERR(
			"Invalid/unsupported chained (cipher/auth) parameters");
			return ret;
		}
	} else {
		ARMV8_CRYPTO_LOG_ERR("Invalid/unsupported operation");
		return -ENOTSUP;
	}

	return 0;
}

/** Provide session for operation */
static inline struct armv8_crypto_session *
get_session(struct armv8_crypto_qp *qp, struct rte_crypto_op *op)
{
	struct armv8_crypto_session *sess = NULL;

	if (op->sess_type == RTE_CRYPTO_OP_WITH_SESSION) {
		/* get existing session */
		if (likely(op->sym->session != NULL)) {
			sess = (struct armv8_crypto_session *)
					get_sym_session_private_data(
					op->sym->session,
					cryptodev_driver_id);
		}
	} else {
		/* provide internal session */
		void *_sess = NULL;
		void *_sess_private_data = NULL;

		if (rte_mempool_get(qp->sess_mp, (void **)&_sess))
			return NULL;

		if (rte_mempool_get(qp->sess_mp_priv,
				(void **)&_sess_private_data))
			return NULL;

		sess = (struct armv8_crypto_session *)_sess_private_data;

		if (unlikely(armv8_crypto_set_session_parameters(sess,
				op->sym->xform) != 0)) {
			rte_mempool_put(qp->sess_mp, _sess);
			rte_mempool_put(qp->sess_mp_priv, _sess_private_data);
			sess = NULL;
		}
		op->sym->session = (struct rte_cryptodev_sym_session *)_sess;
		set_sym_session_private_data(op->sym->session,
				cryptodev_driver_id, _sess_private_data);
	}

	if (unlikely(sess == NULL))
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;

	return sess;
}
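
/*
 * Session-less ops draw two objects per op: a generic session header
 * from qp->sess_mp and a driver-private area from qp->sess_mp_priv.
 * Both are returned to their pools in process_op() once the op
 * completes.
 */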

/*
 *------------------------------------------------------------------------------
 * Process Operations
 *------------------------------------------------------------------------------
 */

/*----------------------------------------------------------------------------*/

/** Process chained (cipher and auth) operation */
static inline void
process_armv8_chained_op(struct armv8_crypto_qp *qp, struct rte_crypto_op *op,
		struct armv8_crypto_session *sess,
		struct rte_mbuf *mbuf_src, struct rte_mbuf *mbuf_dst)
{
	crypto_func_t crypto_func;
	crypto_arg_t arg;
	struct rte_mbuf *m_asrc, *m_adst;
	uint8_t *csrc, *cdst;
	uint8_t *adst, *asrc;
	uint64_t clen, alen;
	int error;

	clen = op->sym->cipher.data.length;
	alen = op->sym->auth.data.length;

	csrc = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *,
			op->sym->cipher.data.offset);
	cdst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
			op->sym->cipher.data.offset);

	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
		m_asrc = m_adst = mbuf_dst;
		break;
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		m_asrc = mbuf_src;
		m_adst = mbuf_dst;
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}
	asrc = rte_pktmbuf_mtod_offset(m_asrc, uint8_t *,
				op->sym->auth.data.offset);

	switch (sess->auth.mode) {
	case ARMV8_CRYPTO_AUTH_AS_AUTH:
		/* Nothing to do here, just verify correct option */
		break;
	case ARMV8_CRYPTO_AUTH_AS_HMAC:
		arg.digest.hmac.key = sess->auth.hmac.key;
		arg.digest.hmac.i_key_pad = sess->auth.hmac.i_key_pad;
		arg.digest.hmac.o_key_pad = sess->auth.hmac.o_key_pad;
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}

	if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_GENERATE) {
		adst = op->sym->auth.digest.data;
		if (adst == NULL) {
			adst = rte_pktmbuf_mtod_offset(m_adst,
					uint8_t *,
					op->sym->auth.data.offset +
					op->sym->auth.data.length);
		}
	} else {
		adst = qp->temp_digest;
	}

	arg.cipher.iv = rte_crypto_op_ctod_offset(op, uint8_t *,
					sess->cipher.iv.offset);
	arg.cipher.key = sess->cipher.key.data;
	/* Acquire combined mode function */
	crypto_func = sess->crypto_func;
	ARMV8_CRYPTO_ASSERT(crypto_func != NULL);
	error = crypto_func(csrc, cdst, clen, asrc, adst, alen, &arg);
	if (error != 0) {
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}

	op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
	if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
		if (memcmp(adst, op->sym->auth.digest.data,
				sess->auth.digest_length) != 0) {
			op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
		}
	}
}
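
/*
 * Note on verification: for RTE_CRYPTO_AUTH_OP_VERIFY the digest is
 * computed into qp->temp_digest and compared against the digest
 * supplied with the op, so the packet's original digest is never
 * overwritten.
 */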

/** Process crypto operation for mbuf */
static inline int
process_op(struct armv8_crypto_qp *qp, struct rte_crypto_op *op,
		struct armv8_crypto_session *sess)
{
	struct rte_mbuf *msrc, *mdst;

	msrc = op->sym->m_src;
	mdst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;

	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;

	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER: /* Fall through */
		process_armv8_chained_op(qp, op, sess, msrc, mdst);
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
	}

	/* Free session if a session-less crypto op */
	if (op->sess_type == RTE_CRYPTO_OP_SESSIONLESS) {
		memset(sess, 0, sizeof(struct armv8_crypto_session));
		memset(op->sym->session, 0,
			rte_cryptodev_sym_get_existing_header_session_size(
				op->sym->session));
		rte_mempool_put(qp->sess_mp, sess);
		rte_mempool_put(qp->sess_mp_priv, op->sym->session);
		op->sym->session = NULL;
	}

	if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
		op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;

	if (unlikely(op->status == RTE_CRYPTO_OP_STATUS_ERROR))
		return -1;

	return 0;
}

/*
 *------------------------------------------------------------------------------
 * PMD Framework
 *------------------------------------------------------------------------------
 */

/** Enqueue burst */
static uint16_t
armv8_crypto_pmd_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	struct armv8_crypto_session *sess;
	struct armv8_crypto_qp *qp = queue_pair;
	int i, retval;

	for (i = 0; i < nb_ops; i++) {
		sess = get_session(qp, ops[i]);
		if (unlikely(sess == NULL))
			goto enqueue_err;

		retval = process_op(qp, ops[i], sess);
		if (unlikely(retval < 0))
			goto enqueue_err;
	}

	retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i,
			NULL);
	qp->stats.enqueued_count += retval;

	return retval;

enqueue_err:
	retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i,
			NULL);
	if (ops[i] != NULL)
		ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;

	qp->stats.enqueue_err_count++;
	return retval;
}

/** Dequeue burst */
static uint16_t
armv8_crypto_pmd_dequeue_burst(void *queue_pair, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	struct armv8_crypto_qp *qp = queue_pair;
	unsigned int nb_dequeued = 0;

	nb_dequeued = rte_ring_dequeue_burst(qp->processed_ops,
			(void **)ops, nb_ops, NULL);
	qp->stats.dequeued_count += nb_dequeued;

	return nb_dequeued;
}
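
/*
 * Typical application-side use of the burst API (a minimal sketch;
 * dev_id, qp_id and the ops[] array are assumed to be set up elsewhere):
 *
 *	uint16_t n = rte_cryptodev_enqueue_burst(dev_id, qp_id, ops, nb);
 *	...
 *	n = rte_cryptodev_dequeue_burst(dev_id, qp_id, ops, nb);
 *
 * Because this PMD processes ops synchronously at enqueue time, a
 * dequeue immediately after enqueue will normally return the completed
 * ops from the processed_ops ring.
 */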

/** Create ARMv8 crypto device */
static int
cryptodev_armv8_crypto_create(const char *name,
			struct rte_vdev_device *vdev,
			struct rte_cryptodev_pmd_init_params *init_params)
{
	struct rte_cryptodev *dev;
	struct armv8_crypto_private *internals;

	/* Check CPU for support for AES instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_AES)) {
		ARMV8_CRYPTO_LOG_ERR(
			"AES instructions not supported by CPU");
		return -EFAULT;
	}

	/* Check CPU for support for SHA instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA1) ||
	    !rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA2)) {
		ARMV8_CRYPTO_LOG_ERR(
			"SHA1/SHA2 instructions not supported by CPU");
		return -EFAULT;
	}

	/* Check CPU for support for Advanced SIMD instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_NEON)) {
		ARMV8_CRYPTO_LOG_ERR(
			"Advanced SIMD instructions not supported by CPU");
		return -EFAULT;
	}

	dev = rte_cryptodev_pmd_create(name, &vdev->device, init_params);
	if (dev == NULL) {
		ARMV8_CRYPTO_LOG_ERR("failed to create cryptodev vdev");
		goto init_error;
	}

	dev->driver_id = cryptodev_driver_id;
	dev->dev_ops = rte_armv8_crypto_pmd_ops;

	/* Register rx/tx burst functions for data path */
	dev->dequeue_burst = armv8_crypto_pmd_dequeue_burst;
	dev->enqueue_burst = armv8_crypto_pmd_enqueue_burst;

	dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
			RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING |
			RTE_CRYPTODEV_FF_CPU_NEON |
			RTE_CRYPTODEV_FF_CPU_ARM_CE;

	internals = dev->data->dev_private;

	internals->max_nb_qpairs = init_params->max_nb_queue_pairs;

	return 0;

init_error:
	ARMV8_CRYPTO_LOG_ERR(
		"driver %s: cryptodev_armv8_crypto_create failed",
		init_params->name);

	cryptodev_armv8_crypto_uninit(vdev);
	return -EFAULT;
}

/** Initialise ARMv8 crypto device */
static int
cryptodev_armv8_crypto_init(struct rte_vdev_device *vdev)
{
	struct rte_cryptodev_pmd_init_params init_params = {
		"",
		sizeof(struct armv8_crypto_private),
		rte_socket_id(),
		RTE_CRYPTODEV_PMD_DEFAULT_MAX_NB_QUEUE_PAIRS
	};
	const char *name;
	const char *input_args;

	name = rte_vdev_device_name(vdev);
	if (name == NULL)
		return -EINVAL;
	input_args = rte_vdev_device_args(vdev);
	rte_cryptodev_pmd_parse_input_args(&init_params, input_args);

	return cryptodev_armv8_crypto_create(name, vdev, &init_params);
}

/** Uninitialise ARMv8 crypto device */
static int
cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev)
{
	struct rte_cryptodev *cryptodev;
	const char *name;

	name = rte_vdev_device_name(vdev);
	if (name == NULL)
		return -EINVAL;

	RTE_LOG(INFO, PMD,
		"Closing ARMv8 crypto device %s on numa socket %u\n",
		name, rte_socket_id());

	cryptodev = rte_cryptodev_pmd_get_named_dev(name);
	if (cryptodev == NULL)
		return -ENODEV;

	return rte_cryptodev_pmd_destroy(cryptodev);
}

static struct rte_vdev_driver armv8_crypto_pmd_drv = {
	.probe = cryptodev_armv8_crypto_init,
	.remove = cryptodev_armv8_crypto_uninit
};

static struct cryptodev_driver armv8_crypto_drv;

RTE_PMD_REGISTER_VDEV(CRYPTODEV_NAME_ARMV8_PMD, armv8_crypto_pmd_drv);
RTE_PMD_REGISTER_ALIAS(CRYPTODEV_NAME_ARMV8_PMD, cryptodev_armv8_pmd);
RTE_PMD_REGISTER_PARAM_STRING(CRYPTODEV_NAME_ARMV8_PMD,
	"max_nb_queue_pairs=<int> "
	"socket_id=<int>");
RTE_PMD_REGISTER_CRYPTO_DRIVER(armv8_crypto_drv, armv8_crypto_pmd_drv.driver,
		cryptodev_driver_id);
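
/*
 * The vdev can be instantiated from the EAL command line. A typical
 * invocation (assuming CRYPTODEV_NAME_ARMV8_PMD expands to
 * "crypto_armv8") looks like:
 *
 *	--vdev "crypto_armv8,max_nb_queue_pairs=2,socket_id=0"
 */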