From d6946fe9d41b4d71143bd0346a8a871da422fb95 Mon Sep 17 00:00:00 2001
From: Marcin Smoczynski
Date: Tue, 4 Feb 2020 14:12:52 +0100
Subject: [PATCH] crypto/aesni_gcm: support CPU crypto

Add support for CPU crypto mode by introducing the required handler.
Authenticated encryption and decryption are supported with tag
generation/verification.

CPU crypto support includes both the AES-GCM and GMAC algorithms.

Signed-off-by: Marcin Smoczynski
Acked-by: Pablo de Lara
Acked-by: Fan Zhang
Tested-by: Konstantin Ananyev
Acked-by: Konstantin Ananyev
---
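[Usage note, kept below the "---" marker so it stays out of the commit
message: the handler added here backs the generic
rte_cryptodev_sym_cpu_crypto_process() API introduced in DPDK 20.02. The
sketch below shows how a caller might submit one in-place AES-GCM
operation through that API. It is a minimal illustration, not part of the
patch: the helper name cpu_crypto_one_op() and its buffer parameters are
hypothetical, while the structures and the call itself follow the 20.02
definitions.]

#include <rte_cryptodev.h>

/* hypothetical helper: one in-place AES-GCM op on an initialised session */
static int
cpu_crypto_one_op(uint8_t dev_id, struct rte_cryptodev_sym_session *sess,
	void *buf, uint32_t len, void *iv, void *aad, void *digest)
{
	/* one contiguous segment, ciphered in place */
	struct rte_crypto_vec seg = { .base = buf, .len = len };
	struct rte_crypto_sgl sgl = { .vec = &seg, .num = 1 };
	int32_t status = 0;
	/* no data offsets; this PMD ignores "ofs" */
	union rte_crypto_sym_ofs ofs = { .raw = 0 };
	struct rte_crypto_sym_vec vec = {
		.sgl = &sgl,
		.iv = &iv,
		.aad = &aad,
		.digest = &digest,
		.status = &status,
		.num = 1,
	};

	/*
	 * Runs synchronously on the calling lcore and returns the number
	 * of elements processed successfully; per-element errno values
	 * are written to vec.status[].
	 */
	if (rte_cryptodev_sym_cpu_crypto_process(dev_id, sess, ofs, &vec) != 1)
		return -status;
	return 0;
}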
 doc/guides/cryptodevs/aesni_gcm.rst          |   7 +-
 doc/guides/cryptodevs/features/aesni_gcm.ini |   1 +
 doc/guides/rel_notes/release_20_02.rst       |   3 +-
 drivers/crypto/aesni_gcm/aesni_gcm_ops.h     |  11 +-
 drivers/crypto/aesni_gcm/aesni_gcm_pmd.c     | 222 +++++++++++++++++-
 drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c |   4 +-
 .../crypto/aesni_gcm/aesni_gcm_pmd_private.h |  13 +-
 7 files changed, 249 insertions(+), 12 deletions(-)

diff --git a/doc/guides/cryptodevs/aesni_gcm.rst b/doc/guides/cryptodevs/aesni_gcm.rst
index 151aa30606..a25b63109f 100644
--- a/doc/guides/cryptodevs/aesni_gcm.rst
+++ b/doc/guides/cryptodevs/aesni_gcm.rst
@@ -1,5 +1,5 @@
 .. SPDX-License-Identifier: BSD-3-Clause
-   Copyright(c) 2016-2019 Intel Corporation.
+   Copyright(c) 2016-2020 Intel Corporation.
 
 AES-NI GCM Crypto Poll Mode Driver
 ==================================
@@ -9,6 +9,11 @@ The AES-NI GCM PMD (**librte_pmd_aesni_gcm**) provides poll mode crypto driver
 support for utilizing Intel multi buffer library (see AES-NI Multi-buffer PMD
 documentation to learn more about it, including installation).
 
+The AES-NI GCM PMD supports a synchronous mode of operation with the
+``rte_cryptodev_sym_cpu_crypto_process`` function call for both AES-GCM and
+GMAC; however, GMAC support is limited to one segment per operation. Please
+refer to the ``rte_crypto`` programmer's guide for more detail.
+
 Features
 --------
 
diff --git a/doc/guides/cryptodevs/features/aesni_gcm.ini b/doc/guides/cryptodevs/features/aesni_gcm.ini
index 87eac0fbff..949d6a088d 100644
--- a/doc/guides/cryptodevs/features/aesni_gcm.ini
+++ b/doc/guides/cryptodevs/features/aesni_gcm.ini
@@ -14,6 +14,7 @@
 CPU AVX512             = Y
 In Place SGL           = Y
 OOP SGL In LB  Out     = Y
 OOP LB  In LB  Out     = Y
+CPU crypto             = Y
 ;
 ; Supported crypto algorithms of the 'aesni_gcm' crypto driver.
 ;
diff --git a/doc/guides/rel_notes/release_20_02.rst b/doc/guides/rel_notes/release_20_02.rst
index 0f0519e052..b86e13761d 100644
--- a/doc/guides/rel_notes/release_20_02.rst
+++ b/doc/guides/rel_notes/release_20_02.rst
@@ -134,7 +134,8 @@ New Features
 
   A new API is introduced in crypto library to handle synchronous cryptographic
   operations allowing to achieve performance gain for cryptodevs which use
-  CPU based acceleration, such as Intel AES-NI.
+  CPU based acceleration, such as Intel AES-NI. An implementation for aesni_gcm
+  cryptodev is provided.
 
 * **Added handling of mixed algorithms in encrypted digest requests in QAT PMD.**
 
diff --git a/drivers/crypto/aesni_gcm/aesni_gcm_ops.h b/drivers/crypto/aesni_gcm/aesni_gcm_ops.h
index e272f10671..74acac09cd 100644
--- a/drivers/crypto/aesni_gcm/aesni_gcm_ops.h
+++ b/drivers/crypto/aesni_gcm/aesni_gcm_ops.h
@@ -1,5 +1,5 @@
 /* SPDX-License-Identifier: BSD-3-Clause
- * Copyright(c) 2016-2017 Intel Corporation
+ * Copyright(c) 2016-2020 Intel Corporation
  */
 
 #ifndef _AESNI_GCM_OPS_H_
@@ -65,4 +65,13 @@ struct aesni_gcm_ops {
	aesni_gcm_finalize_t finalize_dec;
 };
 
+/** GCM per-session operation handlers */
+struct aesni_gcm_session_ops {
+	aesni_gcm_t cipher;
+	aesni_gcm_pre_t pre;
+	aesni_gcm_init_t init;
+	aesni_gcm_update_t update;
+	aesni_gcm_finalize_t finalize;
+};
+
 #endif /* _AESNI_GCM_OPS_H_ */
diff --git a/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c b/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c
index 1a03be31dc..a1caab9939 100644
--- a/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c
+++ b/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c
@@ -1,5 +1,5 @@
 /* SPDX-License-Identifier: BSD-3-Clause
- * Copyright(c) 2016-2017 Intel Corporation
+ * Copyright(c) 2016-2020 Intel Corporation
  */
 
 #include
@@ -15,6 +15,31 @@
 
 static uint8_t cryptodev_driver_id;
 
+/* setup session handlers */
+static void
+set_func_ops(struct aesni_gcm_session *s, const struct aesni_gcm_ops *gcm_ops)
+{
+	s->ops.pre = gcm_ops->pre;
+	s->ops.init = gcm_ops->init;
+
+	switch (s->op) {
+	case AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION:
+		s->ops.cipher = gcm_ops->enc;
+		s->ops.update = gcm_ops->update_enc;
+		s->ops.finalize = gcm_ops->finalize_enc;
+		break;
+	case AESNI_GCM_OP_AUTHENTICATED_DECRYPTION:
+		s->ops.cipher = gcm_ops->dec;
+		s->ops.update = gcm_ops->update_dec;
+		s->ops.finalize = gcm_ops->finalize_dec;
+		break;
+	case AESNI_GMAC_OP_GENERATE:
+	case AESNI_GMAC_OP_VERIFY:
+		s->ops.finalize = gcm_ops->finalize_enc;
+		break;
+	}
+}
+
 /** Parse crypto xform chain and set private session parameters */
 int
 aesni_gcm_set_session_parameters(const struct aesni_gcm_ops *gcm_ops,
@@ -65,6 +90,7 @@ aesni_gcm_set_session_parameters(const struct aesni_gcm_ops *gcm_ops,
		/* Select Crypto operation */
		if (aead_xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
			sess->op = AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION;
+		/* op == RTE_CRYPTO_AEAD_OP_DECRYPT */
		else
			sess->op = AESNI_GCM_OP_AUTHENTICATED_DECRYPTION;
 
@@ -78,7 +104,6 @@ aesni_gcm_set_session_parameters(const struct aesni_gcm_ops *gcm_ops,
		return -ENOTSUP;
	}
 
-
	/* IV check */
	if (sess->iv.length != 16 && sess->iv.length != 12 &&
			sess->iv.length != 0) {
@@ -102,6 +127,10 @@ aesni_gcm_set_session_parameters(const struct aesni_gcm_ops *gcm_ops,
		return -EINVAL;
	}
 
+	/* setup session handlers */
+	set_func_ops(sess, &gcm_ops[sess->key]);
+
+	/* pre-generate key */
	gcm_ops[sess->key].pre(key, &sess->gdata_key);
 
	/* Digest check */
@@ -356,6 +385,191 @@ process_gcm_crypto_op(struct aesni_gcm_qp *qp, struct rte_crypto_op *op,
	return 0;
 }
 
+static inline void
+aesni_gcm_fill_error_code(struct rte_crypto_sym_vec *vec, int32_t errnum)
+{
+	uint32_t i;
+
+	for (i = 0; i < vec->num; i++)
+		vec->status[i] = errnum;
+}
+
+
+static inline int32_t
+aesni_gcm_sgl_op_finalize_encryption(const struct aesni_gcm_session *s,
+		struct gcm_context_data *gdata_ctx, uint8_t *digest)
+{
+	if (s->req_digest_length != s->gen_digest_length) {
+		uint8_t tmpdigest[s->gen_digest_length];
+
+		s->ops.finalize(&s->gdata_key, gdata_ctx, tmpdigest,
+				s->gen_digest_length);
+		memcpy(digest, tmpdigest, s->req_digest_length);
+	} else {
+		s->ops.finalize(&s->gdata_key, gdata_ctx, digest,
+				s->gen_digest_length);
+	}
+
+	return 0;
+}
+
+static inline int32_t
+aesni_gcm_sgl_op_finalize_decryption(const struct aesni_gcm_session *s,
+		struct gcm_context_data *gdata_ctx, uint8_t *digest)
+{
+	uint8_t tmpdigest[s->gen_digest_length];
+
+	s->ops.finalize(&s->gdata_key, gdata_ctx, tmpdigest,
+			s->gen_digest_length);
+
+	return memcmp(digest, tmpdigest, s->req_digest_length) == 0 ? 0 :
+		EBADMSG;
+}
+
+static inline void
+aesni_gcm_process_gcm_sgl_op(const struct aesni_gcm_session *s,
+		struct gcm_context_data *gdata_ctx, struct rte_crypto_sgl *sgl,
+		void *iv, void *aad)
+{
+	uint32_t i;
+
+	/* init crypto operation */
+	s->ops.init(&s->gdata_key, gdata_ctx, iv, aad,
+		(uint64_t)s->aad_length);
+
+	/* update with sgl data */
+	for (i = 0; i < sgl->num; i++) {
+		struct rte_crypto_vec *vec = &sgl->vec[i];
+
+		s->ops.update(&s->gdata_key, gdata_ctx, vec->base, vec->base,
+			vec->len);
+	}
+}
+
+static inline void
+aesni_gcm_process_gmac_sgl_op(const struct aesni_gcm_session *s,
+		struct gcm_context_data *gdata_ctx, struct rte_crypto_sgl *sgl,
+		void *iv)
+{
+	s->ops.init(&s->gdata_key, gdata_ctx, iv, sgl->vec[0].base,
+		sgl->vec[0].len);
+}
+
+static inline uint32_t
+aesni_gcm_sgl_encrypt(struct aesni_gcm_session *s,
+		struct gcm_context_data *gdata_ctx, struct rte_crypto_sym_vec *vec)
+{
+	uint32_t i, processed;
+
+	processed = 0;
+	for (i = 0; i < vec->num; ++i) {
+		aesni_gcm_process_gcm_sgl_op(s, gdata_ctx,
+			&vec->sgl[i], vec->iv[i], vec->aad[i]);
+		vec->status[i] = aesni_gcm_sgl_op_finalize_encryption(s,
+			gdata_ctx, vec->digest[i]);
+		processed += (vec->status[i] == 0);
+	}
+
+	return processed;
+}
+
+static inline uint32_t
+aesni_gcm_sgl_decrypt(struct aesni_gcm_session *s,
+		struct gcm_context_data *gdata_ctx, struct rte_crypto_sym_vec *vec)
+{
+	uint32_t i, processed;
+
+	processed = 0;
+	for (i = 0; i < vec->num; ++i) {
+		aesni_gcm_process_gcm_sgl_op(s, gdata_ctx,
+			&vec->sgl[i], vec->iv[i], vec->aad[i]);
+		vec->status[i] = aesni_gcm_sgl_op_finalize_decryption(s,
+			gdata_ctx, vec->digest[i]);
+		processed += (vec->status[i] == 0);
+	}
+
+	return processed;
+}
+
+static inline uint32_t
+aesni_gmac_sgl_generate(struct aesni_gcm_session *s,
+		struct gcm_context_data *gdata_ctx, struct rte_crypto_sym_vec *vec)
+{
+	uint32_t i, processed;
+
+	processed = 0;
+	for (i = 0; i < vec->num; ++i) {
+		if (vec->sgl[i].num != 1) {
+			vec->status[i] = ENOTSUP;
+			continue;
+		}
+
+		aesni_gcm_process_gmac_sgl_op(s, gdata_ctx,
+			&vec->sgl[i], vec->iv[i]);
+		vec->status[i] = aesni_gcm_sgl_op_finalize_encryption(s,
+			gdata_ctx, vec->digest[i]);
+		processed += (vec->status[i] == 0);
+	}
+
+	return processed;
+}
+
+static inline uint32_t
+aesni_gmac_sgl_verify(struct aesni_gcm_session *s,
+		struct gcm_context_data *gdata_ctx, struct rte_crypto_sym_vec *vec)
+{
+	uint32_t i, processed;
+
+	processed = 0;
+	for (i = 0; i < vec->num; ++i) {
+		if (vec->sgl[i].num != 1) {
+			vec->status[i] = ENOTSUP;
+			continue;
+		}
+
+		aesni_gcm_process_gmac_sgl_op(s, gdata_ctx,
+			&vec->sgl[i], vec->iv[i]);
+		vec->status[i] = aesni_gcm_sgl_op_finalize_decryption(s,
+			gdata_ctx, vec->digest[i]);
+		processed += (vec->status[i] == 0);
+	}
+
+	return processed;
+}
+
+/** Process CPU crypto bulk operations */
+uint32_t
+aesni_gcm_pmd_cpu_crypto_process(struct rte_cryptodev *dev,
+	struct rte_cryptodev_sym_session *sess,
+	__rte_unused union rte_crypto_sym_ofs ofs,
+	struct rte_crypto_sym_vec *vec)
+{
+	void *sess_priv;
+	struct aesni_gcm_session *s;
+	struct gcm_context_data gdata_ctx;
+
+	sess_priv = get_sym_session_private_data(sess, dev->driver_id);
+	if (unlikely(sess_priv == NULL)) {
+		aesni_gcm_fill_error_code(vec, EINVAL);
+		return 0;
+	}
+
+	s = sess_priv;
+	switch (s->op) {
+	case AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION:
+		return aesni_gcm_sgl_encrypt(s, &gdata_ctx, vec);
+	case AESNI_GCM_OP_AUTHENTICATED_DECRYPTION:
+		return aesni_gcm_sgl_decrypt(s, &gdata_ctx, vec);
+	case AESNI_GMAC_OP_GENERATE:
+		return aesni_gmac_sgl_generate(s, &gdata_ctx, vec);
+	case AESNI_GMAC_OP_VERIFY:
+		return aesni_gmac_sgl_verify(s, &gdata_ctx, vec);
+	default:
+		aesni_gcm_fill_error_code(vec, EINVAL);
+		return 0;
+	}
+}
+
 /**
  * Process a completed job and return rte_mbuf which job processed
  *
@@ -527,7 +741,8 @@ aesni_gcm_create(const char *name,
			RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING |
			RTE_CRYPTODEV_FF_IN_PLACE_SGL |
			RTE_CRYPTODEV_FF_OOP_SGL_IN_LB_OUT |
-			RTE_CRYPTODEV_FF_OOP_LB_IN_LB_OUT;
+			RTE_CRYPTODEV_FF_OOP_LB_IN_LB_OUT |
+			RTE_CRYPTODEV_FF_SYM_CPU_CRYPTO;
 
	/* Check CPU for support for AES instruction set */
	if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_AES))
@@ -672,7 +887,6 @@ RTE_PMD_REGISTER_PARAM_STRING(CRYPTODEV_NAME_AESNI_GCM_PMD,
 RTE_PMD_REGISTER_CRYPTO_DRIVER(aesni_gcm_crypto_drv, aesni_gcm_pmd_drv.driver,
		cryptodev_driver_id);
 
-
 RTE_INIT(aesni_gcm_init_log)
 {
	aesni_gcm_logtype_driver = rte_log_register("pmd.crypto.aesni_gcm");
diff --git a/drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c b/drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c
index 2f66c7c58e..c5e0878f55 100644
--- a/drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c
+++ b/drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c
@@ -1,5 +1,5 @@
 /* SPDX-License-Identifier: BSD-3-Clause
- * Copyright(c) 2016 Intel Corporation
+ * Copyright(c) 2016-2020 Intel Corporation
  */
 
 #include
@@ -331,6 +331,8 @@ struct rte_cryptodev_ops aesni_gcm_pmd_ops = {
		.queue_pair_release	= aesni_gcm_pmd_qp_release,
		.queue_pair_count	= aesni_gcm_pmd_qp_count,
 
+		.sym_cpu_process	= aesni_gcm_pmd_cpu_crypto_process,
+
		.sym_session_get_size	= aesni_gcm_pmd_sym_session_get_size,
		.sym_session_configure	= aesni_gcm_pmd_sym_session_configure,
		.sym_session_clear	= aesni_gcm_pmd_sym_session_clear
diff --git a/drivers/crypto/aesni_gcm/aesni_gcm_pmd_private.h b/drivers/crypto/aesni_gcm/aesni_gcm_pmd_private.h
index 2039adb533..080d4f7e49 100644
--- a/drivers/crypto/aesni_gcm/aesni_gcm_pmd_private.h
+++ b/drivers/crypto/aesni_gcm/aesni_gcm_pmd_private.h
@@ -1,5 +1,5 @@
 /* SPDX-License-Identifier: BSD-3-Clause
- * Copyright(c) 2016-2017 Intel Corporation
+ * Copyright(c) 2016-2020 Intel Corporation
  */
 
 #ifndef _AESNI_GCM_PMD_PRIVATE_H_
@@ -92,6 +92,8 @@ struct aesni_gcm_session {
	/**< GCM key type */
	struct gcm_key_data gdata_key;
	/**< GCM parameters */
+	struct aesni_gcm_session_ops ops;
+	/**< Session handlers */
 };
 
 
@@ -109,10 +111,13 @@ aesni_gcm_set_session_parameters(const struct aesni_gcm_ops *ops,
		struct aesni_gcm_session *sess,
		const struct rte_crypto_sym_xform *xform);
 
-
-/**
- * Device specific operations function pointer structure */
+/* Device specific operations function pointer structure */
 extern struct rte_cryptodev_ops *rte_aesni_gcm_pmd_ops;
 
+/** CPU crypto bulk process handler */
+uint32_t
+aesni_gcm_pmd_cpu_crypto_process(struct rte_cryptodev *dev,
+	struct rte_cryptodev_sym_session *sess, union rte_crypto_sym_ofs ofs,
+	struct rte_crypto_sym_vec *vec);
+
 #endif /* _AESNI_GCM_PMD_PRIVATE_H_ */
-- 
2.20.1
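[Usage note, placed after the mail signature so it is ignored by git am:
the GMAC handlers above feed the single data segment to the GCM init
stage as AAD (see aesni_gcm_process_gmac_sgl_op()), which is why vectors
with sgl[i].num != 1 are rejected with ENOTSUP. The sketch below shows
how a caller might verify a GMAC tag synchronously. It is a minimal
illustration, not part of the patch: the helper name cpu_gmac_verify()
and its parameters are hypothetical, and the session is assumed to have
been created with an RTE_CRYPTO_AUTH_AES_GMAC transform.]

#include <rte_cryptodev.h>

/* hypothetical helper: synchronous GMAC verification of one buffer */
static int
cpu_gmac_verify(uint8_t dev_id, struct rte_cryptodev_sym_session *gmac_sess,
	void *data, uint32_t len, void *iv, void *tag)
{
	/* GMAC is limited to exactly one segment per operation */
	struct rte_crypto_vec seg = { .base = data, .len = len };
	struct rte_crypto_sgl sgl = { .vec = &seg, .num = 1 };
	int32_t status = 0;
	union rte_crypto_sym_ofs ofs = { .raw = 0 };
	struct rte_crypto_sym_vec vec = {
		.sgl = &sgl,
		.iv = &iv,
		.aad = NULL,	/* unused on the GMAC path */
		.digest = &tag,	/* tag to verify against */
		.status = &status,
		.num = 1,
	};

	/* status is set to EBADMSG when the computed tag does not match */
	return rte_cryptodev_sym_cpu_crypto_process(dev_id, gmac_sess,
		ofs, &vec) == 1 ? 0 : -status;
}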