From: Pablo de Lara
Date: Fri, 1 Nov 2019 13:39:23 +0000 (+0000)
Subject: crypto/aesni_gcm: support in-place chained mbufs
X-Git-Url: http://git.droids-corp.org/?a=commitdiff_plain;h=4b701523742e3753d58949846ac4eeebc6d5a78f;p=dpdk.git

crypto/aesni_gcm: support in-place chained mbufs

The IPsec Multi-buffer library supports encryption across multiple
segments. When dealing with chained buffers (multiple segments), as
long as the operation is in-place, the destination buffer does not
have to be contiguous (unlike in the out-of-place case). Therefore,
the limitation of not supporting in-place chained mbufs can be
removed.

Signed-off-by: Pablo de Lara
---

diff --git a/app/test/test_cryptodev.c b/app/test/test_cryptodev.c
index 4c4ff7d189..1b561456d7 100644
--- a/app/test/test_cryptodev.c
+++ b/app/test/test_cryptodev.c
@@ -12826,6 +12826,8 @@ static struct unit_test_suite cryptodev_aesni_gcm_testsuite = {
 			test_AES_GCM_authenticated_decryption_sessionless_test_case_1),
 
 		/** Scatter-Gather */
+		TEST_CASE_ST(ut_setup, ut_teardown,
+			test_AES_GCM_auth_encrypt_SGL_in_place_1500B),
 		TEST_CASE_ST(ut_setup, ut_teardown,
 			test_AES_GCM_auth_encrypt_SGL_out_of_place_400B_1seg),
 
diff --git a/doc/guides/cryptodevs/aesni_gcm.rst b/doc/guides/cryptodevs/aesni_gcm.rst
index 0ccbc02141..151aa30606 100644
--- a/doc/guides/cryptodevs/aesni_gcm.rst
+++ b/doc/guides/cryptodevs/aesni_gcm.rst
@@ -25,7 +25,7 @@ AEAD algorithms:
 Limitations
 -----------
 
-* Chained mbufs are supported but only out-of-place (destination mbuf must be contiguous).
+* In out-of-place operations, chained destination mbufs are not supported.
 * Chained mbufs are only supported by RTE_CRYPTO_AEAD_AES_GCM algorithm,
   not RTE_CRYPTO_AUTH_AES_GMAC.
 * Cipher only is not supported.
diff --git a/doc/guides/cryptodevs/features/aesni_gcm.ini b/doc/guides/cryptodevs/features/aesni_gcm.ini
index 86303c5198..87eac0fbff 100644
--- a/doc/guides/cryptodevs/features/aesni_gcm.ini
+++ b/doc/guides/cryptodevs/features/aesni_gcm.ini
@@ -11,6 +11,7 @@ CPU SSE = Y
 CPU AVX = Y
 CPU AVX2 = Y
 CPU AVX512 = Y
+In Place SGL = Y
 OOP SGL In LB Out = Y
 OOP LB In LB Out = Y
 ;
@@ -35,4 +36,4 @@ AES GCM (256) = Y
 ;
 ; Supported Asymmetric algorithms of the 'aesni_gcm' crypto driver.
 ;
-[Asymmetric]
\ No newline at end of file
+[Asymmetric]
diff --git a/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c b/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c
index f0293625bc..1a03be31dc 100644
--- a/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c
+++ b/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c
@@ -196,6 +196,7 @@ process_gcm_crypto_op(struct aesni_gcm_qp *qp, struct rte_crypto_op *op,
 	uint32_t offset, data_offset, data_length;
 	uint32_t part_len, total_len, data_len;
 	uint8_t *tag;
+	unsigned int oop = 0;
 
 	if (session->op == AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION ||
 			session->op == AESNI_GCM_OP_AUTHENTICATED_DECRYPTION) {
@@ -217,27 +218,28 @@ process_gcm_crypto_op(struct aesni_gcm_qp *qp, struct rte_crypto_op *op,
 		RTE_ASSERT(m_src != NULL);
 	}
 
+	src = rte_pktmbuf_mtod_offset(m_src, uint8_t *, offset);
+
 	data_len = m_src->data_len - offset;
 	part_len = (data_len < data_length) ? data_len :
 			data_length;
 
-	/* Destination buffer is required when segmented source buffer */
-	RTE_ASSERT((part_len == data_length) ||
-			((part_len != data_length) &&
-					(sym_op->m_dst != NULL)));
-	/* Segmented destination buffer is not supported */
 	RTE_ASSERT((sym_op->m_dst == NULL) ||
 			((sym_op->m_dst != NULL) &&
 					rte_pktmbuf_is_contiguous(sym_op->m_dst)));
-
-	dst = sym_op->m_dst ?
-			rte_pktmbuf_mtod_offset(sym_op->m_dst, uint8_t *,
-					data_offset) :
-			rte_pktmbuf_mtod_offset(sym_op->m_src, uint8_t *,
+	/* In-place */
+	if (sym_op->m_dst == NULL || (sym_op->m_dst == sym_op->m_src))
+		dst = src;
+	/* Out-of-place */
+	else {
+		oop = 1;
+		/* Segmented destination buffer is not supported if operation is
+		 * Out-of-place */
+		RTE_ASSERT(rte_pktmbuf_is_contiguous(sym_op->m_dst));
+		dst = rte_pktmbuf_mtod_offset(sym_op->m_dst, uint8_t *,
 					data_offset);
-
-	src = rte_pktmbuf_mtod_offset(m_src, uint8_t *, offset);
+	}
 
 	iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
 				session->iv.offset);
@@ -255,12 +257,15 @@ process_gcm_crypto_op(struct aesni_gcm_qp *qp, struct rte_crypto_op *op,
 		total_len = data_length - part_len;
 
 		while (total_len) {
-			dst += part_len;
 			m_src = m_src->next;
 
 			RTE_ASSERT(m_src != NULL);
 
 			src = rte_pktmbuf_mtod(m_src, uint8_t *);
+			if (oop)
+				dst += part_len;
+			else
+				dst = src;
 
 			part_len = (m_src->data_len < total_len) ?
 					m_src->data_len : total_len;
@@ -292,12 +297,15 @@ process_gcm_crypto_op(struct aesni_gcm_qp *qp, struct rte_crypto_op *op,
 		total_len = data_length - part_len;
 
 		while (total_len) {
-			dst += part_len;
 			m_src = m_src->next;
 
 			RTE_ASSERT(m_src != NULL);
 
 			src = rte_pktmbuf_mtod(m_src, uint8_t *);
+			if (oop)
+				dst += part_len;
+			else
+				dst = src;
 
 			part_len = (m_src->data_len < total_len) ?
 					m_src->data_len : total_len;
@@ -517,6 +525,7 @@ aesni_gcm_create(const char *name,
 
 	dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
 			RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING |
+			RTE_CRYPTODEV_FF_IN_PLACE_SGL |
 			RTE_CRYPTODEV_FF_OOP_SGL_IN_LB_OUT |
 			RTE_CRYPTODEV_FF_OOP_LB_IN_LB_OUT;
 
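To make the new per-segment pointer handling easier to follow, here is a
minimal standalone sketch of the same segment walk. It is not part of the
patch: struct seg, process_sgl() and the XOR-based update() below are
hypothetical stand-ins for chained rte_mbufs and for the GCM update calls,
chosen only so the example compiles and runs on its own.

/*
 * Standalone sketch of the pointer handling enabled by the 'oop' flag.
 * "struct seg" and update() are hypothetical stand-ins, not DPDK APIs.
 */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

struct seg {
	uint8_t *data;      /* start of this segment's payload */
	uint32_t data_len;  /* number of bytes in this segment */
	struct seg *next;   /* next segment in the chain, or NULL */
};

/* Placeholder for the per-chunk cipher update (e.g. a GCM encrypt update). */
static void
update(uint8_t *dst, const uint8_t *src, uint32_t len)
{
	uint32_t i;

	for (i = 0; i < len; i++)
		dst[i] = src[i] ^ 0xff;
}

/*
 * Process 'data_length' bytes starting at 'offset' into the source chain.
 * dst_lb == NULL means in-place: the destination follows the source into
 * every segment, so a chained source needs no contiguous destination.
 * dst_lb != NULL means out-of-place into a linear (contiguous) buffer,
 * mirroring the limitation the driver keeps for out-of-place only.
 */
static void
process_sgl(struct seg *m_src, uint32_t offset, uint32_t data_length,
		uint8_t *dst_lb)
{
	int oop = (dst_lb != NULL);
	uint8_t *src, *dst;
	uint32_t part_len, total_len, data_len;

	/* Skip whole segments covered by the starting offset. */
	while (offset >= m_src->data_len) {
		offset -= m_src->data_len;
		m_src = m_src->next;
		assert(m_src != NULL);
	}

	src = m_src->data + offset;
	dst = oop ? dst_lb : src;

	data_len = m_src->data_len - offset;
	part_len = (data_len < data_length) ? data_len : data_length;

	update(dst, src, part_len);

	total_len = data_length - part_len;
	while (total_len) {
		m_src = m_src->next;
		assert(m_src != NULL);

		src = m_src->data;
		if (oop)
			dst += part_len;  /* advance inside the linear buffer */
		else
			dst = src;        /* in-place: jump to the new segment */

		part_len = (m_src->data_len < total_len) ?
				m_src->data_len : total_len;
		update(dst, src, part_len);
		total_len -= part_len;
	}
}

int
main(void)
{
	uint8_t a[4] = {1, 2, 3, 4}, b[6] = {5, 6, 7, 8, 9, 10};
	uint8_t out[6] = {0};
	uint32_t i;
	struct seg s1 = {b, sizeof(b), NULL};
	struct seg s0 = {a, sizeof(a), &s1};

	/* Out-of-place: 6 bytes from offset 2 land contiguously in out[]. */
	process_sgl(&s0, 2, 6, out);
	/* In-place: the same 6 bytes are transformed inside a[] and b[]. */
	process_sgl(&s0, 2, 6, NULL);

	for (i = 0; i < 6; i++)
		printf("%02x ", out[i]);
	printf("\n");
	return 0;
}

The design point mirrored from the patch: in-place, dst simply follows src
into each new segment, so a chained source needs no contiguous destination;
only the out-of-place path keeps the linear-destination requirement and
advances dst by part_len within that single buffer.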