From aa8128b1d830dbfcd92636a5fb6038a3b877ffc1 Mon Sep 17 00:00:00 2001
From: Fiona Trahe
Date: Thu, 24 Nov 2016 18:29:24 +0000
Subject: [PATCH] app/test: check AES cipher-only on QAT

Extended functional AES-CBC and AES-CTR cipher-only tests to run
on the QAT PMD. Added AES-CBC cipher-only performance tests on the
QAT PMD. No driver changes are needed, but now that cipher-only is
tested, the QAT documentation is updated to remove the constraint.

Signed-off-by: Fiona Trahe
Acked-by: Arek Kusztal
---
 app/test/test_cryptodev.c                  | 18 ++++
 app/test/test_cryptodev_aes_test_vectors.h | 36 +++++---
 app/test/test_cryptodev_perf.c             | 96 ++++++++++++++--------
 doc/guides/cryptodevs/qat.rst              |  1 -
 4 files changed, 102 insertions(+), 49 deletions(-)

diff --git a/app/test/test_cryptodev.c b/app/test/test_cryptodev.c
index 872f8b431e..00dced5369 100644
--- a/app/test/test_cryptodev.c
+++ b/app/test/test_cryptodev.c
@@ -1529,6 +1529,22 @@ test_AES_chain_qat_all(void)
 	return TEST_SUCCESS;
 }
 
+static int
+test_AES_cipheronly_qat_all(void)
+{
+	struct crypto_testsuite_params *ts_params = &testsuite_params;
+	int status;
+
+	status = test_blockcipher_all_tests(ts_params->mbuf_pool,
+		ts_params->op_mpool, ts_params->valid_devs[0],
+		RTE_CRYPTODEV_QAT_SYM_PMD,
+		BLKCIPHER_AES_CIPHERONLY_TYPE);
+
+	TEST_ASSERT_EQUAL(status, 0, "Test failed");
+
+	return TEST_SUCCESS;
+}
+
 static int
 test_authonly_openssl_all(void)
 {
@@ -6032,6 +6048,8 @@ static struct unit_test_suite cryptodev_qat_testsuite = {
 				test_multi_session),
 		TEST_CASE_ST(ut_setup, ut_teardown, test_AES_chain_qat_all),
+		TEST_CASE_ST(ut_setup, ut_teardown,
+				test_AES_cipheronly_qat_all),
 		TEST_CASE_ST(ut_setup, ut_teardown, test_3DES_chain_qat_all),
 		TEST_CASE_ST(ut_setup, ut_teardown,
 				test_3DES_cipheronly_qat_all),
diff --git a/app/test/test_cryptodev_aes_test_vectors.h b/app/test/test_cryptodev_aes_test_vectors.h
index 1c68f93e6a..efbe7da6cd 100644
--- a/app/test/test_cryptodev_aes_test_vectors.h
+++ b/app/test/test_cryptodev_aes_test_vectors.h
@@ -1024,73 +1024,85 @@ static const struct blockcipher_test_case aes_cipheronly_test_cases[] = {
 		.test_descr = "AES-128-CBC Encryption",
 		.test_data = &aes_test_data_4,
 		.op_mask = BLOCKCIPHER_TEST_OP_ENCRYPT,
-		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL
+		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
+			BLOCKCIPHER_TEST_TARGET_PMD_QAT
 	},
 	{
 		.test_descr = "AES-128-CBC Decryption",
 		.test_data = &aes_test_data_4,
 		.op_mask = BLOCKCIPHER_TEST_OP_DECRYPT,
-		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL
+		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
+			BLOCKCIPHER_TEST_TARGET_PMD_QAT
 	},
 	{
 		.test_descr = "AES-192-CBC Encryption",
 		.test_data = &aes_test_data_10,
 		.op_mask = BLOCKCIPHER_TEST_OP_ENCRYPT,
-		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL
+		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
+			BLOCKCIPHER_TEST_TARGET_PMD_QAT
 	},
 	{
 		.test_descr = "AES-192-CBC Decryption",
 		.test_data = &aes_test_data_10,
 		.op_mask = BLOCKCIPHER_TEST_OP_DECRYPT,
-		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL
+		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
+			BLOCKCIPHER_TEST_TARGET_PMD_QAT
 	},
 	{
 		.test_descr = "AES-256-CBC Encryption",
 		.test_data = &aes_test_data_11,
 		.op_mask = BLOCKCIPHER_TEST_OP_ENCRYPT,
-		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL
+		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
+			BLOCKCIPHER_TEST_TARGET_PMD_QAT
 	},
 	{
 		.test_descr = "AES-256-CBC Decryption",
 		.test_data = &aes_test_data_11,
 		.op_mask = BLOCKCIPHER_TEST_OP_DECRYPT,
-		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL
+		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
+			BLOCKCIPHER_TEST_TARGET_PMD_QAT
 	},
 	{
 		.test_descr = "AES-128-CTR Encryption",
 		.test_data = &aes_test_data_1,
 		.op_mask = BLOCKCIPHER_TEST_OP_ENCRYPT,
-		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL
+		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
+			BLOCKCIPHER_TEST_TARGET_PMD_QAT
 	},
 	{
 		.test_descr = "AES-128-CTR Decryption",
 		.test_data = &aes_test_data_1,
 		.op_mask = BLOCKCIPHER_TEST_OP_DECRYPT,
-		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL
+		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
+			BLOCKCIPHER_TEST_TARGET_PMD_QAT
 	},
 	{
 		.test_descr = "AES-192-CTR Encryption",
 		.test_data = &aes_test_data_2,
 		.op_mask = BLOCKCIPHER_TEST_OP_ENCRYPT,
-		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL
+		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
+			BLOCKCIPHER_TEST_TARGET_PMD_QAT
 	},
 	{
 		.test_descr = "AES-192-CTR Decryption",
 		.test_data = &aes_test_data_2,
 		.op_mask = BLOCKCIPHER_TEST_OP_DECRYPT,
-		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL
+		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
+			BLOCKCIPHER_TEST_TARGET_PMD_QAT
 	},
 	{
 		.test_descr = "AES-256-CTR Encryption",
 		.test_data = &aes_test_data_3,
 		.op_mask = BLOCKCIPHER_TEST_OP_ENCRYPT,
-		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL
+		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
+			BLOCKCIPHER_TEST_TARGET_PMD_QAT
 	},
 	{
 		.test_descr = "AES-256-CTR Decryption",
 		.test_data = &aes_test_data_3,
 		.op_mask = BLOCKCIPHER_TEST_OP_DECRYPT,
-		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL
+		.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
+			BLOCKCIPHER_TEST_TARGET_PMD_QAT
 	},
 };
 
diff --git a/app/test/test_cryptodev_perf.c b/app/test/test_cryptodev_perf.c
index 59a6891647..7751ff2dd3 100644
--- a/app/test/test_cryptodev_perf.c
+++ b/app/test/test_cryptodev_perf.c
@@ -166,15 +166,15 @@ test_perf_set_crypto_op_snow3g(struct rte_crypto_op *op, struct rte_mbuf *m,
 static inline struct rte_crypto_op *
 test_perf_set_crypto_op_aes(struct rte_crypto_op *op, struct rte_mbuf *m,
 		struct rte_cryptodev_sym_session *sess, unsigned int data_len,
-		unsigned int digest_len);
+		unsigned int digest_len, enum chain_mode chain);
 static inline struct rte_crypto_op *
 test_perf_set_crypto_op_aes_gcm(struct rte_crypto_op *op, struct rte_mbuf *m,
 		struct rte_cryptodev_sym_session *sess, unsigned int data_len,
-		unsigned int digest_len);
+		unsigned int digest_len, enum chain_mode chain __rte_unused);
 static inline struct rte_crypto_op *
 test_perf_set_crypto_op_3des(struct rte_crypto_op *op, struct rte_mbuf *m,
 		struct rte_cryptodev_sym_session *sess, unsigned int data_len,
-		unsigned int digest_len);
+		unsigned int digest_len, enum chain_mode chain __rte_unused);
 
 static uint32_t get_auth_digest_length(enum rte_crypto_auth_algorithm algo);
 
@@ -2285,7 +2285,8 @@ test_perf_openssl_optimise_cyclecount(struct perf_test_params *pparams)
 	static struct rte_crypto_op *(*test_perf_set_crypto_op)
 			(struct rte_crypto_op *, struct rte_mbuf *,
 					struct rte_cryptodev_sym_session *,
-					unsigned int, unsigned int);
+					unsigned int, unsigned int,
+					enum chain_mode);
 
 	unsigned int digest_length = get_auth_digest_length(pparams->auth_algo);
 
@@ -2323,14 +2324,14 @@ test_perf_openssl_optimise_cyclecount(struct perf_test_params *pparams)
 			break;
 		case RTE_CRYPTO_CIPHER_AES_GCM:
 			test_perf_set_crypto_op =
-						test_perf_set_crypto_op_aes_gcm;
+					test_perf_set_crypto_op_aes_gcm;
 			break;
 		default:
 			return TEST_FAILED;
 		}
 
 		op = test_perf_set_crypto_op(op, m, sess, pparams->buf_size,
-				digest_length);
+				digest_length, pparams->chain);
 		TEST_ASSERT_NOT_NULL(op, "Failed to attach op to session");
 
 		c_ops[i] = op;
@@ -2539,16 +2540,16 @@ test_perf_create_aes_sha_session(uint8_t dev_id, enum chain_mode chain,
 
 	cipher_xform.cipher.key.data = aes_key;
 	cipher_xform.cipher.key.length = cipher_key_len;
-
-	/* Setup HMAC Parameters */
-	auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
-	auth_xform.auth.op = RTE_CRYPTO_AUTH_OP_GENERATE;
-	auth_xform.auth.algo = auth_algo;
-
-	auth_xform.auth.key.data = hmac_sha_key;
-	auth_xform.auth.key.length = get_auth_key_max_length(auth_algo);
-	auth_xform.auth.digest_length = get_auth_digest_length(auth_algo);
-
+	if (chain != CIPHER_ONLY) {
+		/* Setup HMAC Parameters */
+		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
+		auth_xform.auth.op = RTE_CRYPTO_AUTH_OP_GENERATE;
+		auth_xform.auth.algo = auth_algo;
+		auth_xform.auth.key.data = hmac_sha_key;
+		auth_xform.auth.key.length = get_auth_key_max_length(auth_algo);
+		auth_xform.auth.digest_length =
+				get_auth_digest_length(auth_algo);
+	}
 	switch (chain) {
 	case CIPHER_HASH:
 		cipher_xform.next = &auth_xform;
@@ -2560,6 +2561,10 @@ test_perf_create_aes_sha_session(uint8_t dev_id, enum chain_mode chain,
 		cipher_xform.next = NULL;
 		/* Create Crypto session*/
 		return rte_cryptodev_sym_session_create(dev_id, &auth_xform);
+	case CIPHER_ONLY:
+		cipher_xform.next = NULL;
+		/* Create Crypto session*/
+		return rte_cryptodev_sym_session_create(dev_id, &cipher_xform);
 	default:
 		return NULL;
 	}
@@ -2706,8 +2711,8 @@ test_perf_create_pktmbuf(struct rte_mempool *mpool, unsigned buf_sz)
 
 static inline struct rte_crypto_op *
 test_perf_set_crypto_op_aes(struct rte_crypto_op *op, struct rte_mbuf *m,
-		struct rte_cryptodev_sym_session *sess, unsigned data_len,
-		unsigned digest_len)
+		struct rte_cryptodev_sym_session *sess, unsigned int data_len,
+		unsigned int digest_len, enum chain_mode chain)
 {
 	if (rte_crypto_op_attach_sym_session(op, sess) != 0) {
 		rte_crypto_op_free(op);
@@ -2715,13 +2720,26 @@ test_perf_set_crypto_op_aes(struct rte_crypto_op *op, struct rte_mbuf *m,
 	}
 
 	/* Authentication Parameters */
-	op->sym->auth.digest.data = rte_pktmbuf_mtod_offset(m, uint8_t *,
-					AES_CIPHER_IV_LENGTH + data_len);
-	op->sym->auth.digest.phys_addr = rte_pktmbuf_mtophys_offset(m,
-					AES_CIPHER_IV_LENGTH + data_len);
-	op->sym->auth.digest.length = digest_len;
-	op->sym->auth.aad.data = aes_iv;
-	op->sym->auth.aad.length = AES_CIPHER_IV_LENGTH;
+	if (chain == CIPHER_ONLY) {
+		op->sym->auth.digest.data = NULL;
+		op->sym->auth.digest.phys_addr = 0;
+		op->sym->auth.digest.length = 0;
+		op->sym->auth.aad.data = NULL;
+		op->sym->auth.aad.length = 0;
+		op->sym->auth.data.offset = 0;
+		op->sym->auth.data.length = 0;
+	} else {
+		op->sym->auth.digest.data = rte_pktmbuf_mtod_offset(m,
+				uint8_t *, AES_CIPHER_IV_LENGTH + data_len);
+		op->sym->auth.digest.phys_addr = rte_pktmbuf_mtophys_offset(m,
+				AES_CIPHER_IV_LENGTH + data_len);
+		op->sym->auth.digest.length = digest_len;
+		op->sym->auth.aad.data = aes_iv;
+		op->sym->auth.aad.length = AES_CIPHER_IV_LENGTH;
+		op->sym->auth.data.offset = AES_CIPHER_IV_LENGTH;
+		op->sym->auth.data.length = data_len;
+	}
+
 
 	/* Cipher Parameters */
 	op->sym->cipher.iv.data = rte_pktmbuf_mtod(m, uint8_t *);
@@ -2730,10 +2748,6 @@ test_perf_set_crypto_op_aes(struct rte_crypto_op *op, struct rte_mbuf *m,
 
 	rte_memcpy(op->sym->cipher.iv.data, aes_iv, AES_CIPHER_IV_LENGTH);
 
-	/* Data lengths/offsets Parameters */
-	op->sym->auth.data.offset = AES_CIPHER_IV_LENGTH;
-	op->sym->auth.data.length = data_len;
-
 	op->sym->cipher.data.offset = AES_CIPHER_IV_LENGTH;
 	op->sym->cipher.data.length = data_len;
 
@@ -2745,7 +2759,7 @@ test_perf_set_crypto_op_aes(struct rte_crypto_op *op, struct rte_mbuf *m,
 static inline struct rte_crypto_op *
 test_perf_set_crypto_op_aes_gcm(struct rte_crypto_op *op, struct rte_mbuf *m,
 		struct rte_cryptodev_sym_session *sess, unsigned int data_len,
-		unsigned int digest_len)
+		unsigned int digest_len, enum chain_mode chain __rte_unused)
 {
 	if (rte_crypto_op_attach_sym_session(op, sess) != 0) {
 		rte_crypto_op_free(op);
@@ -2878,7 +2892,7 @@ test_perf_set_crypto_op_snow3g_hash(struct rte_crypto_op *op,
 static inline struct rte_crypto_op *
 test_perf_set_crypto_op_3des(struct rte_crypto_op *op, struct rte_mbuf *m,
 		struct rte_cryptodev_sym_session *sess, unsigned int data_len,
-		unsigned int digest_len)
+		unsigned int digest_len, enum chain_mode chain __rte_unused)
 {
 	if (rte_crypto_op_attach_sym_session(op, sess) != 0) {
 		rte_crypto_op_free(op);
@@ -2961,8 +2975,10 @@ test_perf_aes_sha(uint8_t dev_id, uint16_t queue_id,
 			rte_pktmbuf_free(mbufs[k]);
 			return -1;
 		}
+
 		/* Make room for Digest and IV in mbuf */
-		rte_pktmbuf_append(mbufs[i], digest_length);
+		if (pparams->chain != CIPHER_ONLY)
+			rte_pktmbuf_append(mbufs[i], digest_length);
 		rte_pktmbuf_prepend(mbufs[i], AES_CIPHER_IV_LENGTH);
 	}
 
@@ -2984,7 +3000,8 @@ test_perf_aes_sha(uint8_t dev_id, uint16_t queue_id,
 			ops[i] = test_perf_set_crypto_op_aes(ops[i],
 				mbufs[i + (pparams->burst_size *
 					(j % NUM_MBUF_SETS))],
-				sess, pparams->buf_size, digest_length);
+				sess, pparams->buf_size, digest_length,
+				pparams->chain);
 
 		/* enqueue burst */
 		burst_enqueued = rte_cryptodev_enqueue_burst(dev_id,
@@ -3233,7 +3250,8 @@ test_perf_openssl(uint8_t dev_id, uint16_t queue_id,
 	static struct rte_crypto_op *(*test_perf_set_crypto_op)
 			(struct rte_crypto_op *, struct rte_mbuf *,
 					struct rte_cryptodev_sym_session *,
-					unsigned int, unsigned int);
+					unsigned int, unsigned int,
+					enum chain_mode);
 
 	switch (pparams->cipher_algo) {
 	case RTE_CRYPTO_CIPHER_3DES_CBC:
@@ -3294,7 +3312,8 @@ test_perf_openssl(uint8_t dev_id, uint16_t queue_id,
 			ops[i] = test_perf_set_crypto_op(ops[i],
 				mbufs[i + (pparams->burst_size *
 					(j % NUM_MBUF_SETS))],
-				sess, pparams->buf_size, digest_length);
+				sess, pparams->buf_size, digest_length,
+				pparams->chain);
 
 		/* enqueue burst */
 		burst_enqueued = rte_cryptodev_enqueue_burst(dev_id,
@@ -3379,9 +3398,14 @@ test_perf_aes_cbc_encrypt_digest_vary_pkt_size(void)
 	uint8_t i, j;
 
 	struct perf_test_params params_set[] = {
+		{
+			.chain = CIPHER_ONLY,
+			.cipher_algo = RTE_CRYPTO_CIPHER_AES_CBC,
+			.cipher_key_length = 16,
+			.auth_algo = RTE_CRYPTO_AUTH_NULL
+		},
 		{
 			.chain = CIPHER_HASH,
-
 			.cipher_algo = RTE_CRYPTO_CIPHER_AES_CBC,
 			.cipher_key_length = 16,
 			.auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC
diff --git a/doc/guides/cryptodevs/qat.rst b/doc/guides/cryptodevs/qat.rst
index 03d5c2dd26..aa09f6dcfc 100644
--- a/doc/guides/cryptodevs/qat.rst
+++ b/doc/guides/cryptodevs/qat.rst
@@ -75,7 +75,6 @@ Limitations
 
 * Chained mbufs are not supported.
 * Hash only is not supported except SNOW 3G UIA2 and KASUMI F9.
-* Cipher only is not supported except SNOW 3G UEA2, KASUMI F8 and 3DES.
 * Only supports the session-oriented API implementation (session-less APIs are not supported).
 * SNOW 3G (UEA2) and KASUMI (F8) supported only if cipher length, cipher offset fields are byte-aligned.
 * SNOW 3G (UIA2) and KASUMI (F9) supported only if hash length, hash offset fields are byte-aligned.
-- 
2.20.1