{
struct crypto_testsuite_params *ts_params = &testsuite_params;
struct crypto_unittest_params *ut_params = &unittest_params;
- struct rte_cryptodev_sym_capability_idx cap_idx;
int retval;
uint8_t *ciphertext, *auth_tag;
tdata->key.data, tdata->key.len,
tdata->aad.len, tdata->auth_tag.len,
tdata->iv.len);
-
- cap_idx.type = RTE_CRYPTO_SYM_XFORM_AEAD;
- cap_idx.algo.aead = tdata->algo;
-
- if (rte_cryptodev_sym_capability_get(ts_params->valid_devs[0],
- &cap_idx) == NULL) {
- return -ENOTSUP;
- }
-
if (retval < 0)
return retval;
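
The hunks above drop the per-test capability probe: before exercising a vector, each test asked the device whether it advertises the AEAD algorithm and returned -ENOTSUP so the harness could report the case as unsupported rather than failed. A minimal sketch of that probe, assuming the standard cryptodev API (the helper name and its dev_id/algo parameters are illustrative stand-ins for ts_params->valid_devs[0] and tdata->algo):

/*
 * Sketch of the capability probe removed above (helper name and
 * parameters are illustrative, not from the patch).
 */
#include <errno.h>
#include <rte_cryptodev.h>

static int
aead_algo_supported(uint8_t dev_id, enum rte_crypto_aead_algorithm algo)
{
	struct rte_cryptodev_sym_capability_idx cap_idx;

	cap_idx.type = RTE_CRYPTO_SYM_XFORM_AEAD;
	cap_idx.algo.aead = algo;

	/* NULL means the device does not advertise this AEAD algorithm */
	if (rte_cryptodev_sym_capability_get(dev_id, &cap_idx) == NULL)
		return -ENOTSUP;

	return 0;
}
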
{
struct crypto_testsuite_params *ts_params = &testsuite_params;
struct crypto_unittest_params *ut_params = &unittest_params;
- struct rte_cryptodev_sym_capability_idx cap_idx;
int retval;
uint8_t *plaintext;
if (retval < 0)
return retval;
- cap_idx.type = RTE_CRYPTO_SYM_XFORM_AEAD;
- cap_idx.algo.aead = tdata->algo;
-
- if (rte_cryptodev_sym_capability_get(ts_params->valid_devs[0],
- &cap_idx) == NULL) {
- return -ENOTSUP;
- }
-
/* alloc mbuf and set payload */
if (tdata->aad.len > MBUF_SIZE) {
ut_params->ibuf = rte_pktmbuf_alloc(ts_params->large_mbuf_pool);
return 0;
}
-static int
-test_chacha20_poly1305_encrypt_test_case_rfc8439(void)
-{
- return test_authenticated_encryption(&chacha20_poly1305_case_rfc8439);
-}
-
-static int
-test_chacha20_poly1305_decrypt_test_case_rfc8439(void)
-{
- return test_authenticated_decryption(&chacha20_poly1305_case_rfc8439);
-}
-
static int
test_AES_GCM_authenticated_decryption_test_case_1(void)
{
TEST_CASE_ST(ut_setup, ut_teardown,
test_AES_CCM_authenticated_decryption_test_case_128_3),
- TEST_CASE_ST(ut_setup, ut_teardown,
- test_chacha20_poly1305_encrypt_test_case_rfc8439),
- TEST_CASE_ST(ut_setup, ut_teardown,
- test_chacha20_poly1305_decrypt_test_case_rfc8439),
/** AES GCM Authenticated Encryption */
TEST_CASE_ST(ut_setup, ut_teardown,
test_AES_GCM_auth_encrypt_SGL_in_place_1500B),
};
-static uint8_t chacha_aad_rfc8439[] = {
- 0x50, 0x51, 0x52, 0x53, 0xc0, 0xc1, 0xc2, 0xc3,
- 0xc4, 0xc5, 0xc6, 0xc7
-};
-
-static const struct aead_test_data chacha20_poly1305_case_rfc8439 = {
- .algo = RTE_CRYPTO_AEAD_CHACHA20_POLY1305,
- .key = {
- .data = {
- 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
- 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
- 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
- 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f
- },
- .len = 32
- },
- .iv = {
- .data = {
- 0x07, 0x00, 0x00, 0x00, 0x40, 0x41, 0x42, 0x43,
- 0x44, 0x45, 0x46, 0x47
- },
- .len = 12
- },
- .aad = {
- .data = chacha_aad_rfc8439,
- .len = 12
- },
- .plaintext = {
- .data = {
- 0x4c, 0x61, 0x64, 0x69, 0x65, 0x73, 0x20, 0x61,
- 0x6e, 0x64, 0x20, 0x47, 0x65, 0x6e, 0x74, 0x6c,
- 0x65, 0x6d, 0x65, 0x6e, 0x20, 0x6f, 0x66, 0x20,
- 0x74, 0x68, 0x65, 0x20, 0x63, 0x6c, 0x61, 0x73,
- 0x73, 0x20, 0x6f, 0x66, 0x20, 0x27, 0x39, 0x39,
- 0x3a, 0x20, 0x49, 0x66, 0x20, 0x49, 0x20, 0x63,
- 0x6f, 0x75, 0x6c, 0x64, 0x20, 0x6f, 0x66, 0x66,
- 0x65, 0x72, 0x20, 0x79, 0x6f, 0x75, 0x20, 0x6f,
- 0x6e, 0x6c, 0x79, 0x20, 0x6f, 0x6e, 0x65, 0x20,
- 0x74, 0x69, 0x70, 0x20, 0x66, 0x6f, 0x72, 0x20,
- 0x74, 0x68, 0x65, 0x20, 0x66, 0x75, 0x74, 0x75,
- 0x72, 0x65, 0x2c, 0x20, 0x73, 0x75, 0x6e, 0x73,
- 0x63, 0x72, 0x65, 0x65, 0x6e, 0x20, 0x77, 0x6f,
- 0x75, 0x6c, 0x64, 0x20, 0x62, 0x65, 0x20, 0x69,
- 0x74, 0x2e
-
- },
- .len = 114
- },
- .ciphertext = {
- .data = {
- 0xd3, 0x1a, 0x8d, 0x34, 0x64, 0x8e, 0x60, 0xdb,
- 0x7b, 0x86, 0xaf, 0xbc, 0x53, 0xef, 0x7e, 0xc2,
- 0xa4, 0xad, 0xed, 0x51, 0x29, 0x6e, 0x08, 0xfe,
- 0xa9, 0xe2, 0xb5, 0xa7, 0x36, 0xee, 0x62, 0xd6,
- 0x3d, 0xbe, 0xa4, 0x5e, 0x8c, 0xa9, 0x67, 0x12,
- 0x82, 0xfa, 0xfb, 0x69, 0xda, 0x92, 0x72, 0x8b,
- 0x1a, 0x71, 0xde, 0x0a, 0x9e, 0x06, 0x0b, 0x29,
- 0x05, 0xd6, 0xa5, 0xb6, 0x7e, 0xcd, 0x3b, 0x36,
- 0x92, 0xdd, 0xbd, 0x7f, 0x2d, 0x77, 0x8b, 0x8c,
- 0x98, 0x03, 0xae, 0xe3, 0x28, 0x09, 0x1b, 0x58,
- 0xfa, 0xb3, 0x24, 0xe4, 0xfa, 0xd6, 0x75, 0x94,
- 0x55, 0x85, 0x80, 0x8b, 0x48, 0x31, 0xd7, 0xbc,
- 0x3f, 0xf4, 0xde, 0xf0, 0x8e, 0x4b, 0x7a, 0x9d,
- 0xe5, 0x76, 0xd2, 0x65, 0x86, 0xce, 0xc6, 0x4b,
- 0x61, 0x16
- },
- .len = 114
- },
- .auth_tag = {
- .data = {
- 0x1a, 0xe1, 0x0b, 0x59, 0x4f, 0x09, 0xe2, 0x6a,
- 0x7e, 0x90, 0x2e, 0xcb, 0xd0, 0x60, 0x06, 0x91
- },
- .len = 16
- }
-};
-
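
The vector deleted above is the AEAD example from RFC 8439 section 2.8.2: the "sunscreen" plaintext under a 32-byte key, 12-byte nonce and 12-byte AAD, with a 16-byte Poly1305 tag. A hedged cross-check of such a vector against an independent implementation, assuming OpenSSL 1.1.0 or later is available (OpenSSL is not part of this patch; the parameters map onto the fields of the removed chacha20_poly1305_case_rfc8439 structure):

/*
 * Cross-check sketch using OpenSSL's EVP interface (assumed
 * available; not part of this patch). Returns 0 when the computed
 * ciphertext and tag match the expected vector.
 */
#include <stdint.h>
#include <string.h>
#include <openssl/evp.h>

static int
check_chachapoly_vector(const uint8_t *key, const uint8_t *iv,
		const uint8_t *aad, int aad_len,
		const uint8_t *pt, int pt_len,
		const uint8_t *exp_ct, const uint8_t *exp_tag)
{
	uint8_t ct[256], tag[16];
	int len, ok = -1;
	EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();

	if (ctx == NULL || pt_len > (int)sizeof(ct))
		goto out;
	if (EVP_EncryptInit_ex(ctx, EVP_chacha20_poly1305(), NULL,
				key, iv) == 1 &&
			EVP_EncryptUpdate(ctx, NULL, &len, aad, aad_len) == 1 &&
			EVP_EncryptUpdate(ctx, ct, &len, pt, pt_len) == 1 &&
			EVP_EncryptFinal_ex(ctx, ct + len, &len) == 1 &&
			EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG,
				16, tag) == 1)
		ok = (memcmp(ct, exp_ct, pt_len) == 0 &&
				memcmp(tag, exp_tag, 16) == 0) ? 0 : -1;
out:
	EVP_CIPHER_CTX_free(ctx);
	return ok;
}
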
/** AES-GCM-128 Test Vectors */
static const struct aead_test_data gcm_test_case_1 = {
.algo = RTE_CRYPTO_AEAD_AES_GCM,
; Supported AEAD algorithms of a default crypto driver.
;
[AEAD]
AES GCM (128) =
AES GCM (192) =
AES GCM (256) =
AES CCM (128) =
AES CCM (192) =
AES CCM (256) =
-CHACHA20-POLY1305 =
;
; Supported Asymmetric algorithms of a default crypto driver.
;
; Supported AEAD algorithms of the 'qat' crypto driver.
;
[AEAD]
AES GCM (128) = Y
AES GCM (192) = Y
AES GCM (256) = Y
AES CCM (128) = Y
AES CCM (192) = Y
AES CCM (256) = Y
-CHACHA20-POLY1305 = Y
;
; Supported Asymmetric algorithms of the 'qat' crypto driver.
* ``RTE_CRYPTO_AEAD_AES_GCM``
* ``RTE_CRYPTO_AEAD_AES_CCM``
-* ``RTE_CRYPTO_AEAD_CHACHA20_POLY1305``
Supported Chains
* **Added algorithms to cryptodev API.**
- * Chacha20-Poly1305 AEAD algorithm can now be supported in cryptodev.
* ECDSA (Elliptic Curve Digital Signature Algorithm) is added to
asymmetric crypto library specifications.
* ECPM (Elliptic Curve Point Multiplication) is added to
Such algorithm combinations are not supported on GEN1/GEN2 hardware
and executing the request returns RTE_CRYPTO_OP_STATUS_INVALID_SESSION.
-* **Updated the Intel QuickAssist Technology (QAT) symmetric crypto PMD.**
-
- Added Chacha20-Poly1305 AEAD algorithm.
-
* **Added Marvell OCTEON TX2 End Point rawdev PMD.**
Added a new OCTEON TX2 rawdev PMD for End Point mode of operation.
ICP_QAT_HW_CIPHER_ALGO_KASUMI = 7,
ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 = 8,
ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3 = 9,
- ICP_QAT_HW_CIPHER_ALGO_SM4 = 10,
- ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305 = 11,
- ICP_QAT_HW_CIPHER_DELIMITER = 12
+ ICP_QAT_HW_CIPHER_DELIMITER = 10
};
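
Renumbering ICP_QAT_HW_CIPHER_DELIMITER back to 10 matters because the delimiter is meant to stay one past the last valid algorithm ID, so anything sized or bounds-checked against it keeps tracking the hardware ID space. A hypothetical illustration (this array is not in the driver):

/*
 * Illustration only: a per-algorithm table sized by the delimiter
 * automatically follows the enum above.
 */
static const char *cipher_algo_names[ICP_QAT_HW_CIPHER_DELIMITER] = {
	[ICP_QAT_HW_CIPHER_ALGO_KASUMI] = "kasumi",
	[ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2] = "snow3g-uea2",
	[ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3] = "zuc-eea3",
};
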
enum icp_qat_hw_cipher_mode {
#define ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ 16
#define ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ 16
#define ICP_QAT_HW_MODE_F8_NUM_REG_TO_CLEAR 2
-#define ICP_QAT_HW_CHACHAPOLY_KEY_SZ 32
-#define ICP_QAT_HW_CHACHAPOLY_IV_SZ 12
-#define ICP_QAT_HW_CHACHAPOLY_BLK_SZ 64
-#define ICP_QAT_HW_SPC_CTR_SZ 16
-#define ICP_QAT_HW_CHACHAPOLY_ICV_SZ 16
-#define ICP_QAT_HW_CHACHAPOLY_AAD_MAX_LOG 14
#define ICP_QAT_HW_CIPHER_MAX_KEY_SZ ICP_QAT_HW_AES_256_F8_KEY_SZ
}, } \
}
-#define QAT_EXTRA_GEN3_SYM_CAPABILITIES \
- { /* Chacha20-Poly1305 */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD, \
- {.aead = { \
- .algo = RTE_CRYPTO_AEAD_CHACHA20_POLY1305, \
- .block_size = 64, \
- .key_size = { \
- .min = 32, \
- .max = 32, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- }, \
- .aad_size = { \
- .min = 0, \
- .max = 240, \
- .increment = 1 \
- }, \
- .iv_size = { \
- .min = 12, \
- .max = 12, \
- .increment = 0 \
- }, \
- }, } \
- }, } \
- }
-
#endif /* _QAT_SYM_CAPABILITIES_H_ */
RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};
-static const struct rte_cryptodev_capabilities qat_gen3_sym_capabilities[] = {
- QAT_BASE_GEN1_SYM_CAPABILITIES,
- QAT_EXTRA_GEN2_SYM_CAPABILITIES,
- QAT_EXTRA_GEN3_SYM_CAPABILITIES,
- RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
-};
-
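
With the GEN3-only table gone, GEN3 devices publish the GEN2 capability set (see the switch below). Applications never index these arrays directly; they walk the table exposed through rte_cryptodev_info_get() until the RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST() terminator, which sets op to RTE_CRYPTO_OP_TYPE_UNDEFINED. A sketch of that walk, assuming the standard cryptodev API (the helper name is illustrative):

/*
 * Sketch: discover whether a device advertises a given AEAD
 * algorithm by walking its published capability table.
 */
#include <rte_cryptodev.h>

static int
device_has_aead(uint8_t dev_id, enum rte_crypto_aead_algorithm algo)
{
	struct rte_cryptodev_info info;
	const struct rte_cryptodev_capabilities *cap;

	rte_cryptodev_info_get(dev_id, &info);
	for (cap = info.capabilities;
			cap->op != RTE_CRYPTO_OP_TYPE_UNDEFINED; cap++) {
		if (cap->op == RTE_CRYPTO_OP_TYPE_SYMMETRIC &&
				cap->sym.xform_type ==
					RTE_CRYPTO_SYM_XFORM_AEAD &&
				cap->sym.aead.algo == algo)
			return 1;
	}
	return 0;
}
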
static int qat_sym_qp_release(struct rte_cryptodev *dev,
uint16_t queue_pair_id);
internals->qat_dev_capabilities = qat_gen1_sym_capabilities;
break;
case QAT_GEN2:
- internals->qat_dev_capabilities = qat_gen2_sym_capabilities;
- break;
case QAT_GEN3:
- internals->qat_dev_capabilities = qat_gen3_sym_capabilities;
+ internals->qat_dev_capabilities = qat_gen2_sym_capabilities;
break;
default:
internals->qat_dev_capabilities = qat_gen2_sym_capabilities;
}
static int
-qat_sym_session_handle_single_pass(struct qat_sym_session *session,
+qat_sym_session_handle_single_pass(struct qat_sym_dev_private *internals,
+ struct qat_sym_session *session,
struct rte_crypto_aead_xform *aead_xform)
{
- struct icp_qat_fw_la_cipher_req_params *cipher_param =
- (void *) &session->fw_req.serv_specif_rqpars;
+ enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;
- session->is_single_pass = 1;
- session->min_qat_dev_gen = QAT_GEN3;
- session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
- if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
+ if (qat_dev_gen == QAT_GEN3 &&
+ aead_xform->iv.length == QAT_AES_GCM_SPC_IV_SIZE) {
+ /* Use faster Single-Pass GCM */
+ struct icp_qat_fw_la_cipher_req_params *cipher_param =
+ (void *) &session->fw_req.serv_specif_rqpars;
+
+ session->is_single_pass = 1;
+ session->min_qat_dev_gen = QAT_GEN3;
+ session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
+ session->cipher_iv.offset = aead_xform->iv.offset;
+ session->cipher_iv.length = aead_xform->iv.length;
+ if (qat_sym_session_aead_create_cd_cipher(session,
+ aead_xform->key.data, aead_xform->key.length))
+ return -EINVAL;
+ session->aad_len = aead_xform->aad_length;
+ session->digest_length = aead_xform->digest_length;
+ if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
+ session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
+ session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
+ ICP_QAT_FW_LA_RET_AUTH_SET(
+ session->fw_req.comn_hdr.serv_specif_flags,
+ ICP_QAT_FW_LA_RET_AUTH_RES);
+ } else {
+ session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
+ session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
+ ICP_QAT_FW_LA_CMP_AUTH_SET(
+ session->fw_req.comn_hdr.serv_specif_flags,
+ ICP_QAT_FW_LA_CMP_AUTH_RES);
+ }
+ ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
+ session->fw_req.comn_hdr.serv_specif_flags,
+ ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
+ ICP_QAT_FW_LA_PROTO_SET(
+ session->fw_req.comn_hdr.serv_specif_flags,
+ ICP_QAT_FW_LA_NO_PROTO);
ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
- session->fw_req.comn_hdr.serv_specif_flags,
- ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
- } else {
- /* Chacha-Poly is special case that use QAT CTR mode */
- session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
- }
- session->cipher_iv.offset = aead_xform->iv.offset;
- session->cipher_iv.length = aead_xform->iv.length;
- if (qat_sym_session_aead_create_cd_cipher(session,
- aead_xform->key.data, aead_xform->key.length))
- return -EINVAL;
- session->aad_len = aead_xform->aad_length;
- session->digest_length = aead_xform->digest_length;
- if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
- session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
- session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
- ICP_QAT_FW_LA_RET_AUTH_SET(
- session->fw_req.comn_hdr.serv_specif_flags,
- ICP_QAT_FW_LA_RET_AUTH_RES);
- } else {
- session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
- session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
- ICP_QAT_FW_LA_CMP_AUTH_SET(
- session->fw_req.comn_hdr.serv_specif_flags,
- ICP_QAT_FW_LA_CMP_AUTH_RES);
+ session->fw_req.comn_hdr.serv_specif_flags,
+ ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
+ session->fw_req.comn_hdr.service_cmd_id =
+ ICP_QAT_FW_LA_CMD_CIPHER;
+ session->cd.cipher.cipher_config.val =
+ ICP_QAT_HW_CIPHER_CONFIG_BUILD(
+ ICP_QAT_HW_CIPHER_AEAD_MODE,
+ session->qat_cipher_alg,
+ ICP_QAT_HW_CIPHER_NO_CONVERT,
+ session->qat_dir);
+ QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
+ aead_xform->digest_length,
+ QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
+ QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
+ session->cd.cipher.cipher_config.reserved =
+ ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
+ aead_xform->aad_length);
+ cipher_param->spc_aad_sz = aead_xform->aad_length;
+ cipher_param->spc_auth_res_sz = aead_xform->digest_length;
}
- ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
- session->fw_req.comn_hdr.serv_specif_flags,
- ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
- ICP_QAT_FW_LA_PROTO_SET(
- session->fw_req.comn_hdr.serv_specif_flags,
- ICP_QAT_FW_LA_NO_PROTO);
- session->fw_req.comn_hdr.service_cmd_id =
- ICP_QAT_FW_LA_CMD_CIPHER;
- session->cd.cipher.cipher_config.val =
- ICP_QAT_HW_CIPHER_CONFIG_BUILD(
- ICP_QAT_HW_CIPHER_AEAD_MODE,
- session->qat_cipher_alg,
- ICP_QAT_HW_CIPHER_NO_CONVERT,
- session->qat_dir);
- QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
- aead_xform->digest_length,
- QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
- QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
- session->cd.cipher.cipher_config.reserved =
- ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
- aead_xform->aad_length);
- cipher_param->spc_aad_sz = aead_xform->aad_length;
- cipher_param->spc_auth_res_sz = aead_xform->digest_length;
-
return 0;
}
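
After this revert, single-pass is again a purely internal optimisation: session configuration selects it only on GEN3 devices and only for AES-GCM with a 12-byte IV (QAT_AES_GCM_SPC_IV_SIZE). Nothing changes for applications; a plain GCM AEAD transform with a 12-byte IV is enough to trigger it. A minimal sketch, assuming the rte_crypto_sym.h layout touched elsewhere in this patch (iv_offset, the 16-byte digest, and the AAD length are example values):

/*
 * Sketch of an application-side AEAD transform that the restored
 * logic above would map to single-pass on GEN3 hardware.
 */
#include <rte_crypto_sym.h>

static struct rte_crypto_sym_xform
make_gcm_xform(const uint8_t *key, uint16_t key_len, uint16_t iv_offset)
{
	struct rte_crypto_sym_xform xform = {
		.type = RTE_CRYPTO_SYM_XFORM_AEAD,
		.next = NULL,
		.aead = {
			.op = RTE_CRYPTO_AEAD_OP_ENCRYPT,
			.algo = RTE_CRYPTO_AEAD_AES_GCM,
			.key = { .data = key, .length = key_len },
			/* 12-byte IV is what enables single-pass */
			.iv = { .offset = iv_offset, .length = 12 },
			.digest_length = 16,
			.aad_length = 16,	/* example value */
		},
	};

	return xform;
}
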
{
struct rte_crypto_aead_xform *aead_xform = &xform->aead;
enum rte_crypto_auth_operation crypto_operation;
- struct qat_sym_dev_private *internals =
- dev->data->dev_private;
- enum qat_device_gen qat_dev_gen =
- internals->qat_dev->qat_dev_gen;
/*
* Store AEAD IV parameters as cipher IV,
session->cipher_iv.offset = xform->aead.iv.offset;
session->cipher_iv.length = xform->aead.iv.length;
- session->is_single_pass = 0;
switch (aead_xform->algo) {
case RTE_CRYPTO_AEAD_AES_GCM:
if (qat_sym_validate_aes_key(aead_xform->key.length,
return -EINVAL;
}
session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
- session->qat_hash_alg =
- ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
- if (qat_dev_gen > QAT_GEN2 && aead_xform->iv.length ==
- QAT_AES_GCM_SPC_IV_SIZE) {
- return qat_sym_session_handle_single_pass(session,
- aead_xform);
- }
+ session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
break;
case RTE_CRYPTO_AEAD_AES_CCM:
if (qat_sym_validate_aes_key(aead_xform->key.length,
session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
break;
- case RTE_CRYPTO_AEAD_CHACHA20_POLY1305:
- if (aead_xform->key.length != ICP_QAT_HW_CHACHAPOLY_KEY_SZ)
- return -EINVAL;
- session->qat_cipher_alg =
- ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305;
- return qat_sym_session_handle_single_pass(session,
- aead_xform);
default:
QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
aead_xform->algo);
return -EINVAL;
}
+ session->is_single_pass = 0;
+ if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
+ /* Use faster Single-Pass GCM if possible */
+ int res = qat_sym_session_handle_single_pass(
+ dev->data->dev_private, session, aead_xform);
+ if (res < 0)
+ return res;
+ if (session->is_single_pass)
+ return 0;
+ }
+
if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
(aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
/**< AES algorithm in CCM mode. */
RTE_CRYPTO_AEAD_AES_GCM,
/**< AES algorithm in GCM mode. */
- RTE_CRYPTO_AEAD_CHACHA20_POLY1305,
- /**< Chacha20 cipher with poly1305 authenticator */
RTE_CRYPTO_AEAD_LIST_END
};
* be allocated, even though the length field will
* have a value less than this.
*
- * - For Chacha20-Poly1305 it is 96-bit nonce.
- * PMD sets initial counter for Poly1305 key generation
- * part to 0 and for Chacha20 encryption to 1 as per
- * rfc8439 2.8. AEAD construction.
- *
* For optimum performance, the data pointed to SHOULD
* be 8-byte aligned.
*/
*
* - For CCM mode, this is the length of the nonce,
* which can be in the range 7 to 13 inclusive.
- *
- * - For Chacha20-Poly1305 this field is always 12.
*/
} iv; /**< Initialisation vector parameters */
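
A trivial sketch encoding the CCM rule stated above (hypothetical helper, not an API in this patch):

#include <stdint.h>

/* A CCM nonce must be 7 to 13 bytes long, per the comment above. */
static int
ccm_nonce_len_valid(uint16_t iv_len)
{
	return iv_len >= 7 && iv_len <= 13;
}
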
rte_crypto_aead_algorithm_strings[] = {
[RTE_CRYPTO_AEAD_AES_CCM] = "aes-ccm",
[RTE_CRYPTO_AEAD_AES_GCM] = "aes-gcm",
- [RTE_CRYPTO_AEAD_CHACHA20_POLY1305] = "chacha20-poly1305"
};
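
The string table above backs the public helper rte_cryptodev_get_aead_algo_enum(), which CLI tools and test applications use to map names such as "aes-gcm" back to enum values. A short usage sketch:

#include <stdio.h>
#include <rte_cryptodev.h>

static void
demo_aead_lookup(void)
{
	enum rte_crypto_aead_algorithm algo;

	/* Returns 0 and fills "algo" when the name is recognised */
	if (rte_cryptodev_get_aead_algo_enum(&algo, "aes-gcm") == 0)
		printf("aes-gcm -> %d\n", (int)algo);
}
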
/**