X-Git-Url: http://git.droids-corp.org/?a=blobdiff_plain;f=drivers%2Fcrypto%2Fqat%2Fqat_crypto.c;h=f8db12f946b68ff817791eee8735c1e2ae6e4651;hb=f2546f8e51b8;hp=661ebb223b43fa75b7951a4627ea870194111deb;hpb=d905ee32d0dc7b00138df7e87ca18bacbade7970;p=dpdk.git

diff --git a/drivers/crypto/qat/qat_crypto.c b/drivers/crypto/qat/qat_crypto.c
index 661ebb223b..f8db12f946 100644
--- a/drivers/crypto/qat/qat_crypto.c
+++ b/drivers/crypto/qat/qat_crypto.c
@@ -240,7 +240,32 @@ static const struct rte_cryptodev_capabilities qat_pmd_capabilities[] = {
 				}, }
 			}, }
 	},
-	{	/* SNOW3G (UIA2) */
+	{	/* AES GMAC (AUTH) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			{.auth = {
+				.algo = RTE_CRYPTO_AUTH_AES_GMAC,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 32,
+					.increment = 8
+				},
+				.digest_size = {
+					.min = 8,
+					.max = 16,
+					.increment = 4
+				},
+				.aad_size = {
+					.min = 1,
+					.max = 65535,
+					.increment = 1
+				}
+			}, }
+		}, }
+	},
+	{	/* SNOW 3G (UIA2) */
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
@@ -305,7 +330,7 @@ static const struct rte_cryptodev_capabilities qat_pmd_capabilities[] = {
 				}, }
 			}, }
 	},
-	{	/* SNOW3G (UEA2) */
+	{	/* SNOW 3G (UEA2) */
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
@@ -345,6 +370,132 @@ static const struct rte_cryptodev_capabilities qat_pmd_capabilities[] = {
 				}, }
 			}, }
 	},
+	{	/* NULL (AUTH) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			{.auth = {
+				.algo = RTE_CRYPTO_AUTH_NULL,
+				.block_size = 1,
+				.key_size = {
+					.min = 0,
+					.max = 0,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 0,
+					.max = 0,
+					.increment = 0
+				},
+				.aad_size = { 0 }
+			}, },
+		}, },
+	},
+	{	/* NULL (CIPHER) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			{.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_NULL,
+				.block_size = 1,
+				.key_size = {
+					.min = 0,
+					.max = 0,
+					.increment = 0
+				},
+				.iv_size = {
+					.min = 0,
+					.max = 0,
+					.increment = 0
+				}
+			}, },
+		}, }
+	},
+	{	/* KASUMI (F8) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			{.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_KASUMI_F8,
+				.block_size = 8,
+				.key_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				},
+				.iv_size = {
+					.min = 8,
+					.max = 8,
+					.increment = 0
+				}
+			}, }
+		}, }
+	},
+	{	/* KASUMI (F9) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			{.auth = {
+				.algo = RTE_CRYPTO_AUTH_KASUMI_F9,
+				.block_size = 8,
+				.key_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 4,
+					.max = 4,
+					.increment = 0
+				},
+				.aad_size = {
+					.min = 8,
+					.max = 8,
+					.increment = 0
+				}
+			}, }
+		}, }
+	},
+	{	/* 3DES CBC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			{.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_3DES_CBC,
+				.block_size = 8,
+				.key_size = {
+					.min = 16,
+					.max = 24,
+					.increment = 8
+				},
+				.iv_size = {
+					.min = 8,
+					.max = 8,
+					.increment = 0
+				}
+			}, }
+		}, }
+	},
+	{	/* 3DES CTR */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			{.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_3DES_CTR,
+				.block_size = 8,
+				.key_size = {
+					.min = 16,
+					.max = 24,
+					.increment = 8
+				},
+				.iv_size = {
+					.min = 8,
+					.max = 8,
+					.increment = 0
+				}
+			}, }
+		}, }
+	},
 	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
 };
 
@@ -462,18 +613,46 @@ qat_crypto_sym_configure_session_cipher(struct rte_cryptodev *dev,
 	case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
 		if (qat_alg_validate_snow3g_key(cipher_xform->key.length,
 					&session->qat_cipher_alg) != 0) {
-			PMD_DRV_LOG(ERR, "Invalid SNOW3G cipher key size");
+			PMD_DRV_LOG(ERR, "Invalid SNOW 3G cipher key size");
 			goto error_out;
 		}
 		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
 		break;
 	case RTE_CRYPTO_CIPHER_NULL:
-	case RTE_CRYPTO_CIPHER_3DES_ECB:
+		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
+		break;
+	case RTE_CRYPTO_CIPHER_KASUMI_F8:
+		if (qat_alg_validate_kasumi_key(cipher_xform->key.length,
+					&session->qat_cipher_alg) != 0) {
+			PMD_DRV_LOG(ERR, "Invalid KASUMI cipher key size");
+			goto error_out;
+		}
+		session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
+		break;
 	case RTE_CRYPTO_CIPHER_3DES_CBC:
+		if (qat_alg_validate_3des_key(cipher_xform->key.length,
+				&session->qat_cipher_alg) != 0) {
+			PMD_DRV_LOG(ERR, "Invalid 3DES cipher key size");
+			goto error_out;
+		}
+		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
+		break;
+	case RTE_CRYPTO_CIPHER_3DES_CTR:
+		if (qat_alg_validate_3des_key(cipher_xform->key.length,
+				&session->qat_cipher_alg) != 0) {
+			PMD_DRV_LOG(ERR, "Invalid 3DES cipher key size");
+			goto error_out;
+		}
+		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
+		break;
+	case RTE_CRYPTO_CIPHER_3DES_ECB:
 	case RTE_CRYPTO_CIPHER_AES_ECB:
 	case RTE_CRYPTO_CIPHER_AES_CCM:
-	case RTE_CRYPTO_CIPHER_KASUMI_F8:
-		PMD_DRV_LOG(ERR, "Crypto: Unsupported Cipher alg %u",
+	case RTE_CRYPTO_CIPHER_AES_F8:
+	case RTE_CRYPTO_CIPHER_AES_XTS:
+	case RTE_CRYPTO_CIPHER_ARC4:
+	case RTE_CRYPTO_CIPHER_ZUC_EEA3:
+		PMD_DRV_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
 				cipher_xform->algo);
 		goto error_out;
 	default:
@@ -592,6 +771,9 @@ qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev,
 	case RTE_CRYPTO_AUTH_AES_GCM:
 		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
 		break;
+	case RTE_CRYPTO_AUTH_AES_GMAC:
+		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
+		break;
 	case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
 		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
 		break;
@@ -599,6 +781,11 @@ qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev,
 		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
 		break;
 	case RTE_CRYPTO_AUTH_NULL:
+		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
+		break;
+	case RTE_CRYPTO_AUTH_KASUMI_F9:
+		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
+		break;
 	case RTE_CRYPTO_AUTH_SHA1:
 	case RTE_CRYPTO_AUTH_SHA256:
 	case RTE_CRYPTO_AUTH_SHA512:
@@ -606,8 +793,6 @@ qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev,
 	case RTE_CRYPTO_AUTH_SHA384:
 	case RTE_CRYPTO_AUTH_MD5:
 	case RTE_CRYPTO_AUTH_AES_CCM:
-	case RTE_CRYPTO_AUTH_AES_GMAC:
-	case RTE_CRYPTO_AUTH_KASUMI_F9:
 	case RTE_CRYPTO_AUTH_AES_CMAC:
 	case RTE_CRYPTO_AUTH_AES_CBC_MAC:
 	case RTE_CRYPTO_AUTH_ZUC_EIA3:
@@ -811,11 +996,12 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg)
 	cipher_param->cipher_length = op->sym->cipher.data.length;
 	cipher_param->cipher_offset = op->sym->cipher.data.offset;
 
-	if (ctx->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
+	if (ctx->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
+			ctx->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
 		if (unlikely((cipher_param->cipher_length % BYTE_LENGTH != 0) ||
 				(cipher_param->cipher_offset % BYTE_LENGTH != 0))) {
-			PMD_DRV_LOG(ERR, " For Snow3g, QAT PMD only "
+			PMD_DRV_LOG(ERR, " For SNOW 3G/KASUMI, QAT PMD only "
 				"supports byte aligned values");
 			op->status =
 					RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
 			return -EINVAL;
@@ -846,7 +1032,7 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg)
 	if (ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2) {
 		if (unlikely((auth_param->auth_off % BYTE_LENGTH != 0) ||
 				(auth_param->auth_len % BYTE_LENGTH != 0))) {
-			PMD_DRV_LOG(ERR, " For Snow3g, QAT PMD only "
+			PMD_DRV_LOG(ERR, " For SNOW 3G, QAT PMD only "
 				"supports byte aligned values");
 			op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
 			return -EINVAL;
@@ -854,6 +1040,22 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg)
 		auth_param->auth_off >>= 3;
 		auth_param->auth_len >>= 3;
 	}
+	if ((ctx->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER ||
+			ctx->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) &&
+			ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9) {
+		auth_param->auth_len = (auth_param->auth_len >> 3)
+				+ (auth_param->auth_off >> 3)
+				+ (BYTE_LENGTH >> 3)
+				- 8;
+		auth_param->auth_off = 8;
+	} else if (ctx->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH
+			&& ctx->qat_hash_alg ==
+				ICP_QAT_HW_AUTH_ALGO_KASUMI_F9) {
+		auth_param->auth_len = (auth_param->auth_len >> 3)
+				+ (auth_param->auth_off >> 3)
+				+ (BYTE_LENGTH >> 3);
+		auth_param->auth_off = 0;
+	}
 	auth_param->u1.aad_adr = op->sym->auth.aad.phys_addr;
 
 	if (ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128 ||
@@ -867,6 +1069,19 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg)
 			qat_req->comn_hdr.serv_specif_flags,
 				ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
 		}
+		if (op->sym->cipher.data.length == 0) {
+			/*
+			 * GMAC
+			 */
+			qat_req->comn_mid.dest_data_addr =
+				qat_req->comn_mid.src_data_addr =
+					op->sym->auth.aad.phys_addr;
+			auth_param->u1.aad_adr = 0;
+			auth_param->auth_len = op->sym->auth.aad.length;
+			auth_param->u2.aad_sz = 0;
+
+		}
+
 	}
 
 #ifdef RTE_LIBRTE_PMD_QAT_DEBUG_TX