* ``RTE_CRYPTO_CIPHER_SNOW3G_UEA2``
* ``RTE_CRYPTO_CIPHER_AES_GCM``
* ``RTE_CRYPTO_CIPHER_NULL``
+* ``RTE_CRYPTO_CIPHER_KASUMI_F8``
Hash algorithms:
* ``RTE_CRYPTO_AUTH_SNOW3G_UIA2``
* ``RTE_CRYPTO_AUTH_MD5_HMAC``
* ``RTE_CRYPTO_AUTH_NULL``
+* ``RTE_CRYPTO_AUTH_KASUMI_F9``
Limitations
-----------
* Chained mbufs are not supported.
-* Hash only is not supported except Snow3G UIA2.
-* Cipher only is not supported except Snow3G UEA2.
+* Hash only is not supported except for Snow3G UIA2 and KASUMI F9.
+* Cipher only is not supported except for Snow3G UEA2 and KASUMI F8.
* Only supports the session-oriented API implementation (session-less APIs are not supported).
* Not performance tuned.
-* Snow3g(UEA2) supported only if cipher length, cipher offset fields are byte-aligned.
-* Snow3g(UIA2) supported only if hash length, hash offset fields are byte-aligned.
+* Snow3g(UEA2) and KASUMI(F8) are supported only if the cipher length and cipher offset fields are byte-aligned.
+* Snow3g(UIA2) and KASUMI(F9) are supported only if the hash length and hash offset fields are byte-aligned.
* No BSD support as BSD QAT kernel driver not available.
* MD5_HMAC algorithm
* SHA224-HMAC algorithm
* SHA384-HMAC algorithm
+ * KASUMI (F8 and F9) algorithm
* NULL algorithm
#include "icp_qat_fw.h"
#include "icp_qat_fw_la.h"
+/*
+ * Key Modifier (KM) value used in the KASUMI algorithm in F9 mode to
+ * XOR the Integrity Key (IK)
+ */
+#define KASUMI_F9_KEY_MODIFIER_4_BYTES 0xAAAAAAAA
+
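+/*
+ * Key Modifier (KM) value used in the KASUMI algorithm in F8 mode to
+ * XOR the Cipher Key (CK)
+ */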
+#define KASUMI_F8_KEY_MODIFIER_4_BYTES 0x55555555
+
#define QAT_AES_HW_CONFIG_CBC_ENC(alg) \
ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_CBC_MODE, alg, \
ICP_QAT_HW_CIPHER_NO_CONVERT, \
int qat_alg_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg);
int qat_alg_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg);
-
+int qat_alg_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg);
#endif
case ICP_QAT_HW_AUTH_ALGO_MD5:
return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
QAT_HW_DEFAULT_ALIGNMENT);
+ case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
+ return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
+ QAT_HW_DEFAULT_ALIGNMENT);
case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
/* return maximum state1 size in this case */
return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
uint32_t total_key_size;
uint16_t proto = ICP_QAT_FW_LA_NO_PROTO; /* no CCM/GCM/Snow3G */
uint16_t cipher_offset, cd_size;
-
+ uint32_t wordIndex = 0;
+ uint32_t *temp_key = NULL;
PMD_INIT_FUNC_TRACE();
if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
cipher_cd_ctrl->cipher_state_sz =
ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
proto = ICP_QAT_FW_LA_SNOW_3G_PROTO;
+ } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
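+ /*
+ * The cd_ctrl state and padding sizes are expressed in 8-byte
+ * quad-words, hence the >> 3 conversions below (mirroring the
+ * SNOW 3G and AES branches).
+ */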
+ total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
+ cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
+ cipher_cd_ctrl->cipher_padding_sz =
+ (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
} else {
total_key_size = cipherkeylen;
cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
cdesc->qat_cipher_alg, key_convert,
cdesc->qat_dir);
- memcpy(cipher->aes.key, cipherkey, cipherkeylen);
- cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
- cipherkeylen;
+
+ if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
+ temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
+ sizeof(struct icp_qat_hw_cipher_config)
+ + cipherkeylen);
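+ /*
+ * The cipher key block in the content descriptor holds the
+ * Cipher Key (CK) followed by CK XOR-ed with the Key
+ * Modifier (KM): CK||(CK^KM).
+ */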
+ memcpy(cipher->aes.key, cipherkey, cipherkeylen);
+ memcpy(temp_key, cipherkey, cipherkeylen);
+
+ /* XOR the key with the KASUMI F8 key modifier, one 32-bit word at a time */
+ for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
+ wordIndex++)
+ temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;
+
+ cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
+ cipherkeylen + cipherkeylen;
+ } else {
+ memcpy(cipher->aes.key, cipherkey, cipherkeylen);
+ cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
+ cipherkeylen;
+ }
+
if (total_key_size > cipherkeylen) {
uint32_t padding_size = total_key_size-cipherkeylen;
uint16_t state1_size = 0, state2_size = 0;
uint16_t hash_offset, cd_size;
uint32_t *aad_len = NULL;
+ uint32_t wordIndex = 0;
+ uint32_t *pTempKey;
PMD_INIT_FUNC_TRACE();
ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
cdesc->qat_hash_alg, digestsize);
- if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2)
+ if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
+ || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9)
hash->auth_counter.counter = 0;
else
hash->auth_counter.counter = rte_bswap32(
break;
case ICP_QAT_HW_AUTH_ALGO_NULL:
break;
+ case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
+ state1_size = qat_hash_get_state1_size(
+ ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
+ state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
+ memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
+ pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
+ + authkeylen);
+ /*
+ * The Inner Hash Initial State2 block must contain IK
+ * (Integrity Key), followed by IK XOR-ed with KM
+ * (Key Modifier): IK||(IK^KM).
+ */
+ /* write the auth key */
+ memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
+ /* initialise temp key with auth key */
+ memcpy(pTempKey, authkey, authkeylen);
+ /* XOR the key with the KASUMI F9 key modifier, one 32-bit word at a time */
+ for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
+ pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
+ break;
default:
PMD_DRV_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
return -EFAULT;
}
return 0;
}
+
+int qat_alg_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
+{
+ switch (key_len) {
+ case ICP_QAT_HW_KASUMI_KEY_SZ:
+ *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
+ break;
+ default:
+ return -EINVAL;
+ }
+ return 0;
+}
}, },
}, }
},
+ { /* KASUMI (F8) */
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+ {.sym = {
+ .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+ {.cipher = {
+ .algo = RTE_CRYPTO_CIPHER_KASUMI_F8,
+ .block_size = 8,
+ .key_size = {
+ .min = 16,
+ .max = 16,
+ .increment = 0
+ },
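+ /* 8-byte IV carrying the COUNT, BEARER and DIRECTION inputs of F8 */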
+ .iv_size = {
+ .min = 8,
+ .max = 8,
+ .increment = 0
+ }
+ }, }
+ }, }
+ },
+ { /* KASUMI (F9) */
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+ {.sym = {
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+ {.auth = {
+ .algo = RTE_CRYPTO_AUTH_KASUMI_F9,
+ .block_size = 8,
+ .key_size = {
+ .min = 16,
+ .max = 16,
+ .increment = 0
+ },
+ .digest_size = {
+ .min = 4,
+ .max = 4,
+ .increment = 0
+ },
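+ /* 8-byte AAD carrying the COUNT and FRESH inputs of F9 */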
+ .aad_size = {
+ .min = 8,
+ .max = 8,
+ .increment = 0
+ }
+ }, }
+ }, }
+ },
RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};
case RTE_CRYPTO_CIPHER_NULL:
session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
break;
+ case RTE_CRYPTO_CIPHER_KASUMI_F8:
+ if (qat_alg_validate_kasumi_key(cipher_xform->key.length,
+ &session->qat_cipher_alg) != 0) {
+ PMD_DRV_LOG(ERR, "Invalid KASUMI cipher key size");
+ goto error_out;
+ }
+ session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
+ break;
case RTE_CRYPTO_CIPHER_3DES_ECB:
case RTE_CRYPTO_CIPHER_3DES_CBC:
case RTE_CRYPTO_CIPHER_AES_ECB:
case RTE_CRYPTO_CIPHER_AES_CCM:
- case RTE_CRYPTO_CIPHER_KASUMI_F8:
PMD_DRV_LOG(ERR, "Crypto: Unsupported Cipher alg %u",
cipher_xform->algo);
goto error_out;
case RTE_CRYPTO_AUTH_NULL:
session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
break;
+ case RTE_CRYPTO_AUTH_KASUMI_F9:
+ session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
+ break;
case RTE_CRYPTO_AUTH_SHA1:
case RTE_CRYPTO_AUTH_SHA256:
case RTE_CRYPTO_AUTH_SHA512:
case RTE_CRYPTO_AUTH_MD5:
case RTE_CRYPTO_AUTH_AES_CCM:
case RTE_CRYPTO_AUTH_AES_GMAC:
- case RTE_CRYPTO_AUTH_KASUMI_F9:
case RTE_CRYPTO_AUTH_AES_CMAC:
case RTE_CRYPTO_AUTH_AES_CBC_MAC:
case RTE_CRYPTO_AUTH_ZUC_EIA3:
cipher_param->cipher_length = op->sym->cipher.data.length;
cipher_param->cipher_offset = op->sym->cipher.data.offset;
- if (ctx->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
+ if (ctx->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
+ ctx->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
if (unlikely((cipher_param->cipher_length % BYTE_LENGTH != 0) ||
(cipher_param->cipher_offset
% BYTE_LENGTH != 0))) {
- PMD_DRV_LOG(ERR, " For Snow3g, QAT PMD only "
+		PMD_DRV_LOG(ERR, "For Snow3g/Kasumi, QAT PMD only "
"supports byte aligned values");
op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
return -EINVAL;
auth_param->auth_off >>= 3;
auth_param->auth_len >>= 3;
}
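+ /*
+ * KASUMI F9 hashes COUNT||FRESH||message||DIRECTION, so the bit
+ * offsets and lengths are converted to bytes and the hashed region
+ * widened to cover those extra fields; for chained requests the
+ * leading 8 bytes (presumably the F8 IV) are skipped via
+ * auth_off = 8.
+ */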
+ if ((ctx->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER ||
+ ctx->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) &&
+ ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9) {
+ auth_param->auth_len = (auth_param->auth_len >> 3)
+ + (auth_param->auth_off >> 3)
+ + (BYTE_LENGTH >> 3)
+ - 8;
+ auth_param->auth_off = 8;
+ } else if (ctx->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH
+ && ctx->qat_hash_alg ==
+ ICP_QAT_HW_AUTH_ALGO_KASUMI_F9) {
+ auth_param->auth_len = (auth_param->auth_len >> 3)
+ + (auth_param->auth_off >> 3)
+ + (BYTE_LENGTH >> 3);
+ auth_param->auth_off = 0;
+ }
auth_param->u1.aad_adr = op->sym->auth.aad.phys_addr;
if (ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128 ||