X-Git-Url: http://git.droids-corp.org/?a=blobdiff_plain;f=drivers%2Fcrypto%2Fqat%2Fqat_crypto.c;h=f8db12f946b68ff817791eee8735c1e2ae6e4651;hb=f2546f8e51b8;hp=f267da5c4122147b890e36259f00d3463d75c846;hpb=53d8971cbe81c525ab6d36c46329d514ce154278;p=dpdk.git

diff --git a/drivers/crypto/qat/qat_crypto.c b/drivers/crypto/qat/qat_crypto.c
index f267da5c41..f8db12f946 100644
--- a/drivers/crypto/qat/qat_crypto.c
+++ b/drivers/crypto/qat/qat_crypto.c
@@ -54,7 +54,6 @@
 #include
 #include
 #include
-#include
 #include
 #include
 #include
@@ -68,6 +67,438 @@
 
 #define BYTE_LENGTH	8
 
+static const struct rte_cryptodev_capabilities qat_pmd_capabilities[] = {
+	{	/* SHA1 HMAC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			{.auth = {
+				.algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
+				.block_size = 64,
+				.key_size = {
+					.min = 64,
+					.max = 64,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 20,
+					.max = 20,
+					.increment = 0
+				},
+				.aad_size = { 0 }
+			}, }
+		}, }
+	},
+	{	/* SHA224 HMAC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			{.auth = {
+				.algo = RTE_CRYPTO_AUTH_SHA224_HMAC,
+				.block_size = 64,
+				.key_size = {
+					.min = 64,
+					.max = 64,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 28,
+					.max = 28,
+					.increment = 0
+				},
+				.aad_size = { 0 }
+			}, }
+		}, }
+	},
+	{	/* SHA256 HMAC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			{.auth = {
+				.algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
+				.block_size = 64,
+				.key_size = {
+					.min = 64,
+					.max = 64,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 32,
+					.max = 32,
+					.increment = 0
+				},
+				.aad_size = { 0 }
+			}, }
+		}, }
+	},
+	{	/* SHA384 HMAC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			{.auth = {
+				.algo = RTE_CRYPTO_AUTH_SHA384_HMAC,
+				.block_size = 64,
+				.key_size = {
+					.min = 128,
+					.max = 128,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 48,
+					.max = 48,
+					.increment = 0
+				},
+				.aad_size = { 0 }
+			}, }
+		}, }
+	},
+	{	/* SHA512 HMAC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			{.auth = {
+				.algo = RTE_CRYPTO_AUTH_SHA512_HMAC,
+				.block_size = 128,
+				.key_size = {
+					.min = 128,
+					.max = 128,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 64,
+					.max = 64,
+					.increment = 0
+				},
+				.aad_size = { 0 }
+			}, }
+		}, }
+	},
+	{	/* MD5 HMAC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			{.auth = {
+				.algo = RTE_CRYPTO_AUTH_MD5_HMAC,
+				.block_size = 64,
+				.key_size = {
+					.min = 8,
+					.max = 64,
+					.increment = 8
+				},
+				.digest_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				},
+				.aad_size = { 0 }
+			}, }
+		}, }
+	},
+	{	/* AES XCBC MAC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			{.auth = {
+				.algo = RTE_CRYPTO_AUTH_AES_XCBC_MAC,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				},
+				.aad_size = { 0 }
+			}, }
+		}, }
+	},
+	{	/* AES GCM (AUTH) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			{.auth = {
+				.algo = RTE_CRYPTO_AUTH_AES_GCM,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 32,
+					.increment = 8
+				},
+				.digest_size = {
+					.min = 8,
+					.max = 16,
+					.increment = 4
+				},
+				.aad_size = {
+					.min = 8,
+					.max = 12,
+					.increment = 4
+				}
+			}, }
+		}, }
+	},
+	{	/* AES GMAC (AUTH) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			{.auth = {
+				.algo = RTE_CRYPTO_AUTH_AES_GMAC,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 32,
+					.increment = 8
+				},
+				.digest_size = {
+					.min = 8,
+					.max = 16,
+					.increment = 4
+				},
+				.aad_size = {
+					.min = 1,
+					.max = 65535,
+					.increment = 1
+				}
+			}, }
+		}, }
+	},
+	{	/* SNOW 3G (UIA2) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			{.auth = {
+				.algo = RTE_CRYPTO_AUTH_SNOW3G_UIA2,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 4,
+					.max = 4,
+					.increment = 0
+				},
+				.aad_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				}
+			}, }
+		}, }
+	},
+	{	/* AES GCM (CIPHER) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			{.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_AES_GCM,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 32,
+					.increment = 8
+				},
+				.iv_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				}
+			}, }
+		}, }
+	},
+	{	/* AES CBC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			{.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_AES_CBC,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 32,
+					.increment = 8
+				},
+				.iv_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				}
+			}, }
+		}, }
+	},
+	{	/* SNOW 3G (UEA2) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			{.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				},
+				.iv_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				}
+			}, }
+		}, }
+	},
+	{	/* AES CTR */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			{.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_AES_CTR,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 32,
+					.increment = 8
+				},
+				.iv_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				}
+			}, }
+		}, }
+	},
+	{	/* NULL (AUTH) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			{.auth = {
+				.algo = RTE_CRYPTO_AUTH_NULL,
+				.block_size = 1,
+				.key_size = {
+					.min = 0,
+					.max = 0,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 0,
+					.max = 0,
+					.increment = 0
+				},
+				.aad_size = { 0 }
+			}, },
+		}, },
+	},
+	{	/* NULL (CIPHER) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			{.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_NULL,
+				.block_size = 1,
+				.key_size = {
+					.min = 0,
+					.max = 0,
+					.increment = 0
+				},
+				.iv_size = {
+					.min = 0,
+					.max = 0,
+					.increment = 0
+				}
+			}, },
+		}, }
+	},
+	{	/* KASUMI (F8) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			{.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_KASUMI_F8,
+				.block_size = 8,
+				.key_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				},
+				.iv_size = {
+					.min = 8,
+					.max = 8,
+					.increment = 0
+				}
+			}, }
+		}, }
+	},
+	{	/* KASUMI (F9) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			{.auth = {
+				.algo = RTE_CRYPTO_AUTH_KASUMI_F9,
+				.block_size = 8,
+				.key_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 4,
+					.max = 4,
+					.increment = 0
+				},
+				.aad_size = {
+					.min = 8,
+					.max = 8,
+					.increment = 0
+				}
+			}, }
+		}, }
+	},
+	{	/* 3DES CBC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			{.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_3DES_CBC,
+				.block_size = 8,
+				.key_size = {
+					.min = 16,
+					.max = 24,
+					.increment = 8
+				},
+				.iv_size = {
+					.min = 8,
+					.max = 8,
+					.increment = 0
+				}
+			}, }
+		}, }
+	},
+	{	/* 3DES CTR */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			{.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_3DES_CTR,
+				.block_size = 8,
+				.key_size = {
+					.min = 16,
+					.max = 24,
+					.increment = 8
+				},
+				.iv_size = {
+					.min = 8,
+					.max = 8,
+					.increment = 0
+				}
+			}, }
+		}, }
+	},
+	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
+};
+
 static inline uint32_t
 adf_modulo(uint32_t data, uint32_t shift);
 
@@ -78,14 +509,15 @@ void qat_crypto_sym_clear_session(struct rte_cryptodev *dev,
 		void *session)
 {
 	struct qat_session *sess = session;
-	phys_addr_t cd_paddr = sess->cd_paddr;
+	phys_addr_t cd_paddr;
 
 	PMD_INIT_FUNC_TRACE();
 	if (session) {
+		cd_paddr = sess->cd_paddr;
 		memset(sess, 0, qat_crypto_sym_get_session_private_size(dev));
-	sess->cd_paddr = cd_paddr;
-	}
+	} else
+		PMD_DRV_LOG(ERR, "NULL session");
 }
 
 static int
@@ -170,22 +602,57 @@ qat_crypto_sym_configure_session_cipher(struct rte_cryptodev *dev,
 		}
 		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
 		break;
+	case RTE_CRYPTO_CIPHER_AES_CTR:
+		if (qat_alg_validate_aes_key(cipher_xform->key.length,
+				&session->qat_cipher_alg) != 0) {
+			PMD_DRV_LOG(ERR, "Invalid AES cipher key size");
+			goto error_out;
+		}
+		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
+		break;
 	case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
 		if (qat_alg_validate_snow3g_key(cipher_xform->key.length,
 					&session->qat_cipher_alg) != 0) {
-			PMD_DRV_LOG(ERR, "Invalid SNOW3G cipher key size");
+			PMD_DRV_LOG(ERR, "Invalid SNOW 3G cipher key size");
 			goto error_out;
 		}
 		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
 		break;
 	case RTE_CRYPTO_CIPHER_NULL:
-	case RTE_CRYPTO_CIPHER_3DES_ECB:
+		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
+		break;
+	case RTE_CRYPTO_CIPHER_KASUMI_F8:
+		if (qat_alg_validate_kasumi_key(cipher_xform->key.length,
+					&session->qat_cipher_alg) != 0) {
+			PMD_DRV_LOG(ERR, "Invalid KASUMI cipher key size");
+			goto error_out;
+		}
+		session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
+		break;
 	case RTE_CRYPTO_CIPHER_3DES_CBC:
+		if (qat_alg_validate_3des_key(cipher_xform->key.length,
+				&session->qat_cipher_alg) != 0) {
+			PMD_DRV_LOG(ERR, "Invalid 3DES cipher key size");
+			goto error_out;
+		}
+		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
+		break;
+	case RTE_CRYPTO_CIPHER_3DES_CTR:
+		if (qat_alg_validate_3des_key(cipher_xform->key.length,
+				&session->qat_cipher_alg) != 0) {
+			PMD_DRV_LOG(ERR, "Invalid 3DES cipher key size");
+			goto error_out;
+		}
+		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
+		break;
+	case RTE_CRYPTO_CIPHER_3DES_ECB:
 	case RTE_CRYPTO_CIPHER_AES_ECB:
-	case RTE_CRYPTO_CIPHER_AES_CTR:
 	case RTE_CRYPTO_CIPHER_AES_CCM:
-	case RTE_CRYPTO_CIPHER_KASUMI_F8:
-		PMD_DRV_LOG(ERR, "Crypto: Unsupported Cipher alg %u",
+	case RTE_CRYPTO_CIPHER_AES_F8:
+	case RTE_CRYPTO_CIPHER_AES_XTS:
+	case RTE_CRYPTO_CIPHER_ARC4:
+	case RTE_CRYPTO_CIPHER_ZUC_EEA3:
+		PMD_DRV_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
 				cipher_xform->algo);
 		goto error_out;
 	default:
@@ -286,9 +753,15 @@ qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev,
 	case RTE_CRYPTO_AUTH_SHA1_HMAC:
 		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
 		break;
+	case RTE_CRYPTO_AUTH_SHA224_HMAC:
+		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
+		break;
 	case RTE_CRYPTO_AUTH_SHA256_HMAC:
 		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
 		break;
+	case RTE_CRYPTO_AUTH_SHA384_HMAC:
+		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
+		break;
 	case RTE_CRYPTO_AUTH_SHA512_HMAC:
 		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
 		break;
@@ -298,22 +771,28 @@ qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev,
 	case RTE_CRYPTO_AUTH_AES_GCM:
 		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
 		break;
+	case RTE_CRYPTO_AUTH_AES_GMAC:
+		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
+		break;
 	case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
 		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
 		break;
+	case RTE_CRYPTO_AUTH_MD5_HMAC:
+		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
+		break;
 	case RTE_CRYPTO_AUTH_NULL:
+		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
+		break;
+	case RTE_CRYPTO_AUTH_KASUMI_F9:
+		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
+		break;
 	case RTE_CRYPTO_AUTH_SHA1:
 	case RTE_CRYPTO_AUTH_SHA256:
 	case RTE_CRYPTO_AUTH_SHA512:
 	case RTE_CRYPTO_AUTH_SHA224:
-	case RTE_CRYPTO_AUTH_SHA224_HMAC:
 	case RTE_CRYPTO_AUTH_SHA384:
-	case RTE_CRYPTO_AUTH_SHA384_HMAC:
 	case RTE_CRYPTO_AUTH_MD5:
-	case RTE_CRYPTO_AUTH_MD5_HMAC:
 	case RTE_CRYPTO_AUTH_AES_CCM:
-	case RTE_CRYPTO_AUTH_AES_GMAC:
-	case RTE_CRYPTO_AUTH_KASUMI_F9:
 	case RTE_CRYPTO_AUTH_AES_CMAC:
 	case RTE_CRYPTO_AUTH_AES_CBC_MAC:
 	case RTE_CRYPTO_AUTH_ZUC_EIA3:
@@ -334,20 +813,23 @@ qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev,
 			cipher_xform->key.data,
 			cipher_xform->key.length,
 			auth_xform->add_auth_data_length,
-			auth_xform->digest_length))
+			auth_xform->digest_length,
+			auth_xform->op))
 			goto error_out;
 	} else {
 		if (qat_alg_aead_session_create_content_desc_auth(session,
 			auth_xform->key.data,
 			auth_xform->key.length,
 			auth_xform->add_auth_data_length,
-			auth_xform->digest_length))
+			auth_xform->digest_length,
+			auth_xform->op))
 			goto error_out;
 	}
 	return session;
 
 error_out:
-	rte_mempool_put(internals->sess_mp, session);
+	if (internals->sess_mp != NULL)
+		rte_mempool_put(internals->sess_mp, session);
 	return NULL;
 }
 
@@ -372,6 +854,9 @@ qat_pmd_enqueue_op_burst(void *qp, struct rte_crypto_op **ops,
 	register uint32_t tail;
 	int overflow;
 
+	if (unlikely(nb_ops == 0))
+		return 0;
+
 	/* read params used a lot in main loop into registers */
 	queue = &(tmp_qp->tx_q);
 	base_addr = (uint8_t *)queue->base_addr;
@@ -475,43 +960,48 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg)
 		return -EINVAL;
 	}
 #endif
-	if (unlikely(op->sym->type == RTE_CRYPTO_SYM_OP_SESSIONLESS)) {
+	if (unlikely(op->sym->sess_type == RTE_CRYPTO_SYM_OP_SESSIONLESS)) {
 		PMD_DRV_LOG(ERR, "QAT PMD only supports session oriented"
 				" requests, op (%p) is sessionless.", op);
 		return -EINVAL;
 	}
 
-	if (unlikely(op->sym->session->type != RTE_CRYPTODEV_QAT_SYM_PMD)) {
+	if (unlikely(op->sym->session->dev_type != RTE_CRYPTODEV_QAT_SYM_PMD)) {
 		PMD_DRV_LOG(ERR, "Session was not created for this device");
 		return -EINVAL;
 	}
 
 	ctx = (struct qat_session *)op->sym->session->_private;
 	qat_req = (struct icp_qat_fw_la_bulk_req *)out_msg;
-	*qat_req = ctx->fw_req;
+	rte_mov128((uint8_t *)qat_req, (const uint8_t *)&(ctx->fw_req));
 	qat_req->comn_mid.opaque_data = (uint64_t)(uintptr_t)op;
 
-	/*
-	 * The following code assumes:
-	 * - single entry buffer.
-	 * - always in place.
-	 */
 	qat_req->comn_mid.dst_length =
-		qat_req->comn_mid.src_length =
-				rte_pktmbuf_data_len(op->sym->m_src);
+			qat_req->comn_mid.src_length =
+					rte_pktmbuf_data_len(op->sym->m_src);
+
 	qat_req->comn_mid.dest_data_addr =
-		qat_req->comn_mid.src_data_addr =
-			rte_pktmbuf_mtophys(op->sym->m_src);
+			qat_req->comn_mid.src_data_addr =
+					rte_pktmbuf_mtophys(op->sym->m_src);
+
+	if (unlikely(op->sym->m_dst != NULL)) {
+		qat_req->comn_mid.dest_data_addr =
+				rte_pktmbuf_mtophys(op->sym->m_dst);
+		qat_req->comn_mid.dst_length =
+				rte_pktmbuf_data_len(op->sym->m_dst);
+	}
+
 	cipher_param = (void *)&qat_req->serv_specif_rqpars;
 	auth_param = (void *)((uint8_t *)cipher_param + sizeof(*cipher_param));
 
 	cipher_param->cipher_length = op->sym->cipher.data.length;
 	cipher_param->cipher_offset = op->sym->cipher.data.offset;
-	if (ctx->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
+	if (ctx->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
+			ctx->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
 		if (unlikely((cipher_param->cipher_length % BYTE_LENGTH != 0) ||
 				(cipher_param->cipher_offset % BYTE_LENGTH != 0))) {
-			PMD_DRV_LOG(ERR, " For Snow3g, QAT PMD only "
+			PMD_DRV_LOG(ERR, " For SNOW 3G/KASUMI, QAT PMD only "
 				"supports byte aligned values");
 			op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
 			return -EINVAL;
@@ -542,7 +1032,7 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg)
 	if (ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2) {
 		if (unlikely((auth_param->auth_off % BYTE_LENGTH != 0) ||
 				(auth_param->auth_len % BYTE_LENGTH != 0))) {
-			PMD_DRV_LOG(ERR, " For Snow3g, QAT PMD only "
+			PMD_DRV_LOG(ERR, " For SNOW 3G, QAT PMD only "
 				"supports byte aligned values");
 			op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
 			return -EINVAL;
@@ -550,21 +1040,26 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg)
 		auth_param->auth_off >>= 3;
 		auth_param->auth_len >>= 3;
 	}
+	if ((ctx->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER ||
+			ctx->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) &&
+			ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9) {
+		auth_param->auth_len = (auth_param->auth_len >> 3)
+				+ (auth_param->auth_off >> 3)
+				+ (BYTE_LENGTH >> 3)
+				- 8;
+		auth_param->auth_off = 8;
+	} else if (ctx->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH
+			&& ctx->qat_hash_alg ==
+				ICP_QAT_HW_AUTH_ALGO_KASUMI_F9) {
+		auth_param->auth_len = (auth_param->auth_len >> 3)
+				+ (auth_param->auth_off >> 3)
+				+ (BYTE_LENGTH >> 3);
+		auth_param->auth_off = 0;
+	}
 	auth_param->u1.aad_adr = op->sym->auth.aad.phys_addr;
-	/* (GCM) aad length(240 max) will be at this location after precompute */
+
 	if (ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128 ||
 			ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64) {
-		struct icp_qat_hw_auth_algo_blk *hash;
-
-		if (ctx->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER)
-			hash = (struct icp_qat_hw_auth_algo_blk *)((char *)&ctx->cd);
-		else
-			hash = (struct icp_qat_hw_auth_algo_blk *)((char *)&ctx->cd +
-					sizeof(struct icp_qat_hw_cipher_algo_blk));
-
-		auth_param->u2.aad_sz = ALIGN_POW2_ROUNDUP(hash->sha.state1[
-			ICP_QAT_HW_GALOIS_128_STATE1_SZ +
-			ICP_QAT_HW_GALOIS_H_SZ + 3], 16);
@@ -574,9 +1069,20 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg)
 		if (op->sym->cipher.iv.length == 12) {
 			/*
 			 * For GCM a 12 bit IV is allowed,
 			 * but we need to inform the f/w
 			 */
 			ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
 				qat_req->comn_hdr.serv_specif_flags,
 				ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
 		}
-	}
-	auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;
+		if (op->sym->cipher.data.length == 0) {
+			/*
+			 * GMAC
+			 */
+			qat_req->comn_mid.dest_data_addr =
+				qat_req->comn_mid.src_data_addr =
+					op->sym->auth.aad.phys_addr;
+			auth_param->u1.aad_adr = 0;
+			auth_param->auth_len = op->sym->auth.aad.length;
+			auth_param->u2.aad_sz = 0;
+		}
+
+	}
 
 #ifdef RTE_LIBRTE_PMD_QAT_DEBUG_TX
 	rte_hexdump(stdout, "qat_req:", qat_req,
@@ -602,12 +1108,15 @@ static inline uint32_t adf_modulo(uint32_t data, uint32_t shift)
 	return data - mult;
 }
 
-void qat_crypto_sym_session_init(struct rte_mempool *mp, void *priv_sess)
+void qat_crypto_sym_session_init(struct rte_mempool *mp, void *sym_sess)
 {
-	struct qat_session *s = priv_sess;
+	struct rte_cryptodev_sym_session *sess = sym_sess;
+	struct qat_session *s = (void *)sess->_private;
 
 	PMD_INIT_FUNC_TRACE();
-	s->cd_paddr = rte_mempool_virt2phy(mp, &s->cd);
+	s->cd_paddr = rte_mempool_virt2phy(mp, sess) +
+			offsetof(struct qat_session, cd) +
+			offsetof(struct rte_cryptodev_sym_session, _private);
 }
 
 int qat_dev_config(__rte_unused struct rte_cryptodev *dev)
@@ -652,7 +1161,8 @@ void qat_dev_info_get(__rte_unused struct rte_cryptodev *dev,
 		info->max_nb_queue_pairs =
 				ADF_NUM_SYM_QPS_PER_BUNDLE *
 				ADF_NUM_BUNDLES_PER_DEV;
-
+		info->feature_flags = dev->feature_flags;
+		info->capabilities = qat_pmd_capabilities;
 		info->sym.max_nb_sessions = internals->max_nb_sessions;
 		info->dev_type = RTE_CRYPTODEV_QAT_SYM_PMD;
 	}
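
Note on usage: the qat_pmd_capabilities[] table added by this patch is what applications see once qat_dev_info_get() publishes it through info->capabilities. A minimal sketch of how a caller might probe it follows; the dev_id value and the choice of SNOW 3G (UIA2) as the algorithm to look for are illustrative assumptions, not part of the patch.

	#include <rte_cryptodev.h>

	/* Walk the capability list of an already-probed device and report
	 * whether SNOW 3G (UIA2) authentication is offered. The list is
	 * terminated by RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST(), which
	 * leaves .op set to RTE_CRYPTO_OP_TYPE_UNDEFINED. */
	static int
	supports_snow3g_auth(uint8_t dev_id)
	{
		struct rte_cryptodev_info info;
		const struct rte_cryptodev_capabilities *cap;

		rte_cryptodev_info_get(dev_id, &info);
		for (cap = info.capabilities;
				cap->op != RTE_CRYPTO_OP_TYPE_UNDEFINED; cap++) {
			if (cap->op == RTE_CRYPTO_OP_TYPE_SYMMETRIC &&
					cap->sym.xform_type ==
						RTE_CRYPTO_SYM_XFORM_AUTH &&
					cap->sym.auth.algo ==
						RTE_CRYPTO_AUTH_SNOW3G_UIA2)
				return 1;
		}
		return 0;
	}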
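
The KASUMI (F9) fix-up in qat_write_hw_desc_entry() converts auth_off and auth_len from bits to bytes and, in the chained cipher+hash case, shifts the hashed region so it starts at byte 8. Below is a self-contained restatement of that arithmetic with worked numbers; the sample offset and length values are illustrative assumptions.

	#include <assert.h>
	#include <stdint.h>

	#define BYTE_LENGTH 8	/* bits per byte, as in the patch */

	int main(void)
	{
		uint32_t auth_off = 64;		/* bits, i.e. 8 bytes */
		uint32_t auth_len = 120;	/* bits, i.e. 15 bytes */

		/* HASH_CIPHER/CIPHER_HASH: the byte length covers offset plus
		 * data plus one extra byte ((BYTE_LENGTH >> 3) == 1), minus
		 * the leading 8 bytes that the new offset of 8 now skips. */
		uint32_t chained = (auth_len >> 3) + (auth_off >> 3)
				+ (BYTE_LENGTH >> 3) - 8;
		assert(chained == 16);	/* hashed region: bytes 8..23 */

		/* Plain AUTH: the same conversion without the 8-byte shift;
		 * the offset is reset to 0 instead. */
		uint32_t auth_only = (auth_len >> 3) + (auth_off >> 3)
				+ (BYTE_LENGTH >> 3);
		assert(auth_only == 24);	/* hashed region: bytes 0..23 */
		return 0;
	}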