X-Git-Url: http://git.droids-corp.org/?a=blobdiff_plain;f=drivers%2Fcrypto%2Fqat%2Fqat_crypto.c;h=7e04f211adb4ca28b54defd29cdc1d6a785dc4c9;hb=7c2d03d65f5e77705dbfbafdef4acab1594ee290;hp=b2c7258cbf879d9daed71dcfc51a4b18b99bdc7a;hpb=e32e4fa8ae074a7d7d2f7c7a5f690d342dc93790;p=dpdk.git

diff --git a/drivers/crypto/qat/qat_crypto.c b/drivers/crypto/qat/qat_crypto.c
index b2c7258cbf..7e04f211ad 100644
--- a/drivers/crypto/qat/qat_crypto.c
+++ b/drivers/crypto/qat/qat_crypto.c
@@ -216,23 +216,23 @@ static inline int
 qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 		struct qat_crypto_op_cookie *qat_op_cookie);
 
-void qat_crypto_sym_clear_session(struct rte_cryptodev *dev,
-		void *session)
+void
+qat_crypto_sym_clear_session(struct rte_cryptodev *dev,
+		struct rte_cryptodev_sym_session *sess)
 {
-	struct qat_session *sess = session;
-	phys_addr_t cd_paddr;
-
-	PMD_INIT_FUNC_TRACE();
-	if (sess) {
-		if (sess->bpi_ctx) {
-			bpi_cipher_ctx_free(sess->bpi_ctx);
-			sess->bpi_ctx = NULL;
-		}
-		cd_paddr = sess->cd_paddr;
-		memset(sess, 0, qat_crypto_sym_get_session_private_size(dev));
-		sess->cd_paddr = cd_paddr;
-	} else
-		PMD_DRV_LOG(ERR, "NULL session");
+	uint8_t index = dev->driver_id;
+	void *sess_priv = get_session_private_data(sess, index);
+	struct qat_session *s = (struct qat_session *)sess_priv;
+
+	if (sess_priv) {
+		if (s->bpi_ctx)
+			bpi_cipher_ctx_free(s->bpi_ctx);
+		memset(s, 0, qat_crypto_sym_get_session_private_size(dev));
+		struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
+		set_session_private_data(sess, index, NULL);
+		rte_mempool_put(sess_mp, sess_priv);
+	}
 }
 
 static int
@@ -246,6 +246,14 @@ qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
 	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
 		return ICP_QAT_FW_LA_CMD_AUTH;
 
+	/* AEAD */
+	if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
+		if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
+			return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
+		else
+			return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
+	}
+
 	if (xform->next == NULL)
 		return -1;
 
@@ -310,14 +318,6 @@ qat_crypto_sym_configure_session_cipher(struct rte_cryptodev *dev,
 		}
 		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
 		break;
-	case RTE_CRYPTO_CIPHER_AES_GCM:
-		if (qat_alg_validate_aes_key(cipher_xform->key.length,
-				&session->qat_cipher_alg) != 0) {
-			PMD_DRV_LOG(ERR, "Invalid AES cipher key size");
-			goto error_out;
-		}
-		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
-		break;
 	case RTE_CRYPTO_CIPHER_AES_CTR:
 		if (qat_alg_validate_aes_key(cipher_xform->key.length,
 				&session->qat_cipher_alg) != 0) {
@@ -418,7 +418,6 @@ qat_crypto_sym_configure_session_cipher(struct rte_cryptodev *dev,
 		break;
 	case RTE_CRYPTO_CIPHER_3DES_ECB:
 	case RTE_CRYPTO_CIPHER_AES_ECB:
-	case RTE_CRYPTO_CIPHER_AES_CCM:
 	case RTE_CRYPTO_CIPHER_AES_F8:
 	case RTE_CRYPTO_CIPHER_AES_XTS:
 	case RTE_CRYPTO_CIPHER_ARC4:
@@ -451,9 +450,37 @@ error_out:
 	return NULL;
 }
 
-
-void *
+int
 qat_crypto_sym_configure_session(struct rte_cryptodev *dev,
+		struct rte_crypto_sym_xform *xform,
+		struct rte_cryptodev_sym_session *sess,
+		struct rte_mempool *mempool)
+{
+	void *sess_private_data;
+
+	if (rte_mempool_get(mempool, &sess_private_data)) {
+		CDEV_LOG_ERR(
+			"Couldn't get object from session mempool");
+		return -1;
+	}
+
+	if (qat_crypto_set_session_parameters(dev, xform, sess_private_data) != 0) {
+		PMD_DRV_LOG(ERR, "Crypto QAT PMD: failed to configure "
+				"session parameters");
+
+		/* Return session to mempool */
+		rte_mempool_put(mempool, sess_private_data);
+		return -1;
+	}
+
+	set_session_private_data(sess, dev->driver_id,
+		sess_private_data);
+
+	return 0;
+}
+
+int
+qat_crypto_set_session_parameters(struct rte_cryptodev *dev,
 		struct rte_crypto_sym_xform *xform, void *session_private)
 {
 	struct qat_session *session = session_private;
@@ -461,6 +488,10 @@ qat_crypto_sym_configure_session(struct rte_cryptodev *dev,
 	int qat_cmd_id;
 	PMD_INIT_FUNC_TRACE();
 
+	/* Set context descriptor physical address */
+	session->cd_paddr = rte_mempool_virt2phy(NULL, session) +
+			offsetof(struct qat_session, cd);
+
 	/* Get requested QAT command id */
 	qat_cmd_id = qat_get_cmd_id(xform);
 	if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
@@ -476,12 +507,26 @@ qat_crypto_sym_configure_session(struct rte_cryptodev *dev,
 		session = qat_crypto_sym_configure_session_auth(dev, xform, session);
 		break;
 	case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
-		session = qat_crypto_sym_configure_session_cipher(dev, xform, session);
-		session = qat_crypto_sym_configure_session_auth(dev, xform, session);
+		if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD)
+			session = qat_crypto_sym_configure_session_aead(xform,
+					session);
+		else {
+			session = qat_crypto_sym_configure_session_cipher(dev,
+					xform, session);
+			session = qat_crypto_sym_configure_session_auth(dev,
+					xform, session);
+		}
 		break;
 	case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
-		session = qat_crypto_sym_configure_session_auth(dev, xform, session);
-		session = qat_crypto_sym_configure_session_cipher(dev, xform, session);
+		if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD)
+			session = qat_crypto_sym_configure_session_aead(xform,
+					session);
+		else {
+			session = qat_crypto_sym_configure_session_auth(dev,
+					xform, session);
+			session = qat_crypto_sym_configure_session_cipher(dev,
+					xform, session);
+		}
 		break;
 	case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
 	case ICP_QAT_FW_LA_CMD_TRNG_TEST:
@@ -501,10 +546,10 @@ qat_crypto_sym_configure_session(struct rte_cryptodev *dev,
 		goto error_out;
 	}
 
-	return session;
+	return 0;
 
 error_out:
-	return NULL;
+	return -1;
 }
 
 struct qat_session *
@@ -515,7 +560,6 @@ qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev,
 	struct qat_session *session = session_private;
 	struct rte_crypto_auth_xform *auth_xform = NULL;
-	struct rte_crypto_cipher_xform *cipher_xform = NULL;
 	struct qat_pmd_private *internals = dev->data->dev_private;
 	auth_xform = qat_get_auth_xform(xform);
 	uint8_t *key_data = auth_xform->key.data;
@@ -540,14 +584,6 @@ qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev,
 	case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
 		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
 		break;
-	case RTE_CRYPTO_AUTH_AES_GCM:
-		cipher_xform = qat_get_cipher_xform(xform);
-
-		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
-
-		key_data = cipher_xform->key.data;
-		key_length = cipher_xform->key.length;
-		break;
 	case RTE_CRYPTO_AUTH_AES_GMAC:
 		if (qat_alg_validate_aes_key(auth_xform->key.length,
 				&session->qat_cipher_alg) != 0) {
@@ -585,7 +621,6 @@ qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev,
 	case RTE_CRYPTO_AUTH_SHA224:
 	case RTE_CRYPTO_AUTH_SHA384:
 	case RTE_CRYPTO_AUTH_MD5:
-	case RTE_CRYPTO_AUTH_AES_CCM:
 	case RTE_CRYPTO_AUTH_AES_CMAC:
 	case RTE_CRYPTO_AUTH_AES_CBC_MAC:
 		PMD_DRV_LOG(ERR, "Crypto: Unsupported hash alg %u",
@@ -646,7 +681,7 @@ qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev,
 		if (qat_alg_aead_session_create_content_desc_auth(session,
 				key_data,
 				key_length,
-				auth_xform->add_auth_data_length,
+				0,
 				auth_xform->digest_length,
 				auth_xform->op))
 			goto error_out;
@@ -659,6 +694,85 @@ error_out:
 	return NULL;
 }
 
+struct qat_session *
+qat_crypto_sym_configure_session_aead(struct rte_crypto_sym_xform *xform,
+				struct qat_session *session_private)
+{
+	struct qat_session *session = session_private;
+	struct rte_crypto_aead_xform *aead_xform = &xform->aead;
+
+	/*
+	 * Store AEAD IV parameters as cipher IV,
+	 * to avoid unnecessary memory usage
+	 */
+	session->cipher_iv.offset = xform->aead.iv.offset;
+	session->cipher_iv.length = xform->aead.iv.length;
+
+	switch (aead_xform->algo) {
+	case RTE_CRYPTO_AEAD_AES_GCM:
+		if (qat_alg_validate_aes_key(aead_xform->key.length,
+				&session->qat_cipher_alg) != 0) {
+			PMD_DRV_LOG(ERR, "Invalid AES key size");
+			goto error_out;
+		}
+		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
+		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
+		break;
+	case RTE_CRYPTO_AEAD_AES_CCM:
+		PMD_DRV_LOG(ERR, "Crypto QAT PMD: Unsupported AEAD alg %u",
+				aead_xform->algo);
+		goto error_out;
+	default:
+		PMD_DRV_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
+				aead_xform->algo);
+		goto error_out;
+	}
+
+	if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
+		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
+		/*
+		 * It needs to create cipher desc content first,
+		 * then authentication
+		 */
+		if (qat_alg_aead_session_create_content_desc_cipher(session,
+					aead_xform->key.data,
+					aead_xform->key.length))
+			goto error_out;
+
+		if (qat_alg_aead_session_create_content_desc_auth(session,
+					aead_xform->key.data,
+					aead_xform->key.length,
+					aead_xform->add_auth_data_length,
+					aead_xform->digest_length,
+					RTE_CRYPTO_AUTH_OP_GENERATE))
+			goto error_out;
+	} else {
+		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
+		/*
+		 * It needs to create authentication desc content first,
+		 * then cipher
+		 */
+		if (qat_alg_aead_session_create_content_desc_auth(session,
+					aead_xform->key.data,
+					aead_xform->key.length,
+					aead_xform->add_auth_data_length,
+					aead_xform->digest_length,
+					RTE_CRYPTO_AUTH_OP_VERIFY))
+			goto error_out;
+
+		if (qat_alg_aead_session_create_content_desc_cipher(session,
+					aead_xform->key.data,
+					aead_xform->key.length))
+			goto error_out;
+	}
+
+	session->digest_length = aead_xform->digest_length;
+	return session;
+
+error_out:
+	return NULL;
+}
+
 unsigned qat_crypto_sym_get_session_private_size(
 		struct rte_cryptodev *dev __rte_unused)
 {
@@ -671,7 +785,8 @@ qat_bpicipher_preprocess(struct qat_session *ctx,
 {
 	uint8_t block_len = qat_cipher_get_block_size(ctx->qat_cipher_alg);
 	struct rte_crypto_sym_op *sym_op = op->sym;
-	uint8_t last_block_len = sym_op->cipher.data.length % block_len;
+	uint8_t last_block_len = block_len > 0 ?
+			sym_op->cipher.data.length % block_len : 0;
 
 	if (last_block_len &&
 			ctx->qat_dir == ICP_QAT_HW_CIPHER_DECRYPT) {
@@ -725,7 +840,8 @@ qat_bpicipher_postprocess(struct qat_session *ctx,
 {
 	uint8_t block_len = qat_cipher_get_block_size(ctx->qat_cipher_alg);
 	struct rte_crypto_sym_op *sym_op = op->sym;
-	uint8_t last_block_len = sym_op->cipher.data.length % block_len;
+	uint8_t last_block_len = block_len > 0 ?
+			sym_op->cipher.data.length % block_len : 0;
 
 	if (last_block_len > 0 &&
 			ctx->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT) {
@@ -864,7 +980,10 @@ qat_pmd_dequeue_op_burst(void *qp, struct rte_crypto_op **ops,
 			rx_op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
 		} else {
 			struct qat_session *sess = (struct qat_session *)
-					(rx_op->sym->session->_private);
+					get_session_private_data(
+					rx_op->sym->session,
+					cryptodev_qat_driver_id);
+
 			if (sess->bpi_ctx)
 				qat_bpicipher_postprocess(sess, rx_op);
 			rx_op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
@@ -969,7 +1088,7 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 	struct icp_qat_fw_la_cipher_req_params *cipher_param;
 	struct icp_qat_fw_la_auth_req_params *auth_param;
 	register struct icp_qat_fw_la_bulk_req *qat_req;
-	uint8_t do_auth = 0, do_cipher = 0;
+	uint8_t do_auth = 0, do_cipher = 0, do_aead = 0;
 	uint32_t cipher_len = 0, cipher_ofs = 0;
 	uint32_t auth_len = 0, auth_ofs = 0;
 	uint32_t min_ofs = 0;
@@ -990,12 +1109,14 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 		return -EINVAL;
 	}
 
-	if (unlikely(op->sym->session->dev_type != RTE_CRYPTODEV_QAT_SYM_PMD)) {
+	ctx = (struct qat_session *)get_session_private_data(
+			op->sym->session, cryptodev_qat_driver_id);
+
+	if (unlikely(ctx == NULL)) {
 		PMD_DRV_LOG(ERR, "Session was not created for this device");
 		return -EINVAL;
 	}
 
-	ctx = (struct qat_session *)op->sym->session->_private;
 	qat_req = (struct icp_qat_fw_la_bulk_req *)out_msg;
 	rte_mov128((uint8_t *)qat_req, (const uint8_t *)&(ctx->fw_req));
 	qat_req->comn_mid.opaque_data = (uint64_t)(uintptr_t)op;
@@ -1003,9 +1124,15 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 	auth_param = (void *)((uint8_t *)cipher_param + sizeof(*cipher_param));
 
 	if (ctx->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER ||
-		ctx->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
-		do_auth = 1;
-		do_cipher = 1;
+			ctx->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
+		/* AES-GCM */
+		if (ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128 ||
+				ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64) {
+			do_aead = 1;
+		} else {
+			do_auth = 1;
+			do_cipher = 1;
+		}
 	} else if (ctx->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
 		do_auth = 1;
 		do_cipher = 0;
@@ -1087,18 +1214,10 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 					ICP_QAT_HW_AUTH_ALGO_GALOIS_128 ||
 				ctx->qat_hash_alg ==
 					ICP_QAT_HW_AUTH_ALGO_GALOIS_64) {
-			/* AES-GCM */
-			if (do_cipher) {
-				auth_ofs = op->sym->cipher.data.offset;
-				auth_len = op->sym->cipher.data.length;
-
-				auth_param->u1.aad_adr = op->sym->auth.aad.phys_addr;
 			/* AES-GMAC */
-			} else {
-				set_cipher_iv(ctx->auth_iv.length,
-					ctx->auth_iv.offset,
-					cipher_param, op, qat_req);
-			}
+			set_cipher_iv(ctx->auth_iv.length,
+				ctx->auth_iv.offset,
+				cipher_param, op, qat_req);
 		} else {
 			auth_ofs = op->sym->auth.data.offset;
 			auth_len = op->sym->auth.data.length;
@@ -1110,6 +1229,19 @@
 
 	}
 
+	if (do_aead) {
+		cipher_len = op->sym->aead.data.length;
+		cipher_ofs = op->sym->aead.data.offset;
+		auth_len = op->sym->aead.data.length;
+		auth_ofs = op->sym->aead.data.offset;
+
+		auth_param->u1.aad_adr = op->sym->aead.aad.phys_addr;
+		auth_param->auth_res_addr = op->sym->aead.digest.phys_addr;
+		set_cipher_iv(ctx->cipher_iv.length, ctx->cipher_iv.offset,
+				cipher_param, op, qat_req);
+		min_ofs = op->sym->aead.data.offset;
+	}
+
 	if (op->sym->m_src->next || (op->sym->m_dst && op->sym->m_dst->next))
 		do_sgl = 1;
 
@@ -1151,7 +1283,7 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 		dst_buf_start = src_buf_start;
 
 	}
-	if (do_cipher) {
+	if (do_cipher || do_aead) {
 		cipher_param->cipher_offset =
 				(uint32_t)rte_pktmbuf_mtophys_offset(
 				op->sym->m_src, cipher_ofs) - src_buf_start;
@@ -1160,7 +1292,8 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 		cipher_param->cipher_offset = 0;
 		cipher_param->cipher_length = 0;
 	}
-	if (do_auth) {
+
+	if (do_auth || do_aead) {
 		auth_param->auth_off = (uint32_t)rte_pktmbuf_mtophys_offset(
 				op->sym->m_src, auth_ofs) - src_buf_start;
 		auth_param->auth_len = auth_len;
@@ -1168,6 +1301,7 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 		auth_param->auth_off = 0;
 		auth_param->auth_len = 0;
 	}
+
 	qat_req->comn_mid.dst_length =
 		qat_req->comn_mid.src_length =
 		(cipher_param->cipher_offset + cipher_param->cipher_length)
@@ -1226,7 +1360,7 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 					ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
 		}
 		/* GMAC */
-		if (!do_cipher) {
+		if (!do_aead) {
 			qat_req->comn_mid.dst_length =
 				qat_req->comn_mid.src_length =
 					rte_pktmbuf_data_len(op->sym->m_src);
@@ -1261,7 +1395,12 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 		}
 		rte_hexdump(stdout, "digest:", op->sym->auth.digest.data,
 				ctx->digest_length);
-		rte_hexdump(stdout, "aad:", op->sym->auth.aad.data,
+	}
+
+	if (do_aead) {
+		rte_hexdump(stdout, "digest:", op->sym->aead.digest.data,
+				ctx->digest_length);
+		rte_hexdump(stdout, "aad:", op->sym->aead.aad.data,
 				ctx->aad_len);
 	}
 #endif
@@ -1276,17 +1415,6 @@ static inline uint32_t adf_modulo(uint32_t data, uint32_t shift)
 	return data - mult;
 }
 
-void qat_crypto_sym_session_init(struct rte_mempool *mp, void *sym_sess)
-{
-	struct rte_cryptodev_sym_session *sess = sym_sess;
-	struct qat_session *s = (void *)sess->_private;
-
-	PMD_INIT_FUNC_TRACE();
-	s->cd_paddr = rte_mempool_virt2phy(mp, sess) +
-		offsetof(struct qat_session, cd) +
-		offsetof(struct rte_cryptodev_sym_session, _private);
-}
-
 int qat_dev_config(__rte_unused struct rte_cryptodev *dev,
 		__rte_unused struct rte_cryptodev_config *config)
 {
@@ -1333,7 +1461,7 @@ void qat_dev_info_get(struct rte_cryptodev *dev,
 		info->feature_flags = dev->feature_flags;
 		info->capabilities = internals->qat_dev_capabilities;
 		info->sym.max_nb_sessions = internals->max_nb_sessions;
-		info->dev_type = RTE_CRYPTODEV_QAT_SYM_PMD;
+		info->driver_id = cryptodev_qat_driver_id;
 		info->pci_dev = RTE_DEV_TO_PCI(dev->device);
 	}
 }
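
Note (not part of the patch): a minimal sketch of how an application would drive the device-independent session API that qat_crypto_sym_configure_session() above plugs into, using the RTE_CRYPTO_SYM_XFORM_AEAD path added by this change. The cryptodev helpers rte_cryptodev_sym_session_create/_init/_free are the library API of this period; dev_id, sess_mp, key/key_len, IV_OFFSET and the digest/AAD lengths are illustrative assumptions.

/*
 * Illustrative sketch only: allocate a generic session header from a
 * mempool, then ask the PMD bound to dev_id to fill its per-device
 * private data for an AES-GCM AEAD transform.
 */
#include <rte_crypto.h>
#include <rte_cryptodev.h>
#include <rte_mempool.h>

/* Assumed IV placement inside the crypto op private area */
#define IV_OFFSET (sizeof(struct rte_crypto_op) + sizeof(struct rte_crypto_sym_op))

static struct rte_cryptodev_sym_session *
create_aes_gcm_session(uint8_t dev_id, struct rte_mempool *sess_mp,
		uint8_t *key, uint16_t key_len)
{
	struct rte_crypto_sym_xform aead_xform = {
		.type = RTE_CRYPTO_SYM_XFORM_AEAD,
		.next = NULL,
		.aead = {
			.op = RTE_CRYPTO_AEAD_OP_ENCRYPT,
			.algo = RTE_CRYPTO_AEAD_AES_GCM,
			.key = { .data = key, .length = key_len },
			.iv = { .offset = IV_OFFSET, .length = 12 },
			.digest_length = 16,
			.add_auth_data_length = 16,
		},
	};

	/* Generic session header, shared across devices */
	struct rte_cryptodev_sym_session *sess =
			rte_cryptodev_sym_session_create(sess_mp);
	if (sess == NULL)
		return NULL;

	/*
	 * Per-device private data; for a QAT device this reaches
	 * qat_crypto_sym_configure_session() shown above.
	 */
	if (rte_cryptodev_sym_session_init(dev_id, sess, &aead_xform,
			sess_mp) < 0) {
		rte_cryptodev_sym_session_free(sess);
		return NULL;
	}

	return sess;
}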