& ICP_QAT_FW_COMN_NEXT_ID_MASK) | \
((val) & ICP_QAT_FW_COMN_CURR_ID_MASK)) }
+/* Cipher slice selector values for the LA request header flags.
+ * Gen4 (QAT 2.0) devices route cipher requests to a specific HW slice;
+ * earlier generations use the legacy (0) encoding.
+ */
+#define ICP_QAT_FW_LA_USE_WIRELESS_SLICE_TYPE 2
+#define ICP_QAT_FW_LA_USE_UCS_SLICE_TYPE 1
+#define ICP_QAT_FW_LA_USE_LEGACY_SLICE_TYPE 0
+/* Position and width of the slice-type field inside serv_specif_flags */
+#define QAT_LA_SLICE_TYPE_BITPOS 14
+#define QAT_LA_SLICE_TYPE_MASK 0x3
+/* Write one of the *_SLICE_TYPE values above into @flags via QAT_FIELD_SET */
+#define ICP_QAT_FW_LA_SLICE_TYPE_SET(flags, val) \
+ QAT_FIELD_SET(flags, val, QAT_LA_SLICE_TYPE_BITPOS, \
+ QAT_LA_SLICE_TYPE_MASK)
+
+/* Cipher request parameters for the Gen4 (firmware 2.0) LA interface.
+ * Field order and sizes are firmware ABI — do not reorder or repack.
+ * NOTE(review): the spc_* fields presumably carry single-pass-crypto
+ * (combined cipher+auth) AAD and digest information — confirm against
+ * the QAT 2.0 firmware interface spec.
+ */
+struct icp_qat_fw_la_cipher_20_req_params {
+ uint32_t cipher_offset; /* byte offset of cipher region in the buffer */
+ uint32_t cipher_length; /* length in bytes of the cipher region */
+ union {
+ /* IV supplied inline (4 longwords) ... */
+ uint32_t cipher_IV_array[ICP_QAT_FW_NUM_LONGWORDS_4];
+ /* ... or by physical pointer */
+ struct {
+ uint64_t cipher_IV_ptr;
+ uint64_t resrvd1; /* reserved, keep zero */
+ } s;
+
+ } u;
+ uint32_t spc_aad_offset;
+ uint32_t spc_aad_sz;
+ uint64_t spc_aad_addr;
+ uint64_t spc_auth_res_addr;
+ uint8_t reserved[3]; /* padding to keep spc_auth_res_sz last */
+ uint8_t spc_auth_res_sz;
+};
+
#endif
{
struct qat_sym_dev_private *internals = dev->data->dev_private;
struct rte_crypto_cipher_xform *cipher_xform = NULL;
+ enum qat_device_gen qat_dev_gen =
+ internals->qat_dev->qat_dev_gen;
int ret;
/* Get cipher xform from crypto xform chain */
goto error_out;
}
session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
+ if (qat_dev_gen == QAT_GEN4) {
+ /* TODO: Filter WCP */
+ ICP_QAT_FW_LA_SLICE_TYPE_SET(
+ session->fw_req.comn_hdr.serv_specif_flags,
+ ICP_QAT_FW_LA_USE_UCS_SLICE_TYPE);
+ session->is_ucs = 1;
+ }
break;
case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
offsetof(struct qat_sym_session, cd);
session->min_qat_dev_gen = QAT_GEN1;
+ session->is_ucs = 0;
/* Get requested QAT command id */
qat_cmd_id = qat_get_cmd_id(xform);
uint32_t cipherkeylen)
{
struct icp_qat_hw_cipher_algo_blk *cipher;
+ struct icp_qat_hw_cipher_algo_blk20 *cipher20;
struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
qat_proto_flag =
qat_get_crypto_proto_flag(header->serv_specif_flags);
}
- cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
qat_sym_session_init_common_hdr(header, qat_proto_flag);
cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
+ cipher20 = (struct icp_qat_hw_cipher_algo_blk20 *)cdesc->cd_cur_ptr;
cipher->cipher_config.val =
ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
cdesc->qat_cipher_alg, key_convert,
cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
cipherkeylen + cipherkeylen;
+ } else if (cdesc->is_ucs) {
+ const uint8_t *final_key = cipherkey;
+
+ total_key_size = RTE_ALIGN_CEIL(cipherkeylen,
+ ICP_QAT_HW_AES_128_KEY_SZ);
+ cipher20->cipher_config.reserved[0] = 0;
+ cipher20->cipher_config.reserved[1] = 0;
+ cipher20->cipher_config.reserved[2] = 0;
+
+ rte_memcpy(cipher20->key, final_key, cipherkeylen);
+ cdesc->cd_cur_ptr +=
+ sizeof(struct icp_qat_hw_ucs_cipher_config) +
+ cipherkeylen;
} else {
memcpy(cipher->key, cipherkey, cipherkeylen);
cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
}
cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
+ cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
return 0;
}