Features
--------
-The DPAA2 PMD has support for:
+The DPAA2_SEC PMD has support for:
Cipher algorithms:
* ``RTE_CRYPTO_CIPHER_AES128_CBC``
* ``RTE_CRYPTO_CIPHER_AES192_CBC``
* ``RTE_CRYPTO_CIPHER_AES256_CBC``
+* ``RTE_CRYPTO_CIPHER_AES128_CTR``
+* ``RTE_CRYPTO_CIPHER_AES192_CTR``
+* ``RTE_CRYPTO_CIPHER_AES256_CTR``
Hash algorithms:
* ``RTE_CRYPTO_AUTH_SHA512_HMAC``
* ``RTE_CRYPTO_AUTH_MD5_HMAC``
+AEAD algorithms:
+
+* ``RTE_CRYPTO_AEAD_AES_GCM``
+
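+A minimal sketch of requesting AES-128-GCM from this PMD through the
+generic cryptodev API is shown below. The key bytes, IV offset, digest and
+AAD sizes are illustrative values within the ranges advertised by the
+driver, and the IV placement follows the common convention of keeping it
+in the private data area after the symmetric operation::
+
+   #include <rte_cryptodev.h>
+
+   /* One common choice: IV stored right after the sym op in the crypto op.
+    * The application must reserve this space as op private data.
+    */
+   #define IV_OFFSET (sizeof(struct rte_crypto_op) + \
+                      sizeof(struct rte_crypto_sym_op))
+
+   static uint8_t key[16];  /* 128-bit key, filled in by the application */
+
+   static struct rte_crypto_sym_xform aead_xform = {
+       .type = RTE_CRYPTO_SYM_XFORM_AEAD,
+       .aead = {
+           .op = RTE_CRYPTO_AEAD_OP_ENCRYPT,
+           .algo = RTE_CRYPTO_AEAD_AES_GCM,
+           .key = { .data = key, .length = sizeof(key) },
+           .iv = { .offset = IV_OFFSET, .length = 12 },
+           .digest_length = 16,
+           .aad_length = 16,
+       },
+   };
+
+The xform is then handed to the usual cryptodev session setup; the exact
+session-creation call depends on the DPDK release in use.
+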
Supported DPAA2 SoCs
--------------------
AES CBC (128) = Y
AES CBC (192) = Y
AES CBC (256) = Y
+AES CTR (128) = Y
+AES CTR (192) = Y
+AES CTR (256) = Y
3DES CBC = Y
;
SHA512 HMAC = Y
;
-; Supported AEAD algorithms of the 'openssl' crypto driver.
+; Supported AEAD algorithms of the 'dpaa2_sec' crypto driver.
;
[AEAD]
+AES GCM (128) = Y
+AES GCM (192) = Y
+AES GCM (256) = Y
necessary to use a combination of cipher and authentication
structures anymore.
+* **Updated dpaa2_sec crypto PMD.**
+
+ Added support for the AES-GCM and AES-CTR algorithms.
+
Resolved Issues
---------------
#define FSL_MC_DPSECI_DEVID 3
#define NO_PREFETCH 0
-#define TDES_CBC_IV_LEN 8
-#define AES_CBC_IV_LEN 16
-#define AES_CTR_IV_LEN 16
-#define AES_GCM_IV_LEN 12
/* FLE_POOL_NUM_BUFS is set as per the ipsec-secgw application */
#define FLE_POOL_NUM_BUFS 32000
#define FLE_POOL_BUF_SIZE 256
enum rta_sec_era rta_sec_era = RTA_SEC_ERA_8;
+static inline int
+build_authenc_gcm_fd(dpaa2_sec_session *sess,
+ struct rte_crypto_op *op,
+ struct qbman_fd *fd, uint16_t bpid)
+{
+ struct rte_crypto_sym_op *sym_op = op->sym;
+ struct ctxt_priv *priv = sess->ctxt;
+ struct qbman_fle *fle, *sge;
+ struct sec_flow_context *flc;
+ uint32_t auth_only_len = sess->ext_params.aead_ctxt.auth_only_len;
+ int icv_len = sess->digest_length, retval;
+ uint8_t *old_icv;
+ uint8_t *IV_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
+ sess->iv.offset);
+
+ PMD_INIT_FUNC_TRACE();
+
+ /* TODO we are using the first FLE entry to store Mbuf and session ctxt.
+ * Currently we do not know which FLE has the mbuf stored.
+ * So while retrieving we can go back 1 FLE from the FD address
+ * to get the mbuf address from the previous FLE.
+ * We can have a better approach to use the inline mbuf.
+ */
+ retval = rte_mempool_get(priv->fle_pool, (void **)(&fle));
+ if (retval) {
+ RTE_LOG(ERR, PMD, "Memory alloc failed for SGE\n");
+ return -1;
+ }
+ memset(fle, 0, FLE_POOL_BUF_SIZE);
+ DPAA2_SET_FLE_ADDR(fle, DPAA2_OP_VADDR_TO_IOVA(op));
+ DPAA2_FLE_SAVE_CTXT(fle, priv);
+ fle = fle + 1;
+ sge = fle + 2;
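+ /* Layout within the FLE pool buffer: entry 0 holds the op/ctxt
+ * back-pointers saved above, entry 1 is the output FLE, entry 2 the
+ * input FLE, and entries from 3 onwards are used for the
+ * scatter/gather entries built below.
+ */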
+ if (likely(bpid < MAX_BPID)) {
+ DPAA2_SET_FD_BPID(fd, bpid);
+ DPAA2_SET_FLE_BPID(fle, bpid);
+ DPAA2_SET_FLE_BPID(fle + 1, bpid);
+ DPAA2_SET_FLE_BPID(sge, bpid);
+ DPAA2_SET_FLE_BPID(sge + 1, bpid);
+ DPAA2_SET_FLE_BPID(sge + 2, bpid);
+ DPAA2_SET_FLE_BPID(sge + 3, bpid);
+ } else {
+ DPAA2_SET_FD_IVP(fd);
+ DPAA2_SET_FLE_IVP(fle);
+ DPAA2_SET_FLE_IVP((fle + 1));
+ DPAA2_SET_FLE_IVP(sge);
+ DPAA2_SET_FLE_IVP((sge + 1));
+ DPAA2_SET_FLE_IVP((sge + 2));
+ DPAA2_SET_FLE_IVP((sge + 3));
+ }
+
+ /* Save the shared descriptor */
+ flc = &priv->flc_desc[0].flc;
+ /* Configure FD as a FRAME LIST */
+ DPAA2_SET_FD_ADDR(fd, DPAA2_VADDR_TO_IOVA(fle));
+ DPAA2_SET_FD_COMPOUND_FMT(fd);
+ DPAA2_SET_FD_FLC(fd, DPAA2_VADDR_TO_IOVA(flc));
+
+ PMD_TX_LOG(DEBUG, "auth_off: 0x%x/length %d, digest-len=%d\n"
+ "iv-len=%d data_off: 0x%x\n",
+ sym_op->aead.data.offset,
+ sym_op->aead.data.length,
+ sess->digest_length,
+ sess->iv.length,
+ sym_op->m_src->data_off);
+
+ /* Configure Output FLE with Scatter/Gather Entry */
+ DPAA2_SET_FLE_ADDR(fle, DPAA2_VADDR_TO_IOVA(sge));
+ if (auth_only_len)
+ DPAA2_SET_FLE_INTERNAL_JD(fle, auth_only_len);
+ fle->length = (sess->dir == DIR_ENC) ?
+ (sym_op->aead.data.length + icv_len + auth_only_len) :
+ sym_op->aead.data.length + auth_only_len;
+
+ DPAA2_SET_FLE_SG_EXT(fle);
+
+ /* Configure Output SGE for Encap/Decap */
+ DPAA2_SET_FLE_ADDR(sge, DPAA2_MBUF_VADDR_TO_IOVA(sym_op->m_src));
+ DPAA2_SET_FLE_OFFSET(sge, sym_op->aead.data.offset +
+ sym_op->m_src->data_off - auth_only_len);
+ sge->length = sym_op->aead.data.length + auth_only_len;
+
+ if (sess->dir == DIR_ENC) {
+ sge++;
+ DPAA2_SET_FLE_ADDR(sge,
+ DPAA2_VADDR_TO_IOVA(sym_op->aead.digest.data));
+ sge->length = sess->digest_length;
+ DPAA2_SET_FD_LEN(fd, (sym_op->aead.data.length +
+ sess->iv.length + auth_only_len));
+ }
+ DPAA2_SET_FLE_FIN(sge);
+
+ sge++;
+ fle++;
+
+ /* Configure Input FLE with Scatter/Gather Entry */
+ DPAA2_SET_FLE_ADDR(fle, DPAA2_VADDR_TO_IOVA(sge));
+ DPAA2_SET_FLE_SG_EXT(fle);
+ DPAA2_SET_FLE_FIN(fle);
+ fle->length = (sess->dir == DIR_ENC) ?
+ (sym_op->aead.data.length + sess->iv.length + auth_only_len) :
+ (sym_op->aead.data.length + sess->iv.length + auth_only_len +
+ sess->digest_length);
+
+ /* Configure Input SGE for Encap/Decap */
+ DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(IV_ptr));
+ sge->length = sess->iv.length;
+ sge++;
+ if (auth_only_len) {
+ DPAA2_SET_FLE_ADDR(sge,
+ DPAA2_VADDR_TO_IOVA(sym_op->aead.aad.data));
+ sge->length = auth_only_len;
+ DPAA2_SET_FLE_BPID(sge, bpid);
+ sge++;
+ }
+
+ DPAA2_SET_FLE_ADDR(sge, DPAA2_MBUF_VADDR_TO_IOVA(sym_op->m_src));
+ DPAA2_SET_FLE_OFFSET(sge, sym_op->aead.data.offset +
+ sym_op->m_src->data_off);
+ sge->length = sym_op->aead.data.length;
+ if (sess->dir == DIR_DEC) {
+ sge++;
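+ /* Stash the received ICV in scratch space just past the last
+ * SGE and feed it to SEC as the final input entry so that it
+ * can be checked against the computed tag.
+ */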
+ old_icv = (uint8_t *)(sge + 1);
+ memcpy(old_icv, sym_op->aead.digest.data,
+ sess->digest_length);
+ memset(sym_op->aead.digest.data, 0, sess->digest_length);
+ DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(old_icv));
+ sge->length = sess->digest_length;
+ DPAA2_SET_FD_LEN(fd, (sym_op->aead.data.length +
+ sess->digest_length +
+ sess->iv.length +
+ auth_only_len));
+ }
+ DPAA2_SET_FLE_FIN(sge);
+
+ if (auth_only_len) {
+ DPAA2_SET_FLE_INTERNAL_JD(fle, auth_only_len);
+ DPAA2_SET_FD_INTERNAL_JD(fd, auth_only_len);
+ }
+
+ return 0;
+}
+
static inline int
build_authenc_fd(dpaa2_sec_session *sess,
struct rte_crypto_op *op,
case DPAA2_SEC_AUTH:
ret = build_auth_fd(sess, op, fd, bpid);
break;
+ case DPAA2_SEC_AEAD:
+ ret = build_authenc_gcm_fd(sess, op, fd, bpid);
+ break;
case DPAA2_SEC_CIPHER_HASH:
ret = build_authenc_fd(sess, op, fd, bpid);
break;
struct rte_crypto_sym_xform *xform,
dpaa2_sec_session *session)
{
- struct dpaa2_sec_cipher_ctxt *ctxt = &session->ext_params.cipher_ctxt;
struct dpaa2_sec_dev_private *dev_priv = dev->data->dev_private;
struct alginfo cipherdata;
int bufsize, i;
cipherdata.algtype = OP_ALG_ALGSEL_AES;
cipherdata.algmode = OP_ALG_AAI_CBC;
session->cipher_alg = RTE_CRYPTO_CIPHER_AES_CBC;
- ctxt->iv.length = AES_CBC_IV_LEN;
break;
case RTE_CRYPTO_CIPHER_3DES_CBC:
cipherdata.algtype = OP_ALG_ALGSEL_3DES;
cipherdata.algmode = OP_ALG_AAI_CBC;
session->cipher_alg = RTE_CRYPTO_CIPHER_3DES_CBC;
- ctxt->iv.length = TDES_CBC_IV_LEN;
break;
case RTE_CRYPTO_CIPHER_AES_CTR:
+ cipherdata.algtype = OP_ALG_ALGSEL_AES;
+ cipherdata.algmode = OP_ALG_AAI_CTR;
+ session->cipher_alg = RTE_CRYPTO_CIPHER_AES_CTR;
+ break;
case RTE_CRYPTO_CIPHER_3DES_CTR:
case RTE_CRYPTO_CIPHER_AES_ECB:
case RTE_CRYPTO_CIPHER_3DES_ECB:
DIR_ENC : DIR_DEC;
bufsize = cnstr_shdsc_blkcipher(priv->flc_desc[0].desc, 1, 0,
- &cipherdata, NULL, ctxt->iv.length,
- session->dir);
+ &cipherdata, NULL, session->iv.length,
+ session->dir);
if (bufsize < 0) {
RTE_LOG(ERR, PMD, "Crypto: Descriptor build failed\n");
goto error_out;
struct rte_crypto_sym_xform *xform,
dpaa2_sec_session *session)
{
- struct dpaa2_sec_auth_ctxt *ctxt = &session->ext_params.auth_ctxt;
struct dpaa2_sec_dev_private *dev_priv = dev->data->dev_private;
struct alginfo authdata;
unsigned int bufsize, i;
bufsize = cnstr_shdsc_hmac(priv->flc_desc[DESC_INITFINAL].desc,
1, 0, &authdata, !session->dir,
- ctxt->trunc_len);
+ session->digest_length);
flc->word1_sdl = (uint8_t)bufsize;
flc->word2_rflc_31_0 = lower_32_bits(
session->ctxt = priv;
for (i = 0; i < bufsize; i++)
PMD_DRV_LOG(DEBUG, "DESC[%d]:0x%x\n",
- i, priv->flc_desc[0].desc[i]);
+ i, priv->flc_desc[DESC_INITFINAL].desc[i]);
+
return 0;
dpaa2_sec_aead_init(struct rte_cryptodev *dev,
struct rte_crypto_sym_xform *xform,
dpaa2_sec_session *session)
+{
+ struct dpaa2_sec_aead_ctxt *ctxt = &session->ext_params.aead_ctxt;
+ struct dpaa2_sec_dev_private *dev_priv = dev->data->dev_private;
+ struct alginfo aeaddata;
+ unsigned int bufsize, i;
+ struct ctxt_priv *priv;
+ struct sec_flow_context *flc;
+ struct rte_crypto_aead_xform *aead_xform = &xform->aead;
+ int err;
+
+ PMD_INIT_FUNC_TRACE();
+
+ /* Set IV parameters */
+ session->iv.offset = aead_xform->iv.offset;
+ session->iv.length = aead_xform->iv.length;
+ session->ctxt_type = DPAA2_SEC_AEAD;
+
+ /* For SEC AEAD only one descriptor is required */
+ priv = (struct ctxt_priv *)rte_zmalloc(NULL,
+ sizeof(struct ctxt_priv) + sizeof(struct sec_flc_desc),
+ RTE_CACHE_LINE_SIZE);
+ if (priv == NULL) {
+ RTE_LOG(ERR, PMD, "No Memory for priv CTXT");
+ return -1;
+ }
+
+ priv->fle_pool = dev_priv->fle_pool;
+ flc = &priv->flc_desc[0].flc;
+
+ session->aead_key.data = rte_zmalloc(NULL, aead_xform->key.length,
+ RTE_CACHE_LINE_SIZE);
+ if (session->aead_key.data == NULL && aead_xform->key.length > 0) {
+ RTE_LOG(ERR, PMD, "No Memory for aead key");
+ rte_free(priv);
+ return -1;
+ }
+ memcpy(session->aead_key.data, aead_xform->key.data,
+ aead_xform->key.length);
+
+ session->digest_length = aead_xform->digest_length;
+ session->aead_key.length = aead_xform->key.length;
+ ctxt->auth_only_len = aead_xform->aad_length;
+
+ aeaddata.key = (uint64_t)session->aead_key.data;
+ aeaddata.keylen = session->aead_key.length;
+ aeaddata.key_enc_flags = 0;
+ aeaddata.key_type = RTA_DATA_IMM;
+
+ switch (aead_xform->algo) {
+ case RTE_CRYPTO_AEAD_AES_GCM:
+ aeaddata.algtype = OP_ALG_ALGSEL_AES;
+ aeaddata.algmode = OP_ALG_AAI_GCM;
+ session->cipher_alg = RTE_CRYPTO_AEAD_AES_GCM;
+ break;
+ case RTE_CRYPTO_AEAD_AES_CCM:
+ RTE_LOG(ERR, PMD, "Crypto: Unsupported AEAD alg %u",
+ aead_xform->algo);
+ goto error_out;
+ default:
+ RTE_LOG(ERR, PMD, "Crypto: Undefined AEAD specified %u\n",
+ aead_xform->algo);
+ goto error_out;
+ }
+ session->dir = (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) ?
+ DIR_ENC : DIR_DEC;
+
+ priv->flc_desc[0].desc[0] = aeaddata.keylen;
+ err = rta_inline_query(IPSEC_AUTH_VAR_AES_DEC_BASE_DESC_LEN,
+ MIN_JOB_DESC_SIZE,
+ (unsigned int *)priv->flc_desc[0].desc,
+ &priv->flc_desc[0].desc[1], 1);
+
+ if (err < 0) {
+ PMD_DRV_LOG(ERR, "Crypto: Incorrect key lengths");
+ goto error_out;
+ }
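+ /* rta_inline_query() sets bit 0 of desc[1] when the key is short
+ * enough to be embedded directly in the shared descriptor;
+ * otherwise the key is referenced by pointer.
+ */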
+ if (priv->flc_desc[0].desc[1] & 1) {
+ aeaddata.key_type = RTA_DATA_IMM;
+ } else {
+ aeaddata.key = DPAA2_VADDR_TO_IOVA(aeaddata.key);
+ aeaddata.key_type = RTA_DATA_PTR;
+ }
+ priv->flc_desc[0].desc[0] = 0;
+ priv->flc_desc[0].desc[1] = 0;
+
+ if (session->dir == DIR_ENC)
+ bufsize = cnstr_shdsc_gcm_encap(
+ priv->flc_desc[0].desc, 1, 0,
+ &aeaddata, session->iv.length,
+ session->digest_length);
+ else
+ bufsize = cnstr_shdsc_gcm_decap(
+ priv->flc_desc[0].desc, 1, 0,
+ &aeaddata, session->iv.length,
+ session->digest_length);
+ flc->word1_sdl = (uint8_t)bufsize;
+ flc->word2_rflc_31_0 = lower_32_bits(
+ (uint64_t)&(((struct dpaa2_sec_qp *)
+ dev->data->queue_pairs[0])->rx_vq));
+ flc->word3_rflc_63_32 = upper_32_bits(
+ (uint64_t)&(((struct dpaa2_sec_qp *)
+ dev->data->queue_pairs[0])->rx_vq));
+ session->ctxt = priv;
+ for (i = 0; i < bufsize; i++)
+ PMD_DRV_LOG(DEBUG, "DESC[%d]:0x%x\n",
+ i, priv->flc_desc[0].desc[i]);
+
+ return 0;
+
+error_out:
+ rte_free(session->aead_key.data);
+ rte_free(priv);
+ return -1;
+}
+
+static int
+dpaa2_sec_aead_chain_init(struct rte_cryptodev *dev,
+ struct rte_crypto_sym_xform *xform,
+ dpaa2_sec_session *session)
{
struct dpaa2_sec_aead_ctxt *ctxt = &session->ext_params.aead_ctxt;
struct dpaa2_sec_dev_private *dev_priv = dev->data->dev_private;
memcpy(session->auth_key.data, auth_xform->key.data,
auth_xform->key.length);
- ctxt->trunc_len = auth_xform->digest_length;
authdata.key = (uint64_t)session->auth_key.data;
authdata.keylen = session->auth_key.length;
authdata.key_enc_flags = 0;
cipherdata.algtype = OP_ALG_ALGSEL_AES;
cipherdata.algmode = OP_ALG_AAI_CBC;
session->cipher_alg = RTE_CRYPTO_CIPHER_AES_CBC;
- ctxt->iv.length = AES_CBC_IV_LEN;
break;
case RTE_CRYPTO_CIPHER_3DES_CBC:
cipherdata.algtype = OP_ALG_ALGSEL_3DES;
cipherdata.algmode = OP_ALG_AAI_CBC;
session->cipher_alg = RTE_CRYPTO_CIPHER_3DES_CBC;
- ctxt->iv.length = TDES_CBC_IV_LEN;
+ break;
+ case RTE_CRYPTO_CIPHER_AES_CTR:
+ cipherdata.algtype = OP_ALG_ALGSEL_AES;
+ cipherdata.algmode = OP_ALG_AAI_CTR;
+ session->cipher_alg = RTE_CRYPTO_CIPHER_AES_CTR;
break;
case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
case RTE_CRYPTO_CIPHER_NULL:
case RTE_CRYPTO_CIPHER_3DES_ECB:
case RTE_CRYPTO_CIPHER_AES_ECB:
- case RTE_CRYPTO_CIPHER_AES_CTR:
case RTE_CRYPTO_CIPHER_KASUMI_F8:
RTE_LOG(ERR, PMD, "Crypto: Unsupported Cipher alg %u",
cipher_xform->algo);
if (session->ctxt_type == DPAA2_SEC_CIPHER_HASH) {
bufsize = cnstr_shdsc_authenc(priv->flc_desc[0].desc, 1,
0, &cipherdata, &authdata,
- ctxt->iv.length,
+ session->iv.length,
ctxt->auth_only_len,
- ctxt->trunc_len,
+ session->digest_length,
session->dir);
} else {
RTE_LOG(ERR, PMD, "Hash before cipher not supported");
session->ctxt = priv;
for (i = 0; i < bufsize; i++)
PMD_DRV_LOG(DEBUG, "DESC[%d]:0x%x\n",
- i, priv->flc_desc[DESC_INITFINAL].desc[i]);
-
+ i, priv->flc_desc[0].desc[i]);
return 0;
} else if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
session->ext_params.aead_ctxt.auth_cipher_text = true;
- dpaa2_sec_aead_init(dev, xform, session);
+ dpaa2_sec_aead_chain_init(dev, xform, session);
/* Authenticate then Cipher */
} else if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
session->ext_params.aead_ctxt.auth_cipher_text = false;
+ dpaa2_sec_aead_chain_init(dev, xform, session);
+
+ /* AEAD operations such as AES-GCM */
+ } else if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD &&
+ xform->next == NULL) {
dpaa2_sec_aead_init(dev, xform, session);
+
} else {
RTE_LOG(ERR, PMD, "Invalid crypto type");
return NULL;
{
PMD_INIT_FUNC_TRACE();
- return -ENOTSUP;
+ return 0;
}
static int
}
static int
-cryptodev_dpaa2_sec_probe(struct rte_dpaa2_driver *dpaa2_drv __rte_unused,
+cryptodev_dpaa2_sec_probe(struct rte_dpaa2_driver *dpaa2_drv,
struct rte_dpaa2_device *dpaa2_dev)
{
struct rte_cryptodev *cryptodev;
dpaa2_dev->cryptodev = cryptodev;
cryptodev->device = &dpaa2_dev->device;
+ cryptodev->device->driver = &dpaa2_drv->driver;
/* init user callbacks */
TAILQ_INIT(&(cryptodev->link_intr_cbs));
DPAA2_SEC_NONE, /*!< No Cipher operations*/
DPAA2_SEC_CIPHER,/*!< CIPHER operations */
DPAA2_SEC_AUTH, /*!< Authentication Operations */
+ DPAA2_SEC_AEAD, /*!< AEAD (AES-GCM/CCM) type operations */
DPAA2_SEC_CIPHER_HASH, /*!< Authenticated Encryption with
* associated data
*/
DPAA2_SEC_MAX
};
-struct dpaa2_sec_cipher_ctxt {
- struct {
- uint8_t *data;
- uint16_t length;
- } iv; /**< Initialisation vector parameters */
- uint8_t *init_counter; /*!< Set initial counter for CTR mode */
-};
-
-struct dpaa2_sec_auth_ctxt {
- uint8_t trunc_len; /*!< Length for output ICV, should
- * be 0 if no truncation required
- */
-};
-
struct dpaa2_sec_aead_ctxt {
- struct {
- uint8_t *data;
- uint16_t length;
- } iv; /**< Initialisation vector parameters */
uint16_t auth_only_len; /*!< Length of data for Auth only */
uint8_t auth_cipher_text; /**< Authenticate/cipher ordering */
- uint8_t trunc_len; /*!< Length for output ICV, should
- * be 0 if no truncation required
- */
};
typedef struct dpaa2_sec_session_entry {
uint8_t dir; /*!< Operation Direction */
enum rte_crypto_cipher_algorithm cipher_alg; /*!< Cipher Algorithm*/
enum rte_crypto_auth_algorithm auth_alg; /*!< Authentication Algorithm*/
- struct {
- uint8_t *data; /**< pointer to key data */
- size_t length; /**< key length in bytes */
- } cipher_key;
- struct {
- uint8_t *data; /**< pointer to key data */
- size_t length; /**< key length in bytes */
- } auth_key;
+ union {
+ struct {
+ uint8_t *data; /**< pointer to key data */
+ size_t length; /**< key length in bytes */
+ } aead_key;
+ struct {
+ struct {
+ uint8_t *data; /**< pointer to key data */
+ size_t length; /**< key length in bytes */
+ } cipher_key;
+ struct {
+ uint8_t *data; /**< pointer to key data */
+ size_t length; /**< key length in bytes */
+ } auth_key;
+ };
+ };
struct {
uint16_t length; /**< IV length in bytes */
uint16_t offset; /**< IV offset in bytes */
uint16_t digest_length;
uint8_t status;
union {
- struct dpaa2_sec_cipher_ctxt cipher_ctxt;
- struct dpaa2_sec_auth_ctxt auth_ctxt;
struct dpaa2_sec_aead_ctxt aead_ctxt;
} ext_params;
} dpaa2_sec_session;
}, }
}, }
},
+ { /* AES GCM */
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+ {.sym = {
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,
+ {.aead = {
+ .algo = RTE_CRYPTO_AEAD_AES_GCM,
+ .block_size = 16,
+ .key_size = {
+ .min = 16,
+ .max = 32,
+ .increment = 8
+ },
+ .digest_size = {
+ .min = 8,
+ .max = 16,
+ .increment = 4
+ },
+ .aad_size = {
+ .min = 0,
+ .max = 240,
+ .increment = 1
+ },
+ .iv_size = {
+ .min = 12,
+ .max = 12,
+ .increment = 0
+ },
+ }, }
+ }, }
+ },
{ /* AES CBC */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
}, }
}, }
},
+ { /* AES CTR */
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+ {.sym = {
+ .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+ {.cipher = {
+ .algo = RTE_CRYPTO_CIPHER_AES_CTR,
+ .block_size = 16,
+ .key_size = {
+ .min = 16,
+ .max = 32,
+ .increment = 8
+ },
+ .iv_size = {
+ .min = 16,
+ .max = 16,
+ .increment = 0
+ },
+ }, }
+ }, }
+ },
{ /* 3DES CBC */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
return TEST_SUCCESS;
}
+static int
+test_authonly_dpaa2_sec_all(void)
+{
+ struct crypto_testsuite_params *ts_params = &testsuite_params;
+ int status;
+
+ status = test_blockcipher_all_tests(ts_params->mbuf_pool,
+ ts_params->op_mpool, ts_params->valid_devs[0],
+ RTE_CRYPTODEV_DPAA2_SEC_PMD,
+ BLKCIPHER_AUTHONLY_TYPE);
+
+ TEST_ASSERT_EQUAL(status, 0, "Test failed");
+
+ return TEST_SUCCESS;
+}
+
static int
test_authonly_openssl_all(void)
{
.teardown = testsuite_teardown,
.unit_test_cases = {
TEST_CASE_ST(ut_setup, ut_teardown,
- test_device_configure_invalid_dev_id),
+ test_device_configure_invalid_dev_id),
TEST_CASE_ST(ut_setup, ut_teardown,
- test_multi_session),
+ test_multi_session),
TEST_CASE_ST(ut_setup, ut_teardown,
- test_AES_chain_dpaa2_sec_all),
+ test_AES_chain_dpaa2_sec_all),
TEST_CASE_ST(ut_setup, ut_teardown,
- test_3DES_chain_dpaa2_sec_all),
+ test_3DES_chain_dpaa2_sec_all),
TEST_CASE_ST(ut_setup, ut_teardown,
- test_AES_cipheronly_dpaa2_sec_all),
+ test_AES_cipheronly_dpaa2_sec_all),
TEST_CASE_ST(ut_setup, ut_teardown,
- test_3DES_cipheronly_dpaa2_sec_all),
+ test_3DES_cipheronly_dpaa2_sec_all),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_authonly_dpaa2_sec_all),
- /** HMAC_MD5 Authentication */
+ /** AES GCM Authenticated Encryption */
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_authenticated_encryption_test_case_1),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_authenticated_encryption_test_case_2),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_authenticated_encryption_test_case_3),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_authenticated_encryption_test_case_4),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_authenticated_encryption_test_case_5),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_authenticated_encryption_test_case_6),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_authenticated_encryption_test_case_7),
+
+ /** AES GCM Authenticated Decryption */
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_authenticated_decryption_test_case_1),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_authenticated_decryption_test_case_2),
TEST_CASE_ST(ut_setup, ut_teardown,
- test_MD5_HMAC_generate_case_1),
+ test_mb_AES_GCM_authenticated_decryption_test_case_3),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_authenticated_decryption_test_case_4),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_authenticated_decryption_test_case_5),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_authenticated_decryption_test_case_6),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_authenticated_decryption_test_case_7),
+
+ /** AES GCM Authenticated Encryption 256 bits key */
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_auth_encryption_test_case_256_1),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_auth_encryption_test_case_256_2),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_auth_encryption_test_case_256_3),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_auth_encryption_test_case_256_4),
TEST_CASE_ST(ut_setup, ut_teardown,
- test_MD5_HMAC_verify_case_1),
+ test_mb_AES_GCM_auth_encryption_test_case_256_5),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_auth_encryption_test_case_256_6),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_auth_encryption_test_case_256_7),
+
+ /** AES GCM Authenticated Decryption 256 bits key */
TEST_CASE_ST(ut_setup, ut_teardown,
- test_MD5_HMAC_generate_case_2),
+ test_mb_AES_GCM_auth_decryption_test_case_256_1),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_auth_decryption_test_case_256_2),
TEST_CASE_ST(ut_setup, ut_teardown,
- test_MD5_HMAC_verify_case_2),
+ test_mb_AES_GCM_auth_decryption_test_case_256_3),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_auth_decryption_test_case_256_4),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_auth_decryption_test_case_256_5),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_auth_decryption_test_case_256_6),
+ TEST_CASE_ST(ut_setup, ut_teardown,
+ test_mb_AES_GCM_auth_decryption_test_case_256_7),
TEST_CASES_END() /**< NULL terminate unit test array */
}
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_MB |
BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-128-CTR HMAC-SHA1 Decryption Digest "
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_MB |
BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-192-CTR XCBC Encryption Digest",
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_MB |
BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-256-CTR HMAC-SHA1 Decryption Digest "
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_MB |
BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-128-CBC HMAC-SHA1 Encryption Digest",
BLOCKCIPHER_TEST_TARGET_PMD_MB |
BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-128-CBC HMAC-SHA1 Encryption Digest "
BLOCKCIPHER_TEST_TARGET_PMD_MB |
BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-128-CBC HMAC-SHA1 Decryption Digest "
BLOCKCIPHER_TEST_TARGET_PMD_MB |
BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-128-CBC HMAC-SHA256 Encryption Digest "
BLOCKCIPHER_TEST_TARGET_PMD_MB |
BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-128-CBC HMAC-SHA256 Decryption Digest "
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_MB |
BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-128-CBC HMAC-SHA512 Encryption Digest "
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_MB |
BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-128-CBC HMAC-SHA512 Decryption Digest "
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_MB |
BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-128-CBC HMAC-SHA224 Decryption Digest "
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_MB |
BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-128-CBC HMAC-SHA384 Encryption Digest",
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_MB |
BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-128-CBC HMAC-SHA384 Decryption Digest "
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_MB |
BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-128-CBC HMAC-SHA1 Encryption Digest "
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-128-CBC Decryption",
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-192-CBC Encryption",
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-192-CBC Encryption Scater gather",
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-256-CBC Encryption",
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-256-CBC Decryption",
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-256-CBC OOP Encryption",
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-128-CTR Decryption",
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-192-CTR Encryption",
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-192-CTR Decryption",
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-256-CTR Encryption",
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "AES-256-CTR Decryption",
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_QAT |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
};
tdata->auth_key.len);
switch (cryptodev_type) {
+ case RTE_CRYPTODEV_DPAA2_SEC_PMD:
case RTE_CRYPTODEV_QAT_SYM_PMD:
case RTE_CRYPTODEV_OPENSSL_PMD:
case RTE_CRYPTODEV_ARMV8_PMD: /* Fall through */
.test_data = &triple_des128cbc_hmac_sha1_test_vector,
.op_mask = BLOCKCIPHER_TEST_OP_ENC_AUTH_GEN,
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
- BLOCKCIPHER_TEST_TARGET_PMD_QAT
+ BLOCKCIPHER_TEST_TARGET_PMD_QAT |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "3DES-128-CBC HMAC-SHA1 Decryption Digest Verify",
.test_data = &triple_des128cbc_hmac_sha1_test_vector,
.op_mask = BLOCKCIPHER_TEST_OP_AUTH_VERIFY_DEC,
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
- BLOCKCIPHER_TEST_TARGET_PMD_QAT
+ BLOCKCIPHER_TEST_TARGET_PMD_QAT |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "3DES-128-CBC SHA1 Encryption Digest",
.test_data = &triple_des192cbc_hmac_sha1_test_vector,
.op_mask = BLOCKCIPHER_TEST_OP_ENC_AUTH_GEN,
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
- BLOCKCIPHER_TEST_TARGET_PMD_QAT
+ BLOCKCIPHER_TEST_TARGET_PMD_QAT |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "3DES-192-CBC HMAC-SHA1 Decryption Digest Verify",
.test_data = &triple_des192cbc_hmac_sha1_test_vector,
.op_mask = BLOCKCIPHER_TEST_OP_AUTH_VERIFY_DEC,
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
- BLOCKCIPHER_TEST_TARGET_PMD_QAT
+ BLOCKCIPHER_TEST_TARGET_PMD_QAT |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "3DES-192-CBC SHA1 Encryption Digest",
.test_data = &triple_des128cbc_test_vector,
.op_mask = BLOCKCIPHER_TEST_OP_ENCRYPT,
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
- BLOCKCIPHER_TEST_TARGET_PMD_QAT
+ BLOCKCIPHER_TEST_TARGET_PMD_QAT |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "3DES-128-CBC Decryption",
.test_data = &triple_des128cbc_test_vector,
.op_mask = BLOCKCIPHER_TEST_OP_DECRYPT,
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
- BLOCKCIPHER_TEST_TARGET_PMD_QAT
+ BLOCKCIPHER_TEST_TARGET_PMD_QAT |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "3DES-192-CBC Encryption",
.test_data = &triple_des192cbc_test_vector,
.op_mask = BLOCKCIPHER_TEST_OP_ENCRYPT,
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
- BLOCKCIPHER_TEST_TARGET_PMD_QAT
+ BLOCKCIPHER_TEST_TARGET_PMD_QAT |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "3DES-192-CBC Decryption",
.test_data = &triple_des192cbc_test_vector,
.op_mask = BLOCKCIPHER_TEST_OP_DECRYPT,
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
- BLOCKCIPHER_TEST_TARGET_PMD_QAT
+ BLOCKCIPHER_TEST_TARGET_PMD_QAT |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "3DES-128-CTR Encryption",
.op_mask = BLOCKCIPHER_TEST_OP_AUTH_GEN,
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "HMAC-MD5 Digest Verify",
.op_mask = BLOCKCIPHER_TEST_OP_AUTH_VERIFY,
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "SHA1 Digest",
.op_mask = BLOCKCIPHER_TEST_OP_AUTH_GEN,
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "HMAC-SHA1 Digest Verify",
.op_mask = BLOCKCIPHER_TEST_OP_AUTH_VERIFY,
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "SHA224 Digest",
.op_mask = BLOCKCIPHER_TEST_OP_AUTH_GEN,
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "HMAC-SHA224 Digest Verify",
.op_mask = BLOCKCIPHER_TEST_OP_AUTH_VERIFY,
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "SHA256 Digest",
.op_mask = BLOCKCIPHER_TEST_OP_AUTH_GEN,
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "HMAC-SHA256 Digest Verify",
.op_mask = BLOCKCIPHER_TEST_OP_AUTH_VERIFY,
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "SHA384 Digest",
.op_mask = BLOCKCIPHER_TEST_OP_AUTH_GEN,
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "HMAC-SHA384 Digest Verify",
.op_mask = BLOCKCIPHER_TEST_OP_AUTH_VERIFY,
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "SHA512 Digest",
.op_mask = BLOCKCIPHER_TEST_OP_AUTH_GEN,
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
{
.test_descr = "HMAC-SHA512 Digest Verify",
.op_mask = BLOCKCIPHER_TEST_OP_AUTH_VERIFY,
.pmd_mask = BLOCKCIPHER_TEST_TARGET_PMD_OPENSSL |
BLOCKCIPHER_TEST_TARGET_PMD_MB |
- BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER
+ BLOCKCIPHER_TEST_TARGET_PMD_SCHEDULER |
+ BLOCKCIPHER_TEST_TARGET_PMD_DPAA2_SEC
},
};