1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2018 Advanced Micro Devices, Inc. All rights reserved.
10 #include <sys/queue.h>
11 #include <sys/types.h>
13 #include <openssl/sha.h>
14 #include <openssl/cmac.h> /*sub key apis*/
15 #include <openssl/evp.h> /*sub key apis*/
17 #include <rte_hexdump.h>
18 #include <rte_memzone.h>
19 #include <rte_malloc.h>
20 #include <rte_memory.h>
21 #include <rte_spinlock.h>
22 #include <rte_string_fns.h>
23 #include <rte_cryptodev_pmd.h>
26 #include "ccp_crypto.h"
28 #include "ccp_pmd_private.h"
30 /* SHA initial context values */
31 static uint32_t ccp_sha1_init[SHA_COMMON_DIGEST_SIZE / sizeof(uint32_t)] = {
/*
 * Classify an rte_crypto symmetric xform chain into a CCP command order.
 *
 * Visible mapping: AUTH followed by CIPHER -> HASH_CIPHER; CIPHER alone ->
 * CIPHER; CIPHER followed by AUTH -> CIPHER_HASH; AEAD -> COMBINED.
 * Anything else falls back to `res` (CCP_CMD_NOT_SUPPORTED); the AUTH-only
 * return path is in elided lines — presumably CCP_CMD_AUTH, verify upstream.
 */
38 static enum ccp_cmd_order
39 ccp_get_cmd_id(const struct rte_crypto_sym_xform *xform)
41 enum ccp_cmd_order res = CCP_CMD_NOT_SUPPORTED;
45 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
46 if (xform->next == NULL)
48 else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
49 return CCP_CMD_HASH_CIPHER;
51 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
52 if (xform->next == NULL)
53 return CCP_CMD_CIPHER;
54 else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
55 return CCP_CMD_CIPHER_HASH;
57 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD)
58 return CCP_CMD_COMBINED;
62 /* partial hash using openssl */
/*
 * Run a single SHA-1 compression over one 64-byte block of @data_in using
 * OpenSSL's low-level SHA1_Transform, then copy the raw internal state
 * (SHA_DIGEST_LENGTH bytes, host byte order — NOT a finalized digest) into
 * @data_out.  Used to precompute HMAC inner/outer pad states for the CCP.
 * The SHA_CTX declaration/init is in elided lines.
 */
63 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
69 SHA1_Transform(&ctx, data_in);
70 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
/*
 * Precompute the HMAC inner (ipad) and outer (opad) partial-hash states and
 * store them into sess->auth.pre_compute: PHash(ipad) at offset 0, PHash(opad)
 * at offset sess->auth.ctx_len.  Word order is reversed on store
 * (hash_temp32[count - 1 - i]) — presumably the endianness/layout the CCP
 * SHA engine expects; confirm against the hardware spec.
 * Only the SHA1-HMAC case is visible here; other algo cases are elided.
 */
74 static int generate_partial_hash(struct ccp_session *sess)
77 uint8_t ipad[sess->auth.block_size];
78 uint8_t opad[sess->auth.block_size];
79 uint8_t *ipad_t, *opad_t;
80 uint32_t *hash_value_be32, hash_temp32[8];
/* key buffer doubles as both pad sources; it is block_size long (see below) */
83 opad_t = ipad_t = (uint8_t *)sess->auth.key;
85 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute);
87 /* considering key size is always equal to block size of algorithm */
88 for (i = 0; i < sess->auth.block_size; i++) {
89 ipad[i] = (ipad_t[i] ^ HMAC_IPAD_VALUE);
90 opad[i] = (opad_t[i] ^ HMAC_OPAD_VALUE);
93 switch (sess->auth.algo) {
94 case CCP_AUTH_ALGO_SHA1_HMAC:
/* count = digest size in 32-bit words */
95 count = SHA1_DIGEST_SIZE >> 2;
97 if (partial_hash_sha1(ipad, (uint8_t *)hash_temp32))
/* store inner-pad state word-reversed */
99 for (i = 0; i < count; i++, hash_value_be32++)
100 *hash_value_be32 = hash_temp32[count - 1 - i];
/* outer-pad state goes one ctx_len further into pre_compute */
102 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
103 + sess->auth.ctx_len);
104 if (partial_hash_sha1(opad, (uint8_t *)hash_temp32))
106 for (i = 0; i < count; i++, hash_value_be32++)
107 *hash_value_be32 = hash_temp32[count - 1 - i];
110 CCP_LOG_ERR("Invalid auth algo");
115 /* prepare temporary keys K1 and K2 */
/*
 * CMAC subkey derivation step (NIST SP 800-38B): k = l << 1, and if the MSB
 * of l was set, XOR the last byte with the Rb constant — 0x87 for a 16-byte
 * (AES) block, 0x1b for an 8-byte block.  @bl is the block length in bytes.
 * The MSB test on l[0] is in elided lines.
 */
116 static void prepare_key(unsigned char *k, unsigned char *l, int bl)
119 /* Shift block to left, including carry */
120 for (i = 0; i < bl; i++) {
/* propagate the carry bit from the next byte */
122 if (i < bl - 1 && l[i + 1] & 0x80)
125 /* If MSB set fixup with R */
127 k[bl - 1] ^= bl == 16 ? 0x87 : 0x1b;
130 /* subkeys K1 and K2 generation for CMAC */
/*
 * Derive the AES-CMAC subkeys K1/K2 via OpenSSL:
 *   1. Encrypt a zero block with the session key (AES-CBC with zero IV,
 *      padding disabled — equivalent to one raw ECB block) to get L.
 *   2. K1 = prepare_key(L), K2 = prepare_key(K1) per SP 800-38B.
 *   3. Store K1 and K2 byte-reversed into sess->auth.pre_compute, one per
 *      CCP_SB_BYTES storage-block slot (ccp_ctx walks backwards from the
 *      end of each slot — presumably the layout the CCP LSB expects).
 * Returns 0 on success; error paths free the EVP ctx (partially elided).
 */
132 generate_cmac_subkeys(struct ccp_session *sess)
134 const EVP_CIPHER *algo;
136 unsigned char *ccp_ctx;
139 unsigned char zero_iv[AES_BLOCK_SIZE] = {0};
140 unsigned char dst[2 * AES_BLOCK_SIZE] = {0};
141 unsigned char k1[AES_BLOCK_SIZE] = {0};
142 unsigned char k2[AES_BLOCK_SIZE] = {0};
/* pick the EVP cipher matching the session's AES key size */
144 if (sess->auth.ut.aes_type == CCP_AES_TYPE_128)
145 algo = EVP_aes_128_cbc();
146 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_192)
147 algo = EVP_aes_192_cbc();
148 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_256)
149 algo = EVP_aes_256_cbc();
151 CCP_LOG_ERR("Invalid CMAC type length");
155 ctx = EVP_CIPHER_CTX_new();
157 CCP_LOG_ERR("ctx creation failed");
160 if (EVP_EncryptInit(ctx, algo, (unsigned char *)sess->auth.key,
161 (unsigned char *)zero_iv) <= 0)
162 goto key_generate_err;
/* no PKCS padding: we encrypt exactly one block */
163 if (EVP_CIPHER_CTX_set_padding(ctx, 0) <= 0)
164 goto key_generate_err;
165 if (EVP_EncryptUpdate(ctx, dst, &dstlen, zero_iv,
166 AES_BLOCK_SIZE) <= 0)
167 goto key_generate_err;
168 if (EVP_EncryptFinal_ex(ctx, dst + dstlen, &totlen) <= 0)
169 goto key_generate_err;
171 memset(sess->auth.pre_compute, 0, CCP_SB_BYTES * 2);
/* K1 -> first SB slot, stored byte-reversed from the slot's end */
173 ccp_ctx = (unsigned char *)(sess->auth.pre_compute + CCP_SB_BYTES - 1);
174 prepare_key(k1, dst, AES_BLOCK_SIZE);
175 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--)
/* K2 -> second SB slot, same reversed layout */
178 ccp_ctx = (unsigned char *)(sess->auth.pre_compute +
179 (2 * CCP_SB_BYTES) - 1);
180 prepare_key(k2, k1, AES_BLOCK_SIZE);
181 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--)
184 EVP_CIPHER_CTX_free(ctx);
189 CCP_LOG_ERR("CMAC Init failed");
193 /* configure session */
/*
 * Fill the cipher half of a CCP session from an rte_crypto cipher xform:
 * direction, key, IV offset/length, algorithm/engine selection, key-size
 * based type selection, and a byte-reversed copy of the key into key_ccp
 * (the layout the CCP engine consumes).  Finally caches the physical
 * addresses of the nonce and the CCP-format key.
 * Returns 0 on success; error paths (elided) return negative on bad algo,
 * key length, or engine.
 */
195 ccp_configure_session_cipher(struct ccp_session *sess,
196 const struct rte_crypto_sym_xform *xform)
198 const struct rte_crypto_cipher_xform *cipher_xform = NULL;
201 cipher_xform = &xform->cipher;
203 /* set cipher direction */
204 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
205 sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
207 sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;
210 sess->cipher.key_length = cipher_xform->key.length;
211 rte_memcpy(sess->cipher.key, cipher_xform->key.data,
212 cipher_xform->key.length);
214 /* set iv parameters */
215 sess->iv.offset = cipher_xform->iv.offset;
216 sess->iv.length = cipher_xform->iv.length;
218 switch (cipher_xform->algo) {
219 case RTE_CRYPTO_CIPHER_AES_CTR:
220 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CTR;
221 sess->cipher.um.aes_mode = CCP_AES_MODE_CTR;
222 sess->cipher.engine = CCP_ENGINE_AES;
224 case RTE_CRYPTO_CIPHER_AES_ECB:
/* NOTE(review): algo enum reuses CBC for ECB; the aes_mode field below
 * carries the real mode — looks intentional, but confirm no CBC-only
 * handling keys off sess->cipher.algo elsewhere. */
225 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
226 sess->cipher.um.aes_mode = CCP_AES_MODE_ECB;
227 sess->cipher.engine = CCP_ENGINE_AES;
229 case RTE_CRYPTO_CIPHER_AES_CBC:
230 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
231 sess->cipher.um.aes_mode = CCP_AES_MODE_CBC;
232 sess->cipher.engine = CCP_ENGINE_AES;
234 case RTE_CRYPTO_CIPHER_3DES_CBC:
235 sess->cipher.algo = CCP_CIPHER_ALGO_3DES_CBC;
236 sess->cipher.um.des_mode = CCP_DES_MODE_CBC;
237 sess->cipher.engine = CCP_ENGINE_3DES;
240 CCP_LOG_ERR("Unsupported cipher algo");
/* per-engine key-size validation and CCP key layout */
245 switch (sess->cipher.engine) {
247 if (sess->cipher.key_length == 16)
248 sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
249 else if (sess->cipher.key_length == 24)
250 sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
251 else if (sess->cipher.key_length == 32)
252 sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
254 CCP_LOG_ERR("Invalid cipher key length");
/* AES: whole key byte-reversed into key_ccp */
257 for (i = 0; i < sess->cipher.key_length ; i++)
258 sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
261 case CCP_ENGINE_3DES:
262 if (sess->cipher.key_length == 16)
263 sess->cipher.ut.des_type = CCP_DES_TYPE_128;
264 else if (sess->cipher.key_length == 24)
265 sess->cipher.ut.des_type = CCP_DES_TYPE_192;
267 CCP_LOG_ERR("Invalid cipher key length");
/* 3DES: each 8-byte DES subkey is byte-reversed independently */
270 for (j = 0, x = 0; j < sess->cipher.key_length/8; j++, x += 8)
271 for (i = 0; i < 8; i++)
272 sess->cipher.key_ccp[(8 + x) - i - 1] =
273 sess->cipher.key[i + x];
276 CCP_LOG_ERR("Invalid CCP Engine");
/* cache DMA-able addresses for descriptor building */
279 sess->cipher.nonce_phys = rte_mem_virt2phy(sess->cipher.nonce);
280 sess->cipher.key_phys = rte_mem_virt2phy(sess->cipher.key_ccp);
/*
 * Fill the auth half of a CCP session from an rte_crypto auth xform:
 * digest length, generate/verify op, and per-algorithm setup.  Visible
 * cases: plain SHA1 (static init context), SHA1-HMAC (key copied, partial
 * hashes precomputed), and AES-CMAC (key byte-reversed, subkeys derived).
 * Other algo cases and the error returns are in elided lines.
 */
285 ccp_configure_session_auth(struct ccp_session *sess,
286 const struct rte_crypto_sym_xform *xform)
288 const struct rte_crypto_auth_xform *auth_xform = NULL;
291 auth_xform = &xform->auth;
293 sess->auth.digest_length = auth_xform->digest_length;
294 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE)
295 sess->auth.op = CCP_AUTH_OP_GENERATE;
297 sess->auth.op = CCP_AUTH_OP_VERIFY;
298 switch (auth_xform->algo) {
299 case RTE_CRYPTO_AUTH_SHA1:
300 sess->auth.engine = CCP_ENGINE_SHA;
301 sess->auth.algo = CCP_AUTH_ALGO_SHA1;
302 sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
/* plain SHA1 seeds the engine with the standard init vector */
303 sess->auth.ctx = (void *)ccp_sha1_init;
304 sess->auth.ctx_len = CCP_SB_BYTES;
/* digest sits at the tail of the storage block */
305 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
307 case RTE_CRYPTO_AUTH_SHA1_HMAC:
/* HMAC key longer than the block size is rejected (no pre-hash here) */
308 if (auth_xform->key.length > SHA1_BLOCK_SIZE)
310 sess->auth.engine = CCP_ENGINE_SHA;
311 sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
312 sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
313 sess->auth.ctx_len = CCP_SB_BYTES;
314 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
315 sess->auth.block_size = SHA1_BLOCK_SIZE;
316 sess->auth.key_length = auth_xform->key.length;
/* zero-pad key to block size; pre_compute holds 2 partial hashes */
317 memset(sess->auth.key, 0, sess->auth.block_size);
318 memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
319 rte_memcpy(sess->auth.key, auth_xform->key.data,
320 auth_xform->key.length);
321 if (generate_partial_hash(sess))
324 case RTE_CRYPTO_AUTH_AES_CMAC:
325 sess->auth.algo = CCP_AUTH_ALGO_AES_CMAC;
326 sess->auth.engine = CCP_ENGINE_AES;
327 sess->auth.um.aes_mode = CCP_AES_MODE_CMAC;
328 sess->auth.key_length = auth_xform->key.length;
329 /**<padding and hash result*/
330 sess->auth.ctx_len = CCP_SB_BYTES << 1;
331 sess->auth.offset = AES_BLOCK_SIZE;
332 sess->auth.block_size = AES_BLOCK_SIZE;
333 if (sess->auth.key_length == 16)
334 sess->auth.ut.aes_type = CCP_AES_TYPE_128;
335 else if (sess->auth.key_length == 24)
336 sess->auth.ut.aes_type = CCP_AES_TYPE_192;
337 else if (sess->auth.key_length == 32)
338 sess->auth.ut.aes_type = CCP_AES_TYPE_256;
340 CCP_LOG_ERR("Invalid CMAC key length");
343 rte_memcpy(sess->auth.key, auth_xform->key.data,
344 sess->auth.key_length);
/* CCP wants the key byte-reversed in key_ccp */
345 for (i = 0; i < sess->auth.key_length; i++)
346 sess->auth.key_ccp[sess->auth.key_length - i - 1] =
348 if (generate_cmac_subkeys(sess))
352 CCP_LOG_ERR("Unsupported hash algo");
/*
 * Fill both cipher and auth halves of a CCP session from an AEAD xform.
 * Only AES-GCM is visible: cipher side runs GCTR, auth side runs GHASH on
 * the same AES engine, and cmd_id is forced to COMBINED.  Key is copied
 * byte-reversed into key_ccp and physical addresses are cached, mirroring
 * ccp_configure_session_cipher.
 */
359 ccp_configure_session_aead(struct ccp_session *sess,
360 const struct rte_crypto_sym_xform *xform)
362 const struct rte_crypto_aead_xform *aead_xform = NULL;
365 aead_xform = &xform->aead;
367 sess->cipher.key_length = aead_xform->key.length;
368 rte_memcpy(sess->cipher.key, aead_xform->key.data,
369 aead_xform->key.length);
/* AEAD encrypt implies digest generation; decrypt implies verify */
371 if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
372 sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
373 sess->auth.op = CCP_AUTH_OP_GENERATE;
375 sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;
376 sess->auth.op = CCP_AUTH_OP_VERIFY;
378 sess->aead_algo = aead_xform->algo;
379 sess->auth.aad_length = aead_xform->aad_length;
380 sess->auth.digest_length = aead_xform->digest_length;
382 /* set iv parameters */
383 sess->iv.offset = aead_xform->iv.offset;
384 sess->iv.length = aead_xform->iv.length;
386 switch (aead_xform->algo) {
387 case RTE_CRYPTO_AEAD_AES_GCM:
388 sess->cipher.algo = CCP_CIPHER_ALGO_AES_GCM;
389 sess->cipher.um.aes_mode = CCP_AES_MODE_GCTR;
390 sess->cipher.engine = CCP_ENGINE_AES;
391 if (sess->cipher.key_length == 16)
392 sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
393 else if (sess->cipher.key_length == 24)
394 sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
395 else if (sess->cipher.key_length == 32)
396 sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
398 CCP_LOG_ERR("Invalid aead key length");
/* byte-reverse key into CCP layout */
401 for (i = 0; i < sess->cipher.key_length; i++)
402 sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
404 sess->auth.algo = CCP_AUTH_ALGO_AES_GCM;
405 sess->auth.engine = CCP_ENGINE_AES;
406 sess->auth.um.aes_mode = CCP_AES_MODE_GHASH;
407 sess->auth.ctx_len = CCP_SB_BYTES;
408 sess->auth.offset = 0;
409 sess->auth.block_size = AES_BLOCK_SIZE;
410 sess->cmd_id = CCP_CMD_COMBINED;
413 CCP_LOG_ERR("Unsupported aead algo");
416 sess->cipher.nonce_phys = rte_mem_virt2phy(sess->cipher.nonce);
417 sess->cipher.key_phys = rte_mem_virt2phy(sess->cipher.key_ccp);
/*
 * Top-level session setup: classify the xform chain with ccp_get_cmd_id,
 * pick which xform feeds the cipher/auth/aead configuration helpers, then
 * run each applicable helper and propagate its error.  The assignments for
 * AUTH-only, HASH_CIPHER's auth xform, COMBINED's aead xform, and the
 * "if (xform != NULL)" guards are in elided lines.
 */
422 ccp_set_session_parameters(struct ccp_session *sess,
423 const struct rte_crypto_sym_xform *xform)
425 const struct rte_crypto_sym_xform *cipher_xform = NULL;
426 const struct rte_crypto_sym_xform *auth_xform = NULL;
427 const struct rte_crypto_sym_xform *aead_xform = NULL;
430 sess->cmd_id = ccp_get_cmd_id(xform);
432 switch (sess->cmd_id) {
434 cipher_xform = xform;
439 case CCP_CMD_CIPHER_HASH:
440 cipher_xform = xform;
441 auth_xform = xform->next;
443 case CCP_CMD_HASH_CIPHER:
445 cipher_xform = xform->next;
447 case CCP_CMD_COMBINED:
451 CCP_LOG_ERR("Unsupported cmd_id");
455 /* Default IV length = 0 */
458 ret = ccp_configure_session_cipher(sess, cipher_xform);
460 CCP_LOG_ERR("Invalid/unsupported cipher parameters");
465 ret = ccp_configure_session_auth(sess, auth_xform);
467 CCP_LOG_ERR("Invalid/unsupported auth parameters");
472 ret = ccp_configure_session_aead(sess, aead_xform);
474 CCP_LOG_ERR("Invalid/unsupported aead parameters");
481 /* calculate CCP descriptors requirement */
/*
 * Number of CCP queue descriptors one cipher op will consume for this
 * session.  The per-case counts are in elided lines; the visible comments
 * indicate the IV-bearing modes need an extra passthrough descriptor.
 */
483 ccp_cipher_slot(struct ccp_session *session)
487 switch (session->cipher.algo) {
488 case CCP_CIPHER_ALGO_AES_CBC:
490 /**< op + passthrough for iv */
492 case CCP_CIPHER_ALGO_AES_ECB:
496 case CCP_CIPHER_ALGO_AES_CTR:
498 /**< op + passthrough for iv */
500 case CCP_CIPHER_ALGO_3DES_CBC:
502 /**< op + passthrough for iv */
505 CCP_LOG_ERR("Unsupported cipher algo %d",
506 session->cipher.algo);
/*
 * Number of CCP queue descriptors one auth op will consume for this
 * session.  Counts themselves are elided; visible comments note the LSB
 * passthrough copies and the possible extra padding descriptor for CMAC.
 */
512 ccp_auth_slot(struct ccp_session *session)
516 switch (session->auth.algo) {
517 case CCP_AUTH_ALGO_SHA1:
519 /**< op + lsb passthrough cpy to/from*/
521 case CCP_AUTH_ALGO_SHA1_HMAC:
524 case CCP_AUTH_ALGO_AES_CMAC:
528 * extra descriptor in padding case
529 * (k1/k2(255:128) with iv(127:0))
534 CCP_LOG_ERR("Unsupported auth algo %d",
/*
 * Number of CCP queue descriptors one AEAD op will consume.  Validates the
 * aead algo (only AES-GCM visible) and then dispatches on the combined auth
 * algo; the actual counts are in elided lines.
 */
542 ccp_aead_slot(struct ccp_session *session)
546 switch (session->aead_algo) {
547 case RTE_CRYPTO_AEAD_AES_GCM:
550 CCP_LOG_ERR("Unsupported aead algo %d",
553 switch (session->auth.algo) {
554 case CCP_AUTH_ALGO_AES_GCM:
565 CCP_LOG_ERR("Unsupported combined auth ALGO %d",
/*
 * Total descriptor count for one op on this session: cipher-only, auth-only,
 * cipher+auth chains (sum of both), or combined AEAD.
 */
572 ccp_compute_slot_count(struct ccp_session *session)
576 switch (session->cmd_id) {
578 count = ccp_cipher_slot(session);
581 count = ccp_auth_slot(session);
583 case CCP_CMD_CIPHER_HASH:
584 case CCP_CMD_HASH_CIPHER:
/* chained ops need both engines' descriptors */
585 count = ccp_cipher_slot(session);
586 count += ccp_auth_slot(session);
588 case CCP_CMD_COMBINED:
589 count = ccp_aead_slot(session);
592 CCP_LOG_ERR("Unsupported cmd_id");
/*
 * Build one PASSTHRU descriptor in the queue to copy pst->len bytes either
 * system memory -> LSB storage block (first branch) or LSB -> system memory
 * (second branch), with optional byteswap/bitwise function bits.  The
 * direction test between the two branches is in elided lines — presumably
 * keyed on pst->dir; confirm against the struct definition.
 * Advances cmd_q->qidx; does not ring the doorbell (callers write the tail
 * register themselves).
 */
600 ccp_perform_passthru(struct ccp_passthru *pst,
601 struct ccp_queue *cmd_q)
603 struct ccp_desc *desc;
604 union ccp_function function;
606 desc = &cmd_q->qbase_desc[cmd_q->qidx];
608 CCP_CMD_ENGINE(desc) = CCP_ENGINE_PASSTHRU;
610 CCP_CMD_SOC(desc) = 0;
611 CCP_CMD_IOC(desc) = 0;
612 CCP_CMD_INIT(desc) = 0;
613 CCP_CMD_EOM(desc) = 0;
614 CCP_CMD_PROT(desc) = 0;
617 CCP_PT_BYTESWAP(&function) = pst->byte_swap;
618 CCP_PT_BITWISE(&function) = pst->bit_mod;
619 CCP_CMD_FUNCTION(desc) = function.raw;
621 CCP_CMD_LEN(desc) = pst->len;
/* system memory -> LSB storage block */
624 CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
625 CCP_CMD_SRC_HI(desc) = high32_value(pst->src_addr);
626 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
628 CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
629 CCP_CMD_DST_HI(desc) = 0;
630 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;
/* bitwise ops need the key LSB slot id */
632 if (pst->bit_mod != CCP_PASSTHRU_BITWISE_NOOP)
633 CCP_CMD_LSB_ID(desc) = cmd_q->sb_key;
/* LSB storage block -> system memory */
636 CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
637 CCP_CMD_SRC_HI(desc) = 0;
638 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SB;
640 CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
641 CCP_CMD_DST_HI(desc) = high32_value(pst->dest_addr);
642 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
645 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/*
 * Enqueue a full HMAC computation as two SHA passes:
 *   1. Load the precomputed inner-pad state (PHash1) into the LSB, run the
 *      SHA engine over the message, and retrieve the intermediate hash into
 *      scratch appended to the mbuf (dest_addr_t).
 *   2. Load the outer-pad state (PHash2), run SHA over the intermediate
 *      hash, and retrieve the final HMAC to dest_addr.
 * auth_msg_bits adds one block_size to the message length because the
 * ipad/opad block is already absorbed in the precomputed state.
 * Rings the queue doorbell (tail + RUN) after each SHA descriptor.
 * NOTE(review): rte_pktmbuf_append return is used unchecked here — the
 * NULL check, if any, is in elided lines.
 */
649 ccp_perform_hmac(struct rte_crypto_op *op,
650 struct ccp_queue *cmd_q)
653 struct ccp_session *session;
654 union ccp_function function;
655 struct ccp_desc *desc;
657 phys_addr_t src_addr, dest_addr, dest_addr_t;
658 struct ccp_passthru pst;
659 uint64_t auth_msg_bits;
663 session = (struct ccp_session *)get_session_private_data(
665 ccp_cryptodev_driver_id);
666 addr = session->auth.pre_compute;
668 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
669 op->sym->auth.data.offset);
/* scratch space for intermediate/final hash appended to the mbuf */
670 append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
671 session->auth.ctx_len);
672 dest_addr = (phys_addr_t)rte_mem_virt2phy(append_ptr);
673 dest_addr_t = dest_addr;
675 /** Load PHash1 to LSB*/
676 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)addr);
677 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
678 pst.len = session->auth.ctx_len;
680 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
681 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
682 ccp_perform_passthru(&pst, cmd_q);
684 /**sha engine command descriptor for IntermediateHash*/
686 desc = &cmd_q->qbase_desc[cmd_q->qidx];
687 memset(desc, 0, Q_DESC_SIZE);
689 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
691 CCP_CMD_SOC(desc) = 0;
692 CCP_CMD_IOC(desc) = 0;
693 CCP_CMD_INIT(desc) = 1;
694 CCP_CMD_EOM(desc) = 1;
695 CCP_CMD_PROT(desc) = 0;
698 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
699 CCP_CMD_FUNCTION(desc) = function.raw;
701 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
/* + block_size: the ipad block absorbed in the precomputed state counts */
702 auth_msg_bits = (op->sym->auth.data.length +
703 session->auth.block_size) * 8;
705 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
706 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
707 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
709 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
710 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
711 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
713 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* kick the queue: write new tail and re-assert RUN */
717 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
718 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
719 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
720 cmd_q->qcontrol | CMD_Q_RUN);
722 /* Intermediate Hash value retrieve */
723 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
724 (session->auth.ut.sha_type == CCP_SHA_TYPE_512)) {
/* wide digests span two SB slots; copy high slot then low slot */
727 (phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
728 pst.dest_addr = dest_addr_t;
729 pst.len = CCP_SB_BYTES;
731 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
732 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
733 ccp_perform_passthru(&pst, cmd_q);
735 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
736 pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
737 pst.len = CCP_SB_BYTES;
739 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
740 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
741 ccp_perform_passthru(&pst, cmd_q);
/* narrow digests: single-slot copy */
744 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
745 pst.dest_addr = dest_addr_t;
746 pst.len = session->auth.ctx_len;
748 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
749 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
750 ccp_perform_passthru(&pst, cmd_q);
754 /** Load PHash2 to LSB*/
755 addr += session->auth.ctx_len;
756 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)addr);
757 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
758 pst.len = session->auth.ctx_len;
760 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
761 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
762 ccp_perform_passthru(&pst, cmd_q);
764 /**sha engine command descriptor for FinalHash*/
/* skip the leading pad so only the digest bytes feed the outer hash */
765 dest_addr_t += session->auth.offset;
767 desc = &cmd_q->qbase_desc[cmd_q->qidx];
768 memset(desc, 0, Q_DESC_SIZE);
770 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
772 CCP_CMD_SOC(desc) = 0;
773 CCP_CMD_IOC(desc) = 0;
774 CCP_CMD_INIT(desc) = 1;
775 CCP_CMD_EOM(desc) = 1;
776 CCP_CMD_PROT(desc) = 0;
779 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
780 CCP_CMD_FUNCTION(desc) = function.raw;
782 CCP_CMD_LEN(desc) = (session->auth.ctx_len -
783 session->auth.offset);
784 auth_msg_bits = (session->auth.block_size +
785 session->auth.ctx_len -
786 session->auth.offset) * 8;
788 CCP_CMD_SRC_LO(desc) = (uint32_t)(dest_addr_t);
789 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t);
790 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
792 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
793 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
794 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
796 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
800 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
801 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
802 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
803 cmd_q->qcontrol | CMD_Q_RUN);
805 /* Retrieve hmac output */
806 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
807 pst.dest_addr = dest_addr;
808 pst.len = session->auth.ctx_len;
810 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
/* SHA-384/512 state already laid out as needed; others need a 256-bit swap */
811 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
812 (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
813 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
815 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
816 ccp_perform_passthru(&pst, cmd_q);
818 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Enqueue a plain (non-HMAC) SHA op: load the algorithm's initial context
 * into the LSB, build one SHA descriptor over the mbuf data, ring the
 * doorbell, then retrieve the digest into scratch appended to the mbuf.
 * Unlike ccp_perform_hmac, auth_msg_bits is just the message length — no
 * pad block is pre-absorbed.
 */
824 ccp_perform_sha(struct rte_crypto_op *op,
825 struct ccp_queue *cmd_q)
827 struct ccp_session *session;
828 union ccp_function function;
829 struct ccp_desc *desc;
831 phys_addr_t src_addr, dest_addr;
832 struct ccp_passthru pst;
834 uint64_t auth_msg_bits;
836 session = (struct ccp_session *)get_session_private_data(
838 ccp_cryptodev_driver_id);
840 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
841 op->sym->auth.data.offset);
/* digest landing area appended to the source mbuf */
843 append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
844 session->auth.ctx_len);
845 dest_addr = (phys_addr_t)rte_mem_virt2phy(append_ptr);
847 /** Passthru sha context*/
/* session->auth.ctx (initial hash values) -> LSB slot; source elided */
849 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)
851 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
852 pst.len = session->auth.ctx_len;
854 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
855 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
856 ccp_perform_passthru(&pst, cmd_q);
858 /**prepare sha command descriptor*/
860 desc = &cmd_q->qbase_desc[cmd_q->qidx];
861 memset(desc, 0, Q_DESC_SIZE);
863 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
865 CCP_CMD_SOC(desc) = 0;
866 CCP_CMD_IOC(desc) = 0;
867 CCP_CMD_INIT(desc) = 1;
868 CCP_CMD_EOM(desc) = 1;
869 CCP_CMD_PROT(desc) = 0;
872 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
873 CCP_CMD_FUNCTION(desc) = function.raw;
875 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
876 auth_msg_bits = op->sym->auth.data.length * 8;
878 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
879 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
880 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
882 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
883 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
884 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
886 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* kick the queue: write new tail and re-assert RUN */
890 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
891 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
892 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
893 cmd_q->qcontrol | CMD_Q_RUN);
895 /* Hash value retrieve */
896 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
897 pst.dest_addr = dest_addr;
898 pst.len = session->auth.ctx_len;
900 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
901 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
902 (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
903 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
905 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
906 ccp_perform_passthru(&pst, cmd_q);
908 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Enqueue an AES-CMAC op.  Two paths on message length:
 *  - Block-aligned: zero IV loaded from pre_compute[0], one AES-CMAC
 *    descriptor over the full length (uses K1 — subkey handling partially
 *    elided), EOM set.
 *  - Unaligned: first descriptor covers the aligned prefix (INIT, no EOM);
 *    the trailing partial block is copied into scratch appended to the
 *    mbuf, padded with CMAC_PAD_VALUE, and a final single-block descriptor
 *    (EOM) finishes the MAC with K2 loaded from pre_compute[CCP_SB_BYTES].
 * Result is retrieved from the IV LSB slot with a 256-bit byteswap.
 */
914 ccp_perform_aes_cmac(struct rte_crypto_op *op,
915 struct ccp_queue *cmd_q)
917 struct ccp_session *session;
918 union ccp_function function;
919 struct ccp_passthru pst;
920 struct ccp_desc *desc;
922 uint8_t *src_tb, *append_ptr, *ctx_addr;
923 phys_addr_t src_addr, dest_addr, key_addr;
924 int length, non_align_len;
926 session = (struct ccp_session *)get_session_private_data(
928 ccp_cryptodev_driver_id);
929 key_addr = rte_mem_virt2phy(session->auth.key_ccp);
931 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
932 op->sym->auth.data.offset);
933 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
934 session->auth.ctx_len);
935 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
/* CMAC is always an encrypt-direction AES op */
938 CCP_AES_ENCRYPT(&function) = CCP_CIPHER_DIR_ENCRYPT;
939 CCP_AES_MODE(&function) = session->auth.um.aes_mode;
940 CCP_AES_TYPE(&function) = session->auth.ut.aes_type;
942 if (op->sym->auth.data.length % session->auth.block_size == 0) {
/* aligned case: start from a zero IV in the sb_iv slot */
944 ctx_addr = session->auth.pre_compute;
945 memset(ctx_addr, 0, AES_BLOCK_SIZE);
946 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)ctx_addr);
947 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
948 pst.len = CCP_SB_BYTES;
950 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
951 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
952 ccp_perform_passthru(&pst, cmd_q);
954 desc = &cmd_q->qbase_desc[cmd_q->qidx];
955 memset(desc, 0, Q_DESC_SIZE);
957 /* prepare desc for aes-cmac command */
958 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
959 CCP_CMD_EOM(desc) = 1;
960 CCP_CMD_FUNCTION(desc) = function.raw;
962 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
963 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
964 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
965 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
967 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
968 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
969 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
970 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
972 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
977 (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
978 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
979 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
980 cmd_q->qcontrol | CMD_Q_RUN);
/* unaligned case: seed IV slot from pre_compute's second SB entry (K2) */
982 ctx_addr = session->auth.pre_compute + CCP_SB_BYTES;
983 memset(ctx_addr, 0, AES_BLOCK_SIZE);
984 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)ctx_addr);
985 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
986 pst.len = CCP_SB_BYTES;
988 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
989 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
990 ccp_perform_passthru(&pst, cmd_q);
/* split message into aligned prefix + trailing partial block */
992 length = (op->sym->auth.data.length / AES_BLOCK_SIZE);
993 length *= AES_BLOCK_SIZE;
994 non_align_len = op->sym->auth.data.length - length;
995 /* prepare desc for aes-cmac command */
997 desc = &cmd_q->qbase_desc[cmd_q->qidx];
998 memset(desc, 0, Q_DESC_SIZE);
1000 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
1001 CCP_CMD_INIT(desc) = 1;
1002 CCP_CMD_FUNCTION(desc) = function.raw;
1004 CCP_CMD_LEN(desc) = length;
1005 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1006 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1007 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1009 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
1010 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
1011 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1012 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
1014 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* stage the padded final block in the appended scratch area */
1017 append_ptr = append_ptr + CCP_SB_BYTES;
1018 memset(append_ptr, 0, AES_BLOCK_SIZE);
1019 src_tb = rte_pktmbuf_mtod_offset(op->sym->m_src,
1021 op->sym->auth.data.offset +
1023 rte_memcpy(append_ptr, src_tb, non_align_len);
/* 10* padding: 0x80 marker then zeros (already zeroed above) */
1024 append_ptr[non_align_len] = CMAC_PAD_VALUE;
1026 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1027 memset(desc, 0, Q_DESC_SIZE);
1029 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
1030 CCP_CMD_EOM(desc) = 1;
1031 CCP_CMD_FUNCTION(desc) = function.raw;
1032 CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
1034 CCP_CMD_SRC_LO(desc) = ((uint32_t)(dest_addr + CCP_SB_BYTES));
1035 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr + CCP_SB_BYTES);
1036 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1038 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
1039 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
1040 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1041 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
1043 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1047 (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1048 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1049 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1050 cmd_q->qcontrol | CMD_Q_RUN);
1052 /* Retrieve result */
1053 pst.dest_addr = dest_addr;
1054 pst.src_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
1055 pst.len = CCP_SB_BYTES;
1057 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1058 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1059 ccp_perform_passthru(&pst, cmd_q);
1061 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Enqueue one AES cipher descriptor (CTR/CBC/ECB per session mode).
 * Non-ECB modes first stage the IV into the sb_iv LSB slot via passthru:
 * CTR packs the IV after the session nonce (and sets the 0x1F size field);
 * other modes right-align the IV in a per-batch lsb_buf entry.  In-place
 * operation when m_dst is NULL.  Does not ring the doorbell here — qidx is
 * advanced and the caller flushes the queue.
 */
1066 ccp_perform_aes(struct rte_crypto_op *op,
1067 struct ccp_queue *cmd_q,
1068 struct ccp_batch_info *b_info)
1070 struct ccp_session *session;
1071 union ccp_function function;
1073 struct ccp_passthru pst = {0};
1074 struct ccp_desc *desc;
1075 phys_addr_t src_addr, dest_addr, key_addr;
1078 session = (struct ccp_session *)get_session_private_data(
1080 ccp_cryptodev_driver_id);
1083 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
1084 if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB) {
1085 if (session->cipher.um.aes_mode == CCP_AES_MODE_CTR) {
/* CTR: IV appended after the nonce block; DMA from nonce_phys */
1086 rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE,
1087 iv, session->iv.length);
1088 pst.src_addr = (phys_addr_t)session->cipher.nonce_phys;
/* 0x1F: AES size field for CTR — presumably full 128-bit counter
 * width; confirm against the CCP function-field spec */
1089 CCP_AES_SIZE(&function) = 0x1F;
/* non-CTR: right-align IV in this batch's lsb_buf slot */
1092 &(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
1093 rte_memcpy(lsb_buf +
1094 (CCP_SB_BYTES - session->iv.length),
1095 iv, session->iv.length);
1096 pst.src_addr = b_info->lsb_buf_phys +
1097 (b_info->lsb_buf_idx * CCP_SB_BYTES);
1098 b_info->lsb_buf_idx++;
1101 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
1102 pst.len = CCP_SB_BYTES;
1104 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1105 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1106 ccp_perform_passthru(&pst, cmd_q);
1109 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1111 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
1112 op->sym->cipher.data.offset);
1113 if (likely(op->sym->m_dst != NULL))
1114 dest_addr = rte_pktmbuf_mtophys_offset(op->sym->m_dst,
1115 op->sym->cipher.data.offset);
/* in-place when no separate destination mbuf */
1117 dest_addr = src_addr;
1118 key_addr = session->cipher.key_phys;
1120 /* prepare desc for aes command */
1121 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
1122 CCP_CMD_INIT(desc) = 1;
1123 CCP_CMD_EOM(desc) = 1;
1125 CCP_AES_ENCRYPT(&function) = session->cipher.dir;
1126 CCP_AES_MODE(&function) = session->cipher.um.aes_mode;
1127 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
1128 CCP_CMD_FUNCTION(desc) = function.raw;
1130 CCP_CMD_LEN(desc) = op->sym->cipher.data.length;
1132 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1133 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1134 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1136 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
1137 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
1138 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1140 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
1141 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
1142 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
/* ECB has no IV, so no LSB slot reference */
1144 if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB)
1145 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
1147 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1148 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Enqueue one 3DES cipher descriptor.  CBC mode stages the IV right-aligned
 * in the batch lsb_buf and copies it to the sb_iv LSB slot; CFB/ECB are
 * rejected.  In-place when m_dst is NULL.  Unlike ccp_perform_aes, this
 * builder rings the doorbell itself (tail write + RUN).
 */
1153 ccp_perform_3des(struct rte_crypto_op *op,
1154 struct ccp_queue *cmd_q,
1155 struct ccp_batch_info *b_info)
1157 struct ccp_session *session;
1158 union ccp_function function;
1159 unsigned char *lsb_buf;
1160 struct ccp_passthru pst;
1161 struct ccp_desc *desc;
1164 phys_addr_t src_addr, dest_addr, key_addr;
1166 session = (struct ccp_session *)get_session_private_data(
1168 ccp_cryptodev_driver_id);
1170 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
1171 switch (session->cipher.um.des_mode) {
1172 case CCP_DES_MODE_CBC:
/* right-align IV in this batch's lsb_buf slot, then copy to LSB */
1173 lsb_buf = &(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
1174 b_info->lsb_buf_idx++;
1176 rte_memcpy(lsb_buf + (CCP_SB_BYTES - session->iv.length),
1177 iv, session->iv.length);
1179 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *) lsb_buf);
1180 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
1181 pst.len = CCP_SB_BYTES;
1183 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1184 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1185 ccp_perform_passthru(&pst, cmd_q);
1187 case CCP_DES_MODE_CFB:
1188 case CCP_DES_MODE_ECB:
1189 CCP_LOG_ERR("Unsupported DES cipher mode");
1193 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
1194 op->sym->cipher.data.offset);
1195 if (unlikely(op->sym->m_dst != NULL))
1197 rte_pktmbuf_mtophys_offset(op->sym->m_dst,
1198 op->sym->cipher.data.offset);
/* in-place when no separate destination mbuf */
1200 dest_addr = src_addr;
1202 key_addr = rte_mem_virt2phy(session->cipher.key_ccp);
1204 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1206 memset(desc, 0, Q_DESC_SIZE);
1208 /* prepare desc for des command */
1209 CCP_CMD_ENGINE(desc) = CCP_ENGINE_3DES;
1211 CCP_CMD_SOC(desc) = 0;
1212 CCP_CMD_IOC(desc) = 0;
1213 CCP_CMD_INIT(desc) = 1;
1214 CCP_CMD_EOM(desc) = 1;
1215 CCP_CMD_PROT(desc) = 0;
1218 CCP_DES_ENCRYPT(&function) = session->cipher.dir;
1219 CCP_DES_MODE(&function) = session->cipher.um.des_mode;
1220 CCP_DES_TYPE(&function) = session->cipher.ut.des_type;
1221 CCP_CMD_FUNCTION(desc) = function.raw;
1223 CCP_CMD_LEN(desc) = op->sym->cipher.data.length;
1225 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1226 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1227 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1229 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
1230 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
1231 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1233 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
1234 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
1235 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
/* NOTE(review): truthiness test relies on CCP_DES_MODE_CBC being nonzero
 * (only CBC reaches here anyway) — an explicit == CCP_DES_MODE_CBC
 * comparison would be clearer; verify the enum values */
1237 if (session->cipher.um.des_mode)
1238 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
1240 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1244 /* Write the new tail address back to the queue register */
1245 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1246 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1247 /* Turn the queue back on using our cached control register */
1248 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1249 cmd_q->qcontrol | CMD_Q_RUN);
1251 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
1256 ccp_perform_aes_gcm(struct rte_crypto_op *op, struct ccp_queue *cmd_q)
1258 struct ccp_session *session;
1259 union ccp_function function;
1261 struct ccp_passthru pst;
1262 struct ccp_desc *desc;
1265 phys_addr_t src_addr, dest_addr, key_addr, aad_addr;
1266 phys_addr_t digest_dest_addr;
1267 int length, non_align_len;
1269 session = (struct ccp_session *)get_session_private_data(
1271 ccp_cryptodev_driver_id);
1272 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
1273 key_addr = session->cipher.key_phys;
1275 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
1276 op->sym->aead.data.offset);
1277 if (unlikely(op->sym->m_dst != NULL))
1278 dest_addr = rte_pktmbuf_mtophys_offset(op->sym->m_dst,
1279 op->sym->aead.data.offset);
1281 dest_addr = src_addr;
1282 rte_pktmbuf_append(op->sym->m_src, session->auth.ctx_len);
1283 digest_dest_addr = op->sym->aead.digest.phys_addr;
1284 temp = (uint64_t *)(op->sym->aead.digest.data + AES_BLOCK_SIZE);
1285 *temp++ = rte_bswap64(session->auth.aad_length << 3);
1286 *temp = rte_bswap64(op->sym->aead.data.length << 3);
1288 non_align_len = op->sym->aead.data.length % AES_BLOCK_SIZE;
1289 length = CCP_ALIGN(op->sym->aead.data.length, AES_BLOCK_SIZE);
1291 aad_addr = op->sym->aead.aad.phys_addr;
1293 /* CMD1 IV Passthru */
1294 rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE, iv,
1295 session->iv.length);
1296 pst.src_addr = session->cipher.nonce_phys;
1297 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
1298 pst.len = CCP_SB_BYTES;
1300 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1301 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1302 ccp_perform_passthru(&pst, cmd_q);
1304 /* CMD2 GHASH-AAD */
1306 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_AAD;
1307 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
1308 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
1310 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1311 memset(desc, 0, Q_DESC_SIZE);
1313 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
1314 CCP_CMD_INIT(desc) = 1;
1315 CCP_CMD_FUNCTION(desc) = function.raw;
1317 CCP_CMD_LEN(desc) = session->auth.aad_length;
1319 CCP_CMD_SRC_LO(desc) = ((uint32_t)aad_addr);
1320 CCP_CMD_SRC_HI(desc) = high32_value(aad_addr);
1321 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1323 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
1324 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
1325 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1327 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
1329 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1332 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1333 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1334 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1335 cmd_q->qcontrol | CMD_Q_RUN);
1337 /* CMD3 : GCTR Plain text */
1339 CCP_AES_ENCRYPT(&function) = session->cipher.dir;
1340 CCP_AES_MODE(&function) = CCP_AES_MODE_GCTR;
1341 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
1342 if (non_align_len == 0)
1343 CCP_AES_SIZE(&function) = (AES_BLOCK_SIZE << 3) - 1;
1345 CCP_AES_SIZE(&function) = (non_align_len << 3) - 1;
1348 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1349 memset(desc, 0, Q_DESC_SIZE);
1351 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
1352 CCP_CMD_EOM(desc) = 1;
1353 CCP_CMD_FUNCTION(desc) = function.raw;
1355 CCP_CMD_LEN(desc) = length;
1357 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1358 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1359 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1361 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
1362 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
1363 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1365 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
1366 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
1367 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1369 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
1371 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1374 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1375 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1376 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1377 cmd_q->qcontrol | CMD_Q_RUN);
1379 /* CMD4 : PT to copy IV */
1380 pst.src_addr = session->cipher.nonce_phys;
1381 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
1382 pst.len = AES_BLOCK_SIZE;
1384 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1385 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1386 ccp_perform_passthru(&pst, cmd_q);
1388 /* CMD5 : GHASH-Final */
1390 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_FINAL;
1391 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
1392 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
1394 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1395 memset(desc, 0, Q_DESC_SIZE);
1397 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
1398 CCP_CMD_FUNCTION(desc) = function.raw;
1399 /* Last block (AAD_len || PT_len)*/
1400 CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
1402 CCP_CMD_SRC_LO(desc) = ((uint32_t)digest_dest_addr + AES_BLOCK_SIZE);
1403 CCP_CMD_SRC_HI(desc) = high32_value(digest_dest_addr + AES_BLOCK_SIZE);
1404 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1406 CCP_CMD_DST_LO(desc) = ((uint32_t)digest_dest_addr);
1407 CCP_CMD_DST_HI(desc) = high32_value(digest_dest_addr);
1408 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1410 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
1411 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
1412 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1414 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
1416 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1419 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1420 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1421 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1422 cmd_q->qcontrol | CMD_Q_RUN);
1424 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Dispatch a symmetric cipher op to the per-algorithm CCP descriptor
 * builder and account for how many hardware descriptors that builder
 * consumed (desccnt is used by the dequeue path to release queue slots).
 *
 * NOTE(review): this view is a lossy extraction (embedded line numbers
 * jump); declarations of `result`, the case `break`s and the final
 * return are not visible here.
 */
1429 ccp_crypto_cipher(struct rte_crypto_op *op,
1430 struct ccp_queue *cmd_q,
1431 struct ccp_batch_info *b_info)
1434 struct ccp_session *session;
1436 session = (struct ccp_session *)get_session_private_data(
1438 ccp_cryptodev_driver_id);
1440 switch (session->cipher.algo) {
1441 case CCP_CIPHER_ALGO_AES_CBC:
/* AES-CBC: IV passthru + cipher descriptor = 2 */
1442 result = ccp_perform_aes(op, cmd_q, b_info);
1443 b_info->desccnt += 2;
1445 case CCP_CIPHER_ALGO_AES_CTR:
/* AES-CTR: IV passthru + cipher descriptor = 2 */
1446 result = ccp_perform_aes(op, cmd_q, b_info);
1447 b_info->desccnt += 2;
1449 case CCP_CIPHER_ALGO_AES_ECB:
/* AES-ECB needs no IV staging: single descriptor */
1450 result = ccp_perform_aes(op, cmd_q, b_info);
1451 b_info->desccnt += 1;
1453 case CCP_CIPHER_ALGO_3DES_CBC:
/* 3DES-CBC: IV passthru + cipher descriptor = 2 */
1454 result = ccp_perform_3des(op, cmd_q, b_info);
1455 b_info->desccnt += 2;
1458 CCP_LOG_ERR("Unsupported cipher algo %d",
1459 session->cipher.algo);
/*
 * Dispatch an auth op to the per-algorithm CCP descriptor builder and
 * record the descriptor count for later queue-slot accounting.
 *
 * NOTE(review): lossy extraction — `result` declaration, case `break`s
 * and the return are outside this view.
 */
1466 ccp_crypto_auth(struct rte_crypto_op *op,
1467 struct ccp_queue *cmd_q,
1468 struct ccp_batch_info *b_info)
1472 struct ccp_session *session;
1474 session = (struct ccp_session *)get_session_private_data(
1476 ccp_cryptodev_driver_id);
1478 switch (session->auth.algo) {
1479 case CCP_AUTH_ALGO_SHA1:
/* plain SHA1: 3 descriptors */
1480 result = ccp_perform_sha(op, cmd_q);
1481 b_info->desccnt += 3;
1483 case CCP_AUTH_ALGO_SHA1_HMAC:
/* HMAC doubles the work (inner + outer hash): 6 descriptors */
1484 result = ccp_perform_hmac(op, cmd_q);
1485 b_info->desccnt += 6;
1487 case CCP_AUTH_ALGO_AES_CMAC:
1488 result = ccp_perform_aes_cmac(op, cmd_q);
1489 b_info->desccnt += 4;
1492 CCP_LOG_ERR("Unsupported auth algo %d",
1493 session->auth.algo);
/*
 * Dispatch an AEAD op. Only AES-GCM is supported, and the session must
 * have been configured with the matching cipher algorithm; otherwise the
 * chain order is rejected.
 *
 * NOTE(review): lossy extraction — `result` declaration, error returns
 * and `break`s are outside this view.
 */
1501 ccp_crypto_aead(struct rte_crypto_op *op,
1502 struct ccp_queue *cmd_q,
1503 struct ccp_batch_info *b_info)
1506 struct ccp_session *session;
1508 session = (struct ccp_session *)get_session_private_data(
1510 ccp_cryptodev_driver_id);
1512 switch (session->auth.algo) {
1513 case CCP_AUTH_ALGO_AES_GCM:
/* auth and cipher halves of the session must both be GCM */
1514 if (session->cipher.algo != CCP_CIPHER_ALGO_AES_GCM) {
1515 CCP_LOG_ERR("Incorrect chain order");
/* GCM is built as a 5-command sequence (see ccp_perform_aes_gcm) */
1518 result = ccp_perform_aes_gcm(op, cmd_q);
1519 b_info->desccnt += 5;
1522 CCP_LOG_ERR("Unsupported aead algo %d",
1523 session->aead_algo);
/*
 * Build CCP descriptors for a burst of crypto ops and submit them to the
 * hardware queue in one shot.
 *
 * A ccp_batch_info is taken from the qp mempool to track the burst: the
 * head/tail descriptor offsets (for completion polling), the per-op
 * pointers, the descriptor count and the LSB staging buffer. Free queue
 * slots are reserved up front and partially refunded if a build fails.
 * After all ops are encoded, the tail register is written and the queue
 * re-armed once for the whole burst.
 *
 * NOTE(review): lossy extraction — the trailing parameters (presumably
 * nb_ops and slots_req, both used below), loop `break`s, b_info->opcnt
 * bookkeeping and the return are outside this view.
 */
1530 process_ops_to_enqueue(const struct ccp_qp *qp,
1531 struct rte_crypto_op **op,
1532 struct ccp_queue *cmd_q,
1537 struct ccp_batch_info *b_info;
1538 struct ccp_session *session;
1540 if (rte_mempool_get(qp->batch_mp, (void **)&b_info)) {
1541 CCP_LOG_ERR("batch info allocation failed");
1544 /* populate batch info necessary for dequeue */
1546 b_info->lsb_buf_idx = 0;
1547 b_info->desccnt = 0;
1548 b_info->cmd_q = cmd_q;
1549 b_info->lsb_buf_phys =
1550 (phys_addr_t)rte_mem_virt2phy((void *)b_info->lsb_buf);
/* reserve queue slots for the whole burst up front */
1551 rte_atomic64_sub(&b_info->cmd_q->free_slots, slots_req);
1553 b_info->head_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
1555 for (i = 0; i < nb_ops; i++) {
1556 session = (struct ccp_session *)get_session_private_data(
1557 op[i]->sym->session,
1558 ccp_cryptodev_driver_id);
1559 switch (session->cmd_id) {
1560 case CCP_CMD_CIPHER:
1561 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
1564 result = ccp_crypto_auth(op[i], cmd_q, b_info);
/* chained ops run both builders in session-defined order */
1566 case CCP_CMD_CIPHER_HASH:
1567 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
1570 result = ccp_crypto_auth(op[i], cmd_q, b_info);
1572 case CCP_CMD_HASH_CIPHER:
1573 result = ccp_crypto_auth(op[i], cmd_q, b_info);
1576 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
1578 case CCP_CMD_COMBINED:
1579 result = ccp_crypto_aead(op[i], cmd_q, b_info);
1582 CCP_LOG_ERR("Unsupported cmd_id");
1585 if (unlikely(result < 0)) {
/* refund the slots we reserved but did not consume */
1586 rte_atomic64_add(&b_info->cmd_q->free_slots,
1587 (slots_req - b_info->desccnt));
1590 b_info->op[i] = op[i];
1594 b_info->tail_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
1598 /* Write the new tail address back to the queue register */
1599 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE,
1600 b_info->tail_offset);
1601 /* Turn the queue back on using our cached control register */
1602 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1603 cmd_q->qcontrol | CMD_Q_RUN);
/* hand the batch to the dequeue side */
1605 rte_ring_enqueue(qp->processed_pkts, (void *)b_info);
/*
 * Post-process an authenticated op at dequeue time.
 *
 * The hardware leaves its auth context (including the digest) in the
 * ctx_len bytes appended to the last segment of m_src at enqueue time.
 * Locate the digest there, byte-reverse it for SHA variants the engine
 * emits in the opposite byte order (everything except SHA1/224/256),
 * then either verify it against the op's digest (VERIFY) or copy it out
 * (GENERATE), and finally trim the scratch area off the mbuf.
 *
 * NOTE(review): lossy extraction — `i` declaration, else-branches and
 * the line that presumably redirects `addr` to `digest_le` after the
 * byte-reversal are outside this view.
 */
1610 static inline void ccp_auth_dq_prepare(struct rte_crypto_op *op)
1612 struct ccp_session *session;
1613 uint8_t *digest_data, *addr;
1614 struct rte_mbuf *m_last;
1615 int offset, digest_offset;
1616 uint8_t digest_le[64];
1618 session = (struct ccp_session *)get_session_private_data(
1620 ccp_cryptodev_driver_id);
/* AEAD and plain-auth ops keep their digest in different sym fields */
1622 if (session->cmd_id == CCP_CMD_COMBINED) {
1623 digest_data = op->sym->aead.digest.data;
1624 digest_offset = op->sym->aead.data.offset +
1625 op->sym->aead.data.length;
1627 digest_data = op->sym->auth.digest.data;
1628 digest_offset = op->sym->auth.data.offset +
1629 op->sym->auth.data.length;
/* hardware context sits in the last ctx_len bytes of the last segment */
1631 m_last = rte_pktmbuf_lastseg(op->sym->m_src);
1632 addr = (uint8_t *)((char *)m_last->buf_addr + m_last->data_off +
1633 m_last->data_len - session->auth.ctx_len);
1636 offset = session->auth.offset;
1638 if (session->auth.engine == CCP_ENGINE_SHA)
1639 if ((session->auth.ut.sha_type != CCP_SHA_TYPE_1) &&
1640 (session->auth.ut.sha_type != CCP_SHA_TYPE_224) &&
1641 (session->auth.ut.sha_type != CCP_SHA_TYPE_256)) {
1642 /* All other algorithms require byte
/* reverse the digest into digest_le, reading ctx back-to-front */
1647 offset = session->auth.ctx_len -
1648 session->auth.offset - 1;
1649 for (i = 0; i < session->auth.digest_length; i++)
1650 digest_le[i] = addr[offset - i];
1655 op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
1656 if (session->auth.op == CCP_AUTH_OP_VERIFY) {
1657 if (memcmp(addr + offset, digest_data,
1658 session->auth.digest_length) != 0)
1659 op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
/* GENERATE: if no digest pointer was given, place it in m_dst */
1662 if (unlikely(digest_data == 0))
1663 digest_data = rte_pktmbuf_mtod_offset(
1664 op->sym->m_dst, uint8_t *,
1666 rte_memcpy(digest_data, addr + offset,
1667 session->auth.digest_length);
1669 /* Trim area used for digest from mbuf. */
1670 rte_pktmbuf_trim(op->sym->m_src,
1671 session->auth.ctx_len);
/*
 * Move up to nb_ops completed ops out of a batch into the caller's array,
 * finalizing each op's status: pure-cipher ops are marked SUCCESS
 * directly, anything with an auth component goes through
 * ccp_auth_dq_prepare() (digest fixup/verify). Decrements the batch's
 * outstanding op count by the number handed out.
 *
 * NOTE(review): lossy extraction — the trailing nb_ops parameter,
 * declarations of `i`/`min_ops`, case `break`s and the `return min_ops`
 * are outside this view.
 */
1675 ccp_prepare_ops(struct rte_crypto_op **op_d,
1676 struct ccp_batch_info *b_info,
1680 struct ccp_session *session;
/* never hand out more ops than remain in the batch */
1682 min_ops = RTE_MIN(nb_ops, b_info->opcnt);
1684 for (i = 0; i < min_ops; i++) {
1685 op_d[i] = b_info->op[b_info->op_idx++];
1686 session = (struct ccp_session *)get_session_private_data(
1687 op_d[i]->sym->session,
1688 ccp_cryptodev_driver_id);
1689 switch (session->cmd_id) {
1690 case CCP_CMD_CIPHER:
/* no digest to check: cipher-only ops complete immediately */
1691 op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
1694 case CCP_CMD_CIPHER_HASH:
1695 case CCP_CMD_HASH_CIPHER:
1696 case CCP_CMD_COMBINED:
1697 ccp_auth_dq_prepare(op_d[i]);
1700 CCP_LOG_ERR("Unsupported cmd_id");
1704 b_info->opcnt -= min_ops;
1709 process_ops_to_dequeue(struct ccp_qp *qp,
1710 struct rte_crypto_op **op,
1713 struct ccp_batch_info *b_info;
1714 uint32_t cur_head_offset;
1716 if (qp->b_info != NULL) {
1717 b_info = qp->b_info;
1718 if (unlikely(b_info->op_idx > 0))
1720 } else if (rte_ring_dequeue(qp->processed_pkts,
1723 cur_head_offset = CCP_READ_REG(b_info->cmd_q->reg_base,
1724 CMD_Q_HEAD_LO_BASE);
1726 if (b_info->head_offset < b_info->tail_offset) {
1727 if ((cur_head_offset >= b_info->head_offset) &&
1728 (cur_head_offset < b_info->tail_offset)) {
1729 qp->b_info = b_info;
1733 if ((cur_head_offset >= b_info->head_offset) ||
1734 (cur_head_offset < b_info->tail_offset)) {
1735 qp->b_info = b_info;
1742 nb_ops = ccp_prepare_ops(op, b_info, nb_ops);
1743 rte_atomic64_add(&b_info->cmd_q->free_slots, b_info->desccnt);
1744 b_info->desccnt = 0;
1745 if (b_info->opcnt > 0) {
1746 qp->b_info = b_info;
1748 rte_mempool_put(qp->batch_mp, (void *)b_info);