[dpdk.git] drivers/crypto/cnxk/cnxk_se.h (b1337cc618c381df5b12165385826ac48038cdb9)
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(C) 2021 Marvell.
 */

#ifndef _CNXK_SE_H_
#define _CNXK_SE_H_
#include <stdbool.h>

#include "cnxk_cryptodev.h"
#include "cnxk_cryptodev_ops.h"

#define SRC_IOV_SIZE                                                           \
        (sizeof(struct roc_se_iov_ptr) +                                       \
         (sizeof(struct roc_se_buf_ptr) * ROC_SE_MAX_SG_CNT))
#define DST_IOV_SIZE                                                           \
        (sizeof(struct roc_se_iov_ptr) +                                       \
         (sizeof(struct roc_se_buf_ptr) * ROC_SE_MAX_SG_CNT))

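/*
 * Per-session symmetric crypto state: operation and algorithm flags,
 * digest/IV lengths and offsets, the counter-mode salt, the precomputed
 * CPT instruction word 7 and the ROC SE context.
 */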
struct cnxk_se_sess {
        uint16_t cpt_op : 4;
        uint16_t zsk_flag : 4;
        uint16_t aes_gcm : 1;
        uint16_t aes_ctr : 1;
        uint16_t chacha_poly : 1;
        uint16_t is_null : 1;
        uint16_t is_gmac : 1;
        uint16_t rsvd1 : 3;
        uint16_t aad_length;
        uint8_t mac_len;
        uint8_t iv_length;
        uint8_t auth_iv_length;
        uint16_t iv_offset;
        uint16_t auth_iv_offset;
        uint32_t salt;
        uint64_t cpt_inst_w7;
        struct roc_se_ctx roc_se_ctx;
} __rte_cache_aligned;

static __rte_always_inline int
cpt_mac_len_verify(struct rte_crypto_auth_xform *auth)
{
        uint16_t mac_len = auth->digest_length;
        int ret;

        switch (auth->algo) {
        case RTE_CRYPTO_AUTH_MD5:
        case RTE_CRYPTO_AUTH_MD5_HMAC:
                ret = (mac_len == 16) ? 0 : -1;
                break;
        case RTE_CRYPTO_AUTH_SHA1:
        case RTE_CRYPTO_AUTH_SHA1_HMAC:
                ret = (mac_len == 20) ? 0 : -1;
                break;
        case RTE_CRYPTO_AUTH_SHA224:
        case RTE_CRYPTO_AUTH_SHA224_HMAC:
                ret = (mac_len == 28) ? 0 : -1;
                break;
        case RTE_CRYPTO_AUTH_SHA256:
        case RTE_CRYPTO_AUTH_SHA256_HMAC:
                ret = (mac_len == 32) ? 0 : -1;
                break;
        case RTE_CRYPTO_AUTH_SHA384:
        case RTE_CRYPTO_AUTH_SHA384_HMAC:
                ret = (mac_len == 48) ? 0 : -1;
                break;
        case RTE_CRYPTO_AUTH_SHA512:
        case RTE_CRYPTO_AUTH_SHA512_HMAC:
                ret = (mac_len == 64) ? 0 : -1;
                break;
        case RTE_CRYPTO_AUTH_NULL:
                ret = 0;
                break;
        default:
                ret = -1;
        }

        return ret;
}

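/*
 * Refresh the 4-byte salt stored in the flexi-crypto context IV area
 * (used as the leading bytes of the nonce for AES-GCM/CTR sessions).
 */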
static __rte_always_inline void
cpt_fc_salt_update(struct roc_se_ctx *se_ctx, uint8_t *salt)
{
        struct roc_se_context *fctx = &se_ctx->se_ctx.fctx;
        memcpy(fctx->enc.encr_iv, salt, 4);
}

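/*
 * SG list helpers: each roc_se_sglist_comp packs four entries; index i
 * selects component i >> 2 and slot i % 4. Lengths are 16-bit and pointers
 * 64-bit, both stored big endian as expected by the microcode.
 */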
static __rte_always_inline uint32_t
fill_sg_comp(struct roc_se_sglist_comp *list, uint32_t i, phys_addr_t dma_addr,
             uint32_t size)
{
        struct roc_se_sglist_comp *to = &list[i >> 2];

        to->u.s.len[i % 4] = rte_cpu_to_be_16(size);
        to->ptr[i % 4] = rte_cpu_to_be_64(dma_addr);
        i++;
        return i;
}

static __rte_always_inline uint32_t
fill_sg_comp_from_buf(struct roc_se_sglist_comp *list, uint32_t i,
                      struct roc_se_buf_ptr *from)
{
        struct roc_se_sglist_comp *to = &list[i >> 2];

        to->u.s.len[i % 4] = rte_cpu_to_be_16(from->size);
        to->ptr[i % 4] = rte_cpu_to_be_64((uint64_t)from->vaddr);
        i++;
        return i;
}

static __rte_always_inline uint32_t
fill_sg_comp_from_buf_min(struct roc_se_sglist_comp *list, uint32_t i,
                          struct roc_se_buf_ptr *from, uint32_t *psize)
{
        struct roc_se_sglist_comp *to = &list[i >> 2];
        uint32_t size = *psize;
        uint32_t e_len;

        e_len = (size > from->size) ? from->size : size;
        to->u.s.len[i % 4] = rte_cpu_to_be_16(e_len);
        to->ptr[i % 4] = rte_cpu_to_be_64((uint64_t)from->vaddr);
        *psize -= e_len;
        i++;
        return i;
}

/*
 * This fills the SG I/O list expected by the microcode
 * from the IOV given by the user.
 */
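/*
 * extra_buf, when non-NULL, is spliced into the list at extra_offset bytes
 * into the source data; the flexi-crypto callers use this to insert the
 * separately supplied AAD between the passthrough and cipher regions.
 */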
static __rte_always_inline uint32_t
fill_sg_comp_from_iov(struct roc_se_sglist_comp *list, uint32_t i,
                      struct roc_se_iov_ptr *from, uint32_t from_offset,
                      uint32_t *psize, struct roc_se_buf_ptr *extra_buf,
                      uint32_t extra_offset)
{
        int32_t j;
        uint32_t extra_len = extra_buf ? extra_buf->size : 0;
        uint32_t size = *psize;
        struct roc_se_buf_ptr *bufs;

        bufs = from->bufs;
        for (j = 0; (j < from->buf_cnt) && size; j++) {
                uint64_t e_vaddr;
                uint32_t e_len;
                struct roc_se_sglist_comp *to = &list[i >> 2];

                if (unlikely(from_offset)) {
                        if (from_offset >= bufs[j].size) {
                                from_offset -= bufs[j].size;
                                continue;
                        }
                        e_vaddr = (uint64_t)bufs[j].vaddr + from_offset;
                        e_len = (size > (bufs[j].size - from_offset)) ?
                                        (bufs[j].size - from_offset) :
                                        size;
                        from_offset = 0;
                } else {
                        e_vaddr = (uint64_t)bufs[j].vaddr;
                        e_len = (size > bufs[j].size) ? bufs[j].size : size;
                }

                to->u.s.len[i % 4] = rte_cpu_to_be_16(e_len);
                to->ptr[i % 4] = rte_cpu_to_be_64(e_vaddr);

                if (extra_len && (e_len >= extra_offset)) {
                        /* Break the data at given offset */
                        uint32_t next_len = e_len - extra_offset;
                        uint64_t next_vaddr = e_vaddr + extra_offset;

                        if (!extra_offset) {
                                i--;
                        } else {
                                e_len = extra_offset;
                                size -= e_len;
                                to->u.s.len[i % 4] = rte_cpu_to_be_16(e_len);
                        }

                        extra_len = RTE_MIN(extra_len, size);
                        /* Insert extra data ptr */
                        if (extra_len) {
                                i++;
                                to = &list[i >> 2];
                                to->u.s.len[i % 4] =
                                        rte_cpu_to_be_16(extra_len);
                                to->ptr[i % 4] = rte_cpu_to_be_64(
                                        (uint64_t)extra_buf->vaddr);
                                size -= extra_len;
                        }

                        next_len = RTE_MIN(next_len, size);
                        /* Insert the rest of the data */
                        if (next_len) {
                                i++;
                                to = &list[i >> 2];
                                to->u.s.len[i % 4] = rte_cpu_to_be_16(next_len);
                                to->ptr[i % 4] = rte_cpu_to_be_64(next_vaddr);
                                size -= next_len;
                        }
                        extra_len = 0;

                } else {
                        size -= e_len;
                }
                if (extra_offset)
                        extra_offset -= size;
                i++;
        }

        *psize = size;
        return (uint32_t)i;
}

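/*
 * Flexi-crypto request preparation. The cipher and auth offsets arrive
 * packed in d_offs and the corresponding data lengths in d_lens; the
 * ROC_SE_*_OFFSET()/ROC_SE_*_DLEN() macros unpack them. The routines below
 * fill CPT instruction word 4 and lay out DPTR/RPTR for either direct or
 * SG mode.
 */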
static __rte_always_inline int
cpt_enc_hmac_prep(uint32_t flags, uint64_t d_offs, uint64_t d_lens,
                  struct roc_se_fc_params *fc_params, struct cpt_inst_s *inst)
{
        uint32_t iv_offset = 0;
        int32_t inputlen, outputlen, enc_dlen, auth_dlen;
        struct roc_se_ctx *se_ctx;
        uint32_t cipher_type, hash_type;
        uint32_t mac_len, size;
        uint8_t iv_len = 16;
        struct roc_se_buf_ptr *aad_buf = NULL;
        uint32_t encr_offset, auth_offset;
        uint32_t encr_data_len, auth_data_len, aad_len = 0;
        uint32_t passthrough_len = 0;
        union cpt_inst_w4 cpt_inst_w4;
        void *offset_vaddr;
        uint8_t op_minor;

        encr_offset = ROC_SE_ENCR_OFFSET(d_offs);
        auth_offset = ROC_SE_AUTH_OFFSET(d_offs);
        encr_data_len = ROC_SE_ENCR_DLEN(d_lens);
        auth_data_len = ROC_SE_AUTH_DLEN(d_lens);
        if (unlikely(flags & ROC_SE_VALID_AAD_BUF)) {
                /* AAD and separate auth data are not supported together */
                auth_data_len = 0;
                auth_offset = 0;
                aad_len = fc_params->aad_buf.size;
                aad_buf = &fc_params->aad_buf;
        }
        se_ctx = fc_params->ctx_buf.vaddr;
        cipher_type = se_ctx->enc_cipher;
        hash_type = se_ctx->hash_type;
        mac_len = se_ctx->mac_len;
        op_minor = se_ctx->template_w4.s.opcode_minor;

        if (unlikely(!(flags & ROC_SE_VALID_IV_BUF))) {
                iv_len = 0;
                iv_offset = ROC_SE_ENCR_IV_OFFSET(d_offs);
        }

        if (unlikely(flags & ROC_SE_VALID_AAD_BUF)) {
                /*
                 * When AAD is given, data above encr_offset is passed through.
                 * Since AAD is provided as a separate pointer rather than as
                 * an offset, this is a special case: the input data is
                 * fragmented into passthrough + encr_data and the AAD is
                 * inserted in between.
                 */
                if (hash_type != ROC_SE_GMAC_TYPE) {
                        passthrough_len = encr_offset;
                        auth_offset = passthrough_len + iv_len;
                        encr_offset = passthrough_len + aad_len + iv_len;
                        auth_data_len = aad_len + encr_data_len;
                } else {
                        passthrough_len = 16 + aad_len;
                        auth_offset = passthrough_len + iv_len;
                        auth_data_len = aad_len;
                }
        } else {
                encr_offset += iv_len;
                auth_offset += iv_len;
        }

        /* Encryption */
        cpt_inst_w4.s.opcode_major = ROC_SE_MAJOR_OP_FC;
        cpt_inst_w4.s.opcode_minor = ROC_SE_FC_MINOR_OP_ENCRYPT;
        cpt_inst_w4.s.opcode_minor |= (uint64_t)op_minor;

        if (hash_type == ROC_SE_GMAC_TYPE) {
                encr_offset = 0;
                encr_data_len = 0;
        }

        auth_dlen = auth_offset + auth_data_len;
        enc_dlen = encr_data_len + encr_offset;
        if (unlikely(encr_data_len & 0xf)) {
                if ((cipher_type == ROC_SE_DES3_CBC) ||
                    (cipher_type == ROC_SE_DES3_ECB))
                        enc_dlen =
                                RTE_ALIGN_CEIL(encr_data_len, 8) + encr_offset;
                else if (likely((cipher_type == ROC_SE_AES_CBC) ||
                                (cipher_type == ROC_SE_AES_ECB)))
                        enc_dlen =
                                RTE_ALIGN_CEIL(encr_data_len, 8) + encr_offset;
        }

        if (unlikely(auth_dlen > enc_dlen)) {
                inputlen = auth_dlen;
                outputlen = auth_dlen + mac_len;
        } else {
                inputlen = enc_dlen;
                outputlen = enc_dlen + mac_len;
        }

        if (op_minor & ROC_SE_FC_MINOR_OP_HMAC_FIRST)
                outputlen = enc_dlen;

        /* GP op header */
        cpt_inst_w4.s.param1 = encr_data_len;
        cpt_inst_w4.s.param2 = auth_data_len;

        /*
         * On cn9k and cn10k, the IV and the offset control word cannot be
         * part of the instruction and must be placed in the data buffer.
         * Use direct mode only when the buffer has headroom for them;
         * otherwise fall back to SG mode.
         */
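        /*
         * In direct mode the DPTR buffer is laid out as
         * [offset control word][IV][data]; RPTR starts at the IV, i.e. it
         * excludes only the offset control word.
         */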
        if (likely((flags & ROC_SE_SINGLE_BUF_INPLACE) &&
                   (flags & ROC_SE_SINGLE_BUF_HEADROOM))) {
                void *dm_vaddr = fc_params->bufs[0].vaddr;

                /* Use Direct mode */

                offset_vaddr =
                        (uint8_t *)dm_vaddr - ROC_SE_OFF_CTRL_LEN - iv_len;

                /* DPTR */
                inst->dptr = (uint64_t)offset_vaddr;

                /* RPTR should just exclude offset control word */
                inst->rptr = (uint64_t)dm_vaddr - iv_len;

                cpt_inst_w4.s.dlen = inputlen + ROC_SE_OFF_CTRL_LEN;

                if (likely(iv_len)) {
                        uint64_t *dest = (uint64_t *)((uint8_t *)offset_vaddr +
                                                      ROC_SE_OFF_CTRL_LEN);
                        uint64_t *src = fc_params->iv_buf;
                        dest[0] = src[0];
                        dest[1] = src[1];
                }

        } else {
                void *m_vaddr = fc_params->meta_buf.vaddr;
                uint32_t i, g_size_bytes, s_size_bytes;
                struct roc_se_sglist_comp *gather_comp;
                struct roc_se_sglist_comp *scatter_comp;
                uint8_t *in_buffer;

                /* This falls under strict SG mode */
                offset_vaddr = m_vaddr;
                size = ROC_SE_OFF_CTRL_LEN + iv_len;

                m_vaddr = (uint8_t *)m_vaddr + size;

                cpt_inst_w4.s.opcode_major |= (uint64_t)ROC_SE_DMA_MODE;

                if (likely(iv_len)) {
                        uint64_t *dest = (uint64_t *)((uint8_t *)offset_vaddr +
                                                      ROC_SE_OFF_CTRL_LEN);
                        uint64_t *src = fc_params->iv_buf;
                        dest[0] = src[0];
                        dest[1] = src[1];
                }

                /* DPTR has SG list */
                in_buffer = m_vaddr;

                ((uint16_t *)in_buffer)[0] = 0;
                ((uint16_t *)in_buffer)[1] = 0;

                /* TODO Add error check if space will be sufficient */
                gather_comp =
                        (struct roc_se_sglist_comp *)((uint8_t *)m_vaddr + 8);

                /*
                 * Input Gather List
                 */

                i = 0;

                /* Offset control word that includes iv */
                i = fill_sg_comp(gather_comp, i, (uint64_t)offset_vaddr,
                                 ROC_SE_OFF_CTRL_LEN + iv_len);

                /* Add input data */
                size = inputlen - iv_len;
                if (likely(size)) {
                        uint32_t aad_offset = aad_len ? passthrough_len : 0;

                        if (unlikely(flags & ROC_SE_SINGLE_BUF_INPLACE)) {
                                i = fill_sg_comp_from_buf_min(
                                        gather_comp, i, fc_params->bufs, &size);
                        } else {
                                i = fill_sg_comp_from_iov(
                                        gather_comp, i, fc_params->src_iov, 0,
                                        &size, aad_buf, aad_offset);
                        }

                        if (unlikely(size)) {
                                plt_dp_err("Insufficient buffer space,"
                                           " size %d needed",
                                           size);
                                return -1;
                        }
                }
                ((uint16_t *)in_buffer)[2] = rte_cpu_to_be_16(i);
                g_size_bytes =
                        ((i + 3) / 4) * sizeof(struct roc_se_sglist_comp);

                /*
                 * Output Scatter list
                 */
                i = 0;
                scatter_comp =
                        (struct roc_se_sglist_comp *)((uint8_t *)gather_comp +
                                                      g_size_bytes);

                /* Add IV */
                if (likely(iv_len)) {
                        i = fill_sg_comp(scatter_comp, i,
                                         (uint64_t)offset_vaddr +
                                                 ROC_SE_OFF_CTRL_LEN,
                                         iv_len);
                }

                /* Output data, or output data + digest */
                if (unlikely(flags & ROC_SE_VALID_MAC_BUF)) {
                        size = outputlen - iv_len - mac_len;
                        if (size) {
                                uint32_t aad_offset =
                                        aad_len ? passthrough_len : 0;

                                if (unlikely(flags &
                                             ROC_SE_SINGLE_BUF_INPLACE)) {
                                        i = fill_sg_comp_from_buf_min(
                                                scatter_comp, i,
                                                fc_params->bufs, &size);
                                } else {
                                        i = fill_sg_comp_from_iov(
                                                scatter_comp, i,
                                                fc_params->dst_iov, 0, &size,
                                                aad_buf, aad_offset);
                                }
                                if (unlikely(size)) {
                                        plt_dp_err("Insufficient buffer"
                                                   " space, size %d needed",
                                                   size);
                                        return -1;
                                }
                        }
                        /* mac_data */
                        if (mac_len) {
                                i = fill_sg_comp_from_buf(scatter_comp, i,
                                                          &fc_params->mac_buf);
                        }
                } else {
                        /* Output including mac */
                        size = outputlen - iv_len;
                        if (likely(size)) {
                                uint32_t aad_offset =
                                        aad_len ? passthrough_len : 0;

                                if (unlikely(flags &
                                             ROC_SE_SINGLE_BUF_INPLACE)) {
                                        i = fill_sg_comp_from_buf_min(
                                                scatter_comp, i,
                                                fc_params->bufs, &size);
                                } else {
                                        i = fill_sg_comp_from_iov(
                                                scatter_comp, i,
                                                fc_params->dst_iov, 0, &size,
                                                aad_buf, aad_offset);
                                }
                                if (unlikely(size)) {
                                        plt_dp_err("Insufficient buffer"
                                                   " space, size %d needed",
                                                   size);
                                        return -1;
                                }
                        }
                }
                ((uint16_t *)in_buffer)[3] = rte_cpu_to_be_16(i);
                s_size_bytes =
                        ((i + 3) / 4) * sizeof(struct roc_se_sglist_comp);

                size = g_size_bytes + s_size_bytes + ROC_SE_SG_LIST_HDR_SIZE;

                /* This is DPTR len in case of SG mode */
                cpt_inst_w4.s.dlen = size;

                inst->dptr = (uint64_t)in_buffer;
        }

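        /* encr_offset must fit in 16 bits and iv_offset/auth_offset in 8 bits
         * each, so that they can be packed into the offset control word below.
         */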
        if (unlikely((encr_offset >> 16) || (iv_offset >> 8) ||
                     (auth_offset >> 8))) {
                plt_dp_err("Offset not supported");
                plt_dp_err("enc_offset: %d", encr_offset);
                plt_dp_err("iv_offset : %d", iv_offset);
                plt_dp_err("auth_offset: %d", auth_offset);
                return -1;
        }

        *(uint64_t *)offset_vaddr = rte_cpu_to_be_64(
                ((uint64_t)encr_offset << 16) | ((uint64_t)iv_offset << 8) |
                ((uint64_t)auth_offset));

        inst->w4.u64 = cpt_inst_w4.u64;
        return 0;
}

static __rte_always_inline int
cpt_dec_hmac_prep(uint32_t flags, uint64_t d_offs, uint64_t d_lens,
                  struct roc_se_fc_params *fc_params, struct cpt_inst_s *inst)
{
        uint32_t iv_offset = 0, size;
        int32_t inputlen, outputlen, enc_dlen, auth_dlen;
        struct roc_se_ctx *se_ctx;
        int32_t hash_type, mac_len;
        uint8_t iv_len = 16;
        struct roc_se_buf_ptr *aad_buf = NULL;
        uint32_t encr_offset, auth_offset;
        uint32_t encr_data_len, auth_data_len, aad_len = 0;
        uint32_t passthrough_len = 0;
        union cpt_inst_w4 cpt_inst_w4;
        void *offset_vaddr;
        uint8_t op_minor;

        encr_offset = ROC_SE_ENCR_OFFSET(d_offs);
        auth_offset = ROC_SE_AUTH_OFFSET(d_offs);
        encr_data_len = ROC_SE_ENCR_DLEN(d_lens);
        auth_data_len = ROC_SE_AUTH_DLEN(d_lens);

        if (unlikely(flags & ROC_SE_VALID_AAD_BUF)) {
                /* AAD and separate auth data are not supported together */
                auth_data_len = 0;
                auth_offset = 0;
                aad_len = fc_params->aad_buf.size;
                aad_buf = &fc_params->aad_buf;
        }

        se_ctx = fc_params->ctx_buf.vaddr;
        hash_type = se_ctx->hash_type;
        mac_len = se_ctx->mac_len;
        op_minor = se_ctx->template_w4.s.opcode_minor;

        if (unlikely(!(flags & ROC_SE_VALID_IV_BUF))) {
                iv_len = 0;
                iv_offset = ROC_SE_ENCR_IV_OFFSET(d_offs);
        }

        if (unlikely(flags & ROC_SE_VALID_AAD_BUF)) {
                /*
                 * When AAD is given, data above encr_offset is passed through.
                 * Since AAD is provided as a separate pointer rather than as
                 * an offset, this is a special case: the input data is
                 * fragmented into passthrough + encr_data and the AAD is
                 * inserted in between.
                 */
                if (hash_type != ROC_SE_GMAC_TYPE) {
                        passthrough_len = encr_offset;
                        auth_offset = passthrough_len + iv_len;
                        encr_offset = passthrough_len + aad_len + iv_len;
                        auth_data_len = aad_len + encr_data_len;
                } else {
                        passthrough_len = 16 + aad_len;
                        auth_offset = passthrough_len + iv_len;
                        auth_data_len = aad_len;
                }
        } else {
                encr_offset += iv_len;
                auth_offset += iv_len;
        }

        /* Decryption */
        cpt_inst_w4.s.opcode_major = ROC_SE_MAJOR_OP_FC;
        cpt_inst_w4.s.opcode_minor = ROC_SE_FC_MINOR_OP_DECRYPT;
        cpt_inst_w4.s.opcode_minor |= (uint64_t)op_minor;

        if (hash_type == ROC_SE_GMAC_TYPE) {
                encr_offset = 0;
                encr_data_len = 0;
        }

        enc_dlen = encr_offset + encr_data_len;
        auth_dlen = auth_offset + auth_data_len;

        if (auth_dlen > enc_dlen) {
                inputlen = auth_dlen + mac_len;
                outputlen = auth_dlen;
        } else {
                inputlen = enc_dlen + mac_len;
                outputlen = enc_dlen;
        }

        if (op_minor & ROC_SE_FC_MINOR_OP_HMAC_FIRST)
                outputlen = inputlen = enc_dlen;

        cpt_inst_w4.s.param1 = encr_data_len;
        cpt_inst_w4.s.param2 = auth_data_len;

        /*
         * On cn9k and cn10k, the IV and the offset control word cannot be
         * part of the instruction and must be placed in the data buffer.
         * Use direct mode only when the buffer has headroom for them;
         * otherwise fall back to SG mode.
         */
        if (likely((flags & ROC_SE_SINGLE_BUF_INPLACE) &&
                   (flags & ROC_SE_SINGLE_BUF_HEADROOM))) {
                void *dm_vaddr = fc_params->bufs[0].vaddr;

                /* Use Direct mode */

                offset_vaddr =
                        (uint8_t *)dm_vaddr - ROC_SE_OFF_CTRL_LEN - iv_len;
                inst->dptr = (uint64_t)offset_vaddr;

                /* RPTR should just exclude offset control word */
                inst->rptr = (uint64_t)dm_vaddr - iv_len;

                cpt_inst_w4.s.dlen = inputlen + ROC_SE_OFF_CTRL_LEN;

                if (likely(iv_len)) {
                        uint64_t *dest = (uint64_t *)((uint8_t *)offset_vaddr +
                                                      ROC_SE_OFF_CTRL_LEN);
                        uint64_t *src = fc_params->iv_buf;
                        dest[0] = src[0];
                        dest[1] = src[1];
                }

        } else {
                void *m_vaddr = fc_params->meta_buf.vaddr;
                uint32_t g_size_bytes, s_size_bytes;
                struct roc_se_sglist_comp *gather_comp;
                struct roc_se_sglist_comp *scatter_comp;
                uint8_t *in_buffer;
                uint8_t i = 0;

                /* This falls under strict SG mode */
                offset_vaddr = m_vaddr;
                size = ROC_SE_OFF_CTRL_LEN + iv_len;

                m_vaddr = (uint8_t *)m_vaddr + size;

                cpt_inst_w4.s.opcode_major |= (uint64_t)ROC_SE_DMA_MODE;

                if (likely(iv_len)) {
                        uint64_t *dest = (uint64_t *)((uint8_t *)offset_vaddr +
                                                      ROC_SE_OFF_CTRL_LEN);
                        uint64_t *src = fc_params->iv_buf;
                        dest[0] = src[0];
                        dest[1] = src[1];
                }

                /* DPTR has SG list */
                in_buffer = m_vaddr;

                ((uint16_t *)in_buffer)[0] = 0;
                ((uint16_t *)in_buffer)[1] = 0;

                /* TODO Add error check if space will be sufficient */
                gather_comp =
                        (struct roc_se_sglist_comp *)((uint8_t *)m_vaddr + 8);

                /*
                 * Input Gather List
                 */
                i = 0;

                /* Offset control word that includes iv */
                i = fill_sg_comp(gather_comp, i, (uint64_t)offset_vaddr,
                                 ROC_SE_OFF_CTRL_LEN + iv_len);

                /* Add input data */
                if (flags & ROC_SE_VALID_MAC_BUF) {
                        size = inputlen - iv_len - mac_len;
                        if (size) {
                                /* input data only */
                                if (unlikely(flags &
                                             ROC_SE_SINGLE_BUF_INPLACE)) {
                                        i = fill_sg_comp_from_buf_min(
                                                gather_comp, i, fc_params->bufs,
                                                &size);
                                } else {
                                        uint32_t aad_offset =
                                                aad_len ? passthrough_len : 0;

                                        i = fill_sg_comp_from_iov(
                                                gather_comp, i,
                                                fc_params->src_iov, 0, &size,
                                                aad_buf, aad_offset);
                                }
                                if (unlikely(size)) {
                                        plt_dp_err("Insufficient buffer"
                                                   " space, size %d needed",
                                                   size);
                                        return -1;
                                }
                        }

                        /* mac data */
                        if (mac_len) {
                                i = fill_sg_comp_from_buf(gather_comp, i,
                                                          &fc_params->mac_buf);
                        }
                } else {
                        /* input data + mac */
                        size = inputlen - iv_len;
                        if (size) {
                                if (unlikely(flags &
                                             ROC_SE_SINGLE_BUF_INPLACE)) {
                                        i = fill_sg_comp_from_buf_min(
                                                gather_comp, i, fc_params->bufs,
                                                &size);
                                } else {
                                        uint32_t aad_offset =
                                                aad_len ? passthrough_len : 0;

                                        if (unlikely(!fc_params->src_iov)) {
                                                plt_dp_err("Bad input args");
                                                return -1;
                                        }

                                        i = fill_sg_comp_from_iov(
                                                gather_comp, i,
                                                fc_params->src_iov, 0, &size,
                                                aad_buf, aad_offset);
                                }

                                if (unlikely(size)) {
                                        plt_dp_err("Insufficient buffer"
                                                   " space, size %d needed",
                                                   size);
                                        return -1;
                                }
                        }
                }
                ((uint16_t *)in_buffer)[2] = rte_cpu_to_be_16(i);
                g_size_bytes =
                        ((i + 3) / 4) * sizeof(struct roc_se_sglist_comp);

                /*
                 * Output Scatter List
                 */

                i = 0;
                scatter_comp =
                        (struct roc_se_sglist_comp *)((uint8_t *)gather_comp +
                                                      g_size_bytes);

                /* Add iv */
                if (iv_len) {
                        i = fill_sg_comp(scatter_comp, i,
                                         (uint64_t)offset_vaddr +
                                                 ROC_SE_OFF_CTRL_LEN,
                                         iv_len);
                }

                /* Add output data */
                size = outputlen - iv_len;
                if (size) {
                        if (unlikely(flags & ROC_SE_SINGLE_BUF_INPLACE)) {
                                /* handle single buffer here */
                                i = fill_sg_comp_from_buf_min(scatter_comp, i,
                                                              fc_params->bufs,
                                                              &size);
                        } else {
                                uint32_t aad_offset =
                                        aad_len ? passthrough_len : 0;

                                if (unlikely(!fc_params->dst_iov)) {
                                        plt_dp_err("Bad input args");
                                        return -1;
                                }

                                i = fill_sg_comp_from_iov(
                                        scatter_comp, i, fc_params->dst_iov, 0,
                                        &size, aad_buf, aad_offset);
                        }

                        if (unlikely(size)) {
                                plt_dp_err("Insufficient buffer space,"
                                           " size %d needed",
                                           size);
                                return -1;
                        }
                }

                ((uint16_t *)in_buffer)[3] = rte_cpu_to_be_16(i);
                s_size_bytes =
                        ((i + 3) / 4) * sizeof(struct roc_se_sglist_comp);

                size = g_size_bytes + s_size_bytes + ROC_SE_SG_LIST_HDR_SIZE;

                /* This is DPTR len in case of SG mode */
                cpt_inst_w4.s.dlen = size;

                inst->dptr = (uint64_t)in_buffer;
        }

        if (unlikely((encr_offset >> 16) || (iv_offset >> 8) ||
                     (auth_offset >> 8))) {
                plt_dp_err("Offset not supported");
                plt_dp_err("enc_offset: %d", encr_offset);
                plt_dp_err("iv_offset : %d", iv_offset);
                plt_dp_err("auth_offset: %d", auth_offset);
                return -1;
        }

        *(uint64_t *)offset_vaddr = rte_cpu_to_be_64(
                ((uint64_t)encr_offset << 16) | ((uint64_t)iv_offset << 8) |
                ((uint64_t)auth_offset));

        inst->w4.u64 = cpt_inst_w4.u64;
        return 0;
}

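/*
 * ZUC/SNOW3G (PDCP) encrypt prep. zsk_flags == 0x1 selects the integrity
 * path (EIA3/UIA2), otherwise the cipher path (EEA3/UEA2). Offsets arrive
 * in bits and are converted to bytes; the lengths in the GP op header stay
 * in bits.
 */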
static __rte_always_inline int
cpt_zuc_snow3g_enc_prep(uint32_t req_flags, uint64_t d_offs, uint64_t d_lens,
                        struct roc_se_fc_params *params,
                        struct cpt_inst_s *inst)
{
        uint32_t size;
        int32_t inputlen, outputlen;
        struct roc_se_ctx *se_ctx;
        uint32_t mac_len = 0;
        uint8_t pdcp_alg_type, j;
        uint32_t encr_offset = 0, auth_offset = 0;
        uint32_t encr_data_len = 0, auth_data_len = 0;
        int flags, iv_len = 16;
        uint64_t offset_ctrl;
        uint64_t *offset_vaddr;
        uint32_t *iv_s, iv[4];
        union cpt_inst_w4 cpt_inst_w4;

        se_ctx = params->ctx_buf.vaddr;
        flags = se_ctx->zsk_flags;
        mac_len = se_ctx->mac_len;
        pdcp_alg_type = se_ctx->pdcp_alg_type;

        cpt_inst_w4.s.opcode_major = ROC_SE_MAJOR_OP_ZUC_SNOW3G;

        /* indicates CPTR ctx, operation type, KEY & IV mode from DPTR */

        cpt_inst_w4.s.opcode_minor = ((1 << 7) | (pdcp_alg_type << 5) |
                                      (0 << 4) | (0 << 3) | (flags & 0x7));

        if (flags == 0x1) {
                /*
                 * Microcode expects offsets in bytes.
                 * TODO: Rounding off
                 */
                auth_data_len = ROC_SE_AUTH_DLEN(d_lens);

                /* EIA3 or UIA2 */
                auth_offset = ROC_SE_AUTH_OFFSET(d_offs);
                auth_offset = auth_offset / 8;

                /* consider iv len */
                auth_offset += iv_len;

                inputlen = auth_offset + (RTE_ALIGN(auth_data_len, 8) / 8);
                outputlen = mac_len;

                offset_ctrl = rte_cpu_to_be_64((uint64_t)auth_offset);

        } else {
                /* EEA3 or UEA2 */
                /*
                 * Microcode expects offsets in bytes.
                 * TODO: Rounding off
                 */
                encr_data_len = ROC_SE_ENCR_DLEN(d_lens);

                encr_offset = ROC_SE_ENCR_OFFSET(d_offs);
                encr_offset = encr_offset / 8;
                /* consider iv len */
                encr_offset += iv_len;

                inputlen = encr_offset + (RTE_ALIGN(encr_data_len, 8) / 8);
                outputlen = inputlen;

                /* iv offset is 0 */
                offset_ctrl = rte_cpu_to_be_64((uint64_t)encr_offset << 16);
        }

        if (unlikely((encr_offset >> 16) || (auth_offset >> 8))) {
                plt_dp_err("Offset not supported");
                plt_dp_err("enc_offset: %d", encr_offset);
                plt_dp_err("auth_offset: %d", auth_offset);
                return -1;
        }

        /* IV */
        iv_s = (flags == 0x1) ? params->auth_iv_buf : params->iv_buf;

        if (pdcp_alg_type == ROC_SE_PDCP_ALG_TYPE_SNOW3G) {
                /*
                 * DPDK provides the IV as IV3 IV2 IV1 IV0 (big endian);
                 * the microcode expects it as IV0 IV1 IV2 IV3.
                 */

                for (j = 0; j < 4; j++)
                        iv[j] = iv_s[3 - j];
        } else {
                /* ZUC doesn't need a swap */
                for (j = 0; j < 4; j++)
                        iv[j] = iv_s[j];
        }

        /*
         * GP op header, lengths are expected in bits.
         */
        cpt_inst_w4.s.param1 = encr_data_len;
        cpt_inst_w4.s.param2 = auth_data_len;

        /*
         * On cn9k and cn10k, the IV and the offset control word cannot be
         * part of the instruction and must be placed in the data buffer.
         * Use direct mode only when the buffer has headroom for them;
         * otherwise fall back to SG mode.
         */
        if (likely((req_flags & ROC_SE_SINGLE_BUF_INPLACE) &&
                   (req_flags & ROC_SE_SINGLE_BUF_HEADROOM))) {
                void *dm_vaddr = params->bufs[0].vaddr;

                /* Use Direct mode */

                offset_vaddr = (uint64_t *)((uint8_t *)dm_vaddr -
                                            ROC_SE_OFF_CTRL_LEN - iv_len);

                /* DPTR */
                inst->dptr = (uint64_t)offset_vaddr;
                /* RPTR should just exclude offset control word */
                inst->rptr = (uint64_t)dm_vaddr - iv_len;

                cpt_inst_w4.s.dlen = inputlen + ROC_SE_OFF_CTRL_LEN;

                if (likely(iv_len)) {
                        uint32_t *iv_d = (uint32_t *)((uint8_t *)offset_vaddr +
                                                      ROC_SE_OFF_CTRL_LEN);
                        memcpy(iv_d, iv, 16);
                }

                *offset_vaddr = offset_ctrl;
        } else {
                void *m_vaddr = params->meta_buf.vaddr;
                uint32_t i, g_size_bytes, s_size_bytes;
                struct roc_se_sglist_comp *gather_comp;
                struct roc_se_sglist_comp *scatter_comp;
                uint8_t *in_buffer;
                uint32_t *iv_d;

                /* save space for iv */
                offset_vaddr = m_vaddr;

                m_vaddr = (uint8_t *)m_vaddr + ROC_SE_OFF_CTRL_LEN + iv_len;

                cpt_inst_w4.s.opcode_major |= (uint64_t)ROC_SE_DMA_MODE;

                /* DPTR has SG list */
                in_buffer = m_vaddr;

                ((uint16_t *)in_buffer)[0] = 0;
                ((uint16_t *)in_buffer)[1] = 0;

                /* TODO Add error check if space will be sufficient */
                gather_comp =
                        (struct roc_se_sglist_comp *)((uint8_t *)m_vaddr + 8);

                /*
                 * Input Gather List
                 */
                i = 0;

                /* Offset control word followed by iv */

                i = fill_sg_comp(gather_comp, i, (uint64_t)offset_vaddr,
                                 ROC_SE_OFF_CTRL_LEN + iv_len);

                /* iv offset is 0 */
                *offset_vaddr = offset_ctrl;

                iv_d = (uint32_t *)((uint8_t *)offset_vaddr +
                                    ROC_SE_OFF_CTRL_LEN);
                memcpy(iv_d, iv, 16);

                /* input data */
                size = inputlen - iv_len;
                if (size) {
                        i = fill_sg_comp_from_iov(gather_comp, i,
                                                  params->src_iov, 0, &size,
                                                  NULL, 0);
                        if (unlikely(size)) {
                                plt_dp_err("Insufficient buffer space,"
                                           " size %d needed",
                                           size);
                                return -1;
                        }
                }
                ((uint16_t *)in_buffer)[2] = rte_cpu_to_be_16(i);
                g_size_bytes =
                        ((i + 3) / 4) * sizeof(struct roc_se_sglist_comp);

                /*
                 * Output Scatter List
                 */

                i = 0;
                scatter_comp =
                        (struct roc_se_sglist_comp *)((uint8_t *)gather_comp +
                                                      g_size_bytes);

                if (flags == 0x1) {
                        /* IV is in the scatter list only for EEA3 & UEA2;
                         * skip it for the auth-only (EIA3/UIA2) case.
                         */
                        iv_len = 0;
                }

                if (iv_len) {
                        i = fill_sg_comp(scatter_comp, i,
                                         (uint64_t)offset_vaddr +
                                                 ROC_SE_OFF_CTRL_LEN,
                                         iv_len);
                }

                /* Add output data */
                if (req_flags & ROC_SE_VALID_MAC_BUF) {
                        size = outputlen - iv_len - mac_len;
                        if (size) {
                                i = fill_sg_comp_from_iov(scatter_comp, i,
                                                          params->dst_iov, 0,
                                                          &size, NULL, 0);

                                if (unlikely(size)) {
                                        plt_dp_err("Insufficient buffer space,"
                                                   " size %d needed",
                                                   size);
                                        return -1;
                                }
                        }

                        /* mac data */
                        if (mac_len) {
                                i = fill_sg_comp_from_buf(scatter_comp, i,
                                                          &params->mac_buf);
                        }
                } else {
                        /* Output including mac */
                        size = outputlen - iv_len;
                        if (size) {
                                i = fill_sg_comp_from_iov(scatter_comp, i,
                                                          params->dst_iov, 0,
                                                          &size, NULL, 0);

                                if (unlikely(size)) {
                                        plt_dp_err("Insufficient buffer space,"
                                                   " size %d needed",
                                                   size);
                                        return -1;
                                }
                        }
                }
                ((uint16_t *)in_buffer)[3] = rte_cpu_to_be_16(i);
                s_size_bytes =
                        ((i + 3) / 4) * sizeof(struct roc_se_sglist_comp);

                size = g_size_bytes + s_size_bytes + ROC_SE_SG_LIST_HDR_SIZE;

                /* This is DPTR len in case of SG mode */
                cpt_inst_w4.s.dlen = size;

                inst->dptr = (uint64_t)in_buffer;
        }

        inst->w4.u64 = cpt_inst_w4.u64;

        return 0;
}

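/*
 * ZUC/SNOW3G (PDCP) decrypt prep: only the cipher parameters apply here;
 * IV ordering follows the same rules as the encrypt path above.
 */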
static __rte_always_inline int
cpt_zuc_snow3g_dec_prep(uint32_t req_flags, uint64_t d_offs, uint64_t d_lens,
                        struct roc_se_fc_params *params,
                        struct cpt_inst_s *inst)
{
        uint32_t size;
        int32_t inputlen = 0, outputlen;
        struct roc_se_ctx *se_ctx;
        uint8_t pdcp_alg_type, iv_len = 16;
        uint32_t encr_offset;
        uint32_t encr_data_len;
        int flags;
        uint64_t *offset_vaddr;
        uint32_t *iv_s, iv[4], j;
        union cpt_inst_w4 cpt_inst_w4;

        /*
         * Microcode expects offsets in bytes.
         * TODO: Rounding off
         */
        encr_offset = ROC_SE_ENCR_OFFSET(d_offs) / 8;
        encr_data_len = ROC_SE_ENCR_DLEN(d_lens);

        se_ctx = params->ctx_buf.vaddr;
        flags = se_ctx->zsk_flags;
        pdcp_alg_type = se_ctx->pdcp_alg_type;

        cpt_inst_w4.u64 = 0;
        cpt_inst_w4.s.opcode_major = ROC_SE_MAJOR_OP_ZUC_SNOW3G;

        /* indicates CPTR ctx, operation type, KEY & IV mode from DPTR */

        cpt_inst_w4.s.opcode_minor = ((1 << 7) | (pdcp_alg_type << 5) |
                                      (0 << 4) | (0 << 3) | (flags & 0x7));

        /* consider iv len */
        encr_offset += iv_len;

        inputlen = encr_offset + (RTE_ALIGN(encr_data_len, 8) / 8);
        outputlen = inputlen;

        /* IV */
        iv_s = params->iv_buf;
        if (pdcp_alg_type == ROC_SE_PDCP_ALG_TYPE_SNOW3G) {
                /*
                 * DPDK provides the IV as IV3 IV2 IV1 IV0 (big endian);
                 * the microcode expects it as IV0 IV1 IV2 IV3.
                 */

                for (j = 0; j < 4; j++)
                        iv[j] = iv_s[3 - j];
        } else {
                /* ZUC doesn't need a swap */
                for (j = 0; j < 4; j++)
                        iv[j] = iv_s[j];
        }

        /*
         * GP op header, lengths are expected in bits.
         */
        cpt_inst_w4.s.param1 = encr_data_len;

        /*
         * On cn9k and cn10k, the IV and the offset control word cannot be
         * part of the instruction and must be placed in the data buffer.
         * Use direct mode only when the buffer has headroom for them;
         * otherwise fall back to SG mode.
         */
        if (likely((req_flags & ROC_SE_SINGLE_BUF_INPLACE) &&
                   (req_flags & ROC_SE_SINGLE_BUF_HEADROOM))) {
                void *dm_vaddr = params->bufs[0].vaddr;

                /* Use Direct mode */

                offset_vaddr = (uint64_t *)((uint8_t *)dm_vaddr -
                                            ROC_SE_OFF_CTRL_LEN - iv_len);

                /* DPTR */
                inst->dptr = (uint64_t)offset_vaddr;

                /* RPTR should just exclude offset control word */
                inst->rptr = (uint64_t)dm_vaddr - iv_len;

                cpt_inst_w4.s.dlen = inputlen + ROC_SE_OFF_CTRL_LEN;

                if (likely(iv_len)) {
                        uint32_t *iv_d = (uint32_t *)((uint8_t *)offset_vaddr +
                                                      ROC_SE_OFF_CTRL_LEN);
                        memcpy(iv_d, iv, 16);
                }

                /* iv offset is 0 */
                *offset_vaddr = rte_cpu_to_be_64((uint64_t)encr_offset << 16);
        } else {
                void *m_vaddr = params->meta_buf.vaddr;
                uint32_t i, g_size_bytes, s_size_bytes;
                struct roc_se_sglist_comp *gather_comp;
                struct roc_se_sglist_comp *scatter_comp;
                uint8_t *in_buffer;
                uint32_t *iv_d;

                /* save space for offset and iv... */
                offset_vaddr = m_vaddr;

                m_vaddr = (uint8_t *)m_vaddr + ROC_SE_OFF_CTRL_LEN + iv_len;

                cpt_inst_w4.s.opcode_major |= (uint64_t)ROC_SE_DMA_MODE;

                /* DPTR has SG list */
                in_buffer = m_vaddr;

                ((uint16_t *)in_buffer)[0] = 0;
                ((uint16_t *)in_buffer)[1] = 0;

                /* TODO Add error check if space will be sufficient */
                gather_comp =
                        (struct roc_se_sglist_comp *)((uint8_t *)m_vaddr + 8);

                /*
                 * Input Gather List
                 */
                i = 0;

                /* Offset control word */

                /* iv offset is 0 */
                *offset_vaddr = rte_cpu_to_be_64((uint64_t)encr_offset << 16);

                i = fill_sg_comp(gather_comp, i, (uint64_t)offset_vaddr,
                                 ROC_SE_OFF_CTRL_LEN + iv_len);

                iv_d = (uint32_t *)((uint8_t *)offset_vaddr +
                                    ROC_SE_OFF_CTRL_LEN);
                memcpy(iv_d, iv, 16);

                /* Add input data */
                size = inputlen - iv_len;
                if (size) {
                        i = fill_sg_comp_from_iov(gather_comp, i,
                                                  params->src_iov, 0, &size,
                                                  NULL, 0);
                        if (unlikely(size)) {
                                plt_dp_err("Insufficient buffer space,"
                                           " size %d needed",
                                           size);
                                return -1;
                        }
                }
                ((uint16_t *)in_buffer)[2] = rte_cpu_to_be_16(i);
                g_size_bytes =
                        ((i + 3) / 4) * sizeof(struct roc_se_sglist_comp);

                /*
                 * Output Scatter List
                 */

                i = 0;
                scatter_comp =
                        (struct roc_se_sglist_comp *)((uint8_t *)gather_comp +
                                                      g_size_bytes);

                /* IV */
                i = fill_sg_comp(scatter_comp, i,
                                 (uint64_t)offset_vaddr + ROC_SE_OFF_CTRL_LEN,
                                 iv_len);

                /* Add output data */
                size = outputlen - iv_len;
                if (size) {
                        i = fill_sg_comp_from_iov(scatter_comp, i,
                                                  params->dst_iov, 0, &size,
                                                  NULL, 0);

                        if (unlikely(size)) {
                                plt_dp_err("Insufficient buffer space,"
                                           " size %d needed",
                                           size);
                                return -1;
                        }
                }
                ((uint16_t *)in_buffer)[3] = rte_cpu_to_be_16(i);
                s_size_bytes =
                        ((i + 3) / 4) * sizeof(struct roc_se_sglist_comp);

                size = g_size_bytes + s_size_bytes + ROC_SE_SG_LIST_HDR_SIZE;

                /* This is DPTR len in case of SG mode */
                cpt_inst_w4.s.dlen = size;

                inst->dptr = (uint64_t)in_buffer;
        }

        if (unlikely((encr_offset >> 16))) {
                plt_dp_err("Offset not supported");
                plt_dp_err("enc_offset: %d", encr_offset);
                return -1;
        }

        inst->w4.u64 = cpt_inst_w4.u64;

        return 0;
}

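/*
 * KASUMI encrypt prep. zsk_flags == 0 selects F8 (cipher, IV taken from
 * iv_buf), otherwise F9 (auth, IV taken from auth_iv_buf); the direction
 * bit comes from the IV buffer. This path always uses SG (DMA) mode.
 */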
1284 static __rte_always_inline int
1285 cpt_kasumi_enc_prep(uint32_t req_flags, uint64_t d_offs, uint64_t d_lens,
1286                     struct roc_se_fc_params *params, struct cpt_inst_s *inst)
1287 {
1288         void *m_vaddr = params->meta_buf.vaddr;
1289         uint32_t size;
1290         int32_t inputlen = 0, outputlen = 0;
1291         struct roc_se_ctx *se_ctx;
1292         uint32_t mac_len = 0;
1293         uint8_t i = 0;
1294         uint32_t encr_offset, auth_offset;
1295         uint32_t encr_data_len, auth_data_len;
1296         int flags;
1297         uint8_t *iv_s, *iv_d, iv_len = 8;
1298         uint8_t dir = 0;
1299         uint64_t *offset_vaddr;
1300         union cpt_inst_w4 cpt_inst_w4;
1301         uint8_t *in_buffer;
1302         uint32_t g_size_bytes, s_size_bytes;
1303         struct roc_se_sglist_comp *gather_comp;
1304         struct roc_se_sglist_comp *scatter_comp;
1305
1306         encr_offset = ROC_SE_ENCR_OFFSET(d_offs) / 8;
1307         auth_offset = ROC_SE_AUTH_OFFSET(d_offs) / 8;
1308         encr_data_len = ROC_SE_ENCR_DLEN(d_lens);
1309         auth_data_len = ROC_SE_AUTH_DLEN(d_lens);
1310
1311         se_ctx = params->ctx_buf.vaddr;
1312         flags = se_ctx->zsk_flags;
1313         mac_len = se_ctx->mac_len;
1314
1315         if (flags == 0x0)
1316                 iv_s = params->iv_buf;
1317         else
1318                 iv_s = params->auth_iv_buf;
1319
1320         dir = iv_s[8] & 0x1;
1321
1322         cpt_inst_w4.s.opcode_major = ROC_SE_MAJOR_OP_KASUMI | ROC_SE_DMA_MODE;
1323
1324         /* indicates ECB/CBC, direction, ctx from cptr, iv from dptr */
1325         cpt_inst_w4.s.opcode_minor = ((1 << 6) | (se_ctx->k_ecb << 5) |
1326                                       (dir << 4) | (0 << 3) | (flags & 0x7));
1327
1328         /*
1329          * GP op header, lengths are expected in bits.
1330          */
1331         cpt_inst_w4.s.param1 = encr_data_len;
1332         cpt_inst_w4.s.param2 = auth_data_len;
1333
1334         /* consider iv len */
1335         if (flags == 0x0) {
1336                 encr_offset += iv_len;
1337                 auth_offset += iv_len;
1338         }
1339
1340         /* save space for offset ctrl and iv */
1341         offset_vaddr = m_vaddr;
1342
1343         m_vaddr = (uint8_t *)m_vaddr + ROC_SE_OFF_CTRL_LEN + iv_len;
1344
1345         /* DPTR has SG list */
1346         in_buffer = m_vaddr;
1347
1348         ((uint16_t *)in_buffer)[0] = 0;
1349         ((uint16_t *)in_buffer)[1] = 0;
1350
1351         /* TODO: Add error check to ensure sufficient space */
1352         gather_comp = (struct roc_se_sglist_comp *)((uint8_t *)m_vaddr + 8);
1353
1354         /*
1355          * Input Gather List
1356          */
1357         i = 0;
1358
1359         /* Offset control word followed by iv */
1360
1361         if (flags == 0x0) {
1362                 inputlen = encr_offset + (RTE_ALIGN(encr_data_len, 8) / 8);
1363                 outputlen = inputlen;
1364                 /* iv offset is 0 */
1365                 *offset_vaddr = rte_cpu_to_be_64((uint64_t)encr_offset << 16);
1366                 if (unlikely((encr_offset >> 16))) {
1367                         plt_dp_err("Offset not supported");
1368                         plt_dp_err("encr_offset: %u", encr_offset);
1369                         return -1;
1370                 }
1371         } else {
1372                 inputlen = auth_offset + (RTE_ALIGN(auth_data_len, 8) / 8);
1373                 outputlen = mac_len;
1374                 /* iv offset is 0 */
1375                 *offset_vaddr = rte_cpu_to_be_64((uint64_t)auth_offset);
1376                 if (unlikely((auth_offset >> 8))) {
1377                         plt_dp_err("Offset not supported");
1378                         plt_dp_err("auth_offset: %u", auth_offset);
1379                         return -1;
1380                 }
1381         }
1382
1383         i = fill_sg_comp(gather_comp, i, (uint64_t)offset_vaddr,
1384                          ROC_SE_OFF_CTRL_LEN + iv_len);
1385
1386         /* IV */
1387         iv_d = (uint8_t *)offset_vaddr + ROC_SE_OFF_CTRL_LEN;
1388         memcpy(iv_d, iv_s, iv_len);
1389
1390         /* input data */
1391         size = inputlen - iv_len;
1392         if (size) {
1393                 i = fill_sg_comp_from_iov(gather_comp, i, params->src_iov, 0,
1394                                           &size, NULL, 0);
1395
1396                 if (unlikely(size)) {
1397                         plt_dp_err("Insufficient buffer space,"
1398                                    " size %u needed",
1399                                    size);
1400                         return -1;
1401                 }
1402         }
1403         ((uint16_t *)in_buffer)[2] = rte_cpu_to_be_16(i);
1404         g_size_bytes = ((i + 3) / 4) * sizeof(struct roc_se_sglist_comp);
1405
1406         /*
1407          * Output Scatter List
1408          */
1409
1410         i = 0;
1411         scatter_comp = (struct roc_se_sglist_comp *)((uint8_t *)gather_comp +
1412                                                      g_size_bytes);
1413
1414         if (flags == 0x1) {
1415                 /* IV is placed in the scatter list only for F8 */
1416                 iv_len = 0;
1417         }
1418
1419         /* IV */
1420         if (iv_len) {
1421                 i = fill_sg_comp(scatter_comp, i,
1422                                  (uint64_t)offset_vaddr + ROC_SE_OFF_CTRL_LEN,
1423                                  iv_len);
1424         }
1425
1426         /* Add output data */
1427         if (req_flags & ROC_SE_VALID_MAC_BUF) {
1428                 size = outputlen - iv_len - mac_len;
1429                 if (size) {
1430                         i = fill_sg_comp_from_iov(scatter_comp, i,
1431                                                   params->dst_iov, 0, &size,
1432                                                   NULL, 0);
1433
1434                         if (unlikely(size)) {
1435                                 plt_dp_err("Insufficient buffer space,"
1436                                            " size %u needed",
1437                                            size);
1438                                 return -1;
1439                         }
1440                 }
1441
1442                 /* mac data */
1443                 if (mac_len) {
1444                         i = fill_sg_comp_from_buf(scatter_comp, i,
1445                                                   &params->mac_buf);
1446                 }
1447         } else {
1448                 /* Output including mac */
1449                 size = outputlen - iv_len;
1450                 if (size) {
1451                         i = fill_sg_comp_from_iov(scatter_comp, i,
1452                                                   params->dst_iov, 0, &size,
1453                                                   NULL, 0);
1454
1455                         if (unlikely(size)) {
1456                                 plt_dp_err("Insufficient buffer space,"
1457                                            " size %u needed",
1458                                            size);
1459                                 return -1;
1460                         }
1461                 }
1462         }
1463         ((uint16_t *)in_buffer)[3] = rte_cpu_to_be_16(i);
1464         s_size_bytes = ((i + 3) / 4) * sizeof(struct roc_se_sglist_comp);
1465
1466         size = g_size_bytes + s_size_bytes + ROC_SE_SG_LIST_HDR_SIZE;
1467
1468         /* This is DPTR len in case of SG mode */
1469         cpt_inst_w4.s.dlen = size;
1470
1471         inst->dptr = (uint64_t)in_buffer;
1472         inst->w4.u64 = cpt_inst_w4.u64;
1473
1474         return 0;
1475 }
1476
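     /*
      * Prepare a CPT instruction for KASUMI F8 decryption. Only the cipher
      * path is handled here; the offset control word and IV are staged in
      * the meta buffer and SG lists are built from src_iov/dst_iov.
      */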
1477 static __rte_always_inline int
1478 cpt_kasumi_dec_prep(uint64_t d_offs, uint64_t d_lens,
1479                     struct roc_se_fc_params *params, struct cpt_inst_s *inst)
1480 {
1481         void *m_vaddr = params->meta_buf.vaddr;
1482         uint32_t size;
1483         int32_t inputlen = 0, outputlen;
1484         struct roc_se_ctx *se_ctx;
1485         uint8_t i = 0, iv_len = 8;
1486         uint32_t encr_offset;
1487         uint32_t encr_data_len;
1488         int flags;
1489         uint8_t dir = 0;
1490         uint64_t *offset_vaddr;
1491         union cpt_inst_w4 cpt_inst_w4;
1492         uint8_t *in_buffer;
1493         uint32_t g_size_bytes, s_size_bytes;
1494         struct roc_se_sglist_comp *gather_comp;
1495         struct roc_se_sglist_comp *scatter_comp;
1496
1497         encr_offset = ROC_SE_ENCR_OFFSET(d_offs) / 8;
1498         encr_data_len = ROC_SE_ENCR_DLEN(d_lens);
1499
1500         se_ctx = params->ctx_buf.vaddr;
1501         flags = se_ctx->zsk_flags;
1502
1503         cpt_inst_w4.u64 = 0;
1504         cpt_inst_w4.s.opcode_major = ROC_SE_MAJOR_OP_KASUMI | ROC_SE_DMA_MODE;
1505
1506         /* indicates ECB/CBC, direction, ctx from cptr, iv from dptr */
1507         cpt_inst_w4.s.opcode_minor = ((1 << 6) | (se_ctx->k_ecb << 5) |
1508                                       (dir << 4) | (0 << 3) | (flags & 0x7));
1509
1510         /*
1511          * GP op header, lengths are expected in bits.
1512          */
1513         cpt_inst_w4.s.param1 = encr_data_len;
1514
1515         /* consider iv len */
1516         encr_offset += iv_len;
1517
1518         inputlen = iv_len + (RTE_ALIGN(encr_data_len, 8) / 8);
1519         outputlen = inputlen;
1520
1521         /* save space for offset ctrl & iv */
1522         offset_vaddr = m_vaddr;
1523
1524         m_vaddr = (uint8_t *)m_vaddr + ROC_SE_OFF_CTRL_LEN + iv_len;
1525
1526         /* DPTR has SG list */
1527         in_buffer = m_vaddr;
1528
1529         ((uint16_t *)in_buffer)[0] = 0;
1530         ((uint16_t *)in_buffer)[1] = 0;
1531
1532         /* TODO: Add error check to ensure sufficient space */
1533         gather_comp = (struct roc_se_sglist_comp *)((uint8_t *)m_vaddr + 8);
1534
1535         /*
1536          * Input Gather List
1537          */
1538         i = 0;
1539
1540         /* Offset control word followed by iv */
1541         *offset_vaddr = rte_cpu_to_be_64((uint64_t)encr_offset << 16);
1542         if (unlikely((encr_offset >> 16))) {
1543                 plt_dp_err("Offset not supported");
1544                 plt_dp_err("encr_offset: %u", encr_offset);
1545                 return -1;
1546         }
1547
1548         i = fill_sg_comp(gather_comp, i, (uint64_t)offset_vaddr,
1549                          ROC_SE_OFF_CTRL_LEN + iv_len);
1550
1551         /* IV */
1552         memcpy((uint8_t *)offset_vaddr + ROC_SE_OFF_CTRL_LEN, params->iv_buf,
1553                iv_len);
1554
1555         /* Add input data */
1556         size = inputlen - iv_len;
1557         if (size) {
1558                 i = fill_sg_comp_from_iov(gather_comp, i, params->src_iov, 0,
1559                                           &size, NULL, 0);
1560                 if (unlikely(size)) {
1561                         plt_dp_err("Insufficient buffer space,"
1562                                    " size %u needed",
1563                                    size);
1564                         return -1;
1565                 }
1566         }
1567         ((uint16_t *)in_buffer)[2] = rte_cpu_to_be_16(i);
1568         g_size_bytes = ((i + 3) / 4) * sizeof(struct roc_se_sglist_comp);
1569
1570         /*
1571          * Output Scatter List
1572          */
1573
1574         i = 0;
1575         scatter_comp = (struct roc_se_sglist_comp *)((uint8_t *)gather_comp +
1576                                                      g_size_bytes);
1577
1578         /* IV */
1579         i = fill_sg_comp(scatter_comp, i,
1580                          (uint64_t)offset_vaddr + ROC_SE_OFF_CTRL_LEN, iv_len);
1581
1582         /* Add output data */
1583         size = outputlen - iv_len;
1584         if (size) {
1585                 i = fill_sg_comp_from_iov(scatter_comp, i, params->dst_iov, 0,
1586                                           &size, NULL, 0);
1587                 if (unlikely(size)) {
1588                         plt_dp_err("Insufficient buffer space,"
1589                                    " size %u needed",
1590                                    size);
1591                         return -1;
1592                 }
1593         }
1594         ((uint16_t *)in_buffer)[3] = rte_cpu_to_be_16(i);
1595         s_size_bytes = ((i + 3) / 4) * sizeof(struct roc_se_sglist_comp);
1596
1597         size = g_size_bytes + s_size_bytes + ROC_SE_SG_LIST_HDR_SIZE;
1598
1599         /* This is DPTR len in case of SG mode */
1600         cpt_inst_w4.s.dlen = size;
1601
1602         inst->dptr = (uint64_t)in_buffer;
1603         inst->w4.u64 = cpt_inst_w4.u64;
1604
1605         return 0;
1606 }
1607
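     /*
      * Dispatch decrypt-side request preparation based on the fc_type of
      * the session context: generic flexi-crypto, PDCP (ZUC/SNOW3G) or
      * KASUMI.
      */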
1608 static __rte_always_inline int
1609 cpt_fc_dec_hmac_prep(uint32_t flags, uint64_t d_offs, uint64_t d_lens,
1610                      struct roc_se_fc_params *fc_params,
1611                      struct cpt_inst_s *inst)
1612 {
1613         struct roc_se_ctx *ctx = fc_params->ctx_buf.vaddr;
1614         uint8_t fc_type;
1615         int ret = -1;
1616
1617         fc_type = ctx->fc_type;
1618
1619         if (likely(fc_type == ROC_SE_FC_GEN)) {
1620                 ret = cpt_dec_hmac_prep(flags, d_offs, d_lens, fc_params, inst);
1621         } else if (fc_type == ROC_SE_PDCP) {
1622                 ret = cpt_zuc_snow3g_dec_prep(flags, d_offs, d_lens, fc_params,
1623                                               inst);
1624         } else if (fc_type == ROC_SE_KASUMI) {
1625                 ret = cpt_kasumi_dec_prep(d_offs, d_lens, fc_params, inst);
1626         }
1627         return ret;
1628 }
1629
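     /*
      * Dispatch encrypt-side request preparation based on the fc_type of
      * the session context: generic flexi-crypto, PDCP (ZUC/SNOW3G) or
      * KASUMI.
      */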
1630 static __rte_always_inline int
1631 cpt_fc_enc_hmac_prep(uint32_t flags, uint64_t d_offs, uint64_t d_lens,
1632                      struct roc_se_fc_params *fc_params,
1633                      struct cpt_inst_s *inst)
1634 {
1635         struct roc_se_ctx *ctx = fc_params->ctx_buf.vaddr;
1636         uint8_t fc_type;
1637         int ret = -1;
1638
1639         fc_type = ctx->fc_type;
1640
1641         if (likely(fc_type == ROC_SE_FC_GEN)) {
1642                 ret = cpt_enc_hmac_prep(flags, d_offs, d_lens, fc_params, inst);
1643         } else if (fc_type == ROC_SE_PDCP) {
1644                 ret = cpt_zuc_snow3g_enc_prep(flags, d_offs, d_lens, fc_params,
1645                                               inst);
1646         } else if (fc_type == ROC_SE_KASUMI) {
1647                 ret = cpt_kasumi_enc_prep(flags, d_offs, d_lens, fc_params,
1648                                           inst);
1649         }
1650
1651         return ret;
1652 }
1653
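     /*
      * Populate session state from an AEAD transform (AES-GCM or
      * ChaCha20-Poly1305) and program the cipher and auth keys into the
      * SE context.
      */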
1654 static __rte_always_inline int
1655 fill_sess_aead(struct rte_crypto_sym_xform *xform, struct cnxk_se_sess *sess)
1656 {
1657         struct rte_crypto_aead_xform *aead_form;
1658         roc_se_cipher_type enc_type = 0; /* NULL Cipher type */
1659         roc_se_auth_type auth_type = 0;  /* NULL Auth type */
1660         uint32_t cipher_key_len = 0;
1661         uint8_t aes_gcm = 0;
1662         aead_form = &xform->aead;
1663
1664         if (aead_form->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
1665                 sess->cpt_op |= ROC_SE_OP_CIPHER_ENCRYPT;
1666                 sess->cpt_op |= ROC_SE_OP_AUTH_GENERATE;
1667         } else if (aead_form->op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
1668                 sess->cpt_op |= ROC_SE_OP_CIPHER_DECRYPT;
1669                 sess->cpt_op |= ROC_SE_OP_AUTH_VERIFY;
1670         } else {
1671                 plt_dp_err("Unknown aead operation");
1672                 return -1;
1673         }
1674         switch (aead_form->algo) {
1675         case RTE_CRYPTO_AEAD_AES_GCM:
1676                 enc_type = ROC_SE_AES_GCM;
1677                 cipher_key_len = 16;
1678                 aes_gcm = 1;
1679                 break;
1680         case RTE_CRYPTO_AEAD_AES_CCM:
1681                 plt_dp_err("Crypto: Unsupported cipher algo %u",
1682                            aead_form->algo);
1683                 return -1;
1684         case RTE_CRYPTO_AEAD_CHACHA20_POLY1305:
1685                 enc_type = ROC_SE_CHACHA20;
1686                 auth_type = ROC_SE_POLY1305;
1687                 cipher_key_len = 32;
1688                 sess->chacha_poly = 1;
1689                 break;
1690         default:
1691                 plt_dp_err("Crypto: Undefined cipher algo %u specified",
1692                            aead_form->algo);
1693                 return -1;
1694         }
1695         if (aead_form->key.length < cipher_key_len) {
1696                 plt_dp_err("Invalid cipher params keylen %u",
1697                            aead_form->key.length);
1698                 return -1;
1699         }
1700         sess->zsk_flag = 0;
1701         sess->aes_gcm = aes_gcm;
1702         sess->mac_len = aead_form->digest_length;
1703         sess->iv_offset = aead_form->iv.offset;
1704         sess->iv_length = aead_form->iv.length;
1705         sess->aad_length = aead_form->aad_length;
1706
1707         if (unlikely(roc_se_ciph_key_set(&sess->roc_se_ctx, enc_type,
1708                                          aead_form->key.data,
1709                                          aead_form->key.length, NULL)))
1710                 return -1;
1711
1712         if (unlikely(roc_se_auth_key_set(&sess->roc_se_ctx, auth_type, NULL, 0,
1713                                          aead_form->digest_length)))
1714                 return -1;
1715
1716         return 0;
1717 }
1718
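     /*
      * Populate session state from a cipher-only transform: map the DPDK
      * cipher algorithm to the SE cipher type, validate the minimum key
      * length and program the cipher key into the SE context.
      */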
1719 static __rte_always_inline int
1720 fill_sess_cipher(struct rte_crypto_sym_xform *xform, struct cnxk_se_sess *sess)
1721 {
1722         struct rte_crypto_cipher_xform *c_form;
1723         roc_se_cipher_type enc_type = 0; /* NULL Cipher type */
1724         uint32_t cipher_key_len = 0;
1725         uint8_t zsk_flag = 0, aes_ctr = 0, is_null = 0;
1726
1727         c_form = &xform->cipher;
1728
1729         if (c_form->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
1730                 sess->cpt_op |= ROC_SE_OP_CIPHER_ENCRYPT;
1731         else if (c_form->op == RTE_CRYPTO_CIPHER_OP_DECRYPT) {
1732                 sess->cpt_op |= ROC_SE_OP_CIPHER_DECRYPT;
1733                 if (xform->next != NULL &&
1734                     xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
1735                         /* Perform decryption followed by auth verify */
1736                         sess->roc_se_ctx.template_w4.s.opcode_minor =
1737                                 ROC_SE_FC_MINOR_OP_HMAC_FIRST;
1738                 }
1739         } else {
1740                 plt_dp_err("Unknown cipher operation");
1741                 return -1;
1742         }
1743
1744         switch (c_form->algo) {
1745         case RTE_CRYPTO_CIPHER_AES_CBC:
1746                 enc_type = ROC_SE_AES_CBC;
1747                 cipher_key_len = 16;
1748                 break;
1749         case RTE_CRYPTO_CIPHER_3DES_CBC:
1750                 enc_type = ROC_SE_DES3_CBC;
1751                 cipher_key_len = 24;
1752                 break;
1753         case RTE_CRYPTO_CIPHER_DES_CBC:
1754                 /* DES is implemented using 3DES in hardware */
1755                 enc_type = ROC_SE_DES3_CBC;
1756                 cipher_key_len = 8;
1757                 break;
1758         case RTE_CRYPTO_CIPHER_AES_CTR:
1759                 enc_type = ROC_SE_AES_CTR;
1760                 cipher_key_len = 16;
1761                 aes_ctr = 1;
1762                 break;
1763         case RTE_CRYPTO_CIPHER_NULL:
1764                 enc_type = 0;
1765                 is_null = 1;
1766                 break;
1767         case RTE_CRYPTO_CIPHER_KASUMI_F8:
1768                 enc_type = ROC_SE_KASUMI_F8_ECB;
1769                 cipher_key_len = 16;
1770                 zsk_flag = ROC_SE_K_F8;
1771                 break;
1772         case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
1773                 enc_type = ROC_SE_SNOW3G_UEA2;
1774                 cipher_key_len = 16;
1775                 zsk_flag = ROC_SE_ZS_EA;
1776                 break;
1777         case RTE_CRYPTO_CIPHER_ZUC_EEA3:
1778                 enc_type = ROC_SE_ZUC_EEA3;
1779                 cipher_key_len = 16;
1780                 zsk_flag = ROC_SE_ZS_EA;
1781                 break;
1782         case RTE_CRYPTO_CIPHER_AES_XTS:
1783                 enc_type = ROC_SE_AES_XTS;
1784                 cipher_key_len = 16;
1785                 break;
1786         case RTE_CRYPTO_CIPHER_3DES_ECB:
1787                 enc_type = ROC_SE_DES3_ECB;
1788                 cipher_key_len = 24;
1789                 break;
1790         case RTE_CRYPTO_CIPHER_AES_ECB:
1791                 enc_type = ROC_SE_AES_ECB;
1792                 cipher_key_len = 16;
1793                 break;
1794         case RTE_CRYPTO_CIPHER_3DES_CTR:
1795         case RTE_CRYPTO_CIPHER_AES_F8:
1796         case RTE_CRYPTO_CIPHER_ARC4:
1797                 plt_dp_err("Crypto: Unsupported cipher algo %u", c_form->algo);
1798                 return -1;
1799         default:
1800                 plt_dp_err("Crypto: Undefined cipher algo %u specified",
1801                            c_form->algo);
1802                 return -1;
1803         }
1804
1805         if (c_form->key.length < cipher_key_len) {
1806                 plt_dp_err("Invalid cipher params keylen %u",
1807                            c_form->key.length);
1808                 return -1;
1809         }
1810
1811         sess->zsk_flag = zsk_flag;
1812         sess->aes_gcm = 0;
1813         sess->aes_ctr = aes_ctr;
1814         sess->iv_offset = c_form->iv.offset;
1815         sess->iv_length = c_form->iv.length;
1816         sess->is_null = is_null;
1817
1818         if (unlikely(roc_se_ciph_key_set(&sess->roc_se_ctx, enc_type,
1819                                          c_form->key.data, c_form->key.length,
1820                                          NULL)))
1821                 return -1;
1822
1823         return 0;
1824 }
1825
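     /*
      * Populate session state from an auth transform: map the DPDK auth
      * algorithm to the SE auth type and program the auth key and digest
      * length into the SE context.
      */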
1826 static __rte_always_inline int
1827 fill_sess_auth(struct rte_crypto_sym_xform *xform, struct cnxk_se_sess *sess)
1828 {
1829         struct rte_crypto_auth_xform *a_form;
1830         roc_se_auth_type auth_type = 0; /* NULL Auth type */
1831         uint8_t zsk_flag = 0, aes_gcm = 0, is_null = 0;
1832
1833         if (xform->next != NULL &&
1834             xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
1835             xform->next->cipher.op == RTE_CRYPTO_CIPHER_OP_ENCRYPT) {
1836                 /* Perform auth followed by encryption */
1837                 sess->roc_se_ctx.template_w4.s.opcode_minor =
1838                         ROC_SE_FC_MINOR_OP_HMAC_FIRST;
1839         }
1840
1841         a_form = &xform->auth;
1842
1843         if (a_form->op == RTE_CRYPTO_AUTH_OP_VERIFY)
1844                 sess->cpt_op |= ROC_SE_OP_AUTH_VERIFY;
1845         else if (a_form->op == RTE_CRYPTO_AUTH_OP_GENERATE)
1846                 sess->cpt_op |= ROC_SE_OP_AUTH_GENERATE;
1847         else {
1848                 plt_dp_err("Unknown auth operation");
1849                 return -1;
1850         }
1851
1852         switch (a_form->algo) {
1853         case RTE_CRYPTO_AUTH_SHA1_HMAC:
1854                 /* Fall through */
1855         case RTE_CRYPTO_AUTH_SHA1:
1856                 auth_type = ROC_SE_SHA1_TYPE;
1857                 break;
1858         case RTE_CRYPTO_AUTH_SHA256_HMAC:
1859         case RTE_CRYPTO_AUTH_SHA256:
1860                 auth_type = ROC_SE_SHA2_SHA256;
1861                 break;
1862         case RTE_CRYPTO_AUTH_SHA512_HMAC:
1863         case RTE_CRYPTO_AUTH_SHA512:
1864                 auth_type = ROC_SE_SHA2_SHA512;
1865                 break;
1866         case RTE_CRYPTO_AUTH_AES_GMAC:
1867                 auth_type = ROC_SE_GMAC_TYPE;
1868                 aes_gcm = 1;
1869                 break;
1870         case RTE_CRYPTO_AUTH_SHA224_HMAC:
1871         case RTE_CRYPTO_AUTH_SHA224:
1872                 auth_type = ROC_SE_SHA2_SHA224;
1873                 break;
1874         case RTE_CRYPTO_AUTH_SHA384_HMAC:
1875         case RTE_CRYPTO_AUTH_SHA384:
1876                 auth_type = ROC_SE_SHA2_SHA384;
1877                 break;
1878         case RTE_CRYPTO_AUTH_MD5_HMAC:
1879         case RTE_CRYPTO_AUTH_MD5:
1880                 auth_type = ROC_SE_MD5_TYPE;
1881                 break;
1882         case RTE_CRYPTO_AUTH_KASUMI_F9:
1883                 auth_type = ROC_SE_KASUMI_F9_ECB;
1884                 /*
1885                  * Indicate that the direction bit needs to be extracted
1886                  * from the end of the source data.
1887                  */
1888                 zsk_flag = ROC_SE_K_F9;
1889                 break;
1890         case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
1891                 auth_type = ROC_SE_SNOW3G_UIA2;
1892                 zsk_flag = ROC_SE_ZS_IA;
1893                 break;
1894         case RTE_CRYPTO_AUTH_ZUC_EIA3:
1895                 auth_type = ROC_SE_ZUC_EIA3;
1896                 zsk_flag = ROC_SE_ZS_IA;
1897                 break;
1898         case RTE_CRYPTO_AUTH_NULL:
1899                 auth_type = 0;
1900                 is_null = 1;
1901                 break;
1902         case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
1903         case RTE_CRYPTO_AUTH_AES_CMAC:
1904         case RTE_CRYPTO_AUTH_AES_CBC_MAC:
1905                 plt_dp_err("Crypto: Unsupported hash algo %u", a_form->algo);
1906                 return -1;
1907         default:
1908                 plt_dp_err("Crypto: Undefined Hash algo %u specified",
1909                            a_form->algo);
1910                 return -1;
1911         }
1912
1913         sess->zsk_flag = zsk_flag;
1914         sess->aes_gcm = aes_gcm;
1915         sess->mac_len = a_form->digest_length;
1916         sess->is_null = is_null;
1917         if (zsk_flag) {
1918                 sess->auth_iv_offset = a_form->iv.offset;
1919                 sess->auth_iv_length = a_form->iv.length;
1920         }
1921         if (unlikely(roc_se_auth_key_set(&sess->roc_se_ctx, auth_type,
1922                                          a_form->key.data, a_form->key.length,
1923                                          a_form->digest_length)))
1924                 return -1;
1925
1926         return 0;
1927 }
1928
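     /*
      * Populate session state for AES-GMAC, which is handled as AES-GCM
      * with a GMAC auth type and no cipher payload.
      */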
1929 static __rte_always_inline int
1930 fill_sess_gmac(struct rte_crypto_sym_xform *xform, struct cnxk_se_sess *sess)
1931 {
1932         struct rte_crypto_auth_xform *a_form;
1933         roc_se_cipher_type enc_type = 0; /* NULL Cipher type */
1934         roc_se_auth_type auth_type = 0;  /* NULL Auth type */
1935
1936         a_form = &xform->auth;
1937
1938         if (a_form->op == RTE_CRYPTO_AUTH_OP_GENERATE)
1939                 sess->cpt_op |= ROC_SE_OP_ENCODE;
1940         else if (a_form->op == RTE_CRYPTO_AUTH_OP_VERIFY)
1941                 sess->cpt_op |= ROC_SE_OP_DECODE;
1942         else {
1943                 plt_dp_err("Unknown auth operation");
1944                 return -1;
1945         }
1946
1947         switch (a_form->algo) {
1948         case RTE_CRYPTO_AUTH_AES_GMAC:
1949                 enc_type = ROC_SE_AES_GCM;
1950                 auth_type = ROC_SE_GMAC_TYPE;
1951                 break;
1952         default:
1953                 plt_dp_err("Crypto: Undefined cipher algo %u specified",
1954                            a_form->algo);
1955                 return -1;
1956         }
1957
1958         sess->zsk_flag = 0;
1959         sess->aes_gcm = 0;
1960         sess->is_gmac = 1;
1961         sess->iv_offset = a_form->iv.offset;
1962         sess->iv_length = a_form->iv.length;
1963         sess->mac_len = a_form->digest_length;
1964
1965         if (unlikely(roc_se_ciph_key_set(&sess->roc_se_ctx, enc_type,
1966                                          a_form->key.data, a_form->key.length,
1967                                          NULL)))
1968                 return -1;
1969
1970         if (unlikely(roc_se_auth_key_set(&sess->roc_se_ctx, auth_type, NULL, 0,
1971                                          a_form->digest_length)))
1972                 return -1;
1973
1974         return 0;
1975 }
1976
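     /*
      * Allocate a meta buffer from the queue pair's meta pool and record
      * it in the inflight request so it can be released on completion.
      */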
1977 static __rte_always_inline void *
1978 alloc_op_meta(struct roc_se_buf_ptr *buf, int32_t len,
1979               struct rte_mempool *cpt_meta_pool,
1980               struct cpt_inflight_req *infl_req)
1981 {
1982         uint8_t *mdata;
1983
1984         if (unlikely(rte_mempool_get(cpt_meta_pool, (void **)&mdata) < 0))
1985                 return NULL;
1986
1987         buf->vaddr = mdata;
1988         buf->size = len;
1989
1990         infl_req->mdata = mdata;
1991         infl_req->op_flags |= CPT_OP_FLAGS_METABUF;
1992
1993         return mdata;
1994 }
1995
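     /*
      * Build an IOV list from an mbuf chain, skipping start_offset bytes
      * from the start of the chain. Returns non-zero if no data remains in
      * the segment that start_offset lands in.
      */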
1996 static __rte_always_inline uint32_t
1997 prepare_iov_from_pkt(struct rte_mbuf *pkt, struct roc_se_iov_ptr *iovec,
1998                      uint32_t start_offset)
1999 {
2000         uint16_t index = 0;
2001         void *seg_data = NULL;
2002         int32_t seg_size = 0;
2003
2004         if (!pkt) {
2005                 iovec->buf_cnt = 0;
2006                 return 0;
2007         }
2008
2009         if (!start_offset) {
2010                 seg_data = rte_pktmbuf_mtod(pkt, void *);
2011                 seg_size = pkt->data_len;
2012         } else {
2013                 while (start_offset >= pkt->data_len) {
2014                         start_offset -= pkt->data_len;
2015                         pkt = pkt->next;
2016                 }
2017
2018                 seg_data = rte_pktmbuf_mtod_offset(pkt, void *, start_offset);
2019                 seg_size = pkt->data_len - start_offset;
2020                 if (!seg_size)
2021                         return 1;
2022         }
2023
2024         /* first seg */
2025         iovec->bufs[index].vaddr = seg_data;
2026         iovec->bufs[index].size = seg_size;
2027         index++;
2028         pkt = pkt->next;
2029
2030         while (unlikely(pkt != NULL)) {
2031                 seg_data = rte_pktmbuf_mtod(pkt, void *);
2032                 seg_size = pkt->data_len;
2033                 if (!seg_size)
2034                         break;
2035
2036                 iovec->bufs[index].vaddr = seg_data;
2037                 iovec->bufs[index].size = seg_size;
2038
2039                 index++;
2040
2041                 pkt = pkt->next;
2042         }
2043
2044         iovec->buf_cnt = index;
2045         return 0;
2046 }
2047
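     /*
      * Prepare source buffers for in-place processing. A single-segment
      * mbuf with sufficient headroom is flagged for the fast in-place
      * path; otherwise an IOV list is built from the mbuf chain.
      */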
2048 static __rte_always_inline uint32_t
2049 prepare_iov_from_pkt_inplace(struct rte_mbuf *pkt,
2050                              struct roc_se_fc_params *param, uint32_t *flags)
2051 {
2052         uint16_t index = 0;
2053         void *seg_data = NULL;
2054         uint32_t seg_size = 0;
2055         struct roc_se_iov_ptr *iovec;
2056
2057         seg_data = rte_pktmbuf_mtod(pkt, void *);
2058         seg_size = pkt->data_len;
2059
2060         /* first seg */
2061         if (likely(!pkt->next)) {
2062                 uint32_t headroom;
2063
2064                 *flags |= ROC_SE_SINGLE_BUF_INPLACE;
2065                 headroom = rte_pktmbuf_headroom(pkt);
2066                 if (likely(headroom >= 24))
2067                         *flags |= ROC_SE_SINGLE_BUF_HEADROOM;
2068
2069                 param->bufs[0].vaddr = seg_data;
2070                 param->bufs[0].size = seg_size;
2071                 return 0;
2072         }
2073         iovec = param->src_iov;
2074         iovec->bufs[index].vaddr = seg_data;
2075         iovec->bufs[index].size = seg_size;
2076         index++;
2077         pkt = pkt->next;
2078
2079         while (unlikely(pkt != NULL)) {
2080                 seg_data = rte_pktmbuf_mtod(pkt, void *);
2081                 seg_size = pkt->data_len;
2082
2083                 if (!seg_size)
2084                         break;
2085
2086                 iovec->bufs[index].vaddr = seg_data;
2087                 iovec->bufs[index].size = seg_size;
2088
2089                 index++;
2090
2091                 pkt = pkt->next;
2092         }
2093
2094         iovec->buf_cnt = index;
2095         return 0;
2096 }
2097
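     /*
      * Translate a symmetric crypto op into flexi-crypto parameters (IV,
      * AAD, digest and source/destination SG lists) and prepare the CPT
      * instruction. A rough usage sketch from an enqueue path follows;
      * the queue pair and inflight request references are illustrative
      * and not defined in this header:
      *
      *      struct cpt_inst_s inst;
      *      int ret;
      *
      *      ret = fill_fc_params(op, sess, &qp->meta_info, infl_req, &inst);
      *      if (unlikely(ret))
      *              return ret;
      *
      * Meta memory is allocated here when SG mode is required and is
      * released on the error path via free_mdata_and_exit.
      */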
2098 static __rte_always_inline int
2099 fill_fc_params(struct rte_crypto_op *cop, struct cnxk_se_sess *sess,
2100                struct cpt_qp_meta_info *m_info,
2101                struct cpt_inflight_req *infl_req, struct cpt_inst_s *inst)
2102 {
2103         struct roc_se_ctx *ctx = &sess->roc_se_ctx;
2104         uint8_t op_minor = ctx->template_w4.s.opcode_minor;
2105         struct rte_crypto_sym_op *sym_op = cop->sym;
2106         void *mdata = NULL;
2107         uint32_t mc_hash_off;
2108         uint32_t flags = 0;
2109         uint64_t d_offs, d_lens;
2110         struct rte_mbuf *m_src, *m_dst;
2111         uint8_t cpt_op = sess->cpt_op;
2112 #ifdef CPT_ALWAYS_USE_SG_MODE
2113         uint8_t inplace = 0;
2114 #else
2115         uint8_t inplace = 1;
2116 #endif
2117         struct roc_se_fc_params fc_params;
2118         char src[SRC_IOV_SIZE];
2119         char dst[DST_IOV_SIZE];
2120         uint32_t iv_buf[4];
2121         int ret;
2122
2123         if (likely(sess->iv_length)) {
2124                 flags |= ROC_SE_VALID_IV_BUF;
2125                 fc_params.iv_buf = rte_crypto_op_ctod_offset(cop, uint8_t *,
2126                                                              sess->iv_offset);
2127                 if (sess->aes_ctr && unlikely(sess->iv_length != 16)) {
2128                         memcpy((uint8_t *)iv_buf,
2129                                rte_crypto_op_ctod_offset(cop, uint8_t *,
2130                                                          sess->iv_offset),
2131                                12);
2132                         iv_buf[3] = rte_cpu_to_be_32(0x1);
2133                         fc_params.iv_buf = iv_buf;
2134                 }
2135         }
2136
2137         if (sess->zsk_flag) {
2138                 fc_params.auth_iv_buf = rte_crypto_op_ctod_offset(
2139                         cop, uint8_t *, sess->auth_iv_offset);
2140                 if (sess->zsk_flag != ROC_SE_ZS_EA)
2141                         inplace = 0;
2142         }
2143         m_src = sym_op->m_src;
2144         m_dst = sym_op->m_dst;
2145
2146         if (sess->aes_gcm || sess->chacha_poly) {
2147                 uint8_t *salt;
2148                 uint8_t *aad_data;
2149                 uint16_t aad_len;
2150
2151                 d_offs = sym_op->aead.data.offset;
2152                 d_lens = sym_op->aead.data.length;
2153                 mc_hash_off =
2154                         sym_op->aead.data.offset + sym_op->aead.data.length;
2155
2156                 aad_data = sym_op->aead.aad.data;
2157                 aad_len = sess->aad_length;
2158                 if (likely((aad_data + aad_len) ==
2159                            rte_pktmbuf_mtod_offset(m_src, uint8_t *,
2160                                                    sym_op->aead.data.offset))) {
2161                         d_offs = (d_offs - aad_len) | (d_offs << 16);
2162                         d_lens = (d_lens + aad_len) | (d_lens << 32);
2163                 } else {
2164                         fc_params.aad_buf.vaddr = sym_op->aead.aad.data;
2165                         fc_params.aad_buf.size = aad_len;
2166                         flags |= ROC_SE_VALID_AAD_BUF;
2167                         inplace = 0;
2168                         d_offs = d_offs << 16;
2169                         d_lens = d_lens << 32;
2170                 }
2171
2172                 salt = fc_params.iv_buf;
2173                 if (unlikely(*(uint32_t *)salt != sess->salt)) {
2174                         cpt_fc_salt_update(&sess->roc_se_ctx, salt);
2175                         sess->salt = *(uint32_t *)salt;
2176                 }
2177                 fc_params.iv_buf = salt + 4;
2178                 if (likely(sess->mac_len)) {
2179                         struct rte_mbuf *m =
2180                                 (cpt_op & ROC_SE_OP_ENCODE) ? m_dst : m_src;
2181
2182                         if (!m)
2183                                 m = m_src;
2184
2185                         /* hmac immediately following data is best case */
2186                         if (unlikely(rte_pktmbuf_mtod(m, uint8_t *) +
2187                                              mc_hash_off !=
2188                                      (uint8_t *)sym_op->aead.digest.data)) {
2189                                 flags |= ROC_SE_VALID_MAC_BUF;
2190                                 fc_params.mac_buf.size = sess->mac_len;
2191                                 fc_params.mac_buf.vaddr =
2192                                         sym_op->aead.digest.data;
2193                                 inplace = 0;
2194                         }
2195                 }
2196         } else {
2197                 d_offs = sym_op->cipher.data.offset;
2198                 d_lens = sym_op->cipher.data.length;
2199                 mc_hash_off =
2200                         sym_op->cipher.data.offset + sym_op->cipher.data.length;
2201                 d_offs = (d_offs << 16) | sym_op->auth.data.offset;
2202                 d_lens = (d_lens << 32) | sym_op->auth.data.length;
2203
2204                 if (mc_hash_off <
2205                     (sym_op->auth.data.offset + sym_op->auth.data.length)) {
2206                         mc_hash_off = (sym_op->auth.data.offset +
2207                                        sym_op->auth.data.length);
2208                 }
2209                 /* for gmac, salt should be updated like in gcm */
2210                 if (unlikely(sess->is_gmac)) {
2211                         uint8_t *salt;
2212                         salt = fc_params.iv_buf;
2213                         if (unlikely(*(uint32_t *)salt != sess->salt)) {
2214                                 cpt_fc_salt_update(&sess->roc_se_ctx, salt);
2215                                 sess->salt = *(uint32_t *)salt;
2216                         }
2217                         fc_params.iv_buf = salt + 4;
2218                 }
2219                 if (likely(sess->mac_len)) {
2220                         struct rte_mbuf *m;
2221
2222                         m = (cpt_op & ROC_SE_OP_ENCODE) ? m_dst : m_src;
2223                         if (!m)
2224                                 m = m_src;
2225
2226                         /* hmac immediately following data is best case */
2227                         if (!(op_minor & ROC_SE_FC_MINOR_OP_HMAC_FIRST) &&
2228                             (unlikely(rte_pktmbuf_mtod(m, uint8_t *) +
2229                                               mc_hash_off !=
2230                                       (uint8_t *)sym_op->auth.digest.data))) {
2231                                 flags |= ROC_SE_VALID_MAC_BUF;
2232                                 fc_params.mac_buf.size = sess->mac_len;
2233                                 fc_params.mac_buf.vaddr =
2234                                         sym_op->auth.digest.data;
2235                                 inplace = 0;
2236                         }
2237                 }
2238         }
2239         fc_params.ctx_buf.vaddr = &sess->roc_se_ctx;
2240
2241         if (!(op_minor & ROC_SE_FC_MINOR_OP_HMAC_FIRST) &&
2242             unlikely(sess->is_null || sess->cpt_op == ROC_SE_OP_DECODE))
2243                 inplace = 0;
2244
2245         if (likely(!m_dst && inplace)) {
2246                 /* In-place case: a single buffer with no separate
2247                  * AAD or MAC buffer and no wireless (air interface)
2248                  * algorithm involved.
2249                  */
2250                 fc_params.dst_iov = fc_params.src_iov = (void *)src;
2251
2252                 if (unlikely(prepare_iov_from_pkt_inplace(m_src, &fc_params,
2253                                                           &flags))) {
2254                         plt_dp_err("Prepare inplace src iov failed");
2255                         ret = -EINVAL;
2256                         goto err_exit;
2257                 }
2258
2259         } else {
2260                 /* Out of place processing */
2261                 fc_params.src_iov = (void *)src;
2262                 fc_params.dst_iov = (void *)dst;
2263
2264                 /* Store SG I/O in the api for reuse */
2265                 if (prepare_iov_from_pkt(m_src, fc_params.src_iov, 0)) {
2266                         plt_dp_err("Prepare src iov failed");
2267                         ret = -EINVAL;
2268                         goto err_exit;
2269                 }
2270
2271                 if (unlikely(m_dst != NULL)) {
2272                         uint32_t pkt_len;
2273
2274                         /* Try to make as much room as the source has */
2275                         pkt_len = rte_pktmbuf_pkt_len(m_dst);
2276
2277                         if (unlikely(pkt_len < rte_pktmbuf_pkt_len(m_src))) {
2278                                 pkt_len = rte_pktmbuf_pkt_len(m_src) - pkt_len;
2279                                 if (!rte_pktmbuf_append(m_dst, pkt_len)) {
2280                                         plt_dp_err("Not enough space in "
2281                                                    "m_dst %p, need %u"
2282                                                    " more",
2283                                                    m_dst, pkt_len);
2284                                         ret = -EINVAL;
2285                                         goto err_exit;
2286                                 }
2287                         }
2288
2289                         if (prepare_iov_from_pkt(m_dst, fc_params.dst_iov, 0)) {
2290                                 plt_dp_err("Prepare dst iov failed for "
2291                                            "m_dst %p",
2292                                            m_dst);
2293                                 ret = -EINVAL;
2294                                 goto err_exit;
2295                         }
2296                 } else {
2297                         fc_params.dst_iov = (void *)src;
2298                 }
2299         }
2300
2301         if (unlikely(!((flags & ROC_SE_SINGLE_BUF_INPLACE) &&
2302                        (flags & ROC_SE_SINGLE_BUF_HEADROOM) &&
2303                        ((ctx->fc_type == ROC_SE_FC_GEN) ||
2304                         (ctx->fc_type == ROC_SE_PDCP))))) {
2305                 mdata = alloc_op_meta(&fc_params.meta_buf, m_info->mlen,
2306                                       m_info->pool, infl_req);
2307                 if (mdata == NULL) {
2308                         plt_dp_err("Error allocating meta buffer for request");
2309                         return -ENOMEM;
2310                 }
2311         }
2312
2313         /* Finally prepare the instruction */
2314         if (cpt_op & ROC_SE_OP_ENCODE)
2315                 ret = cpt_fc_enc_hmac_prep(flags, d_offs, d_lens, &fc_params,
2316                                            inst);
2317         else
2318                 ret = cpt_fc_dec_hmac_prep(flags, d_offs, d_lens, &fc_params,
2319                                            inst);
2320
2321         if (unlikely(ret)) {
2322                 plt_dp_err("Preparing request failed due to bad input arg");
2323                 goto free_mdata_and_exit;
2324         }
2325
2326         return 0;
2327
2328 free_mdata_and_exit:
2329         if (infl_req->op_flags & CPT_OP_FLAGS_METABUF)
2330                 rte_mempool_put(m_info->pool, infl_req->mdata);
2331 err_exit:
2332         return ret;
2333 }
2334
2335 #endif /*_CNXK_SE_H_ */