1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2018 Intel Corporation
10 * Contains crypto specific functions/structures/macros used internally
15 * AES-CTR counter block format.
/* NOTE(review): struct body elided in this extract (original lines 19-23
 * missing); the fill helper below writes a ->cnt member — confirm the full
 * member list against the complete source. */
18 struct aesctr_cnt_blk {
25 * CHACHA20-POLY1305 devices have some specific requirements
26 * for IV and AAD formats.
27 * Ideally this would be done by the driver itself.
/* NOTE(review): member list elided in this extract; the fill helper below
 * assigns ->salt, ->iv and ->cnt — confirm layout against the full source. */
30 struct aead_chacha20_poly1305_iv {
36 struct aead_chacha20_poly1305_aad {
/* NOTE(review): spi/sqn members elided in this extract; the AAD fill helper
 * below writes ->sqn.u32[0] — confirm full layout against the source. */
39 * RFC 4106, section 5:
40 * Two formats of the AAD are defined:
41 * one for 32-bit sequence numbers, and one for 64-bit ESN.
47 uint32_t align0; /* align to 16B boundary */
50 struct chacha20_poly1305_esph_iv {
/* ESP header placed first; remaining members (presumably the IV that
 * follows the header on the wire) are elided in this extract. */
51 struct rte_esp_hdr esph;
56 * AES-GCM devices have some specific requirements for IV and AAD formats.
57 * Ideally this would be done by the driver itself.
69 * RFC 4106, section 5:
70 * Two formats of the AAD are defined:
71 * one for 32-bit sequence numbers, and one for 64-bit ESN.
/* NOTE(review): the struct headers for the two members below (the AES-GCM
 * AAD struct and an ESP-header+IV struct) are elided in this extract —
 * they belong to two different structs in the full source; confirm. */
77 uint32_t align0; /* align to 16B boundary */
81 struct rte_esp_hdr esph;
86 * AES-CCM devices have some specific requirements for IV and AAD formats.
87 * Ideally this would be done by the driver itself.
105 struct aead_ccm_aad {
109 * RFC 4309, section 5:
110 * Two formats of the AAD are defined:
111 * one for 32-bit sequence numbers, and one for 64-bit ESN.
/* NOTE(review): spi/sqn members elided; the trailing esph member below
 * appears to belong to a separate ESP-header+IV struct whose header is
 * elided in this extract — confirm against the full source. */
117 uint32_t align0; /* align to 16B boundary */
121 struct rte_esp_hdr esph;
/*
 * Fill an AES-CTR counter block from the per-packet IV and the SA nonce.
 * NOTE(review): braces and the statements assigning the nonce and IV
 * fields (original lines 128-130) are elided in this extract.
 */
127 aes_ctr_cnt_blk_fill(struct aesctr_cnt_blk *ctr, uint64_t iv, uint32_t nonce)
131 ctr->cnt = rte_cpu_to_be_32(1); /* counter starts at 1, big-endian */
/*
 * Fill the CHACHA20-POLY1305 IV block: SA salt, per-packet IV, and the
 * initial block counter. NOTE(review): the continuation of the first
 * parameter line and the braces are elided in this extract.
 */
135 aead_chacha20_poly1305_iv_fill(struct aead_chacha20_poly1305_iv
137 uint64_t iv, uint32_t salt)
139 chacha20_poly1305->salt = salt;
140 chacha20_poly1305->iv = iv;
141 chacha20_poly1305->cnt = rte_cpu_to_be_32(1); /* counter starts at 1, big-endian */
/*
 * Fill the AES-GCM IV block. NOTE(review): the statements assigning the
 * salt and IV fields (original lines 146-148) are elided in this extract;
 * only the counter initialisation remains visible.
 */
145 aead_gcm_iv_fill(struct aead_gcm_iv *gcm, uint64_t iv, uint32_t salt)
149 gcm->cnt = rte_cpu_to_be_32(1); /* counter starts at 1, big-endian */
/*
 * Fill the AES-CCM IV block. The 32-bit salt is re-interpreted through
 * union aead_ccm_salt so the CCM flags byte and the three salt bytes can
 * be copied out individually. NOTE(review): the statement initialising
 * tsalt from the salt argument, and the ->iv assignment, are elided in
 * this extract (original lines 156-157, 162).
 */
153 aead_ccm_iv_fill(struct aead_ccm_iv *ccm, uint64_t iv, uint32_t salt)
155 union aead_ccm_salt tsalt;
158 ccm->ccm_flags = tsalt.inner.ccm_flags;
159 ccm->salt[0] = tsalt.inner.salt8[0];
160 ccm->salt[1] = tsalt.inner.salt8[1];
161 ccm->salt[2] = tsalt.inner.salt8[2];
163 ccm->cnt = rte_cpu_to_be_32(1); /* counter starts at 1, big-endian */
168 * RFC 4106, 5 AAD Construction
169 * spi and sqn should already be converted into network byte order.
170 * Make sure that unused bytes are zeroed.
/*
 * Build the AES-GCM AAD per RFC 4106 section 5; spi and sqn are already
 * network byte order (see the comment above). NOTE(review): the trailing
 * parameter (presumably the ESN flag selecting 32- vs 64-bit SQN format)
 * and most of the body are elided in this extract; only the non-ESN
 * low-32-bit SQN store is visible.
 */
173 aead_gcm_aad_fill(struct aead_gcm_aad *aad, rte_be32_t spi, rte_be64_t sqn,
180 aad->sqn.u32[0] = sqn_low32(sqn);
187 * RFC 4309, 5 AAD Construction
188 * spi and sqn should already be converted into network byte order.
189 * Make sure that unused bytes are zeroed.
/*
 * Build the AES-CCM AAD per RFC 4309 section 5; spi and sqn are already
 * network byte order. NOTE(review): mirrors aead_gcm_aad_fill() above —
 * the trailing parameter and most of the body are elided in this extract;
 * only the non-ESN low-32-bit SQN store is visible.
 */
192 aead_ccm_aad_fill(struct aead_ccm_aad *aad, rte_be32_t spi, rte_be64_t sqn,
199 aad->sqn.u32[0] = sqn_low32(sqn);
/*
 * NOTE(review): body elided in this extract. Presumably generates the
 * per-packet IV qwords from the (network byte order) sequence number —
 * confirm against the full source.
 */
206 gen_iv(uint64_t iv[IPSEC_MAX_IV_QWORD], rte_be64_t sqn)
214 * RFC 7634, 2.1 AAD Construction
215 * spi and sqn should already be converted into network byte order.
216 * Make sure that unused bytes are zeroed.
/*
 * Build the CHACHA20-POLY1305 AAD per RFC 7634 section 2.1; spi and sqn
 * are already network byte order. NOTE(review): the trailing parameter and
 * most of the body are elided in this extract; only the non-ESN
 * low-32-bit SQN store is visible.
 */
219 aead_chacha20_poly1305_aad_fill(struct aead_chacha20_poly1305_aad *aad,
220 rte_be32_t spi, rte_be64_t sqn,
227 aad->sqn.u32[0] = sqn_low32(sqn);
234 * Helper routine to copy IV
235 * Right now we support only algorithms with IV length equals 0/8/16 bytes.
/*
 * Copy an IV of length 0, 8 or 16 bytes (see comment above). The
 * compile-time check pins IPSEC_MAX_IV_SIZE to two uint64_t qwords.
 * NOTE(review): the switch statement's copy/fallthrough/break bodies
 * (original lines 242-256) are elided in this extract — presumably each
 * case copies one qword and falls through; confirm against the source.
 */
238 copy_iv(uint64_t dst[IPSEC_MAX_IV_QWORD],
239 const uint64_t src[IPSEC_MAX_IV_QWORD], uint32_t len)
241 RTE_BUILD_BUG_ON(IPSEC_MAX_IV_SIZE != 2 * sizeof(uint64_t));
244 case IPSEC_MAX_IV_SIZE:
247 case sizeof(uint64_t):
253 /* should never happen */
259 * from RFC 4303 3.3.2.1.4:
260 * If the ESN option is enabled for the SA, the high-order 32
261 * bits of the sequence number are appended after the Next Header field
262 * for purposes of this computation, but are not transmitted.
266 * Helper function that moves ICV by 4B below, and inserts SQN.hibits.
267 * icv parameter points to the new start of ICV.
/*
 * Shift the ICV one 32-bit word and insert SQN.hibits (see comment above:
 * RFC 4303 3.3.2.1.4 ESN ICV computation). The ICV length must be a
 * multiple of 4 bytes; it is converted from bytes to 32-bit words before
 * the shift. The shifting work happens in the for-loop's iteration
 * expression (icv[i] = icv[i - 1]); the loop body and the final store of
 * sqh are elided in this extract — confirm against the full source.
 */
270 insert_sqh(uint32_t sqh, void *picv, uint32_t icv_len)
275 RTE_ASSERT(icv_len % sizeof(uint32_t) == 0);
278 icv_len = icv_len / sizeof(uint32_t);
279 for (i = icv_len; i-- != 0; icv[i] = icv[i - 1])
286 * Helper function that moves ICV by 4B up, and removes SQN.hibits.
287 * icv parameter points to the new start of ICV.
/*
 * Inverse of insert_sqh(): shift the ICV back one 32-bit word, dropping
 * SQN.hibits (see comment above). The ICV length must be a multiple of 4
 * bytes and is converted to a word count before the loop. NOTE(review):
 * the loop body (presumably icv[i] = icv[i + 1]) is elided in this
 * extract — confirm against the full source.
 */
290 remove_sqh(void *picv, uint32_t icv_len)
294 RTE_ASSERT(icv_len % sizeof(uint32_t) == 0);
297 icv_len = icv_len / sizeof(uint32_t);
298 for (i = 0; i != icv_len; i++)
303 * setup crypto ops for LOOKASIDE_NONE (pure crypto) type of devices.
/*
 * Prepare a symmetric crypto op for a LOOKASIDE_NONE (pure crypto) device:
 * mark it symmetric, not-yet-processed, session-based, and attach the
 * session. NOTE(review): the statements initialising sop from cop and
 * linking the mbuf (original lines 308-315 gaps) are elided in this
 * extract — sop is read below but its assignment is not visible; confirm
 * against the full source.
 */
306 lksd_none_cop_prepare(struct rte_crypto_op *cop,
307 struct rte_cryptodev_sym_session *cs, struct rte_mbuf *mb)
309 struct rte_crypto_sym_op *sop;
312 cop->type = RTE_CRYPTO_OP_TYPE_SYMMETRIC;
313 cop->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
314 cop->sess_type = RTE_CRYPTO_OP_WITH_SESSION;
316 __rte_crypto_sym_op_attach_sym_session(sop, cs);
319 #endif /* _CRYPTO_H_ */