/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2016-2017 Intel Corporation
 *
 * RTE Cryptography Common Definitions
 */
#include <errno.h>
#include <string.h>

#include <rte_memory.h>
#include <rte_mempool.h>
#include <rte_common.h>

#include "rte_crypto_sym.h"
#include "rte_crypto_asym.h"
/** Crypto operation types */
enum rte_crypto_op_type {
	RTE_CRYPTO_OP_TYPE_UNDEFINED,
	/**< Undefined operation type */
	RTE_CRYPTO_OP_TYPE_SYMMETRIC,
	/**< Symmetric operation */
	RTE_CRYPTO_OP_TYPE_ASYMMETRIC
	/**< Asymmetric operation */
};
/** Status of crypto operation */
enum rte_crypto_op_status {
	RTE_CRYPTO_OP_STATUS_SUCCESS,
	/**< Operation completed successfully */
	RTE_CRYPTO_OP_STATUS_NOT_PROCESSED,
	/**< Operation has not yet been processed by a crypto device */
	RTE_CRYPTO_OP_STATUS_AUTH_FAILED,
	/**< Authentication verification failed */
	RTE_CRYPTO_OP_STATUS_INVALID_SESSION,
	/**<
	 * Symmetric operation failed due to invalid session arguments, or if
	 * in session-less mode, failed to allocate private operation material.
	 */
	RTE_CRYPTO_OP_STATUS_INVALID_ARGS,
	/**< Operation failed due to invalid arguments in request */
	RTE_CRYPTO_OP_STATUS_ERROR,
	/**< Error handling operation */
};
/**
 * Crypto operation session type. This is used to specify whether a crypto
 * operation has session structure attached for immutable parameters or if all
 * operation information is included in the operation data structure.
 */
enum rte_crypto_op_sess_type {
	RTE_CRYPTO_OP_WITH_SESSION,	/**< Session based crypto operation */
	RTE_CRYPTO_OP_SESSIONLESS,	/**< Session-less crypto operation */
	RTE_CRYPTO_OP_SECURITY_SESSION	/**< Security session crypto operation */
};
/* Auxiliary flags related to IPsec offload with RTE_SECURITY */

#define RTE_CRYPTO_OP_AUX_FLAGS_IPSEC_SOFT_EXPIRY (1 << 0)
/**< SA soft expiry limit has been reached */
74 * Cryptographic Operation.
76 * This structure contains data relating to performing cryptographic
77 * operations. This operation structure is used to contain any operation which
78 * is supported by the cryptodev API, PMDs should check the type parameter to
79 * verify that the operation is a support function of the device. Crypto
80 * operations are enqueued and dequeued in crypto PMDs using the
81 * rte_cryptodev_enqueue_burst() / rte_cryptodev_dequeue_burst() .
83 struct rte_crypto_op {
90 /**< operation type */
93 * operation status - this is reset to
94 * RTE_CRYPTO_OP_STATUS_NOT_PROCESSED on allocation
95 * from mempool and will be set to
96 * RTE_CRYPTO_OP_STATUS_SUCCESS after crypto operation
97 * is successfully processed by a crypto PMD
100 /**< operation session type */
102 /**< Operation specific auxiliary/additional flags.
103 * These flags carry additional information from the
104 * operation. Processing of the same is optional.
107 /**< Reserved bytes to fill 64 bits for
110 uint16_t private_data_offset;
111 /**< Offset to indicate start of private data (if any).
112 * The offset is counted from the start of the
113 * rte_crypto_op including IV.
114 * The private data may be used by the application
115 * to store information which should remain untouched
116 * in the library/driver
120 struct rte_mempool *mempool;
121 /**< crypto operation mempool which operation is allocated from */
123 rte_iova_t phys_addr;
124 /**< physical address of crypto operation */
128 struct rte_crypto_sym_op sym[0];
129 /**< Symmetric operation parameters */
131 struct rte_crypto_asym_op asym[0];
132 /**< Asymmetric operation parameters */
134 }; /**< operation specific parameters */
138 * Reset the fields of a crypto operation to their default values.
140 * @param op The crypto operation to be reset.
141 * @param type The crypto operation type.
144 __rte_crypto_op_reset(struct rte_crypto_op *op, enum rte_crypto_op_type type)
147 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
148 op->sess_type = RTE_CRYPTO_OP_SESSIONLESS;
151 case RTE_CRYPTO_OP_TYPE_SYMMETRIC:
152 __rte_crypto_sym_op_reset(op->sym);
154 case RTE_CRYPTO_OP_TYPE_ASYMMETRIC:
155 memset(op->asym, 0, sizeof(struct rte_crypto_asym_op));
157 case RTE_CRYPTO_OP_TYPE_UNDEFINED:
164 * Private data structure belonging to a crypto symmetric operation pool.
166 struct rte_crypto_op_pool_private {
167 enum rte_crypto_op_type type;
168 /**< Crypto op pool type operation. */
170 /**< Size of private area in each crypto operation. */
175 * Returns the size of private data allocated with each rte_crypto_op object by
178 * @param mempool rte_crypto_op mempool
180 * @return private data size
182 static inline uint16_t
183 __rte_crypto_op_get_priv_data_size(struct rte_mempool *mempool)
185 struct rte_crypto_op_pool_private *priv =
186 (struct rte_crypto_op_pool_private *) rte_mempool_get_priv(mempool);
188 return priv->priv_size;
/**
 * Creates a crypto operation pool
 *
 * @param	name		pool name
 * @param	type		crypto operation type, use
 *				RTE_CRYPTO_OP_TYPE_UNDEFINED for a pool which
 *				supports all operation types
 * @param	nb_elts		number of elements in pool
 * @param	cache_size	Number of elements to cache on lcore, see
 *				*rte_mempool_create* for further details about
 *				cache size
 * @param	priv_size	Size of private data to allocate with each
 *				operation
 * @param	socket_id	Socket to allocate memory on
 *
 * @return
 * - On success pointer to mempool
 * - On failure NULL
 */
extern struct rte_mempool *
rte_crypto_op_pool_create(const char *name, enum rte_crypto_op_type type,
		unsigned nb_elts, unsigned cache_size, uint16_t priv_size,
		int socket_id);
217 * Bulk allocate raw element from mempool and return as crypto operations
219 * @param mempool crypto operation mempool.
220 * @param type crypto operation type.
221 * @param ops Array to place allocated crypto operations
222 * @param nb_ops Number of crypto operations to allocate
225 * - On success returns number of ops allocated
228 __rte_crypto_op_raw_bulk_alloc(struct rte_mempool *mempool,
229 enum rte_crypto_op_type type,
230 struct rte_crypto_op **ops, uint16_t nb_ops)
232 struct rte_crypto_op_pool_private *priv;
234 priv = (struct rte_crypto_op_pool_private *) rte_mempool_get_priv(mempool);
235 if (unlikely(priv->type != type &&
236 priv->type != RTE_CRYPTO_OP_TYPE_UNDEFINED))
239 if (rte_mempool_get_bulk(mempool, (void **)ops, nb_ops) == 0)
246 * Allocate a crypto operation from a mempool with default parameters set
248 * @param mempool crypto operation mempool
249 * @param type operation type to allocate
252 * - On success returns a valid rte_crypto_op structure
253 * - On failure returns NULL
255 static inline struct rte_crypto_op *
256 rte_crypto_op_alloc(struct rte_mempool *mempool, enum rte_crypto_op_type type)
258 struct rte_crypto_op *op = NULL;
261 retval = __rte_crypto_op_raw_bulk_alloc(mempool, type, &op, 1);
262 if (unlikely(retval != 1))
265 __rte_crypto_op_reset(op, type);
272 * Bulk allocate crypto operations from a mempool with default parameters set
274 * @param mempool crypto operation mempool
275 * @param type operation type to allocate
276 * @param ops Array to place allocated crypto operations
277 * @param nb_ops Number of crypto operations to allocate
280 * - nb_ops if the number of operations requested were allocated.
281 * - 0 if the requested number of ops are not available.
282 * None are allocated in this case.
285 static inline unsigned
286 rte_crypto_op_bulk_alloc(struct rte_mempool *mempool,
287 enum rte_crypto_op_type type,
288 struct rte_crypto_op **ops, uint16_t nb_ops)
292 if (unlikely(__rte_crypto_op_raw_bulk_alloc(mempool, type, ops, nb_ops)
296 for (i = 0; i < nb_ops; i++)
297 __rte_crypto_op_reset(ops[i], type);
305 * Returns a pointer to the private data of a crypto operation if
306 * that operation has enough capacity for requested size.
308 * @param op crypto operation.
309 * @param size size of space requested in private data.
312 * - if sufficient space available returns pointer to start of private data
313 * - if insufficient space returns NULL
316 __rte_crypto_op_get_priv_data(struct rte_crypto_op *op, uint32_t size)
320 if (likely(op->mempool != NULL)) {
321 priv_size = __rte_crypto_op_get_priv_data_size(op->mempool);
323 if (likely(priv_size >= size)) {
324 if (op->type == RTE_CRYPTO_OP_TYPE_SYMMETRIC)
325 return (void *)((uint8_t *)(op + 1) +
326 sizeof(struct rte_crypto_sym_op));
327 if (op->type == RTE_CRYPTO_OP_TYPE_ASYMMETRIC)
328 return (void *)((uint8_t *)(op + 1) +
329 sizeof(struct rte_crypto_asym_op));
337 * free crypto operation structure
338 * If operation has been allocate from a rte_mempool, then the operation will
339 * be returned to the mempool.
341 * @param op symmetric crypto operation
344 rte_crypto_op_free(struct rte_crypto_op *op)
346 if (op != NULL && op->mempool != NULL)
347 rte_mempool_put(op->mempool, op);
351 * Allocate a symmetric crypto operation in the private data of an mbuf.
353 * @param m mbuf which is associated with the crypto operation, the
354 * operation will be allocated in the private data of that
358 * - On success returns a pointer to the crypto operation.
359 * - On failure returns NULL.
361 static inline struct rte_crypto_op *
362 rte_crypto_sym_op_alloc_from_mbuf_priv_data(struct rte_mbuf *m)
364 if (unlikely(m == NULL))
368 * check that the mbuf's private data size is sufficient to contain a
371 if (unlikely(m->priv_size < (sizeof(struct rte_crypto_op) +
372 sizeof(struct rte_crypto_sym_op))))
375 /* private data starts immediately after the mbuf header in the mbuf. */
376 struct rte_crypto_op *op = (struct rte_crypto_op *)(m + 1);
378 __rte_crypto_op_reset(op, RTE_CRYPTO_OP_TYPE_SYMMETRIC);
387 * Allocate space for symmetric crypto xforms in the private data space of the
388 * crypto operation. This also defaults the crypto xform type and configures
389 * the chaining of the xforms in the crypto operation
392 * - On success returns pointer to first crypto xform in crypto operations chain
393 * - On failure returns NULL
395 static inline struct rte_crypto_sym_xform *
396 rte_crypto_op_sym_xforms_alloc(struct rte_crypto_op *op, uint8_t nb_xforms)
401 if (unlikely(op->type != RTE_CRYPTO_OP_TYPE_SYMMETRIC))
404 size = sizeof(struct rte_crypto_sym_xform) * nb_xforms;
406 priv_data = __rte_crypto_op_get_priv_data(op, size);
407 if (priv_data == NULL)
410 return __rte_crypto_sym_op_sym_xforms_alloc(op->sym, priv_data,
416 * Attach a session to a crypto operation
418 * @param op crypto operation, must be of type symmetric
419 * @param sess cryptodev session
422 rte_crypto_op_attach_sym_session(struct rte_crypto_op *op,
423 struct rte_cryptodev_sym_session *sess)
425 if (unlikely(op->type != RTE_CRYPTO_OP_TYPE_SYMMETRIC))
428 op->sess_type = RTE_CRYPTO_OP_WITH_SESSION;
430 return __rte_crypto_sym_op_attach_sym_session(op->sym, sess);
434 * Attach a asymmetric session to a crypto operation
436 * @param op crypto operation, must be of type asymmetric
437 * @param sess cryptodev session
440 rte_crypto_op_attach_asym_session(struct rte_crypto_op *op,
441 struct rte_cryptodev_asym_session *sess)
443 if (unlikely(op->type != RTE_CRYPTO_OP_TYPE_ASYMMETRIC))
446 op->sess_type = RTE_CRYPTO_OP_WITH_SESSION;
447 op->asym->session = sess;
455 #endif /* _RTE_CRYPTO_H_ */