1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2015-2020
 */
5 #include <rte_ethdev_pci.h>
6 #include <rte_security_driver.h>
7 #include <rte_cryptodev.h>
9 #include "base/txgbe.h"
10 #include "txgbe_ethdev.h"
12 static const struct rte_security_capability *
13 txgbe_crypto_capabilities_get(void *device __rte_unused)
15 static const struct rte_cryptodev_capabilities
16 aes_gcm_gmac_crypto_capabilities[] = {
17 { /* AES GMAC (128-bit) */
18 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
20 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
22 .algo = RTE_CRYPTO_AUTH_AES_GMAC,
42 { /* AES GCM (128-bit) */
43 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
45 .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,
47 .algo = RTE_CRYPTO_AEAD_AES_GCM,
73 .op = RTE_CRYPTO_OP_TYPE_UNDEFINED,
75 .xform_type = RTE_CRYPTO_SYM_XFORM_NOT_SPECIFIED
80 static const struct rte_security_capability
81 txgbe_security_capabilities[] = {
82 { /* IPsec Inline Crypto ESP Transport Egress */
83 .action = RTE_SECURITY_ACTION_TYPE_INLINE_CRYPTO,
84 .protocol = RTE_SECURITY_PROTOCOL_IPSEC,
86 .proto = RTE_SECURITY_IPSEC_SA_PROTO_ESP,
87 .mode = RTE_SECURITY_IPSEC_SA_MODE_TRANSPORT,
88 .direction = RTE_SECURITY_IPSEC_SA_DIR_EGRESS,
91 .crypto_capabilities = aes_gcm_gmac_crypto_capabilities,
92 .ol_flags = RTE_SECURITY_TX_OLOAD_NEED_MDATA
94 { /* IPsec Inline Crypto ESP Transport Ingress */
95 .action = RTE_SECURITY_ACTION_TYPE_INLINE_CRYPTO,
96 .protocol = RTE_SECURITY_PROTOCOL_IPSEC,
98 .proto = RTE_SECURITY_IPSEC_SA_PROTO_ESP,
99 .mode = RTE_SECURITY_IPSEC_SA_MODE_TRANSPORT,
100 .direction = RTE_SECURITY_IPSEC_SA_DIR_INGRESS,
103 .crypto_capabilities = aes_gcm_gmac_crypto_capabilities,
106 { /* IPsec Inline Crypto ESP Tunnel Egress */
107 .action = RTE_SECURITY_ACTION_TYPE_INLINE_CRYPTO,
108 .protocol = RTE_SECURITY_PROTOCOL_IPSEC,
110 .proto = RTE_SECURITY_IPSEC_SA_PROTO_ESP,
111 .mode = RTE_SECURITY_IPSEC_SA_MODE_TUNNEL,
112 .direction = RTE_SECURITY_IPSEC_SA_DIR_EGRESS,
115 .crypto_capabilities = aes_gcm_gmac_crypto_capabilities,
116 .ol_flags = RTE_SECURITY_TX_OLOAD_NEED_MDATA
118 { /* IPsec Inline Crypto ESP Tunnel Ingress */
119 .action = RTE_SECURITY_ACTION_TYPE_INLINE_CRYPTO,
120 .protocol = RTE_SECURITY_PROTOCOL_IPSEC,
122 .proto = RTE_SECURITY_IPSEC_SA_PROTO_ESP,
123 .mode = RTE_SECURITY_IPSEC_SA_MODE_TUNNEL,
124 .direction = RTE_SECURITY_IPSEC_SA_DIR_INGRESS,
127 .crypto_capabilities = aes_gcm_gmac_crypto_capabilities,
131 .action = RTE_SECURITY_ACTION_TYPE_NONE
135 return txgbe_security_capabilities;
/*
 * rte_security ops vtable registered on the device's security context
 * (see txgbe_ipsec_ctx_create). Only .capabilities_get is visible in
 * this view; the remaining handlers (session create/destroy, etc.), if
 * any, are on elided lines — confirm against the full file.
 */
138 static struct rte_security_ops txgbe_security_ops = {
139 .capabilities_get = txgbe_crypto_capabilities_get
/*
 * Probe whether Rx crypto offload can be enabled on this device.
 *
 * Saves SECRXCTL, attempts to clear the ODSA bit, and reads the bit back;
 * the original register value is then restored so the probe has no lasting
 * effect. Presumably returns `capable` (non-zero iff the write took effect),
 * but the comparison and return are on elided lines — confirm in full file.
 */
143 txgbe_crypto_capable(struct rte_eth_dev *dev)
145 struct txgbe_hw *hw = TXGBE_DEV_HW(dev);
146 uint32_t reg_i, reg, capable = 1;
147 /* test if rx crypto can be enabled and then write back initial value*/
/* Save current SECRXCTL so it can be restored after the probe. */
148 reg_i = rd32(hw, TXGBE_SECRXCTL);
/* Try clearing the ODSA bit, then read it back to see if the HW accepted it. */
149 wr32m(hw, TXGBE_SECRXCTL, TXGBE_SECRXCTL_ODSA, 0);
150 reg = rd32m(hw, TXGBE_SECRXCTL, TXGBE_SECRXCTL_ODSA);
/* Restore the saved register value regardless of the outcome. */
153 wr32(hw, TXGBE_SECRXCTL, reg_i);
/*
 * Allocate an rte_security context for the port and attach it to
 * dev->security_ctx, but only if the hardware reports crypto capability
 * (txgbe_crypto_capable). Also registers the rte_security mbuf dynamic
 * field. The function continues past this view.
 *
 * NOTE(review): in the visible lines the rte_malloc() result is
 * dereferenced without a NULL check; the elided line(s) between the
 * allocation and the assignments may contain an `if (ctx)` guard —
 * confirm against the full file.
 */
158 txgbe_ipsec_ctx_create(struct rte_eth_dev *dev)
160 struct rte_security_ctx *ctx = NULL;
162 if (txgbe_crypto_capable(dev)) {
163 ctx = rte_malloc("rte_security_instances_ops",
164 sizeof(struct rte_security_ctx), 0)
166 ctx->device = (void *)dev;
167 ctx->ops = &txgbe_security_ops;
169 dev->security_ctx = ctx;
/* Register the mbuf dynamic field used by rte_security metadata. */
174 if (rte_security_dynfield_register() < 0)