/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#include <rte_string_fns.h>
#include <rte_acl.h>

#include "acl.h"

TAILQ_HEAD(rte_acl_list, rte_tailq_entry);

static struct rte_tailq_elem rte_acl_tailq = {
	.name = "RTE_ACL",
};
EAL_REGISTER_TAILQ(rte_acl_tailq)
#ifndef CC_AVX2_SUPPORT
/*
 * If the compiler doesn't support AVX2 instructions,
 * this dummy is used instead for the AVX2 classify method:
 * it simply reports the method as unsupported.
 */
int
rte_acl_classify_avx2(__rte_unused const struct rte_acl_ctx *ctx,
	__rte_unused const uint8_t **data,
	__rte_unused uint32_t *results,
	__rte_unused uint32_t num,
	__rte_unused uint32_t categories)
{
	return -ENOTSUP;
}
#endif
#ifndef RTE_ARCH_X86
/* Same dummy fallback for the SSE classify method on non-x86 builds. */
int
rte_acl_classify_sse(__rte_unused const struct rte_acl_ctx *ctx,
	__rte_unused const uint8_t **data,
	__rte_unused uint32_t *results,
	__rte_unused uint32_t num,
	__rte_unused uint32_t categories)
{
	return -ENOTSUP;
}
#endif
#ifndef RTE_ARCH_ARM64
/* Same dummy fallback for the NEON classify method. */
int
rte_acl_classify_neon(__rte_unused const struct rte_acl_ctx *ctx,
	__rte_unused const uint8_t **data,
	__rte_unused uint32_t *results,
	__rte_unused uint32_t num,
	__rte_unused uint32_t categories)
{
	return -ENOTSUP;
}
#endif
#ifndef RTE_ARCH_PPC_64
/* Same dummy fallback for the AltiVec classify method. */
int
rte_acl_classify_altivec(__rte_unused const struct rte_acl_ctx *ctx,
	__rte_unused const uint8_t **data,
	__rte_unused uint32_t *results,
	__rte_unused uint32_t num,
	__rte_unused uint32_t categories)
{
	return -ENOTSUP;
}
#endif
static const rte_acl_classify_t classify_fns[] = {
	[RTE_ACL_CLASSIFY_DEFAULT] = rte_acl_classify_scalar,
	[RTE_ACL_CLASSIFY_SCALAR] = rte_acl_classify_scalar,
	[RTE_ACL_CLASSIFY_SSE] = rte_acl_classify_sse,
	[RTE_ACL_CLASSIFY_AVX2] = rte_acl_classify_avx2,
	[RTE_ACL_CLASSIFY_NEON] = rte_acl_classify_neon,
	[RTE_ACL_CLASSIFY_ALTIVEC] = rte_acl_classify_altivec,
};
/* By default, use the always available scalar code path. */
static enum rte_acl_classify_alg rte_acl_default_classify =
	RTE_ACL_CLASSIFY_SCALAR;

static void
rte_acl_set_default_classify(enum rte_acl_classify_alg alg)
{
	rte_acl_default_classify = alg;
}
int
rte_acl_set_ctx_classify(struct rte_acl_ctx *ctx, enum rte_acl_classify_alg alg)
{
	if (ctx == NULL || (uint32_t)alg >= RTE_DIM(classify_fns))
		return -EINVAL;

	ctx->alg = alg;
	return 0;
}
/*
 * Select the highest available classify method as the default one.
 * Note that CLASSIFY_AVX2 should be set as the default only
 * if both conditions are met:
 * at build time the compiler supports AVX2 and the target cpu supports AVX2.
 */
RTE_INIT(rte_acl_init)
{
	enum rte_acl_classify_alg alg = RTE_ACL_CLASSIFY_DEFAULT;

#if defined(RTE_ARCH_ARM64)
	alg = RTE_ACL_CLASSIFY_NEON;
#elif defined(RTE_ARCH_ARM)
	if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_NEON))
		alg = RTE_ACL_CLASSIFY_NEON;
#elif defined(RTE_ARCH_PPC_64)
	alg = RTE_ACL_CLASSIFY_ALTIVEC;
#else
#ifdef CC_AVX2_SUPPORT
	if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX2))
		alg = RTE_ACL_CLASSIFY_AVX2;
	else if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_SSE4_1))
#else
	if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_SSE4_1))
#endif
		alg = RTE_ACL_CLASSIFY_SSE;
#endif

	rte_acl_set_default_classify(alg);
}
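/*
 * Usage sketch (illustrative only, not part of the library): an application
 * can override the automatically selected method for a particular context,
 * e.g. to force the scalar path while debugging. "ctx" is a hypothetical,
 * already created context.
 *
 *	int ret = rte_acl_set_ctx_classify(ctx, RTE_ACL_CLASSIFY_SCALAR);
 *	if (ret != 0)
 *		printf("cannot change classify method: %d\n", ret);
 */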
int
rte_acl_classify_alg(const struct rte_acl_ctx *ctx, const uint8_t **data,
	uint32_t *results, uint32_t num, uint32_t categories,
	enum rte_acl_classify_alg alg)
{
	/* categories must be 1 or a multiple of RTE_ACL_RESULTS_MULTIPLIER */
	if (categories != 1 &&
			((RTE_ACL_RESULTS_MULTIPLIER - 1) & categories) != 0)
		return -EINVAL;

	return classify_fns[alg](ctx, data, results, num, categories);
}
int
rte_acl_classify(const struct rte_acl_ctx *ctx, const uint8_t **data,
	uint32_t *results, uint32_t num, uint32_t categories)
{
	return rte_acl_classify_alg(ctx, data, results, num, categories,
		ctx->alg);
}
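/*
 * Usage sketch (illustrative only): classify a burst of lookups against an
 * already built context. "ctx", "bufs" and BURST_SZ are hypothetical; each
 * bufs[i] must point at a search key laid out according to the field
 * definitions supplied at build time.
 *
 *	const uint8_t *bufs[BURST_SZ];
 *	uint32_t results[BURST_SZ];	// one result per key with 1 category
 *
 *	int ret = rte_acl_classify(ctx, bufs, results, BURST_SZ, 1);
 *	if (ret != 0)
 *		return ret;	// e.g. invalid categories value
 *	// results[i] is the userdata of the highest-priority rule matching
 *	// bufs[i], or 0 when no rule matched.
 */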
struct rte_acl_ctx *
rte_acl_find_existing(const char *name)
{
	struct rte_acl_ctx *ctx = NULL;
	struct rte_acl_list *acl_list;
	struct rte_tailq_entry *te;

	acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);

	rte_rwlock_read_lock(RTE_EAL_TAILQ_RWLOCK);
	TAILQ_FOREACH(te, acl_list, next) {
		ctx = (struct rte_acl_ctx *) te->data;
		if (strncmp(name, ctx->name, sizeof(ctx->name)) == 0)
			break;
	}
	rte_rwlock_read_unlock(RTE_EAL_TAILQ_RWLOCK);

	if (te == NULL) {
		rte_errno = ENOENT;
		ctx = NULL;
	}
	return ctx;
}
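/*
 * Usage sketch (illustrative only): retrieve a context created earlier,
 * for instance from another part of the application. The lookup name is
 * the rte_acl_param.name used at creation time; "acl_ipv4" is hypothetical.
 *
 *	struct rte_acl_ctx *ctx = rte_acl_find_existing("acl_ipv4");
 *	if (ctx == NULL)
 *		return -ENOENT;	// rte_errno is set to ENOENT as well
 */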
void
rte_acl_free(struct rte_acl_ctx *ctx)
{
	struct rte_acl_list *acl_list;
	struct rte_tailq_entry *te;

	if (ctx == NULL)
		return;

	acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);

	rte_rwlock_write_lock(RTE_EAL_TAILQ_RWLOCK);

	/* find our tailq entry */
	TAILQ_FOREACH(te, acl_list, next) {
		if (te->data == (void *) ctx)
			break;
	}
	if (te == NULL) {
		rte_rwlock_write_unlock(RTE_EAL_TAILQ_RWLOCK);
		return;
	}

	TAILQ_REMOVE(acl_list, te, next);

	rte_rwlock_write_unlock(RTE_EAL_TAILQ_RWLOCK);

	rte_free(ctx->mem);
	rte_free(ctx);
	rte_free(te);
}
struct rte_acl_ctx *
rte_acl_create(const struct rte_acl_param *param)
{
	size_t sz;
	struct rte_acl_ctx *ctx;
	struct rte_acl_list *acl_list;
	struct rte_tailq_entry *te;
	char name[sizeof(ctx->name)];

	acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);

	/* check that input parameters are valid. */
	if (param == NULL || param->name == NULL) {
		rte_errno = EINVAL;
		return NULL;
	}

	snprintf(name, sizeof(name), "ACL_%s", param->name);

	/* calculate amount of memory required for pattern set. */
	sz = sizeof(*ctx) + param->max_rule_num * param->rule_size;

	/* get EAL TAILQ lock. */
	rte_rwlock_write_lock(RTE_EAL_TAILQ_RWLOCK);

	/* if we already have one with that name */
	TAILQ_FOREACH(te, acl_list, next) {
		ctx = (struct rte_acl_ctx *) te->data;
		if (strncmp(param->name, ctx->name, sizeof(ctx->name)) == 0)
			break;
	}

	/* if ACL with such name doesn't exist, then create a new one. */
	if (te == NULL) {
		ctx = NULL;
		te = rte_zmalloc("ACL_TAILQ_ENTRY", sizeof(*te), 0);
		if (te == NULL) {
			RTE_LOG(ERR, ACL, "Cannot allocate tailq entry!\n");
			goto exit;
		}

		ctx = rte_zmalloc_socket(name, sz, RTE_CACHE_LINE_SIZE, param->socket_id);
		if (ctx == NULL) {
			RTE_LOG(ERR, ACL,
				"allocation of %zu bytes on socket %d for %s failed\n",
				sz, param->socket_id, name);
			rte_free(te);
			goto exit;
		}
		/* init new allocated context. */
		ctx->rules = ctx + 1;
		ctx->max_rules = param->max_rule_num;
		ctx->rule_sz = param->rule_size;
		ctx->socket_id = param->socket_id;
		ctx->alg = rte_acl_default_classify;
		strlcpy(ctx->name, param->name, sizeof(ctx->name));

		te->data = (void *) ctx;

		TAILQ_INSERT_TAIL(acl_list, te, next);
	}

exit:
	rte_rwlock_write_unlock(RTE_EAL_TAILQ_RWLOCK);
	return ctx;
}
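/*
 * Usage sketch (illustrative only): create a context that can hold up to
 * 1024 rules with five match fields each. The name, field count and rule
 * limit are hypothetical values chosen for the example.
 *
 *	struct rte_acl_param param = {
 *		.name = "acl_ipv4",
 *		.socket_id = SOCKET_ID_ANY,
 *		.rule_size = RTE_ACL_RULE_SZ(5),
 *		.max_rule_num = 1024,
 *	};
 *
 *	struct rte_acl_ctx *ctx = rte_acl_create(&param);
 *	if (ctx == NULL)
 *		rte_exit(EXIT_FAILURE, "cannot create ACL context\n");
 */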
static int
acl_add_rules(struct rte_acl_ctx *ctx, const void *rules, uint32_t num)
{
	uint8_t *pos;

	if (num + ctx->num_rules > ctx->max_rules)
		return -ENOMEM;

	pos = ctx->rules;
	pos += ctx->rule_sz * ctx->num_rules;
	memcpy(pos, rules, num * ctx->rule_sz);
	ctx->num_rules += num;

	return 0;
}
static int
acl_check_rule(const struct rte_acl_rule_data *rd)
{
	/* rule must enable at least one valid category and use a priority
	 * within the allowed range. */
	if ((RTE_LEN2MASK(RTE_ACL_MAX_CATEGORIES, typeof(rd->category_mask)) &
			rd->category_mask) == 0 ||
			rd->priority > RTE_ACL_MAX_PRIORITY ||
			rd->priority < RTE_ACL_MIN_PRIORITY)
		return -EINVAL;
	return 0;
}
int
rte_acl_add_rules(struct rte_acl_ctx *ctx, const struct rte_acl_rule *rules,
	uint32_t num)
{
	const struct rte_acl_rule *rv;
	uint32_t i;
	int32_t rc;

	if (ctx == NULL || rules == NULL || 0 == ctx->rule_sz)
		return -EINVAL;

	for (i = 0; i != num; i++) {
		rv = (const struct rte_acl_rule *)
			((uintptr_t)rules + i * ctx->rule_sz);
		rc = acl_check_rule(&rv->data);
		if (rc != 0) {
			RTE_LOG(ERR, ACL, "%s(%s): rule #%u is invalid\n",
				__func__, ctx->name, i + 1);
			return rc;
		}
	}

	return acl_add_rules(ctx, rules, num);
}
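/*
 * Usage sketch (illustrative only): declare an application rule type with
 * RTE_ACL_RULE_DEF and add a single rule. The type name, field index and
 * values are hypothetical; the field layout must match the
 * rte_acl_field_def array used later at build time.
 *
 *	RTE_ACL_RULE_DEF(acl_ipv4_rule, 5);
 *
 *	struct acl_ipv4_rule r = {
 *		.data = {
 *			.category_mask = 1,	// match in category 0 only
 *			.priority = 1,
 *			.userdata = 100,	// value returned on match
 *		},
 *		.field[2] = {	// e.g. destination IPv4 address, /24 prefix
 *			.value.u32 = RTE_IPV4(192, 168, 1, 0),
 *			.mask_range.u32 = 24,
 *		},
 *	};
 *
 *	if (rte_acl_add_rules(ctx, (const struct rte_acl_rule *)&r, 1) != 0)
 *		return -1;	// invalid rule data or rule table full
 */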
/*
 * Reset all rules.
 * Note that the run-time (RT) structures are not affected.
 */
void
rte_acl_reset_rules(struct rte_acl_ctx *ctx)
{
	if (ctx != NULL)
		ctx->num_rules = 0;
}
/*
 * Reset all rules and destroy the run-time (RT) structures.
 */
void
rte_acl_reset(struct rte_acl_ctx *ctx)
{
	if (ctx != NULL) {
		rte_acl_reset_rules(ctx);
		rte_acl_build(ctx, &ctx->config);
	}
}
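/*
 * Usage sketch (illustrative only): once all rules are added, the run-time
 * structures are generated with rte_acl_build(). "ipv4_defs" is a
 * hypothetical rte_acl_field_def array describing the same layout the
 * rules were filled in with.
 *
 *	struct rte_acl_config cfg = {
 *		.num_categories = 1,
 *		.num_fields = RTE_DIM(ipv4_defs),
 *	};
 *	memcpy(cfg.defs, ipv4_defs, sizeof(ipv4_defs));
 *
 *	if (rte_acl_build(ctx, &cfg) != 0)
 *		return -1;	// no rules, bad config or not enough memory
 */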
/*
 * Dump an ACL context to stdout.
 */
void
rte_acl_dump(const struct rte_acl_ctx *ctx)
{
	if (!ctx)
		return;

	printf("acl context <%s>@%p\n", ctx->name, ctx);
	printf(" socket_id=%"PRId32"\n", ctx->socket_id);
	printf(" alg=%"PRId32"\n", ctx->alg);
	printf(" max_rules=%"PRIu32"\n", ctx->max_rules);
	printf(" rule_size=%"PRIu32"\n", ctx->rule_sz);
	printf(" num_rules=%"PRIu32"\n", ctx->num_rules);
	printf(" num_categories=%"PRIu32"\n", ctx->num_categories);
	printf(" num_tries=%"PRIu32"\n", ctx->num_tries);
}
/*
 * Dump all ACL contexts to stdout.
 */
void
rte_acl_list_dump(void)
{
	struct rte_acl_ctx *ctx;
	struct rte_acl_list *acl_list;
	struct rte_tailq_entry *te;

	acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);

	rte_rwlock_read_lock(RTE_EAL_TAILQ_RWLOCK);
	TAILQ_FOREACH(te, acl_list, next) {
		ctx = (struct rte_acl_ctx *) te->data;
		rte_acl_dump(ctx);
	}
	rte_rwlock_read_unlock(RTE_EAL_TAILQ_RWLOCK);
}