/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2010-2014 Intel Corporation. All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include <rte_acl.h>
#include "acl.h"

TAILQ_HEAD(rte_acl_list, rte_tailq_entry);
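/*
 * Every ACL context created by rte_acl_create() is linked into this
 * EAL-registered tailq, so it can be looked up by name with
 * rte_acl_find_existing() and enumerated by rte_acl_list_dump().
 */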
static struct rte_tailq_elem rte_acl_tailq = {
	.name = "RTE_ACL",
};
EAL_REGISTER_TAILQ(rte_acl_tailq)
/*
 * If the compiler doesn't support AVX2 instructions,
 * then the dummy one below is used instead for the AVX2 classify method.
 */
int __attribute__ ((weak))
rte_acl_classify_avx2(__rte_unused const struct rte_acl_ctx *ctx,
	__rte_unused const uint8_t **data,
	__rte_unused uint32_t *results,
	__rte_unused uint32_t num,
	__rte_unused uint32_t categories)
{
	return -ENOTSUP;
}
int __attribute__ ((weak))
rte_acl_classify_sse(__rte_unused const struct rte_acl_ctx *ctx,
	__rte_unused const uint8_t **data,
	__rte_unused uint32_t *results,
	__rte_unused uint32_t num,
	__rte_unused uint32_t categories)
{
	return -ENOTSUP;
}
int __attribute__ ((weak))
rte_acl_classify_neon(__rte_unused const struct rte_acl_ctx *ctx,
	__rte_unused const uint8_t **data,
	__rte_unused uint32_t *results,
	__rte_unused uint32_t num,
	__rte_unused uint32_t categories)
{
	return -ENOTSUP;
}
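/*
 * Run-time dispatch table, indexed by enum rte_acl_classify_alg.
 * Entries whose vector implementation is not built in resolve to the
 * weak stubs above, which simply return -ENOTSUP.
 */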
static const rte_acl_classify_t classify_fns[] = {
	[RTE_ACL_CLASSIFY_DEFAULT] = rte_acl_classify_scalar,
	[RTE_ACL_CLASSIFY_SCALAR] = rte_acl_classify_scalar,
	[RTE_ACL_CLASSIFY_SSE] = rte_acl_classify_sse,
	[RTE_ACL_CLASSIFY_AVX2] = rte_acl_classify_avx2,
	[RTE_ACL_CLASSIFY_NEON] = rte_acl_classify_neon,
};
/* by default, use the always-available scalar code path. */
static enum rte_acl_classify_alg rte_acl_default_classify =
	RTE_ACL_CLASSIFY_SCALAR;

static void
rte_acl_set_default_classify(enum rte_acl_classify_alg alg)
{
	rte_acl_default_classify = alg;
}

int
rte_acl_set_ctx_classify(struct rte_acl_ctx *ctx, enum rte_acl_classify_alg alg)
{
	if (ctx == NULL || (uint32_t)alg >= RTE_DIM(classify_fns))
		return -EINVAL;

	ctx->alg = alg;
	return 0;
}
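/*
 * Illustrative use (placeholder code, not part of this file): an application
 * can pin a single context to a specific method, e.g. the always-available
 * scalar path, regardless of the process-wide default:
 *
 *	if (rte_acl_set_ctx_classify(ctx, RTE_ACL_CLASSIFY_SCALAR) != 0)
 *		handle_error();
 */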
/*
 * Select the highest available classify method as the default one.
 * Note that CLASSIFY_AVX2 should be set as the default only if both
 * conditions are met: the compiler supports AVX2 at build time and
 * the target CPU supports AVX2 at run time.
 */
static void __attribute__((constructor))
rte_acl_init(void)
{
	enum rte_acl_classify_alg alg = RTE_ACL_CLASSIFY_DEFAULT;

#if defined(RTE_ARCH_ARM64)
	alg = RTE_ACL_CLASSIFY_NEON;
#elif defined(RTE_ARCH_ARM)
	if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_NEON))
		alg = RTE_ACL_CLASSIFY_NEON;
#else
#ifdef CC_AVX2_SUPPORT
	if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX2))
		alg = RTE_ACL_CLASSIFY_AVX2;
	else if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_SSE4_1))
#else
	if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_SSE4_1))
#endif
		alg = RTE_ACL_CLASSIFY_SSE;
#endif

	rte_acl_set_default_classify(alg);
}
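/*
 * Note: categories must be 1 or a multiple of RTE_ACL_RESULTS_MULTIPLIER;
 * rte_acl_classify_alg() rejects any other value with -EINVAL before
 * dispatching to the selected classify implementation.
 */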
int
rte_acl_classify_alg(const struct rte_acl_ctx *ctx, const uint8_t **data,
	uint32_t *results, uint32_t num, uint32_t categories,
	enum rte_acl_classify_alg alg)
{
	if (categories != 1 &&
			((RTE_ACL_RESULTS_MULTIPLIER - 1) & categories) != 0)
		return -EINVAL;

	return classify_fns[alg](ctx, data, results, num, categories);
}
int
rte_acl_classify(const struct rte_acl_ctx *ctx, const uint8_t **data,
	uint32_t *results, uint32_t num, uint32_t categories)
{
	return rte_acl_classify_alg(ctx, data, results, num, categories,
		ctx->alg);
}
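/*
 * Illustrative use (placeholder names pkt_data, results, BURST): classify a
 * burst of buffers with a single category; on success each results[i] holds
 * the userdata of the highest-priority rule matched by pkt_data[i], or 0 if
 * nothing matched.
 *
 *	const uint8_t *pkt_data[BURST];
 *	uint32_t results[BURST];
 *	...
 *	rc = rte_acl_classify(ctx, pkt_data, results, BURST, 1);
 */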
struct rte_acl_ctx *
rte_acl_find_existing(const char *name)
{
	struct rte_acl_ctx *ctx = NULL;
	struct rte_acl_list *acl_list;
	struct rte_tailq_entry *te;

	acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);

	rte_rwlock_read_lock(RTE_EAL_TAILQ_RWLOCK);
	TAILQ_FOREACH(te, acl_list, next) {
		ctx = (struct rte_acl_ctx *) te->data;
		if (strncmp(name, ctx->name, sizeof(ctx->name)) == 0)
			break;
	}
	rte_rwlock_read_unlock(RTE_EAL_TAILQ_RWLOCK);

	if (te == NULL) {
		rte_errno = ENOENT;
		return NULL;
	}
	return ctx;
}
void
rte_acl_free(struct rte_acl_ctx *ctx)
{
	struct rte_acl_list *acl_list;
	struct rte_tailq_entry *te;

	if (ctx == NULL)
		return;

	acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);

	rte_rwlock_write_lock(RTE_EAL_TAILQ_RWLOCK);

	/* find our tailq entry */
	TAILQ_FOREACH(te, acl_list, next) {
		if (te->data == (void *) ctx)
			break;
	}
	if (te == NULL) {
		rte_rwlock_write_unlock(RTE_EAL_TAILQ_RWLOCK);
		return;
	}

	TAILQ_REMOVE(acl_list, te, next);

	rte_rwlock_write_unlock(RTE_EAL_TAILQ_RWLOCK);

	rte_free(ctx->mem);
	rte_free(ctx);
	rte_free(te);
}
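/*
 * Create a named ACL context, or return the existing one if a context with
 * the same name has already been created.  The context header and its rule
 * storage are allocated as a single block on the requested socket.
 */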
struct rte_acl_ctx *
rte_acl_create(const struct rte_acl_param *param)
{
	size_t sz;
	struct rte_acl_ctx *ctx;
	struct rte_acl_list *acl_list;
	struct rte_tailq_entry *te;
	char name[sizeof(ctx->name)];

	acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);

	/* check that input parameters are valid. */
	if (param == NULL || param->name == NULL) {
		rte_errno = EINVAL;
		return NULL;
	}

	snprintf(name, sizeof(name), "ACL_%s", param->name);

	/* calculate amount of memory required for pattern set. */
	sz = sizeof(*ctx) + param->max_rule_num * param->rule_size;

	/* get EAL TAILQ lock. */
	rte_rwlock_write_lock(RTE_EAL_TAILQ_RWLOCK);

	/* if we already have one with that name */
	TAILQ_FOREACH(te, acl_list, next) {
		ctx = (struct rte_acl_ctx *) te->data;
		if (strncmp(param->name, ctx->name, sizeof(ctx->name)) == 0)
			break;
	}

	/* if ACL with such name doesn't exist, then create a new one. */
	if (te == NULL) {
		ctx = NULL;
		te = rte_zmalloc("ACL_TAILQ_ENTRY", sizeof(*te), 0);
		if (te == NULL) {
			RTE_LOG(ERR, ACL, "Cannot allocate tailq entry!\n");
			goto exit;
		}

		ctx = rte_zmalloc_socket(name, sz, RTE_CACHE_LINE_SIZE, param->socket_id);
		if (ctx == NULL) {
			RTE_LOG(ERR, ACL,
				"allocation of %zu bytes on socket %d for %s failed\n",
				sz, param->socket_id, name);
			rte_free(te);
			goto exit;
		}

		/* init new allocated context. */
		ctx->rules = ctx + 1;
		ctx->max_rules = param->max_rule_num;
		ctx->rule_sz = param->rule_size;
		ctx->socket_id = param->socket_id;
		ctx->alg = rte_acl_default_classify;
		snprintf(ctx->name, sizeof(ctx->name), "%s", param->name);

		te->data = (void *) ctx;

		TAILQ_INSERT_TAIL(acl_list, te, next);
	}

exit:
	rte_rwlock_write_unlock(RTE_EAL_TAILQ_RWLOCK);
	return ctx;
}
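/*
 * Typical lifecycle of a context (illustrative sketch only; prm, acl_rules,
 * n_rules, cfg, data, results and num are placeholder names, and the field
 * definitions depend on the application's rule layout):
 *
 *	ctx = rte_acl_create(&prm);
 *	rte_acl_add_rules(ctx, acl_rules, n_rules);
 *	rte_acl_build(ctx, &cfg);
 *	rte_acl_classify(ctx, data, results, num, cfg.num_categories);
 *	rte_acl_free(ctx);
 */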
static int
acl_add_rules(struct rte_acl_ctx *ctx, const void *rules, uint32_t num)
{
	uint8_t *pos;

	if (num + ctx->num_rules > ctx->max_rules)
		return -ENOMEM;

	pos = ctx->rules;
	pos += ctx->rule_sz * ctx->num_rules;
	memcpy(pos, rules, num * ctx->rule_sz);
	ctx->num_rules += num;

	return 0;
}
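/*
 * Validate a single rule: it must select at least one valid category, its
 * priority must lie within [RTE_ACL_MIN_PRIORITY, RTE_ACL_MAX_PRIORITY],
 * and its userdata must not be RTE_ACL_INVALID_USERDATA.
 */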
static int
acl_check_rule(const struct rte_acl_rule_data *rd)
{
	if ((RTE_LEN2MASK(RTE_ACL_MAX_CATEGORIES, typeof(rd->category_mask)) &
			rd->category_mask) == 0 ||
			rd->priority > RTE_ACL_MAX_PRIORITY ||
			rd->priority < RTE_ACL_MIN_PRIORITY ||
			rd->userdata == RTE_ACL_INVALID_USERDATA)
		return -EINVAL;
	return 0;
}
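/*
 * Add num rules to the context.  The rules are only staged in the context's
 * rule storage here; rte_acl_build() must be called afterwards for them to
 * take effect in classification.
 */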
int
rte_acl_add_rules(struct rte_acl_ctx *ctx, const struct rte_acl_rule *rules,
	uint32_t num)
{
	const struct rte_acl_rule *rv;
	uint32_t i;
	int32_t rc;

	if (ctx == NULL || rules == NULL || 0 == ctx->rule_sz)
		return -EINVAL;

	for (i = 0; i != num; i++) {
		rv = (const struct rte_acl_rule *)
			((uintptr_t)rules + i * ctx->rule_sz);
		rc = acl_check_rule(&rv->data);
		if (rc != 0) {
			RTE_LOG(ERR, ACL, "%s(%s): rule #%u is invalid\n",
				__func__, ctx->name, i + 1);
			return rc;
		}
	}

	return acl_add_rules(ctx, rules, num);
}
/*
 * Reset all rules.
 * Note that RT structures are not affected.
 */
void
rte_acl_reset_rules(struct rte_acl_ctx *ctx)
{
	if (ctx != NULL)
		ctx->num_rules = 0;
}

/*
 * Reset all rules and destroy RT structures.
 */
void
rte_acl_reset(struct rte_acl_ctx *ctx)
{
	if (ctx != NULL) {
		rte_acl_reset_rules(ctx);
		rte_acl_build(ctx, &ctx->config);
	}
}
/*
 * Dump an ACL context to stdout.
 */
void
rte_acl_dump(const struct rte_acl_ctx *ctx)
{
	if (!ctx)
		return;
	printf("acl context <%s>@%p\n", ctx->name, ctx);
	printf("  socket_id=%"PRId32"\n", ctx->socket_id);
	printf("  alg=%"PRId32"\n", ctx->alg);
	printf("  max_rules=%"PRIu32"\n", ctx->max_rules);
	printf("  rule_size=%"PRIu32"\n", ctx->rule_sz);
	printf("  num_rules=%"PRIu32"\n", ctx->num_rules);
	printf("  num_categories=%"PRIu32"\n", ctx->num_categories);
	printf("  num_tries=%"PRIu32"\n", ctx->num_tries);
}
/*
 * Dump all ACL contexts to stdout.
 */
void
rte_acl_list_dump(void)
{
	struct rte_acl_ctx *ctx;
	struct rte_acl_list *acl_list;
	struct rte_tailq_entry *te;

	acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);

	rte_rwlock_read_lock(RTE_EAL_TAILQ_RWLOCK);
	TAILQ_FOREACH(te, acl_list, next) {
		ctx = (struct rte_acl_ctx *) te->data;
		rte_acl_dump(ctx);
	}
	rte_rwlock_read_unlock(RTE_EAL_TAILQ_RWLOCK);
}