X-Git-Url: http://git.droids-corp.org/?a=blobdiff_plain;f=lib%2Flibrte_acl%2Frte_acl.c;h=a54d531ad0b6d46ba2a6ff8926c011f8b30ebcf7;hb=0462d115441d1619ba19313b8daaff679854cc49;hp=4b21b8e6c22d1fc71ef1be47b9a7504267513df3;hpb=7eef9194ab6f3b743061ffa2597ad39f27880bf8;p=dpdk.git

diff --git a/lib/librte_acl/rte_acl.c b/lib/librte_acl/rte_acl.c
index 4b21b8e6c2..a54d531ad0 100644
--- a/lib/librte_acl/rte_acl.c
+++ b/lib/librte_acl/rte_acl.c
@@ -38,10 +38,30 @@
 
 TAILQ_HEAD(rte_acl_list, rte_tailq_entry);
 
+static struct rte_tailq_elem rte_acl_tailq = {
+	.name = "RTE_ACL",
+};
+EAL_REGISTER_TAILQ(rte_acl_tailq)
+
+/*
+ * If the compiler doesn't support AVX2 instructions,
+ * then the dummy one would be used instead for AVX2 classify method.
+ */
+int __attribute__ ((weak))
+rte_acl_classify_avx2(__rte_unused const struct rte_acl_ctx *ctx,
+	__rte_unused const uint8_t **data,
+	__rte_unused uint32_t *results,
+	__rte_unused uint32_t num,
+	__rte_unused uint32_t categories)
+{
+	return -ENOTSUP;
+}
+
 static const rte_acl_classify_t classify_fns[] = {
 	[RTE_ACL_CLASSIFY_DEFAULT] = rte_acl_classify_scalar,
 	[RTE_ACL_CLASSIFY_SCALAR] = rte_acl_classify_scalar,
 	[RTE_ACL_CLASSIFY_SSE] = rte_acl_classify_sse,
+	[RTE_ACL_CLASSIFY_AVX2] = rte_acl_classify_avx2,
 };
 
 /* by default, use always available scalar code path. */
@@ -64,32 +84,49 @@ rte_acl_set_ctx_classify(struct rte_acl_ctx *ctx, enum rte_acl_classify_alg alg)
 	return 0;
 }
 
+/*
+ * Select highest available classify method as default one.
+ * Note that CLASSIFY_AVX2 should be set as a default only
+ * if both conditions are met:
+ * at build time compiler supports AVX2 and target cpu supports AVX2.
+ */
 static void __attribute__((constructor))
 rte_acl_init(void)
 {
 	enum rte_acl_classify_alg alg = RTE_ACL_CLASSIFY_DEFAULT;
 
+#ifdef CC_AVX2_SUPPORT
+	if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX2))
+		alg = RTE_ACL_CLASSIFY_AVX2;
+	else if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_SSE4_1))
+#else
 	if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_SSE4_1))
+#endif
 		alg = RTE_ACL_CLASSIFY_SSE;
 
 	rte_acl_set_default_classify(alg);
 }
 
-int
-rte_acl_classify(const struct rte_acl_ctx *ctx, const uint8_t **data,
-	uint32_t *results, uint32_t num, uint32_t categories)
-{
-	return classify_fns[ctx->alg](ctx, data, results, num, categories);
-}
-
 int
 rte_acl_classify_alg(const struct rte_acl_ctx *ctx, const uint8_t **data,
 	uint32_t *results, uint32_t num, uint32_t categories,
 	enum rte_acl_classify_alg alg)
 {
+	if (categories != 1 &&
+			((RTE_ACL_RESULTS_MULTIPLIER - 1) & categories) != 0)
+		return -EINVAL;
+
 	return classify_fns[alg](ctx, data, results, num, categories);
 }
 
+int
+rte_acl_classify(const struct rte_acl_ctx *ctx, const uint8_t **data,
+	uint32_t *results, uint32_t num, uint32_t categories)
+{
+	return rte_acl_classify_alg(ctx, data, results, num, categories,
+		ctx->alg);
+}
+
 struct rte_acl_ctx *
 rte_acl_find_existing(const char *name)
 {
@@ -97,12 +134,7 @@ rte_acl_find_existing(const char *name)
 	struct rte_acl_list *acl_list;
 	struct rte_tailq_entry *te;
 
-	/* check that we have an initialised tail queue */
-	acl_list = RTE_TAILQ_LOOKUP_BY_IDX(RTE_TAILQ_ACL, rte_acl_list);
-	if (acl_list == NULL) {
-		rte_errno = E_RTE_NO_TAILQ;
-		return NULL;
-	}
+	acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);
 
 	rte_rwlock_read_lock(RTE_EAL_TAILQ_RWLOCK);
 	TAILQ_FOREACH(te, acl_list, next) {
@@ -128,12 +160,7 @@ rte_acl_free(struct rte_acl_ctx *ctx)
 	if (ctx == NULL)
 		return;
 
-	/* check that we have an initialised tail queue */
-	acl_list = RTE_TAILQ_LOOKUP_BY_IDX(RTE_TAILQ_ACL, rte_acl_list);
-	if (acl_list == NULL) {
-		rte_errno = E_RTE_NO_TAILQ;
-		return;
-	}
+	acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);
 
 	rte_rwlock_write_lock(RTE_EAL_TAILQ_RWLOCK);
 
@@ -165,12 +192,7 @@ rte_acl_create(const struct rte_acl_param *param)
 	struct rte_tailq_entry *te;
 	char name[sizeof(ctx->name)];
 
-	/* check that we have an initialised tail queue */
-	acl_list = RTE_TAILQ_LOOKUP_BY_IDX(RTE_TAILQ_ACL, rte_acl_list);
-	if (acl_list == NULL) {
-		rte_errno = E_RTE_NO_TAILQ;
-		return NULL;
-	}
+	acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);
 
 	/* check that input parameters are valid. */
 	if (param == NULL || param->name == NULL) {
@@ -203,7 +225,7 @@ rte_acl_create(const struct rte_acl_param *param)
 		goto exit;
 	}
 
-	ctx = rte_zmalloc_socket(name, sz, CACHE_LINE_SIZE, param->socket_id);
+	ctx = rte_zmalloc_socket(name, sz, RTE_CACHE_LINE_SIZE, param->socket_id);
 
 	if (ctx == NULL) {
 		RTE_LOG(ERR, ACL,
@@ -249,7 +271,8 @@ acl_add_rules(struct rte_acl_ctx *ctx, const void *rules, uint32_t num)
 static int
 acl_check_rule(const struct rte_acl_rule_data *rd)
 {
-	if ((rd->category_mask & LEN2MASK(RTE_ACL_MAX_CATEGORIES)) == 0 ||
+	if ((RTE_LEN2MASK(RTE_ACL_MAX_CATEGORIES, typeof(rd->category_mask)) &
+			rd->category_mask) == 0 ||
 			rd->priority > RTE_ACL_MAX_PRIORITY ||
 			rd->priority < RTE_ACL_MIN_PRIORITY ||
 			rd->userdata == RTE_ACL_INVALID_USERDATA)
@@ -333,12 +356,7 @@ rte_acl_list_dump(void)
 	struct rte_acl_list *acl_list;
 	struct rte_tailq_entry *te;
 
-	/* check that we have an initialised tail queue */
-	acl_list = RTE_TAILQ_LOOKUP_BY_IDX(RTE_TAILQ_ACL, rte_acl_list);
-	if (acl_list == NULL) {
-		rte_errno = E_RTE_NO_TAILQ;
-		return;
-	}
+	acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);
 
 	rte_rwlock_read_lock(RTE_EAL_TAILQ_RWLOCK);
 	TAILQ_FOREACH(te, acl_list, next) {
@@ -511,6 +529,7 @@ rte_acl_ipv4vlan_build(struct rte_acl_ctx *ctx,
 	if (ctx == NULL || layout == NULL)
 		return -EINVAL;
 
+	memset(&cfg, 0, sizeof(cfg));
 	acl_ipv4vlan_config(&cfg, layout, num_categories);
 	return rte_acl_build(ctx, &cfg);
 }
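
Note on the dispatch pattern introduced above: the patch defines rte_acl_classify_avx2() as a weak symbol so that classify_fns[] always contains a callable entry even when AVX2 code is not built, while the constructor rte_acl_init() promotes the default method only if both the compiler (CC_AVX2_SUPPORT) and the running CPU support AVX2. The standalone C sketch below illustrates that same weak-symbol-plus-constructor selection; it is not DPDK code, and classify_fn, classify_scalar, classify_avx2 and HAVE_AVX2_BUILD are hypothetical stand-ins for rte_acl_classify_t, rte_acl_classify_scalar, rte_acl_classify_avx2 and CC_AVX2_SUPPORT.

/*
 * Minimal sketch (not part of the patch) of weak-symbol fallback plus
 * constructor-time selection of the fastest available classify routine.
 */
#include <errno.h>
#include <stdio.h>

typedef int (*classify_fn)(const unsigned char *data, unsigned int num);

/* Always-available baseline, playing the role of the scalar classifier. */
static int
classify_scalar(const unsigned char *data, unsigned int num)
{
	(void)data;
	return (int)num;
}

/*
 * Weak definition: if another object file in the build provides a strong
 * classify_avx2(), the linker uses that one; otherwise this stub is kept
 * and reports -ENOTSUP, like the weak rte_acl_classify_avx2() above.
 */
int __attribute__((weak))
classify_avx2(const unsigned char *data, unsigned int num)
{
	(void)data;
	(void)num;
	return -ENOTSUP;
}

static classify_fn active_classify = classify_scalar;

/*
 * Runs before main(), like the constructor rte_acl_init(): the faster path
 * becomes the default only if it was compiled in and the CPU can run it.
 */
static void __attribute__((constructor))
classify_init(void)
{
#ifdef HAVE_AVX2_BUILD	/* hypothetical stand-in for CC_AVX2_SUPPORT */
	if (__builtin_cpu_supports("avx2"))
		active_classify = classify_avx2;
#endif
}

int
main(void)
{
	const unsigned char pkt[4] = { 0 };

	printf("classify returned %d\n", active_classify(pkt, 1));
	return 0;
}

Built as-is (e.g. cc -O2 sketch.c) the scalar path stays active; a build that defines HAVE_AVX2_BUILD and links a strong classify_avx2() would dispatch to it. This mirrors how the patch routes rte_acl_classify() through rte_acl_classify_alg(), which also rejects a categories argument that is neither 1 nor a multiple of RTE_ACL_RESULTS_MULTIPLIER (the bitmask test relies on the multiplier being a power of two).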