* elements. If the mempool is near-empty to the point that this is a
* concern, the user should consider increasing the mempool size.
*/
- return (unsigned int)__atomic_load_n(&s->stack_lf.used.len.cnt,
+ return (unsigned int)__atomic_load_n(&s->stack_lf.used.len,
__ATOMIC_RELAXED);
}
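This hunk loads the length field directly (list->len instead of len.cnt), dropping the .cnt member access, presumably because len becomes a plain uint64_t rather than an rte_atomic64_t. Relaxed ordering is sufficient because, per the comment above, the returned count is only advisory. A minimal standalone sketch of the pattern, under illustrative names (approx_len and sample_len are not DPDK identifiers):

	#include <stdint.h>

	/* Counter maintained elsewhere with release/acquire updates. */
	static uint64_t approx_len;

	/* A relaxed load suffices when the value is only advisory: the
	 * caller reads no other memory based on the result, so no
	 * synchronization edge is needed.
	 */
	static inline unsigned int
	sample_len(void)
	{
		return (unsigned int)__atomic_load_n(&approx_len,
						     __ATOMIC_RELAXED);
	}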
static __rte_always_inline void
__rte_stack_lf_push_elems(struct rte_stack_lf_list *list,
			  struct rte_stack_lf_elem *first,
			  struct rte_stack_lf_elem *last,
			  unsigned int num)
{
-#ifndef RTE_ARCH_X86_64
- RTE_SET_USED(first);
- RTE_SET_USED(last);
- RTE_SET_USED(list);
- RTE_SET_USED(num);
-#else
struct rte_stack_lf_head old_head;
int success;
/* Ensure the stack modifications are not reordered with respect
* to the LIFO len update.
*/
- __atomic_add_fetch(&list->len.cnt, num, __ATOMIC_RELEASE);
-#endif
+ __atomic_add_fetch(&list->len, num, __ATOMIC_RELEASE);
}
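The surviving line pairs a release-ordered add on list->len with the acquire-ordered operations in the pop path below: a consumer that observes the new length is then guaranteed to observe the stack writes that preceded it. A compressed sketch of that release/acquire pairing, under illustrative names (slot, producer, and consumer are not DPDK identifiers):

	#include <stdint.h>

	static void *slot;      /* data published by the producer */
	static uint64_t count;  /* number of published items */

	/* Producer: write the payload, then bump the counter with
	 * release semantics so the write cannot be reordered after it.
	 */
	static void
	producer(void *obj)
	{
		slot = obj;
		__atomic_add_fetch(&count, 1, __ATOMIC_RELEASE);
	}

	/* Consumer: an acquire load of the counter synchronizes with
	 * the release above; once a nonzero count is seen, the write
	 * to slot is visible too.
	 */
	static void *
	consumer(void)
	{
		if (__atomic_load_n(&count, __ATOMIC_ACQUIRE) == 0)
			return NULL;
		return slot;
	}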
static __rte_always_inline struct rte_stack_lf_elem *
__rte_stack_lf_pop_elems(struct rte_stack_lf_list *list,
			 unsigned int num,
			 void **obj_table,
			 struct rte_stack_lf_elem **last)
{
-#ifndef RTE_ARCH_X86_64
- RTE_SET_USED(obj_table);
- RTE_SET_USED(last);
- RTE_SET_USED(list);
- RTE_SET_USED(num);
-
- return NULL;
-#else
struct rte_stack_lf_head old_head;
uint64_t len;
int success;
/* Reserve num elements, if available */
- len = __atomic_load_n(&list->len.cnt, __ATOMIC_ACQUIRE);
+ len = __atomic_load_n(&list->len, __ATOMIC_ACQUIRE);
while (1) {
/* Does the list contain enough elements? */
		if (unlikely(len < num))
			return NULL;
/* len is updated on failure */
- if (__atomic_compare_exchange_n(&list->len.cnt,
+ if (__atomic_compare_exchange_n(&list->len,
&len, len - num,
0, __ATOMIC_ACQUIRE,
					__ATOMIC_ACQUIRE))
			break;
	}
} while (success == 0);
return old_head.top;
-#endif
}
#endif /* _RTE_STACK_LF_C11_H_ */
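For reference, the reservation step in the pop path (claim num elements from len before dereferencing the head) reduces to the CAS loop below. This is a standalone sketch under an illustrative name (reserve is not a DPDK function), mirroring the ACQUIRE/ACQUIRE orderings used in the hunk above:

	#include <stdint.h>

	/* Atomically claim num items from *len. Returns 0 on success
	 * and -1 if fewer than num items remain. On CAS failure the
	 * builtin reloads old with the current value, so the bounds
	 * check is re-run before each retry.
	 */
	static inline int
	reserve(uint64_t *len, unsigned int num)
	{
		uint64_t old = __atomic_load_n(len, __ATOMIC_ACQUIRE);

		do {
			if (old < num)
				return -1;
		} while (!__atomic_compare_exchange_n(len, &old, old - num,
						      0, __ATOMIC_ACQUIRE,
						      __ATOMIC_ACQUIRE));

		return 0;
	}

Once the reservation succeeds, no other thread can claim the same elements, so the subsequent head-swap loop (the do/while on success above) should always find at least num entries on the list.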