* elements. If the mempool is near-empty to the point that this is a
* concern, the user should consider increasing the mempool size.
*/
- return (unsigned int)rte_atomic64_read(&s->stack_lf.used.len);
+ return (unsigned int)rte_atomic64_read((rte_atomic64_t *)
+ &s->stack_lf.used.len);
}
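
For context: the new casts appear to be needed because the list's len field is (presumably, per this patch's companion changes) a plain uint64_t shared with a C11 __atomic-based implementation, while this generic version still goes through the legacy rte_atomic64_* API. A minimal sketch of the same read pattern, using hypothetical stand-in names (lf_list and lf_list_count are illustrative, not DPDK API):

#include <stdint.h>

/* Hypothetical stand-in for the DPDK list type, for illustration only */
struct lf_list {
	uint64_t len;	/* plain integer, usable by either atomics API */
};

/* Relaxed atomic read of the length; the generic implementation above
 * instead casts &len to rte_atomic64_t * and calls rte_atomic64_read().
 */
static inline uint64_t
lf_list_count(const struct lf_list *l)
{
	return __atomic_load_n(&l->len, __ATOMIC_RELAXED);
}

The relaxed ordering is deliberate: as the comment above notes, the count is approximate by design.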

static __rte_always_inline void
__rte_stack_lf_push_elems(struct rte_stack_lf_list *list,
			  struct rte_stack_lf_elem *first,
			  struct rte_stack_lf_elem *last,
			  unsigned int num)
{
-#ifndef RTE_ARCH_X86_64
- RTE_SET_USED(first);
- RTE_SET_USED(last);
- RTE_SET_USED(list);
- RTE_SET_USED(num);
-#else
	struct rte_stack_lf_head old_head;
	int success;

	old_head = list->head;

	do {
		struct rte_stack_lf_head new_head;

		/* Swing the top pointer to the first element of the chain
		 * and make the last element point to the old top.
		 */
		new_head.top = first;
		new_head.cnt = old_head.cnt + 1;

		last->next = old_head.top;

		/* old_head is updated on failure */
		success = rte_atomic128_cmp_exchange(
				(rte_int128_t *)&list->head,
				(rte_int128_t *)&old_head,
				(rte_int128_t *)&new_head,
				1, __ATOMIC_RELEASE,
				__ATOMIC_RELAXED);
	} while (success == 0);
- rte_atomic64_add(&list->len, num);
-#endif
+ rte_atomic64_add((rte_atomic64_t *)&list->len, num);
}
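
The push loop above is a Treiber-stack push: link the new chain to the current top, then publish it with a compare-and-swap, retrying on contention. Below is a single-width sketch under hypothetical names (node, stack_push); the DPDK code instead CASes a 128-bit {top, cnt} pair via rte_atomic128_cmp_exchange so that pops can detect ABA, but the loop structure is the same:

struct node {
	struct node *next;
};

/* Push the chain [first..last] onto the stack whose top pointer is *top.
 * On CAS failure, __atomic_compare_exchange_n reloads the current top
 * into old, so the retry re-links against fresh state.
 */
static void
stack_push(struct node **top, struct node *first, struct node *last)
{
	struct node *old = __atomic_load_n(top, __ATOMIC_RELAXED);

	do {
		last->next = old;	/* link new chain to the old top */
	} while (!__atomic_compare_exchange_n(top, &old, first,
			1, __ATOMIC_RELEASE, __ATOMIC_RELAXED));
}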

static __rte_always_inline struct rte_stack_lf_elem *
__rte_stack_lf_pop_elems(struct rte_stack_lf_list *list,
			 unsigned int num,
			 void **obj_table,
			 struct rte_stack_lf_elem **last)
{
-#ifndef RTE_ARCH_X86_64
- RTE_SET_USED(obj_table);
- RTE_SET_USED(last);
- RTE_SET_USED(list);
- RTE_SET_USED(num);
-
- return NULL;
-#else
struct rte_stack_lf_head old_head;
int success;
/* Reserve num elements, if available */
while (1) {
- uint64_t len = rte_atomic64_read(&list->len);
+ uint64_t len = rte_atomic64_read((rte_atomic64_t *)&list->len);
		/* Does the list contain enough elements? */
		if (unlikely(len < num))
			return NULL;

		/* len is updated on failure */
		if (rte_atomic64_cmpset((volatile uint64_t *)&list->len,
				len, len - num))
			break;
	}

	old_head = list->head;

	/* Pop num elements by walking num links from the old top, then
	 * swinging the head to the remainder with a 128-bit CAS.
	 */
	do {
		struct rte_stack_lf_head new_head;
		struct rte_stack_lf_elem *tmp;
		unsigned int i;

		success = 0;

		for (i = 0, tmp = old_head.top; i < num && tmp != NULL;
				i++, tmp = tmp->next) {
			if (obj_table != NULL)
				obj_table[i] = tmp->data;
			if (last != NULL)
				*last = tmp;
		}

		/* A NULL next pointer means the list changed while it was
		 * traversed; re-read the head and retry.
		 */
		if (i != num) {
			old_head = list->head;
			continue;
		}

		new_head.top = tmp;
		new_head.cnt = old_head.cnt + 1;

		/* old_head is updated on failure */
		success = rte_atomic128_cmp_exchange(
				(rte_int128_t *)&list->head,
				(rte_int128_t *)&old_head,
				(rte_int128_t *)&new_head,
				1, __ATOMIC_RELEASE,
				__ATOMIC_RELAXED);
	} while (success == 0);
return old_head.top;
-#endif
}
#endif /* _RTE_STACK_LF_GENERIC_H_ */
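
For reference, the pop path is two-phase: phase one reserves num elements by atomically decrementing len, so concurrent poppers can never claim more elements than the list holds; phase two walks num links from the old top and swings the head to the remainder. A simplified single-width sketch under hypothetical names (node, stack_pop); it assumes nodes are never freed (as in a mempool) and ignores the ABA hazard that the real code's 128-bit {top, cnt} CAS exists to catch:

#include <stdint.h>
#include <stddef.h>

struct node {
	struct node *next;
};

/* Phase 1: reserve num elements by CASing len downward; phase 2: detach
 * the first num elements by CASing the top pointer to the remainder.
 * Returns the detached chain, or NULL if fewer than num elements exist.
 */
static struct node *
stack_pop(struct node **top, uint64_t *len, unsigned int num)
{
	uint64_t cur = __atomic_load_n(len, __ATOMIC_RELAXED);
	struct node *old, *new_top;
	unsigned int i;

	/* Phase 1: claim num elements, or bail out */
	do {
		if (cur < num)
			return NULL;
	} while (!__atomic_compare_exchange_n(len, &cur, cur - num,
			1, __ATOMIC_RELAXED, __ATOMIC_RELAXED));

	/* Phase 2: walk past the reserved elements and swing the head.
	 * The reservation guarantees at least num elements remain linked.
	 */
	old = __atomic_load_n(top, __ATOMIC_ACQUIRE);
	do {
		new_top = old;
		for (i = 0; i < num; i++)
			new_top = new_top->next;
	} while (!__atomic_compare_exchange_n(top, &old, new_top,
			1, __ATOMIC_RELEASE, __ATOMIC_ACQUIRE));

	return old;
}

Splitting reservation from detachment is what lets multiple poppers make progress concurrently: losing the head CAS only costs a retry of phase two, never the reservation.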