* elements. If the mempool is near-empty to the point that this is a
* concern, the user should consider increasing the mempool size.
*/
- return (unsigned int)__atomic_load_n(&s->stack_lf.used.len.cnt,
+ return (unsigned int)__atomic_load_n(&s->stack_lf.used.len,
__ATOMIC_RELAXED);
}
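
For readers unfamiliar with the builtin on the new line, a minimal standalone sketch, assuming a plain uint64_t counter; pool_len and approx_count are illustrative names, not part of the patch:

#include <stdint.h>

struct pool_len {
	uint64_t len;
};

/* Approximate reader: a relaxed load is enough because the caller only
 * wants a snapshot that may lag behind concurrent pushes and pops.
 */
static unsigned int
approx_count(struct pool_len *p)
{
	return (unsigned int)__atomic_load_n(&p->len, __ATOMIC_RELAXED);
}
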
/* Ensure the stack modifications are not reordered with respect
* to the LIFO len update.
*/
- __atomic_add_fetch(&list->len.cnt, num, __ATOMIC_RELEASE);
+ __atomic_add_fetch(&list->len, num, __ATOMIC_RELEASE);
#endif
}
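
A minimal sketch of the release/acquire pairing that comment relies on, assuming a single producer; demo_list, demo_push and demo_peek_last are illustrative, not rte_stack code:

#include <stddef.h>
#include <stdint.h>

struct demo_list {
	void *slot[64];
	uint64_t len;
};

/* Single-producer push: the RELEASE add orders the slot store before the
 * len update, so a reader that ACQUIRE-loads the new len also sees the slot.
 */
static void
demo_push(struct demo_list *l, void *obj)
{
	uint64_t old = __atomic_load_n(&l->len, __ATOMIC_RELAXED);

	l->slot[old] = obj;
	__atomic_add_fetch(&l->len, 1, __ATOMIC_RELEASE);
}

static void *
demo_peek_last(struct demo_list *l)
{
	uint64_t n = __atomic_load_n(&l->len, __ATOMIC_ACQUIRE);

	return n ? l->slot[n - 1] : NULL;
}
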
int success;
/* Reserve num elements, if available */
- len = __atomic_load_n(&list->len.cnt, __ATOMIC_ACQUIRE);
+ len = __atomic_load_n(&list->len, __ATOMIC_ACQUIRE);
while (1) {
/* Does the list contain enough elements? */
if (unlikely(len < num))
	return NULL;
/* len is updated on failure */
- if (__atomic_compare_exchange_n(&list->len.cnt,
+ if (__atomic_compare_exchange_n(&list->len,
&len, len - num,
0, __ATOMIC_ACQUIRE,
__ATOMIC_ACQUIRE))
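
The "len is updated on failure" remark refers to __atomic_compare_exchange_n writing the observed value back into its expected argument. A hedged sketch of that reservation loop; try_reserve is a hypothetical helper, not a DPDK API:

#include <stdbool.h>
#include <stdint.h>

static bool
try_reserve(uint64_t *len_p, unsigned int num)
{
	uint64_t len = __atomic_load_n(len_p, __ATOMIC_ACQUIRE);

	while (1) {
		/* Not enough elements available */
		if (len < num)
			return false;

		/* On failure, the builtin stores the current value into
		 * 'len', so the next iteration re-checks without reloading.
		 */
		if (__atomic_compare_exchange_n(len_p, &len, len - num,
						0, __ATOMIC_ACQUIRE,
						__ATOMIC_ACQUIRE))
			return true;
	}
}
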
* elements. If the mempool is near-empty to the point that this is a
* concern, the user should consider increasing the mempool size.
*/
- return (unsigned int)rte_atomic64_read(&s->stack_lf.used.len);
+ return (unsigned int)rte_atomic64_read((rte_atomic64_t *)
+ &s->stack_lf.used.len);
}
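
The cast works only because the field is a 64-bit integer either way. A hedged sketch of the idiom, with legacy_atomic64_t and legacy_read as illustrative stand-ins rather than the real DPDK definitions:

#include <stdint.h>

typedef struct {
	volatile int64_t cnt;
} legacy_atomic64_t;

static int64_t
legacy_read(legacy_atomic64_t *v)
{
	return v->cnt;
}

struct lf_list {
	uint64_t len;	/* now a plain integer, not the wrapper type */
};

/* The cast only satisfies the legacy API's parameter type; it assumes the
 * wrapper has the same size and alignment as uint64_t.
 */
static unsigned int
lf_count(struct lf_list *l)
{
	return (unsigned int)legacy_read((legacy_atomic64_t *)&l->len);
}
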
static __rte_always_inline void
__ATOMIC_RELAXED);
} while (success == 0);
- rte_atomic64_add(&list->len, num);
+ rte_atomic64_add((rte_atomic64_t *)&list->len, num);
#endif
}
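
Taken together, the two hunks are alternative build-time implementations of the same len update. A sketch under that assumption, reusing the legacy_atomic64_t stand-in from the previous sketch; USE_C11_PATH and legacy_atomic64_add are illustrative names, not DPDK's:

#include <stdint.h>

typedef struct {
	volatile int64_t cnt;
} legacy_atomic64_t;	/* illustrative stand-in */

static void
legacy_atomic64_add(legacy_atomic64_t *v, int64_t inc)
{
	/* full-barrier add as a stand-in for the legacy helper */
	__atomic_fetch_add(&v->cnt, inc, __ATOMIC_SEQ_CST);
}

static uint64_t list_len;	/* stands in for list->len */

static void
bump_len(unsigned int num)
{
#ifdef USE_C11_PATH
	/* C11 variant: the release add pairs with the acquire load in pop */
	__atomic_add_fetch(&list_len, num, __ATOMIC_RELEASE);
#else
	/* Legacy variant: same 64-bit field, reached through the cast */
	legacy_atomic64_add((legacy_atomic64_t *)&list_len, num);
#endif
}
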
/* Reserve num elements, if available */
while (1) {
- uint64_t len = rte_atomic64_read(&list->len);
+ uint64_t len = rte_atomic64_read((rte_atomic64_t *)&list->len);
/* Does the list contain enough elements? */
if (unlikely(len < num))