2 * SPDX-License-Identifier: BSD-3-Clause
3 * Inspired from FreeBSD src/sys/powerpc/include/atomic.h
4 * Copyright (c) 2021 IBM Corporation
5 * Copyright (c) 2008 Marcel Moolenaar
6 * Copyright (c) 2001 Benno Rice
7 * Copyright (c) 2001 David E. O'Brien
8 * Copyright (c) 1998 Doug Rabson
12 #ifndef _RTE_ATOMIC_PPC_64_H_
13 #define _RTE_ATOMIC_PPC_64_H_
20 #include <rte_compat.h>
21 #include "generic/rte_atomic.h"
/**
 * General memory barrier: orders all prior loads and stores before all
 * subsequent ones. Implemented with the POWER "sync" instruction.
 */
#define rte_mb() asm volatile("sync" : : : "memory")

/** Write memory barrier — also a full "sync" on this architecture. */
#define rte_wmb() asm volatile("sync" : : : "memory")

/** Read memory barrier — also a full "sync" on this architecture. */
#define rte_rmb() asm volatile("sync" : : : "memory")

/* SMP (CPU-to-CPU) barriers map directly onto the general barriers. */
#define rte_smp_mb() rte_mb()

#define rte_smp_wmb() rte_wmb()

#define rte_smp_rmb() rte_rmb()

/* I/O barriers likewise reuse the general barriers. */
#define rte_io_mb() rte_mb()

#define rte_io_wmb() rte_wmb()

#define rte_io_rmb() rte_rmb()
/**
 * Issue a thread fence with the requested ordering.
 *
 * @param memorder
 *   One of the __ATOMIC_* memory-order constants; passed straight
 *   through to the compiler's __atomic_thread_fence builtin.
 */
static __rte_always_inline void
rte_atomic_thread_fence(int memorder)
{
	__atomic_thread_fence(memorder);
}
47 /*------------------------- 16 bit atomic operations -------------------------*/
48 #ifndef RTE_FORCE_INTRINSICS
/**
 * Atomic 16-bit compare-and-set.
 *
 * @return 1 when *dst equalled exp (and was replaced by src), 0 otherwise.
 *   Acquire ordering is applied on both the success and failure paths.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __atomic_compare_exchange_n(dst, &exp, src, 0,
		__ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE) ? 1 : 0;
}
56 static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
58 return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
62 rte_atomic16_inc(rte_atomic16_t *v)
64 __atomic_add_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);
68 rte_atomic16_dec(rte_atomic16_t *v)
70 __atomic_sub_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);
73 static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
75 return __atomic_add_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE) == 0;
78 static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
80 return __atomic_sub_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE) == 0;
/**
 * Atomically store val into *dst and return the previous contents
 * (sequentially consistent).
 */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
	uint16_t prev;

	prev = __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
	return prev;
}
89 /*------------------------- 32 bit atomic operations -------------------------*/
/**
 * Atomic 32-bit compare-and-set.
 *
 * @return 1 when *dst equalled exp (and was replaced by src), 0 otherwise.
 *   Acquire ordering is applied on both the success and failure paths.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __atomic_compare_exchange_n(dst, &exp, src, 0,
		__ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE) ? 1 : 0;
}
98 static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
100 return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
104 rte_atomic32_inc(rte_atomic32_t *v)
106 __atomic_add_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);
110 rte_atomic32_dec(rte_atomic32_t *v)
112 __atomic_sub_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);
115 static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
117 return __atomic_add_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE) == 0;
120 static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
122 return __atomic_sub_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE) == 0;
/**
 * Atomically store val into *dst and return the previous contents
 * (sequentially consistent).
 */
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
	uint32_t prev;

	prev = __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
	return prev;
}
131 /*------------------------- 64 bit atomic operations -------------------------*/
/**
 * Atomic 64-bit compare-and-set.
 *
 * @return 1 when *dst equalled exp (and was replaced by src), 0 otherwise.
 *   Acquire ordering is applied on both the success and failure paths.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __atomic_compare_exchange_n(dst, &exp, src, 0,
		__ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE) ? 1 : 0;
}
141 rte_atomic64_init(rte_atomic64_t *v)
146 static inline int64_t
147 rte_atomic64_read(rte_atomic64_t *v)
153 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
159 rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
161 __atomic_add_fetch(&v->cnt, inc, __ATOMIC_ACQUIRE);
165 rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
167 __atomic_sub_fetch(&v->cnt, dec, __ATOMIC_ACQUIRE);
171 rte_atomic64_inc(rte_atomic64_t *v)
173 __atomic_add_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);
177 rte_atomic64_dec(rte_atomic64_t *v)
179 __atomic_sub_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);
182 static inline int64_t
183 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
185 return __atomic_add_fetch(&v->cnt, inc, __ATOMIC_ACQUIRE);
188 static inline int64_t
189 rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
191 return __atomic_sub_fetch(&v->cnt, dec, __ATOMIC_ACQUIRE);
194 static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
196 return __atomic_add_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE) == 0;
199 static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
201 return __atomic_sub_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE) == 0;
204 static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
206 return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
209 static inline void rte_atomic64_clear(rte_atomic64_t *v)
/**
 * Atomically store val into *dst and return the previous contents
 * (sequentially consistent).
 */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
	uint64_t prev;

	prev = __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
	return prev;
}
226 #endif /* _RTE_ATOMIC_PPC_64_H_ */