/* SPDX-License-Identifier: BSD-3-Clause
 * Inspired from FreeBSD src/sys/powerpc/include/atomic.h
 * Copyright (c) 2008 Marcel Moolenaar
 * Copyright (c) 2001 Benno Rice
 * Copyright (c) 2001 David E. O'Brien
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 */
11 #ifndef _RTE_ATOMIC_PPC_64_H_
12 #define _RTE_ATOMIC_PPC_64_H_
19 #include "generic/rte_atomic.h"
/**
 * General memory barrier: the PowerPC "sync" instruction.
 */
#define rte_mb() asm volatile("sync" : : : "memory")

/** Write memory barrier (implemented as a full "sync" here). */
#define rte_wmb() asm volatile("sync" : : : "memory")

/** Read memory barrier (implemented as a full "sync" here). */
#define rte_rmb() asm volatile("sync" : : : "memory")

/* SMP-scope barriers map onto the general ones on this architecture. */
#define rte_smp_mb() rte_mb()
#define rte_smp_wmb() rte_wmb()
#define rte_smp_rmb() rte_rmb()

/* I/O-scope barriers likewise map onto the general ones. */
#define rte_io_mb() rte_mb()
#define rte_io_wmb() rte_wmb()
#define rte_io_rmb() rte_rmb()

/* Coherent-I/O barriers. */
#define rte_cio_wmb() rte_wmb()
#define rte_cio_rmb() rte_rmb()
43 static __rte_always_inline void
44 rte_atomic_thread_fence(int memory_order)
46 __atomic_thread_fence(memory_order);
49 /*------------------------- 16 bit atomic operations -------------------------*/
/* To be compatible with Power7, use GCC built-in functions for 16-bit
 * operations.
 */
53 #ifndef RTE_FORCE_INTRINSICS
/**
 * Compare-and-set on a 16-bit word via the GCC builtin
 * (Power7-compatible; no lharx/sthcx. required).
 *
 * @return 1 if *dst equalled exp and was replaced by src, 0 otherwise.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	int ok;

	ok = __atomic_compare_exchange(dst, &exp, &src, 0, __ATOMIC_ACQUIRE,
		__ATOMIC_ACQUIRE);
	return ok ? 1 : 0;
}
61 static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
63 return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
67 rte_atomic16_inc(rte_atomic16_t *v)
69 __atomic_add_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);
73 rte_atomic16_dec(rte_atomic16_t *v)
75 __atomic_sub_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);
78 static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
80 return __atomic_add_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE) == 0;
83 static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
85 return __atomic_sub_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE) == 0;
/**
 * Atomically swap *dst with val; return the previous value
 * (sequentially consistent).
 *
 * Uses the documented type-generic __atomic_exchange_n builtin instead
 * of the internal size-suffixed __atomic_exchange_2 entry point; the
 * generated code and semantics are identical.
 */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
}
94 /*------------------------- 32 bit atomic operations -------------------------*/
97 rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
103 "1:\tlwarx %[ret], 0, %[dst]\n"
104 "cmplw %[exp], %[ret]\n"
106 "stwcx. %[src], 0, %[dst]\n"
111 "stwcx. %[ret], 0, %[dst]\n"
115 : [ret] "=&r" (ret), "=m" (*dst)
125 static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
127 return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
131 rte_atomic32_inc(rte_atomic32_t *v)
136 "1: lwarx %[t],0,%[cnt]\n"
137 "addic %[t],%[t],1\n"
138 "stwcx. %[t],0,%[cnt]\n"
140 : [t] "=&r" (t), "=m" (v->cnt)
141 : [cnt] "r" (&v->cnt), "m" (v->cnt)
142 : "cc", "xer", "memory");
146 rte_atomic32_dec(rte_atomic32_t *v)
151 "1: lwarx %[t],0,%[cnt]\n"
152 "addic %[t],%[t],-1\n"
153 "stwcx. %[t],0,%[cnt]\n"
155 : [t] "=&r" (t), "=m" (v->cnt)
156 : [cnt] "r" (&v->cnt), "m" (v->cnt)
157 : "cc", "xer", "memory");
160 static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
166 "1: lwarx %[ret],0,%[cnt]\n"
167 "addic %[ret],%[ret],1\n"
168 "stwcx. %[ret],0,%[cnt]\n"
172 : [cnt] "r" (&v->cnt)
173 : "cc", "xer", "memory");
178 static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
184 "1: lwarx %[ret],0,%[cnt]\n"
185 "addic %[ret],%[ret],-1\n"
186 "stwcx. %[ret],0,%[cnt]\n"
190 : [cnt] "r" (&v->cnt)
191 : "cc", "xer", "memory");
/**
 * Atomically swap *dst with val; return the previous value
 * (sequentially consistent).
 *
 * Uses the documented type-generic __atomic_exchange_n builtin instead
 * of the internal size-suffixed __atomic_exchange_4 entry point; the
 * generated code and semantics are identical.
 */
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
}
202 /*------------------------- 64 bit atomic operations -------------------------*/
205 rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
207 unsigned int ret = 0;
211 "1: ldarx %[ret], 0, %[dst]\n"
212 "cmpld %[exp], %[ret]\n"
214 "stdcx. %[src], 0, %[dst]\n"
219 "stdcx. %[ret], 0, %[dst]\n"
223 : [ret] "=&r" (ret), "=m" (*dst)
233 rte_atomic64_init(rte_atomic64_t *v)
238 static inline int64_t
239 rte_atomic64_read(rte_atomic64_t *v)
243 asm volatile("ld%U1%X1 %[ret],%[cnt]"
245 : [cnt] "m"(v->cnt));
251 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
253 asm volatile("std%U0%X0 %[new_value],%[cnt]"
255 : [new_value] "r"(new_value));
259 rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
264 "1: ldarx %[t],0,%[cnt]\n"
265 "add %[t],%[inc],%[t]\n"
266 "stdcx. %[t],0,%[cnt]\n"
268 : [t] "=&r" (t), "=m" (v->cnt)
269 : [cnt] "r" (&v->cnt), [inc] "r" (inc), "m" (v->cnt)
274 rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
279 "1: ldarx %[t],0,%[cnt]\n"
280 "subf %[t],%[dec],%[t]\n"
281 "stdcx. %[t],0,%[cnt]\n"
283 : [t] "=&r" (t), "+m" (v->cnt)
284 : [cnt] "r" (&v->cnt), [dec] "r" (dec), "m" (v->cnt)
289 rte_atomic64_inc(rte_atomic64_t *v)
294 "1: ldarx %[t],0,%[cnt]\n"
295 "addic %[t],%[t],1\n"
296 "stdcx. %[t],0,%[cnt]\n"
298 : [t] "=&r" (t), "+m" (v->cnt)
299 : [cnt] "r" (&v->cnt), "m" (v->cnt)
300 : "cc", "xer", "memory");
304 rte_atomic64_dec(rte_atomic64_t *v)
309 "1: ldarx %[t],0,%[cnt]\n"
310 "addic %[t],%[t],-1\n"
311 "stdcx. %[t],0,%[cnt]\n"
313 : [t] "=&r" (t), "+m" (v->cnt)
314 : [cnt] "r" (&v->cnt), "m" (v->cnt)
315 : "cc", "xer", "memory");
318 static inline int64_t
319 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
325 "1: ldarx %[ret],0,%[cnt]\n"
326 "add %[ret],%[inc],%[ret]\n"
327 "stdcx. %[ret],0,%[cnt]\n"
331 : [inc] "r" (inc), [cnt] "r" (&v->cnt)
337 static inline int64_t
338 rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
344 "1: ldarx %[ret],0,%[cnt]\n"
345 "subf %[ret],%[dec],%[ret]\n"
346 "stdcx. %[ret],0,%[cnt]\n"
350 : [dec] "r" (dec), [cnt] "r" (&v->cnt)
356 static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
362 "1: ldarx %[ret],0,%[cnt]\n"
363 "addic %[ret],%[ret],1\n"
364 "stdcx. %[ret],0,%[cnt]\n"
368 : [cnt] "r" (&v->cnt)
369 : "cc", "xer", "memory");
374 static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
380 "1: ldarx %[ret],0,%[cnt]\n"
381 "addic %[ret],%[ret],-1\n"
382 "stdcx. %[ret],0,%[cnt]\n"
386 : [cnt] "r" (&v->cnt)
387 : "cc", "xer", "memory");
392 static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
394 return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
397 * Atomically set a 64-bit counter to 0.
400 * A pointer to the atomic counter.
402 static inline void rte_atomic64_clear(rte_atomic64_t *v)
/**
 * Atomically swap *dst with val; return the previous value
 * (sequentially consistent).
 *
 * Uses the documented type-generic __atomic_exchange_n builtin instead
 * of the internal size-suffixed __atomic_exchange_8 entry point; the
 * generated code and semantics are identical.
 */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
}
419 #endif /* _RTE_ATOMIC_PPC_64_H_ */