/*
 * SPDX-License-Identifier: BSD-3-Clause
 * Inspired from FreeBSD src/sys/powerpc/include/atomic.h
 * Copyright (c) 2008 Marcel Moolenaar
 * Copyright (c) 2001 Benno Rice
 * Copyright (c) 2001 David E. O'Brien
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 */

#ifndef _RTE_ATOMIC_PPC_64_H_
#define _RTE_ATOMIC_PPC_64_H_

#ifdef __cplusplus
extern "C" {
#endif

#include <stdint.h>
#include "generic/rte_atomic.h"
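/*
 * All of the memory barrier flavours below map to the heavyweight "sync"
 * (hwsync) instruction, which orders both loads and stores; the SMP, I/O
 * and coherent-I/O variants are aliases of the same full barrier.
 */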
#define rte_mb() asm volatile("sync" : : : "memory")

#define rte_wmb() asm volatile("sync" : : : "memory")

#define rte_rmb() asm volatile("sync" : : : "memory")

#define rte_smp_mb() rte_mb()

#define rte_smp_wmb() rte_wmb()

#define rte_smp_rmb() rte_rmb()

#define rte_io_mb() rte_mb()

#define rte_io_wmb() rte_wmb()

#define rte_io_rmb() rte_rmb()

#define rte_cio_wmb() rte_wmb()

#define rte_cio_rmb() rte_rmb()
/*------------------------- 16 bit atomic operations -------------------------*/
/* To be compatible with Power7, use GCC built-in functions for 16 bit
 * operations.
 */
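/*
 * When RTE_FORCE_INTRINSICS is defined, the compiler-builtin
 * implementations from generic/rte_atomic.h are used instead of the
 * hand-coded versions below.
 */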
#ifndef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
    return __atomic_compare_exchange(dst, &exp, &src, 0, __ATOMIC_ACQUIRE,
        __ATOMIC_ACQUIRE) ? 1 : 0;
}

static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
    return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}

static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
    __atomic_add_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);
}

static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
    __atomic_sub_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);
}

static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
    return __atomic_add_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE) == 0;
}

static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
    return __atomic_sub_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE) == 0;
}

static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
    return __atomic_exchange_2(dst, val, __ATOMIC_SEQ_CST);
}
/*------------------------- 32 bit atomic operations -------------------------*/
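/*
 * The 32-bit read-modify-write operations are open-coded as lwarx/stwcx.
 * (load-reserve/store-conditional) retry loops. Plain inc/dec are
 * unordered; cmpset and the *_and_test variants are bracketed by
 * lwsync/isync for release/acquire ordering.
 */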
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
    unsigned int ret = 0;

    asm volatile(
            "\tlwsync\n"
            "1:\tlwarx %[ret], 0, %[dst]\n"
            "cmplw %[exp], %[ret]\n"
            "bne 2f\n"
            "stwcx. %[src], 0, %[dst]\n"
            "bne- 1b\n"
            "li %[ret], 1\n"
            "b 3f\n"
            "2:\n"
            "stwcx. %[ret], 0, %[dst]\n"
            "li %[ret], 0\n"
            "3:\n"
            "isync\n"
            : [ret] "=&r" (ret), "=m" (*dst)
            : [dst] "r" (dst),
              [exp] "r" (exp),
              [src] "r" (src),
              "m" (*dst)
            : "cc", "memory");

    return ret;
}
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
    return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
    int t;

    asm volatile(
            "1: lwarx %[t],0,%[cnt]\n"
            "addic %[t],%[t],1\n"
            "stwcx. %[t],0,%[cnt]\n"
            "bne- 1b\n"
            : [t] "=&r" (t), "=m" (v->cnt)
            : [cnt] "r" (&v->cnt), "m" (v->cnt)
            : "cc", "xer", "memory");
}
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
    int t;

    asm volatile(
            "1: lwarx %[t],0,%[cnt]\n"
            "addic %[t],%[t],-1\n"
            "stwcx. %[t],0,%[cnt]\n"
            "bne- 1b\n"
            : [t] "=&r" (t), "=m" (v->cnt)
            : [cnt] "r" (&v->cnt), "m" (v->cnt)
            : "cc", "xer", "memory");
}
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
    int ret;

    asm volatile(
            "\n\tlwsync\n"
            "1: lwarx %[ret],0,%[cnt]\n"
            "addic %[ret],%[ret],1\n"
            "stwcx. %[ret],0,%[cnt]\n"
            "bne- 1b\n"
            "isync\n"
            : [ret] "=&r" (ret)
            : [cnt] "r" (&v->cnt)
            : "cc", "xer", "memory");

    return ret == 0;
}
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
    int ret;

    asm volatile(
            "\n\tlwsync\n"
            "1: lwarx %[ret],0,%[cnt]\n"
            "addic %[ret],%[ret],-1\n"
            "stwcx. %[ret],0,%[cnt]\n"
            "bne- 1b\n"
            "isync\n"
            : [ret] "=&r" (ret)
            : [cnt] "r" (&v->cnt)
            : "cc", "xer", "memory");

    return ret == 0;
}
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
    return __atomic_exchange_4(dst, val, __ATOMIC_SEQ_CST);
}
/*------------------------- 64 bit atomic operations -------------------------*/
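/*
 * The 64-bit operations mirror the 32-bit ones, using the doubleword
 * ldarx/stdcx. reservation pair and cmpld for the comparison.
 */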
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
    unsigned int ret = 0;

    asm volatile (
            "\tlwsync\n"
            "1: ldarx %[ret], 0, %[dst]\n"
            "cmpld %[exp], %[ret]\n"
            "bne 2f\n"
            "stdcx. %[src], 0, %[dst]\n"
            "bne- 1b\n"
            "li %[ret], 1\n"
            "b 3f\n"
            "2:\n"
            "stdcx. %[ret], 0, %[dst]\n"
            "li %[ret], 0\n"
            "3:\n"
            "isync\n"
            : [ret] "=&r" (ret), "=m" (*dst)
            : [dst] "r" (dst),
              [exp] "r" (exp),
              [src] "r" (src),
              "m" (*dst)
            : "cc", "memory");

    return ret;
}
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
    v->cnt = 0;
}
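/*
 * On 64-bit Power, aligned doubleword loads and stores are atomic, so
 * read and set need only a single ld/std instruction.
 */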
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
    long ret;

    asm volatile("ld%U1%X1 %[ret],%[cnt]"
        : [ret] "=r"(ret)
        : [cnt] "m"(v->cnt));

    return ret;
}
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
    asm volatile("std%U0%X0 %[new_value],%[cnt]"
        : [cnt] "=m"(v->cnt)
        : [new_value] "r"(new_value));
}
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
    long t;

    asm volatile(
            "1: ldarx %[t],0,%[cnt]\n"
            "add %[t],%[inc],%[t]\n"
            "stdcx. %[t],0,%[cnt]\n"
            "bne- 1b\n"
            : [t] "=&r" (t), "=m" (v->cnt)
            : [cnt] "r" (&v->cnt), [inc] "r" (inc), "m" (v->cnt)
            : "cc", "memory");
}
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
    long t;

    asm volatile(
            "1: ldarx %[t],0,%[cnt]\n"
            "subf %[t],%[dec],%[t]\n"
            "stdcx. %[t],0,%[cnt]\n"
            "bne- 1b\n"
            : [t] "=&r" (t), "+m" (v->cnt)
            : [cnt] "r" (&v->cnt), [dec] "r" (dec), "m" (v->cnt)
            : "cc", "memory");
}
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
    long t;

    asm volatile(
            "1: ldarx %[t],0,%[cnt]\n"
            "addic %[t],%[t],1\n"
            "stdcx. %[t],0,%[cnt]\n"
            "bne- 1b\n"
            : [t] "=&r" (t), "+m" (v->cnt)
            : [cnt] "r" (&v->cnt), "m" (v->cnt)
            : "cc", "xer", "memory");
}
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
    long t;

    asm volatile(
            "1: ldarx %[t],0,%[cnt]\n"
            "addic %[t],%[t],-1\n"
            "stdcx. %[t],0,%[cnt]\n"
            "bne- 1b\n"
            : [t] "=&r" (t), "+m" (v->cnt)
            : [cnt] "r" (&v->cnt), "m" (v->cnt)
            : "cc", "xer", "memory");
}
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
    long ret;

    asm volatile(
            "\n\tlwsync\n"
            "1: ldarx %[ret],0,%[cnt]\n"
            "add %[ret],%[inc],%[ret]\n"
            "stdcx. %[ret],0,%[cnt]\n"
            "bne- 1b\n"
            "isync\n"
            : [ret] "=&r" (ret)
            : [inc] "r" (inc), [cnt] "r" (&v->cnt)
            : "cc", "memory");

    return ret;
}
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
    long ret;

    asm volatile(
            "\n\tlwsync\n"
            "1: ldarx %[ret],0,%[cnt]\n"
            "subf %[ret],%[dec],%[ret]\n"
            "stdcx. %[ret],0,%[cnt]\n"
            "bne- 1b\n"
            "isync\n"
            : [ret] "=&r" (ret)
            : [dec] "r" (dec), [cnt] "r" (&v->cnt)
            : "cc", "memory");

    return ret;
}
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
    long ret;

    asm volatile(
            "\n\tlwsync\n"
            "1: ldarx %[ret],0,%[cnt]\n"
            "addic %[ret],%[ret],1\n"
            "stdcx. %[ret],0,%[cnt]\n"
            "bne- 1b\n"
            "isync\n"
            : [ret] "=&r" (ret)
            : [cnt] "r" (&v->cnt)
            : "cc", "xer", "memory");

    return ret == 0;
}
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
    long ret;

    asm volatile(
            "\n\tlwsync\n"
            "1: ldarx %[ret],0,%[cnt]\n"
            "addic %[ret],%[ret],-1\n"
            "stdcx. %[ret],0,%[cnt]\n"
            "bne- 1b\n"
            "isync\n"
            : [ret] "=&r" (ret)
            : [cnt] "r" (&v->cnt)
            : "cc", "xer", "memory");

    return ret == 0;
}
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
    return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}

/**
 * Atomically set a 64-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
    v->cnt = 0;
}
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
    return __atomic_exchange_8(dst, val, __ATOMIC_SEQ_CST);
}

#endif

#ifdef __cplusplus
}
#endif

#endif /* _RTE_ATOMIC_PPC_64_H_ */