4 * Copyright (C) Cavium networks Ltd. 2015.
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
10 * * Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * * Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in
14 * the documentation and/or other materials provided with the
16 * * Neither the name of Cavium networks nor the names of its
17 * contributors may be used to endorse or promote products derived
18 * from this software without specific prior written permission.
20 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
23 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
24 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
25 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
26 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
27 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
28 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
29 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
/* Double-inclusion guard for the arm64 atomic/barrier definitions. */
#ifndef _RTE_ATOMIC_ARM64_H_
#define _RTE_ATOMIC_ARM64_H_

/*
 * This header is only usable when atomics are implemented via compiler
 * intrinsics; fail the build early with a clear message otherwise.
 */
#ifndef RTE_FORCE_INTRINSICS
# error Platform must be built with CONFIG_RTE_FORCE_INTRINSICS

/* Architecture-independent declarations this header provides arm64
 * implementations for. */
#include "generic/rte_atomic.h"
/*
 * Emit an ARMv8-A data synchronization / data memory barrier instruction.
 *
 * @param opt
 *   Barrier domain/access-type option token (e.g. sy, ish, ishst, ishld);
 *   it is stringized and pasted into the instruction mnemonic.
 *
 * Wrapped in do { } while (0) rather than bare braces so each macro
 * expands to exactly one statement: with a brace-only wrapper,
 * "if (cond) dmb(ish); else ..." fails to compile because the trailing
 * semicolon ends the if before the else (dangling-else hazard).
 * The "memory" clobber prevents the compiler from reordering memory
 * accesses across the barrier.
 */
#define dsb(opt) do { asm volatile("dsb " #opt : : : "memory"); } while (0)
#define dmb(opt) do { asm volatile("dmb " #opt : : : "memory"); } while (0)
/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 * This function is architecture dependent.
 */
static inline void rte_mb(void)
/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 * This function is architecture dependent.
 */
static inline void rte_wmb(void)
/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 * This function is architecture dependent.
 */
static inline void rte_rmb(void)
/* SMP full barrier: orders loads and stores between cores, restricted to
 * the inner shareable domain (ish). */
#define rte_smp_mb() dmb(ish)

/* SMP write barrier: orders stores only (ishst = inner shareable, store). */
#define rte_smp_wmb() dmb(ishst)

/* SMP read barrier: orders loads only (ishld = inner shareable, load). */
#define rte_smp_rmb() dmb(ishld)

/* I/O barriers: delegate to the architecture barrier functions above. */
#define rte_io_mb() rte_mb()

#define rte_io_wmb() rte_wmb()

#define rte_io_rmb() rte_rmb()
101 #endif /* _RTE_ATOMIC_ARM64_H_ */