 *   Copyright(c) 2015 RehiveTech. All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of RehiveTech nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33 #ifndef _RTE_ATOMIC_ARM32_H_
34 #define _RTE_ATOMIC_ARM32_H_
36 #ifndef RTE_FORCE_INTRINSICS
37 # error Platform must be built with CONFIG_RTE_FORCE_INTRINSICS
44 #include "generic/rte_atomic.h"
47 * General memory barrier.
49 * Guarantees that the LOAD and STORE operations generated before the
50 * barrier occur before the LOAD and STORE operations generated after.
52 #define rte_mb() __sync_synchronize()
55 * Write memory barrier.
57 * Guarantees that the STORE operations generated before the barrier
58 * occur before the STORE operations generated after.
60 #define rte_wmb() do { asm volatile ("dmb st" : : : "memory"); } while (0)
63 * Read memory barrier.
65 * Guarantees that the LOAD operations generated before the barrier
66 * occur before the LOAD operations generated after.
68 #define rte_rmb() __sync_synchronize()
70 #define rte_smp_mb() rte_mb()
72 #define rte_smp_wmb() rte_wmb()
74 #define rte_smp_rmb() rte_rmb()
76 #define rte_io_mb() rte_mb()
78 #define rte_io_wmb() rte_wmb()
80 #define rte_io_rmb() rte_rmb()
82 #define rte_cio_wmb() rte_wmb()
84 #define rte_cio_rmb() rte_rmb()
90 #endif /* _RTE_ATOMIC_ARM32_H_ */