 * Copyright (C) IBM Corporation 2014.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 * * Redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in
 * the documentation and/or other materials provided with the
 * distribution.
 * * Neither the name of IBM Corporation nor the names of its
 * contributors may be used to endorse or promote products derived
 * from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33 #ifndef _RTE_SPINLOCK_PPC_64_H_
34 #define _RTE_SPINLOCK_PPC_64_H_
40 #include <rte_common.h>
41 #include <rte_pause.h>
42 #include "generic/rte_spinlock.h"
/* Fixme: Use intrinsics to implement the spinlock on Power architecture */
46 #ifndef RTE_FORCE_INTRINSICS
49 rte_spinlock_lock(rte_spinlock_t *sl)
51 while (__sync_lock_test_and_set(&sl->locked, 1))
57 rte_spinlock_unlock(rte_spinlock_t *sl)
59 __sync_lock_release(&sl->locked);
63 rte_spinlock_trylock(rte_spinlock_t *sl)
65 return __sync_lock_test_and_set(&sl->locked, 1) == 0;
70 static inline int rte_tm_supported(void)
76 rte_spinlock_lock_tm(rte_spinlock_t *sl)
78 rte_spinlock_lock(sl); /* fall-back */
82 rte_spinlock_trylock_tm(rte_spinlock_t *sl)
84 return rte_spinlock_trylock(sl);
88 rte_spinlock_unlock_tm(rte_spinlock_t *sl)
90 rte_spinlock_unlock(sl);
94 rte_spinlock_recursive_lock_tm(rte_spinlock_recursive_t *slr)
96 rte_spinlock_recursive_lock(slr); /* fall-back */
100 rte_spinlock_recursive_unlock_tm(rte_spinlock_recursive_t *slr)
102 rte_spinlock_recursive_unlock(slr);
106 rte_spinlock_recursive_trylock_tm(rte_spinlock_recursive_t *slr)
108 return rte_spinlock_recursive_trylock(slr);
115 #endif /* _RTE_SPINLOCK_PPC_64_H_ */