1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2020 Intel Corporation
5 #ifndef _RTE_POWER_INTRINSIC_X86_H_
6 #define _RTE_POWER_INTRINSIC_X86_H_
12 #include <rte_common.h>
14 #include "generic/rte_power_intrinsics.h"
/**
 * Read the monitored memory location as a uint64_t, given its width.
 *
 * Internal helper for the UMWAIT wakeup-condition pre-check: dereferences
 * @p p through a volatile pointer of the requested width and zero-extends
 * the result to 64 bits.
 *
 * @param p
 *   Address to read; must be valid for @p sz bytes.
 * @param sz
 *   Width of the read, in bytes: sizeof(uint8_t/uint16_t/uint32_t/uint64_t).
 * @return
 *   The value read, zero-extended to 64 bits, or 0 for an unsupported width.
 */
static inline uint64_t
__rte_power_get_umwait_val(const volatile void *p, const uint8_t sz)
{
	switch (sz) {
	case sizeof(uint8_t):
		return *(const volatile uint8_t *)p;
	case sizeof(uint16_t):
		return *(const volatile uint16_t *)p;
	case sizeof(uint32_t):
		return *(const volatile uint32_t *)p;
	case sizeof(uint64_t):
		return *(const volatile uint64_t *)p;
	default:
		/* this is an intrinsic, so we can't have any error handling */
		return 0;
	}
}
36 * This function uses UMONITOR/UMWAIT instructions and will enter C0.2 state.
37 * For more information about usage of these instructions, please refer to
38 * Intel(R) 64 and IA-32 Architectures Software Developer's Manual.
41 rte_power_monitor(const volatile void *p, const uint64_t expected_value,
42 const uint64_t value_mask, const uint64_t tsc_timestamp,
43 const uint8_t data_sz)
45 const uint32_t tsc_l = (uint32_t)tsc_timestamp;
46 const uint32_t tsc_h = (uint32_t)(tsc_timestamp >> 32);
48 * we're using raw byte codes for now as only the newest compiler
49 * versions support this instruction natively.
52 /* set address for UMONITOR */
53 asm volatile(".byte 0xf3, 0x0f, 0xae, 0xf7;"
58 const uint64_t cur_value = __rte_power_get_umwait_val(p, data_sz);
59 const uint64_t masked = cur_value & value_mask;
61 /* if the masked value is already matching, abort */
62 if (masked == expected_value)
66 asm volatile(".byte 0xf2, 0x0f, 0xae, 0xf7;"
68 : "D"(0), /* enter C0.2 */
69 "a"(tsc_l), "d"(tsc_h));
73 * This function uses UMONITOR/UMWAIT instructions and will enter C0.2 state.
74 * For more information about usage of these instructions, please refer to
75 * Intel(R) 64 and IA-32 Architectures Software Developer's Manual.
78 rte_power_monitor_sync(const volatile void *p, const uint64_t expected_value,
79 const uint64_t value_mask, const uint64_t tsc_timestamp,
80 const uint8_t data_sz, rte_spinlock_t *lck)
82 const uint32_t tsc_l = (uint32_t)tsc_timestamp;
83 const uint32_t tsc_h = (uint32_t)(tsc_timestamp >> 32);
85 * we're using raw byte codes for now as only the newest compiler
86 * versions support this instruction natively.
89 /* set address for UMONITOR */
90 asm volatile(".byte 0xf3, 0x0f, 0xae, 0xf7;"
95 const uint64_t cur_value = __rte_power_get_umwait_val(p, data_sz);
96 const uint64_t masked = cur_value & value_mask;
98 /* if the masked value is already matching, abort */
99 if (masked == expected_value)
102 rte_spinlock_unlock(lck);
105 asm volatile(".byte 0xf2, 0x0f, 0xae, 0xf7;"
106 : /* ignore rflags */
107 : "D"(0), /* enter C0.2 */
108 "a"(tsc_l), "d"(tsc_h));
110 rte_spinlock_lock(lck);
114 * This function uses TPAUSE instruction and will enter C0.2 state. For more
115 * information about usage of this instruction, please refer to Intel(R) 64 and
116 * IA-32 Architectures Software Developer's Manual.
119 rte_power_pause(const uint64_t tsc_timestamp)
121 const uint32_t tsc_l = (uint32_t)tsc_timestamp;
122 const uint32_t tsc_h = (uint32_t)(tsc_timestamp >> 32);
125 asm volatile(".byte 0x66, 0x0f, 0xae, 0xf7;"
126 : /* ignore rflags */
127 : "D"(0), /* enter C0.2 */
128 "a"(tsc_l), "d"(tsc_h));
135 #endif /* _RTE_POWER_INTRINSIC_X86_H_ */