/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2017 Cavium, Inc
 * Copyright(c) 2019 Arm Limited
 */

#ifndef _RTE_PAUSE_ARM64_H_
#define _RTE_PAUSE_ARM64_H_

#ifdef __cplusplus
extern "C" {
#endif

#include <rte_common.h>

#ifdef RTE_ARM_USE_WFE
#define RTE_WAIT_UNTIL_EQUAL_ARCH_DEFINED
#endif

#include "generic/rte_pause.h"

static inline void rte_pause(void)
{
	asm volatile("yield" ::: "memory");
}
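
/*
 * Usage sketch (illustrative only; 'flag' is a hypothetical variable):
 * rte_pause() is meant to be called in the body of a busy-wait loop to
 * hint the core that it is spinning, e.g.:
 *
 *	while (__atomic_load_n(&flag, __ATOMIC_ACQUIRE) == 0)
 *		rte_pause();
 */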

#ifdef RTE_WAIT_UNTIL_EQUAL_ARCH_DEFINED

/* Send a local event to set the event register, so a following WFE does
 * not stall. */
#define __RTE_ARM_SEVL() { asm volatile("sevl" : : : "memory"); }

/* Put the processor into the low-power WFE (Wait For Event) state. */
#define __RTE_ARM_WFE() { asm volatile("wfe" : : : "memory"); }
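
/*
 * Note: the two macros above are used below in the canonical Arm wait
 * pattern: an exclusive load arms the monitor, and if the value is not
 * yet the expected one, SEVL sets the local event register so that the
 * first WFE falls through immediately and the monitor is re-armed with
 * a fresh exclusive load before the core actually sleeps on a
 * subsequent WFE. This closes the race between checking the value and
 * going to sleep.
 */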

/*
 * Atomic exclusive load from addr: it returns the 16-bit content of
 * *addr while marking the location 'monitored'. When another agent
 * writes to the location, the monitored state is cleared and an event
 * is generated implicitly, causing WFE to exit.
 */
#define __RTE_ARM_LOAD_EXC_16(src, dst, memorder) { \
	if (memorder == __ATOMIC_RELAXED) { \
		asm volatile("ldxrh %w[tmp], [%x[addr]]" \
			: [tmp] "=&r" (dst) \
			: [addr] "r" (src) \
			: "memory"); \
	} else { \
		asm volatile("ldaxrh %w[tmp], [%x[addr]]" \
			: [tmp] "=&r" (dst) \
			: [addr] "r" (src) \
			: "memory"); \
	} }

/*
 * Atomic exclusive load from addr: it returns the 32-bit content of
 * *addr while marking the location 'monitored'. When another agent
 * writes to the location, the monitored state is cleared and an event
 * is generated implicitly, causing WFE to exit.
 */
#define __RTE_ARM_LOAD_EXC_32(src, dst, memorder) { \
	if (memorder == __ATOMIC_RELAXED) { \
		asm volatile("ldxr %w[tmp], [%x[addr]]" \
			: [tmp] "=&r" (dst) \
			: [addr] "r" (src) \
			: "memory"); \
	} else { \
		asm volatile("ldaxr %w[tmp], [%x[addr]]" \
			: [tmp] "=&r" (dst) \
			: [addr] "r" (src) \
			: "memory"); \
	} }

/*
 * Atomic exclusive load from addr: it returns the 64-bit content of
 * *addr while marking the location 'monitored'. When another agent
 * writes to the location, the monitored state is cleared and an event
 * is generated implicitly, causing WFE to exit.
 */
#define __RTE_ARM_LOAD_EXC_64(src, dst, memorder) { \
	if (memorder == __ATOMIC_RELAXED) { \
		asm volatile("ldxr %x[tmp], [%x[addr]]" \
			: [tmp] "=&r" (dst) \
			: [addr] "r" (src) \
			: "memory"); \
	} else { \
		asm volatile("ldaxr %x[tmp], [%x[addr]]" \
			: [tmp] "=&r" (dst) \
			: [addr] "r" (src) \
			: "memory"); \
	} }

/*
 * Atomic exclusive load from addr: it returns the 128-bit content of
 * *addr while marking the location 'monitored'. When another agent
 * writes to the location, the monitored state is cleared and an event
 * is generated implicitly, causing WFE to exit.
 */
#define __RTE_ARM_LOAD_EXC_128(src, dst, memorder) { \
	volatile rte_int128_t *dst_128 = (volatile rte_int128_t *)&dst; \
	if (memorder == __ATOMIC_RELAXED) { \
		asm volatile("ldxp %x[tmp0], %x[tmp1], [%x[addr]]" \
			: [tmp0] "=&r" (dst_128->val[0]), \
			  [tmp1] "=&r" (dst_128->val[1]) \
			: [addr] "r" (src) \
			: "memory"); \
	} else { \
		asm volatile("ldaxp %x[tmp0], %x[tmp1], [%x[addr]]" \
			: [tmp0] "=&r" (dst_128->val[0]), \
			  [tmp1] "=&r" (dst_128->val[1]) \
			: [addr] "r" (src) \
			: "memory"); \
	} }

#define __RTE_ARM_LOAD_EXC(src, dst, memorder, size) { \
	RTE_BUILD_BUG_ON(size != 16 && size != 32 && \
		size != 64 && size != 128); \
	if (size == 16) \
		__RTE_ARM_LOAD_EXC_16(src, dst, memorder) \
	else if (size == 32) \
		__RTE_ARM_LOAD_EXC_32(src, dst, memorder) \
	else if (size == 64) \
		__RTE_ARM_LOAD_EXC_64(src, dst, memorder) \
	else if (size == 128) \
		__RTE_ARM_LOAD_EXC_128(src, dst, memorder) \
}
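
/*
 * Illustrative note (assuming a uint32_t *addr): 'size' is always a
 * compile-time constant here, so the dead branches above are eliminated
 * and, for example,
 *
 *	__RTE_ARM_LOAD_EXC(addr, value, __ATOMIC_ACQUIRE, 32)
 *
 * reduces to a single ldaxr instruction loading *addr into 'value'.
 */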

static __rte_always_inline void
rte_wait_until_equal_16(volatile uint16_t *addr, uint16_t expected,
		int memorder)
{
	uint16_t value;

	RTE_BUILD_BUG_ON(memorder != __ATOMIC_ACQUIRE &&
		memorder != __ATOMIC_RELAXED);

	__RTE_ARM_LOAD_EXC_16(addr, value, memorder)
	if (value != expected) {
		__RTE_ARM_SEVL()
		do {
			__RTE_ARM_WFE()
			__RTE_ARM_LOAD_EXC_16(addr, value, memorder)
		} while (value != expected);
	}
}

static __rte_always_inline void
rte_wait_until_equal_32(volatile uint32_t *addr, uint32_t expected,
		int memorder)
{
	uint32_t value;

	RTE_BUILD_BUG_ON(memorder != __ATOMIC_ACQUIRE &&
		memorder != __ATOMIC_RELAXED);

	__RTE_ARM_LOAD_EXC_32(addr, value, memorder)
	if (value != expected) {
		__RTE_ARM_SEVL()
		do {
			__RTE_ARM_WFE()
			__RTE_ARM_LOAD_EXC_32(addr, value, memorder)
		} while (value != expected);
	}
}

static __rte_always_inline void
rte_wait_until_equal_64(volatile uint64_t *addr, uint64_t expected,
		int memorder)
{
	uint64_t value;

	RTE_BUILD_BUG_ON(memorder != __ATOMIC_ACQUIRE &&
		memorder != __ATOMIC_RELAXED);

	__RTE_ARM_LOAD_EXC_64(addr, value, memorder)
	if (value != expected) {
		__RTE_ARM_SEVL()
		do {
			__RTE_ARM_WFE()
			__RTE_ARM_LOAD_EXC_64(addr, value, memorder)
		} while (value != expected);
	}
}
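
/*
 * Usage sketch (illustrative only; 'token' is a hypothetical variable):
 * wait until another thread publishes a value, with acquire ordering so
 * that reads issued after the wait observe data written before the
 * store to 'token':
 *
 *	static volatile uint32_t token;
 *	...
 *	rte_wait_until_equal_32(&token, 1, __ATOMIC_ACQUIRE);
 */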

#define RTE_WAIT_UNTIL_MASKED(addr, mask, cond, expected, memorder) do { \
	RTE_BUILD_BUG_ON(!__builtin_constant_p(memorder)); \
	RTE_BUILD_BUG_ON(memorder != __ATOMIC_ACQUIRE && \
		memorder != __ATOMIC_RELAXED); \
	const uint32_t size = sizeof(*(addr)) << 3; \
	typeof(*(addr)) expected_value = (expected); \
	typeof(*(addr)) value; \
	__RTE_ARM_LOAD_EXC((addr), value, memorder, size) \
	if (!((value & (mask)) cond expected_value)) { \
		__RTE_ARM_SEVL() \
		do { \
			__RTE_ARM_WFE() \
			__RTE_ARM_LOAD_EXC((addr), value, memorder, size) \
		} while (!((value & (mask)) cond expected_value)); \
	} \
} while (0)
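
/*
 * Usage sketch (illustrative only; 'status' is a hypothetical variable):
 * spin until bit 0 of a status word becomes non-zero, i.e. wait while
 * (status & 0x1) == 0:
 *
 *	RTE_WAIT_UNTIL_MASKED(&status, 0x1, !=, 0, __ATOMIC_ACQUIRE);
 */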

#endif /* RTE_WAIT_UNTIL_EQUAL_ARCH_DEFINED */

#ifdef __cplusplus
}
#endif

#endif /* _RTE_PAUSE_ARM64_H_ */