/*
 * SPDX-License-Identifier: BSD-3-Clause
 * Inspired from FreeBSD src/sys/powerpc/include/atomic.h
 * Copyright (c) 2021 IBM Corporation
 * Copyright (c) 2008 Marcel Moolenaar
 * Copyright (c) 2001 Benno Rice
 * Copyright (c) 2001 David E. O'Brien
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 */

#ifndef _RTE_ATOMIC_PPC_64_H_
#define _RTE_ATOMIC_PPC_64_H_

#ifdef __cplusplus
extern "C" {
#endif

#include <stdint.h>
#include <rte_compat.h>
#include "generic/rte_atomic.h"

#define rte_mb() asm volatile("sync" : : : "memory")

#define rte_wmb() asm volatile("sync" : : : "memory")

#define rte_rmb() asm volatile("sync" : : : "memory")

#define rte_smp_mb() rte_mb()

#define rte_smp_wmb() rte_wmb()

#define rte_smp_rmb() rte_rmb()

#define rte_io_mb() rte_mb()

#define rte_io_wmb() rte_wmb()

#define rte_io_rmb() rte_rmb()
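
/*
 * Usage sketch (illustrative; "data" and "flag" are hypothetical
 * variables): a producer publishes data to another lcore.
 *
 *	data = 42;
 *	rte_wmb();
 *	flag = 1;
 *
 * rte_wmb() orders the data store before the flag store; the consumer
 * polls flag and issues rte_rmb() before reading data.
 */
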
static __rte_always_inline void
rte_atomic_thread_fence(int memorder)
{
	__atomic_thread_fence(memorder);
}
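
/*
 * Usage sketch (illustrative; "data" and "ready" are hypothetical):
 * a release fence before a relaxed store pairs with an acquire fence
 * after a relaxed load on the reader side.
 *
 *	data = 42;
 *	rte_atomic_thread_fence(__ATOMIC_RELEASE);
 *	__atomic_store_n(&ready, 1, __ATOMIC_RELAXED);
 */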

/*------------------------- 16 bit atomic operations -------------------------*/
#ifndef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __atomic_compare_exchange(dst, &exp, &src, 0, __ATOMIC_ACQUIRE,
		__ATOMIC_ACQUIRE) ? 1 : 0;
}
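
/*
 * Usage sketch (illustrative; "counter" is a hypothetical volatile
 * uint16_t): a saturating increment built on cmpset, retrying until no
 * other lcore races the update.
 *
 *	uint16_t old;
 *	do {
 *		old = counter;
 *		if (old == UINT16_MAX)
 *			break;
 *	} while (!rte_atomic16_cmpset(&counter, old, old + 1));
 */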

static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
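
/*
 * Note: rte_atomic16_test_and_set() succeeds (returns 1) only for the
 * caller that flips the counter from 0 to 1, so it can act as a one-shot
 * claim flag, e.g. electing the single lcore that runs one-time init.
 */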

static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	__atomic_add_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);
}

static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	__atomic_sub_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);
}

static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __atomic_add_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE) == 0;
}

static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __atomic_sub_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE) == 0;
}

static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
	return __atomic_exchange_2(dst, val, __ATOMIC_SEQ_CST);
}
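
/*
 * Usage sketch (illustrative; "token" and "MY_ID" are hypothetical):
 * hand over a token and learn the previous holder in one atomic step.
 *
 *	uint16_t prev = rte_atomic16_exchange(&token, MY_ID);
 */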

/*------------------------- 32 bit atomic operations -------------------------*/

static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __atomic_compare_exchange(dst, &exp, &src, 0, __ATOMIC_ACQUIRE,
		__ATOMIC_ACQUIRE) ? 1 : 0;
}

static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}

static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	__atomic_add_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);
}

static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	__atomic_sub_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);
}

static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __atomic_add_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE) == 0;
}

static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __atomic_sub_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE) == 0;
}

static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
	return __atomic_exchange_4(dst, val, __ATOMIC_SEQ_CST);
}

/*------------------------- 64 bit atomic operations -------------------------*/

static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __atomic_compare_exchange(dst, &exp, &src, 0, __ATOMIC_ACQUIRE,
		__ATOMIC_ACQUIRE) ? 1 : 0;
}

static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
	v->cnt = 0;
}

static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
	return v->cnt;
}

static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
	v->cnt = new_value;
}
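
/*
 * Note: rte_atomic64_read() and rte_atomic64_set() are plain accesses;
 * on powerpc64 a naturally aligned 64-bit load or store is performed as
 * a single access, so no read-modify-write sequence is needed here.
 */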

static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__atomic_add_fetch(&v->cnt, inc, __ATOMIC_ACQUIRE);
}

static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__atomic_sub_fetch(&v->cnt, dec, __ATOMIC_ACQUIRE);
}

static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	__atomic_add_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);
}

static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	__atomic_sub_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);
}

static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __atomic_add_fetch(&v->cnt, inc, __ATOMIC_ACQUIRE);
}

static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __atomic_sub_fetch(&v->cnt, dec, __ATOMIC_ACQUIRE);
}
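
/*
 * Usage sketch (illustrative; "next_seq" is a hypothetical
 * rte_atomic64_t): the *_return variants report the post-operation
 * value, e.g. to hand out unique sequence numbers.
 *
 *	int64_t seq = rte_atomic64_add_return(&next_seq, 1);
 */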

static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return __atomic_add_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE) == 0;
}

static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return __atomic_sub_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE) == 0;
}
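
/*
 * Usage sketch (illustrative; "obj", "refcnt" and "obj_free" are
 * hypothetical): dec_and_test() makes reference counting race-free,
 * since exactly one lcore observes the counter reaching zero.
 *
 *	if (rte_atomic64_dec_and_test(&obj->refcnt))
 *		obj_free(obj);
 */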

static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}

static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	v->cnt = 0;
}

static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
	return __atomic_exchange_8(dst, val, __ATOMIC_SEQ_CST);
}
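
/*
 * Usage sketch (illustrative; "pending" and "process" are hypothetical):
 * atomically take ownership of a stashed value, leaving 0 behind.
 *
 *	uint64_t item = rte_atomic64_exchange(&pending, 0);
 *	if (item != 0)
 *		process(item);
 */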

#endif /* !RTE_FORCE_INTRINSICS */

#ifdef __cplusplus
}
#endif

#endif /* _RTE_ATOMIC_PPC_64_H_ */