 28 |  28 |
 29 |  29 | #include <stdbool.h>
 30 |  30 | #include <stdint.h>
 31 |     | -#include <nuttx/spinlock.h>
    |  31 | +#include <nuttx/irq.h>
    |  32 | +
    |  33 | +/****************************************************************************
    |  34 | + * Private Functions
    |  35 | + ****************************************************************************/
    |  36 | +
    |  37 | +#ifdef CONFIG_SMP
    |  38 | +static inline_function irqstate_t atomic_lock(void)
    |  39 | +{
    |  40 | +  return enter_critical_section();
    |  41 | +}
    |  42 | +
    |  43 | +static inline_function void atomic_unlock(irqstate_t flags)
    |  44 | +{
    |  45 | +  leave_critical_section(flags);
    |  46 | +}
    |  47 | +#else
    |  48 | +static inline_function irqstate_t atomic_lock(void)
    |  49 | +{
    |  50 | +  return up_irq_save();
    |  51 | +}
    |  52 | +
    |  53 | +static inline_function void atomic_unlock(irqstate_t flags)
    |  54 | +{
    |  55 | +  up_irq_restore(flags);
    |  56 | +}
    |  57 | +#endif
 32 |  58 |
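On SMP builds, masking interrupts on the local CPU is not enough to serialize a read-modify-write against the other CPUs, so atomic_lock() enters the global critical section; on single-CPU builds up_irq_save() is sufficient and cheaper. These helpers back the out-of-line atomic routines generated below, which the toolchain falls back to when it cannot inline an atomic builtin for the target. A minimal, hypothetical caller (illustration only, not part of this change) showing how such a call arises:

#include <stdint.h>

static uint32_t g_counter;

uint32_t bump_counter(void)
{
  /* On targets without native atomics this typically lowers to a call to
   * __atomic_fetch_add_4(), which resolves to the weak function defined
   * further down in this file.  The previous value is returned.
   */

  return __atomic_fetch_add(&g_counter, 1, __ATOMIC_SEQ_CST);
}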
 33 |  59 | /****************************************************************************
 34 |  60 |  * Pre-processor Definitions
 39 |  65 | void weak_function __atomic_store_##n (FAR volatile void *ptr, \
 40 |  66 |                                        type value, int memorder) \
 41 |  67 | { \
 42 |     | -  irqstate_t irqstate = spin_lock_irqsave(NULL); \
    |  68 | +  irqstate_t irqstate = atomic_lock(); \
 43 |  69 |   \
 44 |  70 |   *(FAR type *)ptr = value; \
 45 |  71 |   \
 46 |     | -  spin_unlock_irqrestore(NULL, irqstate); \
    |  72 | +  atomic_unlock(irqstate); \
 47 |  73 | }
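As a reference point, this is roughly what STORE(4, uint32_t) expands to after the change (hand-expanded from the macro above, assuming the usual NuttX definitions of FAR, weak_function and irqstate_t; formatting approximate). The memorder argument is ignored because the critical section already orders the access:

void weak_function __atomic_store_4(FAR volatile void *ptr,
                                    uint32_t value, int memorder)
{
  irqstate_t irqstate = atomic_lock(); /* Block other CPUs and interrupts */

  *(FAR uint32_t *)ptr = value;        /* Plain store, safe under the lock */

  atomic_unlock(irqstate);             /* Restore the previous IRQ state */
}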
 48 |  74 |
 49 |  75 | #define LOAD(n, type) \
 50 |  76 |   \
 51 |  77 | type weak_function __atomic_load_##n (FAR const volatile void *ptr, \
 52 |  78 |                                       int memorder) \
 53 |  79 | { \
 54 |     | -  irqstate_t irqstate = spin_lock_irqsave(NULL); \
    |  80 | +  irqstate_t irqstate = atomic_lock(); \
 55 |  81 |   \
 56 |  82 |   type ret = *(FAR type *)ptr; \
 57 |  83 |   \
 58 |     | -  spin_unlock_irqrestore(NULL, irqstate); \
    |  84 | +  atomic_unlock(irqstate); \
 59 |  85 |   return ret; \
 60 |  86 | }
 61 |  87 |
 64 |  90 | type weak_function __atomic_exchange_##n (FAR volatile void *ptr, \
 65 |  91 |                                           type value, int memorder) \
 66 |  92 | { \
 67 |     | -  irqstate_t irqstate = spin_lock_irqsave(NULL); \
    |  93 | +  irqstate_t irqstate = atomic_lock(); \
 68 |  94 |   FAR type *tmp = (FAR type *)ptr; \
 69 |  95 |   \
 70 |  96 |   type ret = *tmp; \
 71 |  97 |   *tmp = value; \
 72 |  98 |   \
 73 |     | -  spin_unlock_irqrestore(NULL, irqstate); \
    |  99 | +  atomic_unlock(irqstate); \
 74 | 100 |   return ret; \
 75 | 101 | }
 76 | 102 |
 82 | 108 |                                                   int success, int failure) \
 83 | 109 | { \
 84 | 110 |   bool ret = false; \
 85 |     | -  irqstate_t irqstate = spin_lock_irqsave(NULL); \
    | 111 | +  irqstate_t irqstate = atomic_lock(); \
 86 | 112 |   FAR type *tmpmem = (FAR type *)mem; \
 87 | 113 |   FAR type *tmpexp = (FAR type *)expect; \
 88 | 114 |   \
 96 | 122 |       *tmpexp = *tmpmem; \
 97 | 123 |     } \
 98 | 124 |   \
 99 |     | -  spin_unlock_irqrestore(NULL, irqstate); \
    | 125 | +  atomic_unlock(irqstate); \
100 | 126 |   return ret; \
101 | 127 | }
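On failure the compare-exchange fallback writes the observed value back into *expect (the *tmpexp = *tmpmem line above), matching the builtin's contract and allowing the usual retry loop. A hedged usage sketch (hypothetical caller, assuming a 32-bit type so the call resolves to the _4 variant):

#include <stdbool.h>
#include <stdint.h>

static uint32_t g_max;

/* Atomically raise g_max to 'sample' if sample is larger. */

void update_max(uint32_t sample)
{
  uint32_t old = __atomic_load_n(&g_max, __ATOMIC_RELAXED);

  while (sample > old &&
         !__atomic_compare_exchange_n(&g_max, &old, sample, false,
                                      __ATOMIC_SEQ_CST, __ATOMIC_RELAXED))
    {
      /* The builtin refreshed 'old' with the current value; try again. */
    }
}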
102 | 128 |
105 | 131 | type weak_function __atomic_flags_test_and_set##n (FAR volatile void *ptr, \
106 | 132 |                                                     int memorder) \
107 | 133 | { \
108 |     | -  irqstate_t irqstate = spin_lock_irqsave(NULL); \
    | 134 | +  irqstate_t irqstate = atomic_lock(); \
109 | 135 |   FAR type *tmp = (FAR type *)ptr; \
110 | 136 |   type ret = *tmp; \
111 | 137 |   \
112 | 138 |   *(FAR type *)ptr = 1; \
113 | 139 |   \
114 |     | -  spin_unlock_irqrestore(NULL, irqstate); \
    | 140 | +  atomic_unlock(irqstate); \
115 | 141 |   return ret; \
116 | 142 | }
117 | 143 |
120 | 146 | type weak_function __atomic_fetch_add_##n (FAR volatile void *ptr, \
121 | 147 |                                            type value, int memorder) \
122 | 148 | { \
123 |     | -  irqstate_t irqstate = spin_lock_irqsave(NULL); \
    | 149 | +  irqstate_t irqstate = atomic_lock(); \
124 | 150 |   FAR type *tmp = (FAR type *)ptr; \
125 | 151 |   type ret = *tmp; \
126 | 152 |   \
127 | 153 |   *tmp = *tmp + value; \
128 | 154 |   \
129 |     | -  spin_unlock_irqrestore(NULL, irqstate); \
    | 155 | +  atomic_unlock(irqstate); \
130 | 156 |   return ret; \
131 | 157 | }
132 | 158 |
135 | 161 | type weak_function __atomic_fetch_sub_##n (FAR volatile void *ptr, \
136 | 162 |                                            type value, int memorder) \
137 | 163 | { \
138 |     | -  irqstate_t irqstate = spin_lock_irqsave(NULL); \
    | 164 | +  irqstate_t irqstate = atomic_lock(); \
139 | 165 |   FAR type *tmp = (FAR type *)ptr; \
140 | 166 |   type ret = *tmp; \
141 | 167 |   \
142 | 168 |   *tmp = *tmp - value; \
143 | 169 |   \
144 |     | -  spin_unlock_irqrestore(NULL, irqstate); \
    | 170 | +  atomic_unlock(irqstate); \
145 | 171 |   return ret; \
146 | 172 | }
147 | 173 |
150 | 176 | type weak_function __atomic_fetch_and_##n (FAR volatile void *ptr, \
151 | 177 |                                            type value, int memorder) \
152 | 178 | { \
153 |     | -  irqstate_t irqstate = spin_lock_irqsave(NULL); \
    | 179 | +  irqstate_t irqstate = atomic_lock(); \
154 | 180 |   FAR type *tmp = (FAR type *)ptr; \
155 | 181 |   type ret = *tmp; \
156 | 182 |   \
157 | 183 |   *tmp = *tmp & value; \
158 | 184 |   \
159 |     | -  spin_unlock_irqrestore(NULL, irqstate); \
    | 185 | +  atomic_unlock(irqstate); \
160 | 186 |   return ret; \
161 | 187 | }
162 | 188 |
165 | 191 | type weak_function __atomic_fetch_or_##n (FAR volatile void *ptr, \
166 | 192 |                                           type value, int memorder) \
167 | 193 | { \
168 |     | -  irqstate_t irqstate = spin_lock_irqsave(NULL); \
    | 194 | +  irqstate_t irqstate = atomic_lock(); \
169 | 195 |   FAR type *tmp = (FAR type *)ptr; \
170 | 196 |   type ret = *tmp; \
171 | 197 |   \
172 | 198 |   *tmp = *tmp | value; \
173 | 199 |   \
174 |     | -  spin_unlock_irqrestore(NULL, irqstate); \
    | 200 | +  atomic_unlock(irqstate); \
175 | 201 |   return ret; \
176 | 202 | }
177 | 203 |
180 | 206 | type weak_function __atomic_fetch_xor_##n (FAR volatile void *ptr, \
181 | 207 |                                            type value, int memorder) \
182 | 208 | { \
183 |     | -  irqstate_t irqstate = spin_lock_irqsave(NULL); \
    | 209 | +  irqstate_t irqstate = atomic_lock(); \
184 | 210 |   FAR type *tmp = (FAR type *)ptr; \
185 | 211 |   type ret = *tmp; \
186 | 212 |   \
187 | 213 |   *tmp = *tmp ^ value; \
188 | 214 |   \
189 |     | -  spin_unlock_irqrestore(NULL, irqstate); \
    | 215 | +  atomic_unlock(irqstate); \
190 | 216 |   return ret; \
191 | 217 | }
192 | 218 |
195 | 221 | type weak_function __sync_add_and_fetch_##n (FAR volatile void *ptr, \
196 | 222 |                                              type value) \
197 | 223 | { \
198 |     | -  irqstate_t irqstate = spin_lock_irqsave(NULL); \
    | 224 | +  irqstate_t irqstate = atomic_lock(); \
199 | 225 |   FAR type *tmp = (FAR type *)ptr; \
200 | 226 |   \
201 | 227 |   *tmp = *tmp + value; \
202 | 228 |   \
203 |     | -  spin_unlock_irqrestore(NULL, irqstate); \
    | 229 | +  atomic_unlock(irqstate); \
204 | 230 |   return *tmp; \
205 | 231 | }
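The __sync_*_and_fetch entries (the one above and those that follow) cover the older, pre-C11 GCC builtins that some code and toolchains still emit; unlike the __atomic_fetch_* routines above, the *_and_fetch forms return the new value rather than the previous one. A small illustrative caller (hypothetical, not part of this change):

#include <stdint.h>

static volatile uint32_t g_refcount;

uint32_t take_ref(void)
{
  /* Returns the incremented count; may lower to a call to
   * __sync_add_and_fetch_4() on targets without native atomics.
   */

  return __sync_add_and_fetch(&g_refcount, 1);
}

uint32_t drop_ref(void)
{
  return __sync_sub_and_fetch(&g_refcount, 1);
}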
206 | 232 |
209 | 235 | type weak_function __sync_sub_and_fetch_##n (FAR volatile void *ptr, \
210 | 236 |                                              type value) \
211 | 237 | { \
212 |     | -  irqstate_t irqstate = spin_lock_irqsave(NULL); \
    | 238 | +  irqstate_t irqstate = atomic_lock(); \
213 | 239 |   FAR type *tmp = (FAR type *)ptr; \
214 | 240 |   \
215 | 241 |   *tmp = *tmp - value; \
216 | 242 |   \
217 |     | -  spin_unlock_irqrestore(NULL, irqstate); \
    | 243 | +  atomic_unlock(irqstate); \
218 | 244 |   return *tmp; \
219 | 245 | }
220 | 246 |
223 | 249 | type weak_function __sync_or_and_fetch_##n (FAR volatile void *ptr, \
224 | 250 |                                             type value) \
225 | 251 | { \
226 |     | -  irqstate_t irqstate = spin_lock_irqsave(NULL); \
    | 252 | +  irqstate_t irqstate = atomic_lock(); \
227 | 253 |   FAR type *tmp = (FAR type *)ptr; \
228 | 254 |   \
229 | 255 |   *tmp = *tmp | value; \
230 | 256 |   \
231 |     | -  spin_unlock_irqrestore(NULL, irqstate); \
    | 257 | +  atomic_unlock(irqstate); \
232 | 258 |   return *tmp; \
233 | 259 | }
234 | 260 |
237 | 263 | type weak_function __sync_and_and_fetch_##n (FAR volatile void *ptr, \
238 | 264 |                                              type value) \
239 | 265 | { \
240 |     | -  irqstate_t irqstate = spin_lock_irqsave(NULL); \
    | 266 | +  irqstate_t irqstate = atomic_lock(); \
241 | 267 |   FAR type *tmp = (FAR type *)ptr; \
242 | 268 |   \
243 | 269 |   *tmp = *tmp & value; \
244 | 270 |   \
245 |     | -  spin_unlock_irqrestore(NULL, irqstate); \
    | 271 | +  atomic_unlock(irqstate); \
246 | 272 |   return *tmp; \
247 | 273 | }
248 | 274 |
251 | 277 | type weak_function __sync_xor_and_fetch_##n (FAR volatile void *ptr, \
252 | 278 |                                              type value) \
253 | 279 | { \
254 |     | -  irqstate_t irqstate = spin_lock_irqsave(NULL); \
    | 280 | +  irqstate_t irqstate = atomic_lock(); \
255 | 281 |   FAR type *tmp = (FAR type *)ptr; \
256 | 282 |   \
257 | 283 |   *tmp = *tmp ^ value; \
258 | 284 |   \
259 |     | -  spin_unlock_irqrestore(NULL, irqstate); \
    | 285 | +  atomic_unlock(irqstate); \
260 | 286 |   return *tmp; \
261 | 287 | }
262 | 288 |
265 | 291 | type weak_function __sync_nand_and_fetch_##n (FAR volatile void *ptr, \
266 | 292 |                                               type value) \
267 | 293 | { \
268 |     | -  irqstate_t irqstate = spin_lock_irqsave(NULL); \
    | 294 | +  irqstate_t irqstate = atomic_lock(); \
269 | 295 |   FAR type *tmp = (FAR type *)ptr; \
270 | 296 |   \
271 | 297 |   *tmp = ~(*tmp & value); \
272 | 298 |   \
273 |     | -  spin_unlock_irqrestore(NULL, irqstate); \
    | 299 | +  atomic_unlock(irqstate); \
274 | 300 |   return *tmp; \
275 | 301 | }
276 | 302 |
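A note on the NAND variant: for GCC 4.4 and later the documented semantics of __sync_nand_and_fetch are *ptr = ~(*ptr & value), not ~*ptr & value, which is what the fallback above computes. A quick, hypothetical sanity check:

#include <assert.h>
#include <stdint.h>

void check_nand_semantics(void)
{
  uint32_t x = 0xf0f0f0f0;
  uint32_t r = __sync_nand_and_fetch(&x, 0xff00ff00);

  /* Expected: ~(0xf0f0f0f0 & 0xff00ff00) == ~0xf000f000 == 0x0fff0fff */

  assert(r == 0x0fff0fff && x == 0x0fff0fff);
}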
281 | 307 |                                                    type newvalue) \
282 | 308 | { \
283 | 309 |   bool ret = false; \
284 |     | -  irqstate_t irqstate = spin_lock_irqsave(NULL); \
    | 310 | +  irqstate_t irqstate = atomic_lock(); \
285 | 311 |   FAR type *tmp = (FAR type *)ptr; \
286 | 312 |   \
287 | 313 |   if (*tmp == oldvalue) \
290 | 316 |       *tmp = newvalue; \
291 | 317 |     } \
292 | 318 |   \
293 |     | -  spin_unlock_irqrestore(NULL, irqstate); \
    | 319 | +  atomic_unlock(irqstate); \
294 | 320 |   return ret; \
295 | 321 | }
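A hedged example of the classic use of the boolean form: a tiny test-and-set style flag (illustration only; real NuttX code would normally use the spinlock or mutex primitives instead):

#include <stdbool.h>
#include <stdint.h>

static volatile uint32_t g_flag;

bool try_take(void)
{
  /* Succeeds only for the caller that flips 0 -> 1. */

  return __sync_bool_compare_and_swap(&g_flag, 0, 1);
}

void release(void)
{
  /* A real lock would pair this with a release barrier; the plain store
   * is shown only to keep the sketch short.
   */

  g_flag = 0;
}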
296 | 322 |
300 | 326 |                                                   type oldvalue, \
301 | 327 |                                                   type newvalue) \
302 | 328 | { \
303 |     | -  irqstate_t irqstate = spin_lock_irqsave(NULL); \
    | 329 | +  irqstate_t irqstate = atomic_lock(); \
304 | 330 |   FAR type *tmp = (FAR type *)ptr; \
305 | 331 |   type ret = *tmp; \
306 | 332 |   \
309 | 335 |       *tmp = newvalue; \
310 | 336 |     } \
311 | 337 |   \
312 |     | -  spin_unlock_irqrestore(NULL, irqstate); \
    | 338 | +  atomic_unlock(irqstate); \
313 | 339 |   return ret; \
314 | 340 | }
315 | 341 |