
Commit 12d2ce1

libc/atomic: decoupling atomic and spinlock to avoid recursion
1. use irq save in AMP mode
2. use mutex lock in SMP mode

Signed-off-by: chao an <anchao@lixiang.com>
1 parent 1bba720 commit 12d2ce1
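
For context, the __atomic_*_N() and __sync_*_N() routines touched by this commit are the library fallbacks the compiler calls when it cannot inline an atomic operation for the target. A minimal caller sketch, not part of the commit and with illustrative names only:

  /* Hypothetical caller: on a target without native atomic instructions,
   * the compiler lowers this C11 atomic operation to a call such as
   * __atomic_fetch_add_4(), i.e. one of the weak functions patched below,
   * which now serializes the update with atomic_lock()/atomic_unlock()
   * instead of a spinlock. */

  #include <stdatomic.h>
  #include <stdint.h>

  static _Atomic uint32_t g_counter;   /* illustrative shared counter */

  uint32_t counter_next(void)
  {
    return atomic_fetch_add(&g_counter, 1) + 1;
  }

Callers like this are unaffected; only the protection of the critical section inside the weak functions changes: a single mutex in SMP builds, irq save/restore otherwise (the AMP case in the commit message).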

File tree: 1 file changed, +67 -37 lines

libs/libc/machine/arch_atomic.c  (+67 -37)

--- a/libs/libc/machine/arch_atomic.c
+++ b/libs/libc/machine/arch_atomic.c
@@ -28,7 +28,37 @@

 #include <stdbool.h>
 #include <stdint.h>
-#include <nuttx/spinlock.h>
+#include <nuttx/irq.h>
+#include <nuttx/mutex.h>
+
+/****************************************************************************
+ * Private Data
+ ****************************************************************************/
+
+#ifdef CONFIG_SMP
+static mutex_t g_atomic_lock = NXMUTEX_INITIALIZER;
+
+static inline irqstate_t atomic_lock(void)
+{
+  return nxmutex_lock(&g_atomic_lock);
+}
+
+static inline void atomic_unlock(irqstate_t flags)
+{
+  UNUSED(flags);
+  nxmutex_unlock(&g_atomic_lock);
+}
+#else
+static inline irqstate_t atomic_lock(void)
+{
+  return up_irq_save();
+}
+
+static inline void atomic_unlock(irqstate_t flags)
+{
+  up_irq_restore(flags);
+}
+#endif

 /****************************************************************************
  * Pre-processor Definitions
@@ -39,23 +69,23 @@
   void weak_function __atomic_store_##n (FAR volatile void *ptr, \
                                          type value, int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
 \
     *(FAR type *)ptr = value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
   }

 #define LOAD(n, type) \
 \
   type weak_function __atomic_load_##n (FAR const volatile void *ptr, \
                                         int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
 \
     type ret = *(FAR type *)ptr; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }

@@ -64,13 +94,13 @@
   type weak_function __atomic_exchange_##n (FAR volatile void *ptr, \
                                             type value, int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
 \
     type ret = *tmp; \
     *tmp = value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }

@@ -82,7 +112,7 @@
                                                     int success, int failure) \
   { \
     bool ret = false; \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmpmem = (FAR type *)mem; \
     FAR type *tmpexp = (FAR type *)expect; \
 \
@@ -96,7 +126,7 @@
         *tmpexp = *tmpmem; \
       } \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }

@@ -105,13 +135,13 @@
   type weak_function __atomic_flags_test_and_set##n (FAR volatile void *ptr, \
                                                      int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     type ret = *tmp; \
 \
     *(FAR type *)ptr = 1; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }

@@ -120,13 +150,13 @@
   type weak_function __atomic_fetch_add_##n (FAR volatile void *ptr, \
                                              type value, int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     type ret = *tmp; \
 \
     *tmp = *tmp + value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }

@@ -135,13 +165,13 @@
   type weak_function __atomic_fetch_sub_##n (FAR volatile void *ptr, \
                                              type value, int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     type ret = *tmp; \
 \
     *tmp = *tmp - value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }

@@ -150,13 +180,13 @@
   type weak_function __atomic_fetch_and_##n (FAR volatile void *ptr, \
                                              type value, int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     type ret = *tmp; \
 \
     *tmp = *tmp & value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }

@@ -165,13 +195,13 @@
   type weak_function __atomic_fetch_or_##n (FAR volatile void *ptr, \
                                             type value, int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     type ret = *tmp; \
 \
     *tmp = *tmp | value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }

@@ -180,13 +210,13 @@
   type weak_function __atomic_fetch_xor_##n (FAR volatile void *ptr, \
                                              type value, int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     type ret = *tmp; \
 \
     *tmp = *tmp ^ value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }

@@ -195,12 +225,12 @@
   type weak_function __sync_add_and_fetch_##n (FAR volatile void *ptr, \
                                                type value) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
 \
     *tmp = *tmp + value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return *tmp; \
   }

@@ -209,12 +239,12 @@
   type weak_function __sync_sub_and_fetch_##n (FAR volatile void *ptr, \
                                                type value) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
 \
     *tmp = *tmp - value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return *tmp; \
   }

@@ -223,12 +253,12 @@
   type weak_function __sync_or_and_fetch_##n (FAR volatile void *ptr, \
                                               type value) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
 \
     *tmp = *tmp | value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return *tmp; \
   }

@@ -237,12 +267,12 @@
   type weak_function __sync_and_and_fetch_##n (FAR volatile void *ptr, \
                                                type value) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
 \
     *tmp = *tmp & value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return *tmp; \
   }

@@ -251,12 +281,12 @@
   type weak_function __sync_xor_and_fetch_##n (FAR volatile void *ptr, \
                                                type value) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
 \
     *tmp = *tmp ^ value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return *tmp; \
   }

@@ -265,12 +295,12 @@
   type weak_function __sync_nand_and_fetch_##n (FAR volatile void *ptr, \
                                                 type value) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
 \
     *tmp = ~(*tmp & value); \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return *tmp; \
   }

@@ -281,7 +311,7 @@
                                                        type newvalue) \
   { \
     bool ret = false; \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
 \
     if (*tmp == oldvalue) \
@@ -290,7 +320,7 @@
         *tmp = newvalue; \
       } \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }

@@ -300,7 +330,7 @@
                                                       type oldvalue, \
                                                       type newvalue) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     type ret = *tmp; \
 \
@@ -309,7 +339,7 @@
         *tmp = newvalue; \
       } \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }