38
38
39
39
#include <atomic_ops.h>
40
40
41
- #define handle_memory_order (ret , proc , order , ...) \
41
+ #define handle_memory_order_impl (ret , proc , order , ...) \
42
42
do { \
43
43
switch (order) { \
44
44
case memory_order_relaxed: \
58
58
} \
59
59
} while (0)
60
60
61
/* MSVC's traditional preprocessor expands __VA_ARGS__ as a single
 * preprocessing token when forwarded to another macro, so the arguments
 * arrive at handle_memory_order_impl glued together.  Forcing one extra
 * expansion pass through msvc_workaround() re-splits them before the
 * impl macro sees them.  Other compilers take the impl macro directly. */
#if defined(_MSC_VER)
# define msvc_workaround(x) x
# define handle_memory_order(ret, proc, order, ...) \
  msvc_workaround(handle_memory_order_impl(ret, proc, order, __VA_ARGS__))
#else
# define handle_memory_order handle_memory_order_impl
#endif

61
70
static void ao_store_explicit (volatile AO_t * o , AO_t v , memory_order order )
62
71
{
63
72
switch (order ) {
@@ -106,48 +115,97 @@ static void ao_fetch_add(volatile AO_t *o, AO_t v, memory_order order)
106
115
}
107
116
/* Atomically subtract v from *o with the requested memory ordering.
 * Implemented as an add of the two's-complement negation of v.  The
 * negation is done in unsigned arithmetic ((AO_t)0 - v), which is
 * well-defined for every value of v.  Casting through `long` instead
 * would be implementation-defined for v > LONG_MAX and would truncate
 * on LLP64 targets (64-bit Windows) where AO_t is 64-bit but long is
 * 32-bit; plain unary minus on the unsigned AO_t would trigger MSVC
 * warning C4146. */
static void ao_fetch_sub(volatile AO_t *o, AO_t v, memory_order order)
{
  ao_fetch_add(o, (AO_t)0 - v, order);
}
120
+ /* Fxxking MAVC, the macro doesn't work... */
111
121
static void ao_fetch_or (volatile AO_t * o , AO_t v , memory_order order )
112
122
{
113
- handle_memory_order ((void ), AO_or , order , o , v );
123
+ /* handle_memory_order((void), AO_or, order, o, v); */
124
+ switch (order ) {
125
+ case memory_order_relaxed :
126
+ AO_or (o , v );
127
+ break ;
128
+ case memory_order_consume :
129
+ case memory_order_acquire :
130
+ AO_or_acquire (o , v );
131
+ break ;
132
+ case memory_order_release :
133
+ AO_or_release (o , v );
134
+ break ;
135
+ case memory_order_acq_rel :
136
+ case memory_order_seq_cst :
137
+ AO_or_full (o , v );
138
+ break ;
139
+ }
114
140
}
115
141
/* Atomically XOR v into *o honoring the requested memory ordering.
 * NOTE: written as an explicit switch instead of handle_memory_order()
 * because that macro does not expand correctly under MSVC's traditional
 * preprocessor. */
static void ao_fetch_xor(volatile AO_t *o, AO_t v, memory_order order)
{
  switch (order) {
  case memory_order_relaxed:
    AO_xor(o, v);
    break;
  case memory_order_consume: /* fallthrough: consume treated as acquire */
  case memory_order_acquire:
    AO_xor_acquire(o, v);
    break;
  case memory_order_release:
    AO_xor_release(o, v);
    break;
  case memory_order_acq_rel: /* fallthrough: full barrier covers acq_rel */
  case memory_order_seq_cst:
    AO_xor_full(o, v);
    break;
  }
}
119
161
/* Atomically AND v into *o honoring the requested memory ordering.
 * NOTE: written as an explicit switch instead of handle_memory_order()
 * because that macro does not expand correctly under MSVC's traditional
 * preprocessor. */
static void ao_fetch_and(volatile AO_t *o, AO_t v, memory_order order)
{
  switch (order) {
  case memory_order_relaxed:
    AO_and(o, v);
    break;
  case memory_order_consume: /* fallthrough: consume treated as acquire */
  case memory_order_acquire:
    AO_and_acquire(o, v);
    break;
  case memory_order_release:
    AO_and_release(o, v);
    break;
  case memory_order_acq_rel: /* fallthrough: full barrier covers acq_rel */
  case memory_order_seq_cst:
    AO_and_full(o, v);
    break;
  }
}
123
181
124
182
/* C11 <stdatomic.h>-style convenience layer over the ao_* wrappers:
 * the plain forms default to sequentially consistent ordering, the
 * *_explicit forms forward the caller-supplied memory_order — mirroring
 * the standard atomic_* API.  The parameter list must follow the macro
 * name with no intervening space, otherwise these become object-like
 * macros and every call site breaks. */
#define atomic_store(o, v) ao_store_explicit(o, v, memory_order_seq_cst)
#define atomic_store_explicit ao_store_explicit

#define atomic_load(o) ao_load_explicit(o, memory_order_seq_cst)
#define atomic_load_explicit ao_load_explicit

#define atomic_exchange(o, v) ao_exchange_explicit(o, v, memory_order_seq_cst)
#define atomic_exchange_explicit ao_exchange_explicit

#define atomic_compare_exchange_strong(o, e, v) ao_compare_exchange_strong(o, e, v, memory_order_seq_cst, memory_order_seq_cst)
#define atomic_compare_exchange_strong_explicit ao_compare_exchange_strong

#define atomic_compare_exchange_weak(o, e, v) ao_compare_exchange_weak(o, e, v, memory_order_seq_cst, memory_order_seq_cst)
#define atomic_compare_exchange_weak_explicit ao_compare_exchange_weak

#define atomic_fetch_add(ob, op) ao_fetch_add(ob, op, memory_order_seq_cst)
#define atomic_fetch_add_explicit ao_fetch_add

#define atomic_fetch_sub(ob, op) ao_fetch_sub(ob, op, memory_order_seq_cst)
#define atomic_fetch_sub_explicit ao_fetch_sub

#define atomic_fetch_or(ob, op) ao_fetch_or(ob, op, memory_order_seq_cst)
#define atomic_fetch_or_explicit ao_fetch_or

#define atomic_fetch_xor(ob, op) ao_fetch_xor(ob, op, memory_order_seq_cst)
#define atomic_fetch_xor_explicit ao_fetch_xor

#define atomic_fetch_and(ob, op) ao_fetch_and(ob, op, memory_order_seq_cst)
#define atomic_fetch_and_explicit ao_fetch_and
153
211
@@ -246,7 +304,7 @@ SgObject Sg_AtomicExchange(volatile SgAtomic *o, SgObject v, SgMemoryOrder order
246
304
object_t r = atomic_exchange_explicit (& SG_ATOMIC_REF_OBJECT (o ),
247
305
(object_t )v , order );
248
306
return SG_OBJ (r );
249
- }
307
+ }
250
308
}
251
309
252
310
long Sg_AtomicFixnumExchange (volatile SgAtomic * o , long v , SgMemoryOrder order )
@@ -264,7 +322,7 @@ long Sg_AtomicFixnumLoad(volatile SgAtomic *o, SgMemoryOrder order)
264
322
Sg_Error (UC ("atomic-fixnum is required" ));
265
323
266
324
}
267
- return atomic_load_explicit (& SG_ATOMIC_REF_FIXNUM (o ), order );
325
+ return atomic_load_explicit (& SG_ATOMIC_REF_FIXNUM (o ), order );
268
326
}
269
327
270
328
void Sg_AtomicFixnumStore (volatile SgAtomic * o , long v , SgMemoryOrder order )
0 commit comments