#ifndef PL_ATOMIC_OPS_H
#define PL_ATOMIC_OPS_H


/* compiler-only memory barrier, for use around locks */
#define pl_barrier() do { \
                asm volatile("" ::: "memory"); \
        } while (0)

#if defined(__i386__) || defined (__i486__) || defined (__i586__) || defined (__i686__) || defined (__x86_64__)

/* full memory barrier using mfence when SSE2 is supported, falling back to a
 * "lock add" on the stack pointer (gcc uses "lock add" or "lock or").
 */
#if defined(__SSE2__)

#define pl_mb() do { \
                asm volatile("mfence" ::: "memory"); \
        } while (0)

#elif defined(__x86_64__)

#define pl_mb() do { \
                asm volatile("lock addl $0,0 (%%rsp)" ::: "memory", "cc"); \
        } while (0)

#else /* ix86 */

#define pl_mb() do { \
                asm volatile("lock addl $0,0 (%%esp)" ::: "memory", "cc"); \
        } while (0)

#endif /* end of pl_mb() case for sse2/x86_64/x86 */

/*
 * Generic functions common to the x86 family
 */

#define pl_cpu_relax() do { \
                asm volatile("rep;nop\n"); \
        } while (0)
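
/* Illustrative usage sketch (not part of the original API; the function and
 * variable names are hypothetical): busy-wait until a flag published by
 * another thread becomes non-zero. pl_cpu_relax() makes the spin loop
 * friendlier to the sibling hyperthread, pl_barrier() only constrains the
 * compiler, and pl_mb() provides the full ordering needed before reading data
 * published together with the flag.
 */
static inline void example_wait_for_flag(const volatile unsigned int *flag)
{
        while (!*flag) {                /* volatile read, re-evaluated each pass */
                pl_cpu_relax();         /* "rep;nop" (pause) hint while spinning */
                pl_barrier();           /* compiler-only barrier */
        }
        pl_mb();                        /* full barrier before consuming the data */
}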

/* increment integer value pointed to by pointer <ptr>, and return non-zero if
 * the result is non-zero.
 */
#define pl_inc(ptr) ( \
        (sizeof(long) == 8 && sizeof(*(ptr)) == 8) ? ({ \
                unsigned char ret; \
                asm volatile("lock incq %0\n" \
                             "setne %1\n" \
                             : "+m" (*(ptr)), "=qm" (ret) \
                             : \
                             : "cc"); \
                ret; /* return value */ \
        }) : (sizeof(*(ptr)) == 4) ? ({ \
                unsigned char ret; \
                asm volatile("lock incl %0\n" \
                             "setne %1\n" \
                             : "+m" (*(ptr)), "=qm" (ret) \
                             : \
                             : "cc"); \
                ret; /* return value */ \
        }) : (sizeof(*(ptr)) == 2) ? ({ \
                unsigned char ret; \
                asm volatile("lock incw %0\n" \
                             "setne %1\n" \
                             : "+m" (*(ptr)), "=qm" (ret) \
                             : \
                             : "cc"); \
                ret; /* return value */ \
        }) : (sizeof(*(ptr)) == 1) ? ({ \
                unsigned char ret; \
                asm volatile("lock incb %0\n" \
                             "setne %1\n" \
                             : "+m" (*(ptr)), "=qm" (ret) \
                             : \
                             : "cc"); \
                ret; /* return value */ \
        }) : ({ \
                void __unsupported_argument_size_for_pl_inc__(char *,int); \
                if (sizeof(*(ptr)) != 1 && sizeof(*(ptr)) != 2 && \
                    sizeof(*(ptr)) != 4 && (sizeof(long) != 8 || sizeof(*(ptr)) != 8)) \
                        __unsupported_argument_size_for_pl_inc__(__FILE__,__LINE__); \
                0; \
        }) \
)

/* decrement integer value pointed to by pointer <ptr>, and return non-zero if
 * the result is non-zero.
 */
#define pl_dec(ptr) ( \
        (sizeof(long) == 8 && sizeof(*(ptr)) == 8) ? ({ \
                unsigned char ret; \
                asm volatile("lock decq %0\n" \
                             "setne %1\n" \
                             : "+m" (*(ptr)), "=qm" (ret) \
                             : \
                             : "cc"); \
                ret; /* return value */ \
        }) : (sizeof(*(ptr)) == 4) ? ({ \
                unsigned char ret; \
                asm volatile("lock decl %0\n" \
                             "setne %1\n" \
                             : "+m" (*(ptr)), "=qm" (ret) \
                             : \
                             : "cc"); \
                ret; /* return value */ \
        }) : (sizeof(*(ptr)) == 2) ? ({ \
                unsigned char ret; \
                asm volatile("lock decw %0\n" \
                             "setne %1\n" \
                             : "+m" (*(ptr)), "=qm" (ret) \
                             : \
                             : "cc"); \
                ret; /* return value */ \
        }) : (sizeof(*(ptr)) == 1) ? ({ \
                unsigned char ret; \
                asm volatile("lock decb %0\n" \
                             "setne %1\n" \
                             : "+m" (*(ptr)), "=qm" (ret) \
                             : \
                             : "cc"); \
                ret; /* return value */ \
        }) : ({ \
                void __unsupported_argument_size_for_pl_dec__(char *,int); \
                if (sizeof(*(ptr)) != 1 && sizeof(*(ptr)) != 2 && \
                    sizeof(*(ptr)) != 4 && (sizeof(long) != 8 || sizeof(*(ptr)) != 8)) \
                        __unsupported_argument_size_for_pl_dec__(__FILE__,__LINE__); \
                0; \
        }) \
)
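
/* Illustrative usage sketch (hypothetical type and helpers): reference
 * counting built on pl_inc()/pl_dec(). pl_dec() returns zero only when the
 * counter reaches zero, which designates the last user.
 */
struct example_obj {
        unsigned int refcnt;
};

static inline void example_obj_hold(struct example_obj *obj)
{
        (void)pl_inc(&obj->refcnt);     /* result intentionally ignored */
}

/* returns non-zero if the caller just dropped the last reference */
static inline int example_obj_release(struct example_obj *obj)
{
        return !pl_dec(&obj->refcnt);   /* pl_dec() == 0 => counter hit zero */
}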

/* increment integer value pointed to by pointer <ptr>, no return */
#define pl_inc_noret(ptr) ({ \
        if (sizeof(long) == 8 && sizeof(*(ptr)) == 8) { \
                asm volatile("lock incq %0\n" \
                             : "+m" (*(ptr)) \
                             : \
                             : "cc"); \
        } else if (sizeof(*(ptr)) == 4) { \
                asm volatile("lock incl %0\n" \
                             : "+m" (*(ptr)) \
                             : \
                             : "cc"); \
        } else if (sizeof(*(ptr)) == 2) { \
                asm volatile("lock incw %0\n" \
                             : "+m" (*(ptr)) \
                             : \
                             : "cc"); \
        } else if (sizeof(*(ptr)) == 1) { \
                asm volatile("lock incb %0\n" \
                             : "+m" (*(ptr)) \
                             : \
                             : "cc"); \
        } else { \
                void __unsupported_argument_size_for_pl_inc_noret__(char *,int); \
                if (sizeof(*(ptr)) != 1 && sizeof(*(ptr)) != 2 && \
                    sizeof(*(ptr)) != 4 && (sizeof(long) != 8 || sizeof(*(ptr)) != 8)) \
                        __unsupported_argument_size_for_pl_inc_noret__(__FILE__,__LINE__); \
        } \
})

/* decrement integer value pointed to by pointer <ptr>, no return */
#define pl_dec_noret(ptr) ({ \
        if (sizeof(long) == 8 && sizeof(*(ptr)) == 8) { \
                asm volatile("lock decq %0\n" \
                             : "+m" (*(ptr)) \
                             : \
                             : "cc"); \
        } else if (sizeof(*(ptr)) == 4) { \
                asm volatile("lock decl %0\n" \
                             : "+m" (*(ptr)) \
                             : \
                             : "cc"); \
        } else if (sizeof(*(ptr)) == 2) { \
                asm volatile("lock decw %0\n" \
                             : "+m" (*(ptr)) \
                             : \
                             : "cc"); \
        } else if (sizeof(*(ptr)) == 1) { \
                asm volatile("lock decb %0\n" \
                             : "+m" (*(ptr)) \
                             : \
                             : "cc"); \
        } else { \
                void __unsupported_argument_size_for_pl_dec_noret__(char *,int); \
                if (sizeof(*(ptr)) != 1 && sizeof(*(ptr)) != 2 && \
                    sizeof(*(ptr)) != 4 && (sizeof(long) != 8 || sizeof(*(ptr)) != 8)) \
                        __unsupported_argument_size_for_pl_dec_noret__(__FILE__,__LINE__); \
        } \
})
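
/* Illustrative usage sketch (hypothetical helper): pl_inc_noret() is the
 * cheaper form to use when the resulting value does not matter, e.g. for a
 * statistics counter.
 */
static inline void example_count_event(unsigned long *counter)
{
        pl_inc_noret(counter);          /* no flags test, no return value */
}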

/* add integer constant <x> to integer value pointed to by pointer <ptr>,
 * no return. Size of <x> is not checked.
 */
#define pl_add(ptr, x) ({ \
        if (sizeof(long) == 8 && sizeof(*(ptr)) == 8) { \
                asm volatile("lock addq %1, %0\n" \
                             : "+m" (*(ptr)) \
                             : "er" ((unsigned long)(x)) \
                             : "cc"); \
        } else if (sizeof(*(ptr)) == 4) { \
                asm volatile("lock addl %1, %0\n" \
                             : "+m" (*(ptr)) \
                             : "er" ((unsigned int)(x)) \
                             : "cc"); \
        } else if (sizeof(*(ptr)) == 2) { \
                asm volatile("lock addw %1, %0\n" \
                             : "+m" (*(ptr)) \
                             : "er" ((unsigned short)(x)) \
                             : "cc"); \
        } else if (sizeof(*(ptr)) == 1) { \
                asm volatile("lock addb %1, %0\n" \
                             : "+m" (*(ptr)) \
                             : "er" ((unsigned char)(x)) \
                             : "cc"); \
        } else { \
                void __unsupported_argument_size_for_pl_add__(char *,int); \
                if (sizeof(*(ptr)) != 1 && sizeof(*(ptr)) != 2 && \
                    sizeof(*(ptr)) != 4 && (sizeof(long) != 8 || sizeof(*(ptr)) != 8)) \
                        __unsupported_argument_size_for_pl_add__(__FILE__,__LINE__); \
        } \
})

/* subtract integer constant <x> from integer value pointed to by pointer
 * <ptr>, no return. Size of <x> is not checked.
 */
#define pl_sub(ptr, x) ({ \
        if (sizeof(long) == 8 && sizeof(*(ptr)) == 8) { \
                asm volatile("lock subq %1, %0\n" \
                             : "+m" (*(ptr)) \
                             : "er" ((unsigned long)(x)) \
                             : "cc"); \
        } else if (sizeof(*(ptr)) == 4) { \
                asm volatile("lock subl %1, %0\n" \
                             : "+m" (*(ptr)) \
                             : "er" ((unsigned int)(x)) \
                             : "cc"); \
        } else if (sizeof(*(ptr)) == 2) { \
                asm volatile("lock subw %1, %0\n" \
                             : "+m" (*(ptr)) \
                             : "er" ((unsigned short)(x)) \
                             : "cc"); \
        } else if (sizeof(*(ptr)) == 1) { \
                asm volatile("lock subb %1, %0\n" \
                             : "+m" (*(ptr)) \
                             : "er" ((unsigned char)(x)) \
                             : "cc"); \
        } else { \
                void __unsupported_argument_size_for_pl_sub__(char *,int); \
                if (sizeof(*(ptr)) != 1 && sizeof(*(ptr)) != 2 && \
                    sizeof(*(ptr)) != 4 && (sizeof(long) != 8 || sizeof(*(ptr)) != 8)) \
                        __unsupported_argument_size_for_pl_sub__(__FILE__,__LINE__); \
        } \
})
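
/* Illustrative usage sketch (hypothetical accounting helpers): pl_add() and
 * pl_sub() maintain a shared counter when the updated value is not needed.
 */
static inline void example_queue_bytes(unsigned long *in_flight, unsigned int count)
{
        pl_add(in_flight, count);       /* account for queued bytes */
}

static inline void example_ack_bytes(unsigned long *in_flight, unsigned int count)
{
        pl_sub(in_flight, count);       /* release acknowledged bytes */
}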

/* binary and integer value pointed to by pointer <ptr> with constant <x>, no
 * return. Size of <x> is not checked.
 */
#define pl_and(ptr, x) ({ \
        if (sizeof(long) == 8 && sizeof(*(ptr)) == 8) { \
                asm volatile("lock andq %1, %0\n" \
                             : "+m" (*(ptr)) \
                             : "er" ((unsigned long)(x)) \
                             : "cc"); \
        } else if (sizeof(*(ptr)) == 4) { \
                asm volatile("lock andl %1, %0\n" \
                             : "+m" (*(ptr)) \
                             : "er" ((unsigned int)(x)) \
                             : "cc"); \
        } else if (sizeof(*(ptr)) == 2) { \
                asm volatile("lock andw %1, %0\n" \
                             : "+m" (*(ptr)) \
                             : "er" ((unsigned short)(x)) \
                             : "cc"); \
        } else if (sizeof(*(ptr)) == 1) { \
                asm volatile("lock andb %1, %0\n" \
                             : "+m" (*(ptr)) \
                             : "er" ((unsigned char)(x)) \
                             : "cc"); \
        } else { \
                void __unsupported_argument_size_for_pl_and__(char *,int); \
                if (sizeof(*(ptr)) != 1 && sizeof(*(ptr)) != 2 && \
                    sizeof(*(ptr)) != 4 && (sizeof(long) != 8 || sizeof(*(ptr)) != 8)) \
                        __unsupported_argument_size_for_pl_and__(__FILE__,__LINE__); \
        } \
})

/* binary or integer value pointed to by pointer <ptr> with constant <x>, no
 * return. Size of <x> is not checked.
 */
#define pl_or(ptr, x) ({ \
        if (sizeof(long) == 8 && sizeof(*(ptr)) == 8) { \
                asm volatile("lock orq %1, %0\n" \
                             : "+m" (*(ptr)) \
                             : "er" ((unsigned long)(x)) \
                             : "cc"); \
        } else if (sizeof(*(ptr)) == 4) { \
                asm volatile("lock orl %1, %0\n" \
                             : "+m" (*(ptr)) \
                             : "er" ((unsigned int)(x)) \
                             : "cc"); \
        } else if (sizeof(*(ptr)) == 2) { \
                asm volatile("lock orw %1, %0\n" \
                             : "+m" (*(ptr)) \
                             : "er" ((unsigned short)(x)) \
                             : "cc"); \
        } else if (sizeof(*(ptr)) == 1) { \
                asm volatile("lock orb %1, %0\n" \
                             : "+m" (*(ptr)) \
                             : "er" ((unsigned char)(x)) \
                             : "cc"); \
        } else { \
                void __unsupported_argument_size_for_pl_or__(char *,int); \
                if (sizeof(*(ptr)) != 1 && sizeof(*(ptr)) != 2 && \
                    sizeof(*(ptr)) != 4 && (sizeof(long) != 8 || sizeof(*(ptr)) != 8)) \
                        __unsupported_argument_size_for_pl_or__(__FILE__,__LINE__); \
        } \
})

/* binary xor integer value pointed to by pointer <ptr> with constant <x>, no
 * return. Size of <x> is not checked.
 */
#define pl_xor(ptr, x) ({ \
        if (sizeof(long) == 8 && sizeof(*(ptr)) == 8) { \
                asm volatile("lock xorq %1, %0\n" \
                             : "+m" (*(ptr)) \
                             : "er" ((unsigned long)(x)) \
                             : "cc"); \
        } else if (sizeof(*(ptr)) == 4) { \
                asm volatile("lock xorl %1, %0\n" \
                             : "+m" (*(ptr)) \
                             : "er" ((unsigned int)(x)) \
                             : "cc"); \
        } else if (sizeof(*(ptr)) == 2) { \
                asm volatile("lock xorw %1, %0\n" \
                             : "+m" (*(ptr)) \
                             : "er" ((unsigned short)(x)) \
                             : "cc"); \
        } else if (sizeof(*(ptr)) == 1) { \
                asm volatile("lock xorb %1, %0\n" \
                             : "+m" (*(ptr)) \
                             : "er" ((unsigned char)(x)) \
                             : "cc"); \
        } else { \
                void __unsupported_argument_size_for_pl_xor__(char *,int); \
                if (sizeof(*(ptr)) != 1 && sizeof(*(ptr)) != 2 && \
                    sizeof(*(ptr)) != 4 && (sizeof(long) != 8 || sizeof(*(ptr)) != 8)) \
                        __unsupported_argument_size_for_pl_xor__(__FILE__,__LINE__); \
        } \
})
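
/* Illustrative usage sketch (hypothetical helpers): pl_or() atomically sets
 * bits, pl_and() clears them and pl_xor() flips them, all without returning
 * the previous value.
 */
static inline void example_set_flags(unsigned int *flags, unsigned int mask)
{
        pl_or(flags, mask);             /* set the bits present in <mask> */
}

static inline void example_clear_flags(unsigned int *flags, unsigned int mask)
{
        pl_and(flags, ~mask);           /* clear the bits present in <mask> */
}

static inline void example_toggle_flags(unsigned int *flags, unsigned int mask)
{
        pl_xor(flags, mask);            /* flip the bits present in <mask> */
}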

/* test and set bit <bit> in integer value pointed to by pointer <ptr>. Returns
 * 0 if the bit was not set, or ~0 of the same type as *ptr if it was set. Note
 * that there is no 8-bit equivalent operation.
 */
#define pl_bts(ptr, bit) ( \
        (sizeof(long) == 8 && sizeof(*(ptr)) == 8) ? ({ \
                unsigned long ret; \
                asm volatile("lock btsq %2, %0\n\t" \
                             "sbb %1, %1\n\t" \
                             : "+m" (*(ptr)), "=r" (ret) \
                             : "Ir" ((unsigned long)(bit)) \
                             : "cc"); \
                ret; /* return value */ \
        }) : (sizeof(*(ptr)) == 4) ? ({ \
                unsigned int ret; \
                asm volatile("lock btsl %2, %0\n\t" \
                             "sbb %1, %1\n\t" \
                             : "+m" (*(ptr)), "=r" (ret) \
                             : "Ir" ((unsigned int)(bit)) \
                             : "cc"); \
                ret; /* return value */ \
        }) : (sizeof(*(ptr)) == 2) ? ({ \
                unsigned short ret; \
                asm volatile("lock btsw %2, %0\n\t" \
                             "sbb %1, %1\n\t" \
                             : "+m" (*(ptr)), "=r" (ret) \
                             : "Ir" ((unsigned short)(bit)) \
                             : "cc"); \
                ret; /* return value */ \
        }) : ({ \
                void __unsupported_argument_size_for_pl_bts__(char *,int); \
                if (sizeof(*(ptr)) != 1 && sizeof(*(ptr)) != 2 && \
                    sizeof(*(ptr)) != 4 && (sizeof(long) != 8 || sizeof(*(ptr)) != 8)) \
                        __unsupported_argument_size_for_pl_bts__(__FILE__,__LINE__); \
                0; \
        }) \
)
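
/* Illustrative usage sketch (hypothetical helper): pl_bts() can be used to
 * claim exclusive ownership of a resource represented by one bit; it returns
 * zero only to the caller which actually flipped the bit from 0 to 1.
 */
static inline int example_try_claim(unsigned int *map, unsigned int bit)
{
        return !pl_bts(map, bit);       /* non-zero means this caller owns the bit */
}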

/* Note: for an unclear reason, gcc's __sync_fetch_and_add() implementation
 * produces less optimal code than hand-crafted asm, so let's implement here the
 * operations we need for the most common archs.
 */

/* fetch-and-add: fetch the integer value pointed to by pointer <ptr>, add <x>
 * to <*ptr> and return the previous value.
 */
#define pl_xadd(ptr, x) ( \
        (sizeof(long) == 8 && sizeof(*(ptr)) == 8) ? ({ \
                unsigned long ret = (unsigned long)(x); \
                asm volatile("lock xaddq %0, %1\n" \
                             : "=r" (ret), "+m" (*(ptr)) \
                             : "0" (ret) \
                             : "cc"); \
                ret; /* return value */ \
        }) : (sizeof(*(ptr)) == 4) ? ({ \
                unsigned int ret = (unsigned int)(x); \
                asm volatile("lock xaddl %0, %1\n" \
                             : "=r" (ret), "+m" (*(ptr)) \
                             : "0" (ret) \
                             : "cc"); \
                ret; /* return value */ \
        }) : (sizeof(*(ptr)) == 2) ? ({ \
                unsigned short ret = (unsigned short)(x); \
                asm volatile("lock xaddw %0, %1\n" \
                             : "=r" (ret), "+m" (*(ptr)) \
                             : "0" (ret) \
                             : "cc"); \
                ret; /* return value */ \
        }) : (sizeof(*(ptr)) == 1) ? ({ \
                unsigned char ret = (unsigned char)(x); \
                asm volatile("lock xaddb %0, %1\n" \
                             : "=r" (ret), "+m" (*(ptr)) \
                             : "0" (ret) \
                             : "cc"); \
                ret; /* return value */ \
        }) : ({ \
                void __unsupported_argument_size_for_pl_xadd__(char *,int); \
                if (sizeof(*(ptr)) != 1 && sizeof(*(ptr)) != 2 && \
                    sizeof(*(ptr)) != 4 && (sizeof(long) != 8 || sizeof(*(ptr)) != 8)) \
                        __unsupported_argument_size_for_pl_xadd__(__FILE__,__LINE__); \
                0; \
        }) \
)
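
/* Illustrative usage sketch (hypothetical helper): pl_xadd() returns the value
 * observed before the addition, which makes it suitable for handing out
 * unique, monotonically increasing identifiers (or tickets in a ticket lock).
 */
static inline unsigned long example_next_id(unsigned long *seq)
{
        return pl_xadd(seq, 1);         /* previous value is the caller's id */
}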

/* exchange value <x> with integer value pointed to by pointer <ptr>, and return
 * previous <*ptr> value. <x> must be of the same size as <*ptr>.
 */
#define pl_xchg(ptr, x) ( \
        (sizeof(long) == 8 && sizeof(*(ptr)) == 8) ? ({ \
                unsigned long ret = (unsigned long)(x); \
                asm volatile("xchgq %0, %1\n" \
                             : "=r" (ret), "+m" (*(ptr)) \
                             : "0" (ret) \
                             : "cc"); \
                ret; /* return value */ \
        }) : (sizeof(*(ptr)) == 4) ? ({ \
                unsigned int ret = (unsigned int)(x); \
                asm volatile("xchgl %0, %1\n" \
                             : "=r" (ret), "+m" (*(ptr)) \
                             : "0" (ret) \
                             : "cc"); \
                ret; /* return value */ \
        }) : (sizeof(*(ptr)) == 2) ? ({ \
                unsigned short ret = (unsigned short)(x); \
                asm volatile("xchgw %0, %1\n" \
                             : "=r" (ret), "+m" (*(ptr)) \
                             : "0" (ret) \
                             : "cc"); \
                ret; /* return value */ \
        }) : (sizeof(*(ptr)) == 1) ? ({ \
                unsigned char ret = (unsigned char)(x); \
                asm volatile("xchgb %0, %1\n" \
                             : "=r" (ret), "+m" (*(ptr)) \
                             : "0" (ret) \
                             : "cc"); \
                ret; /* return value */ \
        }) : ({ \
                void __unsupported_argument_size_for_pl_xchg__(char *,int); \
                if (sizeof(*(ptr)) != 1 && sizeof(*(ptr)) != 2 && \
                    sizeof(*(ptr)) != 4 && (sizeof(long) != 8 || sizeof(*(ptr)) != 8)) \
                        __unsupported_argument_size_for_pl_xchg__(__FILE__,__LINE__); \
                0; \
        }) \
)
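
/* Illustrative usage sketch (hypothetical helpers): a minimal test-and-set
 * spinlock built on pl_xchg(). The locked xchg provides full ordering on
 * acquire; on x86, a plain store preceded by a compiler barrier is sufficient
 * to release.
 */
static inline void example_spin_lock(unsigned int *lock)
{
        while (pl_xchg(lock, 1))        /* previous value 0 means we got the lock */
                pl_cpu_relax();         /* be kind to the sibling thread while waiting */
}

static inline void example_spin_unlock(unsigned int *lock)
{
        pl_barrier();                   /* keep the critical section above the store */
        *lock = 0;                      /* plain store acts as a release on x86 */
}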

/* compare integer value <*ptr> with <old> and exchange it with <new> if it
 * matches, then return the value previously found in <*ptr> (which equals
 * <old> when the exchange was performed). <old> and <new> must be of the same
 * size as <*ptr>.
 */
#define pl_cmpxchg(ptr, old, new) ( \
        (sizeof(long) == 8 && sizeof(*(ptr)) == 8) ? ({ \
                unsigned long ret; \
                asm volatile("lock cmpxchgq %2,%1" \
                             : "=a" (ret), "+m" (*(ptr)) \
                             : "r" ((unsigned long)(new)), \
                               "0" ((unsigned long)(old)) \
                             : "cc"); \
                ret; /* return value */ \
        }) : (sizeof(*(ptr)) == 4) ? ({ \
                unsigned int ret; \
                asm volatile("lock cmpxchgl %2,%1" \
                             : "=a" (ret), "+m" (*(ptr)) \
                             : "r" ((unsigned int)(new)), \
                               "0" ((unsigned int)(old)) \
                             : "cc"); \
                ret; /* return value */ \
        }) : (sizeof(*(ptr)) == 2) ? ({ \
                unsigned short ret; \
                asm volatile("lock cmpxchgw %2,%1" \
                             : "=a" (ret), "+m" (*(ptr)) \
                             : "r" ((unsigned short)(new)), \
                               "0" ((unsigned short)(old)) \
                             : "cc"); \
                ret; /* return value */ \
        }) : (sizeof(*(ptr)) == 1) ? ({ \
                unsigned char ret; \
                asm volatile("lock cmpxchgb %2,%1" \
                             : "=a" (ret), "+m" (*(ptr)) \
                             : "r" ((unsigned char)(new)), \
                               "0" ((unsigned char)(old)) \
                             : "cc"); \
                ret; /* return value */ \
        }) : ({ \
                void __unsupported_argument_size_for_pl_cmpxchg__(char *,int); \
                if (sizeof(*(ptr)) != 1 && sizeof(*(ptr)) != 2 && \
                    sizeof(*(ptr)) != 4 && (sizeof(long) != 8 || sizeof(*(ptr)) != 8)) \
                        __unsupported_argument_size_for_pl_cmpxchg__(__FILE__,__LINE__); \
                0; \
        }) \
)
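
/* Illustrative usage sketch (hypothetical helper): the classical
 * compare-and-swap retry loop built on pl_cmpxchg(), here applying a
 * saturating increment to a shared value. The loop restarts whenever another
 * thread modified the value between the read and the swap.
 */
static inline unsigned int example_sat_inc(unsigned int *ptr, unsigned int max)
{
        unsigned int prev, next;

        do {
                prev = *ptr;
                next = (prev < max) ? prev + 1 : max;   /* saturate at <max> */
        } while (pl_cmpxchg(ptr, prev, next) != prev);  /* retry on interference */
        return next;
}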

#else
/* generic implementations */

#if defined(__aarch64__)

/* This was shown to improve fairness on modern ARMv8 such as Neoverse N1 */
#define pl_cpu_relax() do { \
                asm volatile("isb" ::: "memory"); \
        } while (0)

#else

#define pl_cpu_relax() do { \
                asm volatile(""); \
        } while (0)

#endif

/* full memory barrier */
#define pl_mb() do { \
                __sync_synchronize(); \
        } while (0)

#define pl_inc_noret(ptr)     ({ __sync_add_and_fetch((ptr), 1); })
#define pl_dec_noret(ptr)     ({ __sync_sub_and_fetch((ptr), 1); })
#define pl_inc(ptr)           ({ __sync_add_and_fetch((ptr), 1); })
#define pl_dec(ptr)           ({ __sync_sub_and_fetch((ptr), 1); })
#define pl_add(ptr, x)        ({ __sync_add_and_fetch((ptr), (x)); })
#define pl_and(ptr, x)        ({ __sync_and_and_fetch((ptr), (x)); })
#define pl_or(ptr, x)         ({ __sync_or_and_fetch((ptr), (x)); })
#define pl_xor(ptr, x)        ({ __sync_xor_and_fetch((ptr), (x)); })
#define pl_sub(ptr, x)        ({ __sync_sub_and_fetch((ptr), (x)); })
#define pl_bts(ptr, bit)      ({ typeof(*(ptr)) __pl_t = (1u << (bit)); \
                                 __sync_fetch_and_or((ptr), __pl_t) & __pl_t; \
                              })
#define pl_xadd(ptr, x)       ({ __sync_fetch_and_add((ptr), (x)); })
#define pl_cmpxchg(ptr, o, n) ({ __sync_val_compare_and_swap((ptr), (o), (n)); })
#define pl_xchg(ptr, x)       ({ typeof(*(ptr)) __pl_t; \
                                 do { __pl_t = *(ptr); \
                                 } while (!__sync_bool_compare_and_swap((ptr), __pl_t, (x))); \
                                 __pl_t; \
                              })

#endif

#endif /* PL_ATOMIC_OPS_H */