Public headers: use SPDX identifiers
[urcu.git] / include / urcu / uatomic / generic.h
CommitLineData
d3d3857f
MJ
1// SPDX-FileCopyrightText: 1991-1994 by Xerox Corporation. All rights reserved.
2// SPDX-FileCopyrightText: 1996-1999 by Silicon Graphics. All rights reserved.
3// SPDX-FileCopyrightText: 1999-2004 Hewlett-Packard Development Company, L.P.
4// SPDX-FileCopyrightText: 2009 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
5// SPDX-FileCopyrightText: 2010 Paolo Bonzini
6//
7// SPDX-License-Identifier: LicenseRef-Boehm-GC
8
8760d94e
PB
9#ifndef _URCU_UATOMIC_GENERIC_H
10#define _URCU_UATOMIC_GENERIC_H
11
12/*
8760d94e
PB
13 * Code inspired from libuatomic_ops-1.2, inherited in part from the
14 * Boehm-Demers-Weiser conservative garbage collector.
15 */
16
2917c006 17#include <stdint.h>
8760d94e
PB
18#include <urcu/compiler.h>
19#include <urcu/system.h>
20
21#ifdef __cplusplus
22extern "C" {
23#endif
24
#ifndef uatomic_set
/*
 * uatomic_set: store @v into *@addr through a volatile access
 * (CMM_STORE_SHARED); the stored value is discarded.  Relaxed ordering:
 * no memory barrier is implied.
 */
#define uatomic_set(addr, v)	((void) CMM_STORE_SHARED(*(addr), (v)))
#endif

#ifndef uatomic_read
/*
 * uatomic_read: load *@addr through a volatile access (CMM_LOAD_SHARED).
 * Relaxed ordering: no memory barrier is implied.
 */
#define uatomic_read(addr)	CMM_LOAD_SHARED(*(addr))
#endif
32
#if !defined __OPTIMIZE__  || defined UATOMIC_NO_LINK_ERROR
/*
 * _uatomic_link_error() is called when an uatomic primitive is used on an
 * operand size unsupported by the architecture.  With optimizations on,
 * the call is meant to be removed by dead-code elimination and any
 * survivor produces a link error (extern declaration below); with
 * optimizations off the call cannot be elided, so trap at runtime
 * instead.
 */
#ifdef ILLEGAL_INSTR
static inline __attribute__((always_inline))
void _uatomic_link_error(void)
{
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__(ILLEGAL_INSTR);
}
#else
static inline __attribute__((always_inline, __noreturn__))
void _uatomic_link_error(void)
{
	/* No arch-specific illegal instruction available: compiler trap. */
	__builtin_trap();
}
#endif

#else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
/*
 * Deliberately left undefined: any call surviving optimization becomes a
 * link-time error flagging the unsupported atomic operand size.
 */
extern void _uatomic_link_error(void);
#endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
55
56/* cmpxchg */
57
58#ifndef uatomic_cmpxchg
59static inline __attribute__((always_inline))
60unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
61 unsigned long _new, int len)
62{
63 switch (len) {
f469d839
PB
64#ifdef UATOMIC_HAS_ATOMIC_BYTE
65 case 1:
2917c006
KR
66 return __sync_val_compare_and_swap_1((uint8_t *) addr, old,
67 _new);
f469d839
PB
68#endif
69#ifdef UATOMIC_HAS_ATOMIC_SHORT
70 case 2:
2917c006
KR
71 return __sync_val_compare_and_swap_2((uint16_t *) addr, old,
72 _new);
f469d839 73#endif
8760d94e 74 case 4:
2917c006
KR
75 return __sync_val_compare_and_swap_4((uint32_t *) addr, old,
76 _new);
b39e1761 77#if (CAA_BITS_PER_LONG == 64)
8760d94e 78 case 8:
2917c006
KR
79 return __sync_val_compare_and_swap_8((uint64_t *) addr, old,
80 _new);
8760d94e
PB
81#endif
82 }
83 _uatomic_link_error();
84 return 0;
85}
86
87
e56d99bf
MD
88#define uatomic_cmpxchg(addr, old, _new) \
89 ((__typeof__(*(addr))) _uatomic_cmpxchg((addr), \
90 caa_cast_long_keep_sign(old), \
91 caa_cast_long_keep_sign(_new),\
8760d94e
PB
92 sizeof(*(addr))))
93
94
bf33aaea
PB
/* uatomic_and */

#ifndef uatomic_and
/*
 * _uatomic_and: atomically apply *addr &= mask, dispatching on operand
 * size.  The __sync and-and-fetch builtins imply a full memory barrier
 * (per the GCC atomic builtins documentation).  Unsupported sizes reach
 * _uatomic_link_error().
 */
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long mask,
		  int size)
{
	switch (size) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_and_and_fetch_1((uint8_t *) addr, mask);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_and_and_fetch_2((uint16_t *) addr, mask);
		return;
#endif
	case 4:
		__sync_and_and_fetch_4((uint32_t *) addr, mask);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_and_and_fetch_8((uint64_t *) addr, mask);
		return;
#endif
	}
	/* Unsupported operand size. */
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#endif
133
985b35b1
PB
/* uatomic_or */

#ifndef uatomic_or
/*
 * _uatomic_or: atomically apply *addr |= mask, dispatching on operand
 * size.  The __sync or-and-fetch builtins imply a full memory barrier
 * (per the GCC atomic builtins documentation).  Unsupported sizes reach
 * _uatomic_link_error().
 */
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long mask,
		 int size)
{
	switch (size) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_or_and_fetch_1((uint8_t *) addr, mask);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_or_and_fetch_2((uint16_t *) addr, mask);
		return;
#endif
	case 4:
		__sync_or_and_fetch_4((uint32_t *) addr, mask);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_or_and_fetch_8((uint64_t *) addr, mask);
		return;
#endif
	}
	/* Unsupported operand size. */
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif
173
2812a2d2 174
8760d94e
PB
/* uatomic_add_return */

#ifndef uatomic_add_return
/*
 * _uatomic_add_return: atomically add delta to *addr and return the
 * resulting sum.  Dispatches on operand size; unsupported sizes reach
 * _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long delta,
		int size)
{
	switch (size) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1((uint8_t *) addr, delta);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2((uint16_t *) addr, delta);
#endif
	case 4:
		return __sync_add_and_fetch_4((uint32_t *) addr, delta);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8((uint64_t *) addr, delta);
#endif
	}
	/* Unsupported operand size. */
	_uatomic_link_error();
	return 0;
}


#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
208
#ifndef uatomic_xchg
/* xchg */

/*
 * _uatomic_exchange: atomically store val into *addr and return the
 * previous contents, built as a compare-and-swap retry loop.  Dispatches
 * on operand size; unsupported sizes reach _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t prev;

		do {
			prev = uatomic_read((uint8_t *) addr);
		} while (!__sync_bool_compare_and_swap_1((uint8_t *) addr,
				prev, val));

		return prev;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t prev;

		do {
			prev = uatomic_read((uint16_t *) addr);
		} while (!__sync_bool_compare_and_swap_2((uint16_t *) addr,
				prev, val));

		return prev;
	}
#endif
	case 4:
	{
		uint32_t prev;

		do {
			prev = uatomic_read((uint32_t *) addr);
		} while (!__sync_bool_compare_and_swap_4((uint32_t *) addr,
				prev, val));

		return prev;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t prev;

		do {
			prev = uatomic_read((uint64_t *) addr);
		} while (!__sync_bool_compare_and_swap_8((uint64_t *) addr,
				prev, val));

		return prev;
	}
#endif
	}
	/* Unsupported operand size. */
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
276
277#else /* #ifndef uatomic_cmpxchg */
278
bf33aaea
PB
#ifndef uatomic_and
/* uatomic_and */

/*
 * Generic uatomic_and fallback built on the architecture's cmpxchg:
 * retry a compare-and-swap loop until *addr &= val is applied
 * atomically.  Dispatches on operand size; unsupported sizes reach
 * _uatomic_link_error().
 */
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 2);
		} while (oldt != old);

		/*
		 * Fix: this case was missing its return and fell through
		 * into the 4-byte case, re-applying the mask with a wider
		 * cmpxchg and clobbering the two bytes following *addr.
		 */
		return;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	/* Unsupported operand size. */
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#endif /* #ifndef uatomic_and */
350
985b35b1
PB
#ifndef uatomic_or
/* uatomic_or */

/*
 * Generic uatomic_or fallback built on the architecture's cmpxchg:
 * retry a compare-and-swap loop until *addr |= val is applied
 * atomically.  Dispatches on operand size; unsupported sizes reach
 * _uatomic_link_error().
 */
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t prev, cur;

		cur = uatomic_read((uint8_t *) addr);
		do {
			prev = cur;
			cur = _uatomic_cmpxchg(addr, prev, prev | val, 1);
		} while (cur != prev);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t prev, cur;

		cur = uatomic_read((uint16_t *) addr);
		do {
			prev = cur;
			cur = _uatomic_cmpxchg(addr, prev, prev | val, 2);
		} while (cur != prev);

		return;
	}
#endif
	case 4:
	{
		uint32_t prev, cur;

		cur = uatomic_read((uint32_t *) addr);
		do {
			prev = cur;
			cur = _uatomic_cmpxchg(addr, prev, prev | val, 4);
		} while (cur != prev);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t prev, cur;

		cur = uatomic_read((uint64_t *) addr);
		do {
			prev = cur;
			cur = _uatomic_cmpxchg(addr, prev, prev | val, 8);
		} while (cur != prev);

		return;
	}
#endif
	}
	/* Unsupported operand size. */
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif /* #ifndef uatomic_or */
424
8760d94e
PB
#ifndef uatomic_add_return
/* uatomic_add_return */

/*
 * Generic uatomic_add_return fallback built on uatomic_cmpxchg: retry a
 * compare-and-swap loop until the addition is applied atomically, then
 * return the resulting sum.  Dispatches on operand size; unsupported
 * sizes reach _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t prev, cur;

		cur = uatomic_read((uint8_t *) addr);
		do {
			prev = cur;
			cur = uatomic_cmpxchg((uint8_t *) addr,
					prev, prev + val);
		} while (cur != prev);

		return prev + val;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t prev, cur;

		cur = uatomic_read((uint16_t *) addr);
		do {
			prev = cur;
			cur = uatomic_cmpxchg((uint16_t *) addr,
					prev, prev + val);
		} while (cur != prev);

		return prev + val;
	}
#endif
	case 4:
	{
		uint32_t prev, cur;

		cur = uatomic_read((uint32_t *) addr);
		do {
			prev = cur;
			cur = uatomic_cmpxchg((uint32_t *) addr,
					prev, prev + val);
		} while (cur != prev);

		return prev + val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t prev, cur;

		cur = uatomic_read((uint64_t *) addr);
		do {
			prev = cur;
			cur = uatomic_cmpxchg((uint64_t *) addr,
					prev, prev + val);
		} while (cur != prev);

		return prev + val;
	}
#endif
	}
	/* Unsupported operand size. */
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
500
#ifndef uatomic_xchg
/* xchg */

/*
 * Generic uatomic_xchg fallback built on uatomic_cmpxchg: retry a
 * compare-and-swap loop until val is installed in *addr, then return
 * the previous contents.  Dispatches on operand size; unsupported sizes
 * reach _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t prev, cur;

		cur = uatomic_read((uint8_t *) addr);
		do {
			prev = cur;
			cur = uatomic_cmpxchg((uint8_t *) addr,
					prev, val);
		} while (cur != prev);

		return prev;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t prev, cur;

		cur = uatomic_read((uint16_t *) addr);
		do {
			prev = cur;
			cur = uatomic_cmpxchg((uint16_t *) addr,
					prev, val);
		} while (cur != prev);

		return prev;
	}
#endif
	case 4:
	{
		uint32_t prev, cur;

		cur = uatomic_read((uint32_t *) addr);
		do {
			prev = cur;
			cur = uatomic_cmpxchg((uint32_t *) addr,
					prev, val);
		} while (cur != prev);

		return prev;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t prev, cur;

		cur = uatomic_read((uint64_t *) addr);
		do {
			prev = cur;
			cur = uatomic_cmpxchg((uint64_t *) addr,
					prev, val);
		} while (cur != prev);

		return prev;
	}
#endif
	}
	/* Unsupported operand size. */
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
576
577#endif /* #else #ifndef uatomic_cmpxchg */
578
/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

#ifndef uatomic_add
/* uatomic_add: atomic add, discarding the sum returned by add_return. */
#define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
#define cmm_smp_mb__before_uatomic_add()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_add()		cmm_barrier()
#endif

/*
 * Subtraction is addition of the negated value; caa_cast_long_keep_sign
 * preserves the operand's signedness before negation.
 */
#define uatomic_sub_return(addr, v)	\
	uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
#define uatomic_sub(addr, v)		\
	uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
#define cmm_smp_mb__before_uatomic_sub()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_sub()		cmm_smp_mb__after_uatomic_add()

#ifndef uatomic_inc
/* uatomic_inc: atomic increment by one. */
#define uatomic_inc(addr)		uatomic_add((addr), 1)
#define cmm_smp_mb__before_uatomic_inc()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_inc()		cmm_smp_mb__after_uatomic_add()
#endif

#ifndef uatomic_dec
/* uatomic_dec: atomic decrement by one. */
#define uatomic_dec(addr)		uatomic_add((addr), -1)
#define cmm_smp_mb__before_uatomic_dec()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_dec()		cmm_smp_mb__after_uatomic_add()
#endif
605
606#ifdef __cplusplus
607}
608#endif
609
610#endif /* _URCU_UATOMIC_GENERIC_H */
This page took 0.085875 seconds and 4 git commands to generate.