#ifndef _URCU_UATOMIC_GENERIC_H
#define _URCU_UATOMIC_GENERIC_H

/*
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics.  All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009      Mathieu Desnoyers
 * Copyright (c) 2010      Paolo Bonzini
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */
#include <stdint.h>
#include <urcu/compiler.h>
#include <urcu/system.h>
#ifndef uatomic_set
#define uatomic_set(addr, v)	((void) CMM_STORE_SHARED(*(addr), (v)))
#endif

#ifndef uatomic_read
#define uatomic_read(addr)	CMM_LOAD_SHARED(*(addr))
#endif
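/*
 * Example (illustrative only): uatomic_set() and uatomic_read() expand to
 * volatile accesses through CMM_STORE_SHARED()/CMM_LOAD_SHARED(). They keep
 * the compiler from caching or re-ordering the access, but emit no CPU
 * memory barrier on their own.
 *
 *	static unsigned long ready;
 *
 *	uatomic_set(&ready, 1);		// volatile store, no fence implied
 *	if (uatomic_read(&ready))	// volatile load, no fence implied
 *		do_work();		// hypothetical function
 */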
#if !defined __OPTIMIZE__  || defined UATOMIC_NO_LINK_ERROR
static inline __attribute__((always_inline, __noreturn__))
void _uatomic_link_error(void)
{
#ifdef ILLEGAL_INSTR
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__(ILLEGAL_INSTR);
#else
	__builtin_trap();
#endif
}

#else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
extern void _uatomic_link_error(void);
#endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
/* cmpxchg */

#ifndef uatomic_cmpxchg
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
			      unsigned long _new, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_val_compare_and_swap_1((uint8_t *) addr, old,
				_new);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_val_compare_and_swap_2((uint16_t *) addr, old,
				_new);
#endif
	case 4:
		return __sync_val_compare_and_swap_4((uint32_t *) addr, old,
				_new);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_val_compare_and_swap_8((uint64_t *) addr, old,
				_new);
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_cmpxchg(addr, old, _new)				      \
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr),			      \
						caa_cast_long_keep_sign(old), \
						caa_cast_long_keep_sign(_new),\
						sizeof(*(addr))))
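/*
 * Example (illustrative only): uatomic_cmpxchg() returns the value that was
 * read from *addr; the swap took place if and only if that value equals the
 * expected "old" argument.
 *
 *	static unsigned long lock;
 *
 *	if (uatomic_cmpxchg(&lock, 0, 1) == 0) {
 *		// this thread atomically moved lock from 0 to 1
 *	}
 */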
/* uatomic_and */

#ifndef uatomic_and
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val,
		  int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_and_and_fetch_1((uint8_t *) addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_and_and_fetch_2((uint16_t *) addr, val);
		return;
#endif
	case 4:
		__sync_and_and_fetch_4((uint32_t *) addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_and_and_fetch_8((uint64_t *) addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#endif /* #ifndef uatomic_and */
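/*
 * Note (assumption based on the builtins used above): the
 * __sync_*_and_fetch() builtins already act as full memory barriers, which
 * is why the cmm_smp_mb__before/after_uatomic_and() helpers only need to be
 * compiler barriers (cmm_barrier()) in this generic implementation.
 */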
/* uatomic_or */

#ifndef uatomic_or
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val,
		 int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_or_and_fetch_1((uint8_t *) addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_or_and_fetch_2((uint16_t *) addr, val);
		return;
#endif
	case 4:
		__sync_or_and_fetch_4((uint32_t *) addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_or_and_fetch_8((uint64_t *) addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif /* #ifndef uatomic_or */
/* uatomic_add_return */

#ifndef uatomic_add_return
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
				 int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1((uint8_t *) addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2((uint16_t *) addr, val);
#endif
	case 4:
		return __sync_add_and_fetch_4((uint32_t *) addr, val);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8((uint64_t *) addr, val);
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
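/*
 * Example (illustrative only): uatomic_add_return() returns the value
 * *after* the addition (it maps to __sync_add_and_fetch_*() here).
 *
 *	static unsigned long refcount = 1;
 *
 *	if (uatomic_add_return(&refcount, -1) == 0)
 *		release_resource();	// hypothetical cleanup function
 */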
/* xchg */

#ifndef uatomic_xchg
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old;

		do {
			old = uatomic_read((uint8_t *) addr);
		} while (!__sync_bool_compare_and_swap_1((uint8_t *) addr,
				old, val));

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old;

		do {
			old = uatomic_read((uint16_t *) addr);
		} while (!__sync_bool_compare_and_swap_2((uint16_t *) addr,
				old, val));

		return old;
	}
#endif
	case 4:
	{
		uint32_t old;

		do {
			old = uatomic_read((uint32_t *) addr);
		} while (!__sync_bool_compare_and_swap_4((uint32_t *) addr,
				old, val));

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old;

		do {
			old = uatomic_read((uint64_t *) addr);
		} while (!__sync_bool_compare_and_swap_8((uint64_t *) addr,
				old, val));

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
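/*
 * Note (assumption): the exchange above is emulated with a
 * __sync_bool_compare_and_swap_*() retry loop because the __sync family has
 * no full-barrier exchange builtin (__sync_lock_test_and_set() only provides
 * acquire semantics). uatomic_xchg() returns the previous value, e.g.:
 *
 *	static unsigned long pending;
 *	unsigned long batch;
 *
 *	batch = uatomic_xchg(&pending, 0);	// grab current value, reset to 0
 */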
#else /* #ifndef uatomic_cmpxchg */

#ifndef uatomic_and
/* uatomic_and */

static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#endif /* #ifndef uatomic_and */
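/*
 * Note: the helpers in this branch all follow the same cmpxchg emulation
 * pattern: read the current value, compute the desired new value, attempt
 * the cmpxchg, and retry until the value observed by the cmpxchg matches
 * the value the computation was based on (i.e. no concurrent update won).
 */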
#ifndef uatomic_or
/* uatomic_or */

static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif /* #ifndef uatomic_or */
#ifndef uatomic_add_return
/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint8_t *) addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint16_t *) addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint32_t *) addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint64_t *) addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
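/*
 * Note: each case above returns old + val, i.e. the post-addition value,
 * which keeps this cmpxchg-based fallback consistent with the
 * __sync_add_and_fetch_*() based uatomic_add_return() variant.
 */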
#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint8_t *) addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint16_t *) addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint32_t *) addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint64_t *) addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
#endif /* #else #ifndef uatomic_cmpxchg */

/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

#ifndef uatomic_add
#define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
#define cmm_smp_mb__before_uatomic_add()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_add()		cmm_barrier()
#endif

#define uatomic_sub_return(addr, v)	\
	uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
#define uatomic_sub(addr, v)		\
	uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
#define cmm_smp_mb__before_uatomic_sub()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_sub()		cmm_smp_mb__after_uatomic_add()

#ifndef uatomic_inc
#define uatomic_inc(addr)		uatomic_add((addr), 1)
#define cmm_smp_mb__before_uatomic_inc()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_inc()		cmm_smp_mb__after_uatomic_add()
#endif

#ifndef uatomic_dec
#define uatomic_dec(addr)		uatomic_add((addr), -1)
#define cmm_smp_mb__before_uatomic_dec()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_dec()		cmm_smp_mb__after_uatomic_add()
#endif
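/*
 * Example (illustrative only): the derived helpers all funnel into
 * uatomic_add_return()/uatomic_add().
 *
 *	static unsigned long counter;
 *
 *	uatomic_add(&counter, 2);			// counter += 2
 *	uatomic_inc(&counter);				// counter += 1
 *	if (uatomic_sub_return(&counter, 3) == 0)	// counter -= 3, returns new value
 *		...;
 */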
#endif /* _URCU_UATOMIC_GENERIC_H */