fix: warning 'noreturn' function does return on ppc
urcu.git: include/urcu/uatomic/generic.h
#ifndef _URCU_UATOMIC_GENERIC_H
#define _URCU_UATOMIC_GENERIC_H

/*
 * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics. All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009 Mathieu Desnoyers
 * Copyright (c) 2010 Paolo Bonzini
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */
#include <stdint.h>
#include <urcu/compiler.h>
#include <urcu/system.h>

#ifdef __cplusplus
extern "C" {
#endif

#ifndef uatomic_set
#define uatomic_set(addr, v)	((void) CMM_STORE_SHARED(*(addr), (v)))
#endif

#ifndef uatomic_read
#define uatomic_read(addr)	CMM_LOAD_SHARED(*(addr))
#endif

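/*
 * Illustrative usage sketch (added comment, not part of the original header):
 * uatomic_set() and uatomic_read() wrap CMM_STORE_SHARED()/CMM_LOAD_SHARED()
 * for plain stores/loads of a shared variable. The variable "active" and the
 * function do_work() below are hypothetical.
 *
 *	static unsigned long active;
 *
 *	uatomic_set(&active, 1);
 *	if (uatomic_read(&active))
 *		do_work();
 */
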
#if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR
#ifdef ILLEGAL_INSTR
static inline __attribute__((always_inline))
void _uatomic_link_error(void)
{
	/*
	 * Generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__(ILLEGAL_INSTR);
}
#else
static inline __attribute__((always_inline, __noreturn__))
void _uatomic_link_error(void)
{
	__builtin_trap();
}
#endif

#else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
extern void _uatomic_link_error(void);
#endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */

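/*
 * Note (added comment, not part of the original header): _uatomic_link_error()
 * is only reachable when a uatomic primitive is instantiated on an operand
 * size with no matching switch case. With optimizations enabled, the call is
 * expected to be eliminated as dead code for supported sizes and to produce a
 * link error otherwise; without optimizations it traps at run time instead.
 * A hypothetical case that would fall through every size:
 *
 *	// On a 32-bit target (CAA_BITS_PER_LONG == 32), e.g.
 *	//	uint64_t v; uatomic_cmpxchg(&v, 0, 1);
 *	// has no matching case and ends up in _uatomic_link_error().
 */
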
/* cmpxchg */

#ifndef uatomic_cmpxchg
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
		unsigned long _new, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_val_compare_and_swap_1((uint8_t *) addr, old,
				_new);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_val_compare_and_swap_2((uint16_t *) addr, old,
				_new);
#endif
	case 4:
		return __sync_val_compare_and_swap_4((uint32_t *) addr, old,
				_new);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_val_compare_and_swap_8((uint64_t *) addr, old,
				_new);
#endif
	}
	_uatomic_link_error();
	return 0;
}


#define uatomic_cmpxchg(addr, old, _new)				\
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr),			\
					caa_cast_long_keep_sign(old),	\
					caa_cast_long_keep_sign(_new),	\
					sizeof(*(addr))))

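/*
 * Illustrative usage sketch (added comment, not part of the original header):
 * a lock-free "claim once" flag built on uatomic_cmpxchg(), which returns the
 * value found before the operation; the swap happened only if that value
 * equals the expected one. The variable "owner" and parameter "self" are
 * hypothetical.
 *
 *	static unsigned long owner;	// 0 means unclaimed
 *
 *	static int try_claim(unsigned long self)
 *	{
 *		return uatomic_cmpxchg(&owner, 0, self) == 0;
 *	}
 */
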

/* uatomic_and */

#ifndef uatomic_and
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val,
		int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_and_and_fetch_1((uint8_t *) addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_and_and_fetch_2((uint16_t *) addr, val);
		return;
#endif
	case 4:
		__sync_and_and_fetch_4((uint32_t *) addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_and_and_fetch_8((uint64_t *) addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#endif

/* uatomic_or */

#ifndef uatomic_or
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val,
		int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_or_and_fetch_1((uint8_t *) addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_or_and_fetch_2((uint16_t *) addr, val);
		return;
#endif
	case 4:
		__sync_or_and_fetch_4((uint32_t *) addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_or_and_fetch_8((uint64_t *) addr, val);
		return;
#endif
	}
	_uatomic_link_error();
	return;
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif

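/*
 * Illustrative usage sketch (added comment, not part of the original header):
 * uatomic_or() and uatomic_and() can maintain a shared flag word. The flag
 * name and variable below are hypothetical; the cmm_smp_mb__before/after_*()
 * helpers are the portable way to order accesses around these calls.
 *
 *	#define FLAG_READY	(1UL << 0)
 *	static unsigned long flags;
 *
 *	uatomic_or(&flags, FLAG_READY);		// set the bit atomically
 *	uatomic_and(&flags, ~FLAG_READY);	// clear it atomically
 */
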
/* uatomic_add_return */

#ifndef uatomic_add_return
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
		int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1((uint8_t *) addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2((uint16_t *) addr, val);
#endif
	case 4:
		return __sync_add_and_fetch_4((uint32_t *) addr, val);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8((uint64_t *) addr, val);
#endif
	}
	_uatomic_link_error();
	return 0;
}


#define uatomic_add_return(addr, v)				\
	((__typeof__(*(addr))) _uatomic_add_return((addr),	\
					caa_cast_long_keep_sign(v),	\
					sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */

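/*
 * Illustrative usage sketch (added comment, not part of the original header):
 * a shared reference count using uatomic_add_return(), which returns the
 * updated value. The struct, field and release function are hypothetical.
 *
 *	struct res {
 *		long refcount;
 *	};
 *
 *	static void res_put(struct res *r)
 *	{
 *		if (uatomic_add_return(&r->refcount, -1) == 0)
 *			free_res(r);	// hypothetical release function
 *	}
 */
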
#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old;

		do {
			old = uatomic_read((uint8_t *) addr);
		} while (!__sync_bool_compare_and_swap_1((uint8_t *) addr,
				old, val));

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old;

		do {
			old = uatomic_read((uint16_t *) addr);
		} while (!__sync_bool_compare_and_swap_2((uint16_t *) addr,
				old, val));

		return old;
	}
#endif
	case 4:
	{
		uint32_t old;

		do {
			old = uatomic_read((uint32_t *) addr);
		} while (!__sync_bool_compare_and_swap_4((uint32_t *) addr,
				old, val));

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old;

		do {
			old = uatomic_read((uint64_t *) addr);
		} while (!__sync_bool_compare_and_swap_8((uint64_t *) addr,
				old, val));

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)					\
	((__typeof__(*(addr))) _uatomic_exchange((addr),	\
					caa_cast_long_keep_sign(v),	\
					sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */

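/*
 * Illustrative usage sketch (added comment, not part of the original header):
 * uatomic_xchg() atomically stores a new value and returns the previous one,
 * e.g. to detach a pending item. The type and variable names are hypothetical.
 *
 *	static struct work *pending;
 *
 *	static struct work *take_pending(void)
 *	{
 *		return uatomic_xchg(&pending, NULL);
 *	}
 */
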
#else /* #ifndef uatomic_cmpxchg */

#ifndef uatomic_and
/* uatomic_and */

static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#endif /* #ifndef uatomic_and */

#ifndef uatomic_or
/* uatomic_or */

static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif /* #ifndef uatomic_or */

#ifndef uatomic_add_return
/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint8_t *) addr,
					old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint16_t *) addr,
					old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint32_t *) addr,
					old, old + val);
		} while (oldt != old);

		return old + val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint64_t *) addr,
					old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)				\
	((__typeof__(*(addr))) _uatomic_add_return((addr),	\
					caa_cast_long_keep_sign(v),	\
					sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */

#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint8_t *) addr,
					old, val);
		} while (oldt != old);

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint16_t *) addr,
					old, val);
		} while (oldt != old);

		return old;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint32_t *) addr,
					old, val);
		} while (oldt != old);

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint64_t *) addr,
					old, val);
		} while (oldt != old);

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)					\
	((__typeof__(*(addr))) _uatomic_exchange((addr),	\
					caa_cast_long_keep_sign(v),	\
					sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */

#endif /* #else #ifndef uatomic_cmpxchg */

/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

#ifndef uatomic_add
#define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
#define cmm_smp_mb__before_uatomic_add()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_add()		cmm_barrier()
#endif

#define uatomic_sub_return(addr, v)	\
	uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
#define uatomic_sub(addr, v)		\
	uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
#define cmm_smp_mb__before_uatomic_sub()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_sub()		cmm_smp_mb__after_uatomic_add()

#ifndef uatomic_inc
#define uatomic_inc(addr)		uatomic_add((addr), 1)
#define cmm_smp_mb__before_uatomic_inc()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_inc()		cmm_smp_mb__after_uatomic_add()
#endif

#ifndef uatomic_dec
#define uatomic_dec(addr)		uatomic_add((addr), -1)
#define cmm_smp_mb__before_uatomic_dec()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_dec()		cmm_smp_mb__after_uatomic_add()
#endif

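/*
 * Illustrative usage sketch (added comment, not part of the original header):
 * the derived helpers above are thin wrappers, e.g. uatomic_inc()/uatomic_dec()
 * are uatomic_add() with +1/-1. The counter name below is hypothetical.
 *
 *	static unsigned long nr_events;
 *
 *	uatomic_inc(&nr_events);		// nr_events += 1 atomically
 *	uatomic_sub(&nr_events, 2);		// nr_events -= 2 atomically
 *	unsigned long snapshot = uatomic_read(&nr_events);
 */
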
#ifdef __cplusplus
}
#endif

#endif /* _URCU_UATOMIC_GENERIC_H */