#ifndef _URCU_UATOMIC_GENERIC_H
#define _URCU_UATOMIC_GENERIC_H

/*
 * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics. All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009 Mathieu Desnoyers
 * Copyright (c) 2010 Paolo Bonzini
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */

#include <urcu/compiler.h>
#include <urcu/system.h>

#ifdef __cplusplus
extern "C" {
#endif

#ifndef BITS_PER_LONG
#define BITS_PER_LONG	(__SIZEOF_LONG__ * 8)
#endif

#ifndef uatomic_set
#define uatomic_set(addr, v)	STORE_SHARED(*(addr), (v))
#endif

#ifndef uatomic_read
#define uatomic_read(addr)	LOAD_SHARED(*(addr))
#endif
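
/*
 * Note: the uatomic_set()/uatomic_read() defaults above are plain
 * STORE_SHARED()/LOAD_SHARED() accesses; they prevent load/store tearing
 * but, by themselves, imply no memory barrier. A minimal usage sketch
 * ("counter" is a hypothetical variable, not part of this header):
 *
 *	static unsigned long counter;
 *
 *	uatomic_set(&counter, 0UL);
 *	unsigned long snapshot = uatomic_read(&counter);
 */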

#if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR
static inline __attribute__((always_inline))
void _uatomic_link_error()
{
#ifdef ILLEGAL_INSTR
	/*
	 * Generate an illegal instruction: we cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__(ILLEGAL_INSTR);
#else
	__builtin_trap();
#endif
}

#else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
extern void _uatomic_link_error();
#endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
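
/*
 * How the sizes are policed: each dispatch function below switches on
 * sizeof() the operand and falls back to _uatomic_link_error() for
 * unsupported sizes. With optimizations enabled, a supported size folds
 * to a single __sync builtin and the dead call is removed; an
 * unsupported size leaves a reference to the undefined extern symbol,
 * turning the mistake into a link-time error. Without optimizations (or
 * with UATOMIC_NO_LINK_ERROR), the inline definition above traps at run
 * time instead.
 */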

/* cmpxchg */

#ifndef uatomic_cmpxchg
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
			       unsigned long _new, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_val_compare_and_swap_1(addr, old, _new);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_val_compare_and_swap_2(addr, old, _new);
#endif
	case 4:
		return __sync_val_compare_and_swap_4(addr, old, _new);
#if (BITS_PER_LONG == 64)
	case 8:
		return __sync_val_compare_and_swap_8(addr, old, _new);
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_cmpxchg(addr, old, _new)				      \
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr), (unsigned long)(old), \
						(unsigned long)(_new),	      \
						sizeof(*(addr))))

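/*
 * Example use of uatomic_cmpxchg(): a classic compare-and-swap retry
 * loop. A minimal sketch only; "flags" is a hypothetical variable, not
 * part of this header.
 *
 *	static unsigned int flags;
 *
 *	unsigned int old, newval;
 *	do {
 *		old = uatomic_read(&flags);
 *		newval = old | 0x1;
 *	} while (uatomic_cmpxchg(&flags, old, newval) != old);
 */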

/* uatomic_add_return */

#ifndef uatomic_add_return
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
				  int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1(addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2(addr, val);
#endif
	case 4:
		return __sync_add_and_fetch_4(addr, val);
#if (BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8(addr, val);
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)					      \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		      \
						   (unsigned long)(v),	      \
						   sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */

#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old;

		do {
			old = uatomic_read((unsigned char *)addr);
		} while (!__sync_bool_compare_and_swap_1(addr, old, val));

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old;

		do {
			old = uatomic_read((unsigned short *)addr);
		} while (!__sync_bool_compare_and_swap_2(addr, old, val));

		return old;
	}
#endif
	case 4:
	{
		unsigned int old;

		do {
			old = uatomic_read((unsigned int *)addr);
		} while (!__sync_bool_compare_and_swap_4(addr, old, val));

		return old;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old;

		do {
			old = uatomic_read((unsigned long *)addr);
		} while (!__sync_bool_compare_and_swap_8(addr, old, val));

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						      \
	((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
						 sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */

#else /* #ifndef uatomic_cmpxchg */

#ifndef uatomic_add_return
/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 1);
		} while (oldt != old);

		return old + val;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 2);
		} while (oldt != old);

		return old + val;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 4);
		} while (oldt != old);

		return old + val;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 8);
		} while (oldt != old);

		return old + val;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)					      \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		      \
						   (unsigned long)(v),	      \
						   sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */

#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 1);
		} while (oldt != old);

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 2);
		} while (oldt != old);

		return old;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 4);
		} while (oldt != old);

		return old;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 8);
		} while (oldt != old);

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						      \
	((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
						 sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */

#endif /* #else #ifndef uatomic_cmpxchg */
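
/*
 * Whichever branch above provided them, uatomic_add_return() returns the
 * value *after* the addition and uatomic_xchg() returns the value that
 * was replaced. Illustrative sketch ("count" and "head" are hypothetical
 * variables, not part of this header):
 *
 *	static unsigned long count;
 *	static void *head;
 *
 *	unsigned long newcount = uatomic_add_return(&count, 2UL);
 *	void *oldhead = uatomic_xchg(&head, NULL);
 */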

/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

#ifndef uatomic_add
#define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
#endif

#define uatomic_sub_return(addr, v)	uatomic_add_return((addr), -(v))
#define uatomic_sub(addr, v)		uatomic_add((addr), -(v))

#ifndef uatomic_inc
#define uatomic_inc(addr)		uatomic_add((addr), 1)
#endif

#ifndef uatomic_dec
#define uatomic_dec(addr)		uatomic_add((addr), -1)
#endif
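
/*
 * Example: a simple reference count built on the helpers above. Note
 * that uatomic_add(), uatomic_inc() and uatomic_dec() discard the
 * resulting value, so a release path that must observe the new count
 * should use uatomic_add_return(). Sketch only; "refcount" and
 * "free_resource" are hypothetical, not part of this header:
 *
 *	static long refcount;
 *
 *	uatomic_inc(&refcount);				// acquire a reference
 *	if (uatomic_add_return(&refcount, -1) == 0)	// drop a reference
 *		free_resource();			// last user: clean up
 */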

#ifdef __cplusplus
}
#endif

#endif /* _URCU_UATOMIC_GENERIC_H */