#ifndef _URCU_ARCH_UATOMIC_X86_H
#define _URCU_ARCH_UATOMIC_X86_H

/*
 * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics. All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009 Mathieu Desnoyers
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */

#include <urcu/compiler.h>
#include <urcu/system.h>

#define UATOMIC_HAS_ATOMIC_BYTE
#define UATOMIC_HAS_ATOMIC_SHORT
/*
 * Derived from AO_compare_and_swap() and AO_test_and_set_full().
 */

struct __uatomic_dummy {
	unsigned long v[10];
};
#define __hp(x)	((struct __uatomic_dummy *)(x))
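/*
 * The dummy-structure cast performed by __hp() gives the memory asm
 * operands below a type large enough to cover any access width handled
 * in this file, so the compiler treats the whole addressed object as
 * touched by the asm regardless of the actual operand size.
 */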
#define _uatomic_set(addr, v)	CMM_STORE_SHARED(*(addr), (v))

/* cmpxchg */

static inline __attribute__((always_inline))
unsigned long __uatomic_cmpxchg(void *addr, unsigned long old,
				unsigned long _new, int len)
{
	switch (len) {
	case 1:
	{
		unsigned char result = old;

		__asm__ __volatile__(
		"lock; cmpxchgb %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "q"((unsigned char)_new)
			: "memory");
		return result;
	}
	case 2:
	{
		unsigned short result = old;

		__asm__ __volatile__(
		"lock; cmpxchgw %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "r"((unsigned short)_new)
			: "memory");
		return result;
	}
	case 4:
	{
		unsigned int result = old;

		__asm__ __volatile__(
		"lock; cmpxchgl %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "r"((unsigned int)_new)
			: "memory");
		return result;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long result = old;

		__asm__ __volatile__(
		"lock; cmpxchgq %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "r"((unsigned long)_new)
			: "memory");
		return result;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
	return 0;
}
#define _uatomic_cmpxchg(addr, old, _new)				      \
	((__typeof__(*(addr))) __uatomic_cmpxchg((addr), (unsigned long)(old),\
						(unsigned long)(_new),	      \
						sizeof(*(addr))))
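/*
 * Usage sketch (illustrative only; "counter" is a hypothetical variable):
 * uatomic_cmpxchg() returns the value that was in *addr before the
 * operation, so the update succeeded iff the return value equals the
 * expected old value.
 *
 *	unsigned long old, newval;
 *	do {
 *		old = CMM_LOAD_SHARED(counter);
 *		newval = old * 2;
 *	} while (uatomic_cmpxchg(&counter, old, newval) != old);
 */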

/* xchg */

static inline __attribute__((always_inline))
unsigned long __uatomic_exchange(void *addr, unsigned long val, int len)
{
	/* Note: the "xchg" instruction does not need a "lock" prefix,
	 * since the lock is implied when the destination operand is in
	 * memory. */
	switch (len) {
	case 1:
	{
		unsigned char result;
		__asm__ __volatile__(
		"xchgb %0, %1"
			: "=q"(result), "+m"(*__hp(addr))
			: "0" ((unsigned char)val)
			: "memory");
		return result;
	}
	case 2:
	{
		unsigned short result;
		__asm__ __volatile__(
		"xchgw %0, %1"
			: "=r"(result), "+m"(*__hp(addr))
			: "0" ((unsigned short)val)
			: "memory");
		return result;
	}
	case 4:
	{
		unsigned int result;
		__asm__ __volatile__(
		"xchgl %0, %1"
			: "=r"(result), "+m"(*__hp(addr))
			: "0" ((unsigned int)val)
			: "memory");
		return result;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long result;
		__asm__ __volatile__(
		"xchgq %0, %1"
			: "=r"(result), "+m"(*__hp(addr))
			: "0" ((unsigned long)val)
			: "memory");
		return result;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
	return 0;
}
#define _uatomic_xchg(addr, v)						      \
	((__typeof__(*(addr))) __uatomic_exchange((addr), (unsigned long)(v), \
						sizeof(*(addr))))
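/*
 * Usage sketch (illustrative only; "list_head" is a hypothetical variable):
 * uatomic_xchg() atomically stores the new value and returns the previous
 * one, e.g. to detach a lock-free list in a single step:
 *
 *	struct node *batch = uatomic_xchg(&list_head, NULL);
 */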

/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long __uatomic_add_return(void *addr, unsigned long val,
				   int len)
{
	switch (len) {
	case 1:
	{
		unsigned char result = val;

		__asm__ __volatile__(
		"lock; xaddb %1, %0"
			: "+m"(*__hp(addr)), "+q" (result)
			:
			: "memory");
		return result + (unsigned char)val;
	}
	case 2:
	{
		unsigned short result = val;

		__asm__ __volatile__(
		"lock; xaddw %1, %0"
			: "+m"(*__hp(addr)), "+r" (result)
			:
			: "memory");
		return result + (unsigned short)val;
	}
	case 4:
	{
		unsigned int result = val;

		__asm__ __volatile__(
		"lock; xaddl %1, %0"
			: "+m"(*__hp(addr)), "+r" (result)
			:
			: "memory");
		return result + (unsigned int)val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long result = val;

		__asm__ __volatile__(
		"lock; xaddq %1, %0"
			: "+m"(*__hp(addr)), "+r" (result)
			:
			: "memory");
		return result + (unsigned long)val;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
	return 0;
}
#define _uatomic_add_return(addr, v)					      \
	((__typeof__(*(addr))) __uatomic_add_return((addr),		      \
						(unsigned long)(v),	      \
						sizeof(*(addr))))
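/*
 * Note on semantics: "xadd" leaves the *previous* value in its register
 * operand, so the helpers above add "val" back in and
 * uatomic_add_return() yields the value of the location *after* the
 * addition. Illustrative example ("counter" and last_user_cleanup() are
 * hypothetical):
 *
 *	if (uatomic_add_return(&counter, -1) == 0)
 *		last_user_cleanup();
 */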

/* uatomic_and */

static inline __attribute__((always_inline))
void __uatomic_and(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 1:
	{
		__asm__ __volatile__(
		"lock; andb %1, %0"
			: "=m"(*__hp(addr))
			: "iq" ((unsigned char)val)
			: "memory");
		return;
	}
	case 2:
	{
		__asm__ __volatile__(
		"lock; andw %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned short)val)
			: "memory");
		return;
	}
	case 4:
	{
		__asm__ __volatile__(
		"lock; andl %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned int)val)
			: "memory");
		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__(
		"lock; andq %1, %0"
			: "=m"(*__hp(addr))
			: "er" ((unsigned long)val)
			: "memory");
		return;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
	return;
}
#define _uatomic_and(addr, v)						      \
	(__uatomic_and((addr), (unsigned long)(v), sizeof(*(addr))))

/* uatomic_or */

static inline __attribute__((always_inline))
void __uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 1:
	{
		__asm__ __volatile__(
		"lock; orb %1, %0"
			: "=m"(*__hp(addr))
			: "iq" ((unsigned char)val)
			: "memory");
		return;
	}
	case 2:
	{
		__asm__ __volatile__(
		"lock; orw %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned short)val)
			: "memory");
		return;
	}
	case 4:
	{
		__asm__ __volatile__(
		"lock; orl %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned int)val)
			: "memory");
		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__(
		"lock; orq %1, %0"
			: "=m"(*__hp(addr))
			: "er" ((unsigned long)val)
			: "memory");
		return;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
	return;
}
#define _uatomic_or(addr, v)						      \
	(__uatomic_or((addr), (unsigned long)(v), sizeof(*(addr))))

/* uatomic_add */

static inline __attribute__((always_inline))
void __uatomic_add(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 1:
	{
		__asm__ __volatile__(
		"lock; addb %1, %0"
			: "=m"(*__hp(addr))
			: "iq" ((unsigned char)val)
			: "memory");
		return;
	}
	case 2:
	{
		__asm__ __volatile__(
		"lock; addw %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned short)val)
			: "memory");
		return;
	}
	case 4:
	{
		__asm__ __volatile__(
		"lock; addl %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned int)val)
			: "memory");
		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__(
		"lock; addq %1, %0"
			: "=m"(*__hp(addr))
			: "er" ((unsigned long)val)
			: "memory");
		return;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
	return;
}
#define _uatomic_add(addr, v)						      \
	(__uatomic_add((addr), (unsigned long)(v), sizeof(*(addr))))

/* uatomic_inc */

static inline __attribute__((always_inline))
void __uatomic_inc(void *addr, int len)
{
	switch (len) {
	case 1:
	{
		__asm__ __volatile__(
		"lock; incb %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
	case 2:
	{
		__asm__ __volatile__(
		"lock; incw %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
	case 4:
	{
		__asm__ __volatile__(
		"lock; incl %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__(
		"lock; incq %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
	return;
}
#define _uatomic_inc(addr)	(__uatomic_inc((addr), sizeof(*(addr))))

/* uatomic_dec */

static inline __attribute__((always_inline))
void __uatomic_dec(void *addr, int len)
{
	switch (len) {
	case 1:
	{
		__asm__ __volatile__(
		"lock; decb %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
	case 2:
	{
		__asm__ __volatile__(
		"lock; decw %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
	case 4:
	{
		__asm__ __volatile__(
		"lock; decl %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__(
		"lock; decq %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
	return;
}
#define _uatomic_dec(addr)	(__uatomic_dec((addr), sizeof(*(addr))))

#if ((CAA_BITS_PER_LONG != 64) && defined(CONFIG_RCU_COMPAT_ARCH))
extern int __rcu_cas_avail;
extern int __rcu_cas_init(void);
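/*
 * UATOMIC_COMPAT() below selects, at run time, between the native
 * cmpxchg-based _uatomic_* implementations and the compat_uatomic_*
 * fallbacks for CPUs lacking the required instructions: __rcu_cas_avail
 * > 0 means the native operations can be used, < 0 means detection has
 * not run yet (so __rcu_cas_init() is called first), and 0 means the
 * fallback must be used.
 */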
#define UATOMIC_COMPAT(insn)						      \
	((likely(__rcu_cas_avail > 0))					      \
	? (_uatomic_##insn)						      \
		: ((unlikely(__rcu_cas_avail < 0)			      \
			? ((__rcu_cas_init() > 0)			      \
				? (_uatomic_##insn)			      \
				: (compat_uatomic_##insn))		      \
			: (compat_uatomic_##insn))))

extern unsigned long _compat_uatomic_set(void *addr,
					 unsigned long _new, int len);
#define compat_uatomic_set(addr, _new)					      \
	((__typeof__(*(addr))) _compat_uatomic_set((addr),		      \
						(unsigned long)(_new),	      \
						sizeof(*(addr))))

extern unsigned long _compat_uatomic_xchg(void *addr,
					  unsigned long _new, int len);
#define compat_uatomic_xchg(addr, _new)					      \
	((__typeof__(*(addr))) _compat_uatomic_xchg((addr),		      \
						(unsigned long)(_new),	      \
						sizeof(*(addr))))

extern unsigned long _compat_uatomic_cmpxchg(void *addr, unsigned long old,
					     unsigned long _new, int len);
#define compat_uatomic_cmpxchg(addr, old, _new)				      \
	((__typeof__(*(addr))) _compat_uatomic_cmpxchg((addr),		      \
						(unsigned long)(old),	      \
						(unsigned long)(_new),	      \
						sizeof(*(addr))))

extern unsigned long _compat_uatomic_and(void *addr,
					 unsigned long _new, int len);
#define compat_uatomic_and(addr, v)					      \
	((__typeof__(*(addr))) _compat_uatomic_and((addr),		      \
						(unsigned long)(v),	      \
						sizeof(*(addr))))

extern unsigned long _compat_uatomic_or(void *addr,
					unsigned long _new, int len);
#define compat_uatomic_or(addr, v)					      \
	((__typeof__(*(addr))) _compat_uatomic_or((addr),		      \
						(unsigned long)(v),	      \
						sizeof(*(addr))))

extern unsigned long _compat_uatomic_add_return(void *addr,
						unsigned long _new, int len);
#define compat_uatomic_add_return(addr, v)				      \
	((__typeof__(*(addr))) _compat_uatomic_add_return((addr),	      \
						(unsigned long)(v),	      \
						sizeof(*(addr))))

#define compat_uatomic_add(addr, v)					      \
		((void)compat_uatomic_add_return((addr), (v)))
#define compat_uatomic_inc(addr)					      \
		(compat_uatomic_add((addr), 1))
#define compat_uatomic_dec(addr)					      \
		(compat_uatomic_add((addr), -1))

#else
#define UATOMIC_COMPAT(insn)	(_uatomic_##insn)
#endif

/* Read is atomic even in compat mode */
#define uatomic_set(addr, v)			\
		UATOMIC_COMPAT(set(addr, v))

#define uatomic_cmpxchg(addr, old, _new)	\
		UATOMIC_COMPAT(cmpxchg(addr, old, _new))
#define uatomic_xchg(addr, v)			\
		UATOMIC_COMPAT(xchg(addr, v))
#define uatomic_and(addr, v)			\
		UATOMIC_COMPAT(and(addr, v))
#define uatomic_or(addr, v)			\
		UATOMIC_COMPAT(or(addr, v))
#define uatomic_add_return(addr, v)		\
		UATOMIC_COMPAT(add_return(addr, v))

#define uatomic_add(addr, v)	UATOMIC_COMPAT(add(addr, v))
#define uatomic_inc(addr)	UATOMIC_COMPAT(inc(addr))
#define uatomic_dec(addr)	UATOMIC_COMPAT(dec(addr))
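/*
 * Usage sketch (illustrative only; "refcount" and release_object() are
 * hypothetical): the uatomic_* macros operate on aligned 1-, 2-, 4- and
 * (on 64-bit) 8-byte integer or pointer variables. The read-modify-write
 * operations (cmpxchg, xchg, add_return, add, inc, dec, and, or) also
 * act as full memory barriers on x86, whereas uatomic_set() is a plain
 * shared store.
 *
 *	static unsigned long refcount;
 *
 *	uatomic_set(&refcount, 1);
 *	uatomic_inc(&refcount);
 *	if (uatomic_add_return(&refcount, -1) == 0)
 *		release_object();
 */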

#include <urcu/uatomic_generic.h>

#endif /* _URCU_ARCH_UATOMIC_X86_H */