#include <urcu/compiler.h>
#include <urcu/system.h>
+#ifdef __cplusplus
+extern "C" {
+#endif
+
#ifndef __SIZEOF_LONG__
#ifdef __s390x__
#define __SIZEOF_LONG__ 8
#define BITS_PER_LONG (__SIZEOF_LONG__ * 8)
#endif
/*
 * gcc >= 3.3 supports the single-register ("Q") memory operand constraint
 * on s390; older compilers must fall back to base+displacement addressing
 * built from a separate address register.
 */
#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
#define COMPILER_HAVE_SHORT_MEM_OPERAND
#endif

/*
 * MEMOP assembler operand rules:
 * - op refers to the MEMOP_IN operand
 * - MEMOP_IN can expand to more than a single operand. Use it at the end of
 *   the operand list only.
 */

#ifdef COMPILER_HAVE_SHORT_MEM_OPERAND

#define MEMOP_OUT(addr)	"=Q" (*(addr))
#define MEMOP_IN(addr)	"Q" (*(addr))
#define MEMOP_REF(op)	#op		/* op refers to the MEMOP_IN operand */

#else /* !COMPILER_HAVE_SHORT_MEM_OPERAND */

/*
 * No short memory operand: pass the address in a register ("a") and name
 * the memory location explicitly so gcc tracks the dependency ("m").
 * MEMOP_IN therefore expands to TWO operands here.
 */
#define MEMOP_OUT(addr)	"=m" (*(addr))
#define MEMOP_IN(addr)	"a" (addr), "m" (*(addr))
#define MEMOP_REF(op)	"0(" #op ")"	/* op refers to the MEMOP_IN operand */

#endif /* !COMPILER_HAVE_SHORT_MEM_OPERAND */
+
/*
 * Oversized dummy type used only through __hp() casts in the asm memory
 * operands below, so gcc treats the operand as covering the full accessed
 * location regardless of the declared type of `addr`.
 */
struct __uatomic_dummy {
	unsigned long v[10];
};
#define __hp(x)	((struct __uatomic_dummy *)(x))
+
/*
 * Plain atomic store/load of a shared variable, delegated to the
 * STORE_SHARED()/LOAD_SHARED() primitives (urcu/system.h).
 * NOTE(review): no memory-barrier semantics are added here beyond what
 * those primitives provide — confirm against urcu/system.h if ordering
 * guarantees matter at a call site.
 */
#define uatomic_set(addr, v) STORE_SHARED(*(addr), (v))
#define uatomic_read(addr) LOAD_SHARED(*(addr))
/* xchg */
+
+static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(volatile void *addr, unsigned long val, int len)
{
switch (len) {
case 4:
+ {
unsigned int old_val;
__asm__ __volatile__(
- "0: cs %0,%2,%1\n"
+ "0: cs %0,%2," MEMOP_REF(%3) "\n"
" brc 4,0b\n"
- : "=&r"(old_val), "=m" (*addr)
- : "r"(val), "m" (*addr)
+ : "=&r" (old_val), MEMOP_OUT (__hp(addr))
+ : "r" (val), MEMOP_IN (__hp(addr))
: "memory", "cc");
+ return old_val;
+ }
#if (BITS_PER_LONG == 64)
case 8:
+ {
unsigned long old_val;
__asm__ __volatile__(
- "0: csg %0,%2,%1\n"
+ "0: csg %0,%2," MEMOP_REF(%3) "\n"
" brc 4,0b\n"
- : "=&r"(old_val), "=m" (*addr)
- : "r"(val), "m" (*addr)
+ : "=&r" (old_val), MEMOP_OUT (__hp(addr))
+ : "r" (val), MEMOP_IN (__hp(addr))
: "memory", "cc");
+ return old_val;
+ }
#endif
default:
__asm__ __volatile__(".long 0xd00d00");
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
- unsigned long new, int len)
+ unsigned long _new, int len)
{
switch (len) {
case 4:
+ {
unsigned int old_val = (unsigned int)old;
__asm__ __volatile__(
- " cs %0,%2,%1\n"
- : "+r"(old_val), "+m"(*addr)
- : "r"(new)
+ " cs %0,%2," MEMOP_REF(%3) "\n"
+ : "+r" (old_val), MEMOP_OUT (__hp(addr))
+ : "r" (_new), MEMOP_IN (__hp(addr))
: "memory", "cc");
return old_val;
+ }
#if (BITS_PER_LONG == 64)
case 8:
+ {
__asm__ __volatile__(
- " csg %0,%2,%1\n"
- : "+r"(old), "+m"(*addr)
- : "r"(new)
+ " csg %0,%2," MEMOP_REF(%3) "\n"
+ : "+r" (old), MEMOP_OUT (__hp(addr))
+ : "r" (_new), MEMOP_IN (__hp(addr))
: "memory", "cc");
return old;
+ }
#endif
default:
__asm__ __volatile__(".long 0xd00d00");
return 0;
}
/*
 * Type-generic compare-and-swap wrapper: dispatches on sizeof(*(addr))
 * and casts the returned previous value back to the pointed-to type.
 * `_new` (not `new`) keeps the macro usable from C++.
 */
#define uatomic_cmpxchg(addr, old, _new)				\
	(__typeof__(*(addr))) _uatomic_cmpxchg((addr),			\
					       (unsigned long)(old),	\
					       (unsigned long)(_new),	\
					       sizeof(*(addr)))

/* uatomic_add_return */

#define compat_uatomic_cmpxchg(ptr, old, _new)	uatomic_cmpxchg(ptr, old, _new)
+#ifdef __cplusplus
+}
+#endif
+
#endif /* _URCU_UATOMIC_ARCH_S390_H */