/*
 * Atomically add 'val' to the 32-bit word at 'addr'.
 *
 * Classic s390 compare-and-swap retry loop:
 *   - load the current value of *addr into %0 (old),
 *   - copy it to %2 (result) and add 'val',
 *   - CS stores %2 into *addr only if *addr still equals %0;
 *     on mismatch CS reloads %0 with the value it observed,
 *   - "brc 4,0b" branches back on condition code 1 (compare
 *     unequal), i.e. when another CPU changed *addr concurrently.
 *
 * "memory" + "cc" clobbers: acts as a compiler barrier and tells
 * GCC the condition code is destroyed.
 */
void uatomic_add_32(volatile unsigned int *addr, unsigned int val)
{
	unsigned int result, old;

	__asm__ __volatile__(
		" l %0, %1\n"
		"0: lr %2, %0\n"
		" ar %2, %3\n"
		" cs %0,%2,%1\n"
		" brc 4,0b\n"
		: "=&r"(old), "+m" (*addr),
		  "=&r"(result)
		: "r"(val)
		: "memory", "cc");
}
-
-#if (BITS_PER_LONG == 64)
-
/*
 * Atomically add 'val' to the 64-bit word at 'addr'.
 *
 * 64-bit twin of uatomic_add_32: same load / copy-add / CSG retry
 * loop, using the 64-bit forms (lg/lgr/agr/csg).  "brc 4,0b"
 * retries while CSG reports the comparison failed because another
 * CPU updated *addr between the load and the swap.
 * Only compiled on 64-bit builds (guarded by BITS_PER_LONG == 64).
 */
static inline __attribute__((always_inline))
void uatomic_add_64(volatile unsigned long *addr, unsigned long val)
{
	unsigned long result, old;

	__asm__ __volatile__(
		" lg %0, %1\n"
		"0: lgr %2, %0\n"
		" agr %2, %3\n"
		" csg %0,%2,%1\n"
		" brc 4,0b\n"
		: "=&r"(old), "+m" (*addr),
		  "=&r"(result)
		: "r"(val)
		: "memory", "cc");
}
-
-#endif
-
/*
 * Size-dispatching atomic add: route to the 4- or 8-byte helper
 * according to 'len' (the size of the target object, supplied by
 * the uatomic_add() macro).  Any other size is a caller bug and
 * traps immediately via an invalid opcode so it cannot go
 * unnoticed at runtime.
 */
static inline __attribute__((always_inline))
void _uatomic_add(void *addr, unsigned long val, int len)
{
	if (len == 4) {
		uatomic_add_32(addr, val);
		return;
	}
#if (BITS_PER_LONG == 64)
	if (len == 8) {
		uatomic_add_64(addr, val);
		return;
	}
#endif
	/* Unsupported operand size: execute an invalid opcode. */
	__asm__ __volatile__(".long 0xd00d00");
}
-
/*
 * uatomic_add(addr, val): type-generic atomic add.  Selects the
 * 4- or 8-byte implementation from sizeof(*(addr)) at compile
 * time; unsupported sizes trap inside _uatomic_add().
 */
#define uatomic_add(addr, val) \
	_uatomic_add((addr), (unsigned long)(val), sizeof(*(addr)))
-
/*
 * 32-bit atomic compare-and-swap: if *addr equals 'old', store
 * 'new' into *addr.  Returns the value *addr held before the
 * operation — equal to 'old' exactly when the swap succeeded.
 *
 * A single CS instruction suffices (no retry loop): CS is atomic,
 * and on mismatch it loads the observed value into the first
 * operand, which is why 'old' uses the read-write "+r" constraint
 * and can be returned directly.
 */
static inline __attribute__((always_inline))
unsigned int uatomic_cmpxchg_32(volatile unsigned int *addr, unsigned int old,
				unsigned int new)
{
	__asm__ __volatile__(
		" cs %0,%2,%1\n"
		: "+r"(old), "+m"(*addr)
		: "r"(new)
		: "memory", "cc");

	return old;
}
-
-#if (BITS_PER_LONG == 64)
-
/*
 * 64-bit atomic compare-and-swap: if *addr equals 'old', store
 * 'new' into *addr.  Returns the prior value of *addr (== 'old'
 * on success).  Same single-instruction scheme as the 32-bit
 * variant, using CSG; on mismatch CSG deposits the observed value
 * in 'old' ("+r"), which is returned to the caller.
 * Only compiled on 64-bit builds (guarded by BITS_PER_LONG == 64).
 */
static inline __attribute__((always_inline))
unsigned long uatomic_cmpxchg_64(volatile unsigned long *addr,
				 unsigned long old, unsigned long new)
{
	__asm__ __volatile__(
		" csg %0,%2,%1\n"
		: "+r"(old), "+m"(*addr)
		: "r"(new)
		: "memory", "cc");

	return old;
}
-
-#endif
-