add ppc atomic
[lttv.git] / ltt-usertrace / ltt / ltt-usertrace-ppc.h
1 /*
2 * Copyright (C) 1999 Cort Dougan <cort@cs.nmt.edu>
3 */
4 #ifndef __LTT_USERTRACE_PPC_H
5 #define __LTT_USERTRACE_PPC_H
6
/*
 * Atomically exchange the 32-bit word at *p with val and return the
 * previous value.
 *
 * Uses the PPC load-reserved / store-conditional pair: lwarx takes a
 * reservation on the word, stwcx. stores only if the reservation still
 * holds, and bne- loops back to retry if another processor touched the
 * reservation granule in between.
 *
 * NOTE(review): unlike __cmpxchg_u32 below, there is no CONFIG_SMP
 * "sync" after the successful store — confirm callers do not rely on
 * xchg() acting as a full memory barrier on SMP.
 */
static __inline__ unsigned long
xchg_u32(volatile void *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__ ("\n\
1:	lwarx	%0,0,%2 \n"
"	stwcx.	%3,0,%2 \n\
	bne-	1b"
	: "=&r" (prev), "=m" (*(volatile unsigned long *)p)
	: "r" (p), "r" (val), "m" (*(volatile unsigned long *)p)
	: "cc", "memory");

	return prev;
}
22
/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid xchg().
 */
extern void __xchg_called_with_bad_pointer(void);

/*
 * xchg(ptr, x): type-preserving atomic exchange.  Dispatches on
 * sizeof(*ptr) via __xchg(); unsupported sizes fail at link time
 * through __xchg_called_with_bad_pointer().
 * tas(ptr): test-and-set — atomically store 1, return the old value.
 */
#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
#define tas(ptr) (xchg((ptr),1))
31
/*
 * Size-dispatching helper behind the xchg() macro.  Only 4-byte
 * objects are supported on 32-bit PPC; any other size resolves to the
 * deliberately-undefined __xchg_called_with_bad_pointer(), so the
 * mistake surfaces as a link-time error rather than silent corruption.
 */
static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
{
	if (size == 4)
		return (unsigned long) xchg_u32(ptr, x);
#if 0 /* xchg_u64 doesn't exist on 32-bit PPC */
	if (size == 8)
		return (unsigned long) xchg_u64(ptr, x);
#endif /* 0 */
	__xchg_called_with_bad_pointer();
	return x;
}
47
/*
 * Atomically exchange the pointer stored at *m with val, returning the
 * previous pointer.  Thin wrapper around xchg_u32 (pointers are 32 bits
 * on this target).
 *
 * Changed from "extern inline" to "static inline": under gnu89
 * semantics an extern inline in a header emits an external definition
 * in every translation unit that includes it, causing duplicate-symbol
 * link errors; static inline also matches every other helper in this
 * file.
 */
static inline void * xchg_ptr(void * m, void * val)
{
	return (void *) xchg_u32(m, (unsigned long) val);
}
52
53
#define __HAVE_ARCH_CMPXCHG	1

/*
 * Atomic compare-and-swap on a 32-bit word: if *p == old, store new;
 * either way return the value that was in *p before the operation
 * (callers check success with "prev == old").
 *
 * lwarx/cmpw loads the word under reservation and compares it with
 * old; on mismatch we branch straight to the exit label 2:.  On match,
 * stwcx. attempts the conditional store and bne- retries the whole
 * sequence if the reservation was lost.  Under CONFIG_SMP a "sync" is
 * issued after a successful store so the update is ordered with respect
 * to other processors.
 */
static __inline__ unsigned long
__cmpxchg_u32(volatile unsigned int *p, unsigned int old, unsigned int new)
{
	unsigned int prev;

	__asm__ __volatile__ ("\n\
1:	lwarx	%0,0,%2 \n\
	cmpw	0,%0,%3 \n\
	bne	2f \n"
"	stwcx.	%4,0,%2 \n\
	bne-	1b\n"
#ifdef CONFIG_SMP
"	sync\n"
#endif /* CONFIG_SMP */
"2:"
	: "=&r" (prev), "=m" (*p)
	: "r" (p), "r" (old), "r" (new), "m" (*p)
	: "cc", "memory");

	return prev;
}
77
/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg(). */
extern void __cmpxchg_called_with_bad_pointer(void);
81
/*
 * Size-dispatching helper behind the cmpxchg() macro.  Only 4-byte
 * objects are supported on 32-bit PPC; any other size resolves to the
 * deliberately-undefined __cmpxchg_called_with_bad_pointer(), turning
 * the mistake into a link-time error.
 */
static __inline__ unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
{
	if (size == 4)
		return __cmpxchg_u32(ptr, old, new);
#if 0 /* we don't have __cmpxchg_u64 on 32-bit PPC */
	if (size == 8)
		return __cmpxchg_u64(ptr, old, new);
#endif /* 0 */
	__cmpxchg_called_with_bad_pointer();
	return old;
}
96
/*
 * cmpxchg(ptr, o, n): type-preserving compare-and-swap.  Evaluates o
 * and n once into typed temporaries, dispatches on sizeof(*ptr) via
 * __cmpxchg(), and casts the result back to the pointed-to type.
 * Success is detected by the caller as "cmpxchg(...) == o".
 */
#define cmpxchg(ptr,o,n)						 \
  ({									 \
     __typeof__(*(ptr)) _o_ = (o);					 \
     __typeof__(*(ptr)) _n_ = (n);					 \
     (__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,		 \
				    (unsigned long)_n_, sizeof(*(ptr))); \
  })
104
105
/* CPU feature flag bit for the original PowerPC 601. */
#define CPU_FTR_601			0x00000100

/* NOTE(review): 1193180 Hz is the classic i8253 PIT rate from x86 —
 * confirm this value is actually meaningful on PPC or just carried
 * over as boilerplate. */
#define CLOCK_TICK_RATE	1193180 /* Underlying HZ */

/* Cycle counter value: the 64-bit PPC timebase. */
typedef uint64_t cycles_t;
111
/* On ppc64 this gets us the whole timebase; on ppc32 just the lower half */
static inline unsigned long get_tbl(void)
{
	unsigned long tbl;

	/* The 403GCX variant (mfspr 0x3dd) is kept for reference but
	 * disabled; the generic mftb is used unconditionally. */
//#if defined(CONFIG_403GCX)
//	asm volatile("mfspr %0, 0x3dd" : "=r" (tbl));
//#else
	asm volatile("mftb %0" : "=r" (tbl));
//#endif
	return tbl;
}
124
/* Read the upper 32 bits of the timebase (ppc32 only; mftbu). */
static inline unsigned int get_tbu(void)
{
	unsigned int tbu;

	/* The 403GCX variant (mfspr 0x3dc) is kept for reference but
	 * disabled; the generic mftbu is used unconditionally. */
//#if defined(CONFIG_403GCX)
//	asm volatile("mfspr %0, 0x3dc" : "=r" (tbu));
//#else
	asm volatile("mftbu %0" : "=r" (tbu));
//#endif
	return tbu;
}
136
137
#ifdef __powerpc64__
/* On ppc64 one timebase read returns all 64 bits.
 * NOTE(review): mftb() is not defined in this header — presumably
 * provided elsewhere for the 64-bit build; verify it is in scope. */
static inline uint64_t get_tb(void)
{
	return mftb();
}
#else
/*
 * On ppc32 the 64-bit timebase must be assembled from two 32-bit
 * reads.  Re-reading the upper half and retrying until it is stable
 * guards against the lower half wrapping (and carrying into the upper
 * half) between the two reads.
 */
static inline uint64_t get_tb(void)
{
	unsigned int tbhi, tblo, tbhi2;

	do {
		tbhi = get_tbu();
		tblo = get_tbl();
		tbhi2 = get_tbu();
	} while (tbhi != tbhi2);

	return ((uint64_t)tbhi << 32) | tblo;
}
#endif
157
158 static inline cycles_t get_cycles(void)
159 {
160 return get_tb();
161 }
162
163
164 #endif /* __LTT_USERTRACE_PPC_H */
This page took 0.033693 seconds and 5 git commands to generate.