update for powerpc
[lttv.git] / ltt-usertrace / ltt / ltt-usertrace-ppc.h
/*
 * Copyright (C) 1999 Cort Dougan <cort@cs.nmt.edu>
 */
#ifndef __LTT_USERTRACE_PPC_H
#define __LTT_USERTRACE_PPC_H

#include <stdint.h>	/* uint64_t, used by cycles_t and get_tb() */

/*
 * PPC405 erratum 77 workaround (a dcbt before the stwcx. on affected parts).
 * The kernel normally supplies this macro; in this standalone usertrace
 * header, fall back to a no-op if it has not been defined elsewhere.
 */
#ifndef PPC405_ERR77
#define PPC405_ERR77(ra,rb)
#endif

/* Atomically exchange the 32-bit value at p with val; returns the old value. */
static __inline__ unsigned long
xchg_u32(volatile void *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__ ("\n\
1:	lwarx	%0,0,%2 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%3,0,%2 \n\
	bne-	1b"
	: "=&r" (prev), "=m" (*(volatile unsigned long *)p)
	: "r" (p), "r" (val), "m" (*(volatile unsigned long *)p)
	: "cc", "memory");

	return prev;
}

/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid xchg().
 */
extern void __xchg_called_with_bad_pointer(void);

#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
#define tas(ptr) (xchg((ptr),1))

static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
{
	switch (size) {
	case 4:
		return (unsigned long) xchg_u32(ptr, x);
#if 0	/* xchg_u64 doesn't exist on 32-bit PPC */
	case 8:
		return (unsigned long) xchg_u64(ptr, x);
#endif	/* 0 */
	}
	__xchg_called_with_bad_pointer();
	return x;
}

static inline void * xchg_ptr(void * m, void * val)
{
	return (void *) xchg_u32(m, (unsigned long) val);
}
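
/*
 * Usage sketch (not part of the original header): a minimal test-and-set
 * spinlock built on tas() above.  The function names and the explicit
 * barrier placement are illustrative assumptions, not LTT API.
 */
static inline void example_spin_lock(volatile unsigned int *lock)
{
	/* tas() returns the previous value; keep retrying until it was 0. */
	while (tas(lock) != 0)
		;
	/* Keep the critical section from being speculated past the acquire. */
	__asm__ __volatile__ ("isync" : : : "memory");
}

static inline void example_spin_unlock(volatile unsigned int *lock)
{
	/* Make the critical section's accesses visible before releasing. */
	__asm__ __volatile__ ("sync" : : : "memory");
	*lock = 0;
}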

#define __HAVE_ARCH_CMPXCHG	1

/*
 * Atomically compare the 32-bit value at p with old and, if they match,
 * replace it with new; the value previously found at p is returned either way.
 */
static __inline__ unsigned long
__cmpxchg_u32(volatile unsigned int *p, unsigned int old, unsigned int new)
{
	unsigned int prev;

	__asm__ __volatile__ ("\n\
1:	lwarx	%0,0,%2 \n\
	cmpw	0,%0,%3 \n\
	bne	2f \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%4,0,%2 \n\
	bne-	1b\n"
#ifdef CONFIG_SMP
"	sync\n"
#endif /* CONFIG_SMP */
"2:"
	: "=&r" (prev), "=m" (*p)
	: "r" (p), "r" (old), "r" (new), "m" (*p)
	: "cc", "memory");

	return prev;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg(). */
extern void __cmpxchg_called_with_bad_pointer(void);

static __inline__ unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
{
	switch (size) {
	case 4:
		return __cmpxchg_u32(ptr, old, new);
#if 0	/* we don't have __cmpxchg_u64 on 32-bit PPC */
	case 8:
		return __cmpxchg_u64(ptr, old, new);
#endif	/* 0 */
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#define cmpxchg(ptr,o,n)						\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,	\
				       (unsigned long)_n_, sizeof(*(ptr))); \
})
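
/*
 * Usage sketch (not part of the original header): lock-free increment of a
 * shared 32-bit counter using the cmpxchg() macro above.  The function name
 * is illustrative only.
 */
static inline unsigned int example_counter_inc(volatile unsigned int *ctr)
{
	unsigned int old, next;

	do {
		old = *ctr;
		next = old + 1;
		/* cmpxchg() returns the value it found at *ctr; if another
		 * thread raced in between, retry with the fresh value. */
	} while (cmpxchg(ctr, old, next) != old);

	return next;
}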

#define CPU_FTR_601		0x00000100

#define CLOCK_TICK_RATE		1193180	/* Underlying HZ */

typedef uint64_t cycles_t;

/* On ppc64 this gets us the whole timebase; on ppc32 just the lower half */
static inline unsigned long get_tbl(void)
{
	unsigned long tbl;

//#if defined(CONFIG_403GCX)
//	asm volatile("mfspr %0, 0x3dd" : "=r" (tbl));
//#else
	asm volatile("mftb %0" : "=r" (tbl));
//#endif
	return tbl;
}

static inline unsigned int get_tbu(void)
{
	unsigned int tbu;

//#if defined(CONFIG_403GCX)
//	asm volatile("mfspr %0, 0x3dc" : "=r" (tbu));
//#else
	asm volatile("mftbu %0" : "=r" (tbu));
//#endif
	return tbu;
}

#ifdef __powerpc64__
static inline uint64_t get_tb(void)
{
	uint64_t tb;

	/* On ppc64 the mftb instruction reads the full 64-bit timebase. */
	asm volatile("mftb %0" : "=r" (tb));
	return tb;
}
#else
static inline uint64_t get_tb(void)
{
	unsigned int tbhi, tblo, tbhi2;

	/*
	 * On 32-bit PPC the timebase must be read in two halves; re-read the
	 * upper half until it is stable so a carry between the two reads
	 * cannot produce a torn value.
	 */
	do {
		tbhi = get_tbu();
		tblo = get_tbl();
		tbhi2 = get_tbu();
	} while (tbhi != tbhi2);

	return ((uint64_t)tbhi << 32) | tblo;
}
#endif

static inline cycles_t get_cycles(void)
{
	return get_tb();
}
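
/*
 * Usage sketch (not part of the original header): timing an interval with
 * get_cycles().  Converting ticks to time needs the timebase frequency, which
 * is platform specific (the "timebase" value reported in /proc/cpuinfo, not
 * CLOCK_TICK_RATE above); passing it as a parameter is an illustrative
 * assumption.
 */
static inline uint64_t example_cycles_to_ns(cycles_t start, cycles_t end,
		uint64_t timebase_hz)
{
	/* ns = elapsed timebase ticks * 1e9 / timebase frequency.
	 * The multiplication can overflow for very long intervals. */
	return (end - start) * 1000000000ULL / timebase_hz;
}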

#endif /* __LTT_USERTRACE_PPC_H */