convert from svn repository: remove tags directory
[lttv.git] / trunk / obsolete / ltt-usertrace / ltt / atomic-ppc.h
1 /*
2 * PowerPC atomic operations
3 */
4
5 #ifndef _ASM_PPC_ATOMIC_H_
6 #define _ASM_PPC_ATOMIC_H_
7
8 #ifdef __cplusplus
9 extern "C" {
10 #endif
11
/*
 * Atomic counter type: a plain int wrapped in a struct so it can only be
 * manipulated through the accessors below.  'volatile' forces the compiler
 * to reload/restore the value on every access.
 */
typedef struct { volatile int counter; } atomic_t;

/* Static initializer: atomic_t a = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)	{ (i) }

/* Plain (non-locked) read and write of the counter. */
#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))

/* Defined elsewhere; clears the bits in 'mask' from *addr. */
extern void atomic_clear_mask(unsigned long mask, unsigned long *addr);
20
/*
 * SMP memory-barrier instructions are deliberately compiled out here:
 * the '#if 0' keeps only the empty definitions, because (per the original
 * note) LTT only operates on one CPU at a time, so the sync/isync
 * ordering instructions are unnecessary overhead in this build.
 */
#if 0 // We only do operation on one CPU at a time (LTT)
#define SMP_SYNC	"sync"
#define SMP_ISYNC	"\n\tisync"
#else
#define SMP_SYNC	""
#define SMP_ISYNC
#endif
28
/* Erratum #77 on the 405 means we need a sync or dcbt before every stwcx.
 * The old ATOMIC_SYNC_FIX covered some but not all of this.
 * When CONFIG_IBM405_ERR77 is set this expands to a 'dcbt ra,rb'
 * instruction string fragment; otherwise it expands to nothing.
 */
#ifdef CONFIG_IBM405_ERR77
#define PPC405_ERR77(ra,rb)	"dcbt	" #ra "," #rb ";"
#else
#define PPC405_ERR77(ra,rb)
#endif
37
/*
 * atomic_add - atomically add 'a' to v->counter; no return value.
 *
 * Classic lwarx/stwcx. retry loop: load-and-reserve the counter, add,
 * then store-conditional; 'bne- 1b' branches back and retries if the
 * reservation was lost.  PPC405_ERR77 inserts a dcbt workaround before
 * stwcx. on affected chips.  The "=m"/"m" constraints tell the compiler
 * v->counter is read and written; only condition codes are clobbered.
 */
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;	/* scratch: holds the updated counter value */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}
52
/*
 * atomic_add_return - atomically add 'a' to v->counter and return the
 * new value.
 *
 * Same lwarx/stwcx. retry loop as atomic_add.  SMP_ISYNC would order
 * subsequent loads on SMP, but expands to nothing in this build (see the
 * '#if 0' block above).  The "memory" clobber still makes this a
 * compiler barrier.
 */
static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;	/* holds the updated counter value, returned below */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	SMP_ISYNC
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
70
71 #define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
72
/*
 * atomic_sub - atomically subtract 'a' from v->counter; no return value.
 *
 * Mirror of atomic_add using 'subf' (subtract-from: %0 = %0 - %2) inside
 * the same lwarx/stwcx. retry loop.
 */
static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;	/* scratch: holds the updated counter value */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}
87
/*
 * atomic_sub_return - atomically subtract 'a' from v->counter and return
 * the new value.
 *
 * Same structure as atomic_add_return; SMP_ISYNC is empty in this build,
 * the "memory" clobber still acts as a compiler barrier.
 */
static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;	/* holds the updated counter value, returned below */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	SMP_ISYNC
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
105
/*
 * atomic_inc - atomically increment v->counter by 1; no return value.
 *
 * Uses 'addic %0,%0,1' (add immediate) inside the lwarx/stwcx. retry
 * loop; no separate addend register is needed, so the constraint list is
 * shorter than atomic_add's.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	int t;	/* scratch: holds the updated counter value */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}
120
/*
 * atomic_inc_return - atomically increment v->counter and return the
 * new value.
 *
 * Same retry loop as atomic_inc; SMP_ISYNC is empty in this build, and
 * the "memory" clobber makes this a compiler barrier.
 */
static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;	/* holds the updated counter value, returned below */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	SMP_ISYNC
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
138
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns true if the result is zero,
 * or false for all other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
148
/*
 * atomic_dec - atomically decrement v->counter by 1; no return value.
 *
 * Uses 'addic %0,%0,-1' inside the usual lwarx/stwcx. retry loop.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	int t;	/* scratch: holds the updated counter value */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	/* NOTE(review): the trailing '\' below is a stray line continuation
	 * not present in the sibling helpers; it is harmless because the
	 * adjacent string literals concatenate either way. */
	PPC405_ERR77(0,%2)\
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}
163
/*
 * atomic_dec_return - atomically decrement v->counter and return the
 * new value.
 *
 * Same retry loop as atomic_dec; SMP_ISYNC is empty in this build, and
 * the "memory" clobber makes this a compiler barrier.
 */
static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;	/* holds the updated counter value, returned below */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	SMP_ISYNC
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
181
/* True if v->counter is zero after atomically subtracting 'a' from it. */
#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
/* True if v->counter is zero after atomically decrementing it. */
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)
184
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 *
 * Implementation: 'addic.' computes the decrement and sets condition
 * codes; 'blt- 2f' skips the store-conditional when the result would be
 * negative, leaving *v unchanged but still returning the (negative)
 * decremented value in t.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;	/* old value minus 1 (stored back only if >= 0) */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	SMP_ISYNC
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
208
/*
 * Memory barriers around atomic inc/dec.  Since SMP_SYNC is "" in this
 * build, these reduce to pure compiler barriers (empty asm with a
 * "memory" clobber) rather than emitting a hardware 'sync'.
 *
 * NOTE(review): '__MB' is a reserved identifier (leading double
 * underscore belongs to the implementation); left as-is because renaming
 * a header-visible macro could break external users — confirm no callers
 * before changing.
 */
#define __MB	__asm__ __volatile__ (SMP_SYNC : : : "memory")
#define smp_mb__before_atomic_dec()	__MB
#define smp_mb__after_atomic_dec()	__MB
#define smp_mb__before_atomic_inc()	__MB
#define smp_mb__after_atomic_inc()	__MB
214
215 #ifdef __cplusplus
216 } /* end of extern "C" */
217 #endif
218
219 #endif /* _ASM_PPC_ATOMIC_H_ */
This page took 0.032803 seconds and 4 git commands to generate.