0bc64be5 |
1 | /* |
2 | * PowerPC atomic operations |
3 | */ |
4 | |
5 | #ifndef _ASM_PPC_ATOMIC_H_ |
6 | #define _ASM_PPC_ATOMIC_H_ |
7 | |
/* Atomic counter.  'counter' is volatile so that atomic_read()/atomic_set()
 * compile to real loads/stores that the compiler will not cache or elide. */
typedef struct { volatile int counter; } atomic_t;
9 | |
10 | #ifdef __KERNEL__ |
11 | |
/* Static initializer: atomic_t a = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)	{ (i) }

/* Plain (non-read-modify-write) access.  These are single loads/stores;
 * the volatile qualifier on 'counter' provides the required access, not
 * any ordering. */
#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))

/* Atomically clear the bits in 'mask' at '*addr'; defined out of line. */
extern void atomic_clear_mask(unsigned long mask, unsigned long *addr);
18 | |
#ifdef CONFIG_SMP
/* SMP: SMP_SYNC is a full 'sync' barrier (used by __MB below);
 * SMP_ISYNC appends an 'isync' after the successful stwcx. in the
 * value-returning atomics. */
#define SMP_SYNC	"sync"
#define SMP_ISYNC	"\n\tisync"
#else
/* UP: no cross-CPU ordering needed; both expand to nothing. */
#define SMP_SYNC	""
#define SMP_ISYNC
#endif
26 | |
/* Erratum #77 on the 405 means we need a sync or dcbt before every stwcx.
 * The old ATOMIC_SYNC_FIX covered some but not all of this.
 */
#ifdef CONFIG_IBM405_ERR77
/* Workaround: emit a dcbt (data cache block touch) of the target address
 * immediately before each stwcx. */
#define PPC405_ERR77(ra,rb)	"dcbt	" #ra "," #rb ";"
#else
#define PPC405_ERR77(ra,rb)
#endif
35 | |
/*
 * atomic_add - add integer to atomic variable
 * @a: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @a to v->counter using a lwarx/stwcx. retry loop
 * (bne- 1b retries if the reservation was lost).  No value is returned
 * and no SMP_ISYNC barrier is emitted -- callers that need the result
 * or ordering use atomic_add_return().  The "=m"/"m" constraints on
 * v->counter tell the compiler the memory is read and written.
 */
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;	/* scratch register for the updated value */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}
50 | |
/*
 * atomic_add_return - add integer and return new value
 * @a: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @a to v->counter and returns the new value.  On SMP
 * an isync (SMP_ISYNC) follows the successful stwcx.; the "memory"
 * clobber additionally makes this a compiler barrier.
 */
static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;	/* holds the new counter value, returned to the caller */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	SMP_ISYNC
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
68 | |
/* True if v->counter is negative after atomically adding @a. */
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
70 | |
/*
 * atomic_sub - subtract integer from atomic variable
 * @a: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @a from v->counter (subf computes %0 - %2 with
 * operands reversed, i.e. counter - a) via a lwarx/stwcx. retry loop.
 * No return value and no barrier; see atomic_sub_return() for those.
 */
static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;	/* scratch register for the updated value */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}
85 | |
/*
 * atomic_sub_return - subtract integer and return new value
 * @a: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @a from v->counter and returns the new value,
 * with SMP_ISYNC ordering and a "memory" compiler barrier as in
 * atomic_add_return().
 */
static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;	/* holds the new counter value, returned to the caller */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	SMP_ISYNC
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
103 | |
/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments v->counter by 1 (addic = add immediate
 * carrying) via a lwarx/stwcx. retry loop.  No return value, no
 * barrier.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	int t;	/* scratch register for the updated value */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}
118 | |
/*
 * atomic_inc_return - increment and return new value
 * @v: pointer of type atomic_t
 *
 * Atomically increments v->counter by 1 and returns the new value,
 * with SMP_ISYNC ordering and a "memory" compiler barrier.
 */
static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;	/* holds the new counter value, returned to the caller */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	SMP_ISYNC
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
136 | |
137 | /* |
138 | * atomic_inc_and_test - increment and test |
139 | * @v: pointer of type atomic_t |
140 | * |
141 | * Atomically increments @v by 1 |
142 | * and returns true if the result is zero, or false for all |
143 | * other cases. |
144 | */ |
145 | #define atomic_inc_and_test(v) (atomic_inc_return(v) == 0) |
146 | |
/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements v->counter by 1 (addic with immediate -1) via
 * a lwarx/stwcx. retry loop.  No return value, no barrier.
 *
 * NOTE(review): the trailing backslash after PPC405_ERR77(0,%2) below
 * is a line splice onto the following string literal -- harmless, but
 * inconsistent with the sibling functions; consider removing it.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	int t;	/* scratch register for the updated value */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)\
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}
161 | |
/*
 * atomic_dec_return - decrement and return new value
 * @v: pointer of type atomic_t
 *
 * Atomically decrements v->counter by 1 and returns the new value,
 * with SMP_ISYNC ordering and a "memory" compiler barrier.
 */
static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;	/* holds the new counter value, returned to the caller */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	SMP_ISYNC
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
179 | |
/* True if the counter reached zero after the atomic subtract/decrement. */
#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)
182 | |
183 | /* |
184 | * Atomically test *v and decrement if it is greater than 0. |
185 | * The function returns the old value of *v minus 1. |
186 | */ |
187 | static __inline__ int atomic_dec_if_positive(atomic_t *v) |
188 | { |
189 | int t; |
190 | |
191 | __asm__ __volatile__( |
192 | "1: lwarx %0,0,%1 # atomic_dec_if_positive\n\ |
193 | addic. %0,%0,-1\n\ |
194 | blt- 2f\n" |
195 | PPC405_ERR77(0,%1) |
196 | " stwcx. %0,0,%1\n\ |
197 | bne- 1b" |
198 | SMP_ISYNC |
199 | "\n\ |
200 | 2:" : "=&r" (t) |
201 | : "r" (&v->counter) |
202 | : "cc", "memory"); |
203 | |
204 | return t; |
205 | } |
206 | |
/* Barrier hooks around atomic inc/dec: a full 'sync' on SMP (SMP_SYNC),
 * and on UP just a compiler barrier (SMP_SYNC is empty, but the "memory"
 * clobber remains). */
#define __MB	__asm__ __volatile__ (SMP_SYNC : : : "memory")
#define smp_mb__before_atomic_dec()	__MB
#define smp_mb__after_atomic_dec()	__MB
#define smp_mb__before_atomic_inc()	__MB
#define smp_mb__after_atomic_inc()	__MB
212 | |
213 | #endif /* __KERNEL__ */ |
214 | #endif /* _ASM_PPC_ATOMIC_H_ */ |