/*
 * PowerPC atomic operations
 */
4 | |
#ifndef _ASM_PPC_ATOMIC_H_
#define _ASM_PPC_ATOMIC_H_

/* Wrapping the counter in a struct makes the compiler reject plain
 * ints where an atomic_t is expected; volatile forces every
 * atomic_read/atomic_set to touch memory rather than a cached copy. */
typedef struct { volatile int counter; } atomic_t;

/* Static initializer, e.g.:  atomic_t v = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)	{ (i) }

/* Non-locked read/write; atomicity of a single aligned int access is
 * provided by the hardware, volatile only prevents compiler caching. */
#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))

/* Defined out of line elsewhere; clears 'mask' bits in *addr. */
extern void atomic_clear_mask(unsigned long mask, unsigned long *addr);
16 | |
/* This copy runs atomic ops on a single CPU only (LTT), so the SMP
 * memory barriers are compiled out: SMP_SYNC/SMP_ISYNC expand to
 * nothing.  Flip the #if to 1 to restore the sync/isync barriers. */
#if 0 // We only do operation on one CPU at a time (LTT)
#define SMP_SYNC	"sync"
#define SMP_ISYNC	"\n\tisync"
#else
#define SMP_SYNC	""
#define SMP_ISYNC
#endif

/* Erratum #77 on the 405 means we need a sync or dcbt before every stwcx.
 * The old ATOMIC_SYNC_FIX covered some but not all of this.
 * The macro pastes a "dcbt ra,rb;" instruction into the asm string
 * immediately before each stwcx. when building for the 405.
 */
#ifdef CONFIG_IBM405_ERR77
#define PPC405_ERR77(ra,rb)	"dcbt " #ra "," #rb ";"
#else
#define PPC405_ERR77(ra,rb)
#endif
33 | |
/*
 * Atomically add 'a' to v->counter; no value is returned and no
 * memory barrier is implied (see atomic_add_return for the barrier
 * variant).
 *
 * lwarx/stwcx. form a load-reserve / store-conditional pair: if any
 * other store to the word intervenes, stwcx. fails (cr0 EQ clear) and
 * bne- retries from the reload at label 1.
 */
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;	/* scratch: holds the updated counter ("=&r" keeps it
		 * out of the input operands' registers) */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}
48 | |
/*
 * Atomically add 'a' to v->counter and return the new value.
 *
 * SMP_ISYNC after a successful stwcx. would provide the acquire
 * barrier on SMP; it expands to nothing in this single-CPU build.
 * The "memory" clobber makes this a compiler barrier either way.
 */
static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;	/* scratch / return value: counter after the add */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	SMP_ISYNC
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
66 | |
/* True if v->counter is negative after atomically adding 'a'. */
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
68 | |
/*
 * Atomically subtract 'a' from v->counter; no return value, no
 * implied barrier.  Same lwarx/stwcx. retry loop as atomic_add;
 * note subf computes %0 = %0 - %2 (subtract-from operand order).
 */
static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;	/* scratch: holds the updated counter value */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}
83 | |
/*
 * Atomically subtract 'a' from v->counter and return the new value.
 * SMP_ISYNC (empty in this build) marks where the SMP acquire
 * barrier would sit; "memory" keeps it a compiler barrier.
 */
static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;	/* scratch / return value: counter after the subtract */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	SMP_ISYNC
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
101 | |
/*
 * Atomically increment v->counter by 1; no return value, no implied
 * barrier.
 *
 * NOTE(review): addic also writes the carry bit XER[CA], which is not
 * in the clobber list — this matches the historic kernel code, but
 * confirm it is acceptable for this compiler/ABI.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	int t;	/* scratch: holds the incremented counter value */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}
116 | |
/*
 * Atomically increment v->counter by 1 and return the new value.
 * SMP_ISYNC (empty here) is the would-be SMP acquire barrier;
 * "memory" keeps the compiler from reordering across the op.
 */
static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;	/* scratch / return value: counter after the increment */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	SMP_ISYNC
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
134 | |
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
144 | |
145 | static __inline__ void atomic_dec(atomic_t *v) |
146 | { |
147 | int t; |
148 | |
149 | __asm__ __volatile__( |
150 | "1: lwarx %0,0,%2 # atomic_dec\n\ |
151 | addic %0,%0,-1\n" |
152 | PPC405_ERR77(0,%2)\ |
153 | " stwcx. %0,0,%2\n\ |
154 | bne- 1b" |
155 | : "=&r" (t), "=m" (v->counter) |
156 | : "r" (&v->counter), "m" (v->counter) |
157 | : "cc"); |
158 | } |
159 | |
/*
 * Atomically decrement v->counter by 1 and return the new value.
 * SMP_ISYNC (empty in this single-CPU build) marks the SMP acquire
 * barrier slot; "memory" keeps it a compiler barrier.
 */
static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;	/* scratch / return value: counter after the decrement */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	SMP_ISYNC
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
177 | |
/* True when the counter is exactly zero after the atomic operation. */
#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)
180 | |
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, whether or not
 * the store happened — so a negative return means the counter was
 * <= 0 and was left unchanged.
 *
 * addic. sets cr0 from the decremented value; blt- 2f skips the
 * stwcx. (abandoning the reservation) when that value is negative.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;	/* old counter value minus 1 (return value) */

	__asm__ __volatile__(
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	SMP_ISYNC
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
204 | |
/* Barrier used around atomic inc/dec.  SMP_SYNC expands to "" in this
 * single-CPU build, so only the compiler barrier (the "memory"
 * clobber) remains; with SMP enabled it would emit a full sync. */
#define __MB __asm__ __volatile__ (SMP_SYNC : : : "memory")
#define smp_mb__before_atomic_dec()	__MB
#define smp_mb__after_atomic_dec()	__MB
#define smp_mb__before_atomic_inc()	__MB
#define smp_mb__after_atomic_inc()	__MB
210 | |
#endif /* _ASM_PPC_ATOMIC_H_ */