0bc64be5 |
1 | /* |
2 | * PowerPC64 atomic operations |
3 | * |
4 | * Copyright (C) 2001 Paul Mackerras <paulus@au.ibm.com>, IBM |
5 | * Copyright (C) 2001 Anton Blanchard <anton@au.ibm.com>, IBM |
6 | * |
7 | * This program is free software; you can redistribute it and/or |
8 | * modify it under the terms of the GNU General Public License |
9 | * as published by the Free Software Foundation; either version |
10 | * 2 of the License, or (at your option) any later version. |
11 | */ |
12 | |
13 | #ifndef _ASM_PPC64_ATOMIC_H_ |
14 | #define _ASM_PPC64_ATOMIC_H_ |
15 | |
16 | #include <asm/memory.h> |
17 | |
895ad115 |
18 | #ifdef __cplusplus |
19 | extern "C" { |
20 | #endif |
21 | |
0bc64be5 |
/*
 * Atomic counter: a single 32-bit value.  "volatile" forces the
 * compiler to re-read memory on every atomic_read() rather than
 * caching the value in a register.
 */
typedef struct { volatile int counter; } atomic_t;

/* Static initializer: atomic_t a = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)	{ (i) }

/*
 * Plain load/store of the counter.  These are NOT ordered or locked;
 * a naturally aligned 32-bit access is atomic on PPC64 by itself.
 */
#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))
28 | |
/*
 * atomic_add - atomically add @a to @v->counter; no return value.
 *
 * Classic load-reserve/store-conditional loop: lwarx loads the counter
 * and takes a reservation on it, stwcx. stores the sum only if the
 * reservation is still held, and bne- retries if another CPU wrote the
 * line in between.
 *
 * No memory barriers here (unlike atomic_add_return); callers needing
 * ordering must use the *_return variants or explicit smp_mb().
 * The "=m"/"m" constraint pair tells the compiler v->counter is both
 * read and written by the asm.
 */
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n\
	stwcx.	%0,0,%3\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}
42 | |
/*
 * atomic_add_return - atomically add @a to @v->counter and return the
 * new value.
 *
 * EIEIO_ON_SMP / ISYNC_ON_SMP come from asm/memory.h and bracket the
 * update with barriers so the operation is ordered on SMP kernels
 * (presumably expanding to nothing on UP builds -- see asm/memory.h).
 * The "memory" clobber stops the compiler from reordering other memory
 * accesses across this asm.
 */
static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n\
	stwcx.	%0,0,%2\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
60 | |
/* True if @v->counter is negative after adding @a (fully ordered,
 * since it is built on atomic_add_return). */
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
62 | |
/*
 * atomic_sub - atomically subtract @a from @v->counter; no return value.
 *
 * Same lwarx/stwcx. retry loop as atomic_add; "subf %0,%2,%0" computes
 * counter - a (subf rd,ra,rb is rb - ra).  No barriers -- use
 * atomic_sub_return when ordering matters.
 */
static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n\
	stwcx.	%0,0,%3\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}
76 | |
/*
 * atomic_sub_return - atomically subtract @a from @v->counter and
 * return the new value.
 *
 * Barrier-bracketed (EIEIO_ON_SMP / ISYNC_ON_SMP) like
 * atomic_add_return, so the update is ordered on SMP.
 */
static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n\
	stwcx.	%0,0,%2\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
94 | |
/*
 * atomic_inc - atomically increment @v->counter by 1; no return value.
 *
 * Uses "addic %0,%0,1" (add immediate carrying) inside the
 * lwarx/stwcx. reservation loop.  No barriers -- use
 * atomic_inc_return when ordering matters.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n\
	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}
108 | |
/*
 * atomic_inc_return - atomically increment @v->counter by 1 and return
 * the new value.
 *
 * Barrier-bracketed (EIEIO_ON_SMP / ISYNC_ON_SMP), so the increment is
 * ordered with respect to surrounding accesses on SMP.
 */
static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n\
	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
126 | |
127 | /* |
128 | * atomic_inc_and_test - increment and test |
129 | * @v: pointer of type atomic_t |
130 | * |
131 | * Atomically increments @v by 1 |
132 | * and returns true if the result is zero, or false for all |
133 | * other cases. |
134 | */ |
135 | #define atomic_inc_and_test(v) (atomic_inc_return(v) == 0) |
136 | |
/*
 * atomic_dec - atomically decrement @v->counter by 1; no return value.
 *
 * "addic %0,%0,-1" adds the immediate -1 inside the lwarx/stwcx.
 * reservation loop.  No barriers -- use atomic_dec_return when
 * ordering matters.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n\
	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}
150 | |
/*
 * atomic_dec_return - atomically decrement @v->counter by 1 and return
 * the new value.
 *
 * Barrier-bracketed (EIEIO_ON_SMP / ISYNC_ON_SMP), so the decrement is
 * ordered with respect to surrounding accesses on SMP.
 */
static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n\
	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
168 | |
/* True if the counter reached zero after the operation; both are fully
 * ordered since they are built on the *_return variants. */
#define atomic_sub_and_test(a, v) (atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v) (atomic_dec_return((v)) == 0)
171 | |
172 | /* |
173 | * Atomically test *v and decrement if it is greater than 0. |
174 | * The function returns the old value of *v minus 1. |
175 | */ |
176 | static __inline__ int atomic_dec_if_positive(atomic_t *v) |
177 | { |
178 | int t; |
179 | |
180 | __asm__ __volatile__( |
181 | EIEIO_ON_SMP |
182 | "1: lwarx %0,0,%1 # atomic_dec_if_positive\n\ |
183 | addic. %0,%0,-1\n\ |
184 | blt- 2f\n\ |
185 | stwcx. %0,0,%1\n\ |
186 | bne- 1b" |
187 | ISYNC_ON_SMP |
188 | "\n\ |
189 | 2:" : "=&r" (t) |
190 | : "r" (&v->counter) |
191 | : "cc", "memory"); |
192 | |
193 | return t; |
194 | } |
195 | |
/*
 * Barrier hooks for callers pairing atomic_dec()/atomic_inc() with
 * memory ordering: the non-return variants above contain no barriers,
 * so these fall back to a full smp_mb() on this architecture.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
200 | |
895ad115 |
201 | #ifdef __cplusplus |
202 | } /* end of extern "C" */ |
203 | #endif |
204 | |
0bc64be5 |
205 | #endif /* _ASM_PPC64_ATOMIC_H_ */ |