Commit | Line | Data |
---|---|---|
feaf7cf1 BB |
1 | #ifndef _ASM_POWERPC_ATOMIC_H_ |
2 | #define _ASM_POWERPC_ATOMIC_H_ | |
3 | ||
1da177e4 LT |
4 | /* |
5 | * PowerPC atomic operations | |
6 | */ | |
7 | ||
1da177e4 | 8 | #ifdef __KERNEL__ |
ae3a197e DH |
9 | #include <linux/types.h> |
10 | #include <asm/cmpxchg.h> | |
1da177e4 | 11 | |
/* Static initializer for an atomic_t, e.g.: static atomic_t x = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)		{ (i) }
1da177e4 | 13 | |
/*
 * Atomically read the counter.  A naturally aligned 32-bit load is atomic
 * on powerpc; the inline asm (instead of a plain C dereference) keeps the
 * compiler from caching or tearing the access.  %U1/%X1 let the compiler
 * select update/indexed forms of lwz for the "m" operand.  No ordering is
 * implied.
 */
static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}
22 | ||
/*
 * Atomically set the counter to @i.  A single aligned stw is atomic on
 * powerpc; the asm form prevents compiler reordering/tearing of the store.
 * No ordering (barrier) is implied.
 */
static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
1da177e4 | 27 | |
/*
 * Atomically add @a to @v.  No return value and no ordering guarantees
 * (no barriers, no "memory" clobber).
 *
 * Load-reserve / store-conditional (lwarx/stwcx.) retry loop: if another
 * CPU's store kills the reservation, stwcx. fails and we branch back to
 * reload and redo the add.  PPC405_ERR77() inserts the PPC405 erratum 77
 * workaround before the stwcx. (see its definition).
 */
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)	/* "+m" tells the compiler counter is read+written */
	: "r" (a), "r" (&v->counter)
	: "cc");
}
42 | ||
/*
 * Atomically add @a to @v and return the new value.
 *
 * Value-returning atomics are fully ordered: PPC_ATOMIC_ENTRY_BARRIER /
 * PPC_ATOMIC_EXIT_BARRIER emit the required hardware barriers around the
 * lwarx/stwcx. loop, and the "memory" clobber makes the asm a compiler
 * barrier as well.
 */
static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
61 | ||
/* True if @v is negative after atomically adding @a (fully ordered). */
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
63 | ||
/*
 * Atomically subtract @a from @v (subf computes %0 = %0 - %2).
 * No return value, no ordering guarantees.
 */
static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}
78 | ||
/*
 * Atomically subtract @a from @v and return the new value.
 * Fully ordered (entry/exit barriers + "memory" clobber), like all
 * value-returning atomics in this file.
 */
static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
97 | ||
/*
 * Atomically increment @v by 1.  No return value, no ordering.
 * addic (add immediate carrying) updates the carry bit, hence the
 * "xer" clobber in addition to "cc".
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}
112 | ||
/*
 * Atomically increment @v by 1 and return the new value.
 * Fully ordered.  "xer" is clobbered because addic modifies the
 * carry bit.
 */
static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
131 | ||
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
141 | ||
/*
 * Atomically decrement @v by 1 (addic with -1).  No return value,
 * no ordering.  "xer" clobbered because addic modifies the carry bit.
 * NOTE(review): the stray '\' after PPC405_ERR77(0,%2) is a harmless
 * line continuation kept verbatim from the original.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)\
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}
156 | ||
/*
 * Atomically decrement @v by 1 and return the new value.
 * Fully ordered.
 */
static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
175 | ||
/* Compare-and-swap / exchange on the counter, via the generic cmpxchg/xchg
 * from <asm/cmpxchg.h>.  atomic_cmpxchg() returns the value read (old on
 * success); atomic_xchg() returns the previous value. */
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
4a6dae6d | 178 | |
/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 *
 * On a match with @u we branch to 2: and return the loaded value (== @u).
 * On success we fall through the exit barrier and 'subf' undoes the add
 * so the *old* value is returned.
 * NOTE(review): PPC405_ERR77 is passed (0,%2) although the stwcx. address
 * operand here is %1 — looks like a copy/paste slip; verify against the
 * PPC405_ERR77 definition whether the operands matter.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t;
}
210 | ||
/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic_inc_not_zero(atomic_t *v)
{
	/* t1 holds the loaded (old) value, t2 the incremented value to store;
	 * t1 doubles as the return value: zero iff the counter was zero. */
	int t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_inc_not_zero\n\
	cmpwi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
/* Self-#define: standard kernel idiom signalling that this architecture
 * provides its own atomic_inc_not_zero (generic fallback not needed). */
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))
8426e1f6 | 241 | |
/* True if the counter is zero after the (fully ordered) sub/dec. */
#define atomic_sub_and_test(a, v) (atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v) (atomic_dec_return((v)) == 0)
244 | ||
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	/* cmpwi %0,1 + blt- 2f skips the store when the old value is < 1;
	 * either way t already holds old-1 from the addi, which is what we
	 * return.  Constraint "b" restricts t to a base register (not r0),
	 * since addi with r0 as source reads as the literal 0. */
	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
/* Tell generic code this arch implements atomic_dec_if_positive itself. */
#define atomic_dec_if_positive atomic_dec_if_positive
1da177e4 | 272 | |
/* The non-value-returning inc/dec above are unordered, so the
 * before/after-atomic ordering hooks must be full smp_mb() barriers. */
#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()
1da177e4 | 277 | |
06a98dba SR |
278 | #ifdef __powerpc64__ |
279 | ||
/* Static initializer for an atomic64_t (64-bit kernels only). */
#define ATOMIC64_INIT(i)	{ (i) }
281 | ||
/*
 * Atomically read the 64-bit counter.  An aligned ld is atomic on
 * 64-bit powerpc; the asm form prevents compiler tearing/caching.
 * No ordering implied.
 */
static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}
290 | ||
/* Atomically set the 64-bit counter to @i via a single std.  No ordering. */
static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
06a98dba SR |
295 | |
/*
 * Atomically add @a to the 64-bit @v.  No return value, no ordering.
 * ldarx/stdcx. retry loop; no PPC405_ERR77 here since that erratum
 * workaround applies only to 32-bit PPC405 parts.
 */
static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}
309 | ||
/*
 * Atomically add @a to the 64-bit @v and return the new value.
 * Fully ordered (entry/exit barriers + "memory" clobber).
 */
static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
327 | ||
/* True if @v is negative after atomically adding @a (fully ordered). */
#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
329 | ||
/* Atomically subtract @a from the 64-bit @v.  No return, no ordering. */
static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}
343 | ||
/*
 * Atomically subtract @a from the 64-bit @v and return the new value.
 * Fully ordered.
 */
static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
361 | ||
/*
 * Atomically increment the 64-bit @v.  No return, no ordering.
 * addic modifies the carry bit -> "xer" clobber.
 */
static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}
375 | ||
/*
 * Atomically increment the 64-bit @v and return the new value.
 * Fully ordered.
 */
static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
393 | ||
/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
403 | ||
/*
 * Atomically decrement the 64-bit @v (addic with -1).  No return,
 * no ordering.  "xer" clobbered by addic's carry update.
 */
static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}
417 | ||
/*
 * Atomically decrement the 64-bit @v and return the new value.
 * Fully ordered.
 */
static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
435 | ||
/* True if the 64-bit counter is zero after the (fully ordered) sub/dec. */
#define atomic64_sub_and_test(a, v) (atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
438 | ||
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	/* addic. sets the condition register from the result; blt- skips the
	 * store when old-1 < 0, but t still holds old-1, which is returned. */
	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
462 | ||
/* 64-bit compare-and-swap / exchange via the generic cmpxchg/xchg. */
#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
465 | ||
466 | /** | |
467 | * atomic64_add_unless - add unless the number is a given value | |
468 | * @v: pointer of type atomic64_t | |
469 | * @a: the amount to add to v... | |
470 | * @u: ...unless v is equal to u. | |
471 | * | |
472 | * Atomically adds @a to @v, so long as it was not @u. | |
f24219b4 | 473 | * Returns the old value of @v. |
41806ef4 MD |
474 | */ |
475 | static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u) | |
476 | { | |
477 | long t; | |
478 | ||
479 | __asm__ __volatile__ ( | |
b97021f8 | 480 | PPC_ATOMIC_ENTRY_BARRIER |
f24219b4 | 481 | "1: ldarx %0,0,%1 # __atomic_add_unless\n\ |
41806ef4 MD |
482 | cmpd 0,%0,%3 \n\ |
483 | beq- 2f \n\ | |
484 | add %0,%2,%0 \n" | |
485 | " stdcx. %0,0,%1 \n\ | |
486 | bne- 1b \n" | |
b97021f8 | 487 | PPC_ATOMIC_EXIT_BARRIER |
41806ef4 MD |
488 | " subf %0,%2,%0 \n\ |
489 | 2:" | |
490 | : "=&r" (t) | |
491 | : "r" (&v->counter), "r" (a), "r" (u) | |
492 | : "cc", "memory"); | |
493 | ||
494 | return t != u; | |
495 | } | |
496 | ||
/**
 * atomic_inc64_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic64_inc_not_zero(atomic64_t *v)
{
	/* t1 = loaded (old) value, t2 = value stored back. */
	long t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_inc_not_zero\n\
	cmpdi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n\
	stdcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	/* Compare explicitly rather than returning t1: the long would be
	 * truncated to int, losing the high bits of the old value. */
	return t1 != 0;
}
41806ef4 | 525 | |
06a98dba SR |
526 | #endif /* __powerpc64__ */ |
527 | ||
1da177e4 | 528 | #endif /* __KERNEL__ */ |
feaf7cf1 | 529 | #endif /* _ASM_POWERPC_ATOMIC_H_ */ |