#ifndef _ASM_X86_ATOMIC_H
#define _ASM_X86_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/processor.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static inline int atomic_read(const atomic_t *v)
{
	/* Read through a volatile pointer so the compiler cannot cache it. */
	return (*(volatile int *)&(v)->counter);
}
/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic_set(atomic_t *v, int i)
{
	v->counter = i;
}
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}
/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	/* sete captures ZF from the locked subtract in one atomic step */
	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}
/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}
/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}
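
/*
 * Illustrative sketch, not part of the original header: the classic
 * refcount "put" built on atomic_dec_and_test(). Exactly one caller
 * observes the count reaching zero, so the release runs exactly once.
 * The struct and function names here are hypothetical.
 */
struct example_obj {
	atomic_t refcount;
};

static inline void example_obj_put(struct example_obj *obj,
				   void (*release)(struct example_obj *))
{
	if (atomic_dec_and_test(&obj->refcount))
		release(obj);
}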
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}
/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	/* sets captures SF from the locked add in one atomic step */
	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}
/**
 * atomic_add_return - add integer and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	int __i;
#ifdef CONFIG_M386
	unsigned long flags;
	if (unlikely(boot_cpu_data.x86 <= 3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	__i = i;
	asm volatile(LOCK_PREFIX "xaddl %0, %1"
		     : "+r" (i), "+m" (v->counter)
		     : : "memory");
	/* xaddl leaves the old value in %0 (i), so old + delta is the sum */
	return i + __i;

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
	/* The 386 has no xadd; 386 kernels are UP, so irq-off suffices. */
	raw_local_irq_save(flags);
	__i = atomic_read(v);
	atomic_set(v, i + __i);
	raw_local_irq_restore(flags);
	return i + __i;
#endif
}
/**
 * atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}
#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))
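
/*
 * Illustrative sketch, not part of the original header: handing out
 * unique, monotonically increasing IDs with atomic_inc_return(). Each
 * concurrent caller receives a distinct value. The names are
 * hypothetical, and wraparound of the 32-bit counter is ignored here;
 * a static in a header is for illustration only.
 */
static atomic_t example_next_id = ATOMIC_INIT(0);

static inline int example_alloc_id(void)
{
	return atomic_inc_return(&example_next_id);
}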
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return cmpxchg(&v->counter, old, new);
}
static inline int atomic_xchg(atomic_t *v, int new)
{
	return xchg(&v->counter, new);
}
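
/*
 * Illustrative sketch, not part of the original header: claiming a
 * one-shot flag with atomic_xchg(). Exactly one caller swaps in 1
 * while the old value is still 0 and "wins"; every later caller sees
 * 1. The function name is hypothetical.
 */
static inline int example_claim_once(atomic_t *flag)
{
	return atomic_xchg(flag, 1) == 0;
}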
/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		/* Somebody else changed @v; retry with the fresh value. */
		c = old;
	}
	return c != (u);
}
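
/*
 * Illustrative sketch, not part of the original header: a try-get that
 * takes a reference only if the object is still live, i.e. its count
 * has not already dropped to zero. This is the classic
 * atomic_add_unless(v, 1, 0) idiom; the function name is hypothetical.
 */
static inline int example_try_get(atomic_t *refcount)
{
	/* Non-zero on success; zero if the count was already 0. */
	return atomic_add_unless(refcount, 1, 0);
}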
/**
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static inline int atomic_dec_if_positive(atomic_t *v)
{
	int c, old, dec;
	c = atomic_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		/* Lost the race; retry against the value we just saw. */
		c = old;
	}
	return dec;
}
/**
 * atomic_inc_short - increment of a short integer
 * @v: pointer to type short int
 *
 * Atomically adds 1 to @v
 * Returns the new value of @v
 */
static inline short int atomic_inc_short(short int *v)
{
	asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
	return *v;
}
#ifdef CONFIG_X86_64
/**
 * atomic_or_long - OR of two long integers
 * @v1: pointer to type unsigned long
 * @v2: value of type unsigned long
 *
 * Atomically ORs @v2 into @v1; the result is stored in *@v1.
 */
static inline void atomic_or_long(unsigned long *v1, unsigned long v2)
{
	asm(LOCK_PREFIX "orq %1, %0" : "+m" (*v1) : "r" (v2));
}
#endif
/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "andl %0,%1"			\
		     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "orl %0,%1"			\
		     : : "r" ((unsigned)(mask)), "m" (*(addr))	\
		     : "memory")
/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
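
/*
 * Illustrative sketch, not part of the original header: the macros
 * above pair with atomic_dec()/atomic_inc() at call sites that need
 * ordering on architectures where atomics do not imply it; on x86 they
 * reduce to a plain compiler barrier. Names here are hypothetical.
 */
static inline void example_publish_and_dec(int *data, atomic_t *pending)
{
	*data = 1;			/* make the update visible first */
	smp_mb__before_atomic_dec();	/* order the store before the dec */
	atomic_dec(pending);
}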
#ifdef CONFIG_X86_32
# include "atomic64_32.h"
#else
# include "atomic64_64.h"
#endif

#include <asm-generic/atomic-long.h>
#endif /* _ASM_X86_ATOMIC_H */