#ifndef _ASM_X86_ATOMIC_32_H
#define _ASM_X86_ATOMIC_32_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/processor.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	(((v)->counter) = (i))

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
37
 *
L
Linus Torvalds 已提交
38 39
 * Atomically adds @i to @v.
 */
40
static inline void atomic_add(int i, atomic_t *v)
L
Linus Torvalds 已提交
41
{
42 43 44
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
L
Linus Torvalds 已提交
45 46 47
}

/**
48
 * atomic_sub - subtract integer from atomic variable
L
Linus Torvalds 已提交
49 50
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
51
 *
L
Linus Torvalds 已提交
52 53
 * Atomically subtracts @i from @v.
 */
54
static inline void atomic_sub(int i, atomic_t *v)
L
Linus Torvalds 已提交
55
{
56 57 58
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
L
Linus Torvalds 已提交
59 60 61 62 63 64
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
65
 *
L
Linus Torvalds 已提交
66 67 68 69
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
70
static inline int atomic_sub_and_test(int i, atomic_t *v)
L
Linus Torvalds 已提交
71 72 73
{
	unsigned char c;

74 75 76
	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
L
Linus Torvalds 已提交
77 78 79 80 81 82
	return c;
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	/* sets captures SF (sign flag) of the locked addl */
	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}

/**
160
 * atomic_add_return - add integer and return
L
Linus Torvalds 已提交
161 162 163 164 165
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns @i + @v
 */
166
static inline int atomic_add_return(int i, atomic_t *v)
L
Linus Torvalds 已提交
167 168 169
{
	int __i;
#ifdef CONFIG_M386
170
	unsigned long flags;
171
	if (unlikely(boot_cpu_data.x86 <= 3))
L
Linus Torvalds 已提交
172 173 174 175
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	__i = i;
176 177 178
	asm volatile(LOCK_PREFIX "xaddl %0, %1"
		     : "+r" (i), "+m" (v->counter)
		     : : "memory");
L
Linus Torvalds 已提交
179 180 181 182
	return i + __i;

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
183
	local_irq_save(flags);
L
Linus Torvalds 已提交
184 185
	__i = atomic_read(v);
	atomic_set(v, i + __i);
186
	local_irq_restore(flags);
L
Linus Torvalds 已提交
187 188 189 190
	return i + __i;
#endif
}

191 192 193 194 195 196 197
/**
 * atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
198
static inline int atomic_sub_return(int i, atomic_t *v)
L
Linus Torvalds 已提交
199
{
200
	return atomic_add_return(-i, v);
L
Linus Torvalds 已提交
201 202
}

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
207
 * atomic_add_unless - add unless the number is already a given value
N
Nick Piggin 已提交
208 209 210 211
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
212
 * Atomically adds @a to @v, so long as @v was not already @u.
N
Nick Piggin 已提交
213 214
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
215
static inline int atomic_add_unless(atomic_t *v, int a, int u)
216 217 218 219 220 221 222 223 224 225 226 227 228 229
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "andl %0,%1"			\
		     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "orl %0,%1"			\
		     : : "r" (mask), "m" (*(addr)) : "memory")

/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

/* A 64-bit atomic type */

typedef struct {
	unsigned long long counter;
} atomic64_t;

#define ATOMIC64_INIT(val)	{ (val) }

/**
 * __atomic64_read - read atomic64 variable
 * @ptr: pointer of type atomic64_t
 *
 * Reads the value of @ptr as two separate 32-bit loads; NOT atomic
 * on 32-bit x86 and doesn't imply a read memory barrier.  Callers
 * must validate the result (e.g. via cmpxchg8b) when atomicity matters.
 */
#define __atomic64_read(ptr)		((ptr)->counter)

/*
 * cmpxchg8b - 64-bit compare-and-exchange primitive
 *
 * Atomically compares *ptr with old (EDX:EAX) and, if equal, stores
 * new (ECX:EBX) into *ptr.  Either way the value that was in *ptr is
 * returned in EDX:EAX ("=A").  This is the building block for all the
 * atomic64_* operations on 32-bit x86.
 */
static inline unsigned long long
cmpxchg8b(unsigned long long *ptr, unsigned long long old, unsigned long long new)
{
	asm volatile(

		LOCK_PREFIX "cmpxchg8b (%[ptr])\n"

		     :		"=A" (old)

		     : [ptr]	"D" (ptr),
				"A" (old),
				"b" (ll_low(new)),
				"c" (ll_high(new))

		     : "memory");

	return old;
}

/*
 * atomic64_cmpxchg - typed wrapper around cmpxchg8b for atomic64_t.
 * Returns the value that was in ptr->counter before the operation
 * (equal to old_val iff the exchange succeeded).
 */
static inline unsigned long long
atomic64_cmpxchg(atomic64_t *ptr, unsigned long long old_val,
		 unsigned long long new_val)
{
	return cmpxchg8b(&ptr->counter, old_val, new_val);
}

/**
294
 * atomic64_xchg - xchg atomic64 variable
I
Ingo Molnar 已提交
295 296 297
 * @ptr:      pointer to type atomic64_t
 * @new_val:  value to assign
 *
298 299
 * Atomically xchgs the value of @ptr to @new_val and returns
 * the old value.
I
Ingo Molnar 已提交
300
 */
301 302 303

static inline unsigned long long
atomic64_xchg(atomic64_t *ptr, unsigned long long new_val)
I
Ingo Molnar 已提交
304 305 306 307 308 309
{
	unsigned long long old_val;

	do {
		old_val = atomic_read(ptr);
	} while (atomic64_cmpxchg(ptr, old_val, new_val) != old_val);
310 311 312 313 314 315 316 317 318 319 320 321 322 323

	return old_val;
}

/**
 * atomic64_set - set atomic64 variable
 * @ptr:      pointer to type atomic64_t
 * @new_val:  value to assign
 *
 * Atomically sets the value of @ptr to @new_val.
 */
static inline void atomic64_set(atomic64_t *ptr, unsigned long long new_val)
{
	/* a plain 64-bit store would tear on 32-bit x86; xchg is atomic */
	atomic64_xchg(ptr, new_val);
}

/**
 * atomic64_read - read atomic64 variable
 * @ptr:      pointer to type atomic64_t
 *
 * Atomically reads the value of @ptr and returns it.
 */
static inline unsigned long long atomic64_read(atomic64_t *ptr)
{
	unsigned long long curr_val;

	do {
		/*
		 * __atomic64_read() may observe a torn value; the no-op
		 * cmpxchg (curr_val -> curr_val) only succeeds when the
		 * word really contained curr_val, so the loop exits with
		 * a value that was atomically present at some instant.
		 */
		curr_val = __atomic64_read(ptr);
	} while (atomic64_cmpxchg(ptr, curr_val, curr_val) != curr_val);

	return curr_val;
}

/**
 * atomic64_add_return - add and return
 * @delta: integer value to add
 * @ptr:   pointer to type atomic64_t
 *
 * Atomically adds @delta to @ptr and returns @delta + *@ptr
 */
static inline unsigned long long
atomic64_add_return(unsigned long long delta, atomic64_t *ptr)
{
	unsigned long long old_val, new_val;

	do {
		/* possibly-torn read is fine: cmpxchg validates it below */
		old_val = __atomic64_read(ptr);
		new_val = old_val + delta;

	} while (atomic64_cmpxchg(ptr, old_val, new_val) != old_val);

	return new_val;
}

/*
 * These must return the full 64-bit result; a plain 'long' return type
 * would silently truncate it to 32 bits on i386.
 */
static inline unsigned long long
atomic64_sub_return(unsigned long long delta, atomic64_t *ptr)
{
	/* two's-complement negation makes the addition a subtraction */
	return atomic64_add_return(-delta, ptr);
}

static inline unsigned long long atomic64_inc_return(atomic64_t *ptr)
{
	return atomic64_add_return(1, ptr);
}

static inline unsigned long long atomic64_dec_return(atomic64_t *ptr)
{
	return atomic64_sub_return(1, ptr);
}

/**
 * atomic64_add - add integer to atomic64 variable
 * @delta: integer value to add
 * @ptr:   pointer to type atomic64_t
 *
 * Atomically adds @delta to @ptr.
 */
static inline void atomic64_add(unsigned long long delta, atomic64_t *ptr)
{
	/* same cmpxchg loop as atomic64_add_return(); result discarded */
	(void)atomic64_add_return(delta, ptr);
}

/**
 * atomic64_sub - subtract the atomic64 variable
 * @delta: integer value to subtract
 * @ptr:   pointer to type atomic64_t
 *
 * Atomically subtracts @delta from @ptr.
 */
static inline void atomic64_sub(unsigned long long delta, atomic64_t *ptr)
{
	/* subtraction == addition of the two's complement */
	atomic64_add(-delta, ptr);
}

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @delta: integer value to subtract
 * @ptr:   pointer to type atomic64_t
 *
 * Atomically subtracts @delta from @ptr and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int
atomic64_sub_and_test(unsigned long long delta, atomic64_t *ptr)
{
	/* atomic64_sub_return() yields the value *after* the subtraction */
	unsigned long long new_val = atomic64_sub_return(delta, ptr);

	return new_val == 0;
}

/**
 * atomic64_inc - increment atomic64 variable
 * @ptr: pointer to type atomic64_t
 *
 * Atomically increments @ptr by 1.
 */
static inline void atomic64_inc(atomic64_t *ptr)
{
	atomic64_add(1ULL, ptr);
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @ptr: pointer to type atomic64_t
 *
 * Atomically decrements @ptr by 1.
 */
static inline void atomic64_dec(atomic64_t *ptr)
{
	atomic64_sub(1ULL, ptr);
}

/**
 * atomic64_dec_and_test - decrement and test
 * @ptr: pointer to type atomic64_t
 *
 * Atomically decrements @ptr by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic64_dec_and_test(atomic64_t *ptr)
{
	return atomic64_sub_and_test(1, ptr);
}

/**
 * atomic64_inc_and_test - increment and test
 * @ptr: pointer to type atomic64_t
 *
 * Atomically increments @ptr by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_inc_and_test(atomic64_t *ptr)
{
	/* subtracting -1 (which wraps to ULLONG_MAX) adds 1 */
	return atomic64_sub_and_test(-1, ptr);
}

/**
 * atomic64_add_negative - add and test if negative
 * @delta: integer value to add
 * @ptr:   pointer to type atomic64_t
 *
 * Atomically adds @delta to @ptr and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int
atomic64_add_negative(unsigned long long delta, atomic64_t *ptr)
{
	/* value *after* the addition, reinterpreted as signed for the test */
	long long new_val = atomic64_add_return(delta, ptr);

	return new_val < 0;
}

#include <asm-generic/atomic-long.h>

#endif /* _ASM_X86_ATOMIC_32_H */