/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

#define ATOMIC_INIT(i)	  { (i) }
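/*
 * Illustrative use only (not part of this file): a counter is normally
 * declared and statically initialised as
 *
 *	static atomic_t example_count = ATOMIC_INIT(0);
 */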

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		ACCESS_ONCE((v)->counter)
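/*
 * ACCESS_ONCE() makes the read volatile, so the compiler emits exactly one
 * load and cannot reuse a previously cached value of v->counter.
 */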

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)		((v)->counter = (i))

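/*
 * The ATOMIC_OP() and ATOMIC_OP_RETURN() generators below expand to one of
 * three implementations, selected at compile time:
 *
 *  1. LL/SC with a branch-likely retry (beqzl) when the R10000_LLSC_WAR
 *     workaround is needed.
 *  2. Plain LL/SC wrapped in a C retry loop on other LL/SC-capable CPUs.
 *  3. A fallback that disables local interrupts around a plain C
 *     read-modify-write on CPUs without LL/SC.
 */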
#define ATOMIC_OP(op, c_op, asm_op)						\
static __inline__ void atomic_##op(int i, atomic_t * v)				\
{										\
	if (kernel_uses_llsc && R10000_LLSC_WAR) {				\
		int temp;							\
										\
		__asm__ __volatile__(						\
		"	.set	arch=r4000				\n"	\
		"1:	ll	%0, %1		# atomic_" #op "	\n"	\
		"	" #asm_op " %0, %2				\n"	\
		"	sc	%0, %1					\n"	\
		"	beqzl	%0, 1b					\n"	\
		"	.set	mips0					\n"	\
		: "=&r" (temp), "+m" (v->counter)				\
		: "Ir" (i));							\
	} else if (kernel_uses_llsc) {						\
		int temp;							\
										\
		do {								\
			__asm__ __volatile__(					\
			"	.set	arch=r4000			\n"	\
			"	ll	%0, %1		# atomic_" #op "\n"	\
			"	" #asm_op " %0, %2			\n"	\
			"	sc	%0, %1				\n"	\
			"	.set	mips0				\n"	\
			: "=&r" (temp), "+m" (v->counter)			\
			: "Ir" (i));						\
		} while (unlikely(!temp));					\
	} else {								\
		unsigned long flags;						\
										\
		raw_local_irq_save(flags);					\
		v->counter c_op i;						\
		raw_local_irq_restore(flags);					\
	}									\
}										\

#define ATOMIC_OP_RETURN(op, c_op, asm_op)					\
static __inline__ int atomic_##op##_return(int i, atomic_t * v)			\
{										\
	int result;								\
										\
	smp_mb__before_llsc();							\
										\
	if (kernel_uses_llsc && R10000_LLSC_WAR) {				\
		int temp;							\
										\
		__asm__ __volatile__(						\
		"	.set	arch=r4000				\n"	\
		"1:	ll	%1, %2		# atomic_" #op "_return	\n"	\
		"	" #asm_op " %0, %1, %3				\n"	\
		"	sc	%0, %2					\n"	\
		"	beqzl	%0, 1b					\n"	\
		"	" #asm_op " %0, %1, %3				\n"	\
		"	.set	mips0					\n"	\
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)		\
		: "Ir" (i));							\
	} else if (kernel_uses_llsc) {						\
		int temp;							\
										\
		do {								\
			__asm__ __volatile__(					\
			"	.set	arch=r4000			\n"	\
			"	ll	%1, %2	# atomic_" #op "_return	\n"	\
			"	" #asm_op " %0, %1, %3			\n"	\
			"	sc	%0, %2				\n"	\
			"	.set	mips0				\n"	\
			: "=&r" (result), "=&r" (temp), "+m" (v->counter)	\
			: "Ir" (i));						\
		} while (unlikely(!result));					\
										\
		result = temp; result c_op i;					\
	} else {								\
		unsigned long flags;						\
										\
		raw_local_irq_save(flags);					\
		result = v->counter;						\
		result c_op i;							\
		v->counter = result;						\
		raw_local_irq_restore(flags);					\
	}									\
										\
	smp_llsc_mb();								\
										\
	return result;								\
}

#define ATOMIC_OPS(op, c_op, asm_op)						\
	ATOMIC_OP(op, c_op, asm_op)						\
	ATOMIC_OP_RETURN(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)
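/*
 * The two expansions above generate atomic_add(), atomic_sub(),
 * atomic_add_return() and atomic_sub_return().
 */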

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
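/*
 * atomic_sub_if_positive() is the building block for atomic_dec_if_positive()
 * below.  A purely illustrative, semaphore-style use (names are hypothetical):
 *
 *	if (atomic_sub_if_positive(1, &sem_count) < 0)
 *		... slow path: the resource was not available ...
 */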

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
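/*
 * Illustrative sketch only: generic code typically layers atomic_add_unless()
 * and atomic_inc_not_zero() on top of this, e.g. for reference counts
 * (the refcnt field below is hypothetical):
 *
 *	if (__atomic_add_unless(&obj->refcnt, 1, 0) == 0)
 *		... the object was already dead, do not touch it ...
 */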

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	ACCESS_ONCE((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	((v)->counter = (i))

#define ATOMIC64_OP(op, c_op, asm_op)						\
static __inline__ void atomic64_##op(long i, atomic64_t * v)			\
{										\
	if (kernel_uses_llsc && R10000_LLSC_WAR) {				\
		long temp;							\
										\
		__asm__ __volatile__(						\
		"	.set	arch=r4000				\n"	\
		"1:	lld	%0, %1		# atomic64_" #op "	\n"	\
		"	" #asm_op " %0, %2				\n"	\
		"	scd	%0, %1					\n"	\
		"	beqzl	%0, 1b					\n"	\
		"	.set	mips0					\n"	\
		: "=&r" (temp), "+m" (v->counter)				\
		: "Ir" (i));							\
	} else if (kernel_uses_llsc) {						\
		long temp;							\
										\
		do {								\
			__asm__ __volatile__(					\
			"	.set	arch=r4000			\n"	\
			"	lld	%0, %1		# atomic64_" #op "\n"	\
			"	" #asm_op " %0, %2			\n"	\
			"	scd	%0, %1				\n"	\
			"	.set	mips0				\n"	\
			: "=&r" (temp), "+m" (v->counter)			\
			: "Ir" (i));						\
		} while (unlikely(!temp));					\
	} else {								\
		unsigned long flags;						\
										\
		raw_local_irq_save(flags);					\
		v->counter c_op i;						\
		raw_local_irq_restore(flags);					\
	}									\
}										\

#define ATOMIC64_OP_RETURN(op, c_op, asm_op)					\
static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)		\
{										\
	long result;								\
										\
	smp_mb__before_llsc();							\
										\
	if (kernel_uses_llsc && R10000_LLSC_WAR) {				\
		long temp;							\
										\
		__asm__ __volatile__(						\
		"	.set	arch=r4000				\n"	\
		"1:	lld	%1, %2		# atomic64_" #op "_return\n"	\
		"	" #asm_op " %0, %1, %3				\n"	\
		"	scd	%0, %2					\n"	\
		"	beqzl	%0, 1b					\n"	\
		"	" #asm_op " %0, %1, %3				\n"	\
		"	.set	mips0					\n"	\
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)		\
		: "Ir" (i));							\
	} else if (kernel_uses_llsc) {						\
		long temp;							\
										\
		do {								\
			__asm__ __volatile__(					\
			"	.set	arch=r4000			\n"	\
			"	lld	%1, %2	# atomic64_" #op "_return\n"	\
			"	" #asm_op " %0, %1, %3			\n"	\
			"	scd	%0, %2				\n"	\
			"	.set	mips0				\n"	\
			: "=&r" (result), "=&r" (temp), "=m" (v->counter)	\
			: "Ir" (i), "m" (v->counter)				\
			: "memory");						\
		} while (unlikely(!result));					\
										\
		result = temp; result c_op i;					\
	} else {								\
		unsigned long flags;						\
										\
		raw_local_irq_save(flags);					\
		result = v->counter;						\
		result c_op i;							\
		v->counter = result;						\
		raw_local_irq_restore(flags);					\
	}									\
										\
	smp_llsc_mb();								\
										\
	return result;								\
}

#define ATOMIC64_OPS(op, c_op, asm_op)						\
	ATOMIC64_OP(op, c_op, asm_op)						\
	ATOMIC64_OP_RETURN(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)
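/*
 * As with the 32-bit variants, these expansions generate atomic64_add(),
 * atomic64_sub(), atomic64_add_return() and atomic64_sub_return().
 */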

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true if the addition was made, i.e. @v was not @u.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */