/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

#define ATOMIC_INIT(i)	  { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		READ_ONCE((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	WRITE_ONCE((v)->counter, (i))
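
/*
 * Illustrative sketch, not part of this header: declaring an atomic_t
 * and using the accessors above.  The variable name is hypothetical.
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_users, 5);
 *	pr_info("users: %d\n", atomic_read(&nr_users));
 */
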
#define ATOMIC_OP(op, c_op, asm_op)					      \
static __inline__ void atomic_##op(int i, atomic_t * v)			      \
{									      \
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
		int temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	arch=r4000				\n"   \
		"1:	ll	%0, %1		# atomic_" #op "	\n"   \
		"	" #asm_op " %0, %2				\n"   \
		"	sc	%0, %1					\n"   \
		"	beqzl	%0, 1b					\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	      \
		: "Ir" (i));						      \
	} else if (kernel_uses_llsc) {					      \
		int temp;						      \
									      \
		do {							      \
			__asm__ __volatile__(				      \
			"	.set	"MIPS_ISA_LEVEL"		\n"   \
			"	ll	%0, %1		# atomic_" #op "\n"   \
			"	" #asm_op " %0, %2			\n"   \
			"	sc	%0, %1				\n"   \
			"	.set	mips0				\n"   \
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)  \
			: "Ir" (i));					      \
		} while (unlikely(!temp));				      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		v->counter c_op i;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
}

#define ATOMIC_OP_RETURN(op, c_op, asm_op)				      \
static __inline__ int atomic_##op##_return(int i, atomic_t * v)		      \
{									      \
	int result;							      \
									      \
	smp_mb__before_llsc();						      \
									      \
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
		int temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	arch=r4000				\n"   \
		"1:	ll	%1, %2		# atomic_" #op "_return	\n"   \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	sc	%0, %2					\n"   \
		"	beqzl	%0, 1b					\n"   \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (result), "=&r" (temp),				      \
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
		: "Ir" (i));						      \
	} else if (kernel_uses_llsc) {					      \
		int temp;						      \
									      \
		do {							      \
			__asm__ __volatile__(				      \
			"	.set	"MIPS_ISA_LEVEL"		\n"   \
			"	ll	%1, %2	# atomic_" #op "_return	\n"   \
			"	" #asm_op " %0, %1, %3			\n"   \
			"	sc	%0, %2				\n"   \
			"	.set	mips0				\n"   \
			: "=&r" (result), "=&r" (temp),			      \
			  "+" GCC_OFF_SMALL_ASM() (v->counter)		      \
			: "Ir" (i));					      \
		} while (unlikely(!result));				      \
									      \
		result = temp; result c_op i;				      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		result = v->counter;					      \
		result c_op i;						      \
		v->counter = result;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
									      \
	smp_llsc_mb();							      \
									      \
	return result;							      \
}

#define ATOMIC_FETCH_OP(op, c_op, asm_op)				      \
static __inline__ int atomic_fetch_##op(int i, atomic_t * v)		      \
{									      \
	int result;							      \
									      \
	smp_mb__before_llsc();						      \
									      \
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
		int temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	arch=r4000				\n"   \
		"1:	ll	%1, %2		# atomic_fetch_" #op "	\n"   \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	sc	%0, %2					\n"   \
		"	beqzl	%0, 1b					\n"   \
		"	move	%0, %1					\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (result), "=&r" (temp),				      \
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
		: "Ir" (i));						      \
	} else if (kernel_uses_llsc) {					      \
		int temp;						      \
									      \
		do {							      \
			__asm__ __volatile__(				      \
			"	.set	"MIPS_ISA_LEVEL"		\n"   \
			"	ll	%1, %2	# atomic_fetch_" #op "	\n"   \
			"	" #asm_op " %0, %1, %3			\n"   \
			"	sc	%0, %2				\n"   \
			"	.set	mips0				\n"   \
			: "=&r" (result), "=&r" (temp),			      \
			  "+" GCC_OFF_SMALL_ASM() (v->counter)		      \
			: "Ir" (i));					      \
		} while (unlikely(!result));				      \
									      \
		result = temp;						      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		result = v->counter;					      \
		v->counter c_op i;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
									      \
	smp_llsc_mb();							      \
									      \
	return result;							      \
}

#define ATOMIC_OPS(op, c_op, asm_op)					      \
	ATOMIC_OP(op, c_op, asm_op)					      \
	ATOMIC_OP_RETURN(op, c_op, asm_op)				      \
	ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)

#undef ATOMIC_OPS
#define ATOMIC_OPS(op, c_op, asm_op)					      \
	ATOMIC_OP(op, c_op, asm_op)					      \
	ATOMIC_FETCH_OP(op, c_op, asm_op)

#define atomic_fetch_or atomic_fetch_or

ATOMIC_OPS(and, &=, and)
ATOMIC_OPS(or, |=, or)
ATOMIC_OPS(xor, ^=, xor)

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
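
/*
 * Illustrative sketch, not part of this header: each ATOMIC_OPS()
 * expansion above generates a family of functions, e.g. for "add":
 *
 *	atomic_add(i, v);		modifies v->counter in place
 *	atomic_add_return(i, v);	returns the new value
 *	atomic_fetch_add(i, v);		returns the old value
 *
 * The bitwise ops (and, or, xor) omit the _return variant.
 */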

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
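
/*
 * Illustrative sketch, not part of this header: reserving one unit from
 * a limited budget only while something is left.  Names are hypothetical.
 *
 *	static atomic_t budget = ATOMIC_INIT(8);
 *
 *	if (atomic_sub_if_positive(1, &budget) >= 0)
 *		... a unit was reserved ...
 *	else
 *		... budget exhausted, nothing was subtracted ...
 */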

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
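
/*
 * Illustrative sketch, not part of this header: atomic_xchg() as a
 * one-shot flag.  The variable name is hypothetical.
 *
 *	static atomic_t warned = ATOMIC_INIT(0);
 *
 *	if (atomic_xchg(&warned, 1) == 0)
 *		... only the first caller gets here ...
 */
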
/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
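
/*
 * Illustrative sketch, not part of this header: the generic
 * atomic_inc_not_zero() idiom is built on __atomic_add_unless(),
 * taking a reference only while at least one is already held.
 * The field name is hypothetical.
 *
 *	if (__atomic_add_unless(&obj->refs, 1, 0) != 0)
 *		... got a reference ...
 */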

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
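
/*
 * Illustrative sketch, not part of this header: the usual reference-drop
 * idiom built on atomic_dec_and_test().  Names are hypothetical.
 *
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		free_object(obj);
 */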

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	READ_ONCE((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v, i)	WRITE_ONCE((v)->counter, (i))

#define ATOMIC64_OP(op, c_op, asm_op)					      \
static __inline__ void atomic64_##op(long i, atomic64_t * v)		      \
{									      \
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
		long temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	arch=r4000				\n"   \
		"1:	lld	%0, %1		# atomic64_" #op "	\n"   \
		"	" #asm_op " %0, %2				\n"   \
		"	scd	%0, %1					\n"   \
		"	beqzl	%0, 1b					\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	      \
		: "Ir" (i));						      \
	} else if (kernel_uses_llsc) {					      \
		long temp;						      \
									      \
		do {							      \
			__asm__ __volatile__(				      \
			"	.set	"MIPS_ISA_LEVEL"		\n"   \
			"	lld	%0, %1		# atomic64_" #op "\n" \
			"	" #asm_op " %0, %2			\n"   \
			"	scd	%0, %1				\n"   \
			"	.set	mips0				\n"   \
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)      \
			: "Ir" (i));					      \
		} while (unlikely(!temp));				      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		v->counter c_op i;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
}

#define ATOMIC64_OP_RETURN(op, c_op, asm_op)				      \
static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)	      \
{									      \
	long result;							      \
									      \
	smp_mb__before_llsc();						      \
									      \
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
		long temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	arch=r4000				\n"   \
		"1:	lld	%1, %2		# atomic64_" #op "_return\n"  \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	scd	%0, %2					\n"   \
		"	beqzl	%0, 1b					\n"   \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (result), "=&r" (temp),				      \
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
		: "Ir" (i));						      \
	} else if (kernel_uses_llsc) {					      \
		long temp;						      \
									      \
		do {							      \
			__asm__ __volatile__(				      \
			"	.set	"MIPS_ISA_LEVEL"		\n"   \
			"	lld	%1, %2	# atomic64_" #op "_return\n"  \
			"	" #asm_op " %0, %1, %3			\n"   \
			"	scd	%0, %2				\n"   \
			"	.set	mips0				\n"   \
			: "=&r" (result), "=&r" (temp),			      \
			  "=" GCC_OFF_SMALL_ASM() (v->counter)		      \
			: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)	      \
			: "memory");					      \
		} while (unlikely(!result));				      \
									      \
		result = temp; result c_op i;				      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		result = v->counter;					      \
		result c_op i;						      \
		v->counter = result;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
									      \
	smp_llsc_mb();							      \
									      \
	return result;							      \
}

#define ATOMIC64_FETCH_OP(op, c_op, asm_op)				      \
static __inline__ long atomic64_fetch_##op(long i, atomic64_t * v)	      \
{									      \
	long result;							      \
									      \
	smp_mb__before_llsc();						      \
									      \
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
		long temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	arch=r4000				\n"   \
		"1:	lld	%1, %2		# atomic64_fetch_" #op "\n"   \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	scd	%0, %2					\n"   \
		"	beqzl	%0, 1b					\n"   \
		"	move	%0, %1					\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (result), "=&r" (temp),				      \
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
		: "Ir" (i));						      \
	} else if (kernel_uses_llsc) {					      \
		long temp;						      \
									      \
		do {							      \
			__asm__ __volatile__(				      \
			"	.set	"MIPS_ISA_LEVEL"		\n"   \
			"	lld	%1, %2	# atomic64_fetch_" #op "\n"   \
			"	" #asm_op " %0, %1, %3			\n"   \
			"	scd	%0, %2				\n"   \
			"	.set	mips0				\n"   \
			: "=&r" (result), "=&r" (temp),			      \
			  "=" GCC_OFF_SMALL_ASM() (v->counter)		      \
			: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)	      \
			: "memory");					      \
		} while (unlikely(!result));				      \
									      \
		result = temp;						      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		result = v->counter;					      \
		v->counter c_op i;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
									      \
	smp_llsc_mb();							      \
									      \
	return result;							      \
}

#define ATOMIC64_OPS(op, c_op, asm_op)					      \
	ATOMIC64_OP(op, c_op, asm_op)					      \
	ATOMIC64_OP_RETURN(op, c_op, asm_op)				      \
	ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)

#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op, c_op, asm_op)					      \
	ATOMIC64_OP(op, c_op, asm_op)					      \
	ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(and, &=, and)
ATOMIC64_OPS(or, |=, or)
ATOMIC64_OPS(xor, ^=, xor)

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
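
/*
 * Illustrative sketch, not part of this header: the ATOMIC64_OPS()
 * expansions above mirror the 32-bit interface on a 64-bit counter,
 * e.g. atomic64_add(), atomic64_sub_return(), atomic64_fetch_and().
 * The variable name is hypothetical.
 *
 *	static atomic64_t total_bytes = ATOMIC64_INIT(0);
 *
 *	atomic64_add(4096, &total_bytes);
 */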

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic
 *                            variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp),
		  "=" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true iff @v was not @u.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */