#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#include <linux/types.h>

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)		{ (i) }

static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
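
/*
 * atomic_read()/atomic_set() use inline asm rather than a plain C
 * access so the compiler emits exactly one lwz/stw and cannot tear
 * the access; the "%U1%X1"/"%U0%X0" operand modifiers let gcc pick
 * the update-form or indexed-form instruction for the "m" operand.
 */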

static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}
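
/*
 * All the read-modify-write routines here follow the same pattern:
 * lwarx loads the word and places a reservation on it, and stwcx.
 * stores only if the reservation is still held, so the bne- retries
 * the loop whenever another processor modified the word in between.
 * PPC405_ERR77() is empty on most builds; on 405-class cores it is
 * expected to expand to a workaround for erratum #77 before stwcx.
 */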

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
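
/*
 * The value-returning forms must act as full memory barriers, hence
 * the PPC_ATOMIC_ENTRY_BARRIER/PPC_ATOMIC_EXIT_BARRIER pair around
 * the loop and the "memory" clobber; the void forms above give no
 * ordering guarantee and only clobber "cc".
 */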

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}
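
/*
 * addic updates the carry bit, which lives in XER, hence the extra
 * "xer" clobber on the increment/decrement variants.
 */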

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
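
/*
 * Illustrative sketch (my_inc_saturating is hypothetical, not part of
 * this header): the usual retry loop on atomic_cmpxchg(), which
 * returns the value it found:
 *
 *	static inline void my_inc_saturating(atomic_t *v)
 *	{
 *		int old = atomic_read(v);
 *
 *		while (old != INT_MAX) {
 *			int seen = atomic_cmpxchg(v, old, old + 1);
 *			if (seen == old)
 *				break;
 *			old = seen;
 *		}
 *	}
 */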

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t;
}
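
/*
 * Illustrative sketch (my_inc_unless_zero is hypothetical): callers
 * compare the returned old value against @u to tell whether the add
 * happened, e.g.
 *
 *	static inline int my_inc_unless_zero(atomic_t *v)
 *	{
 *		return __atomic_add_unless(v, 1, 0) != 0;
 *	}
 *
 * which is what atomic_inc_not_zero() below provides directly.
 */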

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic_inc_not_zero(atomic_t *v)
{
	int t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_inc_not_zero\n\
	cmpwi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))
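
/*
 * The self-referential define above flags atomic_inc_not_zero() as
 * implemented by the architecture, so the generic headers do not
 * install their fallback version.
 */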

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
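
/*
 * Illustrative sketch (hypothetical caller): atomic_dec_if_positive()
 * gives a non-blocking "take one token" primitive:
 *
 *	if (atomic_dec_if_positive(&tokens) < 0)
 *		return -EAGAIN;		(counter was already <= 0)
 *
 * Note the "=&b" constraint above: the loaded value is used as the RA
 * operand of addi, where r0 reads as literal 0, so t must be placed in
 * a base register other than r0.
 */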

#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()

#ifdef __powerpc64__

#define ATOMIC64_INIT(i)	{ (i) }

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
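
/*
 * The 64-bit variants below mirror the 32-bit ones, using ld/std,
 * ldarx/stdcx. and doubleword compares; no PPC405_ERR77() is needed
 * since the affected 405 cores are 32-bit only.
 */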

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# __atomic_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
"	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}
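
/*
 * Note that, unlike __atomic_add_unless() above, this returns a
 * boolean rather than the old value; atomic64_inc_not_zero() could be
 * expressed as atomic64_add_unless(v, 1, 0), but a dedicated
 * implementation follows.
 */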

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
{
	long t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_inc_not_zero\n\
	cmpdi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n\
	stdcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}

#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */