#ifndef _ALPHA_ATOMIC_H
#define _ALPHA_ATOMIC_H

#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc...
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 */
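
/*
 * Illustrative sketch (not part of this interface): the typical
 * "resource counting" use mentioned above.  The helper names below
 * are hypothetical, chosen only for the example.
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	void grab_user(void)	{ atomic_inc(&nr_users); }
 *	void drop_user(void)	{ atomic_dec(&nr_users); }
 *	int  users_left(void)	{ return atomic_read(&nr_users); }
 */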


#define ATOMIC_INIT(i)		{ (i) }
#define ATOMIC64_INIT(i)	{ (i) }

#define atomic_read(v)		READ_ONCE((v)->counter)
#define atomic64_read(v)	READ_ONCE((v)->counter)

#define atomic_set(v,i)		WRITE_ONCE((v)->counter, (i))
#define atomic64_set(v,i)	WRITE_ONCE((v)->counter, (i))

/*
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 */

#define ATOMIC_OP(op, asm_op)						\
static __inline__ void atomic_##op(int i, atomic_t * v)			\
{									\
	unsigned long temp;						\
	__asm__ __volatile__(						\
	"1:	ldl_l %0,%1\n"						\
	"	" #asm_op " %0,%2,%0\n"					\
	"	stl_c %0,%1\n"						\
	"	beq %0,2f\n"						\
	".subsection 2\n"						\
	"2:	br 1b\n"						\
	".previous"							\
	:"=&r" (temp), "=m" (v->counter)				\
	:"Ir" (i), "m" (v->counter));					\
}									\

#define ATOMIC_OP_RETURN(op, asm_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	long temp, result;						\
	smp_mb();							\
	__asm__ __volatile__(						\
	"1:	ldl_l %0,%1\n"						\
	"	" #asm_op " %0,%3,%2\n"					\
	"	" #asm_op " %0,%3,%0\n"					\
	"	stl_c %0,%1\n"						\
	"	beq %0,2f\n"						\
	".subsection 2\n"						\
	"2:	br 1b\n"						\
	".previous"							\
	:"=&r" (temp), "=m" (v->counter), "=&r" (result)		\
	:"Ir" (i), "m" (v->counter) : "memory");			\
	smp_mb();							\
	return result;							\
}

#define ATOMIC64_OP(op, asm_op)						\
static __inline__ void atomic64_##op(long i, atomic64_t * v)		\
{									\
	unsigned long temp;						\
	__asm__ __volatile__(						\
	"1:	ldq_l %0,%1\n"						\
	"	" #asm_op " %0,%2,%0\n"					\
	"	stq_c %0,%1\n"						\
	"	beq %0,2f\n"						\
	".subsection 2\n"						\
	"2:	br 1b\n"						\
	".previous"							\
	:"=&r" (temp), "=m" (v->counter)				\
	:"Ir" (i), "m" (v->counter));					\
}									\

#define ATOMIC64_OP_RETURN(op, asm_op)					\
static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)	\
{									\
	long temp, result;						\
	smp_mb();							\
	__asm__ __volatile__(						\
	"1:	ldq_l %0,%1\n"						\
	"	" #asm_op " %0,%3,%2\n"					\
	"	" #asm_op " %0,%3,%0\n"					\
	"	stq_c %0,%1\n"						\
	"	beq %0,2f\n"						\
	".subsection 2\n"						\
	"2:	br 1b\n"						\
	".previous"							\
	:"=&r" (temp), "=m" (v->counter), "=&r" (result)		\
	:"Ir" (i), "m" (v->counter) : "memory");			\
	smp_mb();							\
	return result;							\
}

#define ATOMIC_OPS(op)							\
	ATOMIC_OP(op, op##l)						\
	ATOMIC_OP_RETURN(op, op##l)					\
	ATOMIC64_OP(op, op##q)						\
	ATOMIC64_OP_RETURN(op, op##q)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#define atomic_andnot atomic_andnot
#define atomic64_andnot atomic64_andnot

ATOMIC_OP(and, and)
ATOMIC_OP(andnot, bic)
ATOMIC_OP(or, bis)
ATOMIC_OP(xor, xor)
ATOMIC64_OP(and, and)
ATOMIC64_OP(andnot, bic)
ATOMIC64_OP(or, bis)
ATOMIC64_OP(xor, xor)

#undef ATOMIC_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define atomic64_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
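
/*
 * Illustrative sketch (not part of this interface): atomic_cmpxchg()
 * is normally used in a retry loop to build compound atomic updates
 * that have no dedicated helper, e.g. a bounded increment.  The names
 * "bounded_inc" and "limit" are made up for the example.
 *
 *	static int bounded_inc(atomic_t *v, int limit)
 *	{
 *		int old = atomic_read(v);
 *
 *		for (;;) {
 *			int prev;
 *
 *			if (old >= limit)
 *				return old;
 *			prev = atomic_cmpxchg(v, old, old + 1);
 *			if (prev == old)
 *				return old + 1;
 *			old = prev;
 *		}
 *	}
 */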

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, new, old;
	smp_mb();
	__asm__ __volatile__(
	"1:	ldl_l	%[old],%[mem]\n"
	"	cmpeq	%[old],%[u],%[c]\n"
	"	addl	%[old],%[a],%[new]\n"
	"	bne	%[c],2f\n"
	"	stl_c	%[new],%[mem]\n"
	"	beq	%[new],3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: [old] "=&r"(old), [new] "=&r"(new), [c] "=&r"(c)
	: [mem] "m"(*v), [a] "rI"(a), [u] "rI"((long)u)
	: "memory");
	smp_mb();
	return old;
}
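
/*
 * Illustrative sketch (not part of this interface): the returned old
 * value tells the caller whether the add actually happened, e.g.
 * taking a reference only while the count is non-zero ("obj->refcnt"
 * is a made-up atomic_t field used only for the example):
 *
 *	if (__atomic_add_unless(&obj->refcnt, 1, 0) != 0)
 *		return obj;		(reference taken)
 *
 * i.e. the add happened iff the returned old value differs from @u.
 */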


/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true iff @v was not @u.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, tmp;
	smp_mb();
	__asm__ __volatile__(
	"1:	ldq_l	%[tmp],%[mem]\n"
	"	cmpeq	%[tmp],%[u],%[c]\n"
	"	addq	%[tmp],%[a],%[tmp]\n"
	"	bne	%[c],2f\n"
	"	stq_c	%[tmp],%[mem]\n"
	"	beq	%[tmp],3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: [tmp] "=&r"(tmp), [c] "=&r"(c)
	: [mem] "m"(*v), [a] "rI"(a), [u] "rI"(u)
	: "memory");
	smp_mb();
	return !c;
}

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static inline long atomic64_dec_if_positive(atomic64_t *v)
{
	long old, tmp;
	smp_mb();
	__asm__ __volatile__(
	"1:	ldq_l	%[old],%[mem]\n"
	"	subq	%[old],1,%[tmp]\n"
	"	ble	%[old],2f\n"
	"	stq_c	%[tmp],%[mem]\n"
	"	beq	%[tmp],3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: [old] "=&r"(old), [tmp] "=&r"(tmp)
	: [mem] "m"(*v)
	: "memory");
	smp_mb();
	return old - 1;
}
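
/*
 * Illustrative sketch (not part of this interface): handy for
 * "consume one unit unless none are left" style counters ("pool"
 * is a made-up atomic64_t used only for the example):
 *
 *	if (atomic64_dec_if_positive(&pool) < 0)
 *		return -EBUSY;		(pool was already empty)
 */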

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)

#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic64_dec_return(v) atomic64_sub_return(1,(v))

#define atomic_inc_return(v) atomic_add_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

#define atomic_inc_and_test(v) (atomic_add_return(1, (v)) == 0)
#define atomic64_inc_and_test(v) (atomic64_add_return(1, (v)) == 0)

#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

#define atomic_inc(v) atomic_add(1,(v))
#define atomic64_inc(v) atomic64_add(1,(v))

#define atomic_dec(v) atomic_sub(1,(v))
#define atomic64_dec(v) atomic64_sub(1,(v))

#endif /* _ALPHA_ATOMIC_H */