#ifndef _ALPHA_ATOMIC_H
#define _ALPHA_ATOMIC_H

#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc...
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 */


#define ATOMIC_INIT(i)		{ (i) }
#define ATOMIC64_INIT(i)	{ (i) }

#define atomic_read(v)		ACCESS_ONCE((v)->counter)
#define atomic64_read(v)	ACCESS_ONCE((v)->counter)

#define atomic_set(v,i)		((v)->counter = (i))
#define atomic64_set(v,i)	((v)->counter = (i))

/*
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 */

#define ATOMIC_OP(op)							\
static __inline__ void atomic_##op(int i, atomic_t * v)			\
{									\
	unsigned long temp;						\
	__asm__ __volatile__(						\
	"1:	ldl_l %0,%1\n"						\
	"	" #op "l %0,%2,%0\n"					\
	"	stl_c %0,%1\n"						\
	"	beq %0,2f\n"						\
	".subsection 2\n"						\
	"2:	br 1b\n"						\
	".previous"							\
	:"=&r" (temp), "=m" (v->counter)				\
	:"Ir" (i), "m" (v->counter));					\
}									\

#define ATOMIC_OP_RETURN(op)						\
static __inline__ int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	long temp, result;						\
	smp_mb();							\
	__asm__ __volatile__(						\
	"1:	ldl_l %0,%1\n"						\
	"	" #op "l %0,%3,%2\n"					\
	"	" #op "l %0,%3,%0\n"					\
	"	stl_c %0,%1\n"						\
	"	beq %0,2f\n"						\
	".subsection 2\n"						\
	"2:	br 1b\n"						\
	".previous"							\
	:"=&r" (temp), "=m" (v->counter), "=&r" (result)		\
	:"Ir" (i), "m" (v->counter) : "memory");			\
	smp_mb();							\
	return result;							\
}
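
/*
 * For readers who don't think in LL/SC: each loop above is, in effect,
 * a compare-and-swap retry loop.  A minimal portable sketch, assuming
 * only cmpxchg() from <asm/cmpxchg.h>; the _sketch name is illustrative
 * and not part of this interface, and the memory barriers of the
 * _return variant are omitted for brevity.  The .subsection trick in
 * the real code merely moves the retry branch out of line so the
 * straight-line path is predicted not-taken.
 */
static __inline__ int atomic_add_return_sketch(int i, atomic_t *v)
{
	int old;

	do {
		old = atomic_read(v);
		/* Retry if another CPU changed the counter meanwhile. */
	} while (cmpxchg(&v->counter, old, old + i) != old);
	return old + i;
}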

#define ATOMIC64_OP(op)							\
static __inline__ void atomic64_##op(long i, atomic64_t * v)		\
{									\
	unsigned long temp;						\
	__asm__ __volatile__(						\
	"1:	ldq_l %0,%1\n"						\
	"	" #op "q %0,%2,%0\n"					\
	"	stq_c %0,%1\n"						\
	"	beq %0,2f\n"						\
	".subsection 2\n"						\
	"2:	br 1b\n"						\
	".previous"							\
	:"=&r" (temp), "=m" (v->counter)				\
	:"Ir" (i), "m" (v->counter));					\
}									\

#define ATOMIC64_OP_RETURN(op)						\
static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)	\
{									\
	long temp, result;						\
	smp_mb();							\
	__asm__ __volatile__(						\
	"1:	ldq_l %0,%1\n"						\
	"	" #op "q %0,%3,%2\n"					\
	"	" #op "q %0,%3,%0\n"					\
	"	stq_c %0,%1\n"						\
	"	beq %0,2f\n"						\
	".subsection 2\n"						\
	"2:	br 1b\n"						\
	".previous"							\
	:"=&r" (temp), "=m" (v->counter), "=&r" (result)		\
	:"Ir" (i), "m" (v->counter) : "memory");			\
	smp_mb();							\
	return result;							\
}

#define ATOMIC_OPS(opg)							\
	ATOMIC_OP(opg)							\
	ATOMIC_OP_RETURN(opg)						\
	ATOMIC64_OP(opg)						\
	ATOMIC64_OP_RETURN(opg)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)
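
/*
 * The two instantiations above expand, via token pasting, into:
 * atomic_add(), atomic_add_return(), atomic64_add(),
 * atomic64_add_return(), and the corresponding four "sub" variants.
 */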

#undef ATOMIC_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define atomic64_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, new, old;
	smp_mb();
	__asm__ __volatile__(
	"1:	ldl_l	%[old],%[mem]\n"
	"	cmpeq	%[old],%[u],%[c]\n"
	"	addl	%[old],%[a],%[new]\n"
	"	bne	%[c],2f\n"
	"	stl_c	%[new],%[mem]\n"
	"	beq	%[new],3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: [old] "=&r"(old), [new] "=&r"(new), [c] "=&r"(c)
	: [mem] "m"(*v), [a] "rI"(a), [u] "rI"((long)u)
	: "memory");
	smp_mb();
	return old;
}
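
/*
 * Usage sketch (the helper name here is hypothetical, not part of this
 * header): the classic inc-not-zero idiom, taking a reference only
 * while the count is still live.
 */
static __inline__ int example_atomic_get_ref(atomic_t *refcount)
{
	/* The old value was nonzero iff the increment actually happened. */
	return __atomic_add_unless(refcount, 1, 0) != 0;
}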


/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true iff @v was not @u.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, tmp;
	smp_mb();
	__asm__ __volatile__(
	"1:	ldq_l	%[tmp],%[mem]\n"
	"	cmpeq	%[tmp],%[u],%[c]\n"
	"	addq	%[tmp],%[a],%[tmp]\n"
	"	bne	%[c],2f\n"
	"	stq_c	%[tmp],%[mem]\n"
	"	beq	%[tmp],3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: [tmp] "=&r"(tmp), [c] "=&r"(c)
	: [mem] "m"(*v), [a] "rI"(a), [u] "rI"(u)
	: "memory");
	smp_mb();
	return !c;
}

/**
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 *
 * Returns the old value of @v minus 1, even if
 * @v was not actually decremented.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long old, tmp;
	smp_mb();
	__asm__ __volatile__(
	"1:	ldq_l	%[old],%[mem]\n"
	"	subq	%[old],1,%[tmp]\n"
	"	ble	%[old],2f\n"
	"	stq_c	%[tmp],%[mem]\n"
	"	beq	%[tmp],3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: [old] "=&r"(old), [tmp] "=&r"(tmp)
	: [mem] "m"(*v)
	: "memory");
	smp_mb();
	return old - 1;
}
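
/*
 * Usage sketch (hypothetical helper): consume one unit of a counted
 * resource, failing once the counter has dropped to zero.
 */
static __inline__ int example_atomic64_trydown(atomic64_t *count)
{
	/* A negative result means the counter was already <= 0. */
	return atomic64_dec_if_positive(count) >= 0;
}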

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)

#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic64_dec_return(v) atomic64_sub_return(1,(v))

#define atomic_inc_return(v) atomic_add_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

#define atomic_inc_and_test(v) (atomic_add_return(1, (v)) == 0)
#define atomic64_inc_and_test(v) (atomic64_add_return(1, (v)) == 0)

#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

#define atomic_inc(v) atomic_add(1,(v))
#define atomic64_inc(v) atomic64_add(1,(v))

#define atomic_dec(v) atomic_sub(1,(v))
#define atomic64_dec(v) atomic64_sub(1,(v))
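
/*
 * Usage sketch for the derived operations above (the helper and its
 * release callback are hypothetical): the reference-counting pattern
 * that the "resource counting" note at the top of this file alludes to.
 */
static __inline__ void example_atomic_put_ref(atomic_t *refcount,
					      void (*release)(void))
{
	/* atomic_dec_and_test() is true only for the final reference. */
	if (atomic_dec_and_test(refcount))
		release();
}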

#endif /* _ALPHA_ATOMIC_H */