#ifndef _ASM_X86_ATOMIC64_64_H
#define _ASM_X86_ATOMIC64_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* The 64-bit atomic type */

#define ATOMIC64_INIT(i)	{ (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
static inline long atomic64_read(const atomic64_t *v)
{
	return READ_ONCE((v)->counter);
}

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic64_set(atomic64_t *v, long i)
{
	WRITE_ONCE(v->counter, i);
}
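
/*
 * Usage sketch (hypothetical, not part of this header): ATOMIC64_INIT,
 * atomic64_set() and atomic64_read() give tear-free 64-bit stores and
 * loads with no implied ordering; the names below are illustrative.
 *
 *	static atomic64_t event_count = ATOMIC64_INIT(0);
 *
 *	atomic64_set(&event_count, 0);
 *	long snapshot = atomic64_read(&event_count);
 */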

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void atomic64_add(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic64_sub(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}
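
/*
 * Usage sketch (hypothetical): lockless byte accounting with the
 * non-value-returning ops. Only the RMW itself is atomic; no memory
 * ordering is implied. All names are illustrative.
 *
 *	static atomic64_t bytes_in_flight = ATOMIC64_INIT(0);
 *
 *	atomic64_add(len, &bytes_in_flight);	on submission
 *	atomic64_sub(len, &bytes_in_flight);	on completion
 */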

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline bool atomic64_sub_and_test(long i, atomic64_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", e);
}
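
/*
 * Usage sketch (hypothetical): dropping @nr references at once; exactly
 * one caller observes the transition to zero and may free the object.
 * obj and free_obj() are illustrative.
 *
 *	if (atomic64_sub_and_test(nr, &obj->refs))
 *		free_obj(obj);
 */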

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline bool atomic64_dec_and_test(atomic64_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", e);
}
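
/*
 * Usage sketch (hypothetical): the put() side of a reference count.
 * struct obj and obj_put() are illustrative; new code would normally
 * prefer refcount_t/kref, this only demonstrates the primitive.
 *
 *	void obj_put(struct obj *o)
 *	{
 *		if (atomic64_dec_and_test(&o->refcnt))
 *			kfree(o);
 *	}
 */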

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline bool atomic64_inc_and_test(atomic64_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", e);
}

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline bool atomic64_add_negative(long i, atomic64_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, "er", i, "%0", s);
}
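
/*
 * Usage sketch (hypothetical): charge a cost against a signed budget
 * and detect overrun in the same atomic step. budget and
 * handle_overrun() are illustrative.
 *
 *	if (atomic64_add_negative(-cost, &budget))
 *		handle_overrun();
 */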

/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline long atomic64_add_return(long i, atomic64_t *v)
{
	return i + xadd(&v->counter, i);
}

static inline long atomic64_sub_return(long i, atomic64_t *v)
{
	return atomic64_add_return(-i, v);
}
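
/*
 * Usage sketch (hypothetical): a monotonically increasing sequence
 * number; every caller gets a distinct post-increment value. seq is
 * illustrative.
 *
 *	static atomic64_t seq = ATOMIC64_INIT(0);
 *
 *	long next = atomic64_add_return(1, &seq);
 */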

static inline long atomic64_fetch_add(long i, atomic64_t *v)
{
	return xadd(&v->counter, i);
}

static inline long atomic64_fetch_sub(long i, atomic64_t *v)
{
	return xadd(&v->counter, -i);
}
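
/*
 * Usage sketch (hypothetical): fetch_add returns the value *before*
 * the addition, which makes it a natural slot allocator. next_slot,
 * ring, RING_SIZE and entry are illustrative.
 *
 *	long slot = atomic64_fetch_add(1, &next_slot);
 *	ring[slot % RING_SIZE] = entry;
 */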

#define atomic64_inc_return(v)  (atomic64_add_return(1, (v)))
#define atomic64_dec_return(v)  (atomic64_sub_return(1, (v)))

static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
{
	return cmpxchg(&v->counter, old, new);
}

static inline long atomic64_xchg(atomic64_t *v, long new)
{
	return xchg(&v->counter, new);
}
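
/*
 * Usage sketch (hypothetical): a lock-free "record the maximum" built
 * on the cmpxchg retry loop, plus xchg used to read-and-reset a
 * counter. record_max() and total are illustrative.
 *
 *	static void record_max(atomic64_t *max, long val)
 *	{
 *		long cur = atomic64_read(max);
 *
 *		while (cur < val) {
 *			long old = atomic64_cmpxchg(max, cur, val);
 *			if (old == cur)
 *				break;
 *			cur = old;
 *		}
 *	}
 *
 *	long drained = atomic64_xchg(&total, 0);
 */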

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not @u.
 * Returns true if the addition was performed (i.e. @v was not @u),
 * false otherwise.
 */
static inline bool atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
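
/*
 * Usage sketch (hypothetical): take a reference only while the object
 * is still live; a count of zero means teardown has already begun.
 * obj and refcnt are illustrative.
 *
 *	if (!atomic64_inc_not_zero(&obj->refcnt))
 *		return NULL;
 */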

/**
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static inline long atomic64_dec_if_positive(atomic64_t *v)
{
	long c, old, dec;
	c = atomic64_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic64_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}
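
/*
 * Usage sketch (hypothetical): consume one token only when one is
 * available, semaphore-style. tokens is illustrative.
 *
 *	if (atomic64_dec_if_positive(&tokens) < 0)
 *		return -EAGAIN;
 */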

#define ATOMIC64_OP(op)							\
static inline void atomic64_##op(long i, atomic64_t *v)			\
{									\
	asm volatile(LOCK_PREFIX #op"q %1,%0"				\
			: "+m" (v->counter)				\
			: "er" (i)					\
			: "memory");					\
}

#define ATOMIC64_FETCH_OP(op, c_op)					\
static inline long atomic64_fetch_##op(long i, atomic64_t *v)		\
{									\
	long old, val = atomic64_read(v);				\
	for (;;) {							\
		old = atomic64_cmpxchg(v, val, val c_op i);		\
		if (old == val)						\
			break;						\
		val = old;						\
	}								\
	return old;							\
}

#define ATOMIC64_OPS(op, c_op)						\
	ATOMIC64_OP(op)							\
	ATOMIC64_FETCH_OP(op, c_op)

ATOMIC64_OPS(and, &)
ATOMIC64_OPS(or, |)
ATOMIC64_OPS(xor, ^)
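
/*
 * Usage sketch (hypothetical): the generated helpers treat the counter
 * as a 64-bit atomic flag word. flags, FLAG_DIRTY and writeback() are
 * illustrative; fetch_and() returns the old value, so the test sees
 * whether the flag was set before it was cleared.
 *
 *	atomic64_or(FLAG_DIRTY, &flags);
 *
 *	if (atomic64_fetch_and(~FLAG_DIRTY, &flags) & FLAG_DIRTY)
 *		writeback();
 */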

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP

#endif /* _ASM_X86_ATOMIC64_64_H */