/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC64_64_H
#define _ASM_X86_ATOMIC64_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* The 64-bit atomic type */

#define ATOMIC64_INIT(i)	{ (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
static inline long atomic64_read(const atomic64_t *v)
{
	return READ_ONCE((v)->counter);
}

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic64_set(atomic64_t *v, long i)
{
	WRITE_ONCE(v->counter, i);
}
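
/*
 * Illustrative sketch (hypothetical helper names, not part of this API):
 * resetting and snapshotting a simple statistics counter using only
 * atomic64_set() and atomic64_read(). Neither call implies a memory barrier.
 */
static inline void example_stat_reset(atomic64_t *stat)
{
	atomic64_set(stat, 0);		/* plain atomic store of zero */
}

static inline long example_stat_snapshot(const atomic64_t *stat)
{
	return atomic64_read(stat);	/* plain atomic load, no ordering */
}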

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void atomic64_add(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic64_sub(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}
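
/*
 * Illustrative sketch (hypothetical helper names): tracking in-flight bytes
 * with atomic64_add()/atomic64_sub() when the updated value is not needed.
 */
static inline void example_bytes_queued(atomic64_t *inflight, long bytes)
{
	atomic64_add(bytes, inflight);	/* LOCK ADDQ, no value returned */
}

static inline void example_bytes_done(atomic64_t *inflight, long bytes)
{
	atomic64_sub(bytes, inflight);	/* LOCK SUBQ, no value returned */
}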

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline bool atomic64_sub_and_test(long i, atomic64_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", e);
}
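
/*
 * Illustrative sketch (hypothetical helper name): releasing part of a credit
 * budget and detecting the exact moment it reaches zero via the zero-flag
 * output of LOCK SUBQ, as exposed by atomic64_sub_and_test().
 */
static inline bool example_credits_put(atomic64_t *credits, long n)
{
	/* true only when this subtraction brought the counter to zero */
	return atomic64_sub_and_test(n, credits);
}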

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}
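
/*
 * Illustrative sketch (hypothetical helper names): counting entries into and
 * exits from a code path with atomic64_inc()/atomic64_dec().
 */
static inline void example_path_enter(atomic64_t *nr_active)
{
	atomic64_inc(nr_active);	/* LOCK INCQ */
}

static inline void example_path_exit(atomic64_t *nr_active)
{
	atomic64_dec(nr_active);	/* LOCK DECQ */
}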

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline bool atomic64_dec_and_test(atomic64_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", e);
}
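
/*
 * Illustrative sketch (hypothetical helpers): the classic reference-count
 * "put" pattern, where only the caller that drops the last reference sees
 * atomic64_dec_and_test() return true and is responsible for freeing.
 */
static inline bool example_ref_put(atomic64_t *refcount,
				   void (*release)(atomic64_t *refcount))
{
	if (atomic64_dec_and_test(refcount)) {	/* decremented to zero? */
		release(refcount);		/* last reference is gone */
		return true;
	}
	return false;
}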

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline bool atomic64_inc_and_test(atomic64_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", e);
}

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when the
 * result is greater than or equal to zero.
 */
static inline bool atomic64_add_negative(long i, atomic64_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, "er", i, "%0", s);
}
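
/*
 * Illustrative sketch (hypothetical helper name): charging a cost against a
 * signed balance and reporting overdraw via the sign flag of LOCK ADDQ, as
 * exposed by atomic64_add_negative().
 */
static inline bool example_balance_charge(atomic64_t *balance, long cost)
{
	/* true when the balance ends up below zero after the charge */
	return atomic64_add_negative(-cost, balance);
}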

/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline long atomic64_add_return(long i, atomic64_t *v)
{
	return i + xadd(&v->counter, i);
}

static inline long atomic64_sub_return(long i, atomic64_t *v)
{
	return atomic64_add_return(-i, v);
}

static inline long atomic64_fetch_add(long i, atomic64_t *v)
{
	return xadd(&v->counter, i);
}

static inline long atomic64_fetch_sub(long i, atomic64_t *v)
{
	return xadd(&v->counter, -i);
}
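
/*
 * Illustrative sketch (hypothetical helper names): handing out monotonically
 * increasing IDs. atomic64_fetch_add() returns the value *before* the
 * addition, atomic64_add_return() the value *after* it; both map to XADD.
 */
static inline long example_id_alloc(atomic64_t *next_id)
{
	return atomic64_fetch_add(1, next_id);		/* old value is the new ID */
}

static inline long example_enter_count(atomic64_t *nr_entered)
{
	return atomic64_add_return(1, nr_entered);	/* value after the increment */
}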

#define atomic64_inc_return(v)  (atomic64_add_return(1, (v)))
#define atomic64_dec_return(v)  (atomic64_sub_return(1, (v)))

static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
{
	return cmpxchg(&v->counter, old, new);
}

#define atomic64_try_cmpxchg atomic64_try_cmpxchg
static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, s64 *old, long new)
{
	return try_cmpxchg(&v->counter, old, new);
}

static inline long atomic64_xchg(atomic64_t *v, long new)
{
	return xchg(&v->counter, new);
}
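
/*
 * Illustrative sketch (hypothetical helper name): a lock-free "record the
 * maximum" update using the same atomic64_try_cmpxchg() loop shape as the
 * helpers below; on failure, try_cmpxchg() refreshes @cur with the current
 * counter value, so no explicit re-read is needed.
 */
static inline void example_track_max(atomic64_t *max, long sample)
{
	s64 cur = atomic64_read(max);

	do {
		if (cur >= sample)	/* already at least as large, nothing to do */
			return;
	} while (!atomic64_try_cmpxchg(max, &cur, sample));
}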

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true if the addition was done, false otherwise.
 */
static inline bool atomic64_add_unless(atomic64_t *v, long a, long u)
{
	s64 c = atomic64_read(v);
	do {
		if (unlikely(c == u))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c + a));
	return true;
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
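
/*
 * Illustrative sketch (hypothetical helper name): taking a reference on an
 * object found via a lookup structure, but only if it is not already dead
 * (count of zero), using atomic64_inc_not_zero().
 */
static inline bool example_ref_get_live(atomic64_t *refcount)
{
	/* false means the object is being torn down; do not touch it */
	return atomic64_inc_not_zero(refcount);
}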

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static inline long atomic64_dec_if_positive(atomic64_t *v)
{
	s64 dec, c = atomic64_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, dec));
	return dec;
}
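
/*
 * Illustrative sketch (hypothetical helper name): consuming one token only
 * when one is available. atomic64_dec_if_positive() returns the old value
 * minus one, so a negative result means the counter was left untouched.
 */
static inline bool example_token_take(atomic64_t *tokens)
{
	return atomic64_dec_if_positive(tokens) >= 0;
}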

static inline void atomic64_and(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "andq %1,%0"
			: "+m" (v->counter)
			: "er" (i)
			: "memory");
}

static inline long atomic64_fetch_and(long i, atomic64_t *v)
{
	s64 val = atomic64_read(v);

	do {
	} while (!atomic64_try_cmpxchg(v, &val, val & i));
	return val;
}

static inline void atomic64_or(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "orq %1,%0"
			: "+m" (v->counter)
			: "er" (i)
			: "memory");
}

static inline long atomic64_fetch_or(long i, atomic64_t *v)
{
	s64 val = atomic64_read(v);

	do {
	} while (!atomic64_try_cmpxchg(v, &val, val | i));
	return val;
}

static inline void atomic64_xor(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "xorq %1,%0"
			: "+m" (v->counter)
			: "er" (i)
			: "memory");
}

static inline long atomic64_fetch_xor(long i, atomic64_t *v)
{
	s64 val = atomic64_read(v);

	do {
	} while (!atomic64_try_cmpxchg(v, &val, val ^ i));
	return val;
}
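
/*
 * Illustrative sketch (hypothetical helper names): manipulating a 64-bit
 * flag word. atomic64_or() sets bits without returning anything, while
 * atomic64_fetch_and() clears bits and reports the previous contents.
 */
static inline void example_flags_set(atomic64_t *flags, long mask)
{
	atomic64_or(mask, flags);			/* LOCK ORQ */
}

static inline long example_flags_clear(atomic64_t *flags, long mask)
{
	return atomic64_fetch_and(~mask, flags);	/* value before clearing */
}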

#endif /* _ASM_X86_ATOMIC64_64_H */