/*
 * atomic64_32.h - 32-bit x86 implementation of the 64-bit atomic type.
 */
#ifndef _ASM_X86_ATOMIC64_32_H
#define _ASM_X86_ATOMIC64_32_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/processor.h>
#include <asm/cmpxchg.h>	/* cmpxchg64(), used by atomic64_cmpxchg() */

/* A 64-bit atomic type */

typedef struct {
	/* 8-byte alignment is required by the CMPXCHG8B-based helpers */
	u64 __aligned(8) counter;
} atomic64_t;

#define ATOMIC64_INIT(val)	{ (val) }

/*
 * Dispatch to the out-of-line atomic64 helpers.  When CMPXCHG8B is
 * guaranteed at build time, call the _cx8 helper directly; otherwise
 * use the alternatives mechanism to patch between the generic _386
 * fallback and the _cx8 helper based on the CPU's CX8 feature bit.
 */
#ifdef CONFIG_X86_CMPXCHG64
#define ATOMIC64_ALTERNATIVE_(f, g) "call atomic64_" #g "_cx8"
#else
#define ATOMIC64_ALTERNATIVE_(f, g) ALTERNATIVE("call atomic64_" #f "_386", "call atomic64_" #g "_cx8", X86_FEATURE_CX8)
#endif

/* Common case: the _386 and _cx8 helpers share the same suffix. */
#define ATOMIC64_ALTERNATIVE(f) ATOMIC64_ALTERNATIVE_(f, f)

/**
 * atomic64_cmpxchg - cmpxchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @o: expected value
 * @n: new value
 *
 * Atomically sets @v to @n if it was equal to @o and returns
 * the old value.
 */
static inline long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n)
{
	return cmpxchg64(&v->counter, o, n);
}

/**
 * atomic64_xchg - xchg atomic64 variable
42 43
 * @v: pointer to type atomic64_t
 * @n: value to assign
44
 *
45
 * Atomically xchgs the value of @v to @n and returns
46 47
 * the old value.
 */
48 49 50 51 52 53 54 55 56 57 58 59
static inline long long atomic64_xchg(atomic64_t *v, long long n)
{
	long long o;
	unsigned high = (unsigned)(n >> 32);
	unsigned low = (unsigned)n;
	asm volatile(ATOMIC64_ALTERNATIVE(xchg)
		     : "=A" (o), "+b" (low), "+c" (high)
		     : "S" (v)
		     : "memory"
		     );
	return o;
}
60 61 62

/**
 * atomic64_set - set atomic64 variable
63 64
 * @v: pointer to type atomic64_t
 * @n: value to assign
65
 *
66
 * Atomically sets the value of @v to @n.
67
 */
68 69 70 71 72 73 74 75 76 77
static inline void atomic64_set(atomic64_t *v, long long i)
{
	unsigned high = (unsigned)(i >> 32);
	unsigned low = (unsigned)i;
	asm volatile(ATOMIC64_ALTERNATIVE(set)
		     : "+b" (low), "+c" (high)
		     : "S" (v)
		     : "eax", "edx", "memory"
		     );
}
78 79 80

/**
 * atomic64_read - read atomic64 variable
81
 * @v: pointer to type atomic64_t
82
 *
83
 * Atomically reads the value of @v and returns it.
84
 */
85
static inline long long atomic64_read(atomic64_t *v)
86
{
87 88 89 90 91 92 93
	long long r;
	asm volatile(ATOMIC64_ALTERNATIVE(read)
		     : "=A" (r), "+c" (v)
		     : : "memory"
		     );
	return r;
 }
94 95 96

/**
 * atomic64_add_return - add and return
97 98
 * @i: integer value to add
 * @v: pointer to type atomic64_t
99
 *
100
 * Atomically adds @i to @v and returns @i + *@v
101
 */
102 103 104 105 106 107 108 109
static inline long long atomic64_add_return(long long i, atomic64_t *v)
{
	asm volatile(ATOMIC64_ALTERNATIVE(add_return)
		     : "+A" (i), "+c" (v)
		     : : "memory"
		     );
	return i;
}
110 111 112 113

/*
 * Other variants with different arithmetic operators:
 */
/*
 * atomic64_sub_return - atomically subtract @i from @v and return
 * the resulting value.
 */
static inline long long atomic64_sub_return(long long i, atomic64_t *v)
{
	/* Operand in EDX:EAX ("+A"), pointer in ECX; the helper
	 * overwrites EDX:EAX with the new value of the counter. */
	asm volatile(ATOMIC64_ALTERNATIVE(sub_return)
		     : "+A" (i), "+c" (v)
		     : : "memory"
		     );
	return i;
}

/*
 * atomic64_inc_return - atomically increment @v by 1 and return
 * the resulting value.
 */
static inline long long atomic64_inc_return(atomic64_t *v)
{
	long long a;
	/* Pointer in ESI; result in EDX:EAX ("=A"); helper clobbers ECX. */
	asm volatile(ATOMIC64_ALTERNATIVE(inc_return)
		     : "=A" (a)
		     : "S" (v)
		     : "memory", "ecx"
		     );
	return a;
}

/*
 * atomic64_dec_return - atomically decrement @v by 1 and return
 * the resulting value.
 */
static inline long long atomic64_dec_return(atomic64_t *v)
{
	long long a;
	/* Pointer in ESI; result in EDX:EAX ("=A"); helper clobbers ECX. */
	asm volatile(ATOMIC64_ALTERNATIVE(dec_return)
		     : "=A" (a)
		     : "S" (v)
		     : "memory", "ecx"
		     );
	return a;
}
144 145 146

/**
 * atomic64_add - add integer to atomic64 variable
147 148
 * @i: integer value to add
 * @v: pointer to type atomic64_t
149
 *
150
 * Atomically adds @i to @v.
151
 */
152 153 154 155 156 157 158 159
static inline long long atomic64_add(long long i, atomic64_t *v)
{
	asm volatile(ATOMIC64_ALTERNATIVE_(add, add_return)
		     : "+A" (i), "+c" (v)
		     : : "memory"
		     );
	return i;
}
160 161 162

/**
 * atomic64_sub - subtract the atomic64 variable
163 164
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
165
 *
166
 * Atomically subtracts @i from @v.
167
 */
168 169 170 171 172 173 174 175
static inline long long atomic64_sub(long long i, atomic64_t *v)
{
	asm volatile(ATOMIC64_ALTERNATIVE_(sub, sub_return)
		     : "+A" (i), "+c" (v)
		     : : "memory"
		     );
	return i;
}
176 177 178

/**
 * atomic64_sub_and_test - subtract value from variable and test result
179 180 181 182
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
  *
 * Atomically subtracts @i from @v and returns
183 184 185
 * true if the result is zero, or false for all
 * other cases.
 */
186 187 188 189
static inline int atomic64_sub_and_test(long long i, atomic64_t *v)
{
	return atomic64_sub_return(i, v) == 0;
}
190 191 192

/**
 * atomic64_inc - increment atomic64 variable
193
 * @v: pointer to type atomic64_t
194
 *
195
 * Atomically increments @v by 1.
196
 */
197 198 199 200 201 202 203
static inline void atomic64_inc(atomic64_t *v)
{
	asm volatile(ATOMIC64_ALTERNATIVE_(inc, inc_return)
		     : : "S" (v)
		     : "memory", "eax", "ecx", "edx"
		     );
}
204 205 206 207 208 209 210

/**
 * atomic64_dec - decrement atomic64 variable
 * @ptr: pointer to type atomic64_t
 *
 * Atomically decrements @ptr by 1.
 */
211 212 213 214 215 216 217
static inline void atomic64_dec(atomic64_t *v)
{
	asm volatile(ATOMIC64_ALTERNATIVE_(dec, dec_return)
		     : : "S" (v)
		     : "memory", "eax", "ecx", "edx"
		     );
}
218 219 220

/**
 * atomic64_dec_and_test - decrement and test
221
 * @v: pointer to type atomic64_t
222
 *
223
 * Atomically decrements @v by 1 and
224 225 226
 * returns true if the result is 0, or false for all other
 * cases.
 */
227 228 229 230
static inline int atomic64_dec_and_test(atomic64_t *v)
{
	return atomic64_dec_return(v) == 0;
}
231 232 233

/**
 * atomic64_inc_and_test - increment and test
234
 * @v: pointer to type atomic64_t
235
 *
236
 * Atomically increments @v by 1
237 238 239
 * and returns true if the result is zero, or false for all
 * other cases.
 */
240 241 242 243
static inline int atomic64_inc_and_test(atomic64_t *v)
{
	return atomic64_inc_return(v) == 0;
}
244 245 246

/**
 * atomic64_add_negative - add and test if negative
247 248
 * @i: integer value to add
 * @v: pointer to type atomic64_t
249
 *
250
 * Atomically adds @i to @v and returns true
251 252 253
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
254 255 256 257 258 259 260 261 262 263 264 265
static inline int atomic64_add_negative(long long i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add was performed (@v was not @u),
 * and zero otherwise.
 */
static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
{
	/* @u is passed split across ESI:EDI (low:high); the helper
	 * leaves its success flag in EAX ("+A", truncated by the cast). */
	unsigned low = (unsigned)u;
	unsigned high = (unsigned)(u >> 32);
	asm volatile(ATOMIC64_ALTERNATIVE(add_unless) "\n\t"
		     : "+A" (a), "+c" (v), "+S" (low), "+D" (high)
		     : : "memory");
	return (int)a;
}


/*
 * atomic64_inc_not_zero - increment @v unless it is zero.
 *
 * NOTE(review): the exact return contract lives in the out-of-line
 * helper; presumably non-zero iff the increment was performed —
 * confirm against the atomic64_*_cx8 implementation.
 */
static inline int atomic64_inc_not_zero(atomic64_t *v)
{
	int r;
	/* Pointer in ESI; boolean result in EAX ("=a");
	 * helper clobbers ECX/EDX. */
	asm volatile(ATOMIC64_ALTERNATIVE(inc_not_zero)
		     : "=a" (r)
		     : "S" (v)
		     : "ecx", "edx", "memory"
		     );
	return r;
}

/*
 * atomic64_dec_if_positive - conditionally decrement @v.
 *
 * NOTE(review): the condition and return value are implemented by the
 * out-of-line helper; presumably decrements only while the result stays
 * non-negative and returns the (possibly not stored) decremented value —
 * confirm against the atomic64_*_cx8 implementation.
 */
static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
	long long r;
	/* Pointer in ESI; result in EDX:EAX ("=A"); helper clobbers ECX. */
	asm volatile(ATOMIC64_ALTERNATIVE(dec_if_positive)
		     : "=A" (r)
		     : "S" (v)
		     : "ecx", "memory"
		     );
	return r;
}

#undef ATOMIC64_ALTERNATIVE
#undef ATOMIC64_ALTERNATIVE_

#endif /* _ASM_X86_ATOMIC64_32_H */