#ifndef _ASM_X86_CMPXCHG_64_H
#define _ASM_X86_CMPXCHG_64_H

#include <asm/alternative.h> /* Provides LOCK_PREFIX */

static inline void set_64bit(volatile u64 *ptr, u64 val)
{
	*ptr = val;
}
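
/*
 * No special handling is needed above: on x86-64 a naturally aligned
 * 64-bit store is atomic, so set_64bit() can use a plain volatile
 * store.  The 32-bit implementation, by contrast, has to emulate it
 * with cmpxchg8b.
 */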

extern void __xchg_wrong_size(void);
extern void __cmpxchg_wrong_size(void);

/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
 * Since this is generally used to protect other memory information, we
 * use "asm volatile" and "memory" clobbers to prevent gcc from moving
 * information around.
 */
#define __xchg(x, ptr, size)						\
({									\
	__typeof(*(ptr)) __x = (x);					\
	switch (size) {							\
	case 1:								\
	{								\
		volatile u8 *__ptr = (volatile u8 *)(ptr);		\
		asm volatile("xchgb %0,%1"				\
			     : "=q" (__x), "+m" (*__ptr)		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	}								\
	case 2:								\
	{								\
		volatile u16 *__ptr = (volatile u16 *)(ptr);		\
		asm volatile("xchgw %0,%1"				\
			     : "=r" (__x), "+m" (*__ptr)		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	}								\
	case 4:								\
	{								\
		volatile u32 *__ptr = (volatile u32 *)(ptr);		\
		asm volatile("xchgl %0,%1"				\
			     : "=r" (__x), "+m" (*__ptr)		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	}								\
	case 8:								\
	{								\
		volatile u64 *__ptr = (volatile u64 *)(ptr);		\
		asm volatile("xchgq %0,%1"				\
			     : "=r" (__x), "+m" (*__ptr)		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	}								\
	default:							\
		__xchg_wrong_size();					\
	}								\
	__x;								\
})

#define xchg(ptr, v)							\
	__xchg((v), (ptr), sizeof(*(ptr)))
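
/*
 * Usage sketch (illustrative helper, not part of this header's API):
 * publish a new value and observe the previous one in a single atomic
 * step, e.g. the core of a simple lock handoff.
 */
static inline u64 __xchg_sketch_u64(u64 *p, u64 val)
{
	return xchg(p, val);
}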

#define __HAVE_ARCH_CMPXCHG 1

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
#define __raw_cmpxchg(ptr, old, new, size, lock)			\
({									\
	__typeof__(*(ptr)) __ret;					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	switch (size) {							\
	case 1:								\
	{								\
		volatile u8 *__ptr = (volatile u8 *)(ptr);		\
		asm volatile(lock "cmpxchgb %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "q" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case 2:								\
	{								\
		volatile u16 *__ptr = (volatile u16 *)(ptr);		\
		asm volatile(lock "cmpxchgw %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case 4:								\
	{								\
		volatile u32 *__ptr = (volatile u32 *)(ptr);		\
		asm volatile(lock "cmpxchgl %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case 8:								\
	{								\
		volatile u64 *__ptr = (volatile u64 *)(ptr);		\
		asm volatile(lock "cmpxchgq %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	default:							\
		__cmpxchg_wrong_size();					\
	}								\
	__ret;								\
})

#define __cmpxchg(ptr, old, new, size)					\
	__raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

#define __sync_cmpxchg(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "lock; ")

#define __cmpxchg_local(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "")

#define cmpxchg(ptr, old, new)						\
	__cmpxchg((ptr), (old), (new), sizeof(*(ptr)))

#define sync_cmpxchg(ptr, old, new)					\
	__sync_cmpxchg((ptr), (old), (new), sizeof(*(ptr)))

#define cmpxchg_local(ptr, old, new)					\
	__cmpxchg_local((ptr), (old), (new), sizeof(*(ptr)))
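
/*
 * Usage sketch (illustrative helper, not part of this header's API):
 * the canonical cmpxchg() retry loop, here adding a value atomically.
 * Re-reading *p and retrying covers the window in which another CPU
 * may have changed the location between the load and the cmpxchg().
 */
static inline u64 __cmpxchg_add_sketch(u64 *p, u64 delta)
{
	u64 old, new;

	do {
		old = *p;
		new = old + delta;
	} while (cmpxchg(p, old, new) != old);

	return new;
}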

#define cmpxchg64(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
})

#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})
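
/*
 * On 64-bit, cmpxchg64()/cmpxchg64_local() are plain aliases for
 * cmpxchg()/cmpxchg_local(); the BUILD_BUG_ON() only enforces that the
 * operand really is 8 bytes wide.  The separate names exist for parity
 * with 32-bit, where an 8-byte cmpxchg needs cmpxchg8b.
 */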

/*
 * Atomically compare the 16-byte pair at ptr with rdx:rax
 * (__old2:__old1) and, if they match, store rcx:rbx (__new2:__new1).
 * setz converts ZF into the return value, so the macro yields nonzero
 * on success.  The pointer must be 16-byte aligned and the CPU must
 * support the cx16 feature.  A "memory" clobber is needed because only
 * the low word of the pair is an explicit memory operand.
 */
#define cmpxchg16b(ptr, o1, o2, n1, n2)				\
({								\
	char __ret;						\
	__typeof__(o2) __junk;					\
	__typeof__(*(ptr)) __old1 = (o1);			\
	__typeof__(o2) __old2 = (o2);				\
	__typeof__(*(ptr)) __new1 = (n1);			\
	__typeof__(o2) __new2 = (n2);				\
	asm volatile(LOCK_PREFIX "cmpxchg16b %2;setz %1"	\
		       : "=d"(__junk), "=a"(__ret), "+m" (*ptr)	\
		       : "b"(__new1), "c"(__new2),		\
		         "a"(__old1), "d"(__old2)		\
		       : "memory");				\
	__ret; })


/*
 * Like cmpxchg16b(), but without the lock prefix: only safe against
 * concurrency from the local CPU (e.g. interrupts), not against other
 * processors.
 */
#define cmpxchg16b_local(ptr, o1, o2, n1, n2)			\
({								\
	char __ret;						\
	__typeof__(o2) __junk;					\
	__typeof__(*(ptr)) __old1 = (o1);			\
	__typeof__(o2) __old2 = (o2);				\
	__typeof__(*(ptr)) __new1 = (n1);			\
	__typeof__(o2) __new2 = (n2);				\
	asm volatile("cmpxchg16b %2;setz %1"			\
		       : "=d"(__junk), "=a"(__ret), "+m" (*ptr)	\
		       : "b"(__new1), "c"(__new2),		\
		         "a"(__old1), "d"(__old2)		\
		       : "memory");				\
	__ret; })

#define cmpxchg_double(ptr, o1, o2, n1, n2)				\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	VM_BUG_ON((unsigned long)(ptr) % 16);				\
	cmpxchg16b((ptr), (o1), (o2), (n1), (n2));			\
})

#define cmpxchg_double_local(ptr, o1, o2, n1, n2)			\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	VM_BUG_ON((unsigned long)(ptr) % 16);				\
	cmpxchg16b_local((ptr), (o1), (o2), (n1), (n2));		\
})

#define system_has_cmpxchg_double() cpu_has_cx16
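
/*
 * Usage sketch (illustrative only; the struct and variable names are
 * hypothetical, not kernel API): a pointer paired with a sequence
 * counter that is bumped on every update, so a recycled pointer value
 * cannot be mistaken for an unchanged one (the ABA problem):
 *
 *	struct tagged_ptr {
 *		void *ptr;
 *		unsigned long seq;
 *	} __attribute__((aligned(16)));
 *
 *	success = cmpxchg_double(&tp->ptr, old_ptr, old_seq,
 *				 new_ptr, old_seq + 1);
 *
 * A nonzero return means both words were replaced atomically.
 */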

#endif /* _ASM_X86_CMPXCHG_64_H */