/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/irqflags.h>
#include <asm/compiler.h>
#include <asm/war.h>

/*
 * Using a branch-likely instruction to check the result of an sc instruction
 * works around a bug present in R10000 CPUs prior to revision 3.0 that could
 * cause ll-sc sequences to execute non-atomically.
 *
 * __scbeqz expands to the "branch back and retry if sc failed" mnemonic
 * used by the ll/sc loops below: beqzl (branch-likely) on affected
 * R10000 parts, plain beqz everywhere else.
 */
#if R10000_LLSC_WAR
# define __scbeqz "beqzl"
#else
# define __scbeqz "beqz"
#endif

/*
 * These functions doesn't exist, so if they are called you'll either:
 *
 * - Get an error at compile-time due to __compiletime_error, if supported by
 *   your compiler.
 *
 * or:
 *
 * - Get an error at link-time due to the call to the missing function.
 */
37
extern unsigned long __cmpxchg_called_with_bad_pointer(void)
38 39 40 41
	__compiletime_error("Bad argument size for cmpxchg");
extern unsigned long __xchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for xchg");

/*
 * __xchg_asm - atomically exchange the word at @m for @val.
 *
 * @ld/@st: load-linked/store-conditional mnemonics ("ll"/"sc" for 32-bit,
 *          "lld"/"scd" for 64-bit objects).
 * @m:      pointer to the word to exchange.
 * @val:    new value to store.
 *
 * Evaluates to the previous value of *@m.
 *
 * With ll/sc support this is a retry loop: load-linked the old value
 * into %0, move the new value into $1 (hence .set noat), then
 * store-conditional $1; __scbeqz branches back to 1b if the sc failed.
 * The .set mips0 / .set MIPS_ISA_ARCH_LEVEL bracketing around the move
 * switches ISA level for just that instruction.
 *
 * Without ll/sc the exchange is done under raw_local_irq_save(), which
 * blocks local interrupts only — presumably sufficient because such
 * CPUs are uniprocessor-only; NOTE(review): confirm that assumption.
 */
#define __xchg_asm(ld, st, m, val)					\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"1:	" ld "	%0, %2		# __xchg_asm	\n"	\
		"	.set	mips0				\n"	\
		"	move	$1, %z3				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)			\
		: "memory");						\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		*m = val;						\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})

static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	switch (size) {
	case 4:
77 78
		return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);

79
	case 8:
80 81 82 83 84
		if (!IS_ENABLED(CONFIG_64BIT))
			return __xchg_called_with_bad_pointer();

		return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);

85 86
	default:
		return __xchg_called_with_bad_pointer();
87 88 89 90 91
	}
}

/*
 * xchg - atomically exchange *(ptr) for @x with full barrier semantics.
 *
 * The BUILD_BUG_ON is a cheap compile-time size filter (it rejects any
 * sizeof with bits outside 0xc); __xchg() then rejects everything but
 * 4 and 8 bytes.  The smp_mb__before_llsc()/smp_llsc_mb() pair orders
 * the exchange as a full memory barrier on SMP.
 *
 * Evaluates to the previous value of *(ptr).
 */
#define xchg(ptr, x)							\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	BUILD_BUG_ON(sizeof(*(ptr)) & ~0xc);				\
									\
	smp_mb__before_llsc();						\
									\
	__res = (__typeof__(*(ptr)))					\
		__xchg((unsigned long)(x), (ptr), sizeof(*(ptr)));	\
									\
	smp_llsc_mb();							\
									\
	__res;								\
})

/*
 * __cmpxchg_asm - atomically compare-and-exchange the word at @m.
 *
 * @ld/@st: load-linked/store-conditional mnemonics ("ll"/"sc" for 32-bit,
 *          "lld"/"scd" for 64-bit objects).
 * @m:      pointer to the word to operate on.
 * @old:    expected current value.
 * @new:    value to store if *@m == @old.
 *
 * Evaluates to the value of *@m before the operation; the store
 * happened iff that value equals @old.
 *
 * With ll/sc support: load-linked into %0, bail out to 2f if it does
 * not match @old, otherwise move @new into $1 (hence .set noat) and
 * store-conditional; __scbeqz retries from 1b if the sc failed.
 * Without ll/sc, the compare-and-store runs under
 * raw_local_irq_save() — presumably sufficient because such CPUs are
 * uniprocessor-only; NOTE(review): confirm that assumption.
 */
#define __cmpxchg_asm(ld, st, m, old, new)				\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm \n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	mips0				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		"2:						\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)	\
		: "memory");						\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		if (__ret == old)					\
			*m = new;					\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})

/*
 * __cmpxchg - size-dispatch helper behind cmpxchg()/cmpxchg_local().
 *
 * Routes a @size-byte compare-and-exchange at @ptr to the matching
 * ll/sc variant.  Only 4- and 8-byte objects are supported, and the
 * 8-byte form needs CONFIG_64BIT; every other case falls through to
 * __cmpxchg_called_with_bad_pointer(), failing the build at compile
 * or link time.
 */
static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, unsigned int size)
{
	if (size == 4)
		return __cmpxchg_asm("ll", "sc", (volatile u32 *)ptr, old, new);

	/* lld/scd are only available for MIPS64 */
	if (size == 8 && IS_ENABLED(CONFIG_64BIT))
		return __cmpxchg_asm("lld", "scd", (volatile u64 *)ptr,
				     old, new);

	return __cmpxchg_called_with_bad_pointer();
}

/*
 * cmpxchg_local - compare and exchange *(ptr) without SMP memory
 * barriers: if *(ptr) == (old), store (new).  Evaluates to the value
 * that was in *(ptr) before the operation, cast back to the pointee
 * type.  (old) and (new) are first cast through the pointee type so
 * they are truncated/extended consistently with what is in memory.
 */
#define cmpxchg_local(ptr, old, new)					\
	((__typeof__(*(ptr)))						\
		__cmpxchg((ptr),					\
			  (unsigned long)(__typeof__(*(ptr)))(old),	\
			  (unsigned long)(__typeof__(*(ptr)))(new),	\
			  sizeof(*(ptr))))

/*
 * cmpxchg - compare and exchange *(ptr) with full barrier semantics.
 *
 * Wraps cmpxchg_local() in the smp_mb__before_llsc()/smp_llsc_mb()
 * pair so the operation acts as a full memory barrier on SMP.
 * Evaluates to the value that was in *(ptr) before the operation.
 */
#define cmpxchg(ptr, old, new)						\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();						\
	__res = cmpxchg_local((ptr), (old), (new));			\
	smp_llsc_mb();							\
									\
	__res;								\
})

/*
 * 64-bit compare-and-exchange.  On 64-bit kernels these are the native
 * cmpxchg()/cmpxchg_local() with a compile-time check that the object
 * really is 8 bytes.  32-bit kernels have no lld/scd, so they fall
 * back to the interrupt-disabling generic implementation; note that
 * on 32-bit, cmpxchg64() maps to the local (barrier-free) variant.
 */
#ifdef CONFIG_64BIT
#define cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
  })

#define cmpxchg64(ptr, o, n)						\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
  })
#else
#include <asm-generic/cmpxchg-local.h>
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
#endif

#undef __scbeqz

#endif /* __ASM_CMPXCHG_H */