• M
    [ARM] 5218/1: arm: improved futex support · e589ed23
    Committed by Mikael Pettersson
    Linux/ARM currently doesn't support robust or PI futexes.
    The problem is that the kernel wants to perform certain ops
    (cmpxchg, set, add, or, andn, xor) atomically on user-space
    addresses, and ARM's futex.h doesn't support that.
    
    This patch adds that support, but only for uniprocessor machines.
    For UP it's enough to disable preemption to ensure mutual exclusion
    with other software agents (futexes don't need to care about other
    hardware agents, fortunately).
    
    This patch is based on one posted by Khem Raj on 2007-08-01
    <http://marc.info/?l=linux-arm-kernel&m=118599407413016&w=2>.
    (That patch is included in the -RT kernel patches.)
    My changes since that version include:
    * corrected implementation of FUTEX_OP_ANDN (must complement oparg)
    * added missing memory clobber to futex_atomic_cmpxchg_inatomic()
    * removed spinlock because it's unnecessary for UP and insufficient
      for SMP, instead the code is restricted to UP and relies on the
      fact that pagefault_disable() also disables preemption
    * coding style cleanups
    
    Tested on ARMv5 XScales with the glibc-2.6 nptl test suite.
    Tested-by: Bruce Ashfield <bruce.ashfield@windriver.com>
    Signed-off-by: Mikael Pettersson <mikpe@it.uu.se>
    Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
    e589ed23
futex.h 2.9 KB
#ifndef _ASM_ARM_FUTEX_H
#define _ASM_ARM_FUTEX_H

#ifdef __KERNEL__

#ifdef CONFIG_SMP

#include <asm-generic/futex.h>

#else /* !SMP, we can work around lack of atomic ops by disabling preemption */

#include <linux/futex.h>
#include <linux/preempt.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

/*
 * Perform a single read-modify-write op on the user-space word at @uaddr.
 *
 * Operand mapping: %0 = ret (also scratch for the new value), %1 = oldval,
 * %2 = uaddr, %3 = oparg, %4 = -EFAULT.  @insn computes the new value into
 * %0 from the loaded old value (%1) and oparg (%3).
 *
 * ldrt/strt perform the access with user-mode ("translated") permissions,
 * so a bad user address faults rather than corrupting kernel memory.  The
 * __ex_table entries map a fault at label 1 or 2 to the fixup at label 4,
 * which returns -EFAULT in ret.  On success ret is set to 0.
 *
 * NOTE(review): this sequence is not atomic against other CPUs; it is only
 * used in the !SMP branch of this file, where pagefault_disable() (which
 * implies preempt_disable()) at the call sites provides mutual exclusion.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
	__asm__ __volatile__(					\
	"1:	ldrt	%1, [%2]\n"				\
	"	" insn "\n"					\
	"2:	strt	%0, [%2]\n"				\
	"	mov	%0, #0\n"				\
	"3:\n"							\
	"	.section __ex_table,\"a\"\n"			\
	"	.align	3\n"					\
	"	.long	1b, 4f, 2b, 4f\n"			\
	"	.previous\n"					\
	"	.section .fixup,\"ax\"\n"			\
	"4:	mov	%0, %4\n"				\
	"	b	3b\n"					\
	"	.previous"					\
	: "=&r" (ret), "=&r" (oldval)				\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
	: "cc", "memory")

/*
 * Atomically apply the operation packed into @encoded_op to the user word
 * at @uaddr, then compare the *old* value against the encoded comparison.
 *
 * encoded_op layout (as decoded below): bits 28-31 = op, bits 24-27 = cmp,
 * bits 12-23 = oparg (sign-extended), bits 0-11 = cmparg (sign-extended).
 *
 * Returns the boolean result of the comparison (0 or 1), or a negative
 * errno: -EFAULT on a bad user address, -ENOSYS for an unknown op/cmp.
 */
static inline int
futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	/* Shift-left-then-arithmetic-right sign-extends the 12-bit fields. */
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret;

	/* FUTEX_OP_OPARG_SHIFT: treat oparg as a shift count, i.e. use 1<<oparg. */
	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

	/*
	 * On UP, disabling preemption is sufficient to make the
	 * load/modify/store in __futex_atomic_op effectively atomic.
	 */
	pagefault_disable();	/* implies preempt_disable() */

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov	%0, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add	%0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("orr	%0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		/* ANDN means old & ~oparg; complement here since "and" is used. */
		__futex_atomic_op("and	%0, %1, %3", ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("eor	%0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();	/* subsumes preempt_enable() */

	/* On success, replace ret with the comparison of the pre-op value. */
	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
		default: ret = -ENOSYS;
		}
	}
	return ret;
}

/*
 * Compare-and-exchange on the user word at @uaddr: if *uaddr == @oldval,
 * store @newval.  Returns the value read from @uaddr (the caller detects
 * success by comparing it against @oldval), or -EFAULT on a bad address.
 *
 * Operand mapping: %0 = val, %1 = oldval, %2 = newval, %3 = uaddr,
 * %4 = -EFAULT.  ldrt/strt access memory with user-mode permissions;
 * "streqt" is the store executed only when the preceding teq matched.
 * A fault at label 1 or 2 jumps (via __ex_table) to the fixup at 4,
 * which overwrites val with -EFAULT.
 *
 * UP-only: atomicity comes from pagefault_disable()'s implied
 * preempt_disable(), not from any hardware exclusive access.
 */
static inline int
futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
{
	int val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

	pagefault_disable();	/* implies preempt_disable() */

	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1:	ldrt	%0, [%3]\n"
	"	teq	%0, %1\n"
	"2:	streqt	%2, [%3]\n"
	"3:\n"
	"	.section __ex_table,\"a\"\n"
	"	.align	3\n"
	"	.long	1b, 4f, 2b, 4f\n"
	"	.previous\n"
	"	.section .fixup,\"ax\"\n"
	"4:	mov	%0, %4\n"
	"	b	3b\n"
	"	.previous"
	: "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");	/* "memory" clobber: the store must not be cached/reordered */

	pagefault_enable();	/* subsumes preempt_enable() */

	return val;
}

#endif /* !SMP */

#endif /* __KERNEL__ */
#endif /* _ASM_ARM_FUTEX_H */
反馈
建议
客服 返回
顶部