Commit 8ee5797a authored by Harvey Harrison, committed by Ingo Molnar

x86: introduce asm helpers in local_{32|64}.h

Handle the use of long (l-suffixed, 32-bit) instructions on X86_32 and quad (q-suffixed, 64-bit) instructions on X86_64.
Signed-off-by: Harvey Harrison <harvey.harrison@gmail.com>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Parent 992b9592
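
The first two hunks below extend the asm.h helper header (the one guarded by _ASM_X86_ASM_H) with width-agnostic mnemonic macros; the remaining hunks rewrite local_32.h and local_64.h against them. Each macro expands to a plain string literal, so at a use site it simply concatenates with the operand template that follows it. A minimal sketch of the mechanism (not part of the patch; sketch_inc() is a made-up stand-in for the real local_* functions):

#ifdef CONFIG_X86_32
# define _ASM_INC " incl "	/* 32-bit: operate on a 4-byte long */
#else
# define _ASM_INC " incq "	/* 64-bit: operate on an 8-byte long */
#endif

static inline void sketch_inc(long *v)
{
	/* One source line; _ASM_INC "%0" concatenates to " incl %0" or " incq %0". */
	__asm__ __volatile__(_ASM_INC "%0" : "+m" (*v));
}

The leading and trailing spaces inside the macro bodies keep the concatenated assembly well-formed without any extra spacing at the call sites.
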
@@ -8,6 +8,12 @@
 # define _ASM_ALIGN " .balign 4 "
 # define _ASM_MOV_UL " movl "
+# define _ASM_INC " incl "
+# define _ASM_DEC " decl "
+# define _ASM_ADD " addl "
+# define _ASM_SUB " subl "
+# define _ASM_XADD " xaddl "
 #else
 /* 64 bits */
@@ -15,6 +21,12 @@
 # define _ASM_ALIGN " .balign 8 "
 # define _ASM_MOV_UL " movq "
+# define _ASM_INC " incq "
+# define _ASM_DEC " decq "
+# define _ASM_ADD " addq "
+# define _ASM_SUB " subq "
+# define _ASM_XADD " xaddq "
 #endif /* CONFIG_X86_32 */
 #endif /* _ASM_X86_ASM_H */
@@ -4,21 +4,21 @@
 static inline void local_inc(local_t *l)
 {
 	__asm__ __volatile__(
-		"incl %0"
+		_ASM_INC "%0"
 		:"+m" (l->a.counter));
 }
 static inline void local_dec(local_t *l)
 {
 	__asm__ __volatile__(
-		"decl %0"
+		_ASM_DEC "%0"
 		:"+m" (l->a.counter));
 }
 static inline void local_add(long i, local_t *l)
 {
 	__asm__ __volatile__(
-		"addl %1,%0"
+		_ASM_ADD "%1,%0"
 		:"+m" (l->a.counter)
 		:"ir" (i));
 }
@@ -26,7 +26,7 @@ static inline void local_add(long i, local_t *l)
 static inline void local_sub(long i, local_t *l)
 {
 	__asm__ __volatile__(
-		"subl %1,%0"
+		_ASM_SUB "%1,%0"
 		:"+m" (l->a.counter)
 		:"ir" (i));
 }
@@ -45,7 +45,7 @@ static inline int local_sub_and_test(long i, local_t *l)
 	unsigned char c;
 	__asm__ __volatile__(
-		"subl %2,%0; sete %1"
+		_ASM_SUB "%2,%0; sete %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		:"ir" (i) : "memory");
 	return c;
@@ -64,7 +64,7 @@ static inline int local_dec_and_test(local_t *l)
 	unsigned char c;
 	__asm__ __volatile__(
-		"decl %0; sete %1"
+		_ASM_DEC "%0; sete %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		: : "memory");
 	return c != 0;
@@ -83,7 +83,7 @@ static inline int local_inc_and_test(local_t *l)
 	unsigned char c;
 	__asm__ __volatile__(
-		"incl %0; sete %1"
+		_ASM_INC "%0; sete %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		: : "memory");
 	return c != 0;
@@ -103,7 +103,7 @@ static inline int local_add_negative(long i, local_t *l)
 	unsigned char c;
 	__asm__ __volatile__(
-		"addl %2,%0; sets %1"
+		_ASM_ADD "%2,%0; sets %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		:"ir" (i) : "memory");
 	return c;
@@ -127,7 +127,7 @@ static inline long local_add_return(long i, local_t *l)
 	/* Modern 486+ processor */
 	__i = i;
 	__asm__ __volatile__(
-		"xaddl %0, %1;"
+		_ASM_XADD "%0, %1;"
 		:"+r" (i), "+m" (l->a.counter)
 		: : "memory");
 	return i + __i;
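
The hunks that follow apply the same conversion to the 64-bit header, local_64.h, where the hard-coded q-suffixed mnemonics become the same _ASM_* helpers. Two idioms recur in both files: an arithmetic instruction followed by sete/sets captures the zero or sign flag into a byte register, so the ..._and_test() and ..._negative() helpers can return it directly; and xadd hands the old memory value back in the register operand while adding, which is why local_add_return() ends in return i + __i. A small C model of those semantics (illustrative only, not the kernel implementation; the model_* names are made up):

/* What xadd does, expressed in C: add to memory, hand back the old value. */
static long model_xadd(long *mem, long reg)
{
	long old = *mem;
	*mem = old + reg;
	return old;			/* the register operand receives the old value */
}

/* sub + sete: perform the subtraction, then report whether the result hit zero. */
static int model_sub_and_test(long i, long *mem)
{
	*mem -= i;
	return *mem == 0;		/* sete stores 1 if the zero flag is set */
}

/* xadd-based add-and-return: old value + addend == the new counter value. */
static long model_add_return(long i, long *mem)
{
	long old = model_xadd(mem, i);	/* after xadd, the register holds the old value */
	return old + i;			/* mirrors "return i + __i" in the patch */
}
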
@@ -4,21 +4,21 @@
 static inline void local_inc(local_t *l)
 {
 	__asm__ __volatile__(
-		"incq %0"
+		_ASM_INC "%0"
 		:"+m" (l->a.counter));
 }
 static inline void local_dec(local_t *l)
 {
 	__asm__ __volatile__(
-		"decq %0"
+		_ASM_DEC "%0"
 		:"+m" (l->a.counter));
 }
 static inline void local_add(long i, local_t *l)
 {
 	__asm__ __volatile__(
-		"addq %1,%0"
+		_ASM_ADD "%1,%0"
 		:"+m" (l->a.counter)
 		:"ir" (i));
 }
@@ -26,7 +26,7 @@ static inline void local_add(long i, local_t *l)
 static inline void local_sub(long i, local_t *l)
 {
 	__asm__ __volatile__(
-		"subq %1,%0"
+		_ASM_SUB "%1,%0"
 		:"+m" (l->a.counter)
 		:"ir" (i));
 }
@@ -45,7 +45,7 @@ static inline int local_sub_and_test(long i, local_t *l)
 	unsigned char c;
 	__asm__ __volatile__(
-		"subq %2,%0; sete %1"
+		_ASM_SUB "%2,%0; sete %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		:"ir" (i) : "memory");
 	return c;
@@ -64,7 +64,7 @@ static inline int local_dec_and_test(local_t *l)
 	unsigned char c;
 	__asm__ __volatile__(
-		"decq %0; sete %1"
+		_ASM_DEC "%0; sete %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		: : "memory");
 	return c != 0;
@@ -83,7 +83,7 @@ static inline int local_inc_and_test(local_t *l)
 	unsigned char c;
 	__asm__ __volatile__(
-		"incq %0; sete %1"
+		_ASM_INC "%0; sete %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		: : "memory");
 	return c != 0;
@@ -103,7 +103,7 @@ static inline int local_add_negative(long i, local_t *l)
 	unsigned char c;
 	__asm__ __volatile__(
-		"addq %2,%0; sets %1"
+		_ASM_ADD "%2,%0; sets %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		:"ir" (i) : "memory");
 	return c;
@@ -120,7 +120,7 @@ static inline long local_add_return(long i, local_t *l)
 {
 	long __i = i;
 	__asm__ __volatile__(
-		"xaddq %0, %1;"
+		_ASM_XADD "%0, %1;"
 		:"+r" (i), "+m" (l->a.counter)
 		: : "memory");
 	return i + __i;
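
With both headers expressed through the _ASM_* helpers, the 32-bit and 64-bit function bodies are now textually identical, so local_32.h and local_64.h could later be folded into a single header with no per-width #ifdefs in the asm bodies. For context, a hedged usage sketch of the local_t API these files implement: local_t is meant for counters that are only updated by the owning CPU, which is why the operations can omit the lock prefix yet stay safe against interrupts on that CPU. The counter name and functions below are made up for illustration; DEFINE_PER_CPU, get_cpu_var/put_cpu_var, and local_read() come from the surrounding kernel headers rather than from this patch.

#include <linux/percpu.h>
#include <asm/local.h>

static DEFINE_PER_CPU(local_t, nr_demo_events);

void demo_count_event(void)
{
	/* Cheap increment, safe against interrupts on this CPU (no lock prefix). */
	local_inc(&get_cpu_var(nr_demo_events));
	put_cpu_var(nr_demo_events);
}

long demo_read_events(void)
{
	long v = local_read(&get_cpu_var(nr_demo_events));
	put_cpu_var(nr_demo_events);
	return v;
}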