Commit 0b3d643f authored by Helge Deller, committed by Kyle McMartin

[PARISC] add ASM_EXCEPTIONTABLE_ENTRY() macro

- this macro unifies the code to add exception table entries
- additionally use ENTRY()/ENDPROC() at more places
Signed-off-by: Helge Deller <deller@gmx.de>
Signed-off-by: Kyle McMartin <kyle@parisc-linux.org>
Parent 8e9e9844
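Editor's note: the macro introduced by this patch simply wraps the `.section __ex_table` / `.previous` boilerplate around one fault-address/fixup-address pair, with ASM_WORD_INSN selecting `.dword` or `.word` once according to CONFIG_64BIT (see the asm/bug.h and asm/uaccess.h hunks below). A minimal sketch of what the conversion means for one call site — the `_old`/`_new` suffixes are illustrative only and not part of the patch:

```c
/* Definitions as introduced by this patch (asm/bug.h and asm/uaccess.h).
 * ASM_WORD_INSN picks the exception-table entry width in one place. */
#ifdef CONFIG_64BIT
#define ASM_WORD_INSN		".dword\t"
#else
#define ASM_WORD_INSN		".word\t"
#endif

#define ASM_EXCEPTIONTABLE_ENTRY(fault_addr, except_addr)	\
	".section __ex_table,\"aw\"\n"				\
	ASM_WORD_INSN #fault_addr ", " #except_addr "\n\t"	\
	".previous\n"

/* Before: each call site open-coded the __ex_table entry and its width.
 * (__gu_val, __gu_err and fixup_get_user_skip_1 come from the surrounding
 * kernel code; "_old"/"_new" names are illustrative only.) */
#define __get_user_asm_old(ldx, ptr)			\
	__asm__("\n1:\t" ldx "\t0(%%sr3,%2),%0\n"	\
		"\t.section __ex_table,\"aw\"\n"	\
		"\t" ASM_WORD_INSN			\
		"1b,fixup_get_user_skip_1\n"		\
		"\t.previous"				\
		: "=r"(__gu_val), "=r"(__gu_err)	\
		: "r"(ptr), "1"(__gu_err)		\
		: "r1");

/* After: the boilerplate collapses to one macro invocation. */
#define __get_user_asm_new(ldx, ptr)			\
	__asm__("\n1:\t" ldx "\t0(%%sr3,%2),%0\n\t"	\
		ASM_EXCEPTIONTABLE_ENTRY(1b, fixup_get_user_skip_1)\
		: "=r"(__gu_val), "=r"(__gu_err)	\
		: "r"(ptr), "1"(__gu_err)		\
		: "r1");
```

Both forms emit the same exception-table entry; the macro just removes the per-site #ifdef on the entry width.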
@@ -27,20 +27,11 @@
  */
 #ifdef CONFIG_64BIT
-#define ADDIB	addib,*
-#define CMPB	cmpb,*
-#define ANDCM	andcm,*
 	.level	2.0w
 #else
-#define ADDIB	addib,
-#define CMPB	cmpb,
-#define ANDCM	andcm
 	.level	2.0
 #endif
 #include <asm/psw.h>
 #include <asm/assembly.h>
 #include <asm/pgtable.h>
...
@@ -23,19 +23,7 @@
  */
 #define KILL_INSN	break	0,0
-#ifdef CONFIG_64BIT
-	.level	2.0w
-#else
-	.level	1.1
-#endif
-
-/* on 64bit pad to 64bit values */
-#ifdef CONFIG_64BIT
-#define ULONG_WORD(x)	.word 0, x
-#else
-#define ULONG_WORD(x)	.word x
-#endif
+	.level	LEVEL
 	.text
@@ -603,16 +591,10 @@ cas_action:
 	   the other for the store. Either return -EFAULT.
 	   Each of the entries must be relocated.  */
 	.section __ex_table,"aw"
-	ULONG_WORD(2b - linux_gateway_page)
-	ULONG_WORD(3b - linux_gateway_page)
-	.previous
-
-	.section __ex_table,"aw"
-	ULONG_WORD(1b - linux_gateway_page)
-	ULONG_WORD(3b - linux_gateway_page)
+	ASM_ULONG_INSN (1b - linux_gateway_page), (3b - linux_gateway_page)
+	ASM_ULONG_INSN (2b - linux_gateway_page), (3b - linux_gateway_page)
 	.previous
-end_compare_and_swap:
 	/* Make sure nothing else is placed on this page */
 	.align ASM_PAGE_SIZE

@@ -622,7 +604,7 @@ ENTRY(end_linux_gateway_page)
 	/* Relocate symbols assuming linux_gateway_page is mapped
 	   to virtual address 0x0 */
-#define LWS_ENTRY(_name_) ULONG_WORD(lws_##_name_ - linux_gateway_page)
+#define LWS_ENTRY(_name_) ASM_ULONG_INSN (lws_##_name_ - linux_gateway_page)
 	.section .rodata,"a"
...
@@ -35,7 +35,7 @@
 #define DPRINTF(fmt, args...)
 #endif
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 #define RFMT "%016lx"
 #else
 #define RFMT "%08lx"

@@ -150,15 +150,8 @@ static int emulate_ldh(struct pt_regs *regs, int toreg)
 "4:	ldi	-2, %1\n"
 	FIXUP_BRANCH(3b)
 "	.previous\n"
-"	.section __ex_table,\"aw\"\n"
-#ifdef __LP64__
-"	.dword	1b,4b\n"
-"	.dword	2b,4b\n"
-#else
-"	.word	1b,4b\n"
-"	.word	2b,4b\n"
-#endif
-"	.previous\n"
+	ASM_EXCEPTIONTABLE_ENTRY(1b, 4b)
+	ASM_EXCEPTIONTABLE_ENTRY(2b, 4b)
 	: "=r" (val), "=r" (ret)
 	: "0" (val), "r" (saddr), "r" (regs->isr)
 	: "r20", FIXUP_BRANCH_CLOBBER );
@@ -195,15 +188,8 @@ static int emulate_ldw(struct pt_regs *regs, int toreg, int flop)
 "4:	ldi	-2, %1\n"
 	FIXUP_BRANCH(3b)
 "	.previous\n"
-"	.section __ex_table,\"aw\"\n"
-#ifdef __LP64__
-"	.dword	1b,4b\n"
-"	.dword	2b,4b\n"
-#else
-"	.word	1b,4b\n"
-"	.word	2b,4b\n"
-#endif
-"	.previous\n"
+	ASM_EXCEPTIONTABLE_ENTRY(1b, 4b)
+	ASM_EXCEPTIONTABLE_ENTRY(2b, 4b)
 	: "=r" (val), "=r" (ret)
 	: "0" (val), "r" (saddr), "r" (regs->isr)
 	: "r19", "r20", FIXUP_BRANCH_CLOBBER );

@@ -227,7 +213,7 @@ static int emulate_ldd(struct pt_regs *regs, int toreg, int flop)
 			regs->isr, regs->ior, toreg);
 #ifdef CONFIG_PA20
-#ifndef __LP64__
+#ifndef CONFIG_64BIT
 	if (!flop)
 		return -1;
 #endif
@@ -246,15 +232,8 @@ static int emulate_ldd(struct pt_regs *regs, int toreg, int flop)
 "4:	ldi	-2, %1\n"
 	FIXUP_BRANCH(3b)
 "	.previous\n"
-"	.section __ex_table,\"aw\"\n"
-#ifdef __LP64__
-"	.dword	1b,4b\n"
-"	.dword	2b,4b\n"
-#else
-"	.word	1b,4b\n"
-"	.word	2b,4b\n"
-#endif
-"	.previous\n"
+	ASM_EXCEPTIONTABLE_ENTRY(1b,4b)
+	ASM_EXCEPTIONTABLE_ENTRY(2b,4b)
 	: "=r" (val), "=r" (ret)
 	: "0" (val), "r" (saddr), "r" (regs->isr)
 	: "r19", "r20", FIXUP_BRANCH_CLOBBER );

@@ -278,17 +257,9 @@ static int emulate_ldd(struct pt_regs *regs, int toreg, int flop)
 "5:	ldi	-2, %2\n"
 	FIXUP_BRANCH(4b)
 "	.previous\n"
-"	.section __ex_table,\"aw\"\n"
-#ifdef __LP64__
-"	.dword	1b,5b\n"
-"	.dword	2b,5b\n"
-"	.dword	3b,5b\n"
-#else
-"	.word	1b,5b\n"
-"	.word	2b,5b\n"
-"	.word	3b,5b\n"
-#endif
-"	.previous\n"
+	ASM_EXCEPTIONTABLE_ENTRY(1b,5b)
+	ASM_EXCEPTIONTABLE_ENTRY(2b,5b)
+	ASM_EXCEPTIONTABLE_ENTRY(3b,5b)
 	: "=r" (valh), "=r" (vall), "=r" (ret)
 	: "0" (valh), "1" (vall), "r" (saddr), "r" (regs->isr)
 	: "r19", "r20", FIXUP_BRANCH_CLOBBER );
@@ -328,15 +299,8 @@ static int emulate_sth(struct pt_regs *regs, int frreg)
 "4:	ldi	-2, %0\n"
 	FIXUP_BRANCH(3b)
 "	.previous\n"
-"	.section __ex_table,\"aw\"\n"
-#ifdef __LP64__
-"	.dword	1b,4b\n"
-"	.dword	2b,4b\n"
-#else
-"	.word	1b,4b\n"
-"	.word	2b,4b\n"
-#endif
-"	.previous\n"
+	ASM_EXCEPTIONTABLE_ENTRY(1b,4b)
+	ASM_EXCEPTIONTABLE_ENTRY(2b,4b)
 	: "=r" (ret)
 	: "r" (val), "r" (regs->ior), "r" (regs->isr)
 	: "r19", FIXUP_BRANCH_CLOBBER );

@@ -382,15 +346,8 @@ static int emulate_stw(struct pt_regs *regs, int frreg, int flop)
 "4:	ldi	-2, %0\n"
 	FIXUP_BRANCH(3b)
 "	.previous\n"
-"	.section __ex_table,\"aw\"\n"
-#ifdef __LP64__
-"	.dword	1b,4b\n"
-"	.dword	2b,4b\n"
-#else
-"	.word	1b,4b\n"
-"	.word	2b,4b\n"
-#endif
-"	.previous\n"
+	ASM_EXCEPTIONTABLE_ENTRY(1b,4b)
+	ASM_EXCEPTIONTABLE_ENTRY(2b,4b)
 	: "=r" (ret)
 	: "r" (val), "r" (regs->ior), "r" (regs->isr)
 	: "r19", "r20", "r21", "r22", "r1", FIXUP_BRANCH_CLOBBER );
@@ -413,7 +370,7 @@ static int emulate_std(struct pt_regs *regs, int frreg, int flop)
 			val, regs->isr, regs->ior);
 #ifdef CONFIG_PA20
-#ifndef __LP64__
+#ifndef CONFIG_64BIT
 	if (!flop)
 		return -1;
 #endif

@@ -439,19 +396,10 @@ static int emulate_std(struct pt_regs *regs, int frreg, int flop)
 "6:	ldi	-2, %0\n"
 	FIXUP_BRANCH(5b)
 "	.previous\n"
-"	.section __ex_table,\"aw\"\n"
-#ifdef __LP64__
-"	.dword	1b,6b\n"
-"	.dword	2b,6b\n"
-"	.dword	3b,6b\n"
-"	.dword	4b,6b\n"
-#else
-"	.word	1b,6b\n"
-"	.word	2b,6b\n"
-"	.word	3b,6b\n"
-"	.word	4b,6b\n"
-#endif
-"	.previous\n"
+	ASM_EXCEPTIONTABLE_ENTRY(1b,6b)
+	ASM_EXCEPTIONTABLE_ENTRY(2b,6b)
+	ASM_EXCEPTIONTABLE_ENTRY(3b,6b)
+	ASM_EXCEPTIONTABLE_ENTRY(4b,6b)
 	: "=r" (ret)
 	: "r" (val), "r" (regs->ior), "r" (regs->isr)
 	: "r19", "r20", "r21", "r22", "r1", FIXUP_BRANCH_CLOBBER );
@@ -482,21 +430,11 @@ static int emulate_std(struct pt_regs *regs, int frreg, int flop)
 "7:	ldi	-2, %0\n"
 	FIXUP_BRANCH(6b)
 "	.previous\n"
-"	.section __ex_table,\"aw\"\n"
-#ifdef __LP64__
-"	.dword	1b,7b\n"
-"	.dword	2b,7b\n"
-"	.dword	3b,7b\n"
-"	.dword	4b,7b\n"
-"	.dword	5b,7b\n"
-#else
-"	.word	1b,7b\n"
-"	.word	2b,7b\n"
-"	.word	3b,7b\n"
-"	.word	4b,7b\n"
-"	.word	5b,7b\n"
-#endif
-"	.previous\n"
+	ASM_EXCEPTIONTABLE_ENTRY(1b,7b)
+	ASM_EXCEPTIONTABLE_ENTRY(2b,7b)
+	ASM_EXCEPTIONTABLE_ENTRY(3b,7b)
+	ASM_EXCEPTIONTABLE_ENTRY(4b,7b)
+	ASM_EXCEPTIONTABLE_ENTRY(5b,7b)
 	: "=r" (ret)
 	: "r" (valh), "r" (vall), "r" (regs->ior), "r" (regs->isr)
 	: "r19", "r20", "r21", "r1", FIXUP_BRANCH_CLOBBER );
...
@@ -22,6 +22,7 @@
 #include <asm/asm-offsets.h>
 #include <asm/assembly.h>
 #include <asm/errno.h>
+#include <linux/linkage.h>
 #ifdef CONFIG_SMP
 	.macro  get_fault_ip t1 t2

@@ -30,7 +31,7 @@
 	/* t2 = smp_processor_id() */
 	mfctl 30,\t2
 	ldw TI_CPU(\t2),\t2
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	extrd,u \t2,63,32,\t2
 #endif
 	/* t2 = &__per_cpu_offset[smp_processor_id()]; */
@@ -58,33 +59,34 @@
 	.section .fixup, "ax"

 	/* get_user() fixups, store -EFAULT in r8, and 0 in r9 */
-	.export fixup_get_user_skip_1
-fixup_get_user_skip_1:
+ENTRY(fixup_get_user_skip_1)
 	get_fault_ip %r1,%r8
 	ldo 4(%r1), %r1
 	ldi -EFAULT, %r8
 	bv %r0(%r1)
 	copy %r0, %r9
+ENDPROC(fixup_get_user_skip_1)

-	.export fixup_get_user_skip_2
-fixup_get_user_skip_2:
+ENTRY(fixup_get_user_skip_2)
 	get_fault_ip %r1,%r8
 	ldo 8(%r1), %r1
 	ldi -EFAULT, %r8
 	bv %r0(%r1)
 	copy %r0, %r9
+ENDPROC(fixup_get_user_skip_2)

 	/* put_user() fixups, store -EFAULT in r8 */
-	.export fixup_put_user_skip_1
-fixup_put_user_skip_1:
+ENTRY(fixup_put_user_skip_1)
 	get_fault_ip %r1,%r8
 	ldo 4(%r1), %r1
 	bv %r0(%r1)
 	ldi -EFAULT, %r8
+ENDPROC(fixup_put_user_skip_1)

-	.export fixup_put_user_skip_2
-fixup_put_user_skip_2:
+ENTRY(fixup_put_user_skip_2)
 	get_fault_ip %r1,%r8
 	ldo 8(%r1), %r1
 	bv %r0(%r1)
 	ldi -EFAULT, %r8
+ENDPROC(fixup_put_user_skip_2)
@@ -37,6 +37,7 @@
 #include <asm/assembly.h>
 #include <asm/errno.h>
+#include <linux/linkage.h>
 /*
  * get_sr gets the appropriate space value into

@@ -67,8 +68,7 @@
  * otherwise strlen (i.e. excludes zero byte)
  */
-	.export lstrncpy_from_user,code
-lstrncpy_from_user:
+ENTRY(lstrncpy_from_user)
 	.proc
 	.callinfo NO_CALLS
 	.entry

@@ -87,6 +87,7 @@ $lsfu_exit:
 	bv %r0(%r2)
 	nop
 	.exit
+ENDPROC(lstrncpy_from_user)
 	.section .fixup,"ax"
 3:	fixup_branch $lsfu_exit

@@ -94,13 +95,8 @@ $lsfu_exit:
 	.previous
 	.section __ex_table,"aw"
-#ifdef __LP64__
-	.dword 1b,3b
-	.dword 2b,3b
-#else
-	.word 1b,3b
-	.word 2b,3b
-#endif
+	ASM_ULONG_INSN 1b,3b
+	ASM_ULONG_INSN 2b,3b
 	.previous
 	.procend
@@ -112,8 +108,7 @@ $lsfu_exit:
  * otherwise, returns number of bytes not transferred.
  */
-	.export lclear_user,code
-lclear_user:
+ENTRY(lclear_user)
 	.proc
 	.callinfo NO_CALLS
 	.entry

@@ -127,6 +122,7 @@ $lclu_done:
 	bv %r0(%r2)
 	copy %r25,%r28
 	.exit
+ENDPROC(lclear_user)
 	.section .fixup,"ax"
 2:	fixup_branch $lclu_done

@@ -134,11 +130,7 @@ $lclu_done:
 	.previous
 	.section __ex_table,"aw"
-#ifdef __LP64__
-	.dword 1b,2b
-#else
-	.word 1b,2b
-#endif
+	ASM_ULONG_INSN 1b,2b
 	.previous
 	.procend
@@ -151,8 +143,7 @@ $lclu_done:
  * else strlen + 1 (i.e. includes zero byte).
  */
-	.export lstrnlen_user,code
-lstrnlen_user:
+ENTRY(lstrnlen_user)
 	.proc
 	.callinfo NO_CALLS
 	.entry

@@ -172,6 +163,7 @@ $lslen_done:
 $lslen_nzero:
 	b $lslen_done
 	ldo 1(%r26),%r26	/* special case for N == 0 */
+ENDPROC(lstrnlen_user)
 	.section .fixup,"ax"
 3:	fixup_branch $lslen_done

@@ -179,13 +171,8 @@ $lslen_nzero:
 	.previous
 	.section __ex_table,"aw"
-#ifdef __LP64__
-	.dword 1b,3b
-	.dword 2b,3b
-#else
-	.word 1b,3b
-	.word 2b,3b
-#endif
+	ASM_ULONG_INSN 1b,3b
+	ASM_ULONG_INSN 2b,3b
 	.previous
 	.procend
...
@@ -96,30 +96,18 @@ DECLARE_PER_CPU(struct exception_data, exception_data);
 #define DPRINTF(fmt, args...)
 #endif

-#ifndef __LP64__
-#define EXC_WORD ".word"
-#else
-#define EXC_WORD ".dword"
-#endif

 #define def_load_ai_insn(_insn,_sz,_tt,_s,_a,_t,_e) \
 	__asm__ __volatile__ ( \
-	"1:\t" #_insn ",ma " #_sz "(" _s ",%1), %0\n" \
-	"\t.section __ex_table,\"aw\"\n" \
-	"\t" EXC_WORD "\t1b\n" \
-	"\t" EXC_WORD "\t" #_e "\n" \
-	"\t.previous\n" \
+	"1:\t" #_insn ",ma " #_sz "(" _s ",%1), %0\n\t" \
+	ASM_EXCEPTIONTABLE_ENTRY(1b,_e) \
 	: _tt(_t), "+r"(_a) \
 	: \
 	: "r8")

 #define def_store_ai_insn(_insn,_sz,_tt,_s,_a,_t,_e) \
 	__asm__ __volatile__ ( \
-	"1:\t" #_insn ",ma %1, " #_sz "(" _s ",%0)\n" \
-	"\t.section __ex_table,\"aw\"\n" \
-	"\t" EXC_WORD "\t1b\n" \
-	"\t" EXC_WORD "\t" #_e "\n" \
-	"\t.previous\n" \
+	"1:\t" #_insn ",ma %1, " #_sz "(" _s ",%0)\n\t" \
+	ASM_EXCEPTIONTABLE_ENTRY(1b,_e) \
 	: "+r"(_a) \
 	: _tt(_t) \
 	: "r8")
@@ -133,22 +121,16 @@ DECLARE_PER_CPU(struct exception_data, exception_data);
 #define def_load_insn(_insn,_tt,_s,_o,_a,_t,_e) \
 	__asm__ __volatile__ ( \
-	"1:\t" #_insn " " #_o "(" _s ",%1), %0\n" \
-	"\t.section __ex_table,\"aw\"\n" \
-	"\t" EXC_WORD "\t1b\n" \
-	"\t" EXC_WORD "\t" #_e "\n" \
-	"\t.previous\n" \
+	"1:\t" #_insn " " #_o "(" _s ",%1), %0\n\t" \
+	ASM_EXCEPTIONTABLE_ENTRY(1b,_e) \
 	: _tt(_t) \
 	: "r"(_a) \
 	: "r8")

 #define def_store_insn(_insn,_tt,_s,_t,_o,_a,_e) \
 	__asm__ __volatile__ ( \
-	"1:\t" #_insn " %0, " #_o "(" _s ",%1)\n" \
-	"\t.section __ex_table,\"aw\"\n" \
-	"\t" EXC_WORD "\t1b\n" \
-	"\t" EXC_WORD "\t" #_e "\n" \
-	"\t.previous\n" \
+	"1:\t" #_insn " %0, " #_o "(" _s ",%1)\n\t" \
+	ASM_EXCEPTIONTABLE_ENTRY(1b,_e) \
 	: \
 	: _tt(_t), "r"(_a) \
 	: "r8")

@@ -167,8 +149,8 @@ extern inline void prefetch_dst(const void *addr)
 	__asm__("ldd 0(" d_space ",%0), %%r0" : : "r" (addr));
 }
 #else
-#define prefetch_src(addr)
-#define prefetch_dst(addr)
+#define prefetch_src(addr) do { } while(0)
+#define prefetch_dst(addr) do { } while(0)
 #endif

 /* Copy from a not-aligned src to an aligned dst, using shifts. Handles 4 words
...
@@ -31,9 +31,13 @@
 #define STREGM	std,ma
 #define SHRREG	shrd
 #define SHLREG	shld
+#define ADDIB	addib,*
+#define CMPB	cmpb,*
+#define ANDCM	andcm,*
 #define RP_OFFSET	16
 #define FRAME_SIZE	128
 #define CALLEE_REG_FRAME_SIZE	144
+#define ASM_ULONG_INSN	.dword
 #else	/* CONFIG_64BIT */
 #define LDREG	ldw
 #define STREG	stw

@@ -42,9 +46,13 @@
 #define STREGM	stwm
 #define SHRREG	shr
 #define SHLREG	shlw
+#define ADDIB	addib,
+#define CMPB	cmpb,
+#define ANDCM	andcm
 #define RP_OFFSET	20
 #define FRAME_SIZE	64
 #define CALLEE_REG_FRAME_SIZE	128
+#define ASM_ULONG_INSN	.word
 #endif

 #define CALLEE_SAVE_FRAME_SIZE (CALLEE_REG_FRAME_SIZE + CALLEE_FLOAT_FRAME_SIZE)
...
@@ -14,10 +14,10 @@
 #define PARISC_BUG_BREAK_ASM "break 0x1f, 0x1fff"
 #define PARISC_BUG_BREAK_INSN 0x03ffe01f  /* PARISC_BUG_BREAK_ASM */
-#ifdef CONFIG_64BIT
-#define ASM_ULONG_INSN	".dword"
+#if defined(CONFIG_64BIT)
+#define ASM_WORD_INSN	".dword\t"
 #else
-#define ASM_ULONG_INSN	".word"
+#define ASM_WORD_INSN	".word\t"
 #endif

 #ifdef CONFIG_DEBUG_BUGVERBOSE

@@ -26,7 +26,7 @@
 	asm volatile("\n" \
 		"1:\t" PARISC_BUG_BREAK_ASM "\n" \
 		"\t.pushsection __bug_table,\"a\"\n" \
-		"2:\t" ASM_ULONG_INSN " 1b, %c0\n" \
+		"2:\t" ASM_WORD_INSN "1b, %c0\n" \
 		"\t.short %c1, %c2\n" \
 		"\t.org 2b+%c3\n" \
 		"\t.popsection" \

@@ -49,7 +49,7 @@
 	asm volatile("\n" \
 		"1:\t" PARISC_BUG_BREAK_ASM "\n" \
 		"\t.pushsection __bug_table,\"a\"\n" \
-		"2:\t" ASM_ULONG_INSN " 1b, %c0\n" \
+		"2:\t" ASM_WORD_INSN "1b, %c0\n" \
 		"\t.short %c1, %c2\n" \
 		"\t.org 2b+%c3\n" \
 		"\t.popsection" \

@@ -63,7 +63,7 @@
 	asm volatile("\n" \
 		"1:\t" PARISC_BUG_BREAK_ASM "\n" \
 		"\t.pushsection __bug_table,\"a\"\n" \
-		"2:\t" ASM_ULONG_INSN " 1b\n" \
+		"2:\t" ASM_WORD_INSN "1b\n" \
 		"\t.short %c0\n" \
 		"\t.org 2b+%c1\n" \
 		"\t.popsection" \
...
@@ -67,6 +67,11 @@ struct exception_table_entry {
 	long fixup;	/* fixup routine */
 };

+#define ASM_EXCEPTIONTABLE_ENTRY( fault_addr, except_addr )\
+	".section __ex_table,\"aw\"\n" \
+	ASM_WORD_INSN #fault_addr ", " #except_addr "\n\t" \
+	".previous\n"
+
 /*
  * The page fault handler stores, in a per-cpu area, the following information
  * if a fixup routine is available.

@@ -106,21 +111,15 @@ struct exception_data {
 	})

 #define __get_kernel_asm(ldx,ptr) \
-	__asm__("\n1:\t" ldx "\t0(%2),%0\n" \
-	"\t.section __ex_table,\"aw\"\n" \
-	"\t" ASM_WORD_INSN \
-	"1b,fixup_get_user_skip_1\n" \
-	"\t.previous" \
+	__asm__("\n1:\t" ldx "\t0(%2),%0\n\t" \
+	ASM_EXCEPTIONTABLE_ENTRY(1b, fixup_get_user_skip_1)\
 	: "=r"(__gu_val), "=r"(__gu_err) \
 	: "r"(ptr), "1"(__gu_err) \
 	: "r1");

 #define __get_user_asm(ldx,ptr) \
-	__asm__("\n1:\t" ldx "\t0(%%sr3,%2),%0\n" \
-	"\t.section __ex_table,\"aw\"\n" \
-	"\t" ASM_WORD_INSN \
-	"1b,fixup_get_user_skip_1\n" \
-	"\t.previous" \
+	__asm__("\n1:\t" ldx "\t0(%%sr3,%2),%0\n\t" \
+	ASM_EXCEPTIONTABLE_ENTRY(1b,fixup_get_user_skip_1)\
 	: "=r"(__gu_val), "=r"(__gu_err) \
 	: "r"(ptr), "1"(__gu_err) \
 	: "r1");
@@ -164,22 +163,16 @@ struct exception_data {
 #define __put_kernel_asm(stx,x,ptr) \
 	__asm__ __volatile__ ( \
-	"\n1:\t" stx "\t%2,0(%1)\n" \
-	"\t.section __ex_table,\"aw\"\n" \
-	"\t" ASM_WORD_INSN \
-	"1b,fixup_put_user_skip_1\n" \
-	"\t.previous" \
+	"\n1:\t" stx "\t%2,0(%1)\n\t" \
+	ASM_EXCEPTIONTABLE_ENTRY(1b,fixup_put_user_skip_1)\
 	: "=r"(__pu_err) \
 	: "r"(ptr), "r"(x), "0"(__pu_err) \
 	: "r1")

 #define __put_user_asm(stx,x,ptr) \
 	__asm__ __volatile__ ( \
-	"\n1:\t" stx "\t%2,0(%%sr3,%1)\n" \
-	"\t.section __ex_table,\"aw\"\n" \
-	"\t" ASM_WORD_INSN \
-	"1b,fixup_put_user_skip_1\n" \
-	"\t.previous" \
+	"\n1:\t" stx "\t%2,0(%%sr3,%1)\n\t" \
+	ASM_EXCEPTIONTABLE_ENTRY(1b,fixup_put_user_skip_1)\
 	: "=r"(__pu_err) \
 	: "r"(ptr), "r"(x), "0"(__pu_err) \
 	: "r1")
@@ -192,12 +185,10 @@ struct exception_data {
 	u32 hi = (__val64) >> 32; \
 	u32 lo = (__val64) & 0xffffffff; \
 	__asm__ __volatile__ ( \
-	    "\n1:\tstw %2,0(%1)\n" \
-	    "\n2:\tstw %3,4(%1)\n" \
-	    "\t.section __ex_table,\"aw\"\n" \
-	    "\t.word\t1b,fixup_put_user_skip_2\n" \
-	    "\t.word\t2b,fixup_put_user_skip_1\n" \
-	    "\t.previous" \
+	    "\n1:\tstw %2,0(%1)" \
+	    "\n2:\tstw %3,4(%1)\n\t" \
+	    ASM_EXCEPTIONTABLE_ENTRY(1b,fixup_put_user_skip_2)\
+	    ASM_EXCEPTIONTABLE_ENTRY(2b,fixup_put_user_skip_1)\
 	    : "=r"(__pu_err) \
 	    : "r"(ptr), "r"(hi), "r"(lo), "0"(__pu_err) \
 	    : "r1"); \

@@ -208,12 +199,10 @@ struct exception_data {
 	u32 hi = (__val64) >> 32; \
 	u32 lo = (__val64) & 0xffffffff; \
 	__asm__ __volatile__ ( \
-	    "\n1:\tstw %2,0(%%sr3,%1)\n" \
-	    "\n2:\tstw %3,4(%%sr3,%1)\n" \
-	    "\t.section __ex_table,\"aw\"\n" \
-	    "\t.word\t1b,fixup_put_user_skip_2\n" \
-	    "\t.word\t2b,fixup_put_user_skip_1\n" \
-	    "\t.previous" \
+	    "\n1:\tstw %2,0(%%sr3,%1)" \
+	    "\n2:\tstw %3,4(%%sr3,%1)\n\t" \
+	    ASM_EXCEPTIONTABLE_ENTRY(1b,fixup_put_user_skip_2)\
+	    ASM_EXCEPTIONTABLE_ENTRY(2b,fixup_put_user_skip_1)\
 	    : "=r"(__pu_err) \
 	    : "r"(ptr), "r"(hi), "r"(lo), "0"(__pu_err) \
 	    : "r1"); \
...