Commit 872f6deb authored by Kyle McMartin

parisc: use conditional macro for 64-bit wide ops

This work enables us to remove -traditional from $AFLAGS on
parisc.
Signed-off-by: Kyle McMartin <kyle@mcmartin.ca>
Parent f54d8a1b
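
For context, the change replaces the ad-hoc wide-op macros (CMPIB, CMPB, ADDIB) with a single COND() completer macro. A minimal sketch of the intended preprocessor expansion, using only the definitions visible in the assembly.h hunk at the end of this diff (the 64-bit `* ## x` form relies on ANSI-style token pasting, which is presumably what required dropping -traditional from $AFLAGS):

	/* Sketch of the COND() macro from the assembly.h hunk below:
	 * wide (64-bit) kernels paste a "*" wide-mode completer onto the
	 * condition, narrow (32-bit) kernels pass the condition through. */
	#ifdef CONFIG_64BIT
	#define COND(x)	* ## x		/* COND(=)  ->  *=  */
	#else
	#define COND(x)	x		/* COND(=)  ->  =   */
	#endif

	/* So a branch written once in the source ...                   */
		cmpib,COND(=),n	0,%r20,intr_restore
	/* ... assembles as "cmpib,*=,n" (64-bit wide compare) on wide
	 * kernels and as "cmpib,=,n" on 32-bit kernels.  The
	 * addib,COND(>) and cmpb,COND(<<) sites follow the same pattern. */
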
@@ -41,16 +41,8 @@
 #include <linux/init.h>
 #ifdef CONFIG_64BIT
-#define CMPIB	cmpib,*
-#define CMPB	cmpb,*
-#define COND(x)	*x
 	.level 2.0w
 #else
-#define CMPIB	cmpib,
-#define CMPB	cmpb,
-#define COND(x)	x
 	.level 2.0
 #endif
@@ -958,9 +950,9 @@ intr_check_sig:
 	 * Only do signals if we are returning to user space
 	 */
 	LDREG	PT_IASQ0(%r16), %r20
-	CMPIB=,n 0,%r20,intr_restore /* backward */
+	cmpib,COND(=),n 0,%r20,intr_restore /* backward */
 	LDREG	PT_IASQ1(%r16), %r20
-	CMPIB=,n 0,%r20,intr_restore /* backward */
+	cmpib,COND(=),n 0,%r20,intr_restore /* backward */
 	copy	%r0, %r25	/* long in_syscall = 0 */
 #ifdef CONFIG_64BIT
@@ -1014,10 +1006,10 @@ intr_do_resched:
 	 * we jump back to intr_restore.
 	 */
 	LDREG	PT_IASQ0(%r16), %r20
-	CMPIB=	0, %r20, intr_do_preempt
+	cmpib,COND(=)	0, %r20, intr_do_preempt
 	nop
 	LDREG	PT_IASQ1(%r16), %r20
-	CMPIB=	0, %r20, intr_do_preempt
+	cmpib,COND(=)	0, %r20, intr_do_preempt
 	nop
 #ifdef CONFIG_64BIT
@@ -1046,7 +1038,7 @@ intr_do_preempt:
 	/* current_thread_info()->preempt_count */
 	mfctl	%cr30, %r1
 	LDREG	TI_PRE_COUNT(%r1), %r19
-	CMPIB<>	0, %r19, intr_restore	/* if preempt_count > 0 */
+	cmpib,COND(<>)	0, %r19, intr_restore	/* if preempt_count > 0 */
 	nop				/* prev insn branched backwards */
 	/* check if we interrupted a critical path */
@@ -1065,7 +1057,7 @@ intr_do_preempt:
 	 */
 intr_extint:
-	CMPIB=,n 0,%r16,1f
+	cmpib,COND(=),n 0,%r16,1f
 	get_stack_use_cr30
 	b,n 2f
@@ -1100,7 +1092,7 @@ ENDPROC(syscall_exit_rfi)
 ENTRY(intr_save)	/* for os_hpmc */
 	mfsp	%sr7,%r16
-	CMPIB=,n 0,%r16,1f
+	cmpib,COND(=),n 0,%r16,1f
 	get_stack_use_cr30
 	b	2f
 	copy	%r8,%r26
@@ -1122,7 +1114,7 @@ ENTRY(intr_save) /* for os_hpmc */
 	 * adjust isr/ior below.
 	 */
-	CMPIB=,n	6,%r26,skip_save_ior
+	cmpib,COND(=),n	6,%r26,skip_save_ior
 	mfctl	%cr20, %r16	/* isr */
@@ -1451,11 +1443,11 @@ nadtlb_emulate:
 	bb,>=,n	%r9,26,nadtlb_nullify	/* m bit not set, just nullify */
 	BL	get_register,%r25
 	extrw,u	%r9,15,5,%r8	/* Get index register # */
-	CMPIB=,n	-1,%r1,nadtlb_fault	/* have to use slow path */
+	cmpib,COND(=),n	-1,%r1,nadtlb_fault	/* have to use slow path */
 	copy	%r1,%r24
 	BL	get_register,%r25
 	extrw,u	%r9,10,5,%r8	/* Get base register # */
-	CMPIB=,n	-1,%r1,nadtlb_fault	/* have to use slow path */
+	cmpib,COND(=),n	-1,%r1,nadtlb_fault	/* have to use slow path */
 	BL	set_register,%r25
 	add,l	%r1,%r24,%r1	/* doesn't affect c/b bits */
@@ -1487,7 +1479,7 @@ nadtlb_probe_check:
 	cmpb,<>,n	%r16,%r17,nadtlb_fault	/* Must be probe,[rw]*/
 	BL	get_register,%r25	/* Find the target register */
 	extrw,u	%r9,31,5,%r8	/* Get target register */
-	CMPIB=,n	-1,%r1,nadtlb_fault	/* have to use slow path */
+	cmpib,COND(=),n	-1,%r1,nadtlb_fault	/* have to use slow path */
 	BL	set_register,%r25
 	copy	%r0,%r1	/* Write zero to target register */
 	b	nadtlb_nullify	/* Nullify return insn */
@@ -1571,12 +1563,12 @@ dbit_trap_20w:
 	L3_ptep	ptp,pte,t0,va,dbit_fault
 #ifdef CONFIG_SMP
-	CMPIB=,n	0,spc,dbit_nolock_20w
+	cmpib,COND(=),n	0,spc,dbit_nolock_20w
 	load32	PA(pa_dbit_lock),t0
 dbit_spin_20w:
 	LDCW	0(t0),t1
-	cmpib,=	0,t1,dbit_spin_20w
+	cmpib,COND(=)	0,t1,dbit_spin_20w
 	nop
 dbit_nolock_20w:
@@ -1587,7 +1579,7 @@ dbit_nolock_20w:
 	idtlbt	pte,prot
 #ifdef CONFIG_SMP
-	CMPIB=,n	0,spc,dbit_nounlock_20w
+	cmpib,COND(=),n	0,spc,dbit_nounlock_20w
 	ldi	1,t1
 	stw	t1,0(t0)
@@ -1607,7 +1599,7 @@ dbit_trap_11:
 	L2_ptep	ptp,pte,t0,va,dbit_fault
 #ifdef CONFIG_SMP
-	CMPIB=,n	0,spc,dbit_nolock_11
+	cmpib,COND(=),n	0,spc,dbit_nolock_11
 	load32	PA(pa_dbit_lock),t0
 dbit_spin_11:
@@ -1629,7 +1621,7 @@ dbit_nolock_11:
 	mtsp	t1, %sr1	/* Restore sr1 */
 #ifdef CONFIG_SMP
-	CMPIB=,n	0,spc,dbit_nounlock_11
+	cmpib,COND(=),n	0,spc,dbit_nounlock_11
 	ldi	1,t1
 	stw	t1,0(t0)
@@ -1647,7 +1639,7 @@ dbit_trap_20:
 	L2_ptep	ptp,pte,t0,va,dbit_fault
 #ifdef CONFIG_SMP
-	CMPIB=,n	0,spc,dbit_nolock_20
+	cmpib,COND(=),n	0,spc,dbit_nolock_20
 	load32	PA(pa_dbit_lock),t0
 dbit_spin_20:
@@ -1666,7 +1658,7 @@ dbit_nolock_20:
 	idtlbt	pte,prot
 #ifdef CONFIG_SMP
-	CMPIB=,n	0,spc,dbit_nounlock_20
+	cmpib,COND(=),n	0,spc,dbit_nounlock_20
 	ldi	1,t1
 	stw	t1,0(t0)
@@ -1995,7 +1987,7 @@ ENTRY(syscall_exit)
 	/* We can't use "CMPIB<> PER_HPUX" since "im5" field is sign extended */
 	ldo	-PER_HPUX(%r19), %r19
-	CMPIB<>,n 0,%r19,1f
+	cmpib,COND(<>),n 0,%r19,1f
 	/* Save other hpux returns if personality is PER_HPUX */
 	STREG	%r22,TASK_PT_GR22(%r1)
...
@@ -86,7 +86,7 @@ ENTRY(flush_tlb_all_local)
 	LDREG	ITLB_OFF_COUNT(%r1), %arg2
 	LDREG	ITLB_LOOP(%r1), %arg3
-	ADDIB=	-1, %arg3, fitoneloop	/* Preadjust and test */
+	addib,COND(=)	-1, %arg3, fitoneloop	/* Preadjust and test */
 	movb,<,n	%arg3, %r31, fitdone	/* If loop < 0, skip */
 	copy	%arg0, %r28	/* Init base addr */
@@ -96,14 +96,14 @@ fitmanyloop:	/* Loop if LOOP >= 2 */
 	copy	%arg2, %r29	/* Init middle loop count */
 fitmanymiddle:	/* Loop if LOOP >= 2 */
-	ADDIB>	-1, %r31, fitmanymiddle	/* Adjusted inner loop decr */
+	addib,COND(>)	-1, %r31, fitmanymiddle	/* Adjusted inner loop decr */
 	pitlbe	0(%sr1, %r28)
 	pitlbe,m	%arg1(%sr1, %r28)	/* Last pitlbe and addr adjust */
-	ADDIB>	-1, %r29, fitmanymiddle	/* Middle loop decr */
+	addib,COND(>)	-1, %r29, fitmanymiddle	/* Middle loop decr */
 	copy	%arg3, %r31	/* Re-init inner loop count */
 	movb,tr	%arg0, %r28, fitmanyloop	/* Re-init base addr */
-	ADDIB<=,n	-1, %r22, fitdone	/* Outer loop count decr */
+	addib,COND(<=),n	-1, %r22, fitdone	/* Outer loop count decr */
 fitoneloop:	/* Loop if LOOP = 1 */
 	mtsp	%r20, %sr1
@@ -111,10 +111,10 @@ fitoneloop:	/* Loop if LOOP = 1 */
 	copy	%arg2, %r29	/* init middle loop count */
 fitonemiddle:	/* Loop if LOOP = 1 */
-	ADDIB>	-1, %r29, fitonemiddle	/* Middle loop count decr */
+	addib,COND(>)	-1, %r29, fitonemiddle	/* Middle loop count decr */
 	pitlbe,m	%arg1(%sr1, %r28)	/* pitlbe for one loop */
-	ADDIB>	-1, %r22, fitoneloop	/* Outer loop count decr */
+	addib,COND(>)	-1, %r22, fitoneloop	/* Outer loop count decr */
 	add	%r21, %r20, %r20	/* increment space */
 fitdone:
@@ -129,7 +129,7 @@ fitdone:
 	LDREG	DTLB_OFF_COUNT(%r1), %arg2
 	LDREG	DTLB_LOOP(%r1), %arg3
-	ADDIB=	-1, %arg3, fdtoneloop	/* Preadjust and test */
+	addib,COND(=)	-1, %arg3, fdtoneloop	/* Preadjust and test */
 	movb,<,n	%arg3, %r31, fdtdone	/* If loop < 0, skip */
 	copy	%arg0, %r28	/* Init base addr */
@@ -139,14 +139,14 @@ fdtmanyloop:	/* Loop if LOOP >= 2 */
 	copy	%arg2, %r29	/* Init middle loop count */
 fdtmanymiddle:	/* Loop if LOOP >= 2 */
-	ADDIB>	-1, %r31, fdtmanymiddle	/* Adjusted inner loop decr */
+	addib,COND(>)	-1, %r31, fdtmanymiddle	/* Adjusted inner loop decr */
 	pdtlbe	0(%sr1, %r28)
 	pdtlbe,m	%arg1(%sr1, %r28)	/* Last pdtlbe and addr adjust */
-	ADDIB>	-1, %r29, fdtmanymiddle	/* Middle loop decr */
+	addib,COND(>)	-1, %r29, fdtmanymiddle	/* Middle loop decr */
 	copy	%arg3, %r31	/* Re-init inner loop count */
 	movb,tr	%arg0, %r28, fdtmanyloop	/* Re-init base addr */
-	ADDIB<=,n	-1, %r22,fdtdone	/* Outer loop count decr */
+	addib,COND(<=),n	-1, %r22,fdtdone	/* Outer loop count decr */
 fdtoneloop:	/* Loop if LOOP = 1 */
 	mtsp	%r20, %sr1
@@ -154,10 +154,10 @@ fdtoneloop:	/* Loop if LOOP = 1 */
 	copy	%arg2, %r29	/* init middle loop count */
 fdtonemiddle:	/* Loop if LOOP = 1 */
-	ADDIB>	-1, %r29, fdtonemiddle	/* Middle loop count decr */
+	addib,COND(>)	-1, %r29, fdtonemiddle	/* Middle loop count decr */
 	pdtlbe,m	%arg1(%sr1, %r28)	/* pdtlbe for one loop */
-	ADDIB>	-1, %r22, fdtoneloop	/* Outer loop count decr */
+	addib,COND(>)	-1, %r22, fdtoneloop	/* Outer loop count decr */
 	add	%r21, %r20, %r20	/* increment space */
@@ -210,18 +210,18 @@ ENTRY(flush_instruction_cache_local)
 	LDREG	ICACHE_COUNT(%r1), %arg2
 	LDREG	ICACHE_LOOP(%r1), %arg3
 	rsm	PSW_SM_I, %r22	/* No mmgt ops during loop*/
-	ADDIB=	-1, %arg3, fioneloop	/* Preadjust and test */
+	addib,COND(=)	-1, %arg3, fioneloop	/* Preadjust and test */
 	movb,<,n	%arg3, %r31, fisync	/* If loop < 0, do sync */
 fimanyloop:	/* Loop if LOOP >= 2 */
-	ADDIB>	-1, %r31, fimanyloop	/* Adjusted inner loop decr */
+	addib,COND(>)	-1, %r31, fimanyloop	/* Adjusted inner loop decr */
 	fice	%r0(%sr1, %arg0)
 	fice,m	%arg1(%sr1, %arg0)	/* Last fice and addr adjust */
 	movb,tr	%arg3, %r31, fimanyloop	/* Re-init inner loop count */
-	ADDIB<=,n	-1, %arg2, fisync	/* Outer loop decr */
+	addib,COND(<=),n	-1, %arg2, fisync	/* Outer loop decr */
 fioneloop:	/* Loop if LOOP = 1 */
-	ADDIB>	-1, %arg2, fioneloop	/* Outer loop count decr */
+	addib,COND(>)	-1, %arg2, fioneloop	/* Outer loop count decr */
 	fice,m	%arg1(%sr1, %arg0)	/* Fice for one loop */
 fisync:
@@ -251,18 +251,18 @@ ENTRY(flush_data_cache_local)
 	LDREG	DCACHE_COUNT(%r1), %arg2
 	LDREG	DCACHE_LOOP(%r1), %arg3
 	rsm	PSW_SM_I, %r22
-	ADDIB=	-1, %arg3, fdoneloop	/* Preadjust and test */
+	addib,COND(=)	-1, %arg3, fdoneloop	/* Preadjust and test */
 	movb,<,n	%arg3, %r31, fdsync	/* If loop < 0, do sync */
 fdmanyloop:	/* Loop if LOOP >= 2 */
-	ADDIB>	-1, %r31, fdmanyloop	/* Adjusted inner loop decr */
+	addib,COND(>)	-1, %r31, fdmanyloop	/* Adjusted inner loop decr */
 	fdce	%r0(%sr1, %arg0)
 	fdce,m	%arg1(%sr1, %arg0)	/* Last fdce and addr adjust */
 	movb,tr	%arg3, %r31, fdmanyloop	/* Re-init inner loop count */
-	ADDIB<=,n	-1, %arg2, fdsync	/* Outer loop decr */
+	addib,COND(<=),n	-1, %arg2, fdsync	/* Outer loop decr */
 fdoneloop:	/* Loop if LOOP = 1 */
-	ADDIB>	-1, %arg2, fdoneloop	/* Outer loop count decr */
+	addib,COND(>)	-1, %arg2, fdoneloop	/* Outer loop count decr */
 	fdce,m	%arg1(%sr1, %arg0)	/* Fdce for one loop */
 fdsync:
@@ -343,7 +343,7 @@ ENTRY(copy_user_page_asm)
 	 * non-taken backward branch. Note that .+4 is a backwards branch.
 	 * The ldd should only get executed if the branch is taken.
 	 */
-	ADDIB>,n	-1, %r1, 1b	/* bundle 10 */
+	addib,COND(>),n	-1, %r1, 1b	/* bundle 10 */
 	ldd	0(%r25), %r19	/* start next loads */
 #else
@@ -392,7 +392,7 @@ ENTRY(copy_user_page_asm)
 	stw	%r21, 56(%r26)
 	stw	%r22, 60(%r26)
 	ldo	64(%r26), %r26
-	ADDIB>,n	-1, %r1, 1b
+	addib,COND(>),n	-1, %r1, 1b
 	ldw	0(%r25), %r19
 #endif
 	bv	%r0(%r2)
@@ -516,7 +516,7 @@ ENTRY(copy_user_page_asm)
 	stw	%r21, 56(%r28)
 	stw	%r22, 60(%r28)
 	ldo	64(%r28), %r28
-	ADDIB>	-1, %r1,1b
+	addib,COND(>)	-1, %r1,1b
 	ldo	64(%r29), %r29
 	bv	%r0(%r2)
@@ -575,7 +575,7 @@ ENTRY(__clear_user_page_asm)
 	std	%r0, 104(%r28)
 	std	%r0, 112(%r28)
 	std	%r0, 120(%r28)
-	ADDIB>	-1, %r1, 1b
+	addib,COND(>)	-1, %r1, 1b
 	ldo	128(%r28), %r28
 #else	/* ! CONFIG_64BIT */
@@ -598,7 +598,7 @@ ENTRY(__clear_user_page_asm)
 	stw	%r0, 52(%r28)
 	stw	%r0, 56(%r28)
 	stw	%r0, 60(%r28)
-	ADDIB>	-1, %r1, 1b
+	addib,COND(>)	-1, %r1, 1b
 	ldo	64(%r28), %r28
 #endif	/* CONFIG_64BIT */
@@ -641,7 +641,7 @@ ENTRY(flush_kernel_dcache_page_asm)
 	fdc,m	%r23(%r26)
 	fdc,m	%r23(%r26)
 	fdc,m	%r23(%r26)
-	CMPB<<	%r26, %r25,1b
+	cmpb,COND(<<)	%r26, %r25,1b
 	fdc,m	%r23(%r26)
 	sync
@@ -684,7 +684,7 @@ ENTRY(flush_user_dcache_page)
 	fdc,m	%r23(%sr3, %r26)
 	fdc,m	%r23(%sr3, %r26)
 	fdc,m	%r23(%sr3, %r26)
-	CMPB<<	%r26, %r25,1b
+	cmpb,COND(<<)	%r26, %r25,1b
 	fdc,m	%r23(%sr3, %r26)
 	sync
@@ -727,7 +727,7 @@ ENTRY(flush_user_icache_page)
 	fic,m	%r23(%sr3, %r26)
 	fic,m	%r23(%sr3, %r26)
 	fic,m	%r23(%sr3, %r26)
-	CMPB<<	%r26, %r25,1b
+	cmpb,COND(<<)	%r26, %r25,1b
 	fic,m	%r23(%sr3, %r26)
 	sync
@@ -770,7 +770,7 @@ ENTRY(purge_kernel_dcache_page)
 	pdc,m	%r23(%r26)
 	pdc,m	%r23(%r26)
 	pdc,m	%r23(%r26)
-	CMPB<<	%r26, %r25, 1b
+	cmpb,COND(<<)	%r26, %r25, 1b
 	pdc,m	%r23(%r26)
 	sync
@@ -834,7 +834,7 @@ ENTRY(flush_alias_page)
 	fdc,m	%r23(%r28)
 	fdc,m	%r23(%r28)
 	fdc,m	%r23(%r28)
-	CMPB<<	%r28, %r29, 1b
+	cmpb,COND(<<)	%r28, %r29, 1b
 	fdc,m	%r23(%r28)
 	sync
@@ -857,7 +857,7 @@ flush_user_dcache_range_asm:
 	ldo	-1(%r23), %r21
 	ANDCM	%r26, %r21, %r26
-1:	CMPB<<,n	%r26, %r25, 1b
+1:	cmpb,COND(<<),n	%r26, %r25, 1b
 	fdc,m	%r23(%sr3, %r26)
 	sync
@@ -878,7 +878,7 @@ ENTRY(flush_kernel_dcache_range_asm)
 	ldo	-1(%r23), %r21
 	ANDCM	%r26, %r21, %r26
-1:	CMPB<<,n	%r26, %r25,1b
+1:	cmpb,COND(<<),n	%r26, %r25,1b
 	fdc,m	%r23(%r26)
 	sync
@@ -900,7 +900,7 @@ ENTRY(flush_user_icache_range_asm)
 	ldo	-1(%r23), %r21
 	ANDCM	%r26, %r21, %r26
-1:	CMPB<<,n	%r26, %r25,1b
+1:	cmpb,COND(<<),n	%r26, %r25,1b
 	fic,m	%r23(%sr3, %r26)
 	sync
@@ -943,7 +943,7 @@ ENTRY(flush_kernel_icache_page)
 	fic,m	%r23(%sr4, %r26)
 	fic,m	%r23(%sr4, %r26)
 	fic,m	%r23(%sr4, %r26)
-	CMPB<<	%r26, %r25, 1b
+	cmpb,COND(<<)	%r26, %r25, 1b
 	fic,m	%r23(%sr4, %r26)
 	sync
@@ -964,7 +964,7 @@ ENTRY(flush_kernel_icache_range_asm)
 	ldo	-1(%r23), %r21
 	ANDCM	%r26, %r21, %r26
-1:	CMPB<<,n	%r26, %r25, 1b
+1:	cmpb,COND(<<),n	%r26, %r25, 1b
 	fic,m	%r23(%sr4, %r26)
 	sync
...
@@ -31,9 +31,8 @@
 #define STREGM	std,ma
 #define SHRREG	shrd
 #define SHLREG	shld
-#define ADDIB	addib,*
-#define CMPB	cmpb,*
 #define ANDCM	andcm,*
+#define COND(x)	* ## x
 #define RP_OFFSET	16
 #define FRAME_SIZE	128
 #define CALLEE_REG_FRAME_SIZE	144
@@ -46,9 +45,8 @@
 #define STREGM	stwm
 #define SHRREG	shr
 #define SHLREG	shlw
-#define ADDIB	addib,
-#define CMPB	cmpb,
 #define ANDCM	andcm
+#define COND(x)	x
 #define RP_OFFSET	20
 #define FRAME_SIZE	64
 #define CALLEE_REG_FRAME_SIZE	128
...