/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 2004 Ralf Baechle <ralf@linux-mips.org>
 * Copyright (C) MIPS Technologies, Inc.
 *   written by Ralf Baechle <ralf@linux-mips.org>
 */
#ifndef _ASM_HAZARDS_H
#define _ASM_HAZARDS_H


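/*
 * ASMMACRO(name, code...) defines every hazard barrier exactly once for
 * both worlds: included from assembler sources it emits a real assembler
 * macro, while from C it emits the same assembler macro plus a trivial
 * inline function expanding to it, so C callers can simply write e.g.
 * tlbw_use_hazard().
 */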
#ifdef __ASSEMBLY__
#define ASMMACRO(name, code...) .macro name; code; .endm
#else

#define ASMMACRO(name, code...)						\
__asm__(".macro " #name "; " #code "; .endm");				\
									\
static inline void name(void)						\
{									\
	__asm__ __volatile__ (#name);					\
}

#endif

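/*
 * _ssnop and _ehb are spelled out as plain shifts, presumably so that
 * older assemblers accept them too: "sll $0, $0, 1" is the SSNOP encoding
 * and "sll $0, $0, 3" is the EHB encoding.  Both write to $0 and thus
 * degrade to ordinary nops on cores that do not implement them.
 */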
ASMMACRO(_ssnop,
	 sll	$0, $0, 1
	)

ASMMACRO(_ehb,
	 sll	$0, $0, 3
	)

/*
 * TLB hazards
 */
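/*
 * A rough, illustrative sketch of how these barriers are meant to be used
 * (the real sequences live in the TLB handling code, e.g. tlb-r4k.c):
 *
 *	write_c0_entryhi(entryhi);
 *	write_c0_index(idx);
 *	mtc0_tlbw_hazard();
 *	tlb_write_indexed();
 *	tlbw_use_hazard();
 */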
#if defined(CONFIG_CPU_MIPSR2)

/*
 * MIPSR2 defines ehb for hazard avoidance
 */

ASMMACRO(mtc0_tlbw_hazard,
	 _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ehb
	)
ASMMACRO(irq_disable_hazard,
	 _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ehb
	)
/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as an argument to inline assembler.  Gas, on the other
 * hand, has the annoying difference between la and dla, which are only
 * usable for 32-bit resp. 64-bit code, so neither can be used without
 * conditional compilation.  The alternative is switching the assembler to
 * 64-bit code, which happens to work right even for 32-bit code ...
 */
#define instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	"	.set	mips64r2				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	mips0					\n"	\
	"1:							\n"	\
	: "=r" (tmp));							\
} while (0)
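/*
 * jr.hb is the MIPS32/64 R2 jump-register-with-hazard-barrier: jumping
 * through it to the very next instruction clears any pending instruction
 * hazard, and the .set mips64r2/dla pair only serves to form that jump
 * target without running into the la/dla problem described above.
 */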

#elif defined(CONFIG_CPU_R10000)

/*
 * R10000 rocks - all hazards handled in hardware, so this becomes a no-brainer.
 */

ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_RM9000)

/*
 * RM9000 hazards.  When the JTLB is updated by tlbwi or tlbwr, a subsequent
 * use of the JTLB for instructions should not occur for 4 cpu cycles and use
 * for data translations should not occur for 3 cpu cycles.
 */

ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_SB1)

/*
 * Mostly like R4000 for historical reasons
 */
ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#else

/*
 * Finally the catchall case for all other processors including R4000, R4400,
 * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
 *
 * The taken branch will result in a two cycle penalty for the two killed
 * instructions on R4000 / R4400.  Other processors only have a single cycle
 * hazard, so this is a nice trick to have optimal code for a range of
 * processors.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 nop; nop
	)
ASMMACRO(tlbw_use_hazard,
	 nop; nop; nop
	)
ASMMACRO(tlb_probe_hazard,
	 nop; nop; nop
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 nop; nop; nop
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop
	)
#define instruction_hazard() do { } while (0)

#endif


/* FPU hazards */
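/*
 * Barriers between flipping CU1 in c0_status and the first/last FPU
 * instruction.  A rough sketch of the intended use, assuming the helpers
 * from <asm/mipsregs.h> (the real callers live in <asm/fpu.h>):
 *
 *	set_c0_status(ST0_CU1);
 *	enable_fpu_hazard();
 *	... FPU instructions ...
 *	clear_c0_status(ST0_CU1);
 *	disable_fpu_hazard();
 */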

#if defined(CONFIG_CPU_SB1)
ASMMACRO(enable_fpu_hazard,
	 .set	push;
	 .set	mips64;
	 .set	noreorder;
	 _ssnop;
	 bnezl	$0, .+4;
	 _ssnop;
	 .set	pop
)
ASMMACRO(disable_fpu_hazard,
)

#elif defined(CONFIG_CPU_MIPSR2)
ASMMACRO(enable_fpu_hazard,
	 _ehb
)
ASMMACRO(disable_fpu_hazard,
	 _ehb
)
#else
ASMMACRO(enable_fpu_hazard,
	 nop; nop; nop; nop
)
ASMMACRO(disable_fpu_hazard,
	 _ehb
)
#endif

#endif /* _ASM_HAZARDS_H */