/*
 *  linux/arch/arm/mm/proc-v7.S
 *
 *  Copyright (C) 2001 Deep Blue Solutions Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 *  This is the "shell" of the ARMv7 processor support.
 */
#include <linux/arm-smccc.h>
#include <linux/init.h>
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <asm/asm-offsets.h>
#include <asm/hwcap.h>
#include <asm/pgtable-hwdef.h>
#include <asm/pgtable.h>
#include <asm/memory.h>

#include "proc-macros.S"

#ifdef CONFIG_ARM_LPAE
#include "proc-v7-3level.S"
#else
#include "proc-v7-2level.S"
#endif

ENTRY(cpu_v7_proc_init)
	ret	lr
ENDPROC(cpu_v7_proc_init)

ENTRY(cpu_v7_proc_fin)
	mrc	p15, 0, r0, c1, c0, 0		@ ctrl register
	bic	r0, r0, #0x1000			@ ...i............
	bic	r0, r0, #0x0006			@ .............ca.
	mcr	p15, 0, r0, c1, c0, 0		@ disable caches
	ret	lr
ENDPROC(cpu_v7_proc_fin)

/*
 *	cpu_v7_reset(loc, hyp)
 *
 *	Perform a soft reset of the system.  Put the CPU into the
 *	same state as it would be if it had been reset, and branch
 *	to what would be the reset vector.
 *
 *	- loc   - location to jump to for soft reset
 *	- hyp   - indicate if restart occurs in HYP mode
 *
 *	This code must be executed using a flat identity mapping with
 *	caches disabled.
 */
	.align	5
	.pushsection	.idmap.text, "ax"
ENTRY(cpu_v7_reset)
	mrc	p15, 0, r2, c1, c0, 0		@ ctrl register
	bic	r2, r2, #0x1			@ ...............m
 THUMB(	bic	r2, r2, #1 << 30 )		@ SCTLR.TE (Thumb exceptions)
	mcr	p15, 0, r2, c1, c0, 0		@ disable MMU
	isb
#ifdef CONFIG_ARM_VIRT_EXT
	teq	r1, #0
	bne	__hyp_soft_restart
#endif
	bx	r0
ENDPROC(cpu_v7_reset)
	.popsection

/*
 *	cpu_v7_do_idle()
 *
 *	Idle the processor (eg, wait for interrupt).
 *
 *	IRQs are already disabled.
 */
ENTRY(cpu_v7_do_idle)
	dsb					@ WFI may enter a low-power mode
	wfi
	ret	lr
ENDPROC(cpu_v7_do_idle)

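/*
 *	cpu_v7_dcache_clean_area(start, size)
 *
 *	Clean D-cache lines covering the given range, typically so that
 *	a hardware page table walk sees newly written page tables.  The
 *	MP extensions make the walker coherent with the L1, hence the
 *	SMP early exit below.
 */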
ENTRY(cpu_v7_dcache_clean_area)
	ALT_SMP(W(nop))			@ MP extensions imply L1 PTW
	ALT_UP_B(1f)
	ret	lr
1:	dcache_line_size r2, r3
2:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, r2
	subs	r1, r1, r2
	bhi	2b
	dsb	ishst
	ret	lr
ENDPROC(cpu_v7_dcache_clean_area)

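/*
 * Spectre-v2 (CVE-2017-5715) hardening: the switch_mm wrappers below
 * invalidate the branch predictor before switching to a new mm.  The
 * SMC/HVC variants invoke the SMCCC ARM_SMCCC_ARCH_WORKAROUND_1 service
 * in firmware or the hypervisor; the ICIALLU/BPIALL variants perform
 * the invalidation directly via CP15 on cores that handle it locally.
 */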
#ifdef CONFIG_ARM_PSCI
	.arch_extension sec
ENTRY(cpu_v7_smc_switch_mm)
	stmfd	sp!, {r0 - r3}
	movw	r0, #:lower16:ARM_SMCCC_ARCH_WORKAROUND_1
	movt	r0, #:upper16:ARM_SMCCC_ARCH_WORKAROUND_1
	smc	#0
	ldmfd	sp!, {r0 - r3}
	b	cpu_v7_switch_mm
ENDPROC(cpu_v7_smc_switch_mm)
	.arch_extension virt
ENTRY(cpu_v7_hvc_switch_mm)
	stmfd	sp!, {r0 - r3}
	movw	r0, #:lower16:ARM_SMCCC_ARCH_WORKAROUND_1
	movt	r0, #:upper16:ARM_SMCCC_ARCH_WORKAROUND_1
	hvc	#0
	ldmfd	sp!, {r0 - r3}
	b	cpu_v7_switch_mm
ENDPROC(cpu_v7_hvc_switch_mm)
#endif
ENTRY(cpu_v7_iciallu_switch_mm)
	mov	r3, #0
	mcr	p15, 0, r3, c7, c5, 0		@ ICIALLU
	b	cpu_v7_switch_mm
ENDPROC(cpu_v7_iciallu_switch_mm)
ENTRY(cpu_v7_bpiall_switch_mm)
	mov	r3, #0
	mcr	p15, 0, r3, c7, c5, 6		@ flush BTAC/BTB
	b	cpu_v7_switch_mm
ENDPROC(cpu_v7_bpiall_switch_mm)

	string	cpu_v7_name, "ARMv7 Processor"
	.align

/* Suspend/resume support: derived from arch/arm/mach-s5pv210/sleep.S */
.globl	cpu_v7_suspend_size
.equ	cpu_v7_suspend_size, 4 * 9
#ifdef CONFIG_ARM_CPU_SUSPEND
ENTRY(cpu_v7_do_suspend)
	stmfd	sp!, {r4 - r11, lr}
	mrc	p15, 0, r4, c13, c0, 0	@ FCSE/PID
	mrc	p15, 0, r5, c13, c0, 3	@ User r/o thread ID
	stmia	r0!, {r4 - r5}
#ifdef CONFIG_MMU
	mrc	p15, 0, r6, c3, c0, 0	@ Domain ID
#ifdef CONFIG_ARM_LPAE
	mrrc	p15, 1, r5, r7, c2	@ TTB 1
#else
	mrc	p15, 0, r7, c2, c0, 1	@ TTB 1
#endif
	mrc	p15, 0, r11, c2, c0, 2	@ TTB control register
#endif
	mrc	p15, 0, r8, c1, c0, 0	@ Control register
	mrc	p15, 0, r9, c1, c0, 1	@ Auxiliary control register
	mrc	p15, 0, r10, c1, c0, 2	@ Co-processor access control
	stmia	r0, {r5 - r11}
	ldmfd	sp!, {r4 - r11, pc}
ENDPROC(cpu_v7_do_suspend)

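/*
 * Layout of the 9-word suspend buffer saved above (see
 * cpu_v7_suspend_size):
 *   word 0: FCSE/PID		word 1: user r/o thread ID
 *   word 2: TTB1 low word (LPAE only; otherwise unused on resume)
 *   word 3: Domain ID		word 4: TTB1 (TTB1 high word if LPAE)
 *   word 5: SCTLR		word 6: ACTLR
 *   word 7: CPACR		word 8: TTBCR
 */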
ENTRY(cpu_v7_do_resume)
	mov	ip, #0
	mcr	p15, 0, ip, c7, c5, 0	@ invalidate I cache
	mcr	p15, 0, ip, c13, c0, 1	@ set reserved context ID
	ldmia	r0!, {r4 - r5}
	mcr	p15, 0, r4, c13, c0, 0	@ FCSE/PID
	mcr	p15, 0, r5, c13, c0, 3	@ User r/o thread ID
	ldmia	r0, {r5 - r11}
#ifdef CONFIG_MMU
	mcr	p15, 0, ip, c8, c7, 0	@ invalidate TLBs
	mcr	p15, 0, r6, c3, c0, 0	@ Domain ID
#ifdef CONFIG_ARM_LPAE
	mcrr	p15, 0, r1, ip, c2	@ TTB 0
	mcrr	p15, 1, r5, r7, c2	@ TTB 1
#else
	ALT_SMP(orr	r1, r1, #TTB_FLAGS_SMP)
	ALT_UP(orr	r1, r1, #TTB_FLAGS_UP)
	mcr	p15, 0, r1, c2, c0, 0	@ TTB 0
	mcr	p15, 0, r7, c2, c0, 1	@ TTB 1
#endif
	mcr	p15, 0, r11, c2, c0, 2	@ TTB control register
	ldr	r4, =PRRR		@ PRRR
	ldr	r5, =NMRR		@ NMRR
	mcr	p15, 0, r4, c10, c2, 0	@ write PRRR
	mcr	p15, 0, r5, c10, c2, 1	@ write NMRR
#endif	/* CONFIG_MMU */
	mrc	p15, 0, r4, c1, c0, 1	@ Read Auxiliary control register
	teq	r4, r9			@ Is it already set?
	mcrne	p15, 0, r9, c1, c0, 1	@ No, so write it
	mcr	p15, 0, r10, c1, c0, 2	@ Co-processor access control
	isb
	dsb
	mov	r0, r8			@ control register
	b	cpu_resume_mmu
ENDPROC(cpu_v7_do_resume)
#endif

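/*
 * Cortex-A9 additionally preserves its Diagnostic and Power control
 * registers (CP15 c15) around the common v7 suspend/resume sequence.
 */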
.globl	cpu_ca9mp_suspend_size
.equ	cpu_ca9mp_suspend_size, cpu_v7_suspend_size + 4 * 2
#ifdef CONFIG_ARM_CPU_SUSPEND
ENTRY(cpu_ca9mp_do_suspend)
	stmfd	sp!, {r4 - r5}
	mrc	p15, 0, r4, c15, c0, 1		@ Diagnostic register
	mrc	p15, 0, r5, c15, c0, 0		@ Power register
	stmia	r0!, {r4 - r5}
	ldmfd	sp!, {r4 - r5}
	b	cpu_v7_do_suspend
ENDPROC(cpu_ca9mp_do_suspend)

ENTRY(cpu_ca9mp_do_resume)
	ldmia	r0!, {r4 - r5}
	mrc	p15, 0, r10, c15, c0, 1		@ Read Diagnostic register
	teq	r4, r10				@ Already restored?
	mcrne	p15, 0, r4, c15, c0, 1		@ No, so restore it
	mrc	p15, 0, r10, c15, c0, 0		@ Read Power register
	teq	r5, r10				@ Already restored?
	mcrne	p15, 0, r5, c15, c0, 0		@ No, so restore it
	b	cpu_v7_do_resume
ENDPROC(cpu_ca9mp_do_resume)
#endif

#ifdef CONFIG_CPU_PJ4B
	globl_equ	cpu_pj4b_switch_mm,     cpu_v7_switch_mm
	globl_equ	cpu_pj4b_set_pte_ext,	cpu_v7_set_pte_ext
	globl_equ	cpu_pj4b_proc_init,	cpu_v7_proc_init
	globl_equ	cpu_pj4b_proc_fin,	cpu_v7_proc_fin
	globl_equ	cpu_pj4b_reset,		cpu_v7_reset
#ifdef CONFIG_PJ4B_ERRATA_4742
ENTRY(cpu_pj4b_do_idle)
	dsb					@ WFI may enter a low-power mode
	wfi
	dsb					@ barrier
	ret	lr
ENDPROC(cpu_pj4b_do_idle)
#else
	globl_equ	cpu_pj4b_do_idle,	cpu_v7_do_idle
#endif
	globl_equ	cpu_pj4b_dcache_clean_area,	cpu_v7_dcache_clean_area
#ifdef CONFIG_ARM_CPU_SUSPEND
ENTRY(cpu_pj4b_do_suspend)
	stmfd	sp!, {r6 - r10}
	mrc	p15, 1, r6, c15, c1, 0	@ save CP15 - extra features
	mrc	p15, 1, r7, c15, c2, 0	@ save CP15 - Aux Func Modes Ctrl 0
	mrc	p15, 1, r8, c15, c1, 2	@ save CP15 - Aux Debug Modes Ctrl 2
	mrc	p15, 1, r9, c15, c1, 1	@ save CP15 - Aux Debug Modes Ctrl 1
	mrc	p15, 0, r10, c9, c14, 0	@ save CP15 - PMC
	stmia	r0!, {r6 - r10}
	ldmfd	sp!, {r6 - r10}
	b	cpu_v7_do_suspend
ENDPROC(cpu_pj4b_do_suspend)

ENTRY(cpu_pj4b_do_resume)
	ldmia	r0!, {r6 - r10}
	mcr	p15, 1, r6, c15, c1, 0	@ restore CP15 - extra features
	mcr	p15, 1, r7, c15, c2, 0	@ restore CP15 - Aux Func Modes Ctrl 0
	mcr	p15, 1, r8, c15, c1, 2	@ restore CP15 - Aux Debug Modes Ctrl 2
	mcr	p15, 1, r9, c15, c1, 1	@ restore CP15 - Aux Debug Modes Ctrl 1
	mcr	p15, 0, r10, c9, c14, 0	@ restore CP15 - PMC
	b	cpu_v7_do_resume
ENDPROC(cpu_pj4b_do_resume)
#endif
.globl	cpu_pj4b_suspend_size
.equ	cpu_pj4b_suspend_size, cpu_v7_suspend_size + 4 * 5
#endif

/*
 *	__v7_setup
 *
 *	Initialise TLB, Caches, and MMU state ready to switch the MMU
 *	on.  Return in r0 the new CP15 C1 control register setting.
 *
 *	r1, r2, r4, r5, r9, r13 must be preserved - r13 is not a stack
 *	r4: TTBR0 (low word)
 *	r5: TTBR0 (high word if LPAE)
 *	r8: TTBR1
 *	r9: Main ID register
 *
 *	This should be able to cover all ARMv7 cores.
 *
 *	It is assumed that:
 *	- cache type register is implemented
 */
__v7_ca5mp_setup:
__v7_ca9mp_setup:
__v7_cr7mp_setup:
	mov	r10, #(1 << 0)			@ Cache/TLB ops broadcasting
	b	1f
__v7_ca7mp_setup:
__v7_ca12mp_setup:
__v7_ca15mp_setup:
__v7_b15mp_setup:
__v7_ca17mp_setup:
	mov	r10, #0
1:	adr	r0, __v7_setup_stack_ptr
	ldr	r12, [r0]
	add	r12, r12, r0			@ the local stack
	stmia	r12, {r1-r6, lr}		@ v7_invalidate_l1 touches r0-r6
	bl      v7_invalidate_l1
	ldmia	r12, {r1-r6, lr}
#ifdef CONFIG_SMP
	orr	r10, r10, #(1 << 6)		@ Enable SMP/nAMP mode
	ALT_SMP(mrc	p15, 0, r0, c1, c0, 1)
	ALT_UP(mov	r0, r10)		@ fake it for UP
	orr	r10, r10, r0			@ Set required bits
	teq	r10, r0				@ Were they already set?
	mcrne	p15, 0, r10, c1, c0, 1		@ No, update register
#endif
	b	__v7_setup_cont

/*
 * Errata:
 *  r0, r10 available for use
 *  r1, r2, r4, r5, r9, r13: must be preserved
 *  r3: contains MIDR rX number in bits 23-20
 *  r6: contains MIDR rXpY as 8-bit XY number
 *  r9: MIDR
 */
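@ e.g. MIDR 0x411fc087 (Cortex-A8, variant r1, revision p7) yields
@ r3 = 0x00100000 and r6 = 0x17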
__ca8_errata:
#if defined(CONFIG_ARM_ERRATA_430973) && !defined(CONFIG_ARCH_MULTIPLATFORM)
	teq	r3, #0x00100000			@ only present in r1p*
	mrceq	p15, 0, r0, c1, c0, 1		@ read aux control register
	orreq	r0, r0, #(1 << 6)		@ set IBE to 1
	mcreq	p15, 0, r0, c1, c0, 1		@ write aux control register
#endif
#ifdef CONFIG_ARM_ERRATA_458693
	teq	r6, #0x20			@ only present in r2p0
	mrceq	p15, 0, r0, c1, c0, 1		@ read aux control register
	orreq	r0, r0, #(1 << 5)		@ set L1NEON to 1
	orreq	r0, r0, #(1 << 9)		@ set PLDNOP to 1
	mcreq	p15, 0, r0, c1, c0, 1		@ write aux control register
#endif
#ifdef CONFIG_ARM_ERRATA_460075
	teq	r6, #0x20			@ only present in r2p0
	mrceq	p15, 1, r0, c9, c0, 2		@ read L2 cache aux ctrl register
	tsteq	r0, #1 << 22
	orreq	r0, r0, #(1 << 22)		@ set the Write Allocate disable bit
	mcreq	p15, 1, r0, c9, c0, 2		@ write the L2 cache aux ctrl register
#endif
	b	__errata_finish

__ca9_errata:
#ifdef CONFIG_ARM_ERRATA_742230
	cmp	r6, #0x22			@ only present up to r2p2
	mrcle	p15, 0, r0, c15, c0, 1		@ read diagnostic register
	orrle	r0, r0, #1 << 4			@ set bit #4
	mcrle	p15, 0, r0, c15, c0, 1		@ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_742231
	teq	r6, #0x20			@ present in r2p0
	teqne	r6, #0x21			@ present in r2p1
	teqne	r6, #0x22			@ present in r2p2
	mrceq	p15, 0, r0, c15, c0, 1		@ read diagnostic register
	orreq	r0, r0, #1 << 12		@ set bit #12
	orreq	r0, r0, #1 << 22		@ set bit #22
	mcreq	p15, 0, r0, c15, c0, 1		@ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_743622
	teq	r3, #0x00200000			@ only present in r2p*
	mrceq	p15, 0, r0, c15, c0, 1		@ read diagnostic register
	orreq	r0, r0, #1 << 6			@ set bit #6
	mcreq	p15, 0, r0, c15, c0, 1		@ write diagnostic register
#endif
#if defined(CONFIG_ARM_ERRATA_751472) && defined(CONFIG_SMP)
	ALT_SMP(cmp r6, #0x30)			@ present prior to r3p0
	ALT_UP_B(1f)
	mrclt	p15, 0, r0, c15, c0, 1		@ read diagnostic register
	orrlt	r0, r0, #1 << 11		@ set bit #11
	mcrlt	p15, 0, r0, c15, c0, 1		@ write diagnostic register
1:
#endif
	b	__errata_finish

__ca15_errata:
#ifdef CONFIG_ARM_ERRATA_773022
	cmp	r6, #0x4			@ only present up to r0p4
	mrcle	p15, 0, r0, c1, c0, 1		@ read aux control register
	orrle	r0, r0, #1 << 1			@ disable loop buffer
	mcrle	p15, 0, r0, c1, c0, 1		@ write aux control register
#endif
	b	__errata_finish

__ca12_errata:
#ifdef CONFIG_ARM_ERRATA_818325_852422
	mrc	p15, 0, r10, c15, c0, 1		@ read diagnostic register
	orr	r10, r10, #1 << 12		@ set bit #12
	mcr	p15, 0, r10, c15, c0, 1		@ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_821420
	mrc	p15, 0, r10, c15, c0, 2		@ read internal feature reg
	orr	r10, r10, #1 << 1		@ set bit #1
	mcr	p15, 0, r10, c15, c0, 2		@ write internal feature reg
#endif
#ifdef CONFIG_ARM_ERRATA_825619
	mrc	p15, 0, r10, c15, c0, 1		@ read diagnostic register
	orr	r10, r10, #1 << 24		@ set bit #24
	mcr	p15, 0, r10, c15, c0, 1		@ write diagnostic register
#endif
	b	__errata_finish

__ca17_errata:
#ifdef CONFIG_ARM_ERRATA_852421
	cmp	r6, #0x12			@ only present up to r1p2
	mrcle	p15, 0, r10, c15, c0, 1		@ read diagnostic register
	orrle	r10, r10, #1 << 24		@ set bit #24
	mcrle	p15, 0, r10, c15, c0, 1		@ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_852423
	cmp	r6, #0x12			@ only present up to r1p2
	mrcle	p15, 0, r10, c15, c0, 1		@ read diagnostic register
	orrle	r10, r10, #1 << 12		@ set bit #12
	mcrle	p15, 0, r10, c15, c0, 1		@ write diagnostic register
#endif
	b	__errata_finish

__v7_pj4b_setup:
#ifdef CONFIG_CPU_PJ4B

/* Auxiliary Debug Modes Control 1 Register */
#define PJ4B_STATIC_BP (1 << 2) /* Enable Static BP */
#define PJ4B_INTER_PARITY (1 << 8) /* Disable Internal Parity Handling */
#define PJ4B_CLEAN_LINE (1 << 16) /* Disable data transfer for clean line */

/* Auxiliary Debug Modes Control 2 Register */
#define PJ4B_FAST_LDR (1 << 23) /* Disable fast LDR */
#define PJ4B_SNOOP_DATA (1 << 25) /* Do not interleave write and snoop data */
#define PJ4B_CWF (1 << 27) /* Disable Critical Word First feature */
#define PJ4B_OUTSDNG_NC (1 << 29) /* Disable outstanding non cacheable rqst */
#define PJ4B_L1_REP_RR (1 << 30) /* L1 replacement - Strict round robin */
#define PJ4B_AUX_DBG_CTRL2 (PJ4B_SNOOP_DATA | PJ4B_CWF |\
			    PJ4B_OUTSDNG_NC | PJ4B_L1_REP_RR)

/* Auxiliary Functional Modes Control Register 0 */
#define PJ4B_SMP_CFB (1 << 1) /* Set SMP mode. Join the coherency fabric */
#define PJ4B_L1_PAR_CHK (1 << 2) /* Support L1 parity checking */
#define PJ4B_BROADCAST_CACHE (1 << 8) /* Broadcast Cache and TLB maintenance */

/* Auxiliary Debug Modes Control 0 Register */
#define PJ4B_WFI_WFE (1 << 22) /* WFI/WFE - serve the DVM and back to idle */

	/* Auxiliary Debug Modes Control 1 Register */
	mrc	p15, 1,	r0, c15, c1, 1
	orr     r0, r0, #PJ4B_CLEAN_LINE
	orr     r0, r0, #PJ4B_INTER_PARITY
	bic	r0, r0, #PJ4B_STATIC_BP
	mcr	p15, 1,	r0, c15, c1, 1

	/* Auxiliary Debug Modes Control 2 Register */
	mrc	p15, 1,	r0, c15, c1, 2
	bic	r0, r0, #PJ4B_FAST_LDR
	orr	r0, r0, #PJ4B_AUX_DBG_CTRL2
	mcr	p15, 1,	r0, c15, c1, 2

	/* Auxiliary Functional Modes Control Register 0 */
	mrc	p15, 1,	r0, c15, c2, 0
#ifdef CONFIG_SMP
	orr	r0, r0, #PJ4B_SMP_CFB
#endif
	orr	r0, r0, #PJ4B_L1_PAR_CHK
	orr	r0, r0, #PJ4B_BROADCAST_CACHE
	mcr	p15, 1,	r0, c15, c2, 0

	/* Auxiliary Debug Modes Control 0 Register */
	mrc	p15, 1,	r0, c15, c1, 0
	orr	r0, r0, #PJ4B_WFI_WFE
	mcr	p15, 1,	r0, c15, c1, 0

#endif /* CONFIG_CPU_PJ4B */
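	/* __v7_pj4b_setup falls through into the common __v7_setup below. */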

__v7_setup:
	adr	r0, __v7_setup_stack_ptr
	ldr	r12, [r0]
	add	r12, r12, r0			@ the local stack
	stmia	r12, {r1-r6, lr}		@ v7_invalidate_l1 touches r0-r6
	bl      v7_invalidate_l1
	ldmia	r12, {r1-r6, lr}

__v7_setup_cont:
	and	r0, r9, #0xff000000		@ ARM?
	teq	r0, #0x41000000
	bne	__errata_finish
	and	r3, r9, #0x00f00000		@ variant
	and	r6, r9, #0x0000000f		@ revision
	orr	r6, r6, r3, lsr #20-4		@ combine variant and revision
	ubfx	r0, r9, #4, #12			@ primary part number

	/* Cortex-A8 Errata */
	ldr	r10, =0x00000c08		@ Cortex-A8 primary part number
	teq	r0, r10
	beq	__ca8_errata

	/* Cortex-A9 Errata */
	ldr	r10, =0x00000c09		@ Cortex-A9 primary part number
	teq	r0, r10
	beq	__ca9_errata

	/* Cortex-A12 Errata */
	ldr	r10, =0x00000c0d		@ Cortex-A12 primary part number
	teq	r0, r10
	beq	__ca12_errata

	/* Cortex-A17 Errata */
	ldr	r10, =0x00000c0e		@ Cortex-A17 primary part number
	teq	r0, r10
	beq	__ca17_errata

	/* Cortex-A15 Errata */
	ldr	r10, =0x00000c0f		@ Cortex-A15 primary part number
	teq	r0, r10
	beq	__ca15_errata

__errata_finish:
	mov	r10, #0
	mcr	p15, 0, r10, c7, c5, 0		@ I+BTB cache invalidate
#ifdef CONFIG_MMU
	mcr	p15, 0, r10, c8, c7, 0		@ invalidate I + D TLBs
	v7_ttb_setup r10, r4, r5, r8, r3	@ TTBCR, TTBRx setup
	ldr	r3, =PRRR			@ PRRR
	ldr	r6, =NMRR			@ NMRR
	mcr	p15, 0, r3, c10, c2, 0		@ write PRRR
	mcr	p15, 0, r6, c10, c2, 1		@ write NMRR
#endif
	dsb					@ Complete invalidations
#ifndef CONFIG_ARM_THUMBEE
	mrc	p15, 0, r0, c0, c1, 0		@ read ID_PFR0 for ThumbEE
	and	r0, r0, #(0xf << 12)		@ ThumbEE enabled field
	teq	r0, #(1 << 12)			@ check if ThumbEE is present
	bne	1f
	mov	r3, #0
	mcr	p14, 6, r3, c1, c0, 0		@ Initialize TEEHBR to 0
	mrc	p14, 6, r0, c0, c0, 0		@ load TEECR
	orr	r0, r0, #1			@ set the 1st bit in order to
	mcr	p14, 6, r0, c0, c0, 0		@ stop userspace TEEHBR access
1:
#endif
	adr	r3, v7_crval
	ldmia	r3, {r3, r6}
 ARM_BE8(orr	r6, r6, #1 << 25)		@ big-endian page tables
#ifdef CONFIG_SWP_EMULATE
	orr     r3, r3, #(1 << 10)              @ set SW bit in "clear"
	bic     r6, r6, #(1 << 10)              @ clear it in "mmuset"
#endif
	mrc	p15, 0, r0, c1, c0, 0		@ read control register
	bic	r0, r0, r3			@ clear bits
	orr	r0, r0, r6			@ set them
 THUMB(	orr	r0, r0, #1 << 30	)	@ Thumb exceptions
	ret	lr				@ return to head.S:__ret

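	/*
	 * The stack address is stored as a PHYS_RELATIVE offset from the
	 * .word itself; adding the pointer's own run-time address (as done
	 * at the "local stack" sites above) yields the physical address of
	 * __v7_setup_stack, which works from the identity mapping with the
	 * MMU still off.
	 */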
	.align	2
__v7_setup_stack_ptr:
	.word	PHYS_RELATIVE(__v7_setup_stack, .)
ENDPROC(__v7_setup)

	.bss
	.align	2
__v7_setup_stack:
	.space	4 * 7				@ 7 registers

	__INITDATA

	.weak cpu_v7_bugs_init

	@ define struct processor (see <asm/proc-fns.h> and proc-macros.S)
	define_processor_functions v7, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_bugs_init

#ifdef CONFIG_HARDEN_BRANCH_PREDICTOR
	@ generic v7 bpiall on context switch
	globl_equ	cpu_v7_bpiall_proc_init,	cpu_v7_proc_init
	globl_equ	cpu_v7_bpiall_proc_fin,		cpu_v7_proc_fin
	globl_equ	cpu_v7_bpiall_reset,		cpu_v7_reset
	globl_equ	cpu_v7_bpiall_do_idle,		cpu_v7_do_idle
	globl_equ	cpu_v7_bpiall_dcache_clean_area, cpu_v7_dcache_clean_area
	globl_equ	cpu_v7_bpiall_set_pte_ext,	cpu_v7_set_pte_ext
	globl_equ	cpu_v7_bpiall_suspend_size,	cpu_v7_suspend_size
#ifdef CONFIG_ARM_CPU_SUSPEND
	globl_equ	cpu_v7_bpiall_do_suspend,	cpu_v7_do_suspend
	globl_equ	cpu_v7_bpiall_do_resume,	cpu_v7_do_resume
#endif
	define_processor_functions v7_bpiall, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_bugs_init

#define HARDENED_BPIALL_PROCESSOR_FUNCTIONS v7_bpiall_processor_functions
#else
#define HARDENED_BPIALL_PROCESSOR_FUNCTIONS v7_processor_functions
#endif

#ifndef CONFIG_ARM_LPAE
	@ Cortex-A8 - always needs bpiall switch_mm implementation
	globl_equ	cpu_ca8_proc_init,	cpu_v7_proc_init
	globl_equ	cpu_ca8_proc_fin,	cpu_v7_proc_fin
	globl_equ	cpu_ca8_reset,		cpu_v7_reset
	globl_equ	cpu_ca8_do_idle,	cpu_v7_do_idle
	globl_equ	cpu_ca8_dcache_clean_area, cpu_v7_dcache_clean_area
	globl_equ	cpu_ca8_set_pte_ext,	cpu_v7_set_pte_ext
	globl_equ	cpu_ca8_switch_mm,	cpu_v7_bpiall_switch_mm
	globl_equ	cpu_ca8_suspend_size,	cpu_v7_suspend_size
#ifdef CONFIG_ARM_CPU_SUSPEND
	globl_equ	cpu_ca8_do_suspend,	cpu_v7_do_suspend
	globl_equ	cpu_ca8_do_resume,	cpu_v7_do_resume
#endif
	define_processor_functions ca8, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_ca8_ibe

	@ Cortex-A9 - needs more registers preserved across suspend/resume
	@ and bpiall switch_mm for hardening
	globl_equ	cpu_ca9mp_proc_init,	cpu_v7_proc_init
	globl_equ	cpu_ca9mp_proc_fin,	cpu_v7_proc_fin
	globl_equ	cpu_ca9mp_reset,	cpu_v7_reset
	globl_equ	cpu_ca9mp_do_idle,	cpu_v7_do_idle
	globl_equ	cpu_ca9mp_dcache_clean_area, cpu_v7_dcache_clean_area
#ifdef CONFIG_HARDEN_BRANCH_PREDICTOR
	globl_equ	cpu_ca9mp_switch_mm,	cpu_v7_bpiall_switch_mm
#else
	globl_equ	cpu_ca9mp_switch_mm,	cpu_v7_switch_mm
#endif
	globl_equ	cpu_ca9mp_set_pte_ext,	cpu_v7_set_pte_ext
	define_processor_functions ca9mp, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_bugs_init
#endif

	@ Cortex-A15 - needs iciallu switch_mm for hardening
	globl_equ	cpu_ca15_proc_init,	cpu_v7_proc_init
	globl_equ	cpu_ca15_proc_fin,	cpu_v7_proc_fin
	globl_equ	cpu_ca15_reset,		cpu_v7_reset
	globl_equ	cpu_ca15_do_idle,	cpu_v7_do_idle
	globl_equ	cpu_ca15_dcache_clean_area, cpu_v7_dcache_clean_area
#ifdef CONFIG_HARDEN_BRANCH_PREDICTOR
	globl_equ	cpu_ca15_switch_mm,	cpu_v7_iciallu_switch_mm
#else
	globl_equ	cpu_ca15_switch_mm,	cpu_v7_switch_mm
#endif
	globl_equ	cpu_ca15_set_pte_ext,	cpu_v7_set_pte_ext
	globl_equ	cpu_ca15_suspend_size,	cpu_v7_suspend_size
	globl_equ	cpu_ca15_do_suspend,	cpu_v7_do_suspend
	globl_equ	cpu_ca15_do_resume,	cpu_v7_do_resume
	define_processor_functions ca15, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_ca15_ibe
#ifdef CONFIG_CPU_PJ4B
	define_processor_functions pj4b, dabort=v7_early_abort, pabort=v7_pabort, suspend=1
#endif

	.section ".rodata"

	string	cpu_arch_name, "armv7"
	string	cpu_elf_name, "v7"
	.align

	.section ".proc.info.init", #alloc

	/*
	 * Standard v7 proc info content
	 */
.macro __v7_proc name, initfunc, mm_mmuflags = 0, io_mmuflags = 0, hwcaps = 0, proc_fns = v7_processor_functions, cache_fns = v7_cache_fns
	ALT_SMP(.long	PMD_TYPE_SECT | PMD_SECT_AP_WRITE | PMD_SECT_AP_READ | \
			PMD_SECT_AF | PMD_FLAGS_SMP | \mm_mmuflags)
	ALT_UP(.long	PMD_TYPE_SECT | PMD_SECT_AP_WRITE | PMD_SECT_AP_READ | \
			PMD_SECT_AF | PMD_FLAGS_UP | \mm_mmuflags)
	.long	PMD_TYPE_SECT | PMD_SECT_AP_WRITE | \
		PMD_SECT_AP_READ | PMD_SECT_AF | \io_mmuflags
	initfn	\initfunc, \name
	.long	cpu_arch_name
	.long	cpu_elf_name
	.long	HWCAP_SWP | HWCAP_HALF | HWCAP_THUMB | HWCAP_FAST_MULT | \
		HWCAP_EDSP | HWCAP_TLS | \hwcaps
	.long	cpu_v7_name
	.long	\proc_fns
	.long	v7wbi_tlb_fns
	.long	v6_user_fns
	.long	\cache_fns
.endm
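	/*
	 * Together with the two ID value/mask words emitted ahead of it,
	 * each __v7_proc expansion must match struct proc_info_list in
	 * <asm/procinfo.h> field for field.
	 */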

#ifndef CONFIG_ARM_LPAE
	/*
	 * ARM Ltd. Cortex A5 processor.
	 */
	.type   __v7_ca5mp_proc_info, #object
__v7_ca5mp_proc_info:
	.long	0x410fc050
	.long	0xff0ffff0
	__v7_proc __v7_ca5mp_proc_info, __v7_ca5mp_setup
	.size	__v7_ca5mp_proc_info, . - __v7_ca5mp_proc_info

	/*
	 * ARM Ltd. Cortex A9 processor.
	 */
	.type   __v7_ca9mp_proc_info, #object
__v7_ca9mp_proc_info:
	.long	0x410fc090
	.long	0xff0ffff0
	__v7_proc __v7_ca9mp_proc_info, __v7_ca9mp_setup, proc_fns = ca9mp_processor_functions
	.size	__v7_ca9mp_proc_info, . - __v7_ca9mp_proc_info

	/*
	 * ARM Ltd. Cortex A8 processor.
	 */
	.type	__v7_ca8_proc_info, #object
__v7_ca8_proc_info:
	.long	0x410fc080
	.long	0xff0ffff0
	__v7_proc __v7_ca8_proc_info, __v7_setup, proc_fns = ca8_processor_functions
	.size	__v7_ca8_proc_info, . - __v7_ca8_proc_info

#endif	/* CONFIG_ARM_LPAE */

	/*
	 * Marvell PJ4B processor.
	 */
#ifdef CONFIG_CPU_PJ4B
	.type   __v7_pj4b_proc_info, #object
__v7_pj4b_proc_info:
	.long	0x560f5800
	.long	0xff0fff00
	__v7_proc __v7_pj4b_proc_info, __v7_pj4b_setup, proc_fns = pj4b_processor_functions
	.size	__v7_pj4b_proc_info, . - __v7_pj4b_proc_info
#endif

	/*
	 * ARM Ltd. Cortex R7 processor.
	 */
	.type	__v7_cr7mp_proc_info, #object
__v7_cr7mp_proc_info:
	.long	0x410fc170
	.long	0xff0ffff0
	__v7_proc __v7_cr7mp_proc_info, __v7_cr7mp_setup
	.size	__v7_cr7mp_proc_info, . - __v7_cr7mp_proc_info

	/*
	 * ARM Ltd. Cortex A7 processor.
	 */
	.type	__v7_ca7mp_proc_info, #object
__v7_ca7mp_proc_info:
	.long	0x410fc070
	.long	0xff0ffff0
	__v7_proc __v7_ca7mp_proc_info, __v7_ca7mp_setup
	.size	__v7_ca7mp_proc_info, . - __v7_ca7mp_proc_info

	/*
	 * ARM Ltd. Cortex A12 processor.
	 */
	.type	__v7_ca12mp_proc_info, #object
__v7_ca12mp_proc_info:
	.long	0x410fc0d0
	.long	0xff0ffff0
	__v7_proc __v7_ca12mp_proc_info, __v7_ca12mp_setup, proc_fns = HARDENED_BPIALL_PROCESSOR_FUNCTIONS
	.size	__v7_ca12mp_proc_info, . - __v7_ca12mp_proc_info

	/*
	 * ARM Ltd. Cortex A15 processor.
	 */
	.type	__v7_ca15mp_proc_info, #object
__v7_ca15mp_proc_info:
	.long	0x410fc0f0
	.long	0xff0ffff0
	__v7_proc __v7_ca15mp_proc_info, __v7_ca15mp_setup, proc_fns = ca15_processor_functions
	.size	__v7_ca15mp_proc_info, . - __v7_ca15mp_proc_info

	/*
	 * Broadcom Corporation Brahma-B15 processor.
	 */
	.type	__v7_b15mp_proc_info, #object
__v7_b15mp_proc_info:
	.long	0x420f00f0
	.long	0xff0ffff0
	__v7_proc __v7_b15mp_proc_info, __v7_b15mp_setup, proc_fns = ca15_processor_functions, cache_fns = b15_cache_fns
	.size	__v7_b15mp_proc_info, . - __v7_b15mp_proc_info

	/*
	 * ARM Ltd. Cortex A17 processor.
	 */
	.type	__v7_ca17mp_proc_info, #object
__v7_ca17mp_proc_info:
	.long	0x410fc0e0
	.long	0xff0ffff0
	__v7_proc __v7_ca17mp_proc_info, __v7_ca17mp_setup, proc_fns = HARDENED_BPIALL_PROCESSOR_FUNCTIONS
	.size	__v7_ca17mp_proc_info, . - __v7_ca17mp_proc_info

	/* ARM Ltd. Cortex A73 processor */
	.type	__v7_ca73_proc_info, #object
__v7_ca73_proc_info:
	.long	0x410fd090
	.long	0xff0ffff0
	__v7_proc __v7_ca73_proc_info, __v7_setup, proc_fns = HARDENED_BPIALL_PROCESSOR_FUNCTIONS
	.size	__v7_ca73_proc_info, . - __v7_ca73_proc_info

	/* ARM Ltd. Cortex A75 processor */
	.type	__v7_ca75_proc_info, #object
__v7_ca75_proc_info:
	.long	0x410fd0a0
	.long	0xff0ffff0
	__v7_proc __v7_ca75_proc_info, __v7_setup, proc_fns = HARDENED_BPIALL_PROCESSOR_FUNCTIONS
	.size	__v7_ca75_proc_info, . - __v7_ca75_proc_info

	/*
	 * Qualcomm Inc. Krait processors.
	 */
	.type	__krait_proc_info, #object
__krait_proc_info:
	.long	0x510f0400		@ Required ID value
	.long	0xff0ffc00		@ Mask for ID
	/*
	 * Some Krait processors don't indicate support for SDIV and UDIV
	 * instructions in the ARM instruction set, even though they actually
	 * do support them. They also don't indicate support for fused multiply
	 * instructions even though they actually do support them.
	 */
	__v7_proc __krait_proc_info, __v7_setup, hwcaps = HWCAP_IDIV | HWCAP_VFPv4
	.size	__krait_proc_info, . - __krait_proc_info

	/*
	 * Match any ARMv7 processor core.
	 */
	.type	__v7_proc_info, #object
__v7_proc_info:
	.long	0x000f0000		@ Required ID value
	.long	0x000f0000		@ Mask for ID
	__v7_proc __v7_proc_info, __v7_setup
	.size	__v7_proc_info, . - __v7_proc_info