提交 c8c90860 编写于 作者: M Mika Westerberg 提交者: Russell King

ARM: 6466/1: implement flush_icache_all for the rest of the CPUs

Commit 81d11955 ("ARM: 6405/1: Handle __flush_icache_all for
CONFIG_SMP_ON_UP") added a new function to struct cpu_cache_fns:
flush_icache_all(). It also implemented this for v6 and v7 but not
for v5 and backwards. Without the function pointer in place, we
will be calling wrong cache functions.

For example with ep93xx we get following:

    Unable to handle kernel paging request at virtual address ee070f38
    pgd = c0004000
    [ee070f38] *pgd=00000000
    Internal error: Oops: 80000005 [#1] PREEMPT
    last sysfs file:
    Modules linked in:
    CPU: 0    Not tainted  (2.6.36+ #1)
    PC is at 0xee070f38
    LR is at __dma_alloc+0x11c/0x2d0
    pc : [<ee070f38>]    lr : [<c0032c8c>]    psr: 60000013
    sp : c581bde0  ip : 00000000  fp : c0472000
    r10: c0472000  r9 : 000000d0  r8 : 00020000
    r7 : 0001ffff  r6 : 00000000  r5 : c0472400  r4 : c5980000
    r3 : c03ab7e0  r2 : 00000000  r1 : c59a0000  r0 : c5980000
    Flags: nZCv  IRQs on  FIQs on  Mode SVC_32  ISA ARM  Segment kernel
    Control: c000717f  Table: c0004000  DAC: 00000017
    Process swapper (pid: 1, stack limit = 0xc581a270)
    [<c0032c8c>] (__dma_alloc+0x11c/0x2d0)
    [<c0032e5c>] (dma_alloc_writecombine+0x1c/0x24)
    [<c0204148>] (ep93xx_pcm_preallocate_dma_buffer+0x44/0x60)
    [<c02041c0>] (ep93xx_pcm_new+0x5c/0x88)
    [<c01ff188>] (snd_soc_instantiate_cards+0x8a8/0xbc0)
    [<c01ff59c>] (soc_probe+0xfc/0x134)
    [<c01adafc>] (platform_drv_probe+0x18/0x1c)
    [<c01acca4>] (driver_probe_device+0xb0/0x16c)
    [<c01ac284>] (bus_for_each_drv+0x48/0x84)
    [<c01ace90>] (device_attach+0x50/0x68)
    [<c01ac0f8>] (bus_probe_device+0x24/0x44)
    [<c01aad7c>] (device_add+0x2fc/0x44c)
    [<c01adfa8>] (platform_device_add+0x104/0x15c)
    [<c0015eb8>] (simone_init+0x60/0x94)
    [<c0021410>] (do_one_initcall+0xd0/0x1a4)

__dma_alloc() calls (inlined) __dma_alloc_buffer() which ends up
calling dmac_flush_range(). Now since the entries in the
arm920_cache_fns are shifted by one, we jump into address 0xee070f38
which is actually next instruction after the arm920_cache_fns
structure.

So implement flush_icache_all() for the rest of the supported CPUs
using a generic 'invalidate I cache' instruction.
Signed-off-by: Mika Westerberg <mika.westerberg@iki.fi>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
上级 4e54d93d
...@@ -37,6 +37,17 @@ ...@@ -37,6 +37,17 @@
/* FIXME: put optimal value here. Current one is just estimation */ /* FIXME: put optimal value here. Current one is just estimation */
#define CACHE_DLIMIT (CACHE_DSIZE * 2) #define CACHE_DLIMIT (CACHE_DSIZE * 2)
/*
* flush_icache_all()
*
* Unconditionally invalidate the entire icache. The icache is
* read-only from the CPU's point of view, so no clean step is
* issued — a single CP15 c7,c5,0 write invalidates all lines.
* Clobbers r0 (zeroed as the MCR data value); returns via lr.
*/
ENTRY(fa_flush_icache_all)
mov r0, #0
mcr p15, 0, r0, c7, c5, 0 @ invalidate entire I cache (write of zero to CP15 c7,c5,0)
mov pc, lr
ENDPROC(fa_flush_icache_all)
/* /*
* flush_user_cache_all() * flush_user_cache_all()
* *
...@@ -233,6 +244,7 @@ ENDPROC(fa_dma_unmap_area) ...@@ -233,6 +244,7 @@ ENDPROC(fa_dma_unmap_area)
.type fa_cache_fns, #object .type fa_cache_fns, #object
ENTRY(fa_cache_fns) ENTRY(fa_cache_fns)
.long fa_flush_icache_all
.long fa_flush_kern_cache_all .long fa_flush_kern_cache_all
.long fa_flush_user_cache_all .long fa_flush_user_cache_all
.long fa_flush_user_cache_range .long fa_flush_user_cache_range
......
...@@ -12,6 +12,15 @@ ...@@ -12,6 +12,15 @@
#include <asm/page.h> #include <asm/page.h>
#include "proc-macros.S" #include "proc-macros.S"
/*
* flush_icache_all()
*
* Unconditionally clean and invalidate the entire icache.
* Implemented as a no-op: no cache operation is issued here.
* NOTE(review): presumably this core has no separate icache
* maintenance op (unified cache) — confirm against the CPU manual.
* Preserves all registers; returns via lr.
*/
ENTRY(v3_flush_icache_all)
mov pc, lr
ENDPROC(v3_flush_icache_all)
/* /*
* flush_user_cache_all() * flush_user_cache_all()
* *
...@@ -122,6 +131,7 @@ ENDPROC(v3_dma_map_area) ...@@ -122,6 +131,7 @@ ENDPROC(v3_dma_map_area)
.type v3_cache_fns, #object .type v3_cache_fns, #object
ENTRY(v3_cache_fns) ENTRY(v3_cache_fns)
.long v3_flush_icache_all
.long v3_flush_kern_cache_all .long v3_flush_kern_cache_all
.long v3_flush_user_cache_all .long v3_flush_user_cache_all
.long v3_flush_user_cache_range .long v3_flush_user_cache_range
......
...@@ -12,6 +12,15 @@ ...@@ -12,6 +12,15 @@
#include <asm/page.h> #include <asm/page.h>
#include "proc-macros.S" #include "proc-macros.S"
/*
* flush_icache_all()
*
* Unconditionally clean and invalidate the entire icache.
* Implemented as a no-op: no cache operation is issued here.
* NOTE(review): presumably this core has no separate icache
* maintenance op (unified cache) — confirm against the CPU manual.
* Preserves all registers; returns via lr.
*/
ENTRY(v4_flush_icache_all)
mov pc, lr
ENDPROC(v4_flush_icache_all)
/* /*
* flush_user_cache_all() * flush_user_cache_all()
* *
...@@ -134,6 +143,7 @@ ENDPROC(v4_dma_map_area) ...@@ -134,6 +143,7 @@ ENDPROC(v4_dma_map_area)
.type v4_cache_fns, #object .type v4_cache_fns, #object
ENTRY(v4_cache_fns) ENTRY(v4_cache_fns)
.long v4_flush_icache_all
.long v4_flush_kern_cache_all .long v4_flush_kern_cache_all
.long v4_flush_user_cache_all .long v4_flush_user_cache_all
.long v4_flush_user_cache_range .long v4_flush_user_cache_range
......
...@@ -50,6 +50,17 @@ flush_base: ...@@ -50,6 +50,17 @@ flush_base:
.long FLUSH_BASE .long FLUSH_BASE
.text .text
/*
* flush_icache_all()
*
* Unconditionally invalidate the entire icache. The icache is
* read-only from the CPU's point of view, so no clean step is
* issued — a single CP15 c7,c5,0 write invalidates all lines.
* Clobbers r0 (zeroed as the MCR data value); returns via lr.
*/
ENTRY(v4wb_flush_icache_all)
mov r0, #0
mcr p15, 0, r0, c7, c5, 0 @ invalidate entire I cache (write of zero to CP15 c7,c5,0)
mov pc, lr
ENDPROC(v4wb_flush_icache_all)
/* /*
* flush_user_cache_all() * flush_user_cache_all()
* *
...@@ -244,6 +255,7 @@ ENDPROC(v4wb_dma_unmap_area) ...@@ -244,6 +255,7 @@ ENDPROC(v4wb_dma_unmap_area)
.type v4wb_cache_fns, #object .type v4wb_cache_fns, #object
ENTRY(v4wb_cache_fns) ENTRY(v4wb_cache_fns)
.long v4wb_flush_icache_all
.long v4wb_flush_kern_cache_all .long v4wb_flush_kern_cache_all
.long v4wb_flush_user_cache_all .long v4wb_flush_user_cache_all
.long v4wb_flush_user_cache_range .long v4wb_flush_user_cache_range
......
...@@ -40,6 +40,17 @@ ...@@ -40,6 +40,17 @@
*/ */
#define CACHE_DLIMIT 16384 #define CACHE_DLIMIT 16384
/*
* flush_icache_all()
*
* Unconditionally invalidate the entire icache. The icache is
* read-only from the CPU's point of view, so no clean step is
* issued — a single CP15 c7,c5,0 write invalidates all lines.
* Clobbers r0 (zeroed as the MCR data value); returns via lr.
*/
ENTRY(v4wt_flush_icache_all)
mov r0, #0
mcr p15, 0, r0, c7, c5, 0 @ invalidate entire I cache (write of zero to CP15 c7,c5,0)
mov pc, lr
ENDPROC(v4wt_flush_icache_all)
/* /*
* flush_user_cache_all() * flush_user_cache_all()
* *
...@@ -188,6 +199,7 @@ ENDPROC(v4wt_dma_map_area) ...@@ -188,6 +199,7 @@ ENDPROC(v4wt_dma_map_area)
.type v4wt_cache_fns, #object .type v4wt_cache_fns, #object
ENTRY(v4wt_cache_fns) ENTRY(v4wt_cache_fns)
.long v4wt_flush_icache_all
.long v4wt_flush_kern_cache_all .long v4wt_flush_kern_cache_all
.long v4wt_flush_user_cache_all .long v4wt_flush_user_cache_all
.long v4wt_flush_user_cache_range .long v4wt_flush_user_cache_range
......
...@@ -119,6 +119,20 @@ ENTRY(cpu_arm1020_do_idle) ...@@ -119,6 +119,20 @@ ENTRY(cpu_arm1020_do_idle)
/* ================================= CACHE ================================ */ /* ================================= CACHE ================================ */
.align 5 .align 5
/*
* flush_icache_all()
*
* Unconditionally invalidate the entire icache. Only an
* invalidate (no clean) is issued, via CP15 c7,c5,0. The
* operation is compiled out entirely when the kernel is built
* with CONFIG_CPU_ICACHE_DISABLE (icache not in use).
* Clobbers r0 when the invalidate is performed; returns via lr.
*/
ENTRY(arm1020_flush_icache_all)
#ifndef CONFIG_CPU_ICACHE_DISABLE
mov r0, #0
mcr p15, 0, r0, c7, c5, 0 @ invalidate entire I cache (write of zero to CP15 c7,c5,0)
#endif
mov pc, lr
ENDPROC(arm1020_flush_icache_all)
/* /*
* flush_user_cache_all() * flush_user_cache_all()
* *
...@@ -351,6 +365,7 @@ ENTRY(arm1020_dma_unmap_area) ...@@ -351,6 +365,7 @@ ENTRY(arm1020_dma_unmap_area)
ENDPROC(arm1020_dma_unmap_area) ENDPROC(arm1020_dma_unmap_area)
ENTRY(arm1020_cache_fns) ENTRY(arm1020_cache_fns)
.long arm1020_flush_icache_all
.long arm1020_flush_kern_cache_all .long arm1020_flush_kern_cache_all
.long arm1020_flush_user_cache_all .long arm1020_flush_user_cache_all
.long arm1020_flush_user_cache_range .long arm1020_flush_user_cache_range
......
...@@ -119,6 +119,20 @@ ENTRY(cpu_arm1020e_do_idle) ...@@ -119,6 +119,20 @@ ENTRY(cpu_arm1020e_do_idle)
/* ================================= CACHE ================================ */ /* ================================= CACHE ================================ */
.align 5 .align 5
/*
* flush_icache_all()
*
* Unconditionally invalidate the entire icache. Only an
* invalidate (no clean) is issued, via CP15 c7,c5,0. The
* operation is compiled out entirely when the kernel is built
* with CONFIG_CPU_ICACHE_DISABLE (icache not in use).
* Clobbers r0 when the invalidate is performed; returns via lr.
*/
ENTRY(arm1020e_flush_icache_all)
#ifndef CONFIG_CPU_ICACHE_DISABLE
mov r0, #0
mcr p15, 0, r0, c7, c5, 0 @ invalidate entire I cache (write of zero to CP15 c7,c5,0)
#endif
mov pc, lr
ENDPROC(arm1020e_flush_icache_all)
/* /*
* flush_user_cache_all() * flush_user_cache_all()
* *
...@@ -337,6 +351,7 @@ ENTRY(arm1020e_dma_unmap_area) ...@@ -337,6 +351,7 @@ ENTRY(arm1020e_dma_unmap_area)
ENDPROC(arm1020e_dma_unmap_area) ENDPROC(arm1020e_dma_unmap_area)
ENTRY(arm1020e_cache_fns) ENTRY(arm1020e_cache_fns)
.long arm1020e_flush_icache_all
.long arm1020e_flush_kern_cache_all .long arm1020e_flush_kern_cache_all
.long arm1020e_flush_user_cache_all .long arm1020e_flush_user_cache_all
.long arm1020e_flush_user_cache_range .long arm1020e_flush_user_cache_range
......
...@@ -108,6 +108,20 @@ ENTRY(cpu_arm1022_do_idle) ...@@ -108,6 +108,20 @@ ENTRY(cpu_arm1022_do_idle)
/* ================================= CACHE ================================ */ /* ================================= CACHE ================================ */
.align 5 .align 5
/*
* flush_icache_all()
*
* Unconditionally invalidate the entire icache. Only an
* invalidate (no clean) is issued, via CP15 c7,c5,0. The
* operation is compiled out entirely when the kernel is built
* with CONFIG_CPU_ICACHE_DISABLE (icache not in use).
* Clobbers r0 when the invalidate is performed; returns via lr.
*/
ENTRY(arm1022_flush_icache_all)
#ifndef CONFIG_CPU_ICACHE_DISABLE
mov r0, #0
mcr p15, 0, r0, c7, c5, 0 @ invalidate entire I cache (write of zero to CP15 c7,c5,0)
#endif
mov pc, lr
ENDPROC(arm1022_flush_icache_all)
/* /*
* flush_user_cache_all() * flush_user_cache_all()
* *
...@@ -326,6 +340,7 @@ ENTRY(arm1022_dma_unmap_area) ...@@ -326,6 +340,7 @@ ENTRY(arm1022_dma_unmap_area)
ENDPROC(arm1022_dma_unmap_area) ENDPROC(arm1022_dma_unmap_area)
ENTRY(arm1022_cache_fns) ENTRY(arm1022_cache_fns)
.long arm1022_flush_icache_all
.long arm1022_flush_kern_cache_all .long arm1022_flush_kern_cache_all
.long arm1022_flush_user_cache_all .long arm1022_flush_user_cache_all
.long arm1022_flush_user_cache_range .long arm1022_flush_user_cache_range
......
...@@ -108,6 +108,20 @@ ENTRY(cpu_arm1026_do_idle) ...@@ -108,6 +108,20 @@ ENTRY(cpu_arm1026_do_idle)
/* ================================= CACHE ================================ */ /* ================================= CACHE ================================ */
.align 5 .align 5
/*
* flush_icache_all()
*
* Unconditionally invalidate the entire icache. Only an
* invalidate (no clean) is issued, via CP15 c7,c5,0. The
* operation is compiled out entirely when the kernel is built
* with CONFIG_CPU_ICACHE_DISABLE (icache not in use).
* Clobbers r0 when the invalidate is performed; returns via lr.
*/
ENTRY(arm1026_flush_icache_all)
#ifndef CONFIG_CPU_ICACHE_DISABLE
mov r0, #0
mcr p15, 0, r0, c7, c5, 0 @ invalidate entire I cache (write of zero to CP15 c7,c5,0)
#endif
mov pc, lr
ENDPROC(arm1026_flush_icache_all)
/* /*
* flush_user_cache_all() * flush_user_cache_all()
* *
...@@ -320,6 +334,7 @@ ENTRY(arm1026_dma_unmap_area) ...@@ -320,6 +334,7 @@ ENTRY(arm1026_dma_unmap_area)
ENDPROC(arm1026_dma_unmap_area) ENDPROC(arm1026_dma_unmap_area)
ENTRY(arm1026_cache_fns) ENTRY(arm1026_cache_fns)
.long arm1026_flush_icache_all
.long arm1026_flush_kern_cache_all .long arm1026_flush_kern_cache_all
.long arm1026_flush_user_cache_all .long arm1026_flush_user_cache_all
.long arm1026_flush_user_cache_range .long arm1026_flush_user_cache_range
......
...@@ -109,6 +109,17 @@ ENTRY(cpu_arm920_do_idle) ...@@ -109,6 +109,17 @@ ENTRY(cpu_arm920_do_idle)
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH #ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
/*
* flush_icache_all()
*
* Unconditionally invalidate the entire icache. The icache is
* read-only from the CPU's point of view, so no clean step is
* issued — a single CP15 c7,c5,0 write invalidates all lines.
* Clobbers r0 (zeroed as the MCR data value); returns via lr.
*/
ENTRY(arm920_flush_icache_all)
mov r0, #0
mcr p15, 0, r0, c7, c5, 0 @ invalidate entire I cache (write of zero to CP15 c7,c5,0)
mov pc, lr
ENDPROC(arm920_flush_icache_all)
/* /*
* flush_user_cache_all() * flush_user_cache_all()
* *
...@@ -305,6 +316,7 @@ ENTRY(arm920_dma_unmap_area) ...@@ -305,6 +316,7 @@ ENTRY(arm920_dma_unmap_area)
ENDPROC(arm920_dma_unmap_area) ENDPROC(arm920_dma_unmap_area)
ENTRY(arm920_cache_fns) ENTRY(arm920_cache_fns)
.long arm920_flush_icache_all
.long arm920_flush_kern_cache_all .long arm920_flush_kern_cache_all
.long arm920_flush_user_cache_all .long arm920_flush_user_cache_all
.long arm920_flush_user_cache_range .long arm920_flush_user_cache_range
......
...@@ -111,6 +111,17 @@ ENTRY(cpu_arm922_do_idle) ...@@ -111,6 +111,17 @@ ENTRY(cpu_arm922_do_idle)
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH #ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
/*
* flush_icache_all()
*
* Unconditionally invalidate the entire icache. The icache is
* read-only from the CPU's point of view, so no clean step is
* issued — a single CP15 c7,c5,0 write invalidates all lines.
* Clobbers r0 (zeroed as the MCR data value); returns via lr.
*/
ENTRY(arm922_flush_icache_all)
mov r0, #0
mcr p15, 0, r0, c7, c5, 0 @ invalidate entire I cache (write of zero to CP15 c7,c5,0)
mov pc, lr
ENDPROC(arm922_flush_icache_all)
/* /*
* flush_user_cache_all() * flush_user_cache_all()
* *
...@@ -307,6 +318,7 @@ ENTRY(arm922_dma_unmap_area) ...@@ -307,6 +318,7 @@ ENTRY(arm922_dma_unmap_area)
ENDPROC(arm922_dma_unmap_area) ENDPROC(arm922_dma_unmap_area)
ENTRY(arm922_cache_fns) ENTRY(arm922_cache_fns)
.long arm922_flush_icache_all
.long arm922_flush_kern_cache_all .long arm922_flush_kern_cache_all
.long arm922_flush_user_cache_all .long arm922_flush_user_cache_all
.long arm922_flush_user_cache_range .long arm922_flush_user_cache_range
......
...@@ -144,6 +144,17 @@ ENTRY(cpu_arm925_do_idle) ...@@ -144,6 +144,17 @@ ENTRY(cpu_arm925_do_idle)
mcr p15, 0, r1, c1, c0, 0 @ Restore ICache enable mcr p15, 0, r1, c1, c0, 0 @ Restore ICache enable
mov pc, lr mov pc, lr
/*
* flush_icache_all()
*
* Unconditionally invalidate the entire icache. The icache is
* read-only from the CPU's point of view, so no clean step is
* issued — a single CP15 c7,c5,0 write invalidates all lines.
* Clobbers r0 (zeroed as the MCR data value); returns via lr.
*/
ENTRY(arm925_flush_icache_all)
mov r0, #0
mcr p15, 0, r0, c7, c5, 0 @ invalidate entire I cache (write of zero to CP15 c7,c5,0)
mov pc, lr
ENDPROC(arm925_flush_icache_all)
/* /*
* flush_user_cache_all() * flush_user_cache_all()
* *
...@@ -362,6 +373,7 @@ ENTRY(arm925_dma_unmap_area) ...@@ -362,6 +373,7 @@ ENTRY(arm925_dma_unmap_area)
ENDPROC(arm925_dma_unmap_area) ENDPROC(arm925_dma_unmap_area)
ENTRY(arm925_cache_fns) ENTRY(arm925_cache_fns)
.long arm925_flush_icache_all
.long arm925_flush_kern_cache_all .long arm925_flush_kern_cache_all
.long arm925_flush_user_cache_all .long arm925_flush_user_cache_all
.long arm925_flush_user_cache_range .long arm925_flush_user_cache_range
......
...@@ -110,6 +110,17 @@ ENTRY(cpu_arm926_do_idle) ...@@ -110,6 +110,17 @@ ENTRY(cpu_arm926_do_idle)
msr cpsr_c, r3 @ Restore FIQ state msr cpsr_c, r3 @ Restore FIQ state
mov pc, lr mov pc, lr
/*
* flush_icache_all()
*
* Unconditionally invalidate the entire icache. The icache is
* read-only from the CPU's point of view, so no clean step is
* issued — a single CP15 c7,c5,0 write invalidates all lines.
* Clobbers r0 (zeroed as the MCR data value); returns via lr.
*/
ENTRY(arm926_flush_icache_all)
mov r0, #0
mcr p15, 0, r0, c7, c5, 0 @ invalidate entire I cache (write of zero to CP15 c7,c5,0)
mov pc, lr
ENDPROC(arm926_flush_icache_all)
/* /*
* flush_user_cache_all() * flush_user_cache_all()
* *
...@@ -325,6 +336,7 @@ ENTRY(arm926_dma_unmap_area) ...@@ -325,6 +336,7 @@ ENTRY(arm926_dma_unmap_area)
ENDPROC(arm926_dma_unmap_area) ENDPROC(arm926_dma_unmap_area)
ENTRY(arm926_cache_fns) ENTRY(arm926_cache_fns)
.long arm926_flush_icache_all
.long arm926_flush_kern_cache_all .long arm926_flush_kern_cache_all
.long arm926_flush_user_cache_all .long arm926_flush_user_cache_all
.long arm926_flush_user_cache_range .long arm926_flush_user_cache_range
......
...@@ -67,6 +67,17 @@ ENTRY(cpu_arm940_do_idle) ...@@ -67,6 +67,17 @@ ENTRY(cpu_arm940_do_idle)
mcr p15, 0, r0, c7, c0, 4 @ Wait for interrupt mcr p15, 0, r0, c7, c0, 4 @ Wait for interrupt
mov pc, lr mov pc, lr
/*
* flush_icache_all()
*
* Unconditionally invalidate the entire icache. The icache is
* read-only from the CPU's point of view, so no clean step is
* issued — a single CP15 c7,c5,0 write invalidates all lines.
* Clobbers r0 (zeroed as the MCR data value); returns via lr.
*/
ENTRY(arm940_flush_icache_all)
mov r0, #0
mcr p15, 0, r0, c7, c5, 0 @ invalidate entire I cache (write of zero to CP15 c7,c5,0)
mov pc, lr
ENDPROC(arm940_flush_icache_all)
/* /*
* flush_user_cache_all() * flush_user_cache_all()
*/ */
...@@ -254,6 +265,7 @@ ENTRY(arm940_dma_unmap_area) ...@@ -254,6 +265,7 @@ ENTRY(arm940_dma_unmap_area)
ENDPROC(arm940_dma_unmap_area) ENDPROC(arm940_dma_unmap_area)
ENTRY(arm940_cache_fns) ENTRY(arm940_cache_fns)
.long arm940_flush_icache_all
.long arm940_flush_kern_cache_all .long arm940_flush_kern_cache_all
.long arm940_flush_user_cache_all .long arm940_flush_user_cache_all
.long arm940_flush_user_cache_range .long arm940_flush_user_cache_range
......
...@@ -74,6 +74,17 @@ ENTRY(cpu_arm946_do_idle) ...@@ -74,6 +74,17 @@ ENTRY(cpu_arm946_do_idle)
mcr p15, 0, r0, c7, c0, 4 @ Wait for interrupt mcr p15, 0, r0, c7, c0, 4 @ Wait for interrupt
mov pc, lr mov pc, lr
/*
* flush_icache_all()
*
* Unconditionally invalidate the entire icache. The icache is
* read-only from the CPU's point of view, so no clean step is
* issued — a single CP15 c7,c5,0 write invalidates all lines.
* Clobbers r0 (zeroed as the MCR data value); returns via lr.
*/
ENTRY(arm946_flush_icache_all)
mov r0, #0
mcr p15, 0, r0, c7, c5, 0 @ invalidate entire I cache (write of zero to CP15 c7,c5,0)
mov pc, lr
ENDPROC(arm946_flush_icache_all)
/* /*
* flush_user_cache_all() * flush_user_cache_all()
*/ */
...@@ -296,6 +307,7 @@ ENTRY(arm946_dma_unmap_area) ...@@ -296,6 +307,7 @@ ENTRY(arm946_dma_unmap_area)
ENDPROC(arm946_dma_unmap_area) ENDPROC(arm946_dma_unmap_area)
ENTRY(arm946_cache_fns) ENTRY(arm946_cache_fns)
.long arm946_flush_icache_all
.long arm946_flush_kern_cache_all .long arm946_flush_kern_cache_all
.long arm946_flush_user_cache_all .long arm946_flush_user_cache_all
.long arm946_flush_user_cache_range .long arm946_flush_user_cache_range
......
...@@ -123,6 +123,17 @@ ENTRY(cpu_feroceon_do_idle) ...@@ -123,6 +123,17 @@ ENTRY(cpu_feroceon_do_idle)
mcr p15, 0, r0, c7, c0, 4 @ Wait for interrupt mcr p15, 0, r0, c7, c0, 4 @ Wait for interrupt
mov pc, lr mov pc, lr
/*
* flush_icache_all()
*
* Unconditionally invalidate the entire icache. The icache is
* read-only from the CPU's point of view, so no clean step is
* issued — a single CP15 c7,c5,0 write invalidates all lines.
* Clobbers r0 (zeroed as the MCR data value); returns via lr.
*/
ENTRY(feroceon_flush_icache_all)
mov r0, #0
mcr p15, 0, r0, c7, c5, 0 @ invalidate entire I cache (write of zero to CP15 c7,c5,0)
mov pc, lr
ENDPROC(feroceon_flush_icache_all)
/* /*
* flush_user_cache_all() * flush_user_cache_all()
* *
...@@ -401,6 +412,7 @@ ENTRY(feroceon_dma_unmap_area) ...@@ -401,6 +412,7 @@ ENTRY(feroceon_dma_unmap_area)
ENDPROC(feroceon_dma_unmap_area) ENDPROC(feroceon_dma_unmap_area)
ENTRY(feroceon_cache_fns) ENTRY(feroceon_cache_fns)
.long feroceon_flush_icache_all
.long feroceon_flush_kern_cache_all .long feroceon_flush_kern_cache_all
.long feroceon_flush_user_cache_all .long feroceon_flush_user_cache_all
.long feroceon_flush_user_cache_range .long feroceon_flush_user_cache_range
...@@ -412,6 +424,7 @@ ENTRY(feroceon_cache_fns) ...@@ -412,6 +424,7 @@ ENTRY(feroceon_cache_fns)
.long feroceon_dma_flush_range .long feroceon_dma_flush_range
ENTRY(feroceon_range_cache_fns) ENTRY(feroceon_range_cache_fns)
.long feroceon_flush_icache_all
.long feroceon_flush_kern_cache_all .long feroceon_flush_kern_cache_all
.long feroceon_flush_user_cache_all .long feroceon_flush_user_cache_all
.long feroceon_flush_user_cache_range .long feroceon_flush_user_cache_range
......
...@@ -140,6 +140,17 @@ ENTRY(cpu_xsc3_do_idle) ...@@ -140,6 +140,17 @@ ENTRY(cpu_xsc3_do_idle)
/* ================================= CACHE ================================ */ /* ================================= CACHE ================================ */
/*
* flush_icache_all()
*
* Unconditionally invalidate the entire icache. The icache is
* read-only from the CPU's point of view, so no clean step is
* issued — a single CP15 c7,c5,0 write invalidates all lines.
* Clobbers r0 (zeroed as the MCR data value); returns via lr.
*/
ENTRY(xsc3_flush_icache_all)
mov r0, #0
mcr p15, 0, r0, c7, c5, 0 @ invalidate entire I cache (write of zero to CP15 c7,c5,0)
mov pc, lr
ENDPROC(xsc3_flush_icache_all)
/* /*
* flush_user_cache_all() * flush_user_cache_all()
* *
...@@ -325,6 +336,7 @@ ENTRY(xsc3_dma_unmap_area) ...@@ -325,6 +336,7 @@ ENTRY(xsc3_dma_unmap_area)
ENDPROC(xsc3_dma_unmap_area) ENDPROC(xsc3_dma_unmap_area)
ENTRY(xsc3_cache_fns) ENTRY(xsc3_cache_fns)
.long xsc3_flush_icache_all
.long xsc3_flush_kern_cache_all .long xsc3_flush_kern_cache_all
.long xsc3_flush_user_cache_all .long xsc3_flush_user_cache_all
.long xsc3_flush_user_cache_range .long xsc3_flush_user_cache_range
......
...@@ -180,6 +180,17 @@ ENTRY(cpu_xscale_do_idle) ...@@ -180,6 +180,17 @@ ENTRY(cpu_xscale_do_idle)
/* ================================= CACHE ================================ */ /* ================================= CACHE ================================ */
/*
* flush_icache_all()
*
* Unconditionally invalidate the entire icache. The icache is
* read-only from the CPU's point of view, so no clean step is
* issued — a single CP15 c7,c5,0 write invalidates all lines.
* Clobbers r0 (zeroed as the MCR data value); returns via lr.
*/
ENTRY(xscale_flush_icache_all)
mov r0, #0
mcr p15, 0, r0, c7, c5, 0 @ invalidate entire I cache (write of zero to CP15 c7,c5,0)
mov pc, lr
ENDPROC(xscale_flush_icache_all)
/* /*
* flush_user_cache_all() * flush_user_cache_all()
* *
...@@ -397,6 +408,7 @@ ENTRY(xscale_dma_unmap_area) ...@@ -397,6 +408,7 @@ ENTRY(xscale_dma_unmap_area)
ENDPROC(xscale_dma_unmap_area) ENDPROC(xscale_dma_unmap_area)
ENTRY(xscale_cache_fns) ENTRY(xscale_cache_fns)
.long xscale_flush_icache_all
.long xscale_flush_kern_cache_all .long xscale_flush_kern_cache_all
.long xscale_flush_user_cache_all .long xscale_flush_user_cache_all
.long xscale_flush_user_cache_range .long xscale_flush_user_cache_range
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册