ARM / mach-shmobile: Invalidate caches when booting secondary cores
Make sure the L1 caches are invalidated when booting secondary
cores. This is needed to boot all mach-shmobile SMP systems that
use a Cortex-A9, including sh73a0, r8a7779 and EMEV2.

Thanks to the imx and tegra guys for the actual code.

Signed-off-by: Magnus Damm <damm@opensource.se>
Tested-by: Kuninori Morimoto <kuninori.morimoto.gx@renesas.com>
Signed-off-by: Rafael J. Wysocki <rjw@sisk.pl>
Magnus Damm authored and Rafael J. Wysocki committed May 12, 2012
1 parent d672000 commit e994d5e
Showing 1 changed file with 55 additions and 1 deletion: arch/arm/mach-shmobile/headsmp.S
@@ -16,6 +16,59 @@

__CPUINIT

/* Cache invalidation nicked from arch/arm/mach-imx/head-v7.S, thanks!
*
* The secondary kernel init calls v7_flush_dcache_all before it enables
* the L1; however, the L1 comes out of reset in an undefined state, so
* the clean + invalidate performed by v7_flush_dcache_all causes a bunch
* of cache lines with uninitialized data and uninitialized tags to get
* written out to memory, which does really unpleasant things to the main
* processor. We fix this by performing an invalidate, rather than a
* clean + invalidate, before jumping into the kernel.
*
* This function is cloned from arch/arm/mach-tegra/headsmp.S, and needs
* to be called for both secondary cores startup and primary core resume
* procedures. Ideally, it should be moved into arch/arm/mm/cache-v7.S.
*/
ENTRY(v7_invalidate_l1)
        mov     r0, #0
        mcr     p15, 0, r0, c7, c5, 0   @ invalidate I cache
        mcr     p15, 2, r0, c0, c0, 0   @ select L1 data cache in CSSELR
        mrc     p15, 1, r0, c0, c0, 0   @ read CCSIDR

        ldr     r1, =0x7fff
        and     r2, r1, r0, lsr #13

        ldr     r1, =0x3ff

        and     r3, r1, r0, lsr #3      @ NumWays - 1
        add     r2, r2, #1              @ NumSets

        and     r0, r0, #0x7
        add     r0, r0, #4              @ SetShift

        clz     r1, r3                  @ WayShift
        add     r4, r3, #1              @ NumWays
1:      sub     r2, r2, #1              @ NumSets--
        mov     r3, r4                  @ Temp = NumWays
2:      subs    r3, r3, #1              @ Temp--
        mov     r5, r3, lsl r1
        mov     r6, r2, lsl r0
        orr     r5, r5, r6              @ Reg = (Temp<<WayShift)|(NumSets<<SetShift)
        mcr     p15, 0, r5, c7, c6, 2   @ DCISW: invalidate by set/way
        bgt     2b
        cmp     r2, #0
        bgt     1b
        dsb
        isb
        mov     pc, lr
ENDPROC(v7_invalidate_l1)

ENTRY(shmobile_invalidate_start)
        bl      v7_invalidate_l1
        b       secondary_startup
ENDPROC(shmobile_invalidate_start)

/*
* Reset vector for secondary CPUs.
* This will be mapped at address 0 by SBAR register.
@@ -24,4 +77,5 @@
        .align  12
ENTRY(shmobile_secondary_vector)
        ldr     pc, 1f
-1:     .long   secondary_startup - PAGE_OFFSET + PLAT_PHYS_OFFSET
+1:     .long   shmobile_invalidate_start - PAGE_OFFSET + PLAT_PHYS_OFFSET
ENDPROC(shmobile_secondary_vector)
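
For readers who do not live in cp15 assembly, the loop in v7_invalidate_l1 above is simply a set/way walk over the L1 data cache described by CCSIDR. What follows is a rough C sketch of the same arithmetic, added here only as an illustration: the function name invalidate_l1_dcache_by_set_way is made up, and the real code has to stay in assembly because it runs before the secondary core has a stack or a C environment, which is exactly why the patch puts it in headsmp.S.

/*
 * Illustrative sketch (not part of the commit): set/way invalidation of
 * the L1 data cache on ARMv7, mirroring v7_invalidate_l1 above.
 * Privileged code only; assumes a set-associative cache (ways > 1),
 * as on the Cortex-A9 cores this patch targets.
 */
static void invalidate_l1_dcache_by_set_way(void)
{
        unsigned int ccsidr, sets, ways, set_shift, way_shift;
        unsigned int set, way, val;

        /* Select the L1 data cache in CSSELR, then read its CCSIDR. */
        __asm__ volatile("mcr p15, 2, %0, c0, c0, 0" : : "r"(0));
        __asm__ volatile("mrc p15, 1, %0, c0, c0, 0" : "=r"(ccsidr));

        sets      = ((ccsidr >> 13) & 0x7fff) + 1; /* NumSets */
        ways      = ((ccsidr >> 3) & 0x3ff) + 1;   /* Associativity */
        set_shift = (ccsidr & 0x7) + 4;            /* log2(line size in bytes) */
        way_shift = __builtin_clz(ways - 1);       /* way index lives in the top bits */

        for (set = 0; set < sets; set++) {
                for (way = 0; way < ways; way++) {
                        val = (way << way_shift) | (set << set_shift);
                        /* DCISW: invalidate (not clean) this line by set/way. */
                        __asm__ volatile("mcr p15, 0, %0, c7, c6, 2" : : "r"(val));
                }
        }
        __asm__ volatile("dsb\n\tisb" : : : "memory");
}

The point the commit message makes is visible in the inner loop: the operation issued is DCISW (invalidate only), not DCCISW (clean and invalidate), so the undefined contents the L1 holds out of reset are never written back to memory.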
