Merge branch 'v3-removal' into for-linus
Conflicts:
	arch/arm/boot/compressed/head.S
Russell King committed May 21, 2012
2 parents 4175160 + 4cdfc2e commit 4ab1056
Showing 19 changed files with 10 additions and 1,454 deletions.
2 changes: 0 additions & 2 deletions arch/arm/Makefile
@@ -70,8 +70,6 @@ arch-$(CONFIG_CPU_32v4) :=-D__LINUX_ARM_ARCH__=4 -march=armv4
arch-$(CONFIG_CPU_32v3) :=-D__LINUX_ARM_ARCH__=3 -march=armv3

# This selects how we optimise for the processor.
tune-$(CONFIG_CPU_ARM610) :=-mtune=arm610
tune-$(CONFIG_CPU_ARM710) :=-mtune=arm710
tune-$(CONFIG_CPU_ARM7TDMI) :=-mtune=arm7tdmi
tune-$(CONFIG_CPU_ARM720T) :=-mtune=arm7tdmi
tune-$(CONFIG_CPU_ARM740T) :=-mtune=arm7tdmi
44 changes: 4 additions & 40 deletions arch/arm/boot/compressed/head.S
@@ -686,19 +686,6 @@ __fa526_cache_on:
mcr p15, 0, r0, c8, c7, 0 @ flush UTLB
mov pc, r12

__arm6_mmu_cache_on:
mov r12, lr
mov r6, #CB_BITS | 0x12 @ U
bl __setup_mmu
mov r0, #0
mcr p15, 0, r0, c7, c0, 0 @ invalidate whole cache v3
mcr p15, 0, r0, c5, c0, 0 @ invalidate whole TLB v3
mov r0, #0x30
bl __common_mmu_cache_on
mov r0, #0
mcr p15, 0, r0, c5, c0, 0 @ invalidate whole TLB v3
mov pc, r12

__common_mmu_cache_on:
#ifndef CONFIG_THUMB2_KERNEL
#ifndef DEBUG
@@ -763,16 +750,6 @@ call_cache_fn: adr r12, proc_types
.align 2
.type proc_types,#object
proc_types:
.word 0x41560600 @ ARM6/610
.word 0xffffffe0
W(b) __arm6_mmu_cache_off @ works, but slow
W(b) __arm6_mmu_cache_off
mov pc, lr
THUMB( nop )
@ b __arm6_mmu_cache_on @ untested
@ b __arm6_mmu_cache_off
@ b __armv3_mmu_cache_flush

.word 0x00000000 @ old ARM ID
.word 0x0000f000
mov pc, lr
@@ -784,8 +761,10 @@ proc_types:

.word 0x41007000 @ ARM7/710
.word 0xfff8fe00
W(b) __arm7_mmu_cache_off
W(b) __arm7_mmu_cache_off
mov pc, lr
THUMB( nop )
mov pc, lr
THUMB( nop )
mov pc, lr
THUMB( nop )

@@ -984,21 +963,6 @@ __armv7_mmu_cache_off:
mcr p15, 0, r0, c7, c5, 4 @ ISB
mov pc, r12

__arm6_mmu_cache_off:
mov r0, #0x00000030 @ ARM6 control reg.
b __armv3_mmu_cache_off

__arm7_mmu_cache_off:
mov r0, #0x00000070 @ ARM7 control reg.
b __armv3_mmu_cache_off

__armv3_mmu_cache_off:
mcr p15, 0, r0, c1, c0, 0 @ turn MMU and cache off
mov r0, #0
mcr p15, 0, r0, c7, c0, 0 @ invalidate whole cache v3
mcr p15, 0, r0, c5, c0, 0 @ invalidate whole TLB v3
mov pc, lr

/*
* Clean and flush the cache to maintain consistency.
*
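
For readers following the head.S hunks above: each proc_types entry pairs a CPU ID value and mask with branches to that CPU's cache handlers, and call_cache_fn walks the table to find the first match. Below is a rough C analogue of that lookup; the struct and function names are illustrative and not taken from head.S.

#include <stddef.h>

/* One proc_types-style entry: match (cpuid & id_mask) == id_value, then
 * use that CPU's cache handlers.  Dropping ARM6/610 support just deletes
 * one row from this table. */
struct proc_type {
	unsigned int id_value;		/* e.g. 0x41007000 for ARM7/710 */
	unsigned int id_mask;		/* e.g. 0xfff8fe00 */
	void (*cache_on)(void);
	void (*cache_off)(void);
	void (*cache_flush)(void);
};

/* Return the first entry matching the CPU ID, or NULL if none matches
 * (the real table ends with a catch-all entry, so a match always exists). */
static const struct proc_type *find_proc_type(const struct proc_type *tbl,
					      size_t n, unsigned int cpuid)
{
	for (size_t i = 0; i < n; i++)
		if ((cpuid & tbl[i].id_mask) == tbl[i].id_value)
			return &tbl[i];
	return NULL;
}
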
2 changes: 0 additions & 2 deletions arch/arm/configs/rpc_defconfig
@@ -8,8 +8,6 @@ CONFIG_MODULES=y
CONFIG_MODULE_UNLOAD=y
# CONFIG_BLK_DEV_BSG is not set
CONFIG_ARCH_RPC=y
CONFIG_CPU_ARM610=y
CONFIG_CPU_ARM710=y
CONFIG_CPU_SA110=y
CONFIG_ZBOOT_ROM_TEXT=0x0
CONFIG_ZBOOT_ROM_BSS=0x0
8 changes: 0 additions & 8 deletions arch/arm/include/asm/glue-df.h
@@ -31,14 +31,6 @@
#undef CPU_DABORT_HANDLER
#undef MULTI_DABORT

#if defined(CONFIG_CPU_ARM610)
# ifdef CPU_DABORT_HANDLER
# define MULTI_DABORT 1
# else
# define CPU_DABORT_HANDLER cpu_arm6_data_abort
# endif
#endif

#if defined(CONFIG_CPU_ARM710)
# ifdef CPU_DABORT_HANDLER
# define MULTI_DABORT 1
18 changes: 0 additions & 18 deletions arch/arm/include/asm/glue-proc.h
@@ -23,15 +23,6 @@
* CPU_NAME - the prefix for CPU related functions
*/

#ifdef CONFIG_CPU_ARM610
# ifdef CPU_NAME
# undef MULTI_CPU
# define MULTI_CPU
# else
# define CPU_NAME cpu_arm6
# endif
#endif

#ifdef CONFIG_CPU_ARM7TDMI
# ifdef CPU_NAME
# undef MULTI_CPU
@@ -41,15 +32,6 @@
# endif
#endif

#ifdef CONFIG_CPU_ARM710
# ifdef CPU_NAME
# undef MULTI_CPU
# define MULTI_CPU
# else
# define CPU_NAME cpu_arm7
# endif
#endif

#ifdef CONFIG_CPU_ARM720T
# ifdef CPU_NAME
# undef MULTI_CPU
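
The glue-df.h and glue-proc.h hunks above trim the same single-vs-multi dispatch pattern (CPU_DABORT_HANDLER/MULTI_DABORT and CPU_NAME/MULTI_CPU). A simplified sketch of the idea, not the kernel's exact macros: with one CPU type configured, CPU_NAME is pasted straight into the function names; with several, MULTI_CPU forces calls through a function-pointer struct filled in at boot.

#ifdef MULTI_CPU
struct processor {
	void (*_proc_init)(void);
	void (*_do_idle)(void);
};
extern struct processor processor;	/* filled in from the CPU tables at boot */
#define cpu_proc_init()		processor._proc_init()
#define cpu_do_idle()		processor._do_idle()
#else
#define ____glue(name, fn)	name##fn
#define __glue(name, fn)	____glue(name, fn)	/* extra level so CPU_NAME expands first */
#define cpu_proc_init()		__glue(CPU_NAME, _proc_init)()
#define cpu_do_idle()		__glue(CPU_NAME, _do_idle)()
/* e.g. with "#define CPU_NAME cpu_arm720", cpu_proc_init() expands to
 * cpu_arm720_proc_init(). */
#endif
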
9 changes: 0 additions & 9 deletions arch/arm/include/asm/page.h
@@ -34,7 +34,6 @@
* processor(s) we're building for.
*
* We have the following to choose from:
* v3 - ARMv3
* v4wt - ARMv4 with writethrough cache, without minicache
* v4wb - ARMv4 with writeback cache, without minicache
* v4_mc - ARMv4 with minicache
@@ -44,14 +43,6 @@
#undef _USER
#undef MULTI_USER

#ifdef CONFIG_CPU_COPY_V3
# ifdef _USER
# define MULTI_USER 1
# else
# define _USER v3
# endif
#endif

#ifdef CONFIG_CPU_COPY_V4WT
# ifdef _USER
# define MULTI_USER 1
21 changes: 2 additions & 19 deletions arch/arm/include/asm/tlbflush.h
@@ -65,21 +65,6 @@
#define MULTI_TLB 1
#endif

#define v3_tlb_flags (TLB_V3_FULL | TLB_V3_PAGE)

#ifdef CONFIG_CPU_TLB_V3
# define v3_possible_flags v3_tlb_flags
# define v3_always_flags v3_tlb_flags
# ifdef _TLB
# define MULTI_TLB 1
# else
# define _TLB v3
# endif
#else
# define v3_possible_flags 0
# define v3_always_flags (-1UL)
#endif

#define v4_tlb_flags (TLB_V4_U_FULL | TLB_V4_U_PAGE)

#ifdef CONFIG_CPU_TLB_V4WT
@@ -298,17 +283,15 @@ extern struct cpu_tlb_fns cpu_tlb;
* implemented the "%?" method, but this has been discontinued due to too
* many people getting it wrong.
*/
#define possible_tlb_flags (v3_possible_flags | \
v4_possible_flags | \
#define possible_tlb_flags (v4_possible_flags | \
v4wbi_possible_flags | \
fr_possible_flags | \
v4wb_possible_flags | \
fa_possible_flags | \
v6wbi_possible_flags | \
v7wbi_possible_flags)

#define always_tlb_flags (v3_always_flags & \
v4_always_flags & \
#define always_tlb_flags (v4_always_flags & \
v4wbi_always_flags & \
fr_always_flags & \
v4wb_always_flags & \
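
The point of the possible_tlb_flags/always_tlb_flags masks edited above is compile-time pruning: a TLB-flag test collapses to a constant when the flag can never be set (not in the union of configured TLB types) or is always set (in the intersection). A simplified, self-contained sketch of that idea, not the kernel's exact macros:

#define TLB_FLAG_A		(1u << 0)
#define TLB_FLAG_B		(1u << 1)

#define possible_tlb_flags	(TLB_FLAG_A | TLB_FLAG_B)	/* union over configured TLB types */
#define always_tlb_flags	(TLB_FLAG_A)			/* intersection over configured TLB types */

static unsigned int current_tlb_flags;	/* what the TLB actually present reports */

#define tlb_flag(f) \
	((always_tlb_flags & (f)) || (current_tlb_flags & possible_tlb_flags & (f)))

static void flush_example(void)
{
	if (tlb_flag(TLB_FLAG_B)) {
		/* Reached only when the current TLB has FLAG_B.  If FLAG_B were
		 * dropped from possible_tlb_flags -- as the v3 flags are in this
		 * commit -- the compiler could remove this branch entirely. */
	}
}
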
4 changes: 0 additions & 4 deletions arch/arm/kernel/entry-armv.S
@@ -556,10 +556,6 @@ call_fpe:
#endif
tst r0, #0x08000000 @ only CDP/CPRT/LDC/STC have bit 27
tstne r0, #0x04000000 @ bit 26 set on both ARM and Thumb-2
#if defined(CONFIG_CPU_ARM610) || defined(CONFIG_CPU_ARM710)
and r8, r0, #0x0f000000 @ mask out op-code bits
teqne r8, #0x0f000000 @ SWI (ARM6/7 bug)?
#endif
moveq pc, lr
get_thread_info r10 @ get current thread
and r8, r0, #0x00000f00 @ mask out CP number
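
The removed call_fpe lines above guarded against treating an SWI as a coprocessor instruction on ARM6/7. In the ARM encoding an SWI has bits 27..24 all set, which is exactly what the mask/compare against 0x0f000000 tests; an illustrative one-line C equivalent of that check:

#include <stdbool.h>
#include <stdint.h>

/* True if the 32-bit ARM instruction word is an SWI (cond 1111 imm24). */
static bool insn_is_swi(uint32_t insn)
{
	return (insn & 0x0f000000u) == 0x0f000000u;
}
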
28 changes: 0 additions & 28 deletions arch/arm/kernel/entry-common.S
@@ -335,20 +335,6 @@ ENDPROC(ftrace_stub)
*-----------------------------------------------------------------------------
*/

/* If we're optimising for StrongARM the resulting code won't
run on an ARM7 and we can save a couple of instructions.
--pb */
#ifdef CONFIG_CPU_ARM710
#define A710(code...) code
.Larm710bug:
ldmia sp, {r0 - lr}^ @ Get calling r0 - lr
mov r0, r0
add sp, sp, #S_FRAME_SIZE
subs pc, lr, #4
#else
#define A710(code...)
#endif

.align 5
ENTRY(vector_swi)
sub sp, sp, #S_FRAME_SIZE
@@ -379,9 +365,6 @@ ENTRY(vector_swi)
ldreq r10, [lr, #-4] @ get SWI instruction
#else
ldr r10, [lr, #-4] @ get SWI instruction
A710( and ip, r10, #0x0f000000 @ check for SWI )
A710( teq ip, #0x0f000000 )
A710( bne .Larm710bug )
#endif
#ifdef CONFIG_CPU_ENDIAN_BE8
rev r10, r10 @ little endian instruction
@@ -392,26 +375,15 @@
/*
* Pure EABI user space always put syscall number into scno (r7).
*/
A710( ldr ip, [lr, #-4] @ get SWI instruction )
A710( and ip, ip, #0x0f000000 @ check for SWI )
A710( teq ip, #0x0f000000 )
A710( bne .Larm710bug )

#elif defined(CONFIG_ARM_THUMB)

/* Legacy ABI only, possibly thumb mode. */
tst r8, #PSR_T_BIT @ this is SPSR from save_user_regs
addne scno, r7, #__NR_SYSCALL_BASE @ put OS number in
ldreq scno, [lr, #-4]

#else

/* Legacy ABI only. */
ldr scno, [lr, #-4] @ get SWI instruction
A710( and ip, scno, #0x0f000000 @ check for SWI )
A710( teq ip, #0x0f000000 )
A710( bne .Larm710bug )

#endif

#ifdef CONFIG_ALIGNMENT_TRAP
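
For orientation, the vector_swi hunks above differ only in how the syscall number is recovered once the A710 workaround is gone: EABI passes it in r7, legacy-ABI Thumb callers add __NR_SYSCALL_BASE to r7, and legacy-ABI ARM callers re-read the SWI word at lr - 4, whose 24-bit immediate carries the number. A simplified C sketch under those assumptions (the later masking steps in vector_swi are not part of this hunk):

#include <stdint.h>

#define __NR_SYSCALL_BASE	0x900000u	/* OABI "swi 0x900000 + NR" convention */

static uint32_t get_scno(int eabi, int caller_is_thumb,
			 uint32_t r7, uint32_t swi_word)
{
	if (eabi)
		return r7;			/* EABI: number always in r7 */
	if (caller_is_thumb)
		return r7 + __NR_SYSCALL_BASE;	/* legacy ABI, Thumb caller */
	return swi_word & 0x00ffffffu;		/* legacy ABI: immediate of the SWI */
}
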
23 changes: 3 additions & 20 deletions arch/arm/lib/Makefile
@@ -17,30 +17,13 @@ lib-y := backtrace.o changebit.o csumipv6.o csumpartial.o \
call_with_stack.o

mmu-y := clear_user.o copy_page.o getuser.o putuser.o

# the code in uaccess.S is not preemption safe and
# probably faster on ARMv3 only
ifeq ($(CONFIG_PREEMPT),y)
mmu-y += copy_from_user.o copy_to_user.o
else
ifneq ($(CONFIG_CPU_32v3),y)
mmu-y += copy_from_user.o copy_to_user.o
else
mmu-y += uaccess.o
endif
endif
mmu-y += copy_from_user.o copy_to_user.o

# using lib_ here won't override already available weak symbols
obj-$(CONFIG_UACCESS_WITH_MEMCPY) += uaccess_with_memcpy.o

lib-$(CONFIG_MMU) += $(mmu-y)

ifeq ($(CONFIG_CPU_32v3),y)
lib-y += io-readsw-armv3.o io-writesw-armv3.o
else
lib-y += io-readsw-armv4.o io-writesw-armv4.o
endif

lib-$(CONFIG_MMU) += $(mmu-y)
lib-y += io-readsw-armv4.o io-writesw-armv4.o
lib-$(CONFIG_ARCH_RPC) += ecard.o io-acorn.o floppydma.o
lib-$(CONFIG_ARCH_SHARK) += io-shark.o

(The remaining 9 changed files are not shown in this view.)
