Commit 469e8a6
yaml
---
r: 11671
b: refs/heads/master
c: 25c8a78
h: refs/heads/master
i:
  11669: 6d36287
  11667: 304f721
  11663: 7deaaa5
v: v3
David Gibson authored and Paul Mackerras committed Oct 27, 2005
1 parent fedb1dd commit 469e8a6
Showing 23 changed files with 60 additions and 210 deletions.
2 changes: 1 addition & 1 deletion [refs]
@@ -1,2 +1,2 @@
 ---
-refs/heads/master: fda262b8978d0089758ef9444508434c74113a61
+refs/heads/master: 25c8a78b1e00ac0cc640677eda78b462c2cd4c6e
5 changes: 2 additions & 3 deletions trunk/arch/powerpc/kernel/Makefile
@@ -29,7 +29,6 @@ extra-$(CONFIG_44x) := head_44x.o
 extra-$(CONFIG_FSL_BOOKE) := head_fsl_booke.o
 extra-$(CONFIG_8xx) := head_8xx.o
 extra-$(CONFIG_PPC64) += entry_64.o
-extra-$(CONFIG_PPC_FPU) += fpu.o
 extra-y += vmlinux.lds

 obj-y += process.o init_task.o time.o \
@@ -49,7 +48,7 @@ else
 # stuff used from here for ARCH=ppc or ARCH=ppc64
 obj-$(CONFIG_PPC64) += traps.o process.o init_task.o time.o

-fpux-$(CONFIG_PPC32) += fpu.o
-extra-$(CONFIG_PPC_FPU) += $(fpux-y)

 endif
+
+extra-$(CONFIG_PPC_FPU) += fpu.o
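
Net effect of the two hunks above: the 32-bit-only fpux- indirection disappears, and fpu.o is now built whenever CONFIG_PPC_FPU is set, on both 32-bit and 64-bit kernels, from the single shared arch/powerpc/kernel/fpu.S changed below.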
31 changes: 28 additions & 3 deletions trunk/arch/powerpc/kernel/fpu.S
@@ -48,7 +48,7 @@ _GLOBAL(load_up_fpu)
 	addi	r4,r4,THREAD		/* want last_task_used_math->thread */
 	SAVE_32FPRS(0, r4)
 	mffs	fr0
-	stfd	fr0,THREAD_FPSCR-4(r4)
+	stfd	fr0,THREAD_FPSCR(r4)
 	LDL	r5,PT_REGS(r4)
 	tophys(r5,r5)
 	LDL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
@@ -71,7 +71,7 @@ _GLOBAL(load_up_fpu)
 	or	r12,r12,r4
 	std	r12,_MSR(r1)
 #endif
-	lfd	fr0,THREAD_FPSCR-4(r5)
+	lfd	fr0,THREAD_FPSCR(r5)
 	mtfsf	0xff,fr0
 	REST_32FPRS(0, r5)
 #ifndef CONFIG_SMP
@@ -104,7 +104,7 @@ _GLOBAL(giveup_fpu)
 	CMPI	0,r5,0
 	SAVE_32FPRS(0, r3)
 	mffs	fr0
-	stfd	fr0,THREAD_FPSCR-4(r3)
+	stfd	fr0,THREAD_FPSCR(r3)
 	beq	1f
 	LDL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
 	li	r3,MSR_FP|MSR_FE0|MSR_FE1
@@ -117,3 +117,28 @@ _GLOBAL(giveup_fpu)
 	STL	r5,OFF(last_task_used_math)(r4)
 #endif /* CONFIG_SMP */
 	blr
+
+/*
+ * These are used in the alignment trap handler when emulating
+ * single-precision loads and stores.
+ * We restore and save the fpscr so the task gets the same result
+ * and exceptions as if the cpu had performed the load or store.
+ */
+
+_GLOBAL(cvt_fd)
+	lfd	0,THREAD_FPSCR(r5)	/* load up fpscr value */
+	mtfsf	0xff,0
+	lfs	0,0(r3)
+	stfd	0,0(r4)
+	mffs	0
+	stfd	0,THREAD_FPSCR(r5)	/* save new fpscr value */
+	blr
+
+_GLOBAL(cvt_df)
+	lfd	0,THREAD_FPSCR(r5)	/* load up fpscr value */
+	mtfsf	0xff,0
+	lfd	0,0(r3)
+	stfs	0,0(r4)
+	mffs	0
+	stfd	0,THREAD_FPSCR(r5)	/* save new fpscr value */
+	blr
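
With the two copies folded into this shared fpu.S, cvt_fd and cvt_df locate the FPSCR slot themselves via the THREAD_FPSCR offset, so their C callers pass a pointer to the whole thread_struct instead of a pointer to the fpscr field (see the align.c hunks below). A minimal sketch of the C-side declarations this implies; the header they actually live in is not shown in this commit, so treat the placement as an assumption:

/* Sketch of the C-side view of the merged helpers; parameter names are
 * illustrative, the thread_struct argument is what this diff shows. */
struct thread_struct;
extern void cvt_fd(float *from, double *to, struct thread_struct *thread);
extern void cvt_df(double *from, float *to, struct thread_struct *thread);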
27 changes: 0 additions & 27 deletions trunk/arch/powerpc/kernel/misc_32.S
@@ -992,33 +992,6 @@ _GLOBAL(_get_SP)
 	mr	r3,r1		/* Close enough */
 	blr

-/*
- * These are used in the alignment trap handler when emulating
- * single-precision loads and stores.
- * We restore and save the fpscr so the task gets the same result
- * and exceptions as if the cpu had performed the load or store.
- */
-
-#ifdef CONFIG_PPC_FPU
-_GLOBAL(cvt_fd)
-	lfd	0,-4(r5)	/* load up fpscr value */
-	mtfsf	0xff,0
-	lfs	0,0(r3)
-	stfd	0,0(r4)
-	mffs	0		/* save new fpscr value */
-	stfd	0,-4(r5)
-	blr
-
-_GLOBAL(cvt_df)
-	lfd	0,-4(r5)	/* load up fpscr value */
-	mtfsf	0xff,0
-	lfd	0,0(r3)
-	stfs	0,0(r4)
-	mffs	0		/* save new fpscr value */
-	stfd	0,-4(r5)
-	blr
-#endif
-
 /*
  * Create a kernel thread
  * kernel_thread(fn, arg, flags)
19 changes: 0 additions & 19 deletions trunk/arch/powerpc/kernel/misc_64.S
@@ -462,25 +462,6 @@ _GLOBAL(_outsl_ns)
 	sync
 	blr

-
-_GLOBAL(cvt_fd)
-	lfd	0,0(r5)		/* load up fpscr value */
-	mtfsf	0xff,0
-	lfs	0,0(r3)
-	stfd	0,0(r4)
-	mffs	0		/* save new fpscr value */
-	stfd	0,0(r5)
-	blr
-
-_GLOBAL(cvt_df)
-	lfd	0,0(r5)		/* load up fpscr value */
-	mtfsf	0xff,0
-	lfd	0,0(r3)
-	stfs	0,0(r4)
-	mffs	0		/* save new fpscr value */
-	stfd	0,0(r5)
-	blr
-
 /*
  * identify_cpu and calls setup_cpu
  * In: r3 = base of the cpu_specs array
2 changes: 1 addition & 1 deletion trunk/arch/powerpc/kernel/process.c
@@ -665,7 +665,7 @@ void start_thread(struct pt_regs *regs, unsigned long start, unsigned long sp)
 #endif
 #endif /* CONFIG_SMP */
 	memset(current->thread.fpr, 0, sizeof(current->thread.fpr));
-	current->thread.fpscr = 0;
+	current->thread.fpscr.val = 0;
 #ifdef CONFIG_ALTIVEC
 	memset(current->thread.vr, 0, sizeof(current->thread.vr));
 	memset(&current->thread.vscr, 0, sizeof(current->thread.vscr));
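
The new .val accessor reflects fpscr changing from a bare scalar into a small structure. The asm always moves the FPSCR with stfd/lfd, which transfer a full 64-bit doubleword; the old 32-bit code compensated for its 32-bit field by addressing THREAD_FPSCR-4 (see the misc_32.S deletions above), while the 64-bit copy used offset 0. Giving the value an explicit pad word lets THREAD_FPSCR name one 8-byte slot on both, with val as the low word on big-endian PowerPC. A plausible sketch of the member; the real definition lives in processor.h, which is not among the hunks shown, so the pad layout is inferred:

/* Hedged sketch of the new thread_struct member; only the .val name is
 * confirmed by this diff, the surrounding layout is an assumption. */
struct thread_struct {
	/* ... */
	struct {
		unsigned int pad;	/* high word of the stfd/lfd slot */
		unsigned int val;	/* FPSCR proper: low word, big-endian */
	} fpscr;
};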
2 changes: 1 addition & 1 deletion trunk/arch/powerpc/kernel/signal_32.c
@@ -403,7 +403,7 @@ static int save_user_regs(struct pt_regs *regs, struct mcontext __user *frame,
 			   ELF_NFPREG * sizeof(double)))
 		return 1;

-	current->thread.fpscr = 0;	/* turn off all fp exceptions */
+	current->thread.fpscr.val = 0;	/* turn off all fp exceptions */

 #ifdef CONFIG_ALTIVEC
 	/* save altivec registers */
2 changes: 1 addition & 1 deletion trunk/arch/powerpc/kernel/traps.c
@@ -549,7 +549,7 @@ static void parse_fpe(struct pt_regs *regs)

 	flush_fp_to_thread(current);

-	fpscr = current->thread.fpscr;
+	fpscr = current->thread.fpscr.val;

 	/* Invalid operation */
 	if ((fpscr & FPSCR_VE) && (fpscr & FPSCR_VX))
4 changes: 2 additions & 2 deletions trunk/arch/ppc/kernel/align.c
@@ -375,7 +375,7 @@ fix_alignment(struct pt_regs *regs)
 #ifdef CONFIG_PPC_FPU
 		preempt_disable();
 		enable_kernel_fp();
-		cvt_fd(&data.f, &data.d, &current->thread.fpscr);
+		cvt_fd(&data.f, &data.d, &current->thread);
 		preempt_enable();
 #else
 		return 0;
@@ -385,7 +385,7 @@ fix_alignment(struct pt_regs *regs)
 #ifdef CONFIG_PPC_FPU
 		preempt_disable();
 		enable_kernel_fp();
-		cvt_df(&data.d, &data.f, &current->thread.fpscr);
+		cvt_df(&data.d, &data.f, &current->thread);
 		preempt_enable();
 #else
 		return 0;
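
Unchanged here but worth noting: every call site brackets the conversion with preempt_disable()/enable_kernel_fp(), because kernel code may only touch the FPU after claiming it, and must not be rescheduled while it holds the per-CPU FP state. A hedged sketch of that calling pattern pulled out as a standalone helper (the helper name is hypothetical; the body mirrors the lines above):

/* Hypothetical wrapper showing the pattern fix_alignment() follows. */
static void emulate_single_store(double *from, float *to)
{
	preempt_disable();	/* FP context is per-CPU once claimed */
	enable_kernel_fp();	/* flush lazy user FP state, set MSR_FP */
	cvt_df(from, to, &current->thread);
	preempt_enable();
}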
27 changes: 0 additions & 27 deletions trunk/arch/ppc/kernel/misc.S
@@ -967,33 +967,6 @@ _GLOBAL(_get_SP)
 	mr	r3,r1		/* Close enough */
 	blr

-/*
- * These are used in the alignment trap handler when emulating
- * single-precision loads and stores.
- * We restore and save the fpscr so the task gets the same result
- * and exceptions as if the cpu had performed the load or store.
- */
-
-#ifdef CONFIG_PPC_FPU
-_GLOBAL(cvt_fd)
-	lfd	0,-4(r5)	/* load up fpscr value */
-	mtfsf	0xff,0
-	lfs	0,0(r3)
-	stfd	0,0(r4)
-	mffs	0		/* save new fpscr value */
-	stfd	0,-4(r5)
-	blr
-
-_GLOBAL(cvt_df)
-	lfd	0,-4(r5)	/* load up fpscr value */
-	mtfsf	0xff,0
-	lfd	0,0(r3)
-	stfs	0,0(r4)
-	mffs	0		/* save new fpscr value */
-	stfd	0,-4(r5)
-	blr
-#endif
-
 /*
  * Create a kernel thread
  * kernel_thread(fn, arg, flags)
2 changes: 1 addition & 1 deletion trunk/arch/ppc/kernel/process.c
@@ -542,7 +542,7 @@ void start_thread(struct pt_regs *regs, unsigned long nip, unsigned long sp)
 	last_task_used_spe = NULL;
 #endif
 	memset(current->thread.fpr, 0, sizeof(current->thread.fpr));
-	current->thread.fpscr = 0;
+	current->thread.fpscr.val = 0;
 #ifdef CONFIG_ALTIVEC
 	memset(current->thread.vr, 0, sizeof(current->thread.vr));
 	memset(&current->thread.vscr, 0, sizeof(current->thread.vscr));
2 changes: 1 addition & 1 deletion trunk/arch/ppc/kernel/traps.c
@@ -659,7 +659,7 @@ void program_check_exception(struct pt_regs *regs)
 		giveup_fpu(current);
 		preempt_enable();

-		fpscr = current->thread.fpscr;
+		fpscr = current->thread.fpscr.val;
 		fpscr &= fpscr << 22;	/* mask summary bits with enables */
 		if (fpscr & FPSCR_VX)
 			code = FPE_FLTINV;
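
The unchanged line below the edit, fpscr &= fpscr << 22, is worth unpacking: in the PowerPC FPSCR each exception summary bit sits exactly 22 bits above its corresponding enable bit (FPSCR_VE is 0x00000080 and FPSCR_VX is 0x20000000, and likewise for OE/OX, UE/UX, ZE/ZX, XE/XX), so shifting the enables left by 22 yields a mask that keeps only the exceptions the task actually asked to trap on. A small self-contained check in C:

#include <assert.h>

/* PowerPC FPSCR bits, values as in the kernel's reg.h. */
#define FPSCR_VX 0x20000000u	/* invalid-operation exception summary */
#define FPSCR_OX 0x10000000u	/* overflow exception summary */
#define FPSCR_VE 0x00000080u	/* invalid-operation exception enable */

int main(void)
{
	/* Task enabled only invalid-op traps; both VX and OX occurred. */
	unsigned int fpscr = FPSCR_VX | FPSCR_OX | FPSCR_VE;

	fpscr &= fpscr << 22;	/* mask summary bits with enables */

	assert(fpscr == FPSCR_VX);	/* OX dropped: OE was not set */
	return 0;
}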
2 changes: 1 addition & 1 deletion trunk/arch/ppc/math-emu/sfp-machine.h
@@ -166,7 +166,7 @@ extern int fp_pack_ds(void *, long, unsigned long, unsigned long, long, long);
 #include <linux/kernel.h>
 #include <linux/sched.h>

-#define __FPU_FPSCR	(current->thread.fpscr)
+#define __FPU_FPSCR	(current->thread.fpscr.val)

 /* We only actually write to the destination register
  * if exceptions signalled (if any) will not trap.
3 changes: 3 additions & 0 deletions trunk/arch/ppc64/Kconfig
@@ -197,6 +197,9 @@ config BOOTX_TEXT
 config POWER4
 	def_bool y

+config PPC_FPU
+	def_bool y
+
 config POWER4_ONLY
 	bool "Optimize for POWER4"
 	default n
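
ppc64 had no PPC_FPU symbol before this commit (64-bit PowerPC CPUs always have an FPU), so it is introduced as def_bool y: the shared powerpc Makefile rule above can then key the common fpu.o off one symbol on both architectures.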
1 change: 1 addition & 0 deletions trunk/arch/ppc64/Makefile
@@ -80,6 +80,7 @@ endif
 CFLAGS += $(call cc-option,-funit-at-a-time)

 head-y := arch/ppc64/kernel/head.o
+head-y += arch/powerpc/kernel/fpu.o

 libs-y += arch/ppc64/lib/
 core-y += arch/ppc64/kernel/ arch/powerpc/kernel/
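
Note that fpu.o is appended to head-y rather than an obj- list, which links it immediately after head.o in the image; plausibly this keeps load_up_fpu within relative-branch reach of the early exception vectors that call it, though the commit itself does not state the rationale.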
4 changes: 2 additions & 2 deletions trunk/arch/ppc64/kernel/align.c
@@ -313,7 +313,7 @@ fix_alignment(struct pt_regs *regs)
 		/* Doing stfs, have to convert to single */
 		preempt_disable();
 		enable_kernel_fp();
-		cvt_df(&current->thread.fpr[reg], (float *)&data.v[4], &current->thread.fpscr);
+		cvt_df(&current->thread.fpr[reg], (float *)&data.v[4], &current->thread);
 		disable_kernel_fp();
 		preempt_enable();
 	}
@@ -349,7 +349,7 @@ fix_alignment(struct pt_regs *regs)
 		/* Doing lfs, have to convert to double */
 		preempt_disable();
 		enable_kernel_fp();
-		cvt_fd((float *)&data.v[4], &current->thread.fpr[reg], &current->thread.fpscr);
+		cvt_fd((float *)&data.v[4], &current->thread.fpr[reg], &current->thread);
 		disable_kernel_fp();
 		preempt_enable();
 	}
59 changes: 2 additions & 57 deletions trunk/arch/ppc64/kernel/head.S
@@ -81,7 +81,7 @@ _stext:
 _GLOBAL(__start)
 	/* NOP this out unconditionally */
 BEGIN_FTR_SECTION
-	b .__start_initialization_multiplatform
+	b	.__start_initialization_multiplatform
 END_FTR_SECTION(0, 1)
 #endif /* CONFIG_PPC_MULTIPLATFORM */

@@ -747,6 +747,7 @@ bad_stack:
  * any task or sent any task a signal, you should use
  * ret_from_except or ret_from_except_lite instead of this.
  */
+	.globl	fast_exception_return
 fast_exception_return:
 	ld	r12,_MSR(r1)
 	ld	r11,_NIP(r1)
@@ -858,62 +859,6 @@ fp_unavailable_common:
 	bl	.kernel_fp_unavailable_exception
 	BUG_OPCODE

-/*
- * load_up_fpu(unused, unused, tsk)
- * Disable FP for the task which had the FPU previously,
- * and save its floating-point registers in its thread_struct.
- * Enables the FPU for use in the kernel on return.
- * On SMP we know the fpu is free, since we give it up every
- * switch (ie, no lazy save of the FP registers).
- * On entry: r13 == 'current' && last_task_used_math != 'current'
- */
-_STATIC(load_up_fpu)
-	mfmsr	r5			/* grab the current MSR */
-	ori	r5,r5,MSR_FP
-	mtmsrd	r5			/* enable use of fpu now */
-	isync
-/*
- * For SMP, we don't do lazy FPU switching because it just gets too
- * horrendously complex, especially when a task switches from one CPU
- * to another.  Instead we call giveup_fpu in switch_to.
- *
- */
-#ifndef CONFIG_SMP
-	ld	r3,last_task_used_math@got(r2)
-	ld	r4,0(r3)
-	cmpdi	0,r4,0
-	beq	1f
-	/* Save FP state to last_task_used_math's THREAD struct */
-	addi	r4,r4,THREAD
-	SAVE_32FPRS(0, r4)
-	mffs	fr0
-	stfd	fr0,THREAD_FPSCR(r4)
-	/* Disable FP for last_task_used_math */
-	ld	r5,PT_REGS(r4)
-	ld	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
-	li	r6,MSR_FP|MSR_FE0|MSR_FE1
-	andc	r4,r4,r6
-	std	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
-1:
-#endif /* CONFIG_SMP */
-	/* enable use of FP after return */
-	ld	r4,PACACURRENT(r13)
-	addi	r5,r4,THREAD		/* Get THREAD */
-	ld	r4,THREAD_FPEXC_MODE(r5)
-	ori	r12,r12,MSR_FP
-	or	r12,r12,r4
-	std	r12,_MSR(r1)
-	lfd	fr0,THREAD_FPSCR(r5)
-	mtfsf	0xff,fr0
-	REST_32FPRS(0, r5)
-#ifndef CONFIG_SMP
-	/* Update last_task_used_math to 'current' */
-	subi	r4,r5,THREAD		/* Back to 'current' */
-	std	r4,0(r3)
-#endif /* CONFIG_SMP */
-	/* restore registers and return */
-	b	fast_exception_return
-
 	.align	7
 	.globl	altivec_unavailable_common
 altivec_unavailable_common:
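
The deleted block is the ppc64 copy of load_up_fpu, now supplied by the shared arch/powerpc/kernel/fpu.S. Its lazy-FPU hand-off, restated as C-style pseudocode for clarity (every helper name here is invented for illustration; the MSR and register details are in the asm above):

/* Illustration only, not kernel source; follows the deleted asm. */
void load_up_fpu_sketch(void)
{
	enable_fp_in_msr();	/* mfmsr; ori MSR_FP; mtmsrd; isync */
#ifndef CONFIG_SMP
	/* UP lazy switching: steal the FPU from its last user.
	 * On SMP, giveup_fpu in switch_to means it is already free. */
	if (last_task_used_math != NULL) {
		save_fprs_and_fpscr(&last_task_used_math->thread);
		/* clear MSR_FP in its saved regs so it faults next time */
		disable_fp_for(last_task_used_math);
	}
#endif
	/* Grant FP to current: set MSR_FP plus the task's FE0/FE1
	 * exception mode in the exception frame, then reload state. */
	set_fp_in_exception_frame(current);
	restore_fpscr_and_fprs(&current->thread);
#ifndef CONFIG_SMP
	last_task_used_math = current;
#endif
}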
[Diff truncated here: the remaining changed files, of the 23 in this commit, did not load.]
