xtensa: reorganize SR referencing
- reference SRs by names where possible, not by numbers;
- get rid of __stringify around SR names where possible;
- remove unneeded SR names from asm/regs.h;
- add SREG_ prefix to remaining SR names;

Signed-off-by: Max Filippov <jcmvbkbc@gmail.com>
Signed-off-by: Chris Zankel <chris@zankel.net>
Max Filippov authored and Chris Zankel committed Oct 16, 2012
1 parent f4349b6 commit bc5378f
Showing 20 changed files with 254 additions and 292 deletions.
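Every hunk below applies the same transformation: stop pasting an SR's number into the asm template via __stringify, and let the assembler resolve the lowercase symbolic name instead. A minimal sketch of the before/after, distilled from the irqflags.h hunk further down (the read_ps_old/read_ps_new wrappers are hypothetical names, for illustration only; PS == 230 per the old asm/regs.h):

	#include <linux/stringify.h>

	#define PS 230	/* old asm/regs.h: the SR is known only by number */

	static inline unsigned long read_ps_old(void)
	{
		unsigned long flags;
		/* preprocesses to: rsr %0, 230 */
		asm volatile("rsr %0, "__stringify(PS) : "=a" (flags));
		return flags;
	}

	static inline unsigned long read_ps_new(void)
	{
		unsigned long flags;
		/* the assembler resolves the symbolic SR name itself */
		asm volatile("rsr %0, ps" : "=a" (flags));
		return flags;
	}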
8 changes: 4 additions & 4 deletions arch/xtensa/boot/boot-redboot/bootstrap.S
@@ -51,17 +51,17 @@ _start:
/* 'reset' window registers */

movi a4, 1
-	wsr a4, PS
+	wsr a4, ps
rsync

-	rsr a5, WINDOWBASE
+	rsr a5, windowbase
ssl a5
sll a4, a4
-	wsr a4, WINDOWSTART
+	wsr a4, windowstart
rsync

movi a4, 0x00040000
-	wsr a4, PS
+	wsr a4, ps
rsync

/* copy the loader to its address
12 changes: 6 additions & 6 deletions arch/xtensa/include/asm/atomic.h
@@ -73,7 +73,7 @@ static inline void atomic_add(int i, atomic_t * v)
"l32i %0, %2, 0 \n\t"
"add %0, %0, %1 \n\t"
"s32i %0, %2, 0 \n\t"
"wsr a15, "__stringify(PS)" \n\t"
"wsr a15, ps \n\t"
"rsync \n"
: "=&a" (vval)
: "a" (i), "a" (v)
@@ -97,7 +97,7 @@ static inline void atomic_sub(int i, atomic_t *v)
"l32i %0, %2, 0 \n\t"
"sub %0, %0, %1 \n\t"
"s32i %0, %2, 0 \n\t"
"wsr a15, "__stringify(PS)" \n\t"
"wsr a15, ps \n\t"
"rsync \n"
: "=&a" (vval)
: "a" (i), "a" (v)
@@ -118,7 +118,7 @@ static inline int atomic_add_return(int i, atomic_t * v)
"l32i %0, %2, 0 \n\t"
"add %0, %0, %1 \n\t"
"s32i %0, %2, 0 \n\t"
"wsr a15, "__stringify(PS)" \n\t"
"wsr a15, ps \n\t"
"rsync \n"
: "=&a" (vval)
: "a" (i), "a" (v)
@@ -137,7 +137,7 @@ static inline int atomic_sub_return(int i, atomic_t * v)
"l32i %0, %2, 0 \n\t"
"sub %0, %0, %1 \n\t"
"s32i %0, %2, 0 \n\t"
"wsr a15, "__stringify(PS)" \n\t"
"wsr a15, ps \n\t"
"rsync \n"
: "=&a" (vval)
: "a" (i), "a" (v)
@@ -260,7 +260,7 @@ static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
"xor %1, %4, %3 \n\t"
"and %0, %0, %4 \n\t"
"s32i %0, %2, 0 \n\t"
"wsr a15, "__stringify(PS)" \n\t"
"wsr a15, ps \n\t"
"rsync \n"
: "=&a" (vval), "=a" (mask)
: "a" (v), "a" (all_f), "1" (mask)
@@ -277,7 +277,7 @@ static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
"l32i %0, %2, 0 \n\t"
"or %0, %0, %1 \n\t"
"s32i %0, %2, 0 \n\t"
"wsr a15, "__stringify(PS)" \n\t"
"wsr a15, ps \n\t"
"rsync \n"
: "=&a" (vval)
: "a" (mask), "a" (v)
2 changes: 1 addition & 1 deletion arch/xtensa/include/asm/cacheflush.h
@@ -165,7 +165,7 @@ extern void copy_from_user_page(struct vm_area_struct*, struct page*,
static inline u32 xtensa_get_cacheattr(void)
{
u32 r;
asm volatile(" rsr %0, CACHEATTR" : "=a"(r));
asm volatile(" rsr %0, cacheattr" : "=a"(r));
return r;
}

4 changes: 2 additions & 2 deletions arch/xtensa/include/asm/cmpxchg.h
@@ -27,7 +27,7 @@ __cmpxchg_u32(volatile int *p, int old, int new)
"bne %0, %2, 1f \n\t"
"s32i %3, %1, 0 \n\t"
"1: \n\t"
"wsr a15, "__stringify(PS)" \n\t"
"wsr a15, ps \n\t"
"rsync \n\t"
: "=&a" (old)
: "a" (p), "a" (old), "r" (new)
@@ -97,7 +97,7 @@ static inline unsigned long xchg_u32(volatile int * m, unsigned long val)
__asm__ __volatile__("rsil a15, "__stringify(LOCKLEVEL)"\n\t"
"l32i %0, %1, 0 \n\t"
"s32i %2, %1, 0 \n\t"
"wsr a15, "__stringify(PS)" \n\t"
"wsr a15, ps \n\t"
"rsync \n\t"
: "=&a" (tmp)
: "a" (m), "a" (val)
5 changes: 2 additions & 3 deletions arch/xtensa/include/asm/coprocessor.h
@@ -94,11 +94,10 @@
#if XCHAL_HAVE_CP

#define RSR_CPENABLE(x) do { \
__asm__ __volatile__("rsr %0," __stringify(CPENABLE) : "=a" (x)); \
__asm__ __volatile__("rsr %0, cpenable" : "=a" (x)); \
} while(0);
#define WSR_CPENABLE(x) do { \
__asm__ __volatile__("wsr %0," __stringify(CPENABLE) "; rsync" \
:: "a" (x)); \
__asm__ __volatile__("wsr %0, cpenable; rsync" :: "a" (x)); \
} while(0);

#endif /* XCHAL_HAVE_CP */
2 changes: 1 addition & 1 deletion arch/xtensa/include/asm/delay.h
@@ -27,7 +27,7 @@ static inline void __delay(unsigned long loops)
static __inline__ u32 xtensa_get_ccount(void)
{
u32 ccount;
asm volatile ("rsr %0, 234; # CCOUNT\n" : "=r" (ccount));
asm volatile ("rsr %0, ccount\n" : "=r" (ccount));
return ccount;
}

4 changes: 2 additions & 2 deletions arch/xtensa/include/asm/irqflags.h
@@ -16,7 +16,7 @@
static inline unsigned long arch_local_save_flags(void)
{
unsigned long flags;
asm volatile("rsr %0,"__stringify(PS) : "=a" (flags));
asm volatile("rsr %0, ps" : "=a" (flags));
return flags;
}

@@ -41,7 +41,7 @@ static inline void arch_local_irq_enable(void)

static inline void arch_local_irq_restore(unsigned long flags)
{
asm volatile("wsr %0, "__stringify(PS)" ; rsync"
asm volatile("wsr %0, ps; rsync"
:: "a" (flags) : "memory");
}

4 changes: 2 additions & 2 deletions arch/xtensa/include/asm/mmu_context.h
@@ -51,14 +51,14 @@ extern unsigned long asid_cache;

static inline void set_rasid_register (unsigned long val)
{
__asm__ __volatile__ (" wsr %0, "__stringify(RASID)"\n\t"
__asm__ __volatile__ (" wsr %0, rasid\n\t"
" isync\n" : : "a" (val));
}

static inline unsigned long get_rasid_register (void)
{
unsigned long tmp;
__asm__ __volatile__ (" rsr %0,"__stringify(RASID)"\n\t" : "=a" (tmp));
__asm__ __volatile__ (" rsr %0, rasid\n\t" : "=a" (tmp));
return tmp;
}

55 changes: 9 additions & 46 deletions arch/xtensa/include/asm/regs.h
@@ -27,52 +27,15 @@

/* Special registers. */

-#define LBEG 0
-#define LEND 1
-#define LCOUNT 2
-#define SAR 3
-#define BR 4
-#define SCOMPARE1 12
-#define ACCHI 16
-#define ACCLO 17
-#define MR 32
-#define WINDOWBASE 72
-#define WINDOWSTART 73
-#define PTEVADDR 83
-#define RASID 90
-#define ITLBCFG 91
-#define DTLBCFG 92
-#define IBREAKENABLE 96
-#define DDR 104
-#define IBREAKA 128
-#define DBREAKA 144
-#define DBREAKC 160
-#define EPC 176
-#define EPC_1 177
-#define DEPC 192
-#define EPS 192
-#define EPS_1 193
-#define EXCSAVE 208
-#define EXCSAVE_1 209
-#define INTERRUPT 226
-#define INTENABLE 228
-#define PS 230
-#define THREADPTR 231
-#define EXCCAUSE 232
-#define DEBUGCAUSE 233
-#define CCOUNT 234
-#define PRID 235
-#define ICOUNT 236
-#define ICOUNTLEVEL 237
-#define EXCVADDR 238
-#define CCOMPARE 240
-#define MISC_SR 244
-
-/* Special names for read-only and write-only interrupt registers. */
-
-#define INTREAD 226
-#define INTSET 226
-#define INTCLEAR 227
+#define SREG_MR 32
+#define SREG_IBREAKA 128
+#define SREG_DBREAKA 144
+#define SREG_DBREAKC 160
+#define SREG_EPC 176
+#define SREG_EPS 192
+#define SREG_EXCSAVE 208
+#define SREG_CCOMPARE 240
+#define SREG_MISC 244

/* EXCCAUSE register fields */

8 changes: 4 additions & 4 deletions arch/xtensa/include/asm/timex.h
@@ -63,10 +63,10 @@ extern cycles_t cacheflush_time;
* Register access.
*/

#define WSR_CCOUNT(r) asm volatile ("wsr %0,"__stringify(CCOUNT) :: "a" (r))
#define RSR_CCOUNT(r) asm volatile ("rsr %0,"__stringify(CCOUNT) : "=a" (r))
#define WSR_CCOMPARE(x,r) asm volatile ("wsr %0,"__stringify(CCOMPARE)"+"__stringify(x) :: "a"(r))
#define RSR_CCOMPARE(x,r) asm volatile ("rsr %0,"__stringify(CCOMPARE)"+"__stringify(x) : "=a"(r))
#define WSR_CCOUNT(r) asm volatile ("wsr %0, ccount" :: "a" (r))
#define RSR_CCOUNT(r) asm volatile ("rsr %0, ccount" : "=a" (r))
#define WSR_CCOMPARE(x,r) asm volatile ("wsr %0,"__stringify(SREG_CCOMPARE)"+"__stringify(x) :: "a"(r))
#define RSR_CCOMPARE(x,r) asm volatile ("rsr %0,"__stringify(SREG_CCOMPARE)"+"__stringify(x) : "=a"(r))

static inline unsigned long get_ccount (void)
{
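The CCOMPARE macros above are why a handful of numeric defines survive in asm/regs.h (now with an SREG_ prefix): ccompare0..ccompareN occupy consecutive SR numbers, so the index x must be folded into the register number at preprocess time, which a bare symbolic name cannot express. A rough sketch of the expansion, assuming SREG_CCOMPARE == 240 as defined above:

	/* WSR_CCOMPARE(1, r) preprocesses to approximately: */
	asm volatile ("wsr %0, 240+1" :: "a" (r));	/* SR 241, i.e. ccompare1 */

The same reasoning covers the other banked registers kept in regs.h (SREG_MR, SREG_IBREAKA, SREG_DBREAKA, SREG_DBREAKC, SREG_EPC, SREG_EPS, SREG_EXCSAVE, SREG_MISC).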
8 changes: 4 additions & 4 deletions arch/xtensa/include/asm/tlbflush.h
@@ -86,26 +86,26 @@ static inline void invalidate_dtlb_entry_no_isync (unsigned entry)

static inline void set_itlbcfg_register (unsigned long val)
{
__asm__ __volatile__("wsr %0, "__stringify(ITLBCFG)"\n\t" "isync\n\t"
__asm__ __volatile__("wsr %0, itlbcfg\n\t" "isync\n\t"
: : "a" (val));
}

static inline void set_dtlbcfg_register (unsigned long val)
{
__asm__ __volatile__("wsr %0, "__stringify(DTLBCFG)"; dsync\n\t"
__asm__ __volatile__("wsr %0, dtlbcfg; dsync\n\t"
: : "a" (val));
}

static inline void set_ptevaddr_register (unsigned long val)
{
__asm__ __volatile__(" wsr %0, "__stringify(PTEVADDR)"; isync\n"
__asm__ __volatile__(" wsr %0, ptevaddr; isync\n"
: : "a" (val));
}

static inline unsigned long read_ptevaddr_register (void)
{
unsigned long tmp;
__asm__ __volatile__("rsr %0, "__stringify(PTEVADDR)"\n\t" : "=a" (tmp));
__asm__ __volatile__("rsr %0, ptevaddr\n\t" : "=a" (tmp));
return tmp;
}

38 changes: 19 additions & 19 deletions arch/xtensa/kernel/align.S
@@ -170,15 +170,15 @@ ENTRY(fast_unaligned)
s32i a7, a2, PT_AREG7
s32i a8, a2, PT_AREG8

-	rsr a0, DEPC
-	xsr a3, EXCSAVE_1
+	rsr a0, depc
+	xsr a3, excsave1
s32i a0, a2, PT_AREG2
s32i a3, a2, PT_AREG3

/* Keep value of SAR in a0 */

-	rsr a0, SAR
-	rsr a8, EXCVADDR # load unaligned memory address
+	rsr a0, sar
+	rsr a8, excvaddr # load unaligned memory address

/* Now, identify one of the following load/store instructions.
*
@@ -197,7 +197,7 @@

/* Extract the instruction that caused the unaligned access. */

-	rsr a7, EPC_1 # load exception address
+	rsr a7, epc1 # load exception address
movi a3, ~3
and a3, a3, a7 # mask lower bits

@@ -275,16 +275,16 @@ ENTRY(fast_unaligned)
1:

#if XCHAL_HAVE_LOOPS
-	rsr a5, LEND # check if we reached LEND
+	rsr a5, lend # check if we reached LEND
bne a7, a5, 1f
-	rsr a5, LCOUNT # and LCOUNT != 0
+	rsr a5, lcount # and LCOUNT != 0
beqz a5, 1f
addi a5, a5, -1 # decrement LCOUNT and set
-	rsr a7, LBEG # set PC to LBEGIN
-	wsr a5, LCOUNT
+	rsr a7, lbeg # set PC to LBEGIN
+	wsr a5, lcount
#endif

-1:	wsr a7, EPC_1 # skip load instruction
+1:	wsr a7, epc1 # skip load instruction
extui a4, a4, INSN_T, 4 # extract target register
movi a5, .Lload_table
addx8 a4, a4, a5
@@ -355,16 +355,16 @@ ENTRY(fast_unaligned)

1:
#if XCHAL_HAVE_LOOPS
-	rsr a4, LEND # check if we reached LEND
+	rsr a4, lend # check if we reached LEND
bne a7, a4, 1f
-	rsr a4, LCOUNT # and LCOUNT != 0
+	rsr a4, lcount # and LCOUNT != 0
beqz a4, 1f
addi a4, a4, -1 # decrement LCOUNT and set
-	rsr a7, LBEG # set PC to LBEGIN
-	wsr a4, LCOUNT
+	rsr a7, lbeg # set PC to LBEGIN
+	wsr a4, lcount
#endif

-1:	wsr a7, EPC_1 # skip store instruction
+1:	wsr a7, epc1 # skip store instruction
movi a4, ~3
and a4, a4, a8 # align memory address

@@ -406,7 +406,7 @@ ENTRY(fast_unaligned)

.Lexit:
movi a4, 0
-	rsr a3, EXCSAVE_1
+	rsr a3, excsave1
s32i a4, a3, EXC_TABLE_FIXUP

/* Restore working register */
@@ -420,7 +420,7 @@ ENTRY(fast_unaligned)

/* restore SAR and return */

-	wsr a0, SAR
+	wsr a0, sar
l32i a0, a2, PT_AREG0
l32i a2, a2, PT_AREG2
rfe
@@ -438,10 +438,10 @@ ENTRY(fast_unaligned)
l32i a6, a2, PT_AREG6
l32i a5, a2, PT_AREG5
l32i a4, a2, PT_AREG4
-	wsr a0, SAR
+	wsr a0, sar
mov a1, a2

-	rsr a0, PS
+	rsr a0, ps
bbsi.l a2, PS_UM_BIT, 1f # jump if user mode

movi a0, _kernel_exception
(Diff for the remaining 8 changed files not shown.)