[PATCH] x86-64: Merge msr.c with i386 version
The only difference was the inline assembly, so move that into
asm/msr.h and merge with the i386 version.

This adds some missing sysfs support code to x86-64.

Signed-off-by: Andi Kleen <ak@suse.de>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
Andi Kleen authored and Linus Torvalds committed Sep 12, 2005
1 parent 55679ed commit 059bf0f
Showing 3 changed files with 28 additions and 291 deletions.
1 change: 1 addition & 0 deletions arch/x86_64/kernel/Makefile
@@ -46,3 +46,4 @@ microcode-$(subst m,y,$(CONFIG_MICROCODE)) += ../../i386/kernel/microcode.o
 intel_cacheinfo-y += ../../i386/kernel/cpu/intel_cacheinfo.o
 quirks-y += ../../i386/kernel/quirks.o
 i8237-y += ../../i386/kernel/i8237.o
+msr-$(subst m,y,$(CONFIG_X86_MSR)) += ../../i386/kernel/msr.o
279 changes: 0 additions & 279 deletions arch/x86_64/kernel/msr.c

This file was deleted.
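The deleted arch/x86_64/kernel/msr.c duplicated the i386 MSR character-device driver (/dev/cpu/N/msr), differing only in its inline assembly; with that assembly moved into asm/msr.h, the i386 source is built for x86-64 through the Makefile rule above. As rough orientation only, the read path of such a device boils down to the simplified, hypothetical sketch below (not the actual driver code; msr_sketch_read is an illustrative name, and rdmsr_safe() is the macro added to the x86-64 header further down):

/*
 * Simplified, hypothetical sketch -- not the actual msr.c.  The file
 * offset selects the MSR number, each read returns one 8-byte value,
 * and rdmsr_safe() from <asm/msr.h> turns a faulting RDMSR into an
 * error code instead of an oops.
 */
#include <linux/errno.h>
#include <linux/fs.h>
#include <linux/types.h>
#include <asm/msr.h>
#include <asm/uaccess.h>

static ssize_t msr_sketch_read(struct file *file, char __user *buf,
                               size_t count, loff_t *ppos)
{
        u32 reg = *ppos;        /* MSR number is encoded in the file offset */
        u32 lo, hi;
        u64 val;
        int err;

        if (count < 8)
                return -EINVAL;

        err = rdmsr_safe(reg, lo, hi);  /* 0 on success, -EIO on #GP */
        if (err)
                return err;

        val = ((u64)hi << 32) | lo;
        if (copy_to_user(buf, &val, 8))
                return -EFAULT;

        return 8;
}

Beyond this, the shared i386 driver also directs the access to the CPU selected by the device minor number and carries the sysfs support code that the commit message mentions as previously missing on x86-64.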

39 changes: 27 additions & 12 deletions include/asm-x86_64/msr.h
@@ -29,22 +29,37 @@
 #define wrmsrl(msr,val) wrmsr(msr,(__u32)((__u64)(val)),((__u64)(val))>>32)

 /* wrmsr with exception handling */
-#define wrmsr_safe(msr,a,b) ({ int ret__; \
-        asm volatile("2: wrmsr ; xorl %0,%0\n" \
-                     "1:\n\t" \
-                     ".section .fixup,\"ax\"\n\t" \
-                     "3: movl %4,%0 ; jmp 1b\n\t" \
-                     ".previous\n\t" \
-                     ".section __ex_table,\"a\"\n" \
-                     " .align 8\n\t" \
-                     " .quad 2b,3b\n\t" \
-                     ".previous" \
-                     : "=a" (ret__) \
-                     : "c" (msr), "0" (a), "d" (b), "i" (-EFAULT));\
+#define wrmsr_safe(msr,a,b) ({ int ret__; \
+        asm volatile("2: wrmsr ; xorl %0,%0\n" \
+                     "1:\n\t" \
+                     ".section .fixup,\"ax\"\n\t" \
+                     "3: movl %4,%0 ; jmp 1b\n\t" \
+                     ".previous\n\t" \
+                     ".section __ex_table,\"a\"\n" \
+                     " .align 8\n\t" \
+                     " .quad 2b,3b\n\t" \
+                     ".previous" \
+                     : "=a" (ret__) \
+                     : "c" (msr), "0" (a), "d" (b), "i" (-EFAULT)); \
         ret__; })

 #define checking_wrmsrl(msr,val) wrmsr_safe(msr,(u32)(val),(u32)((val)>>32))

+#define rdmsr_safe(msr,a,b) \
+        ({ int ret__; \
+          asm volatile ("1: rdmsr\n" \
+                        "2:\n" \
+                        ".section .fixup,\"ax\"\n" \
+                        "3: movl %4,%0\n" \
+                        " jmp 2b\n" \
+                        ".previous\n" \
+                        ".section __ex_table,\"a\"\n" \
+                        " .align 8\n" \
+                        " .quad 1b,3b\n" \
+                        ".previous":"=&bDS" (ret__), "=a"(a), "=d"(b)\
+                        :"c"(msr), "i"(-EIO), "0"(0)); \
+          ret__; })
+
 #define rdtsc(low,high) \
      __asm__ __volatile__("rdtsc" : "=a" (low), "=d" (high))

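Both macros rely on the __ex_table fixup machinery shown above: wrmsr_safe() and the newly added rdmsr_safe() return 0 on success and -EFAULT or -EIO respectively if the instruction faults, while checking_wrmsrl() is the 64-bit write wrapper. A minimal, hypothetical caller (illustrative helper name, not part of this patch) would look like:

#include <linux/errno.h>
#include <linux/types.h>
#include <asm/msr.h>

/*
 * Hypothetical helper, not from this patch: read-modify-write an MSR
 * with fault checking, so a CPU model that lacks the MSR yields an
 * error code instead of an unhandled general-protection fault.
 */
static int msr_set_bit_checked(u32 msr, int bit)
{
        u32 lo, hi;
        u64 val;
        int err;

        err = rdmsr_safe(msr, lo, hi);          /* 0 on success, -EIO on fault */
        if (err)
                return err;

        val = (((u64)hi << 32) | lo) | (1ULL << bit);

        return checking_wrmsrl(msr, val);       /* 0 on success, -EFAULT on fault */
}

This is the same pattern the shared msr.c driver depends on to report an error to user space, rather than crashing, when an unimplemented MSR is accessed.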