Skip to content

Commit

Permalink
x86/smpboot: Implement a bit spinlock to protect the realmode stack
Browse files Browse the repository at this point in the history
Parallel AP bringup requires that the APs can run fully in parallel through
the early startup code including the real mode trampoline.

To prepare for this, implement a bit spinlock to serialize access to the
real mode stack so that APs coming up in parallel do not corrupt each
other's stacks while going through the real mode startup code.

Co-developed-by: David Woodhouse <dwmw@amazon.co.uk>
Signed-off-by: David Woodhouse <dwmw@amazon.co.uk>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Tested-by: Michael Kelley <mikelley@microsoft.com>
Tested-by: Oleksandr Natalenko <oleksandr@natalenko.name>
Tested-by: Helge Deller <deller@gmx.de> # parisc
Tested-by: Guilherme G. Piccoli <gpiccoli@igalia.com> # Steam Deck
Link: https://lore.kernel.org/r/20230512205257.355425551@linutronix.de
  • Loading branch information
Thomas Gleixner authored and Peter Zijlstra committed May 15, 2023
1 parent bea629d commit f6f1ae9
Show file tree
Hide file tree
Showing 4 changed files with 36 additions and 5 deletions.
3 changes: 3 additions & 0 deletions arch/x86/include/asm/realmode.h
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@ struct trampoline_header {
u64 efer;
u32 cr4;
u32 flags;
u32 lock;
#endif
};

Expand All @@ -64,6 +65,8 @@ extern unsigned long initial_stack;
extern unsigned long initial_vc_handler;
#endif

extern u32 *trampoline_lock;

extern unsigned char real_mode_blob[];
extern unsigned char real_mode_relocs[];

Expand Down
12 changes: 12 additions & 0 deletions arch/x86/kernel/head_64.S
Original file line number Diff line number Diff line change
Expand Up @@ -251,6 +251,16 @@ SYM_INNER_LABEL(secondary_startup_64_no_verify, SYM_L_GLOBAL)
movq pcpu_hot + X86_current_task(%rdx), %rax
movq TASK_threadsp(%rax), %rsp

/*
* Now that this CPU is running on its own stack, drop the realmode
* protection. For the boot CPU the pointer is NULL!
*/
movq trampoline_lock(%rip), %rax
testq %rax, %rax
jz .Lsetup_gdt
movl $0, (%rax)

.Lsetup_gdt:
/*
* We must switch to a new descriptor in kernel space for the GDT
* because soon the kernel won't have access anymore to the userspace
Expand Down Expand Up @@ -433,6 +443,8 @@ SYM_DATA(initial_code, .quad x86_64_start_kernel)
#ifdef CONFIG_AMD_MEM_ENCRYPT
SYM_DATA(initial_vc_handler, .quad handle_vc_boot_ghcb)
#endif

SYM_DATA(trampoline_lock, .quad 0);
__FINITDATA

__INIT
Expand Down
3 changes: 3 additions & 0 deletions arch/x86/realmode/init.c
Original file line number Diff line number Diff line change
Expand Up @@ -154,6 +154,9 @@ static void __init setup_real_mode(void)

trampoline_header->flags = 0;

trampoline_lock = &trampoline_header->lock;
*trampoline_lock = 0;

trampoline_pgd = (u64 *) __va(real_mode_header->trampoline_pgd);

/* Map the real mode stub as virtual == physical */
Expand Down
23 changes: 18 additions & 5 deletions arch/x86/realmode/rm/trampoline_64.S
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,20 @@
.text
.code16

.macro LOAD_REALMODE_ESP
	/*
	 * Serialize use of the shared realmode stack: spin until this CPU
	 * wins the atomic test-and-set on bit 0 of tr_lock, then install
	 * the stack pointer. LOCK BTS leaves the previous bit value in CF,
	 * so CF=1 means another CPU already owns the stack.
	 */
	jmp	.Ltry_rm\@
.Lwait_rm\@:
	pause				# be polite to SMT siblings while spinning
.Ltry_rm\@:
	lock btsl	$0, tr_lock
	jc	.Lwait_rm\@
	# Lock acquired - the realmode stack is exclusively ours now
	movl	$rm_stack_end, %esp
.endm

.balign PAGE_SIZE
SYM_CODE_START(trampoline_start)
cli # We should be safe anyway
Expand All @@ -49,8 +63,7 @@ SYM_CODE_START(trampoline_start)
mov %ax, %es
mov %ax, %ss

# Setup stack
movl $rm_stack_end, %esp
LOAD_REALMODE_ESP

call verify_cpu # Verify the cpu supports long mode
testl %eax, %eax # Check for return code
Expand Down Expand Up @@ -93,8 +106,7 @@ SYM_CODE_START(sev_es_trampoline_start)
mov %ax, %es
mov %ax, %ss

# Setup stack
movl $rm_stack_end, %esp
LOAD_REALMODE_ESP

jmp .Lswitch_to_protected
SYM_CODE_END(sev_es_trampoline_start)
Expand Down Expand Up @@ -177,7 +189,7 @@ SYM_CODE_START(pa_trampoline_compat)
* In compatibility mode. Prep ESP and DX for startup_32, then disable
* paging and complete the switch to legacy 32-bit mode.
*/
movl $rm_stack_end, %esp
LOAD_REALMODE_ESP
movw $__KERNEL_DS, %dx

movl $(CR0_STATE & ~X86_CR0_PG), %eax
Expand Down Expand Up @@ -241,6 +253,7 @@ SYM_DATA_START(trampoline_header)
SYM_DATA(tr_efer, .space 8)
SYM_DATA(tr_cr4, .space 4)
SYM_DATA(tr_flags, .space 4)
SYM_DATA(tr_lock, .space 4)
SYM_DATA_END(trampoline_header)

#include "trampoline_common.S"

0 comments on commit f6f1ae9

Please sign in to comment.