arm64: kernel: Convert to modern annotations for assembly functions
In an effort to clarify and simplify the annotation of assembly functions
in the kernel, new macros have been introduced. These replace ENTRY and
ENDPROC, and also add a new annotation for static functions, which
previously had no ENTRY equivalent. Update the annotations in the core
kernel code to the new macros.

Signed-off-by: Mark Brown <broonie@kernel.org>
Acked-by: Mark Rutland <mark.rutland@arm.com>
Link: https://lore.kernel.org/r/20200501115430.37315-3-broonie@kernel.org
Signed-off-by: Will Deacon <will@kernel.org>
Mark Brown authored and Will Deacon committed May 4, 2020
1 parent 06607c7 commit 0343a7e
Showing 10 changed files with 68 additions and 68 deletions.
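The conversion is mechanical: for ordinary functions, ENTRY becomes SYM_FUNC_START and ENDPROC becomes SYM_FUNC_END. A minimal before/after sketch, using a hypothetical function name:

/* Before: old-style annotations */
ENTRY(my_asm_func)			// hypothetical name
	mov	x0, #0
	ret
ENDPROC(my_asm_func)

/* After: the modern equivalents from <linux/linkage.h> */
SYM_FUNC_START(my_asm_func)
	mov	x0, #0
	ret
SYM_FUNC_END(my_asm_func)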
4 changes: 2 additions & 2 deletions arch/arm64/kernel/cpu-reset.S
@@ -29,7 +29,7 @@
* branch to what would be the reset vector. It must be executed with the
* flat identity mapping.
*/
ENTRY(__cpu_soft_restart)
SYM_CODE_START(__cpu_soft_restart)
/* Clear sctlr_el1 flags. */
mrs x12, sctlr_el1
mov_q x13, SCTLR_ELx_FLAGS
@@ -47,6 +47,6 @@ ENTRY(__cpu_soft_restart)
mov x1, x3 // arg1
mov x2, x4 // arg2
br x8
ENDPROC(__cpu_soft_restart)
SYM_CODE_END(__cpu_soft_restart)

.popsection
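__cpu_soft_restart is branched to directly and never returns, so it gets the SYM_CODE_* variants rather than SYM_FUNC_*. A sketch of the distinction, with hypothetical names:

/* A C-ABI function: reached with bl, leaves with ret */
SYM_FUNC_START(my_helper)
	ret
SYM_FUNC_END(my_helper)		// marks my_helper as an ELF function

/* Non-C-ABI code: entered by br or exception, never returns normally */
SYM_CODE_START(my_trampoline)
	br	x8			// x8 set up by the caller; control does not come back
SYM_CODE_END(my_trampoline)	// size is recorded, but the symbol stays untyped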
4 changes: 2 additions & 2 deletions arch/arm64/kernel/efi-rt-wrapper.S
@@ -5,7 +5,7 @@

#include <linux/linkage.h>

ENTRY(__efi_rt_asm_wrapper)
SYM_FUNC_START(__efi_rt_asm_wrapper)
stp x29, x30, [sp, #-32]!
mov x29, sp

@@ -35,4 +35,4 @@ ENTRY(__efi_rt_asm_wrapper)
b.ne 0f
ret
0: b efi_handle_corrupted_x18 // tail call
ENDPROC(__efi_rt_asm_wrapper)
SYM_FUNC_END(__efi_rt_asm_wrapper)
20 changes: 10 additions & 10 deletions arch/arm64/kernel/entry-fpsimd.S
@@ -16,34 +16,34 @@
*
* x0 - pointer to struct fpsimd_state
*/
ENTRY(fpsimd_save_state)
SYM_FUNC_START(fpsimd_save_state)
fpsimd_save x0, 8
ret
ENDPROC(fpsimd_save_state)
SYM_FUNC_END(fpsimd_save_state)

/*
* Load the FP registers.
*
* x0 - pointer to struct fpsimd_state
*/
ENTRY(fpsimd_load_state)
SYM_FUNC_START(fpsimd_load_state)
fpsimd_restore x0, 8
ret
ENDPROC(fpsimd_load_state)
SYM_FUNC_END(fpsimd_load_state)

#ifdef CONFIG_ARM64_SVE
ENTRY(sve_save_state)
SYM_FUNC_START(sve_save_state)
sve_save 0, x1, 2
ret
ENDPROC(sve_save_state)
SYM_FUNC_END(sve_save_state)

ENTRY(sve_load_state)
SYM_FUNC_START(sve_load_state)
sve_load 0, x1, x2, 3, x4
ret
ENDPROC(sve_load_state)
SYM_FUNC_END(sve_load_state)

ENTRY(sve_get_vl)
SYM_FUNC_START(sve_get_vl)
_sve_rdvl 0, 1
ret
ENDPROC(sve_get_vl)
SYM_FUNC_END(sve_get_vl)
#endif /* CONFIG_ARM64_SVE */
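These are leaf functions called from C, hence the straight SYM_FUNC_* conversion. Roughly, and simplifying the actual <linux/linkage.h> definitions, the pairs expand to ordinary assembler directives; the .type/.size emitted by SYM_FUNC_END is what gives debuggers and tracers accurate function bounds:

/* Approximate expansion of SYM_FUNC_START(name), not the literal
 * linkage.h text */
	.globl	name
	.align	2		// arm64's __ALIGN at the time (assumption)
name:

/* Approximate expansion of SYM_FUNC_END(name) */
	.type	name, %function	// the symbol is a real function...
	.size	name, . - name	// ...with accurate bounds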
16 changes: 8 additions & 8 deletions arch/arm64/kernel/hibernate-asm.S
@@ -65,7 +65,7 @@
* x5: physical address of a zero page that remains zero after resume
*/
.pushsection ".hibernate_exit.text", "ax"
ENTRY(swsusp_arch_suspend_exit)
SYM_CODE_START(swsusp_arch_suspend_exit)
/*
* We execute from ttbr0, change ttbr1 to our copied linear map tables
* with a break-before-make via the zero page
@@ -110,7 +110,7 @@ ENTRY(swsusp_arch_suspend_exit)
cbz x24, 3f /* Do we need to re-initialise EL2? */
hvc #0
3: ret
ENDPROC(swsusp_arch_suspend_exit)
SYM_CODE_END(swsusp_arch_suspend_exit)

/*
* Restore the hyp stub.
@@ -119,15 +119,15 @@ ENDPROC(swsusp_arch_suspend_exit)
*
* x24: The physical address of __hyp_stub_vectors
*/
el1_sync:
SYM_CODE_START_LOCAL(el1_sync)
msr vbar_el2, x24
eret
ENDPROC(el1_sync)
SYM_CODE_END(el1_sync)

.macro invalid_vector label
\label:
SYM_CODE_START_LOCAL(\label)
b \label
ENDPROC(\label)
SYM_CODE_END(\label)
.endm

invalid_vector el2_sync_invalid
@@ -141,7 +141,7 @@

/* el2 vectors - switch el2 here while we restore the memory image. */
.align 11
ENTRY(hibernate_el2_vectors)
SYM_CODE_START(hibernate_el2_vectors)
ventry el2_sync_invalid // Synchronous EL2t
ventry el2_irq_invalid // IRQ EL2t
ventry el2_fiq_invalid // FIQ EL2t
@@ -161,6 +161,6 @@ ENTRY(hibernate_el2_vectors)
ventry el1_irq_invalid // IRQ 32-bit EL1
ventry el1_fiq_invalid // FIQ 32-bit EL1
ventry el1_error_invalid // Error 32-bit EL1
END(hibernate_el2_vectors)
SYM_CODE_END(hibernate_el2_vectors)

.popsection
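The el1_sync and invalid_vector hunks show the genuinely new capability mentioned in the commit message: ENTRY always made a symbol global, so file-local code had to make do with bare labels. SYM_CODE_START_LOCAL annotates such symbols without exporting them. Sketch with a hypothetical label:

/* Before: a bare label, no size information */
my_local_handler:
	eret

/* After: still file-local, but now a properly delimited symbol */
SYM_CODE_START_LOCAL(my_local_handler)
	eret
SYM_CODE_END(my_local_handler)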
20 changes: 10 additions & 10 deletions arch/arm64/kernel/hyp-stub.S
@@ -21,7 +21,7 @@

.align 11

ENTRY(__hyp_stub_vectors)
SYM_CODE_START(__hyp_stub_vectors)
ventry el2_sync_invalid // Synchronous EL2t
ventry el2_irq_invalid // IRQ EL2t
ventry el2_fiq_invalid // FIQ EL2t
@@ -41,11 +41,11 @@ ENTRY(__hyp_stub_vectors)
ventry el1_irq_invalid // IRQ 32-bit EL1
ventry el1_fiq_invalid // FIQ 32-bit EL1
ventry el1_error_invalid // Error 32-bit EL1
ENDPROC(__hyp_stub_vectors)
SYM_CODE_END(__hyp_stub_vectors)

.align 11

el1_sync:
SYM_CODE_START_LOCAL(el1_sync)
cmp x0, #HVC_SET_VECTORS
b.ne 2f
msr vbar_el2, x1
@@ -68,12 +68,12 @@ el1_sync:

9: mov x0, xzr
eret
ENDPROC(el1_sync)
SYM_CODE_END(el1_sync)

.macro invalid_vector label
\label:
SYM_CODE_START_LOCAL(\label)
b \label
ENDPROC(\label)
SYM_CODE_END(\label)
.endm

invalid_vector el2_sync_invalid
@@ -106,15 +106,15 @@ ENDPROC(\label)
* initialisation entry point.
*/

ENTRY(__hyp_set_vectors)
SYM_FUNC_START(__hyp_set_vectors)
mov x1, x0
mov x0, #HVC_SET_VECTORS
hvc #0
ret
ENDPROC(__hyp_set_vectors)
SYM_FUNC_END(__hyp_set_vectors)

ENTRY(__hyp_reset_vectors)
SYM_FUNC_START(__hyp_reset_vectors)
mov x0, #HVC_RESET_VECTORS
hvc #0
ret
ENDPROC(__hyp_reset_vectors)
SYM_FUNC_END(__hyp_reset_vectors)
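hyp-stub.S mixes both annotation families: the vector table is entered by the CPU on an exception, so it is SYM_CODE, while __hyp_set_vectors and __hyp_reset_vectors are ordinary functions called from C. A condensed sketch of that split, with hypothetical names:

	.align	11			// exception vector tables are 2KB-aligned
SYM_CODE_START(my_vectors)		// entered by the CPU, not by bl
	ventry	my_sync_handler		// arm64 helper: one aligned 128-byte slot
	// ... fifteen more ventry slots in a real table ...
SYM_CODE_END(my_vectors)

SYM_FUNC_START(my_set_vectors)		// ordinary C-callable function
	msr	vbar_el2, x0		// assumes we are running at EL2
	ret
SYM_FUNC_END(my_set_vectors)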
4 changes: 2 additions & 2 deletions arch/arm64/kernel/probes/kprobes_trampoline.S
@@ -61,7 +61,7 @@
ldp x28, x29, [sp, #S_X28]
.endm

ENTRY(kretprobe_trampoline)
SYM_CODE_START(kretprobe_trampoline)
sub sp, sp, #S_FRAME_SIZE

save_all_base_regs
@@ -79,4 +79,4 @@ ENTRY(kretprobe_trampoline)
add sp, sp, #S_FRAME_SIZE
ret

ENDPROC(kretprobe_trampoline)
SYM_CODE_END(kretprobe_trampoline)
44 changes: 22 additions & 22 deletions arch/arm64/kernel/reloc_test_syms.S
@@ -5,81 +5,81 @@

#include <linux/linkage.h>

ENTRY(absolute_data64)
SYM_FUNC_START(absolute_data64)
ldr x0, 0f
ret
0: .quad sym64_abs
ENDPROC(absolute_data64)
SYM_FUNC_END(absolute_data64)

ENTRY(absolute_data32)
SYM_FUNC_START(absolute_data32)
ldr w0, 0f
ret
0: .long sym32_abs
ENDPROC(absolute_data32)
SYM_FUNC_END(absolute_data32)

ENTRY(absolute_data16)
SYM_FUNC_START(absolute_data16)
adr x0, 0f
ldrh w0, [x0]
ret
0: .short sym16_abs, 0
ENDPROC(absolute_data16)
SYM_FUNC_END(absolute_data16)

ENTRY(signed_movw)
SYM_FUNC_START(signed_movw)
movz x0, #:abs_g2_s:sym64_abs
movk x0, #:abs_g1_nc:sym64_abs
movk x0, #:abs_g0_nc:sym64_abs
ret
ENDPROC(signed_movw)
SYM_FUNC_END(signed_movw)

ENTRY(unsigned_movw)
SYM_FUNC_START(unsigned_movw)
movz x0, #:abs_g3:sym64_abs
movk x0, #:abs_g2_nc:sym64_abs
movk x0, #:abs_g1_nc:sym64_abs
movk x0, #:abs_g0_nc:sym64_abs
ret
ENDPROC(unsigned_movw)
SYM_FUNC_END(unsigned_movw)

.align 12
.space 0xff8
ENTRY(relative_adrp)
SYM_FUNC_START(relative_adrp)
adrp x0, sym64_rel
add x0, x0, #:lo12:sym64_rel
ret
ENDPROC(relative_adrp)
SYM_FUNC_END(relative_adrp)

.align 12
.space 0xffc
ENTRY(relative_adrp_far)
SYM_FUNC_START(relative_adrp_far)
adrp x0, memstart_addr
add x0, x0, #:lo12:memstart_addr
ret
ENDPROC(relative_adrp_far)
SYM_FUNC_END(relative_adrp_far)

ENTRY(relative_adr)
SYM_FUNC_START(relative_adr)
adr x0, sym64_rel
ret
ENDPROC(relative_adr)
SYM_FUNC_END(relative_adr)

ENTRY(relative_data64)
SYM_FUNC_START(relative_data64)
adr x1, 0f
ldr x0, [x1]
add x0, x0, x1
ret
0: .quad sym64_rel - .
ENDPROC(relative_data64)
SYM_FUNC_END(relative_data64)

ENTRY(relative_data32)
SYM_FUNC_START(relative_data32)
adr x1, 0f
ldr w0, [x1]
add x0, x0, x1
ret
0: .long sym64_rel - .
ENDPROC(relative_data32)
SYM_FUNC_END(relative_data32)

ENTRY(relative_data16)
SYM_FUNC_START(relative_data16)
adr x1, 0f
ldrsh w0, [x1]
add x0, x0, x1
ret
0: .short sym64_rel - ., 0
ENDPROC(relative_data16)
SYM_FUNC_END(relative_data16)
4 changes: 2 additions & 2 deletions arch/arm64/kernel/relocate_kernel.S
@@ -26,7 +26,7 @@
* control_code_page, a special page which has been set up to be preserved
* during the copy operation.
*/
ENTRY(arm64_relocate_new_kernel)
SYM_CODE_START(arm64_relocate_new_kernel)

/* Setup the list loop variables. */
mov x18, x2 /* x18 = dtb address */
@@ -111,7 +111,7 @@ ENTRY(arm64_relocate_new_kernel)
mov x3, xzr
br x17

ENDPROC(arm64_relocate_new_kernel)
SYM_CODE_END(arm64_relocate_new_kernel)

.align 3 /* To keep the 64-bit values below naturally aligned. */

12 changes: 6 additions & 6 deletions arch/arm64/kernel/sleep.S
@@ -62,7 +62,7 @@
*
* x0 = struct sleep_stack_data area
*/
ENTRY(__cpu_suspend_enter)
SYM_FUNC_START(__cpu_suspend_enter)
stp x29, lr, [x0, #SLEEP_STACK_DATA_CALLEE_REGS]
stp x19, x20, [x0,#SLEEP_STACK_DATA_CALLEE_REGS+16]
stp x21, x22, [x0,#SLEEP_STACK_DATA_CALLEE_REGS+32]
@@ -95,10 +95,10 @@ ENTRY(__cpu_suspend_enter)
ldp x29, lr, [sp], #16
mov x0, #1
ret
ENDPROC(__cpu_suspend_enter)
SYM_FUNC_END(__cpu_suspend_enter)

.pushsection ".idmap.text", "awx"
ENTRY(cpu_resume)
SYM_CODE_START(cpu_resume)
bl el2_setup // if in EL2 drop to EL1 cleanly
mov x0, #ARM64_CPU_RUNTIME
bl __cpu_setup
@@ -107,11 +107,11 @@ ENTRY(cpu_resume)
bl __enable_mmu
ldr x8, =_cpu_resume
br x8
ENDPROC(cpu_resume)
SYM_CODE_END(cpu_resume)
.ltorg
.popsection

ENTRY(_cpu_resume)
SYM_FUNC_START(_cpu_resume)
mrs x1, mpidr_el1
adr_l x8, mpidr_hash // x8 = struct mpidr_hash virt address

@@ -147,4 +147,4 @@ ENTRY(_cpu_resume)
ldp x29, lr, [x29]
mov x0, #0
ret
ENDPROC(_cpu_resume)
SYM_FUNC_END(_cpu_resume)
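sleep.S shows the choice being made per symbol within one file: __cpu_suspend_enter and _cpu_resume follow the C calling convention (SYM_FUNC_*), while cpu_resume is entered from the firmware resume path with the MMU off (SYM_CODE_*). A sketch of the idmap pattern such code uses, with hypothetical names:

	.pushsection ".idmap.text", "awx"
SYM_CODE_START(my_resume_entry)
	bl	my_mmu_setup		// hypothetical helper that enables the MMU
	ldr	x8, =my_virt_entry	// literal-pool load of a virtual address
	br	x8			// jump into the kernel's normal mapping
SYM_CODE_END(my_resume_entry)
	.ltorg				// keep the literal pool inside the section
	.popsection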
8 changes: 4 additions & 4 deletions arch/arm64/kernel/smccc-call.S
@@ -30,9 +30,9 @@
* unsigned long a6, unsigned long a7, struct arm_smccc_res *res,
* struct arm_smccc_quirk *quirk)
*/
ENTRY(__arm_smccc_smc)
SYM_FUNC_START(__arm_smccc_smc)
SMCCC smc
ENDPROC(__arm_smccc_smc)
SYM_FUNC_END(__arm_smccc_smc)
EXPORT_SYMBOL(__arm_smccc_smc)

/*
@@ -41,7 +41,7 @@ EXPORT_SYMBOL(__arm_smccc_smc)
* unsigned long a6, unsigned long a7, struct arm_smccc_res *res,
* struct arm_smccc_quirk *quirk)
*/
ENTRY(__arm_smccc_hvc)
SYM_FUNC_START(__arm_smccc_hvc)
SMCCC hvc
ENDPROC(__arm_smccc_hvc)
SYM_FUNC_END(__arm_smccc_hvc)
EXPORT_SYMBOL(__arm_smccc_hvc)
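EXPORT_SYMBOL is unaffected by the conversion; it attaches to the symbol name regardless of which macro defined it. A minimal sketch of an exported wrapper, with a hypothetical name:

SYM_FUNC_START(my_smc_call)		// hypothetical exported wrapper
	smc	#0			// arguments are already in x0-x7
	ret
SYM_FUNC_END(my_smc_call)
EXPORT_SYMBOL(my_smc_call)		// export works exactly as with ENTRY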
