Commit 0343a7e4 authored by Mark Brown, committed by Will Deacon
Browse files

arm64: kernel: Convert to modern annotations for assembly functions



In an effort to clarify and simplify the annotation of assembly functions
in the kernel new macros have been introduced. These replace ENTRY and
ENDPROC and also add a new annotation for static functions which previously
had no ENTRY equivalent. Update the annotations in the core kernel code to
the new macros.

Signed-off-by: Mark Brown <broonie@kernel.org>
Acked-by: Mark Rutland <mark.rutland@arm.com>
Link: https://lore.kernel.org/r/20200501115430.37315-3-broonie@kernel.org


Signed-off-by: Will Deacon <will@kernel.org>
parent 06607c7e
Loading
Loading
Loading
Loading
+2 −2
Original line number Diff line number Diff line
@@ -29,7 +29,7 @@
 * branch to what would be the reset vector. It must be executed with the
 * flat identity mapping.
 */
ENTRY(__cpu_soft_restart)
SYM_CODE_START(__cpu_soft_restart)
	/* Clear sctlr_el1 flags. */
	mrs	x12, sctlr_el1
	mov_q	x13, SCTLR_ELx_FLAGS
@@ -47,6 +47,6 @@ ENTRY(__cpu_soft_restart)
	mov	x1, x3				// arg1
	mov	x2, x4				// arg2
	br	x8
ENDPROC(__cpu_soft_restart)
SYM_CODE_END(__cpu_soft_restart)

.popsection
+2 −2
Original line number Diff line number Diff line
@@ -5,7 +5,7 @@

#include <linux/linkage.h>

ENTRY(__efi_rt_asm_wrapper)
SYM_FUNC_START(__efi_rt_asm_wrapper)
	stp	x29, x30, [sp, #-32]!
	mov	x29, sp

@@ -35,4 +35,4 @@ ENTRY(__efi_rt_asm_wrapper)
	b.ne	0f
	ret
0:	b	efi_handle_corrupted_x18	// tail call
ENDPROC(__efi_rt_asm_wrapper)
SYM_FUNC_END(__efi_rt_asm_wrapper)
+10 −10
Original line number Diff line number Diff line
@@ -16,34 +16,34 @@
 *
 * x0 - pointer to struct fpsimd_state
 */
ENTRY(fpsimd_save_state)
SYM_FUNC_START(fpsimd_save_state)
	fpsimd_save x0, 8
	ret
ENDPROC(fpsimd_save_state)
SYM_FUNC_END(fpsimd_save_state)

/*
 * Load the FP registers.
 *
 * x0 - pointer to struct fpsimd_state
 */
ENTRY(fpsimd_load_state)
SYM_FUNC_START(fpsimd_load_state)
	fpsimd_restore x0, 8
	ret
ENDPROC(fpsimd_load_state)
SYM_FUNC_END(fpsimd_load_state)

#ifdef CONFIG_ARM64_SVE
ENTRY(sve_save_state)
SYM_FUNC_START(sve_save_state)
	sve_save 0, x1, 2
	ret
ENDPROC(sve_save_state)
SYM_FUNC_END(sve_save_state)

ENTRY(sve_load_state)
SYM_FUNC_START(sve_load_state)
	sve_load 0, x1, x2, 3, x4
	ret
ENDPROC(sve_load_state)
SYM_FUNC_END(sve_load_state)

ENTRY(sve_get_vl)
SYM_FUNC_START(sve_get_vl)
	_sve_rdvl	0, 1
	ret
ENDPROC(sve_get_vl)
SYM_FUNC_END(sve_get_vl)
#endif /* CONFIG_ARM64_SVE */
+8 −8
Original line number Diff line number Diff line
@@ -65,7 +65,7 @@
 * x5: physical address of a  zero page that remains zero after resume
 */
.pushsection    ".hibernate_exit.text", "ax"
ENTRY(swsusp_arch_suspend_exit)
SYM_CODE_START(swsusp_arch_suspend_exit)
	/*
	 * We execute from ttbr0, change ttbr1 to our copied linear map tables
	 * with a break-before-make via the zero page
@@ -110,7 +110,7 @@ ENTRY(swsusp_arch_suspend_exit)
	cbz	x24, 3f		/* Do we need to re-initialise EL2? */
	hvc	#0
3:	ret
ENDPROC(swsusp_arch_suspend_exit)
SYM_CODE_END(swsusp_arch_suspend_exit)

/*
 * Restore the hyp stub.
@@ -119,15 +119,15 @@ ENDPROC(swsusp_arch_suspend_exit)
 *
 * x24: The physical address of __hyp_stub_vectors
 */
el1_sync:
SYM_CODE_START_LOCAL(el1_sync)
	msr	vbar_el2, x24
	eret
ENDPROC(el1_sync)
SYM_CODE_END(el1_sync)

.macro invalid_vector	label
\label:
SYM_CODE_START_LOCAL(\label)
	b \label
ENDPROC(\label)
SYM_CODE_END(\label)
.endm

	invalid_vector	el2_sync_invalid
@@ -141,7 +141,7 @@ ENDPROC(\label)

/* el2 vectors - switch el2 here while we restore the memory image. */
	.align 11
ENTRY(hibernate_el2_vectors)
SYM_CODE_START(hibernate_el2_vectors)
	ventry	el2_sync_invalid		// Synchronous EL2t
	ventry	el2_irq_invalid			// IRQ EL2t
	ventry	el2_fiq_invalid			// FIQ EL2t
@@ -161,6 +161,6 @@ ENTRY(hibernate_el2_vectors)
	ventry	el1_irq_invalid			// IRQ 32-bit EL1
	ventry	el1_fiq_invalid			// FIQ 32-bit EL1
	ventry	el1_error_invalid		// Error 32-bit EL1
END(hibernate_el2_vectors)
SYM_CODE_END(hibernate_el2_vectors)

.popsection
+10 −10
Original line number Diff line number Diff line
@@ -21,7 +21,7 @@

	.align 11

ENTRY(__hyp_stub_vectors)
SYM_CODE_START(__hyp_stub_vectors)
	ventry	el2_sync_invalid		// Synchronous EL2t
	ventry	el2_irq_invalid			// IRQ EL2t
	ventry	el2_fiq_invalid			// FIQ EL2t
@@ -41,11 +41,11 @@ ENTRY(__hyp_stub_vectors)
	ventry	el1_irq_invalid			// IRQ 32-bit EL1
	ventry	el1_fiq_invalid			// FIQ 32-bit EL1
	ventry	el1_error_invalid		// Error 32-bit EL1
ENDPROC(__hyp_stub_vectors)
SYM_CODE_END(__hyp_stub_vectors)

	.align 11

el1_sync:
SYM_CODE_START_LOCAL(el1_sync)
	cmp	x0, #HVC_SET_VECTORS
	b.ne	2f
	msr	vbar_el2, x1
@@ -68,12 +68,12 @@ el1_sync:

9:	mov	x0, xzr
	eret
ENDPROC(el1_sync)
SYM_CODE_END(el1_sync)

.macro invalid_vector	label
\label:
SYM_CODE_START_LOCAL(\label)
	b \label
ENDPROC(\label)
SYM_CODE_END(\label)
.endm

	invalid_vector	el2_sync_invalid
@@ -106,15 +106,15 @@ ENDPROC(\label)
 * initialisation entry point.
 */

ENTRY(__hyp_set_vectors)
SYM_FUNC_START(__hyp_set_vectors)
	mov	x1, x0
	mov	x0, #HVC_SET_VECTORS
	hvc	#0
	ret
ENDPROC(__hyp_set_vectors)
SYM_FUNC_END(__hyp_set_vectors)

ENTRY(__hyp_reset_vectors)
SYM_FUNC_START(__hyp_reset_vectors)
	mov	x0, #HVC_RESET_VECTORS
	hvc	#0
	ret
ENDPROC(__hyp_reset_vectors)
SYM_FUNC_END(__hyp_reset_vectors)
Loading