Commit 806dc825 authored by Catalin Marinas

Merge branch 'for-next/asm-cleanups' into for-next/core

* for-next/asm-cleanups:
  : Various asm clean-ups (alignment, mov_q vs ldr, .idmap)
  arm64: move kimage_vaddr to .rodata
  arm64: use mov_q instead of literal ldr
parents 0829a076 6cf9a2dc
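Most of the hunks below make the same substitution: a literal load (ldr of an =constant) becomes a mov_q. As a minimal sketch, not taken from this commit and assuming the kernel's <asm/assembler.h> (which defines mov_q) and <asm/sysreg.h> (which defines SCTLR_ELx_FLAGS) are in scope, the two forms differ as follows:

	/* Old form: ldr =literal is a PC-relative data load from a literal
	 * pool, and the assembler needs a .ltorg directive within range of
	 * the instruction to emit that pool. */
	ldr	x13, =SCTLR_ELx_FLAGS
	ret
	.ltorg

	/* New form: mov_q expands to a movz/movk immediate sequence that
	 * builds the constant directly in the register, so no literal pool
	 * (and no .ltorg) is required. */
	mov_q	x13, SCTLR_ELx_FLAGS
	ret

Once the last literal load in a file is gone, its now-unused .ltorg directives can be dropped as well, which is why the trailing .ltorg lines in the last two files below are removed.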
arch/arm64/kernel/cpu-reset.S  +1 −1
@@ -32,7 +32,7 @@
ENTRY(__cpu_soft_restart)
	/* Clear sctlr_el1 flags. */
	mrs	x12, sctlr_el1
-	ldr	x13, =SCTLR_ELx_FLAGS
+	mov_q	x13, SCTLR_ELx_FLAGS
	bic	x12, x12, x13
	pre_disable_mmu_workaround
	msr	sctlr_el1, x12
arch/arm64/kernel/head.S  +7 −5
@@ -457,17 +457,19 @@ SYM_FUNC_START_LOCAL(__primary_switched)
	b	start_kernel
SYM_FUNC_END(__primary_switched)

+	.pushsection ".rodata", "a"
+SYM_DATA_START(kimage_vaddr)
+	.quad		_text - TEXT_OFFSET
+SYM_DATA_END(kimage_vaddr)
+EXPORT_SYMBOL(kimage_vaddr)
+	.popsection
+
/*
 * end early head section, begin head code that is also used for
 * hotplug and needs to have the same protections as the text region
 */
	.section ".idmap.text","awx"

-SYM_DATA_START(kimage_vaddr)
-	.quad		_text - TEXT_OFFSET
-SYM_DATA_END(kimage_vaddr)
-EXPORT_SYMBOL(kimage_vaddr)
-
/*
 * If we're fortunate enough to boot at EL2, ensure that the world is
 * sane before dropping to EL1.
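That head.S hunk is the kimage_vaddr move from the commit message: the definition leaves the "awx" (allocatable, writable, executable) .idmap.text section and is emitted into .rodata instead, bracketed by .pushsection/.popsection so the surrounding code keeps assembling into its original section. A minimal sketch of the same pattern with a hypothetical symbol (example_ro_quad is made up; SYM_DATA_START/SYM_DATA_END and EXPORT_SYMBOL are the kernel's existing asm annotation macros):

	.pushsection ".rodata", "a"	// switch to read-only, allocatable data
	.align	3			// keep the 64-bit value naturally aligned
SYM_DATA_START(example_ro_quad)
	.quad	0xdeadbeef
SYM_DATA_END(example_ro_quad)
EXPORT_SYMBOL(example_ro_quad)
	.popsection			// resume the previously active section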
arch/arm64/kernel/hyp-stub.S  +1 −1
@@ -63,7 +63,7 @@ el1_sync:
	beq	9f				// Nothing to reset!

	/* Someone called kvm_call_hyp() against the hyp-stub... */
-	ldr	x0, =HVC_STUB_ERR
+	mov_q	x0, HVC_STUB_ERR
	eret

9:	mov	x0, xzr
arch/arm64/kernel/relocate_kernel.S  +1 −3
@@ -41,7 +41,7 @@ ENTRY(arm64_relocate_new_kernel)
	cmp	x0, #CurrentEL_EL2
	b.ne	1f
	mrs	x0, sctlr_el2
-	ldr	x1, =SCTLR_ELx_FLAGS
+	mov_q	x1, SCTLR_ELx_FLAGS
	bic	x0, x0, x1
	pre_disable_mmu_workaround
	msr	sctlr_el2, x0
@@ -113,8 +113,6 @@ ENTRY(arm64_relocate_new_kernel)

ENDPROC(arm64_relocate_new_kernel)

-.ltorg
-
.align 3	/* To keep the 64-bit values below naturally aligned. */

.Lcopy_end:
arch/arm64/kvm/hyp-init.S  +4 −6
@@ -60,7 +60,7 @@ alternative_else_nop_endif
	msr	ttbr0_el2, x4

	mrs	x4, tcr_el1
-	ldr	x5, =TCR_EL2_MASK
+	mov_q	x5, TCR_EL2_MASK
	and	x4, x4, x5
	mov	x5, #TCR_EL2_RES1
	orr	x4, x4, x5
@@ -102,7 +102,7 @@ alternative_else_nop_endif
	 * as well as the EE bit on BE. Drop the A flag since the compiler
	 * is allowed to generate unaligned accesses.
	 */
-	ldr	x4, =(SCTLR_EL2_RES1 | (SCTLR_ELx_FLAGS & ~SCTLR_ELx_A))
+	mov_q	x4, (SCTLR_EL2_RES1 | (SCTLR_ELx_FLAGS & ~SCTLR_ELx_A))
CPU_BE(	orr	x4, x4, #SCTLR_ELx_EE)
	msr	sctlr_el2, x4
	isb
@@ -142,7 +142,7 @@ reset:
	 * case we coming via HVC_SOFT_RESTART.
	 */
	mrs	x5, sctlr_el2
-	ldr	x6, =SCTLR_ELx_FLAGS
+	mov_q	x6, SCTLR_ELx_FLAGS
	bic	x5, x5, x6		// Clear SCTL_M and etc
	pre_disable_mmu_workaround
	msr	sctlr_el2, x5
@@ -155,11 +155,9 @@ reset:
	eret

1:	/* Bad stub call */
-	ldr	x0, =HVC_STUB_ERR
+	mov_q	x0, HVC_STUB_ERR
	eret

SYM_CODE_END(__kvm_handle_stub_hvc)

-	.ltorg
-
	.popsection