Commit dc374b47 authored by Remi Denis-Courmont, committed by Catalin Marinas

arm64: use mov_q instead of literal ldr

In practice, this requires only 2 instructions, or even only 1 for
the idmap_pg_dir size (with 4 or 64 KiB pages). Only the MAIR values
needed more than 2 instructions, and they were already converted to
mov_q by commit 95b3f74b.
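
For background (an explanatory aside, not part of the commit message):
"ldr xN, =CONST" is an assembler pseudo-instruction that parks the
constant in a literal pool, emitted into the code section at the next
.ltorg directive, and fetches it with a PC-relative load. The mov_q
macro (arch/arm64/include/asm/assembler.h) instead synthesizes the
constant inline with movz/movk. A minimal sketch of the two idioms,
using a made-up 32-bit constant:

	// Literal-pool form: one PC-relative load, plus an 8-byte
	// constant kept in memory near the code.
	ldr	x0, =0x12345678
	.ltorg				// pool contents emitted here

	// mov_q form: the value is built in the instruction stream,
	// so no pool entry and no .ltorg are needed. A 32-bit value
	// takes two instructions; wider values (such as the MAIR set
	// mentioned above) take up to four.
	movz	x0, #0x1234, lsl #16	// set bits [31:16], zero the rest
	movk	x0, #0x5678		// insert bits [15:0]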

Signed-off-by: Remi Denis-Courmont <remi.denis.courmont@huawei.com>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
Acked-by: Mark Rutland <mark.rutland@arm.com>
parent 9a25136a
+1 −1
@@ -32,7 +32,7 @@
 ENTRY(__cpu_soft_restart)
 	/* Clear sctlr_el1 flags. */
 	mrs	x12, sctlr_el1
-	ldr	x13, =SCTLR_ELx_FLAGS
+	mov_q	x13, SCTLR_ELx_FLAGS
 	bic	x12, x12, x13
 	pre_disable_mmu_workaround
 	msr	sctlr_el1, x12
+1 −1
@@ -63,7 +63,7 @@ el1_sync:
 	beq	9f				// Nothing to reset!
 
 	/* Someone called kvm_call_hyp() against the hyp-stub... */
-	ldr	x0, =HVC_STUB_ERR
+	mov_q	x0, HVC_STUB_ERR
 	eret
 
 9:	mov	x0, xzr
+1 −3
@@ -41,7 +41,7 @@ ENTRY(arm64_relocate_new_kernel)
 	cmp	x0, #CurrentEL_EL2
 	b.ne	1f
 	mrs	x0, sctlr_el2
-	ldr	x1, =SCTLR_ELx_FLAGS
+	mov_q	x1, SCTLR_ELx_FLAGS
 	bic	x0, x0, x1
 	pre_disable_mmu_workaround
 	msr	sctlr_el2, x0
@@ -113,8 +113,6 @@ ENTRY(arm64_relocate_new_kernel)
 
 ENDPROC(arm64_relocate_new_kernel)
 
-.ltorg
-
 .align 3	/* To keep the 64-bit values below naturally aligned. */
 
 .Lcopy_end:
+4 −6
@@ -60,7 +60,7 @@ alternative_else_nop_endif
 	msr	ttbr0_el2, x4
 
 	mrs	x4, tcr_el1
-	ldr	x5, =TCR_EL2_MASK
+	mov_q	x5, TCR_EL2_MASK
 	and	x4, x4, x5
 	mov	x5, #TCR_EL2_RES1
 	orr	x4, x4, x5
@@ -102,7 +102,7 @@ alternative_else_nop_endif
 	 * as well as the EE bit on BE. Drop the A flag since the compiler
 	 * is allowed to generate unaligned accesses.
 	 */
-	ldr	x4, =(SCTLR_EL2_RES1 | (SCTLR_ELx_FLAGS & ~SCTLR_ELx_A))
+	mov_q	x4, (SCTLR_EL2_RES1 | (SCTLR_ELx_FLAGS & ~SCTLR_ELx_A))
 CPU_BE(	orr	x4, x4, #SCTLR_ELx_EE)
 	msr	sctlr_el2, x4
 	isb
@@ -142,7 +142,7 @@ reset:
 	 * case we coming via HVC_SOFT_RESTART.
 	 */
 	mrs	x5, sctlr_el2
-	ldr	x6, =SCTLR_ELx_FLAGS
+	mov_q	x6, SCTLR_ELx_FLAGS
 	bic	x5, x5, x6		// Clear SCTL_M and etc
 	pre_disable_mmu_workaround
 	msr	sctlr_el2, x5
@@ -155,11 +155,9 @@ reset:
 	eret
 
 1:	/* Bad stub call */
-	ldr	x0, =HVC_STUB_ERR
+	mov_q	x0, HVC_STUB_ERR
 	eret
 
 SYM_CODE_END(__kvm_handle_stub_hvc)
 
-	.ltorg
-
 	.popsection
+1 −1
@@ -436,7 +436,7 @@ SYM_FUNC_START(__cpu_setup)
 	 * Set/prepare TCR and TTBR. We use 512GB (39-bit) address range for
 	 * both user and kernel.
 	 */
-	ldr	x10, =TCR_TxSZ(VA_BITS) | TCR_CACHE_FLAGS | TCR_SMP_FLAGS | \
+	mov_q	x10, TCR_TxSZ(VA_BITS) | TCR_CACHE_FLAGS | TCR_SMP_FLAGS | \
 			TCR_TG_FLAGS | TCR_KASLR_FLAGS | TCR_ASID16 | \
 			TCR_TBI0 | TCR_A1 | TCR_KASAN_FLAGS
 	tcr_clear_errata_bits x10, x9, x5