Commit cedbb8b7 authored by Marc Zyngier

arm64: KVM: VHE: Patch out kern_hyp_va



The kern_hyp_va macro is pretty meaningless with VHE, as there is
only one mapping - the kernel one.

In order to keep the code readable and efficient, use runtime
patching to replace the 'and' instruction used to compute the VA
with a 'nop'.

Reviewed-by: Christoffer Dall <christoffer.dall@linaro.org>
Signed-off-by: Marc Zyngier <marc.zyngier@arm.com>
parent b81125c7
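
To make the commit message concrete, here is a minimal sketch of the runtime-patching
pattern it describes, written against the kernel's ALTERNATIVE() macro. It mirrors the
__kern_hyp_va() helper added in the second hunk below and is purely illustrative: the
function name is made up, while HYP_PAGE_OFFSET_MASK and ARM64_HAS_VIRT_HOST_EXTN come
from the headers this patch touches.

/* Illustrative sketch only -- not part of this patch. */
#include <asm/alternative.h>	/* ALTERNATIVE() */
#include <asm/cpufeature.h>	/* ARM64_HAS_VIRT_HOST_EXTN */
#include <asm/kvm_mmu.h>	/* HYP_PAGE_OFFSET_MASK */

static inline unsigned long example_kern_hyp_va(unsigned long v)
{
	/*
	 * Build time emits the 'and' (mask the top bits to form the
	 * HYP VA).  At boot, on CPUs with VHE, the alternatives
	 * framework rewrites it to a 'nop', so the kernel VA is
	 * returned unchanged.
	 */
	asm volatile(ALTERNATIVE("and %0, %0, %1",
				 "nop",
				 ARM64_HAS_VIRT_HOST_EXTN)
		     : "+r" (v) : "i" (HYP_PAGE_OFFSET_MASK));
	return v;
}
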
+11 −1
@@ -23,13 +23,16 @@
 #include <asm/cpufeature.h>
 
 /*
- * As we only have the TTBR0_EL2 register, we cannot express
+ * As ARMv8.0 only has the TTBR0_EL2 register, we cannot express
  * "negative" addresses. This makes it impossible to directly share
  * mappings with the kernel.
  *
  * Instead, give the HYP mode its own VA region at a fixed offset from
  * the kernel by just masking the top bits (which are all ones for a
  * kernel address).
+ *
+ * ARMv8.1 (using VHE) does have a TTBR1_EL2, and doesn't use these
+ * macros (the entire kernel runs at EL2).
  */
 #define HYP_PAGE_OFFSET_SHIFT	VA_BITS
 #define HYP_PAGE_OFFSET_MASK	((UL(1) << HYP_PAGE_OFFSET_SHIFT) - 1)
@@ -56,12 +59,19 @@
 
 #ifdef __ASSEMBLY__
 
+#include <asm/alternative.h>
+#include <asm/cpufeature.h>
+
 /*
  * Convert a kernel VA into a HYP VA.
  * reg: VA to be converted.
  */
 .macro kern_hyp_va	reg
+alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
 	and	\reg, \reg, #HYP_PAGE_OFFSET_MASK
+alternative_else
+	nop
+alternative_endif
 .endm
 
 #else
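
As a worked example of the masking described in the comment above (a hedged sketch, not
from this patch; the caller name, the VA value, and VA_BITS = 48 are assumptions): under
non-VHE the retained 'and' strips the kernel's top address bits, while under VHE the
patched 'nop' leaves the VA untouched, since the kernel itself already runs at EL2.

/* Hypothetical host-side caller -- names and addresses are illustrative. */
#include <linux/kvm_host.h>	/* struct kvm_vcpu */
#include <asm/kvm_mmu.h>	/* kern_hyp_va() */

static struct kvm_vcpu *example_vcpu_for_hyp(struct kvm_vcpu *vcpu)
{
	/*
	 * With VA_BITS = 48, a kernel VA such as 0xffff000008abc000
	 * is masked down to 0x0000000008abc000 by the 'and'; on VHE
	 * the patched 'nop' returns the pointer unchanged.
	 */
	return kern_hyp_va(vcpu);
}
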
+22 −3
@@ -25,9 +25,28 @@
 
 #define __hyp_text __section(.hyp.text) notrace
 
-#define kern_hyp_va(v) (typeof(v))((unsigned long)(v) & HYP_PAGE_OFFSET_MASK)
-#define hyp_kern_va(v) (typeof(v))((unsigned long)(v) - HYP_PAGE_OFFSET \
-						      + PAGE_OFFSET)
+static inline unsigned long __kern_hyp_va(unsigned long v)
+{
+	asm volatile(ALTERNATIVE("and %0, %0, %1",
+				 "nop",
+				 ARM64_HAS_VIRT_HOST_EXTN)
+		     : "+r" (v) : "i" (HYP_PAGE_OFFSET_MASK));
+	return v;
+}
+
+#define kern_hyp_va(v) (typeof(v))(__kern_hyp_va((unsigned long)(v)))
+
+static inline unsigned long __hyp_kern_va(unsigned long v)
+{
+	u64 offset = PAGE_OFFSET - HYP_PAGE_OFFSET;
+	asm volatile(ALTERNATIVE("add %0, %0, %1",
+				 "nop",
+				 ARM64_HAS_VIRT_HOST_EXTN)
+		     : "+r" (v) : "r" (offset));
+	return v;
+}
+
+#define hyp_kern_va(v) (typeof(v))(__hyp_kern_va((unsigned long)(v)))
+
 /**
  * hyp_alternate_select - Generates patchable code sequences that are