Commit ac3c76cc authored by Ard Biesheuvel, committed by Ingo Molnar
Browse files

efi/libstub/x86: Use mandatory 16-byte stack alignment in mixed mode



Reduce the stack frame of the EFI stub's mixed mode thunk routine by
8 bytes, by moving the GDT and return addresses to EBP and EBX, which
we need to preserve anyway, since their top halves will be cleared by
the call into 32-bit firmware code. Doing so results in the UEFI code
being entered with a 16 byte aligned stack, as mandated by the UEFI
spec, fixing the last occurrence in the 64-bit kernel where we violate
this requirement.

Also, move the saved GDT from a global variable to an unused part of the
stack frame, and touch up some other parts of the code.

Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Link: https://lore.kernel.org/r/20200113172245.27925-3-ardb@kernel.org
parent 796eb8d2
Loading
Loading
Loading
Loading
+13 −33
Original line number Diff line number Diff line
@@ -27,12 +27,9 @@ SYM_FUNC_START(__efi64_thunk)
	push	%rbp
	push	%rbx

	subq	$8, %rsp
	leaq	1f(%rip), %rax
	movl	%eax, 4(%rsp)
	leaq	efi_gdt64(%rip), %rax
	movl	%eax, (%rsp)
	movl	%eax, 2(%rax)		/* Fixup the gdt base address */
	leaq	1f(%rip), %rbp
	leaq	efi_gdt64(%rip), %rbx
	movl	%ebx, 2(%rbx)		/* Fixup the gdt base address */

	movl	%ds, %eax
	push	%rax
@@ -48,12 +45,10 @@ SYM_FUNC_START(__efi64_thunk)
	movl	%esi, 0x0(%rsp)
	movl	%edx, 0x4(%rsp)
	movl	%ecx, 0x8(%rsp)
	movq	%r8, %rsi
	movl	%esi, 0xc(%rsp)
	movq	%r9, %rsi
	movl	%esi,  0x10(%rsp)
	movl	%r8d, 0xc(%rsp)
	movl	%r9d, 0x10(%rsp)

	sgdt	save_gdt(%rip)
	sgdt	0x14(%rsp)

	/*
	 * Switch to gdt with 32-bit segments. This is the firmware GDT
@@ -68,11 +63,10 @@ SYM_FUNC_START(__efi64_thunk)
	pushq	%rax
	lretq

1:	addq	$32, %rsp
1:	lgdt	0x14(%rsp)
	addq	$32, %rsp
	movq	%rdi, %rax

	lgdt	save_gdt(%rip)

	pop	%rbx
	movl	%ebx, %ss
	pop	%rbx
@@ -83,15 +77,9 @@ SYM_FUNC_START(__efi64_thunk)
	/*
	 * Convert 32-bit status code into 64-bit.
	 */
	test	%rax, %rax
	jz	1f
	movl	%eax, %ecx
	andl	$0x0fffffff, %ecx
	andl	$0xf0000000, %eax
	shl	$32, %rax
	or	%rcx, %rax
1:
	addq	$8, %rsp
	roll	$1, %eax
	rorq	$1, %rax

	pop	%rbx
	pop	%rbp
	ret
@@ -135,9 +123,7 @@ SYM_FUNC_START_LOCAL(efi_enter32)
	 */
	cli

	movl	56(%esp), %eax
	movl	%eax, 2(%eax)
	lgdtl	(%eax)
	lgdtl	(%ebx)

	movl	%cr4, %eax
	btsl	$(X86_CR4_PAE_BIT), %eax
@@ -154,9 +140,8 @@ SYM_FUNC_START_LOCAL(efi_enter32)
	xorl	%eax, %eax
	lldt	%ax

	movl	60(%esp), %eax
	pushl	$__KERNEL_CS
	pushl	%eax
	pushl	%ebp

	/* Enable paging */
	movl	%cr0, %eax
@@ -172,11 +157,6 @@ SYM_DATA_START(efi32_boot_gdt)
	.quad	0
SYM_DATA_END(efi32_boot_gdt)

SYM_DATA_START_LOCAL(save_gdt)
	.word	0
	.quad	0
SYM_DATA_END(save_gdt)

SYM_DATA_START(efi_gdt64)
	.word	efi_gdt64_end - efi_gdt64
	.long	0			/* Filled out by user */