Commit 34fdce69 authored by Peter Zijlstra
Browse files

x86: Change {JMP,CALL}_NOSPEC argument



In order to change the {JMP,CALL}_NOSPEC macros to call out-of-line
versions of the retpoline magic, we need to remove the '%' from the
argument, such that we can paste it onto symbol names.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Acked-by: Josh Poimboeuf <jpoimboe@redhat.com>
Link: https://lkml.kernel.org/r/20200428191700.151623523@infradead.org
parent ca3f0d80
Loading
Loading
Loading
Loading
+2 −2
Original line number Diff line number Diff line
@@ -2758,7 +2758,7 @@ SYM_FUNC_START(aesni_xts_crypt8)
	pxor INC, STATE4
	movdqu IV, 0x30(OUTP)

	CALL_NOSPEC %r11
	CALL_NOSPEC r11

	movdqu 0x00(OUTP), INC
	pxor INC, STATE1
@@ -2803,7 +2803,7 @@ SYM_FUNC_START(aesni_xts_crypt8)
	_aesni_gf128mul_x_ble()
	movups IV, (IVP)

	CALL_NOSPEC %r11
	CALL_NOSPEC r11

	movdqu 0x40(OUTP), INC
	pxor INC, STATE1
+1 −1
Original line number Diff line number Diff line
@@ -1228,7 +1228,7 @@ SYM_FUNC_START_LOCAL(camellia_xts_crypt_16way)
	vpxor 14 * 16(%rax), %xmm15, %xmm14;
	vpxor 15 * 16(%rax), %xmm15, %xmm15;

	CALL_NOSPEC %r9;
	CALL_NOSPEC r9;

	addq $(16 * 16), %rsp;

+1 −1
Original line number Diff line number Diff line
@@ -1339,7 +1339,7 @@ SYM_FUNC_START_LOCAL(camellia_xts_crypt_32way)
	vpxor 14 * 32(%rax), %ymm15, %ymm14;
	vpxor 15 * 32(%rax), %ymm15, %ymm15;

	CALL_NOSPEC %r9;
	CALL_NOSPEC r9;

	addq $(16 * 32), %rsp;

+13 −13
Original line number Diff line number Diff line
@@ -75,7 +75,7 @@

.text
SYM_FUNC_START(crc_pcl)
#define    bufp		%rdi
#define    bufp		rdi
#define    bufp_dw	%edi
#define    bufp_w	%di
#define    bufp_b	%dil
@@ -105,9 +105,9 @@ SYM_FUNC_START(crc_pcl)
	## 1) ALIGN:
	################################################################

	mov     bufp, bufptmp		# rdi = *buf
	neg     bufp
	and     $7, bufp		# calculate the unalignment amount of
	mov     %bufp, bufptmp		# rdi = *buf
	neg     %bufp
	and     $7, %bufp		# calculate the unalignment amount of
					# the address
	je      proc_block		# Skip if aligned

@@ -123,13 +123,13 @@ SYM_FUNC_START(crc_pcl)
do_align:
	#### Calculate CRC of unaligned bytes of the buffer (if any)
	movq    (bufptmp), tmp		# load a quadward from the buffer
	add     bufp, bufptmp		# align buffer pointer for quadword
	add     %bufp, bufptmp		# align buffer pointer for quadword
					# processing
	sub     bufp, len		# update buffer length
	sub     %bufp, len		# update buffer length
align_loop:
	crc32b  %bl, crc_init_dw 	# compute crc32 of 1-byte
	shr     $8, tmp			# get next byte
	dec     bufp
	dec     %bufp
	jne     align_loop

proc_block:
@@ -169,10 +169,10 @@ continue_block:
	xor     crc2, crc2

	## branch into array
	lea	jump_table(%rip), bufp
	movzxw  (bufp, %rax, 2), len
	lea	crc_array(%rip), bufp
	lea     (bufp, len, 1), bufp
	lea	jump_table(%rip), %bufp
	movzxw  (%bufp, %rax, 2), len
	lea	crc_array(%rip), %bufp
	lea     (%bufp, len, 1), %bufp
	JMP_NOSPEC bufp

	################################################################
@@ -218,9 +218,9 @@ LABEL crc_ %i
	## 4) Combine three results:
	################################################################

	lea	(K_table-8)(%rip), bufp		# first entry is for idx 1
	lea	(K_table-8)(%rip), %bufp		# first entry is for idx 1
	shlq    $3, %rax			# rax *= 8
	pmovzxdq (bufp,%rax), %xmm0		# 2 consts: K1:K2
	pmovzxdq (%bufp,%rax), %xmm0		# 2 consts: K1:K2
	leal	(%eax,%eax,2), %eax		# rax *= 3 (total *24)
	subq    %rax, tmp			# tmp -= rax*24

+3 −3
Original line number Diff line number Diff line
@@ -816,7 +816,7 @@ SYM_CODE_START(ret_from_fork)

	/* kernel thread */
1:	movl	%edi, %eax
	CALL_NOSPEC %ebx
	CALL_NOSPEC ebx
	/*
	 * A kernel thread is allowed to return here after successfully
	 * calling do_execve().  Exit to userspace to complete the execve()
@@ -1501,7 +1501,7 @@ SYM_CODE_START_LOCAL_NOALIGN(common_exception_read_cr2)

	TRACE_IRQS_OFF
	movl	%esp, %eax			# pt_regs pointer
	CALL_NOSPEC %edi
	CALL_NOSPEC edi
	jmp	ret_from_exception
SYM_CODE_END(common_exception_read_cr2)

@@ -1522,7 +1522,7 @@ SYM_CODE_START_LOCAL_NOALIGN(common_exception)

	TRACE_IRQS_OFF
	movl	%esp, %eax			# pt_regs pointer
	CALL_NOSPEC %edi
	CALL_NOSPEC edi
	jmp	ret_from_exception
SYM_CODE_END(common_exception)

Loading