Commit 69fc06f7 authored by Linus Torvalds
Browse files

Merge tag 'objtool-core-2020-06-01' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip

Pull objtool updates from Ingo Molnar:
 "There are a lot of objtool changes in this cycle, all across the map:

   - Speed up objtool significantly, especially when there is a large
     number of sections

   - Improve objtool's understanding of special instructions such as
     IRET, to reduce the number of annotations required

   - Implement 'noinstr' validation

   - Do baby steps for non-x86 objtool use

   - Simplify/fix retpoline decoding

   - Add vmlinux validation

   - Improve documentation

   - Fix various bugs and apply smaller cleanups"

* tag 'objtool-core-2020-06-01' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip: (54 commits)
  objtool: Enable compilation of objtool for all architectures
  objtool: Move struct objtool_file into arch-independent header
  objtool: Exit successfully when requesting help
  objtool: Add check_kcov_mode() to the uaccess safelist
  samples/ftrace: Fix asm function ELF annotations
  objtool: optimize add_dead_ends for split sections
  objtool: use gelf_getsymshndx to handle >64k sections
  objtool: Allow no-op CFI ops in alternatives
  x86/retpoline: Fix retpoline unwind
  x86: Change {JMP,CALL}_NOSPEC argument
  x86: Simplify retpoline declaration
  x86/speculation: Change FILL_RETURN_BUFFER to work with objtool
  objtool: Add support for intra-function calls
  objtool: Move the IRET hack into the arch decoder
  objtool: Remove INSN_STACK
  objtool: Make handle_insn_ops() unconditional
  objtool: Rework allocating stack_ops on decode
  objtool: UNWIND_HINT_RET_OFFSET should not check registers
  objtool: is_fentry_call() crashes if call has no destination
  x86,smap: Fix smap_{save,restore}() alternatives
  ...
parents 60056060 0decf1f8
Loading
Loading
Loading
Loading
+2 −2
Original line number Diff line number Diff line
@@ -2758,7 +2758,7 @@ SYM_FUNC_START(aesni_xts_crypt8)
	pxor INC, STATE4
	movdqu IV, 0x30(OUTP)

	CALL_NOSPEC %r11
	CALL_NOSPEC r11

	movdqu 0x00(OUTP), INC
	pxor INC, STATE1
@@ -2803,7 +2803,7 @@ SYM_FUNC_START(aesni_xts_crypt8)
	_aesni_gf128mul_x_ble()
	movups IV, (IVP)

	CALL_NOSPEC %r11
	CALL_NOSPEC r11

	movdqu 0x40(OUTP), INC
	pxor INC, STATE1
+1 −1
Original line number Diff line number Diff line
@@ -1228,7 +1228,7 @@ SYM_FUNC_START_LOCAL(camellia_xts_crypt_16way)
	vpxor 14 * 16(%rax), %xmm15, %xmm14;
	vpxor 15 * 16(%rax), %xmm15, %xmm15;

	CALL_NOSPEC %r9;
	CALL_NOSPEC r9;

	addq $(16 * 16), %rsp;

+1 −1
Original line number Diff line number Diff line
@@ -1339,7 +1339,7 @@ SYM_FUNC_START_LOCAL(camellia_xts_crypt_32way)
	vpxor 14 * 32(%rax), %ymm15, %ymm14;
	vpxor 15 * 32(%rax), %ymm15, %ymm15;

	CALL_NOSPEC %r9;
	CALL_NOSPEC r9;

	addq $(16 * 32), %rsp;

+13 −13
Original line number Diff line number Diff line
@@ -75,7 +75,7 @@

.text
SYM_FUNC_START(crc_pcl)
#define    bufp		%rdi
#define    bufp		rdi
#define    bufp_dw	%edi
#define    bufp_w	%di
#define    bufp_b	%dil
@@ -105,9 +105,9 @@ SYM_FUNC_START(crc_pcl)
	## 1) ALIGN:
	################################################################

	mov     bufp, bufptmp		# rdi = *buf
	neg     bufp
	and     $7, bufp		# calculate the unalignment amount of
	mov     %bufp, bufptmp		# rdi = *buf
	neg     %bufp
	and     $7, %bufp		# calculate the unalignment amount of
					# the address
	je      proc_block		# Skip if aligned

@@ -123,13 +123,13 @@ SYM_FUNC_START(crc_pcl)
do_align:
	#### Calculate CRC of unaligned bytes of the buffer (if any)
	movq    (bufptmp), tmp		# load a quadward from the buffer
	add     bufp, bufptmp		# align buffer pointer for quadword
	add     %bufp, bufptmp		# align buffer pointer for quadword
					# processing
	sub     bufp, len		# update buffer length
	sub     %bufp, len		# update buffer length
align_loop:
	crc32b  %bl, crc_init_dw 	# compute crc32 of 1-byte
	shr     $8, tmp			# get next byte
	dec     bufp
	dec     %bufp
	jne     align_loop

proc_block:
@@ -169,10 +169,10 @@ continue_block:
	xor     crc2, crc2

	## branch into array
	lea	jump_table(%rip), bufp
	movzxw  (bufp, %rax, 2), len
	lea	crc_array(%rip), bufp
	lea     (bufp, len, 1), bufp
	lea	jump_table(%rip), %bufp
	movzxw  (%bufp, %rax, 2), len
	lea	crc_array(%rip), %bufp
	lea     (%bufp, len, 1), %bufp
	JMP_NOSPEC bufp

	################################################################
@@ -218,9 +218,9 @@ LABEL crc_ %i
	## 4) Combine three results:
	################################################################

	lea	(K_table-8)(%rip), bufp		# first entry is for idx 1
	lea	(K_table-8)(%rip), %bufp		# first entry is for idx 1
	shlq    $3, %rax			# rax *= 8
	pmovzxdq (bufp,%rax), %xmm0		# 2 consts: K1:K2
	pmovzxdq (%bufp,%rax), %xmm0		# 2 consts: K1:K2
	leal	(%eax,%eax,2), %eax		# rax *= 3 (total *24)
	subq    %rax, tmp			# tmp -= rax*24

+3 −3
Original line number Diff line number Diff line
@@ -816,7 +816,7 @@ SYM_CODE_START(ret_from_fork)

	/* kernel thread */
1:	movl	%edi, %eax
	CALL_NOSPEC %ebx
	CALL_NOSPEC ebx
	/*
	 * A kernel thread is allowed to return here after successfully
	 * calling do_execve().  Exit to userspace to complete the execve()
@@ -1501,7 +1501,7 @@ SYM_CODE_START_LOCAL_NOALIGN(common_exception_read_cr2)

	TRACE_IRQS_OFF
	movl	%esp, %eax			# pt_regs pointer
	CALL_NOSPEC %edi
	CALL_NOSPEC edi
	jmp	ret_from_exception
SYM_CODE_END(common_exception_read_cr2)

@@ -1522,7 +1522,7 @@ SYM_CODE_START_LOCAL_NOALIGN(common_exception)

	TRACE_IRQS_OFF
	movl	%esp, %eax			# pt_regs pointer
	CALL_NOSPEC %edi
	CALL_NOSPEC edi
	jmp	ret_from_exception
SYM_CODE_END(common_exception)

Loading