Commit bccc5898 authored by Christophe Leroy's avatar Christophe Leroy Committed by Michael Ellerman
Browse files

powerpc/8xx: Always pin kernel text TLB



There is no big point in not pinning kernel text anymore, as now
we can keep pinned TLB even with things like DEBUG_PAGEALLOC.

Remove CONFIG_PIN_TLB_TEXT, making kernel text pinning unconditional.

Signed-off-by: Christophe Leroy <christophe.leroy@csgroup.eu>
[mpe: Drop ifdef around mmu_pin_tlb() to fix build errors]
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/203b89de491e1379f1677a2685211b7c32adfff0.1606231483.git.christophe.leroy@csgroup.eu
parent 613df979
Loading
Loading
Loading
Loading
+1 −2
Original line number Diff line number Diff line
@@ -808,8 +808,7 @@ config DATA_SHIFT_BOOL
	bool "Set custom data alignment"
	depends on ADVANCED_OPTIONS
	depends on STRICT_KERNEL_RWX || DEBUG_PAGEALLOC
	depends on PPC_BOOK3S_32 || (PPC_8xx && !PIN_TLB_DATA && \
				     (!PIN_TLB_TEXT || !STRICT_KERNEL_RWX))
	depends on PPC_BOOK3S_32 || (PPC_8xx && !PIN_TLB_DATA && !STRICT_KERNEL_RWX)
	help
	  This option allows you to set the kernel data alignment. When
	  RAM is mapped by blocks, the alignment needs to fit the size and
+3 −19
Original line number Diff line number Diff line
@@ -42,15 +42,6 @@
#endif
.endm

/*
 * We need an ITLB miss handler for kernel addresses if:
 * - Either we have modules
 * - Or we have not pinned the first 8M
 */
#if defined(CONFIG_MODULES) || !defined(CONFIG_PIN_TLB_TEXT)
#define ITLB_MISS_KERNEL	1
#endif

/*
 * Value for the bits that have fixed value in RPN entries.
 * Also used for tagging DAR for DTLBerror.
@@ -209,12 +200,12 @@ InstructionTLBMiss:
	mfspr	r10, SPRN_SRR0	/* Get effective address of fault */
	INVALIDATE_ADJACENT_PAGES_CPU15(r10)
	mtspr	SPRN_MD_EPN, r10
#ifdef ITLB_MISS_KERNEL
#ifdef CONFIG_MODULES
	mfcr	r11
	compare_to_kernel_boundary r10, r10
#endif
	mfspr	r10, SPRN_M_TWB	/* Get level 1 table */
#ifdef ITLB_MISS_KERNEL
#ifdef CONFIG_MODULES
	blt+	3f
	rlwinm	r10, r10, 0, 20, 31
	oris	r10, r10, (swapper_pg_dir - PAGE_OFFSET)@ha
@@ -618,10 +609,6 @@ start_here:
	lis	r0, (MD_TWAM | MD_RSV4I)@h
	mtspr	SPRN_MD_CTR, r0
#endif
#ifndef CONFIG_PIN_TLB_TEXT
	li	r0, 0
	mtspr	SPRN_MI_CTR, r0
#endif
#if !defined(CONFIG_PIN_TLB_DATA) && !defined(CONFIG_PIN_TLB_IMMR)
	lis	r0, MD_TWAM@h
	mtspr	SPRN_MD_CTR, r0
@@ -717,7 +704,6 @@ initial_mmu:
	mtspr	SPRN_DER, r8
	blr

#ifdef CONFIG_PIN_TLB
_GLOBAL(mmu_pin_tlb)
	lis	r9, (1f - PAGE_OFFSET)@h
	ori	r9, r9, (1f - PAGE_OFFSET)@l
@@ -739,7 +725,6 @@ _GLOBAL(mmu_pin_tlb)
	mtspr	SPRN_MD_CTR, r6
	tlbia

#ifdef CONFIG_PIN_TLB_TEXT
	LOAD_REG_IMMEDIATE(r5, 28 << 8)
	LOAD_REG_IMMEDIATE(r6, PAGE_OFFSET)
	LOAD_REG_IMMEDIATE(r7, MI_SVALID | MI_PS8MEG | _PMD_ACCESSED)
@@ -760,7 +745,7 @@ _GLOBAL(mmu_pin_tlb)
	bdnzt	lt, 2b
	lis	r0, MI_RSV4I@h
	mtspr	SPRN_MI_CTR, r0
#endif

	LOAD_REG_IMMEDIATE(r5, 28 << 8 | MD_TWAM)
#ifdef CONFIG_PIN_TLB_DATA
	LOAD_REG_IMMEDIATE(r6, PAGE_OFFSET)
@@ -818,7 +803,6 @@ _GLOBAL(mmu_pin_tlb)
	mtspr	SPRN_SRR1, r10
	mtspr	SPRN_SRR0, r11
	rfi
#endif /* CONFIG_PIN_TLB */

/*
 * We put a few things here that have to be page-aligned.
+1 −2
Original line number Diff line number Diff line
@@ -186,7 +186,6 @@ void mmu_mark_initmem_nx(void)
	mmu_mapin_ram_chunk(0, boundary, PAGE_KERNEL_TEXT, false);
	mmu_mapin_ram_chunk(boundary, einittext8, PAGE_KERNEL, false);

	if (IS_ENABLED(CONFIG_PIN_TLB_TEXT))
	mmu_pin_tlb(block_mapped_ram, false);
}

+0 −7
Original line number Diff line number Diff line
@@ -194,13 +194,6 @@ config PIN_TLB_IMMR
	  CONFIG_PIN_TLB_DATA is also selected, it will reduce
	  CONFIG_PIN_TLB_DATA to 24 Mbytes.

config PIN_TLB_TEXT
	bool "Pinned TLB for TEXT"
	depends on PIN_TLB
	default y
	help
	  This pins kernel text with 8M pages.

endmenu

endmenu