Commit 865ae6f2 authored by Nicholas Piggin, committed by Michael Ellerman
Browse files

powerpc/64s: Tidy machine check SLB logging



Since ISA v3.0, SLB no longer uses the slb_cache, and stab_rr is no
longer correlated with SLB allocation. Move those to pre-3.0.

While here, improve some alignments and reduce whitespace.

Signed-off-by: Nicholas Piggin <npiggin@gmail.com>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/20201128070728.825934-9-npiggin@gmail.com
parent 4a869531
Loading
Loading
Loading
Loading
+21 −18
Original line number Diff line number Diff line
@@ -255,7 +255,6 @@ void slb_dump_contents(struct slb_entry *slb_ptr)
		return;

	pr_err("SLB contents of cpu 0x%x\n", smp_processor_id());
	pr_err("Last SLB entry inserted at slot %d\n", get_paca()->stab_rr);

	for (i = 0; i < mmu_slb_size; i++) {
		e = slb_ptr->esid;
@@ -265,12 +264,12 @@ void slb_dump_contents(struct slb_entry *slb_ptr)
		if (!e && !v)
			continue;

		pr_err("%02d %016lx %016lx\n", i, e, v);
		pr_err("%02d %016lx %016lx %s\n", i, e, v,
				(e & SLB_ESID_V) ? "VALID" : "NOT VALID");

		if (!(e & SLB_ESID_V)) {
			pr_err("\n");
		if (!(e & SLB_ESID_V))
			continue;
		}

		llp = v & SLB_VSID_LLP;
		if (v & SLB_VSID_B_1T) {
			pr_err("     1T ESID=%9lx VSID=%13lx LLP:%3lx\n",
@@ -282,7 +281,10 @@ void slb_dump_contents(struct slb_entry *slb_ptr)
			       (v & ~SLB_VSID_B) >> SLB_VSID_SHIFT, llp);
		}
	}
	pr_err("----------------------------------\n");

	if (!early_cpu_has_feature(CPU_FTR_ARCH_300)) {
		/* RR is not so useful as it's often not used for allocation */
		pr_err("SLB RR allocator index %d\n", get_paca()->stab_rr);

		/* Dump slb cache entires as well. */
		pr_err("SLB cache ptr value = %d\n", get_paca()->slb_save_cache_ptr);
@@ -294,6 +296,7 @@ void slb_dump_contents(struct slb_entry *slb_ptr)
		for (i = n; i < SLB_CACHE_ENTRIES; i++)
			pr_err("%02d EA[0-35]=%9x\n", i, get_paca()->slb_cache[i]);
	}
}

void slb_vmalloc_update(void)
{