git.baikalelectronics.ru Git - kernel.git/commitdiff
powerpc/64e: Remove MMU_FTR_USE_TLBRSRV and MMU_FTR_USE_PAIRED_MAS
authorChristophe Leroy <christophe.leroy@csgroup.eu>
Tue, 28 Jun 2022 14:48:55 +0000 (16:48 +0200)
committerMichael Ellerman <mpe@ellerman.id.au>
Wed, 29 Jun 2022 07:04:14 +0000 (17:04 +1000)
Commit a28b266b4168 ("powerpc: Remove platforms/wsp and associated
pieces") removed the last CPU having the MMU_FTRS_A2 features, and
commit abe841db5849 ("powerpc: Clean up MMU_FTRS_A2 and
MMU_FTR_TYPE_3E") removed MMU_FTRS_A2, which was the last user of
MMU_FTR_USE_TLBRSRV and MMU_FTR_USE_PAIRED_MAS.

Remove all code that relies on MMU_FTR_USE_TLBRSRV and
MMU_FTR_USE_PAIRED_MAS.

With this change done, a TLB miss can happen before the MMU feature
fixups are applied.

Signed-off-by: Christophe Leroy <christophe.leroy@csgroup.eu>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/cfd5a0ecdb1598da968832e1bddf7431ec267200.1656427701.git.christophe.leroy@csgroup.eu
arch/powerpc/include/asm/mmu.h
arch/powerpc/kernel/setup_64.c
arch/powerpc/mm/nohash/book3e_hugetlbpage.c
arch/powerpc/mm/nohash/tlb_low_64e.S

index 5f41565a1e5d71a770bd8b5c1970540d2960c9ca..860d0290ca4def85a6527d9214f07c5859ffb4cc 100644 (file)
  */
 #define MMU_FTR_NEED_DTLB_SW_LRU       ASM_CONST(0x00200000)
 
-/* Enable use of TLB reservation.  Processor should support tlbsrx.
- * instruction and MAS0[WQ].
- */
-#define MMU_FTR_USE_TLBRSRV            ASM_CONST(0x00800000)
-
-/* Use paired MAS registers (MAS7||MAS3, etc.)
- */
-#define MMU_FTR_USE_PAIRED_MAS         ASM_CONST(0x01000000)
-
 /* Doesn't support the B bit (1T segment) in SLBIE
  */
 #define MMU_FTR_NO_SLBIE_B             ASM_CONST(0x02000000)
@@ -180,9 +171,6 @@ enum {
 #ifdef CONFIG_PPC_83xx
                MMU_FTR_NEED_DTLB_SW_LRU |
 #endif
-#ifdef CONFIG_PPC_BOOK3E_64
-               MMU_FTR_USE_TLBRSRV | MMU_FTR_USE_PAIRED_MAS |
-#endif
 #ifdef CONFIG_PPC_BOOK3S_64
                MMU_FTR_KERNEL_RO |
 #ifdef CONFIG_PPC_64S_HASH_MMU
index 5761f08dae958f4d3020da80d07d953f8abd4557..2b2d0b0fbb30d8252b5db3e427be1cbc1177c657 100644 (file)
@@ -113,7 +113,6 @@ void __init setup_tlb_core_data(void)
                 * Should we panic instead?
                 */
                WARN_ONCE(smt_enabled_at_boot >= 2 &&
-                         !mmu_has_feature(MMU_FTR_USE_TLBRSRV) &&
                          book3e_htw_mode != PPC_HTW_E6500,
                          "%s: unsupported MMU configuration\n", __func__);
        }
index 307ca919d393052a38c27cc21950d4e5cfb58a9e..c7d4b317a823a8c6083d2285fa57c3aa7930bdac 100644 (file)
@@ -103,21 +103,11 @@ static inline int book3e_tlb_exists(unsigned long ea, unsigned long pid)
        int found = 0;
 
        mtspr(SPRN_MAS6, pid << 16);
-       if (mmu_has_feature(MMU_FTR_USE_TLBRSRV)) {
-               asm volatile(
-                       "li     %0,0\n"
-                       "tlbsx. 0,%1\n"
-                       "bne    1f\n"
-                       "li     %0,1\n"
-                       "1:\n"
-                       : "=&r"(found) : "r"(ea));
-       } else {
-               asm volatile(
-                       "tlbsx  0,%1\n"
-                       "mfspr  %0,0x271\n"
-                       "srwi   %0,%0,31\n"
-                       : "=&r"(found) : "r"(ea));
-       }
+       asm volatile(
+               "tlbsx  0,%1\n"
+               "mfspr  %0,0x271\n"
+               "srwi   %0,%0,31\n"
+               : "=&r"(found) : "r"(ea));
 
        return found;
 }
@@ -169,13 +159,9 @@ book3e_hugetlb_preload(struct vm_area_struct *vma, unsigned long ea, pte_t pte)
        mtspr(SPRN_MAS1, mas1);
        mtspr(SPRN_MAS2, mas2);
 
-       if (mmu_has_feature(MMU_FTR_USE_PAIRED_MAS)) {
-               mtspr(SPRN_MAS7_MAS3, mas7_3);
-       } else {
-               if (mmu_has_feature(MMU_FTR_BIG_PHYS))
-                       mtspr(SPRN_MAS7, upper_32_bits(mas7_3));
-               mtspr(SPRN_MAS3, lower_32_bits(mas7_3));
-       }
+       if (mmu_has_feature(MMU_FTR_BIG_PHYS))
+               mtspr(SPRN_MAS7, upper_32_bits(mas7_3));
+       mtspr(SPRN_MAS3, lower_32_bits(mas7_3));
 
        asm volatile ("tlbwe");
 
index 9e9ab3803fb2ffc78bcce5cb0b5ebc14e1d224dd..a59485c549a76d56fb192ef863466a19b32a9421 100644 (file)
@@ -152,16 +152,7 @@ tlb_miss_common_bolted:
        clrrdi  r15,r15,3
        beq     tlb_miss_fault_bolted   /* No PGDIR, bail */
 
-BEGIN_MMU_FTR_SECTION
-       /* Set the TLB reservation and search for existing entry. Then load
-        * the entry.
-        */
-       PPC_TLBSRX_DOT(0,R16)
-       ldx     r14,r14,r15             /* grab pgd entry */
-       beq     tlb_miss_done_bolted    /* tlb exists already, bail */
-MMU_FTR_SECTION_ELSE
        ldx     r14,r14,r15             /* grab pgd entry */
-ALT_MMU_FTR_SECTION_END_IFSET(MMU_FTR_USE_TLBRSRV)
 
        rldicl  r15,r16,64-PUD_SHIFT+3,64-PUD_INDEX_SIZE-3
        clrrdi  r15,r15,3
@@ -674,16 +665,7 @@ normal_tlb_miss:
        clrrdi  r14,r14,3
        or      r10,r15,r14
 
-BEGIN_MMU_FTR_SECTION
-       /* Set the TLB reservation and search for existing entry. Then load
-        * the entry.
-        */
-       PPC_TLBSRX_DOT(0,R16)
        ld      r14,0(r10)
-       beq     normal_tlb_miss_done
-MMU_FTR_SECTION_ELSE
-       ld      r14,0(r10)
-ALT_MMU_FTR_SECTION_END_IFSET(MMU_FTR_USE_TLBRSRV)
 
 finish_normal_tlb_miss:
        /* Check if required permissions are met */
@@ -727,13 +709,9 @@ finish_normal_tlb_miss:
        li      r11,MAS3_SW|MAS3_UW
        andc    r15,r15,r11
 1:
-BEGIN_MMU_FTR_SECTION
        srdi    r16,r15,32
        mtspr   SPRN_MAS3,r15
        mtspr   SPRN_MAS7,r16
-MMU_FTR_SECTION_ELSE
-       mtspr   SPRN_MAS7_MAS3,r15
-ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_PAIRED_MAS)
 
        tlbwe
 
@@ -809,13 +787,6 @@ virt_page_table_tlb_miss:
 #else
 1:
 #endif
-BEGIN_MMU_FTR_SECTION
-       /* Search if we already have a TLB entry for that virtual address, and
-        * if we do, bail out.
-        */
-       PPC_TLBSRX_DOT(0,R16)
-       beq     virt_page_table_tlb_miss_done
-END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_TLBRSRV)
 
        /* Now, we need to walk the page tables. First check if we are in
         * range.
@@ -866,41 +837,12 @@ END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_TLBRSRV)
        clrldi  r11,r15,4               /* remove region ID from RPN */
        ori     r10,r11,1               /* Or-in SR */
 
-BEGIN_MMU_FTR_SECTION
        srdi    r16,r10,32
        mtspr   SPRN_MAS3,r10
        mtspr   SPRN_MAS7,r16
-MMU_FTR_SECTION_ELSE
-       mtspr   SPRN_MAS7_MAS3,r10
-ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_PAIRED_MAS)
 
        tlbwe
 
-BEGIN_MMU_FTR_SECTION
-virt_page_table_tlb_miss_done:
-
-       /* We have overridden MAS2:EPN but currently our primary TLB miss
-        * handler will always restore it so that should not be an issue,
-        * if we ever optimize the primary handler to not write MAS2 on
-        * some cases, we'll have to restore MAS2:EPN here based on the
-        * original fault's DEAR. If we do that we have to modify the
-        * ITLB miss handler to also store SRR0 in the exception frame
-        * as DEAR.
-        *
-        * However, one nasty thing we did is we cleared the reservation
-        * (well, potentially we did). We do a trick here thus if we
-        * are not a level 0 exception (we interrupted the TLB miss) we
-        * offset the return address by -4 in order to replay the tlbsrx
-        * instruction there
-        */
-       subf    r10,r13,r12
-       cmpldi  cr0,r10,PACA_EXTLB+EX_TLB_SIZE
-       bne-    1f
-       ld      r11,PACA_EXTLB+EX_TLB_SIZE+EX_TLB_SRR0(r13)
-       addi    r10,r11,-4
-       std     r10,PACA_EXTLB+EX_TLB_SIZE+EX_TLB_SRR0(r13)
-1:
-END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_TLBRSRV)
        /* Return to caller, normal case */
        TLB_MISS_EPILOG_SUCCESS
        rfi
@@ -1115,13 +1057,9 @@ htw_tlb_miss:
         */
        ori     r10,r15,(BOOK3E_PAGESZ_4K << MAS3_SPSIZE_SHIFT)
 
-BEGIN_MMU_FTR_SECTION
        srdi    r16,r10,32
        mtspr   SPRN_MAS3,r10
        mtspr   SPRN_MAS7,r16
-MMU_FTR_SECTION_ELSE
-       mtspr   SPRN_MAS7_MAS3,r10
-ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_PAIRED_MAS)
 
        tlbwe
 
@@ -1202,13 +1140,9 @@ tlb_load_linear:
        clrldi  r10,r10,4               /* clear region bits */
        ori     r10,r10,MAS3_SR|MAS3_SW|MAS3_SX
 
-BEGIN_MMU_FTR_SECTION
        srdi    r16,r10,32
        mtspr   SPRN_MAS3,r10
        mtspr   SPRN_MAS7,r16
-MMU_FTR_SECTION_ELSE
-       mtspr   SPRN_MAS7_MAS3,r10
-ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_PAIRED_MAS)
 
        tlbwe