git.baikalelectronics.ru Git - kernel.git/commitdiff
powerpc/32: On syscall entry, enable instruction translation at the same time as...
authorChristophe Leroy <christophe.leroy@csgroup.eu>
Mon, 8 Feb 2021 15:10:22 +0000 (15:10 +0000)
committerMichael Ellerman <mpe@ellerman.id.au>
Thu, 11 Feb 2021 12:35:08 +0000 (23:35 +1100)
On 40x and 8xx, kernel text is pinned.
On book3s/32, kernel text is mapped by BATs.

Enable instruction translation at the same time as data translation, it
makes things simpler.

MSR_RI can also be set at the same time because srr0/srr1 are already
saved and r1 is set properly.

On booke, translation is always on, so in the end all PPC32 platforms
have translation enabled early.

This reduces the null_syscall benchmark by 13 cycles on 8xx
(296 ==> 283 cycles).

Signed-off-by: Christophe Leroy <christophe.leroy@csgroup.eu>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/3fe8891c814103a3549efc1d4e7ffc828bba5993.1612796617.git.christophe.leroy@csgroup.eu
arch/powerpc/kernel/head_32.h
arch/powerpc/kernel/head_booke.h

index d481e351f006832063e9444f7b8d530d6cb672a5..97d8465eb12a3b9abfe7fc9a7331649466f938d7 100644 (file)
        lwz     r1,TASK_STACK-THREAD(r12)
        beq-    99f
        addi    r1, r1, THREAD_SIZE - INT_FRAME_SIZE
-       LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~(MSR_IR | MSR_RI)) /* can take DTLB miss */
-       mtmsr   r10
-       isync
+       LOAD_REG_IMMEDIATE(r10, MSR_KERNEL)             /* can take exceptions */
+       mtspr   SPRN_SRR1, r10
+       lis     r10, 1f@h
+       ori     r10, r10, 1f@l
+       mtspr   SPRN_SRR0, r10
+       rfi
+1:
        tovirt(r12, r12)
        stw     r11,GPR1(r1)
        stw     r11,0(r1)
        stw     r10,_CCR(r11)           /* save registers */
 #ifdef CONFIG_40x
        rlwinm  r9,r9,0,14,12           /* clear MSR_WE (necessary?) */
-#else
-       LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~MSR_IR) /* can take exceptions */
-       mtmsr   r10                     /* (except for mach check in rtas) */
 #endif
        lis     r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
        stw     r2,GPR2(r11)
 #endif
 
 3:
-       lis     r11, transfer_to_syscall@h
-       ori     r11, r11, transfer_to_syscall@l
 #ifdef CONFIG_TRACE_IRQFLAGS
        /*
         * If MSR is changing we need to keep interrupts disabled at this point
 #else
        LOAD_REG_IMMEDIATE(r10, MSR_KERNEL | MSR_EE)
 #endif
-#if defined(CONFIG_PPC_8xx) && defined(CONFIG_PERF_EVENTS)
-       mtspr   SPRN_NRI, r0
-#endif
-       mtspr   SPRN_SRR1,r10
-       mtspr   SPRN_SRR0,r11
-       rfi                             /* jump to handler, enable MMU */
-#ifdef CONFIG_40x
-       b .     /* Prevent prefetch past rfi */
-#endif
+       mtmsr   r10
+       b       transfer_to_syscall             /* jump to handler */
 99:    b       ret_from_kernel_syscall
 .endm
 
index 706cd93689925fa23923edd9b49a821fd22959c1..b3c502c503a0a9ecafbbeafadfb146f437167279 100644 (file)
@@ -157,8 +157,6 @@ ALT_FTR_SECTION_END_IFSET(CPU_FTR_EMB_HV)
        stw     r12,4(r11)
 
 3:
-       lis     r11, transfer_to_syscall@h
-       ori     r11, r11, transfer_to_syscall@l
 #ifdef CONFIG_TRACE_IRQFLAGS
        /*
         * If MSR is changing we need to keep interrupts disabled at this point
@@ -172,9 +170,8 @@ ALT_FTR_SECTION_END_IFSET(CPU_FTR_EMB_HV)
        lis     r10, (MSR_KERNEL | MSR_EE)@h
        ori     r10, r10, (MSR_KERNEL | MSR_EE)@l
 #endif
-       mtspr   SPRN_SRR1,r10
-       mtspr   SPRN_SRR0,r11
-       rfi                             /* jump to handler, enable MMU */
+       mtmsr   r10
+       b       transfer_to_syscall     /* jump to handler */
 99:    b       ret_from_kernel_syscall
 .endm