Complete workaround for DTLB errata in e300c2/c3/c4 processors.
Due to the bug, the hardware-implemented LRU algorithm always goes to way
1 of the TLB. This fix implements the proposed software workaround in the
form of an LRU table for choosing the TLB way.
Based on a patch from David Jander <david@protonic.nl>
Signed-off-by: Kumar Gala <galak@kernel.crashing.org>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
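
The workaround keeps one software LRU bit per DTLB set in SPRG4 and feeds
the chosen way to tlbld through the SRR1 way bit. A minimal C sketch of the
same bit manipulation (illustrative only; the names and the helper are
hypothetical, and the actual fix below is assembly in the DTLB miss handlers):

    /*
     * Illustrative sketch: the kernel keeps this state in SPRN_SPRG4 and
     * does the update in assembly; dtlb_pick_way() is a made-up helper.
     */
    #include <stdint.h>

    static uint32_t dtlb_lru;                     /* stands in for SPRG4 */

    /* Pick the DTLB way (0 or 1) to replace for the set covering 'ea'. */
    static unsigned int dtlb_pick_way(uint32_t ea)
    {
            unsigned int set = (ea >> 12) & 0x1f; /* EA bits 15:19 = set */

            dtlb_lru ^= 1u << set;                /* toggle this set's bit */
            return (dtlb_lru >> set) & 1;         /* new bit selects the way */
    }

The returned value corresponds to what the assembly inserts into SRR1 bit 14
(the way bit) just before tlbld, so back-to-back misses to the same set
alternate between way 0 and way 1 instead of always reloading way 1.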
  */
 #define MMU_FTR_LOCK_BCAST_INVAL       ASM_CONST(0x00100000)
 
+/* This indicates that the processor doesn't handle way selection
+ * properly and needs SW to track and update the LRU state.  This
+ * is specific to an erratum on e300c2/c3/c4 class parts
+ */
+#define MMU_FTR_NEED_DTLB_SW_LRU       ASM_CONST(0x00200000)
+
 #ifndef __ASSEMBLY__
 #include <asm/cputable.h>
 
 
 #include <asm/ppc_asm.h>
 #include <asm/asm-offsets.h>
 #include <asm/cache.h>
+#include <asm/mmu.h>
 
 _GLOBAL(__setup_cpu_603)
        mflr    r4
+BEGIN_MMU_FTR_SECTION
+       li      r10,0
+       mtspr   SPRN_SPRG4,r10          /* init SW LRU tracking */
+END_MMU_FTR_SECTION_IFSET(MMU_FTR_NEED_DTLB_SW_LRU)
 BEGIN_FTR_SECTION
        bl      __init_fpu_registers
 END_FTR_SECTION_IFCLR(CPU_FTR_FPU_UNAVAILABLE)
 
                .cpu_name               = "e300c2",
                .cpu_features           = CPU_FTRS_E300C2,
                .cpu_user_features      = PPC_FEATURE_32 | PPC_FEATURE_HAS_MMU,
-               .mmu_features           = MMU_FTR_USE_HIGH_BATS,
+               .mmu_features           = MMU_FTR_USE_HIGH_BATS |
+                       MMU_FTR_NEED_DTLB_SW_LRU,
                .icache_bsize           = 32,
                .dcache_bsize           = 32,
                .cpu_setup              = __setup_cpu_603,
                .cpu_name               = "e300c3",
                .cpu_features           = CPU_FTRS_E300,
                .cpu_user_features      = COMMON_USER,
-               .mmu_features           = MMU_FTR_USE_HIGH_BATS,
+               .mmu_features           = MMU_FTR_USE_HIGH_BATS |
+                       MMU_FTR_NEED_DTLB_SW_LRU,
                .icache_bsize           = 32,
                .dcache_bsize           = 32,
                .cpu_setup              = __setup_cpu_603,
                .cpu_name               = "e300c4",
                .cpu_features           = CPU_FTRS_E300,
                .cpu_user_features      = COMMON_USER,
-               .mmu_features           = MMU_FTR_USE_HIGH_BATS,
+               .mmu_features           = MMU_FTR_USE_HIGH_BATS |
+                       MMU_FTR_NEED_DTLB_SW_LRU,
                .icache_bsize           = 32,
                .dcache_bsize           = 32,
                .cpu_setup              = __setup_cpu_603,
 
        rlwinm  r1,r1,0,~_PAGE_COHERENT /* clear M (coherence not required) */
 END_FTR_SECTION_IFCLR(CPU_FTR_NEED_COHERENT)
        mtspr   SPRN_RPA,r1
+       mfspr   r2,SPRN_SRR1            /* Need to restore CR0 */
+       mtcrf   0x80,r2
+BEGIN_MMU_FTR_SECTION
+       li      r0,1
+       mfspr   r1,SPRN_SPRG4
+       rlwinm  r2,r3,20,27,31          /* Get Address bits 15:19 */
+       slw     r0,r0,r2                /* mask for this set's LRU bit */
+       xor     r1,r0,r1                /* toggle the LRU bit for this set */
+       srw     r0,r1,r2                /* new LRU bit -> lsb of r0 */
+       mtspr   SPRN_SPRG4,r1           /* save updated SW LRU state */
+       mfspr   r2,SPRN_SRR1
+       rlwimi  r2,r0,31-14,14,14       /* put LRU way into SRR1 way bit */
+       mtspr   SPRN_SRR1,r2
+END_MMU_FTR_SECTION_IFSET(MMU_FTR_NEED_DTLB_SW_LRU)
        tlbld   r3
-       mfspr   r3,SPRN_SRR1            /* Need to restore CR0 */
-       mtcrf   0x80,r3
        rfi
 DataAddressInvalid:
        mfspr   r3,SPRN_SRR1
        rlwinm  r1,r1,0,~_PAGE_COHERENT /* clear M (coherence not required) */
 END_FTR_SECTION_IFCLR(CPU_FTR_NEED_COHERENT)
        mtspr   SPRN_RPA,r1
+       mfspr   r2,SPRN_SRR1            /* Need to restore CR0 */
+       mtcrf   0x80,r2
+BEGIN_MMU_FTR_SECTION
+       li      r0,1
+       mfspr   r1,SPRN_SPRG4
+       rlwinm  r2,r3,20,27,31          /* Get Address bits 15:19 */
+       slw     r0,r0,r2                /* mask for this set's LRU bit */
+       xor     r1,r0,r1                /* toggle the LRU bit for this set */
+       srw     r0,r1,r2                /* new LRU bit -> lsb of r0 */
+       mtspr   SPRN_SPRG4,r1           /* save updated SW LRU state */
+       mfspr   r2,SPRN_SRR1
+       rlwimi  r2,r0,31-14,14,14       /* put LRU way into SRR1 way bit */
+       mtspr   SPRN_SRR1,r2
+END_MMU_FTR_SECTION_IFSET(MMU_FTR_NEED_DTLB_SW_LRU)
        tlbld   r3
-       mfspr   r3,SPRN_SRR1            /* Need to restore CR0 */
-       mtcrf   0x80,r3
        rfi
 
 #ifndef CONFIG_ALTIVEC