Diffstat (limited to 'arch/xtensa/include/asm/initialize_mmu.h')
| -rw-r--r-- | arch/xtensa/include/asm/initialize_mmu.h | 137 |
1 file changed, 110 insertions(+), 27 deletions(-)
diff --git a/arch/xtensa/include/asm/initialize_mmu.h b/arch/xtensa/include/asm/initialize_mmu.h
index 722553f17db3..101bcb87e15b 100644
--- a/arch/xtensa/include/asm/initialize_mmu.h
+++ b/arch/xtensa/include/asm/initialize_mmu.h
@@ -6,7 +6,7 @@
  * For the new V3 MMU we remap the TLB from virtual == physical
  * to the standard Linux mapping used in earlier MMU's.
  *
- * The the MMU we also support a new configuration register that
+ * For the MMU we also support a new configuration register that
  * specifies how the S32C1I instruction operates with the cache
  * controller.
  *
@@ -23,10 +23,18 @@
 #ifndef _XTENSA_INITIALIZE_MMU_H
 #define _XTENSA_INITIALIZE_MMU_H

-#include <asm/pgtable.h>
+#include <linux/init.h>
+#include <linux/pgtable.h>
 #include <asm/vectors.h>

-#ifdef __ASSEMBLY__
+#if XCHAL_HAVE_PTP_MMU
+#define CA_BYPASS	(_PAGE_CA_BYPASS | _PAGE_HW_WRITE | _PAGE_HW_EXEC)
+#define CA_WRITEBACK	(_PAGE_CA_WB | _PAGE_HW_WRITE | _PAGE_HW_EXEC)
+#else
+#define CA_WRITEBACK	(0x4)
+#endif
+
+#ifdef __ASSEMBLER__

 #define XTENSA_HWVERSION_RC_2009_0 230000

@@ -35,7 +43,7 @@
 #if XCHAL_HAVE_S32C1I && (XCHAL_HW_MIN_VERSION >= XTENSA_HWVERSION_RC_2009_0)
 /*
  * We Have Atomic Operation Control (ATOMCTL) Register; Initialize it.
- * For details see Documentation/xtensa/atomctl.txt
+ * For details see Documentation/arch/xtensa/atomctl.rst
  */
 #if XCHAL_DCACHE_IS_COHERENT
 	movi	a3, 0x25	/* For SMP/MX -- internal for writeback,
@@ -65,14 +73,17 @@
 	_j	2f

 	.align	4
-1:	movi	a2, 0x10000000
-	movi	a3, 0x18000000
-	add	a2, a2, a0
-9:	bgeu	a2, a3, 9b	/* PC is out of the expected range */
+1:
+
+#if CONFIG_KERNEL_LOAD_ADDRESS < 0x40000000ul
+#define TEMP_MAPPING_VADDR 0x40000000
+#else
+#define TEMP_MAPPING_VADDR 0x00000000
+#endif

 	/* Step 1: invalidate mapping at 0x40000000..0x5FFFFFFF. */

-	movi	a2, 0x40000006
+	movi	a2, TEMP_MAPPING_VADDR | XCHAL_SPANNING_WAY
 	idtlb	a2
 	iitlb	a2
 	isync
@@ -80,20 +91,18 @@
 	/* Step 2: map 0x40000000..0x47FFFFFF to paddr containing this code
 	 * and jump to the new mapping.
 	 */
-#define CA_BYPASS	(_PAGE_CA_BYPASS | _PAGE_HW_WRITE | _PAGE_HW_EXEC)
-#define CA_WRITEBACK	(_PAGE_CA_WB | _PAGE_HW_WRITE | _PAGE_HW_EXEC)

 	srli	a3, a0, 27
 	slli	a3, a3, 27
 	addi	a3, a3, CA_BYPASS
-	addi	a7, a2, -1
+	addi	a7, a2, 5 - XCHAL_SPANNING_WAY
 	wdtlb	a3, a7
 	witlb	a3, a7
 	isync

 	slli	a4, a0, 5
 	srli	a4, a4, 5
-	addi	a5, a2, -6
+	addi	a5, a2, -XCHAL_SPANNING_WAY
 	add	a4, a4, a5
 	jx	a4

@@ -107,42 +116,52 @@
 	add	a5, a5, a4
 	bne	a5, a2, 3b

-	/* Step 4: Setup MMU with the old V2 mappings. */
+	/* Step 4: Setup MMU with the requested static mappings. */
+
 	movi	a6, 0x01000000
 	wsr	a6, ITLBCFG
 	wsr	a6, DTLBCFG
 	isync

-	movi	a5, 0xd0000005
-	movi	a4, CA_WRITEBACK
+	movi	a5, XCHAL_KSEG_CACHED_VADDR + XCHAL_KSEG_TLB_WAY
+	movi	a4, XCHAL_KSEG_PADDR + CA_WRITEBACK
 	wdtlb	a4, a5
 	witlb	a4, a5

-	movi	a5, 0xd8000005
-	movi	a4, CA_BYPASS
+	movi	a5, XCHAL_KSEG_BYPASS_VADDR + XCHAL_KSEG_TLB_WAY
+	movi	a4, XCHAL_KSEG_PADDR + CA_BYPASS
 	wdtlb	a4, a5
 	witlb	a4, a5

-	movi	a5, 0xe0000006
-	movi	a4, 0xf0000000 + CA_WRITEBACK
+#ifdef CONFIG_XTENSA_KSEG_512M
+	movi	a5, XCHAL_KSEG_CACHED_VADDR + 0x10000000 + XCHAL_KSEG_TLB_WAY
+	movi	a4, XCHAL_KSEG_PADDR + 0x10000000 + CA_WRITEBACK
 	wdtlb	a4, a5
 	witlb	a4, a5

-	movi	a5, 0xf0000006
-	movi	a4, 0xf0000000 + CA_BYPASS
+	movi	a5, XCHAL_KSEG_BYPASS_VADDR + 0x10000000 + XCHAL_KSEG_TLB_WAY
+	movi	a4, XCHAL_KSEG_PADDR + 0x10000000 + CA_BYPASS
+	wdtlb	a4, a5
+	witlb	a4, a5
+#endif
+
+	movi	a5, XCHAL_KIO_CACHED_VADDR + XCHAL_KIO_TLB_WAY
+	movi	a4, XCHAL_KIO_DEFAULT_PADDR + CA_WRITEBACK
+	wdtlb	a4, a5
+	witlb	a4, a5
+
+	movi	a5, XCHAL_KIO_BYPASS_VADDR + XCHAL_KIO_TLB_WAY
+	movi	a4, XCHAL_KIO_DEFAULT_PADDR + CA_BYPASS
 	wdtlb	a4, a5
 	witlb	a4, a5

 	isync

-	/* Jump to self, using MMU v2 mappings. */
+	/* Jump to self, using final mappings. */
 	movi	a4, 1f
 	jx	a4

 1:
-	movi	a2, VECBASE_RESET_VADDR
-	wsr	a2, vecbase
-
 	/* Step 5: remove temporary mapping. */
 	idtlb	a7
 	iitlb	a7
@@ -157,6 +176,70 @@

 	.endm

-#endif /*__ASSEMBLY__*/
+	.macro	initialize_cacheattr
+
+#if !defined(CONFIG_MMU) && (XCHAL_HAVE_TLBS || XCHAL_HAVE_MPU)
+#if CONFIG_MEMMAP_CACHEATTR == 0x22222222 && XCHAL_HAVE_PTP_MMU
+#error Default MEMMAP_CACHEATTR of 0x22222222 does not work with full MMU.
+#endif
+
+#if XCHAL_HAVE_MPU
+	__REFCONST
+	.align	4
+.Lattribute_table:
+	.long 0x000000, 0x1fff00, 0x1ddf00, 0x1eef00
+	.long 0x006600, 0x000000, 0x000000, 0x000000
+	.long 0x000000, 0x000000, 0x000000, 0x000000
+	.long 0x000000, 0x000000, 0x000000, 0x000000
+	.previous
+
+	movi	a3, .Lattribute_table
+	movi	a4, CONFIG_MEMMAP_CACHEATTR
+	movi	a5, 1
+	movi	a6, XCHAL_MPU_ENTRIES
+	movi	a10, 0x20000000
+	movi	a11, -1
+1:
+	sub	a5, a5, a10
+	extui	a8, a4, 28, 4
+	beq	a8, a11, 2f
+	addi	a6, a6, -1
+	mov	a11, a8
+2:
+	addx4	a9, a8, a3
+	l32i	a9, a9, 0
+	or	a9, a9, a6
+	wptlb	a9, a5
+	slli	a4, a4, 4
+	bgeu	a5, a10, 1b
+
+#else
+	movi	a5, XCHAL_SPANNING_WAY
+	movi	a6, ~_PAGE_ATTRIB_MASK
+	movi	a4, CONFIG_MEMMAP_CACHEATTR
+	movi	a8, 0x20000000
+1:
+	rdtlb1	a3, a5
+	xor	a3, a3, a4
+	and	a3, a3, a6
+	xor	a3, a3, a4
+	wdtlb	a3, a5
+	ritlb1	a3, a5
+	xor	a3, a3, a4
+	and	a3, a3, a6
+	xor	a3, a3, a4
+	witlb	a3, a5
+
+	add	a5, a5, a8
+	srli	a4, a4, 4
+	bgeu	a5, a8, 1b
+
+	isync
+#endif
+#endif
+
+	.endm
+
+#endif /*__ASSEMBLER__*/

 #endif /* _XTENSA_INITIALIZE_MMU_H */
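
The initialize_cacheattr macro added at the end of the diff consumes CONFIG_MEMMAP_CACHEATTR one nibble at a time: each nibble selects the cache attribute for one 512MB region of the address space, least-significant nibble first, and the xor/and/xor sequence merges that nibble into the existing TLB entry without disturbing the entry's other bits ((entry ^ attr) & ~mask, then ^ attr again). A minimal userspace C sketch of that decode and bit-merge follows; the mask width and the example values are illustrative assumptions, not taken from the patch or from _PAGE_ATTRIB_MASK itself:

	#include <stdint.h>
	#include <stdio.h>

	/* Illustrative stand-in for ~_PAGE_ATTRIB_MASK: assume the cache
	 * attribute lives in the low nibble and everything else is kept. */
	#define KEEP_MASK (~(uint32_t)0xf)

	/* Same xor/and/xor trick as the assembly loop: bits under KEEP_MASK
	 * come from the old entry, the rest come from the attribute word. */
	static uint32_t merge_attr(uint32_t entry, uint32_t attr)
	{
		return ((entry ^ attr) & KEEP_MASK) ^ attr;
	}

	int main(void)
	{
		uint32_t cacheattr = 0x22222222;	/* example MEMMAP_CACHEATTR */
		unsigned int region;

		/* One nibble per 512MB region, least-significant nibble first,
		 * mirroring the loop's `srli a4, a4, 4` per iteration. */
		for (region = 0; region < 8; region++) {
			uint32_t vaddr = region * 0x20000000u;
			uint32_t attr  = (cacheattr >> (4 * region)) & 0xf;
			uint32_t entry = vaddr | 0x3;	/* pretend old TLB entry */

			printf("0x%08x: attr 0x%x -> entry 0x%08x\n",
			       vaddr, attr, merge_attr(entry, attr));
		}
		return 0;
	}

The merge has the usual property of the xor/and/xor idiom: it needs no temporary register for the inverted mask on the entry side, which is why the assembly version can update both the data and instruction TLB entries with the same three-instruction sequence per entry.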
