Diffstat (limited to 'arch/loongarch/mm')
-rw-r--r--  arch/loongarch/mm/init.c  |   4
-rw-r--r--  arch/loongarch/mm/page.S  | 118
-rw-r--r--  arch/loongarch/mm/tlb.c   |  12
-rw-r--r--  arch/loongarch/mm/tlbex.S | 322
4 files changed, 284 insertions, 172 deletions
diff --git a/arch/loongarch/mm/init.c b/arch/loongarch/mm/init.c
index 6bfd4b8dad1b..0946662afdd6 100644
--- a/arch/loongarch/mm/init.c
+++ b/arch/loongarch/mm/init.c
@@ -224,7 +224,7 @@ EXPORT_SYMBOL(invalid_pmd_table);
 pte_t invalid_pte_table[PTRS_PER_PTE] __page_aligned_bss;
 EXPORT_SYMBOL(invalid_pte_table);
 
-#ifdef CONFIG_EXECMEM
+#if defined(CONFIG_EXECMEM) && defined(MODULES_VADDR)
 static struct execmem_info execmem_info __ro_after_init;
 
 struct execmem_info __init *execmem_arch_setup(void)
@@ -242,4 +242,4 @@ struct execmem_info __init *execmem_arch_setup(void)
 
 	return &execmem_info;
 }
-#endif /* CONFIG_EXECMEM */
+#endif /* CONFIG_EXECMEM && MODULES_VADDR */
diff --git a/arch/loongarch/mm/page.S b/arch/loongarch/mm/page.S
index 7ad76551d313..7286b804756d 100644
--- a/arch/loongarch/mm/page.S
+++ b/arch/loongarch/mm/page.S
@@ -10,75 +10,75 @@
 
 	.align 5
 SYM_FUNC_START(clear_page)
-	lu12i.w	t0, 1 << (PAGE_SHIFT - 12)
-	add.d	t0, t0, a0
+	lu12i.w	t0, 1 << (PAGE_SHIFT - 12)
+	PTR_ADD	t0, t0, a0
 1:
-	st.d	zero, a0, 0
-	st.d	zero, a0, 8
-	st.d	zero, a0, 16
-	st.d	zero, a0, 24
-	st.d	zero, a0, 32
-	st.d	zero, a0, 40
-	st.d	zero, a0, 48
-	st.d	zero, a0, 56
-	addi.d	a0, a0, 128
-	st.d	zero, a0, -64
-	st.d	zero, a0, -56
-	st.d	zero, a0, -48
-	st.d	zero, a0, -40
-	st.d	zero, a0, -32
-	st.d	zero, a0, -24
-	st.d	zero, a0, -16
-	st.d	zero, a0, -8
-	bne	t0, a0, 1b
+	LONG_S	zero, a0, (LONGSIZE * 0)
+	LONG_S	zero, a0, (LONGSIZE * 1)
+	LONG_S	zero, a0, (LONGSIZE * 2)
+	LONG_S	zero, a0, (LONGSIZE * 3)
+	LONG_S	zero, a0, (LONGSIZE * 4)
+	LONG_S	zero, a0, (LONGSIZE * 5)
+	LONG_S	zero, a0, (LONGSIZE * 6)
+	LONG_S	zero, a0, (LONGSIZE * 7)
+	PTR_ADDI	a0, a0, (LONGSIZE * 16)
+	LONG_S	zero, a0, -(LONGSIZE * 8)
+	LONG_S	zero, a0, -(LONGSIZE * 7)
+	LONG_S	zero, a0, -(LONGSIZE * 6)
+	LONG_S	zero, a0, -(LONGSIZE * 5)
+	LONG_S	zero, a0, -(LONGSIZE * 4)
+	LONG_S	zero, a0, -(LONGSIZE * 3)
+	LONG_S	zero, a0, -(LONGSIZE * 2)
+	LONG_S	zero, a0, -(LONGSIZE * 1)
+	bne	t0, a0, 1b
 
-	jr	ra
+	jr	ra
 SYM_FUNC_END(clear_page)
 EXPORT_SYMBOL(clear_page)
 
 	.align 5
 SYM_FUNC_START(copy_page)
-	lu12i.w	t8, 1 << (PAGE_SHIFT - 12)
-	add.d	t8, t8, a0
+	lu12i.w	t8, 1 << (PAGE_SHIFT - 12)
+	PTR_ADD	t8, t8, a0
 1:
-	ld.d	t0, a1, 0
-	ld.d	t1, a1, 8
-	ld.d	t2, a1, 16
-	ld.d	t3, a1, 24
-	ld.d	t4, a1, 32
-	ld.d	t5, a1, 40
-	ld.d	t6, a1, 48
-	ld.d	t7, a1, 56
+	LONG_L	t0, a1, (LONGSIZE * 0)
+	LONG_L	t1, a1, (LONGSIZE * 1)
+	LONG_L	t2, a1, (LONGSIZE * 2)
+	LONG_L	t3, a1, (LONGSIZE * 3)
+	LONG_L	t4, a1, (LONGSIZE * 4)
+	LONG_L	t5, a1, (LONGSIZE * 5)
+	LONG_L	t6, a1, (LONGSIZE * 6)
+	LONG_L	t7, a1, (LONGSIZE * 7)
 
-	st.d	t0, a0, 0
-	st.d	t1, a0, 8
-	ld.d	t0, a1, 64
-	ld.d	t1, a1, 72
-	st.d	t2, a0, 16
-	st.d	t3, a0, 24
-	ld.d	t2, a1, 80
-	ld.d	t3, a1, 88
-	st.d	t4, a0, 32
-	st.d	t5, a0, 40
-	ld.d	t4, a1, 96
-	ld.d	t5, a1, 104
-	st.d	t6, a0, 48
-	st.d	t7, a0, 56
-	ld.d	t6, a1, 112
-	ld.d	t7, a1, 120
-	addi.d	a0, a0, 128
-	addi.d	a1, a1, 128
+	LONG_S	t0, a0, (LONGSIZE * 0)
+	LONG_S	t1, a0, (LONGSIZE * 1)
+	LONG_L	t0, a1, (LONGSIZE * 8)
+	LONG_L	t1, a1, (LONGSIZE * 9)
+	LONG_S	t2, a0, (LONGSIZE * 2)
+	LONG_S	t3, a0, (LONGSIZE * 3)
+	LONG_L	t2, a1, (LONGSIZE * 10)
+	LONG_L	t3, a1, (LONGSIZE * 11)
+	LONG_S	t4, a0, (LONGSIZE * 4)
+	LONG_S	t5, a0, (LONGSIZE * 5)
+	LONG_L	t4, a1, (LONGSIZE * 12)
+	LONG_L	t5, a1, (LONGSIZE * 13)
+	LONG_S	t6, a0, (LONGSIZE * 6)
+	LONG_S	t7, a0, (LONGSIZE * 7)
+	LONG_L	t6, a1, (LONGSIZE * 14)
+	LONG_L	t7, a1, (LONGSIZE * 15)
+	PTR_ADDI	a0, a0, (LONGSIZE * 16)
+	PTR_ADDI	a1, a1, (LONGSIZE * 16)
 
-	st.d	t0, a0, -64
-	st.d	t1, a0, -56
-	st.d	t2, a0, -48
-	st.d	t3, a0, -40
-	st.d	t4, a0, -32
-	st.d	t5, a0, -24
-	st.d	t6, a0, -16
-	st.d	t7, a0, -8
+	LONG_S	t0, a0, -(LONGSIZE * 8)
+	LONG_S	t1, a0, -(LONGSIZE * 7)
+	LONG_S	t2, a0, -(LONGSIZE * 6)
+	LONG_S	t3, a0, -(LONGSIZE * 5)
+	LONG_S	t4, a0, -(LONGSIZE * 4)
+	LONG_S	t5, a0, -(LONGSIZE * 3)
+	LONG_S	t6, a0, -(LONGSIZE * 2)
+	LONG_S	t7, a0, -(LONGSIZE * 1)
 
-	bne	t8, a0, 1b
-	jr	ra
+	bne	t8, a0, 1b
+	jr	ra
 SYM_FUNC_END(copy_page)
 EXPORT_SYMBOL(copy_page)
diff --git a/arch/loongarch/mm/tlb.c b/arch/loongarch/mm/tlb.c
index 3b427b319db2..6a3c91b9cacd 100644
--- a/arch/loongarch/mm/tlb.c
+++ b/arch/loongarch/mm/tlb.c
@@ -229,11 +229,11 @@ static void setup_ptwalker(void)
 	if (cpu_has_ptw)
 		pwctl1 |= CSR_PWCTL1_PTW;
 
-	csr_write64(pwctl0, LOONGARCH_CSR_PWCTL0);
-	csr_write64(pwctl1, LOONGARCH_CSR_PWCTL1);
-	csr_write64((long)swapper_pg_dir, LOONGARCH_CSR_PGDH);
-	csr_write64((long)invalid_pg_dir, LOONGARCH_CSR_PGDL);
-	csr_write64((long)smp_processor_id(), LOONGARCH_CSR_TMID);
+	csr_write(pwctl0, LOONGARCH_CSR_PWCTL0);
+	csr_write(pwctl1, LOONGARCH_CSR_PWCTL1);
+	csr_write((long)swapper_pg_dir, LOONGARCH_CSR_PGDH);
+	csr_write((long)invalid_pg_dir, LOONGARCH_CSR_PGDL);
+	csr_write((long)smp_processor_id(), LOONGARCH_CSR_TMID);
 }
 
 static void output_pgtable_bits_defines(void)
@@ -251,8 +251,10 @@ static void output_pgtable_bits_defines(void)
 	pr_define("_PAGE_GLOBAL_SHIFT %d\n", _PAGE_GLOBAL_SHIFT);
 	pr_define("_PAGE_PRESENT_SHIFT %d\n", _PAGE_PRESENT_SHIFT);
 	pr_define("_PAGE_WRITE_SHIFT %d\n", _PAGE_WRITE_SHIFT);
+#ifdef CONFIG_64BIT
 	pr_define("_PAGE_NO_READ_SHIFT %d\n", _PAGE_NO_READ_SHIFT);
 	pr_define("_PAGE_NO_EXEC_SHIFT %d\n", _PAGE_NO_EXEC_SHIFT);
+#endif
 	pr_define("PFN_PTE_SHIFT %d\n", PFN_PTE_SHIFT);
 	pr_debug("\n");
 }
diff --git a/arch/loongarch/mm/tlbex.S b/arch/loongarch/mm/tlbex.S
index c08682a89c58..84a881a339a7 100644
--- a/arch/loongarch/mm/tlbex.S
+++ b/arch/loongarch/mm/tlbex.S
@@ -11,10 +11,18 @@
 
 #define INVTLB_ADDR_GFALSE_AND_ASID	5
 
-#define PTRS_PER_PGD_BITS	(PAGE_SHIFT - 3)
-#define PTRS_PER_PUD_BITS	(PAGE_SHIFT - 3)
-#define PTRS_PER_PMD_BITS	(PAGE_SHIFT - 3)
-#define PTRS_PER_PTE_BITS	(PAGE_SHIFT - 3)
+#define PTRS_PER_PGD_BITS	(PAGE_SHIFT - PTRLOG)
+#define PTRS_PER_PUD_BITS	(PAGE_SHIFT - PTRLOG)
+#define PTRS_PER_PMD_BITS	(PAGE_SHIFT - PTRLOG)
+#define PTRS_PER_PTE_BITS	(PAGE_SHIFT - PTRLOG)
+
+#ifdef CONFIG_32BIT
+#define PTE_LL	ll.w
+#define PTE_SC	sc.w
+#else
+#define PTE_LL	ll.d
+#define PTE_SC	sc.d
+#endif
 
 	.macro tlb_do_page_fault, write
 	SYM_CODE_START(tlb_do_page_fault_\write)
@@ -60,52 +68,61 @@ SYM_CODE_START(handle_tlb_load)
 
 vmalloc_done_load:
 	/* Get PGD offset in bytes */
-	bstrpick.d	ra, t0, PTRS_PER_PGD_BITS + PGDIR_SHIFT - 1, PGDIR_SHIFT
-	alsl.d		t1, ra, t1, 3
+#ifdef CONFIG_32BIT
+	PTR_BSTRPICK	ra, t0, 31, PGDIR_SHIFT
+#else
+	PTR_BSTRPICK	ra, t0, PTRS_PER_PGD_BITS + PGDIR_SHIFT - 1, PGDIR_SHIFT
+#endif
+	PTR_ALSL	t1, ra, t1, _PGD_T_LOG2
+
 #if CONFIG_PGTABLE_LEVELS > 3
-	ld.d		t1, t1, 0
-	bstrpick.d	ra, t0, PTRS_PER_PUD_BITS + PUD_SHIFT - 1, PUD_SHIFT
-	alsl.d		t1, ra, t1, 3
+	PTR_L		t1, t1, 0
+	PTR_BSTRPICK	ra, t0, PTRS_PER_PUD_BITS + PUD_SHIFT - 1, PUD_SHIFT
+	PTR_ALSL	t1, ra, t1, _PMD_T_LOG2
+
 #endif
 #if CONFIG_PGTABLE_LEVELS > 2
-	ld.d		t1, t1, 0
-	bstrpick.d	ra, t0, PTRS_PER_PMD_BITS + PMD_SHIFT - 1, PMD_SHIFT
-	alsl.d		t1, ra, t1, 3
+	PTR_L		t1, t1, 0
+	PTR_BSTRPICK	ra, t0, PTRS_PER_PMD_BITS + PMD_SHIFT - 1, PMD_SHIFT
+	PTR_ALSL	t1, ra, t1, _PMD_T_LOG2
+
 #endif
-	ld.d	ra, t1, 0
+	PTR_L	ra, t1, 0
 
 	/*
 	 * For huge tlb entries, pmde doesn't contain an address but
 	 * instead contains the tlb pte. Check the PAGE_HUGE bit and
 	 * see if we need to jump to huge tlb processing.
 	 */
-	rotri.d	ra, ra, _PAGE_HUGE_SHIFT + 1
+	PTR_ROTRI	ra, ra, _PAGE_HUGE_SHIFT + 1
 	bltz	ra, tlb_huge_update_load
 
-	rotri.d	ra, ra, 64 - (_PAGE_HUGE_SHIFT + 1)
-	bstrpick.d	t0, t0, PTRS_PER_PTE_BITS + PAGE_SHIFT - 1, PAGE_SHIFT
-	alsl.d	t1, t0, ra, _PTE_T_LOG2
+	PTR_ROTRI	ra, ra, BITS_PER_LONG - (_PAGE_HUGE_SHIFT + 1)
+	PTR_BSTRPICK	t0, t0, PTRS_PER_PTE_BITS + PAGE_SHIFT - 1, PAGE_SHIFT
+	PTR_ALSL	t1, t0, ra, _PTE_T_LOG2
 
 #ifdef CONFIG_SMP
 smp_pgtable_change_load:
-	ll.d	t0, t1, 0
+	PTE_LL	t0, t1, 0
 #else
-	ld.d	t0, t1, 0
+	PTR_L	t0, t1, 0
 #endif
 	andi	ra, t0, _PAGE_PRESENT
 	beqz	ra, nopage_tlb_load
 
 	ori	t0, t0, _PAGE_VALID
+
 #ifdef CONFIG_SMP
-	sc.d	t0, t1, 0
+	PTE_SC	t0, t1, 0
 	beqz	t0, smp_pgtable_change_load
 #else
-	st.d	t0, t1, 0
+	PTR_S	t0, t1, 0
 #endif
+
 	tlbsrch
-	bstrins.d	t1, zero, 3, 3
-	ld.d	t0, t1, 0
-	ld.d	t1, t1, 8
+	PTR_BSTRINS	t1, zero, _PTE_T_LOG2, _PTE_T_LOG2
+	PTR_L	t0, t1, 0
+	PTR_L	t1, t1, _PTE_T_SIZE
 	csrwr	t0, LOONGARCH_CSR_TLBELO0
 	csrwr	t1, LOONGARCH_CSR_TLBELO1
 	tlbwr
@@ -115,30 +132,28 @@ smp_pgtable_change_load:
 	csrrd	ra, EXCEPTION_KS2
 	ertn
 
-#ifdef CONFIG_64BIT
 vmalloc_load:
 	la_abs	t1, swapper_pg_dir
 	b	vmalloc_done_load
-#endif
 
 	/* This is the entry point of a huge page. */
 tlb_huge_update_load:
 #ifdef CONFIG_SMP
-	ll.d	ra, t1, 0
+	PTE_LL	ra, t1, 0
 #else
-	rotri.d	ra, ra, 64 - (_PAGE_HUGE_SHIFT + 1)
+	PTR_ROTRI	ra, ra, BITS_PER_LONG - (_PAGE_HUGE_SHIFT + 1)
 #endif
 	andi	t0, ra, _PAGE_PRESENT
 	beqz	t0, nopage_tlb_load
 
 #ifdef CONFIG_SMP
 	ori	t0, ra, _PAGE_VALID
-	sc.d	t0, t1, 0
+	PTE_SC	t0, t1, 0
 	beqz	t0, tlb_huge_update_load
 	ori	t0, ra, _PAGE_VALID
 #else
 	ori	t0, ra, _PAGE_VALID
-	st.d	t0, t1, 0
+	PTR_S	t0, t1, 0
 #endif
 	csrrd	ra, LOONGARCH_CSR_ASID
 	csrrd	t1, LOONGARCH_CSR_BADV
@@ -158,27 +173,27 @@ tlb_huge_update_load:
 	xori	t0, t0, _PAGE_HUGE
 	lu12i.w	t1, _PAGE_HGLOBAL >> 12
 	and	t1, t0, t1
-	srli.d	t1, t1, (_PAGE_HGLOBAL_SHIFT - _PAGE_GLOBAL_SHIFT)
+	PTR_SRLI	t1, t1, (_PAGE_HGLOBAL_SHIFT - _PAGE_GLOBAL_SHIFT)
 	or	t0, t0, t1
 
 	move	ra, t0
 	csrwr	ra, LOONGARCH_CSR_TLBELO0
 
 	/* Convert to entrylo1 */
-	addi.d	t1, zero, 1
-	slli.d	t1, t1, (HPAGE_SHIFT - 1)
-	add.d	t0, t0, t1
+	PTR_ADDI	t1, zero, 1
+	PTR_SLLI	t1, t1, (HPAGE_SHIFT - 1)
+	PTR_ADD	t0, t0, t1
 	csrwr	t0, LOONGARCH_CSR_TLBELO1
 
 	/* Set huge page tlb entry size */
-	addu16i.d	t0, zero, (CSR_TLBIDX_PS >> 16)
-	addu16i.d	t1, zero, (PS_HUGE_SIZE << (CSR_TLBIDX_PS_SHIFT - 16))
+	PTR_LI	t0, (CSR_TLBIDX_PS >> 16) << 16
+	PTR_LI	t1, (PS_HUGE_SIZE << (CSR_TLBIDX_PS_SHIFT))
 	csrxchg	t1, t0, LOONGARCH_CSR_TLBIDX
 
 	tlbfill
 
-	addu16i.d	t0, zero, (CSR_TLBIDX_PS >> 16)
-	addu16i.d	t1, zero, (PS_DEFAULT_SIZE << (CSR_TLBIDX_PS_SHIFT - 16))
+	PTR_LI	t0, (CSR_TLBIDX_PS >> 16) << 16
+	PTR_LI	t1, (PS_DEFAULT_SIZE << (CSR_TLBIDX_PS_SHIFT))
 	csrxchg	t1, t0, LOONGARCH_CSR_TLBIDX
 
 	csrrd	t0, EXCEPTION_KS0
@@ -216,53 +231,71 @@ SYM_CODE_START(handle_tlb_store)
 
 vmalloc_done_store:
 	/* Get PGD offset in bytes */
-	bstrpick.d	ra, t0, PTRS_PER_PGD_BITS + PGDIR_SHIFT - 1, PGDIR_SHIFT
-	alsl.d		t1, ra, t1, 3
+#ifdef CONFIG_32BIT
+	PTR_BSTRPICK	ra, t0, 31, PGDIR_SHIFT
+#else
+	PTR_BSTRPICK	ra, t0, PTRS_PER_PGD_BITS + PGDIR_SHIFT - 1, PGDIR_SHIFT
+#endif
+	PTR_ALSL	t1, ra, t1, _PGD_T_LOG2
+
 #if CONFIG_PGTABLE_LEVELS > 3
-	ld.d		t1, t1, 0
-	bstrpick.d	ra, t0, PTRS_PER_PUD_BITS + PUD_SHIFT - 1, PUD_SHIFT
-	alsl.d		t1, ra, t1, 3
+	PTR_L		t1, t1, 0
+	PTR_BSTRPICK	ra, t0, PTRS_PER_PUD_BITS + PUD_SHIFT - 1, PUD_SHIFT
+	PTR_ALSL	t1, ra, t1, _PMD_T_LOG2
 #endif
 #if CONFIG_PGTABLE_LEVELS > 2
-	ld.d		t1, t1, 0
-	bstrpick.d	ra, t0, PTRS_PER_PMD_BITS + PMD_SHIFT - 1, PMD_SHIFT
-	alsl.d		t1, ra, t1, 3
+	PTR_L		t1, t1, 0
+	PTR_BSTRPICK	ra, t0, PTRS_PER_PMD_BITS + PMD_SHIFT - 1, PMD_SHIFT
+	PTR_ALSL	t1, ra, t1, _PMD_T_LOG2
 #endif
-	ld.d	ra, t1, 0
+	PTR_L	ra, t1, 0
 
 	/*
 	 * For huge tlb entries, pmde doesn't contain an address but
 	 * instead contains the tlb pte. Check the PAGE_HUGE bit and
 	 * see if we need to jump to huge tlb processing.
 	 */
-	rotri.d	ra, ra, _PAGE_HUGE_SHIFT + 1
+	PTR_ROTRI	ra, ra, _PAGE_HUGE_SHIFT + 1
 	bltz	ra, tlb_huge_update_store
 
-	rotri.d	ra, ra, 64 - (_PAGE_HUGE_SHIFT + 1)
-	bstrpick.d	t0, t0, PTRS_PER_PTE_BITS + PAGE_SHIFT - 1, PAGE_SHIFT
-	alsl.d	t1, t0, ra, _PTE_T_LOG2
+	PTR_ROTRI	ra, ra, BITS_PER_LONG - (_PAGE_HUGE_SHIFT + 1)
+	PTR_BSTRPICK	t0, t0, PTRS_PER_PTE_BITS + PAGE_SHIFT - 1, PAGE_SHIFT
+	PTR_ALSL	t1, t0, ra, _PTE_T_LOG2
 
 #ifdef CONFIG_SMP
 smp_pgtable_change_store:
-	ll.d	t0, t1, 0
+	PTE_LL	t0, t1, 0
 #else
-	ld.d	t0, t1, 0
+	PTR_L	t0, t1, 0
 #endif
+
+#ifdef CONFIG_64BIT
 	andi	ra, t0, _PAGE_PRESENT | _PAGE_WRITE
 	xori	ra, ra, _PAGE_PRESENT | _PAGE_WRITE
+#else
+	PTR_LI	ra, _PAGE_PRESENT | _PAGE_WRITE
+	and	ra, ra, t0
+	nor	ra, ra, zero
+#endif
 	bnez	ra, nopage_tlb_store
 
+#ifdef CONFIG_64BIT
 	ori	t0, t0, (_PAGE_VALID | _PAGE_DIRTY | _PAGE_MODIFIED)
+#else
+	PTR_LI	ra, (_PAGE_VALID | _PAGE_DIRTY | _PAGE_MODIFIED)
+	or	t0, ra, t0
+#endif
+
 #ifdef CONFIG_SMP
-	sc.d	t0, t1, 0
+	PTE_SC	t0, t1, 0
 	beqz	t0, smp_pgtable_change_store
 #else
-	st.d	t0, t1, 0
+	PTR_S	t0, t1, 0
 #endif
 	tlbsrch
-	bstrins.d	t1, zero, 3, 3
-	ld.d	t0, t1, 0
-	ld.d	t1, t1, 8
+	PTR_BSTRINS	t1, zero, _PTE_T_LOG2, _PTE_T_LOG2
+	PTR_L	t0, t1, 0
+	PTR_L	t1, t1, _PTE_T_SIZE
 	csrwr	t0, LOONGARCH_CSR_TLBELO0
 	csrwr	t1, LOONGARCH_CSR_TLBELO1
 	tlbwr
@@ -272,31 +305,42 @@ smp_pgtable_change_store:
 	csrrd	ra, EXCEPTION_KS2
 	ertn
 
-#ifdef CONFIG_64BIT
 vmalloc_store:
 	la_abs	t1, swapper_pg_dir
 	b	vmalloc_done_store
-#endif
 
 	/* This is the entry point of a huge page. */
 tlb_huge_update_store:
 #ifdef CONFIG_SMP
-	ll.d	ra, t1, 0
+	PTE_LL	ra, t1, 0
 #else
-	rotri.d	ra, ra, 64 - (_PAGE_HUGE_SHIFT + 1)
+	PTR_ROTRI	ra, ra, BITS_PER_LONG - (_PAGE_HUGE_SHIFT + 1)
 #endif
+
+#ifdef CONFIG_64BIT
 	andi	t0, ra, _PAGE_PRESENT | _PAGE_WRITE
 	xori	t0, t0, _PAGE_PRESENT | _PAGE_WRITE
+#else
+	PTR_LI	t0, _PAGE_PRESENT | _PAGE_WRITE
+	and	t0, t0, ra
+	nor	t0, t0, zero
+#endif
+
 	bnez	t0, nopage_tlb_store
 
 #ifdef CONFIG_SMP
 	ori	t0, ra, (_PAGE_VALID | _PAGE_DIRTY | _PAGE_MODIFIED)
-	sc.d	t0, t1, 0
+	PTE_SC	t0, t1, 0
 	beqz	t0, tlb_huge_update_store
 	ori	t0, ra, (_PAGE_VALID | _PAGE_DIRTY | _PAGE_MODIFIED)
 #else
+#ifdef CONFIG_64BIT
 	ori	t0, ra, (_PAGE_VALID | _PAGE_DIRTY | _PAGE_MODIFIED)
-	st.d	t0, t1, 0
+#else
+	PTR_LI	t0, (_PAGE_VALID | _PAGE_DIRTY | _PAGE_MODIFIED)
+	or	t0, ra, t0
+#endif
+	PTR_S	t0, t1, 0
 #endif
 	csrrd	ra, LOONGARCH_CSR_ASID
 	csrrd	t1, LOONGARCH_CSR_BADV
@@ -316,28 +360,28 @@ tlb_huge_update_store:
 	xori	t0, t0, _PAGE_HUGE
 	lu12i.w	t1, _PAGE_HGLOBAL >> 12
 	and	t1, t0, t1
-	srli.d	t1, t1, (_PAGE_HGLOBAL_SHIFT - _PAGE_GLOBAL_SHIFT)
+	PTR_SRLI	t1, t1, (_PAGE_HGLOBAL_SHIFT - _PAGE_GLOBAL_SHIFT)
 	or	t0, t0, t1
 
 	move	ra, t0
 	csrwr	ra, LOONGARCH_CSR_TLBELO0
 
 	/* Convert to entrylo1 */
-	addi.d	t1, zero, 1
-	slli.d	t1, t1, (HPAGE_SHIFT - 1)
-	add.d	t0, t0, t1
+	PTR_ADDI	t1, zero, 1
+	PTR_SLLI	t1, t1, (HPAGE_SHIFT - 1)
+	PTR_ADD	t0, t0, t1
 	csrwr	t0, LOONGARCH_CSR_TLBELO1
 
 	/* Set huge page tlb entry size */
-	addu16i.d	t0, zero, (CSR_TLBIDX_PS >> 16)
-	addu16i.d	t1, zero, (PS_HUGE_SIZE << (CSR_TLBIDX_PS_SHIFT - 16))
+	PTR_LI	t0, (CSR_TLBIDX_PS >> 16) << 16
+	PTR_LI	t1, (PS_HUGE_SIZE << (CSR_TLBIDX_PS_SHIFT))
 	csrxchg	t1, t0, LOONGARCH_CSR_TLBIDX
 
 	tlbfill
 
 	/* Reset default page size */
-	addu16i.d	t0, zero, (CSR_TLBIDX_PS >> 16)
-	addu16i.d	t1, zero, (PS_DEFAULT_SIZE << (CSR_TLBIDX_PS_SHIFT - 16))
+	PTR_LI	t0, (CSR_TLBIDX_PS >> 16) << 16
+	PTR_LI	t1, (PS_DEFAULT_SIZE << (CSR_TLBIDX_PS_SHIFT))
 	csrxchg	t1, t0, LOONGARCH_CSR_TLBIDX
 
 	csrrd	t0, EXCEPTION_KS0
@@ -375,52 +419,69 @@ SYM_CODE_START(handle_tlb_modify)
 
 vmalloc_done_modify:
 	/* Get PGD offset in bytes */
-	bstrpick.d	ra, t0, PTRS_PER_PGD_BITS + PGDIR_SHIFT - 1, PGDIR_SHIFT
-	alsl.d		t1, ra, t1, 3
+#ifdef CONFIG_32BIT
+	PTR_BSTRPICK	ra, t0, 31, PGDIR_SHIFT
+#else
+	PTR_BSTRPICK	ra, t0, PTRS_PER_PGD_BITS + PGDIR_SHIFT - 1, PGDIR_SHIFT
+#endif
+	PTR_ALSL	t1, ra, t1, _PGD_T_LOG2
+
 #if CONFIG_PGTABLE_LEVELS > 3
-	ld.d		t1, t1, 0
-	bstrpick.d	ra, t0, PTRS_PER_PUD_BITS + PUD_SHIFT - 1, PUD_SHIFT
-	alsl.d		t1, ra, t1, 3
+	PTR_L		t1, t1, 0
+	PTR_BSTRPICK	ra, t0, PTRS_PER_PUD_BITS + PUD_SHIFT - 1, PUD_SHIFT
+	PTR_ALSL	t1, ra, t1, _PMD_T_LOG2
 #endif
 #if CONFIG_PGTABLE_LEVELS > 2
-	ld.d		t1, t1, 0
-	bstrpick.d	ra, t0, PTRS_PER_PMD_BITS + PMD_SHIFT - 1, PMD_SHIFT
-	alsl.d		t1, ra, t1, 3
+	PTR_L		t1, t1, 0
+	PTR_BSTRPICK	ra, t0, PTRS_PER_PMD_BITS + PMD_SHIFT - 1, PMD_SHIFT
+	PTR_ALSL	t1, ra, t1, _PMD_T_LOG2
 #endif
-	ld.d	ra, t1, 0
+	PTR_L	ra, t1, 0
 
 	/*
 	 * For huge tlb entries, pmde doesn't contain an address but
 	 * instead contains the tlb pte. Check the PAGE_HUGE bit and
 	 * see if we need to jump to huge tlb processing.
 	 */
-	rotri.d	ra, ra, _PAGE_HUGE_SHIFT + 1
+	PTR_ROTRI	ra, ra, _PAGE_HUGE_SHIFT + 1
 	bltz	ra, tlb_huge_update_modify
 
-	rotri.d	ra, ra, 64 - (_PAGE_HUGE_SHIFT + 1)
-	bstrpick.d	t0, t0, PTRS_PER_PTE_BITS + PAGE_SHIFT - 1, PAGE_SHIFT
-	alsl.d	t1, t0, ra, _PTE_T_LOG2
+	PTR_ROTRI	ra, ra, BITS_PER_LONG - (_PAGE_HUGE_SHIFT + 1)
+	PTR_BSTRPICK	t0, t0, PTRS_PER_PTE_BITS + PAGE_SHIFT - 1, PAGE_SHIFT
+	PTR_ALSL	t1, t0, ra, _PTE_T_LOG2
 
 #ifdef CONFIG_SMP
 smp_pgtable_change_modify:
-	ll.d	t0, t1, 0
+	PTE_LL	t0, t1, 0
 #else
-	ld.d	t0, t1, 0
+	PTR_L	t0, t1, 0
 #endif
+
+#ifdef CONFIG_64BIT
 	andi	ra, t0, _PAGE_WRITE
+#else
+	PTR_LI	ra, _PAGE_WRITE
+	and	ra, t0, ra
+#endif
+
 	beqz	ra, nopage_tlb_modify
 
+#ifdef CONFIG_64BIT
 	ori	t0, t0, (_PAGE_VALID | _PAGE_DIRTY | _PAGE_MODIFIED)
+#else
+	PTR_LI	ra, (_PAGE_VALID | _PAGE_DIRTY | _PAGE_MODIFIED)
+	or	t0, ra, t0
+#endif
+
 #ifdef CONFIG_SMP
-	sc.d	t0, t1, 0
+	PTE_SC	t0, t1, 0
 	beqz	t0, smp_pgtable_change_modify
 #else
-	st.d	t0, t1, 0
+	PTR_S	t0, t1, 0
 #endif
 	tlbsrch
-	bstrins.d	t1, zero, 3, 3
-	ld.d	t0, t1, 0
-	ld.d	t1, t1, 8
+	PTR_BSTRINS	t1, zero, _PTE_T_LOG2, _PTE_T_LOG2
+	PTR_L	t0, t1, 0
+	PTR_L	t1, t1, _PTE_T_SIZE
 	csrwr	t0, LOONGARCH_CSR_TLBELO0
 	csrwr	t1, LOONGARCH_CSR_TLBELO1
 	tlbwr
@@ -430,30 +491,40 @@ smp_pgtable_change_modify:
 	csrrd	ra, EXCEPTION_KS2
 	ertn
 
-#ifdef CONFIG_64BIT
 vmalloc_modify:
 	la_abs	t1, swapper_pg_dir
 	b	vmalloc_done_modify
-#endif
 
 	/* This is the entry point of a huge page. */
 tlb_huge_update_modify:
 #ifdef CONFIG_SMP
-	ll.d	ra, t1, 0
+	PTE_LL	ra, t1, 0
 #else
-	rotri.d	ra, ra, 64 - (_PAGE_HUGE_SHIFT + 1)
+	PTR_ROTRI	ra, ra, BITS_PER_LONG - (_PAGE_HUGE_SHIFT + 1)
 #endif
+
+#ifdef CONFIG_64BIT
 	andi	t0, ra, _PAGE_WRITE
+#else
+	PTR_LI	t0, _PAGE_WRITE
+	and	t0, ra, t0
+#endif
+
 	beqz	t0, nopage_tlb_modify
 
 #ifdef CONFIG_SMP
 	ori	t0, ra, (_PAGE_VALID | _PAGE_DIRTY | _PAGE_MODIFIED)
-	sc.d	t0, t1, 0
+	PTE_SC	t0, t1, 0
 	beqz	t0, tlb_huge_update_modify
 	ori	t0, ra, (_PAGE_VALID | _PAGE_DIRTY | _PAGE_MODIFIED)
 #else
+#ifdef CONFIG_64BIT
 	ori	t0, ra, (_PAGE_VALID | _PAGE_DIRTY | _PAGE_MODIFIED)
-	st.d	t0, t1, 0
+#else
+	PTR_LI	t0, (_PAGE_VALID | _PAGE_DIRTY | _PAGE_MODIFIED)
+	or	t0, ra, t0
+#endif
+	PTR_S	t0, t1, 0
 #endif
 	csrrd	ra, LOONGARCH_CSR_ASID
 	csrrd	t1, LOONGARCH_CSR_BADV
@@ -473,28 +544,28 @@ tlb_huge_update_modify:
 	xori	t0, t0, _PAGE_HUGE
 	lu12i.w	t1, _PAGE_HGLOBAL >> 12
 	and	t1, t0, t1
-	srli.d	t1, t1, (_PAGE_HGLOBAL_SHIFT - _PAGE_GLOBAL_SHIFT)
+	PTR_SRLI	t1, t1, (_PAGE_HGLOBAL_SHIFT - _PAGE_GLOBAL_SHIFT)
 	or	t0, t0, t1
 
 	move	ra, t0
 	csrwr	ra, LOONGARCH_CSR_TLBELO0
 
 	/* Convert to entrylo1 */
-	addi.d	t1, zero, 1
-	slli.d	t1, t1, (HPAGE_SHIFT - 1)
-	add.d	t0, t0, t1
+	PTR_ADDI	t1, zero, 1
+	PTR_SLLI	t1, t1, (HPAGE_SHIFT - 1)
+	PTR_ADD	t0, t0, t1
 	csrwr	t0, LOONGARCH_CSR_TLBELO1
 
 	/* Set huge page tlb entry size */
-	addu16i.d	t0, zero, (CSR_TLBIDX_PS >> 16)
-	addu16i.d	t1, zero, (PS_HUGE_SIZE << (CSR_TLBIDX_PS_SHIFT - 16))
+	PTR_LI	t0, (CSR_TLBIDX_PS >> 16) << 16
+	PTR_LI	t1, (PS_HUGE_SIZE << (CSR_TLBIDX_PS_SHIFT))
 	csrxchg	t1, t0, LOONGARCH_CSR_TLBIDX
 
 	tlbfill
 
 	/* Reset default page size */
-	addu16i.d	t0, zero, (CSR_TLBIDX_PS >> 16)
-	addu16i.d	t1, zero, (PS_DEFAULT_SIZE << (CSR_TLBIDX_PS_SHIFT - 16))
+	PTR_LI	t0, (CSR_TLBIDX_PS >> 16) << 16
+	PTR_LI	t1, (PS_DEFAULT_SIZE << (CSR_TLBIDX_PS_SHIFT))
 	csrxchg	t1, t0, LOONGARCH_CSR_TLBIDX
 
 	csrrd	t0, EXCEPTION_KS0
@@ -517,6 +588,44 @@ SYM_CODE_START(handle_tlb_modify_ptw)
 	jr	t0
 SYM_CODE_END(handle_tlb_modify_ptw)
 
+#ifdef CONFIG_32BIT
+SYM_CODE_START(handle_tlb_refill)
+	UNWIND_HINT_UNDEFINED
+	csrwr	t0, EXCEPTION_KS0
+	csrwr	t1, EXCEPTION_KS1
+	csrwr	ra, EXCEPTION_KS2
+	li.w	ra, 0x1fffffff
+
+	csrrd	t0, LOONGARCH_CSR_PGD
+	csrrd	t1, LOONGARCH_CSR_TLBRBADV
+	srli.w	t1, t1, PGDIR_SHIFT
+	slli.w	t1, t1, 0x2
+	add.w	t0, t0, t1
+	and	t0, t0, ra
+
+	ld.w	t0, t0, 0
+	csrrd	t1, LOONGARCH_CSR_TLBRBADV
+	slli.w	t1, t1, (32 - PGDIR_SHIFT)
+	srli.w	t1, t1, (32 - PGDIR_SHIFT + PAGE_SHIFT + 1)
+	slli.w	t1, t1, (0x2 + 1)
+	add.w	t0, t0, t1
+	and	t0, t0, ra
+
+	ld.w	t1, t0, 0x0
+	csrwr	t1, LOONGARCH_CSR_TLBRELO0
+
+	ld.w	t1, t0, 0x4
+	csrwr	t1, LOONGARCH_CSR_TLBRELO1
+
+	tlbfill
+	csrrd	t0, EXCEPTION_KS0
+	csrrd	t1, EXCEPTION_KS1
+	csrrd	ra, EXCEPTION_KS2
+	ertn
+SYM_CODE_END(handle_tlb_refill)
+#endif
+
+#ifdef CONFIG_64BIT
 SYM_CODE_START(handle_tlb_refill)
 	UNWIND_HINT_UNDEFINED
 	csrwr	t0, LOONGARCH_CSR_TLBRSAVE
@@ -534,3 +643,4 @@ SYM_CODE_START(handle_tlb_refill)
 	csrrd	t0, LOONGARCH_CSR_TLBRSAVE
 	ertn
 SYM_CODE_END(handle_tlb_refill)
+#endif
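Note: the whole change set leans on one idiom: explicit 64-bit (".d") instructions and hard-coded 8-byte strides are replaced with width-generic macros that expand differently under CONFIG_32BIT and CONFIG_64BIT, so page.S, tlb.c and tlbex.S stay single-source. A minimal sketch of that selection pattern follows, assuming MIPS-style definitions in arch/loongarch/include/asm/asm.h; the exact spellings below are illustrative assumptions, not the kernel's verbatim code.

    /* Sketch only: the CONFIG_32BIT/CONFIG_64BIT selection this diff
     * relies on; not the kernel's verbatim definitions. */
    #ifdef CONFIG_32BIT
    #define LONGSIZE	4		/* sizeof(long) */
    #define PTRLOG	2		/* log2(sizeof(void *)) */
    #define LONG_L	ld.w		/* load a native long */
    #define LONG_S	st.w		/* store a native long */
    #define PTR_ADD	add.w
    #define PTR_ADDI	addi.w
    #define csr_write	csr_write32
    #else
    #define LONGSIZE	8
    #define PTRLOG	3
    #define LONG_L	ld.d
    #define LONG_S	st.d
    #define PTR_ADD	add.d
    #define PTR_ADDI	addi.d
    #define csr_write	csr_write64
    #endif

Under definitions like these, "LONG_S zero, a0, (LONGSIZE * 1)" assembles to "st.d zero, a0, 8" on 64-bit and "st.w zero, a0, 4" on 32-bit, which is why the page.S hunks can drop their hard-coded 8-byte offsets, and why PTRS_PER_*_BITS becomes (PAGE_SHIFT - PTRLOG).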

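The new CONFIG_32BIT handle_tlb_refill above is a hand-unrolled two-level walk with 4-byte table entries. The address arithmetic it performs is roughly the following C rendition, written only as a reading aid for the assembly: the function name, the KSEG mask macro and the fallback PGDIR_SHIFT/PAGE_SHIFT values are illustrative assumptions, not kernel code.

    /* Hypothetical C rendition of the 32-bit refill walk above. */
    #ifndef PGDIR_SHIFT
    #define PGDIR_SHIFT	22	/* assumed: two-level, 4 KiB pages */
    #endif
    #ifndef PAGE_SHIFT
    #define PAGE_SHIFT	12
    #endif
    #define KSEG_PHYS_MASK	0x1fffffffu	/* the li.w ra, 0x1fffffff mask */

    static unsigned int tlb_refill_walk32(unsigned int pgd,  /* CSR.PGD */
                                          unsigned int badv, /* CSR.TLBRBADV */
                                          unsigned int *relo0,
                                          unsigned int *relo1)
    {
            unsigned int pmde, ptep;

            /* Level 1: 4-byte PGD entries indexed by badv[31:PGDIR_SHIFT] */
            pmde = (pgd + (badv >> PGDIR_SHIFT) * 4) & KSEG_PHYS_MASK;
            pmde = *(unsigned int *)pmde;   /* entry holds the PTE table base */

            /* Level 2: keep badv[PGDIR_SHIFT-1 : PAGE_SHIFT+1] and scale by 8,
             * so the walk lands on an even/odd pair of 4-byte PTEs -- this is
             * the slli/srli/slli (0x2 + 1) sequence in the handler */
            ptep = badv << (32 - PGDIR_SHIFT);
            ptep >>= 32 - PGDIR_SHIFT + PAGE_SHIFT + 1;
            ptep = (pmde + (ptep << 3)) & KSEG_PHYS_MASK;

            *relo0 = ((unsigned int *)ptep)[0];     /* -> CSR.TLBRELO0 */
            *relo1 = ((unsigned int *)ptep)[1];     /* -> CSR.TLBRELO1 */
            return ptep;
    }

The even/odd pairing mirrors what tlbfill expects: TLBRELO0 and TLBRELO1 describe two adjacent pages sharing one TLB entry, which is also why the PTE index drops its lowest bit before being scaled.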