Diffstat (limited to 'arch/arm64/lib/clear_user.S')
-rw-r--r--	arch/arm64/lib/clear_user.S	91
1 files changed, 50 insertions, 41 deletions
diff --git a/arch/arm64/lib/clear_user.S b/arch/arm64/lib/clear_user.S
index feb225bd4b80..de9a303b6ad0 100644
--- a/arch/arm64/lib/clear_user.S
+++ b/arch/arm64/lib/clear_user.S
@@ -1,24 +1,10 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
 /*
- * Based on arch/arm/lib/clear_user.S
- *
- * Copyright (C) 2012 ARM Ltd.
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License version 2 as
- * published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ * Copyright (C) 2021 Arm Ltd.
  */
-#include <linux/linkage.h>
 
+#include <linux/linkage.h>
 #include <asm/asm-uaccess.h>
-#include <asm/assembler.h>
 
 	.text
 
@@ -30,34 +16,57 @@
  *
  * Alignment fixed up by hardware.
  */
-ENTRY(__arch_clear_user)
-	uaccess_enable_not_uao x2, x3, x4
-	mov	x2, x1			// save the size for fixup return
+
+SYM_FUNC_START(__arch_clear_user)
+	add	x2, x0, x1
+
+#ifdef CONFIG_AS_HAS_MOPS
+	.arch_extension mops
+alternative_if_not ARM64_HAS_MOPS
+	b	.Lno_mops
+alternative_else_nop_endif
+
+USER(9f, setpt	[x0]!, x1!, xzr)
+USER(6f, setmt	[x0]!, x1!, xzr)
+USER(6f, setet	[x0]!, x1!, xzr)
+	mov	x0, #0
+	ret
+.Lno_mops:
+#endif
+
 	subs	x1, x1, #8
 	b.mi	2f
-1:
-uao_user_alternative 9f, str, sttr, xzr, x0, 8
+
+1:	.p2align 4
+USER(9f, sttr	xzr, [x0])
+	add	x0, x0, #8
 	subs	x1, x1, #8
-	b.pl	1b
-2:	adds	x1, x1, #4
-	b.mi	3f
-uao_user_alternative 9f, str, sttr, wzr, x0, 4
-	sub	x1, x1, #4
-3:	adds	x1, x1, #2
-	b.mi	4f
-uao_user_alternative 9f, strh, sttrh, wzr, x0, 2
-	sub	x1, x1, #2
-4:	adds	x1, x1, #1
-	b.mi	5f
-uao_user_alternative 9f, strb, sttrb, wzr, x0, 0
+	b.hi	1b
+USER(9f, sttr	xzr, [x2, #-8])
+	mov	x0, #0
+	ret
+
+2:	tbz	x1, #2, 3f
+USER(9f, sttr	wzr, [x0])
+USER(8f, sttr	wzr, [x2, #-4])
+	mov	x0, #0
+	ret
+
+3:	tbz	x1, #1, 4f
+USER(9f, sttrh	wzr, [x0])
+4:	tbz	x1, #0, 5f
+USER(7f, sttrb	wzr, [x2, #-1])
 5:	mov	x0, #0
-	uaccess_disable_not_uao x2, x3
 	ret
-ENDPROC(__arch_clear_user)
-EXPORT_SYMBOL(__arch_clear_user)
 
-	.section .fixup,"ax"
-	.align	2
-9:	mov	x0, x2			// return the original size
+	// Exception fixups
+6:	b.cs	9f
+	// Registers are in Option A format
+	add	x0, x0, x1
+	b	9f
+7:	sub	x0, x2, #5	// Adjust for faulting on the final byte...
+8:	add	x0, x0, #4	// ...or the second word of the 4-7 byte case
+9:	sub	x0, x2, x0
 	ret
-	.previous
+SYM_FUNC_END(__arch_clear_user)
+EXPORT_SYMBOL(__arch_clear_user)
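
To make the new clearing strategy easier to follow, here is a rough C model of the non-MOPS fallback path above (taken when the setpt/setmt/setet sequence is not available). This is an illustrative sketch only, not kernel code: the name clear_user_model is hypothetical, it assumes no store faults, and plain memset() stands in for the unprivileged sttr/sttrh/sttrb stores that the assembly wraps in USER() annotations.

#include <stddef.h>
#include <string.h>

/*
 * Sketch of the store pattern in the rewritten __arch_clear_user():
 * a word loop plus overlapping stores anchored to the end pointer,
 * replacing the old 8/4/2/1-byte fall-through chain.
 */
static void clear_user_model(unsigned char *p, size_t n)
{
	unsigned char *end = p + n;		/* add x2, x0, x1 */

	if (n >= 8) {
		/* 1: clear 8 bytes at a time until at most 8 remain */
		while (end - p > 8) {
			memset(p, 0, 8);	/* sttr xzr, [x0] */
			p += 8;			/* add x0, x0, #8 */
		}
		memset(end - 8, 0, 8);		/* overlapping sttr xzr, [x2, #-8] */
		return;
	}
	if (n & 4) {
		/* 2: 4-7 bytes, two possibly overlapping word stores */
		memset(p, 0, 4);		/* sttr wzr, [x0] */
		memset(end - 4, 0, 4);		/* sttr wzr, [x2, #-4] */
		return;
	}
	if (n & 2)				/* 3: */
		memset(p, 0, 2);		/* sttrh wzr, [x0] */
	if (n & 1)				/* 4: */
		memset(end - 1, 0, 1);		/* sttrb wzr, [x2, #-1] */
}

The point of anchoring the last store of each size class to the end pointer held in x2 is that it may overlap bytes already cleared, so no per-size loop tail is needed. On a fault the routine does not run to completion: the fixup code at labels 6-9 in the diff returns the number of bytes left uncleared, computed as x2 minus a suitably adjusted x0 (labels 7 and 8 compensate for the overlapping tail stores, label 6 handles the MOPS register state).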
