Diffstat (limited to 'arch/x86/lib/getuser.S')
| -rw-r--r-- | arch/x86/lib/getuser.S | 183 |
1 file changed, 106 insertions, 77 deletions
diff --git a/arch/x86/lib/getuser.S b/arch/x86/lib/getuser.S
index 74fdff968ea3..9d5654b8a72a 100644
--- a/arch/x86/lib/getuser.S
+++ b/arch/x86/lib/getuser.S
@@ -26,118 +26,147 @@
  * as they get called from within inline assembly.
  */
 
+#include <linux/export.h>
 #include <linux/linkage.h>
+#include <linux/objtool.h>
 #include <asm/page_types.h>
 #include <asm/errno.h>
 #include <asm/asm-offsets.h>
 #include <asm/thread_info.h>
 #include <asm/asm.h>
 #include <asm/smap.h>
-#include <asm/export.h>
+#include <asm/runtime-const.h>
+
+#define ASM_BARRIER_NOSPEC ALTERNATIVE "", "lfence", X86_FEATURE_LFENCE_RDTSC
+
+.macro check_range size:req
+.if IS_ENABLED(CONFIG_X86_64)
+	RUNTIME_CONST_PTR USER_PTR_MAX, rdx
+	cmp %rdx, %rax
+	cmova %rdx, %rax
+.else
+	cmp $TASK_SIZE_MAX-\size+1, %eax
+	jae .Lbad_get_user
+	sbb %edx, %edx /* array_index_mask_nospec() */
+	and %edx, %eax
+.endif
+.endm
+
+.macro UACCESS op src dst
+1:	\op \src,\dst
+	_ASM_EXTABLE_UA(1b, __get_user_handle_exception)
+.endm
+
 	.text
-ENTRY(__get_user_1)
-	mov PER_CPU_VAR(current_task), %_ASM_DX
-	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
-	jae bad_get_user
-	sbb %_ASM_DX, %_ASM_DX /* array_index_mask_nospec() */
-	and %_ASM_DX, %_ASM_AX
+SYM_FUNC_START(__get_user_1)
+	ANNOTATE_NOENDBR
+	check_range size=1
 	ASM_STAC
-1:	movzbl (%_ASM_AX),%edx
+	UACCESS movzbl (%_ASM_AX),%edx
 	xor %eax,%eax
 	ASM_CLAC
-	ret
-ENDPROC(__get_user_1)
+	RET
+SYM_FUNC_END(__get_user_1)
 EXPORT_SYMBOL(__get_user_1)
 
-ENTRY(__get_user_2)
-	add $1,%_ASM_AX
-	jc bad_get_user
-	mov PER_CPU_VAR(current_task), %_ASM_DX
-	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
-	jae bad_get_user
-	sbb %_ASM_DX, %_ASM_DX /* array_index_mask_nospec() */
-	and %_ASM_DX, %_ASM_AX
+SYM_FUNC_START(__get_user_2)
+	ANNOTATE_NOENDBR
+	check_range size=2
 	ASM_STAC
-2:	movzwl -1(%_ASM_AX),%edx
+	UACCESS movzwl (%_ASM_AX),%edx
 	xor %eax,%eax
 	ASM_CLAC
-	ret
-ENDPROC(__get_user_2)
+	RET
+SYM_FUNC_END(__get_user_2)
 EXPORT_SYMBOL(__get_user_2)
 
-ENTRY(__get_user_4)
-	add $3,%_ASM_AX
-	jc bad_get_user
-	mov PER_CPU_VAR(current_task), %_ASM_DX
-	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
-	jae bad_get_user
-	sbb %_ASM_DX, %_ASM_DX /* array_index_mask_nospec() */
-	and %_ASM_DX, %_ASM_AX
+SYM_FUNC_START(__get_user_4)
+	ANNOTATE_NOENDBR
+	check_range size=4
 	ASM_STAC
-3:	movl -3(%_ASM_AX),%edx
+	UACCESS movl (%_ASM_AX),%edx
 	xor %eax,%eax
 	ASM_CLAC
-	ret
-ENDPROC(__get_user_4)
+	RET
+SYM_FUNC_END(__get_user_4)
 EXPORT_SYMBOL(__get_user_4)
 
-ENTRY(__get_user_8)
-#ifdef CONFIG_X86_64
-	add $7,%_ASM_AX
-	jc bad_get_user
-	mov PER_CPU_VAR(current_task), %_ASM_DX
-	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
-	jae bad_get_user
-	sbb %_ASM_DX, %_ASM_DX /* array_index_mask_nospec() */
-	and %_ASM_DX, %_ASM_AX
+SYM_FUNC_START(__get_user_8)
+	ANNOTATE_NOENDBR
+#ifndef CONFIG_X86_64
+	xor %ecx,%ecx
+#endif
+	check_range size=8
 	ASM_STAC
-4:	movq -7(%_ASM_AX),%rdx
-	xor %eax,%eax
-	ASM_CLAC
-	ret
+#ifdef CONFIG_X86_64
+	UACCESS movq (%_ASM_AX),%rdx
 #else
-	add $7,%_ASM_AX
-	jc bad_get_user_8
-	mov PER_CPU_VAR(current_task), %_ASM_DX
-	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
-	jae bad_get_user_8
-	sbb %_ASM_DX, %_ASM_DX /* array_index_mask_nospec() */
-	and %_ASM_DX, %_ASM_AX
-	ASM_STAC
-4:	movl -7(%_ASM_AX),%edx
-5:	movl -3(%_ASM_AX),%ecx
+	UACCESS movl (%_ASM_AX),%edx
+	UACCESS movl 4(%_ASM_AX),%ecx
+#endif
 	xor %eax,%eax
 	ASM_CLAC
-	ret
-#endif
-ENDPROC(__get_user_8)
+	RET
+SYM_FUNC_END(__get_user_8)
 EXPORT_SYMBOL(__get_user_8)
 
+/* .. and the same for __get_user, just without the range checks */
+SYM_FUNC_START(__get_user_nocheck_1)
+	ANNOTATE_NOENDBR
+	ASM_STAC
+	ASM_BARRIER_NOSPEC
+	UACCESS movzbl (%_ASM_AX),%edx
+	xor %eax,%eax
+	ASM_CLAC
+	RET
+SYM_FUNC_END(__get_user_nocheck_1)
+EXPORT_SYMBOL(__get_user_nocheck_1)
 
-bad_get_user:
-	xor %edx,%edx
-	mov $(-EFAULT),%_ASM_AX
+SYM_FUNC_START(__get_user_nocheck_2)
+	ANNOTATE_NOENDBR
+	ASM_STAC
+	ASM_BARRIER_NOSPEC
+	UACCESS movzwl (%_ASM_AX),%edx
+	xor %eax,%eax
 	ASM_CLAC
-	ret
-END(bad_get_user)
+	RET
+SYM_FUNC_END(__get_user_nocheck_2)
+EXPORT_SYMBOL(__get_user_nocheck_2)
 
-#ifdef CONFIG_X86_32
-bad_get_user_8:
-	xor %edx,%edx
-	xor %ecx,%ecx
-	mov $(-EFAULT),%_ASM_AX
+SYM_FUNC_START(__get_user_nocheck_4)
+	ANNOTATE_NOENDBR
+	ASM_STAC
+	ASM_BARRIER_NOSPEC
+	UACCESS movl (%_ASM_AX),%edx
+	xor %eax,%eax
 	ASM_CLAC
-	ret
-END(bad_get_user_8)
-#endif
+	RET
+SYM_FUNC_END(__get_user_nocheck_4)
+EXPORT_SYMBOL(__get_user_nocheck_4)
 
-	_ASM_EXTABLE_UA(1b, bad_get_user)
-	_ASM_EXTABLE_UA(2b, bad_get_user)
-	_ASM_EXTABLE_UA(3b, bad_get_user)
+SYM_FUNC_START(__get_user_nocheck_8)
+	ANNOTATE_NOENDBR
+	ASM_STAC
+	ASM_BARRIER_NOSPEC
 #ifdef CONFIG_X86_64
-	_ASM_EXTABLE_UA(4b, bad_get_user)
+	UACCESS movq (%_ASM_AX),%rdx
 #else
-	_ASM_EXTABLE_UA(4b, bad_get_user_8)
-	_ASM_EXTABLE_UA(5b, bad_get_user_8)
+	xor %ecx,%ecx
+	UACCESS movl (%_ASM_AX),%edx
+	UACCESS movl 4(%_ASM_AX),%ecx
 #endif
+	xor %eax,%eax
+	ASM_CLAC
+	RET
+SYM_FUNC_END(__get_user_nocheck_8)
+EXPORT_SYMBOL(__get_user_nocheck_8)
+
+
+SYM_CODE_START_LOCAL(__get_user_handle_exception)
+	ASM_CLAC
+.Lbad_get_user:
+	xor %edx,%edx
+	mov $(-EFAULT),%_ASM_AX
+	RET
+SYM_CODE_END(__get_user_handle_exception)
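
For reference, the hardening logic introduced by this diff can be summarised in C. The sketch below is illustrative userspace code only, not kernel API: `clamp_user_ptr`, `mask_user_ptr`, `user_ptr_max` and `limit` are hypothetical stand-ins for the kernel's USER_PTR_MAX runtime constant and the 32-bit TASK_SIZE_MAX bound, and the real assembly stays branchless (cmp/cmova and cmp/sbb/and) where these C ternaries may compile to branches.

```c
#include <stdint.h>

/*
 * Illustrative sketch (not kernel code) of the two speculation-hardening
 * strategies used by check_range in the diff above.
 */

/*
 * 64-bit path: instead of branching on the bound, the pointer is clamped
 * (cmp + cmova). An out-of-range pointer is forced to user_ptr_max, an
 * address the kernel arranges to be unmapped, so the following access
 * faults rather than dereferencing an attacker-chosen address.
 */
static inline uint64_t clamp_user_ptr(uint64_t ptr, uint64_t user_ptr_max)
{
	return ptr > user_ptr_max ? user_ptr_max : ptr;
}

/*
 * 32-bit path: cmp/sbb/and builds an all-ones or all-zeroes mask from the
 * carry flag (the array_index_mask_nospec() idiom) and ANDs it into the
 * pointer, so a mispredicted "valid" path speculatively sees a NULL-ish
 * pointer instead of the original out-of-range value.
 */
static inline uint32_t mask_user_ptr(uint32_t ptr, uint32_t limit)
{
	uint32_t mask = (ptr < limit) ? ~0u : 0u;	/* sbb %edx,%edx */
	return ptr & mask;				/* and %edx,%eax */
}
```

In the helpers themselves this check runs before ASM_STAC and the UACCESS load, so the access either reads valid user memory or faults into __get_user_handle_exception, which clears %edx and returns -EFAULT in %eax.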
