From 6d685e5318e51b843ca50adeca50dc6300bf2cbb Mon Sep 17 00:00:00 2001
From: Jiri Slaby
Date: Fri, 11 Oct 2019 13:51:07 +0200
Subject: x86/asm/32: Change all ENTRY+ENDPROC to SYM_FUNC_*

These are all functions which are invoked from elsewhere, so annotate
them as global using the new SYM_FUNC_START and their ENDPROC's by
SYM_FUNC_END.

Now, ENTRY/ENDPROC can be forced to be undefined on X86, so do so.

Signed-off-by: Jiri Slaby
Signed-off-by: Borislav Petkov
Cc: Allison Randal
Cc: Andrey Ryabinin
Cc: Andy Lutomirski
Cc: Andy Shevchenko
Cc: Ard Biesheuvel
Cc: Bill Metzenthen
Cc: Boris Ostrovsky
Cc: Darren Hart
Cc: "David S. Miller"
Cc: Greg Kroah-Hartman
Cc: Herbert Xu
Cc: "H. Peter Anvin"
Cc: Ingo Molnar
Cc: linux-arch@vger.kernel.org
Cc: linux-crypto@vger.kernel.org
Cc: linux-efi
Cc: linux-efi@vger.kernel.org
Cc: linux-pm@vger.kernel.org
Cc: Mark Rutland
Cc: Matt Fleming
Cc: Pavel Machek
Cc: platform-driver-x86@vger.kernel.org
Cc: "Rafael J. Wysocki"
Cc: Thomas Gleixner
Cc: Will Deacon
Cc: x86-ml
Link: https://lkml.kernel.org/r/20191011115108.12392-28-jslaby@suse.cz
---
 arch/x86/math-emu/wm_shrx.S | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

(limited to 'arch/x86/math-emu/wm_shrx.S')

diff --git a/arch/x86/math-emu/wm_shrx.S b/arch/x86/math-emu/wm_shrx.S
index d588874eb6fb..4fc89174caf0 100644
--- a/arch/x86/math-emu/wm_shrx.S
+++ b/arch/x86/math-emu/wm_shrx.S
@@ -33,7 +33,7 @@
 |   Results returned in the 64 bit arg and eax.                              |
 +---------------------------------------------------------------------------*/

-ENTRY(FPU_shrx)
+SYM_FUNC_START(FPU_shrx)
 	push	%ebp
 	movl	%esp,%ebp
 	pushl	%esi
@@ -93,7 +93,7 @@ L_more_than_95:
 	popl	%esi
 	leave
 	ret
-ENDPROC(FPU_shrx)
+SYM_FUNC_END(FPU_shrx)


 /*---------------------------------------------------------------------------+
@@ -112,7 +112,7 @@ ENDPROC(FPU_shrx)
 |   part which has been shifted out of the arg.                              |
 |   Results returned in the 64 bit arg and eax.                              |
 +---------------------------------------------------------------------------*/
-ENTRY(FPU_shrxs)
+SYM_FUNC_START(FPU_shrxs)
 	push	%ebp
 	movl	%esp,%ebp
 	pushl	%esi
@@ -204,4 +204,4 @@ Ls_more_than_95:
 	popl	%esi
 	leave
 	ret
-ENDPROC(FPU_shrxs)
+SYM_FUNC_END(FPU_shrxs)
--
cgit
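
For readers unfamiliar with the new annotations: SYM_FUNC_START and SYM_FUNC_END
come from include/linux/linkage.h. The sketch below is a simplified approximation
of what they expand to around the time of this series, not the verbatim kernel
definitions (the real header also carries alignment handling, local/weak variants,
and the conditional that lets x86 stop providing the deprecated macros):

	/* Simplified sketch of the linkage.h annotations (approximation, not verbatim). */
	#define SYM_FUNC_START(name) \
		SYM_START(name, SYM_L_GLOBAL, SYM_A_ALIGN)	/* roughly: .globl name; <align>; name: */

	#define SYM_FUNC_END(name) \
		SYM_END(name, SYM_T_FUNC)			/* roughly: .type name, @function; .size name, .-name */

	/* The old macros survive only as deprecated wrappers for unconverted code;
	 * as the commit message notes, once no x86 user is left they can stop
	 * being defined on x86 entirely. */
	#define ENTRY(name)	SYM_FUNC_START(name)
	#define ENDPROC(name)	SYM_FUNC_END(name)

Since the deprecated ENTRY/ENDPROC are by this point plain wrappers around the
SYM_FUNC_* macros, the conversion in wm_shrx.S is mechanical: the global entry
points keep the same linkage, alignment, symbol type and size information.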