Diffstat (limited to 'arch/x86/kernel/ftrace_64.S')
-rw-r--r--  arch/x86/kernel/ftrace_64.S  203
1 file changed, 131 insertions(+), 72 deletions(-)
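
Before the diff itself: the trampolines below enter C code with the regular x86-64 SysV argument registers, which is what the "3rd parameter"/"4th parameter" comments in the assembly mean (%rdi = ip, %rsi = parent_ip, %rdx = struct ftrace_ops *, %rcx = struct ftrace_regs *). A minimal sketch of the C-side handler these callers invoke, assuming the four-argument ftrace_func_t of recent kernels; my_handler, my_ops and the "schedule" filter target are illustrative only:

	#include <linux/ftrace.h>
	#include <linux/module.h>
	#include <linux/string.h>

	static void my_handler(unsigned long ip, unsigned long parent_ip,
			       struct ftrace_ops *op, struct ftrace_regs *fregs)
	{
		/* A full pt_regs is only guaranteed with FTRACE_OPS_FL_SAVE_REGS */
		struct pt_regs *regs = ftrace_get_regs(fregs);

		if (regs)
			pr_info("hit %pS called from %pS\n",
				(void *)ip, (void *)parent_ip);
	}

	static struct ftrace_ops my_ops = {
		.func	= my_handler,
		.flags	= FTRACE_OPS_FL_SAVE_REGS,	/* full pt_regs wanted */
	};

	static int __init my_init(void)
	{
		int ret = ftrace_set_filter(&my_ops, (unsigned char *)"schedule",
					    strlen("schedule"), 0);

		return ret ? ret : register_ftrace_function(&my_ops);
	}

	static void __exit my_exit(void)
	{
		unregister_ftrace_function(&my_ops);
	}

	module_init(my_init);
	module_exit(my_exit);
	MODULE_LICENSE("GPL");

Setting FTRACE_OPS_FL_SAVE_REGS is what routes an ops through ftrace_regs_caller below rather than the lighter ftrace_caller.
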
diff --git a/arch/x86/kernel/ftrace_64.S b/arch/x86/kernel/ftrace_64.S
index 083a3da7bb73..a132608265f6 100644
--- a/arch/x86/kernel/ftrace_64.S
+++ b/arch/x86/kernel/ftrace_64.S
@@ -3,10 +3,12 @@
* Copyright (C) 2014 Steven Rostedt, Red Hat Inc
*/
+#include <linux/export.h>
+#include <linux/cfi_types.h>
#include <linux/linkage.h>
+#include <asm/asm-offsets.h>
#include <asm/ptrace.h>
#include <asm/ftrace.h>
-#include <asm/export.h>
#include <asm/nospec-branch.h>
#include <asm/unwind_hints.h>
#include <asm/frame.h>
@@ -129,27 +131,60 @@
.endm
+SYM_TYPED_FUNC_START(ftrace_stub)
+ CALL_DEPTH_ACCOUNT
+ RET
+SYM_FUNC_END(ftrace_stub)
+
+#ifdef CONFIG_FUNCTION_GRAPH_TRACER
+SYM_TYPED_FUNC_START(ftrace_stub_graph)
+ CALL_DEPTH_ACCOUNT
+ RET
+SYM_FUNC_END(ftrace_stub_graph)
+#endif
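
These stubs use SYM_TYPED_FUNC_START rather than SYM_FUNC_START because they are reached through function pointers (ftrace_func_t for ftrace_stub, the graph return hook for ftrace_stub_graph), and with CONFIG_CFI_CLANG every indirect call verifies a type hash stored at the callee; an untyped assembly symbol would trap. A userspace analogue of that check, a sketch to build with clang -fsanitize=kcfi (handler_t and stub are illustrative names):

	#include <stdio.h>

	typedef void (*handler_t)(unsigned long ip, unsigned long parent_ip);

	/* The compiler records a hash of "void (unsigned long, unsigned long)"
	 * next to stub(); the indirect call below checks it before jumping.
	 */
	static void stub(unsigned long ip, unsigned long parent_ip) { }

	int main(void)
	{
		handler_t h = stub;

		h(0, 0);	/* kcfi-instrumented indirect call: hashes match */
		puts("typed indirect call passed");
		return 0;
	}
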
+
#ifdef CONFIG_DYNAMIC_FTRACE
SYM_FUNC_START(__fentry__)
- retq
+ ANNOTATE_NOENDBR
+ CALL_DEPTH_ACCOUNT
+ RET
SYM_FUNC_END(__fentry__)
EXPORT_SYMBOL(__fentry__)
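
The call sites that land in __fentry__ come from the compiler: kernels built for dynamic ftrace instrument every traceable function so that its first instruction is a call to __fentry__, which ftrace later patches to a NOP or to one of the trampolines below. The stub above is only what runs before any patching. A sketch of the instrumentation itself (userspace, compile with gcc -O2 -pg -mfentry -S):

	/* demo.c - what -pg -mfentry does to an ordinary function */
	int add(int a, int b)
	{
		return a + b;
	}

	/*
	 * Generated assembly (abridged):
	 *
	 * add:
	 *	call	__fentry__	# inserted before the prologue
	 *	leal	(%rdi,%rsi), %eax
	 *	ret
	 */
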
SYM_FUNC_START(ftrace_caller)
+ ANNOTATE_NOENDBR
/* save_mcount_regs fills in first two parameters */
save_mcount_regs
+ CALL_DEPTH_ACCOUNT
+
+ /* Stack - skipping return address of ftrace_caller */
+ leaq MCOUNT_REG_SIZE+8(%rsp), %rcx
+ movq %rcx, RSP(%rsp)
+
SYM_INNER_LABEL(ftrace_caller_op_ptr, SYM_L_GLOBAL)
+ ANNOTATE_NOENDBR
/* Load the ftrace_ops into the 3rd parameter */
movq function_trace_op(%rip), %rdx
- /* regs go into 4th parameter (but make it NULL) */
- movq $0, %rcx
+ /* regs go into 4th parameter */
+ leaq (%rsp), %rcx
+
+ /* Only ops with REGS flag set should have CS register set */
+ movq $0, CS(%rsp)
+
+ /* Account for the function call below */
+ CALL_DEPTH_ACCOUNT
SYM_INNER_LABEL(ftrace_call, SYM_L_GLOBAL)
+ ANNOTATE_NOENDBR
call ftrace_stub
+ /* Handlers can change the RIP */
+ movq RIP(%rsp), %rax
+ movq %rax, MCOUNT_REG_SIZE(%rsp)
+
restore_mcount_regs
/*
@@ -158,25 +193,13 @@ SYM_INNER_LABEL(ftrace_call, SYM_L_GLOBAL)
* layout here.
*/
SYM_INNER_LABEL(ftrace_caller_end, SYM_L_GLOBAL)
-
- jmp ftrace_epilogue
+ ANNOTATE_NOENDBR
+ RET
SYM_FUNC_END(ftrace_caller);
-
-SYM_FUNC_START(ftrace_epilogue)
-#ifdef CONFIG_FUNCTION_GRAPH_TRACER
-SYM_INNER_LABEL(ftrace_graph_call, SYM_L_GLOBAL)
- jmp ftrace_stub
-#endif
-
-/*
- * This is weak to keep gas from relaxing the jumps.
- * It is also used to copy the retq for trampolines.
- */
-SYM_INNER_LABEL_ALIGN(ftrace_stub, SYM_L_WEAK)
- retq
-SYM_FUNC_END(ftrace_epilogue)
+STACK_FRAME_NON_STANDARD_FP(ftrace_caller)
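
The RIP fixup above ("Handlers can change the RIP") is what allows a handler to redirect execution: whatever it writes into the saved instruction pointer is copied over ftrace_caller's return slot, so the final RET lands in the replacement function. A hedged sketch in the style of the live-patching code; my_replacement_func is hypothetical, and the helper is spelled ftrace_regs_set_instruction_pointer() in recent trees (older ones use ftrace_instruction_pointer_set()):

	#include <linux/ftrace.h>

	extern void my_replacement_func(void);	/* hypothetical replacement body */

	static void redirect_handler(unsigned long ip, unsigned long parent_ip,
				     struct ftrace_ops *op, struct ftrace_regs *fregs)
	{
		/* ftrace_caller copies this back into the return slot */
		ftrace_regs_set_instruction_pointer(fregs,
					(unsigned long)my_replacement_func);
	}

	static struct ftrace_ops redirect_ops = {
		.func	= redirect_handler,
		/* IPMODIFY declares the intent to rewrite the saved RIP */
		.flags	= FTRACE_OPS_FL_SAVE_REGS | FTRACE_OPS_FL_IPMODIFY,
	};
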
SYM_FUNC_START(ftrace_regs_caller)
+ ANNOTATE_NOENDBR
/* Save the current flags before any operations that can change them */
pushfq
@@ -184,7 +207,10 @@ SYM_FUNC_START(ftrace_regs_caller)
save_mcount_regs 8
/* save_mcount_regs fills in first two parameters */
+ CALL_DEPTH_ACCOUNT
+
SYM_INNER_LABEL(ftrace_regs_caller_op_ptr, SYM_L_GLOBAL)
+ ANNOTATE_NOENDBR
/* Load the ftrace_ops into the 3rd parameter */
movq function_trace_op(%rip), %rdx
@@ -213,7 +239,11 @@ SYM_INNER_LABEL(ftrace_regs_caller_op_ptr, SYM_L_GLOBAL)
/* regs go into 4th parameter */
leaq (%rsp), %rcx
+ /* Account for the function call below */
+ CALL_DEPTH_ACCOUNT
+
SYM_INNER_LABEL(ftrace_regs_call, SYM_L_GLOBAL)
+ ANNOTATE_NOENDBR
call ftrace_stub
/* Copy flags back to SS, to restore them */
@@ -239,56 +269,74 @@ SYM_INNER_LABEL(ftrace_regs_call, SYM_L_GLOBAL)
* If ORIG_RAX is anything but zero, make this a call to that.
* See arch_ftrace_set_direct_caller().
*/
- movq ORIG_RAX(%rsp), %rax
testq %rax, %rax
- jz 1f
+SYM_INNER_LABEL(ftrace_regs_caller_jmp, SYM_L_GLOBAL)
+ ANNOTATE_NOENDBR
+ jnz 1f

- /* Swap the flags with orig_rax */
- movq MCOUNT_REG_SIZE(%rsp), %rdi
- movq %rdi, MCOUNT_REG_SIZE-8(%rsp)
- movq %rax, MCOUNT_REG_SIZE(%rsp)
+ restore_mcount_regs
+ /* Restore flags */
+ popfq

- restore_mcount_regs 8
+ /*
+ * The trampoline will add the return.
+ */
+SYM_INNER_LABEL(ftrace_regs_caller_end, SYM_L_GLOBAL)
+ ANNOTATE_NOENDBR
+ RET
+
+1:
+ testb $1, %al
+ jz 2f
+ andq $0xfffffffffffffffe, %rax
+ movq %rax, MCOUNT_REG_SIZE+8(%rsp)
+ restore_mcount_regs
/* Restore flags */
popfq
+ RET

-SYM_INNER_LABEL(ftrace_regs_caller_ret, SYM_L_GLOBAL);
- UNWIND_HINT_RET_OFFSET
- jmp ftrace_epilogue
+ /* Swap the flags with orig_rax */
+2: movq MCOUNT_REG_SIZE(%rsp), %rdi
+ movq %rdi, MCOUNT_REG_SIZE-8(%rsp)
+ movq %rax, MCOUNT_REG_SIZE(%rsp)

-1: restore_mcount_regs
+ restore_mcount_regs 8
/* Restore flags */
popfq
+ UNWIND_HINT_FUNC
/*
- * As this jmp to ftrace_epilogue can be a short jump
- * it must not be copied into the trampoline.
- * The trampoline will add the code to jump
- * to the return.
+ * The above left an extra return value on the stack; effectively
+ * doing a tail-call without using a register. This PUSH;RET
+ * pattern unbalances the RSB, inject a pointless CALL to rebalance.
*/
-SYM_INNER_LABEL(ftrace_regs_caller_end, SYM_L_GLOBAL)
- jmp ftrace_epilogue
+ ANNOTATE_INTRA_FUNCTION_CALL
+ CALL .Ldo_rebalance
+ int3
+.Ldo_rebalance:
+ add $8, %rsp
+ ALTERNATIVE __stringify(RET), \
+ __stringify(ANNOTATE_UNRET_SAFE; ret; int3), \
+ X86_FEATURE_CALL_DEPTH
SYM_FUNC_END(ftrace_regs_caller)
+STACK_FRAME_NON_STANDARD_FP(ftrace_regs_caller)
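
The ORIG_RAX test above implements direct calls: a handler requests a custom trampoline through arch_ftrace_set_direct_caller(), which on x86 boils down to storing the trampoline address in the saved pt_regs, and ftrace_regs_caller then exits through that address instead of returning normally. A simplified sketch modelled on arch/x86/include/asm/ftrace.h (the low-bit tagging consumed by the 1:/2: paths above is elided):

	#include <linux/ftrace.h>
	#include <asm/ptrace.h>

	/* Simplified: make ORIG_RAX non-zero so that ftrace_regs_caller
	 * "emulates a call" and exits via addr, as the comment above says.
	 */
	static inline void set_direct_caller(struct pt_regs *regs,
					     unsigned long addr)
	{
		regs->orig_ax = addr;
	}
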
+SYM_FUNC_START(ftrace_stub_direct_tramp)
+ ANNOTATE_NOENDBR
+ CALL_DEPTH_ACCOUNT
+ RET
+SYM_FUNC_END(ftrace_stub_direct_tramp)
#else /* ! CONFIG_DYNAMIC_FTRACE */
SYM_FUNC_START(__fentry__)
+ ANNOTATE_NOENDBR
+ CALL_DEPTH_ACCOUNT
+
cmpq $ftrace_stub, ftrace_trace_function
jnz trace
-
-fgraph_trace:
-#ifdef CONFIG_FUNCTION_GRAPH_TRACER
- cmpq $ftrace_stub, ftrace_graph_return
- jnz ftrace_graph_caller
-
- cmpq $ftrace_graph_entry_stub, ftrace_graph_entry
- jnz ftrace_graph_caller
-#endif
-
-SYM_INNER_LABEL(ftrace_stub, SYM_L_GLOBAL)
- retq
+ RET
trace:
/* save_mcount_regs fills in first two parameters */
@@ -304,40 +352,51 @@ trace:
CALL_NOSPEC r8
restore_mcount_regs
- jmp fgraph_trace
+ jmp ftrace_stub
SYM_FUNC_END(__fentry__)
EXPORT_SYMBOL(__fentry__)
+STACK_FRAME_NON_STANDARD_FP(__fentry__)
+
#endif /* CONFIG_DYNAMIC_FTRACE */
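
For contrast: the !CONFIG_DYNAMIC_FTRACE __fentry__ above really executes the cmpq against $ftrace_stub on every single function entry, which is exactly the overhead dynamic ftrace avoids by patching call sites. Its dispatch, rendered as conceptual C (the declarations stand in for the asm symbols; not meant to be wired into a build):

	#include <linux/ftrace.h>

	extern ftrace_func_t ftrace_trace_function;	/* the cmpq target */
	void ftrace_stub(unsigned long ip, unsigned long parent_ip,
			 struct ftrace_ops *op, struct ftrace_regs *fregs);

	void fentry_slowpath(unsigned long ip, unsigned long parent_ip,
			     struct ftrace_ops *op, struct ftrace_regs *fregs)
	{
		/* every function entry pays for this load and compare */
		if (ftrace_trace_function != ftrace_stub)
			ftrace_trace_function(ip, parent_ip, op, fregs);
	}
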
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
-SYM_FUNC_START(ftrace_graph_caller)
- /* Saves rbp into %rdx and fills first parameter */
- save_mcount_regs
-
- leaq MCOUNT_REG_SIZE+8(%rsp), %rsi
- movq $0, %rdx /* No framepointers needed */
- call prepare_ftrace_return
-
- restore_mcount_regs
+SYM_CODE_START(return_to_handler)
+ UNWIND_HINT_UNDEFINED
+ ANNOTATE_NOENDBR

- retq
-SYM_FUNC_END(ftrace_graph_caller)
+ /* Restore return_to_handler value that got eaten by previous ret instruction. */
+ subq $8, %rsp
+ UNWIND_HINT_FUNC

-SYM_CODE_START(return_to_handler)
- UNWIND_HINT_EMPTY
- subq $24, %rsp
+ /* Save ftrace_regs for function exit context */
+ subq $(FRAME_SIZE), %rsp

- /* Save the return values */
- movq %rax, (%rsp)
- movq %rdx, 8(%rsp)
- movq %rbp, %rdi
+ movq %rax, RAX(%rsp)
+ movq %rdx, RDX(%rsp)
+ movq %rbp, RBP(%rsp)
+ movq %rsp, RSP(%rsp)
+ movq %rsp, %rdi
call ftrace_return_to_handler
movq %rax, %rdi
- movq 8(%rsp), %rdx
- movq (%rsp), %rax
- addq $24, %rsp
- JMP_NOSPEC rdi
+ movq RDX(%rsp), %rdx
+ movq RAX(%rsp), %rax
+
+ addq $(FRAME_SIZE) + 8, %rsp
+
+ /*
+ * Jump back to the old return address. This cannot be JMP_NOSPEC rdi
+ * since IBT would demand that contain ENDBR, which simply isn't so for
+ * return addresses. Use a retpoline here to keep the RSB balanced.
+ */
+ ANNOTATE_INTRA_FUNCTION_CALL
+ call .Ldo_rop
+ int3
+.Ldo_rop:
+ mov %rdi, (%rsp)
+ ALTERNATIVE __stringify(RET), \
+ __stringify(ANNOTATE_UNRET_SAFE; ret; int3), \
+ X86_FEATURE_CALL_DEPTH
SYM_CODE_END(return_to_handler)
#endif
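
return_to_handler closes the loop for the graph tracer: at function entry the tracer replaced the callee's return address with &return_to_handler and stashed the real one on a per-task return stack; ftrace_return_to_handler() pops it back (the %rax moved into %rdi above), and the call .Ldo_rop / mov %rdi,(%rsp) / RET sequence jumps there without an indirect JMP, keeping both IBT and the RSB satisfied. A userspace toy of that return-hijack bookkeeping, illustrative names throughout:

	#include <stdio.h>

	#define DEPTH 64

	/* Toy per-task return stack; the kernel keeps this in task_struct */
	static unsigned long shadow[DEPTH];
	static int top;

	/* entry side: stash the real return address, install the hook */
	static unsigned long hijack_return(unsigned long real_ret)
	{
		shadow[top++] = real_ret;
		return 0xdead;		/* stand-in for &return_to_handler */
	}

	/* exit side: what ftrace_return_to_handler() hands back to the asm */
	static unsigned long restore_return(void)
	{
		return shadow[--top];
	}

	int main(void)
	{
		printf("callee now returns to %#lx\n", hijack_return(0x401000));
		printf("return_to_handler jumps back to %#lx\n", restore_return());
		return 0;
	}
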