forked from ~ljy/RK356X_SDK_RELEASE

hc
2024-05-10 748e4f3d702def1a4bff191e0cf93b6a05340f01
kernel/arch/x86/kernel/ftrace_64.S
@@ -12,31 +12,18 @@
 #include <asm/frame.h>
 
 	.code64
-	.section .entry.text, "ax"
-
-#ifdef CC_USING_FENTRY
-# define function_hook	__fentry__
-EXPORT_SYMBOL(__fentry__)
-#else
-# define function_hook	mcount
-EXPORT_SYMBOL(mcount)
-#endif
+	.section .text, "ax"
 
 #ifdef CONFIG_FRAME_POINTER
-# ifdef CC_USING_FENTRY
 /* Save parent and function stack frames (rip and rbp) */
 # define MCOUNT_FRAME_SIZE	(8+16*2)
-# else
-/* Save just function stack frame (rip and rbp) */
-# define MCOUNT_FRAME_SIZE	(8+16)
-# endif
 #else
 /* No need to save a stack frame */
 # define MCOUNT_FRAME_SIZE	0
 #endif /* CONFIG_FRAME_POINTER */
 
 /* Size of stack used to save mcount regs in save_mcount_regs */
-#define MCOUNT_REG_SIZE		(SS+8 + MCOUNT_FRAME_SIZE)
+#define MCOUNT_REG_SIZE		(FRAME_SIZE + MCOUNT_FRAME_SIZE)
 
 /*
  * gcc -pg option adds a call to 'mcount' in most functions.
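The comment this hunk ends on is the crux of the file: "gcc -pg" inserts a profiling call in most functions, and since x86-64 kernels are built only with -mfentry, the CC_USING_FENTRY/mcount split above could be deleted outright. A minimal sketch of what an instrumented function looks like under those flags (function name hypothetical):

	.text
	.globl	my_traced_func
my_traced_func:
	call	__fentry__	# emitted first, before any prologue; dynamic
				# ftrace patches this to a 5-byte NOP when idle
	pushq	%rbp		# the normal frame setup only happens here
	movq	%rsp, %rbp
	xorl	%eax, %eax
	popq	%rbp
	ret

Because the call precedes the prologue, the trampoline runs on a stack holding only return addresses, which is what the MCOUNT_FRAME_SIZE bookkeeping above accounts for.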
@@ -76,17 +63,13 @@
  * fentry is called before the stack frame is set up, where as mcount
  * is called afterward.
  */
-#ifdef CC_USING_FENTRY
+
 	/* Save the parent pointer (skip orig rbp and our return address) */
 	pushq \added+8*2(%rsp)
 	pushq %rbp
 	movq %rsp, %rbp
 	/* Save the return address (now skip orig rbp, rbp and parent) */
 	pushq \added+8*3(%rsp)
-#else
-	/* Can't assume that rip is before this (unless added was zero) */
-	pushq \added+8(%rsp)
-#endif
 	pushq %rbp
 	movq %rsp, %rbp
 #endif /* CONFIG_FRAME_POINTER */
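Only the fentry flavor of the frame-pointer setup survives, and it fabricates two frames so an %rbp-chain unwinder walking through the trampoline sees both the traced function and its caller. Reconstructing the layout from the pushes above (a sketch, for \added = 0, counting the "pushq %rbp" that saved the original %rbp just before this hunk):

	# Stack after the sequence above, highest address first:
	#
	#   parent ip       original return address of the traced function
	#   function ip     return address back into the traced function
	#   original %rbp
	#   parent ip    \
	#   saved %rbp    >  fake frame for the parent
	#   function ip  \
	#   saved %rbp    >  fake frame for the traced function   <- %rbp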
@@ -94,7 +77,7 @@
 	/*
 	 * We add enough stack to save all regs.
 	 */
-	subq $(MCOUNT_REG_SIZE - MCOUNT_FRAME_SIZE), %rsp
+	subq $(FRAME_SIZE), %rsp
 	movq %rax, RAX(%rsp)
 	movq %rcx, RCX(%rsp)
 	movq %rdx, RDX(%rsp)
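Nothing changes arithmetically in this hunk: with the new definition, MCOUNT_REG_SIZE - MCOUNT_FRAME_SIZE is exactly FRAME_SIZE, the size of struct pt_regs, so the subq now names the pt_regs area directly instead of deriving it. A toy standalone version of the idea (offsets written out by hand; the real ones come from asm-offsets):

	.set	TOY_FRAME_SIZE, 21*8	# 21 eight-byte slots in x86-64 pt_regs
	.set	TOY_RAX, 10*8		# offset of pt_regs.ax
	subq	$TOY_FRAME_SIZE, %rsp	# reserve a pt_regs-sized block
	movq	%rax, TOY_RAX(%rsp)	# spill registers into their slots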
@@ -102,6 +85,7 @@
 	movq %rdi, RDI(%rsp)
 	movq %r8, R8(%rsp)
 	movq %r9, R9(%rsp)
+	movq $0, ORIG_RAX(%rsp)
 	/*
 	 * Save the original RBP. Even though the mcount ABI does not
 	 * require this, it helps out callers.
@@ -114,12 +98,7 @@
 	movq %rdx, RBP(%rsp)
 
 	/* Copy the parent address into %rsi (second parameter) */
-#ifdef CC_USING_FENTRY
 	movq MCOUNT_REG_SIZE+8+\added(%rsp), %rsi
-#else
-	/* %rdx contains original %rbp */
-	movq 8(%rdx), %rsi
-#endif
 
 	/* Move RIP to its proper location */
 	movq MCOUNT_REG_SIZE+\added(%rsp), %rdi
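With the mcount path gone, the parent address always sits at a fixed offset above the register save area, so the %rdx-based variant could go. The offsets being dereferenced here (a sketch, for \added = 0):

	# MCOUNT_REG_SIZE+8(%rsp)   parent ip    -> 2nd argument (%rsi)
	# MCOUNT_REG_SIZE(%rsp)     function ip  -> 1st argument (%rdi),
	#                           then adjusted by MCOUNT_INSN_SIZE below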
@@ -133,7 +112,11 @@
 	subq $MCOUNT_INSN_SIZE, %rdi
 	.endm
 
-.macro restore_mcount_regs
+.macro restore_mcount_regs save=0
+
+	/* ftrace_regs_caller or frame pointers require this */
+	movq RBP(%rsp), %rbp
+
 	movq R9(%rsp), %r9
 	movq R8(%rsp), %r8
 	movq RDI(%rsp), %rdi
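Two things change in restore_mcount_regs: %rbp is now reloaded unconditionally (save_mcount_regs always stores it at RBP(%rsp)), and the new \save argument lets a caller stop the final stack adjustment \save bytes short. ftrace_regs_caller below invokes "restore_mcount_regs 8" so the slot it has just rewritten stays addressable; the macro tail then effectively expands to (a sketch):

	movq	RBP(%rsp), %rbp			# unconditional now
	# ...the other register reloads, unchanged...
	addq	$MCOUNT_REG_SIZE-8, %rsp	# save=8: %rsp lands on the
						# preserved slot, not past it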
@@ -142,31 +125,29 @@
 	movq RCX(%rsp), %rcx
 	movq RAX(%rsp), %rax
 
-	/* ftrace_regs_caller can modify %rbp */
-	movq RBP(%rsp), %rbp
-
-	addq $MCOUNT_REG_SIZE, %rsp
+	addq $MCOUNT_REG_SIZE-\save, %rsp
 
 	.endm
 
 #ifdef CONFIG_DYNAMIC_FTRACE
 
-ENTRY(function_hook)
-	retq
-ENDPROC(function_hook)
+SYM_FUNC_START(__fentry__)
+	RET
+SYM_FUNC_END(__fentry__)
+EXPORT_SYMBOL(__fentry__)
 
-ENTRY(ftrace_caller)
+SYM_FUNC_START(ftrace_caller)
 	/* save_mcount_regs fills in first two parameters */
 	save_mcount_regs
 
-GLOBAL(ftrace_caller_op_ptr)
+SYM_INNER_LABEL(ftrace_caller_op_ptr, SYM_L_GLOBAL)
 	/* Load the ftrace_ops into the 3rd parameter */
 	movq function_trace_op(%rip), %rdx
 
 	/* regs go into 4th parameter (but make it NULL) */
 	movq $0, %rcx
 
-GLOBAL(ftrace_call)
+SYM_INNER_LABEL(ftrace_call, SYM_L_GLOBAL)
 	call ftrace_stub
 
 	restore_mcount_regs
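By the time the patched site at ftrace_call runs, save_mcount_regs has filled the first two SysV argument registers and the two instructions above supply the rest. What dynamic patching substitutes for the "call ftrace_stub" (tracer name hypothetical; the argument contract is read off the surrounding code):

	# %rdi = ip of the traced function (less MCOUNT_INSN_SIZE)
	# %rsi = parent ip
	# %rdx = struct ftrace_ops *   (function_trace_op)
	# %rcx = struct pt_regs *      (NULL here; real in ftrace_regs_caller)
	call	my_tracer_func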
@@ -176,22 +157,26 @@
 	 * think twice before adding any new code or changing the
 	 * layout here.
 	 */
-GLOBAL(ftrace_epilogue)
+SYM_INNER_LABEL(ftrace_caller_end, SYM_L_GLOBAL)
 
+	jmp ftrace_epilogue
+SYM_FUNC_END(ftrace_caller);
+
+SYM_FUNC_START(ftrace_epilogue)
 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
-GLOBAL(ftrace_graph_call)
+SYM_INNER_LABEL(ftrace_graph_call, SYM_L_GLOBAL)
 	jmp ftrace_stub
 #endif
 
 /*
  * This is weak to keep gas from relaxing the jumps.
- * It is also used to copy the retq for trampolines.
  */
-WEAK(ftrace_stub)
-	retq
-ENDPROC(ftrace_caller)
+SYM_INNER_LABEL_ALIGN(ftrace_stub, SYM_L_WEAK)
+	UNWIND_HINT_FUNC
+	RET
+SYM_FUNC_END(ftrace_epilogue)
 
-ENTRY(ftrace_regs_caller)
+SYM_FUNC_START(ftrace_regs_caller)
 	/* Save the current flags before any operations that can change them */
 	pushfq
 
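The rename from an early ftrace_epilogue label to an explicit ftrace_caller_end marks the copy boundary for generated trampolines: the arch code (create_trampoline() in arch/x86/kernel/ftrace.c) copies everything from ftrace_caller up to that label into an allocated trampoline and supplies its own return, which is also why the old "copy the retq" remark is dropped. ftrace_stub stays weak so gas will not relax the jumps to it. The resulting split, roughly:

	# generated trampoline (sketch):
	#	copy of [ftrace_caller .. ftrace_caller_end)
	#	ret			# supplied by the arch code
	#
	# in-kernel ftrace_caller instead continues with:
	#	jmp	ftrace_epilogue	# shared graph hook + weak ftrace_stub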
@@ -199,7 +184,7 @@
 	save_mcount_regs 8
 	/* save_mcount_regs fills in first two parameters */
 
-GLOBAL(ftrace_regs_caller_op_ptr)
+SYM_INNER_LABEL(ftrace_regs_caller_op_ptr, SYM_L_GLOBAL)
 	/* Load the ftrace_ops into the 3rd parameter */
 	movq function_trace_op(%rip), %rdx
 
@@ -228,7 +213,7 @@
 	/* regs go into 4th parameter */
 	leaq (%rsp), %rcx
 
-GLOBAL(ftrace_regs_call)
+SYM_INNER_LABEL(ftrace_regs_call, SYM_L_GLOBAL)
 	call ftrace_stub
 
 	/* Copy flags back to SS, to restore them */
@@ -247,8 +232,19 @@
 	movq R10(%rsp), %r10
 	movq RBX(%rsp), %rbx
 
-	restore_mcount_regs
+	movq ORIG_RAX(%rsp), %rax
+	movq %rax, MCOUNT_REG_SIZE-8(%rsp)
 
+	/*
+	 * If ORIG_RAX is anything but zero, make this a call to that.
+	 * See arch_ftrace_set_direct_caller().
+	 */
+	movq ORIG_RAX(%rsp), %rax
+	testq %rax, %rax
+SYM_INNER_LABEL(ftrace_regs_caller_jmp, SYM_L_GLOBAL)
+	jnz	1f
+
+	restore_mcount_regs
 	/* Restore flags */
 	popfq
 
@@ -258,16 +254,26 @@
 	 * The trampoline will add the code to jump
 	 * to the return.
 	 */
-GLOBAL(ftrace_regs_caller_end)
-
+SYM_INNER_LABEL(ftrace_regs_caller_end, SYM_L_GLOBAL)
 	jmp ftrace_epilogue
 
-ENDPROC(ftrace_regs_caller)
+	/* Swap the flags with orig_rax */
+1:	movq MCOUNT_REG_SIZE(%rsp), %rdi
+	movq %rdi, MCOUNT_REG_SIZE-8(%rsp)
+	movq %rax, MCOUNT_REG_SIZE(%rsp)
+
+	restore_mcount_regs 8
+	/* Restore flags */
+	popfq
+	UNWIND_HINT_FUNC
+	jmp ftrace_epilogue
+
+SYM_FUNC_END(ftrace_regs_caller)
 
 
 #else /* ! CONFIG_DYNAMIC_FTRACE */
 
-ENTRY(function_hook)
+SYM_FUNC_START(__fentry__)
 	cmpq $ftrace_stub, ftrace_trace_function
 	jnz trace
 
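The new 1: path is the payoff for zeroing ORIG_RAX in save_mcount_regs: when a handler has stored a trampoline address in that slot (see arch_ftrace_set_direct_caller()), the flags and the target are shuffled so that the eventual RET lands in the target. Walking the stack through the swap (a sketch; the flags sit just above the save area because pushfq ran before "save_mcount_regs 8"):

	# before the swap:                  after restore_mcount_regs 8 + popfq:
	#   parent ip                        parent ip
	#   function ip                      function ip
	#   saved flags                      direct target        <- %rsp
	#   [MCOUNT_REG_SIZE save area]
	#
	# ftrace_epilogue then ends in RET, which pops the direct target and
	# "returns" into it without an indirect jump; the direct trampoline's
	# own return later lands on function ip.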
@@ -280,8 +286,8 @@
 	jnz ftrace_graph_caller
 #endif
 
-GLOBAL(ftrace_stub)
-	retq
+SYM_INNER_LABEL(ftrace_stub, SYM_L_GLOBAL)
+	RET
 
 trace:
 	/* save_mcount_regs fills in first two parameters */
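Here and throughout the file, retq becomes the RET macro, which follows the active mitigations instead of always emitting a bare ret. From arch/x86/include/asm/linkage.h on kernels of this vintage, approximately:

	# CONFIG_RETHUNK:		jmp __x86_return_thunk
	# CONFIG_SLS (no rethunk):	ret; int3
	# neither:			ret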
@@ -294,37 +300,31 @@
 	 * function tracing is enabled.
 	 */
 	movq ftrace_trace_function, %r8
-	CALL_NOSPEC %r8
+	CALL_NOSPEC r8
 	restore_mcount_regs
 
 	jmp fgraph_trace
-ENDPROC(function_hook)
+SYM_FUNC_END(__fentry__)
+EXPORT_SYMBOL(__fentry__)
 #endif /* CONFIG_DYNAMIC_FTRACE */
 
 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
-ENTRY(ftrace_graph_caller)
+SYM_FUNC_START(ftrace_graph_caller)
 	/* Saves rbp into %rdx and fills first parameter */
 	save_mcount_regs
 
-#ifdef CC_USING_FENTRY
 	leaq MCOUNT_REG_SIZE+8(%rsp), %rsi
 	movq $0, %rdx	/* No framepointers needed */
-#else
-	/* Save address of the return address of traced function */
-	leaq 8(%rdx), %rsi
-	/* ftrace does sanity checks against frame pointers */
-	movq (%rdx), %rdx
-#endif
 	call prepare_ftrace_return
 
 	restore_mcount_regs
 
-	retq
-ENDPROC(ftrace_graph_caller)
+	RET
+SYM_FUNC_END(ftrace_graph_caller)
 
-ENTRY(return_to_handler)
+SYM_CODE_START(return_to_handler)
 	UNWIND_HINT_EMPTY
-	subq $24, %rsp
+	subq $16, %rsp
 
 	/* Save the return values */
 	movq %rax, (%rsp)
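The CALL_NOSPEC change is purely syntactic: the macro now takes a bare register name rather than an operand, because under CONFIG_RETPOLINE it pastes the name into a thunk symbol. Approximately what the site expands to:

	# with retpolines enabled:
	#	call	__x86_indirect_thunk_r8
	# without (plus an ANNOTATE_RETPOLINE_SAFE marker):
	#	call	*%r8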
@@ -336,7 +336,19 @@
 	movq %rax, %rdi
 	movq 8(%rsp), %rdx
 	movq (%rsp), %rax
-	addq $24, %rsp
-	JMP_NOSPEC %rdi
-END(return_to_handler)
+
+	addq $16, %rsp
+	/*
+	 * Jump back to the old return address. This cannot be JMP_NOSPEC rdi
+	 * since IBT would demand that contain ENDBR, which simply isn't so for
+	 * return addresses. Use a retpoline here to keep the RSB balanced.
+	 */
+	ANNOTATE_INTRA_FUNCTION_CALL
+	call .Ldo_rop
+	int3
+.Ldo_rop:
+	mov %rdi, (%rsp)
+	UNWIND_HINT_FUNC
+	RET
+SYM_CODE_END(return_to_handler)
 #endif
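The added comment explains the constraint; the gadget itself is worth isolating. It is a push-free "jump to a register" that satisfies IBT (no indirect jmp to a target lacking ENDBR) and keeps call/ret counts matched for the return stack buffer. A standalone sketch of the same trick (symbol name hypothetical; the kernel version also carries objtool annotations):

	.text
	.globl	jump_to_rdi
jump_to_rdi:
	call	1f		# pushes the address of 1f, filling one RSB entry
	int3			# speculation trap if the ret below mispredicts
1:
	mov	%rdi, (%rsp)	# overwrite the just-pushed return address
	ret			# consumes the RSB entry and "returns" to %rdi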