2023-12-11 d2ccde1c8e90d38cee87a1b0309ad2827f3fd30d
kernel/arch/x86/entry/thunk_64.S
@@ -1,9 +1,8 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
 /*
  * Save registers before calling assembly functions. This avoids
  * disturbance of register allocation in some inline assembly constructs.
  * Copyright 2001,2002 by Andi Kleen, SuSE Labs.
- * Added trace_hardirqs callers - Copyright 2007 Steven Rostedt, Red Hat, Inc.
- * Subject to the GNU public license, v.2. No warranty of any kind.
  */
 #include <linux/linkage.h>
 #include "calling.h"
@@ -12,9 +11,7 @@
 
 	/* rdi: arg1 ... normal C conventions. rax is saved/restored. */
 	.macro THUNK name, func, put_ret_addr_in_rdi=0
-	.globl \name
-	.type \name, @function
-\name:
+SYM_FUNC_START_NOALIGN(\name)
 	pushq %rbp
 	movq %rsp, %rbp
 
@@ -34,30 +31,17 @@
 	.endif
 
 	call \func
-	jmp .L_restore
+	jmp __thunk_restore
+SYM_FUNC_END(\name)
 	_ASM_NOKPROBE(\name)
 	.endm
 
-#ifdef CONFIG_TRACE_IRQFLAGS
-	THUNK trace_hardirqs_on_thunk,trace_hardirqs_on_caller,1
-	THUNK trace_hardirqs_off_thunk,trace_hardirqs_off_caller,1
-#endif
+	THUNK preempt_schedule_thunk, preempt_schedule
+	THUNK preempt_schedule_notrace_thunk, preempt_schedule_notrace
+	EXPORT_SYMBOL(preempt_schedule_thunk)
+	EXPORT_SYMBOL(preempt_schedule_notrace_thunk)
 
-#ifdef CONFIG_DEBUG_LOCK_ALLOC
-	THUNK lockdep_sys_exit_thunk,lockdep_sys_exit
-#endif
-
-#ifdef CONFIG_PREEMPT
-	THUNK ___preempt_schedule, preempt_schedule
-	THUNK ___preempt_schedule_notrace, preempt_schedule_notrace
-	EXPORT_SYMBOL(___preempt_schedule)
-	EXPORT_SYMBOL(___preempt_schedule_notrace)
-#endif
-
-#if defined(CONFIG_TRACE_IRQFLAGS) \
- || defined(CONFIG_DEBUG_LOCK_ALLOC) \
- || defined(CONFIG_PREEMPT)
-.L_restore:
+SYM_CODE_START_LOCAL_NOALIGN(__thunk_restore)
 	popq %r11
 	popq %r10
 	popq %r9
@@ -68,6 +52,6 @@
 	popq %rsi
 	popq %rdi
 	popq %rbp
-	ret
-	_ASM_NOKPROBE(.L_restore)
-#endif
+	RET
+	_ASM_NOKPROBE(__thunk_restore)
+SYM_CODE_END(__thunk_restore)
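
Note: for context, a minimal sketch of how C code could declare and call one of
the exported thunks, assuming normal kernel headers. The macro name below is
illustrative only; the real call sites (arch/x86/include/asm/preempt.h) may
route the call differently, e.g. through static calls.

	#include <linux/linkage.h>	/* asmlinkage */
	#include <asm/asm.h>		/* ASM_CALL_CONSTRAINT */

	/* The thunk saves and restores the caller-saved registers itself,
	 * so the inline asm call site needs no general clobber list. */
	extern asmlinkage void preempt_schedule_thunk(void);

	/* Hypothetical helper for illustration. */
	#define preempt_schedule_via_thunk() \
		asm volatile ("call preempt_schedule_thunk" : ASM_CALL_CONSTRAINT)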