/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2000 Anton Blanchard (anton@linuxcare.com)
 *
 * This file implements mcount(), which is used to collect profiling data.
 * This can also be tweaked for kernel stack overflow detection.
 */

#include <linux/linkage.h>
#include <asm/export.h>

/*
 * This is the main variant and is called by C code.  GCC's -pg option
 * automatically instruments every C function with a call to this.
 */
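
/*
 * For illustration only, a rough sketch of what a -pg instrumented
 * SPARC64 function looks like (the exact sequence emitted varies with
 * compiler version and options):
 *
 *	func:
 *		save	%sp, -176, %sp	! minimal 176-byte sparc64 frame
 *		call	_mcount		! inserted by -pg
 *		 nop
 *		...
 *
 * Because the call is made after the save, _mcount runs in the traced
 * function's register window: %o7 holds the address of the call site
 * inside the traced function, and %i7 holds that function's own return
 * address (i.e. its caller).
 */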

	.text
	.align		32
	.globl		_mcount
	.type		_mcount,#function
	EXPORT_SYMBOL(_mcount)
	.globl		mcount
	.type		mcount,#function
_mcount:
mcount:
#ifdef CONFIG_FUNCTION_TRACER
#ifdef CONFIG_DYNAMIC_FTRACE
	/* Do nothing, the retl/nop below is all we need.  */
#else
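	/*
	 * Static ftrace: the call sites are not patched, so every traced
	 * function ends up here and we check at run time whether a tracer
	 * is registered.  ftrace_trace_function points at ftrace_stub
	 * when nothing is registered; otherwise call it with
	 * %o0 = call site in the traced function (old %o7) and
	 * %o1 = the traced function's return address (old %i7).
	 */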
	sethi		%hi(ftrace_trace_function), %g1
	sethi		%hi(ftrace_stub), %g2
	ldx		[%g1 + %lo(ftrace_trace_function)], %g1
	or		%g2, %lo(ftrace_stub), %g2
	cmp		%g1, %g2
	be,pn		%icc, 1f
	 mov		%i7, %g3
	save		%sp, -176, %sp
	mov		%g3, %o1
	jmpl		%g1, %o7
	 mov		%i7, %o0
	ret
	 restore
	/* not reached */
1:
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
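	/*
	 * Function graph tracing: branch to the graph path if either
	 * ftrace_graph_return has been changed from ftrace_stub or
	 * ftrace_graph_entry has been changed from
	 * ftrace_graph_entry_stub.  %g2 still holds ftrace_stub from the
	 * comparison above.
	 */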
	sethi		%hi(ftrace_graph_return), %g1
	ldx		[%g1 + %lo(ftrace_graph_return)], %g3
	cmp		%g2, %g3
	bne,pn		%xcc, 5f
	 sethi		%hi(ftrace_graph_entry_stub), %g2
	sethi		%hi(ftrace_graph_entry), %g1
	or		%g2, %lo(ftrace_graph_entry_stub), %g2
	ldx		[%g1 + %lo(ftrace_graph_entry)], %g1
	cmp		%g1, %g2
	be,pt		%xcc, 2f
	 nop
5:	mov		%i7, %g2
	mov		%fp, %g3
	save		%sp, -176, %sp
	mov		%g2, %l0
	ba,pt		%xcc, ftrace_graph_caller
	 mov		%g3, %l1
#endif
2:
#endif
#endif
	retl
	 nop
	.size		_mcount,.-_mcount
	.size		mcount,.-mcount

#ifdef CONFIG_FUNCTION_TRACER
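	/*
	 * ftrace_stub is the do-nothing tracer: the ftrace function
	 * pointers point here when no tracer is registered, which is
	 * what the comparisons in _mcount above test for.
	 */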
	.globl		ftrace_stub
	.type		ftrace_stub,#function
ftrace_stub:
	retl
	 nop
	.size		ftrace_stub,.-ftrace_stub
#ifdef CONFIG_DYNAMIC_FTRACE
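	/*
	 * Dynamic ftrace: the mcount call sites in traced functions are
	 * rewritten at run time to call ftrace_caller instead, and the
	 * "call ftrace_stub" instructions at ftrace_call and
	 * ftrace_graph_call below are patched to call the active tracer
	 * and ftrace_graph_caller respectively.
	 */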
	.globl		ftrace_caller
	.type		ftrace_caller,#function
ftrace_caller:
	mov		%i7, %g2	/* traced function's return address */
	mov		%fp, %g3	/* and its frame pointer */
	save		%sp, -176, %sp
	mov		%g2, %o1	/* parent ip */
	mov		%g2, %l0	/* saved for ftrace_graph_caller */
	mov		%g3, %l1
	.globl		ftrace_call
ftrace_call:
	call		ftrace_stub
	 mov		%i7, %o0	/* ip: call site in the traced function */
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.globl		ftrace_graph_call
ftrace_graph_call:
	call		ftrace_stub
	 nop
#endif
	ret
	 restore
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.size		ftrace_graph_call,.-ftrace_graph_call
#endif
	.size		ftrace_call,.-ftrace_call
	.size		ftrace_caller,.-ftrace_caller
#endif
#endif

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
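/*
 * Called with %l0 = the traced function's return address (parent),
 * %i7 = the call site in the traced function and %l1 = its frame
 * pointer.  prepare_ftrace_return() returns the address the traced
 * function should return to instead (normally return_to_handler);
 * the restore in the delay slot writes it back, minus the usual
 * "ret = jmpl %i7 + 8" offset, into the traced function's %i7.
 */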
ENTRY(ftrace_graph_caller)
	mov		%l0, %o0
	mov		%i7, %o1
	call		prepare_ftrace_return
	 mov		%l1, %o2
	ret
	 restore	%o0, -8, %i7
END(ftrace_graph_caller)
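
/*
 * The traced function "returns" here when the graph tracer has hooked
 * its return address.  ftrace_return_to_handler() pops the original
 * return address recorded for this frame and hands it back in %o0;
 * jump to it with the same +8 adjustment a normal "ret" applies to
 * %i7.
 */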
ENTRY(return_to_handler)
	save		%sp, -176, %sp
	call		ftrace_return_to_handler
	 mov		%fp, %o0
	jmpl		%o0 + 8, %g0
	 restore
END(return_to_handler)
#endif