2023-12-11 d2ccde1c8e90d38cee87a1b0309ad2827f3fd30d
kernel/kernel/trace/trace_preemptirq.c
@@ -9,81 +9,149 @@
 #include <linux/uaccess.h>
 #include <linux/module.h>
 #include <linux/ftrace.h>
+#include <linux/kprobes.h>
 #include "trace.h"
 
 #define CREATE_TRACE_POINTS
 #include <trace/events/preemptirq.h>
+#undef CREATE_TRACE_POINTS
+#include <trace/hooks/preemptirq.h>
 
 #ifdef CONFIG_TRACE_IRQFLAGS
 /* Per-cpu variable to prevent redundant calls when IRQs already off */
 static DEFINE_PER_CPU(int, tracing_irq_cpu);
 
+/*
+ * Like trace_hardirqs_on() but without the lockdep invocation. This is
+ * used in the low level entry code where the ordering vs. RCU is important
+ * and lockdep uses a staged approach which splits the lockdep hardirq
+ * tracking into a RCU on and a RCU off section.
+ */
+void trace_hardirqs_on_prepare(void)
+{
+	if (this_cpu_read(tracing_irq_cpu)) {
+		if (!in_nmi()) {
+			trace_irq_enable(CALLER_ADDR0, CALLER_ADDR1);
+			trace_android_rvh_irqs_enable(CALLER_ADDR0,
+						      CALLER_ADDR1);
+		}
+		tracer_hardirqs_on(CALLER_ADDR0, CALLER_ADDR1);
+		this_cpu_write(tracing_irq_cpu, 0);
+	}
+}
+EXPORT_SYMBOL(trace_hardirqs_on_prepare);
+NOKPROBE_SYMBOL(trace_hardirqs_on_prepare);
+
 void trace_hardirqs_on(void)
 {
 	if (this_cpu_read(tracing_irq_cpu)) {
-		if (!in_nmi())
+		if (!in_nmi()) {
 			trace_irq_enable_rcuidle(CALLER_ADDR0, CALLER_ADDR1);
+			trace_android_rvh_irqs_enable(CALLER_ADDR0,
+						      CALLER_ADDR1);
+		}
 		tracer_hardirqs_on(CALLER_ADDR0, CALLER_ADDR1);
 		this_cpu_write(tracing_irq_cpu, 0);
 	}
 
+	lockdep_hardirqs_on_prepare(CALLER_ADDR0);
 	lockdep_hardirqs_on(CALLER_ADDR0);
 }
 EXPORT_SYMBOL(trace_hardirqs_on);
+NOKPROBE_SYMBOL(trace_hardirqs_on);
 
-void trace_hardirqs_off(void)
+/*
+ * Like trace_hardirqs_off() but without the lockdep invocation. This is
+ * used in the low level entry code where the ordering vs. RCU is important
+ * and lockdep uses a staged approach which splits the lockdep hardirq
+ * tracking into a RCU on and a RCU off section.
+ */
+void trace_hardirqs_off_finish(void)
 {
 	if (!this_cpu_read(tracing_irq_cpu)) {
 		this_cpu_write(tracing_irq_cpu, 1);
 		tracer_hardirqs_off(CALLER_ADDR0, CALLER_ADDR1);
-		if (!in_nmi())
-			trace_irq_disable_rcuidle(CALLER_ADDR0, CALLER_ADDR1);
+		if (!in_nmi()) {
+			trace_irq_disable(CALLER_ADDR0, CALLER_ADDR1);
+			trace_android_rvh_irqs_disable(CALLER_ADDR0,
+						       CALLER_ADDR1);
+		}
 	}
 
-	lockdep_hardirqs_off(CALLER_ADDR0);
 }
-EXPORT_SYMBOL(trace_hardirqs_off);
+EXPORT_SYMBOL(trace_hardirqs_off_finish);
+NOKPROBE_SYMBOL(trace_hardirqs_off_finish);
 
-__visible void trace_hardirqs_on_caller(unsigned long caller_addr)
-{
-	if (this_cpu_read(tracing_irq_cpu)) {
-		if (!in_nmi())
-			trace_irq_enable_rcuidle(CALLER_ADDR0, caller_addr);
-		tracer_hardirqs_on(CALLER_ADDR0, caller_addr);
-		this_cpu_write(tracing_irq_cpu, 0);
-	}
-
-	lockdep_hardirqs_on(CALLER_ADDR0);
-}
-EXPORT_SYMBOL(trace_hardirqs_on_caller);
-
-__visible void trace_hardirqs_off_caller(unsigned long caller_addr)
+void trace_hardirqs_off(void)
 {
 	lockdep_hardirqs_off(CALLER_ADDR0);
 
 	if (!this_cpu_read(tracing_irq_cpu)) {
 		this_cpu_write(tracing_irq_cpu, 1);
+		tracer_hardirqs_off(CALLER_ADDR0, CALLER_ADDR1);
+		if (!in_nmi()) {
+			trace_irq_disable_rcuidle(CALLER_ADDR0, CALLER_ADDR1);
+			trace_android_rvh_irqs_disable(CALLER_ADDR0,
+						       CALLER_ADDR1);
+		}
+	}
+}
+EXPORT_SYMBOL(trace_hardirqs_off);
+NOKPROBE_SYMBOL(trace_hardirqs_off);
+
+__visible void trace_hardirqs_on_caller(unsigned long caller_addr)
+{
+	if (this_cpu_read(tracing_irq_cpu)) {
+		if (!in_nmi()) {
+			trace_irq_enable_rcuidle(CALLER_ADDR0, caller_addr);
+			trace_android_rvh_irqs_enable(CALLER_ADDR0,
+						      caller_addr);
+		}
+		tracer_hardirqs_on(CALLER_ADDR0, caller_addr);
+		this_cpu_write(tracing_irq_cpu, 0);
+	}
+
+	lockdep_hardirqs_on_prepare(caller_addr);
+	lockdep_hardirqs_on(caller_addr);
+}
+EXPORT_SYMBOL(trace_hardirqs_on_caller);
+NOKPROBE_SYMBOL(trace_hardirqs_on_caller);
+
+__visible void trace_hardirqs_off_caller(unsigned long caller_addr)
+{
+	lockdep_hardirqs_off(caller_addr);
+
+	if (!this_cpu_read(tracing_irq_cpu)) {
+		this_cpu_write(tracing_irq_cpu, 1);
 		tracer_hardirqs_off(CALLER_ADDR0, caller_addr);
-		if (!in_nmi())
+		if (!in_nmi()) {
 			trace_irq_disable_rcuidle(CALLER_ADDR0, caller_addr);
+			trace_android_rvh_irqs_disable(CALLER_ADDR0,
+						       caller_addr);
+		}
 	}
 }
 EXPORT_SYMBOL(trace_hardirqs_off_caller);
+NOKPROBE_SYMBOL(trace_hardirqs_off_caller);
 #endif /* CONFIG_TRACE_IRQFLAGS */
 
 #ifdef CONFIG_TRACE_PREEMPT_TOGGLE
 
 void trace_preempt_on(unsigned long a0, unsigned long a1)
 {
-	if (!in_nmi())
+	if (!in_nmi()) {
 		trace_preempt_enable_rcuidle(a0, a1);
+		trace_android_rvh_preempt_enable(a0, a1);
+	}
 	tracer_preempt_on(a0, a1);
 }
 
 void trace_preempt_off(unsigned long a0, unsigned long a1)
 {
-	if (!in_nmi())
+	if (!in_nmi()) {
 		trace_preempt_disable_rcuidle(a0, a1);
+		trace_android_rvh_preempt_disable(a0, a1);
+	}
 	tracer_preempt_off(a0, a1);
 }
 #endif
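
Note (not part of the commit above): the sketch below shows how a vendor module might attach probes to the restricted vendor hooks this patch calls. The register_trace_android_rvh_* helpers and the probe prototype (a leading void *data argument followed by the two unsigned long addresses passed at the call sites in the diff) are assumed from the usual Android vendor-hook convention and should be checked against trace/hooks/preemptirq.h in the target tree; the probe and module names are hypothetical.

/*
 * Hypothetical vendor-module sketch. The probe prototype is inferred from the
 * CALLER_ADDR0/CALLER_ADDR1 and a0/a1 arguments used at the hook call sites
 * above plus the void *data parameter the vendor-hook framework prepends.
 */
#include <linux/module.h>
#include <trace/hooks/preemptirq.h>

static void probe_irqs_disable(void *data, unsigned long ip,
			       unsigned long parent_ip)
{
	/* IRQs are being disabled at 'ip'; 'parent_ip' is its caller. */
}

static void probe_preempt_disable(void *data, unsigned long ip,
				  unsigned long parent_ip)
{
	/* Preemption is being disabled. */
}

static int __init preemptirq_vh_init(void)
{
	int ret;

	/* Restricted vendor hooks can be registered but never unregistered. */
	ret = register_trace_android_rvh_irqs_disable(probe_irqs_disable, NULL);
	if (ret)
		return ret;

	return register_trace_android_rvh_preempt_disable(probe_preempt_disable,
							   NULL);
}
module_init(preemptirq_vh_init);
MODULE_LICENSE("GPL");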