2024-11-01 2f529f9b558ca1c1bd74be7437a84e4711743404
kernel/kernel/trace/ring_buffer.c
@@ -3165,8 +3165,8 @@
 static __always_inline int
 trace_recursive_lock(struct ring_buffer_per_cpu *cpu_buffer)
 {
-	unsigned int val = cpu_buffer->current_context;
-	unsigned long pc = preempt_count();
+	unsigned int val;
+	unsigned long pc = preempt_count(), flags;
 	int bit;
 
 	if (!(pc & (NMI_MASK | HARDIRQ_MASK | SOFTIRQ_OFFSET)))
@@ -3175,6 +3175,10 @@
 	bit = pc & NMI_MASK ? RB_CTX_NMI :
 		pc & HARDIRQ_MASK ? RB_CTX_IRQ : RB_CTX_SOFTIRQ;
 
+	flags = hard_cond_local_irq_save();
+
+	val = cpu_buffer->current_context;
+
 	if (unlikely(val & (1 << (bit + cpu_buffer->nest)))) {
 		/*
 		 * It is possible that this was called by transitioning
@@ -3182,12 +3186,16 @@
		 * been updated yet. In this case, use the TRANSITION bit.
		 */
		bit = RB_CTX_TRANSITION;
-		if (val & (1 << (bit + cpu_buffer->nest)))
+		if (val & (1 << (bit + cpu_buffer->nest))) {
+			hard_cond_local_irq_restore(flags);
			return 1;
+		}
	}
 
 	val |= (1 << (bit + cpu_buffer->nest));
 	cpu_buffer->current_context = val;
+
+	hard_cond_local_irq_restore(flags);
 
 	return 0;
 }
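
Note on the guard being modified above: trace_recursive_lock() reserves one bit of cpu_buffer->current_context per trace context (NMI, hard IRQ, softirq, normal), shifted by cpu_buffer->nest, and treats a second entry from the same context as recursion; the TRANSITION bit absorbs one false positive while preempt_count() has not caught up with a context switch. The following is a standalone sketch of that technique, not kernel code: identifiers such as ctx_guard and ctx_lock are illustrative, and only the bit layout follows the RB_CTX_* scheme visible in the diff.

	/* Standalone model of the recursion guard: one bit per trace
	 * context, shifted by 'nest'. All names here are hypothetical. */
	#include <stdio.h>

	enum { CTX_TRANSITION, CTX_NMI, CTX_IRQ, CTX_SOFTIRQ, CTX_NORMAL };

	struct ctx_guard {
		unsigned int current_context;
		unsigned int nest;
	};

	/* Returns 1 if this context is already recursing, 0 if the bit
	 * was taken. */
	static int ctx_lock(struct ctx_guard *g, int bit)
	{
		unsigned int val = g->current_context;

		if (val & (1U << (bit + g->nest))) {
			/* May be a not-yet-visible context transition:
			 * allow one extra level via the TRANSITION bit
			 * before reporting recursion. */
			bit = CTX_TRANSITION;
			if (val & (1U << (bit + g->nest)))
				return 1;
		}
		g->current_context = val | (1U << (bit + g->nest));
		return 0;
	}

	int main(void)
	{
		struct ctx_guard g = { 0, 0 };

		printf("%d\n", ctx_lock(&g, CTX_IRQ)); /* 0: IRQ bit taken   */
		printf("%d\n", ctx_lock(&g, CTX_IRQ)); /* 0: TRANSITION used */
		printf("%d\n", ctx_lock(&g, CTX_IRQ)); /* 1: real recursion  */
		return 0;
	}

Running this prints 0, 0, 1: the first IRQ-context entry takes the IRQ bit, the second is absorbed by the TRANSITION bit, and the third is reported as recursion.
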
@@ -3195,8 +3203,12 @@
 static __always_inline void
 trace_recursive_unlock(struct ring_buffer_per_cpu *cpu_buffer)
 {
+	unsigned long flags;
+
+	flags = hard_cond_local_irq_save();
 	cpu_buffer->current_context &=
 		cpu_buffer->current_context - (1 << cpu_buffer->nest);
+	hard_cond_local_irq_restore(flags);
 }
 
 /* The recursive locking above uses 5 bits */
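
On the unlock side, the expression x &= x - (1 << nest) clears the lowest set bit at or above the nest position. In the mainline RB_CTX_* enum, inner contexts take lower bit numbers, so this releases the innermost held context first, which is what lets lock/unlock pair up across nesting. A minimal sketch of just that step (ctx_release is a hypothetical name):

	/* x &= x - (1 << nest) clears the lowest set bit at or above
	 * 'nest': the innermost (most recently entered) context. */
	#include <assert.h>

	static unsigned int ctx_release(unsigned int ctx, unsigned int nest)
	{
		return ctx & (ctx - (1U << nest));
	}

	int main(void)
	{
		/* nest = 0; bits 1 (NMI) and 4 (normal) held: 0b10010 */
		unsigned int ctx = (1U << 1) | (1U << 4);

		ctx = ctx_release(ctx, 0); /* drops bit 1 -> 0b10000 */
		assert(ctx == (1U << 4));
		ctx = ctx_release(ctx, 0); /* drops bit 4 -> 0 */
		assert(ctx == 0);
		return 0;
	}

As for hard_cond_local_irq_save()/hard_cond_local_irq_restore(): these are not mainline primitives but come from the interrupt-pipeline patches (I-pipe/Dovetail, as used by Xenomai). The likely motivation for the patch, stated here as my reading rather than from the commit itself, is that with pipelining enabled, out-of-band interrupts are not masked by the contexts preempt_count() tracks, so the read-modify-write of current_context must be done under hard interrupt protection; the "cond" variants presumably reduce to no-ops when pipelining is compiled out.
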