@@ -97,21 +97,27 @@
 			       struct lock_class_key *key, short inner);
 
 # define raw_spin_lock_init(lock)				\
+	LOCK_ALTERNATIVES(lock, spin_lock_init,			\
 do {								\
 	static struct lock_class_key __key;			\
 								\
-	__raw_spin_lock_init((lock), #lock, &__key, LD_WAIT_SPIN); \
-} while (0)
+	__raw_spin_lock_init(__RAWLOCK(lock), #lock, &__key, LD_WAIT_SPIN); \
+} while (0))
 
 #else
 # define raw_spin_lock_init(lock)				\
-	do { *(lock) = __RAW_SPIN_LOCK_UNLOCKED(lock); } while (0)
+	LOCK_ALTERNATIVES(lock, spin_lock_init,			\
+	do { *(__RAWLOCK(lock)) = __RAW_SPIN_LOCK_UNLOCKED(__RAWLOCK(lock)); } while (0))
 #endif
 
-#define raw_spin_is_locked(lock)	arch_spin_is_locked(&(lock)->raw_lock)
+#define raw_spin_is_locked(lock)				\
+	LOCK_ALTERNATIVES_RET(lock, spin_is_locked,		\
+		arch_spin_is_locked(&(__RAWLOCK(lock))->raw_lock))
 
 #ifdef arch_spin_is_contended
-#define raw_spin_is_contended(lock)	arch_spin_is_contended(&(lock)->raw_lock)
+#define raw_spin_is_contended(lock)				\
+	LOCK_ALTERNATIVES_RET(lock, spin_is_contended,		\
+		arch_spin_is_contended(&(__RAWLOCK(lock))->raw_lock))
 #else
 #define raw_spin_is_contended(lock)	(((void)(lock), 0))
 #endif /*arch_spin_is_contended*/
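The hunk above reroutes the raw spinlock initializers and predicates through `LOCK_ALTERNATIVES()` / `LOCK_ALTERNATIVES_RET()`, with every lock argument that reaches a `_raw_*` or `arch_*` helper wrapped in `__RAWLOCK()`. All three macros come from `<linux/spinlock_pipeline.h>` (pulled in by the last hunk of this patch), which is not part of this section, so the sketch below only shows one plausible shape for the mechanism: compile-time dispatch on the static type of the lock pointer. It compiles as plain GNU C; `raw_spinlock_t` mirrors the kernel name, while `alt_spinlock_t`, `raw_path()` and `alt_path()` are invented stand-ins, and the macro bodies are assumptions rather than the actual definitions.

```c
#include <stdio.h>

/* Stand-in lock types: raw_spinlock_t mirrors the kernel name, the
 * alternate lock class is purely hypothetical. */
typedef struct raw_spinlock { int locked; } raw_spinlock_t;
typedef struct alt_spinlock { int locked; } alt_spinlock_t;

static void raw_path(raw_spinlock_t *lock) { (void)lock; puts("raw spinlock path"); }
static void alt_path(alt_spinlock_t *lock) { (void)lock; puts("alternate lock path"); }

/* Plausible reading of __RAWLOCK(): force the raw type.  Both branches
 * of the dispatch below are type-checked even though only one survives,
 * which would explain why the patch wraps every lock argument that
 * reaches a _raw_* helper. */
#define __RAWLOCK(x)	((raw_spinlock_t *)(void *)(x))

/* Dispatch on the argument's pointer type; the test is a compile-time
 * constant, so the dead branch is discarded and no runtime check
 * remains. */
#define LOCK_ALTERNATIVES(lock, raw_form, alt_form)		\
	do {							\
		if (__builtin_types_compatible_p(typeof(lock),	\
						 raw_spinlock_t *)) \
			raw_form;				\
		else						\
			alt_form;				\
	} while (0)

int main(void)
{
	raw_spinlock_t r = { 0 };
	alt_spinlock_t a = { 0 };

	LOCK_ALTERNATIVES(&r, raw_path(__RAWLOCK(&r)),
			  alt_path((alt_spinlock_t *)(void *)(&r)));	/* raw path */
	LOCK_ALTERNATIVES(&a, raw_path(__RAWLOCK(&a)),
			  alt_path((alt_spinlock_t *)(void *)(&a)));	/* alt path */
	return 0;
}
```

The patch's real macro also takes the operation name (`spin_lock_init`, `spin_lock`, ...) so the alternate branch can rebuild the call itself; a sketch of that shape follows the nested-lock hunk further down.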
@@ -220,13 +226,19 @@
  * various methods are defined as nops in the case they are not
  * required.
  */
-#define raw_spin_trylock(lock)	__cond_lock(lock, _raw_spin_trylock(lock))
+#define raw_spin_trylock(lock)					\
+	__cond_lock(lock,					\
+		LOCK_ALTERNATIVES_RET(lock,			\
+			spin_trylock, _raw_spin_trylock(__RAWLOCK(lock))))
 
-#define raw_spin_lock(lock)	_raw_spin_lock(lock)
+#define raw_spin_lock(lock)					\
+	LOCK_ALTERNATIVES(lock, spin_lock, _raw_spin_lock(__RAWLOCK(lock)))
 
 #ifdef CONFIG_DEBUG_LOCK_ALLOC
+
 # define raw_spin_lock_nested(lock, subclass) \
-	_raw_spin_lock_nested(lock, subclass)
+	LOCK_ALTERNATIVES(lock, spin_lock_nested,		\
+		_raw_spin_lock_nested(__RAWLOCK(lock), subclass), subclass)
 
 # define raw_spin_lock_nest_lock(lock, nest_lock)		\
 do {								\
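One detail in the `raw_spin_trylock()` conversion above: the dispatch sits inside `__cond_lock()` rather than around it, so sparse still sees a conditional lock acquisition whichever alternative is selected. For reference, mainline defines `__cond_lock()` roughly as below (quoted from memory from `<linux/compiler_types.h>`; verify against your tree):

```c
#ifdef __CHECKER__
/* Tell sparse the lock is acquired only when the condition holds. */
# define __cond_lock(x, c)	((c) ? ({ __acquire(x); 1; }) : 0)
#else
# define __cond_lock(x, c)	(c)
#endif
```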
@@ -239,18 +251,20 @@
  * warns about set-but-not-used variables when building with
  * CONFIG_DEBUG_LOCK_ALLOC=n and with W=1.
  */
-# define raw_spin_lock_nested(lock, subclass)			\
-	_raw_spin_lock(((void)(subclass), (lock)))
+# define raw_spin_lock_nested(lock, subclass)			\
+	LOCK_ALTERNATIVES(lock, spin_lock_nested,		\
+		_raw_spin_lock(((void)(subclass), __RAWLOCK(lock))), subclass)
 # define raw_spin_lock_nest_lock(lock, nest_lock)	_raw_spin_lock(lock)
 #endif
 
 #if defined(CONFIG_SMP) || defined(CONFIG_DEBUG_SPINLOCK)
 
-#define raw_spin_lock_irqsave(lock, flags)		\
-	do {						\
-		typecheck(unsigned long, flags);	\
-		flags = _raw_spin_lock_irqsave(lock);	\
-	} while (0)
+#define raw_spin_lock_irqsave(lock, flags)			\
+	LOCK_ALTERNATIVES(lock, spin_lock_irqsave,		\
+	do {							\
+		typecheck(unsigned long, flags);		\
+		flags = _raw_spin_lock_irqsave(__RAWLOCK(lock)); \
+	} while (0), flags)
 
 #ifdef CONFIG_DEBUG_LOCK_ALLOC
 #define raw_spin_lock_irqsave_nested(lock, flags, subclass)	\
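Both `raw_spin_lock_nested()` conversions forward `subclass` as a trailing argument after the raw form, and the irqsave family around this hunk does the same with `flags`. A plausible reading: the raw form arrives as a ready-made statement, but the alternative implementation must rebuild its own call, so it needs the original arguments too. A hypothetical variadic shape, again in plain GNU C with invented names (`alt_spinlock_t`, `alt_spin_lock_nested()`, the `alt_##op` pasting):

```c
#include <stdio.h>

typedef struct raw_spinlock { int locked; } raw_spinlock_t;
typedef struct alt_spinlock { int locked; } alt_spinlock_t;	/* stand-in */

static void alt_spin_lock_nested(alt_spinlock_t *lock, int subclass)
{
	(void)lock;
	printf("alternate nested path, subclass %d\n", subclass);
}

/* Hypothetical: run the ready-made raw form for raw_spinlock_t,
 * otherwise rebuild the call against the alternate class from the
 * operation name plus the forwarded trailing arguments. */
#define LOCK_ALTERNATIVES(lock, op, raw_form, ...)		\
	do {							\
		if (__builtin_types_compatible_p(typeof(lock),	\
						 raw_spinlock_t *)) \
			raw_form;				\
		else						\
			alt_##op((alt_spinlock_t *)(void *)(lock), \
				 ##__VA_ARGS__);		\
	} while (0)

int main(void)
{
	raw_spinlock_t r = { 0 };
	alt_spinlock_t a = { 0 };

	/* Mirrors raw_spin_lock_nested(lock, 1) after the patch; the
	 * raw forms here are trivial placeholders. */
	LOCK_ALTERNATIVES(&r, spin_lock_nested, puts("raw nested path"), 1);
	LOCK_ALTERNATIVES(&a, spin_lock_nested, (void)0, 1);
	return 0;
}
```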
@@ -268,45 +282,55 @@
 
 #else
 
-#define raw_spin_lock_irqsave(lock, flags)		\
-	do {						\
-		typecheck(unsigned long, flags);	\
-		_raw_spin_lock_irqsave(lock, flags);	\
-	} while (0)
+#define raw_spin_lock_irqsave(lock, flags)			\
+	LOCK_ALTERNATIVES(lock, spin_lock_irqsave,		\
+	do {							\
+		typecheck(unsigned long, flags);		\
+		_raw_spin_lock_irqsave(__RAWLOCK(lock), flags);	\
+	} while (0), flags)
 
 #define raw_spin_lock_irqsave_nested(lock, flags, subclass)	\
 	raw_spin_lock_irqsave(lock, flags)
 
 #endif
 
-#define raw_spin_lock_irq(lock)		_raw_spin_lock_irq(lock)
+#define raw_spin_lock_irq(lock)					\
+	LOCK_ALTERNATIVES(lock, spin_lock_irq,			\
+		_raw_spin_lock_irq(__RAWLOCK(lock)))
 #define raw_spin_lock_bh(lock)		_raw_spin_lock_bh(lock)
-#define raw_spin_unlock(lock)		_raw_spin_unlock(lock)
-#define raw_spin_unlock_irq(lock)	_raw_spin_unlock_irq(lock)
+#define raw_spin_unlock(lock)					\
+	LOCK_ALTERNATIVES(lock, spin_unlock,			\
+		_raw_spin_unlock(__RAWLOCK(lock)))
+#define raw_spin_unlock_irq(lock)				\
+	LOCK_ALTERNATIVES(lock, spin_unlock_irq,		\
+		_raw_spin_unlock_irq(__RAWLOCK(lock)))
 
-#define raw_spin_unlock_irqrestore(lock, flags)		\
-	do {							\
-		typecheck(unsigned long, flags);		\
-		_raw_spin_unlock_irqrestore(lock, flags);	\
-	} while (0)
+#define raw_spin_unlock_irqrestore(lock, flags)			\
+	LOCK_ALTERNATIVES(lock, spin_unlock_irqrestore,		\
+	do {							\
+		typecheck(unsigned long, flags);		\
+		_raw_spin_unlock_irqrestore(__RAWLOCK(lock), flags); \
+	} while (0), flags)
 #define raw_spin_unlock_bh(lock)	_raw_spin_unlock_bh(lock)
 
 #define raw_spin_trylock_bh(lock) \
 	__cond_lock(lock, _raw_spin_trylock_bh(lock))
 
 #define raw_spin_trylock_irq(lock) \
+	LOCK_ALTERNATIVES_RET(lock, spin_trylock_irq,		\
 ({ \
 	local_irq_disable(); \
-	raw_spin_trylock(lock) ? \
+	raw_spin_trylock(__RAWLOCK(lock)) ? \
 	1 : ({ local_irq_enable(); 0;  }); \
-})
+}))
 
 #define raw_spin_trylock_irqsave(lock, flags) \
+	LOCK_ALTERNATIVES_RET(lock, spin_trylock_irqsave,	\
 ({ \
 	local_irq_save(flags); \
-	raw_spin_trylock(lock) ? \
+	raw_spin_trylock(__RAWLOCK(lock)) ? \
 	1 : ({ local_irq_restore(flags); 0; }); \
-})
+}), flags)
 
 /* Include rwlock functions */
 #include <linux/rwlock.h>
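The trylock conversions above go through `LOCK_ALTERNATIVES_RET()` because a value has to come back out: the raw forms are GNU statement expressions (`({ ... })`), and the dispatcher must itself remain an expression for `if (raw_spin_trylock_irq(lock))` to keep working. A hypothetical expression-form sketch, compilable as plain GNU C; only `raw_spinlock_t` is a real kernel name, and the patch's version presumably also forwards trailing arguments (`flags`) just like the statement form:

```c
#include <stdio.h>

typedef struct raw_spinlock { int locked; } raw_spinlock_t;
typedef struct alt_spinlock { int locked; } alt_spinlock_t;	/* stand-in */

static int raw_trylock(raw_spinlock_t *lock) { (void)lock; return 1; }
static int alt_spin_trylock(alt_spinlock_t *lock) { (void)lock; return 0; }

#define __RAWLOCK(x)	((raw_spinlock_t *)(void *)(x))

/* Hypothetical expression form: __builtin_choose_expr() keeps the
 * whole construct an expression, so the selected branch's value
 * becomes the macro's value. */
#define LOCK_ALTERNATIVES_RET(lock, op, raw_form)		\
	__builtin_choose_expr(					\
		__builtin_types_compatible_p(typeof(lock),	\
					     raw_spinlock_t *),	\
		(raw_form),					\
		alt_##op((alt_spinlock_t *)(void *)(lock)))

int main(void)
{
	raw_spinlock_t r = { 0 };
	alt_spinlock_t a = { 0 };

	/* Mirrors raw_spin_trylock() after the patch: prints 1, then 0. */
	printf("%d\n", LOCK_ALTERNATIVES_RET(&r, spin_trylock,
					     raw_trylock(__RAWLOCK(&r))));
	printf("%d\n", LOCK_ALTERNATIVES_RET(&a, spin_trylock,
					     raw_trylock(__RAWLOCK(&a))));
	return 0;
}
```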
@@ -320,12 +344,20 @@
 # include <linux/spinlock_api_up.h>
 #endif
 
+/* Pull the lock types specific to the IRQ pipeline. */
+#ifdef CONFIG_IRQ_PIPELINE
+#include <linux/spinlock_pipeline.h>
+#else
+static inline void check_spinlock_context(void) { }
+#endif
+
 /*
  * Map the spin_lock functions to the raw variants for PREEMPT_RT=n
  */
 
 static __always_inline raw_spinlock_t *spinlock_check(spinlock_t *lock)
 {
+	check_spinlock_context();
 	return &lock->rlock;
 }
 
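This last hunk is the hook-up: with CONFIG_IRQ_PIPELINE enabled, `<linux/spinlock_pipeline.h>` supplies the pipeline lock types plus `check_spinlock_context()`, the stub keeps the check free otherwise, and `spinlock_check()` now runs it on every `spin_lock()` entry. That header is outside this section; presumably its version warns when a regular, in-band spinlock is about to be taken from the wrong pipeline stage. Purely a sketch, with both predicates invented (`pipeline_debug()` and `running_oob()` stand in for whatever the pipeline actually exports):

```c
/* Hypothetical CONFIG_IRQ_PIPELINE version, NOT the actual header:
 * a regular spinlock only stops in-band interrupts, so taking one
 * from the out-of-band stage would break the pipeline's ordering
 * guarantees; bark in debug builds. */
static inline void check_spinlock_context(void)
{
	WARN_ON_ONCE(pipeline_debug() && running_oob());
}
```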
|---|