| Old | New | Code |
|---|---|---|
| .. | .. | |
| 20 | 20 | |
| 21 | 21 | `extern int spin_retry;` |
| 22 | 22 | |
| 23 | | `-#ifndef CONFIG_SMP` |
| 24 | | `-static inline bool arch_vcpu_is_preempted(int cpu) { return false; }` |
| 25 | | `-#else` |
| 26 | 23 | `bool arch_vcpu_is_preempted(int cpu);` |
| 27 | | `-#endif` |
| 28 | 24 | |
| 29 | 25 | `#define vcpu_is_preempted arch_vcpu_is_preempted` |
| 30 | 26 | |
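With the `#ifndef CONFIG_SMP` stub removed above, `arch_vcpu_is_preempted()` is always declared as a real out-of-line function, and the `vcpu_is_preempted` define always overrides the generic fallback (which simply returns false). As a reminder of why the hook exists, here is a minimal, hypothetical sketch of the kind of owner-spin loop that consults it; `keep_spinning_on()` is an illustrative name and not part of this patch, while `vcpu_is_preempted()`, `need_resched()` and `cpu_relax()` are existing kernel interfaces.

```c
#include <linux/sched.h>	/* vcpu_is_preempted(), need_resched() */
#include <asm/processor.h>	/* cpu_relax() */

/*
 * Hypothetical helper sketching how optimistic-spinning code consults
 * vcpu_is_preempted(): spinning on a lock whose holder's vCPU has been
 * preempted by the hypervisor only burns cycles, so the caller should
 * back off or sleep instead of continuing to spin.
 */
static bool keep_spinning_on(int owner_cpu)
{
	if (vcpu_is_preempted(owner_cpu))
		return false;	/* holder cannot run, stop spinning */
	if (need_resched())
		return false;	/* let this CPU reschedule */
	cpu_relax();		/* polite busy-wait hint to the CPU */
	return true;
}
```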
| Old | New | Code |
|---|---|---|
| .. | .. | |
| 89 | 85 | `static inline void arch_spin_unlock(arch_spinlock_t *lp)` |
| 90 | 86 | `{` |
| 91 | 87 | `typecheck(int, lp->lock);` |
| 92 | | `- asm volatile(` |
| | 88 | `+ asm_inline volatile(` |
| 93 | 89 | `ALTERNATIVE("", ".long 0xb2fa0070", 49) /* NIAI 7 */` |
| 94 | 90 | `" sth %1,%0\n"` |
| 95 | 91 | `: "=Q" (((unsigned short *) &lp->lock)[1])` |
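The second hunk switches the unlock sequence from `asm volatile` to `asm_inline volatile`. The likely intent, not stated in the hunk itself, is that `ALTERNATIVE()` expands to a sizeable block of section bookkeeping, so the compiler's inliner overestimates the cost of the statement; `asm inline`, spelled `asm_inline` in the kernel, makes the compiler treat the asm statement as minimum size for inlining decisions. A rough, from-memory sketch of how the kernel defines it in `include/linux/compiler_types.h` (exact guard names may differ):

```c
/*
 * Rough sketch of the asm_inline definition from include/linux/compiler_types.h.
 * CONFIG_CC_HAS_ASM_INLINE is set when the compiler understands "asm inline";
 * __inline is the bare keyword spelling, so "asm __inline" survives the
 * kernel's redefinition of "inline".
 */
#ifdef CONFIG_CC_HAS_ASM_INLINE
#define asm_inline asm __inline
#else
#define asm_inline asm		/* older compilers: plain asm, as before */
#endif
```

Either way the emitted instructions are unchanged; only the size estimate fed to the inliner differs, which keeps callers such as `arch_spin_unlock()` eligible for inlining.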