.. | .. |
---|
5 | 5 | * Copyright (C) 1996,1997,1998 Russell King. |
---|
6 | 6 | * ARM700 fix by Matthew Godbolt (linux-user@willothewisp.demon.co.uk) |
---|
7 | 7 | * nommu support by Hyok S. Choi (hyok.choi@samsung.com) |
---|
| 8 | + * Copyright (C) 2005 Stelian Pop. |
---|
8 | 9 | * |
---|
9 | 10 | * Low-level vector interface routines |
---|
10 | 11 | * |
---|
.. | .. |
---|
32 | 33 | #include "entry-header.S" |
---|
33 | 34 | #include <asm/entry-macro-multi.S> |
---|
34 | 35 | #include <asm/probes.h> |
---|
| 36 | +#include <asm/dovetail.h> |
---|
35 | 37 | |
---|
36 | 38 | /* |
---|
37 | 39 | * Interrupt handling. |
---|
38 | 40 | */ |
---|
39 | 41 | .macro irq_handler |
---|
40 | 42 | #ifdef CONFIG_GENERIC_IRQ_MULTI_HANDLER |
---|
41 | | - ldr r1, =handle_arch_irq |
---|
42 | 43 | mov r0, sp |
---|
43 | 44 | badr lr, 9997f |
---|
| 45 | +#ifdef CONFIG_IRQ_PIPELINE |
---|
| 46 | + ldr r1, =handle_arch_irq_pipelined |
---|
| 47 | + mov pc, r1 |
---|
| 48 | +#else |
---|
| 49 | + ldr r1, =handle_arch_irq |
---|
44 | 50 | ldr pc, [r1] |
---|
| 51 | +#endif |
---|
| 52 | +#elif defined(CONFIG_IRQ_PIPELINE) |
---|
| 53 | +#error "Legacy IRQ handling not pipelined" |
---|
45 | 54 | #else |
---|
46 | 55 | arch_irq_handler_default |
---|
47 | 56 | #endif |
---|
.. | .. |
---|
183 | 192 | uaccess_entry tsk, r0, r1, r2, \uaccess |
---|
184 | 193 | |
---|
185 | 194 | .if \trace |
---|
186 | | -#ifdef CONFIG_TRACE_IRQFLAGS |
---|
| 195 | +#ifdef CONFIG_IRQ_PIPELINE |
---|
| 196 | + mov r0, sp |
---|
| 197 | + bl kentry_enter_pipelined |
---|
| 198 | +#elif defined(CONFIG_TRACE_IRQFLAGS) |
---|
187 | 199 | bl trace_hardirqs_off |
---|
188 | 200 | #endif |
---|
189 | 201 | .endif |
---|
.. | .. |
---|
203 | 215 | __irq_svc: |
---|
204 | 216 | svc_entry |
---|
205 | 217 | irq_handler |
---|
| 218 | +#ifdef CONFIG_IRQ_PIPELINE |
---|
| 219 | + tst r0, r0 @ skip epilogue if oob or in-band stalled |
---|
| 220 | + beq 1f |
---|
| 221 | +#endif |
---|
206 | 222 | |
---|
207 | 223 | #ifdef CONFIG_PREEMPTION |
---|
208 | 224 | ldr r8, [tsk, #TI_PREEMPT] @ get preempt count |
---|
.. | .. |
---|
213 | 229 | blne svc_preempt |
---|
214 | 230 | #endif |
---|
215 | 231 | |
---|
| 232 | +1: |
---|
216 | 233 | svc_exit r5, irq = 1 @ return from exception |
---|
217 | 234 | UNWIND(.fnend ) |
---|
218 | 235 | ENDPROC(__irq_svc) |
---|
.. | .. |
---|
222 | 239 | #ifdef CONFIG_PREEMPTION |
---|
223 | 240 | svc_preempt: |
---|
224 | 241 | mov r8, lr |
---|
225 | | -1: bl preempt_schedule_irq @ irq en/disable is done inside |
---|
| 242 | +1: bl arm_preempt_schedule_irq @ irq en/disable is done inside |
---|
226 | 243 | ldr r0, [tsk, #TI_FLAGS] @ get new tasks TI_FLAGS |
---|
227 | 244 | tst r0, #_TIF_NEED_RESCHED |
---|
228 | 245 | reteq r8 @ go again |
---|
.. | .. |
---|
252 | 269 | #else |
---|
253 | 270 | svc_entry |
---|
254 | 271 | #endif |
---|
| 272 | +#ifdef CONFIG_DOVETAIL |
---|
| 273 | + get_thread_info tsk |
---|
| 274 | + ldr r0, [tsk, #TI_PREEMPT] @ get preempt count |
---|
| 275 | + tst r0, #TI_OOB_MASK @ oob stage? |
---|
| 276 | + beq 1f |
---|
| 277 | + mov r0, #ARM_TRAP_UNDEFINSTR |
---|
| 278 | + mov r1, sp @ r1 = &regs |
---|
| 279 | + bl __oob_trap_notify |
---|
| 280 | +1: |
---|
| 281 | +#endif |
---|
255 | 282 | |
---|
256 | 283 | mov r1, #4 @ PC correction to apply |
---|
257 | 284 | THUMB( tst r5, #PSR_T_BIT ) @ exception taken in Thumb mode? |
---|
.. | .. |
---|
261 | 288 | |
---|
262 | 289 | __und_svc_finish: |
---|
263 | 290 | get_thread_info tsk |
---|
| 291 | +#ifdef CONFIG_DOVETAIL |
---|
| 292 | + ldr r0, [tsk, #TI_PREEMPT] @ get preempt count |
---|
| 293 | + tst r0, #TI_OOB_MASK @ oob stage? |
---|
| 294 | + beq 1f |
---|
| 295 | + mov r0, #ARM_TRAP_UNDEFINSTR |
---|
| 296 | + mov r1, sp @ r1 = &regs |
---|
| 297 | + bl __oob_trap_unwind |
---|
| 298 | +1: |
---|
| 299 | +#endif |
---|
264 | 300 | ldr r5, [sp, #S_PSR] @ Get SVC cpsr |
---|
265 | 301 | svc_exit r5 @ return from exception |
---|
266 | 302 | UNWIND(.fnend ) |
---|
.. | .. |
---|
391 | 427 | |
---|
392 | 428 | .if \trace |
---|
393 | 429 | #ifdef CONFIG_TRACE_IRQFLAGS |
---|
394 | | - bl trace_hardirqs_off |
---|
| 430 | + bl trace_hardirqs_off_pipelined |
---|
395 | 431 | #endif |
---|
396 | 432 | ct_user_exit save = 0 |
---|
397 | 433 | .endif |
---|
.. | .. |
---|
427 | 463 | usr_entry |
---|
428 | 464 | kuser_cmpxchg_check |
---|
429 | 465 | irq_handler |
---|
430 | | - get_thread_info tsk |
---|
431 | 466 | mov why, #0 |
---|
| 467 | +#ifdef CONFIG_IRQ_PIPELINE |
---|
| 468 | + tst r0, r0 |
---|
| 469 | + beq fast_ret_to_user @ skip epilogue if oob (in-band cannot be stalled) |
---|
| 470 | +#endif |
---|
| 471 | + get_thread_info tsk |
---|
432 | 472 | b ret_to_user_from_irq |
---|
433 | 473 | UNWIND(.fnend ) |
---|
434 | 474 | ENDPROC(__irq_usr) |
---|
.. | .. |
---|
721 | 761 | UNWIND(.cantunwind ) |
---|
722 | 762 | get_thread_info tsk |
---|
723 | 763 | mov why, #0 |
---|
724 | | - b ret_to_user |
---|
| 764 | + ret_to_user_pipelined r1 |
---|
725 | 765 | UNWIND(.fnend ) |
---|
726 | 766 | ENDPROC(__pabt_usr) |
---|
727 | 767 | ENDPROC(ret_from_exception) |
---|