.. | .. |
---|
| 1 | +/* SPDX-License-Identifier: GPL-2.0-only */ |
---|
1 | 2 | /* |
---|
2 | 3 | * linux/arch/arm/kernel/entry-armv.S |
---|
3 | 4 | * |
---|
4 | 5 | * Copyright (C) 1996,1997,1998 Russell King. |
---|
5 | 6 | * ARM700 fix by Matthew Godbolt (linux-user@willothewisp.demon.co.uk) |
---|
6 | 7 | * nommu support by Hyok S. Choi (hyok.choi@samsung.com) |
---|
7 | | - * |
---|
8 | | - * This program is free software; you can redistribute it and/or modify |
---|
9 | | - * it under the terms of the GNU General Public License version 2 as |
---|
10 | | - * published by the Free Software Foundation. |
---|
11 | 8 | * |
---|
12 | 9 | * Low-level vector interface routines |
---|
13 | 10 | * |
---|
.. | .. |
---|
207 | 204 | svc_entry |
---|
208 | 205 | irq_handler |
---|
209 | 206 | |
---|
210 | | -#ifdef CONFIG_PREEMPT |
---|
| 207 | +#ifdef CONFIG_PREEMPTION |
---|
211 | 208 | ldr r8, [tsk, #TI_PREEMPT] @ get preempt count |
---|
212 | | - ldr r0, [tsk, #TI_FLAGS] @ get flags |
---|
213 | 209 | teq r8, #0 @ if preempt count != 0 |
---|
| 210 | + bne 1f @ return from exception |
---|
| 211 | + ldr r0, [tsk, #TI_FLAGS] @ get flags |
---|
| 212 | + tst r0, #_TIF_NEED_RESCHED @ if NEED_RESCHED is set |
---|
| 213 | + blne svc_preempt @ preempt! |
---|
| 214 | + |
---|
| 215 | + ldr r8, [tsk, #TI_PREEMPT_LAZY] @ get preempt lazy count |
---|
| 216 | + teq r8, #0 @ if preempt lazy count != 0 |
---|
214 | 217 | movne r0, #0 @ force flags to 0 |
---|
215 | | - tst r0, #_TIF_NEED_RESCHED |
---|
| 218 | + tst r0, #_TIF_NEED_RESCHED_LAZY |
---|
216 | 219 | blne svc_preempt |
---|
| 220 | +1: |
---|
217 | 221 | #endif |
---|
218 | 222 | |
---|
219 | 223 | svc_exit r5, irq = 1 @ return from exception |
---|
.. | .. |
---|
222 | 226 | |
---|
223 | 227 | .ltorg |
---|
224 | 228 | |
---|
225 | | -#ifdef CONFIG_PREEMPT |
---|
| 229 | +#ifdef CONFIG_PREEMPTION |
---|
226 | 230 | svc_preempt: |
---|
227 | 231 | mov r8, lr |
---|
228 | 232 | 1: bl preempt_schedule_irq @ irq en/disable is done inside |
---|
229 | 233 | ldr r0, [tsk, #TI_FLAGS] @ get new tasks TI_FLAGS |
---|
230 | 234 | tst r0, #_TIF_NEED_RESCHED |
---|
| 235 | + bne 1b |
---|
| 236 | + tst r0, #_TIF_NEED_RESCHED_LAZY |
---|
231 | 237 | reteq r8 @ go again |
---|
232 | | - b 1b |
---|
| 238 | + ldr r0, [tsk, #TI_PREEMPT_LAZY] @ get preempt lazy count |
---|
| 239 | + teq r0, #0 @ if preempt lazy count != 0 |
---|
| 240 | + beq 1b |
---|
| 241 | + ret r8 @ go again |
---|
| 242 | + |
---|
233 | 243 | #endif |
---|
234 | 244 | |
---|
235 | 245 | __und_fault: |
---|
.. | .. |
---|
255 | 265 | #else |
---|
256 | 266 | svc_entry |
---|
257 | 267 | #endif |
---|
258 | | - @ |
---|
259 | | - @ call emulation code, which returns using r9 if it has emulated |
---|
260 | | - @ the instruction, or the more conventional lr if we are to treat |
---|
261 | | - @ this as a real undefined instruction |
---|
262 | | - @ |
---|
263 | | - @ r0 - instruction |
---|
264 | | - @ |
---|
265 | | -#ifndef CONFIG_THUMB2_KERNEL |
---|
266 | | - ldr r0, [r4, #-4] |
---|
267 | | -#else |
---|
268 | | - mov r1, #2 |
---|
269 | | - ldrh r0, [r4, #-2] @ Thumb instruction at LR - 2 |
---|
270 | | - cmp r0, #0xe800 @ 32-bit instruction if xx >= 0 |
---|
271 | | - blo __und_svc_fault |
---|
272 | | - ldrh r9, [r4] @ bottom 16 bits |
---|
273 | | - add r4, r4, #2 |
---|
274 | | - str r4, [sp, #S_PC] |
---|
275 | | - orr r0, r9, r0, lsl #16 |
---|
276 | | -#endif |
---|
277 | | - badr r9, __und_svc_finish |
---|
278 | | - mov r2, r4 |
---|
279 | | - bl call_fpe |
---|
280 | 268 | |
---|
281 | 269 | mov r1, #4 @ PC correction to apply |
---|
282 | | -__und_svc_fault: |
---|
| 270 | + THUMB( tst r5, #PSR_T_BIT ) @ exception taken in Thumb mode? |
---|
| 271 | + THUMB( movne r1, #2 ) @ if so, fix up PC correction |
---|
283 | 272 | mov r0, sp @ struct pt_regs *regs |
---|
284 | 273 | bl __und_fault |
---|
285 | 274 | |
---|
.. | .. |
---|
627 | 616 | @ Test if we need to give access to iWMMXt coprocessors |
---|
628 | 617 | ldr r5, [r10, #TI_FLAGS] |
---|
629 | 618 | rsbs r7, r8, #(1 << 8) @ CP 0 or 1 only |
---|
630 | | - movcss r7, r5, lsr #(TIF_USING_IWMMXT + 1) |
---|
| 619 | + movscs r7, r5, lsr #(TIF_USING_IWMMXT + 1) |
---|
631 | 620 | bcs iwmmxt_task_enable |
---|
632 | 621 | #endif |
---|
633 | 622 | ARM( add pc, pc, r8, lsr #6 ) |
---|
.. | .. |
---|
820 | 809 | * existing ones. This mechanism should be used only for things that are |
---|
821 | 810 | * really small and justified, and not be abused freely. |
---|
822 | 811 | * |
---|
823 | | - * See Documentation/arm/kernel_user_helpers.txt for formal definitions. |
---|
| 812 | + * See Documentation/arm/kernel_user_helpers.rst for formal definitions. |
---|
824 | 813 | */ |
---|
825 | 814 | THUMB( .arm ) |
---|
826 | 815 | |
---|
.. | .. |
---|
863 | 852 | smp_dmb arm |
---|
864 | 853 | 1: ldrexd r0, r1, [r2] @ load current val |
---|
865 | 854 | eors r3, r0, r4 @ compare with oldval (1) |
---|
866 | | - eoreqs r3, r1, r5 @ compare with oldval (2) |
---|
| 855 | + eorseq r3, r1, r5 @ compare with oldval (2) |
---|
867 | 856 | strexdeq r3, r6, r7, [r2] @ store newval if eq |
---|
868 | 857 | teqeq r3, #1 @ success? |
---|
869 | 858 | beq 1b @ if no then retry |
---|
.. | .. |
---|
887 | 876 | ldmia r1, {r6, lr} @ load new val |
---|
888 | 877 | 1: ldmia r2, {r0, r1} @ load current val |
---|
889 | 878 | eors r3, r0, r4 @ compare with oldval (1) |
---|
890 | | - eoreqs r3, r1, r5 @ compare with oldval (2) |
---|
891 | | -2: stmeqia r2, {r6, lr} @ store newval if eq |
---|
| 879 | + eorseq r3, r1, r5 @ compare with oldval (2) |
---|
| 880 | +2: stmiaeq r2, {r6, lr} @ store newval if eq |
---|
892 | 881 | rsbs r0, r3, #0 @ set return val and C flag |
---|
893 | 882 | ldmfd sp!, {r4, r5, r6, pc} |
---|
894 | 883 | |
---|
.. | .. |
---|
902 | 891 | mov r7, #0xffff0fff |
---|
903 | 892 | sub r7, r7, #(0xffff0fff - (0xffff0f60 + (1b - __kuser_cmpxchg64))) |
---|
904 | 893 | subs r8, r4, r7 |
---|
905 | | - rsbcss r8, r8, #(2b - 1b) |
---|
| 894 | + rsbscs r8, r8, #(2b - 1b) |
---|
906 | 895 | strcs r7, [sp, #S_PC] |
---|
907 | 896 | #if __LINUX_ARM_ARCH__ < 6 |
---|
908 | 897 | bcc kuser_cmpxchg32_fixup |
---|
.. | .. |
---|
960 | 949 | mov r7, #0xffff0fff |
---|
961 | 950 | sub r7, r7, #(0xffff0fff - (0xffff0fc0 + (1b - __kuser_cmpxchg))) |
---|
962 | 951 | subs r8, r4, r7 |
---|
963 | | - rsbcss r8, r8, #(2b - 1b) |
---|
| 952 | + rsbscs r8, r8, #(2b - 1b) |
---|
964 | 953 | strcs r7, [sp, #S_PC] |
---|
965 | 954 | ret lr |
---|
966 | 955 | .previous |
---|
.. | .. |
---|
1029 | 1018 | sub lr, lr, #\correction |
---|
1030 | 1019 | .endif |
---|
1031 | 1020 | |
---|
1032 | | - @ |
---|
1033 | | - @ Save r0, lr_<exception> (parent PC) and spsr_<exception> |
---|
1034 | | - @ (parent CPSR) |
---|
1035 | | - @ |
---|
| 1021 | + @ Save r0, lr_<exception> (parent PC) |
---|
1036 | 1022 | stmia sp, {r0, lr} @ save r0, lr |
---|
1037 | | - mrs lr, spsr |
---|
| 1023 | + |
---|
| 1024 | + @ Save spsr_<exception> (parent CPSR) |
---|
| 1025 | +2: mrs lr, spsr |
---|
1038 | 1026 | str lr, [sp, #8] @ save spsr |
---|
1039 | 1027 | |
---|
1040 | 1028 | @ |
---|
.. | .. |
---|
1055 | 1043 | movs pc, lr @ branch to handler in SVC mode |
---|
1056 | 1044 | ENDPROC(vector_\name) |
---|
1057 | 1045 | |
---|
| 1046 | +#ifdef CONFIG_HARDEN_BRANCH_HISTORY |
---|
| 1047 | + .subsection 1 |
---|
| 1048 | + .align 5 |
---|
| 1049 | +vector_bhb_loop8_\name: |
---|
| 1050 | + .if \correction |
---|
| 1051 | + sub lr, lr, #\correction |
---|
| 1052 | + .endif |
---|
| 1053 | + |
---|
| 1054 | + @ Save r0, lr_<exception> (parent PC) |
---|
| 1055 | + stmia sp, {r0, lr} |
---|
| 1056 | + |
---|
| 1057 | + @ bhb workaround |
---|
| 1058 | + mov r0, #8 |
---|
| 1059 | +3: W(b) . + 4 |
---|
| 1060 | + subs r0, r0, #1 |
---|
| 1061 | + bne 3b |
---|
| 1062 | + dsb |
---|
| 1063 | + isb |
---|
| 1064 | + b 2b |
---|
| 1065 | +ENDPROC(vector_bhb_loop8_\name) |
---|
| 1066 | + |
---|
| 1067 | +vector_bhb_bpiall_\name: |
---|
| 1068 | + .if \correction |
---|
| 1069 | + sub lr, lr, #\correction |
---|
| 1070 | + .endif |
---|
| 1071 | + |
---|
| 1072 | + @ Save r0, lr_<exception> (parent PC) |
---|
| 1073 | + stmia sp, {r0, lr} |
---|
| 1074 | + |
---|
| 1075 | + @ bhb workaround |
---|
| 1076 | + mcr p15, 0, r0, c7, c5, 6 @ BPIALL |
---|
| 1077 | + @ isb not needed due to "movs pc, lr" in the vector stub |
---|
| 1078 | + @ which gives a "context synchronisation". |
---|
| 1079 | + b 2b |
---|
| 1080 | +ENDPROC(vector_bhb_bpiall_\name) |
---|
| 1081 | + .previous |
---|
| 1082 | +#endif |
---|
| 1083 | + |
---|
1058 | 1084 | .align 2 |
---|
1059 | 1085 | @ handler addresses follow this label |
---|
1060 | 1086 | 1: |
---|
.. | .. |
---|
1063 | 1089 | .section .stubs, "ax", %progbits |
---|
1064 | 1090 | @ This must be the first word |
---|
1065 | 1091 | .word vector_swi |
---|
| 1092 | +#ifdef CONFIG_HARDEN_BRANCH_HISTORY |
---|
| 1093 | + .word vector_bhb_loop8_swi |
---|
| 1094 | + .word vector_bhb_bpiall_swi |
---|
| 1095 | +#endif |
---|
1066 | 1096 | |
---|
1067 | 1097 | vector_rst: |
---|
1068 | 1098 | ARM( swi SYS_ERROR0 ) |
---|
.. | .. |
---|
1177 | 1207 | * FIQ "NMI" handler |
---|
1178 | 1208 | *----------------------------------------------------------------------------- |
---|
1179 | 1209 | * Handle a FIQ using the SVC stack allowing FIQ act like NMI on x86 |
---|
1180 | | - * systems. |
---|
| 1210 | + * systems. This must be the last vector stub, so let's place it in its own |
---|
| 1211 | + * subsection. |
---|
1181 | 1212 | */ |
---|
| 1213 | + .subsection 2 |
---|
1182 | 1214 | vector_stub fiq, FIQ_MODE, 4 |
---|
1183 | 1215 | |
---|
1184 | 1216 | .long __fiq_usr @ 0 (USR_26 / USR_32) |
---|
.. | .. |
---|
1211 | 1243 | W(b) vector_irq |
---|
1212 | 1244 | W(b) vector_fiq |
---|
1213 | 1245 | |
---|
| 1246 | +#ifdef CONFIG_HARDEN_BRANCH_HISTORY |
---|
| 1247 | + .section .vectors.bhb.loop8, "ax", %progbits |
---|
| 1248 | +.L__vectors_bhb_loop8_start: |
---|
| 1249 | + W(b) vector_rst |
---|
| 1250 | + W(b) vector_bhb_loop8_und |
---|
| 1251 | + W(ldr) pc, .L__vectors_bhb_loop8_start + 0x1004 |
---|
| 1252 | + W(b) vector_bhb_loop8_pabt |
---|
| 1253 | + W(b) vector_bhb_loop8_dabt |
---|
| 1254 | + W(b) vector_addrexcptn |
---|
| 1255 | + W(b) vector_bhb_loop8_irq |
---|
| 1256 | + W(b) vector_bhb_loop8_fiq |
---|
| 1257 | + |
---|
| 1258 | + .section .vectors.bhb.bpiall, "ax", %progbits |
---|
| 1259 | +.L__vectors_bhb_bpiall_start: |
---|
| 1260 | + W(b) vector_rst |
---|
| 1261 | + W(b) vector_bhb_bpiall_und |
---|
| 1262 | + W(ldr) pc, .L__vectors_bhb_bpiall_start + 0x1008 |
---|
| 1263 | + W(b) vector_bhb_bpiall_pabt |
---|
| 1264 | + W(b) vector_bhb_bpiall_dabt |
---|
| 1265 | + W(b) vector_addrexcptn |
---|
| 1266 | + W(b) vector_bhb_bpiall_irq |
---|
| 1267 | + W(b) vector_bhb_bpiall_fiq |
---|
| 1268 | +#endif |
---|
| 1269 | + |
---|
1214 | 1270 | .data |
---|
1215 | 1271 | .align 2 |
---|
1216 | 1272 | |
---|