```diff
@@ -1,27 +1,16 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
 /*
  * Hypervisor stub
  *
  * Copyright (C) 2012 ARM Ltd.
  * Author: Marc Zyngier <marc.zyngier@arm.com>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License version 2 as
- * published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
  */
 
 #include <linux/init.h>
 #include <linux/linkage.h>
-#include <linux/irqchip/arm-gic-v3.h>
 
 #include <asm/assembler.h>
+#include <asm/el2_setup.h>
 #include <asm/kvm_arm.h>
 #include <asm/kvm_asm.h>
 #include <asm/ptrace.h>
@@ -32,7 +21,7 @@
 
 	.align 11
 
-ENTRY(__hyp_stub_vectors)
+SYM_CODE_START(__hyp_stub_vectors)
 	ventry	el2_sync_invalid		// Synchronous EL2t
 	ventry	el2_irq_invalid			// IRQ EL2t
 	ventry	el2_fiq_invalid			// FIQ EL2t
@@ -52,15 +41,18 @@
 	ventry	el1_irq_invalid			// IRQ 32-bit EL1
 	ventry	el1_fiq_invalid			// FIQ 32-bit EL1
 	ventry	el1_error_invalid		// Error 32-bit EL1
-ENDPROC(__hyp_stub_vectors)
+SYM_CODE_END(__hyp_stub_vectors)
 
 	.align 11
 
-el1_sync:
+SYM_CODE_START_LOCAL(el1_sync)
 	cmp	x0, #HVC_SET_VECTORS
-	b.ne	2f
+	b.ne	1f
 	msr	vbar_el2, x1
 	b	9f
+
+1:	cmp	x0, #HVC_VHE_RESTART
+	b.eq	mutate_to_vhe
 
 2:	cmp	x0, #HVC_SOFT_RESTART
 	b.ne	3f
@@ -74,9 +66,9 @@
 	beq	9f				// Nothing to reset!
 
 	/* Someone called kvm_call_hyp() against the hyp-stub... */
-	ldr	x0, =HVC_STUB_ERR
+	mov_q	x0, HVC_STUB_ERR
 	eret
 
 9:	mov	x0, xzr
 	eret
-ENDPROC(el1_sync)
+SYM_CODE_END(el1_sync)
```
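Taken together, `el1_sync` implements a tiny hypercall ABI: `x0` selects the operation, `x1` carries its argument, and `x0` comes back as `0` on success or as `HVC_STUB_ERR` when the stub does not recognise the function ID (the catch-all for anyone issuing `kvm_call_hyp()` against the stub). A loose C model of the dispatch, for illustration only (the `HVC_*` constants are the real ones from `asm/virt.h`; the function itself is hypothetical and elides the register-level details):

```c
#include <asm/virt.h>	/* HVC_SET_VECTORS, HVC_VHE_RESTART, HVC_STUB_ERR, ... */

/* Hypothetical C rendering of the el1_sync dispatcher above. */
static unsigned long hyp_stub_dispatch(unsigned long fn, unsigned long arg)
{
	switch (fn) {
	case HVC_SET_VECTORS:		/* msr vbar_el2, x1 */
		return 0;
	case HVC_VHE_RESTART:		/* b mutate_to_vhe */
		return 0;		/* or HVC_STUB_ERR if the switch is refused */
	case HVC_SOFT_RESTART:		/* br x1: does not return on success */
		return 0;
	case HVC_RESET_VECTORS:		/* vbar_el2 = __hyp_stub_vectors */
		return 0;
	default:
		return HVC_STUB_ERR;	/* unknown hypercall */
	}
}
```

With `HVC_VHE_RESTART` wired into the dispatcher, the next hunk adds `mutate_to_vhe` itself, starting with its gatekeeping checks: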
```diff
@@ -82,0 +75,25 @@
+
+// nVHE? No way! Give me the real thing!
+SYM_CODE_START_LOCAL(mutate_to_vhe)
+	// Sanity check: MMU *must* be off
+	mrs	x1, sctlr_el2
+	tbnz	x1, #0, 1f
+
+	// Needs to be VHE capable, obviously
+	mrs	x1, id_aa64mmfr1_el1
+	ubfx	x1, x1, #ID_AA64MMFR1_VHE_SHIFT, #4
+	cbz	x1, 1f
+
+	// Check whether VHE is disabled from the command line
+	adr_l	x1, id_aa64mmfr1_override
+	ldr	x2, [x1, FTR_OVR_VAL_OFFSET]
+	ldr	x1, [x1, FTR_OVR_MASK_OFFSET]
+	ubfx	x2, x2, #ID_AA64MMFR1_VHE_SHIFT, #4
+	ubfx	x1, x1, #ID_AA64MMFR1_VHE_SHIFT, #4
+	cmp	x1, xzr
+	and	x2, x2, x1
+	csinv	x2, x2, xzr, ne
+	cbnz	x2, 2f
+
+1:	mov_q	x0, HVC_STUB_ERR
+	eret
```
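The `csinv` sequence is dense, so here is the decision it implements as a hedged C sketch. The override's mask selects which bits of the value are meaningful, an all-zero mask means "no override given", and the switch is refused only when the VH field is explicitly overridden to zero (as an `id_aa64mmfr1.vh=0`-style early command-line option would do). The structure layout is an assumption, mirroring the `arm64_ftr_override` object that `FTR_OVR_VAL_OFFSET`/`FTR_OVR_MASK_OFFSET` index into:

```c
#include <linux/types.h>

struct arm64_ftr_override {	/* assumed layout of id_aa64mmfr1_override */
	u64	val;
	u64	mask;
};

/* true == stay nVHE: the 4-bit VH field is overridden, and overridden to 0 */
static bool vhe_disabled_on_cmdline(const struct arm64_ftr_override *ovr,
				    unsigned int vhe_shift)
{
	u64 val  = (ovr->val  >> vhe_shift) & 0xf;
	u64 mask = (ovr->mask >> vhe_shift) & 0xf;

	return mask && (val & mask) == 0;
}
```

When none of the three checks bails out, the code proceeds to the switch proper: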
```diff
@@ -82,0 +100,44 @@
+2:
+	// Engage the VHE magic!
+	mov_q	x0, HCR_HOST_VHE_FLAGS
+	msr	hcr_el2, x0
+	isb
+
+	// Use the EL1 allocated stack, per-cpu offset
+	mrs	x0, sp_el1
+	mov	sp, x0
+	mrs	x0, tpidr_el1
+	msr	tpidr_el2, x0
+
+	// FP configuration, vectors
+	mrs_s	x0, SYS_CPACR_EL12
+	msr	cpacr_el1, x0
+	mrs_s	x0, SYS_VBAR_EL12
+	msr	vbar_el1, x0
+
+	// Use EL2 translations for SPE & TRBE and disable access from EL1
+	mrs	x0, mdcr_el2
+	bic	x0, x0, #(MDCR_EL2_E2PB_MASK << MDCR_EL2_E2PB_SHIFT)
+	bic	x0, x0, #(MDCR_EL2_E2TB_MASK << MDCR_EL2_E2TB_SHIFT)
+	msr	mdcr_el2, x0
+
+	// Transfer the MM state from EL1 to EL2
+	mrs_s	x0, SYS_TCR_EL12
+	msr	tcr_el1, x0
+	mrs_s	x0, SYS_TTBR0_EL12
+	msr	ttbr0_el1, x0
+	mrs_s	x0, SYS_TTBR1_EL12
+	msr	ttbr1_el1, x0
+	mrs_s	x0, SYS_MAIR_EL12
+	msr	mair_el1, x0
+	isb
+
+	// Hack the exception return to stay at EL2
+	mrs	x0, spsr_el1
+	and	x0, x0, #~PSR_MODE_MASK
+	mov	x1, #PSR_MODE_EL2h
+	orr	x0, x0, x1
+	msr	spsr_el1, x0
+
+	b	enter_vhe
+SYM_CODE_END(mutate_to_vhe)
```
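The "hack the exception return" step deserves unpacking: once `HCR_EL2.E2H` is set, the `*_EL1` system-register encodings executed at EL2 are redirected to their EL2 counterparts, so the `spsr_el1` accesses above really edit `SPSR_EL2`, the state that the upcoming `eret` will restore. Rewriting the mode field therefore makes the exception return land at the caller's own PC, but in EL2h rather than in the EL1 mode the `hvc` was issued from. As a C-flavoured illustration (not code from the patch):

```c
#include <linux/types.h>
#include <asm/ptrace.h>		/* PSR_MODE_MASK, PSR_MODE_EL2h */
#include <asm/sysreg.h>		/* read_sysreg()/write_sysreg() */

/* Runs at EL2 with E2H set, where spsr_el1 aliases SPSR_EL2. */
static void stay_at_el2_on_eret(void)
{
	u64 spsr = read_sysreg(spsr_el1);

	spsr &= ~(u64)PSR_MODE_MASK;
	spsr |= PSR_MODE_EL2h;
	write_sysreg(spsr, spsr_el1);
}
```

Everything so far has executed with the MMU off, which is why the final step below must live in the identity map: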
```diff
@@ -83,8 +144,36 @@
+
+	// At the point where we reach enter_vhe(), we run with
+	// the MMU off (which is enforced by mutate_to_vhe()).
+	// We thus need to be in the idmap, or everything will
+	// explode when enabling the MMU.
+
+	.pushsection	.idmap.text, "ax"
+
+SYM_CODE_START_LOCAL(enter_vhe)
+	// Invalidate TLBs before enabling the MMU
+	tlbi	vmalle1
+	dsb	nsh
+	isb
+
+	// Enable the EL2 S1 MMU, as set up from EL1
+	mrs_s	x0, SYS_SCTLR_EL12
+	set_sctlr_el1	x0
+
+	// Disable the EL1 S1 MMU for a good measure
+	mov_q	x0, INIT_SCTLR_EL1_MMU_OFF
+	msr_s	SYS_SCTLR_EL12, x0
+
+	mov	x0, xzr
+
+	eret
+SYM_CODE_END(enter_vhe)
+
+	.popsection
 
 .macro invalid_vector	label
-\label:
+SYM_CODE_START_LOCAL(\label)
 	b \label
-ENDPROC(\label)
+SYM_CODE_END(\label)
 	.endm
 
 	invalid_vector	el2_sync_invalid
```
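For completeness, each `invalid_vector` invocation expands to a local symbol that branches to itself, parking the CPU if an unexpected exception is ever delivered through the stub vectors. The first expansion, `invalid_vector el2_sync_invalid`, becomes:

```asm
SYM_CODE_START_LOCAL(el2_sync_invalid)
	b	el2_sync_invalid
SYM_CODE_END(el2_sync_invalid)
```

The remaining stubs are unchanged apart from the section epilogue: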
```diff
@@ -95,6 +184,8 @@
 	invalid_vector	el1_irq_invalid
 	invalid_vector	el1_fiq_invalid
 	invalid_vector	el1_error_invalid
+
+	.popsection
 
 /*
  * __hyp_set_vectors: Call this after boot to set the initial hypervisor
@@ -117,15 +208,15 @@
  * initialisation entry point.
  */
 
-ENTRY(__hyp_set_vectors)
+SYM_FUNC_START(__hyp_set_vectors)
 	mov	x1, x0
 	mov	x0, #HVC_SET_VECTORS
 	hvc	#0
 	ret
-ENDPROC(__hyp_set_vectors)
+SYM_FUNC_END(__hyp_set_vectors)
 
-ENTRY(__hyp_reset_vectors)
+SYM_FUNC_START(__hyp_reset_vectors)
 	mov	x0, #HVC_RESET_VECTORS
 	hvc	#0
 	ret
-ENDPROC(__hyp_reset_vectors)
+SYM_FUNC_END(__hyp_reset_vectors)
```
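On the C side, these two helpers are the stub's public interface; they are declared along the following lines in `asm/virt.h` (signatures shown as an assumption, for context), the setter taking the physical address of the new vector table:

```c
#include <linux/types.h>	/* phys_addr_t */

/* Assumed declarations, per arch/arm64/include/asm/virt.h */
extern void __hyp_set_vectors(phys_addr_t phys_vector_base);
extern void __hyp_reset_vectors(void);
```

The final hunk adds the new `switch_to_vhe` entry point: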
```diff
@@ -131,0 +223,24 @@
+
+/*
+ * Entry point to switch to VHE if deemed capable
+ */
+SYM_FUNC_START(switch_to_vhe)
+#ifdef CONFIG_ARM64_VHE
+	// Need to have booted at EL2
+	adr_l	x1, __boot_cpu_mode
+	ldr	w0, [x1]
+	cmp	w0, #BOOT_CPU_MODE_EL2
+	b.ne	1f
+
+	// and still be at EL1
+	mrs	x0, CurrentEL
+	cmp	x0, #CurrentEL_EL1
+	b.ne	1f
+
+	// Turn the world upside down
+	mov	x0, #HVC_VHE_RESTART
+	hvc	#0
+1:
+#endif
+	ret
+SYM_FUNC_END(switch_to_vhe)
```
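Note that `switch_to_vhe` is deliberately safe to call unconditionally: it is a no-op unless the CPU booted at EL2, currently runs at EL1, and `CONFIG_ARM64_VHE` is enabled, so boot paths can invoke it without any further checks. A hypothetical call site (the real ones are added elsewhere in the series) is a plain helper call issued before the kernel commits to its exception level:

```asm
	/* Illustrative only: upgrade to VHE early, before anything
	 * starts depending on the current exception level. */
	bl	switch_to_vhe
```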