@@ ... @@
+/* SPDX-License-Identifier: GPL-2.0-only */
 /*
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License, version 2, as
- * published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
  *
  * Derived from book3s_hv_rmhandlers.S, which is:
  *
  * Copyright 2011 Paul Mackerras, IBM Corp. <paulus@au1.ibm.com>
- *
  */
 
 #include <asm/reg.h>
@@ ... @@
  * Save transactional state and TM-related registers.
  * Called with:
  * - r3 pointing to the vcpu struct
- * - r4 points to the MSR with current TS bits:
+ * - r4 containing the MSR with current TS bits:
  *	(For HV KVM, it is VCPU_MSR ; For PR KVM, it is host MSR).
- * This can modify all checkpointed registers, but
- * restores r1, r2 before exit.
+ * - r5 containing a flag indicating that non-volatile registers
+ *	must be preserved.
+ * If r5 == 0, this can modify all checkpointed registers, but
+ * restores r1, r2 before exit. If r5 != 0, this restores the
+ * MSR TM/FP/VEC/VSX bits to their state on entry.
  */
 _GLOBAL(__kvmppc_save_tm)
 	mflr r0
 	std r0, PPC_LR_STKOFF(r1)
+	stdu r1, -SWITCH_FRAME_SIZE(r1)
+
+	mr r9, r3
+	cmpdi cr7, r5, 0
 
 	/* Turn on TM. */
 	mfmsr r8
+	mr r10, r8
 	li r0, 1
 	rldimi r8, r0, MSR_TM_LG, 63-MSR_TM_LG
 	ori r8, r8, MSR_FP
@@ ... @@
 	std r1, HSTATE_SCRATCH2(r13)
 	std r3, HSTATE_SCRATCH1(r13)
 
+	/* Save CR on the stack - even if r5 == 0 we need to get cr7 back. */
+	mfcr r6
+	SAVE_GPR(6, r1)
+
+	/* Save DSCR so we can restore it to avoid running with user value */
+	mfspr r7, SPRN_DSCR
+	SAVE_GPR(7, r1)
+
+	/*
+	 * We are going to do treclaim., which will modify all checkpointed
+	 * registers. Save the non-volatile registers on the stack if
+	 * preservation of non-volatile state has been requested.
+	 */
+	beq cr7, 3f
+	SAVE_NVGPRS(r1)
+
+	/* MSR[TS] will be 0 (non-transactional) once we do treclaim. */
+	li r0, 0
+	rldimi r10, r0, MSR_TS_S_LG, 63 - MSR_TS_T_LG
+	SAVE_GPR(10, r1)	/* final MSR value */
+3:
 #ifdef CONFIG_KVM_BOOK3S_HV_POSSIBLE
 BEGIN_FTR_SECTION
 	/* Emulation of the treclaim instruction needs TEXASR before treclaim */
@@ ... @@
 	std r9, PACATMSCRATCH(r13)
 	ld r9, HSTATE_SCRATCH1(r13)
 
-	/* Get a few more GPRs free. */
-	std r29, VCPU_GPRS_TM(29)(r9)
-	std r30, VCPU_GPRS_TM(30)(r9)
-	std r31, VCPU_GPRS_TM(31)(r9)
-
-	/* Save away PPR and DSCR soon so don't run with user values. */
-	mfspr r31, SPRN_PPR
+	/* Save away PPR soon so we don't run with user value. */
+	std r0, VCPU_GPRS_TM(0)(r9)
+	mfspr r0, SPRN_PPR
 	HMT_MEDIUM
-	mfspr r30, SPRN_DSCR
-#ifdef CONFIG_KVM_BOOK3S_HV_POSSIBLE
-	ld r29, HSTATE_DSCR(r13)
-	mtspr SPRN_DSCR, r29
-#endif
 
-	/* Save all but r9, r13 & r29-r31 */
-	reg = 0
+	/* Reload stack pointer. */
+	std r1, VCPU_GPRS_TM(1)(r9)
+	ld r1, HSTATE_SCRATCH2(r13)
+
+	/* Set MSR RI now we have r1 and r13 back. */
+	std r2, VCPU_GPRS_TM(2)(r9)
+	li r2, MSR_RI
+	mtmsrd r2, 1
+
+	/* Reload TOC pointer. */
+	ld r2, PACATOC(r13)
+
+	/* Save all but r0-r2, r9 & r13 */
+	reg = 3
 	.rept 29
 	.if (reg != 9) && (reg != 13)
 	std reg, VCPU_GPRS_TM(reg)(r9)
@@ ... @@
 	ld r4, PACATMSCRATCH(r13)
 	std r4, VCPU_GPRS_TM(9)(r9)
 
-	/* Reload stack pointer and TOC. */
-	ld r1, HSTATE_SCRATCH2(r13)
-	ld r2, PACATOC(r13)
-
-	/* Set MSR RI now we have r1 and r13 back. */
-	li r5, MSR_RI
-	mtmsrd r5, 1
-
-	/* Save away checkpinted SPRs. */
-	std r31, VCPU_PPR_TM(r9)
-	std r30, VCPU_DSCR_TM(r9)
-	mflr r5
+	/* Restore host DSCR and CR values, after saving guest values */
 	mfcr r6
+	mfspr r7, SPRN_DSCR
+	stw r6, VCPU_CR_TM(r9)
+	std r7, VCPU_DSCR_TM(r9)
+	REST_GPR(6, r1)
+	REST_GPR(7, r1)
+	mtcr r6
+	mtspr SPRN_DSCR, r7
+
+	/* Save away checkpointed SPRs. */
+	std r0, VCPU_PPR_TM(r9)
+	mflr r5
 	mfctr r7
 	mfspr r8, SPRN_AMR
 	mfspr r10, SPRN_TAR
 	mfxer r11
 	std r5, VCPU_LR_TM(r9)
-	stw r6, VCPU_CR_TM(r9)
 	std r7, VCPU_CTR_TM(r9)
 	std r8, VCPU_AMR_TM(r9)
 	std r10, VCPU_TAR_TM(r9)
 	std r11, VCPU_XER_TM(r9)
-
-	/* Restore r12 as trap number. */
-	lwz r12, VCPU_TRAP(r9)
 
 	/* Save FP/VSX. */
 	addi r3, r9, VCPU_FPRS_TM
@@ ... @@
 	bl store_vr_state
 	mfspr r6, SPRN_VRSAVE
 	stw r6, VCPU_VRSAVE_TM(r9)
+
+	/* Restore non-volatile registers if requested to */
+	beq cr7, 1f
+	REST_NVGPRS(r1)
+	REST_GPR(10, r1)
 1:
 	/*
 	 * We need to save these SPRs after the treclaim so that the software
@@ ... @@
 	 */
 	mfspr r7, SPRN_TEXASR
 	std r7, VCPU_TEXASR(r9)
-11:
 	mfspr r5, SPRN_TFHAR
 	mfspr r6, SPRN_TFIAR
 	std r5, VCPU_TFHAR(r9)
 	std r6, VCPU_TFIAR(r9)
 
+	/* Restore MSR state if requested */
+	beq cr7, 2f
+	mtmsrd r10, 0
+2:
+	addi r1, r1, SWITCH_FRAME_SIZE
 	ld r0, PPC_LR_STKOFF(r1)
 	mtlr r0
 	blr
@@ ... @@
  * be invoked from C function by PR KVM only.
  */
 _GLOBAL(_kvmppc_save_tm_pr)
-	mflr r5
-	std r5, PPC_LR_STKOFF(r1)
-	stdu r1, -SWITCH_FRAME_SIZE(r1)
-	SAVE_NVGPRS(r1)
-
-	/* save MSR since TM/math bits might be impacted
-	 * by __kvmppc_save_tm().
-	 */
-	mfmsr r5
-	SAVE_GPR(5, r1)
-
-	/* also save DSCR/CR/TAR so that it can be recovered later */
-	mfspr r6, SPRN_DSCR
-	SAVE_GPR(6, r1)
-
-	mfcr r7
-	stw r7, _CCR(r1)
+	mflr r0
+	std r0, PPC_LR_STKOFF(r1)
+	stdu r1, -PPC_MIN_STKFRM(r1)
 
 	mfspr r8, SPRN_TAR
-	SAVE_GPR(8, r1)
+	std r8, PPC_MIN_STKFRM-8(r1)
 
+	li r5, 1	/* preserve non-volatile registers */
 	bl __kvmppc_save_tm
 
-	REST_GPR(8, r1)
+	ld r8, PPC_MIN_STKFRM-8(r1)
 	mtspr SPRN_TAR, r8
 
-	ld r7, _CCR(r1)
-	mtcr r7
-
-	REST_GPR(6, r1)
-	mtspr SPRN_DSCR, r6
-
-	/* need preserve current MSR's MSR_TS bits */
-	REST_GPR(5, r1)
-	mfmsr r6
-	rldicl r6, r6, 64 - MSR_TS_S_LG, 62
-	rldimi r5, r6, MSR_TS_S_LG, 63 - MSR_TS_T_LG
-	mtmsrd r5
-
-	REST_NVGPRS(r1)
-	addi r1, r1, SWITCH_FRAME_SIZE
-	ld r5, PPC_LR_STKOFF(r1)
-	mtlr r5
+	addi r1, r1, PPC_MIN_STKFRM
+	ld r0, PPC_LR_STKOFF(r1)
+	mtlr r0
 	blr
 
 EXPORT_SYMBOL_GPL(_kvmppc_save_tm_pr);
@@ ... @@
  * - r4 is the guest MSR with desired TS bits:
  *	For HV KVM, it is VCPU_MSR
  *	For PR KVM, it is provided by caller
- * This potentially modifies all checkpointed registers.
- * It restores r1, r2 from the PACA.
+ * - r5 containing a flag indicating that non-volatile registers
+ *	must be preserved.
+ * If r5 == 0, this potentially modifies all checkpointed registers, but
+ * restores r1, r2 from the PACA before exit.
+ * If r5 != 0, this restores the MSR TM/FP/VEC/VSX bits to their state on entry.
  */
 _GLOBAL(__kvmppc_restore_tm)
 	mflr r0
 	std r0, PPC_LR_STKOFF(r1)
 
+	cmpdi cr7, r5, 0
+
 	/* Turn on TM/FP/VSX/VMX so we can restore them. */
 	mfmsr r5
+	mr r10, r5
 	li r6, MSR_TM >> 32
 	sldi r6, r6, 32
 	or r5, r5, r6
@@ ... @@
 
 	mr r5, r4
 	rldicl. r5, r5, 64 - MSR_TS_S_LG, 62
-	beqlr		/* TM not active in guest */
-	std r1, HSTATE_SCRATCH2(r13)
+	beq 9f		/* TM not active in guest */
 
 	/* Make sure the failure summary is set, otherwise we'll program check
 	 * when we trechkpt. It's possible that this might have been not set
@@ ... @@
 	oris r7, r7, (TEXASR_FS)@h
 	mtspr SPRN_TEXASR, r7
 
+	/*
+	 * Make a stack frame and save non-volatile registers if requested.
+	 */
+	stdu r1, -SWITCH_FRAME_SIZE(r1)
+	std r1, HSTATE_SCRATCH2(r13)
+
+	mfcr r6
+	mfspr r7, SPRN_DSCR
+	SAVE_GPR(2, r1)
+	SAVE_GPR(6, r1)
+	SAVE_GPR(7, r1)
+
+	beq cr7, 4f
+	SAVE_NVGPRS(r1)
+
+	/* MSR[TS] will be 1 (suspended) once we do trechkpt */
+	li r0, 1
+	rldimi r10, r0, MSR_TS_S_LG, 63 - MSR_TS_T_LG
+	SAVE_GPR(10, r1)	/* final MSR value */
+4:
 	/*
 	 * We need to load up the checkpointed state for the guest.
 	 * We need to do this early as it will blow away any GPRs, VSRs and
@@ ... @@
 	ld r29, VCPU_DSCR_TM(r3)
 	ld r30, VCPU_PPR_TM(r3)
 
-	std r2, PACATMSCRATCH(r13)	/* Save TOC */
-
 	/* Clear the MSR RI since r1, r13 are all going to be foobar. */
 	li r5, 0
 	mtmsrd r5, 1
@@ ... @@
 	/* Now let's get back the state we need. */
 	HMT_MEDIUM
 	GET_PACA(r13)
-#ifdef CONFIG_KVM_BOOK3S_HV_POSSIBLE
-	ld r29, HSTATE_DSCR(r13)
-	mtspr SPRN_DSCR, r29
-#endif
 	ld r1, HSTATE_SCRATCH2(r13)
-	ld r2, PACATMSCRATCH(r13)
+	REST_GPR(7, r1)
+	mtspr SPRN_DSCR, r7
 
 	/* Set the MSR RI since we have our registers back. */
 	li r5, MSR_RI
 	mtmsrd r5, 1
+
+	/* Restore TOC pointer and CR */
+	REST_GPR(2, r1)
+	REST_GPR(6, r1)
+	mtcr r6
+
+	/* Restore non-volatile registers if requested to. */
+	beq cr7, 5f
+	REST_GPR(10, r1)
+	REST_NVGPRS(r1)
+
+5:	addi r1, r1, SWITCH_FRAME_SIZE
 	ld r0, PPC_LR_STKOFF(r1)
 	mtlr r0
+
+9:	/* Restore MSR bits if requested */
+	beqlr cr7
+	mtmsrd r10, 0
 	blr
 
 /*
@@ ... @@
  * can be invoked from C function by PR KVM only.
  */
 _GLOBAL(_kvmppc_restore_tm_pr)
-	mflr r5
-	std r5, PPC_LR_STKOFF(r1)
-	stdu r1, -SWITCH_FRAME_SIZE(r1)
-	SAVE_NVGPRS(r1)
+	mflr r0
+	std r0, PPC_LR_STKOFF(r1)
+	stdu r1, -PPC_MIN_STKFRM(r1)
 
-	/* save MSR to avoid TM/math bits change */
-	mfmsr r5
-	SAVE_GPR(5, r1)
-
-	/* also save DSCR/CR/TAR so that it can be recovered later */
-	mfspr r6, SPRN_DSCR
-	SAVE_GPR(6, r1)
-
-	mfcr r7
-	stw r7, _CCR(r1)
-
+	/* save TAR so that it can be recovered later */
 	mfspr r8, SPRN_TAR
-	SAVE_GPR(8, r1)
+	std r8, PPC_MIN_STKFRM-8(r1)
 
+	li r5, 1
 	bl __kvmppc_restore_tm
 
-	REST_GPR(8, r1)
+	ld r8, PPC_MIN_STKFRM-8(r1)
 	mtspr SPRN_TAR, r8
 
-	ld r7, _CCR(r1)
-	mtcr r7
-
-	REST_GPR(6, r1)
-	mtspr SPRN_DSCR, r6
-
-	/* need preserve current MSR's MSR_TS bits */
-	REST_GPR(5, r1)
-	mfmsr r6
-	rldicl r6, r6, 64 - MSR_TS_S_LG, 62
-	rldimi r5, r6, MSR_TS_S_LG, 63 - MSR_TS_T_LG
-	mtmsrd r5
-
-	REST_NVGPRS(r1)
-	addi r1, r1, SWITCH_FRAME_SIZE
-	ld r5, PPC_LR_STKOFF(r1)
-	mtlr r5
+	addi r1, r1, PPC_MIN_STKFRM
+	ld r0, PPC_LR_STKOFF(r1)
+	mtlr r0
 	blr
 
 EXPORT_SYMBOL_GPL(_kvmppc_restore_tm_pr);