@@ -1,18 +1,18 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
 /*
  * relocate_kernel.S - put the kernel image in place to boot
  * Copyright (C) 2002-2004 Eric Biederman <ebiederm@xmission.com>
- *
- * This source code is licensed under the GNU General Public License,
- * Version 2. See the file COPYING for more details.
  */
 
 #include <linux/linkage.h>
 #include <asm/page_types.h>
 #include <asm/kexec.h>
+#include <asm/nospec-branch.h>
 #include <asm/processor-flags.h>
 
 /*
- * Must be relocatable PIC code callable as a C function
+ * Must be relocatable PIC code callable as a C function, in particular
+ * there must be a plain RET and not jump to return thunk.
  */
 
 #define PTR(x) (x << 2)
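Every return site in the hunks below picks up the same three-line idiom. As a minimal sketch (not part of the patch itself; the symbol name example_ret_site is hypothetical), the pattern reads:

	SYM_CODE_START_LOCAL_NOALIGN(example_ret_site)
		/* ... position-independent work ... */
		ANNOTATE_UNRET_SAFE	/* objtool: the bare RET below is intentional */
		ret			/* plain RET, never a jump to the return thunk */
		int3			/* presumably a trap against speculation straight past the RET */
	SYM_CODE_END(example_ret_site)

ANNOTATE_UNRET_SAFE comes from the newly added #include <asm/nospec-branch.h>; without the annotation, objtool would normally complain about a naked RET when return thunks are enabled.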
@@ -37,8 +37,7 @@
 #define CP_PA_BACKUP_PAGES_MAP DATA(0x1c)
 
 	.text
-	.globl relocate_kernel
-relocate_kernel:
+SYM_CODE_START_NOALIGN(relocate_kernel)
 	/* Save the CPU context, used for jumping back */
 
 	pushl	%ebx
@@ -94,9 +93,12 @@
 	movl	%edi, %eax
 	addl	$(identity_mapped - relocate_kernel), %eax
 	pushl	%eax
+	ANNOTATE_UNRET_SAFE
 	ret
+	int3
+SYM_CODE_END(relocate_kernel)
 
-identity_mapped:
+SYM_CODE_START_LOCAL_NOALIGN(identity_mapped)
 	/* set return address to 0 if not preserving context */
 	pushl	$0
 	/* store the start address on the stack */
@@ -161,12 +163,15 @@
 	xorl	%edx, %edx
 	xorl	%esi, %esi
 	xorl	%ebp, %ebp
+	ANNOTATE_UNRET_SAFE
 	ret
+	int3
 1:
 	popl	%edx
 	movl	CP_PA_SWAP_PAGE(%edi), %esp
 	addl	$PAGE_SIZE, %esp
 2:
+	ANNOTATE_RETPOLINE_SAFE
 	call	*%edx
 
 	/* get the re-entry point of the peer system */
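The hunk above also covers the one indirect branch in this file: instead of routing it through a retpoline thunk, which this relocated and self-contained code presumably cannot rely on being reachable, the bare call is kept and marked for objtool. A minimal sketch of that idiom (illustrative only; the choice of %edx simply mirrors the code above):

		/* %edx holds the address of the code to enter */
		ANNOTATE_RETPOLINE_SAFE	/* objtool: the bare indirect call is intentional */
		call	*%edx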
@@ -192,9 +197,12 @@
 	movl	%edi, %eax
 	addl	$(virtual_mapped - relocate_kernel), %eax
 	pushl	%eax
+	ANNOTATE_UNRET_SAFE
 	ret
+	int3
+SYM_CODE_END(identity_mapped)
 
-virtual_mapped:
+SYM_CODE_START_LOCAL_NOALIGN(virtual_mapped)
 	movl	CR4(%edi), %eax
 	movl	%eax, %cr4
 	movl	CR3(%edi), %eax
@@ -209,10 +217,13 @@
 	popl	%edi
 	popl	%esi
 	popl	%ebx
+	ANNOTATE_UNRET_SAFE
 	ret
+	int3
+SYM_CODE_END(virtual_mapped)
 
 	/* Do the copies */
-swap_pages:
+SYM_CODE_START_LOCAL_NOALIGN(swap_pages)
 	movl	8(%esp), %edx
 	movl	4(%esp), %ecx
 	pushl	%ebp
@@ -271,7 +282,10 @@
 	popl	%edi
 	popl	%ebx
 	popl	%ebp
+	ANNOTATE_UNRET_SAFE
 	ret
+	int3
+SYM_CODE_END(swap_pages)
 
 	.globl kexec_control_code_size
 .set kexec_control_code_size, . - relocate_kernel