forked from ~ljy/RK356X_SDK_RELEASE

hc
2024-05-11 297b60346df8beafee954a0fd7c2d64f33f3b9bc
kernel/arch/x86/power/hibernate_asm_32.S
@@ -12,10 +12,11 @@
 #include <asm/page_types.h>
 #include <asm/asm-offsets.h>
 #include <asm/processor-flags.h>
+#include <asm/frame.h>
 
 .text
 
-ENTRY(swsusp_arch_suspend)
+SYM_FUNC_START(swsusp_arch_suspend)
 	movl %esp, saved_context_esp
 	movl %ebx, saved_context_ebx
 	movl %ebp, saved_context_ebp
@@ -24,13 +25,31 @@
 	pushfl
 	popl saved_context_eflags
 
-	call swsusp_save
-	ret
+	/* save cr3 */
+	movl	%cr3, %eax
+	movl	%eax, restore_cr3
 
-ENTRY(restore_image)
+	FRAME_BEGIN
+	call swsusp_save
+	FRAME_END
+	RET
+SYM_FUNC_END(swsusp_arch_suspend)
+
+SYM_CODE_START(restore_image)
+	/* prepare to jump to the image kernel */
+	movl	restore_jump_address, %ebx
+	movl	restore_cr3, %ebp
+
 	movl	mmu_cr4_features, %ecx
-	movl	resume_pg_dir, %eax
-	subl	$__PAGE_OFFSET, %eax
+
+	/* jump to relocated restore code */
+	movl	relocated_restore_code, %eax
+	jmpl	*%eax
+SYM_CODE_END(restore_image)
+
+/* code below has been relocated to a safe page */
+SYM_CODE_START(core_restore_code)
+	movl	temp_pgt, %eax
 	movl	%eax, %cr3
 
 	jecxz	1f	# cr4 Pentium and higher, skip if zero
@@ -49,7 +68,7 @@
 	movl	pbe_address(%edx), %esi
 	movl	pbe_orig_address(%edx), %edi
 
-	movl	$1024, %ecx
+	movl	$(PAGE_SIZE >> 2), %ecx
 	rep
 	movsl
 
@@ -58,10 +77,14 @@
 	.p2align 4,,7
 
 done:
+	jmpl	*%ebx
+SYM_CODE_END(core_restore_code)
+
+	/* code below belongs to the image kernel */
+	.align PAGE_SIZE
+SYM_FUNC_START(restore_registers)
 	/* go back to the original page tables */
-	movl	$swapper_pg_dir, %eax
-	subl	$__PAGE_OFFSET, %eax
-	movl	%eax, %cr3
+	movl	%ebp, %cr3
 	movl	mmu_cr4_features, %ecx
 	jecxz	1f	# cr4 Pentium and higher, skip if zero
 	movl	%ecx, %cr4;  # turn PGE back on
@@ -82,4 +105,8 @@
 	xorl	%eax, %eax
 
 
-	ret
+	/* tell the hibernation core that we've just restored the memory */
+	movl	%eax, in_suspend
+
+	RET
+SYM_FUNC_END(restore_registers)