/*
 * r2300_switch.S: R2300 specific task switching code.
 *
 * Copyright (C) 1994, 1995, 1996, 1999 by Ralf Baechle
 * Copyright (C) 1994, 1995, 1996 by Andreas Busse
 *
 * Multi-cpu abstraction and macros for easier reading:
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 *
 * Further modifications to make this work:
 * Copyright (c) 1998-2000 Harald Koerfgen
 */
#include <asm/asm.h>
#include <asm/cachectl.h>
#include <asm/fpregdef.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>
#include <asm/stackframe.h>
#include <asm/thread_info.h>

#include <asm/asmmacro.h>

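/*
 * Everything below is plain MIPS I code, matching the R2300/R3000-class
 * CPUs this file is built for.
 */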
        .set    mips1
        .align  5

/*
 * Offset to the current process status flags; the first 32 bytes of the
 * stack are not used.
 */
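/*
 * ST_OFF is therefore the offset, from the base of a task's kernel
 * stack, of the saved Status word in the pt_regs frame that sits just
 * below those unused 32 bytes at the top of the stack.
 */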
#define ST_OFF (_THREAD_SIZE - 32 - PT_SIZE + PT_STATUS)

/*
 * task_struct *resume(task_struct *prev, task_struct *next,
 *                     struct thread_info *next_ti)
 */
LEAF(resume)
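        /*
         * Save the outgoing task's context: the Status register, the
         * callee-saved registers (s0-s7, sp and fp, stored by
         * cpu_save_nonscratch) and the return address.  The remaining
         * registers are caller-saved, so the compiler already treats
         * them as clobbered by the call to resume().
         */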
        mfc0    t1, CP0_STATUS
        sw      t1, THREAD_STATUS(a0)
        cpu_save_nonscratch a0
        sw      ra, THREAD_REG31(a0)

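        /*
         * With CONFIG_CC_STACKPROTECTOR on a uniprocessor kernel there
         * is only the single global __stack_chk_guard, so the incoming
         * task's canary has to be copied into it on every switch.
         */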
#if defined(CONFIG_CC_STACKPROTECTOR) && !defined(CONFIG_SMP)
        PTR_LA  t8, __stack_chk_guard
        LONG_L  t9, TASK_STACK_CANARY(a1)
        LONG_S  t9, 0(t8)
#endif

        /*
         * The order in which the registers are restored takes care of
         * the race between updating $28, $29 and kernelsp without
         * having to disable interrupts.
         */
        move    $28, a2
        cpu_restore_nonscratch a1

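        /*
         * $28 now holds next's thread_info, i.e. the base of its kernel
         * stack.  Publish the stack top (minus the unused 32 bytes) in
         * kernelsp so the exception entry code can find the kernel
         * stack when an exception is taken from user mode.
         */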
        addiu   t1, $28, _THREAD_SIZE - 32
        sw      t1, kernelsp

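        /*
         * Build the Status value for the incoming task: keep the live
         * interrupt mask and enable bits (0xff01 = IM7..IM0 plus IE)
         * from the current c0_status and take all other bits from
         * next's saved Status word before writing the result back.
         */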
        mfc0    t1, CP0_STATUS          /* Do we really need this? */
        li      a3, 0xff01
        and     t1, a3
        lw      a2, THREAD_STATUS(a1)
        nor     a3, $0, a3
        and     a2, a3
        or      a2, t1
        mtc0    a2, CP0_STATUS
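        /*
         * Return the previous task in v0; the switch_to() wrapper,
         * now running on next's stack, reads it back as "last".
         */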
        move    v0, a0
        jr      ra
        END(resume)

/*
 * Save a thread's fp context.
 */
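/*
 * fpu_save_single and fpu_restore_single (asmmacro-32.h) copy the FP
 * register file together with the fcr31 control/status register
 * between the CPU and the thread struct passed in a0.
 */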
LEAF(_save_fp)
        fpu_save_single a0, t1          # clobbers t1
        jr      ra
        END(_save_fp)

/*
 * Restore a thread's fp context.
 */
LEAF(_restore_fp)
        fpu_restore_single a0, t1       # clobbers t1
        jr      ra
        END(_restore_fp)

/*
 * Load the FPU with signalling NaNs.  The bit pattern we use has the
 * property that it represents a signalling NaN no matter whether it is
 * interpreted as single or as double precision.
 *
 * The value to initialize fcr31 to comes in $a0.
 */

        .set push
        SET_HARDFLOAT

LEAF(_init_fpu)
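        /*
         * Set ST0_CU1 in c0_status first: coprocessor 1 has to be
         * enabled before the ctc1/mtc1 accesses below can execute
         * without raising a coprocessor unusable exception.
         */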
        mfc0    t0, CP0_STATUS
        li      t1, ST0_CU1
        or      t0, t1
        mtc0    t0, CP0_STATUS

        ctc1    a0, fcr31

        li      t0, -1
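        /*
         * 0xffffffff has all exponent bits and the mantissa MSB set;
         * with the legacy (pre-NaN2008) MIPS NaN encoding this is a
         * signalling NaN whether the word is read as a single or as
         * half of a double.
         */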

        mtc1    t0, $f0
        mtc1    t0, $f1
        mtc1    t0, $f2
        mtc1    t0, $f3
        mtc1    t0, $f4
        mtc1    t0, $f5
        mtc1    t0, $f6
        mtc1    t0, $f7
        mtc1    t0, $f8
        mtc1    t0, $f9
        mtc1    t0, $f10
        mtc1    t0, $f11
        mtc1    t0, $f12
        mtc1    t0, $f13
        mtc1    t0, $f14
        mtc1    t0, $f15
        mtc1    t0, $f16
        mtc1    t0, $f17
        mtc1    t0, $f18
        mtc1    t0, $f19
        mtc1    t0, $f20
        mtc1    t0, $f21
        mtc1    t0, $f22
        mtc1    t0, $f23
        mtc1    t0, $f24
        mtc1    t0, $f25
        mtc1    t0, $f26
        mtc1    t0, $f27
        mtc1    t0, $f28
        mtc1    t0, $f29
        mtc1    t0, $f30
        mtc1    t0, $f31
        jr      ra
        END(_init_fpu)

        .set pop