commit 102a0743326a03cd1a1202ceda21e175b7d3575c
Date:   2024-02-20

diff --git a/kernel/arch/x86/lib/memset_64.S b/kernel/arch/x86/lib/memset_64.S
--- a/kernel/arch/x86/lib/memset_64.S
+++ b/kernel/arch/x86/lib/memset_64.S
@@ -3,7 +3,7 @@
 
 #include <linux/linkage.h>
 #include <asm/cpufeatures.h>
-#include <asm/alternative-asm.h>
+#include <asm/alternative.h>
 #include <asm/export.h>
 
 /*
@@ -17,10 +17,8 @@
  *
  * rax   original destination
  */
-.weak memset
-.p2align 4, 0x90
-memset:
-ENTRY(__memset)
+SYM_FUNC_START_WEAK(memset)
+SYM_FUNC_START(__memset)
 	/*
 	 * Some CPUs support enhanced REP MOVSB/STOSB feature. It is recommended
 	 * to use it when possible. If not available, use fast string instructions.
@@ -42,9 +40,9 @@
 	movl %edx,%ecx
 	rep stosb
 	movq %r9,%rax
-	ret
-ENDPROC(memset)
-ENDPROC(__memset)
+	RET
+SYM_FUNC_END(__memset)
+SYM_FUNC_END_ALIAS(memset)
 EXPORT_SYMBOL(memset)
 EXPORT_SYMBOL(__memset)
 
@@ -59,16 +57,16 @@
  *
  * rax   original destination
  */
-ENTRY(memset_erms)
+SYM_FUNC_START_LOCAL(memset_erms)
 	movq %rdi,%r9
 	movb %sil,%al
 	movq %rdx,%rcx
 	rep stosb
 	movq %r9,%rax
-	ret
-ENDPROC(memset_erms)
+	RET
+SYM_FUNC_END(memset_erms)
 
-ENTRY(memset_orig)
+SYM_FUNC_START_LOCAL(memset_orig)
 	movq %rdi,%r10
 
 	/* expand byte value */
@@ -127,7 +125,7 @@
 
 .Lende:
	movq %r10,%rax
-	ret
+	RET
 
 .Lbad_alignment:
 	cmpq $7,%rdx
@@ -139,4 +137,4 @@
 	subq %r8,%rdx
 	jmp .Lafter_bad_alignment
 .Lfinal:
-ENDPROC(memset_orig)
+SYM_FUNC_END(memset_orig)
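
For reference, the new annotation pattern on a minimal function. This is a
sketch only, assuming just the SYM_FUNC_* and RET macros visible in the hunks
above; my_func is a placeholder name, not a symbol from this file.

/* Sketch: new-style linkage annotations on a placeholder function. */
#include <linux/linkage.h>
#include <asm/export.h>

SYM_FUNC_START(my_func)		/* replaces ENTRY(my_func) */
	movq %rdi,%rax		/* trivial body: return the first argument */
	RET			/* replaces a bare ret; the RET macro can
				 * expand to more than a plain ret, e.g. for
				 * straight-line-speculation hardening */
SYM_FUNC_END(my_func)		/* replaces ENDPROC(my_func) */
EXPORT_SYMBOL(my_func)

SYM_FUNC_START_LOCAL is the variant for file-internal symbols (memset_erms
and memset_orig above), while SYM_FUNC_START_WEAK plus SYM_FUNC_END_ALIAS
replace the open-coded .weak/.p2align/label sequence that previously made
memset a weak alias for __memset.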
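
The in-function comment about enhanced REP MOVSB/STOSB refers to runtime
dispatch via the alternatives mechanism from <asm/alternative.h> (the include
touched in the first hunk). A hedged sketch of how such a dispatch is commonly
written; the exact dispatch line in this file is not shown in these hunks.

	/* Assumed dispatch: default to memset_orig; patch the jump away
	 * (fall through to the fast-string path) when the CPU has
	 * X86_FEATURE_REP_GOOD; patch in a jump to memset_erms when it
	 * has X86_FEATURE_ERMS. */
	ALTERNATIVE_2 "jmp memset_orig", "", X86_FEATURE_REP_GOOD, \
		      "jmp memset_erms", X86_FEATURE_ERMS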