/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * vDSO provided cache flush routines
 *
 * Copyright (C) 2004 Benjamin Herrenschmidt (benh@kernel.crashing.org),
 *                    IBM Corp.
 */
#include <asm/processor.h>
#include <asm/ppc_asm.h>
#include <asm/vdso.h>
#include <asm/vdso_datapage.h>
#include <asm/asm-offsets.h>
#include <asm/cache.h>

	.text

/*
 * Default "generic" version of __kernel_sync_dicache.
 *
 * void __kernel_sync_dicache(unsigned long start, unsigned long end)
 *
 * Flushes the data cache and invalidates the instruction cache for the
 * provided range [start, end).
 */
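
/*
 * Roughly equivalent C, as a sketch only: dcbst(), icbi(), sync() and
 * isync() are hypothetical stand-ins for the corresponding instructions,
 * and on ppc64 the block sizes come from the vDSO data page (ppc32 uses
 * the compile-time L1_CACHE_BYTES for both caches):
 *
 *	void sync_dicache(unsigned long start, unsigned long end)
 *	{
 *		unsigned long p;
 *
 *		for (p = start & ~(dcache_block_size - 1); p < end;
 *		     p += dcache_block_size)
 *			dcbst(p);	// write dirty data back to memory
 *		sync();			// order the dcbst ahead of the icbi
 *		for (p = start & ~(icache_block_size - 1); p < end;
 *		     p += icache_block_size)
 *			icbi(p);	// invalidate stale instructions
 *		isync();		// drop already-fetched instructions
 *	}
 */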
V_FUNCTION_BEGIN(__kernel_sync_dicache)
  .cfi_startproc
#ifdef CONFIG_PPC64
	mflr	r12			/* get_datapage clobbers LR */
  .cfi_register lr,r12
	get_datapage	r10, r0
	mtlr	r12
#endif

#ifdef CONFIG_PPC64
	lwz	r7,CFG_DCACHE_BLOCKSZ(r10)	/* runtime D-cache block size */
	addi	r5,r7,-1
#else
	li	r5, L1_CACHE_BYTES - 1	/* compile-time block size on ppc32 */
#endif
	andc	r6,r3,r5		/* round low to line bdy */
	subf	r8,r6,r4		/* compute length */
	add	r8,r8,r5		/* ensure we get enough */
#ifdef CONFIG_PPC64
	lwz	r9,CFG_DCACHE_LOGBLOCKSZ(r10)
	srw.	r8,r8,r9		/* compute line count */
#else
	srwi.	r8, r8, L1_CACHE_SHIFT
	mr	r7, r6			/* keep rounded start for the icbi loop */
#endif
	crclr	cr0*4+so		/* clear cr0.SO: syscall-style success */
	beqlr				/* nothing to do? */
	mtctr	r8
1:	dcbst	0,r6			/* write dirty block back to memory */
#ifdef CONFIG_PPC64
	add	r6,r6,r7
#else
	addi	r6, r6, L1_CACHE_BYTES
#endif
	bdnz	1b
	sync				/* complete the dcbst before any icbi */

	/* Now invalidate the instruction cache */

#ifdef CONFIG_PPC64
	lwz	r7,CFG_ICACHE_BLOCKSZ(r10)	/* runtime I-cache block size */
	addi	r5,r7,-1
	andc	r6,r3,r5		/* round low to line bdy */
	subf	r8,r6,r4		/* compute length */
	add	r8,r8,r5
	lwz	r9,CFG_ICACHE_LOGBLOCKSZ(r10)
	srw.	r8,r8,r9		/* compute line count */
	crclr	cr0*4+so
	beqlr				/* nothing to do? */
#endif
	mtctr	r8
#ifdef CONFIG_PPC64
2:	icbi	0,r6			/* invalidate the I-cache block */
	add	r6,r6,r7
#else
2:	icbi	0, r7
	addi	r7, r7, L1_CACHE_BYTES
#endif
	bdnz	2b
	isync				/* discard prefetched instructions */
	li	r3,0			/* return 0 (success) */
	blr
  .cfi_endproc
V_FUNCTION_END(__kernel_sync_dicache)
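
/*
 * Illustrative use from userspace (a sketch, not part of this file): a JIT
 * that has just written instructions into a buffer could resolve the symbol
 * from the vDSO and call it like a normal function. emit_code() is a
 * hypothetical emitter, and on some C libraries dlvsym() with the vDSO's
 * symbol version is needed in place of dlsym():
 *
 *	void *vdso = dlopen("linux-vdso.so.1", RTLD_NOW | RTLD_NOLOAD);
 *	void (*sync_dicache)(unsigned long, unsigned long) =
 *		dlsym(vdso, "__kernel_sync_dicache");
 *
 *	emit_code(buf, len);
 *	sync_dicache((unsigned long)buf, (unsigned long)buf + len);
 */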


/*
 * POWER5 version of __kernel_sync_dicache.
 *
 * On CPUs that keep the instruction cache coherent with the data cache
 * (such as POWER5), the dcbst/icbi loops are unnecessary; a sync followed
 * by isync is enough to make freshly written instructions visible to
 * subsequent execution.
 */
V_FUNCTION_BEGIN(__kernel_sync_dicache_p5)
  .cfi_startproc
	crclr	cr0*4+so		/* clear cr0.SO: syscall-style success */
	sync
	isync
	li	r3,0			/* return 0 (success) */
	blr
  .cfi_endproc
V_FUNCTION_END(__kernel_sync_dicache_p5)
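
/*
 * Note: userspace is not expected to pick between the two variants itself.
 * At boot the kernel has historically patched the vDSO symbol table (see
 * vdso_patches in arch/powerpc/kernel/vdso.c) so that __kernel_sync_dicache
 * resolves to this _p5 variant on CPUs advertising CPU_FTR_COHERENT_ICACHE.
 */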