```diff
@@ -6,7 +6,7 @@
 
 
 /* bytes per L1 cache line */
-#if defined(CONFIG_PPC_8xx) || defined(CONFIG_403GCX)
+#if defined(CONFIG_PPC_8xx)
 #define L1_CACHE_SHIFT		4
 #define MAX_COPY_PREFETCH	1
 #define IFETCH_ALIGN_SHIFT	2
@@ -33,7 +33,8 @@
 
 #define IFETCH_ALIGN_BYTES	(1 << IFETCH_ALIGN_SHIFT)
 
-#if defined(__powerpc64__) && !defined(__ASSEMBLY__)
+#if !defined(__ASSEMBLY__)
+#ifdef CONFIG_PPC64
 
 struct ppc_cache_info {
 	u32 size;
@@ -53,25 +54,52 @@
 };
 
 extern struct ppc64_caches ppc64_caches;
-#endif /* __powerpc64__ && ! __ASSEMBLY__ */
 
-#if defined(__ASSEMBLY__)
-/*
- * For a snooping icache, we still need a dummy icbi to purge all the
- * prefetched instructions from the ifetch buffers. We also need a sync
- * before the icbi to order the the actual stores to memory that might
- * have modified instructions with the icbi.
- */
-#define PURGE_PREFETCHED_INS	\
-	sync;			\
-	icbi	0,r3;		\
-	sync;			\
-	isync
+static inline u32 l1_dcache_shift(void)
+{
+	return ppc64_caches.l1d.log_block_size;
+}
 
+static inline u32 l1_dcache_bytes(void)
+{
+	return ppc64_caches.l1d.block_size;
+}
+
+static inline u32 l1_icache_shift(void)
+{
+	return ppc64_caches.l1i.log_block_size;
+}
+
+static inline u32 l1_icache_bytes(void)
+{
+	return ppc64_caches.l1i.block_size;
+}
 #else
-#define __read_mostly __attribute__((__section__(".data..read_mostly")))
+static inline u32 l1_dcache_shift(void)
+{
+	return L1_CACHE_SHIFT;
+}
 
-#ifdef CONFIG_6xx
+static inline u32 l1_dcache_bytes(void)
+{
+	return L1_CACHE_BYTES;
+}
+
+static inline u32 l1_icache_shift(void)
+{
+	return L1_CACHE_SHIFT;
+}
+
+static inline u32 l1_icache_bytes(void)
+{
+	return L1_CACHE_BYTES;
+}
+
+#endif
+
+#define __read_mostly __section(".data..read_mostly")
+
+#ifdef CONFIG_PPC_BOOK3S_32
 extern long _get_L2CR(void);
 extern long _get_L3CR(void);
 extern void _set_L2CR(unsigned long);
@@ -102,6 +130,17 @@
 {
 	__asm__ __volatile__ ("dcbst 0, %0" : : "r"(addr) : "memory");
 }
+
+static inline void icbi(void *addr)
+{
+	asm volatile ("icbi 0, %0" : : "r"(addr) : "memory");
+}
+
+static inline void iccci(void *addr)
+{
+	asm volatile ("iccci 0, %0" : : "r"(addr) : "memory");
+}
+
 #endif /* !__ASSEMBLY__ */
 #endif /* __KERNEL__ */
 #endif /* _ASM_POWERPC_CACHE_H */
```
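The net effect of the diff is that C code can now discover the L1 block geometry (from the boot-probed `ppc64_caches` on 64-bit, or the compile-time `L1_CACHE_SHIFT`/`L1_CACHE_BYTES` fallback on 32-bit) and issue `icbi`/`iccci` directly, instead of relying on the assembly-only `PURGE_PREFETCHED_INS` macro. As a rough illustration only, not code from this header, the sketch below shows how a caller might combine these helpers with the `dcbst()` helper visible as context in the last hunk, while keeping the sync/icbi/sync/isync ordering the removed macro enforced. The function name and its `start`/`stop` parameters are hypothetical.

```c
/*
 * Hypothetical sketch, assuming kernel context: make instructions that
 * were just written through the dcache visible to instruction fetch.
 */
static inline void example_sync_icache_range(unsigned long start,
					     unsigned long stop)
{
	unsigned long dbytes = l1_dcache_bytes();
	unsigned long ibytes = l1_icache_bytes();
	unsigned long addr;

	/* Push the modified instructions out of the data cache... */
	for (addr = start & ~(dbytes - 1); addr < stop; addr += dbytes)
		dcbst((void *)addr);
	asm volatile("sync" : : : "memory");	/* order the stores before icbi */

	/* ...then drop any stale copies from the instruction cache. */
	for (addr = start & ~(ibytes - 1); addr < stop; addr += ibytes)
		icbi((void *)addr);
	asm volatile("sync" : : : "memory");
	asm volatile("isync" : : : "memory");	/* discard prefetched instructions */
}
```

Because both the 64-bit and 32-bit branches of the header now provide the same `l1_dcache_*`/`l1_icache_*` accessors, a loop like the one above can be written once in C and work on either configuration, which is the point of adding the helpers.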