2023-12-11 d2ccde1c8e90d38cee87a1b0309ad2827f3fd30d
kernel/arch/mips/include/asm/r4kcache.h
@@ -15,12 +15,14 @@
 #include <linux/stringify.h>
 
 #include <asm/asm.h>
+#include <asm/asm-eva.h>
 #include <asm/cacheops.h>
 #include <asm/compiler.h>
 #include <asm/cpu-features.h>
 #include <asm/cpu-type.h>
 #include <asm/mipsmtregs.h>
 #include <asm/mmzone.h>
+#include <asm/unroll.h>
 #include <linux/uaccess.h> /* for uaccess_kernel() */
 
 extern void (*r4k_blast_dcache)(void);
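
The two headers added above carry the machinery the rest of the patch builds on: asm/asm-eva.h supplies the kernel_cache()/user_cache() instruction wrappers used in the rewritten macros below, and asm/unroll.h supplies the unroll() helper that replaces the hand-unrolled cacheNN_unroll32 blocks. As a rough sketch of the unroll() idea (illustrative only; the in-tree helper handles counts up to 32 and rejects non-constant arguments with a compile-time error):

	/* Illustrative sketch, truncated to 4 cases: expand fn(...)
	 * `times` times through a fall-through switch, so the compiler
	 * sees a straight run of calls it can optimise as a unit. */
	#define unroll(times, fn, ...) do {				\
		BUILD_BUG_ON(!__builtin_constant_p(times));		\
		switch (times) {					\
		case 4: fn(__VA_ARGS__);	/* fall through */	\
		case 3: fn(__VA_ARGS__);	/* fall through */	\
		case 2: fn(__VA_ARGS__);	/* fall through */	\
		case 1: fn(__VA_ARGS__);	/* fall through */	\
		case 0: break;						\
		default: BUILD_BUG();					\
		}							\
	} while (0)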
@@ -39,68 +41,27 @@
  */
 #define INDEX_BASE	CKSEG0
 
-#define cache_op(op,addr)						\
+#define _cache_op(insn, op, addr)					\
 	__asm__ __volatile__(						\
 	"	.set push					\n"	\
 	"	.set noreorder					\n"	\
 	"	.set "MIPS_ISA_ARCH_LEVEL"			\n"	\
-	"	cache %0, %1					\n"	\
+	"	" insn("%0", "%1") "				\n"	\
 	"	.set pop					\n"	\
 	:								\
 	: "i" (op), "R" (*(unsigned char *)(addr)))
 
-#ifdef CONFIG_MIPS_MT
-
-#define __iflush_prologue						\
-	unsigned long redundance;					\
-	extern int mt_n_iflushes;					\
-	for (redundance = 0; redundance < mt_n_iflushes; redundance++) {
-
-#define __iflush_epilogue						\
-	}
-
-#define __dflush_prologue						\
-	unsigned long redundance;					\
-	extern int mt_n_dflushes;					\
-	for (redundance = 0; redundance < mt_n_dflushes; redundance++) {
-
-#define __dflush_epilogue						\
-	}
-
-#define __inv_dflush_prologue __dflush_prologue
-#define __inv_dflush_epilogue __dflush_epilogue
-#define __sflush_prologue {
-#define __sflush_epilogue }
-#define __inv_sflush_prologue __sflush_prologue
-#define __inv_sflush_epilogue __sflush_epilogue
-
-#else /* CONFIG_MIPS_MT */
-
-#define __iflush_prologue {
-#define __iflush_epilogue }
-#define __dflush_prologue {
-#define __dflush_epilogue }
-#define __inv_dflush_prologue {
-#define __inv_dflush_epilogue }
-#define __sflush_prologue {
-#define __sflush_epilogue }
-#define __inv_sflush_prologue {
-#define __inv_sflush_epilogue }
-
-#endif /* CONFIG_MIPS_MT */
+#define cache_op(op, addr)						\
+	_cache_op(kernel_cache, op, addr)
 
 static inline void flush_icache_line_indexed(unsigned long addr)
 {
-	__iflush_prologue
 	cache_op(Index_Invalidate_I, addr);
-	__iflush_epilogue
 }
 
 static inline void flush_dcache_line_indexed(unsigned long addr)
 {
-	__dflush_prologue
 	cache_op(Index_Writeback_Inv_D, addr);
-	__dflush_epilogue
 }
 
 static inline void flush_scache_line_indexed(unsigned long addr)
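
Note the shape of the new abstraction: _cache_op() takes the instruction wrapper as its first argument, and cache_op() pins it to kernel_cache, so the expansion stays the same single `cache %0, %1` the removed macro hard-coded, while user-mode paths further down can pass user_cache to get the EVA `cachee` form instead. A sketch of what the asm-eva.h wrappers look like (an assumption inferred from their use here; on non-EVA configurations user_cache presumably falls back to the plain instruction):

	/* Sketch of the asm-eva.h wrappers this patch relies on. */
	#ifdef CONFIG_EVA
	# define kernel_cache(op, base)	"cache " op ", " base "\n"
	# define user_cache(op, base)	"cachee " op ", " base "\n"
	#else
	# define kernel_cache(op, base)	"cache " op ", " base "\n"
	# define user_cache(op, base)	"cache " op ", " base "\n"
	#endif

The CONFIG_MIPS_MT __iflush/__dflush prologue and epilogue repetition loops are dropped outright; nothing below emits them any more.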
@@ -110,9 +71,8 @@
 
 static inline void flush_icache_line(unsigned long addr)
 {
-	__iflush_prologue
 	switch (boot_cpu_type()) {
-	case CPU_LOONGSON2:
+	case CPU_LOONGSON2EF:
 		cache_op(Hit_Invalidate_I_Loongson2, addr);
 		break;
 
@@ -120,21 +80,16 @@
 		cache_op(Hit_Invalidate_I, addr);
 		break;
 	}
-	__iflush_epilogue
 }
 
 static inline void flush_dcache_line(unsigned long addr)
 {
-	__dflush_prologue
 	cache_op(Hit_Writeback_Inv_D, addr);
-	__dflush_epilogue
 }
 
 static inline void invalidate_dcache_line(unsigned long addr)
 {
-	__dflush_prologue
 	cache_op(Hit_Invalidate_D, addr);
-	__dflush_epilogue
 }
 
 static inline void invalidate_scache_line(unsigned long addr)
@@ -199,7 +154,7 @@
 static inline int protected_flush_icache_line(unsigned long addr)
 {
 	switch (boot_cpu_type()) {
-	case CPU_LOONGSON2:
+	case CPU_LOONGSON2EF:
 		return protected_cache_op(Hit_Invalidate_I_Loongson2, addr);
 
 	default:
@@ -243,338 +198,10 @@
 	cache_op(Page_Invalidate_T, addr);
 }
 
-#ifndef CONFIG_CPU_MIPSR6
-#define cache16_unroll32(base,op)					\
-	__asm__ __volatile__(						\
-	"	.set push					\n"	\
-	"	.set noreorder					\n"	\
-	"	.set mips3					\n"	\
-	"	cache %1, 0x000(%0); cache %1, 0x010(%0)	\n"	\
-	"	cache %1, 0x020(%0); cache %1, 0x030(%0)	\n"	\
-	"	cache %1, 0x040(%0); cache %1, 0x050(%0)	\n"	\
-	"	cache %1, 0x060(%0); cache %1, 0x070(%0)	\n"	\
-	"	cache %1, 0x080(%0); cache %1, 0x090(%0)	\n"	\
-	"	cache %1, 0x0a0(%0); cache %1, 0x0b0(%0)	\n"	\
-	"	cache %1, 0x0c0(%0); cache %1, 0x0d0(%0)	\n"	\
-	"	cache %1, 0x0e0(%0); cache %1, 0x0f0(%0)	\n"	\
-	"	cache %1, 0x100(%0); cache %1, 0x110(%0)	\n"	\
-	"	cache %1, 0x120(%0); cache %1, 0x130(%0)	\n"	\
-	"	cache %1, 0x140(%0); cache %1, 0x150(%0)	\n"	\
-	"	cache %1, 0x160(%0); cache %1, 0x170(%0)	\n"	\
-	"	cache %1, 0x180(%0); cache %1, 0x190(%0)	\n"	\
-	"	cache %1, 0x1a0(%0); cache %1, 0x1b0(%0)	\n"	\
-	"	cache %1, 0x1c0(%0); cache %1, 0x1d0(%0)	\n"	\
-	"	cache %1, 0x1e0(%0); cache %1, 0x1f0(%0)	\n"	\
-	"	.set pop					\n"	\
-		:							\
-		: "r" (base),						\
-		  "i" (op));
-
-#define cache32_unroll32(base,op)					\
-	__asm__ __volatile__(						\
-	"	.set push					\n"	\
-	"	.set noreorder					\n"	\
-	"	.set mips3					\n"	\
-	"	cache %1, 0x000(%0); cache %1, 0x020(%0)	\n"	\
-	"	cache %1, 0x040(%0); cache %1, 0x060(%0)	\n"	\
-	"	cache %1, 0x080(%0); cache %1, 0x0a0(%0)	\n"	\
-	"	cache %1, 0x0c0(%0); cache %1, 0x0e0(%0)	\n"	\
-	"	cache %1, 0x100(%0); cache %1, 0x120(%0)	\n"	\
-	"	cache %1, 0x140(%0); cache %1, 0x160(%0)	\n"	\
-	"	cache %1, 0x180(%0); cache %1, 0x1a0(%0)	\n"	\
-	"	cache %1, 0x1c0(%0); cache %1, 0x1e0(%0)	\n"	\
-	"	cache %1, 0x200(%0); cache %1, 0x220(%0)	\n"	\
-	"	cache %1, 0x240(%0); cache %1, 0x260(%0)	\n"	\
-	"	cache %1, 0x280(%0); cache %1, 0x2a0(%0)	\n"	\
-	"	cache %1, 0x2c0(%0); cache %1, 0x2e0(%0)	\n"	\
-	"	cache %1, 0x300(%0); cache %1, 0x320(%0)	\n"	\
-	"	cache %1, 0x340(%0); cache %1, 0x360(%0)	\n"	\
-	"	cache %1, 0x380(%0); cache %1, 0x3a0(%0)	\n"	\
-	"	cache %1, 0x3c0(%0); cache %1, 0x3e0(%0)	\n"	\
-	"	.set pop					\n"	\
-		:							\
-		: "r" (base),						\
-		  "i" (op));
-
-#define cache64_unroll32(base,op)					\
-	__asm__ __volatile__(						\
-	"	.set push					\n"	\
-	"	.set noreorder					\n"	\
-	"	.set mips3					\n"	\
-	"	cache %1, 0x000(%0); cache %1, 0x040(%0)	\n"	\
-	"	cache %1, 0x080(%0); cache %1, 0x0c0(%0)	\n"	\
-	"	cache %1, 0x100(%0); cache %1, 0x140(%0)	\n"	\
-	"	cache %1, 0x180(%0); cache %1, 0x1c0(%0)	\n"	\
-	"	cache %1, 0x200(%0); cache %1, 0x240(%0)	\n"	\
-	"	cache %1, 0x280(%0); cache %1, 0x2c0(%0)	\n"	\
-	"	cache %1, 0x300(%0); cache %1, 0x340(%0)	\n"	\
-	"	cache %1, 0x380(%0); cache %1, 0x3c0(%0)	\n"	\
-	"	cache %1, 0x400(%0); cache %1, 0x440(%0)	\n"	\
-	"	cache %1, 0x480(%0); cache %1, 0x4c0(%0)	\n"	\
-	"	cache %1, 0x500(%0); cache %1, 0x540(%0)	\n"	\
-	"	cache %1, 0x580(%0); cache %1, 0x5c0(%0)	\n"	\
-	"	cache %1, 0x600(%0); cache %1, 0x640(%0)	\n"	\
-	"	cache %1, 0x680(%0); cache %1, 0x6c0(%0)	\n"	\
-	"	cache %1, 0x700(%0); cache %1, 0x740(%0)	\n"	\
-	"	cache %1, 0x780(%0); cache %1, 0x7c0(%0)	\n"	\
-	"	.set pop					\n"	\
-		:							\
-		: "r" (base),						\
-		  "i" (op));
-
-#define cache128_unroll32(base,op)					\
-	__asm__ __volatile__(						\
-	"	.set push					\n"	\
-	"	.set noreorder					\n"	\
-	"	.set mips3					\n"	\
-	"	cache %1, 0x000(%0); cache %1, 0x080(%0)	\n"	\
-	"	cache %1, 0x100(%0); cache %1, 0x180(%0)	\n"	\
-	"	cache %1, 0x200(%0); cache %1, 0x280(%0)	\n"	\
-	"	cache %1, 0x300(%0); cache %1, 0x380(%0)	\n"	\
-	"	cache %1, 0x400(%0); cache %1, 0x480(%0)	\n"	\
-	"	cache %1, 0x500(%0); cache %1, 0x580(%0)	\n"	\
-	"	cache %1, 0x600(%0); cache %1, 0x680(%0)	\n"	\
-	"	cache %1, 0x700(%0); cache %1, 0x780(%0)	\n"	\
-	"	cache %1, 0x800(%0); cache %1, 0x880(%0)	\n"	\
-	"	cache %1, 0x900(%0); cache %1, 0x980(%0)	\n"	\
-	"	cache %1, 0xa00(%0); cache %1, 0xa80(%0)	\n"	\
-	"	cache %1, 0xb00(%0); cache %1, 0xb80(%0)	\n"	\
-	"	cache %1, 0xc00(%0); cache %1, 0xc80(%0)	\n"	\
-	"	cache %1, 0xd00(%0); cache %1, 0xd80(%0)	\n"	\
-	"	cache %1, 0xe00(%0); cache %1, 0xe80(%0)	\n"	\
-	"	cache %1, 0xf00(%0); cache %1, 0xf80(%0)	\n"	\
-	"	.set pop					\n"	\
-		:							\
-		: "r" (base),						\
-		  "i" (op));
-
-#else
-/*
- * MIPS R6 changed the cache opcode and moved to a 8-bit offset field.
- * This means we now need to increment the base register before we flush
- * more cache lines
- */
-#define cache16_unroll32(base,op)				\
-	__asm__ __volatile__(					\
-	"	.set push\n"					\
-	"	.set noreorder\n"				\
-	"	.set mips64r6\n"				\
-	"	.set noat\n"					\
-	"	cache %1, 0x000(%0); cache %1, 0x010(%0)\n"	\
-	"	cache %1, 0x020(%0); cache %1, 0x030(%0)\n"	\
-	"	cache %1, 0x040(%0); cache %1, 0x050(%0)\n"	\
-	"	cache %1, 0x060(%0); cache %1, 0x070(%0)\n"	\
-	"	cache %1, 0x080(%0); cache %1, 0x090(%0)\n"	\
-	"	cache %1, 0x0a0(%0); cache %1, 0x0b0(%0)\n"	\
-	"	cache %1, 0x0c0(%0); cache %1, 0x0d0(%0)\n"	\
-	"	cache %1, 0x0e0(%0); cache %1, 0x0f0(%0)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, %0, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x010($1)\n"	\
-	"	cache %1, 0x020($1); cache %1, 0x030($1)\n"	\
-	"	cache %1, 0x040($1); cache %1, 0x050($1)\n"	\
-	"	cache %1, 0x060($1); cache %1, 0x070($1)\n"	\
-	"	cache %1, 0x080($1); cache %1, 0x090($1)\n"	\
-	"	cache %1, 0x0a0($1); cache %1, 0x0b0($1)\n"	\
-	"	cache %1, 0x0c0($1); cache %1, 0x0d0($1)\n"	\
-	"	cache %1, 0x0e0($1); cache %1, 0x0f0($1)\n"	\
-	"	.set pop\n"					\
-	:							\
-	: "r" (base),						\
-	  "i" (op));
-
-#define cache32_unroll32(base,op)				\
-	__asm__ __volatile__(					\
-	"	.set push\n"					\
-	"	.set noreorder\n"				\
-	"	.set mips64r6\n"				\
-	"	.set noat\n"					\
-	"	cache %1, 0x000(%0); cache %1, 0x020(%0)\n"	\
-	"	cache %1, 0x040(%0); cache %1, 0x060(%0)\n"	\
-	"	cache %1, 0x080(%0); cache %1, 0x0a0(%0)\n"	\
-	"	cache %1, 0x0c0(%0); cache %1, 0x0e0(%0)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, %0, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x020($1)\n"	\
-	"	cache %1, 0x040($1); cache %1, 0x060($1)\n"	\
-	"	cache %1, 0x080($1); cache %1, 0x0a0($1)\n"	\
-	"	cache %1, 0x0c0($1); cache %1, 0x0e0($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x020($1)\n"	\
-	"	cache %1, 0x040($1); cache %1, 0x060($1)\n"	\
-	"	cache %1, 0x080($1); cache %1, 0x0a0($1)\n"	\
-	"	cache %1, 0x0c0($1); cache %1, 0x0e0($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100\n"	\
-	"	cache %1, 0x000($1); cache %1, 0x020($1)\n"	\
-	"	cache %1, 0x040($1); cache %1, 0x060($1)\n"	\
-	"	cache %1, 0x080($1); cache %1, 0x0a0($1)\n"	\
-	"	cache %1, 0x0c0($1); cache %1, 0x0e0($1)\n"	\
-	"	.set pop\n"					\
-	:							\
-	: "r" (base),						\
-	  "i" (op));
-
-#define cache64_unroll32(base,op)				\
-	__asm__ __volatile__(					\
-	"	.set push\n"					\
-	"	.set noreorder\n"				\
-	"	.set mips64r6\n"				\
-	"	.set noat\n"					\
-	"	cache %1, 0x000(%0); cache %1, 0x040(%0)\n"	\
-	"	cache %1, 0x080(%0); cache %1, 0x0c0(%0)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, %0, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x040($1)\n"	\
-	"	cache %1, 0x080($1); cache %1, 0x0c0($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x040($1)\n"	\
-	"	cache %1, 0x080($1); cache %1, 0x0c0($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x040($1)\n"	\
-	"	cache %1, 0x080($1); cache %1, 0x0c0($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x040($1)\n"	\
-	"	cache %1, 0x080($1); cache %1, 0x0c0($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x040($1)\n"	\
-	"	cache %1, 0x080($1); cache %1, 0x0c0($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x040($1)\n"	\
-	"	cache %1, 0x080($1); cache %1, 0x0c0($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x040($1)\n"	\
-	"	cache %1, 0x080($1); cache %1, 0x0c0($1)\n"	\
-	"	.set pop\n"					\
-	:							\
-	: "r" (base),						\
-	  "i" (op));
-
-#define cache128_unroll32(base,op)				\
-	__asm__ __volatile__(					\
-	"	.set push\n"					\
-	"	.set noreorder\n"				\
-	"	.set mips64r6\n"				\
-	"	.set noat\n"					\
-	"	cache %1, 0x000(%0); cache %1, 0x080(%0)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, %0, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x080($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x080($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x080($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x080($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x080($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x080($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x080($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x080($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x080($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x080($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x080($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x080($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x080($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x080($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x080($1)\n"	\
-	"	"__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"	\
-	"	cache %1, 0x000($1); cache %1, 0x080($1)\n"	\
-	"	.set pop\n"					\
-	:							\
-	: "r" (base),						\
-	  "i" (op));
-#endif /* CONFIG_CPU_MIPSR6 */
-
-/*
- * Perform the cache operation specified by op using a user mode virtual
- * address while in kernel mode.
- */
-#define cache16_unroll32_user(base,op)					\
-	__asm__ __volatile__(						\
-	"	.set push					\n"	\
-	"	.set noreorder					\n"	\
-	"	.set mips0					\n"	\
-	"	.set eva					\n"	\
-	"	cachee %1, 0x000(%0); cachee %1, 0x010(%0)	\n"	\
-	"	cachee %1, 0x020(%0); cachee %1, 0x030(%0)	\n"	\
-	"	cachee %1, 0x040(%0); cachee %1, 0x050(%0)	\n"	\
-	"	cachee %1, 0x060(%0); cachee %1, 0x070(%0)	\n"	\
-	"	cachee %1, 0x080(%0); cachee %1, 0x090(%0)	\n"	\
-	"	cachee %1, 0x0a0(%0); cachee %1, 0x0b0(%0)	\n"	\
-	"	cachee %1, 0x0c0(%0); cachee %1, 0x0d0(%0)	\n"	\
-	"	cachee %1, 0x0e0(%0); cachee %1, 0x0f0(%0)	\n"	\
-	"	cachee %1, 0x100(%0); cachee %1, 0x110(%0)	\n"	\
-	"	cachee %1, 0x120(%0); cachee %1, 0x130(%0)	\n"	\
-	"	cachee %1, 0x140(%0); cachee %1, 0x150(%0)	\n"	\
-	"	cachee %1, 0x160(%0); cachee %1, 0x170(%0)	\n"	\
-	"	cachee %1, 0x180(%0); cachee %1, 0x190(%0)	\n"	\
-	"	cachee %1, 0x1a0(%0); cachee %1, 0x1b0(%0)	\n"	\
-	"	cachee %1, 0x1c0(%0); cachee %1, 0x1d0(%0)	\n"	\
-	"	cachee %1, 0x1e0(%0); cachee %1, 0x1f0(%0)	\n"	\
-	"	.set pop					\n"	\
-		:							\
-		: "r" (base),						\
-		  "i" (op));
-
-#define cache32_unroll32_user(base, op)					\
-	__asm__ __volatile__(						\
-	"	.set push					\n"	\
-	"	.set noreorder					\n"	\
-	"	.set mips0					\n"	\
-	"	.set eva					\n"	\
-	"	cachee %1, 0x000(%0); cachee %1, 0x020(%0)	\n"	\
-	"	cachee %1, 0x040(%0); cachee %1, 0x060(%0)	\n"	\
-	"	cachee %1, 0x080(%0); cachee %1, 0x0a0(%0)	\n"	\
-	"	cachee %1, 0x0c0(%0); cachee %1, 0x0e0(%0)	\n"	\
-	"	cachee %1, 0x100(%0); cachee %1, 0x120(%0)	\n"	\
-	"	cachee %1, 0x140(%0); cachee %1, 0x160(%0)	\n"	\
-	"	cachee %1, 0x180(%0); cachee %1, 0x1a0(%0)	\n"	\
-	"	cachee %1, 0x1c0(%0); cachee %1, 0x1e0(%0)	\n"	\
-	"	cachee %1, 0x200(%0); cachee %1, 0x220(%0)	\n"	\
-	"	cachee %1, 0x240(%0); cachee %1, 0x260(%0)	\n"	\
-	"	cachee %1, 0x280(%0); cachee %1, 0x2a0(%0)	\n"	\
-	"	cachee %1, 0x2c0(%0); cachee %1, 0x2e0(%0)	\n"	\
-	"	cachee %1, 0x300(%0); cachee %1, 0x320(%0)	\n"	\
-	"	cachee %1, 0x340(%0); cachee %1, 0x360(%0)	\n"	\
-	"	cachee %1, 0x380(%0); cachee %1, 0x3a0(%0)	\n"	\
-	"	cachee %1, 0x3c0(%0); cachee %1, 0x3e0(%0)	\n"	\
-	"	.set pop					\n"	\
-		:							\
-		: "r" (base),						\
-		  "i" (op));
-
-#define cache64_unroll32_user(base, op)					\
-	__asm__ __volatile__(						\
-	"	.set push					\n"	\
-	"	.set noreorder					\n"	\
-	"	.set mips0					\n"	\
-	"	.set eva					\n"	\
-	"	cachee %1, 0x000(%0); cachee %1, 0x040(%0)	\n"	\
-	"	cachee %1, 0x080(%0); cachee %1, 0x0c0(%0)	\n"	\
-	"	cachee %1, 0x100(%0); cachee %1, 0x140(%0)	\n"	\
-	"	cachee %1, 0x180(%0); cachee %1, 0x1c0(%0)	\n"	\
-	"	cachee %1, 0x200(%0); cachee %1, 0x240(%0)	\n"	\
-	"	cachee %1, 0x280(%0); cachee %1, 0x2c0(%0)	\n"	\
-	"	cachee %1, 0x300(%0); cachee %1, 0x340(%0)	\n"	\
-	"	cachee %1, 0x380(%0); cachee %1, 0x3c0(%0)	\n"	\
-	"	cachee %1, 0x400(%0); cachee %1, 0x440(%0)	\n"	\
-	"	cachee %1, 0x480(%0); cachee %1, 0x4c0(%0)	\n"	\
-	"	cachee %1, 0x500(%0); cachee %1, 0x540(%0)	\n"	\
-	"	cachee %1, 0x580(%0); cachee %1, 0x5c0(%0)	\n"	\
-	"	cachee %1, 0x600(%0); cachee %1, 0x640(%0)	\n"	\
-	"	cachee %1, 0x680(%0); cachee %1, 0x6c0(%0)	\n"	\
-	"	cachee %1, 0x700(%0); cachee %1, 0x740(%0)	\n"	\
-	"	cachee %1, 0x780(%0); cachee %1, 0x7c0(%0)	\n"	\
-	"	.set pop					\n"	\
-		:							\
-		: "r" (base),						\
-		  "i" (op));
+#define cache_unroll(times, insn, op, addr, lsize) do {	\
+	int i = 0;						\
+	unroll(times, _cache_op, insn, op, (addr) + (i++ * (lsize))); \
+} while (0)
 
 /* build blast_xxx, blast_xxx_page, blast_xxx_page_indexed */
 #define __BUILD_BLAST_CACHE(pfx, desc, indexop, hitop, lsize, extra)	\
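
cache_unroll() is what retires every cacheNN_unroll32 variant above, the R6 and EVA user-mode ones included. unroll() expands to `times` back-to-back _cache_op() invocations, so the compiler constant-folds `i++ * lsize` into the same fixed offsets the old macros spelled out by hand; and since the address reaches the asm through the "R" memory constraint, the compiler also takes care of whatever base-register increments an ISA needs, which is what lets the special MIPS R6 variants (with their 8-bit cache offset field) disappear. Conceptually, a 4-way unroll over a 0x20-byte line size expands as:

	/* Conceptual expansion of cache_unroll(4, kernel_cache, op, addr, 0x20): */
	int i = 0;
	_cache_op(kernel_cache, op, (addr) + (i++ * 0x20));	/* 0x00(addr) */
	_cache_op(kernel_cache, op, (addr) + (i++ * 0x20));	/* 0x20(addr) */
	_cache_op(kernel_cache, op, (addr) + (i++ * 0x20));	/* 0x40(addr) */
	_cache_op(kernel_cache, op, (addr) + (i++ * 0x20));	/* 0x60(addr) */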
@@ -587,13 +214,10 @@
 			  current_cpu_data.desc.waybit;		\
 	unsigned long ws, addr;					\
 								\
-	__##pfx##flush_prologue					\
-								\
 	for (ws = 0; ws < ws_end; ws += ws_inc)			\
 		for (addr = start; addr < end; addr += lsize * 32)	\
-			cache##lsize##_unroll32(addr|ws, indexop);	\
-								\
-	__##pfx##flush_epilogue					\
+			cache_unroll(32, kernel_cache, indexop,	\
+				     addr | ws, lsize);		\
 }								\
 								\
 static inline void extra##blast_##pfx##cache##lsize##_page(unsigned long page)	\
@@ -601,14 +225,10 @@
 	unsigned long start = page;				\
 	unsigned long end = page + PAGE_SIZE;			\
 								\
-	__##pfx##flush_prologue					\
-								\
 	do {							\
-		cache##lsize##_unroll32(start, hitop);		\
+		cache_unroll(32, kernel_cache, hitop, start, lsize);	\
 		start += lsize * 32;				\
 	} while (start < end);					\
-								\
-	__##pfx##flush_epilogue					\
 }								\
 								\
 static inline void extra##blast_##pfx##cache##lsize##_page_indexed(unsigned long page)	\
@@ -621,13 +241,10 @@
 			  current_cpu_data.desc.waybit;		\
 	unsigned long ws, addr;					\
 								\
-	__##pfx##flush_prologue					\
-								\
 	for (ws = 0; ws < ws_end; ws += ws_inc)			\
 		for (addr = start; addr < end; addr += lsize * 32)	\
-			cache##lsize##_unroll32(addr|ws, indexop);	\
-								\
-	__##pfx##flush_epilogue					\
+			cache_unroll(32, kernel_cache, indexop,	\
+				     addr | ws, lsize);		\
 }
 
 __BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, 16, )
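
With the prologue/epilogue and unroll macros gone, each generated helper is an ordinary loop around cache_unroll(). For instance, the _page variant produced by the 16-byte dcache instantiation above expands to roughly:

	static inline void blast_dcache16_page(unsigned long page)
	{
		unsigned long start = page;
		unsigned long end = page + PAGE_SIZE;

		do {
			cache_unroll(32, kernel_cache, Hit_Writeback_Inv_D,
				     start, 16);
			start += 16 * 32;	/* lsize * 32 */
		} while (start < end);
	}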
@@ -657,14 +274,10 @@
 	unsigned long start = page;				\
 	unsigned long end = page + PAGE_SIZE;			\
 								\
-	__##pfx##flush_prologue					\
-								\
 	do {							\
-		cache##lsize##_unroll32_user(start, hitop);	\
+		cache_unroll(32, user_cache, hitop, start, lsize);	\
 		start += lsize * 32;				\
 	} while (start < end);					\
-								\
-	__##pfx##flush_epilogue					\
 }
 
 __BUILD_BLAST_USER_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D,
@@ -686,16 +299,12 @@
 	unsigned long addr = start & ~(lsize - 1);		\
 	unsigned long aend = (end - 1) & ~(lsize - 1);		\
 								\
-	__##pfx##flush_prologue					\
-								\
 	while (1) {						\
 		prot##cache_op(hitop, addr);			\
 		if (addr == aend)				\
 			break;					\
 		addr += lsize;					\
 	}							\
-								\
-	__##pfx##flush_epilogue					\
 }
 
 #ifndef CONFIG_EVA
@@ -713,8 +322,6 @@
 	unsigned long addr = start & ~(lsize - 1);		\
 	unsigned long aend = (end - 1) & ~(lsize - 1);		\
 								\
-	__##pfx##flush_prologue					\
-								\
 	if (!uaccess_kernel()) {				\
 		while (1) {					\
 			protected_cachee_op(hitop, addr);	\
@@ -731,7 +338,6 @@
 		}						\
 								\
 	}							\
-	__##pfx##flush_epilogue					\
 }
 
 __BUILD_PROT_BLAST_CACHE_RANGE(d, dcache, Hit_Writeback_Inv_D)
@@ -761,7 +367,8 @@
 								\
 	for (ws = 0; ws < ws_end; ws += ws_inc)			\
 		for (addr = start; addr < end; addr += lsize * 32)	\
-			cache##lsize##_unroll32(addr|ws, indexop);	\
+			cache_unroll(32, kernel_cache, indexop,	\
+				     addr | ws, lsize);		\
 }
 
 __BUILD_BLAST_CACHE_NODE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 16)