@@ -20,7 +20,6 @@
 #include <asm/inst.h>
 #include <asm/io.h>
 #include <asm/page.h>
-#include <asm/pgtable.h>
 #include <asm/prefetch.h>
 #include <asm/bootinfo.h>
 #include <asm/mipsregs.h>
@@ -187,7 +186,7 @@
 		}
 		break;

-	case CPU_LOONGSON3:
+	case CPU_LOONGSON64:
 		/* Loongson-3 only support the Pref_Load/Pref_Store. */
 		pref_bias_clear_store = 128;
 		pref_bias_copy_load = 128;
@@ -251,14 +250,16 @@
 		if (cpu_has_cache_cdex_s) {
 			uasm_i_cache(buf, Create_Dirty_Excl_SD, off, A0);
 		} else if (cpu_has_cache_cdex_p) {
-			if (R4600_V1_HIT_CACHEOP_WAR && cpu_is_r4600_v1_x()) {
+			if (IS_ENABLED(CONFIG_WAR_R4600_V1_HIT_CACHEOP) &&
+			    cpu_is_r4600_v1_x()) {
 				uasm_i_nop(buf);
 				uasm_i_nop(buf);
 				uasm_i_nop(buf);
 				uasm_i_nop(buf);
 			}

-			if (R4600_V2_HIT_CACHEOP_WAR && cpu_is_r4600_v2_x())
+			if (IS_ENABLED(CONFIG_WAR_R4600_V2_HIT_CACHEOP) &&
+			    cpu_is_r4600_v2_x())
 				uasm_i_lw(buf, ZERO, ZERO, AT);

 			uasm_i_cache(buf, Create_Dirty_Excl_D, off, A0);
@@ -303,7 +304,7 @@
 	else
 		uasm_i_ori(&buf, A2, A0, off);

-	if (R4600_V2_HIT_CACHEOP_WAR && cpu_is_r4600_v2_x())
+	if (IS_ENABLED(CONFIG_WAR_R4600_V2_HIT_CACHEOP) && cpu_is_r4600_v2_x())
 		uasm_i_lui(&buf, AT, uasm_rel_hi(0xa0000000));

 	off = cache_line_size ? min(8, pref_bias_clear_store / cache_line_size)
@@ -403,14 +404,16 @@
 		if (cpu_has_cache_cdex_s) {
 			uasm_i_cache(buf, Create_Dirty_Excl_SD, off, A0);
 		} else if (cpu_has_cache_cdex_p) {
-			if (R4600_V1_HIT_CACHEOP_WAR && cpu_is_r4600_v1_x()) {
+			if (IS_ENABLED(CONFIG_WAR_R4600_V1_HIT_CACHEOP) &&
+			    cpu_is_r4600_v1_x()) {
 				uasm_i_nop(buf);
 				uasm_i_nop(buf);
 				uasm_i_nop(buf);
 				uasm_i_nop(buf);
 			}

-			if (R4600_V2_HIT_CACHEOP_WAR && cpu_is_r4600_v2_x())
+			if (IS_ENABLED(CONFIG_WAR_R4600_V2_HIT_CACHEOP) &&
+			    cpu_is_r4600_v2_x())
 				uasm_i_lw(buf, ZERO, ZERO, AT);

 			uasm_i_cache(buf, Create_Dirty_Excl_D, off, A0);
@@ -454,7 +457,7 @@
 	else
 		uasm_i_ori(&buf, A2, A0, off);

-	if (R4600_V2_HIT_CACHEOP_WAR && cpu_is_r4600_v2_x())
+	if (IS_ENABLED(CONFIG_WAR_R4600_V2_HIT_CACHEOP) && cpu_is_r4600_v2_x())
 		uasm_i_lui(&buf, AT, uasm_rel_hi(0xa0000000));

 	off = cache_line_size ? min(8, pref_bias_copy_load / cache_line_size) *