 .. |  .. |
 31 |  31 | extern void ia64_bad_param_for_getreg (void);
 32 |  32 |
 33 |  33 |
 34 |     | -#define ia64_native_setreg(regnum, val) \
    |  34 | +#define ia64_setreg(regnum, val) \
 35 |  35 | ({ \
 36 |  36 | 	switch (regnum) { \
 37 |  37 | 	case _IA64_REG_PSR_L: \
 .. |  .. |
 60 |  60 | 	} \
 61 |  61 | })
 62 |  62 |
 63 |     | -#define ia64_native_getreg(regnum) \
    |  63 | +#define ia64_getreg(regnum) \
 64 |  64 | ({ \
 65 |  65 | 	__u64 ia64_intri_res; \
 66 |  66 | \
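
The two hunks above drop the `ia64_native_` prefix from the register accessor macros. Below is a minimal usage sketch, not part of the patch: `_IA64_REG_PSR_L` appears in the hunk itself, while `_IA64_REG_IP` is assumed to be another selector from the same `_IA64_REG_*` enumeration defined elsewhere.

```c
/* Hypothetical callers of the renamed accessors (illustration only). */
static inline unsigned long example_read_ip(void)
{
	/* regnum must be a compile-time constant: the macro switches on it */
	return ia64_getreg(_IA64_REG_IP);
}

static inline void example_write_psr_l(unsigned long val)
{
	ia64_setreg(_IA64_REG_PSR_L, val);	/* lower half of the PSR */
}
```
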
 .. |  .. |
384 | 384 |
385 | 385 | #define ia64_invala() asm volatile ("invala" ::: "memory")
386 | 386 |
387 |     | -#define ia64_native_thash(addr) \
    | 387 | +#define ia64_thash(addr) \
388 | 388 | ({ \
389 | 389 | 	unsigned long ia64_intri_res; \
390 | 390 | 	asm volatile ("thash %0=%1" : "=r"(ia64_intri_res) : "r" (addr)); \
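
`ia64_thash()` wraps the `thash` instruction, which computes the VHPT entry address the hardware walker would consult for a virtual address; it performs no memory access itself. A trivial sketch, for illustration only:

```c
/* Sketch: where would the hardware look in the VHPT for vaddr? */
static inline unsigned long example_vhpt_slot(unsigned long vaddr)
{
	return ia64_thash(vaddr);
}
```
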
 .. |  .. |
437 | 437 | #define ia64_set_pmd(index, val) \
438 | 438 | 	asm volatile ("mov pmd[%0]=%1" :: "r"(index), "r"(val) : "memory")
439 | 439 |
440 |     | -#define ia64_native_set_rr(index, val) \
    | 440 | +#define ia64_set_rr(index, val) \
441 | 441 | 	asm volatile ("mov rr[%0]=%1" :: "r"(index), "r"(val) : "memory");
442 | 442 |
443 |     | -#define ia64_native_get_cpuid(index) \
    | 443 | +#define ia64_get_cpuid(index) \
444 | 444 | ({ \
445 | 445 | 	unsigned long ia64_intri_res; \
446 | 446 | 	asm volatile ("mov %0=cpuid[%r1]" : "=r"(ia64_intri_res) : "rO"(index)); \
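
Note that a region-register write through `ia64_set_rr()` normally has to be followed by a serialize (`srlz.d`/`srlz.i`) before the new mapping is relied upon. For the CPUID accessor, a usage sketch follows; treating the low byte of CPUID register 3 as the index of the largest implemented CPUID register follows the architecture manual, and the helper itself is purely illustrative.

```c
/* Sketch: walk the CPUID register file with the renamed ia64_get_cpuid(). */
static inline void example_dump_cpuid(unsigned long *out, int max_words)
{
	unsigned long last = ia64_get_cpuid(3) & 0xff;	/* largest implemented index */
	int i;

	for (i = 0; i <= last && i < max_words; i++)
		out[i] = ia64_get_cpuid(i);
}
```
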
 .. |  .. |
476 | 476 | })
477 | 477 |
478 | 478 |
479 |     | -#define ia64_native_get_pmd(index) \
    | 479 | +#define ia64_get_pmd(index) \
480 | 480 | ({ \
481 | 481 | 	unsigned long ia64_intri_res; \
482 | 482 | 	asm volatile ("mov %0=pmd[%1]" : "=r"(ia64_intri_res) : "r"(index)); \
483 | 483 | 	ia64_intri_res; \
484 | 484 | })
485 | 485 |
486 |     | -#define ia64_native_get_rr(index) \
    | 486 | +#define ia64_get_rr(index) \
487 | 487 | ({ \
488 | 488 | 	unsigned long ia64_intri_res; \
489 | 489 | 	asm volatile ("mov %0=rr[%1]" : "=r"(ia64_intri_res) : "r" (index)); \
490 | 490 | 	ia64_intri_res; \
491 | 491 | })
492 | 492 |
493 |     | -#define ia64_native_fc(addr) asm volatile ("fc %0" :: "r"(addr) : "memory")
    | 493 | +#define ia64_fc(addr) asm volatile ("fc %0" :: "r"(addr) : "memory")
494 | 494 |
495 | 495 |
496 | 496 | #define ia64_sync_i() asm volatile (";; sync.i" ::: "memory")
497 | 497 |
498 |     | -#define ia64_native_ssm(mask) asm volatile ("ssm %0":: "i"((mask)) : "memory")
499 |     | -#define ia64_native_rsm(mask) asm volatile ("rsm %0":: "i"((mask)) : "memory")
    | 498 | +#define ia64_ssm(mask) asm volatile ("ssm %0":: "i"((mask)) : "memory")
    | 499 | +#define ia64_rsm(mask) asm volatile ("rsm %0":: "i"((mask)) : "memory")
500 | 500 | #define ia64_sum(mask) asm volatile ("sum %0":: "i"((mask)) : "memory")
501 | 501 | #define ia64_rum(mask) asm volatile ("rum %0":: "i"((mask)) : "memory")
502 | 502 |
503 | 503 | #define ia64_ptce(addr) asm volatile ("ptc.e %0" :: "r"(addr))
504 | 504 |
505 |     | -#define ia64_native_ptcga(addr, size) \
    | 505 | +#define ia64_ptcga(addr, size) \
506 | 506 | do { \
507 | 507 | 	asm volatile ("ptc.ga %0,%1" :: "r"(addr), "r"(size) : "memory"); \
508 | 508 | 	ia64_dv_serialize_data(); \
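
`ia64_fc()` flushes the cache line containing an address; a typical caller walks a byte range and then synchronizes the instruction stream. A sketch of that pattern, assuming a 32-byte flush stride and an `ia64_srlz_i()` helper defined elsewhere in this header:

```c
/*
 * Sketch: flush a byte range with the renamed ia64_fc(), then make the
 * flush visible to instruction fetch.  Stride and ia64_srlz_i() are
 * assumptions for illustration.
 */
static inline void example_flush_range(unsigned long start, unsigned long end)
{
	unsigned long addr;

	for (addr = start & ~31UL; addr < end; addr += 32)
		ia64_fc(addr);		/* fc: flush cache line containing addr */
	ia64_sync_i();			/* sync.i: wait for the flushes to complete */
	ia64_srlz_i();			/* instruction serialize before running new code */
}
```
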
 .. |  .. |
607 | 607 | 	} \
608 | 608 | })
609 | 609 |
610 |     | -#define ia64_native_intrin_local_irq_restore(x) \
    | 610 | +#define ia64_intrin_local_irq_restore(x) \
611 | 611 | do { \
612 | 612 | 	asm volatile (";; cmp.ne p6,p7=%0,r0;;" \
613 | 613 | 		      "(p6) ssm psr.i;" \
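
`ia64_intrin_local_irq_restore(x)` re-enables interrupts when `x` is non-zero (the `(p6) ssm psr.i` path shown above) and keeps them masked otherwise. A sketch of a save/restore pair built from these intrinsics follows; `_IA64_REG_PSR` and `IA64_PSR_I` (the psr.i bit) are assumed to be defined elsewhere, and this mirrors the usual pattern rather than the exact kernel implementation.

```c
/* Sketch: local-IRQ save/restore on top of the renamed intrinsics. */
static inline unsigned long example_irq_save(void)
{
	unsigned long psr = ia64_getreg(_IA64_REG_PSR);

	ia64_rsm(IA64_PSR_I);		/* rsm: clear psr.i, mask interrupts */
	return psr;
}

static inline void example_irq_restore(unsigned long psr)
{
	/* re-enable interrupts only if psr.i was set when state was saved */
	ia64_intrin_local_irq_restore(psr & IA64_PSR_I);
}
```
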