@@ -1,16 +1,5 @@
+// SPDX-License-Identifier: GPL-2.0-only
 /*
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License, version 2, as
- * published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
  *
  * Copyright Novell Inc 2010
  *
@@ -180,7 +169,7 @@
 	kvmppc_book3s_queue_irqprio(vcpu, BOOK3S_INTERRUPT_DATA_STORAGE);
 }
 
-static int kvmppc_emulate_fpr_load(struct kvm_run *run, struct kvm_vcpu *vcpu,
+static int kvmppc_emulate_fpr_load(struct kvm_vcpu *vcpu,
 				   int rs, ulong addr, int ls_type)
 {
 	int emulated = EMULATE_FAIL;
@@ -199,7 +188,7 @@
 		kvmppc_inject_pf(vcpu, addr, false);
 		goto done_load;
 	} else if (r == EMULATE_DO_MMIO) {
-		emulated = kvmppc_handle_load(run, vcpu, KVM_MMIO_REG_FPR | rs,
+		emulated = kvmppc_handle_load(vcpu, KVM_MMIO_REG_FPR | rs,
 					      len, 1);
 		goto done_load;
 	}
@@ -224,7 +213,7 @@
 	return emulated;
 }
 
-static int kvmppc_emulate_fpr_store(struct kvm_run *run, struct kvm_vcpu *vcpu,
+static int kvmppc_emulate_fpr_store(struct kvm_vcpu *vcpu,
 				    int rs, ulong addr, int ls_type)
 {
 	int emulated = EMULATE_FAIL;
@@ -259,7 +248,7 @@
 	if (r < 0) {
 		kvmppc_inject_pf(vcpu, addr, true);
 	} else if (r == EMULATE_DO_MMIO) {
-		emulated = kvmppc_handle_store(run, vcpu, val, len, 1);
+		emulated = kvmppc_handle_store(vcpu, val, len, 1);
 	} else {
 		emulated = EMULATE_DONE;
 	}
@@ -270,7 +259,7 @@
 	return emulated;
 }
 
-static int kvmppc_emulate_psq_load(struct kvm_run *run, struct kvm_vcpu *vcpu,
+static int kvmppc_emulate_psq_load(struct kvm_vcpu *vcpu,
 				   int rs, ulong addr, bool w, int i)
 {
 	int emulated = EMULATE_FAIL;
@@ -290,12 +279,12 @@
 		kvmppc_inject_pf(vcpu, addr, false);
 		goto done_load;
 	} else if ((r == EMULATE_DO_MMIO) && w) {
-		emulated = kvmppc_handle_load(run, vcpu, KVM_MMIO_REG_FPR | rs,
+		emulated = kvmppc_handle_load(vcpu, KVM_MMIO_REG_FPR | rs,
 					      4, 1);
 		vcpu->arch.qpr[rs] = tmp[1];
 		goto done_load;
 	} else if (r == EMULATE_DO_MMIO) {
-		emulated = kvmppc_handle_load(run, vcpu, KVM_MMIO_REG_FQPR | rs,
+		emulated = kvmppc_handle_load(vcpu, KVM_MMIO_REG_FQPR | rs,
 					      8, 1);
 		goto done_load;
 	}
@@ -313,7 +302,7 @@
 	return emulated;
 }
 
-static int kvmppc_emulate_psq_store(struct kvm_run *run, struct kvm_vcpu *vcpu,
+static int kvmppc_emulate_psq_store(struct kvm_vcpu *vcpu,
 				    int rs, ulong addr, bool w, int i)
 {
 	int emulated = EMULATE_FAIL;
@@ -329,10 +318,10 @@
 	if (r < 0) {
 		kvmppc_inject_pf(vcpu, addr, true);
 	} else if ((r == EMULATE_DO_MMIO) && w) {
-		emulated = kvmppc_handle_store(run, vcpu, tmp[0], 4, 1);
+		emulated = kvmppc_handle_store(vcpu, tmp[0], 4, 1);
 	} else if (r == EMULATE_DO_MMIO) {
 		u64 val = ((u64)tmp[0] << 32) | tmp[1];
-		emulated = kvmppc_handle_store(run, vcpu, val, 8, 1);
+		emulated = kvmppc_handle_store(vcpu, val, 8, 1);
 	} else {
 		emulated = EMULATE_DONE;
 	}
@@ -629,7 +618,7 @@
 	return EMULATE_DONE;
 }
 
-int kvmppc_emulate_paired_single(struct kvm_run *run, struct kvm_vcpu *vcpu)
+int kvmppc_emulate_paired_single(struct kvm_vcpu *vcpu)
 {
 	u32 inst;
 	enum emulation_result emulated = EMULATE_DONE;
@@ -691,7 +680,7 @@
 		int i = inst_get_field(inst, 17, 19);
 
 		addr += get_d_signext(inst);
-		emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i);
+		emulated = kvmppc_emulate_psq_load(vcpu, ax_rd, addr, w, i);
 		break;
 	}
 	case OP_PSQ_LU:
@@ -701,7 +690,7 @@
 		int i = inst_get_field(inst, 17, 19);
 
 		addr += get_d_signext(inst);
-		emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i);
+		emulated = kvmppc_emulate_psq_load(vcpu, ax_rd, addr, w, i);
 
 		if (emulated == EMULATE_DONE)
 			kvmppc_set_gpr(vcpu, ax_ra, addr);
@@ -714,7 +703,7 @@
 		int i = inst_get_field(inst, 17, 19);
 
 		addr += get_d_signext(inst);
-		emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i);
+		emulated = kvmppc_emulate_psq_store(vcpu, ax_rd, addr, w, i);
 		break;
 	}
 	case OP_PSQ_STU:
@@ -724,7 +713,7 @@
 		int i = inst_get_field(inst, 17, 19);
 
 		addr += get_d_signext(inst);
-		emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i);
+		emulated = kvmppc_emulate_psq_store(vcpu, ax_rd, addr, w, i);
 
 		if (emulated == EMULATE_DONE)
 			kvmppc_set_gpr(vcpu, ax_ra, addr);
@@ -744,7 +733,7 @@
 			int i = inst_get_field(inst, 22, 24);
 
 			addr += kvmppc_get_gpr(vcpu, ax_rb);
-			emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i);
+			emulated = kvmppc_emulate_psq_load(vcpu, ax_rd, addr, w, i);
 			break;
 		}
 		case OP_4X_PS_CMPO0:
@@ -758,7 +747,7 @@
 			int i = inst_get_field(inst, 22, 24);
 
 			addr += kvmppc_get_gpr(vcpu, ax_rb);
-			emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i);
+			emulated = kvmppc_emulate_psq_load(vcpu, ax_rd, addr, w, i);
 
 			if (emulated == EMULATE_DONE)
 				kvmppc_set_gpr(vcpu, ax_ra, addr);
@@ -835,7 +824,7 @@
 			int i = inst_get_field(inst, 22, 24);
 
 			addr += kvmppc_get_gpr(vcpu, ax_rb);
-			emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i);
+			emulated = kvmppc_emulate_psq_store(vcpu, ax_rd, addr, w, i);
 			break;
 		}
 		case OP_4XW_PSQ_STUX:
@@ -845,7 +834,7 @@
 			int i = inst_get_field(inst, 22, 24);
 
 			addr += kvmppc_get_gpr(vcpu, ax_rb);
-			emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i);
+			emulated = kvmppc_emulate_psq_store(vcpu, ax_rd, addr, w, i);
 
 			if (emulated == EMULATE_DONE)
 				kvmppc_set_gpr(vcpu, ax_ra, addr);
@@ -933,7 +922,7 @@
 	{
 		ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;
 
-		emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr,
+		emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd, addr,
 						   FPU_LS_SINGLE);
 		break;
 	}
@@ -941,7 +930,7 @@
 	{
 		ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;
 
-		emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr,
+		emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd, addr,
 						   FPU_LS_SINGLE);
 
 		if (emulated == EMULATE_DONE)
@@ -952,7 +941,7 @@
 	{
 		ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;
 
-		emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr,
+		emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd, addr,
 						   FPU_LS_DOUBLE);
 		break;
 	}
@@ -960,7 +949,7 @@
 	{
 		ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;
 
-		emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr,
+		emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd, addr,
 						   FPU_LS_DOUBLE);
 
 		if (emulated == EMULATE_DONE)
@@ -971,7 +960,7 @@
 	{
 		ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;
 
-		emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr,
+		emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd, addr,
 						    FPU_LS_SINGLE);
 		break;
 	}
@@ -979,7 +968,7 @@
 	{
 		ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;
 
-		emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr,
+		emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd, addr,
 						    FPU_LS_SINGLE);
 
 		if (emulated == EMULATE_DONE)
@@ -990,7 +979,7 @@
 	{
 		ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;
 
-		emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr,
+		emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd, addr,
 						    FPU_LS_DOUBLE);
 		break;
 	}
@@ -998,7 +987,7 @@
 	{
 		ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;
 
-		emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr,
+		emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd, addr,
 						    FPU_LS_DOUBLE);
 
 		if (emulated == EMULATE_DONE)
@@ -1012,7 +1001,7 @@
 			ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
 
 			addr += kvmppc_get_gpr(vcpu, ax_rb);
-			emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd,
+			emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd,
 							   addr, FPU_LS_SINGLE);
 			break;
 		}
@@ -1021,7 +1010,7 @@
 			ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
 				kvmppc_get_gpr(vcpu, ax_rb);
 
-			emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd,
+			emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd,
 							   addr, FPU_LS_SINGLE);
 
 			if (emulated == EMULATE_DONE)
@@ -1033,7 +1022,7 @@
 			ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
 				kvmppc_get_gpr(vcpu, ax_rb);
 
-			emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd,
+			emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd,
 							   addr, FPU_LS_DOUBLE);
 			break;
 		}
@@ -1042,7 +1031,7 @@
 			ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
 				kvmppc_get_gpr(vcpu, ax_rb);
 
-			emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd,
+			emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd,
 							   addr, FPU_LS_DOUBLE);
 
 			if (emulated == EMULATE_DONE)
@@ -1054,7 +1043,7 @@
 			ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
 				kvmppc_get_gpr(vcpu, ax_rb);
 
-			emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
+			emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd,
 							    addr, FPU_LS_SINGLE);
 			break;
 		}
@@ -1063,7 +1052,7 @@
 			ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
 				kvmppc_get_gpr(vcpu, ax_rb);
 
-			emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
+			emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd,
 							    addr, FPU_LS_SINGLE);
 
 			if (emulated == EMULATE_DONE)
@@ -1075,7 +1064,7 @@
 			ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
 				kvmppc_get_gpr(vcpu, ax_rb);
 
-			emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
+			emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd,
 							    addr, FPU_LS_DOUBLE);
 			break;
 		}
@@ -1084,7 +1073,7 @@
 			ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
 				kvmppc_get_gpr(vcpu, ax_rb);
 
-			emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
+			emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd,
 							    addr, FPU_LS_DOUBLE);
 
 			if (emulated == EMULATE_DONE)
@@ -1096,7 +1085,7 @@
 			ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
 				kvmppc_get_gpr(vcpu, ax_rb);
 
-			emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
+			emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd,
 							    addr,
 							    FPU_LS_SINGLE_LOW);
 			break;
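Apart from the SPDX header conversion at the top, every hunk applies the same mechanical change: the `struct kvm_run *run` parameter is dropped from `kvmppc_emulate_paired_single()` and the fpr/psq load and store helpers, and `kvmppc_handle_load()`/`kvmppc_handle_store()` are called with the vcpu alone. A minimal caller-side sketch of what that means, assuming (as elsewhere in KVM) that the run structure stays reachable through `vcpu->run`; the surrounding function is hypothetical and only illustrates the call shape, it is not part of this patch:

```c
/* Hypothetical caller, for illustration only. */
static int emulate_ps_op(struct kvm_vcpu *vcpu)
{
	/* Before this change the call was kvmppc_emulate_paired_single(vcpu->run, vcpu). */
	int r = kvmppc_emulate_paired_single(vcpu);

	/*
	 * The run struct has not gone away; it is still available as
	 * vcpu->run when an MMIO exit has to be described to userspace.
	 */
	if (r == EMULATE_DO_MMIO)
		vcpu->run->exit_reason = KVM_EXIT_MMIO;

	return r;
}
```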