Lines Matching refs: r9
594 ld r9, VCORE_KVM(r5) /* pointer to struct kvm */
617 lwz r7,KVM_LPID(r9)
619 ld r6,KVM_SDR1(r9)
629 mr r3, r9 /* kvm pointer */
944 ld r9,VCPU_SLB_V(r6)
945 slbmte r9,r8
953 li r9, TM_QW1_OS
965 stdx r11,r9,r10
972 stdcix r11,r9,r10
974 3: li r9, 1
975 stb r9, VCPU_XIVE_PUSHED(r4)
995 li r9, XIVE_ESB_SET_PQ_01
998 ldx r0, r10, r9
1001 ldcix r0, r10, r9
1078 li r9, KVM_GUEST_MODE_GUEST_HV
1079 stb r9, HSTATE_IN_GUEST(r13)
1103 ld r9, VCPU_GPR(R9)(r4)
1202 std reg, __VCPU_GPR(reg)(r9)
1252 mr r9, r4
1277 std r9, HSTATE_SCRATCH2(r13)
1278 lbz r9, HSTATE_IN_GUEST(r13)
1279 cmpwi r9, KVM_GUEST_MODE_HOST_HV
1282 cmpwi r9, KVM_GUEST_MODE_GUEST
1283 ld r9, HSTATE_SCRATCH2(r13)
1287 li r9, KVM_GUEST_MODE_HOST_HV
1288 stb r9, HSTATE_IN_GUEST(r13)
1290 ld r9, HSTATE_KVM_VCPU(r13)
1294 std r0, VCPU_GPR(R0)(r9)
1295 std r1, VCPU_GPR(R1)(r9)
1296 std r2, VCPU_GPR(R2)(r9)
1297 std r3, VCPU_GPR(R3)(r9)
1298 std r4, VCPU_GPR(R4)(r9)
1299 std r5, VCPU_GPR(R5)(r9)
1300 std r6, VCPU_GPR(R6)(r9)
1301 std r7, VCPU_GPR(R7)(r9)
1302 std r8, VCPU_GPR(R8)(r9)
1304 std r0, VCPU_GPR(R9)(r9)
1305 std r10, VCPU_GPR(R10)(r9)
1306 std r11, VCPU_GPR(R11)(r9)
1308 std r3, VCPU_GPR(R12)(r9)
1311 std r4, VCPU_CR(r9)
1314 std r3, VCPU_CFAR(r9)
1318 std r4, VCPU_PPR(r9)
1327 std r10, VCPU_SRR0(r9)
1328 std r11, VCPU_SRR1(r9)
1336 1: std r10, VCPU_PC(r9)
1337 std r11, VCPU_MSR(r9)
1341 std r3, VCPU_GPR(R13)(r9)
1342 std r4, VCPU_LR(r9)
1344 stw r12,VCPU_TRAP(r9)
1356 addi r3, r9, VCPU_TB_RMINTR
1357 mr r4, r9
1359 ld r5, VCPU_GPR(R5)(r9)
1360 ld r6, VCPU_GPR(R6)(r9)
1361 ld r7, VCPU_GPR(R7)(r9)
1362 ld r8, VCPU_GPR(R8)(r9)
1368 stw r3,VCPU_LAST_INST(r9)
1372 11: stw r3,VCPU_HEIR(r9)
1377 std r3, VCPU_CTR(r9)
1378 std r4, VCPU_XER(r9)
1383 std r3, VCPU_DAR(r9)
1384 stw r4, VCPU_DSISR(r9)
1389 std r3, VCPU_FAULT_DAR(r9)
1390 stw r4, VCPU_FAULT_DSISR(r9)
1406 mr r4,r9
1420 ld r0, VCPU_NESTED(r9)
1433 std r3, VCPU_HFSCR(r9)
1446 guest_exit_cont: /* r9 = vcpu, r12 = trap, r13 = paca */
1449 addi r3, r9, VCPU_TB_RMEXIT
1450 mr r4, r9
1455 lbz r0, VCPU_XIVE_PUSHED(r9)
1480 3: std r11, VCPU_XIVE_SAVED_STATE(r9)
1484 stb r10, VCPU_XIVE_PUSHED(r9)
1485 stb r10, (VCPU_XIVE_SAVED_STATE+3)(r9)
1486 stb r0, (VCPU_XIVE_SAVED_STATE+4)(r9)
1504 ld r5, VCPU_KVM(r9)
1509 lwz r0,VCPU_SLB_NR(r9) /* number of entries in SLB */
1512 addi r7,r9,VCPU_SLB
1529 3: stw r5,VCPU_SLB_MAX(r9)
1568 std r5,VCPU_DEC_EXPIRES(r9)
1574 ld r9, HSTATE_KVM_VCPU(r13)
1578 stw r0, VCPU_CPU(r9)
1579 stw r0, VCPU_THREAD_CPU(r9)
1583 stw r6,VCPU_CTRL(r9)
1594 ld r7,VCPU_PURR(r9)
1595 ld r8,VCPU_SPURR(r9)
1596 std r5,VCPU_PURR(r9)
1597 std r6,VCPU_SPURR(r9)
1619 std r5, VCPU_IAMR(r9)
1620 stw r6, VCPU_PSPB(r9)
1621 std r7, VCPU_FSCR(r9)
1624 std r5, VCPU_IC(r9)
1625 std r7, VCPU_TAR(r9)
1627 std r8, VCPU_EBBHR(r9)
1632 std r5, VCPU_EBBRR(r9)
1633 std r6, VCPU_BESCR(r9)
1634 stw r7, VCPU_GUEST_PID(r9)
1635 std r8, VCPU_WORT(r9)
1641 std r5, VCPU_TCSCR(r9)
1642 std r6, VCPU_ACOP(r9)
1643 std r7, VCPU_CSIGR(r9)
1644 std r8, VCPU_TACR(r9)
1648 std r5, VCPU_TID(r9)
1651 std r6, VCPU_PSSCR(r9)
1678 std r5,VCPU_AMR(r9)
1679 std r6,VCPU_UAMOR(r9)
1688 std r8, VCPU_DSCR(r9)
1692 std r14, VCPU_GPR(R14)(r9)
1693 std r15, VCPU_GPR(R15)(r9)
1694 std r16, VCPU_GPR(R16)(r9)
1695 std r17, VCPU_GPR(R17)(r9)
1696 std r18, VCPU_GPR(R18)(r9)
1697 std r19, VCPU_GPR(R19)(r9)
1698 std r20, VCPU_GPR(R20)(r9)
1699 std r21, VCPU_GPR(R21)(r9)
1700 std r22, VCPU_GPR(R22)(r9)
1701 std r23, VCPU_GPR(R23)(r9)
1702 std r24, VCPU_GPR(R24)(r9)
1703 std r25, VCPU_GPR(R25)(r9)
1704 std r26, VCPU_GPR(R26)(r9)
1705 std r27, VCPU_GPR(R27)(r9)
1706 std r28, VCPU_GPR(R28)(r9)
1707 std r29, VCPU_GPR(R29)(r9)
1708 std r30, VCPU_GPR(R30)(r9)
1709 std r31, VCPU_GPR(R31)(r9)
1716 std r3, VCPU_SPRG0(r9)
1717 std r4, VCPU_SPRG1(r9)
1718 std r5, VCPU_SPRG2(r9)
1719 std r6, VCPU_SPRG3(r9)
1722 mr r3, r9
1736 mr r3, r9
1741 ld r9, HSTATE_KVM_VCPU(r13)
1746 ld r8, VCPU_VPA(r9) /* do they have a VPA? */
1754 stb r3, VCPU_VPA_DIRTY(r9)
1758 mr r3, r9
1763 ld r9, HSTATE_KVM_VCPU(r13)
1791 ld r5, VCPU_KVM(r9)
1807 lwz r3, VCPU_GUEST_PID(r9)
1824 ld r6,VCPU_KVM(r9)
2016 ld r9, HSTATE_KVM_VCPU(r13)
2051 stw r12, VCPU_TRAP(r9)
2063 mr r4, r9
2076 stw r3, VCPU_HEIR(r9)
2091 mr r3, r9
2094 ld r9, HSTATE_KVM_VCPU(r13)
2098 ld r10, VCPU_PC(r9)
2099 ld r11, VCPU_MSR(r9)
2111 ld r3, VCPU_KVM(r9)
2135 4: std r4, VCPU_FAULT_DAR(r9)
2136 stw r6, VCPU_FAULT_DSISR(r9)
2139 mr r3, r9 /* vcpu pointer */
2142 ld r9, HSTATE_KVM_VCPU(r13)
2143 ld r10, VCPU_PC(r9)
2144 ld r11, VCPU_MSR(r9)
2154 ld r4, VCPU_FAULT_DAR(r9)
2164 6: ld r7, VCPU_CTR(r9)
2165 ld r8, VCPU_XER(r9)
2168 mr r4, r9
2171 3: ld r5, VCPU_KVM(r9) /* not relocated, use VRMA */
2191 stw r8, VCPU_LAST_INST(r9)
2199 std r4, VCPU_FAULT_DAR(r9)
2200 stw r6, VCPU_FAULT_DSISR(r9)
2203 std r5, VCPU_FAULT_GPA(r9)
2211 ld r3, VCPU_KVM(r9)
2229 mr r3, r9 /* vcpu pointer */
2234 ld r9, HSTATE_KVM_VCPU(r13)
2235 ld r10, VCPU_PC(r9)
2236 ld r11, VCPU_MSR(r9)
2252 3: ld r6, VCPU_KVM(r9) /* not relocated, use VRMA */
2263 * r9 = vcpu, r10 = pc, r11 = msr, r12 = trap, r13 = paca
2266 ld r3,VCPU_GPR(R3)(r9)
2271 ld r0, VCPU_NESTED(r9)
2278 ld r4, VCPU_KVM(r9)
2294 mr r3,r9 /* get vcpu pointer */
2295 ld r4,VCPU_GPR(R4)(r9)
2310 mr r4,r9
2318 ld r9, HSTATE_KVM_VCPU(r13)
2888 mr r9, r4
2904 ld r9, HSTATE_KVM_VCPU(r13)
2907 ld r10, VCPU_XIVE_ESC_VADDR(r9)
2919 lbz r5, VCPU_XIVE_ESC_ON(r9)
2923 stb r0, VCPU_CEDED(r9)
2932 stb r0, VCPU_XIVE_ESC_ON(r9)
2941 1: ld r10, VCPU_XIVE_ESC_RADDR(r9)
2949 mr r3, r9 /* get vcpu pointer */
2953 ld r9, HSTATE_KVM_VCPU(r13)
2961 * r9 points to the vcpu on entry
2968 ld r9, HSTATE_KVM_VCPU(r13)
3105 mfmsr r9
3106 ori r8,r9,MSR_FP
3227 ld r9, HSTATE_KVM_VCPU(r13)
3230 std r5, VCPU_TFHAR(r9)
3231 std r6, VCPU_TFIAR(r9)
3306 * r9 is saved in HSTATE_SCRATCH2(r13)
3316 mr r9, r1
3320 std r9, 0(r1)
3322 std r9, GPR1(r1)
3345 ld r9, HSTATE_SCRATCH2(r13)
3376 ld r9, HSTATE_KVM_VCPU(r13)
3377 ld r10, VCPU_KVM(r9)
3433 * r9 has a vcpu pointer (in)
3439 ld r11, VCPU_INTR_MSR(r9)
3468 lwz r9, VCPU_PMC + 20(r4)
3474 mtspr SPRN_PMC6, r9
3500 ld r9, VCPU_MMCRS(r4)
3503 mtspr SPRN_MMCRS, r9
3531 lwz r9, HSTATE_PMC6(r13)
3537 mtspr SPRN_PMC6, r9
3549 ld r9, HSTATE_SIER(r13)
3551 mtspr SPRN_SIER, r9
3572 mr r9, r3
3609 std r3, VCPU_MMCR(r9) /* if not, set saved MMCR0 to FC */
3614 std r4, VCPU_MMCR(r9)
3615 std r5, VCPU_MMCR + 8(r9)
3616 std r6, VCPU_MMCRA(r9)
3618 std r10, VCPU_MMCR + 16(r9)
3624 std r5, VCPU_MMCR + 24(r9)
3625 std r6, VCPU_SIER + 8(r9)
3626 std r7, VCPU_SIER + 16(r9)
3628 std r7, VCPU_SIAR(r9)
3629 std r8, VCPU_SDAR(r9)
3636 stw r3, VCPU_PMC(r9)
3637 stw r4, VCPU_PMC + 4(r9)
3638 stw r5, VCPU_PMC + 8(r9)
3639 stw r6, VCPU_PMC + 12(r9)
3640 stw r7, VCPU_PMC + 16(r9)
3641 stw r8, VCPU_PMC + 20(r9)
3644 std r5, VCPU_SIER(r9)
3649 stw r6, VCPU_PMC + 24(r9)
3650 stw r7, VCPU_PMC + 28(r9)
3651 std r8, VCPU_MMCRS(r9)
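
Nearly every reference above uses r9 as the host's pointer to the current vcpu structure and indexes it with symbolic byte offsets such as VCPU_GPR(R9), VCPU_MSR, or VCPU_XIVE_PUSHED. Those symbols are assembler constants generated at build time from offsetof() on the corresponding C structures (the kernel's asm-offsets mechanism), so "std r11, VCPU_MSR(r9)" is simply "store r11 at r9 plus a fixed offset". The C sketch below is a minimal, stand-alone illustration of that idea; the toy_vcpu structure and the TOY_VCPU_* macros are simplified stand-ins for illustration only, not the kernel's actual struct kvm_vcpu or asm-offsets definitions.

/*
 * Illustrative sketch only: shows how symbolic offsets like the
 * VCPU_* constants used above can be derived from a C structure
 * with offsetof(), so assembly such as "std r11, VCPU_MSR(r9)"
 * becomes "store r11 at (r9 + constant)".  The layout here is a
 * hypothetical stand-in, not the kernel's struct kvm_vcpu.
 */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

struct toy_vcpu {                 /* hypothetical, for illustration */
	uint64_t gpr[32];         /* guest general-purpose registers */
	uint64_t pc;
	uint64_t msr;
	uint8_t  xive_pushed;
};

/* What an asm-offsets-style generator would emit as assembler constants. */
#define TOY_VCPU_GPR(n)  (offsetof(struct toy_vcpu, gpr) + 8 * (n))
#define TOY_VCPU_PC      offsetof(struct toy_vcpu, pc)
#define TOY_VCPU_MSR     offsetof(struct toy_vcpu, msr)

int main(void)
{
	/* e.g. "std r0, VCPU_GPR(R9)(r9)" writes at byte offset TOY_VCPU_GPR(9) from r9 */
	printf("VCPU_GPR(R9) = %zu\n", TOY_VCPU_GPR(9));
	printf("VCPU_PC      = %zu\n", TOY_VCPU_PC);
	printf("VCPU_MSR     = %zu\n", TOY_VCPU_MSR);
	return 0;
}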