Lines Matching defs:K1

48 #define K1		27
225 UASM_i_ADDIU(&p, K1, SP, -(int)sizeof(struct pt_regs));
229 UASM_i_SW(&p, i, offsetof(struct pt_regs, regs[i]), K1);
234 UASM_i_SW(&p, V0, offsetof(struct pt_regs, cp0_status), K1);
237 kvm_mips_build_save_scratch(&p, V1, K1);
243 UASM_i_ADDIU(&p, K1, A0, offsetof(struct kvm_vcpu, arch));
249 UASM_i_SW(&p, SP, offsetof(struct kvm_vcpu_arch, host_stack), K1);
252 UASM_i_SW(&p, GP, offsetof(struct kvm_vcpu_arch, host_gp), K1);
263 UASM_i_LW(&p, K0, offsetof(struct kvm_vcpu_arch, guest_ebase), K1);
305 UASM_i_LW(&p, T0, offsetof(struct kvm_vcpu_arch, pc), K1);
314 UASM_i_SW(&p, K0, offsetof(struct kvm_vcpu_arch, host_pgd), K1);
325 (int)offsetof(struct kvm_vcpu, arch), K1);
365 K1);
372 UASM_i_LW(&p, T0, offsetof(struct kvm_vcpu_arch, cop0), K1);
378 UASM_i_ADDIU(&p, T1, K1, offsetof(struct kvm_vcpu_arch,
381 UASM_i_ADDIU(&p, T1, K1, offsetof(struct kvm_vcpu_arch,
435 if (i == K0 || i == K1)
437 UASM_i_LW(&p, i, offsetof(struct kvm_vcpu_arch, gprs[i]), K1);
442 UASM_i_LW(&p, K0, offsetof(struct kvm_vcpu_arch, hi), K1);
445 UASM_i_LW(&p, K0, offsetof(struct kvm_vcpu_arch, lo), K1);
450 UASM_i_LW(&p, K0, offsetof(struct kvm_vcpu_arch, gprs[K0]), K1);
451 UASM_i_LW(&p, K1, offsetof(struct kvm_vcpu_arch, gprs[K1]), K1);
484 UASM_i_MTC0(&p, K1, scratch_tmp[0], scratch_tmp[1]);
487 UASM_i_MFC0(&p, K1, scratch_vcpu[0], scratch_vcpu[1]);
490 UASM_i_SW(&p, K0, offsetof(struct kvm_vcpu, arch.gprs[K0]), K1);
499 UASM_i_MFC0(&p, K1, C0_PGD);
500 uasm_i_lddir(&p, K0, K1, 3); /* global page dir */
502 uasm_i_lddir(&p, K1, K0, 1); /* middle page dir */
504 uasm_i_ldpte(&p, K1, 0); /* even */
505 uasm_i_ldpte(&p, K1, 1); /* odd */
520 build_get_pmde64(&p, &l, &r, K0, K1); /* get pmd in K1 */
522 build_get_pgde32(&p, K0, K1); /* get pgd in K1 */
527 build_get_ptep(&p, K0, K1);
528 build_update_entries(&p, K0, K1);
535 UASM_i_MFC0(&p, K1, scratch_vcpu[0], scratch_vcpu[1]);
538 UASM_i_LW(&p, K0, offsetof(struct kvm_vcpu, arch.gprs[K0]), K1);
540 UASM_i_MFC0(&p, K1, scratch_tmp[0], scratch_tmp[1]);
570 UASM_i_MTC0(&p, K1, scratch_tmp[0], scratch_tmp[1]);
573 UASM_i_MFC0(&p, K1, scratch_vcpu[0], scratch_vcpu[1]);
574 UASM_i_ADDIU(&p, K1, K1, offsetof(struct kvm_vcpu, arch));
577 UASM_i_SW(&p, K0, offsetof(struct kvm_vcpu_arch, gprs[K0]), K1);
625 if (i == K0 || i == K1)
627 UASM_i_SW(&p, i, offsetof(struct kvm_vcpu_arch, gprs[i]), K1);
633 UASM_i_SW(&p, T0, offsetof(struct kvm_vcpu_arch, hi), K1);
636 UASM_i_SW(&p, T0, offsetof(struct kvm_vcpu_arch, lo), K1);
642 UASM_i_SW(&p, T0, offsetof(struct kvm_vcpu_arch, gprs[K1]), K1);
654 UASM_i_SW(&p, K0, offsetof(struct kvm_vcpu_arch, pc), K1);
658 K1);
661 uasm_i_sw(&p, K0, offsetof(struct kvm_vcpu_arch, host_cp0_cause), K1);
666 host_cp0_badinstr), K1);
672 host_cp0_badinstrp), K1);
702 K1);
718 K1);
727 K1);
738 offsetof(struct kvm_vcpu_arch, host_pgd), K1);
754 offsetof(struct kvm_vcpu_arch, host_cp0_guestctl0), K1);
781 UASM_i_LW(&p, GP, offsetof(struct kvm_vcpu_arch, host_gp), K1);
784 UASM_i_LW(&p, SP, offsetof(struct kvm_vcpu_arch, host_stack), K1);
850 uasm_i_move(&p, K1, S0);
851 UASM_i_ADDIU(&p, K1, K1, offsetof(struct kvm_vcpu, arch));
888 UASM_i_LW(&p, T0, offsetof(struct kvm_vcpu_arch, guest_ebase), K1);
926 UASM_i_LW(&p, K1, offsetof(struct kvm_vcpu_arch, host_stack), K1);
927 UASM_i_ADDIU(&p, K1, K1, -(int)sizeof(struct pt_regs));
940 UASM_i_LW(&p, i, offsetof(struct pt_regs, regs[i]), K1);
949 UASM_i_LW(&p, RA, offsetof(struct pt_regs, regs[RA]), K1);
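
For orientation: K1 at line 48 is not a variable but the uasm register index for MIPS $k1 (general-purpose register 27, reserved for kernel use), and the matched lines keep it pointing either at an on-stack struct pt_regs frame or at the vcpu's struct kvm_vcpu_arch. The matches appear to come from the MIPS KVM entry-stub builder (arch/mips/kvm/entry.c). Below is a minimal sketch of how a couple of the matched emissions (lines 225 and 229) would be driven; the demo_buf/demo_prologue names and the standalone register defines are illustrative assumptions, while the UASM_i_* calls, operand order, and offsets mirror the listing.

/* Hedged sketch: emit two of the matched instructions into a scratch buffer.
 * demo_buf/demo_prologue are made-up names; K1/GP/SP/RA follow the source's
 * own register-index convention (K1 == 27 per line 48).
 */
#include <linux/kvm_host.h>
#include <linux/ptrace.h>
#include <linux/stddef.h>
#include <asm/uasm.h>

#define K1      27      /* $k1, kernel-reserved scratch */
#define GP      28      /* $gp */
#define SP      29      /* $sp */
#define RA      31      /* $ra */

static u32 demo_buf[64];

static u32 *demo_prologue(void)
{
        u32 *p = demo_buf;

        /* K1 = SP - sizeof(struct pt_regs): carve out a register save frame
         * (cf. line 225). */
        UASM_i_ADDIU(&p, K1, SP, -(int)sizeof(struct pt_regs));
        /* Save $gp into its pt_regs slot, addressed through K1 (cf. line 229,
         * which does this in a loop over the registers being saved). */
        UASM_i_SW(&p, GP, offsetof(struct pt_regs, regs[GP]), K1);

        return p;
}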
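
Lines 499-540 are the guest TLB-refill fast path: K1 first receives the root directory pointer from CP0 PGD, K0 and K1 then alternate as the lddir/ldpte walk descends the page-table levels, and build_update_entries() fills the even/odd EntryLo pair before K0/K1 are restored from the scratch registers. The sketch below shows how such a refill sequence conventionally finishes; the tlbwr/eret tail is an illustrative assumption, not a copy of the matched lines, and the function name is made up.

/* Hedged sketch of a refill-handler tail in the style of lines 520-540,
 * reusing the includes and register defines from the sketch above.
 * The tlbwr/eret pair is assumed, not taken from the listing. */
static void demo_refill_tail(u32 **pp)
{
        u32 *p = *pp;

        uasm_i_tlbwr(&p);       /* write the refilled entry to a random TLB slot */
        uasm_i_eret(&p);        /* return from the refill exception */

        *pp = p;
}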
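
The tail of the listing (lines 926-949) is the return-to-host path: K1 is reloaded with the saved host stack pointer, rewound by sizeof(struct pt_regs), and then used as the base address for restoring the host registers out of that frame. A sketch of that epilogue shape follows, again reusing the defines from the first sketch; the s0-s7 loop bounds, the function name, and the closing jr/nop are assumptions, only the addressing pattern comes from the matched lines.

/* Hedged sketch of an epilogue in the style of lines 926-949.  The s0-s7
 * restore loop and the jr/nop tail are illustrative assumptions. */
static void demo_epilogue(u32 **pp)
{
        u32 *p = *pp;
        int i;

        /* Reload the host stack pointer saved in kvm_vcpu_arch (K1 currently
         * points at the arch struct) and rewind to the pt_regs frame
         * (cf. lines 926-927). */
        UASM_i_LW(&p, K1, offsetof(struct kvm_vcpu_arch, host_stack), K1);
        UASM_i_ADDIU(&p, K1, K1, -(int)sizeof(struct pt_regs));

        /* Restore the callee-saved registers $s0..$s7 (16..23) from the frame
         * (cf. line 940, assumed loop bounds). */
        for (i = 16; i <= 23; i++)
                UASM_i_LW(&p, i, offsetof(struct pt_regs, regs[i]), K1);

        /* Restore the return address (cf. line 949) and return to the caller. */
        UASM_i_LW(&p, RA, offsetof(struct pt_regs, regs[RA]), K1);
        uasm_i_jr(&p, RA);
        uasm_i_nop(&p); /* branch delay slot */

        *pp = p;
}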