/kernel/liteos_a/testsuites/unittest/net/resolv/full/

net_resolv_test_005.cpp
    37  struct ether_addr addr, *eaddr = &addr;  in EtherLineTest() (local)
    41  ret = ether_line("localhost 01:02:03:04:05:06", eaddr, buf);  in EtherLineTest()
    44  ret = ether_line("01:02:03:04:05:06 localhost", eaddr, buf);  in EtherLineTest()
    48  ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[0], 0x01, eaddr->ether_addr_octet[0]);  in EtherLineTest()
    49  ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[1], 0x02, eaddr->ether_addr_octet[1]);  in EtherLineTest()
    50  ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[2], 0x03, eaddr->ether_addr_octet[2]);  in EtherLineTest()
    51  ICUNIT_ASSERT_EQUAL(eaddr…  in EtherLineTest()
    [all...]

net_resolv_test_004.cpp
    48  struct ether_addr addr, *eaddr = &addr;  in EtherHosttonTest() (local)
    49  int ret = ether_hostton("localhost", eaddr);  in EtherHosttonTest()
    52  ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[0], 0x00, eaddr->ether_addr_octet[0]);  in EtherHosttonTest()
    53  ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[1], 0x00, eaddr->ether_addr_octet[1]);  in EtherHosttonTest()
    54  ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[2], 0x00, eaddr->ether_addr_octet[2]);  in EtherHosttonTest()
    55  ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[3], 0x00, eaddr…  in EtherHosttonTest()
    [all...]
/kernel/liteos_a/testsuites/unittest/net/resolv/smoke/

net_resolv_test_003.cpp
    40  struct ether_addr *eaddr = ether_aton_r("::01:EF", &addr);  in EtherAtonrTest() (local)
    42  ICUNIT_ASSERT_EQUAL(eaddr, NULL, -1);  in EtherAtonrTest()
    52  eaddr = ether_aton_r(mac_addr, &addr);  in EtherAtonrTest()
    54  ICUNIT_ASSERT_EQUAL(eaddr, &addr, -1);  in EtherAtonrTest()
    55  ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[0], r[0], eaddr->ether_addr_octet[0]);  in EtherAtonrTest()
    56  ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[1], r[1], eaddr->ether_addr_octet[1]);  in EtherAtonrTest()
    57  ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[2], r[2], eaddr…  in EtherAtonrTest()
    [all...]

net_resolv_test_002.cpp
    39  struct ether_addr *eaddr = ether_aton("01::EF");  in EtherAtonTest() (local)
    41  ICUNIT_ASSERT_EQUAL(eaddr, NULL, -1);  in EtherAtonTest()
    51  eaddr = ether_aton(mac_addr);  in EtherAtonTest()
    52  ICUNIT_ASSERT_NOT_EQUAL(eaddr, NULL, -1);  in EtherAtonTest()
    54  ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[0], r[0], eaddr->ether_addr_octet[0]);  in EtherAtonTest()
    55  ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[1], r[1], eaddr->ether_addr_octet[1]);  in EtherAtonTest()
    56  ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[2], r[2], eaddr…  in EtherAtonTest()
    [all...]

net_resolv_test_007.cpp
    38  struct ether_addr addr, *eaddr = &addr;  in EtherNtoarTest() (local)
    43  eaddr->ether_addr_octet[i] = r[i];  in EtherNtoarTest()
    45  char buf[100], *p = ether_ntoa_r(eaddr, buf);  in EtherNtoarTest()

net_resolv_test_006.cpp
    39  struct ether_addr addr, *eaddr = &addr;  in EtherNtoaTest() (local)
    44  eaddr->ether_addr_octet[i] = r[i];  in EtherNtoaTest()
    46  char *buf = ether_ntoa(eaddr);  in EtherNtoaTest()
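The tests above exercise the standard <netinet/ether.h> MAC-address helpers. A minimal standalone sketch of the same calls, assuming glibc/musl semantics; the MAC string and hostname are placeholders:

    #include <stdio.h>
    #include <netinet/ether.h>

    int main(void)
    {
        /* ether_aton() returns NULL for malformed input, as the tests expect. */
        if (ether_aton("01::EF") == NULL)
            puts("malformed address rejected");

        /* The reentrant variant parses into caller storage and returns it. */
        struct ether_addr addr;
        if (ether_aton_r("01:02:03:04:05:06", &addr) == &addr)
            printf("first octet: 0x%02x\n", addr.ether_addr_octet[0]);

        /* The reentrant formatter writes into the caller's buffer. */
        char buf[18];
        printf("round trip: %s\n", ether_ntoa_r(&addr, buf));

        /* ether_line() parses an /etc/ethers-style "<addr> <hostname>" line
         * and returns 0 on success. */
        struct ether_addr parsed;
        char host[64];
        if (ether_line("01:02:03:04:05:06 localhost", &parsed, host) == 0)
            printf("%s -> 0x%02x\n", host, parsed.ether_addr_octet[5]);
        return 0;
    }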
/kernel/linux/linux-5.10/arch/powerpc/kvm/

book3s_32_mmu.c
    69  static int kvmppc_mmu_book3s_32_xlate_bat(struct kvm_vcpu *vcpu, gva_t eaddr,
    75  static u32 find_sr(struct kvm_vcpu *vcpu, gva_t eaddr)  in find_sr() (argument)
    77  return kvmppc_get_sr(vcpu, (eaddr >> 28) & 0xf);  in find_sr()
    80  static u64 kvmppc_mmu_book3s_32_ea_to_vp(struct kvm_vcpu *vcpu, gva_t eaddr,  in kvmppc_mmu_book3s_32_ea_to_vp() (argument)
    86  if (!kvmppc_mmu_book3s_32_xlate_bat(vcpu, eaddr, &pte, data, false))  in kvmppc_mmu_book3s_32_ea_to_vp()
    89  kvmppc_mmu_book3s_32_esid_to_vsid(vcpu, eaddr >> SID_SHIFT, &vsid);  in kvmppc_mmu_book3s_32_ea_to_vp()
    90  return (((u64)eaddr >> 12) & 0xffff) | (vsid << 16);  in kvmppc_mmu_book3s_32_ea_to_vp()
    94  u32 sre, gva_t eaddr,  in kvmppc_mmu_book3s_32_get_pteg()
   101  page = (eaddr & 0x0FFFFFFF) >> 12;  in kvmppc_mmu_book3s_32_get_pteg()
   111  dprintk("MMU: pc=0x%lx eaddr…  in kvmppc_mmu_book3s_32_get_pteg()
Matched definitions:
    93  kvmppc_mmu_book3s_32_get_pteg(struct kvm_vcpu *vcpu, u32 sre, gva_t eaddr, bool primary)  (argument)
   121  kvmppc_mmu_book3s_32_get_ptem(u32 sre, gva_t eaddr, bool primary)  (argument)
   127  kvmppc_mmu_book3s_32_xlate_bat(struct kvm_vcpu *vcpu, gva_t eaddr, struct kvmppc_pte *pte, bool data, bool iswrite)  (argument)
   182  kvmppc_mmu_book3s_32_xlate_pte(struct kvm_vcpu *vcpu, gva_t eaddr, struct kvmppc_pte *pte, bool data, bool iswrite, bool primary)  (argument)
   290  kvmppc_mmu_book3s_32_xlate(struct kvm_vcpu *vcpu, gva_t eaddr, struct kvmppc_pte *pte, bool data, bool iswrite)  (argument)
[all...]
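A tiny userspace sketch of the effective-address decomposition visible in these matches, assuming SID_SHIFT is 28 (consistent with the (eaddr >> 28) in find_sr()); the VSID below is a placeholder, not a real segment lookup:

    #include <stdint.h>
    #include <stdio.h>

    #define SID_SHIFT 28   /* top 4 bits of a 32-bit EA select 1 of 16 SRs */

    int main(void)
    {
        uint32_t eaddr = 0x1234a000u;
        uint32_t sr    = (eaddr >> SID_SHIFT) & 0xf;  /* as in find_sr() */
        uint32_t page  = (eaddr & 0x0FFFFFFF) >> 12;  /* page index within the segment */
        uint64_t vsid  = 0x42;                        /* placeholder VSID */
        /* virtual page number, as in kvmppc_mmu_book3s_32_ea_to_vp() */
        uint64_t vp    = (((uint64_t)eaddr >> 12) & 0xffff) | (vsid << 16);

        printf("sr=%u page=0x%x vp=0x%llx\n",
               (unsigned)sr, (unsigned)page, (unsigned long long)vp);
        return 0;
    }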
book3s_64_mmu.c
    29  gva_t eaddr)  in kvmppc_mmu_book3s_64_find_slbe()
    32  u64 esid = GET_ESID(eaddr);  in kvmppc_mmu_book3s_64_find_slbe()
    33  u64 esid_1t = GET_ESID_1T(eaddr);  in kvmppc_mmu_book3s_64_find_slbe()
    49  eaddr, esid, esid_1t);  in kvmppc_mmu_book3s_64_find_slbe()
    73  static u64 kvmppc_slb_calc_vpn(struct kvmppc_slb *slb, gva_t eaddr)  in kvmppc_slb_calc_vpn() (argument)
    75  eaddr &= kvmppc_slb_offset_mask(slb);  in kvmppc_slb_calc_vpn()
    77  return (eaddr >> VPN_SHIFT) |  in kvmppc_slb_calc_vpn()
    81  static u64 kvmppc_mmu_book3s_64_ea_to_vp(struct kvm_vcpu *vcpu, gva_t eaddr,  in kvmppc_mmu_book3s_64_ea_to_vp() (argument)
    86  slb = kvmppc_mmu_book3s_64_find_slbe(vcpu, eaddr);  in kvmppc_mmu_book3s_64_ea_to_vp()
    90  return kvmppc_slb_calc_vpn(slb, eaddr);  in kvmppc_mmu_book3s_64_ea_to_vp()
Matched definitions:
    27  kvmppc_mmu_book3s_64_find_slbe(struct kvm_vcpu *vcpu, gva_t eaddr)  (argument)
   109  kvmppc_mmu_book3s_64_get_page(struct kvmppc_slb *slbe, gva_t eaddr)  (argument)
   116  kvmppc_mmu_book3s_64_get_pteg(struct kvm_vcpu *vcpu, struct kvmppc_slb *slbe, gva_t eaddr, bool second)  (argument)
   155  kvmppc_mmu_book3s_64_get_avpn(struct kvmppc_slb *slbe, gva_t eaddr)  (argument)
   191  kvmppc_mmu_book3s_64_xlate(struct kvm_vcpu *vcpu, gva_t eaddr, struct kvmppc_pte *gpte, bool data, bool iswrite)  (argument)
   413  kvmppc_mmu_book3s_64_slbfee(struct kvm_vcpu *vcpu, gva_t eaddr, ulong *ret_slb)  (argument)
[all...]
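On the 64-bit side the SLB lookup keys on the ESID, which is just the EA shifted by the segment size. A sketch, assuming the usual 256 MiB and 1 TiB segment shifts of 28 and 40 (values not shown in this listing):

    #include <stdint.h>
    #include <stdio.h>

    /* Assumed shift values for 256 MiB and 1 TiB segments. */
    #define SID_SHIFT     28
    #define SID_SHIFT_1T  40

    int main(void)
    {
        uint64_t eaddr   = 0x000000d123456000ull;
        uint64_t esid    = eaddr >> SID_SHIFT;     /* GET_ESID() analogue */
        uint64_t esid_1t = eaddr >> SID_SHIFT_1T;  /* GET_ESID_1T() analogue */

        printf("esid=0x%llx esid_1t=0x%llx\n",
               (unsigned long long)esid, (unsigned long long)esid_1t);
        return 0;
    }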
trace_pr.h
    39  __field( unsigned long, eaddr )
    49  __entry->eaddr = orig_pte->eaddr;
    57  __entry->flag_w, __entry->flag_x, __entry->eaddr,
    70  __field( ulong, eaddr )
    79  __entry->eaddr = pte->pte.eaddr;
    88  __entry->host_vpn, __entry->pfn, __entry->eaddr,
    99  __field( ulong, eaddr )
   108  __entry->eaddr…
[all...]

book3s_32_mmu_host.c
    59  asm volatile ("tlbie %0" : : "r" (pte->pte.eaddr) : "memory");  in kvmppc_mmu_invalidate_pte()
   106  static u32 *kvmppc_mmu_get_pteg(struct kvm_vcpu *vcpu, u32 vsid, u32 eaddr,  in kvmppc_mmu_get_pteg() (argument)
   112  page = (eaddr & ~ESID_MASK) >> 12;  in kvmppc_mmu_get_pteg()
   138  u32 eaddr = orig_pte->eaddr;  in kvmppc_mmu_map_page() (local)
   158  vcpu->arch.mmu.esid_to_vsid(vcpu, orig_pte->eaddr >> SID_SHIFT, &vsid);  in kvmppc_mmu_map_page()
   161  kvmppc_mmu_map_segment(vcpu, eaddr);  in kvmppc_mmu_map_page()
   168  ((eaddr & ~ESID_MASK) >> VPN_SHIFT);  in kvmppc_mmu_map_page()
   176  pteg = kvmppc_mmu_get_pteg(vcpu, vsid, eaddr, primary);  in kvmppc_mmu_map_page()
   194  pteg0 = ((eaddr…  in kvmppc_mmu_map_page()
Matched definitions:
   302  kvmppc_mmu_map_segment(struct kvm_vcpu *vcpu, ulong eaddr)  (argument)
[all...]

e500_mmu.c
    81  gva_t eaddr, int tlbsel, unsigned int pid, int as)  in kvmppc_e500_tlb_index()
    88  set_base = gtlb0_set_base(vcpu_e500, eaddr);  in kvmppc_e500_tlb_index()
    91  if (eaddr < vcpu_e500->tlb1_min_eaddr ||  in kvmppc_e500_tlb_index()
    92  eaddr > vcpu_e500->tlb1_max_eaddr)  in kvmppc_e500_tlb_index()
   104  if (eaddr < get_tlb_eaddr(tlbe))  in kvmppc_e500_tlb_index()
   107  if (eaddr > get_tlb_end(tlbe))  in kvmppc_e500_tlb_index()
   127  gva_t eaddr, int as)  in kvmppc_e500_deliver_tlb_miss()
   143  vcpu->arch.shared->mas2 = (eaddr & MAS2_EPN)  in kvmppc_e500_deliver_tlb_miss()
   155  gva_t eaddr;  in kvmppc_recalc_tlb1map_range() (local)
   169  eaddr…  in kvmppc_recalc_tlb1map_range()
Matched definitions:
    80  kvmppc_e500_tlb_index(struct kvmppc_vcpu_e500 *vcpu_e500, gva_t eaddr, int tlbsel, unsigned int pid, int as)  (argument)
   126  kvmppc_e500_deliver_tlb_miss(struct kvm_vcpu *vcpu, gva_t eaddr, int as)  (argument)
   435  u64 eaddr = get_tlb_eaddr(gtlbe);  in kvmppc_e500_emul_tlbwe() (local)
   453  kvmppc_e500_tlb_search(struct kvm_vcpu *vcpu, gva_t eaddr, unsigned int pid, int as)  (argument)
   473  gva_t eaddr;  in kvmppc_core_vcpu_translate() (local)
   495  kvmppc_mmu_itlb_index(struct kvm_vcpu *vcpu, gva_t eaddr)  (argument)
   502  kvmppc_mmu_dtlb_index(struct kvm_vcpu *vcpu, gva_t eaddr)  (argument)
   523  kvmppc_mmu_xlate(struct kvm_vcpu *vcpu, unsigned int index, gva_t eaddr)  (argument)
[all...]

book3s_mmu_hpte.c
    26  static inline u64 kvmppc_mmu_hash_pte(u64 eaddr)  in kvmppc_mmu_hash_pte() (argument)
    28  return hash_64(eaddr >> PTE_SIZE, HPTEG_HASH_BITS_PTE);  in kvmppc_mmu_hash_pte()
    31  static inline u64 kvmppc_mmu_hash_pte_long(u64 eaddr)  in kvmppc_mmu_hash_pte_long() (argument)
    33  return hash_64((eaddr & 0x0ffff000) >> PTE_SIZE,  in kvmppc_mmu_hash_pte_long()
    66  index = kvmppc_mmu_hash_pte(pte->pte.eaddr);  in kvmppc_mmu_hpte_cache_map()
    70  index = kvmppc_mmu_hash_pte_long(pte->pte.eaddr);  in kvmppc_mmu_hpte_cache_map()
   163  if ((pte->pte.eaddr & ~0xfffUL) == guest_ea)  in kvmppc_mmu_pte_flush_page()
   183  if ((pte->pte.eaddr & 0x0ffff000UL) == guest_ea)  in kvmppc_mmu_pte_flush_long()
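book3s_mmu_hpte.c buckets shadow PTEs by hashing the effective address. A sketch of that bucket selection, with the kernel's hash_64() reimplemented inline; GOLDEN_RATIO_64, PTE_SIZE = 12, and the 13-bit table width are assumptions taken from the kernel, not from this listing:

    #include <stdint.h>
    #include <stdio.h>

    #define GOLDEN_RATIO_64 0x61C8864680B583EBull  /* kernel hash constant (assumed) */
    #define PTE_SIZE        12                     /* assumed: drops the page offset */
    #define HASH_BITS       13                     /* assumed table size: 2^13 buckets */

    static uint64_t hash_64(uint64_t val, unsigned int bits)
    {
        /* Multiplicative hash: keep the top 'bits' bits of the product. */
        return (val * GOLDEN_RATIO_64) >> (64 - bits);
    }

    int main(void)
    {
        uint64_t eaddr = 0x0000000012345000ull;
        /* Per-page bucket, as in kvmppc_mmu_hash_pte(). */
        uint64_t idx = hash_64(eaddr >> PTE_SIZE, HASH_BITS);
        /* Long-flush bucket keeps only bits 12..27, as in kvmppc_mmu_hash_pte_long(). */
        uint64_t idx_long = hash_64((eaddr & 0x0ffff000) >> PTE_SIZE, HASH_BITS);

        printf("idx=%llu idx_long=%llu\n",
               (unsigned long long)idx, (unsigned long long)idx_long);
        return 0;
    }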
book3s_64_mmu_radix.c
    33  gva_t eaddr, void *to, void *from,  in __kvmhv_copy_tofrom_guest_radix()
    42  return plpar_hcall_norets(H_COPY_TOFROM_GUEST, lpid, pid, eaddr,  in __kvmhv_copy_tofrom_guest_radix()
    50  from = (void *) (eaddr | (quadrant << 62));  in __kvmhv_copy_tofrom_guest_radix()
    52  to = (void *) (eaddr | (quadrant << 62));  in __kvmhv_copy_tofrom_guest_radix()
    87  static long kvmhv_copy_tofrom_guest_radix(struct kvm_vcpu *vcpu, gva_t eaddr,  in kvmhv_copy_tofrom_guest_radix() (argument)
    94  if (eaddr & (0x3FFUL << 52))  in kvmhv_copy_tofrom_guest_radix()
   102  if (((eaddr >> 62) & 0x3) == 0x3)  in kvmhv_copy_tofrom_guest_radix()
   105  eaddr &= ~(0xFFFUL << 52);  in kvmhv_copy_tofrom_guest_radix()
   107  return __kvmhv_copy_tofrom_guest_radix(lpid, pid, eaddr, to, from, n);  in kvmhv_copy_tofrom_guest_radix()
   110  long kvmhv_copy_from_guest_radix(struct kvm_vcpu *vcpu, gva_t eaddr, voi…  (argument)
Matched definitions:
    32  __kvmhv_copy_tofrom_guest_radix(int lpid, int pid, gva_t eaddr, void *to, void *from, unsigned long n)  (argument)
   123  kvmhv_copy_to_guest_radix(struct kvm_vcpu *vcpu, gva_t eaddr, void *from, unsigned long n)  (argument)
   130  kvmppc_mmu_walk_radix_tree(struct kvm_vcpu *vcpu, gva_t eaddr, struct kvmppc_pte *gpte, u64 root, u64 *pte_ret_p)  (argument)
   225  kvmppc_mmu_radix_translate_table(struct kvm_vcpu *vcpu, gva_t eaddr, struct kvmppc_pte *gpte, u64 table, int table_index, u64 *pte_ret_p)  (argument)
   256  kvmppc_mmu_radix_xlate(struct kvm_vcpu *vcpu, gva_t eaddr, struct kvmppc_pte *gpte, bool data, bool iswrite)  (argument)
[all...]
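The radix copy helpers above tag the effective address with a quadrant in its top two bits before using it as a pointer. A sketch of that tagging, using the masks from the excerpt; the address value and quadrant number are placeholders, and the pointer is only formed, never dereferenced:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint64_t eaddr = 0x0000000012345000ull;
        unsigned int quadrant = 1;   /* placeholder quadrant number */

        /* Reject EAs with any of bits 52..61 set, as the excerpt does. */
        if (eaddr & (0x3FFULL << 52))
            return 1;
        /* Clear bits 52..63, then put the quadrant into bits 62..63. */
        eaddr &= ~(0xFFFULL << 52);
        uint64_t tagged = eaddr | ((uint64_t)quadrant << 62);

        printf("tagged EA: 0x%016llx\n", (unsigned long long)tagged);
        return 0;
    }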
/kernel/linux/linux-6.6/arch/powerpc/kvm/

book3s_32_mmu.c
    Matches identical to the linux-5.10 entry above (lines 69, 75, 77, 80, 86, 89, 90, 94, 101, 111; definitions at 93, 121, 127, 182, 290).
[all...]

book3s_64_mmu.c
    Matches identical to the linux-5.10 entry above, except kvmppc_mmu_book3s_64_slbfee(struct kvm_vcpu *vcpu, gva_t eaddr, ulong *ret_slb) is at line 412 rather than 413.
[all...]

trace_pr.h
    Matches identical to the linux-5.10 entry above (lines 39, 49, 57, 70, 79, 88, 99, 108).
[all...]

book3s_32_mmu_host.c
    Matches identical to the linux-5.10 entry above (lines 59, 106, 112, 138, 158, 161, 168, 176, 194; kvmppc_mmu_map_segment() at 302).
[all...]

e500_mmu.c
    Matches identical to the linux-5.10 entry above (lines 81, 88, 91, 92, 104, 107, 127, 143, 155, 169; definitions at 80, 126, 435, 453, 473, 495, 502, 523).
[all...]

book3s_mmu_hpte.c
    Matches identical to the linux-5.10 entry above (lines 26, 28, 31, 33, 66, 70, 163, 183).

book3s_64_mmu_radix.c
    36  gva_t eaddr, void *to, void *from,  in __kvmhv_copy_tofrom_guest_radix()
    45  return plpar_hcall_norets(H_COPY_TOFROM_GUEST, lpid, pid, eaddr,  in __kvmhv_copy_tofrom_guest_radix()
    49  if (eaddr & (0xFFFUL << 52))  in __kvmhv_copy_tofrom_guest_radix()
    56  from = (void *) (eaddr | (quadrant << 62));  in __kvmhv_copy_tofrom_guest_radix()
    58  to = (void *) (eaddr | (quadrant << 62));  in __kvmhv_copy_tofrom_guest_radix()
    96  static long kvmhv_copy_tofrom_guest_radix(struct kvm_vcpu *vcpu, gva_t eaddr,  in kvmhv_copy_tofrom_guest_radix() (argument)
   103  if (eaddr & (0x3FFUL << 52))  in kvmhv_copy_tofrom_guest_radix()
   111  if (((eaddr >> 62) & 0x3) == 0x3)  in kvmhv_copy_tofrom_guest_radix()
   114  eaddr &= ~(0xFFFUL << 52);  in kvmhv_copy_tofrom_guest_radix()
   116  return __kvmhv_copy_tofrom_guest_radix(lpid, pid, eaddr, t…  in kvmhv_copy_tofrom_guest_radix()
Matched definitions:
    35  __kvmhv_copy_tofrom_guest_radix(int lpid, int pid, gva_t eaddr, void *to, void *from, unsigned long n)  (argument)
   119  kvmhv_copy_from_guest_radix(struct kvm_vcpu *vcpu, gva_t eaddr, void *to, unsigned long n)  (argument)
   131  kvmhv_copy_to_guest_radix(struct kvm_vcpu *vcpu, gva_t eaddr, void *from, unsigned long n)  (argument)
   137  kvmppc_mmu_walk_radix_tree(struct kvm_vcpu *vcpu, gva_t eaddr, struct kvmppc_pte *gpte, u64 root, u64 *pte_ret_p)  (argument)
   233  kvmppc_mmu_radix_translate_table(struct kvm_vcpu *vcpu, gva_t eaddr, struct kvmppc_pte *gpte, u64 table, int table_index, u64 *pte_ret_p)  (argument)
   264  kvmppc_mmu_radix_xlate(struct kvm_vcpu *vcpu, gva_t eaddr, struct kvmppc_pte *gpte, bool data, bool iswrite)  (argument)
[all...]
/kernel/linux/linux-6.6/drivers/edac/

igen6_edac.c
   137  u64 (*err_addr_to_sys_addr)(u64 eaddr, int mc);
   139  u64 (*err_addr_to_imc_addr)(u64 eaddr, int mc);
   235  static u64 ehl_err_addr_to_sys_addr(u64 eaddr, int mc)  in ehl_err_addr_to_sys_addr() (argument)
   237  return eaddr;  in ehl_err_addr_to_sys_addr()
   240  static u64 ehl_err_addr_to_imc_addr(u64 eaddr, int mc)  in ehl_err_addr_to_imc_addr() (argument)
   242  if (eaddr < igen6_tolud)  in ehl_err_addr_to_imc_addr()
   243  return eaddr;  in ehl_err_addr_to_imc_addr()
   246  return eaddr + igen6_tolud - _4GB;  in ehl_err_addr_to_imc_addr()
   248  if (eaddr < _4GB)  in ehl_err_addr_to_imc_addr()
   249  return eaddr…  in ehl_err_addr_to_imc_addr()
Matched definitions:
   301  tgl_err_addr_to_mem_addr(u64 eaddr, int mc)  (argument)
   324  tgl_err_addr_to_sys_addr(u64 eaddr, int mc)  (argument)
   331  tgl_err_addr_to_imc_addr(u64 eaddr, int mc)  (argument)
   336  adl_err_addr_to_sys_addr(u64 eaddr, int mc)  (argument)
   341  adl_err_addr_to_imc_addr(u64 eaddr, int mc)  (argument)
   674  u64 eaddr;  in ecclog_work_cb() (local)
[all...]
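The truncated ehl_err_addr_to_imc_addr() branches above follow the usual TOLUD layout: DRAM below TOLUD maps 1:1 and the TOLUD..4 GiB window is MMIO, so higher addresses shift down by the hole size. A simplified sketch under that assumption; the function name, variable names, and exact branch structure here are illustrative, not the driver's:

    #include <stdint.h>
    #include <stdio.h>

    #define _4GB (1ull << 32)

    static uint64_t err_addr_to_imc_addr(uint64_t eaddr, uint64_t tolud)
    {
        if (eaddr < tolud)           /* below the MMIO hole: identity mapping */
            return eaddr;
        if (eaddr >= _4GB)           /* above 4 GiB: subtract the hole size */
            return eaddr - (_4GB - tolud);
        return eaddr;                /* inside the hole: not DRAM-backed */
    }

    int main(void)
    {
        uint64_t tolud = 0xC0000000ull;  /* example 3 GiB TOLUD */
        printf("0x%llx\n",
               (unsigned long long)err_addr_to_imc_addr(_4GB, tolud));
        return 0;
    }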
/kernel/linux/linux-5.10/arch/powerpc/include/asm/

kvm_book3s.h
   155  extern int kvmppc_mmu_map_segment(struct kvm_vcpu *vcpu, ulong eaddr);
   156  extern void kvmppc_mmu_flush_segment(struct kvm_vcpu *vcpu, ulong eaddr, ulong seg_size);
   160  extern long kvmppc_hv_find_lock_hpte(struct kvm *kvm, gva_t eaddr,
   179  gva_t eaddr, void *to, void *from,
   181  extern long kvmhv_copy_from_guest_radix(struct kvm_vcpu *vcpu, gva_t eaddr,
   183  extern long kvmhv_copy_to_guest_radix(struct kvm_vcpu *vcpu, gva_t eaddr,
   185  extern int kvmppc_mmu_walk_radix_tree(struct kvm_vcpu *vcpu, gva_t eaddr,
   188  extern int kvmppc_mmu_radix_translate_table(struct kvm_vcpu *vcpu, gva_t eaddr,
   191  extern int kvmppc_mmu_radix_xlate(struct kvm_vcpu *vcpu, gva_t eaddr,
   226  extern int kvmppc_ld(struct kvm_vcpu *vcpu, ulong *eaddr, in…
[all...]

/kernel/linux/linux-6.6/arch/powerpc/include/asm/

kvm_book3s.h
    Matches identical to the linux-5.10 header above (lines 155, 156, 160, 179, 181, 183, 185, 188, 191, 226).
[all...]
/kernel/linux/linux-6.6/arch/arm64/kernel/

compat_alignment.c
   118  unsigned long eaddr, newaddr;  in do_alignment_ldmstm() (local)
   125  newaddr = eaddr = regs->regs[rn];  in do_alignment_ldmstm()
   131  eaddr = newaddr;  in do_alignment_ldmstm()
   134  eaddr += 4;  in do_alignment_ldmstm()
   140  if (get_user(val, (u32 __user *)eaddr))  in do_alignment_ldmstm()
   155  if (put_user(val, (u32 __user *)eaddr))  in do_alignment_ldmstm()
   158  eaddr += 4;  in do_alignment_ldmstm()

/kernel/linux/linux-5.10/arch/arm/mm/

alignment.c
   495  unsigned long eaddr, newaddr;  in do_alignment_ldmstm() (local)
   509  newaddr = eaddr = regs->uregs[rn];  in do_alignment_ldmstm()
   515  eaddr = newaddr;  in do_alignment_ldmstm()
   518  eaddr += 4;  in do_alignment_ldmstm()
   529  * This is a "hint" - we already have eaddr worked out by the  in do_alignment_ldmstm()
   532  if (addr != eaddr) {  in do_alignment_ldmstm()
   534  "addr = %08lx, eaddr = %08lx\n",  in do_alignment_ldmstm()
   535  instruction_pointer(regs), instr, addr, eaddr);  in do_alignment_ldmstm()
   547  get32t_unaligned_check(val, eaddr);  in do_alignment_ldmstm()
   550  put32t_unaligned_check(regs->uregs[rd], eaddr);  in do_alignment_ldmstm()
[all...]
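Both do_alignment_ldmstm() handlers walk an LDM/STM register list the same way: one word per set bit, stepping eaddr by 4. A userspace sketch of that walk; plain array accesses stand in for get_user()/put_user(), and the register-list value is a placeholder:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t memory[8] = { 0x11, 0x22, 0x33 };  /* stands in for user memory */
        uint32_t regs[16] = { 0 };
        uint16_t reglist = 0x0013;   /* placeholder: r0, r1 and r4 in the list */
        uint32_t eaddr = 0;          /* index into 'memory', not a real EA */

        for (unsigned rd = 0; rd < 16; rd++) {
            if (!(reglist & (1u << rd)))
                continue;
            regs[rd] = memory[eaddr / 4];  /* LDM direction: load into rd */
            eaddr += 4;                    /* advance one word, as in the handlers */
        }
        printf("r4 = 0x%x\n", regs[4]);
        return 0;
    }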