Home
last modified time | relevance | path

Searched refs:tlb_state (Results 1 - 8 of 8) sorted by relevance

/kernel/linux/linux-5.10/arch/x86/include/asm/
tlbflush.h:57 * 6 because 6 should be plenty and struct tlb_state will fit in two cache
67 struct tlb_state { struct
154 DECLARE_PER_CPU_SHARED_ALIGNED(struct tlb_state, cpu_tlbstate);
/kernel/linux/linux-6.6/arch/x86/include/asm/
tlbflush.h:62 * 6 because 6 should be plenty and struct tlb_state will fit in two cache
72 struct tlb_state { struct
152 DECLARE_PER_CPU_ALIGNED(struct tlb_state, cpu_tlbstate);
/kernel/linux/linux-5.10/arch/x86/kernel/
asm-offsets.c:93 OFFSET(TLB_STATE_user_pcid_flush_mask, tlb_state, user_pcid_flush_mask); in common()
hw_breakpoint.c:308 sizeof(struct tlb_state))) in within_cpu_entry()
/kernel/linux/linux-6.6/arch/x86/kernel/
asm-offsets.c:106 OFFSET(TLB_STATE_user_pcid_flush_mask, tlb_state, user_pcid_flush_mask); in common()
hw_breakpoint.c:308 sizeof(struct tlb_state))) in within_cpu_entry()
/kernel/linux/linux-5.10/arch/x86/mm/
init.c:1049 __visible DEFINE_PER_CPU_SHARED_ALIGNED(struct tlb_state, cpu_tlbstate) = {
/kernel/linux/linux-6.6/arch/x86/mm/
init.c:1060 __visible DEFINE_PER_CPU_ALIGNED(struct tlb_state, cpu_tlbstate) = {

Completed in 6 milliseconds