/kernel/linux/linux-5.10/arch/parisc/include/asm/

  cache.h
    43  ALTERNATIVE(ALT_COND_NO_SMP, INSN_PxTLB) \
    46  ALTERNATIVE(ALT_COND_NO_SMP, INSN_PxTLB) \
    47  ALTERNATIVE(ALT_COND_NO_SPLIT_TLB, INSN_NOP) \
    50  ALTERNATIVE(ALT_COND_NO_SMP, INSN_PxTLB) \
    54  ALTERNATIVE(ALT_COND_NO_DCACHE, INSN_NOP) \
    55  ALTERNATIVE(ALT_COND_NO_IOC_FDC, INSN_NOP) \
    58  ALTERNATIVE(ALT_COND_NO_DCACHE, INSN_NOP) \
    59  ALTERNATIVE(ALT_COND_NO_IOC_FDC, INSN_NOP) :::"memory")

  alternative.h
    36  #define ALTERNATIVE(cond, replacement) "!0:" \    macro
    45  #define ALTERNATIVE(from, to, cond, replacement)\    macro
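
The two-argument parisc form tags the instruction emitted just before it: ALTERNATIVE(cond, replacement) drops a record into .altinstructions so the boot-time patcher can rewrite that instruction when the condition holds. A minimal usage sketch in the style of the cache.h hits above; the helper name and the pdtlb operand syntax are illustrative, not copied from the header, and the ALT_COND_NO_SMP / INSN_PxTLB pairing is taken to mean "turn the broadcast pdtlb into its local form on non-SMP machines" (an assumption of this sketch):

    /* Sketch only: purge one TLB entry, letting boot-time patching rewrite
     * the pdtlb on non-SMP systems. */
    static inline void sketch_purge_tlb_entry(unsigned long addr)
    {
            asm volatile("pdtlb 0(%%sr1,%0)"
                         ALTERNATIVE(ALT_COND_NO_SMP, INSN_PxTLB)
                         : : "r" (addr) : "memory");
    }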

/kernel/linux/linux-6.6/arch/parisc/include/asm/

  cache.h
    44  ALTERNATIVE(ALT_COND_NO_SMP, INSN_PxTLB) \
    47  ALTERNATIVE(ALT_COND_NO_SMP, INSN_PxTLB) \
    48  ALTERNATIVE(ALT_COND_NO_SPLIT_TLB, INSN_NOP) \
    52  ALTERNATIVE(ALT_COND_NO_DCACHE, INSN_NOP) \
    53  ALTERNATIVE(ALT_COND_NO_IOC_FDC, INSN_NOP) \
    56  ALTERNATIVE(ALT_COND_NO_DCACHE, INSN_NOP) \
    57  ALTERNATIVE(ALT_COND_NO_IOC_FDC, INSN_NOP) :::"memory")

  alternative.h
    36  #define ALTERNATIVE(cond, replacement) "!0:" \    macro
    47  #define ALTERNATIVE(from, to, cond, replacement)\    macro

/kernel/linux/linux-5.10/arch/x86/include/asm/

  smap.h
    25  ALTERNATIVE "", __ASM_CLAC, X86_FEATURE_SMAP
    28  ALTERNATIVE "", __ASM_STAC, X86_FEATURE_SMAP
    58  ALTERNATIVE("jmp 1f", "", X86_FEATURE_SMAP)    in smap_save()
    69  ALTERNATIVE("jmp 1f", "", X86_FEATURE_SMAP)    in smap_restore()
    77  ALTERNATIVE("", __ASM_CLAC, X86_FEATURE_SMAP)
    79  ALTERNATIVE("", __ASM_STAC, X86_FEATURE_SMAP)
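
The smap.h hits show the basic C-level pattern: the site is empty by default and a real CLAC/STAC is patched in only on CPUs with SMAP. A minimal sketch of that shape, assuming the __ASM_CLAC string and the alternative() wrapper from alternative.h; the function name is made up:

    static __always_inline void sketch_clac(void)
    {
            /* Emits nothing on non-SMAP CPUs; apply_alternatives() patches in
             * the clac instruction when X86_FEATURE_SMAP is set. */
            alternative("", __ASM_CLAC, X86_FEATURE_SMAP);
    }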

  barrier.h
    15  #define mb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "mfence", \
    17  #define rmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "lfence", \
    19  #define wmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "sfence", \
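
These are the 32-bit barrier definitions: the always-available "lock; addl $0,-4(%%esp)" is emitted by default and patched into a real fence on CPUs that have one (keyed on X86_FEATURE_XMM2). A hedged sketch of the same pattern; only the instruction pairing is taken from the lines above:

    /* Full memory barrier in the style of the mb() definition above. */
    #define sketch_mb()                                                       \
            asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "mfence",     \
                                     X86_FEATURE_XMM2) ::: "memory", "cc")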

  alternative.h
    161  #define ALTERNATIVE(oldinstr, newinstr, feature) \    macro
    212  asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, feature) : : : "memory")
    229  asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, feature) \
    248  asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, feature) \
    253  asm_inline volatile (ALTERNATIVE("call %P[old]", "call %P[new]", feature) \
    328  .macro ALTERNATIVE oldinstr, newinstr, feature
    360  * Same as ALTERNATIVE macro above but for two alternatives. If CPU
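
Line 212 is the body of the plain alternative() statement wrapper; lines 229 and 248 belong to the operand-taking wrappers (alternative_input() and alternative_io() in this header), and line 253 patches call targets. A usage sketch of the operand-taking form, assuming the alternative_input() name from this header; the prefetch pairing is illustrative:

    /* No prefetch at all by default; a non-temporal prefetch is patched in
     * once X86_FEATURE_XMM is available. */
    static inline void sketch_prefetch(const void *p)
    {
            alternative_input("", "prefetchnta %[ptr]", X86_FEATURE_XMM,
                              [ptr] "m" (*(const char *)p));
    }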

  arch_hweight.h
    19  asm (ALTERNATIVE("call __sw_hweight32", "popcntl %1, %0", X86_FEATURE_POPCNT)    in __arch_hweight32()
    47  asm (ALTERNATIVE("call __sw_hweight64", "popcntq %1, %0", X86_FEATURE_POPCNT)    in __arch_hweight64()
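
The hweight helpers are a compact end-to-end example: the call to the software popcount fallback is replaced by a single popcnt instruction when the CPU advertises it. A reconstruction of the 32-bit variant for a 64-bit build, assuming __sw_hweight32 is an asm helper that takes its argument in %edi and returns in %eax (the constraints below rely on exactly that):

    static __always_inline unsigned int sketch_hweight32(unsigned int w)
    {
            unsigned int res;

            asm (ALTERNATIVE("call __sw_hweight32", "popcntl %1, %0",
                             X86_FEATURE_POPCNT)
                 : "=a" (res)
                 : "D" (w));
            return res;
    }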

/kernel/linux/linux-6.6/arch/x86/include/asm/

  smap.h
    23  ALTERNATIVE "", __ASM_CLAC, X86_FEATURE_SMAP
    26  ALTERNATIVE "", __ASM_STAC, X86_FEATURE_SMAP
    47  ALTERNATIVE("", "pushf; pop %0; " __ASM_CLAC "\n\t",    in smap_save()
    57  ALTERNATIVE("", "push %0; popf\n\t",    in smap_restore()
    64  ALTERNATIVE("", __ASM_CLAC, X86_FEATURE_SMAP)
    66  ALTERNATIVE("", __ASM_STAC, X86_FEATURE_SMAP)

  barrier.h
    15  #define mb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "mfence", \
    17  #define rmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "lfence", \
    19  #define wmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "sfence", \

  alternative.h
    211  #define ALTERNATIVE(oldinstr, newinstr, ft_flags) \    macro
    263  asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, ft_flags) : : : "memory")
    280  asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, ft_flags) \
    299  asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, ft_flags) \
    304  asm_inline volatile (ALTERNATIVE("call %P[old]", "call %P[new]", ft_flags) \
    379  .macro ALTERNATIVE oldinstr, newinstr, ft_flags
    413  * Same as ALTERNATIVE macro above but for two alternatives. If CPU

  arch_hweight.h
    19  asm (ALTERNATIVE("call __sw_hweight32", "popcntl %1, %0", X86_FEATURE_POPCNT)    in __arch_hweight32()
    47  asm (ALTERNATIVE("call __sw_hweight64", "popcntq %1, %0", X86_FEATURE_POPCNT)    in __arch_hweight64()

  uaccess_64.h
    26   asm (ALTERNATIVE("",    in __untagged_addr()
    114  ALTERNATIVE("rep movsb",    in copy_user_generic()
    176  ALTERNATIVE("rep stosb",    in __clear_user()
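
The 6.6 copy_user_generic() hit is the inverted form: "rep movsb" is the default and a call to a fallback routine is patched in when the CPU lacks Fast Short REP MOVS. A stripped-down sketch without the kernel's exception-table and clobber plumbing; the ALT_NOT(X86_FEATURE_FSRM) condition and the rep_movs_alternative callee are recalled from the source rather than visible in the truncated line above:

    static __always_inline void sketch_copy(void *to, const void *from,
                                            unsigned long len)
    {
            /* rep movsb consumes %rdi/%rsi/%rcx; the patched-in fallback is
             * assumed to use the same register interface. */
            asm volatile(ALTERNATIVE("rep movsb",
                                     "call rep_movs_alternative",
                                     ALT_NOT(X86_FEATURE_FSRM))
                         : "+D" (to), "+S" (from), "+c" (len)
                         : : "memory");
    }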

/kernel/linux/linux-5.10/arch/arm64/include/asm/

  irqflags.h
    37   asm volatile(ALTERNATIVE(    in arch_local_irq_enable()
    56   asm volatile(ALTERNATIVE(    in arch_local_irq_disable()
    72   asm volatile(ALTERNATIVE(    in arch_local_save_flags()
    87   asm volatile(ALTERNATIVE(    in arch_irqs_disabled_flags()
    124  asm volatile(ALTERNATIVE(    in arch_local_irq_restore()
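
Each of these arm64 hits switches between ordinary DAIF masking and GIC priority masking when the kernel runs with pseudo-NMI support. A simplified sketch of the enable path, assuming the ARM64_HAS_IRQ_PRIO_MASKING capability and GIC_PRIO_IRQON value used by that code; the sysreg is written by name here, where the kernel uses its own encoding macros, and the extra synchronisation the real code performs is omitted:

    static inline void sketch_local_irq_enable(void)
    {
            /* Default: clear PSTATE.I.  Under priority masking, unmask by
             * writing the "interrupts on" priority to ICC_PMR_EL1 instead. */
            asm volatile(ALTERNATIVE(
                            "msr daifclr, #2",
                            "msr ICC_PMR_EL1, %0",
                            ARM64_HAS_IRQ_PRIO_MASKING)
                         :
                         : "r" ((unsigned long) GIC_PRIO_IRQON)
                         : "memory");
    }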

  uaccess.h
    47   asm(ALTERNATIVE("nop", SET_PSTATE_UAO(1), ARM64_HAS_UAO));    in set_fs()
    49   asm(ALTERNATIVE("nop", SET_PSTATE_UAO(0), ARM64_HAS_UAO,    in set_fs()
    179  asm(ALTERNATIVE("nop", SET_PSTATE_PAN(0), ARM64_HAS_PAN,    in __uaccess_disable_hw_pan()
    185  asm(ALTERNATIVE("nop", SET_PSTATE_PAN(1), ARM64_HAS_PAN,    in __uaccess_enable_hw_pan()
    192  asm(ALTERNATIVE("nop", SET_PSTATE_PAN(1), alt, \
    199  asm(ALTERNATIVE("nop", SET_PSTATE_PAN(0), alt, \
    258  "1:"ALTERNATIVE(instr " " reg "1, [%2]\n", \
    323  "1:"ALTERNATIVE(instr " " reg "1, [%2]\n", \
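
Lines 179 and 185 toggle PSTATE.PAN around user-space accesses; the nop is replaced only when the CPU implements PAN and the matching config option is enabled. A direct sketch of the enable side, with names taken from the lines above and CONFIG_ARM64_PAN assumed as the fourth argument:

    static inline void sketch_uaccess_enable_hw_pan(void)
    {
            /* Set PSTATE.PAN again so stray kernel accesses to user memory
             * fault; remains a nop on CPUs without PAN. */
            asm(ALTERNATIVE("nop", SET_PSTATE_PAN(1), ARM64_HAS_PAN,
                            CONFIG_ARM64_PAN));
    }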

  io.h
    52  asm volatile(ALTERNATIVE("ldrb %w0, [%1]",    in __raw_readb()
    64  asm volatile(ALTERNATIVE("ldrh %w0, [%1]",    in __raw_readw()
    75  asm volatile(ALTERNATIVE("ldr %w0, [%1]",    in __raw_readl()
    86  asm volatile(ALTERNATIVE("ldr %0, [%1]",    in __raw_readq()
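
The MMIO accessors keep a plain load as the default and patch in a load-acquire form on parts that need it. A sketch of __raw_readb() in that style; the replacement instruction and capability name (ldapurb / ARM64_WORKAROUND_DEVICE_LOAD_ACQUIRE) are recalled from the kernel source rather than visible in the truncated lines above:

    static __always_inline u8 sketch_raw_readb(const volatile void __iomem *addr)
    {
            u8 val;

            asm volatile(ALTERNATIVE("ldrb %w0, [%1]",
                                     "ldapurb %w0, [%1]",
                                     ARM64_WORKAROUND_DEVICE_LOAD_ACQUIRE)
                         : "=r" (val) : "r" (addr));
            return val;
    }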

/kernel/linux/linux-5.10/arch/x86/entry/

  calling.h
    188  ALTERNATIVE "", "SET_NOFLUSH_BIT \reg", X86_FEATURE_PCID
    194  ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
    205  ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
    208  ALTERNATIVE "jmp .Lwrcr3_\@", "", X86_FEATURE_PCID    variable
    245  ALTERNATIVE "jmp .Ldone_\@", "", X86_FEATURE_PTI
    263  ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
    265  ALTERNATIVE "jmp .Lwrcr3_\@", "", X86_FEATURE_PCID    variable
    327  ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_KERNEL_IBRS
    356  ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_KERNEL_IBRS
    384  ALTERNATIVE "", "lfenc [all...]

/kernel/linux/linux-6.6/arch/x86/entry/

  calling.h
    162  ALTERNATIVE "", "SET_NOFLUSH_BIT \reg", X86_FEATURE_PCID
    168  ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
    179  ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
    182  ALTERNATIVE "jmp .Lwrcr3_\@", "", X86_FEATURE_PCID    variable
    219  ALTERNATIVE "jmp .Ldone_\@", "", X86_FEATURE_PTI
    237  ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
    239  ALTERNATIVE "jmp .Lwrcr3_\@", "", X86_FEATURE_PCID    variable
    301  ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_KERNEL_IBRS
    330  ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_KERNEL_IBRS
    358  ALTERNATIVE "", "lfenc [all...]

  entry_64.S
    129   ALTERNATIVE "", "jmp swapgs_restore_regs_and_return_to_usermode", \
    150   ALTERNATIVE "shl $(64 - 48), %rcx; sar $(64 - 48), %rcx", \
    353   ALTERNATIVE "call error_entry; movq %rax, %rsp", \
    633   ALTERNATIVE "", "jmp xenpv_restore_regs_and_return_to_usermode", X86_FEATURE_XENPV
    803   2: ALTERNATIVE "", "mfence", X86_BUG_SWAPGS_FENCE
    816   ALTERNATIVE "", "ZAP_GS", X86_BUG_NULL_SEG
    950   ALTERNATIVE "jmp .Lparanoid_entry_checkgs", "", X86_FEATURE_FSGSBASE
    1036  ALTERNATIVE "jmp .Lparanoid_exit_checkgs", "", X86_FEATURE_FSGSBASE
    1475  ALTERNATIVE "jmp nmi_no_fsgsbase", "", X86_FEATURE_FSGSBASE

/kernel/linux/linux-6.6/arch/riscv/include/asm/

  errata_list.h
    35   ALTERNATIVE(__stringify(RISCV_PTR do_trap_insn_fault), \
    41   ALTERNATIVE(__stringify(RISCV_PTR do_page_fault), \
    48   asm(ALTERNATIVE("sfence.vma %0", "sfence.vma", SIFIVE_VENDOR_ID, \
    77   asm volatile(ALTERNATIVE( \
    154  asm volatile(ALTERNATIVE( \
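
The riscv form is vendor-errata driven: line 48 demotes a per-address sfence.vma to a full fence on SiFive parts hit by an erratum. A sketch of that call site; the errata and config identifiers (ERRATA_SIFIVE_CIP_1200, CONFIG_ERRATA_SIFIVE_CIP_1200) are filled in from memory since the listed line is truncated:

    static inline void sketch_local_flush_tlb_page(unsigned long addr)
    {
            /* Narrow flush by default; patched to a global sfence.vma when the
             * SiFive erratum applies. */
            asm(ALTERNATIVE("sfence.vma %0", "sfence.vma", SIFIVE_VENDOR_ID,
                            ERRATA_SIFIVE_CIP_1200,
                            CONFIG_ERRATA_SIFIVE_CIP_1200)
                : : "r" (addr) : "memory");
    }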

/kernel/linux/linux-6.6/arch/s390/include/asm/

  alternative.h
    63   #define ALTERNATIVE(oldinstr, altinstr, facility) \    macro
    96   asm_inline volatile(ALTERNATIVE(oldinstr, altinstr, facility) : : : "memory")
    104  asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, feature) \
    109  asm_inline volatile(ALTERNATIVE(oldinstr, altinstr, facility) \
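
On s390 the third argument is a facility number rather than a CPU feature flag (the entry.S hits further down use plain numbers such as 40, 82 and 193). A minimal sketch of the C-level wrapper; the nop/.insn/82 pairing is copied from the entry.S line 103 listed below and may not correspond to a real C call site:

    static inline void sketch_facility_patched_nop(void)
    {
            /* Stays a nop unless facility 82 is installed, in which case the
             * listed .insn form is patched in at boot. */
            asm_inline volatile(ALTERNATIVE("nop",
                                            ".insn rrf,0xb2e80000,0,0,12,0",
                                            82)
                                : : : "memory");
    }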

/kernel/linux/linux-6.6/arch/arm64/include/asm/

  arch_timer.h
    69   asm volatile(ALTERNATIVE("isb\n mrs %0, cntpct_el0",    in arch_timer_read_cntpct_el0()
    81   asm volatile(ALTERNATIVE("isb\n mrs %0, cntvct_el0",    in arch_timer_read_cntvct_el0()
    183  asm volatile(ALTERNATIVE("isb\n mrs %0, cntpct_el0",    in __arch_counter_get_cntpct()
    204  asm volatile(ALTERNATIVE("isb\n mrs %0, cntvct_el0",    in __arch_counter_get_cntvct()

  io.h
    52  asm volatile(ALTERNATIVE("ldrb %w0, [%1]",    in __raw_readb()
    64  asm volatile(ALTERNATIVE("ldrh %w0, [%1]",    in __raw_readw()
    75  asm volatile(ALTERNATIVE("ldr %w0, [%1]",    in __raw_readl()
    86  asm volatile(ALTERNATIVE("ldr %0, [%1]",    in __raw_readq()

/kernel/linux/linux-6.6/arch/s390/kernel/

  entry.S
    35   ALTERNATIVE "nop", ".insn s,0xb2010000,\address", 193
    39   ALTERNATIVE "nop", ".insn s,0xb2000000,\address", 193
    43   ALTERNATIVE "b \lpswe; nopr", ".insn siy,0xeb0000000071,\address,0", 193
    47   ALTERNATIVE "brcl 0,0", __stringify(mvc __PT_LAST_BREAK(8,\reg),__LC_LAST_BREAK), 193
    103  ALTERNATIVE "nop", ".insn rrf,0xb2e80000,0,0,12,0", 82
    107  ALTERNATIVE "nop", ".insn rrf,0xb2e80000,0,0,13,0", 82
    111  ALTERNATIVE "TSTMSK \tif_ptr,\tif_mask; jz .+8; .insn rrf,0xb2e80000,0,0,13,0", \
    117  ALTERNATIVE "jz .+8; .insn rrf,0xb2e80000,0,0,12,0", \
    194  ALTERNATIVE "nop", "lpp _LPP_OFFSET", 40
    559  0: ALTERNATIVE "no [all...]

/kernel/linux/linux-5.10/arch/parisc/kernel/

  pacache.S
    106  ALTERNATIVE(88b, fitdone, ALT_COND_NO_SPLIT_TLB, INSN_NOP)
    244  89: ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
    306  89: ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
    570  ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
    571  ALTERNATIVE(1b, 1b+4, ALT_COND_NO_SMP, INSN_PxTLB)
    709  ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
    785  ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
    817  89: ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
    844  ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
    876  89: ALTERNATIVE(8 [all...]