/kernel/linux/linux-5.10/arch/x86/include/asm/
sync_bitops.h
    85:  return GEN_BINARY_RMWcc("lock; " __ASM_SIZE(bts), *addr, c, "Ir", nr);    in sync_test_and_set_bit()
    98:  return GEN_BINARY_RMWcc("lock; " __ASM_SIZE(btr), *addr, c, "Ir", nr);    in sync_test_and_clear_bit()
   111:  return GEN_BINARY_RMWcc("lock; " __ASM_SIZE(btc), *addr, c, "Ir", nr);    in sync_test_and_change_bit()

bitops.h
   138:  return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(bts), *addr, c, "Ir", nr);    in arch_test_and_set_bit()
   162:  return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btr), *addr, c, "Ir", nr);    in arch_test_and_clear_bit()
   201:  return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btc), *addr, c, "Ir", nr);    in arch_test_and_change_bit()
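
All of the sync_bitops.h and bitops.h hits above wrap a single locked bit instruction (bts/btr/btc) whose resulting carry flag becomes the function's return value. The following is a rough standalone sketch of that idea, assuming a GCC/Clang flag-output constraint is available; demo_test_and_set_bit is a made-up name, and the real headers pick the operand size with __ASM_SIZE() rather than hard-coding btsq.

    #include <stdbool.h>
    #include <stdio.h>

    /* Illustrative only: atomically set bit nr in *addr and return the bit's
     * previous value, read straight from the carry flag that lock bts leaves
     * behind. Roughly what the GEN_BINARY_RMWcc(bts, ...) users above compile
     * down to on toolchains with "=@cc<cc>" flag outputs. */
    static inline bool demo_test_and_set_bit(long nr, volatile unsigned long *addr)
    {
        bool oldbit;

        asm volatile("lock btsq %2, %0"
                     : "+m" (*addr), "=@ccc" (oldbit)   /* "c": CF = old bit value */
                     : "Ir" (nr)
                     : "memory");
        return oldbit;
    }

    int main(void)
    {
        volatile unsigned long word = 0;

        printf("%d\n", demo_test_and_set_bit(3, &word));  /* 0: bit was clear */
        printf("%d\n", demo_test_and_set_bit(3, &word));  /* 1: bit already set */
        return 0;
    }
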
local.h
    56:  return GEN_BINARY_RMWcc(_ASM_SUB, l->a.counter, e, "er", i);    in local_sub_and_test()
    96:  return GEN_BINARY_RMWcc(_ASM_ADD, l->a.counter, s, "er", i);    in local_add_negative()

atomic.h
    83:  return GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, e, "er", i);    in arch_atomic_sub_and_test()
   152:  return GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, s, "er", i);    in arch_atomic_add_negative()

atomic64_64.h
    76:  return GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, e, "er", i);    in arch_atomic64_sub_and_test()
   147:  return GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, s, "er", i);    in arch_atomic64_add_negative()
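
The local.h, atomic.h and atomic64_64.h hits follow the same pattern with arithmetic instead of bit operations: one locked add/sub, then the zero ("e") or sign ("s") flag is returned as a bool. A minimal sketch of the sub-and-test case, again assuming flag-output constraints and made-up demo_* naming; add_negative is the same shape with addl and the "s" condition.

    #include <stdbool.h>
    #include <stdio.h>

    /* Illustrative only: roughly what
     * GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, e, "er", i)
     * boils down to: lock-subtract i and report whether the result hit zero,
     * taken directly from ZF rather than via a separate compare. */
    static inline bool demo_sub_and_test(int i, volatile int *counter)
    {
        bool is_zero;

        asm volatile("lock subl %2, %0"
                     : "+m" (*counter), "=@cce" (is_zero)  /* "e": ZF set -> result == 0 */
                     : "er" (i)
                     : "memory");
        return is_zero;
    }

    int main(void)
    {
        volatile int v = 5;

        printf("%d\n", demo_sub_and_test(3, &v));  /* 0: counter is now 2 */
        printf("%d\n", demo_sub_and_test(2, &v));  /* 1: counter reached 0 */
        return 0;
    }
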
qspinlock.h
    19:  * We can't use GEN_BINARY_RMWcc() inside an if() stmt because asm goto    in queued_fetch_set_pending_acquire()
    23:  val = GEN_BINARY_RMWcc(LOCK_PREFIX "btsl", lock->val.counter, c,    in queued_fetch_set_pending_acquire()

rmwcc.h
    60:  #define GEN_BINARY_RMWcc(X...) RMWcc_CONCAT(GEN_BINARY_RMWcc_, RMWcc_ARGS(X))(X)    (macro definition)

/kernel/linux/linux-6.6/arch/x86/include/asm/

sync_bitops.h
    85:  return GEN_BINARY_RMWcc("lock; " __ASM_SIZE(bts), *addr, c, "Ir", nr);    in sync_test_and_set_bit()
    98:  return GEN_BINARY_RMWcc("lock; " __ASM_SIZE(btr), *addr, c, "Ir", nr);    in sync_test_and_clear_bit()
   111:  return GEN_BINARY_RMWcc("lock; " __ASM_SIZE(btc), *addr, c, "Ir", nr);    in sync_test_and_change_bit()

bitops.h
   138:  return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(bts), *addr, c, "Ir", nr);    in arch_test_and_set_bit()
   162:  return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btr), *addr, c, "Ir", nr);    in arch_test_and_clear_bit()
   201:  return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btc), *addr, c, "Ir", nr);    in arch_test_and_change_bit()

local.h
    56:  return GEN_BINARY_RMWcc(_ASM_SUB, l->a.counter, e, "er", i);    in local_sub_and_test()
    96:  return GEN_BINARY_RMWcc(_ASM_ADD, l->a.counter, s, "er", i);    in local_add_negative()

atomic.h
    47:  return GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, e, "er", i);    in arch_atomic_sub_and_test()
    79:  return GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, s, "er", i);    in arch_atomic_add_negative()

atomic64_64.h
    39:  return GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, e, "er", i);    in arch_atomic64_sub_and_test()
    73:  return GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, s, "er", i);    in arch_atomic64_add_negative()

qspinlock.h
    19:  * We can't use GEN_BINARY_RMWcc() inside an if() stmt because asm goto    in queued_fetch_set_pending_acquire()
    23:  val = GEN_BINARY_RMWcc(LOCK_PREFIX "btsl", lock->val.counter, c,    in queued_fetch_set_pending_acquire()

rmwcc.h
    55:  #define GEN_BINARY_RMWcc(X...) CONCATENATE(GEN_BINARY_RMWcc_, COUNT_ARGS(X))(X)    (macro definition)
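
The two GEN_BINARY_RMWcc definitions above (5.10's RMWcc_CONCAT/RMWcc_ARGS and 6.6's CONCATENATE/COUNT_ARGS) are the same preprocessor trick: count the arguments, paste the count onto the macro name, and dispatch to a worker whose name ends in the argument count. A self-contained sketch of that dispatch; COUNT_ARGS/CONCATENATE here are simplified stand-ins and the DEMO_* workers are made up, not the kernel's.

    #include <stdio.h>

    /* Illustrative only: count up to 6 arguments by sliding them against a
     * reversed number list, and paste the result onto a macro name. */
    #define COUNT_ARGS(...)   COUNT_ARGS_(__VA_ARGS__, 6, 5, 4, 3, 2, 1)
    #define COUNT_ARGS_(_1, _2, _3, _4, _5, _6, n, ...)  n

    #define CONCATENATE(a, b)   CONCATENATE_(a, b)
    #define CONCATENATE_(a, b)  a##b

    /* Hypothetical per-arity workers, standing in for the per-arity
     * GEN_BINARY_RMWcc_<n> expansions in the real header. */
    #define DEMO_5(a, b, c, d, e)     printf("5-arg form: %d\n", a + b + c + d + e)
    #define DEMO_6(a, b, c, d, e, f)  printf("6-arg form: %d\n", a + b + c + d + e + f)

    /* Same shape as the rmwcc.h definition: paste the argument count onto the
     * name, then invoke the resulting worker with the original arguments. */
    #define DEMO(...) CONCATENATE(DEMO_, COUNT_ARGS(__VA_ARGS__))(__VA_ARGS__)

    int main(void)
    {
        DEMO(1, 2, 3, 4, 5);      /* expands to DEMO_5(1, 2, 3, 4, 5) */
        DEMO(1, 2, 3, 4, 5, 6);   /* expands to DEMO_6(1, 2, 3, 4, 5, 6) */
        return 0;
    }
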

/kernel/linux/linux-5.10/tools/arch/x86/include/asm/

rmwcc.h
    20:  #define GEN_BINARY_RMWcc(op, var, vcon, val, arg0, cc) \    (macro definition)
    37:  #define GEN_BINARY_RMWcc(op, var, vcon, val, arg0, cc) \    (macro definition)

/kernel/linux/linux-6.6/tools/arch/x86/include/asm/

atomic.h
    76:  GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(bts), *addr, "Ir", nr, "%0", "c");    in test_and_set_bit()
    81:  GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btc), *addr, "Ir", nr, "%0", "c");    in test_and_clear_bit()

rmwcc.h
    18:  #define GEN_BINARY_RMWcc(op, var, vcon, val, arg0, cc) \    (macro definition)
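
Unlike the arch/ headers, the tools/ copies keep the older GEN_BINARY_RMWcc(op, var, vcon, val, arg0, cc) interface, where the condition code is passed as an explicit string (for example "c" for carry in the atomic.h hits above). Before flag-output constraints, the usual way to turn such a condition code into a C value was a set<cc> instruction after the locked RMW; the sketch below shows that style. It is illustrative only, not the macro's exact expansion, and the demo_* name is made up.

    #include <stdbool.h>
    #include <stdio.h>

    /* Illustrative only: atomically set bit nr in *addr, then materialize the
     * carry flag into a byte with setc rather than reading CF via "=@ccc". */
    static inline bool demo_test_and_set_bit_setcc(long nr, volatile unsigned long *addr)
    {
        bool oldbit;

        asm volatile("lock btsq %2, %0\n\t"
                     "setc %1"                        /* CF -> oldbit */
                     : "+m" (*addr), "=qm" (oldbit)
                     : "Ir" (nr)
                     : "memory");
        return oldbit;
    }

    int main(void)
    {
        volatile unsigned long word = 0;

        printf("%d\n", demo_test_and_set_bit_setcc(5, &word));  /* 0: bit was clear */
        printf("%d\n", demo_test_and_set_bit_setcc(5, &word));  /* 1: bit already set */
        return 0;
    }
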