/kernel/linux/linux-5.10/arch/arm64/include/asm/vdso/ |
H A D | compat_barrier.h |
    17  #ifdef dmb
    18  #undef dmb  macro
    21  #define dmb(option) __asm__ __volatile__ ("dmb " #option : : : "memory")  macro
    24  #define aarch32_smp_mb()  dmb(ish)
    25  #define aarch32_smp_rmb() dmb(ishld)
    26  #define aarch32_smp_wmb() dmb(ishst)
    28  #define aarch32_smp_mb()  dmb(ish)
    30  #define aarch32_smp_wmb() dmb(ishst)
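The AArch32 vDSO barriers above are all built from one wrapper that stringizes its argument into an inline-asm DMB instruction. A minimal, self-contained sketch of that same pattern (the my_* names are illustrative, not the kernel's; it builds only for AArch64/AArch32 targets):

    /* Sketch of the stringizing-dmb pattern shown above: "dmb " #option
     * pastes the option into the instruction text, e.g. my_dmb(ish) -> "dmb ish". */
    #define my_dmb(option)  __asm__ __volatile__("dmb " #option : : : "memory")

    #define my_smp_mb()   my_dmb(ish)    /* full barrier, inner shareable     */
    #define my_smp_rmb()  my_dmb(ishld)  /* order prior loads                 */
    #define my_smp_wmb()  my_dmb(ishst)  /* order prior stores                */

    static inline void publish(int *data, int *flag)
    {
            *data = 42;
            my_smp_wmb();     /* make the data store visible before the flag */
            *flag = 1;
    }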
|
/kernel/linux/linux-5.10/drivers/s390/net/ |
H A D | ism_drv.c |
   218  static void ism_free_dmb(struct ism_dev *ism, struct smcd_dmb *dmb)  in ism_free_dmb() argument
   220      clear_bit(dmb->sba_idx, ism->sba_bitmap);  in ism_free_dmb()
   221      dma_free_coherent(&ism->pdev->dev, dmb->dmb_len,  in ism_free_dmb()
   222                        dmb->cpu_addr, dmb->dma_addr);  in ism_free_dmb()
   225  static int ism_alloc_dmb(struct ism_dev *ism, struct smcd_dmb *dmb)  in ism_alloc_dmb() argument
   229      if (PAGE_ALIGN(dmb->dmb_len) > dma_get_max_seg_size(&ism->pdev->dev))  in ism_alloc_dmb()
   232      if (!dmb->sba_idx) {  in ism_alloc_dmb()
   238      dmb->sba_idx = bit;  in ism_alloc_dmb()
   240      if (dmb  in ism_alloc_dmb()
   253  ism_register_dmb(struct smcd_dev *smcd, struct smcd_dmb *dmb)  ism_register_dmb() argument
   284  ism_unregister_dmb(struct smcd_dev *smcd, struct smcd_dmb *dmb)  ism_unregister_dmb() argument
   [all...] |
H A D | ism.h |
   116      u64 dmb;  member
   207  #define ISM_CREATE_REQ(dmb, idx, sf, offset) \
   208          ((dmb) | (idx) << 24 | (sf) << 23 | (offset))
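ISM_CREATE_REQ above packs a register-DMB request into one 64-bit word: the DMB value in the upper part, the SBA index at bit 24, the sequence flag at bit 23, and the offset in the low bits. A hedged, userspace-compilable illustration of that packing (the field widths used for unpacking are assumptions for illustration, not taken from the driver):

    #include <stdint.h>
    #include <stdio.h>

    /* Same shape as ISM_CREATE_REQ above; the MY_ prefix marks it as a
     * copy for illustration, with explicit 64-bit promotion of idx/sf. */
    #define MY_ISM_CREATE_REQ(dmb, idx, sf, offset) \
            ((dmb) | (uint64_t)(idx) << 24 | (uint64_t)(sf) << 23 | (offset))

    int main(void)
    {
            uint64_t req = MY_ISM_CREATE_REQ(0x100000000ULL, 5, 1, 0x10);

            printf("req=%#llx idx=%llu sf=%llu offset=%#llx\n",
                   (unsigned long long)req,
                   (unsigned long long)((req >> 24) & 0xff),   /* assumed 8-bit index   */
                   (unsigned long long)((req >> 23) & 0x1),
                   (unsigned long long)(req & 0x7fffff));      /* assumed 23-bit offset */
            return 0;
    }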
|
/kernel/linux/linux-5.10/arch/arm/include/asm/ |
H A D | barrier.h |
    21  #define dmb(option) __asm__ __volatile__ ("dmb " #option : : : "memory")  macro
    33  #define dmb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" \  macro
    40  #define dmb(x) __asm__ __volatile__ ("" : : : "memory")  macro
    45  #define dmb(x) __asm__ __volatile__ ("" : : : "memory")  macro
    67  #define dma_rmb() dmb(osh)
    68  #define dma_wmb() dmb(oshst)
    77  #define __smp_mb() dmb(ish)
    79  #define __smp_wmb() dmb(ishst)
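The 32-bit ARM header above picks a dmb() implementation by architecture level: the DMB instruction where it exists, the CP15 c7/c10/5 barrier on older cores that only have the coprocessor form, and a plain compiler barrier otherwise; dma_rmb()/dma_wmb() are then built from the outer-shareable variants. A hedged sketch of the driver-side pattern dma_rmb() exists for (the descriptor layout and bit meanings are hypothetical; assumes a kernel build context providing <asm/barrier.h>):

    #include <asm/barrier.h>

    struct my_desc {
            unsigned int status;          /* written by the device */
            unsigned int len;
    };

    static int my_rx_done(volatile struct my_desc *d)
    {
            if (!(d->status & 0x1))       /* hypothetical "descriptor done" bit */
                    return 0;

            dma_rmb();                    /* order the status read before reading d->len */

            return d->len;
    }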
|
/kernel/linux/linux-6.6/arch/arm/include/asm/ |
H A D | barrier.h |
    21  #define dmb(option) __asm__ __volatile__ ("dmb " #option : : : "memory")  macro
    33  #define dmb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" \  macro
    40  #define dmb(x) __asm__ __volatile__ ("" : : : "memory")  macro
    45  #define dmb(x) __asm__ __volatile__ ("" : : : "memory")  macro
    67  #define dma_rmb() dmb(osh)
    68  #define dma_wmb() dmb(oshst)
    77  #define __smp_mb() dmb(ish)
    79  #define __smp_wmb() dmb(ishst)
|
/kernel/linux/linux-6.6/arch/arm64/include/asm/vdso/ |
H A D | compat_barrier.h |
    17  #ifdef dmb
    18  #undef dmb  macro
    21  #define dmb(option) __asm__ __volatile__ ("dmb " #option : : : "memory")  macro
    23  #define aarch32_smp_mb()  dmb(ish)
    24  #define aarch32_smp_rmb() dmb(ishld)
    25  #define aarch32_smp_wmb() dmb(ishst)
|
/kernel/linux/linux-6.6/drivers/s390/net/ |
H A D | ism_drv.c |
   104      WARN(1, "%s: attempt to unregister '%s' with registered dmb(s)\n",  in ism_unregister_client()
   291  static void ism_free_dmb(struct ism_dev *ism, struct ism_dmb *dmb)  in ism_free_dmb() argument
   293      clear_bit(dmb->sba_idx, ism->sba_bitmap);  in ism_free_dmb()
   294      dma_free_coherent(&ism->pdev->dev, dmb->dmb_len,  in ism_free_dmb()
   295                        dmb->cpu_addr, dmb->dma_addr);  in ism_free_dmb()
   298  static int ism_alloc_dmb(struct ism_dev *ism, struct ism_dmb *dmb)  in ism_alloc_dmb() argument
   302      if (PAGE_ALIGN(dmb->dmb_len) > dma_get_max_seg_size(&ism->pdev->dev))  in ism_alloc_dmb()
   305      if (!dmb->sba_idx) {  in ism_alloc_dmb()
   311      dmb  in ism_alloc_dmb()
   327  ism_register_dmb(struct ism_dev *ism, struct ism_dmb *dmb, struct ism_client *client)  ism_register_dmb() argument
   363  ism_unregister_dmb(struct ism_dev *ism, struct ism_dmb *dmb)  ism_unregister_dmb() argument
   752  smcd_register_dmb(struct smcd_dev *smcd, struct smcd_dmb *dmb, struct ism_client *client)  smcd_register_dmb() argument
   758  smcd_unregister_dmb(struct smcd_dev *smcd, struct smcd_dmb *dmb)  smcd_unregister_dmb() argument
   [all...] |
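The truncated hits above (lines 305 and 311) sit inside ism_alloc_dmb()'s slot setup: pick a free SBA index from the bitmap, then back it with a coherent DMA buffer. A hedged reconstruction of that pattern follows; it is not the verbatim 6.6 function, and the bounds constants, error codes and the test_and_set_bit step are assumptions:

    static int my_alloc_dmb(struct ism_dev *ism, struct ism_dmb *dmb)
    {
            unsigned long bit;

            if (PAGE_ALIGN(dmb->dmb_len) > dma_get_max_seg_size(&ism->pdev->dev))
                    return -EINVAL;

            if (!dmb->sba_idx) {
                    /* caller did not pick a slot: find a free bit in the SBA bitmap */
                    bit = find_next_zero_bit(ism->sba_bitmap, ISM_NR_DMBS,
                                             ISM_DMB_BIT_OFFSET);
                    if (bit == ISM_NR_DMBS)
                            return -ENOSPC;
                    dmb->sba_idx = bit;
            }
            if (test_and_set_bit(dmb->sba_idx, ism->sba_bitmap))
                    return -EINVAL;               /* slot already in use */

            dmb->cpu_addr = dma_alloc_coherent(&ism->pdev->dev, dmb->dmb_len,
                                               &dmb->dma_addr, GFP_KERNEL);
            if (!dmb->cpu_addr)
                    clear_bit(dmb->sba_idx, ism->sba_bitmap);

            return dmb->cpu_addr ? 0 : -ENOMEM;
    }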
H A D | ism.h |
   116      u64 dmb;  member
   192  #define ISM_CREATE_REQ(dmb, idx, sf, offset) \
   193          ((dmb) | (idx) << 24 | (sf) << 23 | (offset))
|
/kernel/linux/linux-5.10/net/smc/ |
H A D | smc_ism.c |
   161      struct smcd_dmb dmb;  in smc_ism_unregister_dmb() local
   167      memset(&dmb, 0, sizeof(dmb));  in smc_ism_unregister_dmb()
   168      dmb.dmb_tok = dmb_desc->token;  in smc_ism_unregister_dmb()
   169      dmb.sba_idx = dmb_desc->sba_idx;  in smc_ism_unregister_dmb()
   170      dmb.cpu_addr = dmb_desc->cpu_addr;  in smc_ism_unregister_dmb()
   171      dmb.dma_addr = dmb_desc->dma_addr;  in smc_ism_unregister_dmb()
   172      dmb.dmb_len = dmb_desc->len;  in smc_ism_unregister_dmb()
   173      rc = smcd->ops->unregister_dmb(smcd, &dmb);  in smc_ism_unregister_dmb()
   185      struct smcd_dmb dmb;  in smc_ism_register_dmb() local
   [all...]
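smc_ism_unregister_dmb() above translates SMC's own buffer descriptor into the backend's struct smcd_dmb before calling the device op. The register direction uses the same field mapping in reverse; a hedged sketch of it against the 5.10 two-argument op (the function name and the dmb_desc parameter type are assumptions based on the fields the snippet dereferences):

    static int my_ism_register_dmb(struct smcd_dev *smcd, int dmb_len,
                                   struct smc_buf_desc *dmb_desc)
    {
            struct smcd_dmb dmb;
            int rc;

            memset(&dmb, 0, sizeof(dmb));
            dmb.dmb_len = dmb_len;
            dmb.sba_idx = dmb_desc->sba_idx;

            rc = smcd->ops->register_dmb(smcd, &dmb);
            if (rc)
                    return rc;

            /* copy back what the backend filled in */
            dmb_desc->sba_idx  = dmb.sba_idx;
            dmb_desc->token    = dmb.dmb_tok;
            dmb_desc->cpu_addr = dmb.cpu_addr;
            dmb_desc->dma_addr = dmb.dma_addr;
            dmb_desc->len      = dmb.dmb_len;
            return 0;
    }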
/kernel/linux/linux-6.6/net/smc/ |
H A D | smc_ism.c |
   179      struct smcd_dmb dmb;  in smc_ism_unregister_dmb() local
   185      memset(&dmb, 0, sizeof(dmb));  in smc_ism_unregister_dmb()
   186      dmb.dmb_tok = dmb_desc->token;  in smc_ism_unregister_dmb()
   187      dmb.sba_idx = dmb_desc->sba_idx;  in smc_ism_unregister_dmb()
   188      dmb.cpu_addr = dmb_desc->cpu_addr;  in smc_ism_unregister_dmb()
   189      dmb.dma_addr = dmb_desc->dma_addr;  in smc_ism_unregister_dmb()
   190      dmb.dmb_len = dmb_desc->len;  in smc_ism_unregister_dmb()
   191      rc = smcd->ops->unregister_dmb(smcd, &dmb);  in smc_ism_unregister_dmb()
   204      struct smcd_dmb dmb;  in smc_ism_register_dmb() local
   [all...]
/kernel/linux/linux-5.10/arch/arm64/include/asm/ |
H A D | barrier.h |
    22  #define dmb(opt) asm volatile("dmb " #opt : : : "memory")  macro
    48  #define dma_mb()  dmb(osh)
    49  #define dma_rmb() dmb(oshld)
    50  #define dma_wmb() dmb(oshst)
    92  #define __smp_mb()  dmb(ish)
    93  #define __smp_rmb() dmb(ishld)
    94  #define __smp_wmb() dmb(ishst)
|
H A D | atomic_ll_sc.h |
    86  ATOMIC_OP_RETURN( , dmb ish, , l, "memory", __VA_ARGS__)\
    90  ATOMIC_FETCH_OP ( , dmb ish, , l, "memory", __VA_ARGS__)\
   101  ATOMIC_FETCH_OP ( , dmb ish, , l, "memory", __VA_ARGS__)\
   182  ATOMIC64_OP_RETURN(, dmb ish, , l, "memory", __VA_ARGS__) \
   186  ATOMIC64_FETCH_OP (, dmb ish, , l, "memory", __VA_ARGS__) \
   197  ATOMIC64_FETCH_OP (, dmb ish, , l, "memory", __VA_ARGS__) \
   230  "	dmb	ish\n"  in __ll_sc_atomic64_dec_if_positive()
   290  __CMPXCHG_CASE(w, b, mb_, 8, dmb ish, , l, "memory", K)
   291  __CMPXCHG_CASE(w, h, mb_, 16, dmb ish, , l, "memory", K)
   292  __CMPXCHG_CASE(w, , mb_, 32, dmb is
   [all...]
H A D | cmpxchg.h |
    18   * barrier case is generated as release+dmb for the former and
    57  __XCHG_CASE(w, b, mb_, 8, dmb ish, nop, , a, l, "memory")
    58  __XCHG_CASE(w, h, mb_, 16, dmb ish, nop, , a, l, "memory")
    59  __XCHG_CASE(w, , mb_, 32, dmb ish, nop, , a, l, "memory")
    60  __XCHG_CASE( , , mb_, 64, dmb ish, nop, , a, l, "memory")
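The "dmb ish" arguments fed to the ATOMIC*_OP and __CMPXCHG_CASE/__XCHG_CASE generators above, together with the cmpxchg.h comment at line 18, describe how the fully ordered LL/SC variants are built: a load-exclusive/store-release-exclusive loop followed by a trailing DMB. A simplified, hedged illustration of that shape (not the actual macro expansion; AArch64 only):

    static inline int my_atomic_add_return(int i, int *v)
    {
            unsigned long tmp;
            int result;

            asm volatile(
            "1:     ldxr    %w0, %2\n"              /* load-exclusive                    */
            "       add     %w0, %w0, %w3\n"
            "       stlxr   %w1, %w0, %2\n"         /* store-release exclusive           */
            "       cbnz    %w1, 1b\n"              /* retry if the exclusive was lost   */
            "       dmb     ish\n"                  /* upgrade release to a full barrier */
            : "=&r" (result), "=&r" (tmp), "+Q" (*v)
            : "r" (i)
            : "memory");

            return result;
    }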
|
/kernel/linux/linux-6.6/arch/arm64/include/asm/ |
H A D | barrier.h |
    28  #define dmb(opt) asm volatile("dmb " #opt : : : "memory")  macro
    60  #define __dma_mb()  dmb(osh)
    61  #define __dma_rmb() dmb(oshld)
    62  #define __dma_wmb() dmb(oshst)
   119  #define __smp_mb()  dmb(ish)
   120  #define __smp_rmb() dmb(ishld)
   121  #define __smp_wmb() dmb(ishst)
|
H A D | atomic_ll_sc.h |
    86  ATOMIC_OP_RETURN( , dmb ish, , l, "memory", __VA_ARGS__)\
    90  ATOMIC_FETCH_OP ( , dmb ish, , l, "memory", __VA_ARGS__)\
   101  ATOMIC_FETCH_OP ( , dmb ish, , l, "memory", __VA_ARGS__)\
   182  ATOMIC64_OP_RETURN(, dmb ish, , l, "memory", __VA_ARGS__) \
   186  ATOMIC64_FETCH_OP (, dmb ish, , l, "memory", __VA_ARGS__) \
   197  ATOMIC64_FETCH_OP (, dmb ish, , l, "memory", __VA_ARGS__) \
   230  "	dmb	ish\n"  in __ll_sc_atomic64_dec_if_positive()
   290  __CMPXCHG_CASE(w, b, mb_, 8, dmb ish, , l, "memory", K)
   291  __CMPXCHG_CASE(w, h, mb_, 16, dmb ish, , l, "memory", K)
   292  __CMPXCHG_CASE(w, , mb_, 32, dmb is
   [all...]
H A D | cmpxchg.h |
    18   * barrier case is generated as release+dmb for the former and
    57  __XCHG_CASE(w, b, mb_, 8, dmb ish, nop, , a, l, "memory")
    58  __XCHG_CASE(w, h, mb_, 16, dmb ish, nop, , a, l, "memory")
    59  __XCHG_CASE(w, , mb_, 32, dmb ish, nop, , a, l, "memory")
    60  __XCHG_CASE( , , mb_, 64, dmb ish, nop, , a, l, "memory")
|
/kernel/linux/linux-6.6/tools/virtio/asm/ |
H A D | barrier.h |
    20  #define dmb(opt) asm volatile("dmb " #opt : : : "memory")  macro
    22  #define virt_rmb() dmb(ishld)
    23  #define virt_wmb() dmb(ishst)
    24  #define virt_store_mb(var, value) do { WRITE_ONCE(var, value); dmb(ish); } while (0)
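virt_store_mb() above pairs a WRITE_ONCE() with a full dmb(ish), the store-then-barrier idiom a virtio-style ring needs before reading a peer-written flag. A hedged usage sketch (the ring layout and field names are hypothetical; assumes the tools/virtio barrier.h shown above plus a READ_ONCE() definition from the tools compiler headers):

    struct my_ring {
            unsigned short avail_idx;     /* written by us, read by the peer */
            unsigned short peer_flags;    /* written by the peer             */
    };

    static int my_publish_and_check(struct my_ring *r, unsigned short new_idx)
    {
            /* Store the new index, then a full barrier, so the flag read
             * below cannot be reordered before the index becomes visible. */
            virt_store_mb(r->avail_idx, new_idx);

            return READ_ONCE(r->peer_flags) & 0x1;
    }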
|
/kernel/linux/linux-5.10/arch/arm/common/ |
H A D | vlock.S | 29 dmb 33 dmb 80 dmb 93 dmb
|
H A D | mcpm_head.S | 121 dmb 136 dmb 148 dmb 152 dmb 173 dmb 182 dmb 196 dmb
|
/kernel/linux/linux-6.6/arch/arm/common/ |
H A D | vlock.S | 31 dmb 35 dmb 82 dmb 95 dmb
|
H A D | mcpm_head.S | 123 dmb 138 dmb 150 dmb 154 dmb 175 dmb 184 dmb 198 dmb
|
/kernel/linux/linux-6.6/tools/testing/selftests/kvm/include/aarch64/ |
H A D | processor.h |
   142  #define dmb(opt) asm volatile("dmb " #opt : : : "memory")  macro
   144  #define dma_wmb() dmb(oshst)
   147  #define dma_rmb() dmb(oshld)
|
/kernel/linux/linux-6.6/include/linux/ |
H A D | ism.h |
    84  int ism_register_dmb(struct ism_dev *dev, struct ism_dmb *dmb,
    86  int ism_unregister_dmb(struct ism_dev *dev, struct ism_dmb *dmb);
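ism_register_dmb()/ism_unregister_dmb() above are the 6.6 client-facing entry points; the register call also carries the owning ism_client (the continuation of line 84 is not shown in the hit). A hedged sketch of how a client might drive them, where the my_* wrappers and the idea that only dmb_len must be set on input are assumptions:

    static int my_setup_dmb(struct ism_dev *dev, struct ism_client *client,
                            struct ism_dmb *dmb, unsigned int len)
    {
            int rc;

            memset(dmb, 0, sizeof(*dmb));
            dmb->dmb_len = len;

            rc = ism_register_dmb(dev, dmb, client);
            if (rc)
                    return rc;

            /* dmb->cpu_addr / dmb->dma_addr now describe the registered buffer */
            return 0;
    }

    static void my_teardown_dmb(struct ism_dev *dev, struct ism_dmb *dmb)
    {
            ism_unregister_dmb(dev, dmb);
    }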
|
/kernel/linux/linux-6.6/include/net/ |
H A D | smc.h |
    58  	int (*register_dmb)(struct smcd_dev *dev, struct smcd_dmb *dmb,
    60  	int (*unregister_dmb)(struct smcd_dev *dev, struct smcd_dmb *dmb);
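struct smcd_ops above is where a backend (such as the s390 ISM driver) plugs in its DMB handling; in 6.6 the register_dmb op also takes the ism_client (continuation line not shown in the hit). A hedged sketch of the wiring only, with empty bodies standing in for real device work (the my_* names are hypothetical and the remaining ops are omitted):

    static int my_register_dmb(struct smcd_dev *dev, struct smcd_dmb *dmb,
                               struct ism_client *client)
    {
            /* allocate/register the buffer with the device; on success fill
             * dmb->dmb_tok, dmb->cpu_addr and dmb->dma_addr for the caller */
            return 0;
    }

    static int my_unregister_dmb(struct smcd_dev *dev, struct smcd_dmb *dmb)
    {
            /* tear down the mapping identified by the fields set above */
            return 0;
    }

    static const struct smcd_ops my_smcd_ops = {
            .register_dmb   = my_register_dmb,
            .unregister_dmb = my_unregister_dmb,
            /* remaining ops omitted in this sketch */
    };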
|
/kernel/liteos_a/arch/arm/arm/src/ |
H A D | los_dispatch.S | 216 dmb 225 dmb 230 dmb
|