xref: /third_party/libbpf/include/linux/compiler.h (revision 7c2aad20)
1/* SPDX-License-Identifier: (LGPL-2.1 OR BSD-2-Clause) */
2
3#ifndef __LINUX_COMPILER_H
4#define __LINUX_COMPILER_H
5
/*
 * Branch-prediction hints: tell the compiler which way a condition is
 * expected to go so it can lay out the likely path as the fall-through.
 */
#define likely(x)		__builtin_expect(!!(x), 1)
#define unlikely(x)		__builtin_expect(!!(x), 0)

/*
 * READ_ONCE()/WRITE_ONCE(): force exactly one access through a
 * volatile-qualified lvalue, so the compiler cannot cache, re-read,
 * tear, or elide it.  This is a compiler-ordering primitive only; it
 * does NOT order the access against other CPUs (use the smp_* barriers
 * below for that).
 *
 * The argument is fully parenthesized (&(x), not &x) per standard macro
 * hygiene, and WRITE_ONCE()'s expansion is wrapped in parentheses so it
 * is safe inside a larger expression.  __typeof__/__asm__ are used
 * instead of the typeof/asm spellings so the header also compiles in
 * strict ISO modes (-std=c11) where the plain keywords are unavailable.
 */
#define READ_ONCE(x)		(*(volatile __typeof__(x) *)&(x))
#define WRITE_ONCE(x, v)	((*(volatile __typeof__(x) *)&(x)) = (v))

/* Compiler barrier: forbids the compiler from moving memory accesses
 * across it.  Emits no instructions. */
#define barrier()		__asm__ volatile("" ::: "memory")
13
#if defined(__x86_64__)

/*
 * x86-64 is TSO (total store order): the CPU never reorders loads with
 * older loads nor stores with older stores, so read and write barriers
 * only need to stop COMPILER reordering.
 */
# define smp_rmb()		barrier()
# define smp_wmb()		barrier()
/*
 * Full barrier: a locked read-modify-write is a full memory barrier on
 * x86 and is typically cheaper than mfence.  The dummy target is the
 * stack; offset -132 places the 4-byte access just below the 128-byte
 * red zone so live leaf-function data is not clobbered.  "cc" is
 * clobbered because addl writes the flags.
 */
# define smp_mb()		asm volatile("lock; addl $0,-132(%%rsp)" ::: "memory", "cc")

/*
 * Under TSO a plain store already has release semantics and a plain
 * load has acquire semantics at the hardware level; only a compiler
 * barrier is needed on each side.
 */
# define smp_store_release(p, v)		\
do {						\
	barrier();				\
	WRITE_ONCE(*p, v);			\
} while (0)

# define smp_load_acquire(p)			\
({						\
	typeof(*p) ___p = READ_ONCE(*p);	\
	barrier();				\
	___p;					\
})

#elif defined(__aarch64__)

/*
 * arm64 dmb (data memory barrier) in the inner-shareable domain:
 * "ishld" orders loads, "ishst" orders stores, plain "ish" is a full
 * barrier.  No store-release/load-acquire here, so those fall through
 * to the generic smp_mb()-based definitions below.
 */
# define smp_rmb()		asm volatile("dmb ishld" ::: "memory")
# define smp_wmb()		asm volatile("dmb ishst" ::: "memory")
# define smp_mb()		asm volatile("dmb ish" ::: "memory")

#endif
40
/*
 * Generic fallbacks for architectures without a hand-tuned definition
 * above.  __sync_synchronize() emits a full hardware memory barrier on
 * every GCC/Clang target.
 */
#ifndef smp_mb
# define smp_mb()		__sync_synchronize()
#endif

/* With nothing finer-grained available, a full barrier safely
 * over-approximates both the read and the write barrier. */
#ifndef smp_rmb
# define smp_rmb()		smp_mb()
#endif

#ifndef smp_wmb
# define smp_wmb()		smp_mb()
#endif

/*
 * Store-release: every memory access before the store is ordered before
 * it.  The pointer argument is parenthesized (*(p)) so expression
 * arguments such as `base + idx` expand correctly; __typeof__ is used
 * below so strict ISO modes (-std=c11) also accept the header.
 */
#ifndef smp_store_release
# define smp_store_release(p, v)		\
do {						\
	smp_mb();				\
	WRITE_ONCE(*(p), v);			\
} while (0)
#endif

/*
 * Load-acquire: the load is ordered before every memory access that
 * follows it.  Implemented as a GNU statement expression yielding the
 * loaded value.
 */
#ifndef smp_load_acquire
# define smp_load_acquire(p)				\
({							\
	__typeof__(*(p)) ___p = READ_ONCE(*(p));	\
	smp_mb();					\
	___p;						\
})
#endif
69
70#endif /* __LINUX_COMPILER_H */
71