Lines matching references to ptr (a brief usage sketch follows the listing):

28 static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
39 prefetchw((const void *)ptr);
51 : "r" (x), "r" (ptr)
61 : "r" (x), "r" (ptr)
72 : "r" (x), "r" (ptr)
81 ret = *(volatile unsigned char *)ptr;
82 *(volatile unsigned char *)ptr = x;
88 ret = *(volatile unsigned long *)ptr;
89 *(volatile unsigned long *)ptr = x;
97 : "r" (x), "r" (ptr)
104 : "r" (x), "r" (ptr)
110 __bad_xchg(ptr, size), ret = 0;
117 #define xchg_relaxed(ptr, x) ({ \
118 (__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), \
119 sizeof(*(ptr))); \
137 #define cmpxchg_local(ptr, o, n) ({ \
138 (__typeof(*ptr))__cmpxchg_local_generic((ptr), \
141 sizeof(*(ptr))); \
144 #define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
150 extern void __bad_cmpxchg(volatile void *ptr, int size);
156 static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
161 prefetchw((const void *)ptr);
173 : "r" (ptr), "Ir" (old), "r" (new)
185 : "r" (ptr), "Ir" (old), "r" (new)
198 : "r" (ptr), "Ir" (old), "r" (new)
203 __bad_cmpxchg(ptr, size);
210 #define cmpxchg_relaxed(ptr,o,n) ({ \
211 (__typeof__(*(ptr)))__cmpxchg((ptr), \
214 sizeof(*(ptr))); \
217 static inline unsigned long __cmpxchg_local(volatile void *ptr,
227 ret = __cmpxchg_local_generic(ptr, old, new, size);
231 ret = __cmpxchg(ptr, old, new, size);
237 #define cmpxchg_local(ptr, o, n) ({ \
238 (__typeof(*ptr))__cmpxchg_local((ptr), \
241 sizeof(*(ptr))); \
244 static inline unsigned long long __cmpxchg64(unsigned long long *ptr,
251 prefetchw(ptr);
262 : "=&r" (res), "=&r" (oldval), "+Qo" (*ptr)
263 : "r" (ptr), "r" (old), "r" (new)
269 #define cmpxchg64_relaxed(ptr, o, n) ({ \
270 (__typeof__(*(ptr)))__cmpxchg64((ptr), \
275 #define cmpxchg64_local(ptr, o, n) cmpxchg64_relaxed((ptr), (o), (n))
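
These matches appear to come from the Linux kernel's 32-bit ARM asm/cmpxchg.h: __xchg() and __cmpxchg() dispatch on sizeof(*(ptr)), unsupported sizes fall through to the __bad_xchg()/__bad_cmpxchg() link-time errors, and __cmpxchg64() handles 64-bit operands through inline assembly with the "+Qo" (*ptr) operand. As a rough illustration of the semantics the listed _relaxed macros provide (not the kernel API itself), the sketch below uses the GCC/Clang __atomic builtins in user space; the variable names and values are made up for the example.

    /*
     * Minimal user-space sketch of the semantics behind xchg_relaxed(),
     * cmpxchg_relaxed() and cmpxchg64_relaxed(), using the GCC/Clang
     * __atomic builtins as a stand-in for the kernel macros listed above.
     */
    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        unsigned long word = 1;
        uint64_t wide = 5;

        /* xchg_relaxed(&word, 2): store 2, return the previous value. */
        unsigned long old = __atomic_exchange_n(&word, 2UL, __ATOMIC_RELAXED);
        printf("xchg: old=%lu new=%lu\n", old, word);

        /*
         * cmpxchg_relaxed(&word, 2, 3): store 3 only if the current value
         * is 2; 'expected' is updated with the value actually observed.
         */
        unsigned long expected = 2;
        int swapped = __atomic_compare_exchange_n(&word, &expected, 3UL,
                                                  0 /* strong */,
                                                  __ATOMIC_RELAXED,
                                                  __ATOMIC_RELAXED);
        printf("cmpxchg: swapped=%d observed=%lu now=%lu\n",
               swapped, expected, word);

        /* cmpxchg64_relaxed(&wide, 5, 6): same idea on a 64-bit operand. */
        uint64_t expected64 = 5;
        __atomic_compare_exchange_n(&wide, &expected64, (uint64_t)6,
                                    0, __ATOMIC_RELAXED, __ATOMIC_RELAXED);
        printf("cmpxchg64: now=%llu\n", (unsigned long long)wide);

        return 0;
    }

Any recent GCC or Clang can build and run this (e.g. cc -std=c11 sketch.c). The kernel macros differ mainly in that they cast the result back through __typeof__(*(ptr)) and reject unsupported operand sizes at link time via __bad_xchg()/__bad_cmpxchg(), as the matched lines above show.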