Lines matching references to ptr in the xchg/cmpxchg helpers:

29 __arch_xchg(unsigned long x, volatile void *ptr, int size)
40 prefetchw((const void *)ptr);
52 : "r" (x), "r" (ptr)
62 : "r" (x), "r" (ptr)
73 : "r" (x), "r" (ptr)
82 ret = *(volatile unsigned char *)ptr;
83 *(volatile unsigned char *)ptr = x;
89 ret = *(volatile unsigned long *)ptr;
90 *(volatile unsigned long *)ptr = x;
98 : "r" (x), "r" (ptr)
105 : "r" (x), "r" (ptr)
111 __bad_xchg(ptr, size), ret = 0;
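
The matches above (file lines 29 through 111) come from the size-dispatched exchange helper: the target is prefetched for write, each operand size gets its own instruction sequence, and unsupported sizes are diverted to an out-of-line error hook, __bad_xchg(). Below is a minimal sketch of that dispatch idiom under invented names; the atomic swp/exclusive-access sequences are elided, and the error hook shown here is the link-error flavour of the trick (some implementations report the bad size at run time instead).

/* Sketch only: invented names, non-atomic bodies, dispatch shape only. */
extern void __bad_swap_size(void);	/* deliberately left undefined */

static inline unsigned long
xchg_sketch(unsigned long x, volatile void *ptr, int size)
{
	unsigned long ret;

	switch (size) {
	case 1:					/* byte: plain load/store here; */
		ret = *(volatile unsigned char *)ptr;	/* the real helper is atomic */
		*(volatile unsigned char *)ptr = x;
		break;
	case 4:					/* word */
		ret = *(volatile unsigned long *)ptr;
		*(volatile unsigned long *)ptr = x;
		break;
	default:
		/*
		 * size is a compile-time constant at every call site, so a
		 * valid size makes this branch vanish; an invalid one leaves
		 * a reference to an undefined symbol and the link fails.
		 */
		__bad_swap_size();
		ret = 0;
	}
	return ret;
}
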
118 #define arch_xchg_relaxed(ptr, x) ({ \
119 (__typeof__(*(ptr)))__arch_xchg((unsigned long)(x), (ptr), \
120 sizeof(*(ptr))); \
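
File lines 118-120 are the typed wrapper: casting the result through __typeof__(*(ptr)) lets the macro hand back the pointee's own type even though the underlying helper works in unsigned long. A hedged caller-side illustration follows; the function and variable names are invented, and real code would normally reach this through the generic xchg()/xchg_relaxed() wrappers rather than the arch_ macro directly.

#include <linux/atomic.h>	/* xchg/cmpxchg family */

/* Hypothetical hand-off flag: publish the new state, learn the old one. */
static inline int flag_set_busy(int *flag)
{
	/* Returns the previous value, typed as int because *flag is int. */
	return arch_xchg_relaxed(flag, 1);
}
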
138 #define arch_cmpxchg_local(ptr, o, n) ({ \
139 (__typeof(*ptr))__generic_cmpxchg_local((ptr), \
142 sizeof(*(ptr))); \
145 #define arch_cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
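
File lines 138-145 wire the "local" variants, which only have to be atomic with respect to the current CPU, to generic helpers instead of the SMP-safe machinery. The sketch below shows what such a local compare-and-exchange boils down to in principle: mask interrupts, compare, conditionally store, restore. The name is invented and this is not the generic helper's actual body.

#include <linux/irqflags.h>	/* local_irq_save()/local_irq_restore() */

/* Sketch of a current-CPU-only compare-and-exchange for word-sized data. */
static inline unsigned long local_cas_sketch(volatile unsigned long *ptr,
					     unsigned long old,
					     unsigned long new)
{
	unsigned long flags, prev;

	local_irq_save(flags);		/* no interrupt can race on this CPU */
	prev = *ptr;
	if (prev == old)
		*ptr = new;
	local_irq_restore(flags);

	return prev;			/* caller checks prev == old for success */
}
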
151 extern void __bad_cmpxchg(volatile void *ptr, int size);
157 static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
162 prefetchw((const void *)ptr);
174 : "r" (ptr), "Ir" (old), "r" (new)
186 : "r" (ptr), "Ir" (old), "r" (new)
199 : "r" (ptr), "Ir" (old), "r" (new)
204 __bad_cmpxchg(ptr, size);
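
File line 151 declares __bad_cmpxchg(), the error hook for unsupported operand sizes, and lines 157-204 are the size-dispatched compare-and-swap whose inline assembly takes exactly the operands quoted above: the pointer, the expected value ("Ir" allows an immediate), and the replacement. Below is a hedged reconstruction of the kind of exclusive-load/exclusive-store loop those constraints drive, shown for the 32-bit case; the function name is invented and ARMv6+ is assumed.

/* Sketch: 32-bit compare-and-swap built on ldrex/strex (ARMv6+ assumed). */
static inline unsigned long cas32_sketch(volatile void *ptr,
					 unsigned long old, unsigned long new)
{
	unsigned long oldval, res;

	do {
		asm volatile(
		"ldrex	%1, [%2]\n"		/* exclusive load of *ptr */
		"mov	%0, #0\n"		/* assume no store attempt */
		"teq	%1, %3\n"		/* current value == old? */
		"strexeq %0, %4, [%2]\n"	/* if so, try to store new */
			: "=&r" (res), "=&r" (oldval)
			: "r" (ptr), "Ir" (old), "r" (new)
			: "memory", "cc");
	} while (res);		/* res != 0 means the exclusive store failed */

	return oldval;		/* success iff the returned value equals old */
}
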
211 #define arch_cmpxchg_relaxed(ptr,o,n) ({ \
212 (__typeof__(*(ptr)))__cmpxchg((ptr), \
215 sizeof(*(ptr))); \
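
File lines 211-215 add the typed, relaxed-ordering cmpxchg wrapper on top of __cmpxchg(). A hedged usage illustration follows: the function and field names are invented, and ordinary code would use the generic cmpxchg_relaxed()/try_cmpxchg() interfaces from <linux/atomic.h> rather than the arch_ macro; the loop shape is the point.

#include <linux/atomic.h>	/* cmpxchg family */
#include <linux/compiler.h>	/* READ_ONCE() */

/* Hypothetical lock-free statistics bump built on a compare-and-swap loop. */
static inline unsigned int stat_inc(unsigned int *stat)
{
	unsigned int old, new;

	do {
		old = READ_ONCE(*stat);		/* snapshot the current value */
		new = old + 1;
	} while (arch_cmpxchg_relaxed(stat, old, new) != old);	/* retry on race */

	return new;
}
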
218 static inline unsigned long __cmpxchg_local(volatile void *ptr,
228 ret = __generic_cmpxchg_local(ptr, old, new, size);
232 ret = __cmpxchg(ptr, old, new, size);
238 #define arch_cmpxchg_local(ptr, o, n) ({ \
239 (__typeof(*ptr))__cmpxchg_local((ptr), \
242 sizeof(*(ptr))); \
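
File lines 218-242 are the other flavour of the "local" compare-and-exchange: the matches at lines 228 and 232 show it picking between the generic interrupt-masking helper and the exclusive-access __cmpxchg() depending on operand size. A hedged reconstruction of that dispatch is sketched below; the exact config guard is an assumption, the idea being that the oldest supported core lacks byte/halfword exclusives.

#include <asm-generic/cmpxchg-local.h>	/* __generic_cmpxchg_local() */

/* Sketch of the size dispatch around file lines 228 and 232 (guard assumed). */
static inline unsigned long cmpxchg_local_sketch(volatile void *ptr,
						 unsigned long old,
						 unsigned long new, int size)
{
	switch (size) {
#ifdef CONFIG_CPU_V6	/* assumption: no byte/halfword exclusives on ARMv6 */
	case 1:
	case 2:
		/* current-CPU-only, so the irq-masking generic helper is enough */
		return __generic_cmpxchg_local(ptr, old, new, size);
#endif
	default:
		return __cmpxchg(ptr, old, new, size);	/* ldrex/strex path */
	}
}
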
245 static inline unsigned long long __cmpxchg64(unsigned long long *ptr,
252 prefetchw(ptr);
263 : "=&r" (res), "=&r" (oldval), "+Qo" (*ptr)
264 : "r" (ptr), "r" (old), "r" (new)
270 #define arch_cmpxchg64_relaxed(ptr, o, n) ({ \
271 (__typeof__(*(ptr)))__cmpxchg64((ptr), \
276 #define arch_cmpxchg64_local(ptr, o, n) arch_cmpxchg64_relaxed((ptr), (o), (n))
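
File lines 270-276 finish the 64-bit interface: a typed relaxed wrapper over __cmpxchg64(), with the "local" variant defined straight in terms of the relaxed one, the fully atomic sequence already covering the weaker current-CPU-only requirement. A hedged one-shot usage illustration follows; the function and slot names are invented.

#include <linux/atomic.h>	/* cmpxchg family */
#include <linux/types.h>	/* u64, bool */

/* Hypothetical one-time publish: claim a 64-bit slot only if still empty. */
static inline bool cookie_install(u64 *slot, u64 cookie)
{
	/* Succeeds only for the first caller; later callers see non-zero. */
	return arch_cmpxchg64_relaxed(slot, 0, cookie) == 0;
}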