1/* SPDX-License-Identifier: GPL-2.0 */
2#ifndef __ARCH_H8300_CMPXCHG__
3#define __ARCH_H8300_CMPXCHG__
4
5#include <linux/irqflags.h>
6
/*
 * xchg(ptr, x) - atomically exchange *ptr with x; evaluates to the old value.
 *
 * Type-generic front end for __xchg(): passes the operand width via
 * sizeof(*(ptr)) so __xchg() can select the matching mov width, and casts
 * the returned unsigned long back to the pointed-to type.
 */
#define xchg(ptr, x) \
	((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), \
				    sizeof(*(ptr))))
10
/*
 * __xg(x) casts x to a pointer to an oversized dummy struct for use as an
 * "m" asm operand in __xchg().  The large type makes the memory operand
 * cover a wide region, which presumably stops the compiler from assuming
 * only a single word at ptr is touched / cached across the asm — a
 * long-standing idiom borrowed from other arches; confirm it is still the
 * intended effect with current GCC.
 */
struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((volatile struct __xchg_dummy *)(x))
13
/*
 * __xchg - exchange @x with the value at @ptr.
 *
 * @x:    new value to store at @ptr
 * @ptr:  memory location to exchange
 * @size: access width in bytes (1, 2 or 4)
 *
 * Atomicity with respect to the local CPU is provided by disabling
 * interrupts around the load/store pair (local_irq_save/restore); there
 * is no bus locking here.
 *
 * Returns the previous value at @ptr; an unsupported @size performs no
 * exchange and returns 0.
 *
 * NOTE(review): the asm writes to the "m" operand (*__xg(ptr)) even
 * though it is listed as an input, relying on __volatile__ and the
 * __xg() dummy-struct cast to keep the compiler honest — consider a
 * "+m" output constraint; confirm against the h8300 toolchain.
 */
static inline unsigned long __xchg(unsigned long x,
				   volatile void *ptr, int size)
{
	unsigned long tmp, flags;

	local_irq_save(flags);

	switch (size) {
	case 1:
		/* tmp = *(u8 *)ptr; *(u8 *)ptr = x; */
		__asm__ __volatile__
			("mov.b %2,%0\n\t"
			 "mov.b %1,%2"
			 : "=&r" (tmp) : "r" (x), "m" (*__xg(ptr)));
		break;
	case 2:
		/* tmp = *(u16 *)ptr; *(u16 *)ptr = x; */
		__asm__ __volatile__
			("mov.w %2,%0\n\t"
			 "mov.w %1,%2"
			 : "=&r" (tmp) : "r" (x), "m" (*__xg(ptr)));
		break;
	case 4:
		/* tmp = *(u32 *)ptr; *(u32 *)ptr = x; */
		__asm__ __volatile__
			("mov.l %2,%0\n\t"
			 "mov.l %1,%2"
			 : "=&r" (tmp) : "r" (x), "m" (*__xg(ptr)));
		break;
	default:
		/* Unsupported size: leave *ptr untouched, report 0. */
		tmp = 0;
	}
	local_irq_restore(flags);
	return tmp;
}
46
47#include <asm-generic/cmpxchg-local.h>
48
49/*
50 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
51 * them available.
52 */
/*
 * cmpxchg_local(ptr, o, n) - compare-and-exchange, atomic only w.r.t. the
 * current CPU: if *ptr == o, store n; evaluates to the old value cast back
 * to the pointed-to type.  Delegates to the generic helper from
 * <asm-generic/cmpxchg-local.h>, passing the operand width explicitly.
 */
#define cmpxchg_local(ptr, o, n)					 \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr),		 \
						     (unsigned long)(o), \
						     (unsigned long)(n), \
						     sizeof(*(ptr))))
/* 64-bit variant; no cast needed, the generic helper handles the width. */
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
59
60#ifndef CONFIG_SMP
61#include <asm-generic/cmpxchg.h>
62#endif
63
/* atomic_xchg(v, new): exchange on an atomic_t, built on xchg() over v->counter. */
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
65
66#endif /* __ARCH_H8300_CMPXCHG__ */
67