1/* SPDX-License-Identifier: GPL-2.0-only */
2/*
3 * Copyright (C) 2012 ARM Ltd.
4 */
5#ifndef __ASM_FUTEX_H
6#define __ASM_FUTEX_H
7
8#include <linux/futex.h>
9#include <linux/uaccess.h>
10
11#include <asm/errno.h>
12
13#define FUTEX_MAX_LOOPS	128 /* What's the largest number you can think of? */
14
/*
 * Run one futex read-modify-write operation on the user word at *uaddr
 * using an LDXR/STLXR exclusive-access loop.
 *
 * 'insn' is a single instruction that computes the new value into tmp
 * (%w3) from the loaded old value (%w1) and/or oparg (%w5).
 *
 * asm operand map:
 *   %w0 = ret    : 0 on success, -EFAULT on fault, -EAGAIN on loop exhaustion
 *   %w1 = oldval : value loaded from the user word
 *   %2  = *uaddr : the user word itself ("+Q" memory operand)
 *   %w3 = tmp    : new value produced by 'insn', stored back by STLXR
 *   %w4 = loops  : retry budget, counts down from FUTEX_MAX_LOOPS
 *   %w5 = oparg, %w6 = -EFAULT, %w7 = -EAGAIN
 *
 * The STLXR status flag (0 on success, 1 on failure) is subtracted from
 * the loop counter, so the loop is bounded: after FUTEX_MAX_LOOPS failed
 * exclusive stores we bail out with -EAGAIN rather than livelocking the
 * kernel on a contended/constantly-dirtied user cacheline.
 *
 * Faults on the load (1:) or store (2:) are redirected by the extable
 * entries to the fixup at 4:, which sets -EFAULT and rejoins at 3:.
 * All exits pass through the DMB ISH, which combines with the
 * store-release (STLXR) to give full-barrier semantics.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, tmp, oparg)		\
do {									\
	unsigned int loops = FUTEX_MAX_LOOPS;				\
									\
	uaccess_enable();	/* open the user-access window */	\
	asm volatile(							\
"	prfm	pstl1strm, %2\n"					\
"1:	ldxr	%w1, %2\n"						\
	insn "\n"							\
"2:	stlxr	%w0, %w3, %2\n"						\
"	cbz	%w0, 3f\n"						\
"	sub	%w4, %w4, %w0\n"					\
"	cbnz	%w4, 1b\n"						\
"	mov	%w0, %w7\n"						\
"3:\n"									\
"	dmb	ish\n"							\
"	.pushsection .fixup,\"ax\"\n"					\
"	.align	2\n"							\
"4:	mov	%w0, %w6\n"						\
"	b	3b\n"							\
"	.popsection\n"							\
	_ASM_EXTABLE(1b, 4b)						\
	_ASM_EXTABLE(2b, 4b)						\
	: "=&r" (ret), "=&r" (oldval), "+Q" (*uaddr), "=&r" (tmp),	\
	  "+r" (loops)							\
	: "r" (oparg), "Ir" (-EFAULT), "Ir" (-EAGAIN)			\
	: "memory");							\
	uaccess_disable();						\
} while (0)
44
/*
 * arch_futex_atomic_op_inuser - atomically apply a FUTEX_OP_* to a user word.
 * @op:     one of FUTEX_OP_{SET,ADD,OR,ANDN,XOR}
 * @oparg:  operand for @op
 * @oval:   out-parameter; receives the previous value of the word, but
 *          only when the operation succeeded (ret == 0)
 * @_uaddr: untrusted userspace pointer to the 32-bit futex word
 *
 * Returns 0 on success, -EFAULT for a bad address or a fault during the
 * access, -EAGAIN if the exclusive-store loop exhausted its retry budget,
 * or -ENOSYS for an unrecognised @op.
 */
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *_uaddr)
{
	int oldval = 0, ret, tmp;
	/*
	 * Use a masked copy of the pointer for the actual access;
	 * __uaccess_mask_ptr() sanitises it against speculative
	 * out-of-range dereference.
	 */
	u32 __user *uaddr = __uaccess_mask_ptr(_uaddr);

	/* Range-check the raw user pointer before touching it. */
	if (!access_ok(_uaddr, sizeof(u32)))
		return -EFAULT;

	/*
	 * Each insn computes the new value into tmp (%w3) from the loaded
	 * old value (%w1) and/or oparg (%w5) — see __futex_atomic_op().
	 */
	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov	%w3, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("orr	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_ANDN:
		/* AND-NOT: implemented as AND with the complemented operand. */
		__futex_atomic_op("and	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("eor	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	if (!ret)
		*oval = oldval;

	return ret;
}
84
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange on a user futex word.
 * @uval:   out-parameter; on success (ret == 0) receives the value that was
 *          read from the word — the caller compares it against @oldval to
 *          decide whether the exchange actually happened
 * @_uaddr: untrusted userspace pointer to the 32-bit futex word
 * @oldval: expected current value
 * @newval: value to store if the word still equals @oldval
 *
 * Returns 0 on success (including a compare mismatch, which is reported
 * through *uval), -EFAULT on a bad address or fault, -EAGAIN if the
 * exclusive-store loop exhausted its retry budget.
 *
 * asm operand map:
 *   %w0 = ret, %w1 = val (loaded word), %2 = *uaddr, %w3 = tmp
 *   (compare scratch, then STLXR status), %w4 = loops,
 *   %w5 = oldval, %w6 = newval, %w7 = -EFAULT, %w8 = -EAGAIN
 *
 * Flow: load-exclusive at 1:, branch to 4: if the word differs from
 * oldval (ret stays 0, no store, no barrier needed), otherwise
 * store-release-exclusive at 2:; on store failure decrement the bounded
 * retry counter and either retry or give up with -EAGAIN. The success
 * path runs the DMB ISH at 3:. Faults at 1:/2: are routed via the
 * extable to 5:, which sets -EFAULT and rejoins at 4:.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *_uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;	/* "+r": only overwritten on the failure paths */
	unsigned int loops = FUTEX_MAX_LOOPS;
	u32 val, tmp;
	u32 __user *uaddr;

	/* Range-check the raw user pointer before touching it. */
	if (!access_ok(_uaddr, sizeof(u32)))
		return -EFAULT;

	/* Masked pointer guards against speculative out-of-range access. */
	uaddr = __uaccess_mask_ptr(_uaddr);
	uaccess_enable();	/* open the user-access window */
	asm volatile("// futex_atomic_cmpxchg_inatomic\n"
"	prfm	pstl1strm, %2\n"
"1:	ldxr	%w1, %2\n"
"	sub	%w3, %w1, %w5\n"
"	cbnz	%w3, 4f\n"
"2:	stlxr	%w3, %w6, %2\n"
"	cbz	%w3, 3f\n"
"	sub	%w4, %w4, %w3\n"
"	cbnz	%w4, 1b\n"
"	mov	%w0, %w8\n"
"3:\n"
"	dmb	ish\n"
"4:\n"
"	.pushsection .fixup,\"ax\"\n"
"5:	mov	%w0, %w7\n"
"	b	4b\n"
"	.popsection\n"
	_ASM_EXTABLE(1b, 5b)
	_ASM_EXTABLE(2b, 5b)
	: "+r" (ret), "=&r" (val), "+Q" (*uaddr), "=&r" (tmp), "+r" (loops)
	: "r" (oldval), "r" (newval), "Ir" (-EFAULT), "Ir" (-EAGAIN)
	: "memory");
	uaccess_disable();

	if (!ret)
		*uval = val;

	return ret;
}
128
129#endif /* __ASM_FUTEX_H */
130