1/* SPDX-License-Identifier: GPL-2.0-only */
2/*
3 * Copyright (C) 2013 ARM Ltd.
4 */
5#ifndef __ASM_WORD_AT_A_TIME_H
6#define __ASM_WORD_AT_A_TIME_H
7
8#include <linux/uaccess.h>
9
10#ifndef __AARCH64EB__
11
12#include <linux/kernel.h>
13
/*
 * Constants for the little-endian zero-byte search:
 *   one_bits  = 0x0101...01 (REPEAT_BYTE(0x01))
 *   high_bits = 0x8080...80 (REPEAT_BYTE(0x80))
 * used by has_zero() as (a - one_bits) & ~a & high_bits.
 */
struct word_at_a_time {
	const unsigned long one_bits, high_bits;
};

/* Positional initializer — member order above must not change. */
#define WORD_AT_A_TIME_CONSTANTS { REPEAT_BYTE(0x01), REPEAT_BYTE(0x80) }
19
20static inline unsigned long has_zero(unsigned long a, unsigned long *bits,
21				     const struct word_at_a_time *c)
22{
23	unsigned long mask = ((a - c->one_bits) & ~a) & c->high_bits;
24	*bits = mask;
25	return mask;
26}
27
28#define prep_zero_mask(a, bits, c) (bits)
29
/*
 * Convert the detector word from has_zero()/prep_zero_mask() into a
 * byte mask: 0xff in every byte that precedes the first zero byte
 * (little-endian), clear elsewhere.
 */
static inline unsigned long create_zero_mask(unsigned long bits)
{
	return ((bits - 1) & ~bits) >> 7;
}
35
/*
 * Byte index of the first zero byte, given the mask produced by
 * create_zero_mask(): count the 0xff bytes preceding it, i.e. the
 * highest set bit position divided by 8.
 */
static inline unsigned long find_zero(unsigned long mask)
{
	unsigned long byte_index = fls64(mask) >> 3;

	return byte_index;
}

/* create_zero_mask() already yields the byte mask directly. */
#define zero_bytemask(mask) (mask)
42
43#else	/* __AARCH64EB__ */
44#include <asm-generic/word-at-a-time.h>
45#endif
46
/*
 * Load an unaligned word from kernel space.
 *
 * In the (very unlikely) case of the word being a page-crosser
 * and the next page not being mapped, take the exception and
 * return zeroes in the non-existing part.
 */
static inline unsigned long load_unaligned_zeropad(const void *addr)
{
	unsigned long ret, tmp;

	/* Load word from unaligned pointer addr */
	asm(
	/* 1: the unaligned load itself; may fault if the word crosses
	 * into an unmapped page. */
	"1:	ldr	%0, %3\n"
	"2:\n"
	/*
	 * Fault fixup, reached via the _ASM_EXTABLE entry below:
	 * redo the load from the aligned-down address (mapped, since
	 * the first part of the word was readable), then shift the
	 * valid bytes into place so the missing part reads as zero.
	 */
	"	.pushsection .fixup,\"ax\"\n"
	"	.align 2\n"
	"3:	bic	%1, %2, #0x7\n"		/* tmp = addr & ~7 (align down) */
	"	ldr	%0, [%1]\n"		/* aligned reload — cannot fault */
	"	and	%1, %2, #0x7\n"		/* tmp = addr & 7 (byte offset) */
	"	lsl	%1, %1, #0x3\n"		/* offset in bits (x8) */
#ifndef __AARCH64EB__
	"	lsr	%0, %0, %1\n"		/* LE: shift wanted bytes down */
#else
	"	lsl	%0, %0, %1\n"		/* BE: shift wanted bytes up */
#endif
	"	b	2b\n"			/* resume after the load */
	"	.popsection\n"
	_ASM_EXTABLE(1b, 3b)
	: "=&r" (ret), "=&r" (tmp)
	: "r" (addr), "Q" (*(unsigned long *)addr));

	return ret;
}
81
82#endif /* __ASM_WORD_AT_A_TIME_H */
83