1/* SPDX-License-Identifier: GPL-2.0-only */
2/*
3 * Copyright (C) 2013 ARM Ltd.
4 */
5#ifndef __ASM_WORD_AT_A_TIME_H
6#define __ASM_WORD_AT_A_TIME_H
7
8#include <linux/uaccess.h>
9
10#ifndef __AARCH64EB__
11
12#include <linux/kernel.h>
13
/*
 * Per-call constants for the little-endian word-at-a-time scan:
 * one_bits has 0x01 in every byte lane, high_bits has 0x80.
 */
struct word_at_a_time {
	const unsigned long one_bits, high_bits;
};

#define WORD_AT_A_TIME_CONSTANTS { REPEAT_BYTE(0x01), REPEAT_BYTE(0x80) }

/*
 * Report whether the word @a contains a zero byte.  The returned value
 * (also stored through @bits) has 0x80 set in the lane of each zero
 * byte — and possibly in lanes above the first zero byte, due to
 * borrow propagation; create_zero_mask() filters those out later.
 * Non-zero iff @a has at least one zero byte.
 */
static inline unsigned long has_zero(unsigned long a, unsigned long *bits,
				     const struct word_at_a_time *c)
{
	unsigned long borrow = (a - c->one_bits) & ~a;
	unsigned long mask = borrow & c->high_bits;

	*bits = mask;
	return mask;
}

/* The value produced by has_zero() is already in the required form. */
#define prep_zero_mask(a, bits, c) (bits)
29
/*
 * Convert the has_zero() result into a mask covering exactly the
 * bytes that precede the first zero byte: keep only the bits below
 * the lowest set marker bit, then drop the seven low-order bits of
 * the marker byte itself.
 */
static inline unsigned long create_zero_mask(unsigned long bits)
{
	unsigned long below_first = (bits - 1) & ~bits;

	return below_first >> 7;
}
35
/*
 * Byte index (little-endian) of the first zero byte, given the mask
 * from create_zero_mask(): the bit-length of the mask divided by 8.
 */
static inline unsigned long find_zero(unsigned long mask)
{
	unsigned long top_bit = fls64(mask);

	return top_bit >> 3;
}

/* The create_zero_mask() result is already the byte mask we want. */
#define zero_bytemask(mask) (mask)
42
43#else	/* __AARCH64EB__ */
44#include <asm-generic/word-at-a-time.h>
45#endif
46
47/*
48 * Load an unaligned word from kernel space.
49 *
50 * In the (very unlikely) case of the word being a page-crosser
51 * and the next page not being mapped, take the exception and
52 * return zeroes in the non-existing part.
53 */
static inline unsigned long load_unaligned_zeropad(const void *addr)
{
	unsigned long ret;

	/*
	 * Suppress asynchronous tag-check faults around the access
	 * (TCO = tag check override, per the helper's name): a
	 * page-crossing load may touch a granule whose MTE tag does
	 * not match the pointer's.
	 */
	__mte_enable_tco_async();

	/* Load word from unaligned pointer addr */
	asm(
	"1:	ldr	%0, %2\n"
	"2:\n"
	/*
	 * If the load at 1: faults, the exception-table fixup resumes
	 * at 2: with %0 holding the accessible bytes and the rest
	 * zeroed (see the comment above this function).  "=&r" marks
	 * ret early-clobber since the fixup writes it while %1 (addr)
	 * is still live; the "Q" operand tells the compiler the word
	 * at addr is actually read.
	 */
	_ASM_EXTABLE_LOAD_UNALIGNED_ZEROPAD(1b, 2b, %0, %1)
	: "=&r" (ret)
	: "r" (addr), "Q" (*(unsigned long *)addr));

	__mte_disable_tco_async();

	return ret;
}
72
73#endif /* __ASM_WORD_AT_A_TIME_H */
74