/* SPDX-License-Identifier: GPL-2.0-only */
/*
 *  linux/arch/arm/lib/findbit.S
 *
 *  Copyright (C) 1995-2000 Russell King
 *
 * 16th March 2001 - John Ripley <jripley@sonicblue.com>
 *   Fixed so that "size" is an exclusive not an inclusive quantity.
 *   All users of these functions expect exclusive sizes, and may
 *   also call with zero size.
 * Reworked by rmk.
 */
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <asm/unwind.h>
		.text

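/*
 * SWAB_ENDIAN names the byte order opposite to the one the kernel is
 * built for; when a macro's \endian argument matches it, the loaded
 * word has to be byte-reversed (rev_l) before its bits are scanned.
 */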
#ifdef __ARMEB__
#define SWAB_ENDIAN le
#else
#define SWAB_ENDIAN be
#endif

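/*
 * find_first - find the first set bit (or, for the zero_ variants, the
 * first clear bit) in a bitmap, scanning from bit 0.
 *   r0 = pointer to the bitmap (read a whole word at a time)
 *   r1 = number of bits (exclusive upper bound; may be zero)
 * Returns the bit number in r0, or r1 if no such bit was found.
 */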
		.macro	find_first, endian, set, name
ENTRY(_find_first_\name\()bit_\endian)
	UNWIND(	.fnstart)
		teq	r1, #0
		beq	3f
		mov	r2, #0
1:		ldr	r3, [r0], #4
		.ifeq \set
		mvns	r3, r3			@ invert/test bits
		.else
		movs	r3, r3			@ test bits
		.endif
		.ifc \endian, SWAB_ENDIAN
		bne	.L_found_swab
		.else
		bne	.L_found		@ found the bit?
		.endif
		add	r2, r2, #32		@ next index
2:		cmp	r2, r1			@ any more?
		blo	1b
3:		mov	r0, r1			@ no more bits
		ret	lr
	UNWIND(	.fnend)
ENDPROC(_find_first_\name\()bit_\endian)
		.endm

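/*
 * find_next - find the next set bit (or clear bit) at or above a given
 * bit position.
 *   r0 = pointer to the bitmap
 *   r1 = number of bits (exclusive upper bound)
 *   r2 = bit number to start searching from
 * Branches back into the find_first expansion above, so find_first
 * must be emitted immediately before it (see the find_bit macro).
 * Returns the bit number in r0, or r1 if no such bit was found.
 */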
		.macro	find_next, endian, set, name
ENTRY(_find_next_\name\()bit_\endian)
	UNWIND(	.fnstart)
		cmp	r2, r1
		bhs	3b
		mov	ip, r2, lsr #5		@ word index
		add	r0, r0, ip, lsl #2
		ands	ip, r2, #31		@ bit position
		beq	1b
		ldr	r3, [r0], #4
		.ifeq \set
		mvn	r3, r3			@ invert bits
		.endif
		.ifc \endian, SWAB_ENDIAN
		rev_l	r3, ip
		.if	.Lrev_l_uses_tmp
		@ we need to recompute ip because rev_l will have overwritten
		@ it.
		and	ip, r2, #31		@ bit position
		.endif
		.endif
		movs	r3, r3, lsr ip		@ shift off unused bits
		bne	.L_found
		orr	r2, r2, #31		@ no zero bits
		add	r2, r2, #1		@ align bit pointer
		b	2b			@ loop for next bit
	UNWIND(	.fnend)
ENDPROC(_find_next_\name\()bit_\endian)
		.endm

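/*
 * find_bit - emit both entry points for one \set polarity and \endian
 * combination.  find_next relies on the local labels defined by the
 * find_first expansion directly above it.
 */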
		.macro	find_bit, endian, set, name
		find_first \endian, \set, \name
		find_next  \endian, \set, \name
		.endm

/* _find_first_zero_bit_le and _find_next_zero_bit_le */
		find_bit le, 0, zero_

/* _find_first_bit_le and _find_next_bit_le */
		find_bit le, 1

#ifdef __ARMEB__

/* _find_first_zero_bit_be and _find_next_zero_bit_be */
		find_bit be, 0, zero_

/* _find_first_bit_be and _find_next_bit_be */
		find_bit be, 1

#endif

/*
 * One or more bits in the LSB of r3 are assumed to be set; r2 holds
 * the bit number corresponding to bit 0 of r3.
 */
.L_found_swab:
	UNWIND(	.fnstart)
		rev_l	r3, ip			@ byte-reverse to the requested bit order
.L_found:
#if __LINUX_ARM_ARCH__ >= 7
		rbit	r3, r3			@ reverse bits
		clz	r3, r3			@ count high zero bits
		add	r0, r2, r3		@ add offset of first set bit
#elif __LINUX_ARM_ARCH__ >= 5
		rsb	r0, r3, #0
		and	r3, r3, r0		@ mask out lowest bit set
		clz	r3, r3			@ count high zero bits
		rsb	r3, r3, #31		@ offset of first set bit
		add	r0, r2, r3		@ add offset of first set bit
#else
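		@ No clz instruction available: binary-search r3 for its
		@ lowest set bit, accumulating the bit number in r2.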
		mov	ip, #~0
		tst	r3, ip, lsr #16		@ test bits 0-15
		addeq	r2, r2, #16
		moveq	r3, r3, lsr #16
		tst	r3, #0x00ff
		addeq	r2, r2, #8
		moveq	r3, r3, lsr #8
		tst	r3, #0x000f
		addeq	r2, r2, #4
		moveq	r3, r3, lsr #4
		tst	r3, #0x0003
		addeq	r2, r2, #2
		moveq	r3, r3, lsr #2
		tst	r3, #0x0001
		addeq	r2, r2, #1
		mov	r0, r2
#endif
		cmp	r1, r0			@ Clamp to maxbit
		movlo	r0, r1
		ret	lr
	UNWIND(	.fnend)