/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <linux/export.h>
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif
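/*
 * On 32-bit kernels (LONGSIZE == 4) the misaligned head/tail of a region
 * is written with the swl/swr pair; 64-bit kernels use sdl/sdr, which
 * cover a full doubleword per store.
 */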

#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif
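/*
 * microMIPS uses the paired store LONG_SP ('swp', per the note at the
 * t8/t9 setup below), which writes two GPRs per instruction, so a single
 * store covers 2 * LONGSIZE bytes; FILL64RG/FILLPTRG name the registers
 * that mode needs.
 */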

#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect these macros with EVA #ifdefery.  The generated
 * block of code will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)
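
/*
 * EX() below emits a store at local label 9: (switched to the 'e'-suffixed
 * EVA user-access form when \mode is EVA_MODE) and records a __ex_table
 * entry pairing 9b with a fixup handler, so a fault in that store resumes
 * at the handler instead of oopsing.
 */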
#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:		insn	reg, addr;			\
	.else;						\
9:		___BUILD_EVA_INSN(insn, reg, addr);	\
	.endif;						\
	.section __ex_table,"a";			\
	PTR_WD	9b, handler;				\
	.previous

	.macro	f_fill64 dst, offset, val, fixup, mode
	EX(LONG_S, \val, (\offset +  0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset +  4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	EX(LONG_S, \val, (\offset +  8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm
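	/*
	 * Whatever the configuration, f_fill64 stores exactly 64 bytes:
	 * 16 x 4-byte stores (32-bit), 8 x 8 (64-bit, or 32-bit microMIPS
	 * pairs), or 4 x 16 (64-bit microMIPS pairs).
	 */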

	.align	5

	/*
	 * Macro to generate the memset/__bzero body
	 * Arguments:
	 * mode: LEGACY_MODE or EVA_MODE
	 */
	.macro __BUILD_BZERO mode
	/* Initialize __memset if this is the first time we call this macro */
	.ifnotdef __memset
	.set __memset, 1
	.hidden __memset /* Make sure it does not leak */
	.endif

	sltiu		t0, a2, STORSIZE	/* very small region? */
	.set		noreorder
	bnez		t0, .Lsmall_memset\@
	 andi		t0, a0, STORMASK	/* aligned? */
	.set		reorder

#ifdef CONFIG_CPU_MICROMIPS
	move		t8, a1			/* used by 'swp' instruction */
	move		t9, a1
#endif
	.set		noreorder
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz		t0, 1f
	 PTR_SUBU	t0, STORSIZE		/* alignment in bytes */
#else
	.set		noat
	li		AT, STORSIZE
	beqz		t0, 1f
	 PTR_SUBU	t0, AT			/* alignment in bytes */
	.set		at
#endif
	.set		reorder
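	/*
	 * From here on t0 = (a0 & STORMASK) - STORSIZE, i.e. minus the
	 * number of bytes still needed to reach the next STORSIZE boundary.
	 */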

#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#else
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#endif
	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */

#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
#define STORE_BYTE(N)				\
	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
	.set		noreorder;		\
	beqz		t0, 0f;			\
	 PTR_ADDU	t0, 1;			\
	.set		reorder;
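/*
 * STORE_BYTE stores a single byte and bails out to label 0: once the
 * counter t0, preset to 1 minus the number of bytes wanted, counts up
 * to zero.
 */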

	PTR_ADDU	a2, t0			/* correct size */
	PTR_ADDU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
	ori		a0, STORMASK
	xori		a0, STORMASK
	PTR_ADDIU	a0, STORSIZE
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
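	/*
	 * t1 = a2 & ~0x3f is the byte count in whole 64-byte blocks (the
	 * ori/xori pair avoids a mask that does not fit an immediate);
	 * t0 = a2 & (0x40 - STORSIZE) is what the partial block still needs.
	 */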
1:	ori		t1, a2, 0x3f		/* # of full blocks */
	xori		t1, 0x3f
	andi		t0, a2, 0x40-STORSIZE
	beqz		t1, .Lmemset_partial\@	/* no block to fill */

	PTR_ADDU	t1, a0			/* end address */
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
	bne		t1, a0, 1b

.Lmemset_partial\@:
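	/*
	 * Fewer than 64 bytes remain.  Jump into the tail of the unrolled
	 * f_fill64 below so that exactly t0 bytes get stored: t0 is scaled
	 * by the shifts that follow (bytes covered per store instruction
	 * versus instruction size) and subtracted from the end address 2f.
	 */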
	R10KCBARRIER(0(ra))
	PTR_LA		t1, 2f			/* where to start */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL	t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU	t1, FILLPTRG
#else
	.set		noat
	LONG_SRL	AT, FILLPTRG, 1
	PTR_SUBU	t1, AT
	.set		at
#endif
	PTR_ADDU	a0, t0			/* dest ptr */
	jr		t1

	/* ... but first do longs ... */
	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:	andi		a2, STORMASK		/* At most one long to go */

	.set		noreorder
	beqz		a2, 1f
#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	 PTR_ADDU	a0, a2			/* What's left */
	.set		reorder
	R10KCBARRIER(0(ra))
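	/*
	 * a0 now points one byte past the end: a single unaligned store
	 * fills the remaining a2 bytes, from the last long boundary
	 * through -1(a0).
	 */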
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
	 PTR_SUBU	t0, $0, a2
	.set		reorder
	move		a2, zero		/* No remaining longs */
	PTR_ADDIU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
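	/*
	 * Done: a2 is the unset-byte count that the fixup handlers below
	 * hand back to callers, so clear it on the success path.
	 */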
1:	move		a2, zero
	jr		ra

.Lsmall_memset\@:
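	/* Region shorter than STORSIZE: a plain byte loop is cheapest. */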
	PTR_ADDU	t1, a0, a2
	beqz		a2, 2f

1:	PTR_ADDIU	a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	.set		noreorder
	bne		t1, a0, 1b
	 EX(sb, a1, -1(a0), .Lsmall_fixup\@)
	.set		reorder

2:	move		a2, zero
	jr		ra			/* done */
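	/*
	 * Emit END(memset) only on the first expansion of this macro; the
	 * second (EVA) expansion must not close memset again.
	 */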
	.if __memset == 1
	END(memset)
	.set __memset, 0
	.hidden __memset
	.endif

#ifdef CONFIG_CPU_NO_LOAD_STORE_LR
.Lbyte_fixup\@:
	/*
	 * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
	 *      a2     =             a2                -              t0                   + 1
	 */
	PTR_SUBU	a2, t0
	PTR_ADDIU	a2, 1
	jr		ra
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */

.Lfirst_fixup\@:
	/* unset_bytes already in a2: the head store faults before a2 is adjusted */
	jr	ra

.Lfwd_fixup\@:
	/*
	 * unset_bytes = partial_start_addr +  #bytes   -     fault_addr
	 *      a2     =         t1         + (a2 & 0x3f) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, 0x3f
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	LONG_SUBU	a2, t0
	jr		ra

.Lpartial_fixup\@:
	/*
	 * unset_bytes = partial_end_addr +      #bytes     -     fault_addr
	 *      a2     =       a0         + (a2 & STORMASK) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, STORMASK
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, a0
	LONG_SUBU	a2, t0
	jr		ra

.Llast_fixup\@:
	/* unset_bytes already in a2: only the a2 tail bytes were still to go */
	jr		ra

.Lsmall_fixup\@:
	/*
	 * unset_bytes = end_addr - current_addr + 1
	 *      a2     =    t1    -      a0      + 1
	 */
	PTR_SUBU	a2, t1, a0
	PTR_ADDIU	a2, 1
	jr		ra

	.endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */

LEAF(memset)
EXPORT_SYMBOL(memset)
	move		v0, a0			/* result */
	beqz		a1, 1f			/* zero fill: no spreading needed */

	andi		a1, 0xff		/* spread fillword */
	LONG_SLL	t1, a1, 8
	or		a1, t1
	LONG_SLL	t1, a1, 16
#if LONGSIZE == 8
	or		a1, t1
	LONG_SLL	t1, a1, 32
#endif
	or		a1, t1
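	/* e.g. c == 0x5a yields a1 == 0x5a5a5a5a (0x5a5a5a5a5a5a5a5a on 64-bit) */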
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
EXPORT_SYMBOL(__bzero)
#endif
	__BUILD_BZERO LEGACY_MODE
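
/*
 * Under EVA the user address space must be reached with the 'e'-suffixed
 * instruction forms, so a separate __bzero body is assembled in EVA_MODE;
 * without EVA, __bzero simply aliases into the legacy body above.
 */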
#ifdef CONFIG_EVA
LEAF(__bzero)
EXPORT_SYMBOL(__bzero)
	__BUILD_BZERO EVA_MODE
END(__bzero)
#endif