1 /* SPDX-License-Identifier: GPL-2.0 */
2 /*
3  * blockops.S: Common block zero optimized routines.
4  *
5  * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
6  */
7 
8 #include <linux/linkage.h>
9 #include <asm/page.h>
10 #include <asm/export.h>
11 
	/* Zero out 64 bytes of memory at (buf + offset) with eight
	 * doubleword (8-byte) std stores of %g0 -- the SPARC hardwired
	 * zero register -- from the highest doubleword down to the
	 * lowest.  (buf + offset) must be 8-byte aligned, as std traps
	 * on unaligned addresses.
	 * NOTE(review): an earlier comment here said "Assumes %g1
	 * contains zero", but the macro stores %g0 directly and never
	 * reads %g1; confirm against the arch/sparc/mm/hypersparc.S
	 * patch code before treating %g1 as free at expansion sites.
	 */
#define BLAST_BLOCK(buf, offset) \
	std	%g0, [buf + offset + 0x38]; \
	std	%g0, [buf + offset + 0x30]; \
	std	%g0, [buf + offset + 0x28]; \
	std	%g0, [buf + offset + 0x20]; \
	std	%g0, [buf + offset + 0x18]; \
	std	%g0, [buf + offset + 0x10]; \
	std	%g0, [buf + offset + 0x08]; \
	std	%g0, [buf + offset + 0x00];
24 
	/* Copy 32 bytes of memory from (src + offset) to (dst + offset).
	 * t0..t7 must name four even/odd register pairs (t0/t1, t2/t3,
	 * t4/t5, t6/t7): ldd/std address only the even register of a
	 * pair and implicitly use the odd one for the second word,
	 * which is why just t0, t2, t4 and t6 appear in the body.
	 * All four loads are issued before any store.  Both src and
	 * dst (+ offset) must be 8-byte aligned for ldd/std.
	 */
#define MIRROR_BLOCK(dst, src, offset, t0, t1, t2, t3, t4, t5, t6, t7) \
	ldd	[src + offset + 0x18], t0; \
	ldd	[src + offset + 0x10], t2; \
	ldd	[src + offset + 0x08], t4; \
	ldd	[src + offset + 0x00], t6; \
	std	t0, [dst + offset + 0x18]; \
	std	t2, [dst + offset + 0x10]; \
	std	t4, [dst + offset + 0x08]; \
	std	t6, [dst + offset + 0x00];
37 
38 	/* Profiling evidence indicates that memset() is
39 	 * commonly called for blocks of size PAGE_SIZE,
40 	 * and (2 * PAGE_SIZE) (for kernel stacks)
41 	 * and with a second arg of zero.  We assume in
42 	 * all of these cases that the buffer is aligned
43 	 * on at least an 8 byte boundary.
44 	 *
45 	 * Therefore we special case them to make them
46 	 * as fast as possible.
47 	 */
48 
49 	.text
ENTRY(bzero_1page)
/* NOTE: If you change the number of insns of this routine, please check
 * arch/sparc/mm/hypersparc.S */
	/* %o0 = buf */
	/* Zero one page (PAGE_SIZE bytes) starting at %o0, 256 bytes
	 * per loop iteration.  Modifies %g1, %g2, %o0, %o1 and the
	 * condition codes.  buf must be at least 8-byte aligned (see
	 * BLAST_BLOCK / the rationale comment above).
	 */
	or	%g0, %g0, %g1			! %g1 = 0; NOTE(review): not read below
						! (BLAST_BLOCK stores %g0) -- kept so the
						! insn count patched by hypersparc.S
						! stays fixed
	or	%o0, %g0, %o1			! %o1 = buf; also unused below,
						! presumably consumed by the
						! hypersparc.S replacement -- confirm
	or	%g0, (PAGE_SIZE >> 8), %g2	! %g2 = PAGE_SIZE/256 chunk counter
1:
	BLAST_BLOCK(%o0, 0x00)			! bytes 0x00-0x3f of this chunk
	BLAST_BLOCK(%o0, 0x40)			! bytes 0x40-0x7f
	BLAST_BLOCK(%o0, 0x80)			! bytes 0x80-0xbf
	BLAST_BLOCK(%o0, 0xc0)			! bytes 0xc0-0xff
	subcc	%g2, 1, %g2			! one chunk done; sets cond codes
	bne	1b				! loop while chunks remain
	 add	%o0, 0x100, %o0			! delay slot: advance to next chunk

	retl
	 nop					! delay slot
ENDPROC(bzero_1page)
EXPORT_SYMBOL(bzero_1page)
70 
ENTRY(__copy_1page)
/* NOTE: If you change the number of insns of this routine, please check
 * arch/sparc/mm/hypersparc.S */
	/* %o0 = dst, %o1 = src */
	/* Copy one page (PAGE_SIZE bytes) from src to dst, 256 bytes
	 * per loop iteration (eight 32-byte MIRROR_BLOCKs).  Modifies
	 * %g1-%g5, %o0-%o5 and the condition codes: %g1 is the chunk
	 * counter; %o2-%o5 and %g2-%g5 are ldd/std pair temporaries.
	 * Both pointers must be at least 8-byte aligned for ldd/std.
	 */
	or	%g0, (PAGE_SIZE >> 8), %g1	! %g1 = PAGE_SIZE/256 chunk counter
1:
	MIRROR_BLOCK(%o0, %o1, 0x00, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5)
	MIRROR_BLOCK(%o0, %o1, 0x20, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5)
	MIRROR_BLOCK(%o0, %o1, 0x40, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5)
	MIRROR_BLOCK(%o0, %o1, 0x60, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5)
	MIRROR_BLOCK(%o0, %o1, 0x80, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5)
	MIRROR_BLOCK(%o0, %o1, 0xa0, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5)
	MIRROR_BLOCK(%o0, %o1, 0xc0, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5)
	MIRROR_BLOCK(%o0, %o1, 0xe0, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5)
	subcc	%g1, 1, %g1			! one 256-byte chunk done
	add	%o0, 0x100, %o0			! advance dst
	bne	1b				! loop while chunks remain
	 add	%o1, 0x100, %o1			! delay slot: advance src

	retl
	 nop					! delay slot
ENDPROC(__copy_1page)
EXPORT_SYMBOL(__copy_1page)
94