/* SPDX-License-Identifier: GPL-2.0-only */
/* Copyright(c) 2016-2020 Intel Corporation. All rights reserved. */

#include <linux/linkage.h>
#include <asm/copy_mc_test.h>
#include <asm/export.h>
#include <asm/asm.h>

#ifndef CONFIG_UML

#ifdef CONFIG_X86_MCE
COPY_MC_TEST_CTL

/*
 * copy_mc_fragile - copy memory, indicating whether an exception / fault happened
 *
 * The 'fragile' version is opted into by platform quirks and takes
 * pains to avoid unrecoverable corner cases, like 'fast-string'
 * instruction sequences and consuming poison across a cacheline
 * boundary. The non-fragile version is equivalent to memcpy()
 * regardless of CPU machine-check-recovery capability.
 */
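/*
 * Register conventions (System V AMD64 ABI, as used by the code below):
 * %rdi = destination, %rsi = source, %edx = byte count. On success %rax
 * returns 0; on a fault or machine check, %rax returns the number of
 * bytes not copied.
 */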
SYM_FUNC_START(copy_mc_fragile)
	cmpl $8, %edx
	/* Less than 8 bytes? Go to byte copy loop */
	jb .L_no_whole_words

	/* Check for bad alignment of source */
	testl $7, %esi
	/* Already aligned */
	jz .L_8byte_aligned

	/* Copy one byte at a time until source is 8-byte aligned */
	movl %esi, %ecx
	andl $7, %ecx
	subl $8, %ecx
	negl %ecx
	subl %ecx, %edx
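	/*
	 * Worked example: if %rsi ends in binary ...101, then
	 * %ecx = 5, 5 - 8 = -3, negated to 3: copy three leading
	 * bytes to reach 8-byte alignment, and shrink %edx by 3.
	 */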
.L_read_leading_bytes:
	movb (%rsi), %al
	COPY_MC_TEST_SRC %rsi 1 .E_leading_bytes
	COPY_MC_TEST_DST %rdi 1 .E_leading_bytes
.L_write_leading_bytes:
	movb %al, (%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz .L_read_leading_bytes

.L_8byte_aligned:
	movl %edx, %ecx
	andl $7, %edx
	shrl $3, %ecx
	jz .L_no_whole_words
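	/*
	 * E.g. with 27 bytes remaining: %ecx = 27 >> 3 = 3 whole words
	 * for the loop below, %edx = 27 & 7 = 3 bytes left over for
	 * the trailing-byte loop.
	 */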

.L_read_words:
	movq (%rsi), %r8
	COPY_MC_TEST_SRC %rsi 8 .E_read_words
	COPY_MC_TEST_DST %rdi 8 .E_write_words
.L_write_words:
	movq %r8, (%rdi)
	addq $8, %rsi
	addq $8, %rdi
	decl %ecx
	jnz .L_read_words

	/* Any trailing bytes? */
.L_no_whole_words:
	andl %edx, %edx
	jz .L_done_memcpy_trap

	/* Copy trailing bytes */
	movl %edx, %ecx
.L_read_trailing_bytes:
	movb (%rsi), %al
	COPY_MC_TEST_SRC %rsi 1 .E_trailing_bytes
	COPY_MC_TEST_DST %rdi 1 .E_trailing_bytes
.L_write_trailing_bytes:
	movb %al, (%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz .L_read_trailing_bytes

	/* Copy successful. Return zero */
.L_done_memcpy_trap:
	xorl %eax, %eax
.L_done:
	RET
SYM_FUNC_END(copy_mc_fragile)
EXPORT_SYMBOL_GPL(copy_mc_fragile)

	.section .fixup, "ax"
	/*
	 * Return number of bytes not copied for any failure. Note that
	 * there is no "tail" handling since the source buffer is 8-byte
	 * aligned and poison is cacheline aligned.
	 */
.E_read_words:
	shll	$3, %ecx
.E_leading_bytes:
	addl	%edx, %ecx
.E_trailing_bytes:
	movl	%ecx, %eax
	jmp	.L_done
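	/*
	 * Note the deliberate fall-through above. Example: a fault in
	 * the word loop with %ecx = 2 words and %edx = 3 trailing
	 * bytes outstanding returns 2 * 8 + 3 = 19 bytes not copied.
	 */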

	/*
	 * For write fault handling, given the destination is unaligned,
	 * we handle faults on multi-byte writes with a byte-by-byte
	 * copy up to the write-protected page.
	 */
.E_write_words:
	shll	$3, %ecx
	addl	%edx, %ecx
	movl	%ecx, %edx
	jmp	copy_mc_fragile_handle_tail
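	/*
	 * copy_mc_fragile_handle_tail() (arch/x86/lib/copy_mc.c) takes
	 * the remaining byte count in %edx, retries the copy one byte
	 * at a time, and returns how many bytes were left uncopied.
	 */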

	.previous

	_ASM_EXTABLE_FAULT(.L_read_leading_bytes, .E_leading_bytes)
	_ASM_EXTABLE_FAULT(.L_read_words, .E_read_words)
	_ASM_EXTABLE_FAULT(.L_read_trailing_bytes, .E_trailing_bytes)
	_ASM_EXTABLE(.L_write_leading_bytes, .E_leading_bytes)
	_ASM_EXTABLE(.L_write_words, .E_write_words)
	_ASM_EXTABLE(.L_write_trailing_bytes, .E_trailing_bytes)
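	/*
	 * Note the asymmetry in the exception tables: reads can consume
	 * poison and raise a machine check, so they use the
	 * _ASM_EXTABLE_FAULT() flavor; writes are only expected to take
	 * ordinary page faults and get plain _ASM_EXTABLE() entries.
	 */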
#endif /* CONFIG_X86_MCE */

/*
 * copy_mc_enhanced_fast_string - memory copy with exception handling
 *
 * Fast string copy + fault / exception handling. If the CPU does
 * support machine check exception recovery, but does not support
 * recovering from fast-string exceptions, then this CPU needs to be
 * added to the copy_mc_fragile_key set of quirks. Otherwise, absent
 * any machine check recovery support, this version should be no
 * slower than a standard memcpy().
 */
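/*
 * For context, callers are not expected to pick an implementation
 * directly; the choice is made at runtime in arch/x86/lib/copy_mc.c.
 * A rough sketch of that dispatch (simplified, not verbatim):
 *
 *	unsigned long copy_mc_to_kernel(void *dst, const void *src,
 *					unsigned len)
 *	{
 *		if (copy_mc_fragile_enabled)
 *			return copy_mc_fragile(dst, src, len);
 *		if (static_cpu_has(X86_FEATURE_ERMS))
 *			return copy_mc_enhanced_fast_string(dst, src, len);
 *		memcpy(dst, src, len);
 *		return 0;
 *	}
 */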
SYM_FUNC_START(copy_mc_enhanced_fast_string)
	movq %rdi, %rax
	movq %rdx, %rcx
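	/*
	 * %rcx carries the byte count for rep movsb; %rax is primed
	 * with the destination, memcpy-style, though both exit paths
	 * overwrite it with a status value before returning.
	 */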
.L_copy:
	rep movsb
	/* Copy successful. Return zero */
	xorl %eax, %eax
	RET
SYM_FUNC_END(copy_mc_enhanced_fast_string)

	.section .fixup, "ax"
.E_copy:
	/*
	 * On fault, %rcx is updated such that the copy instruction could
	 * optionally be restarted at the fault position, i.e. it
	 * contains 'bytes remaining'. A non-zero return indicates an
	 * error to copy_mc_generic() users, or a short transfer to
	 * user-copy routines.
	 */
	movq %rcx, %rax
	RET
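	/*
	 * E.g. a 4096-byte copy that faults with 512 bytes still to
	 * move returns 512 to the caller.
	 */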

	.previous

	_ASM_EXTABLE_FAULT(.L_copy, .E_copy)
#endif /* !CONFIG_UML */