/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Cache maintenance
 *
 * Copyright (C) 2001 Deep Blue Solutions Ltd.
 * Copyright (C) 2012 ARM Ltd.
 */

#include <linux/errno.h>
#include <linux/linkage.h>
#include <linux/init.h>
#include <asm/assembler.h>
#include <asm/cpufeature.h>
#include <asm/alternative.h>
#include <asm/asm-uaccess.h>

/*
 *	flush_icache_range(start,end)
 *
 *	Ensure that the I and D caches are coherent within the specified
 *	region. This is typically used when code has been written to a
 *	memory region and is about to be executed.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
SYM_FUNC_START(__flush_icache_range)
	/* FALLTHROUGH */

/*
 *	__flush_cache_user_range(start,end)
 *
 *	Ensure that the I and D caches are coherent within the specified
 *	region. This is typically used when code has been written to a
 *	memory region and is about to be executed.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
SYM_FUNC_START(__flush_cache_user_range)
	uaccess_ttbr0_enable x2, x3, x4
alternative_if ARM64_HAS_CACHE_IDC
	dsb	ishst				// IDC: no D-cache clean to PoU needed
	b	7f
alternative_else_nop_endif
	dcache_line_size x2, x3			// x2 = D-cache line size
	sub	x3, x2, #1			// x3 = line size mask
	bic	x4, x0, x3			// x4 = start, line aligned
1:
user_alt 9f, "dc cvau, x4",  "dc civac, x4",  ARM64_WORKAROUND_CLEAN_CACHE
	add	x4, x4, x2
	cmp	x4, x1
	b.lo	1b
	dsb	ish

7:
alternative_if ARM64_HAS_CACHE_DIC
	isb					// DIC: no I-cache invalidation needed
	b	8f
alternative_else_nop_endif
	invalidate_icache_by_line x0, x1, x2, x3, 9f
8:	mov	x0, #0
1:
	uaccess_ttbr0_disable x1, x2
	ret
9:
	mov	x0, #-EFAULT			// a user access faulted
	b	1b
SYM_FUNC_END(__flush_icache_range)
SYM_FUNC_END(__flush_cache_user_range)
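
/*
 * Illustration only, not assembled: a rough C-level sketch of the
 * sequence above on a CPU without IDC/DIC, assuming a hypothetical
 * line_size() helper (the real code uses the separate D- and I-cache
 * line sizes from CTR_EL0):
 *
 *	for (p = start & ~(line_size() - 1); p < end; p += line_size())
 *		asm volatile("dc cvau, %0" : : "r" (p));  // clean D to PoU
 *	asm volatile("dsb ish");
 *	for (p = start & ~(line_size() - 1); p < end; p += line_size())
 *		asm volatile("ic ivau, %0" : : "r" (p));  // inval I to PoU
 *	asm volatile("dsb ish");
 *	asm volatile("isb");
 *
 * The real code additionally enables TTBR0 so that user addresses can
 * be touched, and returns -EFAULT if a maintenance instruction faults.
 */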

/*
 *	invalidate_icache_range(start,end)
 *
 *	Ensure that the I-cache is invalidated within the specified region.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
SYM_FUNC_START(invalidate_icache_range)
alternative_if ARM64_HAS_CACHE_DIC
	mov	x0, xzr				// return 0
	isb					// DIC: no invalidation needed
	ret
alternative_else_nop_endif

	uaccess_ttbr0_enable x2, x3, x4

	invalidate_icache_by_line x0, x1, x2, x3, 2f
	mov	x0, xzr				// return 0
1:
	uaccess_ttbr0_disable x1, x2
	ret
2:
	mov	x0, #-EFAULT			// a user access faulted
	b	1b
SYM_FUNC_END(invalidate_icache_range)
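
/*
 * Illustration only: a typical caller pattern (hypothetical caller):
 *
 *	if (invalidate_icache_range(start, end))
 *		return -EFAULT;		// a user mapping faulted
 *
 * With ARM64_HAS_CACHE_DIC (CTR_EL0.DIC set), I-cache invalidation is
 * not required for I/D coherence, so an ISB to resynchronise the
 * instruction fetch pipeline is all that is needed.
 */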

/*
 *	__flush_dcache_area(kaddr, size)
 *
 *	Ensure that any D-cache lines for the interval [kaddr, kaddr+size)
 *	are cleaned and invalidated to the PoC (Point of Coherency).
 *
 *	- kaddr   - kernel address
 *	- size    - size of region in bytes
 */
SYM_FUNC_START_PI(__flush_dcache_area)
	dcache_by_line_op civac, sy, x0, x1, x2, x3
	ret
SYM_FUNC_END_PI(__flush_dcache_area)
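
/*
 * dcache_by_line_op (see asm/assembler.h) expands to roughly the
 * following for "civac, sy"; sketch only, the macro's actual register
 * use and label numbering differ:
 *
 *	dcache_line_size x2, x3		// x2 = D-cache line size
 *	add	x1, x0, x1		// x1 = end of region
 *	sub	x3, x2, #1
 *	bic	x0, x0, x3		// align kaddr down to a line
 * 1:	dc	civac, x0		// clean+invalidate line to PoC
 *	add	x0, x0, x2
 *	cmp	x0, x1
 *	b.lo	1b
 *	dsb	sy
 */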

/*
 *	__clean_dcache_area_pou(kaddr, size)
 *
 *	Ensure that any D-cache lines for the interval [kaddr, kaddr+size)
 *	are cleaned to the PoU (Point of Unification).
 *
 *	- kaddr   - kernel address
 *	- size    - size of region in bytes
 */
SYM_FUNC_START(__clean_dcache_area_pou)
alternative_if ARM64_HAS_CACHE_IDC
	dsb	ishst
	ret
alternative_else_nop_endif
	dcache_by_line_op cvau, ish, x0, x1, x2, x3
	ret
SYM_FUNC_END(__clean_dcache_area_pou)
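
/*
 * When CTR_EL0.IDC is set, instruction fetch does not require the
 * D-cache to be cleaned to the PoU, so the alternative above replaces
 * the clean loop with a bare DSB that merely orders prior stores.
 */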

/*
 *	__inval_dcache_area(kaddr, size)
 *
 *	Ensure that any D-cache lines for the interval [kaddr, kaddr+size)
 *	are invalidated. Any partial lines at the ends of the interval are
 *	also cleaned to the PoC to prevent data loss.
 *
 *	- kaddr   - kernel address
 *	- size    - size of region in bytes
 */
SYM_FUNC_START_LOCAL(__dma_inv_area)
SYM_FUNC_START_PI(__inval_dcache_area)
	/* FALLTHROUGH */

/*
 *	__dma_inv_area(start, size)
 *	- start   - virtual start address of region
 *	- size    - size of region in bytes
 */
	add	x1, x1, x0
	dcache_line_size x2, x3
	sub	x3, x2, #1
	tst	x1, x3				// end cache line aligned?
	bic	x1, x1, x3
	b.eq	1f
	dc	civac, x1			// clean & invalidate D / U line
1:	tst	x0, x3				// start cache line aligned?
	bic	x0, x0, x3
	b.eq	2f
	dc	civac, x0			// clean & invalidate D / U line
	b	3f
2:	dc	ivac, x0			// invalidate D / U line
3:	add	x0, x0, x2
	cmp	x0, x1
	b.lo	2b
	dsb	sy
	ret
SYM_FUNC_END_PI(__inval_dcache_area)
SYM_FUNC_END(__dma_inv_area)
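
/*
 * Worked example (illustration only), assuming 64-byte cache lines and
 * kaddr = 0x1010, size = 0xe0, i.e. the interval [0x1010, 0x10f0):
 *
 *	end 0x10f0 is mid-line   -> dc civac on line 0x10c0
 *	start 0x1010 is mid-line -> dc civac on line 0x1000
 *	interior lines 0x1040, 0x1080 -> dc ivac (pure invalidate)
 *
 * Cleaning the partial head and tail lines first avoids discarding
 * unrelated dirty data that shares those lines with the buffer.
 */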

/*
 *	__clean_dcache_area_poc(kaddr, size)
 *
 *	Ensure that any D-cache lines for the interval [kaddr, kaddr+size)
 *	are cleaned to the PoC.
 *
 *	- kaddr   - kernel address
 *	- size    - size of region in bytes
 */
SYM_FUNC_START_LOCAL(__dma_clean_area)
SYM_FUNC_START_PI(__clean_dcache_area_poc)
	/* FALLTHROUGH */

/*
 *	__dma_clean_area(start, size)
 *	- start   - virtual start address of region
 *	- size    - size of region in bytes
 */
	dcache_by_line_op cvac, sy, x0, x1, x2, x3
	ret
SYM_FUNC_END_PI(__clean_dcache_area_poc)
SYM_FUNC_END(__dma_clean_area)
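
/*
 * Unlike civac, cvac only writes dirty lines back to the PoC and leaves
 * them valid in the cache, which is all that is needed before a device
 * reads the buffer.
 */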

/*
 *	__clean_dcache_area_pop(kaddr, size)
 *
 *	Ensure that any D-cache lines for the interval [kaddr, kaddr+size)
 *	are cleaned to the PoP (Point of Persistence).
 *
 *	- kaddr   - kernel address
 *	- size    - size of region in bytes
 */
SYM_FUNC_START_PI(__clean_dcache_area_pop)
	alternative_if_not ARM64_HAS_DCPOP
	b	__clean_dcache_area_poc
	alternative_else_nop_endif
	dcache_by_line_op cvap, sy, x0, x1, x2, x3
	ret
SYM_FUNC_END_PI(__clean_dcache_area_pop)
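
/*
 * "dc cvap" (clean to the Point of Persistence) is an ARMv8.2 addition,
 * advertised via ID_AA64ISAR1_EL1.DPB and the ARM64_HAS_DCPOP
 * cpufeature; without it the alternative above falls back to cleaning
 * to the PoC.
 */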

/*
 *	__dma_flush_area(start, size)
 *
 *	Clean and invalidate the D/unified cache lines for the interval
 *	[start, start+size) to the PoC.
 *
 *	- start   - virtual start address of region
 *	- size    - size of region in bytes
 */
SYM_FUNC_START_PI(__dma_flush_area)
	dcache_by_line_op civac, sy, x0, x1, x2, x3
	ret
SYM_FUNC_END_PI(__dma_flush_area)
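
/*
 * This is the same civac-by-line operation as __flush_dcache_area
 * above, kept as a separate entry point used by the DMA mapping code.
 */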

/*
 *	__dma_map_area(start, size, dir)
 *	- start	- kernel virtual start address
 *	- size	- size of region
 *	- dir	- DMA direction
 */
SYM_FUNC_START_PI(__dma_map_area)
	b	__dma_clean_area
SYM_FUNC_END_PI(__dma_map_area)
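
/*
 * Cleaning is used for every direction at map time: dirty lines are
 * written back so the device sees up-to-date memory, and stale lines
 * for DMA_FROM_DEVICE are dealt with by the invalidation at unmap
 * time. A C-level sketch of the pairing (illustration only):
 *
 *	__dma_map_area(start, size, dir);	// clean to PoC
 *	// ... device performs the transfer ...
 *	__dma_unmap_area(start, size, dir);	// invalidate unless TO_DEVICE
 */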

/*
 *	__dma_unmap_area(start, size, dir)
 *	- start	- kernel virtual start address
 *	- size	- size of region
 *	- dir	- DMA direction
 */
SYM_FUNC_START_PI(__dma_unmap_area)
	cmp	w2, #DMA_TO_DEVICE
	b.ne	__dma_inv_area			// invalidate unless TO_DEVICE
	ret
SYM_FUNC_END_PI(__dma_unmap_area)
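
/*
 * For DMA_TO_DEVICE the device only read the buffer, so no maintenance
 * is needed at unmap time. For the other directions, any lines that
 * were (re)allocated during the transfer, e.g. by speculative loads,
 * must be invalidated before the CPU reads the device's data.
 */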