/* SPDX-License-Identifier: GPL-2.0 */
#include <asm/reg.h>
#include <asm/ppc_asm.h>
#include <asm/processor.h>
#include <asm/cache.h>


#define SDRAM_CTRL	0x104
#define SC_MODE_EN	(1<<31)
#define SC_CKE		(1<<30)
#define SC_REF_EN	(1<<28)
#define SC_SOFT_PRE	(1<<1)

#define GPIOW_GPIOE	0xc00
#define GPIOW_DDR	0xc08
#define GPIOW_DVO	0xc0c

#define CDM_CE		0x214
#define CDM_SDRAM	(1<<3)
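/* the offsets above are relative to MBAR, the base of the MPC5200's
 * internal register space (kept in r8 below) */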


/* helpers... beware: r10 and r4 are overwritten */
#define SAVE_SPRN(reg, addr)		\
	mfspr	r10, SPRN_##reg;	\
	stw	r10, ((addr)*4)(r4);

#define LOAD_SPRN(reg, addr)		\
	lwz	r10, ((addr)*4)(r4);	\
	mtspr	SPRN_##reg, r10;	\
	sync;				\
	isync;
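/* both macros index the registers save area through r4; the sync/isync in
 * LOAD_SPRN makes each mtspr take effect before the next instruction,
 * which matters when reloading MMU-related SPRs */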


	.data
registers:
	.space 0x5c*4
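	/* 0x5c words - slots 0x00..0x5b, filled in by save_regs below */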
	.text

/* ---------------------------------------------------------------------- */
/* low-power mode with help of M68HLC908QT1 */

	.globl lite5200_low_power
lite5200_low_power:

	mr	r7, r3	/* save SRAM va */
	mr	r8, r4	/* save MBAR va */

	/* set up the wakeup address for u-boot at physical location 0x0 */
	lis	r3, CONFIG_KERNEL_START@h
	lis	r4, lite5200_wakeup@h
	ori	r4, r4, lite5200_wakeup@l
	sub	r4, r4, r3
	stw	r4, 0(r3)
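	/* the stored word is lite5200_wakeup's *physical* address (virtual
	 * minus CONFIG_KERNEL_START); u-boot branches to it, MMU off, on
	 * resume */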


	/*
	 * save stuff the BDI overwrites
	 * 0xf0 (0xe0->0x100 gets overwritten when the BDI is connected,
	 *   even with CONFIG_BDI_SWITCH disabled and MMU XLAT commented out; heisenbug?)
	 * WARNING: self-refresh doesn't seem to work when a BDI2000 is connected,
	 *   possibly because the BDI sets the SDRAM registers before the wakeup code does
	 */
	lis	r4, registers@h
	ori	r4, r4, registers@l
	lwz	r10, 0xf0(r3)
	stw	r10, (0x1d*4)(r4)

	/* save registers to r4 [destroys r10] */
	SAVE_SPRN(LR, 0x1c)
	bl	save_regs

	/* flush caches [destroys r3, r4] */
	bl	flush_data_cache


	/* copy code to sram */
	mr	r4, r7
	li	r3, (sram_code_end - sram_code)/4
	mtctr	r3
	lis	r3, sram_code@h
	ori	r3, r3, sram_code@l
1:
	lwz	r5, 0(r3)
	stw	r5, 0(r4)
	addi	r3, r3, 4
	addi	r4, r4, 4
	bdnz	1b
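	/* everything between sram_code and sram_code_end uses only relative
	 * branches, so the copy runs fine from on-chip SRAM while SDRAM is
	 * in self-refresh */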

	/* get tb_ticks_per_usec */
	lis	r3, tb_ticks_per_usec@h
	lwz	r11, tb_ticks_per_usec@l(r3)
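	/* (kernel global; fetch it into r11 now, while SDRAM and address
	 * translation are still available, for the SRAM-local udelay below) */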

	/* disable I and D caches */
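	/* ori sets ICE|DCE, xori flips them right back off - net effect is
	 * clearing both bits without needing a separate mask register */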
	mfspr	r3, SPRN_HID0
	ori	r3, r3, HID0_ICE | HID0_DCE
	xori	r3, r3, HID0_ICE | HID0_DCE
	sync; isync;
	mtspr	SPRN_HID0, r3
	sync; isync;

	/* jump to sram */
	mtlr	r7
	blrl
	/* doesn't return */


sram_code:
	/* self refresh */
	lwz	r4, SDRAM_CTRL(r8)

	/* send NOP (precharge) */
	oris	r4, r4, SC_MODE_EN@h	/* mode_en */
	stw	r4, SDRAM_CTRL(r8)
	sync

	ori	r4, r4, SC_SOFT_PRE	/* soft_pre */
	stw	r4, SDRAM_CTRL(r8)
	sync
	xori	r4, r4, SC_SOFT_PRE

	xoris	r4, r4, SC_MODE_EN@h	/* !mode_en */
	stw	r4, SDRAM_CTRL(r8)
	sync

	/* delay (for NOP to finish) */
	li	r12, 1
	bl	udelay

	/*
	 * mode_en must not be set when enabling self-refresh
	 * send AR with CKE low (self-refresh)
	 */
	oris	r4, r4, (SC_REF_EN | SC_CKE)@h
	xoris	r4, r4, (SC_CKE)@h	/* ref_en !cke */
	stw	r4, SDRAM_CTRL(r8)
	sync
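	/* SDRAM is in self-refresh from here on: it retains its contents
	 * on its own for as long as CKE stays low */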

	/* delay (after !CKE there should be two cycles) */
	li	r12, 1
	bl	udelay

	/* disable clock */
	lwz	r4, CDM_CE(r8)
	ori	r4, r4, CDM_SDRAM
	xori	r4, r4, CDM_SDRAM
	stw	r4, CDM_CE(r8)
	sync
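	/* CDM_CE is the Clock Distribution Module's clock-enable register;
	 * the ori/xori pair clears the SDRAM bit, gating the clock to the
	 * now self-refreshing controller */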

	/* delay a bit */
	li	r12, 1
	bl	udelay


	/* turn off with QT chip */
	li	r4, 0x02
	stb	r4, GPIOW_GPIOE(r8)	/* enable gpio_wkup1 */
	sync

	stb	r4, GPIOW_DVO(r8)	/* "output" high */
	sync
	stb	r4, GPIOW_DDR(r8)	/* output */
	sync
	stb	r4, GPIOW_DVO(r8)	/* output high */
	sync
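	/* DVO (data) is written before DDR (direction) so the pin already
	 * drives high the moment it becomes an output - presumably to avoid
	 * a glitch the QT chip could misread */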

	/* 10 us delay */
	li	r12, 10
	bl	udelay

	/* turn off */
	li	r4, 0
	stb	r4, GPIOW_DVO(r8)	/* output low */
	sync

	/* wait until we're offline */
  1:
	b	1b


	/* a local udelay is needed: the kernel's udelay lives in SDRAM,
	 * which is in self-refresh by now */
  udelay: /* r11 - tb_ticks_per_usec, r12 - usecs, overwrites r13 */
	mullw	r12, r12, r11
	mftb	r13	/* start */
	add	r12, r13, r12 /* end */
    1:
	mftb	r13	/* current */
	cmp	cr0, r13, r12
	blt	1b
	blr
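	/* a pure busy-wait on the time base: no loads or stores, so it is
	 * safe to run with SDRAM asleep and the caches off */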

sram_code_end:



/* u-boot jumps here on resume */
lite5200_wakeup:
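	/* we arrive here with the MMU off, at the physical address that
	 * lite5200_low_power stored at 0x0 */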
	bl	restore_regs


	/* HIDs, MSR */
	LOAD_SPRN(HID1, 0x19)
	LOAD_SPRN(HID2, 0x1a)


	/* address translation is tricky (see turn_on_mmu) */
	mfmsr	r10
	ori	r10, r10, MSR_DR | MSR_IR

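	/* rfi loads the MSR from SRR1 and jumps to SRR0 in one step, so
	 * translation comes on exactly as execution lands on mmu_on's
	 * virtual address */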

	mtspr	SPRN_SRR1, r10
	lis	r10, mmu_on@h
	ori	r10, r10, mmu_on@l
	mtspr	SPRN_SRR0, r10
	sync
	rfi
mmu_on:
	/* make r4 virtual again (it still holds the physical address of
	 * the registers save area, set up in restore_regs) */
	addis	r4, r4, CONFIG_KERNEL_START@h


	/* restore MSR */
	lwz	r10, (4*0x1b)(r4)
	mtmsr	r10
	sync; isync;

	/* invalidate caches */
	mfspr	r10, SPRN_HID0
	ori	r5, r10, HID0_ICFI | HID0_DCI
	mtspr	SPRN_HID0, r5	/* invalidate caches */
	sync; isync;
	mtspr	SPRN_HID0, r10
	sync; isync;
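	/* ICFI/DCI are pulsed: HID0 is written once with the flash-
	 * invalidate bits set, then the original value is put back */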

	/* enable caches */
	lwz	r10, (4*0x18)(r4)
	mtspr	SPRN_HID0, r10	/* restore (enable caches, DPM) */
	/* ^ this has to happen after address translation is enabled in the MSR */
	sync
	isync


	/* restore 0xf0 (BDI2000) */
	lis	r3, CONFIG_KERNEL_START@h
	lwz	r10, (0x1d*4)(r4)
	stw	r10, 0xf0(r3)

	LOAD_SPRN(LR, 0x1c)
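	/* return through the LR saved in lite5200_low_power, i.e. resume
	 * in whatever called it */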


	blr
_ASM_NOKPROBE_SYMBOL(lite5200_wakeup)


/* ---------------------------------------------------------------------- */
/* boring code: helpers */

/* save registers */
#define SAVE_BAT(n, addr)		\
	SAVE_SPRN(DBAT##n##L, addr);	\
	SAVE_SPRN(DBAT##n##U, addr+1);	\
	SAVE_SPRN(IBAT##n##L, addr+2);	\
	SAVE_SPRN(IBAT##n##U, addr+3);
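/* each SAVE_BAT saves one BAT pair's four SPRs (DBATnL/U, IBATnL/U) into
 * four consecutive slots */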

#define SAVE_SR(n, addr)		\
	mfsr	r10, n;			\
	stw	r10, ((addr)*4)(r4);

#define SAVE_4SR(n, addr)	\
	SAVE_SR(n, addr);	\
	SAVE_SR(n+1, addr+1);	\
	SAVE_SR(n+2, addr+2);	\
	SAVE_SR(n+3, addr+3);

save_regs:
	stw	r0, 0(r4)
	stw	r1, 0x4(r4)
	stw	r2, 0x8(r4)
	stmw	r11, 0xc(r4) /* 0xc -> 0x5f, (0x18*4-1) */

	SAVE_SPRN(HID0, 0x18)
	SAVE_SPRN(HID1, 0x19)
	SAVE_SPRN(HID2, 0x1a)
	mfmsr	r10
	stw	r10, (4*0x1b)(r4)
	/* SAVE_SPRN(LR, 0x1c) - LR has to be saved before the bl, see lite5200_low_power */
	/* 0x1d holds the word saved from 0xf0 (BDI2000) in lite5200_low_power */
	SAVE_SPRN(RPA,   0x1e)
	SAVE_SPRN(SDR1,  0x1f)

	/* save MMU regs */
	SAVE_BAT(0, 0x20)
	SAVE_BAT(1, 0x24)
	SAVE_BAT(2, 0x28)
	SAVE_BAT(3, 0x2c)
	SAVE_BAT(4, 0x30)
	SAVE_BAT(5, 0x34)
	SAVE_BAT(6, 0x38)
	SAVE_BAT(7, 0x3c)

	SAVE_4SR(0, 0x40)
	SAVE_4SR(4, 0x44)
	SAVE_4SR(8, 0x48)
	SAVE_4SR(12, 0x4c)

	SAVE_SPRN(SPRG0, 0x50)
	SAVE_SPRN(SPRG1, 0x51)
	SAVE_SPRN(SPRG2, 0x52)
	SAVE_SPRN(SPRG3, 0x53)
	SAVE_SPRN(SPRG4, 0x54)
	SAVE_SPRN(SPRG5, 0x55)
	SAVE_SPRN(SPRG6, 0x56)
	SAVE_SPRN(SPRG7, 0x57)

	SAVE_SPRN(IABR,  0x58)
	SAVE_SPRN(DABR,  0x59)
	SAVE_SPRN(TBRL,  0x5a)
	SAVE_SPRN(TBRU,  0x5b)
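	/* the time base can only be read through TBRL/TBRU; restore_regs
	 * writes it back through TBWL/TBWU */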

	blr


/* restore registers */
#define LOAD_BAT(n, addr)		\
	LOAD_SPRN(DBAT##n##L, addr);	\
	LOAD_SPRN(DBAT##n##U, addr+1);	\
	LOAD_SPRN(IBAT##n##L, addr+2);	\
	LOAD_SPRN(IBAT##n##U, addr+3);

#define LOAD_SR(n, addr)		\
	lwz	r10, ((addr)*4)(r4);	\
	mtsr	n, r10;

#define LOAD_4SR(n, addr)	\
	LOAD_SR(n, addr);	\
	LOAD_SR(n+1, addr+1);	\
	LOAD_SR(n+2, addr+2);	\
	LOAD_SR(n+3, addr+3);

restore_regs:
	lis	r4, registers@h
	ori	r4, r4, registers@l

	/* MMU is not up yet - convert the virtual address of the registers
	 * area to physical by subtracting the kernel offset */
	subis	r4, r4, CONFIG_KERNEL_START@h

	lwz	r0, 0(r4)
	lwz	r1, 0x4(r4)
	lwz	r2, 0x8(r4)
	lmw	r11, 0xc(r4)

	/*
	 * these are a bit tricky
	 *
	 * 0x18 - HID0
	 * 0x19 - HID1
	 * 0x1a - HID2
	 * 0x1b - MSR
	 * 0x1c - LR
	 * 0x1d - the word saved from 0xf0 (BDI2000)
	 */
	LOAD_SPRN(RPA,   0x1e);
	LOAD_SPRN(SDR1,  0x1f);

	/* restore MMU regs */
	LOAD_BAT(0, 0x20)
	LOAD_BAT(1, 0x24)
	LOAD_BAT(2, 0x28)
	LOAD_BAT(3, 0x2c)
	LOAD_BAT(4, 0x30)
	LOAD_BAT(5, 0x34)
	LOAD_BAT(6, 0x38)
	LOAD_BAT(7, 0x3c)

	LOAD_4SR(0, 0x40)
	LOAD_4SR(4, 0x44)
	LOAD_4SR(8, 0x48)
	LOAD_4SR(12, 0x4c)

	/* rest of regs */
	LOAD_SPRN(SPRG0, 0x50);
	LOAD_SPRN(SPRG1, 0x51);
	LOAD_SPRN(SPRG2, 0x52);
	LOAD_SPRN(SPRG3, 0x53);
	LOAD_SPRN(SPRG4, 0x54);
	LOAD_SPRN(SPRG5, 0x55);
	LOAD_SPRN(SPRG6, 0x56);
	LOAD_SPRN(SPRG7, 0x57);

	LOAD_SPRN(IABR,  0x58);
	LOAD_SPRN(DABR,  0x59);
	LOAD_SPRN(TBWL,  0x5a);	/* these two have separate R/W regs */
	LOAD_SPRN(TBWU,  0x5b);

	blr
_ASM_NOKPROBE_SYMBOL(restore_regs)



/* cache flushing code. copied from arch/ppc/boot/util.S */
#define NUM_CACHE_LINES (128*8)
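/* 128*8 lines of L1_CACHE_BYTES (32 bytes here) = 32 kB of reads -
 * presumably twice the 16 kB data cache of the MPC5200's 603e core,
 * enough to displace every dirty line */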

/*
 * Flush data cache
 * Do this by just reading lots of stuff into the cache.
 */
flush_data_cache:
	lis	r3,CONFIG_KERNEL_START@h
	ori	r3,r3,CONFIG_KERNEL_START@l
	li	r4,NUM_CACHE_LINES
	mtctr	r4
1:
	lwz	r4,0(r3)
	addi	r3,r3,L1_CACHE_BYTES	/* Next line, please */
	bdnz	1b
	blr
416