/*
 * Low level PM code for TI EMIF
 *
 * Copyright (C) 2016-2017 Texas Instruments Incorporated - http://www.ti.com/
 *	Dave Gerlach
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation version 2.
 *
 * This program is distributed "as is" WITHOUT ANY WARRANTY of any
 * kind, whether express or implied; without even the implied warranty
 * of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

#include <linux/linkage.h>
#include <asm/assembler.h>
#include <asm/memory.h>

#include "emif.h"
#include "ti-emif-asm-offsets.h"

#define EMIF_POWER_MGMT_WAIT_SELF_REFRESH_8192_CYCLES	0x00a0
#define EMIF_POWER_MGMT_SR_TIMER_MASK			0x00f0
#define EMIF_POWER_MGMT_SELF_REFRESH_MODE		0x0200
#define EMIF_POWER_MGMT_SELF_REFRESH_MODE_MASK		0x0700

#define EMIF_SDCFG_TYPE_DDR2				(0x2 << SDRAM_TYPE_SHIFT)
#define EMIF_SDCFG_TYPE_DDR3				(0x3 << SDRAM_TYPE_SHIFT)
#define EMIF_STATUS_READY				0x4

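/* Byte length of the EXT PHY CTRL register block copied by the loops below */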
#define AM43XX_EMIF_PHY_CTRL_REG_COUNT			0x120

#define EMIF_AM437X_REGISTERS				0x1

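/*
 * The routines below are intended to be copied to and run from local
 * memory (see ti_emif_sram and ti_emif_sram_sz), so they use only
 * PC-relative addressing. The EMIF base addresses and the register
 * save area are fetched at run time from ti_emif_pm_sram_data.
 */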
	.arm
	.align 3

ENTRY(ti_emif_sram)

/*
 * void ti_emif_save_context(void)
 *
 * Used during suspend to save the context of all required EMIF registers
 * to local memory if the EMIF is going to lose context during the sleep
 * transition. Operates on the VIRTUAL address of the EMIF.
 */
ENTRY(ti_emif_save_context)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

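	/* Fetch the EMIF virtual base (r0) and the context save area (r2) */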
	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_VIRT_OFFSET]
	ldr	r2, [r4, #EMIF_PM_REGS_VIRT_OFFSET]

	/* Save EMIF configuration */
	ldr	r1, [r0, #EMIF_SDRAM_CONFIG]
	str	r1, [r2, #EMIF_SDCFG_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_SDRAM_REFRESH_CONTROL]
	str	r1, [r2, #EMIF_REF_CTRL_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_SDRAM_TIMING_1]
	str	r1, [r2, #EMIF_TIMING1_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_SDRAM_TIMING_2]
	str	r1, [r2, #EMIF_TIMING2_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_SDRAM_TIMING_3]
	str	r1, [r2, #EMIF_TIMING3_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]
	str	r1, [r2, #EMIF_PMCR_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_POWER_MANAGEMENT_CTRL_SHDW]
	str	r1, [r2, #EMIF_PMCR_SHDW_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_SDRAM_OUTPUT_IMPEDANCE_CALIBRATION_CONFIG]
	str	r1, [r2, #EMIF_ZQCFG_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_DDR_PHY_CTRL_1]
	str	r1, [r2, #EMIF_DDR_PHY_CTLR_1_OFFSET]

	ldr	r1, [r0, #EMIF_COS_CONFIG]
	str	r1, [r2, #EMIF_COS_CONFIG_OFFSET]

	ldr	r1, [r0, #EMIF_PRIORITY_TO_CLASS_OF_SERVICE_MAPPING]
	str	r1, [r2, #EMIF_PRIORITY_TO_COS_MAPPING_OFFSET]

	ldr	r1, [r0, #EMIF_CONNECTION_ID_TO_CLASS_OF_SERVICE_1_MAPPING]
	str	r1, [r2, #EMIF_CONNECT_ID_SERV_1_MAP_OFFSET]

	ldr	r1, [r0, #EMIF_CONNECTION_ID_TO_CLASS_OF_SERVICE_2_MAPPING]
	str	r1, [r2, #EMIF_CONNECT_ID_SERV_2_MAP_OFFSET]

	ldr	r1, [r0, #EMIF_OCP_CONFIG]
	str	r1, [r2, #EMIF_OCP_CONFIG_VAL_OFFSET]

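	/*
	 * Additional registers are present only with the AM43 register
	 * layout; skip saving them otherwise.
	 */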
	ldr	r5, [r4, #EMIF_PM_CONFIG_OFFSET]
	cmp	r5, #EMIF_SRAM_AM43_REG_LAYOUT
	bne	emif_skip_save_extra_regs

	ldr	r1, [r0, #EMIF_READ_WRITE_LEVELING_RAMP_CONTROL]
	str	r1, [r2, #EMIF_RD_WR_LEVEL_RAMP_CTRL_OFFSET]

	ldr	r1, [r0, #EMIF_READ_WRITE_EXECUTION_THRESHOLD]
	str	r1, [r2, #EMIF_RD_WR_EXEC_THRESH_OFFSET]

	ldr	r1, [r0, #EMIF_LPDDR2_NVM_TIMING]
	str	r1, [r2, #EMIF_LPDDR2_NVM_TIM_OFFSET]

	ldr	r1, [r0, #EMIF_LPDDR2_NVM_TIMING_SHDW]
	str	r1, [r2, #EMIF_LPDDR2_NVM_TIM_SHDW_OFFSET]

	ldr	r1, [r0, #EMIF_DLL_CALIB_CTRL]
	str	r1, [r2, #EMIF_DLL_CALIB_CTRL_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_DLL_CALIB_CTRL_SHDW]
	str	r1, [r2, #EMIF_DLL_CALIB_CTRL_VAL_SHDW_OFFSET]

	/* Loop and save entire block of emif phy regs */
	mov	r5, #0x0
	add	r4, r2, #EMIF_EXT_PHY_CTRL_VALS_OFFSET
	add	r3, r0, #EMIF_EXT_PHY_CTRL_1
ddr_phy_ctrl_save:
	ldr	r1, [r3, r5]
	str	r1, [r4, r5]
	add	r5, r5, #0x4
	cmp	r5, #AM43XX_EMIF_PHY_CTRL_REG_COUNT
	bne	ddr_phy_ctrl_save

emif_skip_save_extra_regs:
	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(ti_emif_save_context)

/*
 * void ti_emif_restore_context(void)
 *
 * Used during resume to restore the context of all required EMIF registers
 * from local memory after the EMIF has lost context during a sleep transition.
 * Operates on the PHYSICAL address of the EMIF.
 */
ENTRY(ti_emif_restore_context)
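	/* Fetch the EMIF physical base (r0) and the saved context area (r2) */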
	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_PHYS_OFFSET]
	ldr	r2, [r4, #EMIF_PM_REGS_PHYS_OFFSET]

	/* Config EMIF Timings */
	ldr	r1, [r2, #EMIF_DDR_PHY_CTLR_1_OFFSET]
	str	r1, [r0, #EMIF_DDR_PHY_CTRL_1]
	str	r1, [r0, #EMIF_DDR_PHY_CTRL_1_SHDW]

	ldr	r1, [r2, #EMIF_TIMING1_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_TIMING_1]
	str	r1, [r0, #EMIF_SDRAM_TIMING_1_SHDW]

	ldr	r1, [r2, #EMIF_TIMING2_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_TIMING_2]
	str	r1, [r0, #EMIF_SDRAM_TIMING_2_SHDW]

	ldr	r1, [r2, #EMIF_TIMING3_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_TIMING_3]
	str	r1, [r0, #EMIF_SDRAM_TIMING_3_SHDW]

	ldr	r1, [r2, #EMIF_REF_CTRL_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_REFRESH_CONTROL]
	str	r1, [r0, #EMIF_SDRAM_REFRESH_CTRL_SHDW]

	ldr	r1, [r2, #EMIF_PMCR_VAL_OFFSET]
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]

	ldr	r1, [r2, #EMIF_PMCR_SHDW_VAL_OFFSET]
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CTRL_SHDW]

	ldr	r1, [r2, #EMIF_COS_CONFIG_OFFSET]
	str	r1, [r0, #EMIF_COS_CONFIG]

	ldr	r1, [r2, #EMIF_PRIORITY_TO_COS_MAPPING_OFFSET]
	str	r1, [r0, #EMIF_PRIORITY_TO_CLASS_OF_SERVICE_MAPPING]

	ldr	r1, [r2, #EMIF_CONNECT_ID_SERV_1_MAP_OFFSET]
	str	r1, [r0, #EMIF_CONNECTION_ID_TO_CLASS_OF_SERVICE_1_MAPPING]

	ldr	r1, [r2, #EMIF_CONNECT_ID_SERV_2_MAP_OFFSET]
	str	r1, [r0, #EMIF_CONNECTION_ID_TO_CLASS_OF_SERVICE_2_MAPPING]

	ldr	r1, [r2, #EMIF_OCP_CONFIG_VAL_OFFSET]
	str	r1, [r0, #EMIF_OCP_CONFIG]

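	/*
	 * Additional registers are present only with the AM43 register
	 * layout; skip restoring them otherwise.
	 */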
	ldr	r5, [r4, #EMIF_PM_CONFIG_OFFSET]
	cmp	r5, #EMIF_SRAM_AM43_REG_LAYOUT
	bne	emif_skip_restore_extra_regs

	ldr	r1, [r2, #EMIF_RD_WR_LEVEL_RAMP_CTRL_OFFSET]
	str	r1, [r0, #EMIF_READ_WRITE_LEVELING_RAMP_CONTROL]

	ldr	r1, [r2, #EMIF_RD_WR_EXEC_THRESH_OFFSET]
	str	r1, [r0, #EMIF_READ_WRITE_EXECUTION_THRESHOLD]

	ldr	r1, [r2, #EMIF_LPDDR2_NVM_TIM_OFFSET]
	str	r1, [r0, #EMIF_LPDDR2_NVM_TIMING]

	ldr	r1, [r2, #EMIF_LPDDR2_NVM_TIM_SHDW_OFFSET]
	str	r1, [r0, #EMIF_LPDDR2_NVM_TIMING_SHDW]

	ldr	r1, [r2, #EMIF_DLL_CALIB_CTRL_VAL_OFFSET]
	str	r1, [r0, #EMIF_DLL_CALIB_CTRL]

	ldr	r1, [r2, #EMIF_DLL_CALIB_CTRL_VAL_SHDW_OFFSET]
	str	r1, [r0, #EMIF_DLL_CALIB_CTRL_SHDW]

	ldr	r1, [r2, #EMIF_ZQCFG_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_OUTPUT_IMPEDANCE_CALIBRATION_CONFIG]

	/* Loop and restore entire block of emif phy regs */
	mov	r5, #0x0
	/*
	 * Load ti_emif_regs_amx3 + EMIF_EXT_PHY_CTRL_VALS_OFFSET for the
	 * address of the phy register save space
	 */
	add	r3, r2, #EMIF_EXT_PHY_CTRL_VALS_OFFSET
	add	r4, r0, #EMIF_EXT_PHY_CTRL_1
ddr_phy_ctrl_restore:
	ldr	r1, [r3, r5]
	str	r1, [r4, r5]
	add	r5, r5, #0x4
	cmp	r5, #AM43XX_EMIF_PHY_CTRL_REG_COUNT
	bne	ddr_phy_ctrl_restore

emif_skip_restore_extra_regs:
	/*
	 * Output impedance calibration is needed only for DDR3, but since
	 * its initial state is disabled for DDR2, there is no harm in
	 * restoring the old configuration.
	 */
	ldr	r1, [r2, #EMIF_ZQCFG_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_OUTPUT_IMPEDANCE_CALIBRATION_CONFIG]

	/* Write to sdcfg last, and only for DDR2 */
	ldr	r1, [r2, #EMIF_SDCFG_VAL_OFFSET]
	and	r2, r1, #SDRAM_TYPE_MASK
	cmp	r2, #EMIF_SDCFG_TYPE_DDR2
	streq	r1, [r0, #EMIF_SDRAM_CONFIG]

	mov	pc, lr
ENDPROC(ti_emif_restore_context)

/*
 * void ti_emif_run_hw_leveling(void)
 *
 * Used during resume to run hardware leveling again and restore the
 * configuration of the EMIF PHY, only for DDR3.
 */
ENTRY(ti_emif_run_hw_leveling)
	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_PHYS_OFFSET]

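	/* Hardware leveling is run for DDR3 only; skip other SDRAM types */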
	ldr	r3, [r0, #EMIF_READ_WRITE_LEVELING_CONTROL]
	orr	r3, r3, #RDWRLVLFULL_START
	ldr	r2, [r0, #EMIF_SDRAM_CONFIG]
	and	r2, r2, #SDRAM_TYPE_MASK
	cmp	r2, #EMIF_SDCFG_TYPE_DDR3
	bne	skip_hwlvl

	str	r3, [r0, #EMIF_READ_WRITE_LEVELING_CONTROL]

	/*
	 * If EMIF registers are touched during the initial stage of the HW
	 * leveling sequence, an L3 NOC timeout error is raised because the
	 * EMIF does not respond. This is not fatal, but it is avoidable:
	 * this small wait loop gives the condition enough time to clear,
	 * even in the worst case of the CPU running at its maximum speed
	 * of 1 GHz.
	 */
	mov	r2, #0x2000
1:
	subs	r2, r2, #0x1
	bne	1b

	/* Bit clears when operation is complete */
2:	ldr	r1, [r0, #EMIF_READ_WRITE_LEVELING_CONTROL]
	tst	r1, #RDWRLVLFULL_START
	bne	2b

skip_hwlvl:
	mov	pc, lr
ENDPROC(ti_emif_run_hw_leveling)

/*
 * void ti_emif_enter_sr(void)
 *
 * Programs the EMIF to tell the SDRAM to enter into self-refresh
 * mode during a sleep transition. Operates on the VIRTUAL address
 * of the EMIF.
 */
ENTRY(ti_emif_enter_sr)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_VIRT_OFFSET]
	ldr	r2, [r4, #EMIF_PM_REGS_VIRT_OFFSET]

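	/* Select self-refresh as the EMIF low-power mode in PWR_MGMT_CTRL */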
	ldr	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]
	bic	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE_MASK
	orr	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]

	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(ti_emif_enter_sr)

/*
 * void ti_emif_exit_sr(void)
 *
 * Programs the EMIF to tell the SDRAM to exit self-refresh mode
 * after a sleep transition. Operates on the PHYSICAL address of
 * the EMIF.
 */
ENTRY(ti_emif_exit_sr)
	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_PHYS_OFFSET]
	ldr	r2, [r4, #EMIF_PM_REGS_PHYS_OFFSET]

	/*
	 * Toggle the EMIF out of self-refresh:
	 * If the EMIF lost context, PWR_MGT_CTRL currently reads 0, so
	 * writing disable (0x0) alone would have no effect. Instead, toggle
	 * from self-refresh (0x2) to disable (0x0) here.
	 * If the EMIF did not lose context, nothing breaks, as the same
	 * value (0x2) is written to the register before the disable (0x0).
	 */
	ldr	r1, [r2, #EMIF_PMCR_VAL_OFFSET]
	bic	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE_MASK
	orr	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]
	bic	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE_MASK
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]

	/* Wait for EMIF to become ready */
1:	ldr	r1, [r0, #EMIF_STATUS]
	tst	r1, #EMIF_STATUS_READY
	beq	1b

	mov	pc, lr
ENDPROC(ti_emif_exit_sr)

/*
 * void ti_emif_abort_sr(void)
 *
 * Disables self-refresh after a failed transition to a low-power
 * state so the kernel can jump back to DDR and follow the abort path.
 * Operates on the VIRTUAL address of the EMIF.
 */
ENTRY(ti_emif_abort_sr)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_VIRT_OFFSET]
	ldr	r2, [r4, #EMIF_PM_REGS_VIRT_OFFSET]

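	/* Restore saved PWR_MGMT_CTRL with the self-refresh mode field cleared */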
	ldr	r1, [r2, #EMIF_PMCR_VAL_OFFSET]
	bic	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE_MASK
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]

	/* Wait for EMIF to become ready */
1:	ldr	r1, [r0, #EMIF_STATUS]
	tst	r1, #EMIF_STATUS_READY
	beq	1b

	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(ti_emif_abort_sr)

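/*
 * Data area shared with the rest of the driver: the EMIF base addresses
 * (virtual and physical), pointers to the register save area, and the
 * configuration word checked above.
 */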
	.align 3
ENTRY(ti_emif_pm_sram_data)
	.space EMIF_PM_DATA_SIZE
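/* Size of the region to be copied, measured from ti_emif_save_context */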
ENTRY(ti_emif_sram_sz)
	.word	. - ti_emif_save_context