/*
 * Copyright 2021-2024 NXP
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <assert.h>
#include <stdbool.h>

#include <arch_helpers.h>
#include <bl31/interrupt_mgmt.h>
#include <common/runtime_svc.h>
#include <lib/mmio.h>
#include <lib/spinlock.h>
#include <plat/common/platform.h>

#include <platform_def.h>

#include <dram.h>
#include <upower_api.h>

#define PHY_FREQ_SEL_INDEX(x)		((x) << 16)
#define PHY_FREQ_MULTICAST_EN(x)	((x) << 8)
#define DENALI_PHY_1537			U(0x5804)

#define IMX_DDRC_BASE			U(0x2E060000)
#define SAVED_DRAM_DATA_BASE		U(0x20055000)
#define DENALI_CTL_143			U(0x23C)
#define DENALI_CTL_144			U(0x240)
#define DENALI_CTL_146			U(0x248)
#define LP_STATE_CS_IDLE		U(0x404000)
#define LP_STATE_CS_PD_CG		U(0x4F4F00)
#define LPI_WAKEUP_EN_SHIFT		U(8)
#define IMX_LPAV_SIM_BASE		0x2DA50000
#define LPDDR_CTRL			0x14
#define LPDDR_AUTO_LP_MODE_DISABLE	BIT(24)
#define SOC_LP_CMD_SHIFT		U(15)
#define LPDDR_CTRL2			0x18
#define LPDDR_EN_CLKGATE		(0x1 << 17)
#define LPDDR_MAX_CLKDIV_EN		(0x1 << 16)
#define LP_AUTO_ENTRY_EN		0x4
#define LP_AUTO_EXIT_EN			0xF

#define DENALI_CTL_00			U(0x0)
#define DENALI_CTL_23			U(0x5c)
#define DFIBUS_FREQ_INIT_SHIFT		U(24)
#define TSREF2PHYMSTR_SHIFT		U(8)
#define TSREF2PHYMSTR_MASK		GENMASK(13, 8)

#define DENALI_CTL_24			U(0x60)
#define DENALI_CTL_25			U(0x64)

#define DENALI_CTL_93			U(0x174)
#define PWRUP_SREFRESH_EXIT		BIT(0)

#define DENALI_CTL_127				U(0x1fc)
#define PHYMSTR_TRAIN_AFTER_INIT_COMPLETE	BIT(16)

#define DENALI_CTL_147			U(0x24c)
#define DENALI_CTL_153			U(0x264)
#define PCPCS_PD_EN			BIT(8)

#define DENALI_CTL_249			U(0x3E4)
#define DENALI_CTL_266			U(0x428)

#define DENALI_PHY_1547			U(0x582c)
#define PHY_LP4_BOOT_DISABLE		BIT(8)

#define DENALI_PHY_1559			U(0x585c)
#define DENALI_PHY_1590			U(0x58D8)

#define DENALI_PI_00			U(0x2000)
#define DENALI_PI_04			U(0x2010)
#define DENALI_PI_52			U(0x20D0)
#define DENALI_PI_26			U(0x2068)
#define DENALI_PI_33			U(0x2084)
#define DENALI_PI_65			U(0x2104)
#define DENALI_PI_77			U(0x2134)
#define DENALI_PI_134			U(0x2218)
#define DENALI_PI_131			U(0x220C)
#define DENALI_PI_132			U(0x2210)
#define DENALI_PI_137			U(0x2224)
#define DENALI_PI_174			U(0x22B8)
#define DENALI_PI_175			U(0x22BC)
#define DENALI_PI_181			U(0x22D4)
#define DENALI_PI_182			U(0x22D8)
#define DENALI_PI_191			U(0x22FC)
#define DENALI_PI_192			U(0x2300)
#define DENALI_PI_212			U(0x2350)
#define DENALI_PI_214			U(0x2358)
#define DENALI_PI_217			U(0x2364)

#define LPDDR3_TYPE	U(0x7)
#define LPDDR4_TYPE	U(0xB)
extern void upower_wait_resp(void);

struct dram_cfg_param {
	uint32_t reg;
	uint32_t val;
};

struct dram_timing_info {
	/* ddr controller config */
	struct dram_cfg_param *ctl_cfg;
	unsigned int ctl_cfg_num;
	/* pi config */
	struct dram_cfg_param *pi_cfg;
	unsigned int pi_cfg_num;
	/* phy freq1 config */
	struct dram_cfg_param *phy_f1_cfg;
	unsigned int phy_f1_cfg_num;
	/* phy freq2 config */
	struct dram_cfg_param *phy_f2_cfg;
	unsigned int phy_f2_cfg_num;
	/* automatic low power config */
	struct dram_cfg_param *auto_lp_cfg;
	unsigned int auto_lp_cfg_num;
	/* initialized drate table */
	unsigned int fsp_table[3];
};

#define CTL_NUM		U(680)
#define PI_NUM		U(298)
#define PHY_NUM		U(1654)
#define PHY_DIFF_NUM	U(49)
#define AUTO_LP_NUM	U(3)
struct dram_cfg {
	uint32_t ctl_cfg[CTL_NUM];
	uint32_t pi_cfg[PI_NUM];
	uint32_t phy_full[PHY_NUM];
	uint32_t phy_diff[PHY_DIFF_NUM];
	uint32_t auto_lp_cfg[AUTO_LP_NUM];
};

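/*
 * 'info' points to the dram_timing_info header at SAVED_DRAM_DATA_BASE and
 * 'dram_timing_cfg' to the raw register snapshot (struct dram_cfg) stored
 * right behind it; see dram_enter_retention() and ddr_init() below.
 */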
struct dram_timing_info *info;
struct dram_cfg *dram_timing_cfg;

/* mark if dram cfg is already saved */
static bool dram_cfg_saved;
static bool dram_auto_lp_true;
static uint32_t dram_class, dram_ctl_143;

/* PHY register index for frequency diff */
uint32_t freq_specific_reg_array[PHY_DIFF_NUM] = {
	90, 92, 93, 96, 97, 100, 101, 102, 103, 104, 114,
	346, 348, 349, 352, 353, 356, 357, 358, 359, 360,
	370, 602, 604, 605, 608, 609, 612, 613, 614, 615,
	616, 626, 858, 860, 861, 864, 865, 868, 869, 870,
	871, 872, 882, 1063, 1319, 1566, 1624, 1625
};

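/*
 * DDR DVFS synchronization: the primary core runs the frequency switch in
 * dram_dvfs_handler() while every other online core parks in waiting_dvfs();
 * 'core_count' (protected by 'dfs_lock') tracks how many cores have parked
 * and 'in_progress' releases them once the switch is done.
 */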
/* lock used for DDR DVFS */
spinlock_t dfs_lock;
static volatile uint32_t core_count;
static volatile bool in_progress;
static volatile bool sys_dvfs;
static int num_fsp;

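/*
 * Reload the saved DDRC, PI and PHY register images after the LPDDR
 * controller has been through retention (power gated), using the snapshot
 * captured in dram_enter_retention().
 */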
static void ddr_init(void)
{
	unsigned int i;

	/* restore the ddr ctl config */
	for (i = 0U; i < CTL_NUM; i++) {
		mmio_write_32(IMX_DDRC_BASE + i * 4, dram_timing_cfg->ctl_cfg[i]);
	}

	/* load the PI registers */
	for (i = 0U; i < PI_NUM; i++) {
		mmio_write_32(IMX_DDRC_BASE + 0x2000 + i * 4, dram_timing_cfg->pi_cfg[i]);
	}

	/* restore all PHY registers for all the fsp. */
	mmio_write_32(IMX_DDRC_BASE + DENALI_PHY_1537, PHY_FREQ_MULTICAST_EN(1));
	/* restore all the phy configs */
	for (i = 0U; i < PHY_NUM; i++) {
		/* skip the reserved registers space */
		if (i >= 121U && i <= 255U) {
			continue;
		}
		if (i >= 377U && i <= 511U) {
			continue;
		}
		if (i >= 633U && i <= 767U) {
			continue;
		}
		if (i >= 889U && i <= 1023U) {
			continue;
		}
		if (i >= 1065U && i <= 1279U) {
			continue;
		}
		if (i >= 1321U && i <= 1535U) {
			continue;
		}
		mmio_write_32(IMX_DDRC_BASE + 0x4000 + i * 4, dram_timing_cfg->phy_full[i]);
	}

	if (dram_class == LPDDR4_TYPE) {
		/* restore only the diff. */
		mmio_write_32(IMX_DDRC_BASE + DENALI_PHY_1537, 0x0);
		for (i = 0U; i < PHY_DIFF_NUM; i++) {
			mmio_write_32(IMX_DDRC_BASE + 0x4000 + freq_specific_reg_array[i] * 4,
				      dram_timing_cfg->phy_diff[i]);
		}
	}

	/* Re-enable MULTICAST mode */
	mmio_write_32(IMX_DDRC_BASE + DENALI_PHY_1537, PHY_FREQ_MULTICAST_EN(1));
}

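/*
 * Temporarily take the DDRC out of its automatic low-power mode (and out of
 * self-refresh) before touching the controller, saving the auto low-power
 * related registers so dram_lp_auto_enable() can restore them afterwards.
 */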
void dram_lp_auto_disable(void)
{
	uint32_t lp_auto_en;

	dram_timing_cfg = (struct dram_cfg *)(SAVED_DRAM_DATA_BASE +
					      sizeof(struct dram_timing_info));
	lp_auto_en = (mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_146) & (LP_AUTO_ENTRY_EN << 24));
	/* Save initial config */
	dram_ctl_143 = mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_143);

	if (lp_auto_en && !dram_auto_lp_true) {
		/* 0.a Save DDRC auto low-power mode parameter */
		dram_timing_cfg->auto_lp_cfg[0] = mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_144);
		dram_timing_cfg->auto_lp_cfg[1] = mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_147);
		dram_timing_cfg->auto_lp_cfg[2] = mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_146);
		/* Set LPI_SRPD_LONG_MCCLK_GATE_WAKEUP_F2 to Maximum */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_CTL_143, 0xF << 24);
		/* 0.b Disable DDRC auto low-power mode interface */
		mmio_clrbits_32(IMX_DDRC_BASE + DENALI_CTL_146, LP_AUTO_ENTRY_EN << 24);
		/* 0.c Read any location to get DRAM out of Self-refresh */
		mmio_read_32(DEVICE2_BASE);
		/* 0.d Confirm DRAM is out of Self-refresh */
		while ((mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_146) &
			LP_STATE_CS_PD_CG) != LP_STATE_CS_IDLE) {
			;
		}
		/* 0.e Disable DDRC auto low-power exit */
		mmio_clrbits_32(IMX_DDRC_BASE + DENALI_CTL_147, LP_AUTO_EXIT_EN);
		/* dram low power mode flag */
		dram_auto_lp_true = true;
	}
}

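/*
 * Restore the DDRC auto low-power configuration saved by
 * dram_lp_auto_disable() once the controller access is finished.
 */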
void dram_lp_auto_enable(void)
{
	/* Switch back to Auto Low-power mode */
	if (dram_auto_lp_true) {
		/* 12.a Confirm DRAM is out of Self-refresh */
		while ((mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_146) &
			LP_STATE_CS_PD_CG) != LP_STATE_CS_IDLE) {
			;
		}
		/* 12.b Enable DDRC auto low-power exit */
		/*
		 * 12.c TBC! : Set DENALI_CTL_144 [LPI_CTRL_REQ_EN[24]] and
		 * [DFI_LP_VERSION[16]] back to default settings = 1b'1.
		 */
		/*
		 * 12.d Reconfigure DENALI_CTL_144 [LPI_WAKEUP_EN[5:0]] bit
		 * LPI_WAKEUP_EN[3] = 1b'1.
		 */
		mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_144, dram_timing_cfg->auto_lp_cfg[0]);
		mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_147, dram_timing_cfg->auto_lp_cfg[1]);
		/* 12.e Re-enable DDRC auto low-power mode interface */
		mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_146, dram_timing_cfg->auto_lp_cfg[2]);
		/* restore ctl config */
		mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_143, dram_ctl_143);
		/* dram low power mode flag */
		dram_auto_lp_true = false;
	}
}

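/*
 * Prepare the LPDDR for a low power state: route the hardware handshake so
 * the DRAM falls into long self-refresh with both the controller and the
 * DRAM clock gated.
 */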
void dram_enter_self_refresh(void)
{
	/* disable auto low power interface */
	dram_lp_auto_disable();
	/* 1. config the PCC_LPDDR4[SSADO] to 2b'11 for ACK domain 0/1's STOP */
	mmio_setbits_32(IMX_PCC5_BASE + 0x108, 0x2 << 22);
	/* 1.a Clock gate PCC_LPDDR4[CGC] and no software reset PCC_LPDDR4[SWRST] */
	mmio_setbits_32(IMX_PCC5_BASE + 0x108, (BIT(30) | BIT(28)));

	/*
	 * 2. Make sure the DENALI_CTL_144[LPI_WAKEUP_EN[5:0]] has the bit
	 * LPI_WAKEUP_EN[3] = 1b'1. This enables the option 'self-refresh
	 * long with mem and ctlr clk gating or self-refresh power-down long
	 * with mem and ctlr clk gating'
	 */
	mmio_setbits_32(IMX_DDRC_BASE + DENALI_CTL_144, BIT(3) << LPI_WAKEUP_EN_SHIFT);
	/* TODO: Needed ? 2.a DENALI_CTL_144[LPI_TIMER_WAKEUP_F2] */
	//mmio_setbits_32(IMX_DDRC_BASE + DENALI_CTL_144, BIT(0));

	/*
	 * 3a. Config SIM_LPAV LPDDR_CTRL[LPDDR_AUTO_LP_MODE_DISABLE] to 1b'0 (enable
	 * the logic that automatically handles low power entry/exit. This is the
	 * recommended option over handling through software).
	 * 3b. Config the SIM_LPAV LPDDR_CTRL[SOC_LP_CMD] to 6b'101001 (encoding for
	 * self_refresh with both DDR controller and DRAM clock gated. This is
	 * mandatory since the LPDDR logic will be power gated).
	 */
	mmio_clrbits_32(IMX_LPAV_SIM_BASE + LPDDR_CTRL, LPDDR_AUTO_LP_MODE_DISABLE);
	mmio_clrsetbits_32(IMX_LPAV_SIM_BASE + LPDDR_CTRL,
			   0x3f << SOC_LP_CMD_SHIFT, 0x29 << SOC_LP_CMD_SHIFT);
	/* 3.c clock gate ddr controller */
	mmio_setbits_32(IMX_LPAV_SIM_BASE + LPDDR_CTRL2, LPDDR_EN_CLKGATE);
	/* 3.d lpddr max clk div en */
	mmio_clrbits_32(IMX_LPAV_SIM_BASE + LPDDR_CTRL2, LPDDR_MAX_CLKDIV_EN);
}

void dram_exit_self_refresh(void)
{
	dram_lp_auto_enable();
}

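/*
 * Prepare the LPDDR for retention: route the STOP handshake, force long
 * self-refresh with clock gating, and (once per boot) snapshot the CTL, PI
 * and PHY registers into the save area so ddr_init() can restore them on
 * exit from retention.
 */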
void dram_enter_retention(void)
{
	unsigned int i;

	dram_lp_auto_disable();

	/* 1. config the PCC_LPDDR4[SSADO] to 2b'11 for ACK domain 0/1's STOP */
	mmio_setbits_32(IMX_PCC5_BASE + 0x108, 0x2 << 22);

	/*
	 * 2. Make sure the DENALI_CTL_144[LPI_WAKEUP_EN[5:0]] has the bit
	 * LPI_WAKEUP_EN[3] = 1b'1. This enables the option 'self-refresh
	 * long with mem and ctlr clk gating or self-refresh power-down
	 * long with mem and ctlr clk gating'
	 */
	mmio_setbits_32(IMX_DDRC_BASE + DENALI_CTL_144, BIT(3) << LPI_WAKEUP_EN_SHIFT);

	/*
	 * 3a. Config SIM_LPAV LPDDR_CTRL[LPDDR_AUTO_LP_MODE_DISABLE] to 1b'0 (enable
	 * the logic that automatically handles low power entry/exit. This is the
	 * recommended option over handling through software).
	 * 3b. Config the SIM_LPAV LPDDR_CTRL[SOC_LP_CMD] to 6b'101001 (encoding for
	 * self_refresh with both DDR controller and DRAM clock gated. This is
	 * mandatory since the LPDDR logic will be power gated).
	 */
	mmio_clrbits_32(IMX_LPAV_SIM_BASE + LPDDR_CTRL, LPDDR_AUTO_LP_MODE_DISABLE);
	mmio_clrsetbits_32(IMX_LPAV_SIM_BASE + LPDDR_CTRL,
			   0x3f << SOC_LP_CMD_SHIFT, 0x29 << SOC_LP_CMD_SHIFT);

	/*
	 * Save DDR Controller & PHY config.
	 * Set PHY_FREQ_SEL_MULTICAST_EN=0 & PHY_FREQ_SEL_INDEX=1. Read and store all
	 * the PHY registers for F2 into phy_f1_cfg, then read/store the diff between
	 * F1 & F2 into phy_f2_cfg.
	 */
	if (!dram_cfg_saved) {
		info = (struct dram_timing_info *)SAVED_DRAM_DATA_BASE;
		dram_timing_cfg = (struct dram_cfg *)(SAVED_DRAM_DATA_BASE +
					sizeof(struct dram_timing_info));

		/* get the dram type */
		dram_class = mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_00);
		dram_class = (dram_class >> 8) & 0xf;

		/* save the ctl registers */
		for (i = 0U; i < CTL_NUM; i++) {
			dram_timing_cfg->ctl_cfg[i] = mmio_read_32(IMX_DDRC_BASE + i * 4);
		}
		dram_timing_cfg->ctl_cfg[0] = dram_timing_cfg->ctl_cfg[0] & 0xFFFFFFFE;

		/* save the PI registers */
		for (i = 0U; i < PI_NUM; i++) {
			dram_timing_cfg->pi_cfg[i] = mmio_read_32(IMX_DDRC_BASE + 0x2000 + i * 4);
		}
		dram_timing_cfg->pi_cfg[0] = dram_timing_cfg->pi_cfg[0] & 0xFFFFFFFE;

		/*
		 * Read and store all PHY registers. The full array is a complete
		 * copy for all the setpoints.
		 */
		if (dram_class == LPDDR4_TYPE) {
			mmio_write_32(IMX_DDRC_BASE + DENALI_PHY_1537, PHY_FREQ_SEL_INDEX(1));
			for (i = 0U; i < PHY_NUM; i++) {
				/* Make sure MULTICAST is enabled */
				if (i == 1537U) {
					dram_timing_cfg->phy_full[i] = 0x100;
				} else {
					dram_timing_cfg->phy_full[i] = mmio_read_32(IMX_DDRC_BASE + 0x4000 + i * 4);
				}
			}

			/*
			 * set PHY_FREQ_SEL_MULTICAST_EN=0 & PHY_FREQ_SEL_INDEX=0.
			 * Read and store only the diff.
			 */
			mmio_write_32(IMX_DDRC_BASE + DENALI_PHY_1537, 0x0);
			/* save only the frequency based diff config to save memory */
			for (i = 0U; i < PHY_DIFF_NUM; i++) {
				dram_timing_cfg->phy_diff[i] = mmio_read_32(IMX_DDRC_BASE + 0x4000 +
									    freq_specific_reg_array[i] * 4);
			}
		} else {
			/* LPDDR3: only the F1 setpoint needs to be saved */
			for (i = 0U; i < info->phy_f1_cfg_num; i++) {
				info->phy_f1_cfg[i].val = mmio_read_32(info->phy_f1_cfg[i].reg);
			}
		}

		dram_cfg_saved = true;
	}
}

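/*
 * Bring the LPDDR back from retention: ungate/unreset the controller,
 * reload the saved register images, re-run the PI/CTL start sequence and
 * the software trainings, then hand auto low-power control back.
 */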
void dram_exit_retention(void)
{
	uint32_t val;

	/* 1. Config the LPAV PLL4 and DDR clock for the desired LPDDR operating frequency. */
	mmio_setbits_32(IMX_PCC5_BASE + 0x108, BIT(30));

	/* 2. Write PCC5.PCC_LPDDR4[SWRST] to 1b'1 to release LPDDR from reset. */
	mmio_setbits_32(IMX_PCC5_BASE + 0x108, BIT(28));

	/* 3. Reload the LPDDR CTL/PI/PHY register */
	ddr_init();

	if (dram_class == LPDDR4_TYPE) {
		/* 4a. FIXME Set PHY_SET_DFI_INPUT_N parameters to 4'h1. LPDDR4 only */
		mmio_write_32(IMX_DDRC_BASE + DENALI_PHY_1559, 0x01010101);

		/*
		 * 4b. CTL PWRUP_SREFRESH_EXIT=1'b0 for disabling self refresh exit
		 * from controller.
		 */
		/*
		 * 4c. PI_PWRUP_SELF_REF_EXIT=1, PI_MC_PWRUP_SELF_REF_EXIT=0 for enabling
		 * self refresh exit from PI
		 */
		/* 4c. PI_INT_LVL_EN=0 to skip Initialization trainings. */
		/*
		 * 4d. PI_WRLVL_EN_F0/1/2= PI_CALVL_EN_F0/1/2= PI_RDLVL_EN_F0/1/2=
		 * PI_RDLVL_GATE_EN_F0/1/2= PI_WDQLVL_EN_F0/1/2=0x2.
		 * Enable non initialization trainings.
		 */
		/* 4e. PI_PWRUP_SREFRESH_EXIT_CS=0xF */
		/* 4f. PI_DLL_RESET=0x1 */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_137, 0x1);
		/* PI_PWRUP_SELF_REF_EXIT = 1 */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_132, 0x01000000);
		/* PI_MC_PWRUP_SELF_REF_EXIT = 0 */
		mmio_clrbits_32(IMX_DDRC_BASE + DENALI_PI_132, BIT(16));
		/* PI_INT_LVL_EN = 0 */
		mmio_clrbits_32(IMX_DDRC_BASE + DENALI_PI_04, BIT(0));
		/* PI_WRLVL_EN_F0 = 3, PI_WRLVL_EN_F1 = 3 */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_174, 0x03030000);
		/* PI_WRLVL_EN_F2 = 3 */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_175, 0x03);
		/* PI_CALVL_EN_F0 = 3, PI_CALVL_EN_F1 = 3 */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_191, 0x03030000);
		/* PI_CALVL_EN_F2 = 3 */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_192, 0x03);
		/* PI_WDQLVL_EN_F0 = 3 */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_212, 0x300);
		/* PI_WDQLVL_EN_F1 = 3 */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_214, 0x03000000);
		/* PI_WDQLVL_EN_F2 = 3 */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_217, 0x300);
		/* PI_EDLVL_EN_F0 = 3, PI_EDLVL_GATE_EN_F0 = 3 */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_181, 0x03030000);
		/*
		 * PI_RDLVL_EN_F1 = 3, PI_RDLVL_GATE_EN_F1 = 3,
		 * PI_RDLVL_EN_F2 = 3, PI_RDLVL_GATE_EN_F2 = 3
		 */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_182, 0x03030303);
		/* PI_PWRUP_SREFRESH_EXIT_CS = 0xF */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_134, 0x000F0000);
	} else {
		/* PI_DLL_RESET=1 */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_137, 0x1);
		/* PI_PWRUP_SELF_REF_EXIT=1 */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_132, 0x01000000);
		/* PI_MC_PWRUP_SELF_REF_EXIT=0 */
		mmio_clrbits_32(IMX_DDRC_BASE + DENALI_PI_132, BIT(16));
		/* PI_INT_LVL_EN=0 */
		mmio_clrbits_32(IMX_DDRC_BASE + DENALI_PI_04, BIT(0));
		/* PI_WRLVL_EN_F0=3 */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_174, 0x00030000);
		/* PI_CALVL_EN_F0=3 */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_191, 0x00030000);
		/* PI_RDLVL_EN_F0=3,PI_RDLVL_GATE_EN_F0=3 */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_181, 0x03030000);
		/* PI_PWRUP_SREFRESH_EXIT_CS=0xF */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_134, 0x000F0000);
	}

	mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_144, 0x00002D00);

	/* Force in-order AXI read data */
	mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_144, 0x1);

	/*
	 * Disable special R/W group switches so that R/W group placement
	 * is always at END of R/W group.
	 */
	mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_249, 0x0);

	/* Reduce time for IO pad calibration */
	mmio_write_32(IMX_DDRC_BASE + DENALI_PHY_1590, 0x01000000);

	mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_25, 0x00020100);

	/* PD disable */
	mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_153, 0x04040000);
	/*
	 * 5. Disable automatic LP entry and PCPCS modes LP_AUTO_ENTRY_EN
	 * to 1b'0, PCPCS_PD_EN to 1b'0
	 */

	upwr_xcp_set_ddr_retention(APD_DOMAIN, 0, NULL);
	upower_wait_resp();

	if (dram_class == LPDDR4_TYPE) {
		/* 7. Write PI START parameter to 1'b1 */
		mmio_write_32(IMX_DDRC_BASE + DENALI_PI_00, 0x00000b01);

		/* 8. Write CTL START parameter to 1'b1 */
		mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_00, 0x00000b01);
	} else {
		/* 7. Write PI START parameter to 1'b1 */
		mmio_write_32(IMX_DDRC_BASE + DENALI_PI_00, 0x00000701);

		/* 8. Write CTL START parameter to 1'b1 */
		mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_00, 0x00000701);
	}

	/* 9. DENALI_CTL_266: Wait for INT_STATUS_INIT=0x2 */
	do {
		val = (mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_266) >> 8) & 0xFF;
	} while (val != 0x2);

	/*
	 * 10. Run SW trainings by setting PI_CALVL_REQ, PI_WRLVL_REQ, PI_RDLVL_GATE_REQ,
	 * PI_RDLVL_REQ, PI_WDQLVL_REQ (NA for LPDDR3) in the same order.
	 */
	if (dram_class == LPDDR4_TYPE) {
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_52, 0x10000); /* CALVL */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_26, 0x100); /* WRLVL */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_33, 0x10000); /* RDGATE */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_33, 0x100); /* RDQLVL */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_65, 0x10000); /* WDQLVL */

		/* 11. Wait for trainings to complete by polling PI_INT_STATUS */
		while ((mmio_read_32(IMX_DDRC_BASE + DENALI_PI_77) & 0x07E00000) != 0x07E00000) {
			;
		}
	} else {
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_52, 0x10000); /* CALVL */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_26, 0x100); /* WRLVL */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_33, 0x10000); /* RDGATE */
		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_33, 0x100); /* RDQLVL */
		while ((mmio_read_32(IMX_DDRC_BASE + DENALI_PI_77) & 0x05E00000) != 0x05E00000) {
			;
		}
	}

	dram_lp_auto_enable();
}

#define LPDDR_DONE       (0x1 << 4)
#define SOC_FREQ_CHG_ACK (0x1 << 6)
#define SOC_FREQ_CHG_REQ (0x1 << 7)
#define LPI_WAKEUP_EN    (0x4 << 8)
#define SOC_FREQ_REQ     (0x1 << 11)

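/*
 * Program the DDR clock slice in CGC2 (register offset 0x40): 'src' selects
 * the clock source and 'div' the divider. Bit 31 indicates the register is
 * locked for writing and bit 27 reports that the clock switch has completed,
 * so both are polled around the update.
 */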
static void set_cgc2_ddrclk(uint8_t src, uint8_t div)
{
	/* Wait until the reg is unlocked for writing */
	while (mmio_read_32(IMX_CGC2_BASE + 0x40) & BIT(31)) {
		;
	}

	mmio_write_32(IMX_CGC2_BASE + 0x40, (src << 28) | (div << 21));
	/* Wait for the clock switching done */
	while (!(mmio_read_32(IMX_CGC2_BASE + 0x40) & BIT(27))) {
		;
	}
}
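
/*
 * Switch the DDR clock source/divider in CGC2 for the requested frequency,
 * gating the DDR clock in PCC5 around the change.
 */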
static void set_ddr_clk(uint32_t ddr_freq)
{
	/* Disable DDR clock */
	mmio_clrbits_32(IMX_PCC5_BASE + 0x108, BIT(30));
	switch (ddr_freq) {
	/* boot frequency ? */
	case 48:
		set_cgc2_ddrclk(2, 0);
		break;
	/* default bypass frequency for fsp 1 */
	case 192:
		set_cgc2_ddrclk(0, 1);
		break;
	case 384:
		set_cgc2_ddrclk(0, 0);
		break;
	case 264:
		set_cgc2_ddrclk(4, 3);
		break;
	case 528:
		set_cgc2_ddrclk(4, 1);
		break;
	default:
		break;
	}
	/* Enable DDR clock */
	mmio_setbits_32(IMX_PCC5_BASE + 0x108, BIT(30));

	/* Wait until the reg is unlocked for writing */
	while (mmio_read_32(IMX_CGC2_BASE + 0x40) & BIT(31)) {
		;
	}
}

#define AVD_SIM_LPDDR_CTRL	(IMX_LPAV_SIM_BASE + 0x14)
#define AVD_SIM_LPDDR_CTRL2	(IMX_LPAV_SIM_BASE + 0x18)
#define MAX_FSP_NUM	U(3)
#define DDR_DFS_GET_FSP_COUNT	0x10
#define DDR_BYPASS_DRATE	U(400)

extern int upower_pmic_i2c_write(uint32_t reg_addr, uint32_t reg_val);

/* Normally, we only switch frequency between 1 (bypass) and 2 (highest) */
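/*
 * Switch the LPDDR to the FSP selected by freq_index using the hardware
 * handshake in SIM_LPAV LPDDR_CTRL: the DRAM is put into long self-refresh
 * with clock gating, the DDR clock is re-programmed when the controller
 * raises SOC_FREQ_CHG_REQ, and the request is then acknowledged. Returns 0
 * on success, -1 on a bad index or if the handshake reports an error.
 */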
int lpddr4_dfs(uint32_t freq_index)
{
	uint32_t lpddr_ctrl, lpddr_ctrl2;
	uint32_t ddr_ctl_144;

	/*
	 * Valid index: 0 to 2
	 * index 0: boot frequency
	 * index 1: bypass frequency
	 * index 2: highest frequency
	 */
	if (freq_index > 2U) {
		return -1;
	}

	/*
	 * Increase the voltage to 1.1V first, before raising the frequency
	 * and letting APD enter OD mode.
	 */
	if (freq_index == 2U && sys_dvfs) {
		upower_pmic_i2c_write(0x22, 0x28);
	}

	/* Enable LPI_WAKEUP_EN */
	ddr_ctl_144 = mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_144);
	mmio_setbits_32(IMX_DDRC_BASE + DENALI_CTL_144, LPI_WAKEUP_EN);

	/* put DRAM into long self-refresh & clock gating */
	lpddr_ctrl = mmio_read_32(AVD_SIM_LPDDR_CTRL);
	lpddr_ctrl = (lpddr_ctrl & ~((0x3f << 15) | (0x3 << 9))) | (0x28 << 15) | (freq_index << 9);
	mmio_write_32(AVD_SIM_LPDDR_CTRL, lpddr_ctrl);

	/* Gating the clock */
	lpddr_ctrl2 = mmio_read_32(AVD_SIM_LPDDR_CTRL2);
	mmio_setbits_32(AVD_SIM_LPDDR_CTRL2, LPDDR_EN_CLKGATE);

	/* Request frequency change */
	mmio_setbits_32(AVD_SIM_LPDDR_CTRL, SOC_FREQ_REQ);

	do {
		lpddr_ctrl = mmio_read_32(AVD_SIM_LPDDR_CTRL);
		if (lpddr_ctrl & SOC_FREQ_CHG_REQ) {
			/* Bypass mode */
			if (info->fsp_table[freq_index] < DDR_BYPASS_DRATE) {
				/* Change to PLL bypass mode */
				mmio_write_32(IMX_LPAV_SIM_BASE, 0x1);
				/* change the ddr clock source & frequency */
				set_ddr_clk(info->fsp_table[freq_index]);
			} else {
				/* Change to PLL unbypass mode */
				mmio_write_32(IMX_LPAV_SIM_BASE, 0x0);
				/* change the ddr clock source & frequency */
				set_ddr_clk(info->fsp_table[freq_index] >> 1);
			}

			mmio_clrsetbits_32(AVD_SIM_LPDDR_CTRL, SOC_FREQ_CHG_REQ, SOC_FREQ_CHG_ACK);
			continue;
		}
	} while ((lpddr_ctrl & LPDDR_DONE) != 0); /* several try? */

	/* restore the original setting */
	mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_144, ddr_ctl_144);
	mmio_write_32(AVD_SIM_LPDDR_CTRL2, lpddr_ctrl2);

	/* Check the DFS result */
	lpddr_ctrl = mmio_read_32(AVD_SIM_LPDDR_CTRL) & 0xF;
	if (lpddr_ctrl != 0U) {
		/* Must be something wrong, return failure */
		return -1;
	}

	/*
	 * Decrease the BUCK3 voltage after the frequency has been lowered
	 * and APD is in ND mode.
	 */
	if (freq_index == 1U && sys_dvfs) {
		upower_pmic_i2c_write(0x22, 0x20);
	}

	/* DFS done successfully */
	return 0;
}

/* For the non-primary cores: wait until DFS is done */
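/*
 * EL3 SGI handler registered in dram_init(): each secondary core acknowledges
 * the SGI, bumps core_count under dfs_lock, and spins in WFE until the
 * primary core clears in_progress after the frequency switch.
 */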
static uint64_t waiting_dvfs(uint32_t id, uint32_t flags,
		void *handle, void *cookie)
{
	uint32_t irq;

	irq = plat_ic_acknowledge_interrupt();
	if (irq < 1022U) {
		plat_ic_end_of_interrupt(irq);
	}

	/* set the WFE done status */
	spin_lock(&dfs_lock);
	core_count++;
	dsb();
	spin_unlock(&dfs_lock);

	while (in_progress) {
		wfe();
	}

	return 0;
}

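/*
 * SMC handler for DDR DVFS requests from the non-secure world:
 *   x1 - target FSP index, or DDR_DFS_GET_FSP_COUNT to query the number of
 *        supported FSPs (returns num_fsp and fsp_table[1]),
 *   x2 - number of online CPUs (the caller included, hence the '- 1'),
 *   x3 - non-zero when the request is part of system-level DVFS, which also
 *        adjusts the BUCK3 voltage in lpddr4_dfs().
 */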
int dram_dvfs_handler(uint32_t smc_fid, void *handle,
		u_register_t x1, u_register_t x2, u_register_t x3)
{
	unsigned int fsp_index = x1;
	uint32_t online_cpus = x2 - 1;
	uint64_t mpidr = read_mpidr_el1();
	unsigned int cpu_id = MPIDR_AFFLVL0_VAL(mpidr);

	/* Get the number of FSPs */
	if (x1 == DDR_DFS_GET_FSP_COUNT) {
		SMC_RET2(handle, num_fsp, info->fsp_table[1]);
	}

	/* start lpddr frequency scaling */
	in_progress = true;
	sys_dvfs = x3 ? true : false;
	dsb();

	/* notify the other cores to wait for scaling done */
	for (unsigned int i = 0; i < PLATFORM_CORE_COUNT; i++) {
		/* Skip raising the SGI for the current CPU */
		if (i != cpu_id) {
			plat_ic_raise_el3_sgi(0x8, i);
		}
	}

	/* Make sure all the other CPUs are in WFE */
	while (online_cpus != core_count) {
		;
	}

	/* Flush the L1/L2 cache */
	dcsw_op_all(DCCSW);

	lpddr4_dfs(fsp_index);

	in_progress = false;
	core_count = 0;
	dsb();
	sev();
	isb();

	SMC_RET1(handle, 0);
}

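/*
 * Called at boot: register the EL3 interrupt handler used to park the
 * secondary cores during DVFS and count the FSP entries present in the
 * saved dram_timing_info (presumably filled in by the earlier boot stage).
 */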
void dram_init(void)
{
	uint32_t flags = 0;
	uint32_t rc;
	unsigned int i;

	/* Register the EL3 handler for DDR DVFS */
	set_interrupt_rm_flag(flags, NON_SECURE);
	rc = register_interrupt_type_handler(INTR_TYPE_EL3, waiting_dvfs, flags);
	if (rc) {
		panic();
	}

	info = (struct dram_timing_info *)SAVED_DRAM_DATA_BASE;

	/* Get the number of supported FSPs */
	for (i = 0; i < MAX_FSP_NUM; i++) {
		if (!info->fsp_table[i]) {
			break;
		}
	}

	num_fsp = (i > MAX_FSP_NUM) ? MAX_FSP_NUM : i;
}