/*
 * Copyright (c) 2019-2023, Intel Corporation. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <errno.h>

#include <common/debug.h>
#include <drivers/delay_timer.h>
#include <lib/mmio.h>
#include <lib/utils.h>

#include "agilex5_memory_controller.h"
#include <platform_def.h>

#define ALT_CCU_NOC_DI_SET_MSK		0x10

#define DDR_READ_LATENCY_DELAY		40
#define MAX_MEM_CAL_RETRY		3
#define PRE_CALIBRATION_DELAY		1
#define POST_CALIBRATION_DELAY		1
#define TIMEOUT_EMIF_CALIBRATION	1000
#define CLEAR_EMIF_DELAY		1000
#define CLEAR_EMIF_TIMEOUT		1000

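/*
 * DDR_CONFIG() packs an address-order/bank/column/row tuple into one
 * word for table lookup, e.g. DDR_CONFIG(1, 3, 10, 14) = 0x01030a0e.
 */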
#define DDR_CONFIG(A, B, C, R)	(((A) << 24) | ((B) << 16) | ((C) << 8) | (R))
#define DDR_CONFIG_ELEMENTS	(ARRAY_SIZE(ddr_config))

/* tWR = min. 15 ns constant; see the JEDEC standard, e.g. JESD79-4 for DDR4 */
#define tWR_IN_NS 15

void configure_hmc_adaptor_regs(void);
void configure_ddr_sched_ctrl_regs(void);

/* The following are the supported configurations */
uint32_t ddr_config[] = {
	/* DDR_CONFIG(Address order, Bank, Column, Row) */
	/* List for DDR3 or LPDDR3 (pinout order > chip, row, bank, column) */
	DDR_CONFIG(0, 3, 10, 12),
	DDR_CONFIG(0, 3, 9, 13),
	DDR_CONFIG(0, 3, 10, 13),
	DDR_CONFIG(0, 3, 9, 14),
	DDR_CONFIG(0, 3, 10, 14),
	DDR_CONFIG(0, 3, 10, 15),
	DDR_CONFIG(0, 3, 11, 14),
	DDR_CONFIG(0, 3, 11, 15),
	DDR_CONFIG(0, 3, 10, 16),
	DDR_CONFIG(0, 3, 11, 16),
	DDR_CONFIG(0, 3, 12, 15), /* 0xa */
	/* List for DDR4 only (pinout order > chip, bank, row, column) */
	DDR_CONFIG(1, 3, 10, 14),
	DDR_CONFIG(1, 4, 10, 14),
	DDR_CONFIG(1, 3, 10, 15),
	DDR_CONFIG(1, 4, 10, 15),
	DDR_CONFIG(1, 3, 10, 16),
	DDR_CONFIG(1, 4, 10, 16),
	DDR_CONFIG(1, 3, 10, 17),
	DDR_CONFIG(1, 4, 10, 17),
};

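/*
 * Look up a packed DDR_CONFIG() value in the table above and return its
 * index. Note that 0 doubles as the "not found" return value, so a miss
 * is indistinguishable from a match on entry 0.
 */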
static int match_ddr_conf(uint32_t ddr_conf)
{
	int i;

	for (i = 0; i < DDR_CONFIG_ELEMENTS; i++) {
		if (ddr_conf == ddr_config[i])
			return i;
	}
	return 0;
}

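/*
 * Poll the system manager until the HMC (hard memory controller) clock
 * reports as running, giving up after roughly 1000 us.
 */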
static int check_hmc_clk(void)
{
	unsigned long timeout = 0;
	uint32_t hmc_clk;

	do {
		hmc_clk = mmio_read_32(AGX_SYSMGR_CORE_HMC_CLK);
		if (hmc_clk & AGX_SYSMGR_CORE_HMC_CLK_STATUS)
			break;
		udelay(1);
	} while (++timeout < 1000);
	if (timeout >= 1000)
		return -ETIMEDOUT;

	return 0;
}

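/*
 * Clear the EMIF reset handshake control register and poll until the
 * SEQ2CORE acknowledge bits in the handshake status register drop, or
 * time out.
 */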
static int clear_emif(void)
{
	uint32_t data;
	unsigned long timeout;

	mmio_write_32(AGX_MPFE_HMC_ADP_RSTHANDSHAKECTRL, 0);

	timeout = 0;
	do {
		data = mmio_read_32(AGX_MPFE_HMC_ADP_RSTHANDSHAKESTAT);
		if ((data & AGX_MPFE_HMC_ADP_RSTHANDSHAKESTAT_SEQ2CORE) == 0)
			break;
		udelay(CLEAR_EMIF_DELAY);
	} while (++timeout < CLEAR_EMIF_TIMEOUT);
	if (timeout >= CLEAR_EMIF_TIMEOUT)
		return -ETIMEDOUT;

	return 0;
}

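/*
 * Wait for DRAM calibration to complete. Each attempt polls the
 * calibration status register for up to TIMEOUT_EMIF_CALIBRATION
 * iterations; on failure the EMIF is cleared and calibration is retried,
 * up to MAX_MEM_CAL_RETRY times in total.
 */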
static int mem_calibration(void)
{
	int status;
	uint32_t data;
	unsigned long timeout;
	unsigned long retry = 0;

	udelay(PRE_CALIBRATION_DELAY);

	do {
		if (retry != 0)
			INFO("DDR: Retrying DRAM calibration\n");

		timeout = 0;
		do {
			data = mmio_read_32(AGX_MPFE_HMC_ADP_DDRCALSTAT);
			if (AGX_MPFE_HMC_ADP_DDRCALSTAT_CAL(data) == 1)
				break;
			udelay(500);
		} while (++timeout < TIMEOUT_EMIF_CALIBRATION);

		if (AGX_MPFE_HMC_ADP_DDRCALSTAT_CAL(data) == 0) {
			status = clear_emif();
			if (status)
				ERROR("Failed to clear EMIF\n");
		} else {
			break;
		}
	} while (++retry < MAX_MEM_CAL_RETRY);

	if (AGX_MPFE_HMC_ADP_DDRCALSTAT_CAL(data) == 0) {
		ERROR("DDR: DRAM calibration failed.\n");
		status = -EIO;
	} else {
		INFO("DDR: DRAM calibration success.\n");
		status = 0;
	}

	udelay(POST_CALIBRATION_DELAY);

	return status;
}

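/*
 * Initialize the hard memory controller: check that the HMC clock is
 * running, calibrate the DRAM, then program the HMC adaptor registers.
 */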
int init_hard_memory_controller(void)
{
	int status;

	status = check_hmc_clk();
	if (status) {
		ERROR("DDR: Error, HMC clock not running\n");
		return status;
	}

	status = mem_calibration();
	if (status) {
		ERROR("DDR: Memory Calibration Failed\n");
		return status;
	}

	configure_hmc_adaptor_regs();

	return 0;
}

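/*
 * Program the DDR scheduler in the HPS NOC using the timing values that
 * IOHMC calibration reports: address configuration, DDR timing and mode,
 * read latency, activate (FAW) limits, and device-to-device turnaround.
 */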
void configure_ddr_sched_ctrl_regs(void)
{
	uint32_t data, dram_addr_order, ddr_conf, bank, row, col,
		rd_to_miss, wr_to_miss, burst_len, burst_len_ddr_clk,
		burst_len_sched_clk, act_to_act, rd_to_wr, wr_to_rd, bw_ratio,
		t_rtp, t_rp, t_rcd, rd_latency, tw_rin_clk_cycles,
		bw_ratio_extended, auto_precharge = 0, act_to_act_bank, faw,
		faw_bank, bus_rd_to_rd, bus_rd_to_wr, bus_wr_to_rd;

	INFO("Init HPS NOC's DDR Scheduler.\n");

	data = mmio_read_32(AGX_MPFE_IOHMC_CTRLCFG1);
	dram_addr_order = AGX_MPFE_IOHMC_CTRLCFG1_CFG_ADDR_ORDER(data);

	data = mmio_read_32(AGX_MPFE_IOHMC_DRAMADDRW);

	col = IOHMC_DRAMADDRW_COL_ADDR_WIDTH(data);
	row = IOHMC_DRAMADDRW_ROW_ADDR_WIDTH(data);
	bank = IOHMC_DRAMADDRW_BANK_ADDR_WIDTH(data) +
		IOHMC_DRAMADDRW_BANK_GRP_ADDR_WIDTH(data);

	ddr_conf = match_ddr_conf(DDR_CONFIG(dram_addr_order, bank, col, row));

	if (ddr_conf) {
		mmio_clrsetbits_32(
			AGX_MPFE_DDR_MAIN_SCHED_DDRCONF,
			AGX_MPFE_DDR_MAIN_SCHED_DDRCONF_SET_MSK,
			AGX_MPFE_DDR_MAIN_SCHED_DDRCONF_SET(ddr_conf));
	} else {
		ERROR("DDR: Cannot find predefined ddrConf configuration.\n");
	}

	mmio_write_32(AGX_MPFE_HMC_ADP(ADP_DRAMADDRWIDTH), data);

	data = mmio_read_32(AGX_MPFE_IOHMC_DRAMTIMING0);
	rd_latency = AGX_MPFE_IOHMC_REG_DRAMTIMING0_CFG_TCL(data);

	data = mmio_read_32(AGX_MPFE_IOHMC_CALTIMING0);
	act_to_act = ACT_TO_ACT(data);
	t_rcd = ACT_TO_RDWR(data);
	act_to_act_bank = ACT_TO_ACT_DIFF_BANK(data);

	data = mmio_read_32(AGX_MPFE_IOHMC_CALTIMING1);
	rd_to_wr = RD_TO_WR(data);
	bus_rd_to_rd = RD_TO_RD_DIFF_CHIP(data);
	bus_rd_to_wr = RD_TO_WR_DIFF_CHIP(data);

	data = mmio_read_32(AGX_MPFE_IOHMC_CALTIMING2);
	t_rtp = RD_TO_PCH(data);

	data = mmio_read_32(AGX_MPFE_IOHMC_CALTIMING3);
	wr_to_rd = CALTIMING3_WR_TO_RD(data);
	bus_wr_to_rd = CALTIMING3_WR_TO_RD_DIFF_CHIP(data);

	data = mmio_read_32(AGX_MPFE_IOHMC_CALTIMING4);
	t_rp = PCH_TO_VALID(data);

	data = mmio_read_32(AGX_MPFE_HMC_ADP(HMC_ADP_DDRIOCTRL));
	bw_ratio = ((HMC_ADP_DDRIOCTRL_IO_SIZE(data) == 0) ? 0 : 1);

	data = mmio_read_32(AGX_MPFE_IOHMC_CTRLCFG0);
	burst_len = HMC_ADP_DDRIOCTRL_CTRL_BURST_LENGTH(data);
	burst_len_ddr_clk = burst_len / 2;
	burst_len_sched_clk = ((burst_len / 2) / 2);

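	/*
	 * Convert the 15 ns tWR minimum into memory-clock cycles with
	 * round-to-nearest. At 1333 MHz: 15 * 1333 = 19995, remainder
	 * 995 >= 500, so 19995 / 1000 + 1 = 20 cycles; at 1066 MHz the
	 * same rounding yields 16 cycles.
	 */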
	data = mmio_read_32(AGX_MPFE_IOHMC_CTRLCFG0);
	switch (AGX_MPFE_IOHMC_REG_CTRLCFG0_CFG_MEM_TYPE(data)) {
	case 1:
		/* DDR4 - 1333MHz */
		/* 20 (19.995) clock cycles = 15ns */
		/* Calculate with rounding */
		tw_rin_clk_cycles = (((tWR_IN_NS * 1333) % 1000) >= 500) ?
			((tWR_IN_NS * 1333) / 1000) + 1 :
			((tWR_IN_NS * 1333) / 1000);
		break;
	default:
		/* Others - 1066MHz or slower */
		/* 16 (15.990) clock cycles = 15ns */
		/* Calculate with rounding */
		tw_rin_clk_cycles = (((tWR_IN_NS * 1066) % 1000) >= 500) ?
			((tWR_IN_NS * 1066) / 1000) + 1 :
			((tWR_IN_NS * 1066) / 1000);
		break;
	}

	rd_to_miss = t_rtp + t_rp + t_rcd - burst_len_sched_clk;
	wr_to_miss = ((rd_latency + burst_len_ddr_clk + 2 + tw_rin_clk_cycles)
		/ 2) - rd_to_wr + t_rp + t_rcd;

	mmio_write_32(AGX_MPFE_DDR_MAIN_SCHED_DDRTIMING,
		bw_ratio << DDRTIMING_BWRATIO_OFST |
		wr_to_rd << DDRTIMING_WRTORD_OFST |
		rd_to_wr << DDRTIMING_RDTOWR_OFST |
		burst_len_sched_clk << DDRTIMING_BURSTLEN_OFST |
		wr_to_miss << DDRTIMING_WRTOMISS_OFST |
		rd_to_miss << DDRTIMING_RDTOMISS_OFST |
		act_to_act << DDRTIMING_ACTTOACT_OFST);

	data = mmio_read_32(AGX_MPFE_HMC_ADP(HMC_ADP_DDRIOCTRL));
	bw_ratio_extended = ((ADP_DDRIOCTRL_IO_SIZE(data) == 0) ? 1 : 0);

	mmio_write_32(AGX_MPFE_DDR_MAIN_SCHED_DDRMODE,
		bw_ratio_extended << DDRMODE_BWRATIOEXTENDED_OFST |
		auto_precharge << DDRMODE_AUTOPRECHARGE_OFST);

	mmio_write_32(AGX_MPFE_DDR_MAIN_SCHED_READLATENCY,
		(rd_latency / 2) + DDR_READ_LATENCY_DELAY);

	data = mmio_read_32(AGX_MPFE_IOHMC_CALTIMING9);
	faw = AGX_MPFE_IOHMC_CALTIMING9_ACT_TO_ACT(data);

	faw_bank = 1; /* always 1 because we always have 4-bank DDR */

	mmio_write_32(AGX_MPFE_DDR_MAIN_SCHED_ACTIVATE,
		faw_bank << AGX_MPFE_DDR_MAIN_SCHED_ACTIVATE_FAWBANK_OFST |
		faw << AGX_MPFE_DDR_MAIN_SCHED_ACTIVATE_FAW_OFST |
		act_to_act_bank << AGX_MPFE_DDR_MAIN_SCHED_ACTIVATE_RRD_OFST);

	mmio_write_32(AGX_MPFE_DDR_MAIN_SCHED_DEVTODEV,
		((bus_rd_to_rd
			<< AGX_MPFE_DDR_MAIN_SCHED_DEVTODEV_BUSRDTORD_OFST)
			& AGX_MPFE_DDR_MAIN_SCHED_DEVTODEV_BUSRDTORD_MSK) |
		((bus_rd_to_wr
			<< AGX_MPFE_DDR_MAIN_SCHED_DEVTODEV_BUSRDTOWR_OFST)
			& AGX_MPFE_DDR_MAIN_SCHED_DEVTODEV_BUSRDTOWR_MSK) |
		((bus_wr_to_rd
			<< AGX_MPFE_DDR_MAIN_SCHED_DEVTODEV_BUSWRTORD_OFST)
			& AGX_MPFE_DDR_MAIN_SCHED_DEVTODEV_BUSWRTORD_MSK));
}

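/*
 * Derive the physical DRAM size from the external interface width and
 * the total address width (column + row + bank + bank group + CS):
 * size = 2^addr_width * (io_width / 8), e.g. a 32-bit interface with a
 * 31-bit address map gives 2^31 * 4 bytes = 8 GiB.
 */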
unsigned long get_physical_dram_size(void)
{
	uint32_t data;
	unsigned long ram_addr_width, ram_ext_if_io_width;

	data = mmio_read_32(AGX_MPFE_HMC_ADP_DDRIOCTRL);
	switch (AGX_MPFE_HMC_ADP_DDRIOCTRL_IO_SIZE(data)) {
	case 0:
		ram_ext_if_io_width = 16;
		break;
	case 1:
		ram_ext_if_io_width = 32;
		break;
	case 2:
		ram_ext_if_io_width = 64;
		break;
	default:
		ram_ext_if_io_width = 0;
		break;
	}

	data = mmio_read_32(AGX_MPFE_IOHMC_REG_DRAMADDRW);
	ram_addr_width = IOHMC_DRAMADDRW_CFG_COL_ADDR_WIDTH(data) +
		IOHMC_DRAMADDRW_CFG_ROW_ADDR_WIDTH(data) +
		IOHMC_DRAMADDRW_CFG_BANK_ADDR_WIDTH(data) +
		IOHMC_DRAMADDRW_CFG_BANK_GROUP_ADDR_WIDTH(data) +
		IOHMC_DRAMADDRW_CFG_CS_ADDR_WIDTH(data);

	return (1UL << ram_addr_width) * (ram_ext_if_io_width / 8);
}

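/*
 * Program the HMC adaptor: set the DDR I/O width, mirror the DRAM
 * address width from the IOHMC, open the DDR firewall for nonsecure
 * access, and enable ECC (with a full scrub) when the IOHMC reports
 * ECC-enabled memory.
 */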
void configure_hmc_adaptor_regs(void)
{
	uint32_t data;
	uint32_t dram_io_width;

	/* Configure DDR data rate */
	dram_io_width = AGX_MPFE_IOHMC_NIOSRESERVE0_NIOS_RESERVE0(
		mmio_read_32(AGX_MPFE_IOHMC_REG_NIOSRESERVE0_OFST));
	dram_io_width = (dram_io_width & 0xFF) >> 5;

	data = mmio_read_32(AGX_MPFE_IOHMC_CTRLCFG3);

	dram_io_width |= (data & 0x4);

	mmio_write_32(AGX_MPFE_HMC_ADP_DDRIOCTRL, dram_io_width);

	/* Copy DRAM address width from IOHMC to HMC ADP */
	data = mmio_read_32(AGX_MPFE_IOHMC_DRAMADDRW);
	mmio_write_32(AGX_MPFE_HMC_ADP(ADP_DRAMADDRWIDTH), data);

	/* Enable nonsecure access to DDR */
	data = get_physical_dram_size();

	if (data < AGX_DDR_SIZE)
		data = AGX_DDR_SIZE;

	mmio_write_32(AGX_NOC_FW_DDR_SCR_MPUREGION0ADDR_LIMIT, data - 1);
	mmio_write_32(AGX_NOC_FW_DDR_SCR_MPUREGION0ADDR_LIMITEXT, 0x1f);

	mmio_write_32(AGX_NOC_FW_DDR_SCR_NONMPUREGION0ADDR_LIMIT, data - 1);

	mmio_write_32(AGX_SOC_NOC_FW_DDR_SCR_ENABLESET, BIT(0) | BIT(8));

	/*
	 * ECC enablement: assert the ECC counter resets with ECC disabled,
	 * enable read-modify-write and auto write-back, then release the
	 * resets and set the ECC enable bit. The DRAM is then zero-filled
	 * so that every location carries valid ECC check bits.
	 */
	data = mmio_read_32(AGX_MPFE_IOHMC_REG_CTRLCFG1);
	if (data & (1 << AGX_IOHMC_CTRLCFG1_ENABLE_ECC_OFST)) {
		mmio_clrsetbits_32(AGX_MPFE_HMC_ADP_ECCCTRL1,
			AGX_MPFE_HMC_ADP_ECCCTRL1_AUTOWB_CNT_RST_SET_MSK |
			AGX_MPFE_HMC_ADP_ECCCTRL1_CNT_RST_SET_MSK |
			AGX_MPFE_HMC_ADP_ECCCTRL1_ECC_EN_SET_MSK,
			AGX_MPFE_HMC_ADP_ECCCTRL1_AUTOWB_CNT_RST_SET_MSK |
			AGX_MPFE_HMC_ADP_ECCCTRL1_CNT_RST_SET_MSK);

		mmio_clrsetbits_32(AGX_MPFE_HMC_ADP_ECCCTRL2,
			AGX_MPFE_HMC_ADP_ECCCTRL2_OVRW_RB_ECC_EN_SET_MSK |
			AGX_MPFE_HMC_ADP_ECCCTRL2_RMW_EN_SET_MSK |
			AGX_MPFE_HMC_ADP_ECCCTRL2_AUTOWB_EN_SET_MSK,
			AGX_MPFE_HMC_ADP_ECCCTRL2_RMW_EN_SET_MSK |
			AGX_MPFE_HMC_ADP_ECCCTRL2_AUTOWB_EN_SET_MSK);

		mmio_clrsetbits_32(AGX_MPFE_HMC_ADP_ECCCTRL1,
			AGX_MPFE_HMC_ADP_ECCCTRL1_AUTOWB_CNT_RST_SET_MSK |
			AGX_MPFE_HMC_ADP_ECCCTRL1_CNT_RST_SET_MSK |
			AGX_MPFE_HMC_ADP_ECCCTRL1_ECC_EN_SET_MSK,
			AGX_MPFE_HMC_ADP_ECCCTRL1_ECC_EN_SET_MSK);
		INFO("Scrubbing ECC\n");

		/* ECC Scrubbing */
		zeromem((void *)DRAM_BASE, DRAM_SIZE);
	} else {
		INFO("ECC is disabled.\n");
	}
}