1 /* SPDX-License-Identifier: BSD-3-Clause */
2
3 //=============================================================================
4 // Include Files
5 //=============================================================================
6 //#include <common.h>
7 //#include <ett_common.h>
8 //#include <test_case_controller.h>
9 //#include <api.h>
10 //#include "gpio.h"
11 //#include "ett_cust.h"
12 //#include "emi_setting.h"
13 //#include "pll.h"
14 //#include "dramc_pi_api.h"
15
16 #include "dramc_common.h"
17 #include "dramc_int_global.h"
18 #include "dramc_top.h"
19 #include "dramc_custom.h"
20
21
22
23
24 #if !__FLASH_TOOL_DA__ && !__ETT__
25 //#include "custom_emi.h" //[FOR_CHROMEOS]
26 #endif
27
28 #include <emi_hw.h>
29 #include <emi.h>
30 // #include "voltage.h"
31
32 #include <soc/dramc_param.h>
33 #include <soc/emi.h>
34 #include <soc/regulator.h>
35
36 #if DRAM_AUXADC_CONFIG
37 #include <mtk_auxadc_sw.h>
38 #endif
39
40
41 #if (FOR_DV_SIMULATION_USED==0)
42 #if 0 /* FIXME: need regulator control */
43 #include <pmic.h>
/* now we can use definition MTK_PMIC_MT6359
45 * ==============================================================
46 * PMIC |Power |Dflt. Volt. |Step |Support FPWM |Cmt.
47 * --------------------------------------------------------------
48 * MT6359 |Vcore |0.8v |6.25mV |Yes |
49 * |vio18_2 |1.8v |0.1V |No |
50 * --------------------------------------------------------------
51 * MT6362 |Vdram |1.125v |5mV |Yes |(DRAM Vdram)
52 * |vmddr |0.75v |10mV |No |(AP Vdram)
53 * |Vddq |0.6v |10mV |No |
54 * ==============================================================
55 */
56 #define MTK_PMIC_MT6359
57 #endif
58 #endif
59
60 #if !__ETT__
61 #define mt_reg_sync_write(x,y) mt_reg_sync_writel(y,x)
62 #endif
63
64 #ifdef MTK_PMIC_MT6359
65 #include <regulator/mtk_regulator.h>
66 #include <mt6359.h>
67 #endif
68
69 #if !__ETT__
70 #define CQ_DMA_BASE (0x10212000)
71 #endif
72
73
74 #if !__ETT__ && CFG_ENABLE_DCACHE
75 #define DRAMC_ASSERT(expr) \
76 do { \
77 if (!(expr)) { \
78 plat_clean_invalidate_dcache(); \
79 ASSERT(expr); \
80 } \
81 } while (0)
82 #else
83 #define DRAMC_ASSERT(expr) ASSERT(expr)
84 #endif
85
86 //=============================================================================
87 // Definition
88 //=============================================================================
89
90 #if DRAM_AUXADC_CONFIG
91 static unsigned int get_ch_num_by_auxadc(void);
92 #endif
93 //=============================================================================
94 // Global Variables
95 //=============================================================================
96 static unsigned char auto_detect_done;
97 //static int enable_combo_dis = 0;
98 //static unsigned short mr5;
99 //static unsigned short mr6;
100 //static unsigned short mr7;
101 //static unsigned short mr8;
102 unsigned int channel_num_auxadc = 4;
103 #if DRAM_AUXADC_CONFIG
104 unsigned int dram_type_auxadc;
105 #endif
106
107 #ifdef MTK_PMIC_MT6359
108 //static struct mtk_regulator reg_vio18_2, reg_vmdd2, reg_vcore, reg_vmddq, reg_vmddr;
109 static struct mtk_regulator reg_vio18, reg_vdram, reg_vcore, reg_vddq, reg_vmddr;
110 #endif
111
112 #ifdef LAST_DRAMC
113 static LAST_DRAMC_INFO_T* last_dramc_info_ptr;
114 #endif
115
116 #ifdef VOLTAGE_SEL
117 static VOLTAGE_SEL_INFO_T voltage_sel_info_ptr;
118 #endif
119
120 #if SUPPORT_SAVE_TIME_FOR_CALIBRATION
121 #if (!__ETT__)
122 //#include <storage_api.h> //[FOR_CHROMEOS]
123 #endif
124 static int read_offline_dram_mdl_data(DRAM_INFO_BY_MRR_T *DramInfo);
125 static int write_offline_dram_mdl_data(DRAM_INFO_BY_MRR_T *DramInfo);
126 #endif
127
128 #if defined(SLT) && (!__ETT__)
129 #include <pl_version.h>
130 static u64 part_dram_data_addr_slt = 0;
131 int read_slt_data(DRAM_SLT_DATA_T *data);
132 int write_slt_data(DRAM_SLT_DATA_T *data);
133 int clean_slt_data(void);
134 #endif
135 //=============================================================================
136 // External references
137 //=============================================================================
138 extern char* opt_dle_value;
139
140 void print_DBG_info(DRAMC_CTX_T *p);
141
142 #if ENABLE_PINMUX_FOR_RANK_SWAP
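/*
 * Rank-swap fixup for boards whose pinmux swaps CS0/CS1: when the dual-rank
 * bit (EMI_CONA bit 17) is set, exchange the rank-0 and rank-1 fields in
 * EMI_CONA/EMI_CONH and in both channel CONA registers, swap
 * DRAM_RANK_SIZE[0]/[1], and mirror the external CBT mode. The exact bit
 * positions are assumed from the masks used below, not from a datasheet
 * reference.
 */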
static void EMI_rank_swap_emi_setting(EMI_SETTINGS *emi_set)
144 {
145 static unsigned int temp;
146
147 if (emi_set->EMI_CONA_VAL & 0x20000) {
148 temp = emi_set->EMI_CONA_VAL;
149 emi_set->EMI_CONA_VAL &= ~(0xF3F0F0F0);
150 emi_set->EMI_CONA_VAL |= (temp & 0xC0C0C0C0) >> 2;
151 emi_set->EMI_CONA_VAL |= (temp & 0x30303030) << 2;
152 emi_set->EMI_CONA_VAL |= (temp & 0x02000000) >> 1;
153 emi_set->EMI_CONA_VAL |= (temp & 0x01000000) << 1;
154
155 temp = emi_set->EMI_CONH_VAL;
156 emi_set->EMI_CONH_VAL &= ~(0xFFFF0030);
157 emi_set->EMI_CONH_VAL |= (temp & 0xF0F00000) >> 4;
158 emi_set->EMI_CONH_VAL |= (temp & 0x0F0F0000) << 4;
159 emi_set->EMI_CONH_VAL |= (temp & 0x00000020) >> 1;
160 emi_set->EMI_CONH_VAL |= (temp & 0x00000010) << 1;
161
162 temp = emi_set->CHN0_EMI_CONA_VAL;
163 emi_set->CHN0_EMI_CONA_VAL &= ~(0x00FFF0FC);
164 emi_set->CHN0_EMI_CONA_VAL |= (temp & 0x00F00000) >> 4;
165 emi_set->CHN0_EMI_CONA_VAL |= (temp & 0x000F0000) << 4;
166 emi_set->CHN0_EMI_CONA_VAL |= (temp & 0x0000C0C0) >> 2;
167 emi_set->CHN0_EMI_CONA_VAL |= (temp & 0x00003030) << 2;
168 emi_set->CHN0_EMI_CONA_VAL |= (temp & 0x00000008) >> 1;
169 emi_set->CHN0_EMI_CONA_VAL |= (temp & 0x00000004) << 1;
170
171 temp = emi_set->CHN1_EMI_CONA_VAL;
172 emi_set->CHN1_EMI_CONA_VAL &= ~(0x00FFF0FC);
173 emi_set->CHN1_EMI_CONA_VAL |= (temp & 0x00F00000) >> 4;
174 emi_set->CHN1_EMI_CONA_VAL |= (temp & 0x000F0000) << 4;
175 emi_set->CHN1_EMI_CONA_VAL |= (temp & 0x0000C0C0) >> 2;
176 emi_set->CHN1_EMI_CONA_VAL |= (temp & 0x00003030) << 2;
177 emi_set->CHN1_EMI_CONA_VAL |= (temp & 0x00000008) >> 1;
178 emi_set->CHN1_EMI_CONA_VAL |= (temp & 0x00000004) << 1;
179
180 emi_set->CHN0_EMI_CONA_VAL |= 0x80000000;
181 emi_set->CHN1_EMI_CONA_VAL |= 0x80000000;
182
183 temp = emi_set->DRAM_RANK_SIZE[0];
184 emi_set->DRAM_RANK_SIZE[0] = emi_set->DRAM_RANK_SIZE[1];
185 emi_set->DRAM_RANK_SIZE[1] = temp;
186
187 if (emi_set->dram_cbt_mode_extern == CBT_R0_NORMAL_R1_BYTE)
188 emi_set->dram_cbt_mode_extern = CBT_R0_BYTE_R1_NORMAL;
189 else if (emi_set->dram_cbt_mode_extern == CBT_R0_BYTE_R1_NORMAL)
190 emi_set->dram_cbt_mode_extern = CBT_R0_NORMAL_R1_BYTE;
191 }
192 }
193
void EMI_rank_swap_handle(void)
195 {
196 static unsigned int handled = 0;
197 int i;
198
199 if (!handled) {
200 EMI_rank_swap_emi_setting(&g_default_emi_setting);
201 handled = 1;
202 }
203 }
204 #endif
205
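/*
 * Minimal EMI bring-up used before the real DRAM size is known: broadcast
 * the default CONA/CONF/CONH values to all channels so that the MDL
 * (mode-register based detection) pass in dram_auto_detection() can run.
 * The context's vendor_id is seeded from iLPDDR3_MODE_REG_5 of the default
 * setting.
 */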
void mdl_setting(DRAMC_CTX_T *p)
207 {
208 EMI_SETTINGS *emi_set = &g_default_emi_setting;
209
210 emi_init();
211
212 enable_infra_emi_broadcast(1);
213
214 set_cen_emi_cona(emi_set->EMI_CONA_VAL);
215 set_cen_emi_conf(emi_set->EMI_CONF_VAL);
216 set_cen_emi_conh(emi_set->EMI_CONH_VAL);
217
218
219 set_chn_emi_cona(emi_set->CHN0_EMI_CONA_VAL);
220 //set_chn_emi_conc(0x4);
221 enable_infra_emi_broadcast(0);
222
223 p->vendor_id = emi_set->iLPDDR3_MODE_REG_5;
224 }
225 #if 0
226 unsigned int check_gating_error(void)
227 {
228 unsigned int ret = 0, i, phy_base, err_code = 0;
229 unsigned int misc_stberr_all, misc_stberr_rk0_r, misc_stberr_rk0_f, misc_stberr_rk1_r, misc_stberr_rk1_f;
230
231 phy_base = Channel_A_DDRPHY_AO_BASE_ADDRESS;
232
233 for (i = 0; i < CHANNEL_NUM; ++i, phy_base += 0x10000, err_code = 0) {
234 misc_stberr_all = *(volatile unsigned int*)(phy_base + 0x1500);
235 misc_stberr_rk0_r = *(volatile unsigned int*)(phy_base + 0x1504);
236 misc_stberr_rk0_f = *(volatile unsigned int*)(phy_base + 0x1508);
237 misc_stberr_rk1_r = *(volatile unsigned int*)(phy_base + 0x150c);
238 misc_stberr_rk1_f = *(volatile unsigned int*)(phy_base + 0x1510);
239
240 if (misc_stberr_all & (1 << 16)) {
241 ret |= (1 << i);
242 #ifdef LAST_DRAMC
243 if ((misc_stberr_rk0_r & 0xffff) != 0) {
244 err_code |= ERR_DRAM_GATING_RK0_R;
245 }
246 if ((misc_stberr_rk0_f & 0xffff) != 0) {
247 err_code |= ERR_DRAM_GATING_RK0_F;
248 }
249 if ((misc_stberr_rk1_r & 0xffff) != 0) {
250 err_code |= ERR_DRAM_GATING_RK1_R;
251 }
252 if ((misc_stberr_rk1_f & 0xffff) != 0) {
253 err_code |= ERR_DRAM_GATING_RK1_F;
254 }
255 dram_fatal_set_gating_err(i, err_code);
256 dram_fatal_set_stberr(i, 0, (misc_stberr_rk0_r & 0xffff) | ((misc_stberr_rk0_f & 0xffff) << 16));
257 dram_fatal_set_stberr(i, 1, (misc_stberr_rk1_r & 0xffff) | ((misc_stberr_rk1_f & 0xffff) << 16));
258 } else {
259 dram_fatal_set_gating_err(i, 0);
260 dram_fatal_set_stberr(i, 0, 0);
261 dram_fatal_set_stberr(i, 1, 0);
262 #endif
263 }
264 }
265
266 return ret;
267 }
268 #endif
269 #if (FOR_DV_SIMULATION_USED == 0 && SW_CHANGE_FOR_SIMULATION == 0)
void print_DBG_info(DRAMC_CTX_T *p)
271 {
272 #ifndef OLYMPUS_TO_BE_PORTING
273 //unsigned int addr = 0x0;
274 //U32 u4value;
275
276 #ifdef DDR_INIT_TIME_PROFILING
277 return;
278 #endif
279
280 mcSHOW_DBG_MSG(("EMI_CONA=%x\n", get_cen_emi_cona()));
281
282 //RISCReadAll();
283 #endif
284 }
285 #endif
286
int mt_get_dram_type(void)
288 {
289 unsigned int dtype = mt_get_dram_type_from_hw_trap();
290
291 if (dtype == TYPE_LPDDR4X)
292 return DTYPE_LPDDR4X;
293 else
294 DRAMC_ASSERT(0);
295
296 return 0;
297 }
298
int mt_get_freq_setting(DRAMC_CTX_T *p)
300 {
301 return p->frequency;
302 }
303
304 #ifdef DDR_RESERVE_MODE
305 extern u32 g_ddr_reserve_enable;
306 extern u32 g_ddr_reserve_success;
307 #define TIMEOUT 3
308 extern void before_Dramc_DDR_Reserved_Mode_setting(void);
309
310 #define CHAN_DRAMC_NAO_MISC_STATUSA(base) (base + 0x80)
311 #define SREF_STATE (1 << 16)
312
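/*
 * Returns 1 only when no DRAMC channel reports the self-refresh state
 * (SREF_STATE in MISC_STATUSA); used by DDR reserve mode to confirm that
 * the DRAM really left self-refresh after the release sequence.
 */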
static unsigned int is_dramc_exit_slf(void)
314 {
315 unsigned int ret;
316
317 ret = *(volatile unsigned *)CHAN_DRAMC_NAO_MISC_STATUSA(Channel_A_DRAMC_NAO_BASE_ADDRESS);
318 if ((ret & SREF_STATE) != 0) {
319 dramc_crit("DRAM CHAN-A is in self-refresh (MISC_STATUSA = 0x%x)\n", ret);
320 return 0;
321 }
322
323 ret = *(volatile unsigned *)CHAN_DRAMC_NAO_MISC_STATUSA(Channel_B_DRAMC_NAO_BASE_ADDRESS);
324 if ((ret & SREF_STATE) != 0) {
325 dramc_crit("DRAM CHAN-B is in self-refresh (MISC_STATUSA = 0x%x)\n", ret);
326 return 0;
327 }
328
329 #if CHANNEL_NUM > 2
330 if (channel_num_auxadc > 2) {
331 ret = *(volatile unsigned *)CHAN_DRAMC_NAO_MISC_STATUSA(Channel_C_DRAMC_NAO_BASE_ADDRESS);
332 if ((ret & SREF_STATE) != 0) {
333 dramc_crit("DRAM CHAN-C is in self-refresh (MISC_STATUSA = 0x%x)\n", ret);
334 return 0;
335 }
336
337 ret = *(volatile unsigned *)CHAN_DRAMC_NAO_MISC_STATUSA(Channel_D_DRAMC_NAO_BASE_ADDRESS);
338 if ((ret & SREF_STATE) != 0) {
339 dramc_crit("DRAM CHAN-D is in self-refresh (MISC_STATUSA = 0x%x)\n", ret);
340 return 0;
341 }
342 }
343 #endif
344
345 dramc_crit("ALL DRAM CHAN is not in self-refresh\n");
346 return 1;
347 }
348
349 #endif
350
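/*
 * Voltage rail wrappers: each setter/getter below routes to the MT6359
 * regulator driver when MTK_PMIC_MT6359 is defined, to the coreboot
 * mainboard regulator API under FOR_COREBOOT, and is otherwise a no-op
 * returning 0. Values are in microvolts. Illustrative usage only (the
 * target value here is a placeholder, not a recommendation):
 *
 *	dramc_set_vcore_voltage(725000);
 *	dramc_debug("Vcore = %d\n", dramc_get_vcore_voltage());
 */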
unsigned int dramc_set_vcore_voltage(unsigned int vcore)
352 {
353 #ifdef MTK_PMIC_MT6359
return mtk_regulator_set_voltage(&reg_vcore, vcore, MAX_VCORE);
355 #elif defined(FOR_COREBOOT)
356 dramc_debug("%s set vcore to %u\n", __func__, vcore);
357 mainboard_set_regulator_voltage(MTK_REGULATOR_VCORE, vcore);
358 return 0;
359 #else
360 return 0;
361 #endif
362 }
363
unsigned int dramc_get_vcore_voltage(void)
365 {
366 #ifdef MTK_PMIC_MT6359
return mtk_regulator_get_voltage(&reg_vcore);
368 #elif defined(FOR_COREBOOT)
369 return mainboard_get_regulator_voltage(MTK_REGULATOR_VCORE);
370 #else
371 return 0;
372 #endif
373 }
374
unsigned int dramc_set_vmdd_voltage(unsigned int ddr_type, unsigned int vdram)
376 {
377 #ifdef MTK_PMIC_MT6359
mtk_regulator_set_voltage(&reg_vdram, vdram, MAX_VDRAM);
379 #elif defined(FOR_COREBOOT)
380 mainboard_set_regulator_voltage(MTK_REGULATOR_VDD2, vdram);
381 #endif
382 return 0;
383 }
384
unsigned int dramc_get_vmdd_voltage(unsigned int ddr_type)
386 {
387 #ifdef MTK_PMIC_MT6359
return mtk_regulator_get_voltage(&reg_vdram);
389 #elif defined(FOR_COREBOOT)
390 return mainboard_get_regulator_voltage(MTK_REGULATOR_VDD2);
391 #else
392 return 0;
393 #endif
394 }
395
unsigned int dramc_set_vmddq_voltage(unsigned int ddr_type, unsigned int vddq)
397 {
398 #ifdef MTK_PMIC_MT6359
mtk_regulator_set_voltage(&reg_vddq, vddq, MAX_VDDQ);
400 #elif defined(FOR_COREBOOT)
401 mainboard_set_regulator_voltage(MTK_REGULATOR_VDDQ, vddq);
402 #endif
403 return 0;
404 }
405
unsigned int dramc_get_vmddq_voltage(unsigned int ddr_type)
407 {
408 #ifdef MTK_PMIC_MT6359
return mtk_regulator_get_voltage(&reg_vddq);
410 #elif defined(FOR_COREBOOT)
411 return mainboard_get_regulator_voltage(MTK_REGULATOR_VDDQ);
412 #else
413 return 0;
414 #endif
415 }
416
unsigned int dramc_set_vmddr_voltage(unsigned int vmddr)
418 {
419 #ifdef MTK_PMIC_MT6359
return mtk_regulator_set_voltage(&reg_vmddr, vmddr, MAX_VMDDR);
421 #elif defined(FOR_COREBOOT)
422 mainboard_set_regulator_voltage(MTK_REGULATOR_VMDDR, vmddr);
423 return 0;
424 #else
425 return 0;
426 #endif
427 }
428
unsigned int dramc_get_vmddr_voltage(void)
430 {
431 #ifdef MTK_PMIC_MT6359
return mtk_regulator_get_voltage(&reg_vmddr);
433 #elif defined(FOR_COREBOOT)
434 return mainboard_get_regulator_voltage(MTK_REGULATOR_VMDDR);
435 #else
436 return 0;
437 #endif
438 }
439
unsigned int dramc_set_vio18_voltage(unsigned int vio18)
441 {
442 #ifdef MTK_PMIC_MT6359
return mtk_regulator_set_voltage(&reg_vio18, vio18, MAX_VIO18);
444 #elif defined(FOR_COREBOOT)
445 mainboard_set_regulator_voltage(MTK_REGULATOR_VDD1, vio18);
446 return 0;
447 #else
448 return 0;
449 #endif
450 }
451
452
unsigned int dramc_get_vio18_voltage(void)
454 {
455 #ifdef MTK_PMIC_MT6359
return mtk_regulator_get_voltage(&reg_vio18);
457 #elif defined(FOR_COREBOOT)
458 return mainboard_get_regulator_voltage(MTK_REGULATOR_VDD1);
459 #else
460 return 0;
461 #endif
462 }
463
unsigned int is_discrete_lpddr4(void)
465 {
466 #if DRAM_AUXADC_CONFIG
467 return dram_type_auxadc;
468 #else
469 return TRUE;
470 #endif
471 }
472
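/*
 * This platform hard-codes LPDDR4X; the HW-trap decoding path below is kept
 * for reference but compiled out by the "#if 1".
 */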
unsigned int mt_get_dram_type_from_hw_trap(void)
474 {
475 #if 1
476 return TYPE_LPDDR4X;
477 #else
478 unsigned int trap = get_dram_type() & 0x7;
479 switch (trap) {
480 case 0:
481 return TYPE_LPDDR4X;
482 //case 1:
483 //case 2:
484 //case 3:
485 //case 4:
486 //case 5:
487 // return TYPE_LPDDR3;
488 //case 6:
489 // return TYPE_LPDDR4;
490 default:
printf("[dramc] Wrong HW Trapping.\n");
492 DRAMC_ASSERT(0);
493 break;
494 }
495 #endif
496 return 0;
497
498 }
499
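/*
 * Grab the five rails used during calibration (vio18, vcore via the
 * "vgpu11" regulator, VDRAM1, VDRAM2, VMDDR), force vcore/vdram into mode
 * 0x1 (presumably forced-PWM), then program the initial voltages either
 * from the DOE/VOLTAGE_SEL overrides or from the SEL_PREFIX_* defaults.
 * Vcore comes from the binning table when VCORE_BIN is defined.
 */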
void setup_dramc_voltage_by_pmic(void)
501 {
502 #ifdef VOLTAGE_SEL
503 int vcore;
504 #endif
505 #ifdef MTK_PMIC_MT6359
506 int ret;
507
ret = mtk_regulator_get("vio18", &reg_vio18);
509 if (ret)
510 dramc_debug("mtk_regulator_get vio18 fail\n");
511
ret = mtk_regulator_get("vgpu11", &reg_vcore);
513 if (ret)
514 dramc_debug("mtk_regulator_get vcore fail\n");
515
ret = mtk_regulator_get("VDRAM1", &reg_vdram);
517 if (ret)
518 printf("mtk_regulator_get vdram fail\n");
519
ret = mtk_regulator_get("VDRAM2", &reg_vddq);
521 if (ret)
522 printf("mtk_regulator_get vddq fail\n");
523
ret = mtk_regulator_get("VMDDR", &reg_vmddr);
525 if (ret)
526 printf("mtk_regulator_get vmddr fail\n");
527
mtk_regulator_set_mode(&reg_vcore, 0x1);
mtk_regulator_set_mode(&reg_vdram, 0x1);
530
531 #ifdef VOLTAGE_SEL
532 //dramc_set_vio18_voltage(vio18_voltage_select());
533 #else
534 //dramc_set_vio18_voltage(SEL_VIO18);
535 #endif
536 #if defined(VCORE_BIN)
537 #ifdef VOLTAGE_SEL
538 vcore = vcore_voltage_select(KSHU0);
539 if (vcore)
540 dramc_set_vcore_voltage(vcore);
541 else
542 #endif
543 dramc_set_vcore_voltage(get_vcore_uv_table(0));
544 #else
545 #ifdef VOLTAGE_SEL
546 dramc_set_vcore_voltage(vcore_voltage_select(KSHU0));
547 #else
548 dramc_set_vcore_voltage(SEL_PREFIX_VCORE(LP4, KSHU0));
549 #endif
550 #endif
551 #ifdef VOLTAGE_SEL
552 dramc_set_vmdd_voltage(TYPE_LPDDR4, vdram_voltage_select());
553 #else
554 dramc_set_vmdd_voltage(TYPE_LPDDR4, SEL_PREFIX_VDRAM(LP4));
555 #endif
556
557 #ifdef VOLTAGE_SEL
558 dramc_set_vmddq_voltage(TYPE_LPDDR4, vddq_voltage_select());
559 #else
560 dramc_set_vmddq_voltage(TYPE_LPDDR4, SEL_PREFIX_VDDQ);
561 #endif
562
563 #ifdef VOLTAGE_SEL
564 dramc_set_vmddr_voltage(vmddr_voltage_select());
565 #else
566 dramc_set_vmddr_voltage(SEL_PREFIX_VMDDR);
567 #endif
568
569 dramc_debug("Vio18 = %d\n", dramc_get_vio18_voltage());
570 dramc_debug("Vcore = %d\n", dramc_get_vcore_voltage());
571 dramc_debug("Vdram = %d\n", dramc_get_vmdd_voltage(TYPE_LPDDR4));
572 dramc_debug("Vddq = %d\n", dramc_get_vmddq_voltage(TYPE_LPDDR4));
573 dramc_debug("vmddr = %d\n", dramc_get_vmddr_voltage());
574 #endif
575 }
576
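/*
 * Re-apply the run-time Vcore after calibration. With VCORE_BIN the value
 * comes from the binning table (get_vcore_uv_table), unless a DOE option
 * pins the DRAM frequency and selects a fixed 0.65/0.725/0.825 V point.
 */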
static void restore_vcore_setting(void)
578 {
579 #ifdef VOLTAGE_SEL
580 int vcore;
581 #endif
582 #ifdef MTK_PMIC_MT6359
583 int ret;
584
ret = mtk_regulator_get("vgpu11", &reg_vcore);
586 if (ret)
587 printf("mtk_regulator_get vcore fail\n");
588
589 #if defined(VCORE_BIN)
590 #ifdef VOLTAGE_SEL
591 vcore = vcore_voltage_select(KSHU0);
592 if ((doe_get_config("dram_fix_3094_0825")) || (doe_get_config("dram_all_3094_0825")) || (doe_get_config("dram_opp0_3733_others_3094_0825")))
593 dramc_set_vcore_voltage(825000);
594 else if (doe_get_config("dram_fix_3094_0725") || (doe_get_config("dram_fix_2400_0725")) || (doe_get_config("dram_fix_1534_0725")) || (doe_get_config("dram_fix_1200_0725")) || (doe_get_config("dram_all_3094_0725")) || (doe_get_config("dram_all_1534_0725")) || (doe_get_config("dram_opp0_3094_others_1534_0725")) || (doe_get_config("dram_opp0_2400_others_1534_0725")))
595 dramc_set_vcore_voltage(725000);
596 else if ((doe_get_config("dram_fix_1200_065")) || (doe_get_config("dram_fix_800_065")))
597 dramc_set_vcore_voltage(650000);
598 else if (vcore)
599 dramc_set_vcore_voltage(vcore);
600 else
601 #endif
602 dramc_set_vcore_voltage(get_vcore_uv_table(0));
603 #else
604 #ifdef VOLTAGE_SEL
605 dramc_set_vcore_voltage(vcore_voltage_select(KSHU0));
606 #else
607 dramc_set_vcore_voltage(SEL_PREFIX_VCORE(LP4, KSHU0));
608 #endif
609 #endif
610
611 dramc_debug("Vcore = %d\n", dramc_get_vcore_voltage());
612 #endif
613 }
614 #if 0
615 static void restore_pmic_setting(void)
616 {
617 #ifdef MTK_PMIC_MT6359
618 int ret;
619
620 restore_vcore_setting();
621
ret = mtk_regulator_get("VDRAM1", &reg_vdram);
623 if (ret) {
624 printf("mtk_regulator_get vdram fail\n");
625 return;
626 }
627
ret = mtk_regulator_get("VDRAM2", &reg_vddq);
629 if (ret) {
630 printf("mtk_regulator_get vddq fail\n");
631 return;
632 }
633
ret = mtk_regulator_get("VMDDR", &reg_vmddr);
635 if (ret) {
636 printf("mtk_regulator_get vmddr fail\n");
637 return;
638 }
639
ret = mtk_regulator_get("vio18", &reg_vio18);
641 if (ret) {
642 printf("mtk_regulator_get vio18 fail\n");
643 return;
644 }
645
646 #if 0
647 dramc_set_vmdd_voltage(TYPE_LPDDR4, 1125000);
648 dramc_set_vmddq_voltage(TYPE_LPDDR4, 600000);
649 dramc_set_vmddr_voltage(750000);
650 #else
651 #ifdef VOLTAGE_SEL
652 dramc_set_vmdd_voltage(TYPE_LPDDR4, vdram_voltage_select());
653 #else
654 dramc_set_vmdd_voltage(TYPE_LPDDR4, SEL_PREFIX_VDRAM(LP4));
655 #endif
656
657 #ifdef VOLTAGE_SEL
658 dramc_set_vmddq_voltage(TYPE_LPDDR4, vddq_voltage_select());
659 #else
660 dramc_set_vmddq_voltage(TYPE_LPDDR4, SEL_PREFIX_VDDQ);
661 #endif
662
663 #ifdef VOLTAGE_SEL
664 dramc_set_vmddr_voltage(vmddr_voltage_select());
665 #else
666 dramc_set_vmddr_voltage(SEL_PREFIX_VMDDR);
667 #endif
668
669 #ifdef VOLTAGE_SEL
670 dramc_set_vio18_voltage(vio18_voltage_select());
671 #else
672 dramc_set_vio18_voltage(SEL_VIO18);
673 #endif
674 #endif
675 dramc_debug("Vdram = %d\n", dramc_get_vmdd_voltage(TYPE_LPDDR4));
676 dramc_debug("Vddq = %d\n", dramc_get_vmddq_voltage(TYPE_LPDDR4));
677 dramc_debug("vmddr = %d\n", dramc_get_vmddr_voltage());
678 dramc_debug("Vio18 = %d\n", dramc_get_vio18_voltage());
679 #endif
680 }
681 #endif
void switch_dramc_voltage_to_auto_mode(void)
683 {
684 #ifdef MTK_PMIC_MT6359
mtk_regulator_set_mode(&reg_vcore, 0x0);
mtk_regulator_set_mode(&reg_vdram, 0x0);
687 #endif
688 }
689
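/*
 * DDR reserve mode exit: restore the PMIC rails, drop the DRAMC isolation
 * and self-refresh controls, then poll is_dramc_exit_slf() up to TIMEOUT
 * times. On timeout g_ddr_reserve_success is cleared so callers fall back
 * to a full re-initialization.
 */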
void release_dram(void)
691 {
692 #ifdef DDR_RESERVE_MODE
693 int i;
694 int counter = TIMEOUT;
695
696
697 restore_pmic_setting();
698
699 drm_release_rg_dramc_conf_iso();
700
701 #if DDR_RESERVE_NEW_MODE
702 ASVA5_8_New_Mode_1();
703 Dramc_DDR_Reserved_Mode_setting();
704 drm_release_rg_dramc_iso();
705 ASVA5_8_New_Mode_2();
706 #else
707 Dramc_DDR_Reserved_Mode_setting();
708 ASVA5_8_CSCA_Pull_Down_EN();
709 drm_release_rg_dramc_iso();
710 ASVA5_8_CSCA_Pull_Down_DIS();
711 #endif
712
713 drm_release_rg_dramc_sref();
714
715 while(counter)
716 {
717 if(is_dramc_exit_slf() == 1)
718 break;
719 counter--;
720 }
721
722 if(counter == 0)
723 {
724 if(g_ddr_reserve_enable==1 && g_ddr_reserve_success==1)
725 {
726 dramc_crit("[DDR Reserve] release dram from self-refresh FAIL!\n");
727 g_ddr_reserve_success = 0;
728 }
729 }
730 else
731 {
732 dramc_crit("[DDR Reserve] release dram from self-refresh PASS!\n");
733 }
734 Dramc_DDR_Reserved_Mode_AfterSR();
735
736 #if DDR_RESERVE_NEW_MODE
737 ASVA5_8_New_Mode_3();
738 #endif
739
740
741 dramc_crit("[DDR reserve] EMI CEN CONA: %x\n", get_cen_emi_cona());
742 dramc_crit("[DDR reserve] EMI CHN CONA: %x\n", get_chn_emi_cona());
743 for (i=0;i<10;i++);
744
745 return;
746 #endif
747 }
748
unsigned int DRAM_MRR(int MRR_num)
750 {
751 u16 MRR_value = 0x0;
752 DRAMC_CTX_T *p = psCurrDramCtx;
753
754 DramcModeRegRead(p, MRR_num, &MRR_value);
755 return MRR_value;
756 }
757
static int mt_get_dram_type_for_dis(void)
759 {
760 return TYPE_LPDDR4X;
761 }
762
763 #ifdef DRAM_QVL_CHECK
static int check_qvl(DRAM_INFO_BY_MRR_T *dram_info, unsigned int dram_type)
765 {
766 unsigned int mr5;
767 unsigned long long rank_size[2];
768 char id[22];
769 int emmc_nand_id_len=16;
770 int fw_id_len;
771 int result;
772 int i;
773
774 mr5 = dram_info->u2MR5VendorID & 0xFF;
775
776 rank_size[0] = dram_info->u8MR8Density[0];
777 rank_size[1] = dram_info->u8MR8Density[1];
778
779 result = platform_get_mcp_id(id, emmc_nand_id_len,&fw_id_len);
780 for (i = 0; i < num_of_emi_records; i++) {
781 dramc_crit("[DRAMC] %s(%d),%s(%x),%s(%x),%s(0x%llx),%s(0x%llx)\n",
782 "qvl", i,
783 "type", qvl_list[i].type,
784 "mr5", qvl_list[i].iLPDDR3_MODE_REG_5,
785 "rank0_size", qvl_list[i].DRAM_RANK_SIZE[0],
786 "rank1_size", qvl_list[i].DRAM_RANK_SIZE[1]);
787
788 if ((qvl_list[i].type & 0xF) != (dram_type & 0xF))
789 continue;
790
791
792 if (qvl_list[i].iLPDDR3_MODE_REG_5 != mr5)
793 continue;
794
795
796 if (qvl_list[i].DRAM_RANK_SIZE[0] != rank_size[0])
797 continue;
798 if (qvl_list[i].DRAM_RANK_SIZE[1] != rank_size[1])
799 continue;
800
801
802 if (qvl_list[i].type & 0xF00) {
803 if (!result) {
804 if (memcmp(id, qvl_list[i].ID, qvl_list[i].id_length)) {
dramc_crit("[DRAMC] storage id mismatch for qvl entry %d\n", i);
806 continue;
807 } else
808 return 0;
809 }
810 } else
811 return 0;
812 }
813
814 return -1;
815 }
816 #endif
817
int get_dram_channel_support_nr(void)
819 {
820 return DRAMC_MAX_CH;
821 }
822
int get_dram_channel_nr(void)
824 {
825 return get_channel_nr_by_emi();
826 }
827
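/*
 * Rank count: in DDR reserve mode trust the live EMI registers, otherwise
 * derive it from the detected EMI_CONA (bits 16/17 are taken here as the
 * dual-rank enables, an assumption based on the masks used in this file).
 */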
int get_dram_rank_nr(void)
829 {
830 int cen_emi_cona;
831
832 #ifdef DDR_RESERVE_MODE
833 if (g_ddr_reserve_enable==1 && g_ddr_reserve_success==1)
834 return get_rank_nr_by_emi();
835 #endif
836
837 #ifdef DRAM_ADAPTIVE
838 if (!auto_detect_done)
839 DRAMC_ASSERT(0);
840 #endif
841
842 cen_emi_cona = g_default_emi_setting.EMI_CONA_VAL;
843
844 if ((cen_emi_cona & (1 << 17)) != 0 ||
845 (cen_emi_cona & (1 << 16)) != 0 )
846 return 2;
847 else
848 return 1;
849 }
850
int get_dram_mr_cnt(void)
852 {
853 return DRAMC_MR_CNT;
854 }
855
int get_dram_freq_cnt(void)
857 {
858 return DRAMC_FREQ_CNT;
859 }
860
861 #if (FOR_DV_SIMULATION_USED==0)
862 #if !__FLASH_TOOL_DA__ && !__ETT__
863
864
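/*
 * Per-rank sizes come from the detected EMI settings; when AUXADC reports
 * more than two channels the value is doubled, presumably because
 * g_default_emi_setting describes a two-channel configuration.
 */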
void get_dram_rank_size(u64 dram_rank_size[DRAMC_MAX_RK])
866 {
867 int rank_nr, i;
868
869 #ifdef DDR_RESERVE_MODE
870 if(g_ddr_reserve_enable==1 && g_ddr_reserve_success==1){
871 get_rank_size_by_emi(dram_rank_size);
872 return;
873 }
874 #endif
875
876 #ifdef DRAM_ADAPTIVE
877 if (!auto_detect_done)
878 DRAMC_ASSERT(0);
879 #endif
880
881 rank_nr = get_dram_rank_nr();
882
883 for(i = 0; i < rank_nr; i++) {
884 dram_rank_size[i] = g_default_emi_setting.DRAM_RANK_SIZE[i];
885 if (channel_num_auxadc > 2)
886 dram_rank_size[i] = dram_rank_size[i] << 1;
887 dramc_debug("%d:dram_rank_size:%llx\n",i,dram_rank_size[i]);
888 }
889 }
890
void get_dram_freq_step(u32 dram_freq_step[])
892 {
893 unsigned int i;
894 unsigned int defined_step[DRAMC_FREQ_CNT] = {
895 4266, 3200, 2400, 1866, 1600, 1200, 800};
896
897 for (i = 0; i < DRAMC_FREQ_CNT; i++) {
898 dram_freq_step[i] = defined_step[i];
899 }
900 }
901
void set_dram_mr(unsigned int index, unsigned short value)
903 {
904 #if 0
905 unsigned short value_2rk;
906
907 value_2rk = value & 0xFF;
908 value_2rk |= (value_2rk << 8);
909
910 switch (index) {
911 case 5:
912 mr5 = value_2rk;
913 break;
914 case 6:
915 mr6 = value_2rk;
916 break;
917 case 7:
918 mr7 = value_2rk;
919 break;
920 case 8:
921 mr8 = value;
922 default:
923 break;
924 }
925 #endif
926 }
927
unsigned short get_dram_mr(unsigned int index)
929 {
930 unsigned int value = 0;
931 #if 0
932 switch (index) {
933 case 5:
934 value = last_dramc_info_ptr->mr5;
935 break;
936 case 6:
937 value = last_dramc_info_ptr->mr6;
938 break;
939 case 7:
940 value = last_dramc_info_ptr->mr7;
941 break;
942 case 8:
943 value = last_dramc_info_ptr->mr8;
944 default:
945 break;
946 }
947 return (unsigned short)(value & 0xFFFF);
948 #else
949 return (unsigned short)(value & 0xFFFF);
950 #endif
951 }
952
void get_dram_mr_info(struct mr_info_t mr_info[])
954 {
955 #if 0
956 unsigned int i;
957 unsigned int mr_list[DRAMC_MR_CNT] = {5, 6, 7, 8};
958
959 for (i = 0; i < DRAMC_MR_CNT; i++) {
960 mr_info[i].mr_index = mr_list[i];
961 mr_info[i].mr_value = get_dram_mr(mr_list[i]);
962 }
963 #endif
964 }
965
966 #endif //#if !__FLASH_TOOL_DA__ && !__ETT__
967 #endif
968 #if 0
969 void freq_table_are_all_3094(void)
970 {
971 gFreqTbl[0].freq_sel = LP4_DDR3200;
972 gFreqTbl[0].divmode = DIV8_MODE;
973 gFreqTbl[0].SRAMIdx= SRAM_SHU1;
974 gFreqTbl[0].duty_calibration_mode = DUTY_NEED_K;
975 gFreqTbl[0].vref_calibartion_enable = VREF_CALI_ON;
976 gFreqTbl[0].ddr_loop_mode = CLOSE_LOOP_MODE;
977
978 gFreqTbl[1].freq_sel = LP4_DDR3200;
979 gFreqTbl[1].divmode = DIV8_MODE;
980 gFreqTbl[1].SRAMIdx = SRAM_SHU3;
981 gFreqTbl[1].duty_calibration_mode = DUTY_NEED_K;
982 gFreqTbl[1].vref_calibartion_enable = VREF_CALI_ON;
983 gFreqTbl[1].ddr_loop_mode = CLOSE_LOOP_MODE;
984
985 gFreqTbl[2].freq_sel = LP4_DDR3200;
986 gFreqTbl[2].divmode = DIV8_MODE;
987 gFreqTbl[2].SRAMIdx = SRAM_SHU2;
988 gFreqTbl[2].duty_calibration_mode = DUTY_NEED_K;
989 gFreqTbl[2].vref_calibartion_enable = VREF_CALI_ON;
990 gFreqTbl[2].ddr_loop_mode = CLOSE_LOOP_MODE;
991
992 gFreqTbl[3].freq_sel = LP4_DDR3200;
993 gFreqTbl[3].divmode = DIV8_MODE;
994 gFreqTbl[3].SRAMIdx = SRAM_SHU0;
995 gFreqTbl[3].duty_calibration_mode = DUTY_NEED_K;
996 gFreqTbl[3].vref_calibartion_enable = VREF_CALI_ON;
997 gFreqTbl[3].ddr_loop_mode = CLOSE_LOOP_MODE;
998
999 gFreqTbl[4].freq_sel = LP4_DDR3200;
1000 gFreqTbl[4].divmode = DIV8_MODE;
1001 gFreqTbl[4].SRAMIdx = SRAM_SHU5;
1002 gFreqTbl[4].duty_calibration_mode = DUTY_NEED_K;
1003 gFreqTbl[4].vref_calibartion_enable = VREF_CALI_ON;
1004 gFreqTbl[4].ddr_loop_mode = CLOSE_LOOP_MODE;
1005
1006 gFreqTbl[5].freq_sel = LP4_DDR3200;
1007 gFreqTbl[5].divmode = DIV8_MODE;
1008 gFreqTbl[5].SRAMIdx = SRAM_SHU4;
1009 gFreqTbl[5].duty_calibration_mode = DUTY_NEED_K;
1010 gFreqTbl[5].vref_calibartion_enable = VREF_CALI_ON;
1011 gFreqTbl[5].ddr_loop_mode = CLOSE_LOOP_MODE;
1012
1013 gFreqTbl[6].freq_sel = LP4_DDR3200;
1014 gFreqTbl[6].divmode = DIV8_MODE;
1015 gFreqTbl[6].SRAMIdx = SRAM_SHU6;
1016 gFreqTbl[6].duty_calibration_mode = DUTY_NEED_K;
1017 gFreqTbl[6].vref_calibartion_enable = VREF_CALI_ON;
1018 gFreqTbl[6].ddr_loop_mode = CLOSE_LOOP_MODE;
1019
1020 }
1021
1022 void freq_table_are_all_1534(void)
1023 {
1024 gFreqTbl[0].freq_sel = LP4_DDR1600;
1025 gFreqTbl[0].divmode = DIV8_MODE;
1026 gFreqTbl[0].SRAMIdx = SRAM_SHU1;
1027 gFreqTbl[0].duty_calibration_mode = DUTY_DEFAULT;
1028 gFreqTbl[0].vref_calibartion_enable = VREF_CALI_ON;
1029 gFreqTbl[0].ddr_loop_mode = CLOSE_LOOP_MODE;
1030
1031 gFreqTbl[1].freq_sel = LP4_DDR1600;
1032 gFreqTbl[1].divmode = DIV8_MODE;
1033 gFreqTbl[1].SRAMIdx = SRAM_SHU3;
1034 gFreqTbl[1].duty_calibration_mode = DUTY_DEFAULT;
1035 gFreqTbl[1].vref_calibartion_enable = VREF_CALI_ON;
1036 gFreqTbl[1].ddr_loop_mode = CLOSE_LOOP_MODE;
1037
1038 gFreqTbl[2].freq_sel = LP4_DDR1600;
1039 gFreqTbl[2].divmode = DIV8_MODE;
1040 gFreqTbl[2].SRAMIdx = SRAM_SHU2;
1041 gFreqTbl[2].duty_calibration_mode = DUTY_DEFAULT;
1042 gFreqTbl[2].vref_calibartion_enable = VREF_CALI_ON;
1043 gFreqTbl[2].ddr_loop_mode = CLOSE_LOOP_MODE;
1044
1045 gFreqTbl[3].freq_sel = LP4_DDR1600;
1046 gFreqTbl[3].divmode = DIV8_MODE;
1047 gFreqTbl[3].SRAMIdx = SRAM_SHU0;
1048 gFreqTbl[3].duty_calibration_mode = DUTY_DEFAULT;
1049 gFreqTbl[3].vref_calibartion_enable = VREF_CALI_ON;
1050 gFreqTbl[3].ddr_loop_mode = CLOSE_LOOP_MODE;
1051
1052 gFreqTbl[4].freq_sel = LP4_DDR1600;
1053 gFreqTbl[4].divmode = DIV8_MODE;
1054 gFreqTbl[4].SRAMIdx = SRAM_SHU5;
1055 gFreqTbl[4].duty_calibration_mode = DUTY_DEFAULT;
1056 gFreqTbl[4].vref_calibartion_enable = VREF_CALI_ON;
1057 gFreqTbl[4].ddr_loop_mode = CLOSE_LOOP_MODE;
1058
1059 gFreqTbl[5].freq_sel = LP4_DDR1600;
1060 gFreqTbl[5].divmode = DIV8_MODE;
1061 gFreqTbl[5].SRAMIdx = SRAM_SHU4;
1062 gFreqTbl[5].duty_calibration_mode = DUTY_DEFAULT;
1063 gFreqTbl[5].vref_calibartion_enable = VREF_CALI_ON;
1064 gFreqTbl[5].ddr_loop_mode = CLOSE_LOOP_MODE;
1065
1066 gFreqTbl[6].freq_sel = LP4_DDR1600;
1067 gFreqTbl[6].divmode = DIV8_MODE;
1068 gFreqTbl[6].SRAMIdx = SRAM_SHU6;
1069 gFreqTbl[6].duty_calibration_mode = DUTY_DEFAULT;
1070 gFreqTbl[6].vref_calibartion_enable = VREF_CALI_ON;
1071 gFreqTbl[6].ddr_loop_mode = CLOSE_LOOP_MODE;
1072
1073 }
1074
1075 void freq_table_opp0_3733_others_3094(void)
1076 {
1077 gFreqTbl[0].freq_sel = LP4_DDR3200;
1078 gFreqTbl[0].divmode = DIV8_MODE;
1079 gFreqTbl[0].SRAMIdx = SRAM_SHU1;
1080 gFreqTbl[0].duty_calibration_mode = DUTY_NEED_K;
1081 gFreqTbl[0].vref_calibartion_enable = VREF_CALI_ON;
1082 gFreqTbl[0].ddr_loop_mode = CLOSE_LOOP_MODE;
1083
1084 gFreqTbl[1].freq_sel = LP4_DDR3200;
1085 gFreqTbl[1].divmode = DIV8_MODE;
1086 gFreqTbl[1].SRAMIdx = SRAM_SHU3;
1087 gFreqTbl[1].duty_calibration_mode = DUTY_NEED_K;
1088 gFreqTbl[1].vref_calibartion_enable = VREF_CALI_ON;
1089 gFreqTbl[1].ddr_loop_mode = CLOSE_LOOP_MODE;
1090
1091 gFreqTbl[2].freq_sel = LP4_DDR3200;
1092 gFreqTbl[2].divmode = DIV8_MODE;
1093 gFreqTbl[2].SRAMIdx = SRAM_SHU2;
1094 gFreqTbl[2].duty_calibration_mode = DUTY_NEED_K;
1095 gFreqTbl[2].vref_calibartion_enable = VREF_CALI_ON;
1096 gFreqTbl[2].ddr_loop_mode = CLOSE_LOOP_MODE;
1097
1098 gFreqTbl[3].freq_sel = LP4_DDR3733;
1099 gFreqTbl[3].divmode = DIV8_MODE;
1100 gFreqTbl[3].SRAMIdx = SRAM_SHU0;
1101 gFreqTbl[3].duty_calibration_mode = DUTY_NEED_K;
1102 gFreqTbl[3].vref_calibartion_enable = VREF_CALI_ON;
1103 gFreqTbl[3].ddr_loop_mode = CLOSE_LOOP_MODE;
1104
1105 gFreqTbl[4].freq_sel = LP4_DDR3200;
1106 gFreqTbl[4].divmode = DIV8_MODE;
1107 gFreqTbl[4].SRAMIdx = SRAM_SHU5;
1108 gFreqTbl[4].duty_calibration_mode = DUTY_NEED_K;
1109 gFreqTbl[4].vref_calibartion_enable = VREF_CALI_ON;
1110 gFreqTbl[4].ddr_loop_mode = CLOSE_LOOP_MODE;
1111
1112 gFreqTbl[5].freq_sel = LP4_DDR3200;
1113 gFreqTbl[5].divmode = DIV8_MODE;
1114 gFreqTbl[5].SRAMIdx = SRAM_SHU4;
1115 gFreqTbl[5].duty_calibration_mode = DUTY_NEED_K;
1116 gFreqTbl[5].vref_calibartion_enable = VREF_CALI_ON;
1117 gFreqTbl[5].ddr_loop_mode = CLOSE_LOOP_MODE;
1118
1119 gFreqTbl[6].freq_sel = LP4_DDR3200;
1120 gFreqTbl[6].divmode = DIV8_MODE;
1121 gFreqTbl[6].SRAMIdx = SRAM_SHU6;
1122 gFreqTbl[6].duty_calibration_mode = DUTY_NEED_K;
1123 gFreqTbl[6].vref_calibartion_enable = VREF_CALI_ON;
1124 gFreqTbl[6].ddr_loop_mode = CLOSE_LOOP_MODE;
1125 }
1126
1127 void freq_table_opp0_3094_others_1534(void)
1128 {
1129 gFreqTbl[0].freq_sel = LP4_DDR1600;
1130 gFreqTbl[0].divmode = DIV8_MODE;
1131 gFreqTbl[0].SRAMIdx = SRAM_SHU1;
1132 gFreqTbl[0].duty_calibration_mode = DUTY_DEFAULT;
1133 gFreqTbl[0].vref_calibartion_enable = VREF_CALI_ON;
1134 gFreqTbl[0].ddr_loop_mode = CLOSE_LOOP_MODE;
1135
1136 gFreqTbl[1].freq_sel = LP4_DDR1600;
1137 gFreqTbl[1].divmode = DIV8_MODE;
1138 gFreqTbl[1].SRAMIdx = SRAM_SHU3;
1139 gFreqTbl[1].duty_calibration_mode = DUTY_DEFAULT;
1140 gFreqTbl[1].vref_calibartion_enable = VREF_CALI_ON;
1141 gFreqTbl[1].ddr_loop_mode = CLOSE_LOOP_MODE;
1142
1143 gFreqTbl[2].freq_sel = LP4_DDR1600;
1144 gFreqTbl[2].divmode = DIV8_MODE;
1145 gFreqTbl[2].SRAMIdx = SRAM_SHU2;
1146 gFreqTbl[2].duty_calibration_mode = DUTY_DEFAULT;
1147 gFreqTbl[2].vref_calibartion_enable = VREF_CALI_ON;
1148 gFreqTbl[2].ddr_loop_mode = CLOSE_LOOP_MODE;
1149
1150 gFreqTbl[3].freq_sel = LP4_DDR3200;
1151 gFreqTbl[3].divmode = DIV8_MODE;
1152 gFreqTbl[3].SRAMIdx = SRAM_SHU0;
1153 gFreqTbl[3].duty_calibration_mode = DUTY_NEED_K;
1154 gFreqTbl[3].vref_calibartion_enable = VREF_CALI_ON;
1155 gFreqTbl[3].ddr_loop_mode = CLOSE_LOOP_MODE;
1156
1157 gFreqTbl[4].freq_sel = LP4_DDR1600;
1158 gFreqTbl[4].divmode = DIV8_MODE;
1159 gFreqTbl[4].SRAMIdx = SRAM_SHU5;
1160 gFreqTbl[4].duty_calibration_mode = DUTY_DEFAULT;
1161 gFreqTbl[4].vref_calibartion_enable = VREF_CALI_ON;
1162 gFreqTbl[4].ddr_loop_mode = CLOSE_LOOP_MODE;
1163
1164 gFreqTbl[5].freq_sel = LP4_DDR1600;
1165 gFreqTbl[5].divmode = DIV8_MODE;
1166 gFreqTbl[5].SRAMIdx = SRAM_SHU4;
1167 gFreqTbl[5].duty_calibration_mode = DUTY_DEFAULT;
1168 gFreqTbl[5].vref_calibartion_enable = VREF_CALI_ON;
1169 gFreqTbl[5].ddr_loop_mode = CLOSE_LOOP_MODE;
1170
1171 gFreqTbl[6].freq_sel = LP4_DDR1600;
1172 gFreqTbl[6].divmode = DIV8_MODE;
1173 gFreqTbl[6].SRAMIdx = SRAM_SHU6;
1174 gFreqTbl[6].duty_calibration_mode = DUTY_DEFAULT;
1175 gFreqTbl[6].vref_calibartion_enable = VREF_CALI_ON;
1176 gFreqTbl[6].ddr_loop_mode = CLOSE_LOOP_MODE;
1177 }
1178
1179 void freq_table_opp0_2400_others_1534(void)
1180 {
1181 gFreqTbl[0].freq_sel = LP4_DDR1600;
1182 gFreqTbl[0].divmode = DIV8_MODE;
1183 gFreqTbl[0].SRAMIdx = SRAM_SHU1;
1184 gFreqTbl[0].duty_calibration_mode = DUTY_DEFAULT;
1185 gFreqTbl[0].vref_calibartion_enable = VREF_CALI_ON;
1186 gFreqTbl[0].ddr_loop_mode = CLOSE_LOOP_MODE;
1187
1188 gFreqTbl[1].freq_sel = LP4_DDR1600;
1189 gFreqTbl[1].divmode = DIV8_MODE;
1190 gFreqTbl[1].SRAMIdx = SRAM_SHU3;
1191 gFreqTbl[1].duty_calibration_mode = DUTY_DEFAULT;
1192 gFreqTbl[1].vref_calibartion_enable = VREF_CALI_ON;
1193 gFreqTbl[1].ddr_loop_mode = CLOSE_LOOP_MODE;
1194
1195 gFreqTbl[2].freq_sel = LP4_DDR1600;
1196 gFreqTbl[2].divmode = DIV8_MODE;
1197 gFreqTbl[2].SRAMIdx = SRAM_SHU2;
1198 gFreqTbl[2].duty_calibration_mode = DUTY_DEFAULT;
1199 gFreqTbl[2].vref_calibartion_enable = VREF_CALI_ON;
1200 gFreqTbl[2].ddr_loop_mode = CLOSE_LOOP_MODE;
1201
1202 gFreqTbl[3].freq_sel = LP4_DDR2400;
1203 gFreqTbl[3].divmode = DIV8_MODE;
1204 gFreqTbl[3].SRAMIdx = SRAM_SHU0;
1205 gFreqTbl[3].duty_calibration_mode = DUTY_NEED_K;
1206 gFreqTbl[3].vref_calibartion_enable = VREF_CALI_ON;
1207 gFreqTbl[3].ddr_loop_mode = CLOSE_LOOP_MODE;
1208
1209 gFreqTbl[4].freq_sel = LP4_DDR1600;
1210 gFreqTbl[4].divmode = DIV8_MODE;
1211 gFreqTbl[4].SRAMIdx = SRAM_SHU5;
1212 gFreqTbl[4].duty_calibration_mode = DUTY_DEFAULT;
1213 gFreqTbl[4].vref_calibartion_enable = VREF_CALI_ON;
1214 gFreqTbl[4].ddr_loop_mode = CLOSE_LOOP_MODE;
1215
1216 gFreqTbl[5].freq_sel = LP4_DDR1600;
1217 gFreqTbl[5].divmode = DIV8_MODE;
1218 gFreqTbl[5].SRAMIdx = SRAM_SHU4;
1219 gFreqTbl[5].duty_calibration_mode = DUTY_DEFAULT;
1220 gFreqTbl[5].vref_calibartion_enable = VREF_CALI_ON;
1221 gFreqTbl[5].ddr_loop_mode = CLOSE_LOOP_MODE;
1222
1223 gFreqTbl[6].freq_sel = LP4_DDR1600;
1224 gFreqTbl[6].divmode = DIV8_MODE;
1225 gFreqTbl[6].SRAMIdx = SRAM_SHU6;
1226 gFreqTbl[6].duty_calibration_mode = DUTY_DEFAULT;
1227 gFreqTbl[6].vref_calibartion_enable = VREF_CALI_ON;
1228 gFreqTbl[6].ddr_loop_mode = CLOSE_LOOP_MODE;
1229 }
1230 #endif
1231
1232 #if (CFG_DRAM_LOG_TO_STORAGE)
1233 #include <blkdev.h>
1234 #include <partition.h>
1235 #include <storage_api.h>
1236
1237 extern u64 get_part_addr(const char *name);
1238 u64 part_dram_data_addr_uart = 0;
1239 u32 log_start = 0;
1240 static char logbuf[1024];
1241 static int logcount;
1242 #endif
1243
1244 #ifdef VOLTAGE_SEL
void update_voltage_select_info(void)
1246 {
1247 voltage_sel_info_ptr.vcore = doe_get_config("vcore");
1248 voltage_sel_info_ptr.vdram = doe_get_config("vdram");
1249 voltage_sel_info_ptr.vddq = doe_get_config("vddq");
1250 voltage_sel_info_ptr.vmddr = doe_get_config("vmddr");
1251 voltage_sel_info_ptr.vio18 = doe_get_config("vio18");
1252
1253 print("DOE setting: vcore %d, vdram %d, vddq %d, vmddr %d, vio18 %d \n",
1254 voltage_sel_info_ptr.vcore, voltage_sel_info_ptr.vdram,
1255 voltage_sel_info_ptr.vddq, voltage_sel_info_ptr.vmddr,
1256 voltage_sel_info_ptr.vio18);
1257 }
1258
int vio18_voltage_select()
1260 {
1261 if (voltage_sel_info_ptr.vio18 == LEVEL_LV) {
1262 return HQA_VIO18_LV;
1263 } else if (voltage_sel_info_ptr.vio18 == LEVEL_HV) {
1264 return HQA_VIO18_HV;
1265 } else {
1266 return HQA_VIO18_NV;
1267 }
1268 }
1269
int vmddr_voltage_select()
1271 {
1272 if (voltage_sel_info_ptr.vmddr == LEVEL_LV) {
1273 return HQA_VMDDR_LV_LP4;
1274 } else if (voltage_sel_info_ptr.vmddr == LEVEL_HV) {
1275 return HQA_VMDDR_HV_LP4;
1276 } else {
1277 return HQA_VMDDR_NV_LP4;
1278 }
1279 }
1280
int vddq_voltage_select()
1282 {
1283 if (voltage_sel_info_ptr.vddq == LEVEL_LV) {
1284 return HQA_VDDQ_LV_LP4;
1285 } else if (voltage_sel_info_ptr.vddq == LEVEL_HV) {
1286 return HQA_VDDQ_HV_LP4;
1287 } else {
1288 return HQA_VDDQ_NV_LP4;
1289 }
1290 }
1291
int vdram_voltage_select(void)
1293 {
1294 if (voltage_sel_info_ptr.vdram == LEVEL_LV) {
1295 return HQA_VDRAM_LV_LP4;
1296 } else if (voltage_sel_info_ptr.vdram == LEVEL_HV) {
1297 return HQA_VDRAM_HV_LP4;
1298 } else {
1299 return HQA_VDRAM_NV_LP4;
1300 }
1301 }
1302
int vcore_voltage_select(DRAM_KSHU kshu)
1304 {
1305 int ret = 0;
1306 if (voltage_sel_info_ptr.vcore == LEVEL_LV) {
1307 switch(kshu) {
1308 case KSHU0:
1309 ret = HQA_VCORE_LV_LP4_KSHU0_PL;
1310 break;
1311 case KSHU1:
1312 ret = HQA_VCORE_LV_LP4_KSHU1_PL;
1313 break;
1314 case KSHU2:
1315 ret = HQA_VCORE_LV_LP4_KSHU2_PL;
1316 break;
1317 case KSHU3:
1318 ret = HQA_VCORE_LV_LP4_KSHU3_PL;
1319 break;
1320 case KSHU4:
1321 ret = HQA_VCORE_LV_LP4_KSHU4_PL;
1322 break;
1323 case KSHU5:
1324 ret = HQA_VCORE_LV_LP4_KSHU5_PL;
1325 break;
1326 case KSHU6:
1327 ret = HQA_VCORE_LV_LP4_KSHU6_PL;
1328 break;
1329 };
1330 } else if (voltage_sel_info_ptr.vcore == LEVEL_HV) {
1331 switch(kshu) {
1332 case KSHU0:
1333 ret = HQA_VCORE_HV_LP4_KSHU0_PL;
1334 break;
1335 case KSHU1:
1336 ret = HQA_VCORE_HV_LP4_KSHU1_PL;
1337 break;
1338 case KSHU2:
1339 ret = HQA_VCORE_HV_LP4_KSHU2_PL;
1340 break;
1341 case KSHU3:
1342 ret = HQA_VCORE_HV_LP4_KSHU3_PL;
1343 break;
1344 case KSHU4:
1345 ret = HQA_VCORE_HV_LP4_KSHU4_PL;
1346 break;
1347 case KSHU5:
1348 ret = HQA_VCORE_HV_LP4_KSHU5_PL;
1349 break;
1350 case KSHU6:
1351 ret = HQA_VCORE_HV_LP4_KSHU6_PL;
1352 break;
1353 };
1354 } else {
1355 #if defined(VCORE_BIN)
1356 ret = 0;
1357 #else
1358 switch(kshu) {
1359 case KSHU0:
1360 ret = HQA_VCORE_NV_LP4_KSHU0_PL;
1361 break;
1362 case KSHU1:
1363 ret = HQA_VCORE_NV_LP4_KSHU1_PL;
1364 break;
1365 case KSHU2:
1366 ret = HQA_VCORE_NV_LP4_KSHU2_PL;
1367 break;
1368 case KSHU3:
1369 ret = HQA_VCORE_NV_LP4_KSHU3_PL;
1370 break;
1371 case KSHU4:
1372 ret = HQA_VCORE_NV_LP4_KSHU4_PL;
1373 break;
1374 case KSHU5:
1375 ret = HQA_VCORE_NV_LP4_KSHU5_PL;
1376 break;
1377 case KSHU6:
1378 ret = HQA_VCORE_NV_LP4_KSHU6_PL;
1379 break;
1380 };
1381 #endif
1382 }
1383
1384 return ret;
1385 }
1386
1387 #endif
1388
1389 #ifdef DRAM_ADAPTIVE
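/*
 * Fold the MDL results into the default EMI setting: MR8-derived rank
 * densities, the MR5 vendor ID, and a CBT mode chosen from the per-rank
 * die counts (one die -> normal mode, two dies -> byte mode).
 */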
static int update_dram_setting(EMI_SETTINGS *default_emi_setting, unsigned int dram_type, DRAM_INFO_BY_MRR_T *dram_info)
1391 {
1392 default_emi_setting->type = dram_type;
1393
1394 if (dram_info != NULL) {
1395 default_emi_setting->DRAM_RANK_SIZE[0] = (u64)dram_info->u8MR8Density[0];
1396 default_emi_setting->DRAM_RANK_SIZE[1] = (u64)dram_info->u8MR8Density[1];
1397
1398 default_emi_setting->iLPDDR3_MODE_REG_5 = dram_info->u2MR5VendorID;
1399
1400 if (dram_info->u4RankNum == 1) {
1401 if (dram_info->u1DieNum[RANK_0] == 1)
1402 default_emi_setting->dram_cbt_mode_extern = CBT_R0_R1_NORMAL;
1403 else if (dram_info->u1DieNum[RANK_0] == 2)
1404 default_emi_setting->dram_cbt_mode_extern = CBT_R0_R1_BYTE;
1405 else
1406 return -1;
1407 } else if (dram_info->u4RankNum == 2) {
1408 if ((dram_info->u1DieNum[RANK_0] == 1) && (dram_info->u1DieNum[RANK_1] == 1))
1409 default_emi_setting->dram_cbt_mode_extern = CBT_R0_R1_NORMAL;
1410 else if ((dram_info->u1DieNum[RANK_0] == 1) && (dram_info->u1DieNum[RANK_1] == 2))
1411 default_emi_setting->dram_cbt_mode_extern = CBT_R0_NORMAL_R1_BYTE;
1412 else if ((dram_info->u1DieNum[RANK_0] == 2) && (dram_info->u1DieNum[RANK_1] == 1))
1413 default_emi_setting->dram_cbt_mode_extern = CBT_R0_BYTE_R1_NORMAL;
1414 else if ((dram_info->u1DieNum[RANK_0] == 2) && (dram_info->u1DieNum[RANK_1] == 2))
1415 default_emi_setting->dram_cbt_mode_extern = CBT_R0_R1_BYTE;
1416 else
1417 return -2;
1418 } else
1419 return -3;
1420 } else
1421 return -4;
1422
1423 return 0;
1424 }
1425
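/*
 * Translate the MDL results into EMI_INFO_T. Row width is looked up from
 * the key (die_size | die_count << 4 | LP4-family flag); for example
 * 0x20000011 (a 0x20000000-byte die, one die per rank, LP4 family) maps to
 * 15 row bits. Unrecognized combinations fail the detection.
 */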
static int decode_emi_info(EMI_INFO_T *emi_info, unsigned int dram_type, DRAM_INFO_BY_MRR_T *dram_info)
1427 {
1428 unsigned int i;
1429 unsigned long long die_size;
1430
1431 emi_info->dram_type = dram_type;
1432 emi_info->ch_num = 2;
1433 emi_info->bank_width[0] = 3;
1434 emi_info->bank_width[1] = 3;
1435 emi_info->col_width[0] = 10;
1436 emi_info->col_width[1] = 10;
1437
1438 if (dram_info != NULL) {
1439 emi_info->rank_size[0] = (u64)dram_info->u8MR8Density[0];
1440 emi_info->rank_size[1] = (u64)dram_info->u8MR8Density[1];
1441 /**
1442 * die size = chn * rank_num * rank_size
1443 **/
1444 //emi_info->rank_size[0] /= emi_info->ch_num;
1445 //emi_info->rank_size[1] /= emi_info->ch_num;
1446 emi_info->rk_num = dram_info->u4RankNum;
1447
1448 for (i = 0; i < emi_info->rk_num; i++) {
1449 die_size = emi_info->rank_size[i] / dram_info->u1DieNum[i];
1450
1451 switch (die_size | (dram_info->u1DieNum[i] << 4) | u1IsLP4Family(dram_type)) {
1452 case 0x20000011ULL:
1453 case 0x20000021ULL:
1454 case 0x40000021ULL:
1455 case 0x30000011ULL:
1456 case 0x40000011ULL:
1457 emi_info->row_width[i] = 15;
1458 break;
1459 case 0x30000021ULL:
1460 case 0x60000011ULL:
1461 case 0x80000011ULL:
1462 emi_info->row_width[i] = 16;
1463 break;
1464 case 0x060000021ULL:
1465 case 0x080000021ULL:
1466 case 0x0C0000011ULL:
1467 case 0x100000011ULL:
1468 emi_info->row_width[i] = 17;
1469 break;
1470 case 0x0C0000021ULL:
1471 case 0x100000021ULL:
1472 emi_info->row_width[i] = 18;
1473 break;
1474 default:
1475 return -1;
1476 }
1477 }
1478 } else
1479 return -1;
1480 return 0;
1481 }
1482 #endif
1483
1484 #if (FOR_DV_SIMULATION_USED==0)
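/*
 * Auto detection: run Init_DRAM() in GET_MDL_USED mode (or reuse a cached
 * MDL record when SUPPORT_SAVE_TIME_FOR_CALIBRATION allows it) to read the
 * MR5/MR8 data, optionally check it against the QVL, then update both the
 * default EMI setting and the decoded EMI info before marking
 * auto_detect_done.
 */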
void dram_auto_detection(void)
1486 {
1487 DRAM_INFO_BY_MRR_T dram_info;
1488 EMI_INFO_T emi_info;
1489 DRAM_CBT_MODE_EXTERN_T dram_mode;
1490 unsigned int dram_type;
1491 int ret;
1492
1493 dram_type = (unsigned int)mt_get_dram_type_for_dis();
1494 g_default_emi_setting.type &= ~0xFF;
1495 g_default_emi_setting.type |= (dram_type & 0xFF);
1496 #if SUPPORT_SAVE_TIME_FOR_CALIBRATION
1497 if (!u1IsLP4Family(dram_type) ||
1498 read_offline_dram_mdl_data(&dram_info) < 0) {
1499 #endif
1500 dram_mode = (DRAM_CBT_MODE_EXTERN_T)((u1IsLP4Family(dram_type))?
1501 CBT_BYTE_MODE1 : CBT_NORMAL_MODE);
1502 #if defined(SLT)
1503 SLT_Test_Main_Flow(dram_type, dram_mode, &dram_info, SLT_USED);
1504 #endif
1505 Init_DRAM(dram_type, dram_mode, &dram_info, GET_MDL_USED);
1506 #if SUPPORT_SAVE_TIME_FOR_CALIBRATION
1507 if (u1IsLP4Family(dram_type))
1508 write_offline_dram_mdl_data(&dram_info);
1509 }
1510 #endif
1511
1512 #ifdef DRAM_QVL_CHECK
1513 ret = check_qvl(&dram_info, dram_type);
1514 if (ret) {
1515 dramc_crit("[DRAMC] check_qvl err %d\n", ret);
1516 DRAMC_ASSERT(0);
1517 }
1518 #endif
1519
1520 ret = update_dram_setting(&g_default_emi_setting, dram_type, &dram_info);
1521 if (ret) {
1522 dramc_crit("[DRAMC] update_dram_setting err %d\n", ret);
1523 DRAMC_ASSERT(0);
1524 }
1525
1526 ret = decode_emi_info(&emi_info, dram_type, &dram_info);
1527 if (ret) {
1528 dramc_crit("[DRAMC] decode_emi_info err %d\n", ret);
1529 DRAMC_ASSERT(0);
1530 }
1531
1532
1533 ret = update_emi_setting(&g_default_emi_setting, &emi_info);
1534 if (ret) {
1535 dramc_crit("[DRAMC] update_emi_setting err %d\n", ret);
1536 DRAMC_ASSERT(0);
1537 }
1538
1539 auto_detect_done = 1;
1540 }
1541
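/*
 * Platform entry point for DRAM bring-up: set up the PMIC rails, read the
 * channel count from AUXADC, run auto detection, perform the full
 * Init_DRAM() calibration in NORMAL_USED mode, and finally return the
 * regulators to auto mode and restore the run-time Vcore.
 */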
void mt_set_emi(struct dramc_param *dparam)
1543 {
1544 //int index;
1545 /*unsigned int SW_CTRL_VC, HW_CTRL_VC;*/
1546 EMI_SETTINGS *emi_set;
1547 dramc_crit("[DRAMC] Dram fast K start\n");
1548 #ifdef VOLTAGE_SEL
1549 update_voltage_select_info();
1550 #endif
1551
1552 #if ENABLE_PINMUX_FOR_RANK_SWAP
1553 EMI_rank_swap_handle();
1554 #endif
1555
1556
1557 setup_dramc_voltage_by_pmic();
1558
1559 #if DRAM_AUXADC_CONFIG
1560 get_ch_num_by_auxadc();
1561 #endif
1562
1563 #ifdef DRAM_ADAPTIVE
1564 dram_auto_detection();
1565 #endif
1566 emi_set = &g_default_emi_setting;
1567
1568 #ifdef DDR_RESERVE_MODE
1569 if(g_ddr_reserve_enable==1 && g_ddr_reserve_success==0)
1570 Before_Init_DRAM_While_Reserve_Mode_fail(emi_set->type & 0xF);
1571 #endif
1572
1573 #if (CFG_DRAM_LOG_TO_STORAGE)
1574 log_start = 1;
1575 print("log_start=0x%x part_dram_data_addr_uart=0x%llx \n",log_start,part_dram_data_addr_uart);
1576 #endif
1577 #if defined(SLT)
1578 SLT_Init_DRAM((emi_set->type & 0xF), emi_set->dram_cbt_mode_extern, NULL, NORMAL_USED);
1579 #else
1580 Init_DRAM((emi_set->type & 0xF), emi_set->dram_cbt_mode_extern, NULL, NORMAL_USED);
1581 #endif
1582 switch_dramc_voltage_to_auto_mode();
1583 restore_vcore_setting();
1584
1585 #if (CFG_DRAM_LOG_TO_STORAGE)
1586 log_start = 0;
1587 print("log_start=0x%x part_dram_data_addr_uart=0x%llx \n",log_start,part_dram_data_addr_uart);
1588 #endif
1589 #if 0
1590 {
1591 DRAMC_CTX_T * p = psCurrDramCtx;
1592 DramcRegDump(p);
1593 }
1594 #endif
1595 }
1596 #endif
1597
1598 #define DRAMC_ADDR_SHIFT_CHN(addr, channel) (addr + (channel * 0x10000))
1599
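/*
 * Copy len bytes from src_pa to dst_pa with the CQ-DMA engine at
 * CQ_DMA_BASE and busy-wait for completion; used below to place the
 * 0xAAAA5555 dummy-read pattern at each channel's reserved address. The
 * register offsets (0x08/0x18/0x1c/0x20/0x24/0x60/0x64) follow this SoC's
 * CQ-DMA layout as used here.
 */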
static void put_dummy_read_pattern(unsigned long long dst_pa, unsigned long long src_pa, unsigned int len)
1601 {
1602 *((volatile unsigned int *)(CQ_DMA_BASE + 0x018)) = 7 << 16;
1603
1604 *((volatile unsigned int *)(CQ_DMA_BASE + 0x01c)) = src_pa;
1605 *((volatile unsigned int *)(CQ_DMA_BASE + 0x060)) = 0;
1606
1607 *((volatile unsigned int *)(CQ_DMA_BASE + 0x020)) = dst_pa & 0xffffffff;
1608 *((volatile unsigned int *)(CQ_DMA_BASE + 0x064)) = dst_pa >> 32;
1609
1610 *((volatile unsigned int *)(CQ_DMA_BASE + 0x024)) = len;
1611 dsb();
1612 *((volatile unsigned int *)(CQ_DMA_BASE + 0x008)) = 0x1;
1613
1614 while(*((volatile unsigned int *)(CQ_DMA_BASE + 0x008)));
1615 }
1616
unsigned int get_dramc_addr(dram_addr_t *dram_addr, unsigned int offset)
1618 {
1619 static char init_pattern = 0;
1620 unsigned int channel_num, rank_num;
1621 unsigned long long dummy_read_addr;
1622 unsigned long long rank_size[DRAMC_MAX_RK];
1623 unsigned int index;
1624 unsigned int *src_addr;
1625
1626 channel_num = (unsigned int) get_dram_channel_nr();
1627 rank_num = (unsigned int) get_dram_rank_nr();
1628 get_rank_size_by_emi(rank_size);
1629 dummy_read_addr = 0x40000000;
1630 src_addr = (unsigned int *) 0x40000000;
1631
1632 if (dram_addr->ch >= channel_num) {
1633 mcSHOW_DBG_MSG(("[DRAMC] invalid channel: %d\n", dram_addr->ch));
1634 return 0;
1635 }
1636
1637 if (dram_addr->rk >= rank_num) {
1638 mcSHOW_DBG_MSG(("[DRAMC] invalid rank: %d\n", dram_addr->rk));
1639 return 0;
1640 }
1641
1642 for (index = 0; index <= dram_addr->rk; index++)
1643 dummy_read_addr += rank_size[index];
1644 dummy_read_addr -= offset;
1645 dummy_read_addr &= ~(0x300);
1646
1647 if (offset == 0x20) {
1648 for (index = 0; index < 4; index++)
1649 *(src_addr + index) = 0xAAAA5555;
1650
1651 if (!init_pattern) {
1652 for (index = 0; index < channel_num; index++) {
1653 put_dummy_read_pattern(dummy_read_addr | (index << 8),
1654 (unsigned long long) src_addr, 16);
1655 }
1656
1657 init_pattern = 1;
1658 }
1659 }
1660
1661 dram_addr->full_sys_addr = dummy_read_addr;
1662 phy_addr_to_dram_addr(dram_addr, dummy_read_addr);
1663
1664 return dram_addr->addr;
1665 }
1666
unsigned int get_dummy_read_addr(dram_addr_t *dram_addr)
1668 {
1669 return get_dramc_addr(dram_addr, 0x20);
1670 }
1671
static unsigned int get_ta2_addr(dram_addr_t *dram_addr)
1673 {
1674 return (get_dramc_addr(dram_addr, 0x1000) >> 2) & 0xFFFFFFF8;
1675 }
1676
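/*
 * Program the TEST2 agent (TA2) of one channel: per-rank test address from
 * get_ta2_addr(), test length, cross-talk pattern, and a test count that
 * covers both ranks when two are present. The TA2 result is latched in
 * WDT_DBG_SIGNAL and collected by update_last_dramc_info().
 */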
void init_ta2_single_channel(unsigned int channel)
1678 {
1679 unsigned int temp;
1680 dram_addr_t dram_addr;
1681 DRAMC_CTX_T *p = psCurrDramCtx;
1682 int test_cnt;
1683
1684
1685 temp = u4IO32Read4B(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_TEST2_A3, channel)) & 0x1FFFFFFF;
1686 vIO32Write4B(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_TEST2_A3, channel), temp);
1687
1688
1689 temp = u4IO32Read4B(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_TEST2_A4, channel)) & 0x8FFFFFFF;
1690 temp |= (0x4 << 28);
1691 vIO32Write4B(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_TEST2_A4, channel), temp);
1692
1693
1694 temp = u4IO32Read4B(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_TEST2_A3, channel)) & 0xFFFFFFF0;
1695 vIO32Write4B(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_TEST2_A3, channel), temp | 0x1);
1696
1697
1698 dram_addr.ch = channel;
1699 dram_addr.rk = 0;
1700 temp = (u4IO32Read4B(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_RK_TEST2_A1, channel)) & 0x00000007);
1701 vIO32Write4B(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_RK_TEST2_A1, channel), temp | get_ta2_addr(&dram_addr));
1702 dram_addr.rk = 1;
1703 temp = (u4IO32Read4B(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_RK_TEST2_A1+0x200, channel)) & 0x00000007);
1704 vIO32Write4B(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_RK_TEST2_A1+0x200, channel), temp | get_ta2_addr(&dram_addr));
1705
1706
1707 temp = (u4IO32Read4B(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_TEST2_A2, channel)) & 0x0000000F) | (0x20 << 4);
1708 vIO32Write4B(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_TEST2_A2, channel), temp);
1709
1710
1711 test_cnt = (get_dram_rank_nr() > 1) ? 1 : 0;
1712 vIO32WriteFldAlign(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_TEST2_A3, channel), 0, TEST2_A3_TESTAUDPAT);
1713 vIO32WriteFldAlign(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_TEST2_A3, channel), test_cnt, TEST2_A3_TESTCNT);
1714 vIO32WriteFldAlign(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_TEST2_A4, channel), 1, TEST2_A4_TESTXTALKPAT);
1715
1716 return;
1717 }
1718
1719 #ifdef LAST_DRAMC
1720
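/*
 * LAST_DRAMC support: a magic-tagged record (last_dramc_info_ptr) kept in
 * the debug-info region holds the latched TA2 results of the previous boot,
 * a reboot counter, a simple XOR checksum and the fatal-error flags used by
 * the dram_fatal_* helpers below.
 */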
static unsigned int is_last_dramc_initialized(void)
1722 {
1723 if(last_dramc_info_ptr->ta2_result_magic != LAST_DRAMC_MAGIC_PATTERN) {
1724 return 0;
1725 } else {
1726 return 1;
1727 }
1728 }
1729
void update_last_dramc_info(void)
1731 {
1732 unsigned int chn;
1733 unsigned long long latch_result = 0;
1734 unsigned int temp;
1735 unsigned int *curr;
1736 DRAMC_CTX_T *p = psCurrDramCtx;
1737
1738
1739 if(last_dramc_info_ptr->ta2_result_magic != LAST_DRAMC_MAGIC_PATTERN) {
1740 last_dramc_info_ptr->ta2_result_magic = LAST_DRAMC_MAGIC_PATTERN;
1741 last_dramc_info_ptr->ta2_result_last = 0;
1742 last_dramc_info_ptr->ta2_result_past = 0;
1743 last_dramc_info_ptr->ta2_result_checksum = LAST_DRAMC_MAGIC_PATTERN;
1744 last_dramc_info_ptr->reboot_count = 0;
1745 last_dramc_info_ptr->mr5 = mr5;
1746 last_dramc_info_ptr->mr6 = mr6;
1747 last_dramc_info_ptr->mr7 = mr7;
1748 last_dramc_info_ptr->mr8 = mr8;
1749 } else {
1750 last_dramc_info_ptr->ta2_result_checksum ^= last_dramc_info_ptr->reboot_count;
1751 last_dramc_info_ptr->reboot_count++;
1752 last_dramc_info_ptr->ta2_result_checksum ^= last_dramc_info_ptr->reboot_count;
1753 }
1754
1755
1756
1757
1758 for (chn = 0; chn < CHANNEL_NUM; ++chn) {
1759 //dramc_crit("[LastDRAMC] latch result before RST: %x\n", u4IO32Read4B(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_WDT_DBG_SIGNAL, chn)));
1760 latch_result = (latch_result << 16) | u4IO32Read4B(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_WDT_DBG_SIGNAL, chn)) & 0xFFFF;
1761 temp = u4IO32Read4B(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_WDT_RST, chn));
1762 vIO32Write4B(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_WDT_RST, chn), temp | 0x00000001);
1763 vIO32Write4B(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_WDT_RST, chn), temp & 0xFFFFFFFE);
1764 //dramc_crit("[LastDRAMC] latch result after RST: %x\n", u4IO32Read4B(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_WDT_DBG_SIGNAL, chn)));
1765
1766 }
1767
1768 last_dramc_info_ptr->ta2_result_checksum ^= last_dramc_info_ptr->ta2_result_past ^ latch_result;
1769 last_dramc_info_ptr->ta2_result_past = last_dramc_info_ptr->ta2_result_last;
1770 last_dramc_info_ptr->ta2_result_last = latch_result;
1771 for (temp = 0; temp < sizeof(LAST_DRAMC_INFO_T) / sizeof(temp); temp++) {
1772 curr = (unsigned int *)last_dramc_info_ptr + temp;
1773 dramc_crit("[LastDRAMC] 0x%x: 0x%x\n", curr, *curr);
1774 }
1775
1776 return;
1777 }
1778
void init_ta2_all_channel(void)
1780 {
1781 unsigned int chn;
1782
1783 update_last_dramc_info();
1784
1785
1786 #if CFG_ENABLE_DCACHE
1787 plat_clean_invalidate_dcache();
1788 #endif
1789
1790 for (chn = 0; chn < CHANNEL_NUM; ++chn)
1791 init_ta2_single_channel(chn);
1792 }
1793
1794
unsigned int check_gating_err_in_dramc_latch(void)
1796 {
1797 unsigned int chn, ret = 0;
1798 DRAMC_CTX_T *p = psCurrDramCtx;
1799
1800 if ((g_boot_reason == BR_POWER_KEY) || (g_boot_reason == BR_USB)
1801 || mtk_wdt_is_pmic_full_reset() || (is_last_dramc_initialized() == 0)){
1802 dramc_crit("for cold boot, always return 0\n");
1803 return 0;
1804 }
1805
1806 for (chn = 0; chn <= 3; ++chn) {
1807 dramc_crit("[dramc] latch check in channel %d (0x%x)\n",
1808 chn, u4IO32Read4B(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_WDT_DBG_SIGNAL, chn)));
1809 if (u4IO32Read4B(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_WDT_DBG_SIGNAL, chn)) & 0x4000) {
1810 dramc_crit("[dramc] found gating error in channel %d (0x%x)\n",
1811 chn, u4IO32Read4B(DRAMC_ADDR_SHIFT_CHN(DRAMC_REG_WDT_DBG_SIGNAL, chn)));
1812 ret |= (1 << chn);
1813 }
1814 }
1815
1816 return ret;
1817 }
1818
void dram_fatal_exception_detection_start(void)
1820 {
1821 last_dramc_info_ptr = (LAST_DRAMC_INFO_T *) get_dbg_info_base(KEY_LAST_DRAMC);
1822
1823 #if SUPPORT_SAVE_TIME_FOR_CALIBRATION
1824 part_dram_data_addr = get_part_addr("boot_para") + 0x100000;
1825 if (part_dram_data_addr != 0x0)
1826 dramc_crit("[dramc] init partition address is 0x%llx\n", part_dram_data_addr);
1827 else {
1828 dramc_crit("[dramc] init partition address is incorrect !!!\n");
1829 }
1830 #endif
1831
1832 #if defined(SLT)
1833 part_dram_data_addr_slt = get_part_addr("boot_para") + 0x100000;
1834 #endif
1835
1836 if ((g_boot_reason == BR_POWER_KEY) || (g_boot_reason == BR_USB)
1837 || mtk_wdt_is_pmic_full_reset() || (is_last_dramc_initialized() == 0)){
1838
1839 dramc_crit("[dramc] init SRAM region for DRAM exception detection\n");
1840 last_dramc_info_ptr->last_fatal_err_flag = 0x0;
1841 last_dramc_info_ptr->storage_api_err_flag = 0x0;
1842 dram_fatal_init_stberr();
1843 } else {
1844 last_dramc_info_ptr->last_fatal_err_flag = last_dramc_info_ptr->fatal_err_flag;
1845 last_dramc_info_ptr->storage_api_err_flag = 0x0;
1846 dram_fatal_backup_stberr();
1847 dram_fatal_init_stberr();
1848 }
1849
1850 last_dramc_info_ptr->fatal_err_flag = 1 << OFFSET_DRAM_FATAL_ERR;
1851 dsb();
1852 }
1853
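
/*
 * dram_fatal_exception_detection_end - clear the fatal-error flag once DRAM
 * initialization has completed without hitting an exception path.
 */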
void dram_fatal_exception_detection_end(void)
{
	last_dramc_info_ptr->fatal_err_flag = 0x0;
	dsb();
}

unsigned int check_dram_fatal_exception(void)
{
	dramc_crit("[dramc] DRAM_FATAL_ERR_FLAG = 0x%x\n", last_dramc_info_ptr->fatal_err_flag);

	return ((last_dramc_info_ptr->fatal_err_flag & ~((1 << OFFSET_DRAM_FATAL_ERR) | DDR_RSV_MODE_ERR_MASK)) != 0x0) ? 1 : 0;
}

unsigned int check_last_dram_fatal_exception(void)
{
	dramc_crit("[dramc] LAST_DRAM_FATAL_ERR_FLAG = 0x%x\n", last_dramc_info_ptr->last_fatal_err_flag);

	return ((last_dramc_info_ptr->last_fatal_err_flag & ~(DDR_RSV_MODE_ERR_MASK)) != 0x0) ? 1 : 0;
}
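
/*
 * dram_fatal_set_ta2_err - record a TA2 error code for one channel inside
 * fatal_err_flag. The field for channel 'chn' starts at bit
 * OFFSET_DRAM_TA2_ERR + 2 * chn and the code is masked to 3 bits before
 * being written.
 */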
void dram_fatal_set_ta2_err(unsigned int chn, unsigned int err_code)
{
	unsigned int shift = OFFSET_DRAM_TA2_ERR + 2 * chn, ret;

	if (chn > 3)
		return;

	ret = last_dramc_info_ptr->fatal_err_flag & ~(0x7 << shift);
	last_dramc_info_ptr->fatal_err_flag = ret | ((err_code & 0x7) << shift);
	dsb();
}

void dram_fatal_set_gating_err(unsigned int chn, unsigned int err_code)
{
	unsigned int shift = OFFSET_DRAM_GATING_ERR + 4 * chn, ret;

	if (chn > 3)
		return;

	ret = last_dramc_info_ptr->fatal_err_flag & ~(0xf << shift);
	last_dramc_info_ptr->fatal_err_flag = ret | ((err_code & 0xf) << shift);
	dsb();
}
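
/*
 * gating_err[chn][rk] holds the per-channel, per-rank gating error code for
 * the current boot; last_gating_err keeps the previous boot's copy.
 * dram_fatal_init_stberr() clears the current records,
 * dram_fatal_backup_stberr() copies them into last_gating_err, and
 * dram_fatal_set_stberr() stores a single code. Each writer ends with dsb()
 * so the update reaches the debug SRAM before execution continues.
 */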
void dram_fatal_init_stberr(void)
{
	last_dramc_info_ptr->gating_err[0][0] = 0x0;
	last_dramc_info_ptr->gating_err[0][1] = 0x0;
	last_dramc_info_ptr->gating_err[1][0] = 0x0;
	last_dramc_info_ptr->gating_err[1][1] = 0x0;
	last_dramc_info_ptr->gating_err[2][0] = 0x0;
	last_dramc_info_ptr->gating_err[2][1] = 0x0;
	last_dramc_info_ptr->gating_err[3][0] = 0x0;
	last_dramc_info_ptr->gating_err[3][1] = 0x0;

	dsb();
}

void dram_fatal_backup_stberr(void)
{
	last_dramc_info_ptr->last_gating_err[0][0] = last_dramc_info_ptr->gating_err[0][0];
	last_dramc_info_ptr->last_gating_err[0][1] = last_dramc_info_ptr->gating_err[0][1];
	last_dramc_info_ptr->last_gating_err[1][0] = last_dramc_info_ptr->gating_err[1][0];
	last_dramc_info_ptr->last_gating_err[1][1] = last_dramc_info_ptr->gating_err[1][1];
	last_dramc_info_ptr->last_gating_err[2][0] = last_dramc_info_ptr->gating_err[2][0];
	last_dramc_info_ptr->last_gating_err[2][1] = last_dramc_info_ptr->gating_err[2][1];
	last_dramc_info_ptr->last_gating_err[3][0] = last_dramc_info_ptr->gating_err[3][0];
	last_dramc_info_ptr->last_gating_err[3][1] = last_dramc_info_ptr->gating_err[3][1];

	dsb();
}

void dram_fatal_set_stberr(unsigned int chn, unsigned int rk, unsigned int err_code)
{
	if ((chn > 3) || (rk > 1))
		return;

	last_dramc_info_ptr->gating_err[chn][rk] = err_code;

	dsb();
}
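
/*
 * dram_fatal_set_err - generic helper: replace the (mask << offset) field of
 * fatal_err_flag with err_code & mask. The specific setters above follow the
 * same read-modify-write pattern.
 */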
void dram_fatal_set_err(unsigned int err_code, unsigned int mask, unsigned int offset)
{
	unsigned int ret;

	ret = last_dramc_info_ptr->fatal_err_flag & ~(mask << offset);
	last_dramc_info_ptr->fatal_err_flag = ret | ((err_code & mask) << offset);
	dsb();
}

#endif

#if (FOR_DV_SIMULATION_USED==0)
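/*
 * doe_get_config - resolve a design-of-experiments override for 'feature'.
 * With ENABLE_DOE the value comes from the boot environment via
 * dconfig_getenv(); otherwise a fixed voltage level is returned according to
 * the NVCORE_NVDRAM / LVCORE_LVDRAM / HVCORE_HVDRAM build flags, defaulting
 * to 0 when none of them is set.
 */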
int doe_get_config(const char *feature)
{
#if defined(ENABLE_DOE)
	char *doe_feature = dconfig_getenv(feature);
	int doe_result;

	/* assumption: dconfig_getenv() may return NULL when the entry is absent */
	if (doe_feature == NULL)
		return 0;

	doe_result = atoi(doe_feature);
	dramc_crit("DOE force setting %s=%d\n", feature, doe_result);
	return doe_result;
#elif defined(NVCORE_NVDRAM)
	return LEVEL_NV;
#elif defined(LVCORE_LVDRAM)
	return LEVEL_LV;
#elif defined(HVCORE_HVDRAM)
	return LEVEL_HV;
#else
	return 0;
#endif
}
#endif

#if (CFG_DRAM_LOG_TO_STORAGE)
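/*
 * log_to_storage - mirror log characters into the boot device. On the first
 * call after log_start is set, the target area (1 MiB past the "boot_para"
 * partition) is cleared; afterwards characters accumulate in logbuf and are
 * flushed to storage in 1024-byte blocks.
 */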
void log_to_storage(const char c)
{
	int ret, clr_count;
	blkdev_t *bootdev = NULL;
	static u8 logen = 0;

	bootdev = blkdev_get(CFG_BOOT_DEV);

	if (log_start && (!logen)) {
		logen = 1;
		logcount = 0;
		part_dram_data_addr_uart = get_part_addr("boot_para") + 0x100000;
		memset(&logbuf, 0, sizeof(logbuf));
		for (clr_count = 0; clr_count < 3072; clr_count++)
			ret = blkdev_write(bootdev, (part_dram_data_addr_uart + (1024 * clr_count)), 1024, (u8 *)&logbuf, storage_get_part_id(STORAGE_PHYS_PART_USER));
	}

	if (log_start) {
		// if (((((char) c >> 4) & 0x7) > 1) & ((((char) c >> 4) & 0x7) < 7))
		//	logbuf[logcount] = ((char) c & 0xF0) | (((char) c >> 2) & 0x03) | (((char) c << 2) & 0x0C);
		// else
		logbuf[logcount] = (char) c;
		logcount = logcount + 1;

		if (logcount == 1024) {
			logcount = 0;
			ret = blkdev_write(bootdev, part_dram_data_addr_uart, 1024, (u8 *)&logbuf, storage_get_part_id(STORAGE_PHYS_PART_USER));
			part_dram_data_addr_uart = part_dram_data_addr_uart + 1024;
		}
	}
}
#endif
#if SUPPORT_SAVE_TIME_FOR_CALIBRATION

#if !__ETT__
//[FOR_CHROMEOS]
//#include <blkdev.h>
//#include <partition.h>
//#include <pl_version.h>
#else
#include "ett_common.h"
#include "emi.h"
#endif

u32 g_dram_storage_api_err_code;
#if 0 //[FOR_CHROMEOS]
/* table-less CRC-16/CCITT (poly 0x1021, init 0xFFFF) protecting stored calibration data */
static u16 crc16(const u8 *data, u32 length)
{
	u8 x;
	u16 crc = 0xFFFF;

	while (length--) {
		x = crc >> 8 ^ *data++;
		x ^= x >> 4;
		crc = (crc << 8) ^ ((u16)x << 12) ^ ((u16)x << 5) ^ ((u16)x);
	}
	return crc;
}

static void assign_checksum_for_dram_data(DRAM_CALIBRATION_SHU_DATA_T *shu_data)
{
	shu_data->checksum = 0;
	shu_data->checksum = crc16((u8 *)shu_data, sizeof(*shu_data));
}

static int check_checksum_for_dram_data(DRAM_CALIBRATION_SHU_DATA_T *shu_data)
{
	u16 checksum_in_storage = shu_data->checksum;

	assign_checksum_for_dram_data(shu_data);

	return (shu_data->checksum == checksum_in_storage) ? 1 : 0;
}

#if !__ETT__
static void assign_checksum_for_mdl_data(DRAM_CALIBRATION_MRR_DATA_T *mrr_info)
{
	mrr_info->checksum = 0;
	mrr_info->checksum = crc16((u8 *)mrr_info, sizeof(*mrr_info));
}

static int check_checksum_for_mdl_data(DRAM_CALIBRATION_MRR_DATA_T *mrr_info)
{
	u16 checksum_in_storage = mrr_info->checksum;

	assign_checksum_for_mdl_data(mrr_info);

	return (mrr_info->checksum == checksum_in_storage) ? 1 : 0;
}
#endif
#endif
static void fastk_data_dump(struct sdram_params *params, u8 shu)
{
#if 0
	print("[Full_K]Fastk data dump \n");
	print("shuffle %d(For verify: cbt_final_vref CHA:%u, CHB: %u)\n", shu, params->cbt_final_vref[CHANNEL_A][RANK_0], params->cbt_final_vref[CHANNEL_B][RANK_0]);
	print("shuffle %d(For verify: WL B0:%u, B1: %u)\n", shu, params->wr_level[CHANNEL_A][RANK_0][0], params->wr_level[CHANNEL_B][RANK_0][0]);
	print("shuffle %d(For verify: tx_window_vref CHA:%u, CHB: %u)\n", shu, params->tx_window_vref[CHANNEL_A][RANK_0], params->tx_window_vref[CHANNEL_B][RANK_0]);
	print("shuffle %d(For verify: rx_datlat CHA:%u, CHB: %u)\n", shu, params->rx_datlat[CHANNEL_A][RANK_0], params->rx_datlat[CHANNEL_B][RANK_0]);
	print("shuffle %d(For verify: rx_datlat CHA:%u, CHB: %u)\n", shu, params->rx_datlat[CHANNEL_A][RANK_0], params->rx_datlat[CHANNEL_B][RANK_0]);

	print("shuffle %d(For verify: cbt_final_vref CHC:%u, CHD: %u)\n", shu, params->cbt_final_vref[CHANNEL_C][RANK_0], params->cbt_final_vref[CHANNEL_D][RANK_0]);
	print("shuffle %d(For verify: WL CHC:%u, CHD: %u)\n", shu, params->wr_level[CHANNEL_C][RANK_0][0], params->wr_level[CHANNEL_D][RANK_0][0]);
	print("shuffle %d(For verify: tx_window_vref CHC:%u, CHD: %u)\n", shu, params->tx_window_vref[CHANNEL_C][RANK_0], params->tx_window_vref[CHANNEL_D][RANK_0]);
	print("shuffle %d(For verify: rx_datlat CHC:%u, CHD: %u)\n", shu, params->rx_datlat[CHANNEL_C][RANK_0], params->rx_datlat[CHANNEL_D][RANK_0]);
	print("shuffle %d(For verify: rx_datlat CHC:%u, CHD: %u)\n", shu, params->rx_datlat[CHANNEL_C][RANK_0], params->rx_datlat[CHANNEL_D][RANK_0]);
	print("\n");
#endif
}
static int read_offline_dram_mdl_data(DRAM_INFO_BY_MRR_T *DramInfo)
{
	return -1;
}

static int write_offline_dram_mdl_data(DRAM_INFO_BY_MRR_T *DramInfo)
{
	return -1;
}
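
/*
 * read_offline_dram_calibration_data - copy the per-shuffle sdram_params
 * stored in dramc_params into offLine_SaveData so the save-time-for-
 * calibration (fast-K) flow can reuse previous results. The write and clean
 * counterparts below are stubs in this configuration.
 */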
int read_offline_dram_calibration_data(DRAM_DFS_SRAM_SHU_T shuffle, SAVE_TIME_FOR_CALIBRATION_T *offLine_SaveData)
{
	struct sdram_params *params;

	if (!dramc_params)
		return -1;

	params = &dramc_params->dramc_datas.freq_params[shuffle];

	dramc_info("read calibration data from shuffle %d(For verify: WL B0:%u, B1: %u)\n",
		shuffle, params->wr_level[CHANNEL_A][RANK_0][0], params->wr_level[CHANNEL_B][RANK_0][0]);

	memcpy(offLine_SaveData, params, sizeof(*offLine_SaveData));
	fastk_data_dump(params, shuffle);

	return 0;
}

int write_offline_dram_calibration_data(DRAM_DFS_SRAM_SHU_T shuffle, SAVE_TIME_FOR_CALIBRATION_T *offLine_SaveData)
{
	return 0;
}

int clean_dram_calibration_data(void)
{
	return 0;
}

#else
#if 0
DRAM_CALIBRATION_DATA_T dram_data;

static int read_offline_dram_mdl_data(DRAM_INFO_BY_MRR_T *DramInfo)
{
	return -1;
}

static int write_offline_dram_mdl_data(DRAM_INFO_BY_MRR_T *DramInfo)
{
	return -1;
}

int read_offline_dram_calibration_data(DRAM_DFS_SRAM_SHU_T shuffle, SAVE_TIME_FOR_CALIBRATION_T *offLine_SaveData)
{
	return 0;
}

int write_offline_dram_calibration_data(DRAM_DFS_SRAM_SHU_T shuffle, SAVE_TIME_FOR_CALIBRATION_T *offLine_SaveData)
{
	return 0;
}

int clean_dram_calibration_data(void)
{
	return 0;
}
#endif
#endif

#ifdef LAST_DRAMC
void set_err_code_for_storage_api(void)
{
	last_dramc_info_ptr->storage_api_err_flag = g_dram_storage_api_err_code;
	dsb();
}
#endif

#if defined(SLT) && (!__ETT__)
#include <storage_api.h>
#include <emi.h>

int clean_slt_data(void)
{
	DRAM_SLT_DATA_T data;

	data.header.stage_status = -1;
	data.header.pl_version = PL_VERSION;
	return write_slt_data(&data);
}
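
/*
 * read_slt_data - load the SLT (system-level test) record from the boot
 * device at part_dram_data_addr_slt. If the stored pl_version does not match
 * the current PL_VERSION, the record is reset via clean_slt_data() and read
 * back once more.
 */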
int read_slt_data(DRAM_SLT_DATA_T *data)
{
	int i, ret;
	blkdev_t *bootdev = NULL;

	if (data == NULL) {
		dramc_crit("[dramc_slt] SLT data == NULL, skip\n");
		return SLT_ERR_NO_DATA;
	}

	bootdev = blkdev_get(CFG_BOOT_DEV);
	if (bootdev == NULL) {
		dramc_crit("[dramc_slt] can't find boot device(%d)\n", CFG_BOOT_DEV);
		return SLT_ERR_NO_DEV;
	}

	if (!part_dram_data_addr_slt)
		return SLT_ERR_NO_ADDR;

	ret = blkdev_read(bootdev, part_dram_data_addr_slt, sizeof(DRAM_SLT_DATA_T), (u8 *)data, storage_get_part_id(STORAGE_PHYS_PART_USER));
	if (ret != 0)
		return SLT_ERR_READ_FAIL;

	if (data->header.pl_version != PL_VERSION) {
		dramc_crit("[dramc_slt] PL_VERSION mismatch\n");
		clean_slt_data();
		blkdev_read(bootdev, part_dram_data_addr_slt, sizeof(DRAM_SLT_DATA_T), (u8 *)data, storage_get_part_id(STORAGE_PHYS_PART_USER));
	}

	return 0;
}

int write_slt_data(DRAM_SLT_DATA_T *data)
{
	int ret;
	blkdev_t *bootdev = NULL;

	if (data == NULL) {
		dramc_crit("[dramc_slt] data == NULL, skip\n");
		return SLT_ERR_NO_DATA;
	}

	bootdev = blkdev_get(CFG_BOOT_DEV);
	if (bootdev == NULL) {
		dramc_crit("[dramc_slt] can't find boot device(%d)\n", CFG_BOOT_DEV);
		return SLT_ERR_NO_DEV;
	}

	if (!part_dram_data_addr_slt)
		return SLT_ERR_NO_ADDR;

	data->header.pl_version = PL_VERSION;

	ret = blkdev_write(bootdev, part_dram_data_addr_slt, sizeof(DRAM_SLT_DATA_T), (u8 *)data, storage_get_part_id(STORAGE_PHYS_PART_USER));
	if (ret != 0) {
		dramc_crit("[dramc_slt] blkdev_write failed\n");
		return SLT_ERR_WRITE_FAIL;
	}

	return 0;
}
#endif

#if __FLASH_TOOL_DA__
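/*
 * get_mr8_by_mrr - issue a mode-register read of MR8 on the given channel and
 * rank through the DRAMC software-command interface, polling SPCMDRESP until
 * the MRR response arrives, then returning the low byte of the MRR status.
 */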
unsigned int get_mr8_by_mrr(U8 channel, U8 rank)
{
	DRAMC_CTX_T *p = psCurrDramCtx;
	unsigned int mr8_value;

	p->channel = channel;

	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0), rank, SWCMD_CTRL0_MRRRK);
	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0), 8, SWCMD_CTRL0_MRSMA);
	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 1, SWCMD_EN_MRREN);
	while (u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SPCMDRESP), SPCMDRESP_MRR_RESPONSE) == 0)
		mcDELAY_US(1);
	mr8_value = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_MRR_STATUS), MRR_STATUS_MRR_REG);
	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 0, SWCMD_EN_MRREN);
	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0), 0, SWCMD_CTRL0_MRRRK);

	return (mr8_value & 0xff);
}
#endif

#if DRAM_AUXADC_CONFIG
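/*
 * get_ch_num_by_auxadc - sample AUXADC channel 5 and map the measured value
 * (thresholds 700 and 1200, presumably mV from a board-ID divider) to the
 * populated channel count and pinmux type: below 700 -> 4 channels / DSC,
 * 700..1199 -> 2 channels / eMCP, 1200 and above -> 2 channels / DSC. The
 * result is cached in channel_num_auxadc / dram_type_auxadc and returned.
 */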
static unsigned int get_ch_num_by_auxadc(void)
{
	unsigned int ret = 0, voltage = 0;

	ret = iio_read_channel_processed(5, &voltage);
	if (ret == 0) {
		if (voltage < 700) {
			channel_num_auxadc = CHANNEL_FOURTH;
			dram_type_auxadc = PINMUX_DSC;
		} else if (voltage >= 700 && voltage < 1200) {
			channel_num_auxadc = CHANNEL_DUAL;
			dram_type_auxadc = PINMUX_EMCP;
		} else {
			channel_num_auxadc = CHANNEL_DUAL;
			dram_type_auxadc = PINMUX_DSC;
		}
		dramc_crit("Channel num from auxadc: %d\n", channel_num_auxadc);
		dramc_crit("dram_type_auxadc from auxadc: %d\n", dram_type_auxadc);
		dramc_crit("voltage from auxadc: %d\n", voltage);
	} else {
		dramc_crit("Error! Read AUXADC value fail\n");
	}

	/* report the detected (or default) channel count */
	return channel_num_auxadc;
}
#endif